| { | |
| "best_metric": null, | |
| "best_model_checkpoint": null, | |
| "epoch": 1.340744709349049, | |
| "eval_steps": 500, | |
| "global_step": 14932, | |
| "is_hyper_param_search": false, | |
| "is_local_process_zero": true, | |
| "is_world_process_zero": true, | |
| "log_history": [ | |
| { | |
| "epoch": 0.000669702652022502, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.693440428380188e-07, | |
| "loss": 2.3902, | |
| "step": 10 | |
| }, | |
| { | |
| "epoch": 0.001339405304045004, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.3386880856760376e-06, | |
| "loss": 2.4415, | |
| "step": 20 | |
| }, | |
| { | |
| "epoch": 0.002009107956067506, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.0080321285140564e-06, | |
| "loss": 2.3648, | |
| "step": 30 | |
| }, | |
| { | |
| "epoch": 0.002678810608090008, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.6773761713520752e-06, | |
| "loss": 2.2811, | |
| "step": 40 | |
| }, | |
| { | |
| "epoch": 0.00334851326011251, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.346720214190094e-06, | |
| "loss": 2.4023, | |
| "step": 50 | |
| }, | |
| { | |
| "epoch": 0.004018215912135012, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.016064257028113e-06, | |
| "loss": 2.3196, | |
| "step": 60 | |
| }, | |
| { | |
| "epoch": 0.004687918564157514, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.685408299866132e-06, | |
| "loss": 2.2711, | |
| "step": 70 | |
| }, | |
| { | |
| "epoch": 0.005357621216180016, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.3547523427041504e-06, | |
| "loss": 2.3224, | |
| "step": 80 | |
| }, | |
| { | |
| "epoch": 0.006027323868202518, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.024096385542169e-06, | |
| "loss": 2.287, | |
| "step": 90 | |
| }, | |
| { | |
| "epoch": 0.00669702652022502, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.693440428380188e-06, | |
| "loss": 2.2395, | |
| "step": 100 | |
| }, | |
| { | |
| "epoch": 0.0073667291722475225, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.362784471218207e-06, | |
| "loss": 2.3105, | |
| "step": 110 | |
| }, | |
| { | |
| "epoch": 0.008036431824270024, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.032128514056226e-06, | |
| "loss": 2.4276, | |
| "step": 120 | |
| }, | |
| { | |
| "epoch": 0.008706134476292525, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.701472556894244e-06, | |
| "loss": 2.3688, | |
| "step": 130 | |
| }, | |
| { | |
| "epoch": 0.009375837128315028, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.370816599732263e-06, | |
| "loss": 2.3826, | |
| "step": 140 | |
| }, | |
| { | |
| "epoch": 0.01004553978033753, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.0040160642570281e-05, | |
| "loss": 2.383, | |
| "step": 150 | |
| }, | |
| { | |
| "epoch": 0.010715242432360031, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.0709504685408301e-05, | |
| "loss": 2.3427, | |
| "step": 160 | |
| }, | |
| { | |
| "epoch": 0.011384945084382534, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.1378848728246319e-05, | |
| "loss": 2.2256, | |
| "step": 170 | |
| }, | |
| { | |
| "epoch": 0.012054647736405036, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.2048192771084338e-05, | |
| "loss": 2.3393, | |
| "step": 180 | |
| }, | |
| { | |
| "epoch": 0.012724350388427539, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.2717536813922356e-05, | |
| "loss": 2.3954, | |
| "step": 190 | |
| }, | |
| { | |
| "epoch": 0.01339405304045004, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.3386880856760376e-05, | |
| "loss": 2.2947, | |
| "step": 200 | |
| }, | |
| { | |
| "epoch": 0.014063755692472542, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.4056224899598394e-05, | |
| "loss": 2.2528, | |
| "step": 210 | |
| }, | |
| { | |
| "epoch": 0.014733458344495045, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.4725568942436414e-05, | |
| "loss": 2.2722, | |
| "step": 220 | |
| }, | |
| { | |
| "epoch": 0.015403160996517546, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.5394912985274433e-05, | |
| "loss": 2.3249, | |
| "step": 230 | |
| }, | |
| { | |
| "epoch": 0.016072863648540048, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.606425702811245e-05, | |
| "loss": 2.3077, | |
| "step": 240 | |
| }, | |
| { | |
| "epoch": 0.01674256630056255, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.673360107095047e-05, | |
| "loss": 2.3056, | |
| "step": 250 | |
| }, | |
| { | |
| "epoch": 0.01741226895258505, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.7402945113788487e-05, | |
| "loss": 2.3655, | |
| "step": 260 | |
| }, | |
| { | |
| "epoch": 0.018081971604607554, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.8072289156626505e-05, | |
| "loss": 2.2622, | |
| "step": 270 | |
| }, | |
| { | |
| "epoch": 0.018751674256630057, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.8741633199464527e-05, | |
| "loss": 2.2843, | |
| "step": 280 | |
| }, | |
| { | |
| "epoch": 0.019421376908652557, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.9410977242302544e-05, | |
| "loss": 2.1858, | |
| "step": 290 | |
| }, | |
| { | |
| "epoch": 0.02009107956067506, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.0080321285140562e-05, | |
| "loss": 2.3016, | |
| "step": 300 | |
| }, | |
| { | |
| "epoch": 0.020760782212697563, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.074966532797858e-05, | |
| "loss": 2.2798, | |
| "step": 310 | |
| }, | |
| { | |
| "epoch": 0.021430484864720063, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.1419009370816602e-05, | |
| "loss": 2.2639, | |
| "step": 320 | |
| }, | |
| { | |
| "epoch": 0.022100187516742566, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.208835341365462e-05, | |
| "loss": 2.2364, | |
| "step": 330 | |
| }, | |
| { | |
| "epoch": 0.02276989016876507, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.2757697456492638e-05, | |
| "loss": 2.2894, | |
| "step": 340 | |
| }, | |
| { | |
| "epoch": 0.023439592820787572, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.3427041499330656e-05, | |
| "loss": 2.3324, | |
| "step": 350 | |
| }, | |
| { | |
| "epoch": 0.02410929547281007, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.4096385542168677e-05, | |
| "loss": 2.3137, | |
| "step": 360 | |
| }, | |
| { | |
| "epoch": 0.024778998124832575, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.4765729585006695e-05, | |
| "loss": 2.2814, | |
| "step": 370 | |
| }, | |
| { | |
| "epoch": 0.025448700776855078, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.5435073627844713e-05, | |
| "loss": 2.3252, | |
| "step": 380 | |
| }, | |
| { | |
| "epoch": 0.026118403428877578, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.6104417670682734e-05, | |
| "loss": 2.3306, | |
| "step": 390 | |
| }, | |
| { | |
| "epoch": 0.02678810608090008, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.6773761713520752e-05, | |
| "loss": 2.3344, | |
| "step": 400 | |
| }, | |
| { | |
| "epoch": 0.027457808732922584, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.7443105756358774e-05, | |
| "loss": 2.2686, | |
| "step": 410 | |
| }, | |
| { | |
| "epoch": 0.028127511384945084, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.8112449799196788e-05, | |
| "loss": 2.2904, | |
| "step": 420 | |
| }, | |
| { | |
| "epoch": 0.028797214036967587, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.878179384203481e-05, | |
| "loss": 2.3387, | |
| "step": 430 | |
| }, | |
| { | |
| "epoch": 0.02946691668899009, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.9451137884872827e-05, | |
| "loss": 2.2865, | |
| "step": 440 | |
| }, | |
| { | |
| "epoch": 0.03013661934101259, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.012048192771085e-05, | |
| "loss": 2.2472, | |
| "step": 450 | |
| }, | |
| { | |
| "epoch": 0.030806321993035093, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.078982597054887e-05, | |
| "loss": 2.2773, | |
| "step": 460 | |
| }, | |
| { | |
| "epoch": 0.031476024645057596, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.1459170013386885e-05, | |
| "loss": 2.3224, | |
| "step": 470 | |
| }, | |
| { | |
| "epoch": 0.032145727297080096, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.21285140562249e-05, | |
| "loss": 2.1539, | |
| "step": 480 | |
| }, | |
| { | |
| "epoch": 0.032815429949102595, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.279785809906292e-05, | |
| "loss": 2.3025, | |
| "step": 490 | |
| }, | |
| { | |
| "epoch": 0.0334851326011251, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.346720214190094e-05, | |
| "loss": 2.2328, | |
| "step": 500 | |
| }, | |
| { | |
| "epoch": 0.0341548352531476, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.413654618473896e-05, | |
| "loss": 2.2389, | |
| "step": 510 | |
| }, | |
| { | |
| "epoch": 0.0348245379051701, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.4805890227576974e-05, | |
| "loss": 2.2465, | |
| "step": 520 | |
| }, | |
| { | |
| "epoch": 0.03549424055719261, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.5475234270415e-05, | |
| "loss": 2.2608, | |
| "step": 530 | |
| }, | |
| { | |
| "epoch": 0.03616394320921511, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.614457831325301e-05, | |
| "loss": 2.2254, | |
| "step": 540 | |
| }, | |
| { | |
| "epoch": 0.03683364586123761, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.6813922356091035e-05, | |
| "loss": 2.2041, | |
| "step": 550 | |
| }, | |
| { | |
| "epoch": 0.037503348513260114, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.748326639892905e-05, | |
| "loss": 2.3156, | |
| "step": 560 | |
| }, | |
| { | |
| "epoch": 0.038173051165282613, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.815261044176707e-05, | |
| "loss": 2.3493, | |
| "step": 570 | |
| }, | |
| { | |
| "epoch": 0.03884275381730511, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.882195448460509e-05, | |
| "loss": 2.2966, | |
| "step": 580 | |
| }, | |
| { | |
| "epoch": 0.03951245646932762, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.949129852744311e-05, | |
| "loss": 2.2741, | |
| "step": 590 | |
| }, | |
| { | |
| "epoch": 0.04018215912135012, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.0160642570281125e-05, | |
| "loss": 2.198, | |
| "step": 600 | |
| }, | |
| { | |
| "epoch": 0.04085186177337262, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.082998661311915e-05, | |
| "loss": 2.2987, | |
| "step": 610 | |
| }, | |
| { | |
| "epoch": 0.041521564425395126, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.149933065595716e-05, | |
| "loss": 2.3219, | |
| "step": 620 | |
| }, | |
| { | |
| "epoch": 0.042191267077417625, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.2168674698795186e-05, | |
| "loss": 2.2609, | |
| "step": 630 | |
| }, | |
| { | |
| "epoch": 0.042860969729440125, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.2838018741633203e-05, | |
| "loss": 2.1874, | |
| "step": 640 | |
| }, | |
| { | |
| "epoch": 0.04353067238146263, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.350736278447122e-05, | |
| "loss": 2.2077, | |
| "step": 650 | |
| }, | |
| { | |
| "epoch": 0.04420037503348513, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.417670682730924e-05, | |
| "loss": 2.187, | |
| "step": 660 | |
| }, | |
| { | |
| "epoch": 0.04487007768550764, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.484605087014726e-05, | |
| "loss": 2.2983, | |
| "step": 670 | |
| }, | |
| { | |
| "epoch": 0.04553978033753014, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.5515394912985275e-05, | |
| "loss": 2.3472, | |
| "step": 680 | |
| }, | |
| { | |
| "epoch": 0.04620948298955264, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.61847389558233e-05, | |
| "loss": 2.2707, | |
| "step": 690 | |
| }, | |
| { | |
| "epoch": 0.046879185641575144, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.685408299866131e-05, | |
| "loss": 2.3201, | |
| "step": 700 | |
| }, | |
| { | |
| "epoch": 0.047548888293597644, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.7523427041499336e-05, | |
| "loss": 2.2628, | |
| "step": 710 | |
| }, | |
| { | |
| "epoch": 0.04821859094562014, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.8192771084337354e-05, | |
| "loss": 2.3217, | |
| "step": 720 | |
| }, | |
| { | |
| "epoch": 0.04888829359764265, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.886211512717537e-05, | |
| "loss": 2.3157, | |
| "step": 730 | |
| }, | |
| { | |
| "epoch": 0.04955799624966515, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.953145917001339e-05, | |
| "loss": 2.2407, | |
| "step": 740 | |
| }, | |
| { | |
| "epoch": 0.05022769890168765, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.020080321285141e-05, | |
| "loss": 2.2607, | |
| "step": 750 | |
| }, | |
| { | |
| "epoch": 0.050897401553710156, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.0870147255689426e-05, | |
| "loss": 2.2192, | |
| "step": 760 | |
| }, | |
| { | |
| "epoch": 0.051567104205732656, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.1539491298527444e-05, | |
| "loss": 2.2501, | |
| "step": 770 | |
| }, | |
| { | |
| "epoch": 0.052236806857755155, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.220883534136547e-05, | |
| "loss": 2.2901, | |
| "step": 780 | |
| }, | |
| { | |
| "epoch": 0.05290650950977766, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.2878179384203486e-05, | |
| "loss": 2.2391, | |
| "step": 790 | |
| }, | |
| { | |
| "epoch": 0.05357621216180016, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.3547523427041504e-05, | |
| "loss": 2.2306, | |
| "step": 800 | |
| }, | |
| { | |
| "epoch": 0.05424591481382266, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.4216867469879516e-05, | |
| "loss": 2.322, | |
| "step": 810 | |
| }, | |
| { | |
| "epoch": 0.05491561746584517, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.488621151271755e-05, | |
| "loss": 2.2978, | |
| "step": 820 | |
| }, | |
| { | |
| "epoch": 0.05558532011786767, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.555555555555556e-05, | |
| "loss": 2.3016, | |
| "step": 830 | |
| }, | |
| { | |
| "epoch": 0.05625502276989017, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.6224899598393576e-05, | |
| "loss": 2.1525, | |
| "step": 840 | |
| }, | |
| { | |
| "epoch": 0.056924725421912674, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.6894243641231594e-05, | |
| "loss": 2.2663, | |
| "step": 850 | |
| }, | |
| { | |
| "epoch": 0.057594428073935174, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.756358768406962e-05, | |
| "loss": 2.2755, | |
| "step": 860 | |
| }, | |
| { | |
| "epoch": 0.05826413072595767, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.823293172690764e-05, | |
| "loss": 2.2842, | |
| "step": 870 | |
| }, | |
| { | |
| "epoch": 0.05893383337798018, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.8902275769745655e-05, | |
| "loss": 2.2635, | |
| "step": 880 | |
| }, | |
| { | |
| "epoch": 0.05960353603000268, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.9571619812583666e-05, | |
| "loss": 2.2726, | |
| "step": 890 | |
| }, | |
| { | |
| "epoch": 0.06027323868202518, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.02409638554217e-05, | |
| "loss": 2.2318, | |
| "step": 900 | |
| }, | |
| { | |
| "epoch": 0.060942941334047686, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.091030789825971e-05, | |
| "loss": 2.3523, | |
| "step": 910 | |
| }, | |
| { | |
| "epoch": 0.061612643986070185, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.157965194109773e-05, | |
| "loss": 2.317, | |
| "step": 920 | |
| }, | |
| { | |
| "epoch": 0.062282346638092685, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.224899598393574e-05, | |
| "loss": 2.1225, | |
| "step": 930 | |
| }, | |
| { | |
| "epoch": 0.06295204929011519, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.291834002677377e-05, | |
| "loss": 2.2357, | |
| "step": 940 | |
| }, | |
| { | |
| "epoch": 0.06362175194213769, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.358768406961179e-05, | |
| "loss": 2.2447, | |
| "step": 950 | |
| }, | |
| { | |
| "epoch": 0.06429145459416019, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.42570281124498e-05, | |
| "loss": 2.1638, | |
| "step": 960 | |
| }, | |
| { | |
| "epoch": 0.06496115724618269, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.492637215528782e-05, | |
| "loss": 2.1353, | |
| "step": 970 | |
| }, | |
| { | |
| "epoch": 0.06563085989820519, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.559571619812584e-05, | |
| "loss": 2.1564, | |
| "step": 980 | |
| }, | |
| { | |
| "epoch": 0.0663005625502277, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.626506024096386e-05, | |
| "loss": 2.1921, | |
| "step": 990 | |
| }, | |
| { | |
| "epoch": 0.0669702652022502, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.693440428380188e-05, | |
| "loss": 2.2064, | |
| "step": 1000 | |
| }, | |
| { | |
| "epoch": 0.0676399678542727, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.76037483266399e-05, | |
| "loss": 2.223, | |
| "step": 1010 | |
| }, | |
| { | |
| "epoch": 0.0683096705062952, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.827309236947793e-05, | |
| "loss": 2.3054, | |
| "step": 1020 | |
| }, | |
| { | |
| "epoch": 0.0689793731583177, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.894243641231593e-05, | |
| "loss": 2.2002, | |
| "step": 1030 | |
| }, | |
| { | |
| "epoch": 0.0696490758103402, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.961178045515395e-05, | |
| "loss": 2.249, | |
| "step": 1040 | |
| }, | |
| { | |
| "epoch": 0.07031877846236272, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.028112449799197e-05, | |
| "loss": 2.1752, | |
| "step": 1050 | |
| }, | |
| { | |
| "epoch": 0.07098848111438522, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.095046854083e-05, | |
| "loss": 2.2828, | |
| "step": 1060 | |
| }, | |
| { | |
| "epoch": 0.07165818376640772, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.161981258366802e-05, | |
| "loss": 2.2456, | |
| "step": 1070 | |
| }, | |
| { | |
| "epoch": 0.07232788641843022, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.228915662650602e-05, | |
| "loss": 2.3329, | |
| "step": 1080 | |
| }, | |
| { | |
| "epoch": 0.07299758907045271, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.295850066934404e-05, | |
| "loss": 2.2911, | |
| "step": 1090 | |
| }, | |
| { | |
| "epoch": 0.07366729172247521, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.362784471218207e-05, | |
| "loss": 2.3092, | |
| "step": 1100 | |
| }, | |
| { | |
| "epoch": 0.07433699437449773, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.429718875502009e-05, | |
| "loss": 2.3058, | |
| "step": 1110 | |
| }, | |
| { | |
| "epoch": 0.07500669702652023, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.49665327978581e-05, | |
| "loss": 2.3024, | |
| "step": 1120 | |
| }, | |
| { | |
| "epoch": 0.07567639967854273, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.563587684069612e-05, | |
| "loss": 2.3491, | |
| "step": 1130 | |
| }, | |
| { | |
| "epoch": 0.07634610233056523, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.630522088353414e-05, | |
| "loss": 2.3675, | |
| "step": 1140 | |
| }, | |
| { | |
| "epoch": 0.07701580498258773, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.697456492637216e-05, | |
| "loss": 2.3257, | |
| "step": 1150 | |
| }, | |
| { | |
| "epoch": 0.07768550763461023, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.764390896921018e-05, | |
| "loss": 2.3037, | |
| "step": 1160 | |
| }, | |
| { | |
| "epoch": 0.07835521028663274, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.83132530120482e-05, | |
| "loss": 2.2627, | |
| "step": 1170 | |
| }, | |
| { | |
| "epoch": 0.07902491293865524, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.898259705488621e-05, | |
| "loss": 2.3351, | |
| "step": 1180 | |
| }, | |
| { | |
| "epoch": 0.07969461559067774, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.965194109772423e-05, | |
| "loss": 2.2829, | |
| "step": 1190 | |
| }, | |
| { | |
| "epoch": 0.08036431824270024, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.032128514056225e-05, | |
| "loss": 2.2612, | |
| "step": 1200 | |
| }, | |
| { | |
| "epoch": 0.08103402089472274, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.099062918340027e-05, | |
| "loss": 2.2377, | |
| "step": 1210 | |
| }, | |
| { | |
| "epoch": 0.08170372354674524, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.16599732262383e-05, | |
| "loss": 2.2353, | |
| "step": 1220 | |
| }, | |
| { | |
| "epoch": 0.08237342619876775, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.232931726907632e-05, | |
| "loss": 2.2509, | |
| "step": 1230 | |
| }, | |
| { | |
| "epoch": 0.08304312885079025, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.299866131191432e-05, | |
| "loss": 2.2449, | |
| "step": 1240 | |
| }, | |
| { | |
| "epoch": 0.08371283150281275, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.366800535475234e-05, | |
| "loss": 2.2076, | |
| "step": 1250 | |
| }, | |
| { | |
| "epoch": 0.08438253415483525, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.433734939759037e-05, | |
| "loss": 2.2714, | |
| "step": 1260 | |
| }, | |
| { | |
| "epoch": 0.08505223680685775, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.500669344042839e-05, | |
| "loss": 2.2424, | |
| "step": 1270 | |
| }, | |
| { | |
| "epoch": 0.08572193945888025, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.567603748326641e-05, | |
| "loss": 2.2711, | |
| "step": 1280 | |
| }, | |
| { | |
| "epoch": 0.08639164211090276, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.634538152610442e-05, | |
| "loss": 2.2266, | |
| "step": 1290 | |
| }, | |
| { | |
| "epoch": 0.08706134476292526, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.701472556894244e-05, | |
| "loss": 2.2329, | |
| "step": 1300 | |
| }, | |
| { | |
| "epoch": 0.08773104741494776, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.768406961178046e-05, | |
| "loss": 2.2388, | |
| "step": 1310 | |
| }, | |
| { | |
| "epoch": 0.08840075006697026, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.835341365461848e-05, | |
| "loss": 2.3047, | |
| "step": 1320 | |
| }, | |
| { | |
| "epoch": 0.08907045271899276, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.90227576974565e-05, | |
| "loss": 2.2679, | |
| "step": 1330 | |
| }, | |
| { | |
| "epoch": 0.08974015537101528, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.969210174029451e-05, | |
| "loss": 2.3128, | |
| "step": 1340 | |
| }, | |
| { | |
| "epoch": 0.09040985802303778, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.036144578313253e-05, | |
| "loss": 2.2134, | |
| "step": 1350 | |
| }, | |
| { | |
| "epoch": 0.09107956067506028, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.103078982597055e-05, | |
| "loss": 2.2883, | |
| "step": 1360 | |
| }, | |
| { | |
| "epoch": 0.09174926332708278, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.170013386880857e-05, | |
| "loss": 2.2464, | |
| "step": 1370 | |
| }, | |
| { | |
| "epoch": 0.09241896597910527, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.23694779116466e-05, | |
| "loss": 2.2318, | |
| "step": 1380 | |
| }, | |
| { | |
| "epoch": 0.09308866863112777, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.303882195448462e-05, | |
| "loss": 2.229, | |
| "step": 1390 | |
| }, | |
| { | |
| "epoch": 0.09375837128315029, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.370816599732262e-05, | |
| "loss": 2.2054, | |
| "step": 1400 | |
| }, | |
| { | |
| "epoch": 0.09442807393517279, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.437751004016064e-05, | |
| "loss": 2.2667, | |
| "step": 1410 | |
| }, | |
| { | |
| "epoch": 0.09509777658719529, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.504685408299867e-05, | |
| "loss": 2.2674, | |
| "step": 1420 | |
| }, | |
| { | |
| "epoch": 0.09576747923921779, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.571619812583669e-05, | |
| "loss": 2.2865, | |
| "step": 1430 | |
| }, | |
| { | |
| "epoch": 0.09643718189124029, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.638554216867471e-05, | |
| "loss": 2.2337, | |
| "step": 1440 | |
| }, | |
| { | |
| "epoch": 0.09710688454326279, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.705488621151271e-05, | |
| "loss": 2.291, | |
| "step": 1450 | |
| }, | |
| { | |
| "epoch": 0.0977765871952853, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.772423025435074e-05, | |
| "loss": 2.3411, | |
| "step": 1460 | |
| }, | |
| { | |
| "epoch": 0.0984462898473078, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.839357429718876e-05, | |
| "loss": 2.1827, | |
| "step": 1470 | |
| }, | |
| { | |
| "epoch": 0.0991159924993303, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.906291834002678e-05, | |
| "loss": 2.1987, | |
| "step": 1480 | |
| }, | |
| { | |
| "epoch": 0.0997856951513528, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.97322623828648e-05, | |
| "loss": 2.2079, | |
| "step": 1490 | |
| }, | |
| { | |
| "epoch": 0.1004553978033753, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.999995081044314e-05, | |
| "loss": 2.2484, | |
| "step": 1500 | |
| }, | |
| { | |
| "epoch": 0.1011251004553978, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.999965020794615e-05, | |
| "loss": 2.3123, | |
| "step": 1510 | |
| }, | |
| { | |
| "epoch": 0.10179480310742031, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.99990763321247e-05, | |
| "loss": 2.2429, | |
| "step": 1520 | |
| }, | |
| { | |
| "epoch": 0.10246450575944281, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.999822918611533e-05, | |
| "loss": 2.2205, | |
| "step": 1530 | |
| }, | |
| { | |
| "epoch": 0.10313420841146531, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.999710877454811e-05, | |
| "loss": 2.2077, | |
| "step": 1540 | |
| }, | |
| { | |
| "epoch": 0.10380391106348781, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.999571510354664e-05, | |
| "loss": 2.3028, | |
| "step": 1550 | |
| }, | |
| { | |
| "epoch": 0.10447361371551031, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.999404818072808e-05, | |
| "loss": 2.252, | |
| "step": 1560 | |
| }, | |
| { | |
| "epoch": 0.10514331636753281, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.999210801520296e-05, | |
| "loss": 2.3754, | |
| "step": 1570 | |
| }, | |
| { | |
| "epoch": 0.10581301901955532, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.998989461757526e-05, | |
| "loss": 2.2761, | |
| "step": 1580 | |
| }, | |
| { | |
| "epoch": 0.10648272167157782, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.998740799994235e-05, | |
| "loss": 2.2168, | |
| "step": 1590 | |
| }, | |
| { | |
| "epoch": 0.10715242432360032, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.998464817589484e-05, | |
| "loss": 2.2639, | |
| "step": 1600 | |
| }, | |
| { | |
| "epoch": 0.10782212697562282, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.998161516051656e-05, | |
| "loss": 2.2687, | |
| "step": 1610 | |
| }, | |
| { | |
| "epoch": 0.10849182962764532, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.997830897038446e-05, | |
| "loss": 2.2239, | |
| "step": 1620 | |
| }, | |
| { | |
| "epoch": 0.10916153227966782, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.997472962356854e-05, | |
| "loss": 2.1802, | |
| "step": 1630 | |
| }, | |
| { | |
| "epoch": 0.10983123493169034, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.997087713963174e-05, | |
| "loss": 2.2154, | |
| "step": 1640 | |
| }, | |
| { | |
| "epoch": 0.11050093758371284, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.996675153962984e-05, | |
| "loss": 2.212, | |
| "step": 1650 | |
| }, | |
| { | |
| "epoch": 0.11117064023573534, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.996235284611131e-05, | |
| "loss": 2.251, | |
| "step": 1660 | |
| }, | |
| { | |
| "epoch": 0.11184034288775783, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.995768108311722e-05, | |
| "loss": 2.299, | |
| "step": 1670 | |
| }, | |
| { | |
| "epoch": 0.11251004553978033, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.995273627618116e-05, | |
| "loss": 2.2021, | |
| "step": 1680 | |
| }, | |
| { | |
| "epoch": 0.11317974819180283, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.994751845232894e-05, | |
| "loss": 2.244, | |
| "step": 1690 | |
| }, | |
| { | |
| "epoch": 0.11384945084382535, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.994202764007865e-05, | |
| "loss": 2.3063, | |
| "step": 1700 | |
| }, | |
| { | |
| "epoch": 0.11451915349584785, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.993626386944031e-05, | |
| "loss": 2.2809, | |
| "step": 1710 | |
| }, | |
| { | |
| "epoch": 0.11518885614787035, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.993022717191584e-05, | |
| "loss": 2.2767, | |
| "step": 1720 | |
| }, | |
| { | |
| "epoch": 0.11585855879989285, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.992391758049889e-05, | |
| "loss": 2.2354, | |
| "step": 1730 | |
| }, | |
| { | |
| "epoch": 0.11652826145191535, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.99173351296745e-05, | |
| "loss": 2.2889, | |
| "step": 1740 | |
| }, | |
| { | |
| "epoch": 0.11719796410393785, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.99104798554191e-05, | |
| "loss": 2.22, | |
| "step": 1750 | |
| }, | |
| { | |
| "epoch": 0.11786766675596036, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.990335179520023e-05, | |
| "loss": 2.2941, | |
| "step": 1760 | |
| }, | |
| { | |
| "epoch": 0.11853736940798286, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.989595098797635e-05, | |
| "loss": 2.2584, | |
| "step": 1770 | |
| }, | |
| { | |
| "epoch": 0.11920707206000536, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.988827747419659e-05, | |
| "loss": 2.2408, | |
| "step": 1780 | |
| }, | |
| { | |
| "epoch": 0.11987677471202786, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.988033129580059e-05, | |
| "loss": 2.1831, | |
| "step": 1790 | |
| }, | |
| { | |
| "epoch": 0.12054647736405036, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.98721124962182e-05, | |
| "loss": 2.2853, | |
| "step": 1800 | |
| }, | |
| { | |
| "epoch": 0.12121618001607286, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.986362112036935e-05, | |
| "loss": 2.2537, | |
| "step": 1810 | |
| }, | |
| { | |
| "epoch": 0.12188588266809537, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.985485721466366e-05, | |
| "loss": 2.2257, | |
| "step": 1820 | |
| }, | |
| { | |
| "epoch": 0.12255558532011787, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.984582082700029e-05, | |
| "loss": 2.3031, | |
| "step": 1830 | |
| }, | |
| { | |
| "epoch": 0.12322528797214037, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.98365120067677e-05, | |
| "loss": 2.2435, | |
| "step": 1840 | |
| }, | |
| { | |
| "epoch": 0.12389499062416287, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.982693080484327e-05, | |
| "loss": 2.1834, | |
| "step": 1850 | |
| }, | |
| { | |
| "epoch": 0.12456469327618537, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.981707727359308e-05, | |
| "loss": 2.2215, | |
| "step": 1860 | |
| }, | |
| { | |
| "epoch": 0.12523439592820787, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.98069514668717e-05, | |
| "loss": 2.3128, | |
| "step": 1870 | |
| }, | |
| { | |
| "epoch": 0.12590409858023038, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.979655344002172e-05, | |
| "loss": 2.2512, | |
| "step": 1880 | |
| }, | |
| { | |
| "epoch": 0.12657380123225287, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.978588324987365e-05, | |
| "loss": 2.2278, | |
| "step": 1890 | |
| }, | |
| { | |
| "epoch": 0.12724350388427538, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.977494095474546e-05, | |
| "loss": 2.2445, | |
| "step": 1900 | |
| }, | |
| { | |
| "epoch": 0.1279132065362979, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.97637266144423e-05, | |
| "loss": 2.2834, | |
| "step": 1910 | |
| }, | |
| { | |
| "epoch": 0.12858290918832038, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.975224029025619e-05, | |
| "loss": 2.2807, | |
| "step": 1920 | |
| }, | |
| { | |
| "epoch": 0.1292526118403429, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.974048204496572e-05, | |
| "loss": 2.3837, | |
| "step": 1930 | |
| }, | |
| { | |
| "epoch": 0.12992231449236538, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.97284519428356e-05, | |
| "loss": 2.2318, | |
| "step": 1940 | |
| }, | |
| { | |
| "epoch": 0.1305920171443879, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.971615004961645e-05, | |
| "loss": 2.1961, | |
| "step": 1950 | |
| }, | |
| { | |
| "epoch": 0.13126171979641038, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.970357643254429e-05, | |
| "loss": 2.301, | |
| "step": 1960 | |
| }, | |
| { | |
| "epoch": 0.1319314224484329, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.96907311603403e-05, | |
| "loss": 2.2454, | |
| "step": 1970 | |
| }, | |
| { | |
| "epoch": 0.1326011251004554, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.967761430321037e-05, | |
| "loss": 2.2505, | |
| "step": 1980 | |
| }, | |
| { | |
| "epoch": 0.1332708277524779, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.966422593284474e-05, | |
| "loss": 2.2103, | |
| "step": 1990 | |
| }, | |
| { | |
| "epoch": 0.1339405304045004, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.965056612241764e-05, | |
| "loss": 2.3467, | |
| "step": 2000 | |
| }, | |
| { | |
| "epoch": 0.1346102330565229, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.963663494658681e-05, | |
| "loss": 2.174, | |
| "step": 2010 | |
| }, | |
| { | |
| "epoch": 0.1352799357085454, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.962243248149314e-05, | |
| "loss": 2.3771, | |
| "step": 2020 | |
| }, | |
| { | |
| "epoch": 0.13594963836056792, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.960795880476029e-05, | |
| "loss": 2.3623, | |
| "step": 2030 | |
| }, | |
| { | |
| "epoch": 0.1366193410125904, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.95932139954942e-05, | |
| "loss": 2.27, | |
| "step": 2040 | |
| }, | |
| { | |
| "epoch": 0.13728904366461292, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.95781981342827e-05, | |
| "loss": 2.2534, | |
| "step": 2050 | |
| }, | |
| { | |
| "epoch": 0.1379587463166354, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.956291130319506e-05, | |
| "loss": 2.2726, | |
| "step": 2060 | |
| }, | |
| { | |
| "epoch": 0.13862844896865792, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.95473535857815e-05, | |
| "loss": 2.3065, | |
| "step": 2070 | |
| }, | |
| { | |
| "epoch": 0.1392981516206804, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.953152506707283e-05, | |
| "loss": 2.1472, | |
| "step": 2080 | |
| }, | |
| { | |
| "epoch": 0.13996785427270292, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.951542583357986e-05, | |
| "loss": 2.2893, | |
| "step": 2090 | |
| }, | |
| { | |
| "epoch": 0.14063755692472543, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.949905597329306e-05, | |
| "loss": 2.1606, | |
| "step": 2100 | |
| }, | |
| { | |
| "epoch": 0.14130725957674792, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.948241557568196e-05, | |
| "loss": 2.2155, | |
| "step": 2110 | |
| }, | |
| { | |
| "epoch": 0.14197696222877043, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.946550473169474e-05, | |
| "loss": 2.2544, | |
| "step": 2120 | |
| }, | |
| { | |
| "epoch": 0.14264666488079292, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.944832353375769e-05, | |
| "loss": 2.2661, | |
| "step": 2130 | |
| }, | |
| { | |
| "epoch": 0.14331636753281543, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.943087207577473e-05, | |
| "loss": 2.2873, | |
| "step": 2140 | |
| }, | |
| { | |
| "epoch": 0.14398607018483794, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.941315045312689e-05, | |
| "loss": 2.2044, | |
| "step": 2150 | |
| }, | |
| { | |
| "epoch": 0.14465577283686043, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.939515876267178e-05, | |
| "loss": 2.3315, | |
| "step": 2160 | |
| }, | |
| { | |
| "epoch": 0.14532547548888294, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.937689710274308e-05, | |
| "loss": 2.2413, | |
| "step": 2170 | |
| }, | |
| { | |
| "epoch": 0.14599517814090543, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.935836557314995e-05, | |
| "loss": 2.2494, | |
| "step": 2180 | |
| }, | |
| { | |
| "epoch": 0.14666488079292794, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.933956427517657e-05, | |
| "loss": 2.2623, | |
| "step": 2190 | |
| }, | |
| { | |
| "epoch": 0.14733458344495043, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.932049331158153e-05, | |
| "loss": 2.2775, | |
| "step": 2200 | |
| }, | |
| { | |
| "epoch": 0.14800428609697294, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.930115278659727e-05, | |
| "loss": 2.1811, | |
| "step": 2210 | |
| }, | |
| { | |
| "epoch": 0.14867398874899546, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.928154280592952e-05, | |
| "loss": 2.2731, | |
| "step": 2220 | |
| }, | |
| { | |
| "epoch": 0.14934369140101794, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.92616634767567e-05, | |
| "loss": 2.2588, | |
| "step": 2230 | |
| }, | |
| { | |
| "epoch": 0.15001339405304046, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.924151490772942e-05, | |
| "loss": 2.2073, | |
| "step": 2240 | |
| }, | |
| { | |
| "epoch": 0.15068309670506294, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.922109720896973e-05, | |
| "loss": 2.2965, | |
| "step": 2250 | |
| }, | |
| { | |
| "epoch": 0.15135279935708545, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.92004104920707e-05, | |
| "loss": 2.1766, | |
| "step": 2260 | |
| }, | |
| { | |
| "epoch": 0.15202250200910797, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.917945487009566e-05, | |
| "loss": 2.2915, | |
| "step": 2270 | |
| }, | |
| { | |
| "epoch": 0.15269220466113045, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.915823045757765e-05, | |
| "loss": 2.2717, | |
| "step": 2280 | |
| }, | |
| { | |
| "epoch": 0.15336190731315297, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.913673737051882e-05, | |
| "loss": 2.2301, | |
| "step": 2290 | |
| }, | |
| { | |
| "epoch": 0.15403160996517545, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.911497572638973e-05, | |
| "loss": 2.2289, | |
| "step": 2300 | |
| }, | |
| { | |
| "epoch": 0.15470131261719797, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.909294564412874e-05, | |
| "loss": 2.3655, | |
| "step": 2310 | |
| }, | |
| { | |
| "epoch": 0.15537101526922045, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.907064724414139e-05, | |
| "loss": 2.1503, | |
| "step": 2320 | |
| }, | |
| { | |
| "epoch": 0.15604071792124297, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.904808064829967e-05, | |
| "loss": 2.2254, | |
| "step": 2330 | |
| }, | |
| { | |
| "epoch": 0.15671042057326548, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.902524597994143e-05, | |
| "loss": 2.28, | |
| "step": 2340 | |
| }, | |
| { | |
| "epoch": 0.15738012322528797, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.900214336386964e-05, | |
| "loss": 2.18, | |
| "step": 2350 | |
| }, | |
| { | |
| "epoch": 0.15804982587731048, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.897877292635179e-05, | |
| "loss": 2.3032, | |
| "step": 2360 | |
| }, | |
| { | |
| "epoch": 0.15871952852933296, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.895513479511907e-05, | |
| "loss": 2.2472, | |
| "step": 2370 | |
| }, | |
| { | |
| "epoch": 0.15938923118135548, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.893122909936583e-05, | |
| "loss": 2.1331, | |
| "step": 2380 | |
| }, | |
| { | |
| "epoch": 0.160058933833378, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.890705596974875e-05, | |
| "loss": 2.1931, | |
| "step": 2390 | |
| }, | |
| { | |
| "epoch": 0.16072863648540048, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.88826155383862e-05, | |
| "loss": 2.2551, | |
| "step": 2400 | |
| }, | |
| { | |
| "epoch": 0.161398339137423, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.885790793885746e-05, | |
| "loss": 2.1717, | |
| "step": 2410 | |
| }, | |
| { | |
| "epoch": 0.16206804178944548, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.883293330620205e-05, | |
| "loss": 2.2582, | |
| "step": 2420 | |
| }, | |
| { | |
| "epoch": 0.162737744441468, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.880769177691892e-05, | |
| "loss": 2.2466, | |
| "step": 2430 | |
| }, | |
| { | |
| "epoch": 0.16340744709349048, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.878218348896577e-05, | |
| "loss": 2.2734, | |
| "step": 2440 | |
| }, | |
| { | |
| "epoch": 0.164077149745513, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.875640858175827e-05, | |
| "loss": 2.3381, | |
| "step": 2450 | |
| }, | |
| { | |
| "epoch": 0.1647468523975355, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.87303671961693e-05, | |
| "loss": 2.2529, | |
| "step": 2460 | |
| }, | |
| { | |
| "epoch": 0.165416555049558, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.870405947452819e-05, | |
| "loss": 2.2889, | |
| "step": 2470 | |
| }, | |
| { | |
| "epoch": 0.1660862577015805, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.86774855606199e-05, | |
| "loss": 2.1831, | |
| "step": 2480 | |
| }, | |
| { | |
| "epoch": 0.166755960353603, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.86506455996843e-05, | |
| "loss": 2.2241, | |
| "step": 2490 | |
| }, | |
| { | |
| "epoch": 0.1674256630056255, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.862353973841526e-05, | |
| "loss": 2.2539, | |
| "step": 2500 | |
| }, | |
| { | |
| "epoch": 0.16809536565764802, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.859616812496008e-05, | |
| "loss": 2.263, | |
| "step": 2510 | |
| }, | |
| { | |
| "epoch": 0.1687650683096705, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.856853090891843e-05, | |
| "loss": 2.2179, | |
| "step": 2520 | |
| }, | |
| { | |
| "epoch": 0.16943477096169302, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.854062824134159e-05, | |
| "loss": 2.2326, | |
| "step": 2530 | |
| }, | |
| { | |
| "epoch": 0.1701044736137155, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.851246027473173e-05, | |
| "loss": 2.3374, | |
| "step": 2540 | |
| }, | |
| { | |
| "epoch": 0.17077417626573801, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.848402716304106e-05, | |
| "loss": 2.2265, | |
| "step": 2550 | |
| }, | |
| { | |
| "epoch": 0.1714438789177605, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.845532906167083e-05, | |
| "loss": 2.1868, | |
| "step": 2560 | |
| }, | |
| { | |
| "epoch": 0.17211358156978301, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.842636612747069e-05, | |
| "loss": 2.2097, | |
| "step": 2570 | |
| }, | |
| { | |
| "epoch": 0.17278328422180553, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.839713851873766e-05, | |
| "loss": 2.2601, | |
| "step": 2580 | |
| }, | |
| { | |
| "epoch": 0.173452986873828, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.836764639521539e-05, | |
| "loss": 2.2559, | |
| "step": 2590 | |
| }, | |
| { | |
| "epoch": 0.17412268952585053, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.833788991809323e-05, | |
| "loss": 2.2896, | |
| "step": 2600 | |
| }, | |
| { | |
| "epoch": 0.174792392177873, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.830786925000533e-05, | |
| "loss": 2.2095, | |
| "step": 2610 | |
| }, | |
| { | |
| "epoch": 0.17546209482989553, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.827758455502978e-05, | |
| "loss": 2.206, | |
| "step": 2620 | |
| }, | |
| { | |
| "epoch": 0.17613179748191804, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.824703599868776e-05, | |
| "loss": 2.3201, | |
| "step": 2630 | |
| }, | |
| { | |
| "epoch": 0.17680150013394053, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.821622374794253e-05, | |
| "loss": 2.2804, | |
| "step": 2640 | |
| }, | |
| { | |
| "epoch": 0.17747120278596304, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.81851479711986e-05, | |
| "loss": 2.1435, | |
| "step": 2650 | |
| }, | |
| { | |
| "epoch": 0.17814090543798553, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.815380883830076e-05, | |
| "loss": 2.1878, | |
| "step": 2660 | |
| }, | |
| { | |
| "epoch": 0.17881060809000804, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.812220652053318e-05, | |
| "loss": 2.3302, | |
| "step": 2670 | |
| }, | |
| { | |
| "epoch": 0.17948031074203055, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.80903411906185e-05, | |
| "loss": 2.2151, | |
| "step": 2680 | |
| }, | |
| { | |
| "epoch": 0.18015001339405304, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.805821302271685e-05, | |
| "loss": 2.2155, | |
| "step": 2690 | |
| }, | |
| { | |
| "epoch": 0.18081971604607555, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.802582219242484e-05, | |
| "loss": 2.2942, | |
| "step": 2700 | |
| }, | |
| { | |
| "epoch": 0.18148941869809804, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.799316887677471e-05, | |
| "loss": 2.2229, | |
| "step": 2710 | |
| }, | |
| { | |
| "epoch": 0.18215912135012055, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.796025325423334e-05, | |
| "loss": 2.2516, | |
| "step": 2720 | |
| }, | |
| { | |
| "epoch": 0.18282882400214304, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.792707550470122e-05, | |
| "loss": 2.1446, | |
| "step": 2730 | |
| }, | |
| { | |
| "epoch": 0.18349852665416555, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.789363580951145e-05, | |
| "loss": 2.2074, | |
| "step": 2740 | |
| }, | |
| { | |
| "epoch": 0.18416822930618806, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.785993435142891e-05, | |
| "loss": 2.236, | |
| "step": 2750 | |
| }, | |
| { | |
| "epoch": 0.18483793195821055, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.782597131464901e-05, | |
| "loss": 2.2744, | |
| "step": 2760 | |
| }, | |
| { | |
| "epoch": 0.18550763461023306, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.779174688479693e-05, | |
| "loss": 2.2472, | |
| "step": 2770 | |
| }, | |
| { | |
| "epoch": 0.18617733726225555, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.775726124892646e-05, | |
| "loss": 2.4463, | |
| "step": 2780 | |
| }, | |
| { | |
| "epoch": 0.18684703991427806, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.772251459551897e-05, | |
| "loss": 2.1792, | |
| "step": 2790 | |
| }, | |
| { | |
| "epoch": 0.18751674256630058, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.768750711448249e-05, | |
| "loss": 2.2459, | |
| "step": 2800 | |
| }, | |
| { | |
| "epoch": 0.18818644521832306, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.765223899715054e-05, | |
| "loss": 2.1656, | |
| "step": 2810 | |
| }, | |
| { | |
| "epoch": 0.18885614787034558, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.761671043628124e-05, | |
| "loss": 2.3268, | |
| "step": 2820 | |
| }, | |
| { | |
| "epoch": 0.18952585052236806, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.758092162605604e-05, | |
| "loss": 2.2815, | |
| "step": 2830 | |
| }, | |
| { | |
| "epoch": 0.19019555317439057, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.754487276207889e-05, | |
| "loss": 2.2414, | |
| "step": 2840 | |
| }, | |
| { | |
| "epoch": 0.19086525582641306, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.750856404137502e-05, | |
| "loss": 2.1398, | |
| "step": 2850 | |
| }, | |
| { | |
| "epoch": 0.19153495847843557, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.747199566238991e-05, | |
| "loss": 2.2683, | |
| "step": 2860 | |
| }, | |
| { | |
| "epoch": 0.1922046611304581, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.743516782498822e-05, | |
| "loss": 2.1707, | |
| "step": 2870 | |
| }, | |
| { | |
| "epoch": 0.19287436378248057, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.739808073045264e-05, | |
| "loss": 2.3202, | |
| "step": 2880 | |
| }, | |
| { | |
| "epoch": 0.1935440664345031, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.73607345814829e-05, | |
| "loss": 2.2712, | |
| "step": 2890 | |
| }, | |
| { | |
| "epoch": 0.19421376908652557, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.732312958219453e-05, | |
| "loss": 2.2749, | |
| "step": 2900 | |
| }, | |
| { | |
| "epoch": 0.1948834717385481, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.728526593811783e-05, | |
| "loss": 2.1855, | |
| "step": 2910 | |
| }, | |
| { | |
| "epoch": 0.1955531743905706, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.724714385619673e-05, | |
| "loss": 2.1736, | |
| "step": 2920 | |
| }, | |
| { | |
| "epoch": 0.19622287704259309, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.720876354478765e-05, | |
| "loss": 2.2247, | |
| "step": 2930 | |
| }, | |
| { | |
| "epoch": 0.1968925796946156, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.717012521365836e-05, | |
| "loss": 2.2102, | |
| "step": 2940 | |
| }, | |
| { | |
| "epoch": 0.19756228234663809, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.713122907398686e-05, | |
| "loss": 2.275, | |
| "step": 2950 | |
| }, | |
| { | |
| "epoch": 0.1982319849986606, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.709207533836016e-05, | |
| "loss": 2.0444, | |
| "step": 2960 | |
| }, | |
| { | |
| "epoch": 0.19890168765068308, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.70526642207732e-05, | |
| "loss": 2.2226, | |
| "step": 2970 | |
| }, | |
| { | |
| "epoch": 0.1995713903027056, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.701299593662763e-05, | |
| "loss": 2.2936, | |
| "step": 2980 | |
| }, | |
| { | |
| "epoch": 0.2002410929547281, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.697307070273062e-05, | |
| "loss": 2.2361, | |
| "step": 2990 | |
| }, | |
| { | |
| "epoch": 0.2009107956067506, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.693288873729376e-05, | |
| "loss": 2.1928, | |
| "step": 3000 | |
| }, | |
| { | |
| "epoch": 0.2015804982587731, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.689245025993175e-05, | |
| "loss": 2.185, | |
| "step": 3010 | |
| }, | |
| { | |
| "epoch": 0.2022502009107956, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.68517554916613e-05, | |
| "loss": 2.1397, | |
| "step": 3020 | |
| }, | |
| { | |
| "epoch": 0.2029199035628181, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.681080465489983e-05, | |
| "loss": 2.3028, | |
| "step": 3030 | |
| }, | |
| { | |
| "epoch": 0.20358960621484062, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.676959797346435e-05, | |
| "loss": 2.2324, | |
| "step": 3040 | |
| }, | |
| { | |
| "epoch": 0.2042593088668631, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.672813567257017e-05, | |
| "loss": 2.2146, | |
| "step": 3050 | |
| }, | |
| { | |
| "epoch": 0.20492901151888562, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.668641797882969e-05, | |
| "loss": 2.1497, | |
| "step": 3060 | |
| }, | |
| { | |
| "epoch": 0.2055987141709081, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.664444512025116e-05, | |
| "loss": 2.2361, | |
| "step": 3070 | |
| }, | |
| { | |
| "epoch": 0.20626841682293062, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.660221732623744e-05, | |
| "loss": 2.2336, | |
| "step": 3080 | |
| }, | |
| { | |
| "epoch": 0.2069381194749531, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.655973482758473e-05, | |
| "loss": 2.2446, | |
| "step": 3090 | |
| }, | |
| { | |
| "epoch": 0.20760782212697562, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.651699785648135e-05, | |
| "loss": 2.2724, | |
| "step": 3100 | |
| }, | |
| { | |
| "epoch": 0.20827752477899814, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.647400664650638e-05, | |
| "loss": 2.2362, | |
| "step": 3110 | |
| }, | |
| { | |
| "epoch": 0.20894722743102062, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.643076143262851e-05, | |
| "loss": 2.2481, | |
| "step": 3120 | |
| }, | |
| { | |
| "epoch": 0.20961693008304313, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.638726245120466e-05, | |
| "loss": 2.1208, | |
| "step": 3130 | |
| }, | |
| { | |
| "epoch": 0.21028663273506562, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.634350993997871e-05, | |
| "loss": 2.2085, | |
| "step": 3140 | |
| }, | |
| { | |
| "epoch": 0.21095633538708813, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.629950413808022e-05, | |
| "loss": 2.2974, | |
| "step": 3150 | |
| }, | |
| { | |
| "epoch": 0.21162603803911065, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.62552452860231e-05, | |
| "loss": 2.2781, | |
| "step": 3160 | |
| }, | |
| { | |
| "epoch": 0.21229574069113313, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.621073362570432e-05, | |
| "loss": 2.3054, | |
| "step": 3170 | |
| }, | |
| { | |
| "epoch": 0.21296544334315565, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.616596940040257e-05, | |
| "loss": 2.1255, | |
| "step": 3180 | |
| }, | |
| { | |
| "epoch": 0.21363514599517813, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.612095285477694e-05, | |
| "loss": 2.1918, | |
| "step": 3190 | |
| }, | |
| { | |
| "epoch": 0.21430484864720065, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.607568423486558e-05, | |
| "loss": 2.2317, | |
| "step": 3200 | |
| }, | |
| { | |
| "epoch": 0.21497455129922313, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.603016378808432e-05, | |
| "loss": 2.0927, | |
| "step": 3210 | |
| }, | |
| { | |
| "epoch": 0.21564425395124565, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.59843917632254e-05, | |
| "loss": 2.2694, | |
| "step": 3220 | |
| }, | |
| { | |
| "epoch": 0.21631395660326816, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.593836841045602e-05, | |
| "loss": 2.2625, | |
| "step": 3230 | |
| }, | |
| { | |
| "epoch": 0.21698365925529065, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.589209398131706e-05, | |
| "loss": 2.3238, | |
| "step": 3240 | |
| }, | |
| { | |
| "epoch": 0.21765336190731316, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.584556872872159e-05, | |
| "loss": 2.2813, | |
| "step": 3250 | |
| }, | |
| { | |
| "epoch": 0.21832306455933564, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.579879290695364e-05, | |
| "loss": 2.2298, | |
| "step": 3260 | |
| }, | |
| { | |
| "epoch": 0.21899276721135816, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.575176677166667e-05, | |
| "loss": 2.2254, | |
| "step": 3270 | |
| }, | |
| { | |
| "epoch": 0.21966246986338067, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.570449057988222e-05, | |
| "loss": 2.3366, | |
| "step": 3280 | |
| }, | |
| { | |
| "epoch": 0.22033217251540316, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.565696458998858e-05, | |
| "loss": 2.2601, | |
| "step": 3290 | |
| }, | |
| { | |
| "epoch": 0.22100187516742567, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.560918906173923e-05, | |
| "loss": 2.231, | |
| "step": 3300 | |
| }, | |
| { | |
| "epoch": 0.22167157781944816, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.556116425625159e-05, | |
| "loss": 2.2668, | |
| "step": 3310 | |
| }, | |
| { | |
| "epoch": 0.22234128047147067, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.551289043600542e-05, | |
| "loss": 2.2073, | |
| "step": 3320 | |
| }, | |
| { | |
| "epoch": 0.22301098312349316, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.546436786484155e-05, | |
| "loss": 2.3307, | |
| "step": 3330 | |
| }, | |
| { | |
| "epoch": 0.22368068577551567, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.541559680796029e-05, | |
| "loss": 2.2272, | |
| "step": 3340 | |
| }, | |
| { | |
| "epoch": 0.22435038842753818, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.536657753192011e-05, | |
| "loss": 2.2672, | |
| "step": 3350 | |
| }, | |
| { | |
| "epoch": 0.22502009107956067, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.531731030463607e-05, | |
| "loss": 2.2473, | |
| "step": 3360 | |
| }, | |
| { | |
| "epoch": 0.22568979373158318, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.526779539537845e-05, | |
| "loss": 2.2187, | |
| "step": 3370 | |
| }, | |
| { | |
| "epoch": 0.22635949638360567, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.52180330747712e-05, | |
| "loss": 2.3373, | |
| "step": 3380 | |
| }, | |
| { | |
| "epoch": 0.22702919903562818, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.516802361479056e-05, | |
| "loss": 2.1831, | |
| "step": 3390 | |
| }, | |
| { | |
| "epoch": 0.2276989016876507, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.511776728876341e-05, | |
| "loss": 2.1993, | |
| "step": 3400 | |
| }, | |
| { | |
| "epoch": 0.22836860433967318, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.506726437136599e-05, | |
| "loss": 2.1827, | |
| "step": 3410 | |
| }, | |
| { | |
| "epoch": 0.2290383069916957, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.501651513862222e-05, | |
| "loss": 2.2418, | |
| "step": 3420 | |
| }, | |
| { | |
| "epoch": 0.22970800964371818, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.496551986790225e-05, | |
| "loss": 2.1535, | |
| "step": 3430 | |
| }, | |
| { | |
| "epoch": 0.2303777122957407, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.4914278837921e-05, | |
| "loss": 2.37, | |
| "step": 3440 | |
| }, | |
| { | |
| "epoch": 0.23104741494776318, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.486279232873654e-05, | |
| "loss": 2.1839, | |
| "step": 3450 | |
| }, | |
| { | |
| "epoch": 0.2317171175997857, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.481106062174863e-05, | |
| "loss": 2.2899, | |
| "step": 3460 | |
| }, | |
| { | |
| "epoch": 0.2323868202518082, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.475908399969718e-05, | |
| "loss": 2.2152, | |
| "step": 3470 | |
| }, | |
| { | |
| "epoch": 0.2330565229038307, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.470686274666065e-05, | |
| "loss": 2.2035, | |
| "step": 3480 | |
| }, | |
| { | |
| "epoch": 0.2337262255558532, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.465439714805455e-05, | |
| "loss": 2.2696, | |
| "step": 3490 | |
| }, | |
| { | |
| "epoch": 0.2343959282078757, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.460168749062985e-05, | |
| "loss": 2.2141, | |
| "step": 3500 | |
| }, | |
| { | |
| "epoch": 0.2350656308598982, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.454873406247143e-05, | |
| "loss": 2.2786, | |
| "step": 3510 | |
| }, | |
| { | |
| "epoch": 0.23573533351192072, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.449553715299652e-05, | |
| "loss": 2.2578, | |
| "step": 3520 | |
| }, | |
| { | |
| "epoch": 0.2364050361639432, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.444209705295305e-05, | |
| "loss": 2.2044, | |
| "step": 3530 | |
| }, | |
| { | |
| "epoch": 0.23707473881596572, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.438841405441816e-05, | |
| "loss": 2.2658, | |
| "step": 3540 | |
| }, | |
| { | |
| "epoch": 0.2377444414679882, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.433448845079653e-05, | |
| "loss": 2.2988, | |
| "step": 3550 | |
| }, | |
| { | |
| "epoch": 0.23841414412001072, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.42803205368188e-05, | |
| "loss": 2.2158, | |
| "step": 3560 | |
| }, | |
| { | |
| "epoch": 0.2390838467720332, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.422591060853997e-05, | |
| "loss": 2.3033, | |
| "step": 3570 | |
| }, | |
| { | |
| "epoch": 0.23975354942405572, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.417125896333774e-05, | |
| "loss": 2.3241, | |
| "step": 3580 | |
| }, | |
| { | |
| "epoch": 0.24042325207607823, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.411636589991095e-05, | |
| "loss": 2.2695, | |
| "step": 3590 | |
| }, | |
| { | |
| "epoch": 0.24109295472810072, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.406123171827789e-05, | |
| "loss": 2.2711, | |
| "step": 3600 | |
| }, | |
| { | |
| "epoch": 0.24176265738012323, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.400585671977469e-05, | |
| "loss": 2.1886, | |
| "step": 3610 | |
| }, | |
| { | |
| "epoch": 0.24243236003214572, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.395024120705367e-05, | |
| "loss": 2.2764, | |
| "step": 3620 | |
| }, | |
| { | |
| "epoch": 0.24310206268416823, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.389438548408167e-05, | |
| "loss": 2.1141, | |
| "step": 3630 | |
| }, | |
| { | |
| "epoch": 0.24377176533619074, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.383828985613843e-05, | |
| "loss": 2.2824, | |
| "step": 3640 | |
| }, | |
| { | |
| "epoch": 0.24444146798821323, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.378195462981484e-05, | |
| "loss": 2.2951, | |
| "step": 3650 | |
| }, | |
| { | |
| "epoch": 0.24511117064023574, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.372538011301135e-05, | |
| "loss": 2.2363, | |
| "step": 3660 | |
| }, | |
| { | |
| "epoch": 0.24578087329225823, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.366856661493628e-05, | |
| "loss": 2.2701, | |
| "step": 3670 | |
| }, | |
| { | |
| "epoch": 0.24645057594428074, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.361151444610404e-05, | |
| "loss": 2.2683, | |
| "step": 3680 | |
| }, | |
| { | |
| "epoch": 0.24712027859630323, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.355422391833353e-05, | |
| "loss": 2.3463, | |
| "step": 3690 | |
| }, | |
| { | |
| "epoch": 0.24778998124832574, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.349669534474641e-05, | |
| "loss": 2.2302, | |
| "step": 3700 | |
| }, | |
| { | |
| "epoch": 0.24845968390034825, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.343892903976539e-05, | |
| "loss": 2.2496, | |
| "step": 3710 | |
| }, | |
| { | |
| "epoch": 0.24912938655237074, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.338092531911245e-05, | |
| "loss": 2.2797, | |
| "step": 3720 | |
| }, | |
| { | |
| "epoch": 0.24979908920439325, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.33226844998072e-05, | |
| "loss": 2.2403, | |
| "step": 3730 | |
| }, | |
| { | |
| "epoch": 0.25046879185641574, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.326420690016513e-05, | |
| "loss": 2.1988, | |
| "step": 3740 | |
| }, | |
| { | |
| "epoch": 0.2511384945084382, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.320549283979584e-05, | |
| "loss": 2.249, | |
| "step": 3750 | |
| }, | |
| { | |
| "epoch": 0.25180819716046077, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.314654263960128e-05, | |
| "loss": 2.2201, | |
| "step": 3760 | |
| }, | |
| { | |
| "epoch": 0.25247789981248325, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.308735662177407e-05, | |
| "loss": 2.2789, | |
| "step": 3770 | |
| }, | |
| { | |
| "epoch": 0.25314760246450574, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.302793510979568e-05, | |
| "loss": 2.2515, | |
| "step": 3780 | |
| }, | |
| { | |
| "epoch": 0.2538173051165283, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.296827842843463e-05, | |
| "loss": 2.2134, | |
| "step": 3790 | |
| }, | |
| { | |
| "epoch": 0.25448700776855077, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.290838690374483e-05, | |
| "loss": 2.2498, | |
| "step": 3800 | |
| }, | |
| { | |
| "epoch": 0.25515671042057325, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.284826086306366e-05, | |
| "loss": 2.1802, | |
| "step": 3810 | |
| }, | |
| { | |
| "epoch": 0.2558264130725958, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.278790063501029e-05, | |
| "loss": 2.2067, | |
| "step": 3820 | |
| }, | |
| { | |
| "epoch": 0.2564961157246183, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.272730654948384e-05, | |
| "loss": 2.2824, | |
| "step": 3830 | |
| }, | |
| { | |
| "epoch": 0.25716581837664076, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.266647893766157e-05, | |
| "loss": 2.2489, | |
| "step": 3840 | |
| }, | |
| { | |
| "epoch": 0.25783552102866325, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.260541813199706e-05, | |
| "loss": 2.2607, | |
| "step": 3850 | |
| }, | |
| { | |
| "epoch": 0.2585052236806858, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.254412446621845e-05, | |
| "loss": 2.3607, | |
| "step": 3860 | |
| }, | |
| { | |
| "epoch": 0.2591749263327083, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.248259827532656e-05, | |
| "loss": 2.2326, | |
| "step": 3870 | |
| }, | |
| { | |
| "epoch": 0.25984462898473076, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.242083989559308e-05, | |
| "loss": 2.2679, | |
| "step": 3880 | |
| }, | |
| { | |
| "epoch": 0.2605143316367533, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.235884966455872e-05, | |
| "loss": 2.1288, | |
| "step": 3890 | |
| }, | |
| { | |
| "epoch": 0.2611840342887758, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.229662792103137e-05, | |
| "loss": 2.2834, | |
| "step": 3900 | |
| }, | |
| { | |
| "epoch": 0.2618537369407983, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.223417500508427e-05, | |
| "loss": 2.0785, | |
| "step": 3910 | |
| }, | |
| { | |
| "epoch": 0.26252343959282076, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.217149125805416e-05, | |
| "loss": 2.2012, | |
| "step": 3920 | |
| }, | |
| { | |
| "epoch": 0.2631931422448433, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.21085770225393e-05, | |
| "loss": 2.2774, | |
| "step": 3930 | |
| }, | |
| { | |
| "epoch": 0.2638628448968658, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.204543264239778e-05, | |
| "loss": 2.2201, | |
| "step": 3940 | |
| }, | |
| { | |
| "epoch": 0.2645325475488883, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.198205846274548e-05, | |
| "loss": 2.3057, | |
| "step": 3950 | |
| }, | |
| { | |
| "epoch": 0.2652022502009108, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.191845482995431e-05, | |
| "loss": 2.2332, | |
| "step": 3960 | |
| }, | |
| { | |
| "epoch": 0.2658719528529333, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.185462209165021e-05, | |
| "loss": 2.1952, | |
| "step": 3970 | |
| }, | |
| { | |
| "epoch": 0.2665416555049558, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.179056059671129e-05, | |
| "loss": 2.3655, | |
| "step": 3980 | |
| }, | |
| { | |
| "epoch": 0.26721135815697833, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.1726270695266e-05, | |
| "loss": 2.2544, | |
| "step": 3990 | |
| }, | |
| { | |
| "epoch": 0.2678810608090008, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.166175273869107e-05, | |
| "loss": 2.2741, | |
| "step": 4000 | |
| }, | |
| { | |
| "epoch": 0.2685507634610233, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.15970070796097e-05, | |
| "loss": 2.2881, | |
| "step": 4010 | |
| }, | |
| { | |
| "epoch": 0.2692204661130458, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.15320340718896e-05, | |
| "loss": 2.2729, | |
| "step": 4020 | |
| }, | |
| { | |
| "epoch": 0.26989016876506833, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.146683407064105e-05, | |
| "loss": 2.238, | |
| "step": 4030 | |
| }, | |
| { | |
| "epoch": 0.2705598714170908, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.140140743221496e-05, | |
| "loss": 2.3336, | |
| "step": 4040 | |
| }, | |
| { | |
| "epoch": 0.2712295740691133, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.133575451420093e-05, | |
| "loss": 2.2249, | |
| "step": 4050 | |
| }, | |
| { | |
| "epoch": 0.27189927672113584, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.126987567542532e-05, | |
| "loss": 2.2385, | |
| "step": 4060 | |
| }, | |
| { | |
| "epoch": 0.2725689793731583, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.12037712759492e-05, | |
| "loss": 2.2343, | |
| "step": 4070 | |
| }, | |
| { | |
| "epoch": 0.2732386820251808, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.113744167706648e-05, | |
| "loss": 2.2202, | |
| "step": 4080 | |
| }, | |
| { | |
| "epoch": 0.2739083846772033, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.107088724130192e-05, | |
| "loss": 2.2392, | |
| "step": 4090 | |
| }, | |
| { | |
| "epoch": 0.27457808732922584, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.100410833240908e-05, | |
| "loss": 2.2371, | |
| "step": 4100 | |
| }, | |
| { | |
| "epoch": 0.2752477899812483, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.093710531536842e-05, | |
| "loss": 2.3052, | |
| "step": 4110 | |
| }, | |
| { | |
| "epoch": 0.2759174926332708, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.086987855638525e-05, | |
| "loss": 2.1853, | |
| "step": 4120 | |
| }, | |
| { | |
| "epoch": 0.27658719528529335, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.08024284228877e-05, | |
| "loss": 2.2073, | |
| "step": 4130 | |
| }, | |
| { | |
| "epoch": 0.27725689793731584, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.073475528352484e-05, | |
| "loss": 2.1914, | |
| "step": 4140 | |
| }, | |
| { | |
| "epoch": 0.2779266005893383, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.066685950816451e-05, | |
| "loss": 2.2104, | |
| "step": 4150 | |
| }, | |
| { | |
| "epoch": 0.2785963032413608, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.059874146789139e-05, | |
| "loss": 2.1314, | |
| "step": 4160 | |
| }, | |
| { | |
| "epoch": 0.27926600589338335, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.053040153500496e-05, | |
| "loss": 2.1836, | |
| "step": 4170 | |
| }, | |
| { | |
| "epoch": 0.27993570854540584, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.046184008301743e-05, | |
| "loss": 2.2723, | |
| "step": 4180 | |
| }, | |
| { | |
| "epoch": 0.2806054111974283, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.039305748665176e-05, | |
| "loss": 2.2827, | |
| "step": 4190 | |
| }, | |
| { | |
| "epoch": 0.28127511384945086, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.032405412183956e-05, | |
| "loss": 2.1977, | |
| "step": 4200 | |
| }, | |
| { | |
| "epoch": 0.28194481650147335, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.025483036571902e-05, | |
| "loss": 2.2147, | |
| "step": 4210 | |
| }, | |
| { | |
| "epoch": 0.28261451915349584, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.018538659663293e-05, | |
| "loss": 2.2783, | |
| "step": 4220 | |
| }, | |
| { | |
| "epoch": 0.2832842218055184, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.01157231941265e-05, | |
| "loss": 2.1797, | |
| "step": 4230 | |
| }, | |
| { | |
| "epoch": 0.28395392445754086, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.004584053894545e-05, | |
| "loss": 2.2945, | |
| "step": 4240 | |
| }, | |
| { | |
| "epoch": 0.28462362710956335, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.997573901303372e-05, | |
| "loss": 2.2139, | |
| "step": 4250 | |
| }, | |
| { | |
| "epoch": 0.28529332976158583, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.990541899953151e-05, | |
| "loss": 2.2501, | |
| "step": 4260 | |
| }, | |
| { | |
| "epoch": 0.2859630324136084, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.983488088277323e-05, | |
| "loss": 2.233, | |
| "step": 4270 | |
| }, | |
| { | |
| "epoch": 0.28663273506563086, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.976412504828526e-05, | |
| "loss": 2.2957, | |
| "step": 4280 | |
| }, | |
| { | |
| "epoch": 0.28730243771765335, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.969315188278396e-05, | |
| "loss": 2.2512, | |
| "step": 4290 | |
| }, | |
| { | |
| "epoch": 0.2879721403696759, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.962196177417353e-05, | |
| "loss": 2.1994, | |
| "step": 4300 | |
| }, | |
| { | |
| "epoch": 0.2886418430216984, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.955055511154378e-05, | |
| "loss": 2.189, | |
| "step": 4310 | |
| }, | |
| { | |
| "epoch": 0.28931154567372086, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.947893228516821e-05, | |
| "loss": 2.2342, | |
| "step": 4320 | |
| }, | |
| { | |
| "epoch": 0.28998124832574335, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.940709368650173e-05, | |
| "loss": 2.1649, | |
| "step": 4330 | |
| }, | |
| { | |
| "epoch": 0.2906509509777659, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.933503970817849e-05, | |
| "loss": 2.232, | |
| "step": 4340 | |
| }, | |
| { | |
| "epoch": 0.2913206536297884, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.926277074400987e-05, | |
| "loss": 2.2633, | |
| "step": 4350 | |
| }, | |
| { | |
| "epoch": 0.29199035628181086, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.919028718898226e-05, | |
| "loss": 2.2506, | |
| "step": 4360 | |
| }, | |
| { | |
| "epoch": 0.2926600589338334, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.911758943925483e-05, | |
| "loss": 2.2919, | |
| "step": 4370 | |
| }, | |
| { | |
| "epoch": 0.2933297615858559, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.904467789215751e-05, | |
| "loss": 2.2114, | |
| "step": 4380 | |
| }, | |
| { | |
| "epoch": 0.29399946423787837, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.897155294618869e-05, | |
| "loss": 2.2352, | |
| "step": 4390 | |
| }, | |
| { | |
| "epoch": 0.29466916688990086, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.88982150010131e-05, | |
| "loss": 2.2521, | |
| "step": 4400 | |
| }, | |
| { | |
| "epoch": 0.2953388695419234, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.882466445745964e-05, | |
| "loss": 2.2084, | |
| "step": 4410 | |
| }, | |
| { | |
| "epoch": 0.2960085721939459, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.875090171751915e-05, | |
| "loss": 2.2102, | |
| "step": 4420 | |
| }, | |
| { | |
| "epoch": 0.29667827484596837, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.867692718434223e-05, | |
| "loss": 2.1578, | |
| "step": 4430 | |
| }, | |
| { | |
| "epoch": 0.2973479774979909, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.860274126223705e-05, | |
| "loss": 2.2352, | |
| "step": 4440 | |
| }, | |
| { | |
| "epoch": 0.2980176801500134, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.85283443566671e-05, | |
| "loss": 2.2075, | |
| "step": 4450 | |
| }, | |
| { | |
| "epoch": 0.2986873828020359, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.845373687424903e-05, | |
| "loss": 2.3332, | |
| "step": 4460 | |
| }, | |
| { | |
| "epoch": 0.2993570854540584, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.83789192227504e-05, | |
| "loss": 2.1775, | |
| "step": 4470 | |
| }, | |
| { | |
| "epoch": 0.3000267881060809, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.83038918110874e-05, | |
| "loss": 2.2348, | |
| "step": 4480 | |
| }, | |
| { | |
| "epoch": 0.3006964907581034, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.822865504932275e-05, | |
| "loss": 2.1897, | |
| "step": 4490 | |
| }, | |
| { | |
| "epoch": 0.3013661934101259, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.815320934866329e-05, | |
| "loss": 2.2539, | |
| "step": 4500 | |
| }, | |
| { | |
| "epoch": 0.3020358960621484, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.807755512145788e-05, | |
| "loss": 2.2351, | |
| "step": 4510 | |
| }, | |
| { | |
| "epoch": 0.3027055987141709, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.800169278119503e-05, | |
| "loss": 2.3542, | |
| "step": 4520 | |
| }, | |
| { | |
| "epoch": 0.3033753013661934, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.792562274250075e-05, | |
| "loss": 2.2724, | |
| "step": 4530 | |
| }, | |
| { | |
| "epoch": 0.30404500401821594, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.784934542113617e-05, | |
| "loss": 2.204, | |
| "step": 4540 | |
| }, | |
| { | |
| "epoch": 0.3047147066702384, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.777286123399536e-05, | |
| "loss": 2.2944, | |
| "step": 4550 | |
| }, | |
| { | |
| "epoch": 0.3053844093222609, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.7696170599103e-05, | |
| "loss": 2.1318, | |
| "step": 4560 | |
| }, | |
| { | |
| "epoch": 0.3060541119742834, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.761927393561214e-05, | |
| "loss": 2.1441, | |
| "step": 4570 | |
| }, | |
| { | |
| "epoch": 0.30672381462630594, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.754217166380184e-05, | |
| "loss": 2.303, | |
| "step": 4580 | |
| }, | |
| { | |
| "epoch": 0.3073935172783284, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.746486420507491e-05, | |
| "loss": 2.2769, | |
| "step": 4590 | |
| }, | |
| { | |
| "epoch": 0.3080632199303509, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.738735198195566e-05, | |
| "loss": 2.3048, | |
| "step": 4600 | |
| }, | |
| { | |
| "epoch": 0.30873292258237345, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.73096354180875e-05, | |
| "loss": 2.2582, | |
| "step": 4610 | |
| }, | |
| { | |
| "epoch": 0.30940262523439593, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.72317149382307e-05, | |
| "loss": 2.3533, | |
| "step": 4620 | |
| }, | |
| { | |
| "epoch": 0.3100723278864184, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.715359096825999e-05, | |
| "loss": 2.2417, | |
| "step": 4630 | |
| }, | |
| { | |
| "epoch": 0.3107420305384409, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.707526393516228e-05, | |
| "loss": 2.215, | |
| "step": 4640 | |
| }, | |
| { | |
| "epoch": 0.31141173319046345, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.699673426703436e-05, | |
| "loss": 2.1881, | |
| "step": 4650 | |
| }, | |
| { | |
| "epoch": 0.31208143584248593, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.691800239308052e-05, | |
| "loss": 2.2907, | |
| "step": 4660 | |
| }, | |
| { | |
| "epoch": 0.3127511384945084, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.683906874361017e-05, | |
| "loss": 2.221, | |
| "step": 4670 | |
| }, | |
| { | |
| "epoch": 0.31342084114653096, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.675993375003553e-05, | |
| "loss": 2.1868, | |
| "step": 4680 | |
| }, | |
| { | |
| "epoch": 0.31409054379855345, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.668059784486929e-05, | |
| "loss": 2.244, | |
| "step": 4690 | |
| }, | |
| { | |
| "epoch": 0.31476024645057593, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.660106146172223e-05, | |
| "loss": 2.2294, | |
| "step": 4700 | |
| }, | |
| { | |
| "epoch": 0.3154299491025985, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.652132503530082e-05, | |
| "loss": 2.2347, | |
| "step": 4710 | |
| }, | |
| { | |
| "epoch": 0.31609965175462096, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.644138900140485e-05, | |
| "loss": 2.2633, | |
| "step": 4720 | |
| }, | |
| { | |
| "epoch": 0.31676935440664344, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.636125379692515e-05, | |
| "loss": 2.2383, | |
| "step": 4730 | |
| }, | |
| { | |
| "epoch": 0.31743905705866593, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.628091985984099e-05, | |
| "loss": 2.2282, | |
| "step": 4740 | |
| }, | |
| { | |
| "epoch": 0.31810875971068847, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.620038762921794e-05, | |
| "loss": 2.172, | |
| "step": 4750 | |
| }, | |
| { | |
| "epoch": 0.31877846236271096, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.611965754520526e-05, | |
| "loss": 2.2849, | |
| "step": 4760 | |
| }, | |
| { | |
| "epoch": 0.31944816501473344, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.60387300490336e-05, | |
| "loss": 2.3451, | |
| "step": 4770 | |
| }, | |
| { | |
| "epoch": 0.320117867666756, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.595760558301257e-05, | |
| "loss": 2.2314, | |
| "step": 4780 | |
| }, | |
| { | |
| "epoch": 0.32078757031877847, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.587628459052834e-05, | |
| "loss": 2.155, | |
| "step": 4790 | |
| }, | |
| { | |
| "epoch": 0.32145727297080096, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.579476751604119e-05, | |
| "loss": 2.2874, | |
| "step": 4800 | |
| }, | |
| { | |
| "epoch": 0.32212697562282344, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.571305480508302e-05, | |
| "loss": 2.2094, | |
| "step": 4810 | |
| }, | |
| { | |
| "epoch": 0.322796678274846, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.563114690425511e-05, | |
| "loss": 2.2631, | |
| "step": 4820 | |
| }, | |
| { | |
| "epoch": 0.32346638092686847, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.554904426122543e-05, | |
| "loss": 2.287, | |
| "step": 4830 | |
| }, | |
| { | |
| "epoch": 0.32413608357889095, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.546674732472638e-05, | |
| "loss": 2.2728, | |
| "step": 4840 | |
| }, | |
| { | |
| "epoch": 0.3248057862309135, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.538425654455225e-05, | |
| "loss": 2.2106, | |
| "step": 4850 | |
| }, | |
| { | |
| "epoch": 0.325475488882936, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.530157237155681e-05, | |
| "loss": 2.2396, | |
| "step": 4860 | |
| }, | |
| { | |
| "epoch": 0.32614519153495847, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.521869525765076e-05, | |
| "loss": 2.2975, | |
| "step": 4870 | |
| }, | |
| { | |
| "epoch": 0.32681489418698095, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.513562565579936e-05, | |
| "loss": 2.2552, | |
| "step": 4880 | |
| }, | |
| { | |
| "epoch": 0.3274845968390035, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.505236402001996e-05, | |
| "loss": 2.2631, | |
| "step": 4890 | |
| }, | |
| { | |
| "epoch": 0.328154299491026, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.496891080537939e-05, | |
| "loss": 2.1826, | |
| "step": 4900 | |
| }, | |
| { | |
| "epoch": 0.32882400214304847, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.488526646799158e-05, | |
| "loss": 2.2127, | |
| "step": 4910 | |
| }, | |
| { | |
| "epoch": 0.329493704795071, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.480143146501506e-05, | |
| "loss": 2.1991, | |
| "step": 4920 | |
| }, | |
| { | |
| "epoch": 0.3301634074470935, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.471740625465044e-05, | |
| "loss": 2.2428, | |
| "step": 4930 | |
| }, | |
| { | |
| "epoch": 0.330833110099116, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.463319129613791e-05, | |
| "loss": 2.2601, | |
| "step": 4940 | |
| }, | |
| { | |
| "epoch": 0.3315028127511385, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.45487870497547e-05, | |
| "loss": 2.2814, | |
| "step": 4950 | |
| }, | |
| { | |
| "epoch": 0.332172515403161, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.446419397681265e-05, | |
| "loss": 2.1667, | |
| "step": 4960 | |
| }, | |
| { | |
| "epoch": 0.3328422180551835, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.437941253965558e-05, | |
| "loss": 2.3395, | |
| "step": 4970 | |
| }, | |
| { | |
| "epoch": 0.333511920707206, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.429444320165683e-05, | |
| "loss": 2.2234, | |
| "step": 4980 | |
| }, | |
| { | |
| "epoch": 0.3341816233592285, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.420928642721672e-05, | |
| "loss": 2.3486, | |
| "step": 4990 | |
| }, | |
| { | |
| "epoch": 0.334851326011251, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.412394268176003e-05, | |
| "loss": 2.2212, | |
| "step": 5000 | |
| }, | |
| { | |
| "epoch": 0.3355210286632735, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.403841243173338e-05, | |
| "loss": 2.2387, | |
| "step": 5010 | |
| }, | |
| { | |
| "epoch": 0.33619073131529603, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.395269614460275e-05, | |
| "loss": 2.1898, | |
| "step": 5020 | |
| }, | |
| { | |
| "epoch": 0.3368604339673185, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.386679428885092e-05, | |
| "loss": 2.245, | |
| "step": 5030 | |
| }, | |
| { | |
| "epoch": 0.337530136619341, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.37807073339749e-05, | |
| "loss": 2.2294, | |
| "step": 5040 | |
| }, | |
| { | |
| "epoch": 0.3381998392713635, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.369443575048332e-05, | |
| "loss": 2.2182, | |
| "step": 5050 | |
| }, | |
| { | |
| "epoch": 0.33886954192338603, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.360798000989394e-05, | |
| "loss": 2.2475, | |
| "step": 5060 | |
| }, | |
| { | |
| "epoch": 0.3395392445754085, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.352134058473106e-05, | |
| "loss": 2.2286, | |
| "step": 5070 | |
| }, | |
| { | |
| "epoch": 0.340208947227431, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.343451794852282e-05, | |
| "loss": 2.2071, | |
| "step": 5080 | |
| }, | |
| { | |
| "epoch": 0.34087864987945354, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.334751257579874e-05, | |
| "loss": 2.2365, | |
| "step": 5090 | |
| }, | |
| { | |
| "epoch": 0.34154835253147603, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.326032494208713e-05, | |
| "loss": 2.3365, | |
| "step": 5100 | |
| }, | |
| { | |
| "epoch": 0.3422180551834985, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.31729555239124e-05, | |
| "loss": 2.2691, | |
| "step": 5110 | |
| }, | |
| { | |
| "epoch": 0.342887757835521, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.308540479879252e-05, | |
| "loss": 2.1833, | |
| "step": 5120 | |
| }, | |
| { | |
| "epoch": 0.34355746048754354, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.299767324523638e-05, | |
| "loss": 2.1165, | |
| "step": 5130 | |
| }, | |
| { | |
| "epoch": 0.34422716313956603, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.290976134274123e-05, | |
| "loss": 2.3461, | |
| "step": 5140 | |
| }, | |
| { | |
| "epoch": 0.3448968657915885, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.282166957178995e-05, | |
| "loss": 2.2149, | |
| "step": 5150 | |
| }, | |
| { | |
| "epoch": 0.34556656844361106, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.273339841384855e-05, | |
| "loss": 2.2644, | |
| "step": 5160 | |
| }, | |
| { | |
| "epoch": 0.34623627109563354, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.264494835136347e-05, | |
| "loss": 2.2684, | |
| "step": 5170 | |
| }, | |
| { | |
| "epoch": 0.346905973747656, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.255631986775894e-05, | |
| "loss": 2.2436, | |
| "step": 5180 | |
| }, | |
| { | |
| "epoch": 0.34757567639967857, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.246751344743433e-05, | |
| "loss": 2.1691, | |
| "step": 5190 | |
| }, | |
| { | |
| "epoch": 0.34824537905170105, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.237852957576158e-05, | |
| "loss": 2.2467, | |
| "step": 5200 | |
| }, | |
| { | |
| "epoch": 0.34891508170372354, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.228936873908244e-05, | |
| "loss": 2.2633, | |
| "step": 5210 | |
| }, | |
| { | |
| "epoch": 0.349584784355746, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.220003142470592e-05, | |
| "loss": 2.247, | |
| "step": 5220 | |
| }, | |
| { | |
| "epoch": 0.35025448700776857, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.211051812090548e-05, | |
| "loss": 2.3088, | |
| "step": 5230 | |
| }, | |
| { | |
| "epoch": 0.35092418965979105, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.202082931691655e-05, | |
| "loss": 2.2103, | |
| "step": 5240 | |
| }, | |
| { | |
| "epoch": 0.35159389231181354, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.193096550293369e-05, | |
| "loss": 2.0959, | |
| "step": 5250 | |
| }, | |
| { | |
| "epoch": 0.3522635949638361, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.184092717010801e-05, | |
| "loss": 2.2747, | |
| "step": 5260 | |
| }, | |
| { | |
| "epoch": 0.35293329761585857, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.175071481054444e-05, | |
| "loss": 2.2294, | |
| "step": 5270 | |
| }, | |
| { | |
| "epoch": 0.35360300026788105, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.166032891729905e-05, | |
| "loss": 2.1772, | |
| "step": 5280 | |
| }, | |
| { | |
| "epoch": 0.35427270291990354, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.15697699843764e-05, | |
| "loss": 2.2499, | |
| "step": 5290 | |
| }, | |
| { | |
| "epoch": 0.3549424055719261, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.147903850672671e-05, | |
| "loss": 2.2, | |
| "step": 5300 | |
| }, | |
| { | |
| "epoch": 0.35561210822394856, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.138813498024332e-05, | |
| "loss": 2.2583, | |
| "step": 5310 | |
| }, | |
| { | |
| "epoch": 0.35628181087597105, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.129705990175991e-05, | |
| "loss": 2.2661, | |
| "step": 5320 | |
| }, | |
| { | |
| "epoch": 0.3569515135279936, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.120581376904773e-05, | |
| "loss": 2.2114, | |
| "step": 5330 | |
| }, | |
| { | |
| "epoch": 0.3576212161800161, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.111439708081297e-05, | |
| "loss": 2.2011, | |
| "step": 5340 | |
| }, | |
| { | |
| "epoch": 0.35829091883203856, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.102281033669393e-05, | |
| "loss": 2.252, | |
| "step": 5350 | |
| }, | |
| { | |
| "epoch": 0.3589606214840611, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.093105403725842e-05, | |
| "loss": 2.1967, | |
| "step": 5360 | |
| }, | |
| { | |
| "epoch": 0.3596303241360836, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.083912868400094e-05, | |
| "loss": 2.2036, | |
| "step": 5370 | |
| }, | |
| { | |
| "epoch": 0.3603000267881061, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.074703477933991e-05, | |
| "loss": 2.2294, | |
| "step": 5380 | |
| }, | |
| { | |
| "epoch": 0.36096972944012856, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.065477282661504e-05, | |
| "loss": 2.2663, | |
| "step": 5390 | |
| }, | |
| { | |
| "epoch": 0.3616394320921511, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.056234333008445e-05, | |
| "loss": 2.2182, | |
| "step": 5400 | |
| }, | |
| { | |
| "epoch": 0.3623091347441736, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.046974679492197e-05, | |
| "loss": 2.2519, | |
| "step": 5410 | |
| }, | |
| { | |
| "epoch": 0.3629788373961961, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.037698372721442e-05, | |
| "loss": 2.1965, | |
| "step": 5420 | |
| }, | |
| { | |
| "epoch": 0.3636485400482186, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.028405463395878e-05, | |
| "loss": 2.1936, | |
| "step": 5430 | |
| }, | |
| { | |
| "epoch": 0.3643182427002411, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.019096002305946e-05, | |
| "loss": 2.2061, | |
| "step": 5440 | |
| }, | |
| { | |
| "epoch": 0.3649879453522636, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.00977004033255e-05, | |
| "loss": 2.2627, | |
| "step": 5450 | |
| }, | |
| { | |
| "epoch": 0.3656576480042861, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.000427628446776e-05, | |
| "loss": 2.3286, | |
| "step": 5460 | |
| }, | |
| { | |
| "epoch": 0.3663273506563086, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.991068817709624e-05, | |
| "loss": 2.1929, | |
| "step": 5470 | |
| }, | |
| { | |
| "epoch": 0.3669970533083311, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.981693659271716e-05, | |
| "loss": 2.2085, | |
| "step": 5480 | |
| }, | |
| { | |
| "epoch": 0.3676667559603536, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.972302204373024e-05, | |
| "loss": 2.2468, | |
| "step": 5490 | |
| }, | |
| { | |
| "epoch": 0.3683364586123761, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.962894504342591e-05, | |
| "loss": 2.2503, | |
| "step": 5500 | |
| }, | |
| { | |
| "epoch": 0.3690061612643986, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.953470610598244e-05, | |
| "loss": 2.2457, | |
| "step": 5510 | |
| }, | |
| { | |
| "epoch": 0.3696758639164211, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.944030574646323e-05, | |
| "loss": 2.2607, | |
| "step": 5520 | |
| }, | |
| { | |
| "epoch": 0.3703455665684436, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.934574448081385e-05, | |
| "loss": 2.4296, | |
| "step": 5530 | |
| }, | |
| { | |
| "epoch": 0.3710152692204661, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.925102282585936e-05, | |
| "loss": 2.2071, | |
| "step": 5540 | |
| }, | |
| { | |
| "epoch": 0.3716849718724886, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.91561412993014e-05, | |
| "loss": 2.2177, | |
| "step": 5550 | |
| }, | |
| { | |
| "epoch": 0.3723546745245111, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.906110041971541e-05, | |
| "loss": 2.2693, | |
| "step": 5560 | |
| }, | |
| { | |
| "epoch": 0.37302437717653364, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.896590070654777e-05, | |
| "loss": 2.1922, | |
| "step": 5570 | |
| }, | |
| { | |
| "epoch": 0.3736940798285561, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.887054268011297e-05, | |
| "loss": 2.216, | |
| "step": 5580 | |
| }, | |
| { | |
| "epoch": 0.3743637824805786, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.877502686159074e-05, | |
| "loss": 2.2372, | |
| "step": 5590 | |
| }, | |
| { | |
| "epoch": 0.37503348513260115, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.867935377302324e-05, | |
| "loss": 2.2107, | |
| "step": 5600 | |
| }, | |
| { | |
| "epoch": 0.37570318778462364, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.858352393731219e-05, | |
| "loss": 2.2494, | |
| "step": 5610 | |
| }, | |
| { | |
| "epoch": 0.3763728904366461, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.8487537878216e-05, | |
| "loss": 2.1675, | |
| "step": 5620 | |
| }, | |
| { | |
| "epoch": 0.3770425930886686, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.839139612034695e-05, | |
| "loss": 2.2335, | |
| "step": 5630 | |
| }, | |
| { | |
| "epoch": 0.37771229574069115, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.829509918916825e-05, | |
| "loss": 2.2172, | |
| "step": 5640 | |
| }, | |
| { | |
| "epoch": 0.37838199839271364, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.819864761099125e-05, | |
| "loss": 2.2317, | |
| "step": 5650 | |
| }, | |
| { | |
| "epoch": 0.3790517010447361, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.81020419129725e-05, | |
| "loss": 2.2232, | |
| "step": 5660 | |
| }, | |
| { | |
| "epoch": 0.37972140369675866, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.800528262311089e-05, | |
| "loss": 2.2933, | |
| "step": 5670 | |
| }, | |
| { | |
| "epoch": 0.38039110634878115, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.790837027024478e-05, | |
| "loss": 2.1793, | |
| "step": 5680 | |
| }, | |
| { | |
| "epoch": 0.38106080900080364, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.781130538404911e-05, | |
| "loss": 2.1571, | |
| "step": 5690 | |
| }, | |
| { | |
| "epoch": 0.3817305116528261, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.771408849503245e-05, | |
| "loss": 2.1802, | |
| "step": 5700 | |
| }, | |
| { | |
| "epoch": 0.38240021430484866, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.761672013453418e-05, | |
| "loss": 2.247, | |
| "step": 5710 | |
| }, | |
| { | |
| "epoch": 0.38306991695687115, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.751920083472153e-05, | |
| "loss": 2.2638, | |
| "step": 5720 | |
| }, | |
| { | |
| "epoch": 0.38373961960889363, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.74215311285867e-05, | |
| "loss": 2.2909, | |
| "step": 5730 | |
| }, | |
| { | |
| "epoch": 0.3844093222609162, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.732371154994392e-05, | |
| "loss": 2.1795, | |
| "step": 5740 | |
| }, | |
| { | |
| "epoch": 0.38507902491293866, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.722574263342656e-05, | |
| "loss": 2.2015, | |
| "step": 5750 | |
| }, | |
| { | |
| "epoch": 0.38574872756496115, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.71276249144842e-05, | |
| "loss": 2.2313, | |
| "step": 5760 | |
| }, | |
| { | |
| "epoch": 0.38641843021698363, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.70293589293797e-05, | |
| "loss": 2.2416, | |
| "step": 5770 | |
| }, | |
| { | |
| "epoch": 0.3870881328690062, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.693094521518627e-05, | |
| "loss": 2.2684, | |
| "step": 5780 | |
| }, | |
| { | |
| "epoch": 0.38775783552102866, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.683238430978452e-05, | |
| "loss": 2.237, | |
| "step": 5790 | |
| }, | |
| { | |
| "epoch": 0.38842753817305115, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.673367675185953e-05, | |
| "loss": 2.053, | |
| "step": 5800 | |
| }, | |
| { | |
| "epoch": 0.3890972408250737, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.663482308089795e-05, | |
| "loss": 2.228, | |
| "step": 5810 | |
| }, | |
| { | |
| "epoch": 0.3897669434770962, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.653582383718498e-05, | |
| "loss": 2.1679, | |
| "step": 5820 | |
| }, | |
| { | |
| "epoch": 0.39043664612911866, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.643667956180146e-05, | |
| "loss": 2.2186, | |
| "step": 5830 | |
| }, | |
| { | |
| "epoch": 0.3911063487811412, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.633739079662088e-05, | |
| "loss": 2.2065, | |
| "step": 5840 | |
| }, | |
| { | |
| "epoch": 0.3917760514331637, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.623795808430645e-05, | |
| "loss": 2.2311, | |
| "step": 5850 | |
| }, | |
| { | |
| "epoch": 0.39244575408518617, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.613838196830816e-05, | |
| "loss": 2.222, | |
| "step": 5860 | |
| }, | |
| { | |
| "epoch": 0.39311545673720866, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.603866299285972e-05, | |
| "loss": 2.2572, | |
| "step": 5870 | |
| }, | |
| { | |
| "epoch": 0.3937851593892312, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.593880170297564e-05, | |
| "loss": 2.209, | |
| "step": 5880 | |
| }, | |
| { | |
| "epoch": 0.3944548620412537, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.583879864444832e-05, | |
| "loss": 2.259, | |
| "step": 5890 | |
| }, | |
| { | |
| "epoch": 0.39512456469327617, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.573865436384491e-05, | |
| "loss": 2.1778, | |
| "step": 5900 | |
| }, | |
| { | |
| "epoch": 0.3957942673452987, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.563836940850449e-05, | |
| "loss": 2.2173, | |
| "step": 5910 | |
| }, | |
| { | |
| "epoch": 0.3964639699973212, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.55379443265349e-05, | |
| "loss": 2.2584, | |
| "step": 5920 | |
| }, | |
| { | |
| "epoch": 0.3971336726493437, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.543737966680994e-05, | |
| "loss": 2.2348, | |
| "step": 5930 | |
| }, | |
| { | |
| "epoch": 0.39780337530136617, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.533667597896623e-05, | |
| "loss": 2.1596, | |
| "step": 5940 | |
| }, | |
| { | |
| "epoch": 0.3984730779533887, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.523583381340025e-05, | |
| "loss": 2.2926, | |
| "step": 5950 | |
| }, | |
| { | |
| "epoch": 0.3991427806054112, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.513485372126531e-05, | |
| "loss": 2.2573, | |
| "step": 5960 | |
| }, | |
| { | |
| "epoch": 0.3998124832574337, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.503373625446862e-05, | |
| "loss": 2.2771, | |
| "step": 5970 | |
| }, | |
| { | |
| "epoch": 0.4004821859094562, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.493248196566816e-05, | |
| "loss": 2.2364, | |
| "step": 5980 | |
| }, | |
| { | |
| "epoch": 0.4011518885614787, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.483109140826968e-05, | |
| "loss": 2.163, | |
| "step": 5990 | |
| }, | |
| { | |
| "epoch": 0.4018215912135012, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.472956513642379e-05, | |
| "loss": 2.1557, | |
| "step": 6000 | |
| }, | |
| { | |
| "epoch": 0.4024912938655237, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.462790370502284e-05, | |
| "loss": 2.2514, | |
| "step": 6010 | |
| }, | |
| { | |
| "epoch": 0.4031609965175462, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.452610766969781e-05, | |
| "loss": 2.1986, | |
| "step": 6020 | |
| }, | |
| { | |
| "epoch": 0.4038306991695687, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.442417758681542e-05, | |
| "loss": 2.2523, | |
| "step": 6030 | |
| }, | |
| { | |
| "epoch": 0.4045004018215912, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.432211401347504e-05, | |
| "loss": 2.3855, | |
| "step": 6040 | |
| }, | |
| { | |
| "epoch": 0.40517010447361373, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.421991750750559e-05, | |
| "loss": 2.1914, | |
| "step": 6050 | |
| }, | |
| { | |
| "epoch": 0.4058398071256362, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.411758862746258e-05, | |
| "loss": 2.122, | |
| "step": 6060 | |
| }, | |
| { | |
| "epoch": 0.4065095097776587, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.401512793262496e-05, | |
| "loss": 2.1607, | |
| "step": 6070 | |
| }, | |
| { | |
| "epoch": 0.40717921242968125, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.391253598299217e-05, | |
| "loss": 2.1664, | |
| "step": 6080 | |
| }, | |
| { | |
| "epoch": 0.40784891508170373, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.380981333928097e-05, | |
| "loss": 2.2631, | |
| "step": 6090 | |
| }, | |
| { | |
| "epoch": 0.4085186177337262, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.370696056292249e-05, | |
| "loss": 2.2071, | |
| "step": 6100 | |
| }, | |
| { | |
| "epoch": 0.4091883203857487, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.360397821605902e-05, | |
| "loss": 2.2336, | |
| "step": 6110 | |
| }, | |
| { | |
| "epoch": 0.40985802303777125, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.350086686154111e-05, | |
| "loss": 2.2513, | |
| "step": 6120 | |
| }, | |
| { | |
| "epoch": 0.41052772568979373, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.33976270629243e-05, | |
| "loss": 2.2542, | |
| "step": 6130 | |
| }, | |
| { | |
| "epoch": 0.4111974283418162, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.329425938446625e-05, | |
| "loss": 2.2058, | |
| "step": 6140 | |
| }, | |
| { | |
| "epoch": 0.41186713099383876, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.319076439112347e-05, | |
| "loss": 2.2086, | |
| "step": 6150 | |
| }, | |
| { | |
| "epoch": 0.41253683364586125, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.308714264854833e-05, | |
| "loss": 2.1696, | |
| "step": 6160 | |
| }, | |
| { | |
| "epoch": 0.41320653629788373, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.298339472308598e-05, | |
| "loss": 2.2503, | |
| "step": 6170 | |
| }, | |
| { | |
| "epoch": 0.4138762389499062, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.287952118177117e-05, | |
| "loss": 2.1965, | |
| "step": 6180 | |
| }, | |
| { | |
| "epoch": 0.41454594160192876, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.277552259232522e-05, | |
| "loss": 2.1826, | |
| "step": 6190 | |
| }, | |
| { | |
| "epoch": 0.41521564425395124, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.267139952315295e-05, | |
| "loss": 2.1862, | |
| "step": 6200 | |
| }, | |
| { | |
| "epoch": 0.41588534690597373, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.256715254333946e-05, | |
| "loss": 2.1264, | |
| "step": 6210 | |
| }, | |
| { | |
| "epoch": 0.41655504955799627, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.246278222264713e-05, | |
| "loss": 2.2027, | |
| "step": 6220 | |
| }, | |
| { | |
| "epoch": 0.41722475221001876, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.235828913151242e-05, | |
| "loss": 2.2236, | |
| "step": 6230 | |
| }, | |
| { | |
| "epoch": 0.41789445486204124, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.225367384104282e-05, | |
| "loss": 2.2782, | |
| "step": 6240 | |
| }, | |
| { | |
| "epoch": 0.41856415751406373, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.21489369230137e-05, | |
| "loss": 2.2659, | |
| "step": 6250 | |
| }, | |
| { | |
| "epoch": 0.41923386016608627, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.204407894986518e-05, | |
| "loss": 2.1987, | |
| "step": 6260 | |
| }, | |
| { | |
| "epoch": 0.41990356281810876, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.1939100494699e-05, | |
| "loss": 2.2765, | |
| "step": 6270 | |
| }, | |
| { | |
| "epoch": 0.42057326547013124, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.183400213127543e-05, | |
| "loss": 2.1841, | |
| "step": 6280 | |
| }, | |
| { | |
| "epoch": 0.4212429681221538, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.172878443401003e-05, | |
| "loss": 2.2169, | |
| "step": 6290 | |
| }, | |
| { | |
| "epoch": 0.42191267077417627, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.162344797797065e-05, | |
| "loss": 2.1275, | |
| "step": 6300 | |
| }, | |
| { | |
| "epoch": 0.42258237342619875, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.151799333887415e-05, | |
| "loss": 2.1387, | |
| "step": 6310 | |
| }, | |
| { | |
| "epoch": 0.4232520760782213, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.141242109308343e-05, | |
| "loss": 2.3116, | |
| "step": 6320 | |
| }, | |
| { | |
| "epoch": 0.4239217787302438, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.130673181760403e-05, | |
| "loss": 2.2473, | |
| "step": 6330 | |
| }, | |
| { | |
| "epoch": 0.42459148138226627, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.120092609008122e-05, | |
| "loss": 2.2514, | |
| "step": 6340 | |
| }, | |
| { | |
| "epoch": 0.42526118403428875, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.10950044887967e-05, | |
| "loss": 2.2596, | |
| "step": 6350 | |
| }, | |
| { | |
| "epoch": 0.4259308866863113, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.098896759266547e-05, | |
| "loss": 2.1879, | |
| "step": 6360 | |
| }, | |
| { | |
| "epoch": 0.4266005893383338, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.088281598123267e-05, | |
| "loss": 2.1676, | |
| "step": 6370 | |
| }, | |
| { | |
| "epoch": 0.42727029199035627, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.077655023467049e-05, | |
| "loss": 2.2725, | |
| "step": 6380 | |
| }, | |
| { | |
| "epoch": 0.4279399946423788, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.067017093377484e-05, | |
| "loss": 2.1804, | |
| "step": 6390 | |
| }, | |
| { | |
| "epoch": 0.4286096972944013, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.056367865996226e-05, | |
| "loss": 2.2629, | |
| "step": 6400 | |
| }, | |
| { | |
| "epoch": 0.4292793999464238, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.045707399526682e-05, | |
| "loss": 2.1842, | |
| "step": 6410 | |
| }, | |
| { | |
| "epoch": 0.42994910259844626, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.035035752233681e-05, | |
| "loss": 2.217, | |
| "step": 6420 | |
| }, | |
| { | |
| "epoch": 0.4306188052504688, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.024352982443159e-05, | |
| "loss": 2.2961, | |
| "step": 6430 | |
| }, | |
| { | |
| "epoch": 0.4312885079024913, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.013659148541849e-05, | |
| "loss": 2.2467, | |
| "step": 6440 | |
| }, | |
| { | |
| "epoch": 0.4319582105545138, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.002954308976948e-05, | |
| "loss": 2.3547, | |
| "step": 6450 | |
| }, | |
| { | |
| "epoch": 0.4326279132065363, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.992238522255805e-05, | |
| "loss": 2.2718, | |
| "step": 6460 | |
| }, | |
| { | |
| "epoch": 0.4332976158585588, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.981511846945608e-05, | |
| "loss": 2.2483, | |
| "step": 6470 | |
| }, | |
| { | |
| "epoch": 0.4339673185105813, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.97077434167305e-05, | |
| "loss": 2.2501, | |
| "step": 6480 | |
| }, | |
| { | |
| "epoch": 0.43463702116260383, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.960026065124013e-05, | |
| "loss": 2.3018, | |
| "step": 6490 | |
| }, | |
| { | |
| "epoch": 0.4353067238146263, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.94926707604326e-05, | |
| "loss": 2.2755, | |
| "step": 6500 | |
| }, | |
| { | |
| "epoch": 0.4359764264666488, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.938497433234091e-05, | |
| "loss": 2.2152, | |
| "step": 6510 | |
| }, | |
| { | |
| "epoch": 0.4366461291186713, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.927717195558041e-05, | |
| "loss": 2.2302, | |
| "step": 6520 | |
| }, | |
| { | |
| "epoch": 0.43731583177069383, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.916926421934553e-05, | |
| "loss": 2.2201, | |
| "step": 6530 | |
| }, | |
| { | |
| "epoch": 0.4379855344227163, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.906125171340646e-05, | |
| "loss": 2.2863, | |
| "step": 6540 | |
| }, | |
| { | |
| "epoch": 0.4386552370747388, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.89531350281061e-05, | |
| "loss": 2.2234, | |
| "step": 6550 | |
| }, | |
| { | |
| "epoch": 0.43932493972676134, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.88449147543567e-05, | |
| "loss": 2.1963, | |
| "step": 6560 | |
| }, | |
| { | |
| "epoch": 0.43999464237878383, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.873659148363667e-05, | |
| "loss": 2.2119, | |
| "step": 6570 | |
| }, | |
| { | |
| "epoch": 0.4406643450308063, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.862816580798734e-05, | |
| "loss": 2.2322, | |
| "step": 6580 | |
| }, | |
| { | |
| "epoch": 0.4413340476828288, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.85196383200098e-05, | |
| "loss": 2.2327, | |
| "step": 6590 | |
| }, | |
| { | |
| "epoch": 0.44200375033485134, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.841100961286151e-05, | |
| "loss": 2.2449, | |
| "step": 6600 | |
| }, | |
| { | |
| "epoch": 0.4426734529868738, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.830228028025319e-05, | |
| "loss": 2.1634, | |
| "step": 6610 | |
| }, | |
| { | |
| "epoch": 0.4433431556388963, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.819345091644552e-05, | |
| "loss": 2.2752, | |
| "step": 6620 | |
| }, | |
| { | |
| "epoch": 0.44401285829091885, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.80845221162459e-05, | |
| "loss": 2.2577, | |
| "step": 6630 | |
| }, | |
| { | |
| "epoch": 0.44468256094294134, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.797549447500522e-05, | |
| "loss": 2.2371, | |
| "step": 6640 | |
| }, | |
| { | |
| "epoch": 0.4453522635949638, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.786636858861456e-05, | |
| "loss": 2.1673, | |
| "step": 6650 | |
| }, | |
| { | |
| "epoch": 0.4460219662469863, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.775714505350195e-05, | |
| "loss": 2.162, | |
| "step": 6660 | |
| }, | |
| { | |
| "epoch": 0.44669166889900885, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.764782446662915e-05, | |
| "loss": 2.3001, | |
| "step": 6670 | |
| }, | |
| { | |
| "epoch": 0.44736137155103134, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.753840742548836e-05, | |
| "loss": 2.2371, | |
| "step": 6680 | |
| }, | |
| { | |
| "epoch": 0.4480310742030538, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.742889452809891e-05, | |
| "loss": 2.214, | |
| "step": 6690 | |
| }, | |
| { | |
| "epoch": 0.44870077685507637, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.731928637300407e-05, | |
| "loss": 2.1805, | |
| "step": 6700 | |
| }, | |
| { | |
| "epoch": 0.44937047950709885, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.720958355926773e-05, | |
| "loss": 2.2062, | |
| "step": 6710 | |
| }, | |
| { | |
| "epoch": 0.45004018215912134, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.709978668647112e-05, | |
| "loss": 2.2897, | |
| "step": 6720 | |
| }, | |
| { | |
| "epoch": 0.4507098848111439, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.698989635470959e-05, | |
| "loss": 2.2066, | |
| "step": 6730 | |
| }, | |
| { | |
| "epoch": 0.45137958746316637, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.687991316458927e-05, | |
| "loss": 2.1965, | |
| "step": 6740 | |
| }, | |
| { | |
| "epoch": 0.45204929011518885, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.676983771722379e-05, | |
| "loss": 2.2724, | |
| "step": 6750 | |
| }, | |
| { | |
| "epoch": 0.45271899276721134, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.665967061423103e-05, | |
| "loss": 2.2437, | |
| "step": 6760 | |
| }, | |
| { | |
| "epoch": 0.4533886954192339, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.654941245772981e-05, | |
| "loss": 2.2341, | |
| "step": 6770 | |
| }, | |
| { | |
| "epoch": 0.45405839807125636, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.643906385033663e-05, | |
| "loss": 2.2412, | |
| "step": 6780 | |
| }, | |
| { | |
| "epoch": 0.45472810072327885, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.632862539516227e-05, | |
| "loss": 2.183, | |
| "step": 6790 | |
| }, | |
| { | |
| "epoch": 0.4553978033753014, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.621809769580867e-05, | |
| "loss": 2.3024, | |
| "step": 6800 | |
| }, | |
| { | |
| "epoch": 0.4560675060273239, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.61074813563655e-05, | |
| "loss": 2.1565, | |
| "step": 6810 | |
| }, | |
| { | |
| "epoch": 0.45673720867934636, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.599677698140688e-05, | |
| "loss": 2.2655, | |
| "step": 6820 | |
| }, | |
| { | |
| "epoch": 0.45740691133136885, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.588598517598808e-05, | |
| "loss": 2.2271, | |
| "step": 6830 | |
| }, | |
| { | |
| "epoch": 0.4580766139833914, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.577510654564222e-05, | |
| "loss": 2.1505, | |
| "step": 6840 | |
| }, | |
| { | |
| "epoch": 0.4587463166354139, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.566414169637702e-05, | |
| "loss": 2.2722, | |
| "step": 6850 | |
| }, | |
| { | |
| "epoch": 0.45941601928743636, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.555309123467136e-05, | |
| "loss": 2.3141, | |
| "step": 6860 | |
| }, | |
| { | |
| "epoch": 0.4600857219394589, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.544195576747208e-05, | |
| "loss": 2.2745, | |
| "step": 6870 | |
| }, | |
| { | |
| "epoch": 0.4607554245914814, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.533073590219058e-05, | |
| "loss": 2.2846, | |
| "step": 6880 | |
| }, | |
| { | |
| "epoch": 0.4614251272435039, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.52194322466996e-05, | |
| "loss": 2.2694, | |
| "step": 6890 | |
| }, | |
| { | |
| "epoch": 0.46209482989552636, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.510804540932974e-05, | |
| "loss": 2.2835, | |
| "step": 6900 | |
| }, | |
| { | |
| "epoch": 0.4627645325475489, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.49965759988663e-05, | |
| "loss": 2.2306, | |
| "step": 6910 | |
| }, | |
| { | |
| "epoch": 0.4634342351995714, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.48850246245459e-05, | |
| "loss": 2.2775, | |
| "step": 6920 | |
| }, | |
| { | |
| "epoch": 0.4641039378515939, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.477339189605311e-05, | |
| "loss": 2.3575, | |
| "step": 6930 | |
| }, | |
| { | |
| "epoch": 0.4647736405036164, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.466167842351708e-05, | |
| "loss": 2.2569, | |
| "step": 6940 | |
| }, | |
| { | |
| "epoch": 0.4654433431556389, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.454988481750836e-05, | |
| "loss": 2.207, | |
| "step": 6950 | |
| }, | |
| { | |
| "epoch": 0.4661130458076614, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.443801168903542e-05, | |
| "loss": 2.2561, | |
| "step": 6960 | |
| }, | |
| { | |
| "epoch": 0.4667827484596839, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.432605964954139e-05, | |
| "loss": 2.1582, | |
| "step": 6970 | |
| }, | |
| { | |
| "epoch": 0.4674524511117064, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.421402931090064e-05, | |
| "loss": 2.2948, | |
| "step": 6980 | |
| }, | |
| { | |
| "epoch": 0.4681221537637289, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.410192128541554e-05, | |
| "loss": 2.1836, | |
| "step": 6990 | |
| }, | |
| { | |
| "epoch": 0.4687918564157514, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.398973618581303e-05, | |
| "loss": 2.2952, | |
| "step": 7000 | |
| }, | |
| { | |
| "epoch": 0.4694615590677739, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.38774746252413e-05, | |
| "loss": 2.2567, | |
| "step": 7010 | |
| }, | |
| { | |
| "epoch": 0.4701312617197964, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.376513721726642e-05, | |
| "loss": 2.1953, | |
| "step": 7020 | |
| }, | |
| { | |
| "epoch": 0.4708009643718189, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.365272457586906e-05, | |
| "loss": 2.2673, | |
| "step": 7030 | |
| }, | |
| { | |
| "epoch": 0.47147066702384144, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.354023731544101e-05, | |
| "loss": 2.1898, | |
| "step": 7040 | |
| }, | |
| { | |
| "epoch": 0.4721403696758639, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.342767605078195e-05, | |
| "loss": 2.2221, | |
| "step": 7050 | |
| }, | |
| { | |
| "epoch": 0.4728100723278864, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.331504139709597e-05, | |
| "loss": 2.3085, | |
| "step": 7060 | |
| }, | |
| { | |
| "epoch": 0.4734797749799089, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.320233396998835e-05, | |
| "loss": 2.2103, | |
| "step": 7070 | |
| }, | |
| { | |
| "epoch": 0.47414947763193144, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.308955438546202e-05, | |
| "loss": 2.2137, | |
| "step": 7080 | |
| }, | |
| { | |
| "epoch": 0.4748191802839539, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.297670325991436e-05, | |
| "loss": 2.2429, | |
| "step": 7090 | |
| }, | |
| { | |
| "epoch": 0.4754888829359764, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.286378121013372e-05, | |
| "loss": 2.1838, | |
| "step": 7100 | |
| }, | |
| { | |
| "epoch": 0.47615858558799895, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.275078885329609e-05, | |
| "loss": 2.2697, | |
| "step": 7110 | |
| }, | |
| { | |
| "epoch": 0.47682828824002144, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.263772680696173e-05, | |
| "loss": 2.2319, | |
| "step": 7120 | |
| }, | |
| { | |
| "epoch": 0.4774979908920439, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.252459568907181e-05, | |
| "loss": 2.2726, | |
| "step": 7130 | |
| }, | |
| { | |
| "epoch": 0.4781676935440664, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.241139611794499e-05, | |
| "loss": 2.2054, | |
| "step": 7140 | |
| }, | |
| { | |
| "epoch": 0.47883739619608895, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.229812871227401e-05, | |
| "loss": 2.1353, | |
| "step": 7150 | |
| }, | |
| { | |
| "epoch": 0.47950709884811143, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.218479409112246e-05, | |
| "loss": 2.2005, | |
| "step": 7160 | |
| }, | |
| { | |
| "epoch": 0.4801768015001339, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.207139287392126e-05, | |
| "loss": 2.1865, | |
| "step": 7170 | |
| }, | |
| { | |
| "epoch": 0.48084650415215646, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.195792568046524e-05, | |
| "loss": 2.1478, | |
| "step": 7180 | |
| }, | |
| { | |
| "epoch": 0.48151620680417895, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.184439313090992e-05, | |
| "loss": 2.1964, | |
| "step": 7190 | |
| }, | |
| { | |
| "epoch": 0.48218590945620143, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.1730795845768e-05, | |
| "loss": 2.2652, | |
| "step": 7200 | |
| }, | |
| { | |
| "epoch": 0.482855612108224, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.161713444590594e-05, | |
| "loss": 2.1486, | |
| "step": 7210 | |
| }, | |
| { | |
| "epoch": 0.48352531476024646, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.150340955254067e-05, | |
| "loss": 2.3471, | |
| "step": 7220 | |
| }, | |
| { | |
| "epoch": 0.48419501741226895, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.138962178723615e-05, | |
| "loss": 2.3191, | |
| "step": 7230 | |
| }, | |
| { | |
| "epoch": 0.48486472006429143, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.127577177189994e-05, | |
| "loss": 2.1875, | |
| "step": 7240 | |
| }, | |
| { | |
| "epoch": 0.485534422716314, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.116186012877984e-05, | |
| "loss": 2.2136, | |
| "step": 7250 | |
| }, | |
| { | |
| "epoch": 0.48620412536833646, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.104788748046046e-05, | |
| "loss": 2.1942, | |
| "step": 7260 | |
| }, | |
| { | |
| "epoch": 0.48687382802035895, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.093385444985985e-05, | |
| "loss": 2.2411, | |
| "step": 7270 | |
| }, | |
| { | |
| "epoch": 0.4875435306723815, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.0819761660226095e-05, | |
| "loss": 2.221, | |
| "step": 7280 | |
| }, | |
| { | |
| "epoch": 0.488213233324404, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.070560973513386e-05, | |
| "loss": 2.2098, | |
| "step": 7290 | |
| }, | |
| { | |
| "epoch": 0.48888293597642646, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.0591399298481036e-05, | |
| "loss": 2.2021, | |
| "step": 7300 | |
| }, | |
| { | |
| "epoch": 0.48955263862844894, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.047713097448531e-05, | |
| "loss": 2.3027, | |
| "step": 7310 | |
| }, | |
| { | |
| "epoch": 0.4902223412804715, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.036280538768073e-05, | |
| "loss": 2.1895, | |
| "step": 7320 | |
| }, | |
| { | |
| "epoch": 0.49089204393249397, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.0248423162914346e-05, | |
| "loss": 2.1976, | |
| "step": 7330 | |
| }, | |
| { | |
| "epoch": 0.49156174658451646, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.013398492534274e-05, | |
| "loss": 2.2668, | |
| "step": 7340 | |
| }, | |
| { | |
| "epoch": 0.492231449236539, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.001949130042864e-05, | |
| "loss": 2.1931, | |
| "step": 7350 | |
| }, | |
| { | |
| "epoch": 0.4929011518885615, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.99049429139375e-05, | |
| "loss": 2.2773, | |
| "step": 7360 | |
| }, | |
| { | |
| "epoch": 0.49357085454058397, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.9790340391934076e-05, | |
| "loss": 2.1634, | |
| "step": 7370 | |
| }, | |
| { | |
| "epoch": 0.49424055719260646, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.9675684360778984e-05, | |
| "loss": 2.3121, | |
| "step": 7380 | |
| }, | |
| { | |
| "epoch": 0.494910259844629, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.956097544712529e-05, | |
| "loss": 2.289, | |
| "step": 7390 | |
| }, | |
| { | |
| "epoch": 0.4955799624966515, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.944621427791513e-05, | |
| "loss": 2.3509, | |
| "step": 7400 | |
| }, | |
| { | |
| "epoch": 0.49624966514867397, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.933140148037617e-05, | |
| "loss": 2.2824, | |
| "step": 7410 | |
| }, | |
| { | |
| "epoch": 0.4969193678006965, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.9216537682018345e-05, | |
| "loss": 2.2424, | |
| "step": 7420 | |
| }, | |
| { | |
| "epoch": 0.497589070452719, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.910162351063029e-05, | |
| "loss": 2.2644, | |
| "step": 7430 | |
| }, | |
| { | |
| "epoch": 0.4982587731047415, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.898665959427593e-05, | |
| "loss": 2.2801, | |
| "step": 7440 | |
| }, | |
| { | |
| "epoch": 0.498928475756764, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.8871646561291085e-05, | |
| "loss": 2.2242, | |
| "step": 7450 | |
| }, | |
| { | |
| "epoch": 0.4995981784087865, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.875658504028004e-05, | |
| "loss": 2.2587, | |
| "step": 7460 | |
| }, | |
| { | |
| "epoch": 0.500267881060809, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.864147566011209e-05, | |
| "loss": 2.3066, | |
| "step": 7470 | |
| }, | |
| { | |
| "epoch": 0.5009375837128315, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.8526319049918085e-05, | |
| "loss": 2.2228, | |
| "step": 7480 | |
| }, | |
| { | |
| "epoch": 0.501607286364854, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.841111583908705e-05, | |
| "loss": 2.1845, | |
| "step": 7490 | |
| }, | |
| { | |
| "epoch": 0.5022769890168765, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.8295866657262634e-05, | |
| "loss": 2.2567, | |
| "step": 7500 | |
| }, | |
| { | |
| "epoch": 0.502946691668899, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.8180572134339805e-05, | |
| "loss": 2.2037, | |
| "step": 7510 | |
| }, | |
| { | |
| "epoch": 0.5036163943209215, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.806523290046134e-05, | |
| "loss": 2.1284, | |
| "step": 7520 | |
| }, | |
| { | |
| "epoch": 0.504286096972944, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.7949849586014337e-05, | |
| "loss": 2.1658, | |
| "step": 7530 | |
| }, | |
| { | |
| "epoch": 0.5049557996249665, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.7834422821626855e-05, | |
| "loss": 2.2954, | |
| "step": 7540 | |
| }, | |
| { | |
| "epoch": 0.505625502276989, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.771895323816444e-05, | |
| "loss": 2.1094, | |
| "step": 7550 | |
| }, | |
| { | |
| "epoch": 0.5062952049290115, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.7603441466726637e-05, | |
| "loss": 2.2384, | |
| "step": 7560 | |
| }, | |
| { | |
| "epoch": 0.5069649075810341, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.7487888138643555e-05, | |
| "loss": 2.2502, | |
| "step": 7570 | |
| }, | |
| { | |
| "epoch": 0.5076346102330566, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.737229388547248e-05, | |
| "loss": 2.0901, | |
| "step": 7580 | |
| }, | |
| { | |
| "epoch": 0.508304312885079, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.7256659338994334e-05, | |
| "loss": 2.2689, | |
| "step": 7590 | |
| }, | |
| { | |
| "epoch": 0.5089740155371015, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.714098513121029e-05, | |
| "loss": 2.2226, | |
| "step": 7600 | |
| }, | |
| { | |
| "epoch": 0.509643718189124, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.702527189433827e-05, | |
| "loss": 2.2495, | |
| "step": 7610 | |
| }, | |
| { | |
| "epoch": 0.5103134208411465, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.690952026080954e-05, | |
| "loss": 2.2461, | |
| "step": 7620 | |
| }, | |
| { | |
| "epoch": 0.510983123493169, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.6793730863265134e-05, | |
| "loss": 2.205, | |
| "step": 7630 | |
| }, | |
| { | |
| "epoch": 0.5116528261451916, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.6677904334552636e-05, | |
| "loss": 2.1968, | |
| "step": 7640 | |
| }, | |
| { | |
| "epoch": 0.5123225287972141, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.656204130772244e-05, | |
| "loss": 2.1998, | |
| "step": 7650 | |
| }, | |
| { | |
| "epoch": 0.5129922314492366, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.644614241602445e-05, | |
| "loss": 2.1372, | |
| "step": 7660 | |
| }, | |
| { | |
| "epoch": 0.513661934101259, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.633020829290463e-05, | |
| "loss": 2.2227, | |
| "step": 7670 | |
| }, | |
| { | |
| "epoch": 0.5143316367532815, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.621423957200149e-05, | |
| "loss": 2.2054, | |
| "step": 7680 | |
| }, | |
| { | |
| "epoch": 0.515001339405304, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.609823688714261e-05, | |
| "loss": 2.1844, | |
| "step": 7690 | |
| }, | |
| { | |
| "epoch": 0.5156710420573265, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.598220087234121e-05, | |
| "loss": 2.1766, | |
| "step": 7700 | |
| }, | |
| { | |
| "epoch": 0.5163407447093491, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.5866132161792676e-05, | |
| "loss": 2.2252, | |
| "step": 7710 | |
| }, | |
| { | |
| "epoch": 0.5170104473613716, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.575003138987108e-05, | |
| "loss": 2.2189, | |
| "step": 7720 | |
| }, | |
| { | |
| "epoch": 0.5176801500133941, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.563389919112576e-05, | |
| "loss": 2.2178, | |
| "step": 7730 | |
| }, | |
| { | |
| "epoch": 0.5183498526654166, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.55177362002778e-05, | |
| "loss": 2.2234, | |
| "step": 7740 | |
| }, | |
| { | |
| "epoch": 0.519019555317439, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.5401543052216565e-05, | |
| "loss": 2.3178, | |
| "step": 7750 | |
| }, | |
| { | |
| "epoch": 0.5196892579694615, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.5285320381996266e-05, | |
| "loss": 2.3021, | |
| "step": 7760 | |
| }, | |
| { | |
| "epoch": 0.520358960621484, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.5169068824832436e-05, | |
| "loss": 2.2454, | |
| "step": 7770 | |
| }, | |
| { | |
| "epoch": 0.5210286632735066, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.505278901609852e-05, | |
| "loss": 2.2416, | |
| "step": 7780 | |
| }, | |
| { | |
| "epoch": 0.5216983659255291, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.493648159132237e-05, | |
| "loss": 2.196, | |
| "step": 7790 | |
| }, | |
| { | |
| "epoch": 0.5223680685775516, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.4820147186182743e-05, | |
| "loss": 2.3359, | |
| "step": 7800 | |
| }, | |
| { | |
| "epoch": 0.5230377712295741, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.470378643650591e-05, | |
| "loss": 2.2629, | |
| "step": 7810 | |
| }, | |
| { | |
| "epoch": 0.5237074738815966, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.458739997826209e-05, | |
| "loss": 2.2305, | |
| "step": 7820 | |
| }, | |
| { | |
| "epoch": 0.524377176533619, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.4470988447562e-05, | |
| "loss": 2.1962, | |
| "step": 7830 | |
| }, | |
| { | |
| "epoch": 0.5250468791856415, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.435455248065343e-05, | |
| "loss": 2.1846, | |
| "step": 7840 | |
| }, | |
| { | |
| "epoch": 0.5257165818376641, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.42380927139177e-05, | |
| "loss": 2.2579, | |
| "step": 7850 | |
| }, | |
| { | |
| "epoch": 0.5263862844896866, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.412160978386622e-05, | |
| "loss": 2.1761, | |
| "step": 7860 | |
| }, | |
| { | |
| "epoch": 0.5270559871417091, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.400510432713699e-05, | |
| "loss": 2.138, | |
| "step": 7870 | |
| }, | |
| { | |
| "epoch": 0.5277256897937316, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.3888576980491145e-05, | |
| "loss": 2.2061, | |
| "step": 7880 | |
| }, | |
| { | |
| "epoch": 0.5283953924457541, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.3772028380809434e-05, | |
| "loss": 2.2719, | |
| "step": 7890 | |
| }, | |
| { | |
| "epoch": 0.5290650950977765, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.3655459165088784e-05, | |
| "loss": 2.3146, | |
| "step": 7900 | |
| }, | |
| { | |
| "epoch": 0.529734797749799, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.3538869970438795e-05, | |
| "loss": 2.268, | |
| "step": 7910 | |
| }, | |
| { | |
| "epoch": 0.5304045004018216, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.342226143407827e-05, | |
| "loss": 2.0887, | |
| "step": 7920 | |
| }, | |
| { | |
| "epoch": 0.5310742030538441, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.33056341933317e-05, | |
| "loss": 2.1917, | |
| "step": 7930 | |
| }, | |
| { | |
| "epoch": 0.5317439057058666, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.318898888562584e-05, | |
| "loss": 2.2067, | |
| "step": 7940 | |
| }, | |
| { | |
| "epoch": 0.5324136083578891, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.307232614848615e-05, | |
| "loss": 2.2597, | |
| "step": 7950 | |
| }, | |
| { | |
| "epoch": 0.5330833110099116, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.295564661953337e-05, | |
| "loss": 2.2602, | |
| "step": 7960 | |
| }, | |
| { | |
| "epoch": 0.5337530136619341, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.283895093648002e-05, | |
| "loss": 2.2489, | |
| "step": 7970 | |
| }, | |
| { | |
| "epoch": 0.5344227163139567, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.272223973712694e-05, | |
| "loss": 2.2289, | |
| "step": 7980 | |
| }, | |
| { | |
| "epoch": 0.5350924189659791, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.26055136593597e-05, | |
| "loss": 2.3036, | |
| "step": 7990 | |
| }, | |
| { | |
| "epoch": 0.5357621216180016, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.248877334114525e-05, | |
| "loss": 2.2411, | |
| "step": 8000 | |
| }, | |
| { | |
| "epoch": 0.5364318242700241, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.237201942052834e-05, | |
| "loss": 2.196, | |
| "step": 8010 | |
| }, | |
| { | |
| "epoch": 0.5371015269220466, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.2255252535628084e-05, | |
| "loss": 2.2915, | |
| "step": 8020 | |
| }, | |
| { | |
| "epoch": 0.5377712295740691, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.213847332463443e-05, | |
| "loss": 2.2728, | |
| "step": 8030 | |
| }, | |
| { | |
| "epoch": 0.5384409322260916, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.202168242580472e-05, | |
| "loss": 2.2747, | |
| "step": 8040 | |
| }, | |
| { | |
| "epoch": 0.5391106348781142, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.190488047746017e-05, | |
| "loss": 2.2406, | |
| "step": 8050 | |
| }, | |
| { | |
| "epoch": 0.5397803375301367, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.178806811798236e-05, | |
| "loss": 2.2754, | |
| "step": 8060 | |
| }, | |
| { | |
| "epoch": 0.5404500401821591, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.167124598580981e-05, | |
| "loss": 2.2857, | |
| "step": 8070 | |
| }, | |
| { | |
| "epoch": 0.5411197428341816, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.155441471943442e-05, | |
| "loss": 2.2487, | |
| "step": 8080 | |
| }, | |
| { | |
| "epoch": 0.5417894454862041, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.1437574957398026e-05, | |
| "loss": 2.2719, | |
| "step": 8090 | |
| }, | |
| { | |
| "epoch": 0.5424591481382266, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.132072733828893e-05, | |
| "loss": 2.2708, | |
| "step": 8100 | |
| }, | |
| { | |
| "epoch": 0.5431288507902491, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.120387250073833e-05, | |
| "loss": 2.3129, | |
| "step": 8110 | |
| }, | |
| { | |
| "epoch": 0.5437985534422717, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.1087011083416866e-05, | |
| "loss": 2.2324, | |
| "step": 8120 | |
| }, | |
| { | |
| "epoch": 0.5444682560942942, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.097014372503121e-05, | |
| "loss": 2.258, | |
| "step": 8130 | |
| }, | |
| { | |
| "epoch": 0.5451379587463167, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.0853271064320427e-05, | |
| "loss": 2.2671, | |
| "step": 8140 | |
| }, | |
| { | |
| "epoch": 0.5458076613983391, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.073639374005261e-05, | |
| "loss": 2.1627, | |
| "step": 8150 | |
| }, | |
| { | |
| "epoch": 0.5464773640503616, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.0619512391021354e-05, | |
| "loss": 2.1862, | |
| "step": 8160 | |
| }, | |
| { | |
| "epoch": 0.5471470667023841, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.0502627656042177e-05, | |
| "loss": 2.2419, | |
| "step": 8170 | |
| }, | |
| { | |
| "epoch": 0.5478167693544066, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.038574017394917e-05, | |
| "loss": 2.2802, | |
| "step": 8180 | |
| }, | |
| { | |
| "epoch": 0.5484864720064292, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.026885058359142e-05, | |
| "loss": 2.2084, | |
| "step": 8190 | |
| }, | |
| { | |
| "epoch": 0.5491561746584517, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.015195952382954e-05, | |
| "loss": 2.1723, | |
| "step": 8200 | |
| }, | |
| { | |
| "epoch": 0.5498258773104742, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.003506763353215e-05, | |
| "loss": 2.2422, | |
| "step": 8210 | |
| }, | |
| { | |
| "epoch": 0.5504955799624967, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.991817555157246e-05, | |
| "loss": 2.2529, | |
| "step": 8220 | |
| }, | |
| { | |
| "epoch": 0.5511652826145191, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.980128391682468e-05, | |
| "loss": 2.1813, | |
| "step": 8230 | |
| }, | |
| { | |
| "epoch": 0.5518349852665416, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.968439336816058e-05, | |
| "loss": 2.2105, | |
| "step": 8240 | |
| }, | |
| { | |
| "epoch": 0.5525046879185641, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.956750454444603e-05, | |
| "loss": 2.2075, | |
| "step": 8250 | |
| }, | |
| { | |
| "epoch": 0.5531743905705867, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.9450618084537425e-05, | |
| "loss": 2.1755, | |
| "step": 8260 | |
| }, | |
| { | |
| "epoch": 0.5538440932226092, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.9333734627278284e-05, | |
| "loss": 2.1517, | |
| "step": 8270 | |
| }, | |
| { | |
| "epoch": 0.5545137958746317, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.921685481149569e-05, | |
| "loss": 2.1668, | |
| "step": 8280 | |
| }, | |
| { | |
| "epoch": 0.5551834985266542, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.9099979275996806e-05, | |
| "loss": 2.205, | |
| "step": 8290 | |
| }, | |
| { | |
| "epoch": 0.5558532011786766, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.898310865956544e-05, | |
| "loss": 2.249, | |
| "step": 8300 | |
| }, | |
| { | |
| "epoch": 0.5565229038306991, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.886624360095851e-05, | |
| "loss": 2.2103, | |
| "step": 8310 | |
| }, | |
| { | |
| "epoch": 0.5571926064827216, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.8749384738902545e-05, | |
| "loss": 2.2059, | |
| "step": 8320 | |
| }, | |
| { | |
| "epoch": 0.5578623091347442, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.863253271209018e-05, | |
| "loss": 2.2839, | |
| "step": 8330 | |
| }, | |
| { | |
| "epoch": 0.5585320117867667, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.851568815917675e-05, | |
| "loss": 2.1547, | |
| "step": 8340 | |
| }, | |
| { | |
| "epoch": 0.5592017144387892, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.8398851718776703e-05, | |
| "loss": 2.2948, | |
| "step": 8350 | |
| }, | |
| { | |
| "epoch": 0.5598714170908117, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.828202402946014e-05, | |
| "loss": 2.1743, | |
| "step": 8360 | |
| }, | |
| { | |
| "epoch": 0.5605411197428342, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.8165205729749375e-05, | |
| "loss": 2.2389, | |
| "step": 8370 | |
| }, | |
| { | |
| "epoch": 0.5612108223948566, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.804839745811535e-05, | |
| "loss": 2.2072, | |
| "step": 8380 | |
| }, | |
| { | |
| "epoch": 0.5618805250468791, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.7931599852974244e-05, | |
| "loss": 2.1628, | |
| "step": 8390 | |
| }, | |
| { | |
| "epoch": 0.5625502276989017, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.781481355268393e-05, | |
| "loss": 2.1809, | |
| "step": 8400 | |
| }, | |
| { | |
| "epoch": 0.5632199303509242, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.769803919554044e-05, | |
| "loss": 2.2693, | |
| "step": 8410 | |
| }, | |
| { | |
| "epoch": 0.5638896330029467, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.7581277419774616e-05, | |
| "loss": 2.1879, | |
| "step": 8420 | |
| }, | |
| { | |
| "epoch": 0.5645593356549692, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.7464528863548466e-05, | |
| "loss": 2.1736, | |
| "step": 8430 | |
| }, | |
| { | |
| "epoch": 0.5652290383069917, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.734779416495182e-05, | |
| "loss": 2.2325, | |
| "step": 8440 | |
| }, | |
| { | |
| "epoch": 0.5658987409590142, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.723107396199867e-05, | |
| "loss": 2.1428, | |
| "step": 8450 | |
| }, | |
| { | |
| "epoch": 0.5665684436110368, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.711436889262388e-05, | |
| "loss": 2.1846, | |
| "step": 8460 | |
| }, | |
| { | |
| "epoch": 0.5672381462630592, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.699767959467955e-05, | |
| "loss": 2.1416, | |
| "step": 8470 | |
| }, | |
| { | |
| "epoch": 0.5679078489150817, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.688100670593156e-05, | |
| "loss": 2.2282, | |
| "step": 8480 | |
| }, | |
| { | |
| "epoch": 0.5685775515671042, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.6764350864056174e-05, | |
| "loss": 2.2048, | |
| "step": 8490 | |
| }, | |
| { | |
| "epoch": 0.5692472542191267, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.6647712706636427e-05, | |
| "loss": 2.204, | |
| "step": 8500 | |
| }, | |
| { | |
| "epoch": 0.5699169568711492, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.6531092871158706e-05, | |
| "loss": 2.1583, | |
| "step": 8510 | |
| }, | |
| { | |
| "epoch": 0.5705866595231717, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.64144919950093e-05, | |
| "loss": 2.2748, | |
| "step": 8520 | |
| }, | |
| { | |
| "epoch": 0.5712563621751943, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.6297910715470824e-05, | |
| "loss": 2.2072, | |
| "step": 8530 | |
| }, | |
| { | |
| "epoch": 0.5719260648272168, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.6181349669718836e-05, | |
| "loss": 2.2438, | |
| "step": 8540 | |
| }, | |
| { | |
| "epoch": 0.5725957674792392, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.606480949481825e-05, | |
| "loss": 2.2714, | |
| "step": 8550 | |
| }, | |
| { | |
| "epoch": 0.5732654701312617, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.594829082771999e-05, | |
| "loss": 2.1909, | |
| "step": 8560 | |
| }, | |
| { | |
| "epoch": 0.5739351727832842, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.583179430525734e-05, | |
| "loss": 2.2508, | |
| "step": 8570 | |
| }, | |
| { | |
| "epoch": 0.5746048754353067, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.571532056414261e-05, | |
| "loss": 2.2314, | |
| "step": 8580 | |
| }, | |
| { | |
| "epoch": 0.5752745780873292, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.559887024096359e-05, | |
| "loss": 2.2437, | |
| "step": 8590 | |
| }, | |
| { | |
| "epoch": 0.5759442807393518, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.548244397218005e-05, | |
| "loss": 2.216, | |
| "step": 8600 | |
| }, | |
| { | |
| "epoch": 0.5766139833913743, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.536604239412034e-05, | |
| "loss": 2.2446, | |
| "step": 8610 | |
| }, | |
| { | |
| "epoch": 0.5772836860433967, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.5249666142977815e-05, | |
| "loss": 2.1948, | |
| "step": 8620 | |
| }, | |
| { | |
| "epoch": 0.5779533886954192, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.513331585480742e-05, | |
| "loss": 2.1927, | |
| "step": 8630 | |
| }, | |
| { | |
| "epoch": 0.5786230913474417, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.501699216552222e-05, | |
| "loss": 2.2892, | |
| "step": 8640 | |
| }, | |
| { | |
| "epoch": 0.5792927939994642, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.4900695710889885e-05, | |
| "loss": 2.222, | |
| "step": 8650 | |
| }, | |
| { | |
| "epoch": 0.5799624966514867, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.478442712652924e-05, | |
| "loss": 2.2294, | |
| "step": 8660 | |
| }, | |
| { | |
| "epoch": 0.5806321993035093, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.466818704790675e-05, | |
| "loss": 2.2264, | |
| "step": 8670 | |
| }, | |
| { | |
| "epoch": 0.5813019019555318, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.455197611033317e-05, | |
| "loss": 2.1831, | |
| "step": 8680 | |
| }, | |
| { | |
| "epoch": 0.5819716046075543, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.443579494895989e-05, | |
| "loss": 2.3108, | |
| "step": 8690 | |
| }, | |
| { | |
| "epoch": 0.5826413072595767, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.431964419877561e-05, | |
| "loss": 2.094, | |
| "step": 8700 | |
| }, | |
| { | |
| "epoch": 0.5833110099115992, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.42035244946028e-05, | |
| "loss": 2.1602, | |
| "step": 8710 | |
| }, | |
| { | |
| "epoch": 0.5839807125636217, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.408743647109425e-05, | |
| "loss": 2.2146, | |
| "step": 8720 | |
| }, | |
| { | |
| "epoch": 0.5846504152156442, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.397138076272961e-05, | |
| "loss": 2.1913, | |
| "step": 8730 | |
| }, | |
| { | |
| "epoch": 0.5853201178676668, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.38553580038119e-05, | |
| "loss": 2.1041, | |
| "step": 8740 | |
| }, | |
| { | |
| "epoch": 0.5859898205196893, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.3739368828464036e-05, | |
| "loss": 2.1198, | |
| "step": 8750 | |
| }, | |
| { | |
| "epoch": 0.5866595231717118, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.362341387062543e-05, | |
| "loss": 2.1642, | |
| "step": 8760 | |
| }, | |
| { | |
| "epoch": 0.5873292258237343, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.350749376404844e-05, | |
| "loss": 2.2242, | |
| "step": 8770 | |
| }, | |
| { | |
| "epoch": 0.5879989284757567, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.339160914229495e-05, | |
| "loss": 2.1732, | |
| "step": 8780 | |
| }, | |
| { | |
| "epoch": 0.5886686311277792, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.327576063873293e-05, | |
| "loss": 2.2439, | |
| "step": 8790 | |
| }, | |
| { | |
| "epoch": 0.5893383337798017, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.315994888653288e-05, | |
| "loss": 2.2989, | |
| "step": 8800 | |
| }, | |
| { | |
| "epoch": 0.5900080364318243, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.304417451866454e-05, | |
| "loss": 2.2347, | |
| "step": 8810 | |
| }, | |
| { | |
| "epoch": 0.5906777390838468, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.2928438167893224e-05, | |
| "loss": 2.2218, | |
| "step": 8820 | |
| }, | |
| { | |
| "epoch": 0.5913474417358693, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.281274046677653e-05, | |
| "loss": 2.1046, | |
| "step": 8830 | |
| }, | |
| { | |
| "epoch": 0.5920171443878918, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.269708204766074e-05, | |
| "loss": 2.19, | |
| "step": 8840 | |
| }, | |
| { | |
| "epoch": 0.5926868470399143, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.2581463542677554e-05, | |
| "loss": 2.1805, | |
| "step": 8850 | |
| }, | |
| { | |
| "epoch": 0.5933565496919367, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.2465885583740415e-05, | |
| "loss": 2.2131, | |
| "step": 8860 | |
| }, | |
| { | |
| "epoch": 0.5940262523439592, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.2350348802541205e-05, | |
| "loss": 2.1286, | |
| "step": 8870 | |
| }, | |
| { | |
| "epoch": 0.5946959549959818, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.2234853830546775e-05, | |
| "loss": 2.1751, | |
| "step": 8880 | |
| }, | |
| { | |
| "epoch": 0.5953656576480043, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.211940129899542e-05, | |
| "loss": 2.2916, | |
| "step": 8890 | |
| }, | |
| { | |
| "epoch": 0.5960353603000268, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.200399183889349e-05, | |
| "loss": 2.213, | |
| "step": 8900 | |
| }, | |
| { | |
| "epoch": 0.5967050629520493, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.1888626081011954e-05, | |
| "loss": 2.2073, | |
| "step": 8910 | |
| }, | |
| { | |
| "epoch": 0.5973747656040718, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.177330465588288e-05, | |
| "loss": 2.2888, | |
| "step": 8920 | |
| }, | |
| { | |
| "epoch": 0.5980444682560943, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.165802819379609e-05, | |
| "loss": 2.2332, | |
| "step": 8930 | |
| }, | |
| { | |
| "epoch": 0.5987141709081168, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.154279732479562e-05, | |
| "loss": 2.2595, | |
| "step": 8940 | |
| }, | |
| { | |
| "epoch": 0.5993838735601393, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.142761267867634e-05, | |
| "loss": 2.2216, | |
| "step": 8950 | |
| }, | |
| { | |
| "epoch": 0.6000535762121618, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.1312474884980454e-05, | |
| "loss": 2.23, | |
| "step": 8960 | |
| }, | |
| { | |
| "epoch": 0.6007232788641843, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.119738457299415e-05, | |
| "loss": 2.2446, | |
| "step": 8970 | |
| }, | |
| { | |
| "epoch": 0.6013929815162068, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.1082342371744044e-05, | |
| "loss": 2.2392, | |
| "step": 8980 | |
| }, | |
| { | |
| "epoch": 0.6020626841682293, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.096734890999382e-05, | |
| "loss": 2.1591, | |
| "step": 8990 | |
| }, | |
| { | |
| "epoch": 0.6027323868202518, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.085240481624083e-05, | |
| "loss": 2.277, | |
| "step": 9000 | |
| }, | |
| { | |
| "epoch": 0.6034020894722744, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.073751071871254e-05, | |
| "loss": 2.1811, | |
| "step": 9010 | |
| }, | |
| { | |
| "epoch": 0.6040717921242968, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.062266724536317e-05, | |
| "loss": 2.2649, | |
| "step": 9020 | |
| }, | |
| { | |
| "epoch": 0.6047414947763193, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.050787502387028e-05, | |
| "loss": 2.2494, | |
| "step": 9030 | |
| }, | |
| { | |
| "epoch": 0.6054111974283418, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.039313468163128e-05, | |
| "loss": 2.1891, | |
| "step": 9040 | |
| }, | |
| { | |
| "epoch": 0.6060809000803643, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.0278446845760077e-05, | |
| "loss": 2.248, | |
| "step": 9050 | |
| }, | |
| { | |
| "epoch": 0.6067506027323868, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.0163812143083566e-05, | |
| "loss": 2.1845, | |
| "step": 9060 | |
| }, | |
| { | |
| "epoch": 0.6074203053844093, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.004923120013828e-05, | |
| "loss": 2.2027, | |
| "step": 9070 | |
| }, | |
| { | |
| "epoch": 0.6080900080364319, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.993470464316686e-05, | |
| "loss": 2.2485, | |
| "step": 9080 | |
| }, | |
| { | |
| "epoch": 0.6087597106884544, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.98202330981148e-05, | |
| "loss": 2.1538, | |
| "step": 9090 | |
| }, | |
| { | |
| "epoch": 0.6094294133404768, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.9705817190626827e-05, | |
| "loss": 2.2527, | |
| "step": 9100 | |
| }, | |
| { | |
| "epoch": 0.6100991159924993, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.959145754604363e-05, | |
| "loss": 2.2026, | |
| "step": 9110 | |
| }, | |
| { | |
| "epoch": 0.6107688186445218, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.94771547893984e-05, | |
| "loss": 2.1733, | |
| "step": 9120 | |
| }, | |
| { | |
| "epoch": 0.6114385212965443, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.93629095454134e-05, | |
| "loss": 2.3497, | |
| "step": 9130 | |
| }, | |
| { | |
| "epoch": 0.6121082239485668, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.924872243849651e-05, | |
| "loss": 2.2275, | |
| "step": 9140 | |
| }, | |
| { | |
| "epoch": 0.6127779266005894, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.9134594092737944e-05, | |
| "loss": 2.3132, | |
| "step": 9150 | |
| }, | |
| { | |
| "epoch": 0.6134476292526119, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.902052513190671e-05, | |
| "loss": 2.2949, | |
| "step": 9160 | |
| }, | |
| { | |
| "epoch": 0.6141173319046344, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.890651617944722e-05, | |
| "loss": 2.199, | |
| "step": 9170 | |
| }, | |
| { | |
| "epoch": 0.6147870345566568, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.879256785847597e-05, | |
| "loss": 2.1144, | |
| "step": 9180 | |
| }, | |
| { | |
| "epoch": 0.6154567372086793, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.867868079177805e-05, | |
| "loss": 2.1892, | |
| "step": 9190 | |
| }, | |
| { | |
| "epoch": 0.6161264398607018, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.8564855601803754e-05, | |
| "loss": 2.221, | |
| "step": 9200 | |
| }, | |
| { | |
| "epoch": 0.6167961425127243, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.8451092910665206e-05, | |
| "loss": 2.2979, | |
| "step": 9210 | |
| }, | |
| { | |
| "epoch": 0.6174658451647469, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.833739334013292e-05, | |
| "loss": 2.2331, | |
| "step": 9220 | |
| }, | |
| { | |
| "epoch": 0.6181355478167694, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.8223757511632424e-05, | |
| "loss": 2.2855, | |
| "step": 9230 | |
| }, | |
| { | |
| "epoch": 0.6188052504687919, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.81101860462409e-05, | |
| "loss": 2.1975, | |
| "step": 9240 | |
| }, | |
| { | |
| "epoch": 0.6194749531208144, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.7996679564683714e-05, | |
| "loss": 2.1869, | |
| "step": 9250 | |
| }, | |
| { | |
| "epoch": 0.6201446557728368, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.788323868733107e-05, | |
| "loss": 2.2632, | |
| "step": 9260 | |
| }, | |
| { | |
| "epoch": 0.6208143584248593, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.7769864034194646e-05, | |
| "loss": 2.1872, | |
| "step": 9270 | |
| }, | |
| { | |
| "epoch": 0.6214840610768818, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.7656556224924125e-05, | |
| "loss": 2.2888, | |
| "step": 9280 | |
| }, | |
| { | |
| "epoch": 0.6221537637289044, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.7543315878803863e-05, | |
| "loss": 2.1669, | |
| "step": 9290 | |
| }, | |
| { | |
| "epoch": 0.6228234663809269, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.743014361474953e-05, | |
| "loss": 2.2148, | |
| "step": 9300 | |
| }, | |
| { | |
| "epoch": 0.6234931690329494, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.731704005130466e-05, | |
| "loss": 2.0846, | |
| "step": 9310 | |
| }, | |
| { | |
| "epoch": 0.6241628716849719, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.720400580663732e-05, | |
| "loss": 2.319, | |
| "step": 9320 | |
| }, | |
| { | |
| "epoch": 0.6248325743369944, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.709104149853669e-05, | |
| "loss": 2.2351, | |
| "step": 9330 | |
| }, | |
| { | |
| "epoch": 0.6255022769890168, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.697814774440976e-05, | |
| "loss": 2.1733, | |
| "step": 9340 | |
| }, | |
| { | |
| "epoch": 0.6261719796410394, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.686532516127785e-05, | |
| "loss": 2.3272, | |
| "step": 9350 | |
| }, | |
| { | |
| "epoch": 0.6268416822930619, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.675257436577333e-05, | |
| "loss": 2.1916, | |
| "step": 9360 | |
| }, | |
| { | |
| "epoch": 0.6275113849450844, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.663989597413621e-05, | |
| "loss": 2.2904, | |
| "step": 9370 | |
| }, | |
| { | |
| "epoch": 0.6281810875971069, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.6527290602210763e-05, | |
| "loss": 2.2809, | |
| "step": 9380 | |
| }, | |
| { | |
| "epoch": 0.6288507902491294, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.6414758865442186e-05, | |
| "loss": 2.1766, | |
| "step": 9390 | |
| }, | |
| { | |
| "epoch": 0.6295204929011519, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.6302301378873225e-05, | |
| "loss": 2.223, | |
| "step": 9400 | |
| }, | |
| { | |
| "epoch": 0.6301901955531743, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.618991875714078e-05, | |
| "loss": 2.2235, | |
| "step": 9410 | |
| }, | |
| { | |
| "epoch": 0.630859898205197, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.6077611614472616e-05, | |
| "loss": 2.2017, | |
| "step": 9420 | |
| }, | |
| { | |
| "epoch": 0.6315296008572194, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.596538056468397e-05, | |
| "loss": 2.3, | |
| "step": 9430 | |
| }, | |
| { | |
| "epoch": 0.6321993035092419, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.5853226221174155e-05, | |
| "loss": 2.1629, | |
| "step": 9440 | |
| }, | |
| { | |
| "epoch": 0.6328690061612644, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.5741149196923264e-05, | |
| "loss": 2.1419, | |
| "step": 9450 | |
| }, | |
| { | |
| "epoch": 0.6335387088132869, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.5629150104488816e-05, | |
| "loss": 2.1754, | |
| "step": 9460 | |
| }, | |
| { | |
| "epoch": 0.6342084114653094, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.5517229556002366e-05, | |
| "loss": 2.2461, | |
| "step": 9470 | |
| }, | |
| { | |
| "epoch": 0.6348781141173319, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.540538816316621e-05, | |
| "loss": 2.1987, | |
| "step": 9480 | |
| }, | |
| { | |
| "epoch": 0.6355478167693545, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.529362653725001e-05, | |
| "loss": 2.1122, | |
| "step": 9490 | |
| }, | |
| { | |
| "epoch": 0.6362175194213769, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.5181945289087445e-05, | |
| "loss": 2.2948, | |
| "step": 9500 | |
| }, | |
| { | |
| "epoch": 0.6368872220733994, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.507034502907292e-05, | |
| "loss": 2.2711, | |
| "step": 9510 | |
| }, | |
| { | |
| "epoch": 0.6375569247254219, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.495882636715818e-05, | |
| "loss": 2.1809, | |
| "step": 9520 | |
| }, | |
| { | |
| "epoch": 0.6382266273774444, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.484738991284899e-05, | |
| "loss": 2.2134, | |
| "step": 9530 | |
| }, | |
| { | |
| "epoch": 0.6388963300294669, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.473603627520181e-05, | |
| "loss": 2.2256, | |
| "step": 9540 | |
| }, | |
| { | |
| "epoch": 0.6395660326814894, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.462476606282051e-05, | |
| "loss": 2.2067, | |
| "step": 9550 | |
| }, | |
| { | |
| "epoch": 0.640235735333512, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.4513579883852945e-05, | |
| "loss": 2.2316, | |
| "step": 9560 | |
| }, | |
| { | |
| "epoch": 0.6409054379855345, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.440247834598768e-05, | |
| "loss": 2.2769, | |
| "step": 9570 | |
| }, | |
| { | |
| "epoch": 0.6415751406375569, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.429146205645072e-05, | |
| "loss": 2.2183, | |
| "step": 9580 | |
| }, | |
| { | |
| "epoch": 0.6422448432895794, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.41805316220021e-05, | |
| "loss": 2.154, | |
| "step": 9590 | |
| }, | |
| { | |
| "epoch": 0.6429145459416019, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.406968764893266e-05, | |
| "loss": 2.2405, | |
| "step": 9600 | |
| }, | |
| { | |
| "epoch": 0.6435842485936244, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.3958930743060635e-05, | |
| "loss": 2.2594, | |
| "step": 9610 | |
| }, | |
| { | |
| "epoch": 0.6442539512456469, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.384826150972842e-05, | |
| "loss": 2.1802, | |
| "step": 9620 | |
| }, | |
| { | |
| "epoch": 0.6449236538976695, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.373768055379924e-05, | |
| "loss": 2.171, | |
| "step": 9630 | |
| }, | |
| { | |
| "epoch": 0.645593356549692, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.3627188479653836e-05, | |
| "loss": 2.3012, | |
| "step": 9640 | |
| }, | |
| { | |
| "epoch": 0.6462630592017145, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.351678589118714e-05, | |
| "loss": 2.2603, | |
| "step": 9650 | |
| }, | |
| { | |
| "epoch": 0.6469327618537369, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.340647339180503e-05, | |
| "loss": 2.2594, | |
| "step": 9660 | |
| }, | |
| { | |
| "epoch": 0.6476024645057594, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.329625158442102e-05, | |
| "loss": 2.2487, | |
| "step": 9670 | |
| }, | |
| { | |
| "epoch": 0.6482721671577819, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.3186121071452895e-05, | |
| "loss": 2.2431, | |
| "step": 9680 | |
| }, | |
| { | |
| "epoch": 0.6489418698098044, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.307608245481949e-05, | |
| "loss": 2.2311, | |
| "step": 9690 | |
| }, | |
| { | |
| "epoch": 0.649611572461827, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.2966136335937395e-05, | |
| "loss": 2.2746, | |
| "step": 9700 | |
| }, | |
| { | |
| "epoch": 0.6502812751138495, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.285628331571764e-05, | |
| "loss": 2.1033, | |
| "step": 9710 | |
| }, | |
| { | |
| "epoch": 0.650950977765872, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.2746523994562416e-05, | |
| "loss": 2.2116, | |
| "step": 9720 | |
| }, | |
| { | |
| "epoch": 0.6516206804178944, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.263685897236183e-05, | |
| "loss": 2.2938, | |
| "step": 9730 | |
| }, | |
| { | |
| "epoch": 0.6522903830699169, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.252728884849056e-05, | |
| "loss": 2.1125, | |
| "step": 9740 | |
| }, | |
| { | |
| "epoch": 0.6529600857219394, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.241781422180464e-05, | |
| "loss": 2.2638, | |
| "step": 9750 | |
| }, | |
| { | |
| "epoch": 0.6536297883739619, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.2308435690638186e-05, | |
| "loss": 2.1999, | |
| "step": 9760 | |
| }, | |
| { | |
| "epoch": 0.6542994910259845, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.219915385280002e-05, | |
| "loss": 2.2453, | |
| "step": 9770 | |
| }, | |
| { | |
| "epoch": 0.654969193678007, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.2089969305570606e-05, | |
| "loss": 2.2918, | |
| "step": 9780 | |
| }, | |
| { | |
| "epoch": 0.6556388963300295, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.198088264569854e-05, | |
| "loss": 2.2353, | |
| "step": 9790 | |
| }, | |
| { | |
| "epoch": 0.656308598982052, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.1871894469397544e-05, | |
| "loss": 2.1873, | |
| "step": 9800 | |
| }, | |
| { | |
| "epoch": 0.6569783016340744, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.1763005372342966e-05, | |
| "loss": 2.2251, | |
| "step": 9810 | |
| }, | |
| { | |
| "epoch": 0.6576480042860969, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.1654215949668695e-05, | |
| "loss": 2.296, | |
| "step": 9820 | |
| }, | |
| { | |
| "epoch": 0.6583177069381195, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.154552679596383e-05, | |
| "loss": 2.3183, | |
| "step": 9830 | |
| }, | |
| { | |
| "epoch": 0.658987409590142, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.143693850526945e-05, | |
| "loss": 2.1321, | |
| "step": 9840 | |
| }, | |
| { | |
| "epoch": 1.0004018215912136, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.132845167107539e-05, | |
| "loss": 2.1889, | |
| "step": 9850 | |
| }, | |
| { | |
| "epoch": 1.001071524243236, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.122006688631693e-05, | |
| "loss": 2.1428, | |
| "step": 9860 | |
| }, | |
| { | |
| "epoch": 1.0017412268952586, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.1111784743371644e-05, | |
| "loss": 2.0989, | |
| "step": 9870 | |
| }, | |
| { | |
| "epoch": 1.002410929547281, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.100360583405608e-05, | |
| "loss": 2.1649, | |
| "step": 9880 | |
| }, | |
| { | |
| "epoch": 1.0030806321993035, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.0895530749622584e-05, | |
| "loss": 2.2272, | |
| "step": 9890 | |
| }, | |
| { | |
| "epoch": 1.003750334851326, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.078756008075605e-05, | |
| "loss": 2.0914, | |
| "step": 9900 | |
| }, | |
| { | |
| "epoch": 1.0044200375033485, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.0679694417570635e-05, | |
| "loss": 2.0899, | |
| "step": 9910 | |
| }, | |
| { | |
| "epoch": 1.0050897401553711, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.05719343496067e-05, | |
| "loss": 1.9538, | |
| "step": 9920 | |
| }, | |
| { | |
| "epoch": 1.0057594428073935, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.046428046582736e-05, | |
| "loss": 2.155, | |
| "step": 9930 | |
| }, | |
| { | |
| "epoch": 1.006429145459416, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.0356733354615446e-05, | |
| "loss": 2.1708, | |
| "step": 9940 | |
| }, | |
| { | |
| "epoch": 1.0070988481114385, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.024929360377019e-05, | |
| "loss": 2.2296, | |
| "step": 9950 | |
| }, | |
| { | |
| "epoch": 1.007768550763461, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.014196180050406e-05, | |
| "loss": 2.0675, | |
| "step": 9960 | |
| }, | |
| { | |
| "epoch": 1.0084382534154834, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.0034738531439533e-05, | |
| "loss": 2.118, | |
| "step": 9970 | |
| }, | |
| { | |
| "epoch": 1.009107956067506, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.992762438260589e-05, | |
| "loss": 2.1154, | |
| "step": 9980 | |
| }, | |
| { | |
| "epoch": 1.0097776587195286, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.9820619939436035e-05, | |
| "loss": 2.1501, | |
| "step": 9990 | |
| }, | |
| { | |
| "epoch": 1.010447361371551, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.9713725786763248e-05, | |
| "loss": 2.0916, | |
| "step": 10000 | |
| }, | |
| { | |
| "epoch": 1.0111170640235736, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.9606942508818e-05, | |
| "loss": 2.0645, | |
| "step": 10010 | |
| }, | |
| { | |
| "epoch": 1.011786766675596, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.950027068922484e-05, | |
| "loss": 2.179, | |
| "step": 10020 | |
| }, | |
| { | |
| "epoch": 1.0124564693276186, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.9393710910999073e-05, | |
| "loss": 2.1498, | |
| "step": 10030 | |
| }, | |
| { | |
| "epoch": 1.013126171979641, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.928726375654369e-05, | |
| "loss": 2.1339, | |
| "step": 10040 | |
| }, | |
| { | |
| "epoch": 1.0137958746316635, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.9180929807646118e-05, | |
| "loss": 2.1826, | |
| "step": 10050 | |
| }, | |
| { | |
| "epoch": 1.0144655772836861, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.9074709645475036e-05, | |
| "loss": 2.1725, | |
| "step": 10060 | |
| }, | |
| { | |
| "epoch": 1.0151352799357085, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.896860385057726e-05, | |
| "loss": 2.1184, | |
| "step": 10070 | |
| }, | |
| { | |
| "epoch": 1.015804982587731, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.8862613002874517e-05, | |
| "loss": 2.1758, | |
| "step": 10080 | |
| }, | |
| { | |
| "epoch": 1.0164746852397535, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.875673768166026e-05, | |
| "loss": 2.1563, | |
| "step": 10090 | |
| }, | |
| { | |
| "epoch": 1.017144387891776, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.865097846559658e-05, | |
| "loss": 2.1865, | |
| "step": 10100 | |
| }, | |
| { | |
| "epoch": 1.0178140905437985, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.8545335932710985e-05, | |
| "loss": 2.1202, | |
| "step": 10110 | |
| }, | |
| { | |
| "epoch": 1.018483793195821, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.8439810660393197e-05, | |
| "loss": 2.2371, | |
| "step": 10120 | |
| }, | |
| { | |
| "epoch": 1.0191534958478436, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.8334403225392116e-05, | |
| "loss": 2.2004, | |
| "step": 10130 | |
| }, | |
| { | |
| "epoch": 1.019823198499866, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.822911420381259e-05, | |
| "loss": 2.236, | |
| "step": 10140 | |
| }, | |
| { | |
| "epoch": 1.0204929011518886, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.8123944171112226e-05, | |
| "loss": 2.1106, | |
| "step": 10150 | |
| }, | |
| { | |
| "epoch": 1.021162603803911, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.8018893702098337e-05, | |
| "loss": 2.1454, | |
| "step": 10160 | |
| }, | |
| { | |
| "epoch": 1.0218323064559336, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.7913963370924817e-05, | |
| "loss": 2.2404, | |
| "step": 10170 | |
| }, | |
| { | |
| "epoch": 1.022502009107956, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.7809153751088823e-05, | |
| "loss": 2.2138, | |
| "step": 10180 | |
| }, | |
| { | |
| "epoch": 1.0231717117599786, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.7704465415427854e-05, | |
| "loss": 2.1251, | |
| "step": 10190 | |
| }, | |
| { | |
| "epoch": 1.0238414144120012, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.7599898936116518e-05, | |
| "loss": 2.2301, | |
| "step": 10200 | |
| }, | |
| { | |
| "epoch": 1.0245111170640235, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.7495454884663364e-05, | |
| "loss": 2.1531, | |
| "step": 10210 | |
| }, | |
| { | |
| "epoch": 1.0251808197160461, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.7391133831907862e-05, | |
| "loss": 2.1389, | |
| "step": 10220 | |
| }, | |
| { | |
| "epoch": 1.0258505223680685, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.7286936348017245e-05, | |
| "loss": 2.1855, | |
| "step": 10230 | |
| }, | |
| { | |
| "epoch": 1.026520225020091, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.7182863002483317e-05, | |
| "loss": 2.1678, | |
| "step": 10240 | |
| }, | |
| { | |
| "epoch": 1.0271899276721135, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.7078914364119458e-05, | |
| "loss": 2.106, | |
| "step": 10250 | |
| }, | |
| { | |
| "epoch": 1.027859630324136, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.697509100105745e-05, | |
| "loss": 2.2005, | |
| "step": 10260 | |
| }, | |
| { | |
| "epoch": 1.0285293329761587, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.6871393480744356e-05, | |
| "loss": 2.1844, | |
| "step": 10270 | |
| }, | |
| { | |
| "epoch": 1.029199035628181, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.6767822369939442e-05, | |
| "loss": 2.1796, | |
| "step": 10280 | |
| }, | |
| { | |
| "epoch": 1.0298687382802036, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.6664378234711185e-05, | |
| "loss": 2.1288, | |
| "step": 10290 | |
| }, | |
| { | |
| "epoch": 1.030538440932226, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.6561061640433916e-05, | |
| "loss": 2.1717, | |
| "step": 10300 | |
| }, | |
| { | |
| "epoch": 1.0312081435842486, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.645787315178499e-05, | |
| "loss": 2.0873, | |
| "step": 10310 | |
| }, | |
| { | |
| "epoch": 1.031877846236271, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.6354813332741612e-05, | |
| "loss": 2.1526, | |
| "step": 10320 | |
| }, | |
| { | |
| "epoch": 1.0325475488882936, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.625188274657766e-05, | |
| "loss": 2.0779, | |
| "step": 10330 | |
| }, | |
| { | |
| "epoch": 1.0332172515403162, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.6149081955860765e-05, | |
| "loss": 2.0766, | |
| "step": 10340 | |
| }, | |
| { | |
| "epoch": 1.0338869541923386, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.6046411522449148e-05, | |
| "loss": 2.1073, | |
| "step": 10350 | |
| }, | |
| { | |
| "epoch": 1.0345566568443612, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.594387200748851e-05, | |
| "loss": 2.1058, | |
| "step": 10360 | |
| }, | |
| { | |
| "epoch": 1.0352263594963835, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.5841463971409085e-05, | |
| "loss": 2.1599, | |
| "step": 10370 | |
| }, | |
| { | |
| "epoch": 1.0358960621484061, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.5739187973922484e-05, | |
| "loss": 1.9751, | |
| "step": 10380 | |
| }, | |
| { | |
| "epoch": 1.0365657648004287, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.563704457401862e-05, | |
| "loss": 2.0324, | |
| "step": 10390 | |
| }, | |
| { | |
| "epoch": 1.037235467452451, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.5535034329962726e-05, | |
| "loss": 2.1225, | |
| "step": 10400 | |
| }, | |
| { | |
| "epoch": 1.0379051701044737, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.543315779929233e-05, | |
| "loss": 2.0986, | |
| "step": 10410 | |
| }, | |
| { | |
| "epoch": 1.038574872756496, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.533141553881404e-05, | |
| "loss": 2.0317, | |
| "step": 10420 | |
| }, | |
| { | |
| "epoch": 1.0392445754085187, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.5229808104600684e-05, | |
| "loss": 2.0995, | |
| "step": 10430 | |
| }, | |
| { | |
| "epoch": 1.039914278060541, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.512833605198818e-05, | |
| "loss": 2.1134, | |
| "step": 10440 | |
| }, | |
| { | |
| "epoch": 1.0405839807125636, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.502699993557247e-05, | |
| "loss": 2.1163, | |
| "step": 10450 | |
| }, | |
| { | |
| "epoch": 1.041253683364586, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.4925800309206614e-05, | |
| "loss": 2.0981, | |
| "step": 10460 | |
| }, | |
| { | |
| "epoch": 1.0419233860166086, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.482473772599766e-05, | |
| "loss": 2.1944, | |
| "step": 10470 | |
| }, | |
| { | |
| "epoch": 1.0425930886686312, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.472381273830359e-05, | |
| "loss": 2.0236, | |
| "step": 10480 | |
| }, | |
| { | |
| "epoch": 1.0432627913206536, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.4623025897730427e-05, | |
| "loss": 2.1266, | |
| "step": 10490 | |
| }, | |
| { | |
| "epoch": 1.0439324939726762, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.452237775512914e-05, | |
| "loss": 2.1275, | |
| "step": 10500 | |
| }, | |
| { | |
| "epoch": 1.0446021966246986, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.442186886059259e-05, | |
| "loss": 2.1504, | |
| "step": 10510 | |
| }, | |
| { | |
| "epoch": 1.0452718992767212, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.4321499763452637e-05, | |
| "loss": 2.1843, | |
| "step": 10520 | |
| }, | |
| { | |
| "epoch": 1.0459416019287437, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.4221271012277047e-05, | |
| "loss": 2.1632, | |
| "step": 10530 | |
| }, | |
| { | |
| "epoch": 1.0466113045807661, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.4121183154866546e-05, | |
| "loss": 2.1631, | |
| "step": 10540 | |
| }, | |
| { | |
| "epoch": 1.0472810072327887, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.402123673825178e-05, | |
| "loss": 2.0182, | |
| "step": 10550 | |
| }, | |
| { | |
| "epoch": 1.047950709884811, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.392143230869039e-05, | |
| "loss": 2.1462, | |
| "step": 10560 | |
| }, | |
| { | |
| "epoch": 1.0486204125368337, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.382177041166391e-05, | |
| "loss": 2.1991, | |
| "step": 10570 | |
| }, | |
| { | |
| "epoch": 1.049290115188856, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.3722251591874932e-05, | |
| "loss": 2.1854, | |
| "step": 10580 | |
| }, | |
| { | |
| "epoch": 1.0499598178408787, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.3622876393244053e-05, | |
| "loss": 2.1261, | |
| "step": 10590 | |
| }, | |
| { | |
| "epoch": 1.0506295204929013, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.352364535890686e-05, | |
| "loss": 2.2759, | |
| "step": 10600 | |
| }, | |
| { | |
| "epoch": 1.0512992231449236, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.342455903121105e-05, | |
| "loss": 2.1393, | |
| "step": 10610 | |
| }, | |
| { | |
| "epoch": 1.0519689257969462, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.3325617951713445e-05, | |
| "loss": 2.2647, | |
| "step": 10620 | |
| }, | |
| { | |
| "epoch": 1.0526386284489686, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.3226822661176933e-05, | |
| "loss": 2.1644, | |
| "step": 10630 | |
| }, | |
| { | |
| "epoch": 1.0533083311009912, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.312817369956766e-05, | |
| "loss": 2.1273, | |
| "step": 10640 | |
| }, | |
| { | |
| "epoch": 1.0539780337530136, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.3029671606052004e-05, | |
| "loss": 2.1682, | |
| "step": 10650 | |
| }, | |
| { | |
| "epoch": 1.0546477364050362, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.2931316918993613e-05, | |
| "loss": 2.1651, | |
| "step": 10660 | |
| }, | |
| { | |
| "epoch": 1.0553174390570588, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.2833110175950496e-05, | |
| "loss": 2.2218, | |
| "step": 10670 | |
| }, | |
| { | |
| "epoch": 1.0559871417090811, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.273505191367209e-05, | |
| "loss": 2.1568, | |
| "step": 10680 | |
| }, | |
| { | |
| "epoch": 1.0566568443611037, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.263714266809625e-05, | |
| "loss": 2.1827, | |
| "step": 10690 | |
| }, | |
| { | |
| "epoch": 1.0573265470131261, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.2539382974346445e-05, | |
| "loss": 2.2167, | |
| "step": 10700 | |
| }, | |
| { | |
| "epoch": 1.0579962496651487, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.2441773366728758e-05, | |
| "loss": 2.0347, | |
| "step": 10710 | |
| }, | |
| { | |
| "epoch": 1.058665952317171, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.2344314378728926e-05, | |
| "loss": 2.1217, | |
| "step": 10720 | |
| }, | |
| { | |
| "epoch": 1.0593356549691937, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.2247006543009518e-05, | |
| "loss": 2.0515, | |
| "step": 10730 | |
| }, | |
| { | |
| "epoch": 1.0600053576212163, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.2149850391407002e-05, | |
| "loss": 2.0677, | |
| "step": 10740 | |
| }, | |
| { | |
| "epoch": 1.0606750602732387, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.2052846454928737e-05, | |
| "loss": 2.1297, | |
| "step": 10750 | |
| }, | |
| { | |
| "epoch": 1.0613447629252613, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.1955995263750225e-05, | |
| "loss": 2.1283, | |
| "step": 10760 | |
| }, | |
| { | |
| "epoch": 1.0620144655772836, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.1859297347212094e-05, | |
| "loss": 2.0728, | |
| "step": 10770 | |
| }, | |
| { | |
| "epoch": 1.0626841682293062, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.1762753233817274e-05, | |
| "loss": 2.1211, | |
| "step": 10780 | |
| }, | |
| { | |
| "epoch": 1.0633538708813286, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.1666363451228073e-05, | |
| "loss": 2.2635, | |
| "step": 10790 | |
| }, | |
| { | |
| "epoch": 1.0640235735333512, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.1570128526263317e-05, | |
| "loss": 2.1496, | |
| "step": 10800 | |
| }, | |
| { | |
| "epoch": 1.0646932761853738, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.1474048984895406e-05, | |
| "loss": 2.1991, | |
| "step": 10810 | |
| }, | |
| { | |
| "epoch": 1.0653629788373962, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.137812535224755e-05, | |
| "loss": 2.1058, | |
| "step": 10820 | |
| }, | |
| { | |
| "epoch": 1.0660326814894188, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.128235815259083e-05, | |
| "loss": 2.2513, | |
| "step": 10830 | |
| }, | |
| { | |
| "epoch": 1.0667023841414411, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.1186747909341298e-05, | |
| "loss": 2.0678, | |
| "step": 10840 | |
| }, | |
| { | |
| "epoch": 1.0673720867934637, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.1091295145057207e-05, | |
| "loss": 2.0552, | |
| "step": 10850 | |
| }, | |
| { | |
| "epoch": 1.0680417894454861, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.0996000381436104e-05, | |
| "loss": 2.1286, | |
| "step": 10860 | |
| }, | |
| { | |
| "epoch": 1.0687114920975087, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.090086413931194e-05, | |
| "loss": 2.2085, | |
| "step": 10870 | |
| }, | |
| { | |
| "epoch": 1.0693811947495313, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.0805886938652302e-05, | |
| "loss": 2.1108, | |
| "step": 10880 | |
| }, | |
| { | |
| "epoch": 1.0700508974015537, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.0711069298555564e-05, | |
| "loss": 2.1562, | |
| "step": 10890 | |
| }, | |
| { | |
| "epoch": 1.0707206000535763, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.061641173724792e-05, | |
| "loss": 2.1488, | |
| "step": 10900 | |
| }, | |
| { | |
| "epoch": 1.0713903027055987, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.0521914772080776e-05, | |
| "loss": 2.1766, | |
| "step": 10910 | |
| }, | |
| { | |
| "epoch": 1.0720600053576212, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.0427578919527747e-05, | |
| "loss": 2.2053, | |
| "step": 10920 | |
| }, | |
| { | |
| "epoch": 1.0727297080096436, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.0333404695181836e-05, | |
| "loss": 2.1427, | |
| "step": 10930 | |
| }, | |
| { | |
| "epoch": 1.0733994106616662, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.0239392613752728e-05, | |
| "loss": 2.1092, | |
| "step": 10940 | |
| }, | |
| { | |
| "epoch": 1.0740691133136888, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.0145543189063927e-05, | |
| "loss": 2.153, | |
| "step": 10950 | |
| }, | |
| { | |
| "epoch": 1.0747388159657112, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.005185693404986e-05, | |
| "loss": 2.143, | |
| "step": 10960 | |
| }, | |
| { | |
| "epoch": 1.0754085186177338, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.9958334360753228e-05, | |
| "loss": 2.1103, | |
| "step": 10970 | |
| }, | |
| { | |
| "epoch": 1.0760782212697562, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.9864975980322098e-05, | |
| "loss": 2.1149, | |
| "step": 10980 | |
| }, | |
| { | |
| "epoch": 1.0767479239217788, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.9771782303007118e-05, | |
| "loss": 2.2324, | |
| "step": 10990 | |
| }, | |
| { | |
| "epoch": 1.0774176265738011, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.967875383815878e-05, | |
| "loss": 2.1108, | |
| "step": 11000 | |
| }, | |
| { | |
| "epoch": 1.0780873292258237, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.9585891094224623e-05, | |
| "loss": 2.2484, | |
| "step": 11010 | |
| }, | |
| { | |
| "epoch": 1.0787570318778463, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.9493194578746356e-05, | |
| "loss": 2.1761, | |
| "step": 11020 | |
| }, | |
| { | |
| "epoch": 1.0794267345298687, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.9400664798357278e-05, | |
| "loss": 2.1615, | |
| "step": 11030 | |
| }, | |
| { | |
| "epoch": 1.0800964371818913, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.930830225877933e-05, | |
| "loss": 2.1355, | |
| "step": 11040 | |
| }, | |
| { | |
| "epoch": 1.0807661398339137, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.921610746482039e-05, | |
| "loss": 2.0492, | |
| "step": 11050 | |
| }, | |
| { | |
| "epoch": 1.0814358424859363, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.9124080920371544e-05, | |
| "loss": 2.1128, | |
| "step": 11060 | |
| }, | |
| { | |
| "epoch": 1.0821055451379586, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.903222312840433e-05, | |
| "loss": 2.195, | |
| "step": 11070 | |
| }, | |
| { | |
| "epoch": 1.0827752477899812, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.8940534590967907e-05, | |
| "loss": 2.1437, | |
| "step": 11080 | |
| }, | |
| { | |
| "epoch": 1.0834449504420038, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.8849015809186427e-05, | |
| "loss": 2.0666, | |
| "step": 11090 | |
| }, | |
| { | |
| "epoch": 1.0841146530940262, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.875766728325625e-05, | |
| "loss": 2.1769, | |
| "step": 11100 | |
| }, | |
| { | |
| "epoch": 1.0847843557460488, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.866648951244312e-05, | |
| "loss": 2.109, | |
| "step": 11110 | |
| }, | |
| { | |
| "epoch": 1.0854540583980712, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.857548299507961e-05, | |
| "loss": 2.162, | |
| "step": 11120 | |
| }, | |
| { | |
| "epoch": 1.0861237610500938, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.848464822856229e-05, | |
| "loss": 2.1181, | |
| "step": 11130 | |
| }, | |
| { | |
| "epoch": 1.0867934637021162, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.8393985709348954e-05, | |
| "loss": 2.1641, | |
| "step": 11140 | |
| }, | |
| { | |
| "epoch": 1.0874631663541388, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.8303495932956045e-05, | |
| "loss": 2.0839, | |
| "step": 11150 | |
| }, | |
| { | |
| "epoch": 1.0881328690061614, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.8213179393955908e-05, | |
| "loss": 2.1487, | |
| "step": 11160 | |
| }, | |
| { | |
| "epoch": 1.0888025716581837, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.812303658597396e-05, | |
| "loss": 2.2031, | |
| "step": 11170 | |
| }, | |
| { | |
| "epoch": 1.0894722743102063, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.803306800168617e-05, | |
| "loss": 2.0955, | |
| "step": 11180 | |
| }, | |
| { | |
| "epoch": 1.0901419769622287, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.7943274132816267e-05, | |
| "loss": 2.1206, | |
| "step": 11190 | |
| }, | |
| { | |
| "epoch": 1.0908116796142513, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.7853655470133042e-05, | |
| "loss": 2.0943, | |
| "step": 11200 | |
| }, | |
| { | |
| "epoch": 1.0914813822662737, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.7764212503447725e-05, | |
| "loss": 2.2108, | |
| "step": 11210 | |
| }, | |
| { | |
| "epoch": 1.0921510849182963, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.7674945721611297e-05, | |
| "loss": 2.1896, | |
| "step": 11220 | |
| }, | |
| { | |
| "epoch": 1.0928207875703189, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.7585855612511737e-05, | |
| "loss": 2.1392, | |
| "step": 11230 | |
| }, | |
| { | |
| "epoch": 1.0934904902223412, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.749694266307148e-05, | |
| "loss": 2.098, | |
| "step": 11240 | |
| }, | |
| { | |
| "epoch": 1.0941601928743638, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.7408207359244687e-05, | |
| "loss": 2.1777, | |
| "step": 11250 | |
| }, | |
| { | |
| "epoch": 1.0948298955263862, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.7319650186014547e-05, | |
| "loss": 2.0452, | |
| "step": 11260 | |
| }, | |
| { | |
| "epoch": 1.0954995981784088, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.723127162739071e-05, | |
| "loss": 2.1313, | |
| "step": 11270 | |
| }, | |
| { | |
| "epoch": 1.0961693008304314, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.7143072166406648e-05, | |
| "loss": 2.0861, | |
| "step": 11280 | |
| }, | |
| { | |
| "epoch": 1.0968390034824538, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.705505228511689e-05, | |
| "loss": 2.1566, | |
| "step": 11290 | |
| }, | |
| { | |
| "epoch": 1.0975087061344764, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.6967212464594524e-05, | |
| "loss": 2.1025, | |
| "step": 11300 | |
| }, | |
| { | |
| "epoch": 1.0981784087864987, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.687955318492852e-05, | |
| "loss": 2.1918, | |
| "step": 11310 | |
| }, | |
| { | |
| "epoch": 1.0988481114385213, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.679207492522106e-05, | |
| "loss": 2.0869, | |
| "step": 11320 | |
| }, | |
| { | |
| "epoch": 1.0995178140905437, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.6704778163585e-05, | |
| "loss": 2.1169, | |
| "step": 11330 | |
| }, | |
| { | |
| "epoch": 1.1001875167425663, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.6617663377141214e-05, | |
| "loss": 2.1397, | |
| "step": 11340 | |
| }, | |
| { | |
| "epoch": 1.1008572193945887, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.653073104201595e-05, | |
| "loss": 2.0632, | |
| "step": 11350 | |
| }, | |
| { | |
| "epoch": 1.1015269220466113, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.6443981633338314e-05, | |
| "loss": 2.1112, | |
| "step": 11360 | |
| }, | |
| { | |
| "epoch": 1.1021966246986339, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.6357415625237622e-05, | |
| "loss": 2.2122, | |
| "step": 11370 | |
| }, | |
| { | |
| "epoch": 1.1028663273506563, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.6271033490840764e-05, | |
| "loss": 2.1889, | |
| "step": 11380 | |
| }, | |
| { | |
| "epoch": 1.1035360300026789, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.6184835702269728e-05, | |
| "loss": 2.1106, | |
| "step": 11390 | |
| }, | |
| { | |
| "epoch": 1.1042057326547012, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.6098822730638934e-05, | |
| "loss": 2.1479, | |
| "step": 11400 | |
| }, | |
| { | |
| "epoch": 1.1048754353067238, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.6012995046052687e-05, | |
| "loss": 2.1759, | |
| "step": 11410 | |
| }, | |
| { | |
| "epoch": 1.1055451379587464, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.592735311760259e-05, | |
| "loss": 2.1718, | |
| "step": 11420 | |
| }, | |
| { | |
| "epoch": 1.1062148406107688, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.5841897413365038e-05, | |
| "loss": 2.1863, | |
| "step": 11430 | |
| }, | |
| { | |
| "epoch": 1.1068845432627914, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.5756628400398543e-05, | |
| "loss": 2.1882, | |
| "step": 11440 | |
| }, | |
| { | |
| "epoch": 1.1075542459148138, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.567154654474132e-05, | |
| "loss": 2.1132, | |
| "step": 11450 | |
| }, | |
| { | |
| "epoch": 1.1082239485668364, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.558665231140867e-05, | |
| "loss": 2.1689, | |
| "step": 11460 | |
| }, | |
| { | |
| "epoch": 1.1088936512188587, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.5501946164390397e-05, | |
| "loss": 2.1887, | |
| "step": 11470 | |
| }, | |
| { | |
| "epoch": 1.1095633538708813, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.541742856664835e-05, | |
| "loss": 2.1643, | |
| "step": 11480 | |
| }, | |
| { | |
| "epoch": 1.1102330565229037, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.533309998011389e-05, | |
| "loss": 2.1496, | |
| "step": 11490 | |
| }, | |
| { | |
| "epoch": 1.1109027591749263, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.5248960865685268e-05, | |
| "loss": 2.0605, | |
| "step": 11500 | |
| }, | |
| { | |
| "epoch": 1.111572461826949, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.5165011683225228e-05, | |
| "loss": 2.1085, | |
| "step": 11510 | |
| }, | |
| { | |
| "epoch": 1.1122421644789713, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.5081252891558418e-05, | |
| "loss": 2.039, | |
| "step": 11520 | |
| }, | |
| { | |
| "epoch": 1.1129118671309939, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.499768494846892e-05, | |
| "loss": 2.189, | |
| "step": 11530 | |
| }, | |
| { | |
| "epoch": 1.1135815697830163, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.4914308310697712e-05, | |
| "loss": 2.0645, | |
| "step": 11540 | |
| }, | |
| { | |
| "epoch": 1.1142512724350389, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.483112343394022e-05, | |
| "loss": 2.1368, | |
| "step": 11550 | |
| }, | |
| { | |
| "epoch": 1.1149209750870614, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.4748130772843755e-05, | |
| "loss": 2.097, | |
| "step": 11560 | |
| }, | |
| { | |
| "epoch": 1.1155906777390838, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.4665330781005105e-05, | |
| "loss": 2.1421, | |
| "step": 11570 | |
| }, | |
| { | |
| "epoch": 1.1162603803911064, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.4582723910968037e-05, | |
| "loss": 2.1087, | |
| "step": 11580 | |
| }, | |
| { | |
| "epoch": 1.1169300830431288, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.4500310614220763e-05, | |
| "loss": 2.1139, | |
| "step": 11590 | |
| }, | |
| { | |
| "epoch": 1.1175997856951514, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.4418091341193552e-05, | |
| "loss": 2.1825, | |
| "step": 11600 | |
| }, | |
| { | |
| "epoch": 1.1182694883471738, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.4336066541256249e-05, | |
| "loss": 2.1644, | |
| "step": 11610 | |
| }, | |
| { | |
| "epoch": 1.1189391909991964, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.4254236662715759e-05, | |
| "loss": 2.1694, | |
| "step": 11620 | |
| }, | |
| { | |
| "epoch": 1.119608893651219, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.4172602152813679e-05, | |
| "loss": 2.1323, | |
| "step": 11630 | |
| }, | |
| { | |
| "epoch": 1.1202785963032413, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.409116345772381e-05, | |
| "loss": 2.1212, | |
| "step": 11640 | |
| }, | |
| { | |
| "epoch": 1.120948298955264, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.4009921022549727e-05, | |
| "loss": 2.2011, | |
| "step": 11650 | |
| }, | |
| { | |
| "epoch": 1.1216180016072863, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.392887529132234e-05, | |
| "loss": 2.137, | |
| "step": 11660 | |
| }, | |
| { | |
| "epoch": 1.122287704259309, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.3848026706997496e-05, | |
| "loss": 2.0782, | |
| "step": 11670 | |
| }, | |
| { | |
| "epoch": 1.1229574069113313, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.3767375711453479e-05, | |
| "loss": 2.1234, | |
| "step": 11680 | |
| }, | |
| { | |
| "epoch": 1.1236271095633539, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.36869227454887e-05, | |
| "loss": 2.0887, | |
| "step": 11690 | |
| }, | |
| { | |
| "epoch": 1.1242968122153765, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.3606668248819249e-05, | |
| "loss": 2.1095, | |
| "step": 11700 | |
| }, | |
| { | |
| "epoch": 1.1249665148673988, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.3526612660076421e-05, | |
| "loss": 2.0477, | |
| "step": 11710 | |
| }, | |
| { | |
| "epoch": 1.1256362175194214, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.344675641680443e-05, | |
| "loss": 2.1671, | |
| "step": 11720 | |
| }, | |
| { | |
| "epoch": 1.1263059201714438, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.3367099955457962e-05, | |
| "loss": 2.18, | |
| "step": 11730 | |
| }, | |
| { | |
| "epoch": 1.1269756228234664, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.3287643711399756e-05, | |
| "loss": 2.1475, | |
| "step": 11740 | |
| }, | |
| { | |
| "epoch": 1.1276453254754888, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.3208388118898307e-05, | |
| "loss": 2.1707, | |
| "step": 11750 | |
| }, | |
| { | |
| "epoch": 1.1283150281275114, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.3129333611125427e-05, | |
| "loss": 2.2242, | |
| "step": 11760 | |
| }, | |
| { | |
| "epoch": 1.1289847307795338, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.3050480620153904e-05, | |
| "loss": 2.2287, | |
| "step": 11770 | |
| }, | |
| { | |
| "epoch": 1.1296544334315564, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.2971829576955141e-05, | |
| "loss": 2.15, | |
| "step": 11780 | |
| }, | |
| { | |
| "epoch": 1.130324136083579, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.2893380911396768e-05, | |
| "loss": 2.0854, | |
| "step": 11790 | |
| }, | |
| { | |
| "epoch": 1.1309938387356013, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.2815135052240346e-05, | |
| "loss": 2.1068, | |
| "step": 11800 | |
| }, | |
| { | |
| "epoch": 1.131663541387624, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.273709242713898e-05, | |
| "loss": 2.1606, | |
| "step": 11810 | |
| }, | |
| { | |
| "epoch": 1.1323332440396463, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.2659253462635034e-05, | |
| "loss": 2.2733, | |
| "step": 11820 | |
| }, | |
| { | |
| "epoch": 1.133002946691669, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.2581618584157706e-05, | |
| "loss": 2.2681, | |
| "step": 11830 | |
| }, | |
| { | |
| "epoch": 1.1336726493436915, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.2504188216020796e-05, | |
| "loss": 2.133, | |
| "step": 11840 | |
| }, | |
| { | |
| "epoch": 1.1343423519957139, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.2426962781420365e-05, | |
| "loss": 2.1469, | |
| "step": 11850 | |
| }, | |
| { | |
| "epoch": 1.1350120546477365, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.2349942702432365e-05, | |
| "loss": 2.098, | |
| "step": 11860 | |
| }, | |
| { | |
| "epoch": 1.1356817572997588, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.227312840001042e-05, | |
| "loss": 2.1423, | |
| "step": 11870 | |
| }, | |
| { | |
| "epoch": 1.1363514599517814, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.2196520293983466e-05, | |
| "loss": 2.1606, | |
| "step": 11880 | |
| }, | |
| { | |
| "epoch": 1.1370211626038038, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.2120118803053432e-05, | |
| "loss": 2.1523, | |
| "step": 11890 | |
| }, | |
| { | |
| "epoch": 1.1376908652558264, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.2043924344793083e-05, | |
| "loss": 2.0652, | |
| "step": 11900 | |
| }, | |
| { | |
| "epoch": 1.138360567907849, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.1967937335643553e-05, | |
| "loss": 2.1047, | |
| "step": 11910 | |
| }, | |
| { | |
| "epoch": 1.1390302705598714, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.1892158190912207e-05, | |
| "loss": 2.1319, | |
| "step": 11920 | |
| }, | |
| { | |
| "epoch": 1.139699973211894, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.1816587324770317e-05, | |
| "loss": 2.0914, | |
| "step": 11930 | |
| }, | |
| { | |
| "epoch": 1.1403696758639164, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.1741225150250835e-05, | |
| "loss": 2.1607, | |
| "step": 11940 | |
| }, | |
| { | |
| "epoch": 1.141039378515939, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.1666072079246043e-05, | |
| "loss": 2.1669, | |
| "step": 11950 | |
| }, | |
| { | |
| "epoch": 1.1417090811679613, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.1591128522505423e-05, | |
| "loss": 2.1226, | |
| "step": 11960 | |
| }, | |
| { | |
| "epoch": 1.142378783819984, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.1516394889633347e-05, | |
| "loss": 2.2758, | |
| "step": 11970 | |
| }, | |
| { | |
| "epoch": 1.1430484864720065, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.1441871589086822e-05, | |
| "loss": 2.1448, | |
| "step": 11980 | |
| }, | |
| { | |
| "epoch": 1.143718189124029, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.1367559028173307e-05, | |
| "loss": 2.1372, | |
| "step": 11990 | |
| }, | |
| { | |
| "epoch": 1.1443878917760515, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.1293457613048465e-05, | |
| "loss": 2.1874, | |
| "step": 12000 | |
| }, | |
| { | |
| "epoch": 1.1450575944280739, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.1219567748713894e-05, | |
| "loss": 2.1447, | |
| "step": 12010 | |
| }, | |
| { | |
| "epoch": 1.1457272970800965, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.1145889839015056e-05, | |
| "loss": 2.1918, | |
| "step": 12020 | |
| }, | |
| { | |
| "epoch": 1.146396999732119, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.1072424286638861e-05, | |
| "loss": 2.1311, | |
| "step": 12030 | |
| }, | |
| { | |
| "epoch": 1.1470667023841414, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.0999171493111654e-05, | |
| "loss": 2.2333, | |
| "step": 12040 | |
| }, | |
| { | |
| "epoch": 1.147736405036164, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.0926131858796924e-05, | |
| "loss": 2.1357, | |
| "step": 12050 | |
| }, | |
| { | |
| "epoch": 1.1484061076881864, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.0853305782893153e-05, | |
| "loss": 2.1856, | |
| "step": 12060 | |
| }, | |
| { | |
| "epoch": 1.149075810340209, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.0780693663431569e-05, | |
| "loss": 2.1265, | |
| "step": 12070 | |
| }, | |
| { | |
| "epoch": 1.1497455129922314, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.0708295897274073e-05, | |
| "loss": 2.0987, | |
| "step": 12080 | |
| }, | |
| { | |
| "epoch": 1.150415215644254, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.063611288011101e-05, | |
| "loss": 2.2172, | |
| "step": 12090 | |
| }, | |
| { | |
| "epoch": 1.1510849182962763, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.0564145006458976e-05, | |
| "loss": 2.1524, | |
| "step": 12100 | |
| }, | |
| { | |
| "epoch": 1.151754620948299, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.0492392669658735e-05, | |
| "loss": 2.1419, | |
| "step": 12110 | |
| }, | |
| { | |
| "epoch": 1.1524243236003215, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.0420856261873041e-05, | |
| "loss": 2.1354, | |
| "step": 12120 | |
| }, | |
| { | |
| "epoch": 1.153094026252344, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.0349536174084417e-05, | |
| "loss": 2.2442, | |
| "step": 12130 | |
| }, | |
| { | |
| "epoch": 1.1537637289043665, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.0278432796093206e-05, | |
| "loss": 2.0998, | |
| "step": 12140 | |
| }, | |
| { | |
| "epoch": 1.154433431556389, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.0207546516515215e-05, | |
| "loss": 2.1719, | |
| "step": 12150 | |
| }, | |
| { | |
| "epoch": 1.1551031342084115, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.0136877722779748e-05, | |
| "loss": 2.18, | |
| "step": 12160 | |
| }, | |
| { | |
| "epoch": 1.155772836860434, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.0066426801127448e-05, | |
| "loss": 2.1423, | |
| "step": 12170 | |
| }, | |
| { | |
| "epoch": 1.1564425395124565, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.996194136608128e-06, | |
| "loss": 2.1923, | |
| "step": 12180 | |
| }, | |
| { | |
| "epoch": 1.157112242164479, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.92618011307877e-06, | |
| "loss": 2.0779, | |
| "step": 12190 | |
| }, | |
| { | |
| "epoch": 1.1577819448165014, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.856385113201344e-06, | |
| "loss": 2.1922, | |
| "step": 12200 | |
| }, | |
| { | |
| "epoch": 1.158451647468524, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.786809518440765e-06, | |
| "loss": 2.141, | |
| "step": 12210 | |
| }, | |
| { | |
| "epoch": 1.1591213501205464, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.717453709062752e-06, | |
| "loss": 2.242, | |
| "step": 12220 | |
| }, | |
| { | |
| "epoch": 1.159791052772569, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.648318064131817e-06, | |
| "loss": 2.1027, | |
| "step": 12230 | |
| }, | |
| { | |
| "epoch": 1.1604607554245914, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.579402961509171e-06, | |
| "loss": 2.1743, | |
| "step": 12240 | |
| }, | |
| { | |
| "epoch": 1.161130458076614, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.510708777850602e-06, | |
| "loss": 2.1378, | |
| "step": 12250 | |
| }, | |
| { | |
| "epoch": 1.1618001607286366, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.442235888604518e-06, | |
| "loss": 2.1996, | |
| "step": 12260 | |
| }, | |
| { | |
| "epoch": 1.162469863380659, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.373984668009811e-06, | |
| "loss": 2.1038, | |
| "step": 12270 | |
| }, | |
| { | |
| "epoch": 1.1631395660326815, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.305955489093853e-06, | |
| "loss": 2.1266, | |
| "step": 12280 | |
| }, | |
| { | |
| "epoch": 1.163809268684704, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.238148723670454e-06, | |
| "loss": 2.2141, | |
| "step": 12290 | |
| }, | |
| { | |
| "epoch": 1.1644789713367265, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.170564742337784e-06, | |
| "loss": 2.1873, | |
| "step": 12300 | |
| }, | |
| { | |
| "epoch": 1.165148673988749, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.103203914476427e-06, | |
| "loss": 2.1729, | |
| "step": 12310 | |
| }, | |
| { | |
| "epoch": 1.1658183766407715, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.0360666082473e-06, | |
| "loss": 2.1411, | |
| "step": 12320 | |
| }, | |
| { | |
| "epoch": 1.166488079292794, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.969153190589691e-06, | |
| "loss": 2.2096, | |
| "step": 12330 | |
| }, | |
| { | |
| "epoch": 1.1671577819448165, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.902464027219171e-06, | |
| "loss": 2.1246, | |
| "step": 12340 | |
| }, | |
| { | |
| "epoch": 1.167827484596839, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.835999482625684e-06, | |
| "loss": 2.1925, | |
| "step": 12350 | |
| }, | |
| { | |
| "epoch": 1.1684971872488614, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.769759920071536e-06, | |
| "loss": 2.0945, | |
| "step": 12360 | |
| }, | |
| { | |
| "epoch": 1.169166889900884, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.703745701589332e-06, | |
| "loss": 2.098, | |
| "step": 12370 | |
| }, | |
| { | |
| "epoch": 1.1698365925529064, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.637957187980116e-06, | |
| "loss": 2.1349, | |
| "step": 12380 | |
| }, | |
| { | |
| "epoch": 1.170506295204929, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.572394738811318e-06, | |
| "loss": 2.1894, | |
| "step": 12390 | |
| }, | |
| { | |
| "epoch": 1.1711759978569516, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.507058712414811e-06, | |
| "loss": 2.1568, | |
| "step": 12400 | |
| }, | |
| { | |
| "epoch": 1.171845700508974, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.441949465884969e-06, | |
| "loss": 2.1275, | |
| "step": 12410 | |
| }, | |
| { | |
| "epoch": 1.1725154031609966, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.377067355076668e-06, | |
| "loss": 2.1202, | |
| "step": 12420 | |
| }, | |
| { | |
| "epoch": 1.173185105813019, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.312412734603392e-06, | |
| "loss": 2.1644, | |
| "step": 12430 | |
| }, | |
| { | |
| "epoch": 1.1738548084650415, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.247985957835286e-06, | |
| "loss": 2.2205, | |
| "step": 12440 | |
| }, | |
| { | |
| "epoch": 1.1745245111170641, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.183787376897212e-06, | |
| "loss": 2.1565, | |
| "step": 12450 | |
| }, | |
| { | |
| "epoch": 1.1751942137690865, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.119817342666785e-06, | |
| "loss": 2.0464, | |
| "step": 12460 | |
| }, | |
| { | |
| "epoch": 1.175863916421109, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.056076204772556e-06, | |
| "loss": 2.0674, | |
| "step": 12470 | |
| }, | |
| { | |
| "epoch": 1.1765336190731315, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.99256431159201e-06, | |
| "loss": 2.0096, | |
| "step": 12480 | |
| }, | |
| { | |
| "epoch": 1.177203321725154, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.92928201024969e-06, | |
| "loss": 2.1404, | |
| "step": 12490 | |
| }, | |
| { | |
| "epoch": 1.1778730243771764, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.86622964661532e-06, | |
| "loss": 2.1675, | |
| "step": 12500 | |
| }, | |
| { | |
| "epoch": 1.178542727029199, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.803407565301891e-06, | |
| "loss": 2.1568, | |
| "step": 12510 | |
| }, | |
| { | |
| "epoch": 1.1792124296812214, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.740816109663785e-06, | |
| "loss": 2.1097, | |
| "step": 12520 | |
| }, | |
| { | |
| "epoch": 1.179882132333244, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.678455621794906e-06, | |
| "loss": 2.1512, | |
| "step": 12530 | |
| }, | |
| { | |
| "epoch": 1.1805518349852666, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.616326442526784e-06, | |
| "loss": 2.0985, | |
| "step": 12540 | |
| }, | |
| { | |
| "epoch": 1.181221537637289, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.5544289114267365e-06, | |
| "loss": 2.175, | |
| "step": 12550 | |
| }, | |
| { | |
| "epoch": 1.1818912402893116, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.492763366796041e-06, | |
| "loss": 2.0826, | |
| "step": 12560 | |
| }, | |
| { | |
| "epoch": 1.182560942941334, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.431330145667986e-06, | |
| "loss": 2.2003, | |
| "step": 12570 | |
| }, | |
| { | |
| "epoch": 1.1832306455933566, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.370129583806151e-06, | |
| "loss": 1.9417, | |
| "step": 12580 | |
| }, | |
| { | |
| "epoch": 1.1839003482453792, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.309162015702492e-06, | |
| "loss": 2.2006, | |
| "step": 12590 | |
| }, | |
| { | |
| "epoch": 1.1845700508974015, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.2484277745755615e-06, | |
| "loss": 2.1589, | |
| "step": 12600 | |
| }, | |
| { | |
| "epoch": 1.1852397535494241, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.187927192368604e-06, | |
| "loss": 2.1157, | |
| "step": 12610 | |
| }, | |
| { | |
| "epoch": 1.1859094562014465, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.127660599747859e-06, | |
| "loss": 2.1022, | |
| "step": 12620 | |
| }, | |
| { | |
| "epoch": 1.186579158853469, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.067628326100667e-06, | |
| "loss": 2.0934, | |
| "step": 12630 | |
| }, | |
| { | |
| "epoch": 1.1872488615054915, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.0078306995337076e-06, | |
| "loss": 2.202, | |
| "step": 12640 | |
| }, | |
| { | |
| "epoch": 1.187918564157514, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.948268046871203e-06, | |
| "loss": 2.1258, | |
| "step": 12650 | |
| }, | |
| { | |
| "epoch": 1.1885882668095364, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.88894069365309e-06, | |
| "loss": 2.1314, | |
| "step": 12660 | |
| }, | |
| { | |
| "epoch": 1.189257969461559, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.8298489641333005e-06, | |
| "loss": 2.2186, | |
| "step": 12670 | |
| }, | |
| { | |
| "epoch": 1.1899276721135816, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.77099318127799e-06, | |
| "loss": 2.1139, | |
| "step": 12680 | |
| }, | |
| { | |
| "epoch": 1.190597374765604, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.7123736667636776e-06, | |
| "loss": 2.1249, | |
| "step": 12690 | |
| }, | |
| { | |
| "epoch": 1.1912670774176266, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.6539907409756185e-06, | |
| "loss": 2.0937, | |
| "step": 12700 | |
| }, | |
| { | |
| "epoch": 1.191936780069649, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.5958447230059675e-06, | |
| "loss": 2.1653, | |
| "step": 12710 | |
| }, | |
| { | |
| "epoch": 1.1926064827216716, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.537935930652067e-06, | |
| "loss": 2.1334, | |
| "step": 12720 | |
| }, | |
| { | |
| "epoch": 1.1932761853736942, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.480264680414683e-06, | |
| "loss": 2.1211, | |
| "step": 12730 | |
| }, | |
| { | |
| "epoch": 1.1939458880257166, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.422831287496306e-06, | |
| "loss": 2.1794, | |
| "step": 12740 | |
| }, | |
| { | |
| "epoch": 1.1946155906777391, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.365636065799424e-06, | |
| "loss": 2.1811, | |
| "step": 12750 | |
| }, | |
| { | |
| "epoch": 1.1952852933297615, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.308679327924788e-06, | |
| "loss": 2.1791, | |
| "step": 12760 | |
| }, | |
| { | |
| "epoch": 1.1959549959817841, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.25196138516973e-06, | |
| "loss": 1.9809, | |
| "step": 12770 | |
| }, | |
| { | |
| "epoch": 1.1966246986338065, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.19548254752641e-06, | |
| "loss": 2.0739, | |
| "step": 12780 | |
| }, | |
| { | |
| "epoch": 1.197294401285829, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.139243123680194e-06, | |
| "loss": 2.1205, | |
| "step": 12790 | |
| }, | |
| { | |
| "epoch": 1.1979641039378517, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.083243421007934e-06, | |
| "loss": 2.1492, | |
| "step": 12800 | |
| }, | |
| { | |
| "epoch": 1.198633806589874, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.027483745576246e-06, | |
| "loss": 2.0709, | |
| "step": 12810 | |
| }, | |
| { | |
| "epoch": 1.1993035092418967, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.971964402139907e-06, | |
| "loss": 2.1815, | |
| "step": 12820 | |
| }, | |
| { | |
| "epoch": 1.199973211893919, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.9166856941401505e-06, | |
| "loss": 2.0973, | |
| "step": 12830 | |
| }, | |
| { | |
| "epoch": 1.2006429145459416, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.861647923703028e-06, | |
| "loss": 2.1153, | |
| "step": 12840 | |
| }, | |
| { | |
| "epoch": 1.201312617197964, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.806851391637719e-06, | |
| "loss": 2.1091, | |
| "step": 12850 | |
| }, | |
| { | |
| "epoch": 1.2019823198499866, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.752296397434931e-06, | |
| "loss": 2.1239, | |
| "step": 12860 | |
| }, | |
| { | |
| "epoch": 1.2026520225020092, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.697983239265242e-06, | |
| "loss": 2.0781, | |
| "step": 12870 | |
| }, | |
| { | |
| "epoch": 1.2033217251540316, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.6439122139774815e-06, | |
| "loss": 2.0746, | |
| "step": 12880 | |
| }, | |
| { | |
| "epoch": 1.2039914278060542, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.590083617097097e-06, | |
| "loss": 2.1967, | |
| "step": 12890 | |
| }, | |
| { | |
| "epoch": 1.2046611304580765, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.5364977428245255e-06, | |
| "loss": 2.0922, | |
| "step": 12900 | |
| }, | |
| { | |
| "epoch": 1.2053308331100991, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.48315488403362e-06, | |
| "loss": 2.138, | |
| "step": 12910 | |
| }, | |
| { | |
| "epoch": 1.2060005357621217, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.430055332270045e-06, | |
| "loss": 2.0713, | |
| "step": 12920 | |
| }, | |
| { | |
| "epoch": 1.2066702384141441, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.37719937774962e-06, | |
| "loss": 2.0803, | |
| "step": 12930 | |
| }, | |
| { | |
| "epoch": 1.2073399410661667, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.324587309356832e-06, | |
| "loss": 2.1192, | |
| "step": 12940 | |
| }, | |
| { | |
| "epoch": 1.208009643718189, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.272219414643203e-06, | |
| "loss": 2.1146, | |
| "step": 12950 | |
| }, | |
| { | |
| "epoch": 1.2086793463702117, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.2200959798256785e-06, | |
| "loss": 2.175, | |
| "step": 12960 | |
| }, | |
| { | |
| "epoch": 1.209349049022234, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.168217289785154e-06, | |
| "loss": 2.1732, | |
| "step": 12970 | |
| }, | |
| { | |
| "epoch": 1.2100187516742567, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.116583628064858e-06, | |
| "loss": 2.115, | |
| "step": 12980 | |
| }, | |
| { | |
| "epoch": 1.210688454326279, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.0651952768688115e-06, | |
| "loss": 2.0194, | |
| "step": 12990 | |
| }, | |
| { | |
| "epoch": 1.2113581569783016, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.0140525170602744e-06, | |
| "loss": 2.2069, | |
| "step": 13000 | |
| }, | |
| { | |
| "epoch": 1.2120278596303242, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.963155628160276e-06, | |
| "loss": 2.0356, | |
| "step": 13010 | |
| }, | |
| { | |
| "epoch": 1.2126975622823466, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.9125048883459705e-06, | |
| "loss": 2.1624, | |
| "step": 13020 | |
| }, | |
| { | |
| "epoch": 1.2133672649343692, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.862100574449224e-06, | |
| "loss": 2.1486, | |
| "step": 13030 | |
| }, | |
| { | |
| "epoch": 1.2140369675863916, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.811942961955074e-06, | |
| "loss": 2.1325, | |
| "step": 13040 | |
| }, | |
| { | |
| "epoch": 1.2147066702384142, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.762032325000154e-06, | |
| "loss": 2.215, | |
| "step": 13050 | |
| }, | |
| { | |
| "epoch": 1.2153763728904368, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.712368936371309e-06, | |
| "loss": 2.1144, | |
| "step": 13060 | |
| }, | |
| { | |
| "epoch": 1.2160460755424591, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.662953067504022e-06, | |
| "loss": 2.1361, | |
| "step": 13070 | |
| }, | |
| { | |
| "epoch": 1.2167157781944817, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.613784988480946e-06, | |
| "loss": 2.1634, | |
| "step": 13080 | |
| }, | |
| { | |
| "epoch": 1.217385480846504, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.564864968030464e-06, | |
| "loss": 2.1118, | |
| "step": 13090 | |
| }, | |
| { | |
| "epoch": 1.2180551834985267, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.516193273525165e-06, | |
| "loss": 2.1313, | |
| "step": 13100 | |
| }, | |
| { | |
| "epoch": 1.218724886150549, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.46777017098044e-06, | |
| "loss": 2.1132, | |
| "step": 13110 | |
| }, | |
| { | |
| "epoch": 1.2193945888025717, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.41959592505296e-06, | |
| "loss": 2.1188, | |
| "step": 13120 | |
| }, | |
| { | |
| "epoch": 1.220064291454594, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.371670799039335e-06, | |
| "loss": 2.17, | |
| "step": 13130 | |
| }, | |
| { | |
| "epoch": 1.2207339941066166, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.32399505487453e-06, | |
| "loss": 2.1296, | |
| "step": 13140 | |
| }, | |
| { | |
| "epoch": 1.2214036967586392, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.276568953130561e-06, | |
| "loss": 2.1647, | |
| "step": 13150 | |
| }, | |
| { | |
| "epoch": 1.2220733994106616, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.229392753015027e-06, | |
| "loss": 2.1463, | |
| "step": 13160 | |
| }, | |
| { | |
| "epoch": 1.2227431020626842, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.182466712369648e-06, | |
| "loss": 2.1315, | |
| "step": 13170 | |
| }, | |
| { | |
| "epoch": 1.2234128047147066, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.135791087668933e-06, | |
| "loss": 2.1308, | |
| "step": 13180 | |
| }, | |
| { | |
| "epoch": 1.2240825073667292, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.089366134018735e-06, | |
| "loss": 2.1655, | |
| "step": 13190 | |
| }, | |
| { | |
| "epoch": 1.2247522100187518, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.043192105154842e-06, | |
| "loss": 2.0646, | |
| "step": 13200 | |
| }, | |
| { | |
| "epoch": 1.2254219126707742, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.997269253441638e-06, | |
| "loss": 2.1805, | |
| "step": 13210 | |
| }, | |
| { | |
| "epoch": 1.2260916153227968, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.951597829870685e-06, | |
| "loss": 2.0898, | |
| "step": 13220 | |
| }, | |
| { | |
| "epoch": 1.2267613179748191, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.90617808405937e-06, | |
| "loss": 2.2064, | |
| "step": 13230 | |
| }, | |
| { | |
| "epoch": 1.2274310206268417, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.861010264249504e-06, | |
| "loss": 2.1149, | |
| "step": 13240 | |
| }, | |
| { | |
| "epoch": 1.228100723278864, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.816094617306054e-06, | |
| "loss": 2.2301, | |
| "step": 13250 | |
| }, | |
| { | |
| "epoch": 1.2287704259308867, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.7714313887156793e-06, | |
| "loss": 2.2351, | |
| "step": 13260 | |
| }, | |
| { | |
| "epoch": 1.229440128582909, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.7270208225854696e-06, | |
| "loss": 2.1545, | |
| "step": 13270 | |
| }, | |
| { | |
| "epoch": 1.2301098312349317, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.682863161641592e-06, | |
| "loss": 2.2179, | |
| "step": 13280 | |
| }, | |
| { | |
| "epoch": 1.2307795338869543, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.638958647227936e-06, | |
| "loss": 2.063, | |
| "step": 13290 | |
| }, | |
| { | |
| "epoch": 1.2314492365389766, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.595307519304841e-06, | |
| "loss": 2.1517, | |
| "step": 13300 | |
| }, | |
| { | |
| "epoch": 1.2321189391909992, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.5519100164477625e-06, | |
| "loss": 2.0889, | |
| "step": 13310 | |
| }, | |
| { | |
| "epoch": 1.2327886418430216, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.5087663758459397e-06, | |
| "loss": 2.2184, | |
| "step": 13320 | |
| }, | |
| { | |
| "epoch": 1.2334583444950442, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.4658768333011514e-06, | |
| "loss": 2.075, | |
| "step": 13330 | |
| }, | |
| { | |
| "epoch": 1.2341280471470668, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.423241623226403e-06, | |
| "loss": 2.1403, | |
| "step": 13340 | |
| }, | |
| { | |
| "epoch": 1.2347977497990892, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.380860978644623e-06, | |
| "loss": 2.2063, | |
| "step": 13350 | |
| }, | |
| { | |
| "epoch": 1.2354674524511118, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.3387351311874314e-06, | |
| "loss": 2.1884, | |
| "step": 13360 | |
| }, | |
| { | |
| "epoch": 1.2361371551031342, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.2968643110938444e-06, | |
| "loss": 2.2006, | |
| "step": 13370 | |
| }, | |
| { | |
| "epoch": 1.2368068577551568, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.255248747209033e-06, | |
| "loss": 2.0195, | |
| "step": 13380 | |
| }, | |
| { | |
| "epoch": 1.2374765604071791, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.2138886669830504e-06, | |
| "loss": 2.0719, | |
| "step": 13390 | |
| }, | |
| { | |
| "epoch": 1.2381462630592017, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.1727842964696274e-06, | |
| "loss": 2.1849, | |
| "step": 13400 | |
| }, | |
| { | |
| "epoch": 1.238815965711224, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.1319358603248694e-06, | |
| "loss": 2.071, | |
| "step": 13410 | |
| }, | |
| { | |
| "epoch": 1.2394856683632467, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.091343581806111e-06, | |
| "loss": 2.0483, | |
| "step": 13420 | |
| }, | |
| { | |
| "epoch": 1.2401553710152693, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.0510076827706456e-06, | |
| "loss": 2.0878, | |
| "step": 13430 | |
| }, | |
| { | |
| "epoch": 1.2408250736672917, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.010928383674516e-06, | |
| "loss": 2.1341, | |
| "step": 13440 | |
| }, | |
| { | |
| "epoch": 1.2414947763193143, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.971105903571314e-06, | |
| "loss": 2.1568, | |
| "step": 13450 | |
| }, | |
| { | |
| "epoch": 1.2421644789713366, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.9315404601110096e-06, | |
| "loss": 2.1335, | |
| "step": 13460 | |
| }, | |
| { | |
| "epoch": 1.2428341816233592, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.8922322695386974e-06, | |
| "loss": 2.1339, | |
| "step": 13470 | |
| }, | |
| { | |
| "epoch": 1.2435038842753818, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.853181546693501e-06, | |
| "loss": 2.203, | |
| "step": 13480 | |
| }, | |
| { | |
| "epoch": 1.2441735869274042, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.8143885050073148e-06, | |
| "loss": 2.0968, | |
| "step": 13490 | |
| }, | |
| { | |
| "epoch": 1.2448432895794268, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.7758533565036993e-06, | |
| "loss": 2.1765, | |
| "step": 13500 | |
| }, | |
| { | |
| "epoch": 1.2455129922314492, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.7375763117966857e-06, | |
| "loss": 2.088, | |
| "step": 13510 | |
| }, | |
| { | |
| "epoch": 1.2461826948834718, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.699557580089662e-06, | |
| "loss": 2.1812, | |
| "step": 13520 | |
| }, | |
| { | |
| "epoch": 1.2468523975354942, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.661797369174157e-06, | |
| "loss": 2.1773, | |
| "step": 13530 | |
| }, | |
| { | |
| "epoch": 1.2475221001875167, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.624295885428796e-06, | |
| "loss": 2.131, | |
| "step": 13540 | |
| }, | |
| { | |
| "epoch": 1.2481918028395391, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.5870533338181235e-06, | |
| "loss": 2.1509, | |
| "step": 13550 | |
| }, | |
| { | |
| "epoch": 1.2488615054915617, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.5500699178914623e-06, | |
| "loss": 2.1224, | |
| "step": 13560 | |
| }, | |
| { | |
| "epoch": 1.2495312081435843, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.513345839781861e-06, | |
| "loss": 2.1377, | |
| "step": 13570 | |
| }, | |
| { | |
| "epoch": 1.2502009107956067, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.4768813002049416e-06, | |
| "loss": 2.0525, | |
| "step": 13580 | |
| }, | |
| { | |
| "epoch": 1.2508706134476293, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.440676498457811e-06, | |
| "loss": 2.1756, | |
| "step": 13590 | |
| }, | |
| { | |
| "epoch": 1.2515403160996517, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.4047316324179837e-06, | |
| "loss": 2.1819, | |
| "step": 13600 | |
| }, | |
| { | |
| "epoch": 1.2522100187516743, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.3690468985423096e-06, | |
| "loss": 2.0861, | |
| "step": 13610 | |
| }, | |
| { | |
| "epoch": 1.2528797214036969, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.3336224918658446e-06, | |
| "loss": 2.1533, | |
| "step": 13620 | |
| }, | |
| { | |
| "epoch": 1.2535494240557192, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.2984586060008696e-06, | |
| "loss": 2.1075, | |
| "step": 13630 | |
| }, | |
| { | |
| "epoch": 1.2542191267077418, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.2635554331357724e-06, | |
| "loss": 2.158, | |
| "step": 13640 | |
| }, | |
| { | |
| "epoch": 1.2548888293597642, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.2289131640339913e-06, | |
| "loss": 2.0522, | |
| "step": 13650 | |
| }, | |
| { | |
| "epoch": 1.2555585320117868, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.1945319880330274e-06, | |
| "loss": 2.1394, | |
| "step": 13660 | |
| }, | |
| { | |
| "epoch": 1.2562282346638094, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.160412093043357e-06, | |
| "loss": 2.1565, | |
| "step": 13670 | |
| }, | |
| { | |
| "epoch": 1.2568979373158318, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.126553665547426e-06, | |
| "loss": 2.1117, | |
| "step": 13680 | |
| }, | |
| { | |
| "epoch": 1.2575676399678541, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.0929568905986242e-06, | |
| "loss": 2.1721, | |
| "step": 13690 | |
| }, | |
| { | |
| "epoch": 1.2582373426198767, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.0596219518202953e-06, | |
| "loss": 2.1621, | |
| "step": 13700 | |
| }, | |
| { | |
| "epoch": 1.2589070452718993, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.02654903140469e-06, | |
| "loss": 2.1366, | |
| "step": 13710 | |
| }, | |
| { | |
| "epoch": 1.2595767479239217, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.9937383101120255e-06, | |
| "loss": 2.0949, | |
| "step": 13720 | |
| }, | |
| { | |
| "epoch": 1.2602464505759443, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.96118996726945e-06, | |
| "loss": 2.1662, | |
| "step": 13730 | |
| }, | |
| { | |
| "epoch": 1.2609161532279667, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.928904180770069e-06, | |
| "loss": 2.1523, | |
| "step": 13740 | |
| }, | |
| { | |
| "epoch": 1.2615858558799893, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.8968811270720256e-06, | |
| "loss": 2.1152, | |
| "step": 13750 | |
| }, | |
| { | |
| "epoch": 1.2622555585320119, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.8651209811974657e-06, | |
| "loss": 2.1852, | |
| "step": 13760 | |
| }, | |
| { | |
| "epoch": 1.2629252611840343, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.8336239167316027e-06, | |
| "loss": 2.1504, | |
| "step": 13770 | |
| }, | |
| { | |
| "epoch": 1.2635949638360569, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.802390105821805e-06, | |
| "loss": 2.1188, | |
| "step": 13780 | |
| }, | |
| { | |
| "epoch": 1.2642646664880792, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.771419719176609e-06, | |
| "loss": 2.1576, | |
| "step": 13790 | |
| }, | |
| { | |
| "epoch": 1.2649343691401018, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.740712926064808e-06, | |
| "loss": 2.1234, | |
| "step": 13800 | |
| }, | |
| { | |
| "epoch": 1.2656040717921244, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.7102698943145256e-06, | |
| "loss": 2.1045, | |
| "step": 13810 | |
| }, | |
| { | |
| "epoch": 1.2662737744441468, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.6800907903122998e-06, | |
| "loss": 2.15, | |
| "step": 13820 | |
| }, | |
| { | |
| "epoch": 1.2669434770961692, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.6501757790021555e-06, | |
| "loss": 2.1931, | |
| "step": 13830 | |
| }, | |
| { | |
| "epoch": 1.2676131797481918, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.620525023884739e-06, | |
| "loss": 2.1509, | |
| "step": 13840 | |
| }, | |
| { | |
| "epoch": 1.2682828824002144, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.5911386870163913e-06, | |
| "loss": 2.1728, | |
| "step": 13850 | |
| }, | |
| { | |
| "epoch": 1.2689525850522367, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.5620169290082642e-06, | |
| "loss": 2.1134, | |
| "step": 13860 | |
| }, | |
| { | |
| "epoch": 1.2696222877042593, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.5331599090254778e-06, | |
| "loss": 2.1672, | |
| "step": 13870 | |
| }, | |
| { | |
| "epoch": 1.2702919903562817, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.5045677847862094e-06, | |
| "loss": 2.1091, | |
| "step": 13880 | |
| }, | |
| { | |
| "epoch": 1.2709616930083043, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.476240712560839e-06, | |
| "loss": 2.169, | |
| "step": 13890 | |
| }, | |
| { | |
| "epoch": 1.271631395660327, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.4481788471711222e-06, | |
| "loss": 2.2051, | |
| "step": 13900 | |
| }, | |
| { | |
| "epoch": 1.2723010983123493, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.4203823419893237e-06, | |
| "loss": 2.1638, | |
| "step": 13910 | |
| }, | |
| { | |
| "epoch": 1.2729708009643719, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.3928513489373518e-06, | |
| "loss": 2.1165, | |
| "step": 13920 | |
| }, | |
| { | |
| "epoch": 1.2736405036163942, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.365586018485998e-06, | |
| "loss": 2.182, | |
| "step": 13930 | |
| }, | |
| { | |
| "epoch": 1.2743102062684168, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.3385864996540488e-06, | |
| "loss": 2.1222, | |
| "step": 13940 | |
| }, | |
| { | |
| "epoch": 1.2749799089204394, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.311852940007502e-06, | |
| "loss": 2.2216, | |
| "step": 13950 | |
| }, | |
| { | |
| "epoch": 1.2756496115724618, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.2853854856587521e-06, | |
| "loss": 2.1033, | |
| "step": 13960 | |
| }, | |
| { | |
| "epoch": 1.2763193142244842, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.2591842812658017e-06, | |
| "loss": 2.0526, | |
| "step": 13970 | |
| }, | |
| { | |
| "epoch": 1.2769890168765068, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.2332494700314556e-06, | |
| "loss": 2.1177, | |
| "step": 13980 | |
| }, | |
| { | |
| "epoch": 1.2776587195285294, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.207581193702534e-06, | |
| "loss": 2.1358, | |
| "step": 13990 | |
| }, | |
| { | |
| "epoch": 1.2783284221805518, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.1821795925691436e-06, | |
| "loss": 2.104, | |
| "step": 14000 | |
| }, | |
| { | |
| "epoch": 1.2789981248325744, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.1570448054638417e-06, | |
| "loss": 2.1815, | |
| "step": 14010 | |
| }, | |
| { | |
| "epoch": 1.2796678274845967, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.1321769697609286e-06, | |
| "loss": 2.1766, | |
| "step": 14020 | |
| }, | |
| { | |
| "epoch": 1.2803375301366193, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.1075762213756835e-06, | |
| "loss": 2.1828, | |
| "step": 14030 | |
| }, | |
| { | |
| "epoch": 1.281007232788642, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.083242694763603e-06, | |
| "loss": 2.0712, | |
| "step": 14040 | |
| }, | |
| { | |
| "epoch": 1.2816769354406643, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.0591765229197026e-06, | |
| "loss": 2.1697, | |
| "step": 14050 | |
| }, | |
| { | |
| "epoch": 1.282346638092687, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.035377837377749e-06, | |
| "loss": 2.21, | |
| "step": 14060 | |
| }, | |
| { | |
| "epoch": 1.2830163407447093, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.0118467682095733e-06, | |
| "loss": 2.1687, | |
| "step": 14070 | |
| }, | |
| { | |
| "epoch": 1.2836860433967319, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.88583444024349e-07, | |
| "loss": 2.1748, | |
| "step": 14080 | |
| }, | |
| { | |
| "epoch": 1.2843557460487545, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.655879919678811e-07, | |
| "loss": 2.1528, | |
| "step": 14090 | |
| }, | |
| { | |
| "epoch": 1.2850254487007768, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.428605377219235e-07, | |
| "loss": 2.1177, | |
| "step": 14100 | |
| }, | |
| { | |
| "epoch": 1.2856951513527994, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.204012055034739e-07, | |
| "loss": 2.1339, | |
| "step": 14110 | |
| }, | |
| { | |
| "epoch": 1.2863648540048218, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.982101180641355e-07, | |
| "loss": 2.2136, | |
| "step": 14120 | |
| }, | |
| { | |
| "epoch": 1.2870345566568444, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.762873966893959e-07, | |
| "loss": 2.1921, | |
| "step": 14130 | |
| }, | |
| { | |
| "epoch": 1.2877042593088668, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.546331611979818e-07, | |
| "loss": 2.204, | |
| "step": 14140 | |
| }, | |
| { | |
| "epoch": 1.2883739619608894, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.33247529941239e-07, | |
| "loss": 2.1083, | |
| "step": 14150 | |
| }, | |
| { | |
| "epoch": 1.2890436646129118, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.121306198024204e-07, | |
| "loss": 2.0582, | |
| "step": 14160 | |
| }, | |
| { | |
| "epoch": 1.2897133672649344, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.912825461960982e-07, | |
| "loss": 2.1152, | |
| "step": 14170 | |
| }, | |
| { | |
| "epoch": 1.290383069916957, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.707034230675314e-07, | |
| "loss": 2.1319, | |
| "step": 14180 | |
| }, | |
| { | |
| "epoch": 1.2910527725689793, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.50393362891999e-07, | |
| "loss": 2.1414, | |
| "step": 14190 | |
| }, | |
| { | |
| "epoch": 1.291722475221002, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.303524766742398e-07, | |
| "loss": 2.0645, | |
| "step": 14200 | |
| }, | |
| { | |
| "epoch": 1.2923921778730243, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.105808739478082e-07, | |
| "loss": 2.0771, | |
| "step": 14210 | |
| }, | |
| { | |
| "epoch": 1.293061880525047, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.910786627744914e-07, | |
| "loss": 2.2023, | |
| "step": 14220 | |
| }, | |
| { | |
| "epoch": 1.2937315831770695, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.718459497437213e-07, | |
| "loss": 2.0567, | |
| "step": 14230 | |
| }, | |
| { | |
| "epoch": 1.2944012858290919, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.5288283997198e-07, | |
| "loss": 2.1083, | |
| "step": 14240 | |
| }, | |
| { | |
| "epoch": 1.2950709884811145, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.341894371022339e-07, | |
| "loss": 2.1249, | |
| "step": 14250 | |
| }, | |
| { | |
| "epoch": 1.2957406911331368, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.15765843303362e-07, | |
| "loss": 2.1732, | |
| "step": 14260 | |
| }, | |
| { | |
| "epoch": 1.2964103937851594, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.97612159269606e-07, | |
| "loss": 2.1813, | |
| "step": 14270 | |
| }, | |
| { | |
| "epoch": 1.297080096437182, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.797284842200102e-07, | |
| "loss": 2.0738, | |
| "step": 14280 | |
| }, | |
| { | |
| "epoch": 1.2977497990892044, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.621149158978822e-07, | |
| "loss": 2.1295, | |
| "step": 14290 | |
| }, | |
| { | |
| "epoch": 1.2984195017412268, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.447715505702722e-07, | |
| "loss": 2.1226, | |
| "step": 14300 | |
| }, | |
| { | |
| "epoch": 1.2990892043932494, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.276984830274167e-07, | |
| "loss": 2.1751, | |
| "step": 14310 | |
| }, | |
| { | |
| "epoch": 1.299758907045272, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.108958065822511e-07, | |
| "loss": 2.1895, | |
| "step": 14320 | |
| }, | |
| { | |
| "epoch": 1.3004286096972943, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.943636130698814e-07, | |
| "loss": 2.1437, | |
| "step": 14330 | |
| }, | |
| { | |
| "epoch": 1.301098312349317, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.781019928470909e-07, | |
| "loss": 2.1998, | |
| "step": 14340 | |
| }, | |
| { | |
| "epoch": 1.3017680150013393, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.6211103479184006e-07, | |
| "loss": 2.1283, | |
| "step": 14350 | |
| }, | |
| { | |
| "epoch": 1.302437717653362, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.4639082630278383e-07, | |
| "loss": 2.1758, | |
| "step": 14360 | |
| }, | |
| { | |
| "epoch": 1.3031074203053845, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.3094145329879987e-07, | |
| "loss": 2.1329, | |
| "step": 14370 | |
| }, | |
| { | |
| "epoch": 1.3037771229574069, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.157630002185053e-07, | |
| "loss": 2.175, | |
| "step": 14380 | |
| }, | |
| { | |
| "epoch": 1.3044468256094295, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.00855550019813e-07, | |
| "loss": 2.1512, | |
| "step": 14390 | |
| }, | |
| { | |
| "epoch": 1.3051165282614519, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.862191841794538e-07, | |
| "loss": 2.1549, | |
| "step": 14400 | |
| }, | |
| { | |
| "epoch": 1.3057862309134745, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.71853982692566e-07, | |
| "loss": 2.1426, | |
| "step": 14410 | |
| }, | |
| { | |
| "epoch": 1.306455933565497, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.5776002407221785e-07, | |
| "loss": 2.1325, | |
| "step": 14420 | |
| }, | |
| { | |
| "epoch": 1.3071256362175194, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.43937385349008e-07, | |
| "loss": 2.16, | |
| "step": 14430 | |
| }, | |
| { | |
| "epoch": 1.3077953388695418, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.3038614207063224e-07, | |
| "loss": 2.0809, | |
| "step": 14440 | |
| }, | |
| { | |
| "epoch": 1.3084650415215644, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.171063683014841e-07, | |
| "loss": 2.0655, | |
| "step": 14450 | |
| }, | |
| { | |
| "epoch": 1.309134744173587, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.040981366222162e-07, | |
| "loss": 2.1228, | |
| "step": 14460 | |
| }, | |
| { | |
| "epoch": 1.3098044468256094, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.913615181293905e-07, | |
| "loss": 2.1572, | |
| "step": 14470 | |
| }, | |
| { | |
| "epoch": 1.310474149477632, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.7889658243504536e-07, | |
| "loss": 2.1039, | |
| "step": 14480 | |
| }, | |
| { | |
| "epoch": 1.3111438521296543, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.6670339766635686e-07, | |
| "loss": 2.2135, | |
| "step": 14490 | |
| }, | |
| { | |
| "epoch": 1.311813554781677, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.547820304652282e-07, | |
| "loss": 2.1868, | |
| "step": 14500 | |
| }, | |
| { | |
| "epoch": 1.3124832574336995, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.4313254598795075e-07, | |
| "loss": 2.1494, | |
| "step": 14510 | |
| }, | |
| { | |
| "epoch": 1.313152960085722, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.3175500790483252e-07, | |
| "loss": 2.1583, | |
| "step": 14520 | |
| }, | |
| { | |
| "epoch": 1.3138226627377445, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.2064947839987027e-07, | |
| "loss": 2.2234, | |
| "step": 14530 | |
| }, | |
| { | |
| "epoch": 1.3144923653897669, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.098160181703779e-07, | |
| "loss": 2.0454, | |
| "step": 14540 | |
| }, | |
| { | |
| "epoch": 1.3151620680417895, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.9925468642668643e-07, | |
| "loss": 2.1914, | |
| "step": 14550 | |
| }, | |
| { | |
| "epoch": 1.315831770693812, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.8896554089181118e-07, | |
| "loss": 2.0893, | |
| "step": 14560 | |
| }, | |
| { | |
| "epoch": 1.3165014733458344, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.7894863780111847e-07, | |
| "loss": 2.1605, | |
| "step": 14570 | |
| }, | |
| { | |
| "epoch": 1.3171711759978568, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.6920403190203714e-07, | |
| "loss": 2.0833, | |
| "step": 14580 | |
| }, | |
| { | |
| "epoch": 1.3178408786498794, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.5973177645376426e-07, | |
| "loss": 2.17, | |
| "step": 14590 | |
| }, | |
| { | |
| "epoch": 1.318510581301902, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.5053192322694863e-07, | |
| "loss": 2.1729, | |
| "step": 14600 | |
| }, | |
| { | |
| "epoch": 1.3191802839539244, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.4160452250344125e-07, | |
| "loss": 2.1936, | |
| "step": 14610 | |
| }, | |
| { | |
| "epoch": 1.319849986605947, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.3294962307598967e-07, | |
| "loss": 2.1658, | |
| "step": 14620 | |
| }, | |
| { | |
| "epoch": 1.3205196892579694, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.245672722479829e-07, | |
| "loss": 2.1066, | |
| "step": 14630 | |
| }, | |
| { | |
| "epoch": 1.321189391909992, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.1645751583320152e-07, | |
| "loss": 2.1153, | |
| "step": 14640 | |
| }, | |
| { | |
| "epoch": 1.3218590945620146, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.0862039815555669e-07, | |
| "loss": 2.1155, | |
| "step": 14650 | |
| }, | |
| { | |
| "epoch": 1.322528797214037, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.0105596204885715e-07, | |
| "loss": 2.0831, | |
| "step": 14660 | |
| }, | |
| { | |
| "epoch": 1.3231984998660595, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.376424885655932e-08, | |
| "loss": 2.2167, | |
| "step": 14670 | |
| }, | |
| { | |
| "epoch": 1.323868202518082, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.674529843155643e-08, | |
| "loss": 2.2026, | |
| "step": 14680 | |
| }, | |
| { | |
| "epoch": 1.3245379051701045, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.999914913595641e-08, | |
| "loss": 2.2008, | |
| "step": 14690 | |
| }, | |
| { | |
| "epoch": 1.325207607822127, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.352583784087096e-08, | |
| "loss": 2.2153, | |
| "step": 14700 | |
| }, | |
| { | |
| "epoch": 1.3258773104741495, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.732539992621023e-08, | |
| "loss": 2.12, | |
| "step": 14710 | |
| }, | |
| { | |
| "epoch": 1.3265470131261718, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.139786928050506e-08, | |
| "loss": 2.1566, | |
| "step": 14720 | |
| }, | |
| { | |
| "epoch": 1.3272167157781944, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.5743278300690596e-08, | |
| "loss": 2.1574, | |
| "step": 14730 | |
| }, | |
| { | |
| "epoch": 1.327886418430217, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.0361657891967454e-08, | |
| "loss": 2.0934, | |
| "step": 14740 | |
| }, | |
| { | |
| "epoch": 1.3285561210822394, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.52530374676019e-08, | |
| "loss": 2.2051, | |
| "step": 14750 | |
| }, | |
| { | |
| "epoch": 1.329225823734262, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.0417444948792627e-08, | |
| "loss": 2.2382, | |
| "step": 14760 | |
| }, | |
| { | |
| "epoch": 1.3298955263862844, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.585490676448755e-08, | |
| "loss": 2.2488, | |
| "step": 14770 | |
| }, | |
| { | |
| "epoch": 1.330565229038307, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.15654478512617e-08, | |
| "loss": 2.1103, | |
| "step": 14780 | |
| }, | |
| { | |
| "epoch": 1.3312349316903296, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.7549091653167325e-08, | |
| "loss": 2.1718, | |
| "step": 14790 | |
| }, | |
| { | |
| "epoch": 1.331904634342352, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.3805860121628442e-08, | |
| "loss": 2.0955, | |
| "step": 14800 | |
| }, | |
| { | |
| "epoch": 1.3325743369943746, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.0335773715285388e-08, | |
| "loss": 2.1328, | |
| "step": 14810 | |
| }, | |
| { | |
| "epoch": 1.333244039646397, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.713885139991711e-08, | |
| "loss": 2.2143, | |
| "step": 14820 | |
| }, | |
| { | |
| "epoch": 1.3339137422984195, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.4215110648319042e-08, | |
| "loss": 2.0206, | |
| "step": 14830 | |
| }, | |
| { | |
| "epoch": 1.3345834449504421, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.156456744020873e-08, | |
| "loss": 2.1667, | |
| "step": 14840 | |
| }, | |
| { | |
| "epoch": 1.3352531476024645, | |
| "grad_norm": 0.0, | |
| "learning_rate": 9.187236262137022e-09, | |
| "loss": 2.1365, | |
| "step": 14850 | |
| }, | |
| { | |
| "epoch": 1.3359228502544869, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.083130107421454e-09, | |
| "loss": 2.1685, | |
| "step": 14860 | |
| }, | |
| { | |
| "epoch": 1.3365925529065095, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.252260476074078e-09, | |
| "loss": 2.2017, | |
| "step": 14870 | |
| }, | |
| { | |
| "epoch": 1.337262255558532, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.6946373747070994e-09, | |
| "loss": 2.1203, | |
| "step": 14880 | |
| }, | |
| { | |
| "epoch": 1.3379319582105544, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.410269316521774e-09, | |
| "loss": 2.1974, | |
| "step": 14890 | |
| }, | |
| { | |
| "epoch": 1.338601660862577, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.3991633212417921e-09, | |
| "loss": 2.152, | |
| "step": 14900 | |
| }, | |
| { | |
| "epoch": 1.3392713635145994, | |
| "grad_norm": 0.0, | |
| "learning_rate": 6.613249150688727e-10, | |
| "loss": 2.137, | |
| "step": 14910 | |
| }, | |
| { | |
| "epoch": 1.339941066166622, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.9675813066055704e-10, | |
| "loss": 2.1729, | |
| "step": 14920 | |
| }, | |
| { | |
| "epoch": 1.3406107688186446, | |
| "grad_norm": 0.0, | |
| "learning_rate": 5.465507113555645e-12, | |
| "loss": 2.1984, | |
| "step": 14930 | |
| }, | |
| { | |
| "epoch": 1.340744709349049, | |
| "step": 14932, | |
| "total_flos": 5.508146882263646e+18, | |
| "train_loss": 2.206626670404634, | |
| "train_runtime": 64853.4008, | |
| "train_samples_per_second": 0.23, | |
| "train_steps_per_second": 0.23 | |
| } | |
| ], | |
| "logging_steps": 10, | |
| "max_steps": 14932, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 9223372036854775807, | |
| "save_steps": 1660, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": true | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 5.508146882263646e+18, | |
| "train_batch_size": 1, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |
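
The JSON above is the complete trainer state for this run: `log_history` holds one record per `logging_steps` (10) interval plus a final summary record, and the trailing keys describe the run configuration. As a minimal sketch only (not part of the original log), the snippet below shows one way to load such a state file and pull out the loss curve and the run summary; the file name `trainer_state.json` and its location are assumptions and should be adjusted to wherever this checkpoint's state file actually lives.

```python
# Minimal sketch: inspect a Hugging Face trainer_state.json like the one above.
# Assumption: the file is saved as "trainer_state.json" next to the checkpoint.
import json

with open("trainer_state.json") as f:
    state = json.load(f)

# Per-step logging events carry "loss" and "learning_rate"; the last entry in
# log_history is the run summary (train_loss, train_runtime, ...).
history = [e for e in state["log_history"] if "loss" in e and "learning_rate" in e]
summary = state["log_history"][-1]

print(f"logged points     : {len(history)}")
print(f"final global step : {state['global_step']}")
print(f"last 5 losses     : {[e['loss'] for e in history[-5:]]}")
print(f"overall train_loss: {summary.get('train_loss')}")
print(f"train_runtime (s) : {summary.get('train_runtime')}")
```

Plotting `e["loss"]` against `e["step"]` from the same `history` list would give the usual loss-versus-step curve for this run; that part is left out here to keep the sketch dependency-free.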