{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.0,
  "eval_steps": 500,
  "global_step": 9480,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0.0010548523206751054, "grad_norm": 1.2186888456344604, "learning_rate": 0.00015822784810126583, "loss": 7.4756, "step": 10 },
    { "epoch": 0.002109704641350211, "grad_norm": 1.1075139045715332, "learning_rate": 0.00031645569620253165, "loss": 6.7987, "step": 20 },
    { "epoch": 0.0031645569620253164, "grad_norm": 0.8604756593704224, "learning_rate": 0.00047468354430379745, "loss": 6.1951, "step": 30 },
    { "epoch": 0.004219409282700422, "grad_norm": 0.6423186659812927, "learning_rate": 0.0006329113924050633, "loss": 5.7039, "step": 40 },
    { "epoch": 0.005274261603375527, "grad_norm": 0.6575109362602234, "learning_rate": 0.0007911392405063291, "loss": 5.2429, "step": 50 },
    { "epoch": 0.006329113924050633, "grad_norm": 0.7181506156921387, "learning_rate": 0.0009493670886075949, "loss": 4.7705, "step": 60 },
    { "epoch": 0.007383966244725738, "grad_norm": 0.9496937990188599, "learning_rate": 0.0011075949367088608, "loss": 4.3984, "step": 70 },
    { "epoch": 0.008438818565400843, "grad_norm": 1.0826588869094849, "learning_rate": 0.0012658227848101266, "loss": 4.1411, "step": 80 },
    { "epoch": 0.00949367088607595, "grad_norm": 0.7389441728591919, "learning_rate": 0.0014240506329113926, "loss": 3.9336, "step": 90 },
    { "epoch": 0.010548523206751054, "grad_norm": 1.0077191591262817, "learning_rate": 0.0015, "loss": 3.7884, "step": 100 },
    { "epoch": 0.011603375527426161, "grad_norm": 0.7541709542274475, "learning_rate": 0.0015, "loss": 3.6578, "step": 110 },
    { "epoch": 0.012658227848101266, "grad_norm": 0.7902190685272217, "learning_rate": 0.0015, "loss": 3.5503, "step": 120 },
    { "epoch": 0.013713080168776372, "grad_norm": 0.7221662998199463, "learning_rate": 0.0015, "loss": 3.4618, "step": 130 },
    { "epoch": 0.014767932489451477, "grad_norm": 1.2441911697387695, "learning_rate": 0.0015, "loss": 3.3819, "step": 140 },
    { "epoch": 0.015822784810126583, "grad_norm": 0.9825108647346497, "learning_rate": 0.0015, "loss": 3.3147, "step": 150 },
    { "epoch": 0.016877637130801686, "grad_norm": 0.9007912874221802, "learning_rate": 0.0015, "loss": 3.2649, "step": 160 },
    { "epoch": 0.017932489451476793, "grad_norm": 0.86688232421875, "learning_rate": 0.0015, "loss": 3.1977, "step": 170 },
    { "epoch": 0.0189873417721519, "grad_norm": 0.8259088397026062, "learning_rate": 0.0015, "loss": 3.1623, "step": 180 },
    { "epoch": 0.020042194092827006, "grad_norm": 1.230103850364685, "learning_rate": 0.0015, "loss": 3.1005, "step": 190 },
    { "epoch": 0.02109704641350211, "grad_norm": 0.9208290576934814, "learning_rate": 0.0015, "loss": 3.0592, "step": 200 },
    { "epoch": 0.022151898734177215, "grad_norm": 0.7172234654426575, "learning_rate": 0.0015, "loss": 3.0197, "step": 210 },
    { "epoch": 0.023206751054852322, "grad_norm": 0.691364049911499, "learning_rate": 0.0015, "loss": 2.9838, "step": 220 },
    { "epoch": 0.024261603375527425, "grad_norm": 0.882768452167511, "learning_rate": 0.0015, "loss": 2.9276, "step": 230 },
    { "epoch": 0.02531645569620253, "grad_norm": 1.0899426937103271, "learning_rate": 0.0015, "loss": 2.898, "step": 240 },
    { "epoch": 0.026371308016877638, "grad_norm": 0.835606038570404, "learning_rate": 0.0015, "loss": 2.8611, "step": 250 },
    { "epoch": 0.027426160337552744, "grad_norm": 0.9906381964683533, "learning_rate": 0.0015, "loss": 2.8228, "step": 260 },
    { "epoch": 0.028481012658227847, "grad_norm": 0.837228000164032, "learning_rate": 0.0015, "loss": 2.7884, "step": 270 },
    { "epoch": 0.029535864978902954, "grad_norm": 0.9279177784919739, "learning_rate": 0.0015, "loss": 2.7519, "step": 280 },
    { "epoch": 0.03059071729957806, "grad_norm": 1.240341305732727, "learning_rate": 0.0015, "loss": 2.7221, "step": 290 },
    { "epoch": 0.03164556962025317, "grad_norm": 0.9779770374298096, "learning_rate": 0.0015, "loss": 2.6969, "step": 300 },
    { "epoch": 0.03270042194092827, "grad_norm": 1.3490874767303467, "learning_rate": 0.0015, "loss": 2.6669, "step": 310 },
    { "epoch": 0.03375527426160337, "grad_norm": 1.3380482196807861, "learning_rate": 0.0015, "loss": 2.6312, "step": 320 },
    { "epoch": 0.03481012658227848, "grad_norm": 0.8288021087646484, "learning_rate": 0.0015, "loss": 2.6254, "step": 330 },
    { "epoch": 0.035864978902953586, "grad_norm": 1.363772988319397, "learning_rate": 0.0015, "loss": 2.5977, "step": 340 },
    { "epoch": 0.03691983122362869, "grad_norm": 1.2266892194747925, "learning_rate": 0.0015, "loss": 2.5532, "step": 350 },
    { "epoch": 0.0379746835443038, "grad_norm": 0.9206980466842651, "learning_rate": 0.0015, "loss": 2.5316, "step": 360 },
    { "epoch": 0.039029535864978905, "grad_norm": 1.0549575090408325, "learning_rate": 0.0015, "loss": 2.5158, "step": 370 },
    { "epoch": 0.04008438818565401, "grad_norm": 1.217510461807251, "learning_rate": 0.0015, "loss": 2.4978, "step": 380 },
    { "epoch": 0.04113924050632911, "grad_norm": 1.0540152788162231, "learning_rate": 0.0015, "loss": 2.4792, "step": 390 },
    { "epoch": 0.04219409282700422, "grad_norm": 1.0417619943618774, "learning_rate": 0.0015, "loss": 2.4573, "step": 400 },
    { "epoch": 0.043248945147679324, "grad_norm": 0.7437645196914673, "learning_rate": 0.0015, "loss": 2.442, "step": 410 },
    { "epoch": 0.04430379746835443, "grad_norm": 1.1524747610092163, "learning_rate": 0.0015, "loss": 2.4248, "step": 420 },
    { "epoch": 0.04535864978902954, "grad_norm": 1.0705971717834473, "learning_rate": 0.0015, "loss": 2.3973, "step": 430 },
    { "epoch": 0.046413502109704644, "grad_norm": 0.9775382280349731, "learning_rate": 0.0015, "loss": 2.3833, "step": 440 },
    { "epoch": 0.04746835443037975, "grad_norm": 1.025772213935852, "learning_rate": 0.0015, "loss": 2.3803, "step": 450 },
    { "epoch": 0.04852320675105485, "grad_norm": 1.246941328048706, "learning_rate": 0.0015, "loss": 2.3622, "step": 460 },
    { "epoch": 0.049578059071729956, "grad_norm": 1.0031028985977173, "learning_rate": 0.0015, "loss": 2.3334, "step": 470 },
    { "epoch": 0.05063291139240506, "grad_norm": 1.5016298294067383, "learning_rate": 0.0015, "loss": 2.3327, "step": 480 },
    { "epoch": 0.05168776371308017, "grad_norm": 0.7244430184364319, "learning_rate": 0.0015, "loss": 2.3008, "step": 490 },
    { "epoch": 0.052742616033755275, "grad_norm": 1.213437795639038, "learning_rate": 0.0015, "loss": 2.2994, "step": 500 },
    { "epoch": 0.05379746835443038, "grad_norm": 1.2152141332626343, "learning_rate": 0.0015, "loss": 2.2937, "step": 510 },
    { "epoch": 0.05485232067510549, "grad_norm": 0.9512096047401428, "learning_rate": 0.0015, "loss": 2.2921, "step": 520 },
    { "epoch": 0.05590717299578059, "grad_norm": 0.7877956628799438, "learning_rate": 0.0015, "loss": 2.256, "step": 530 },
    { "epoch": 0.056962025316455694, "grad_norm": 1.007867693901062, "learning_rate": 0.0015, "loss": 2.2389, "step": 540 },
    { "epoch": 0.0580168776371308, "grad_norm": 1.2216438055038452, "learning_rate": 0.0015, "loss": 2.2341, "step": 550 },
    { "epoch": 0.05907172995780591, "grad_norm": 0.769692599773407, "learning_rate": 0.0015, "loss": 2.2007, "step": 560 },
    { "epoch": 0.060126582278481014, "grad_norm": 1.202372431755066, "learning_rate": 0.0015, "loss": 2.2073, "step": 570 },
    { "epoch": 0.06118143459915612, "grad_norm": 1.0290395021438599, "learning_rate": 0.0015, "loss": 2.2108, "step": 580 },
    { "epoch": 0.06223628691983123, "grad_norm": 1.367793321609497, "learning_rate": 0.0015, "loss": 2.1828, "step": 590 },
    { "epoch": 0.06329113924050633, "grad_norm": 0.8654493093490601, "learning_rate": 0.0015, "loss": 2.1673, "step": 600 },
    { "epoch": 0.06434599156118144, "grad_norm": 0.8177367448806763, "learning_rate": 0.0015, "loss": 2.1687, "step": 610 },
    { "epoch": 0.06540084388185655, "grad_norm": 1.0598950386047363, "learning_rate": 0.0015, "loss": 2.1564, "step": 620 },
    { "epoch": 0.06645569620253164, "grad_norm": 1.063185691833496, "learning_rate": 0.0015, "loss": 2.1392, "step": 630 },
    { "epoch": 0.06751054852320675, "grad_norm": 0.8926445245742798, "learning_rate": 0.0015, "loss": 2.1467, "step": 640 },
    { "epoch": 0.06856540084388185, "grad_norm": 0.7596344947814941, "learning_rate": 0.0015, "loss": 2.1377, "step": 650 },
    { "epoch": 0.06962025316455696, "grad_norm": 0.8228513598442078, "learning_rate": 0.0015, "loss": 2.1232, "step": 660 },
    { "epoch": 0.07067510548523206, "grad_norm": 0.8619157075881958, "learning_rate": 0.0015, "loss": 2.108, "step": 670 },
    { "epoch": 0.07172995780590717, "grad_norm": 2.402742862701416, "learning_rate": 0.0015, "loss": 2.1152, "step": 680 },
    { "epoch": 0.07278481012658228, "grad_norm": 0.8077136278152466, "learning_rate": 0.0015, "loss": 2.135, "step": 690 },
    { "epoch": 0.07383966244725738, "grad_norm": 0.8595909476280212, "learning_rate": 0.0015, "loss": 2.0909, "step": 700 },
    { "epoch": 0.07489451476793249, "grad_norm": 0.8992576003074646, "learning_rate": 0.0015, "loss": 2.0899, "step": 710 },
    { "epoch": 0.0759493670886076, "grad_norm": 1.1452020406723022, "learning_rate": 0.0015, "loss": 2.0698, "step": 720 },
    { "epoch": 0.0770042194092827, "grad_norm": 0.9489206075668335, "learning_rate": 0.0015, "loss": 2.0649, "step": 730 },
    { "epoch": 0.07805907172995781, "grad_norm": 0.6932376027107239, "learning_rate": 0.0015, "loss": 2.0725, "step": 740 },
    { "epoch": 0.07911392405063292, "grad_norm": 0.9433043599128723, "learning_rate": 0.0015, "loss": 2.0514, "step": 750 },
    { "epoch": 0.08016877637130802, "grad_norm": 1.7378337383270264, "learning_rate": 0.0015, "loss": 2.0728, "step": 760 },
    { "epoch": 0.08122362869198312, "grad_norm": 1.3360079526901245, "learning_rate": 0.0015, "loss": 2.0565, "step": 770 },
    { "epoch": 0.08227848101265822, "grad_norm": 0.9339075088500977, "learning_rate": 0.0015, "loss": 2.0525, "step": 780 },
    { "epoch": 0.08333333333333333, "grad_norm": 0.6675604581832886, "learning_rate": 0.0015, "loss": 2.0403, "step": 790 },
    { "epoch": 0.08438818565400844, "grad_norm": 0.6963586211204529, "learning_rate": 0.0015, "loss": 2.0174, "step": 800 },
    { "epoch": 0.08544303797468354, "grad_norm": 0.756433367729187, "learning_rate": 0.0015, "loss": 2.0048, "step": 810 },
    { "epoch": 0.08649789029535865, "grad_norm": 0.7605345845222473, "learning_rate": 0.0015, "loss": 2.0148, "step": 820 },
    { "epoch": 0.08755274261603375, "grad_norm": 0.9260172843933105, "learning_rate": 0.0015, "loss": 2.0224, "step": 830 },
    { "epoch": 0.08860759493670886, "grad_norm": 1.2491339445114136, "learning_rate": 0.0015, "loss": 2.0016, "step": 840 },
    { "epoch": 0.08966244725738397, "grad_norm": 0.9123998880386353, "learning_rate": 0.0015, "loss": 1.9979, "step": 850 },
    { "epoch": 0.09071729957805907, "grad_norm": 1.066710114479065, "learning_rate": 0.0015, "loss": 1.9967, "step": 860 },
    { "epoch": 0.09177215189873418, "grad_norm": 0.6761780381202698, "learning_rate": 0.0015, "loss": 1.9925, "step": 870 },
    { "epoch": 0.09282700421940929, "grad_norm": 0.7574045062065125, "learning_rate": 0.0015, "loss": 1.9863, "step": 880 },
    { "epoch": 0.0938818565400844, "grad_norm": 0.8483901023864746, "learning_rate": 0.0015, "loss": 1.9792, "step": 890 },
    { "epoch": 0.0949367088607595, "grad_norm": 0.9919297099113464, "learning_rate": 0.0015, "loss": 1.9766, "step": 900 },
    { "epoch": 0.09599156118143459, "grad_norm": 0.9863075613975525, "learning_rate": 0.0015, "loss": 1.985, "step": 910 },
    { "epoch": 0.0970464135021097, "grad_norm": 1.0680369138717651, "learning_rate": 0.0015, "loss": 1.9688, "step": 920 },
    { "epoch": 0.0981012658227848, "grad_norm": 0.7299118041992188, "learning_rate": 0.0015, "loss": 1.9583, "step": 930 },
    { "epoch": 0.09915611814345991, "grad_norm": 0.7109554409980774, "learning_rate": 0.0015, "loss": 1.9685, "step": 940 },
    { "epoch": 0.10021097046413502, "grad_norm": 0.6925338506698608, "learning_rate": 0.0015, "loss": 1.9396, "step": 950 },
    { "epoch": 0.10126582278481013, "grad_norm": 0.989055335521698, "learning_rate": 0.0015, "loss": 1.9474, "step": 960 },
    { "epoch": 0.10232067510548523, "grad_norm": 0.7260894179344177, "learning_rate": 0.0015, "loss": 1.9499, "step": 970 },
    { "epoch": 0.10337552742616034, "grad_norm": 1.1013668775558472, "learning_rate": 0.0015, "loss": 1.9332, "step": 980 },
    { "epoch": 0.10443037974683544, "grad_norm": 0.6925954818725586, "learning_rate": 0.0015, "loss": 1.9296, "step": 990 },
    { "epoch": 0.10548523206751055, "grad_norm": 1.757063865661621, "learning_rate": 0.0015, "loss": 1.9455, "step": 1000 },
    { "epoch": 0.10654008438818566, "grad_norm": 1.0348504781723022, "learning_rate": 0.0015, "loss": 1.9542, "step": 1010 },
    { "epoch": 0.10759493670886076, "grad_norm": 0.7209298610687256, "learning_rate": 0.0015, "loss": 1.919, "step": 1020 },
    { "epoch": 0.10864978902953587, "grad_norm": 0.7077058553695679, "learning_rate": 0.0015, "loss": 1.9186, "step": 1030 },
    { "epoch": 0.10970464135021098, "grad_norm": 0.6635832786560059, "learning_rate": 0.0015, "loss": 1.9155, "step": 1040 },
    { "epoch": 0.11075949367088607, "grad_norm": 0.8985444903373718, "learning_rate": 0.0015, "loss": 1.918, "step": 1050 },
    { "epoch": 0.11181434599156118, "grad_norm": 0.7381898164749146, "learning_rate": 0.0015, "loss": 1.9115, "step": 1060 },
    { "epoch": 0.11286919831223628, "grad_norm": 0.7176501750946045, "learning_rate": 0.0015, "loss": 1.9009, "step": 1070 },
    { "epoch": 0.11392405063291139, "grad_norm": 0.6653502583503723, "learning_rate": 0.0015, "loss": 1.8975, "step": 1080 },
    { "epoch": 0.1149789029535865, "grad_norm": 0.6939038634300232, "learning_rate": 0.0015, "loss": 1.9065, "step": 1090 },
    { "epoch": 0.1160337552742616, "grad_norm": 0.819861888885498, "learning_rate": 0.0015, "loss": 1.8948, "step": 1100 },
    { "epoch": 0.11708860759493671, "grad_norm": 0.7226906418800354, "learning_rate": 0.0015, "loss": 1.8988, "step": 1110 },
    { "epoch": 0.11814345991561181, "grad_norm": 0.9487333297729492, "learning_rate": 0.0015, "loss": 1.8923, "step": 1120 },
    { "epoch": 0.11919831223628692, "grad_norm": 0.8971943855285645, "learning_rate": 0.0015, "loss": 1.8775, "step": 1130 },
    { "epoch": 0.12025316455696203, "grad_norm": 0.7244747281074524, "learning_rate": 0.0015, "loss": 1.8967, "step": 1140 },
    { "epoch": 0.12130801687763713, "grad_norm": 0.753216028213501, "learning_rate": 0.0015, "loss": 1.8789, "step": 1150 },
    { "epoch": 0.12236286919831224, "grad_norm": 0.7540823221206665, "learning_rate": 0.0015, "loss": 1.8828, "step": 1160 },
    { "epoch": 0.12341772151898735, "grad_norm": 0.7866358757019043, "learning_rate": 0.0015, "loss": 1.8744, "step": 1170 },
    { "epoch": 0.12447257383966245, "grad_norm": 0.752935528755188, "learning_rate": 0.0015, "loss": 1.8599, "step": 1180 },
    { "epoch": 0.12552742616033755, "grad_norm": 0.6611055731773376, "learning_rate": 0.0015, "loss": 1.8637, "step": 1190 },
    { "epoch": 0.12658227848101267, "grad_norm": 1.2516565322875977, "learning_rate": 0.0015, "loss": 1.8793, "step": 1200 },
    { "epoch": 0.12763713080168776, "grad_norm": 0.6632084846496582, "learning_rate": 0.0015, "loss": 1.8685, "step": 1210 },
    { "epoch": 0.12869198312236288, "grad_norm": 0.6862037181854248, "learning_rate": 0.0015, "loss": 1.8506, "step": 1220 },
    { "epoch": 0.12974683544303797, "grad_norm": 0.8717275261878967, "learning_rate": 0.0015, "loss": 1.8432, "step": 1230 },
    { "epoch": 0.1308016877637131, "grad_norm": 1.1524758338928223, "learning_rate": 0.0015, "loss": 1.866, "step": 1240 },
    { "epoch": 0.13185654008438819, "grad_norm": 0.9125797748565674, "learning_rate": 0.0015, "loss": 1.8598, "step": 1250 },
    { "epoch": 0.13291139240506328, "grad_norm": 0.8476076722145081, "learning_rate": 0.0015, "loss": 1.8439, "step": 1260 },
    { "epoch": 0.1339662447257384, "grad_norm": 0.9498160481452942, "learning_rate": 0.0015, "loss": 1.8459, "step": 1270 },
    { "epoch": 0.1350210970464135, "grad_norm": 0.7211402654647827, "learning_rate": 0.0015, "loss": 1.8615, "step": 1280 },
    { "epoch": 0.1360759493670886, "grad_norm": 0.6844839453697205, "learning_rate": 0.0015, "loss": 1.8446, "step": 1290 },
    { "epoch": 0.1371308016877637, "grad_norm": 0.8232640027999878, "learning_rate": 0.0015, "loss": 1.832, "step": 1300 },
    { "epoch": 0.13818565400843882, "grad_norm": 0.6936826705932617, "learning_rate": 0.0015, "loss": 1.8344, "step": 1310 },
    { "epoch": 0.13924050632911392, "grad_norm": 0.9277529716491699, "learning_rate": 0.0015, "loss": 1.8353, "step": 1320 },
    { "epoch": 0.14029535864978904, "grad_norm": 0.7764731049537659, "learning_rate": 0.0015, "loss": 1.831, "step": 1330 },
    { "epoch": 0.14135021097046413, "grad_norm": 0.9051993489265442, "learning_rate": 0.0015, "loss": 1.8301, "step": 1340 },
    { "epoch": 0.14240506329113925, "grad_norm": 0.6625475883483887, "learning_rate": 0.0015, "loss": 1.8343, "step": 1350 },
    { "epoch": 0.14345991561181434, "grad_norm": 0.6393569111824036, "learning_rate": 0.0015, "loss": 1.8297, "step": 1360 },
    { "epoch": 0.14451476793248946, "grad_norm": 0.6683023571968079, "learning_rate": 0.0015, "loss": 1.8235, "step": 1370 },
    { "epoch": 0.14556962025316456, "grad_norm": 0.7225092649459839, "learning_rate": 0.0015, "loss": 1.8168, "step": 1380 },
    { "epoch": 0.14662447257383968, "grad_norm": 1.3273886442184448, "learning_rate": 0.0015, "loss": 1.8285, "step": 1390 },
    { "epoch": 0.14767932489451477, "grad_norm": 1.3077683448791504, "learning_rate": 0.0015, "loss": 1.8139, "step": 1400 },
    { "epoch": 0.14873417721518986, "grad_norm": 0.6626515984535217, "learning_rate": 0.0015, "loss": 1.812, "step": 1410 },
    { "epoch": 0.14978902953586498, "grad_norm": 0.7292690277099609, "learning_rate": 0.0015, "loss": 1.8165, "step": 1420 },
    { "epoch": 0.15084388185654007, "grad_norm": 0.7270740270614624, "learning_rate": 0.0015, "loss": 1.8093, "step": 1430 },
    { "epoch": 0.1518987341772152, "grad_norm": 0.9681549072265625, "learning_rate": 0.0015, "loss": 1.8034, "step": 1440 },
    { "epoch": 0.1529535864978903, "grad_norm": 0.6339258551597595, "learning_rate": 0.0015, "loss": 1.805, "step": 1450 },
    { "epoch": 0.1540084388185654, "grad_norm": 0.6470240950584412, "learning_rate": 0.0015, "loss": 1.8001, "step": 1460 },
    { "epoch": 0.1550632911392405, "grad_norm": 0.8807370662689209, "learning_rate": 0.0015, "loss": 1.8002, "step": 1470 },
    { "epoch": 0.15611814345991562, "grad_norm": 0.7718443274497986, "learning_rate": 0.0015, "loss": 1.8049, "step": 1480 },
    { "epoch": 0.1571729957805907, "grad_norm": 0.8173696994781494, "learning_rate": 0.0015, "loss": 1.7981, "step": 1490 },
    { "epoch": 0.15822784810126583, "grad_norm": 0.6859337687492371, "learning_rate": 0.0015, "loss": 1.7994, "step": 1500 },
    { "epoch": 0.15928270042194093, "grad_norm": 0.7179945707321167, "learning_rate": 0.0015, "loss": 1.7937, "step": 1510 },
    { "epoch": 0.16033755274261605, "grad_norm": 0.6422714591026306, "learning_rate": 0.0015, "loss": 1.7976, "step": 1520 },
    { "epoch": 0.16139240506329114, "grad_norm": 0.8165238499641418, "learning_rate": 0.0015, "loss": 1.7832, "step": 1530 },
    { "epoch": 0.16244725738396623, "grad_norm": 0.6381688714027405, "learning_rate": 0.0015, "loss": 1.7716, "step": 1540 },
    { "epoch": 0.16350210970464135, "grad_norm": 0.6977247595787048, "learning_rate": 0.0015, "loss": 1.7763, "step": 1550 },
    { "epoch": 0.16455696202531644, "grad_norm": 0.6377383470535278, "learning_rate": 0.0015, "loss": 1.7759, "step": 1560 },
    { "epoch": 0.16561181434599156, "grad_norm": 0.9726520776748657, "learning_rate": 0.0015, "loss": 1.7675, "step": 1570 },
    { "epoch": 0.16666666666666666, "grad_norm": 0.6959405541419983, "learning_rate": 0.0015, "loss": 1.7814, "step": 1580 },
    { "epoch": 0.16772151898734178, "grad_norm": 0.8633522987365723, "learning_rate": 0.0015, "loss": 1.7783, "step": 1590 },
    { "epoch": 0.16877637130801687, "grad_norm": 0.7004960179328918, "learning_rate": 0.0015, "loss": 1.7804, "step": 1600 },
    { "epoch": 0.169831223628692, "grad_norm": 0.7215518951416016, "learning_rate": 0.0015, "loss": 1.7773, "step": 1610 },
    { "epoch": 0.17088607594936708, "grad_norm": 0.6039355397224426, "learning_rate": 0.0015, "loss": 1.7504, "step": 1620 },
    { "epoch": 0.1719409282700422, "grad_norm": 0.9246911406517029, "learning_rate": 0.0015, "loss": 1.7648, "step": 1630 },
    { "epoch": 0.1729957805907173, "grad_norm": 0.8042846918106079, "learning_rate": 0.0015, "loss": 1.7651, "step": 1640 },
    { "epoch": 0.17405063291139242, "grad_norm": 0.6565026044845581, "learning_rate": 0.0015, "loss": 1.7564, "step": 1650 },
    { "epoch": 0.1751054852320675, "grad_norm": 0.7790629267692566, "learning_rate": 0.0015, "loss": 1.7574, "step": 1660 },
    { "epoch": 0.17616033755274263, "grad_norm": 0.6511490345001221, "learning_rate": 0.0015, "loss": 1.7633, "step": 1670 },
    { "epoch": 0.17721518987341772, "grad_norm": 0.6874651312828064, "learning_rate": 0.0015, "loss": 1.7522, "step": 1680 },
    { "epoch": 0.17827004219409281, "grad_norm": 0.6518383622169495, "learning_rate": 0.0015, "loss": 1.7446, "step": 1690 },
    { "epoch": 0.17932489451476794, "grad_norm": 0.9337310194969177, "learning_rate": 0.0015, "loss": 1.7634, "step": 1700 },
    { "epoch": 0.18037974683544303, "grad_norm": 1.0004547834396362, "learning_rate": 0.0015, "loss": 1.7414, "step": 1710 },
    { "epoch": 0.18143459915611815, "grad_norm": 0.9592145681381226, "learning_rate": 0.0015, "loss": 1.7396, "step": 1720 },
    { "epoch": 0.18248945147679324, "grad_norm": 0.8522003889083862, "learning_rate": 0.0015, "loss": 1.7394, "step": 1730 },
    { "epoch": 0.18354430379746836, "grad_norm": 0.6941075921058655, "learning_rate": 0.0015, "loss": 1.7461, "step": 1740 },
    { "epoch": 0.18459915611814345, "grad_norm": 0.6232513785362244, "learning_rate": 0.0015, "loss": 1.7665, "step": 1750 },
    { "epoch": 0.18565400843881857, "grad_norm": 1.227797269821167, "learning_rate": 0.0015, "loss": 1.7449, "step": 1760 },
    { "epoch": 0.18670886075949367, "grad_norm": 0.689431369304657, "learning_rate": 0.0015, "loss": 1.7496, "step": 1770 },
    { "epoch": 0.1877637130801688, "grad_norm": 0.7479903101921082, "learning_rate": 0.0015, "loss": 1.7448, "step": 1780 },
    { "epoch": 0.18881856540084388, "grad_norm": 0.6916646361351013, "learning_rate": 0.0015, "loss": 1.7481, "step": 1790 },
    { "epoch": 0.189873417721519, "grad_norm": 0.8803281188011169, "learning_rate": 0.0015, "loss": 1.7404, "step": 1800 },
    { "epoch": 0.1909282700421941, "grad_norm": 0.8555834889411926, "learning_rate": 0.0015, "loss": 1.7316, "step": 1810 },
    { "epoch": 0.19198312236286919, "grad_norm": 0.6191942691802979, "learning_rate": 0.0015, "loss": 1.7284, "step": 1820 },
    { "epoch": 0.1930379746835443, "grad_norm": 0.5880500078201294, "learning_rate": 0.0015, "loss": 1.7269, "step": 1830 },
    { "epoch": 0.1940928270042194, "grad_norm": 0.6119520664215088, "learning_rate": 0.0015, "loss": 1.7287, "step": 1840 },
    { "epoch": 0.19514767932489452, "grad_norm": 0.8929871916770935, "learning_rate": 0.0015, "loss": 1.731, "step": 1850 },
    { "epoch": 0.1962025316455696, "grad_norm": 0.8229356408119202, "learning_rate": 0.0015, "loss": 1.7355, "step": 1860 },
    { "epoch": 0.19725738396624473, "grad_norm": 0.9643657803535461, "learning_rate": 0.0015, "loss": 1.7365, "step": 1870 },
    { "epoch": 0.19831223628691982, "grad_norm": 0.6754857301712036, "learning_rate": 0.0015, "loss": 1.7198, "step": 1880 },
    { "epoch": 0.19936708860759494, "grad_norm": 0.7569988369941711, "learning_rate": 0.0015, "loss": 1.7248, "step": 1890 },
    { "epoch": 0.20042194092827004, "grad_norm": 0.7129053473472595, "learning_rate": 0.0015, "loss": 1.7223, "step": 1900 },
    { "epoch": 0.20147679324894516, "grad_norm": 1.1085187196731567, "learning_rate": 0.0015, "loss": 1.726, "step": 1910 },
    { "epoch": 0.20253164556962025, "grad_norm": 0.8156635761260986, "learning_rate": 0.0015, "loss": 1.7233, "step": 1920 },
    { "epoch": 0.20358649789029537, "grad_norm": 0.6475657224655151, "learning_rate": 0.0015, "loss": 1.7241, "step": 1930 },
    { "epoch": 0.20464135021097046, "grad_norm": 0.6763522028923035, "learning_rate": 0.0015, "loss": 1.7149, "step": 1940 },
    { "epoch": 0.20569620253164558, "grad_norm": 0.7413578629493713, "learning_rate": 0.0015, "loss": 1.7129, "step": 1950 },
    { "epoch": 0.20675105485232068, "grad_norm": 0.6201931834220886, "learning_rate": 0.0015, "loss": 1.7185, "step": 1960 },
    { "epoch": 0.20780590717299577, "grad_norm": 1.0813745260238647, "learning_rate": 0.0015, "loss": 1.7222, "step": 1970 },
    { "epoch": 0.2088607594936709, "grad_norm": 0.7272746562957764, "learning_rate": 0.0015, "loss": 1.7075, "step": 1980 },
    { "epoch": 0.20991561181434598, "grad_norm": 0.6317844986915588, "learning_rate": 0.0015, "loss": 1.7012, "step": 1990 },
    { "epoch": 0.2109704641350211, "grad_norm": 0.8212335705757141, "learning_rate": 0.0015, "loss": 1.7043, "step": 2000 },
    { "epoch": 0.2120253164556962, "grad_norm": 0.8446721434593201, "learning_rate": 0.0015, "loss": 1.7112, "step": 2010 },
    { "epoch": 0.21308016877637131, "grad_norm": 0.7635065317153931, "learning_rate": 0.0015, "loss": 1.7201, "step": 2020 },
    { "epoch": 0.2141350210970464, "grad_norm": 0.6259655952453613, "learning_rate": 0.0015, "loss": 1.707, "step": 2030 },
    { "epoch": 0.21518987341772153, "grad_norm": 0.6341089606285095, "learning_rate": 0.0015, "loss": 1.7077, "step": 2040 },
    { "epoch": 0.21624472573839662, "grad_norm": 0.6744656562805176, "learning_rate": 0.0015, "loss": 1.7092, "step": 2050 },
    { "epoch": 0.21729957805907174, "grad_norm": 0.7680793404579163, "learning_rate": 0.0015, "loss": 1.6997, "step": 2060 },
    { "epoch": 0.21835443037974683, "grad_norm": 0.855820894241333, "learning_rate": 0.0015, "loss": 1.6979, "step": 2070 },
    { "epoch": 0.21940928270042195, "grad_norm": 0.6359190940856934, "learning_rate": 0.0015, "loss": 1.7029, "step": 2080 },
    { "epoch": 0.22046413502109705, "grad_norm": 0.739136815071106, "learning_rate": 0.0015, "loss": 1.6946, "step": 2090 },
    { "epoch": 0.22151898734177214, "grad_norm": 0.6583918333053589, "learning_rate": 0.0015, "loss": 1.7005, "step": 2100 },
    { "epoch": 0.22257383966244726, "grad_norm": 0.6532536745071411, "learning_rate": 0.0015, "loss": 1.6911, "step": 2110 },
    { "epoch": 0.22362869198312235, "grad_norm": 1.0495470762252808, "learning_rate": 0.0015, "loss": 1.6959, "step": 2120 },
    { "epoch": 0.22468354430379747, "grad_norm": 0.700576901435852, "learning_rate": 0.0015, "loss": 1.6932, "step": 2130 },
    { "epoch": 0.22573839662447256, "grad_norm": 0.6586419939994812, "learning_rate": 0.0015, "loss": 1.684, "step": 2140 },
    { "epoch": 0.22679324894514769, "grad_norm": 0.6587622761726379, "learning_rate": 0.0015, "loss": 1.6935, "step": 2150 },
    { "epoch": 0.22784810126582278, "grad_norm": 0.681542694568634, "learning_rate": 0.0015, "loss": 1.7028, "step": 2160 },
    { "epoch": 0.2289029535864979, "grad_norm": 0.6412506103515625, "learning_rate": 0.0015, "loss": 1.6956, "step": 2170 },
    { "epoch": 0.229957805907173, "grad_norm": 0.7530674934387207, "learning_rate": 0.0015, "loss": 1.6761, "step": 2180 },
    { "epoch": 0.2310126582278481, "grad_norm": 0.6285898685455322, "learning_rate": 0.0015, "loss": 1.69, "step": 2190 },
    { "epoch": 0.2320675105485232, "grad_norm": 1.0234174728393555, "learning_rate": 0.0015, "loss": 1.6931, "step": 2200 },
    { "epoch": 0.23312236286919832, "grad_norm": 0.7023261189460754, "learning_rate": 0.0015, "loss": 1.6776, "step": 2210 },
    { "epoch": 0.23417721518987342, "grad_norm": 0.6849989295005798, "learning_rate": 0.0015, "loss": 1.6911, "step": 2220 },
    { "epoch": 0.23523206751054854, "grad_norm": 0.8580043911933899, "learning_rate": 0.0015, "loss": 1.6807, "step": 2230 },
    { "epoch": 0.23628691983122363, "grad_norm": 0.8025040030479431, "learning_rate": 0.0015, "loss": 1.682, "step": 2240 },
    { "epoch": 0.23734177215189872, "grad_norm": 0.6510624885559082, "learning_rate": 0.0015, "loss": 1.6767, "step": 2250 },
    { "epoch": 0.23839662447257384, "grad_norm": 0.8475005626678467, "learning_rate": 0.0015, "loss": 1.6703, "step": 2260 },
    { "epoch": 0.23945147679324894, "grad_norm": 0.5999197363853455, "learning_rate": 0.0015, "loss": 1.7013, "step": 2270 },
    { "epoch": 0.24050632911392406, "grad_norm": 0.6120131015777588, "learning_rate": 0.0015, "loss": 1.687, "step": 2280 },
    { "epoch": 0.24156118143459915, "grad_norm": 0.8520112633705139, "learning_rate": 0.0015, "loss": 1.67, "step": 2290 },
    { "epoch": 0.24261603375527427, "grad_norm": 0.6505146622657776, "learning_rate": 0.0015, "loss": 1.6715, "step": 2300 },
    { "epoch": 0.24367088607594936, "grad_norm": 0.8636083602905273, "learning_rate": 0.0015, "loss": 1.6736, "step": 2310 },
    { "epoch": 0.24472573839662448, "grad_norm": 0.6786522269248962, "learning_rate": 0.0015, "loss": 1.6814, "step": 2320 },
    { "epoch": 0.24578059071729957, "grad_norm": 0.6315372586250305, "learning_rate": 0.0015, "loss": 1.6611, "step": 2330 },
    { "epoch": 0.2468354430379747, "grad_norm": 0.6420019268989563, "learning_rate": 0.0015, "loss": 1.6601, "step": 2340 },
    { "epoch": 0.2478902953586498, "grad_norm": 0.9882775545120239, "learning_rate": 0.0015, "loss": 1.6627, "step": 2350 },
    { "epoch": 0.2489451476793249, "grad_norm": 0.7462952733039856, "learning_rate": 0.0015, "loss": 1.6721, "step": 2360 },
    { "epoch": 0.25, "grad_norm": 0.6469263434410095, "learning_rate": 0.0015, "loss": 1.6693, "step": 2370 },
    { "epoch": 0.2510548523206751, "grad_norm": 0.714177131652832, "learning_rate": 0.0015, "loss": 1.6692, "step": 2380 },
    { "epoch": 0.2521097046413502, "grad_norm": 1.074702262878418, "learning_rate": 0.0015, "loss": 1.6695, "step": 2390 },
    { "epoch": 0.25316455696202533, "grad_norm": 0.6026642918586731, "learning_rate": 0.0015, "loss": 1.6631, "step": 2400 },
    { "epoch": 0.2542194092827004, "grad_norm": 0.9359568953514099, "learning_rate": 0.0015, "loss": 1.6685, "step": 2410 },
    { "epoch": 0.2552742616033755, "grad_norm": 0.9025223851203918, "learning_rate": 0.0015, "loss": 1.6624, "step": 2420 },
    { "epoch": 0.2563291139240506, "grad_norm": 0.6915332674980164, "learning_rate": 0.0015, "loss": 1.665, "step": 2430 },
    { "epoch": 0.25738396624472576, "grad_norm": 1.0445406436920166, "learning_rate": 0.0015, "loss": 1.6697, "step": 2440 },
    { "epoch": 0.25843881856540085, "grad_norm": 0.8682367205619812, "learning_rate": 0.0015, "loss": 1.6582, "step": 2450 },
    { "epoch": 0.25949367088607594, "grad_norm": 0.6498146057128906, "learning_rate": 0.0015, "loss": 1.6709, "step": 2460 },
    { "epoch": 0.26054852320675104, "grad_norm": 0.773652970790863, "learning_rate": 0.0015, "loss": 1.6583, "step": 2470 },
    { "epoch": 0.2616033755274262, "grad_norm": 0.6476708054542542, "learning_rate": 0.0015, "loss": 1.6601, "step": 2480 },
    { "epoch": 0.2626582278481013, "grad_norm": 0.7904232740402222, "learning_rate": 0.0015, "loss": 1.6499, "step": 2490 },
    { "epoch": 0.26371308016877637, "grad_norm": 0.961468517780304, "learning_rate": 0.0015, "loss": 1.6616, "step": 2500 },
    { "epoch": 0.26476793248945146, "grad_norm": 0.6232831478118896, "learning_rate": 0.0015, "loss": 1.6544, "step": 2510 },
    { "epoch": 0.26582278481012656, "grad_norm": 0.6506131291389465, "learning_rate": 0.0015, "loss": 1.656, "step": 2520 },
    { "epoch": 0.2668776371308017, "grad_norm": 0.7402883768081665, "learning_rate": 0.0015, "loss": 1.6513, "step": 2530 },
    { "epoch": 0.2679324894514768, "grad_norm": 0.9401949644088745, "learning_rate": 0.0015, "loss": 1.6607, "step": 2540 },
    { "epoch": 0.2689873417721519, "grad_norm": 0.8450409770011902, "learning_rate": 0.0015, "loss": 1.6536, "step": 2550 },
    { "epoch": 0.270042194092827, "grad_norm": 0.6884193420410156, "learning_rate": 0.0015, "loss": 1.6565, "step": 2560 },
    { "epoch": 0.27109704641350213, "grad_norm": 0.6372545957565308, "learning_rate": 0.0015, "loss": 1.6506, "step": 2570 },
    { "epoch": 0.2721518987341772, "grad_norm": 0.6220951676368713, "learning_rate": 0.0015, "loss": 1.6522, "step": 2580 },
    { "epoch": 0.2732067510548523, "grad_norm": 0.8408181071281433, "learning_rate": 0.0015, "loss": 1.6557, "step": 2590 },
    { "epoch": 0.2742616033755274, "grad_norm": 0.6427624225616455, "learning_rate": 0.0015, "loss": 1.6569, "step": 2600 },
    { "epoch": 0.27531645569620256, "grad_norm": 0.8172615170478821, "learning_rate": 0.0015, "loss": 1.654, "step": 2610 },
    { "epoch": 0.27637130801687765, "grad_norm": 0.9558082222938538, "learning_rate": 0.0015, "loss": 1.6505, "step": 2620 },
    { "epoch": 0.27742616033755274, "grad_norm": 0.7412358522415161, "learning_rate": 0.0015, "loss": 1.6441, "step": 2630 },
    { "epoch": 0.27848101265822783, "grad_norm": 0.6658812165260315, "learning_rate": 0.0015, "loss": 1.6528, "step": 2640 },
    { "epoch": 0.2795358649789029, "grad_norm": 0.9503615498542786, "learning_rate": 0.0015, "loss": 1.6444, "step": 2650 },
    { "epoch": 0.2805907172995781, "grad_norm": 0.5820896625518799, "learning_rate": 0.0015, "loss": 1.6394, "step": 2660 },
    { "epoch": 0.28164556962025317, "grad_norm": 0.737178385257721, "learning_rate": 0.0015, "loss": 1.6411, "step": 2670 },
    { "epoch": 0.28270042194092826, "grad_norm": 0.6155056357383728, "learning_rate": 0.0015, "loss": 1.6426, "step": 2680 },
    { "epoch": 0.28375527426160335, "grad_norm": 0.8358173370361328, "learning_rate": 0.0015, "loss": 1.6418, "step": 2690 },
    { "epoch": 0.2848101265822785, "grad_norm": 0.6908949017524719, "learning_rate": 0.0015, "loss": 1.6366, "step": 2700 },
    { "epoch": 0.2858649789029536, "grad_norm": 0.5848388671875, "learning_rate": 0.0015, "loss": 1.6412, "step": 2710 },
    { "epoch": 0.2869198312236287, "grad_norm": 0.7473011612892151, "learning_rate": 0.0015, "loss": 1.6435, "step": 2720 },
    { "epoch": 0.2879746835443038, "grad_norm": 0.7751439809799194, "learning_rate": 0.0015, "loss": 1.625, "step": 2730 },
    { "epoch": 0.2890295358649789, "grad_norm": 0.9881758689880371, "learning_rate": 0.0015, "loss": 1.6345, "step": 2740 },
    { "epoch": 0.290084388185654, "grad_norm": 0.9220196008682251, "learning_rate": 0.0015, "loss": 1.641, "step": 2750 },
    { "epoch": 0.2911392405063291, "grad_norm": 0.6724462509155273, "learning_rate": 0.0015, "loss": 1.6508, "step": 2760 },
    { "epoch": 0.2921940928270042, "grad_norm": 0.6800593733787537, "learning_rate": 0.0015, "loss": 1.6453, "step": 2770 },
    { "epoch": 0.29324894514767935, "grad_norm": 0.6859573125839233, "learning_rate": 0.0015, "loss": 1.6378, "step": 2780 },
    { "epoch": 0.29430379746835444, "grad_norm": 0.785236120223999, "learning_rate": 0.0015, "loss": 1.6414, "step": 2790 },
    { "epoch": 0.29535864978902954, "grad_norm": 0.6508416533470154, "learning_rate": 0.0015, "loss": 1.6394, "step": 2800 },
    { "epoch": 0.29641350210970463, "grad_norm": 0.7265350222587585, "learning_rate": 0.0015, "loss": 1.6387, "step": 2810 },
    { "epoch": 0.2974683544303797, "grad_norm": 0.7346289753913879, "learning_rate": 0.0015, "loss": 1.642, "step": 2820 },
    { "epoch": 0.29852320675105487, "grad_norm": 0.5750663876533508, "learning_rate": 0.0015, "loss": 1.6179, "step": 2830 },
    { "epoch": 0.29957805907172996, "grad_norm": 0.8585373163223267, "learning_rate": 0.0015, "loss": 1.6211, "step": 2840 },
    { "epoch": 0.30063291139240506, "grad_norm": 0.6604123115539551, "learning_rate": 0.0015, "loss": 1.6266, "step": 2850 },
    { "epoch": 0.30168776371308015, "grad_norm": 0.7079337239265442, "learning_rate": 0.0015, "loss": 1.6293, "step": 2860 },
    { "epoch": 0.3027426160337553, "grad_norm": 0.9872538447380066, "learning_rate": 0.0015, "loss": 1.6332, "step": 2870 },
    { "epoch": 0.3037974683544304, "grad_norm": 0.7314916849136353, "learning_rate": 0.0015, "loss": 1.6207, "step": 2880 },
    { "epoch": 0.3048523206751055, "grad_norm": 0.6987850069999695, "learning_rate": 0.0015, "loss": 1.6224, "step": 2890 },
    { "epoch": 0.3059071729957806, "grad_norm": 0.658786416053772, "learning_rate": 0.0015, "loss": 1.6279, "step": 2900 },
    { "epoch": 0.3069620253164557, "grad_norm": 1.8567450046539307, "learning_rate": 0.0015, "loss": 1.6421, "step": 2910 },
    { "epoch": 0.3080168776371308, "grad_norm": 0.7476452589035034, "learning_rate": 0.0015, "loss": 1.625, "step": 2920 },
    { "epoch": 0.3090717299578059, "grad_norm": 0.6860174536705017, "learning_rate": 0.0015, "loss": 1.6179, "step": 2930 },
    { "epoch": 0.310126582278481, "grad_norm": 0.8042653203010559, "learning_rate": 0.0015, "loss": 1.6247, "step": 2940 },
    { "epoch": 0.3111814345991561, "grad_norm": 0.8760249018669128, "learning_rate": 0.0015, "loss": 1.633, "step": 2950 },
    { "epoch": 0.31223628691983124, "grad_norm": 1.5029122829437256, "learning_rate": 0.0015, "loss": 1.6288, "step": 2960 },
    { "epoch": 0.31329113924050633, "grad_norm": 0.8444796800613403, "learning_rate": 0.0015, "loss": 1.6184, "step": 2970 },
    { "epoch": 0.3143459915611814, "grad_norm": 0.6066569685935974, "learning_rate": 0.0015, "loss": 1.6102, "step": 2980 },
    { "epoch": 0.3154008438818565, "grad_norm": 0.6329298615455627, "learning_rate": 0.0015, "loss": 1.6283, "step": 2990 },
    { "epoch": 0.31645569620253167, "grad_norm": 0.6336819529533386, "learning_rate": 0.0015, "loss": 1.6086, "step": 3000 },
    { "epoch": 0.31751054852320676, "grad_norm": 0.6879556775093079, "learning_rate": 0.0015, "loss": 1.6157, "step": 3010 },
    { "epoch": 0.31856540084388185, "grad_norm": 0.7391799092292786, "learning_rate": 0.0015, "loss": 1.6197, "step": 3020 },
    { "epoch": 0.31962025316455694, "grad_norm": 0.5708714723587036, "learning_rate": 0.0015, "loss": 1.6042, "step": 3030 },
    { "epoch": 0.3206751054852321, "grad_norm": 0.5482274293899536, "learning_rate": 0.0015, "loss": 1.6152, "step": 3040 },
    { "epoch": 0.3217299578059072, "grad_norm": 0.5858886241912842, "learning_rate": 0.0015, "loss": 1.6097, "step": 3050 },
    { "epoch": 0.3227848101265823, "grad_norm": 0.7412643432617188, "learning_rate": 0.0015, "loss": 1.6219, "step": 3060 },
    { "epoch": 0.32383966244725737, "grad_norm": 0.5655277967453003, "learning_rate": 0.0015, "loss": 1.6132, "step": 3070 },
    { "epoch": 0.32489451476793246, "grad_norm": 0.7304298877716064, "learning_rate": 0.0015, "loss": 1.6011, "step": 3080 },
    { "epoch": 0.3259493670886076, "grad_norm": 0.8199241757392883, "learning_rate": 0.0015, "loss": 1.6198, "step": 3090 },
    { "epoch": 0.3270042194092827, "grad_norm": 0.5427016615867615, "learning_rate": 0.0015, "loss": 1.6049, "step": 3100 },
    { "epoch": 0.3280590717299578, "grad_norm": 0.5689124464988708, "learning_rate": 0.0015, "loss": 1.6085, "step": 3110 },
    { "epoch": 0.3291139240506329, "grad_norm": 0.6812858581542969, "learning_rate": 0.0015, "loss": 1.6118, "step": 3120 },
    { "epoch": 0.33016877637130804, "grad_norm": 0.5897095203399658, "learning_rate": 0.0015, "loss": 1.6239, "step": 3130 },
    { "epoch": 0.33122362869198313, "grad_norm": 0.9220945835113525, "learning_rate": 0.0015, "loss": 1.6001, "step": 3140 },
    { "epoch": 0.3322784810126582, "grad_norm": 0.9273051619529724, "learning_rate": 0.0015, "loss": 1.6101, "step": 3150 },
    { "epoch": 0.3333333333333333, "grad_norm": 0.6259636282920837, "learning_rate": 0.0015, "loss": 1.6116, "step": 3160 },
    { "epoch": 0.33438818565400846, "grad_norm": 0.6592155694961548, "learning_rate": 0.0015, "loss": 1.6147, "step": 3170 },
    { "epoch": 0.33544303797468356, "grad_norm": 0.6287351846694946, "learning_rate": 0.0015, "loss": 1.6056, "step": 3180 },
    { "epoch": 0.33649789029535865, "grad_norm": 0.982848584651947, "learning_rate": 0.0015, "loss": 1.6079, "step": 3190 },
    { "epoch": 0.33755274261603374, "grad_norm": 0.6584777235984802, "learning_rate": 0.0015, "loss": 1.5998, "step": 3200 },
    { "epoch": 0.33860759493670883, "grad_norm": 0.6095257997512817, "learning_rate": 0.0015, "loss": 1.6171, "step": 3210 },
    { "epoch": 0.339662447257384, "grad_norm": 0.6312419772148132, "learning_rate": 0.0015, "loss": 1.6054, "step": 3220 },
    { "epoch": 0.3407172995780591, "grad_norm": 0.5978062152862549, "learning_rate": 0.0015, "loss": 1.5997, "step": 3230 },
    { "epoch": 0.34177215189873417, "grad_norm": 0.5813004970550537, "learning_rate": 0.0015, "loss": 1.5785, "step": 3240 },
    { "epoch": 0.34282700421940926, "grad_norm": 0.6352910995483398, "learning_rate": 0.0015, "loss": 1.603, "step": 3250 },
    { "epoch": 0.3438818565400844, "grad_norm": 0.5998354554176331, "learning_rate": 0.0015, "loss": 1.5949, "step": 3260 },
    { "epoch": 0.3449367088607595, "grad_norm": 0.6111003756523132, "learning_rate": 0.0015, "loss": 1.6095, "step": 3270 },
    { "epoch": 0.3459915611814346, "grad_norm": 0.566878616809845, "learning_rate": 0.0015, "loss": 1.6106, "step": 3280 },
    { "epoch": 0.3470464135021097, "grad_norm": 0.6212012767791748, "learning_rate": 0.0015, "loss": 1.5931, "step": 3290 },
    { "epoch": 0.34810126582278483, "grad_norm": 0.6310331225395203, "learning_rate": 0.0015, "loss": 1.6032, "step": 3300 },
    { "epoch": 0.3491561181434599, "grad_norm": 0.6017879247665405, "learning_rate": 0.0015, "loss": 1.6022, "step": 3310 },
    { "epoch": 0.350210970464135, "grad_norm": 0.6400523781776428, "learning_rate": 0.0015, "loss": 1.607, "step": 3320 },
    { "epoch": 0.3512658227848101, "grad_norm": 0.5609440207481384, "learning_rate": 0.0015, "loss": 1.599, "step": 3330 },
    { "epoch": 0.35232067510548526, "grad_norm": 0.6740028262138367, "learning_rate": 0.0015, "loss": 1.6011, "step": 3340 },
    { "epoch": 0.35337552742616035, "grad_norm": 0.7476164102554321, "learning_rate": 0.0015, "loss": 1.5933, "step": 3350 },
    { "epoch": 0.35443037974683544, "grad_norm": 0.8268814086914062, "learning_rate": 0.0015, "loss": 1.5934, "step": 3360 },
    { "epoch": 0.35548523206751054, "grad_norm": 0.67869633436203, "learning_rate": 0.0015, "loss": 1.6028, "step": 3370 },
    { "epoch": 0.35654008438818563, "grad_norm": 0.5998952984809875, "learning_rate": 0.0015, "loss": 1.5976, "step": 3380 },
    { "epoch": 0.3575949367088608, "grad_norm": 0.8241248726844788, "learning_rate": 0.0015, "loss": 1.5924, "step": 3390 },
    { "epoch": 0.35864978902953587, "grad_norm": 0.6471379995346069, "learning_rate": 0.0015, "loss": 1.5999, "step": 3400 },
    { "epoch": 0.35970464135021096, "grad_norm": 0.6880522966384888, "learning_rate": 0.0015, "loss": 1.5978, "step": 3410 },
    { "epoch": 0.36075949367088606, "grad_norm": 0.5927819013595581, "learning_rate": 0.0015, "loss": 1.5752, "step": 3420 },
    { "epoch": 0.3618143459915612, "grad_norm": 0.623607337474823, "learning_rate": 0.0015, "loss": 1.5933, "step": 3430 },
    { "epoch": 0.3628691983122363, "grad_norm": 0.7323747277259827, "learning_rate": 0.0015, "loss": 1.5918, "step": 3440 },
    { "epoch": 0.3639240506329114, "grad_norm": 0.6532187461853027, "learning_rate": 0.0015, "loss": 1.588, "step": 3450 },
    { "epoch": 0.3649789029535865, "grad_norm": 0.5976588129997253, "learning_rate": 0.0015, "loss": 1.5803, "step": 3460 },
    { "epoch": 0.36603375527426163, "grad_norm": 0.6030303835868835, "learning_rate": 0.0015, "loss": 1.5976, "step": 3470 },
    { "epoch": 0.3670886075949367, "grad_norm": 0.6205447912216187, "learning_rate": 0.0015, "loss": 1.6031, "step": 3480 },
    { "epoch": 0.3681434599156118, "grad_norm": 0.7707657814025879, "learning_rate": 0.0015, "loss": 1.5968, "step": 3490 },
    { "epoch": 0.3691983122362869, "grad_norm": 0.651380717754364, "learning_rate": 0.0015, "loss": 1.5739, "step": 3500 },
    { "epoch": 0.370253164556962, "grad_norm": 0.6881337761878967, "learning_rate": 0.0015, "loss": 1.5931, "step": 3510 },
    { "epoch": 0.37130801687763715, "grad_norm": 0.589093804359436, "learning_rate": 0.0015, "loss": 1.5906, "step": 3520 },
    { "epoch": 0.37236286919831224, "grad_norm": 0.774224579334259, "learning_rate": 0.0015, "loss": 1.6007, "step": 3530 },
    { "epoch": 0.37341772151898733, "grad_norm": 0.579230546951294, "learning_rate": 0.0015, "loss": 1.5936, "step": 3540 },
    { "epoch": 0.3744725738396624, "grad_norm": 0.6935818195343018, "learning_rate": 0.0015, "loss": 1.5876, "step": 3550 },
    { "epoch": 0.3755274261603376, "grad_norm": 0.6207526326179504, "learning_rate": 0.0015, "loss": 1.5856, "step": 3560 },
    { "epoch": 0.37658227848101267, "grad_norm": 0.8157477974891663, "learning_rate": 0.0015, "loss": 1.5808, "step": 3570 },
    { "epoch": 0.37763713080168776, "grad_norm": 0.5693010687828064, "learning_rate": 0.0015, "loss": 1.5799, "step": 3580 },
    { "epoch": 0.37869198312236285, "grad_norm": 0.6131789088249207, "learning_rate": 0.0015, "loss": 1.5949, "step": 3590 },
    { "epoch": 0.379746835443038, "grad_norm": 0.8241103887557983, "learning_rate": 0.0015, "loss": 1.5764, "step": 3600 },
    { "epoch": 0.3808016877637131, "grad_norm": 0.7060911655426025, "learning_rate": 0.0015, "loss": 1.5798, "step": 3610 },
    { "epoch": 0.3818565400843882, "grad_norm": 0.6012842059135437, "learning_rate": 0.0015, "loss": 1.5833, "step": 3620 },
    { "epoch": 0.3829113924050633, "grad_norm": 0.7175701260566711, "learning_rate": 0.0015, "loss": 1.5823, "step": 3630 },
    { "epoch": 0.38396624472573837, "grad_norm": 0.6527909636497498, "learning_rate": 0.0015, "loss": 1.5838, "step": 3640 },
    { "epoch": 0.3850210970464135, "grad_norm": 0.5886304974555969, "learning_rate": 0.0015, "loss": 1.5878, "step": 3650 },
    { "epoch": 0.3860759493670886, "grad_norm": 0.5935819149017334, "learning_rate": 0.0015, "loss": 1.5827, "step": 3660 },
    { "epoch": 0.3871308016877637, "grad_norm": 0.5793882012367249, "learning_rate": 0.0015, "loss": 1.5743, "step": 3670 },
    { "epoch": 0.3881856540084388, "grad_norm": 0.6356774568557739, "learning_rate": 0.0015, "loss": 1.5783, "step": 3680 },
    { "epoch": 0.38924050632911394, "grad_norm": 0.6113144159317017, "learning_rate": 0.0015, "loss": 1.5893, "step": 3690 },
    { "epoch": 0.39029535864978904, "grad_norm": 0.6362749934196472, "learning_rate": 0.0015, "loss": 1.5917, "step": 3700 },
    { "epoch": 0.39135021097046413, "grad_norm": 0.8249803781509399, "learning_rate": 0.0015, "loss": 1.5781, "step": 3710 },
    { "epoch": 0.3924050632911392, "grad_norm": 0.8612954616546631, "learning_rate": 0.0015, "loss": 1.5608, "step": 3720 },
    { "epoch": 0.39345991561181437, "grad_norm": 1.7621455192565918, "learning_rate": 0.0015, "loss": 1.5877, "step": 3730 },
    { "epoch": 0.39451476793248946, "grad_norm": 1.3036092519760132, "learning_rate": 0.0015, "loss": 1.5916, "step": 3740 },
    { "epoch": 0.39556962025316456, "grad_norm": 0.9393406510353088, "learning_rate": 0.0015, "loss": 1.5864, "step": 3750 },
    { "epoch": 0.39662447257383965, "grad_norm": 0.9737085103988647, "learning_rate": 0.0015, "loss": 1.5715, "step": 3760 },
    { "epoch": 0.39767932489451474, "grad_norm": 0.8785778880119324, "learning_rate": 0.0015, "loss": 1.5872, "step": 3770 },
    { "epoch": 0.3987341772151899, "grad_norm": 0.5576906800270081, "learning_rate": 0.0015, "loss": 1.5743, "step": 3780 },
    { "epoch": 0.399789029535865, "grad_norm": 0.5176705718040466, "learning_rate": 0.0015, "loss": 1.5734, "step": 3790 },
    { "epoch": 0.4008438818565401, "grad_norm": 0.6921336650848389, "learning_rate": 0.0015, "loss": 1.5808, "step": 3800 },
    { "epoch": 0.40189873417721517, "grad_norm": 0.8691692352294922, "learning_rate": 0.0015, "loss": 1.5723, "step": 3810 },
    { "epoch": 0.4029535864978903, "grad_norm": 0.6363836526870728, "learning_rate": 0.0015, "loss": 1.5627, "step": 3820 },
    { "epoch": 0.4040084388185654, "grad_norm": 0.6024162769317627, "learning_rate": 0.0015, "loss": 1.5676, "step": 3830 },
    { "epoch": 0.4050632911392405, "grad_norm": 0.628873884677887, "learning_rate": 0.0015, "loss": 1.5875, "step": 3840 },
    { "epoch": 0.4061181434599156, "grad_norm": 0.5797845721244812, "learning_rate": 0.0015, "loss": 1.5663, "step": 3850 },
    { "epoch": 0.40717299578059074, "grad_norm": 0.7347355484962463, "learning_rate": 0.0015, "loss": 1.5797, "step": 3860 },
    { "epoch": 0.40822784810126583, "grad_norm": 0.8189390897750854, "learning_rate": 0.0015, "loss": 1.5715, "step": 3870 },
    { "epoch": 0.4092827004219409, "grad_norm": 0.5799738764762878, "learning_rate": 0.0015, "loss": 1.5603, "step": 3880 },
    { "epoch": 0.410337552742616, "grad_norm": 0.6734434366226196, "learning_rate": 0.0015, "loss": 1.5592, "step": 3890 },
    { "epoch": 0.41139240506329117, "grad_norm": 1.40089750289917, "learning_rate": 0.0015, "loss": 1.563, "step": 3900 },
    { "epoch": 0.41244725738396626, "grad_norm": 0.7925665974617004, "learning_rate": 0.0015, "loss": 1.5689, "step": 3910 },
    { "epoch": 0.41350210970464135, "grad_norm": 0.7021671533584595, "learning_rate": 0.0015, "loss": 1.5624, "step": 3920 },
    { "epoch": 0.41455696202531644, "grad_norm": 0.5486526489257812, "learning_rate": 0.0015, "loss": 1.5664, "step": 3930 },
    { "epoch": 0.41561181434599154, "grad_norm": 0.6393633484840393, "learning_rate": 0.0015, "loss": 1.5618, "step": 3940 },
    { "epoch": 0.4166666666666667, "grad_norm": 0.5666303634643555, "learning_rate": 0.0015, "loss": 1.5677, "step": 3950 },
    { "epoch": 0.4177215189873418, "grad_norm": 0.6497644782066345, "learning_rate": 0.0015, "loss": 1.5697, "step": 3960 },
    { "epoch": 0.41877637130801687, "grad_norm": 0.5655925273895264, "learning_rate": 0.0015, "loss": 1.5562, "step": 3970 },
    { "epoch": 0.41983122362869196, "grad_norm": 0.8136340975761414, "learning_rate": 0.0015, "loss": 1.5709, "step": 3980 },
    { "epoch": 0.4208860759493671, "grad_norm": 0.874515950679779, "learning_rate": 0.0015, "loss": 1.5607, "step": 3990 },
    { "epoch": 0.4219409282700422, "grad_norm": 0.5788293480873108, "learning_rate": 0.0015, "loss": 1.576, "step": 4000 },
    { "epoch": 0.4229957805907173, "grad_norm": 0.5628449320793152, "learning_rate": 0.0015, "loss": 1.5605, "step": 4010 },
    { "epoch": 0.4240506329113924, "grad_norm": 0.5451663136482239, "learning_rate": 0.0015, "loss": 1.5619, "step": 4020 },
| { |
| "epoch": 0.42510548523206754, |
| "grad_norm": 0.5687470436096191, |
| "learning_rate": 0.0015, |
| "loss": 1.5556, |
| "step": 4030 |
| }, |
| { |
| "epoch": 0.42616033755274263, |
| "grad_norm": 0.6235162019729614, |
| "learning_rate": 0.0015, |
| "loss": 1.5632, |
| "step": 4040 |
| }, |
| { |
| "epoch": 0.4272151898734177, |
| "grad_norm": 0.5888081192970276, |
| "learning_rate": 0.0015, |
| "loss": 1.5638, |
| "step": 4050 |
| }, |
| { |
| "epoch": 0.4282700421940928, |
| "grad_norm": 1.0608152151107788, |
| "learning_rate": 0.0015, |
| "loss": 1.5717, |
| "step": 4060 |
| }, |
| { |
| "epoch": 0.4293248945147679, |
| "grad_norm": 0.8200925588607788, |
| "learning_rate": 0.0015, |
| "loss": 1.5724, |
| "step": 4070 |
| }, |
| { |
| "epoch": 0.43037974683544306, |
| "grad_norm": 0.6000164151191711, |
| "learning_rate": 0.0015, |
| "loss": 1.5819, |
| "step": 4080 |
| }, |
| { |
| "epoch": 0.43143459915611815, |
| "grad_norm": 0.632230281829834, |
| "learning_rate": 0.0015, |
| "loss": 1.5607, |
| "step": 4090 |
| }, |
| { |
| "epoch": 0.43248945147679324, |
| "grad_norm": 0.6781566143035889, |
| "learning_rate": 0.0015, |
| "loss": 1.5579, |
| "step": 4100 |
| }, |
| { |
| "epoch": 0.43354430379746833, |
| "grad_norm": 0.6161074042320251, |
| "learning_rate": 0.0015, |
| "loss": 1.5549, |
| "step": 4110 |
| }, |
| { |
| "epoch": 0.4345991561181435, |
| "grad_norm": 0.7849648594856262, |
| "learning_rate": 0.0015, |
| "loss": 1.5565, |
| "step": 4120 |
| }, |
| { |
| "epoch": 0.4356540084388186, |
| "grad_norm": 0.7117999196052551, |
| "learning_rate": 0.0015, |
| "loss": 1.5533, |
| "step": 4130 |
| }, |
| { |
| "epoch": 0.43670886075949367, |
| "grad_norm": 0.559719443321228, |
| "learning_rate": 0.0015, |
| "loss": 1.5558, |
| "step": 4140 |
| }, |
| { |
| "epoch": 0.43776371308016876, |
| "grad_norm": 1.0182809829711914, |
| "learning_rate": 0.0015, |
| "loss": 1.5608, |
| "step": 4150 |
| }, |
| { |
| "epoch": 0.4388185654008439, |
| "grad_norm": 0.5550006628036499, |
| "learning_rate": 0.0015, |
| "loss": 1.5536, |
| "step": 4160 |
| }, |
| { |
| "epoch": 0.439873417721519, |
| "grad_norm": 0.5445747375488281, |
| "learning_rate": 0.0015, |
| "loss": 1.554, |
| "step": 4170 |
| }, |
| { |
| "epoch": 0.4409282700421941, |
| "grad_norm": 0.5958618521690369, |
| "learning_rate": 0.0015, |
| "loss": 1.5497, |
| "step": 4180 |
| }, |
| { |
| "epoch": 0.4419831223628692, |
| "grad_norm": 0.5892060995101929, |
| "learning_rate": 0.0015, |
| "loss": 1.559, |
| "step": 4190 |
| }, |
| { |
| "epoch": 0.4430379746835443, |
| "grad_norm": 0.5875847339630127, |
| "learning_rate": 0.0015, |
| "loss": 1.5576, |
| "step": 4200 |
| }, |
| { |
| "epoch": 0.4440928270042194, |
| "grad_norm": 0.7794286608695984, |
| "learning_rate": 0.0015, |
| "loss": 1.5495, |
| "step": 4210 |
| }, |
| { |
| "epoch": 0.4451476793248945, |
| "grad_norm": 0.5659272074699402, |
| "learning_rate": 0.0015, |
| "loss": 1.5555, |
| "step": 4220 |
| }, |
| { |
| "epoch": 0.4462025316455696, |
| "grad_norm": 0.5580011010169983, |
| "learning_rate": 0.0015, |
| "loss": 1.55, |
| "step": 4230 |
| }, |
| { |
| "epoch": 0.4472573839662447, |
| "grad_norm": 0.6203771829605103, |
| "learning_rate": 0.0015, |
| "loss": 1.5515, |
| "step": 4240 |
| }, |
| { |
| "epoch": 0.44831223628691985, |
| "grad_norm": 0.682816743850708, |
| "learning_rate": 0.0015, |
| "loss": 1.5575, |
| "step": 4250 |
| }, |
| { |
| "epoch": 0.44936708860759494, |
| "grad_norm": 0.6003069281578064, |
| "learning_rate": 0.0015, |
| "loss": 1.5646, |
| "step": 4260 |
| }, |
| { |
| "epoch": 0.45042194092827004, |
| "grad_norm": 0.5731751918792725, |
| "learning_rate": 0.0015, |
| "loss": 1.5498, |
| "step": 4270 |
| }, |
| { |
| "epoch": 0.45147679324894513, |
| "grad_norm": 1.3089572191238403, |
| "learning_rate": 0.0015, |
| "loss": 1.5434, |
| "step": 4280 |
| }, |
| { |
| "epoch": 0.4525316455696203, |
| "grad_norm": 0.8871122598648071, |
| "learning_rate": 0.0015, |
| "loss": 1.557, |
| "step": 4290 |
| }, |
| { |
| "epoch": 0.45358649789029537, |
| "grad_norm": 0.7614080905914307, |
| "learning_rate": 0.0015, |
| "loss": 1.5533, |
| "step": 4300 |
| }, |
| { |
| "epoch": 0.45464135021097046, |
| "grad_norm": 0.5850949883460999, |
| "learning_rate": 0.0015, |
| "loss": 1.5542, |
| "step": 4310 |
| }, |
| { |
| "epoch": 0.45569620253164556, |
| "grad_norm": 0.6122797727584839, |
| "learning_rate": 0.0015, |
| "loss": 1.5463, |
| "step": 4320 |
| }, |
| { |
| "epoch": 0.45675105485232065, |
| "grad_norm": 0.6340963840484619, |
| "learning_rate": 0.0015, |
| "loss": 1.5578, |
| "step": 4330 |
| }, |
| { |
| "epoch": 0.4578059071729958, |
| "grad_norm": 0.7485887408256531, |
| "learning_rate": 0.0015, |
| "loss": 1.5457, |
| "step": 4340 |
| }, |
| { |
| "epoch": 0.4588607594936709, |
| "grad_norm": 0.8456482887268066, |
| "learning_rate": 0.0015, |
| "loss": 1.5487, |
| "step": 4350 |
| }, |
| { |
| "epoch": 0.459915611814346, |
| "grad_norm": 0.5885475277900696, |
| "learning_rate": 0.0015, |
| "loss": 1.5621, |
| "step": 4360 |
| }, |
| { |
| "epoch": 0.4609704641350211, |
| "grad_norm": 0.7908206582069397, |
| "learning_rate": 0.0015, |
| "loss": 1.5433, |
| "step": 4370 |
| }, |
| { |
| "epoch": 0.4620253164556962, |
| "grad_norm": 0.5468949675559998, |
| "learning_rate": 0.0015, |
| "loss": 1.5446, |
| "step": 4380 |
| }, |
| { |
| "epoch": 0.4630801687763713, |
| "grad_norm": 0.5600817799568176, |
| "learning_rate": 0.0015, |
| "loss": 1.5413, |
| "step": 4390 |
| }, |
| { |
| "epoch": 0.4641350210970464, |
| "grad_norm": 0.5512800216674805, |
| "learning_rate": 0.0015, |
| "loss": 1.5467, |
| "step": 4400 |
| }, |
| { |
| "epoch": 0.4651898734177215, |
| "grad_norm": 0.7056983113288879, |
| "learning_rate": 0.0015, |
| "loss": 1.5638, |
| "step": 4410 |
| }, |
| { |
| "epoch": 0.46624472573839665, |
| "grad_norm": 0.9344276785850525, |
| "learning_rate": 0.0015, |
| "loss": 1.5457, |
| "step": 4420 |
| }, |
| { |
| "epoch": 0.46729957805907174, |
| "grad_norm": 0.8951518535614014, |
| "learning_rate": 0.0015, |
| "loss": 1.5528, |
| "step": 4430 |
| }, |
| { |
| "epoch": 0.46835443037974683, |
| "grad_norm": 0.6155388355255127, |
| "learning_rate": 0.0015, |
| "loss": 1.5467, |
| "step": 4440 |
| }, |
| { |
| "epoch": 0.4694092827004219, |
| "grad_norm": 0.7321780323982239, |
| "learning_rate": 0.0015, |
| "loss": 1.5539, |
| "step": 4450 |
| }, |
| { |
| "epoch": 0.4704641350210971, |
| "grad_norm": 0.7223412394523621, |
| "learning_rate": 0.0015, |
| "loss": 1.542, |
| "step": 4460 |
| }, |
| { |
| "epoch": 0.47151898734177217, |
| "grad_norm": 0.580058753490448, |
| "learning_rate": 0.0015, |
| "loss": 1.5512, |
| "step": 4470 |
| }, |
| { |
| "epoch": 0.47257383966244726, |
| "grad_norm": 0.626334547996521, |
| "learning_rate": 0.0015, |
| "loss": 1.5538, |
| "step": 4480 |
| }, |
| { |
| "epoch": 0.47362869198312235, |
| "grad_norm": 0.9320026636123657, |
| "learning_rate": 0.0015, |
| "loss": 1.5435, |
| "step": 4490 |
| }, |
| { |
| "epoch": 0.47468354430379744, |
| "grad_norm": 0.6046232581138611, |
| "learning_rate": 0.0015, |
| "loss": 1.5446, |
| "step": 4500 |
| }, |
| { |
| "epoch": 0.4757383966244726, |
| "grad_norm": 0.7325782775878906, |
| "learning_rate": 0.0015, |
| "loss": 1.5463, |
| "step": 4510 |
| }, |
| { |
| "epoch": 0.4767932489451477, |
| "grad_norm": 0.571357786655426, |
| "learning_rate": 0.0015, |
| "loss": 1.5328, |
| "step": 4520 |
| }, |
| { |
| "epoch": 0.4778481012658228, |
| "grad_norm": 0.5945020318031311, |
| "learning_rate": 0.0015, |
| "loss": 1.5487, |
| "step": 4530 |
| }, |
| { |
| "epoch": 0.47890295358649787, |
| "grad_norm": 0.6285760402679443, |
| "learning_rate": 0.0015, |
| "loss": 1.5507, |
| "step": 4540 |
| }, |
| { |
| "epoch": 0.479957805907173, |
| "grad_norm": 0.5691685676574707, |
| "learning_rate": 0.0015, |
| "loss": 1.5472, |
| "step": 4550 |
| }, |
| { |
| "epoch": 0.4810126582278481, |
| "grad_norm": 0.5442836880683899, |
| "learning_rate": 0.0015, |
| "loss": 1.537, |
| "step": 4560 |
| }, |
| { |
| "epoch": 0.4820675105485232, |
| "grad_norm": 0.6255428194999695, |
| "learning_rate": 0.0015, |
| "loss": 1.5193, |
| "step": 4570 |
| }, |
| { |
| "epoch": 0.4831223628691983, |
| "grad_norm": 0.554628849029541, |
| "learning_rate": 0.0015, |
| "loss": 1.5447, |
| "step": 4580 |
| }, |
| { |
| "epoch": 0.48417721518987344, |
| "grad_norm": 0.6360525488853455, |
| "learning_rate": 0.0015, |
| "loss": 1.5315, |
| "step": 4590 |
| }, |
| { |
| "epoch": 0.48523206751054854, |
| "grad_norm": 0.7742726802825928, |
| "learning_rate": 0.0015, |
| "loss": 1.5487, |
| "step": 4600 |
| }, |
| { |
| "epoch": 0.48628691983122363, |
| "grad_norm": 0.8166155219078064, |
| "learning_rate": 0.0015, |
| "loss": 1.5497, |
| "step": 4610 |
| }, |
| { |
| "epoch": 0.4873417721518987, |
| "grad_norm": 0.5890775918960571, |
| "learning_rate": 0.0015, |
| "loss": 1.527, |
| "step": 4620 |
| }, |
| { |
| "epoch": 0.4883966244725738, |
| "grad_norm": 0.6394803524017334, |
| "learning_rate": 0.0015, |
| "loss": 1.5464, |
| "step": 4630 |
| }, |
| { |
| "epoch": 0.48945147679324896, |
| "grad_norm": 0.6207756996154785, |
| "learning_rate": 0.0015, |
| "loss": 1.5459, |
| "step": 4640 |
| }, |
| { |
| "epoch": 0.49050632911392406, |
| "grad_norm": 0.5840123891830444, |
| "learning_rate": 0.0015, |
| "loss": 1.5507, |
| "step": 4650 |
| }, |
| { |
| "epoch": 0.49156118143459915, |
| "grad_norm": 0.5675470232963562, |
| "learning_rate": 0.0015, |
| "loss": 1.5353, |
| "step": 4660 |
| }, |
| { |
| "epoch": 0.49261603375527424, |
| "grad_norm": 0.5648407340049744, |
| "learning_rate": 0.0015, |
| "loss": 1.5372, |
| "step": 4670 |
| }, |
| { |
| "epoch": 0.4936708860759494, |
| "grad_norm": 0.5731132626533508, |
| "learning_rate": 0.0015, |
| "loss": 1.5304, |
| "step": 4680 |
| }, |
| { |
| "epoch": 0.4947257383966245, |
| "grad_norm": 0.8239709138870239, |
| "learning_rate": 0.0015, |
| "loss": 1.5294, |
| "step": 4690 |
| }, |
| { |
| "epoch": 0.4957805907172996, |
| "grad_norm": 0.8988879919052124, |
| "learning_rate": 0.0015, |
| "loss": 1.5411, |
| "step": 4700 |
| }, |
| { |
| "epoch": 0.49683544303797467, |
| "grad_norm": 0.5612457990646362, |
| "learning_rate": 0.0015, |
| "loss": 1.5405, |
| "step": 4710 |
| }, |
| { |
| "epoch": 0.4978902953586498, |
| "grad_norm": 0.7755550146102905, |
| "learning_rate": 0.0015, |
| "loss": 1.5394, |
| "step": 4720 |
| }, |
| { |
| "epoch": 0.4989451476793249, |
| "grad_norm": 0.5818630456924438, |
| "learning_rate": 0.0015, |
| "loss": 1.5331, |
| "step": 4730 |
| }, |
| { |
| "epoch": 0.5, |
| "grad_norm": 0.5022954344749451, |
| "learning_rate": 0.0015, |
| "loss": 1.5316, |
| "step": 4740 |
| }, |
| { |
| "epoch": 0.5010548523206751, |
| "grad_norm": 0.9314385056495667, |
| "learning_rate": 0.0015, |
| "loss": 1.5524, |
| "step": 4750 |
| }, |
| { |
| "epoch": 0.5021097046413502, |
| "grad_norm": 0.7994999885559082, |
| "learning_rate": 0.0015, |
| "loss": 1.5383, |
| "step": 4760 |
| }, |
| { |
| "epoch": 0.5031645569620253, |
| "grad_norm": 0.6273106932640076, |
| "learning_rate": 0.0015, |
| "loss": 1.5322, |
| "step": 4770 |
| }, |
| { |
| "epoch": 0.5042194092827004, |
| "grad_norm": 0.600748598575592, |
| "learning_rate": 0.0015, |
| "loss": 1.5433, |
| "step": 4780 |
| }, |
| { |
| "epoch": 0.5052742616033755, |
| "grad_norm": 0.7291206121444702, |
| "learning_rate": 0.0015, |
| "loss": 1.527, |
| "step": 4790 |
| }, |
| { |
| "epoch": 0.5063291139240507, |
| "grad_norm": 0.7947571873664856, |
| "learning_rate": 0.0015, |
| "loss": 1.5326, |
| "step": 4800 |
| }, |
| { |
| "epoch": 0.5073839662447257, |
| "grad_norm": 0.6562824845314026, |
| "learning_rate": 0.0015, |
| "loss": 1.5337, |
| "step": 4810 |
| }, |
| { |
| "epoch": 0.5084388185654009, |
| "grad_norm": 0.6121402382850647, |
| "learning_rate": 0.0015, |
| "loss": 1.554, |
| "step": 4820 |
| }, |
| { |
| "epoch": 0.509493670886076, |
| "grad_norm": 0.9882345795631409, |
| "learning_rate": 0.0015, |
| "loss": 1.5372, |
| "step": 4830 |
| }, |
| { |
| "epoch": 0.510548523206751, |
| "grad_norm": 0.6104786992073059, |
| "learning_rate": 0.0015, |
| "loss": 1.5303, |
| "step": 4840 |
| }, |
| { |
| "epoch": 0.5116033755274262, |
| "grad_norm": 0.8607736229896545, |
| "learning_rate": 0.0015, |
| "loss": 1.5444, |
| "step": 4850 |
| }, |
| { |
| "epoch": 0.5126582278481012, |
| "grad_norm": 0.6029435992240906, |
| "learning_rate": 0.0015, |
| "loss": 1.5298, |
| "step": 4860 |
| }, |
| { |
| "epoch": 0.5137130801687764, |
| "grad_norm": 0.7495979070663452, |
| "learning_rate": 0.0015, |
| "loss": 1.5225, |
| "step": 4870 |
| }, |
| { |
| "epoch": 0.5147679324894515, |
| "grad_norm": 0.6200342178344727, |
| "learning_rate": 0.0015, |
| "loss": 1.5405, |
| "step": 4880 |
| }, |
| { |
| "epoch": 0.5158227848101266, |
| "grad_norm": 0.5241333842277527, |
| "learning_rate": 0.0015, |
| "loss": 1.5306, |
| "step": 4890 |
| }, |
| { |
| "epoch": 0.5168776371308017, |
| "grad_norm": 0.5704604387283325, |
| "learning_rate": 0.0015, |
| "loss": 1.5394, |
| "step": 4900 |
| }, |
| { |
| "epoch": 0.5179324894514767, |
| "grad_norm": 0.6147574782371521, |
| "learning_rate": 0.0015, |
| "loss": 1.5335, |
| "step": 4910 |
| }, |
| { |
| "epoch": 0.5189873417721519, |
| "grad_norm": 0.6230430006980896, |
| "learning_rate": 0.0015, |
| "loss": 1.5283, |
| "step": 4920 |
| }, |
| { |
| "epoch": 0.520042194092827, |
| "grad_norm": 0.678521454334259, |
| "learning_rate": 0.0015, |
| "loss": 1.5343, |
| "step": 4930 |
| }, |
| { |
| "epoch": 0.5210970464135021, |
| "grad_norm": 0.8999406695365906, |
| "learning_rate": 0.0015, |
| "loss": 1.5324, |
| "step": 4940 |
| }, |
| { |
| "epoch": 0.5221518987341772, |
| "grad_norm": 0.7650752067565918, |
| "learning_rate": 0.0015, |
| "loss": 1.5426, |
| "step": 4950 |
| }, |
| { |
| "epoch": 0.5232067510548524, |
| "grad_norm": 0.7141919732093811, |
| "learning_rate": 0.0015, |
| "loss": 1.5218, |
| "step": 4960 |
| }, |
| { |
| "epoch": 0.5242616033755274, |
| "grad_norm": 0.5926218628883362, |
| "learning_rate": 0.0015, |
| "loss": 1.5294, |
| "step": 4970 |
| }, |
| { |
| "epoch": 0.5253164556962026, |
| "grad_norm": 0.6089852452278137, |
| "learning_rate": 0.0015, |
| "loss": 1.5233, |
| "step": 4980 |
| }, |
| { |
| "epoch": 0.5263713080168776, |
| "grad_norm": 0.7046988010406494, |
| "learning_rate": 0.0015, |
| "loss": 1.5261, |
| "step": 4990 |
| }, |
| { |
| "epoch": 0.5274261603375527, |
| "grad_norm": 0.6000553369522095, |
| "learning_rate": 0.0015, |
| "loss": 1.5373, |
| "step": 5000 |
| }, |
| { |
| "epoch": 0.5284810126582279, |
| "grad_norm": 0.5103377103805542, |
| "learning_rate": 0.0015, |
| "loss": 1.5296, |
| "step": 5010 |
| }, |
| { |
| "epoch": 0.5295358649789029, |
| "grad_norm": 0.7494931221008301, |
| "learning_rate": 0.0015, |
| "loss": 1.5253, |
| "step": 5020 |
| }, |
| { |
| "epoch": 0.5305907172995781, |
| "grad_norm": 0.5808805823326111, |
| "learning_rate": 0.0015, |
| "loss": 1.5073, |
| "step": 5030 |
| }, |
| { |
| "epoch": 0.5316455696202531, |
| "grad_norm": 0.5486202239990234, |
| "learning_rate": 0.0015, |
| "loss": 1.5374, |
| "step": 5040 |
| }, |
| { |
| "epoch": 0.5327004219409283, |
| "grad_norm": 0.5495449900627136, |
| "learning_rate": 0.0015, |
| "loss": 1.5272, |
| "step": 5050 |
| }, |
| { |
| "epoch": 0.5337552742616034, |
| "grad_norm": 0.7107338905334473, |
| "learning_rate": 0.0015, |
| "loss": 1.5377, |
| "step": 5060 |
| }, |
| { |
| "epoch": 0.5348101265822784, |
| "grad_norm": 0.6358274817466736, |
| "learning_rate": 0.0015, |
| "loss": 1.526, |
| "step": 5070 |
| }, |
| { |
| "epoch": 0.5358649789029536, |
| "grad_norm": 0.5813426375389099, |
| "learning_rate": 0.0015, |
| "loss": 1.5372, |
| "step": 5080 |
| }, |
| { |
| "epoch": 0.5369198312236287, |
| "grad_norm": 0.7069512009620667, |
| "learning_rate": 0.0015, |
| "loss": 1.529, |
| "step": 5090 |
| }, |
| { |
| "epoch": 0.5379746835443038, |
| "grad_norm": 0.5345435738563538, |
| "learning_rate": 0.0015, |
| "loss": 1.5103, |
| "step": 5100 |
| }, |
| { |
| "epoch": 0.5390295358649789, |
| "grad_norm": 0.5351413488388062, |
| "learning_rate": 0.0015, |
| "loss": 1.5245, |
| "step": 5110 |
| }, |
| { |
| "epoch": 0.540084388185654, |
| "grad_norm": 0.672135055065155, |
| "learning_rate": 0.0015, |
| "loss": 1.5229, |
| "step": 5120 |
| }, |
| { |
| "epoch": 0.5411392405063291, |
| "grad_norm": 0.5577877163887024, |
| "learning_rate": 0.0015, |
| "loss": 1.5213, |
| "step": 5130 |
| }, |
| { |
| "epoch": 0.5421940928270043, |
| "grad_norm": 0.6954469084739685, |
| "learning_rate": 0.0015, |
| "loss": 1.5422, |
| "step": 5140 |
| }, |
| { |
| "epoch": 0.5432489451476793, |
| "grad_norm": 0.721547544002533, |
| "learning_rate": 0.0015, |
| "loss": 1.5346, |
| "step": 5150 |
| }, |
| { |
| "epoch": 0.5443037974683544, |
| "grad_norm": 0.5934624075889587, |
| "learning_rate": 0.0015, |
| "loss": 1.5253, |
| "step": 5160 |
| }, |
| { |
| "epoch": 0.5453586497890295, |
| "grad_norm": 0.8245863318443298, |
| "learning_rate": 0.0015, |
| "loss": 1.5312, |
| "step": 5170 |
| }, |
| { |
| "epoch": 0.5464135021097046, |
| "grad_norm": 0.632466733455658, |
| "learning_rate": 0.0015, |
| "loss": 1.5377, |
| "step": 5180 |
| }, |
| { |
| "epoch": 0.5474683544303798, |
| "grad_norm": 0.7374550104141235, |
| "learning_rate": 0.0015, |
| "loss": 1.5187, |
| "step": 5190 |
| }, |
| { |
| "epoch": 0.5485232067510548, |
| "grad_norm": 0.8513514399528503, |
| "learning_rate": 0.0015, |
| "loss": 1.5297, |
| "step": 5200 |
| }, |
| { |
| "epoch": 0.54957805907173, |
| "grad_norm": 0.7695537209510803, |
| "learning_rate": 0.0015, |
| "loss": 1.5163, |
| "step": 5210 |
| }, |
| { |
| "epoch": 0.5506329113924051, |
| "grad_norm": 0.746254026889801, |
| "learning_rate": 0.0015, |
| "loss": 1.5258, |
| "step": 5220 |
| }, |
| { |
| "epoch": 0.5516877637130801, |
| "grad_norm": 0.5853145122528076, |
| "learning_rate": 0.0015, |
| "loss": 1.5185, |
| "step": 5230 |
| }, |
| { |
| "epoch": 0.5527426160337553, |
| "grad_norm": 0.5808828473091125, |
| "learning_rate": 0.0015, |
| "loss": 1.5253, |
| "step": 5240 |
| }, |
| { |
| "epoch": 0.5537974683544303, |
| "grad_norm": 0.7577324509620667, |
| "learning_rate": 0.0015, |
| "loss": 1.5204, |
| "step": 5250 |
| }, |
| { |
| "epoch": 0.5548523206751055, |
| "grad_norm": 0.5829281210899353, |
| "learning_rate": 0.0015, |
| "loss": 1.5279, |
| "step": 5260 |
| }, |
| { |
| "epoch": 0.5559071729957806, |
| "grad_norm": 0.8018589019775391, |
| "learning_rate": 0.0015, |
| "loss": 1.532, |
| "step": 5270 |
| }, |
| { |
| "epoch": 0.5569620253164557, |
| "grad_norm": 0.6416056752204895, |
| "learning_rate": 0.0015, |
| "loss": 1.5254, |
| "step": 5280 |
| }, |
| { |
| "epoch": 0.5580168776371308, |
| "grad_norm": 0.7368462681770325, |
| "learning_rate": 0.0015, |
| "loss": 1.5221, |
| "step": 5290 |
| }, |
| { |
| "epoch": 0.5590717299578059, |
| "grad_norm": 0.541665256023407, |
| "learning_rate": 0.0015, |
| "loss": 1.5287, |
| "step": 5300 |
| }, |
| { |
| "epoch": 0.560126582278481, |
| "grad_norm": 0.7606929540634155, |
| "learning_rate": 0.0015, |
| "loss": 1.518, |
| "step": 5310 |
| }, |
| { |
| "epoch": 0.5611814345991561, |
| "grad_norm": 0.647328794002533, |
| "learning_rate": 0.0015, |
| "loss": 1.5171, |
| "step": 5320 |
| }, |
| { |
| "epoch": 0.5622362869198312, |
| "grad_norm": 0.8251186013221741, |
| "learning_rate": 0.0015, |
| "loss": 1.5291, |
| "step": 5330 |
| }, |
| { |
| "epoch": 0.5632911392405063, |
| "grad_norm": 0.6676337122917175, |
| "learning_rate": 0.0015, |
| "loss": 1.5206, |
| "step": 5340 |
| }, |
| { |
| "epoch": 0.5643459915611815, |
| "grad_norm": 0.6196624040603638, |
| "learning_rate": 0.0015, |
| "loss": 1.5094, |
| "step": 5350 |
| }, |
| { |
| "epoch": 0.5654008438818565, |
| "grad_norm": 0.8831680417060852, |
| "learning_rate": 0.0015, |
| "loss": 1.5197, |
| "step": 5360 |
| }, |
| { |
| "epoch": 0.5664556962025317, |
| "grad_norm": 0.5513179898262024, |
| "learning_rate": 0.0015, |
| "loss": 1.5197, |
| "step": 5370 |
| }, |
| { |
| "epoch": 0.5675105485232067, |
| "grad_norm": 0.5404736399650574, |
| "learning_rate": 0.0015, |
| "loss": 1.5132, |
| "step": 5380 |
| }, |
| { |
| "epoch": 0.5685654008438819, |
| "grad_norm": 0.7697404026985168, |
| "learning_rate": 0.0015, |
| "loss": 1.516, |
| "step": 5390 |
| }, |
| { |
| "epoch": 0.569620253164557, |
| "grad_norm": 0.5695388317108154, |
| "learning_rate": 0.0015, |
| "loss": 1.5182, |
| "step": 5400 |
| }, |
| { |
| "epoch": 0.570675105485232, |
| "grad_norm": 0.636220395565033, |
| "learning_rate": 0.0015, |
| "loss": 1.5201, |
| "step": 5410 |
| }, |
| { |
| "epoch": 0.5717299578059072, |
| "grad_norm": 0.5687567591667175, |
| "learning_rate": 0.0015, |
| "loss": 1.5111, |
| "step": 5420 |
| }, |
| { |
| "epoch": 0.5727848101265823, |
| "grad_norm": 0.5303071141242981, |
| "learning_rate": 0.0015, |
| "loss": 1.5107, |
| "step": 5430 |
| }, |
| { |
| "epoch": 0.5738396624472574, |
| "grad_norm": 0.8346840143203735, |
| "learning_rate": 0.0015, |
| "loss": 1.5094, |
| "step": 5440 |
| }, |
| { |
| "epoch": 0.5748945147679325, |
| "grad_norm": 0.6687101125717163, |
| "learning_rate": 0.0015, |
| "loss": 1.5143, |
| "step": 5450 |
| }, |
| { |
| "epoch": 0.5759493670886076, |
| "grad_norm": 0.6760293245315552, |
| "learning_rate": 0.0015, |
| "loss": 1.5148, |
| "step": 5460 |
| }, |
| { |
| "epoch": 0.5770042194092827, |
| "grad_norm": 0.6732572913169861, |
| "learning_rate": 0.0015, |
| "loss": 1.5144, |
| "step": 5470 |
| }, |
| { |
| "epoch": 0.5780590717299579, |
| "grad_norm": 0.8290615677833557, |
| "learning_rate": 0.0015, |
| "loss": 1.5165, |
| "step": 5480 |
| }, |
| { |
| "epoch": 0.5791139240506329, |
| "grad_norm": 0.5455368757247925, |
| "learning_rate": 0.0015, |
| "loss": 1.5232, |
| "step": 5490 |
| }, |
| { |
| "epoch": 0.580168776371308, |
| "grad_norm": 0.5541859865188599, |
| "learning_rate": 0.0015, |
| "loss": 1.5133, |
| "step": 5500 |
| }, |
| { |
| "epoch": 0.5812236286919831, |
| "grad_norm": 0.7072927951812744, |
| "learning_rate": 0.0015, |
| "loss": 1.518, |
| "step": 5510 |
| }, |
| { |
| "epoch": 0.5822784810126582, |
| "grad_norm": 0.5367401242256165, |
| "learning_rate": 0.0015, |
| "loss": 1.5118, |
| "step": 5520 |
| }, |
| { |
| "epoch": 0.5833333333333334, |
| "grad_norm": 0.5651088356971741, |
| "learning_rate": 0.0015, |
| "loss": 1.5039, |
| "step": 5530 |
| }, |
| { |
| "epoch": 0.5843881856540084, |
| "grad_norm": 0.5084605813026428, |
| "learning_rate": 0.0015, |
| "loss": 1.5069, |
| "step": 5540 |
| }, |
| { |
| "epoch": 0.5854430379746836, |
| "grad_norm": 0.6395454406738281, |
| "learning_rate": 0.0015, |
| "loss": 1.5226, |
| "step": 5550 |
| }, |
| { |
| "epoch": 0.5864978902953587, |
| "grad_norm": 1.0206706523895264, |
| "learning_rate": 0.0015, |
| "loss": 1.5269, |
| "step": 5560 |
| }, |
| { |
| "epoch": 0.5875527426160337, |
| "grad_norm": 0.5466570854187012, |
| "learning_rate": 0.0015, |
| "loss": 1.5075, |
| "step": 5570 |
| }, |
| { |
| "epoch": 0.5886075949367089, |
| "grad_norm": 0.8130196928977966, |
| "learning_rate": 0.0015, |
| "loss": 1.5113, |
| "step": 5580 |
| }, |
| { |
| "epoch": 0.5896624472573839, |
| "grad_norm": 0.7004330158233643, |
| "learning_rate": 0.0015, |
| "loss": 1.5151, |
| "step": 5590 |
| }, |
| { |
| "epoch": 0.5907172995780591, |
| "grad_norm": 0.5617148876190186, |
| "learning_rate": 0.0015, |
| "loss": 1.5074, |
| "step": 5600 |
| }, |
| { |
| "epoch": 0.5917721518987342, |
| "grad_norm": 0.6354098916053772, |
| "learning_rate": 0.0015, |
| "loss": 1.5117, |
| "step": 5610 |
| }, |
| { |
| "epoch": 0.5928270042194093, |
| "grad_norm": 0.6242541670799255, |
| "learning_rate": 0.0015, |
| "loss": 1.5189, |
| "step": 5620 |
| }, |
| { |
| "epoch": 0.5938818565400844, |
| "grad_norm": 0.6715850234031677, |
| "learning_rate": 0.0015, |
| "loss": 1.5157, |
| "step": 5630 |
| }, |
| { |
| "epoch": 0.5949367088607594, |
| "grad_norm": 0.5890056490898132, |
| "learning_rate": 0.0015, |
| "loss": 1.5265, |
| "step": 5640 |
| }, |
| { |
| "epoch": 0.5959915611814346, |
| "grad_norm": 0.6360265612602234, |
| "learning_rate": 0.0015, |
| "loss": 1.52, |
| "step": 5650 |
| }, |
| { |
| "epoch": 0.5970464135021097, |
| "grad_norm": 0.6696105599403381, |
| "learning_rate": 0.0015, |
| "loss": 1.5175, |
| "step": 5660 |
| }, |
| { |
| "epoch": 0.5981012658227848, |
| "grad_norm": 0.5383630990982056, |
| "learning_rate": 0.0015, |
| "loss": 1.5218, |
| "step": 5670 |
| }, |
| { |
| "epoch": 0.5991561181434599, |
| "grad_norm": 0.7002443671226501, |
| "learning_rate": 0.0015, |
| "loss": 1.5228, |
| "step": 5680 |
| }, |
| { |
| "epoch": 0.6002109704641351, |
| "grad_norm": 0.5610836744308472, |
| "learning_rate": 0.0015, |
| "loss": 1.5001, |
| "step": 5690 |
| }, |
| { |
| "epoch": 0.6012658227848101, |
| "grad_norm": 0.5491235852241516, |
| "learning_rate": 0.0015, |
| "loss": 1.5048, |
| "step": 5700 |
| }, |
| { |
| "epoch": 0.6023206751054853, |
| "grad_norm": 0.5745838284492493, |
| "learning_rate": 0.0015, |
| "loss": 1.5103, |
| "step": 5710 |
| }, |
| { |
| "epoch": 0.6033755274261603, |
| "grad_norm": 0.5973967909812927, |
| "learning_rate": 0.0015, |
| "loss": 1.5142, |
| "step": 5720 |
| }, |
| { |
| "epoch": 0.6044303797468354, |
| "grad_norm": 0.7536993622779846, |
| "learning_rate": 0.0015, |
| "loss": 1.5092, |
| "step": 5730 |
| }, |
| { |
| "epoch": 0.6054852320675106, |
| "grad_norm": 0.5494557619094849, |
| "learning_rate": 0.0015, |
| "loss": 1.5117, |
| "step": 5740 |
| }, |
| { |
| "epoch": 0.6065400843881856, |
| "grad_norm": 0.8905473947525024, |
| "learning_rate": 0.0015, |
| "loss": 1.4849, |
| "step": 5750 |
| }, |
| { |
| "epoch": 0.6075949367088608, |
| "grad_norm": 0.5961906313896179, |
| "learning_rate": 0.0015, |
| "loss": 1.4959, |
| "step": 5760 |
| }, |
| { |
| "epoch": 0.6086497890295358, |
| "grad_norm": 0.7787330746650696, |
| "learning_rate": 0.0015, |
| "loss": 1.5001, |
| "step": 5770 |
| }, |
| { |
| "epoch": 0.609704641350211, |
| "grad_norm": 0.7014888525009155, |
| "learning_rate": 0.0015, |
| "loss": 1.517, |
| "step": 5780 |
| }, |
| { |
| "epoch": 0.6107594936708861, |
| "grad_norm": 0.7262461185455322, |
| "learning_rate": 0.0015, |
| "loss": 1.5071, |
| "step": 5790 |
| }, |
| { |
| "epoch": 0.6118143459915611, |
| "grad_norm": 0.550659716129303, |
| "learning_rate": 0.0015, |
| "loss": 1.5077, |
| "step": 5800 |
| }, |
| { |
| "epoch": 0.6128691983122363, |
| "grad_norm": 0.5086367726325989, |
| "learning_rate": 0.0015, |
| "loss": 1.5111, |
| "step": 5810 |
| }, |
| { |
| "epoch": 0.6139240506329114, |
| "grad_norm": 0.881079375743866, |
| "learning_rate": 0.0015, |
| "loss": 1.5241, |
| "step": 5820 |
| }, |
| { |
| "epoch": 0.6149789029535865, |
| "grad_norm": 0.6603014469146729, |
| "learning_rate": 0.0015, |
| "loss": 1.5025, |
| "step": 5830 |
| }, |
| { |
| "epoch": 0.6160337552742616, |
| "grad_norm": 0.5636839866638184, |
| "learning_rate": 0.0015, |
| "loss": 1.5192, |
| "step": 5840 |
| }, |
| { |
| "epoch": 0.6170886075949367, |
| "grad_norm": 0.5623931884765625, |
| "learning_rate": 0.0015, |
| "loss": 1.5224, |
| "step": 5850 |
| }, |
| { |
| "epoch": 0.6181434599156118, |
| "grad_norm": 0.5545900464057922, |
| "learning_rate": 0.0015, |
| "loss": 1.5094, |
| "step": 5860 |
| }, |
| { |
| "epoch": 0.619198312236287, |
| "grad_norm": 0.5038663744926453, |
| "learning_rate": 0.0015, |
| "loss": 1.5034, |
| "step": 5870 |
| }, |
| { |
| "epoch": 0.620253164556962, |
| "grad_norm": 0.634153425693512, |
| "learning_rate": 0.0015, |
| "loss": 1.5147, |
| "step": 5880 |
| }, |
| { |
| "epoch": 0.6213080168776371, |
| "grad_norm": 0.7164548635482788, |
| "learning_rate": 0.0015, |
| "loss": 1.506, |
| "step": 5890 |
| }, |
| { |
| "epoch": 0.6223628691983122, |
| "grad_norm": 0.6937264800071716, |
| "learning_rate": 0.0015, |
| "loss": 1.5062, |
| "step": 5900 |
| }, |
| { |
| "epoch": 0.6234177215189873, |
| "grad_norm": 0.5849927663803101, |
| "learning_rate": 0.0015, |
| "loss": 1.5028, |
| "step": 5910 |
| }, |
| { |
| "epoch": 0.6244725738396625, |
| "grad_norm": 0.6183120608329773, |
| "learning_rate": 0.0015, |
| "loss": 1.4944, |
| "step": 5920 |
| }, |
| { |
| "epoch": 0.6255274261603375, |
| "grad_norm": 0.6934881210327148, |
| "learning_rate": 0.0015, |
| "loss": 1.4992, |
| "step": 5930 |
| }, |
| { |
| "epoch": 0.6265822784810127, |
| "grad_norm": 0.5181916356086731, |
| "learning_rate": 0.0015, |
| "loss": 1.5054, |
| "step": 5940 |
| }, |
| { |
| "epoch": 0.6276371308016878, |
| "grad_norm": 0.5753829479217529, |
| "learning_rate": 0.0015, |
| "loss": 1.4961, |
| "step": 5950 |
| }, |
| { |
| "epoch": 0.6286919831223629, |
| "grad_norm": 0.5150362849235535, |
| "learning_rate": 0.0015, |
| "loss": 1.5038, |
| "step": 5960 |
| }, |
| { |
| "epoch": 0.629746835443038, |
| "grad_norm": 0.5351850390434265, |
| "learning_rate": 0.0015, |
| "loss": 1.5054, |
| "step": 5970 |
| }, |
| { |
| "epoch": 0.630801687763713, |
| "grad_norm": 0.6032434701919556, |
| "learning_rate": 0.0015, |
| "loss": 1.4895, |
| "step": 5980 |
| }, |
| { |
| "epoch": 0.6318565400843882, |
| "grad_norm": 0.6674609780311584, |
| "learning_rate": 0.0015, |
| "loss": 1.5079, |
| "step": 5990 |
| }, |
| { |
| "epoch": 0.6329113924050633, |
| "grad_norm": 0.6922264099121094, |
| "learning_rate": 0.0015, |
| "loss": 1.5173, |
| "step": 6000 |
| }, |
| { |
| "epoch": 0.6339662447257384, |
| "grad_norm": 0.7690093517303467, |
| "learning_rate": 0.0015, |
| "loss": 1.5061, |
| "step": 6010 |
| }, |
| { |
| "epoch": 0.6350210970464135, |
| "grad_norm": 0.859092652797699, |
| "learning_rate": 0.0015, |
| "loss": 1.5093, |
| "step": 6020 |
| }, |
| { |
| "epoch": 0.6360759493670886, |
| "grad_norm": 0.5710610151290894, |
| "learning_rate": 0.0015, |
| "loss": 1.4942, |
| "step": 6030 |
| }, |
| { |
| "epoch": 0.6371308016877637, |
| "grad_norm": 0.8480361104011536, |
| "learning_rate": 0.0015, |
| "loss": 1.511, |
| "step": 6040 |
| }, |
| { |
| "epoch": 0.6381856540084389, |
| "grad_norm": 0.5757660269737244, |
| "learning_rate": 0.0015, |
| "loss": 1.491, |
| "step": 6050 |
| }, |
| { |
| "epoch": 0.6392405063291139, |
| "grad_norm": 0.5221954584121704, |
| "learning_rate": 0.0015, |
| "loss": 1.5079, |
| "step": 6060 |
| }, |
| { |
| "epoch": 0.640295358649789, |
| "grad_norm": 0.5378153920173645, |
| "learning_rate": 0.0015, |
| "loss": 1.4992, |
| "step": 6070 |
| }, |
| { |
| "epoch": 0.6413502109704642, |
| "grad_norm": 0.5231079459190369, |
| "learning_rate": 0.0015, |
| "loss": 1.5003, |
| "step": 6080 |
| }, |
| { |
| "epoch": 0.6424050632911392, |
| "grad_norm": 0.510814368724823, |
| "learning_rate": 0.0015, |
| "loss": 1.4984, |
| "step": 6090 |
| }, |
| { |
| "epoch": 0.6434599156118144, |
| "grad_norm": 0.651321530342102, |
| "learning_rate": 0.0015, |
| "loss": 1.4928, |
| "step": 6100 |
| }, |
| { |
| "epoch": 0.6445147679324894, |
| "grad_norm": 0.621930718421936, |
| "learning_rate": 0.0015, |
| "loss": 1.5054, |
| "step": 6110 |
| }, |
| { |
| "epoch": 0.6455696202531646, |
| "grad_norm": 0.9920527935028076, |
| "learning_rate": 0.0015, |
| "loss": 1.5025, |
| "step": 6120 |
| }, |
| { |
| "epoch": 0.6466244725738397, |
| "grad_norm": 0.4889945387840271, |
| "learning_rate": 0.0015, |
| "loss": 1.4995, |
| "step": 6130 |
| }, |
| { |
| "epoch": 0.6476793248945147, |
| "grad_norm": 0.5933585166931152, |
| "learning_rate": 0.0015, |
| "loss": 1.497, |
| "step": 6140 |
| }, |
| { |
| "epoch": 0.6487341772151899, |
| "grad_norm": 0.5071729421615601, |
| "learning_rate": 0.0015, |
| "loss": 1.5068, |
| "step": 6150 |
| }, |
| { |
| "epoch": 0.6497890295358649, |
| "grad_norm": 0.5711946487426758, |
| "learning_rate": 0.0015, |
| "loss": 1.5037, |
| "step": 6160 |
| }, |
| { |
| "epoch": 0.6508438818565401, |
| "grad_norm": 0.8071870803833008, |
| "learning_rate": 0.0014834368975312174, |
| "loss": 1.4822, |
| "step": 6170 |
| }, |
| { |
| "epoch": 0.6518987341772152, |
| "grad_norm": 0.8835534453392029, |
| "learning_rate": 0.0014629899726345957, |
| "loss": 1.5064, |
| "step": 6180 |
| }, |
| { |
| "epoch": 0.6529535864978903, |
| "grad_norm": 0.5751279592514038, |
| "learning_rate": 0.0014428248775471316, |
| "loss": 1.5064, |
| "step": 6190 |
| }, |
| { |
| "epoch": 0.6540084388185654, |
| "grad_norm": 0.5673179626464844, |
| "learning_rate": 0.00142293772767289, |
| "loss": 1.4965, |
| "step": 6200 |
| }, |
| { |
| "epoch": 0.6550632911392406, |
| "grad_norm": 0.6210446357727051, |
| "learning_rate": 0.001403324691959192, |
| "loss": 1.4913, |
| "step": 6210 |
| }, |
| { |
| "epoch": 0.6561181434599156, |
| "grad_norm": 0.6187479496002197, |
| "learning_rate": 0.0013839819921586025, |
| "loss": 1.4976, |
| "step": 6220 |
| }, |
| { |
| "epoch": 0.6571729957805907, |
| "grad_norm": 0.5539543032646179, |
| "learning_rate": 0.0013649059021010894, |
| "loss": 1.486, |
| "step": 6230 |
| }, |
| { |
| "epoch": 0.6582278481012658, |
| "grad_norm": 0.5763696432113647, |
| "learning_rate": 0.0013460927469762154, |
| "loss": 1.4909, |
| "step": 6240 |
| }, |
| { |
| "epoch": 0.6592827004219409, |
| "grad_norm": 0.5042856931686401, |
| "learning_rate": 0.0013275389026252255, |
| "loss": 1.4913, |
| "step": 6250 |
| }, |
| { |
| "epoch": 0.6603375527426161, |
| "grad_norm": 0.6170564293861389, |
| "learning_rate": 0.0013092407948428887, |
| "loss": 1.4834, |
| "step": 6260 |
| }, |
| { |
| "epoch": 0.6613924050632911, |
| "grad_norm": 0.5373573303222656, |
| "learning_rate": 0.001291194898688966, |
| "loss": 1.4911, |
| "step": 6270 |
| }, |
| { |
| "epoch": 0.6624472573839663, |
| "grad_norm": 0.7562010288238525, |
| "learning_rate": 0.001273397737809166, |
| "loss": 1.4895, |
| "step": 6280 |
| }, |
| { |
| "epoch": 0.6635021097046413, |
| "grad_norm": 0.6941890716552734, |
| "learning_rate": 0.001255845883765463, |
| "loss": 1.485, |
| "step": 6290 |
| }, |
| { |
| "epoch": 0.6645569620253164, |
| "grad_norm": 0.5045270919799805, |
| "learning_rate": 0.001238535955375642, |
| "loss": 1.4783, |
| "step": 6300 |
| }, |
| { |
| "epoch": 0.6656118143459916, |
| "grad_norm": 0.4871998727321625, |
| "learning_rate": 0.0012214646180619506, |
| "loss": 1.4748, |
| "step": 6310 |
| }, |
| { |
| "epoch": 0.6666666666666666, |
| "grad_norm": 0.5172607898712158, |
| "learning_rate": 0.001204628583208727, |
| "loss": 1.4706, |
| "step": 6320 |
| }, |
| { |
| "epoch": 0.6677215189873418, |
| "grad_norm": 0.5898618102073669, |
| "learning_rate": 0.0011880246075288827, |
| "loss": 1.4782, |
| "step": 6330 |
| }, |
| { |
| "epoch": 0.6687763713080169, |
| "grad_norm": 0.5182415843009949, |
| "learning_rate": 0.001171649492439115, |
| "loss": 1.4702, |
| "step": 6340 |
| }, |
| { |
| "epoch": 0.669831223628692, |
| "grad_norm": 0.49977177381515503, |
| "learning_rate": 0.0011555000834437364, |
| "loss": 1.4745, |
| "step": 6350 |
| }, |
| { |
| "epoch": 0.6708860759493671, |
| "grad_norm": 0.8687050938606262, |
| "learning_rate": 0.0011395732695269908, |
| "loss": 1.4714, |
| "step": 6360 |
| }, |
| { |
| "epoch": 0.6719409282700421, |
| "grad_norm": 0.5547814965248108, |
| "learning_rate": 0.0011238659825537505, |
| "loss": 1.4521, |
| "step": 6370 |
| }, |
| { |
| "epoch": 0.6729957805907173, |
| "grad_norm": 0.6606083512306213, |
| "learning_rate": 0.0011083751966784717, |
| "loss": 1.4589, |
| "step": 6380 |
| }, |
| { |
| "epoch": 0.6740506329113924, |
| "grad_norm": 0.6311693787574768, |
| "learning_rate": 0.0010930979277622953, |
| "loss": 1.4735, |
| "step": 6390 |
| }, |
| { |
| "epoch": 0.6751054852320675, |
| "grad_norm": 0.5820292234420776, |
| "learning_rate": 0.0010780312327981854, |
| "loss": 1.4718, |
| "step": 6400 |
| }, |
| { |
| "epoch": 0.6761603375527426, |
| "grad_norm": 0.8019455671310425, |
| "learning_rate": 0.0010631722093439888, |
| "loss": 1.4671, |
| "step": 6410 |
| }, |
| { |
| "epoch": 0.6772151898734177, |
| "grad_norm": 0.761712908744812, |
| "learning_rate": 0.00104851799496331, |
| "loss": 1.4531, |
| "step": 6420 |
| }, |
| { |
| "epoch": 0.6782700421940928, |
| "grad_norm": 0.514397144317627, |
| "learning_rate": 0.0010340657666740914, |
| "loss": 1.4596, |
| "step": 6430 |
| }, |
| { |
| "epoch": 0.679324894514768, |
| "grad_norm": 0.49260208010673523, |
| "learning_rate": 0.0010198127404047975, |
| "loss": 1.4427, |
| "step": 6440 |
| }, |
| { |
| "epoch": 0.680379746835443, |
| "grad_norm": 0.5700627565383911, |
| "learning_rate": 0.0010057561704580897, |
| "loss": 1.4506, |
| "step": 6450 |
| }, |
| { |
| "epoch": 0.6814345991561181, |
| "grad_norm": 0.4923916459083557, |
| "learning_rate": 0.0009918933489818985, |
| "loss": 1.4626, |
| "step": 6460 |
| }, |
| { |
| "epoch": 0.6824894514767933, |
| "grad_norm": 0.6934185028076172, |
| "learning_rate": 0.0009782216054477827, |
| "loss": 1.4533, |
| "step": 6470 |
| }, |
| { |
| "epoch": 0.6835443037974683, |
| "grad_norm": 0.8038368821144104, |
| "learning_rate": 0.0009647383061364801, |
| "loss": 1.4629, |
| "step": 6480 |
| }, |
| { |
| "epoch": 0.6845991561181435, |
| "grad_norm": 0.5632407665252686, |
| "learning_rate": 0.0009514408536305495, |
| "loss": 1.4534, |
| "step": 6490 |
| }, |
| { |
| "epoch": 0.6856540084388185, |
| "grad_norm": 0.6466336250305176, |
| "learning_rate": 0.0009383266863140042, |
| "loss": 1.4668, |
| "step": 6500 |
| }, |
| { |
| "epoch": 0.6867088607594937, |
| "grad_norm": 0.6142919659614563, |
| "learning_rate": 0.000925393277878844, |
| "loss": 1.4675, |
| "step": 6510 |
| }, |
| { |
| "epoch": 0.6877637130801688, |
| "grad_norm": 0.5444507598876953, |
| "learning_rate": 0.0009126381368383879, |
| "loss": 1.4484, |
| "step": 6520 |
| }, |
| { |
| "epoch": 0.6888185654008439, |
| "grad_norm": 0.5479179620742798, |
| "learning_rate": 0.0009000588060473156, |
| "loss": 1.438, |
| "step": 6530 |
| }, |
| { |
| "epoch": 0.689873417721519, |
| "grad_norm": 0.6018847823143005, |
| "learning_rate": 0.0008876528622283235, |
| "loss": 1.4504, |
| "step": 6540 |
| }, |
| { |
| "epoch": 0.6909282700421941, |
| "grad_norm": 0.5311291217803955, |
| "learning_rate": 0.0008754179155053053, |
| "loss": 1.4477, |
| "step": 6550 |
| }, |
| { |
| "epoch": 0.6919831223628692, |
| "grad_norm": 0.5240741968154907, |
| "learning_rate": 0.0008633516089429683, |
| "loss": 1.4453, |
| "step": 6560 |
| }, |
| { |
| "epoch": 0.6930379746835443, |
| "grad_norm": 0.4696906805038452, |
| "learning_rate": 0.0008514516180927928, |
| "loss": 1.4399, |
| "step": 6570 |
| }, |
| { |
| "epoch": 0.6940928270042194, |
| "grad_norm": 0.6514448523521423, |
| "learning_rate": 0.0008397156505452524, |
| "loss": 1.4389, |
| "step": 6580 |
| }, |
| { |
| "epoch": 0.6951476793248945, |
| "grad_norm": 0.49357330799102783, |
| "learning_rate": 0.0008281414454882051, |
| "loss": 1.4449, |
| "step": 6590 |
| }, |
| { |
| "epoch": 0.6962025316455697, |
| "grad_norm": 0.5959960222244263, |
| "learning_rate": 0.0008167267732713704, |
| "loss": 1.4476, |
| "step": 6600 |
| }, |
| { |
| "epoch": 0.6972573839662447, |
| "grad_norm": 0.5163862109184265, |
| "learning_rate": 0.0008054694349768117, |
| "loss": 1.4315, |
| "step": 6610 |
| }, |
| { |
| "epoch": 0.6983122362869199, |
| "grad_norm": 0.5023782849311829, |
| "learning_rate": 0.0007943672619953359, |
| "loss": 1.4392, |
| "step": 6620 |
| }, |
| { |
| "epoch": 0.6993670886075949, |
| "grad_norm": 0.5141021609306335, |
| "learning_rate": 0.0007834181156087356, |
| "loss": 1.4311, |
| "step": 6630 |
| }, |
| { |
| "epoch": 0.70042194092827, |
| "grad_norm": 0.5499473214149475, |
| "learning_rate": 0.0007726198865777852, |
| "loss": 1.4371, |
| "step": 6640 |
| }, |
| { |
| "epoch": 0.7014767932489452, |
| "grad_norm": 0.6825447678565979, |
| "learning_rate": 0.0007619704947359191, |
| "loss": 1.4288, |
| "step": 6650 |
| }, |
| { |
| "epoch": 0.7025316455696202, |
| "grad_norm": 0.47925615310668945, |
| "learning_rate": 0.0007514678885885087, |
| "loss": 1.4312, |
| "step": 6660 |
| }, |
| { |
| "epoch": 0.7035864978902954, |
| "grad_norm": 0.48817384243011475, |
| "learning_rate": 0.0007411100449176633, |
| "loss": 1.4313, |
| "step": 6670 |
| }, |
| { |
| "epoch": 0.7046413502109705, |
| "grad_norm": 0.5127274990081787, |
| "learning_rate": 0.0007308949683924791, |
| "loss": 1.4353, |
| "step": 6680 |
| }, |
| { |
| "epoch": 0.7056962025316456, |
| "grad_norm": 0.6060632467269897, |
| "learning_rate": 0.000720820691184658, |
| "loss": 1.4261, |
| "step": 6690 |
| }, |
| { |
| "epoch": 0.7067510548523207, |
| "grad_norm": 0.553632378578186, |
| "learning_rate": 0.0007108852725894269, |
| "loss": 1.4253, |
| "step": 6700 |
| }, |
| { |
| "epoch": 0.7078059071729957, |
| "grad_norm": 0.4965393543243408, |
| "learning_rate": 0.000701086798651681, |
| "loss": 1.4279, |
| "step": 6710 |
| }, |
| { |
| "epoch": 0.7088607594936709, |
| "grad_norm": 0.539612889289856, |
| "learning_rate": 0.0006914233817972798, |
| "loss": 1.4161, |
| "step": 6720 |
| }, |
| { |
| "epoch": 0.709915611814346, |
| "grad_norm": 0.5040695071220398, |
| "learning_rate": 0.0006818931604694261, |
| "loss": 1.4232, |
| "step": 6730 |
| }, |
| { |
| "epoch": 0.7109704641350211, |
| "grad_norm": 0.4720032215118408, |
| "learning_rate": 0.0006724942987700563, |
| "loss": 1.4277, |
| "step": 6740 |
| }, |
| { |
| "epoch": 0.7120253164556962, |
| "grad_norm": 0.5587788820266724, |
| "learning_rate": 0.0006632249861061732, |
| "loss": 1.4281, |
| "step": 6750 |
| }, |
| { |
| "epoch": 0.7130801687763713, |
| "grad_norm": 0.5153480172157288, |
| "learning_rate": 0.0006540834368410549, |
| "loss": 1.4242, |
| "step": 6760 |
| }, |
| { |
| "epoch": 0.7141350210970464, |
| "grad_norm": 0.484110027551651, |
| "learning_rate": 0.0006450678899502701, |
| "loss": 1.4261, |
| "step": 6770 |
| }, |
| { |
| "epoch": 0.7151898734177216, |
| "grad_norm": 0.5298638343811035, |
| "learning_rate": 0.0006361766086824345, |
| "loss": 1.4218, |
| "step": 6780 |
| }, |
| { |
| "epoch": 0.7162447257383966, |
| "grad_norm": 0.6543071269989014, |
| "learning_rate": 0.000627407880224645, |
| "loss": 1.4302, |
| "step": 6790 |
| }, |
| { |
| "epoch": 0.7172995780590717, |
| "grad_norm": 0.5062084794044495, |
| "learning_rate": 0.0006187600153725225, |
| "loss": 1.4139, |
| "step": 6800 |
| }, |
| { |
| "epoch": 0.7183544303797469, |
| "grad_norm": 0.6418647766113281, |
| "learning_rate": 0.0006102313482048055, |
| "loss": 1.4156, |
| "step": 6810 |
| }, |
| { |
| "epoch": 0.7194092827004219, |
| "grad_norm": 0.5333866477012634, |
| "learning_rate": 0.0006018202357624274, |
| "loss": 1.4207, |
| "step": 6820 |
| }, |
| { |
| "epoch": 0.7204641350210971, |
| "grad_norm": 0.6157364249229431, |
| "learning_rate": 0.0005935250577320168, |
| "loss": 1.4154, |
| "step": 6830 |
| }, |
| { |
| "epoch": 0.7215189873417721, |
| "grad_norm": 0.49300333857536316, |
| "learning_rate": 0.0005853442161337618, |
| "loss": 1.4051, |
| "step": 6840 |
| }, |
| { |
| "epoch": 0.7225738396624473, |
| "grad_norm": 0.5593222975730896, |
| "learning_rate": 0.0005772761350135759, |
| "loss": 1.4136, |
| "step": 6850 |
| }, |
| { |
| "epoch": 0.7236286919831224, |
| "grad_norm": 0.536490797996521, |
| "learning_rate": 0.0005693192601395058, |
| "loss": 1.4077, |
| "step": 6860 |
| }, |
| { |
| "epoch": 0.7246835443037974, |
| "grad_norm": 0.526736855506897, |
| "learning_rate": 0.000561472058702326, |
| "loss": 1.4052, |
| "step": 6870 |
| }, |
| { |
| "epoch": 0.7257383966244726, |
| "grad_norm": 0.46280238032341003, |
| "learning_rate": 0.000553733019020258, |
| "loss": 1.4118, |
| "step": 6880 |
| }, |
| { |
| "epoch": 0.7267932489451476, |
| "grad_norm": 0.5084342956542969, |
| "learning_rate": 0.0005461006502477612, |
| "loss": 1.4011, |
| "step": 6890 |
| }, |
| { |
| "epoch": 0.7278481012658228, |
| "grad_norm": 0.5471343398094177, |
| "learning_rate": 0.0005385734820883369, |
| "loss": 1.4024, |
| "step": 6900 |
| }, |
| { |
| "epoch": 0.7289029535864979, |
| "grad_norm": 0.512230634689331, |
| "learning_rate": 0.0005311500645112907, |
| "loss": 1.4292, |
| "step": 6910 |
| }, |
| { |
| "epoch": 0.729957805907173, |
| "grad_norm": 0.5206472277641296, |
| "learning_rate": 0.0005238289674723993, |
| "loss": 1.4066, |
| "step": 6920 |
| }, |
| { |
| "epoch": 0.7310126582278481, |
| "grad_norm": 0.5572535991668701, |
| "learning_rate": 0.0005166087806384274, |
| "loss": 1.4115, |
| "step": 6930 |
| }, |
| { |
| "epoch": 0.7320675105485233, |
| "grad_norm": 0.5156189203262329, |
| "learning_rate": 0.0005094881131154418, |
| "loss": 1.4123, |
| "step": 6940 |
| }, |
| { |
| "epoch": 0.7331223628691983, |
| "grad_norm": 0.503831684589386, |
| "learning_rate": 0.0005024655931808696, |
| "loss": 1.41, |
| "step": 6950 |
| }, |
| { |
| "epoch": 0.7341772151898734, |
| "grad_norm": 0.47319483757019043, |
| "learning_rate": 0.0004955398680192508, |
| "loss": 1.401, |
| "step": 6960 |
| }, |
| { |
| "epoch": 0.7352320675105485, |
| "grad_norm": 0.4656444489955902, |
| "learning_rate": 0.000488709603461632, |
| "loss": 1.3946, |
| "step": 6970 |
| }, |
| { |
| "epoch": 0.7362869198312236, |
| "grad_norm": 0.4967908561229706, |
| "learning_rate": 0.000481973483728553, |
| "loss": 1.3971, |
| "step": 6980 |
| }, |
| { |
| "epoch": 0.7373417721518988, |
| "grad_norm": 0.5195211172103882, |
| "learning_rate": 0.0004753302111765748, |
| "loss": 1.3981, |
| "step": 6990 |
| }, |
| { |
| "epoch": 0.7383966244725738, |
| "grad_norm": 0.4845932424068451, |
| "learning_rate": 0.0004687785060483032, |
| "loss": 1.4101, |
| "step": 7000 |
| }, |
| { |
| "epoch": 0.739451476793249, |
| "grad_norm": 0.4656439423561096, |
| "learning_rate": 0.0004623171062258558, |
| "loss": 1.3801, |
| "step": 7010 |
| }, |
| { |
| "epoch": 0.740506329113924, |
| "grad_norm": 0.6125307083129883, |
| "learning_rate": 0.0004559447669877288, |
| "loss": 1.3971, |
| "step": 7020 |
| }, |
| { |
| "epoch": 0.7415611814345991, |
| "grad_norm": 0.5421500205993652, |
| "learning_rate": 0.00044966026076901413, |
| "loss": 1.3991, |
| "step": 7030 |
| }, |
| { |
| "epoch": 0.7426160337552743, |
| "grad_norm": 0.6151002049446106, |
| "learning_rate": 0.00044346237692492177, |
| "loss": 1.4056, |
| "step": 7040 |
| }, |
| { |
| "epoch": 0.7436708860759493, |
| "grad_norm": 0.5110399723052979, |
| "learning_rate": 0.0004373499214975615, |
| "loss": 1.392, |
| "step": 7050 |
| }, |
| { |
| "epoch": 0.7447257383966245, |
| "grad_norm": 0.4953923225402832, |
| "learning_rate": 0.0004313217169859396, |
| "loss": 1.3992, |
| "step": 7060 |
| }, |
| { |
| "epoch": 0.7457805907172996, |
| "grad_norm": 0.5136517286300659, |
| "learning_rate": 0.0004253766021191256, |
| "loss": 1.4037, |
| "step": 7070 |
| }, |
| { |
| "epoch": 0.7468354430379747, |
| "grad_norm": 0.5029652118682861, |
| "learning_rate": 0.00041951343163254497, |
| "loss": 1.399, |
| "step": 7080 |
| }, |
| { |
| "epoch": 0.7478902953586498, |
| "grad_norm": 0.4731636643409729, |
| "learning_rate": 0.00041373107604735626, |
| "loss": 1.3959, |
| "step": 7090 |
| }, |
| { |
| "epoch": 0.7489451476793249, |
| "grad_norm": 0.5364992022514343, |
| "learning_rate": 0.0004080284214528687, |
| "loss": 1.39, |
| "step": 7100 |
| }, |
| { |
| "epoch": 0.75, |
| "grad_norm": 0.5382775664329529, |
| "learning_rate": 0.0004024043692919589, |
| "loss": 1.4037, |
| "step": 7110 |
| }, |
| { |
| "epoch": 0.7510548523206751, |
| "grad_norm": 0.5207314491271973, |
| "learning_rate": 0.0003968578361494449, |
| "loss": 1.3999, |
| "step": 7120 |
| }, |
| { |
| "epoch": 0.7521097046413502, |
| "grad_norm": 0.4977900981903076, |
| "learning_rate": 0.000391387753543378, |
| "loss": 1.4073, |
| "step": 7130 |
| }, |
| { |
| "epoch": 0.7531645569620253, |
| "grad_norm": 0.5196287631988525, |
| "learning_rate": 0.00038599306771921023, |
| "loss": 1.3891, |
| "step": 7140 |
| }, |
| { |
| "epoch": 0.7542194092827004, |
| "grad_norm": 0.6989203095436096, |
| "learning_rate": 0.0003806727394468004, |
| "loss": 1.3864, |
| "step": 7150 |
| }, |
| { |
| "epoch": 0.7552742616033755, |
| "grad_norm": 0.5948571562767029, |
| "learning_rate": 0.0003754257438202162, |
| "loss": 1.3941, |
| "step": 7160 |
| }, |
| { |
| "epoch": 0.7563291139240507, |
| "grad_norm": 0.6483874320983887, |
| "learning_rate": 0.0003702510700602974, |
| "loss": 1.3999, |
| "step": 7170 |
| }, |
| { |
| "epoch": 0.7573839662447257, |
| "grad_norm": 0.5051309466362, |
| "learning_rate": 0.0003651477213199393, |
| "loss": 1.3803, |
| "step": 7180 |
| }, |
| { |
| "epoch": 0.7584388185654009, |
| "grad_norm": 0.506965696811676, |
| "learning_rate": 0.000360114714492061, |
| "loss": 1.3813, |
| "step": 7190 |
| }, |
| { |
| "epoch": 0.759493670886076, |
| "grad_norm": 0.4690167009830475, |
| "learning_rate": 0.0003551510800202195, |
| "loss": 1.3839, |
| "step": 7200 |
| }, |
| { |
| "epoch": 0.760548523206751, |
| "grad_norm": 0.5723272562026978, |
| "learning_rate": 0.0003502558617118353, |
| "loss": 1.3915, |
| "step": 7210 |
| }, |
| { |
| "epoch": 0.7616033755274262, |
| "grad_norm": 0.5610151290893555, |
| "learning_rate": 0.0003454281165539914, |
| "loss": 1.4038, |
| "step": 7220 |
| }, |
| { |
| "epoch": 0.7626582278481012, |
| "grad_norm": 0.4634474813938141, |
| "learning_rate": 0.00034066691453177176, |
| "loss": 1.4008, |
| "step": 7230 |
| }, |
| { |
| "epoch": 0.7637130801687764, |
| "grad_norm": 0.47573161125183105, |
| "learning_rate": 0.0003359713384491037, |
| "loss": 1.397, |
| "step": 7240 |
| }, |
| { |
| "epoch": 0.7647679324894515, |
| "grad_norm": 0.5212197303771973, |
| "learning_rate": 0.00033134048375206944, |
| "loss": 1.389, |
| "step": 7250 |
| }, |
| { |
| "epoch": 0.7658227848101266, |
| "grad_norm": 0.49535438418388367, |
| "learning_rate": 0.0003267734583546536, |
| "loss": 1.3859, |
| "step": 7260 |
| }, |
| { |
| "epoch": 0.7668776371308017, |
| "grad_norm": 0.5158471465110779, |
| "learning_rate": 0.00032226938246689157, |
| "loss": 1.3852, |
| "step": 7270 |
| }, |
| { |
| "epoch": 0.7679324894514767, |
| "grad_norm": 0.5728384852409363, |
| "learning_rate": 0.0003178273884253874, |
| "loss": 1.3856, |
| "step": 7280 |
| }, |
| { |
| "epoch": 0.7689873417721519, |
| "grad_norm": 0.5211251378059387, |
| "learning_rate": 0.0003134466205261674, |
| "loss": 1.3971, |
| "step": 7290 |
| }, |
| { |
| "epoch": 0.770042194092827, |
| "grad_norm": 0.5732220411300659, |
| "learning_rate": 0.0003091262348598378, |
| "loss": 1.4041, |
| "step": 7300 |
| }, |
| { |
| "epoch": 0.7710970464135021, |
| "grad_norm": 0.47373560070991516, |
| "learning_rate": 0.0003048653991490141, |
| "loss": 1.3822, |
| "step": 7310 |
| }, |
| { |
| "epoch": 0.7721518987341772, |
| "grad_norm": 0.6221377849578857, |
| "learning_rate": 0.00030066329258799187, |
| "loss": 1.3776, |
| "step": 7320 |
| }, |
| { |
| "epoch": 0.7732067510548524, |
| "grad_norm": 0.5708879828453064, |
| "learning_rate": 0.0002965191056846266, |
| "loss": 1.3808, |
| "step": 7330 |
| }, |
| { |
| "epoch": 0.7742616033755274, |
| "grad_norm": 0.603161633014679, |
| "learning_rate": 0.000292432040104394, |
| "loss": 1.3802, |
| "step": 7340 |
| }, |
| { |
| "epoch": 0.7753164556962026, |
| "grad_norm": 0.5277611613273621, |
| "learning_rate": 0.00028840130851659853, |
| "loss": 1.375, |
| "step": 7350 |
| }, |
| { |
| "epoch": 0.7763713080168776, |
| "grad_norm": 0.5325142741203308, |
| "learning_rate": 0.0002844261344427028, |
| "loss": 1.3831, |
| "step": 7360 |
| }, |
| { |
| "epoch": 0.7774261603375527, |
| "grad_norm": 0.5006171464920044, |
| "learning_rate": 0.0002805057521067471, |
| "loss": 1.3836, |
| "step": 7370 |
| }, |
| { |
| "epoch": 0.7784810126582279, |
| "grad_norm": 0.48984476923942566, |
| "learning_rate": 0.00027663940628783017, |
| "loss": 1.3707, |
| "step": 7380 |
| }, |
| { |
| "epoch": 0.7795358649789029, |
| "grad_norm": 0.4523366093635559, |
| "learning_rate": 0.00027282635217462393, |
| "loss": 1.3811, |
| "step": 7390 |
| }, |
| { |
| "epoch": 0.7805907172995781, |
| "grad_norm": 0.6498004198074341, |
| "learning_rate": 0.0002690658552218937, |
| "loss": 1.389, |
| "step": 7400 |
| }, |
| { |
| "epoch": 0.7816455696202531, |
| "grad_norm": 0.5409725308418274, |
| "learning_rate": 0.00026535719100899516, |
| "loss": 1.3711, |
| "step": 7410 |
| }, |
| { |
| "epoch": 0.7827004219409283, |
| "grad_norm": 0.5616211891174316, |
| "learning_rate": 0.00026169964510032245, |
| "loss": 1.3747, |
| "step": 7420 |
| }, |
| { |
| "epoch": 0.7837552742616034, |
| "grad_norm": 0.487123042345047, |
| "learning_rate": 0.00025809251290767984, |
| "loss": 1.3619, |
| "step": 7430 |
| }, |
| { |
| "epoch": 0.7848101265822784, |
| "grad_norm": 0.4605257511138916, |
| "learning_rate": 0.00025453509955454957, |
| "loss": 1.3643, |
| "step": 7440 |
| }, |
| { |
| "epoch": 0.7858649789029536, |
| "grad_norm": 0.5088199973106384, |
| "learning_rate": 0.00025102671974223175, |
| "loss": 1.3698, |
| "step": 7450 |
| }, |
| { |
| "epoch": 0.7869198312236287, |
| "grad_norm": 0.4879826307296753, |
| "learning_rate": 0.00024756669761782815, |
| "loss": 1.3797, |
| "step": 7460 |
| }, |
| { |
| "epoch": 0.7879746835443038, |
| "grad_norm": 0.48447489738464355, |
| "learning_rate": 0.0002441543666440464, |
| "loss": 1.368, |
| "step": 7470 |
| }, |
| { |
| "epoch": 0.7890295358649789, |
| "grad_norm": 0.5670198798179626, |
| "learning_rate": 0.00024078906947079878, |
| "loss": 1.3742, |
| "step": 7480 |
| }, |
| { |
| "epoch": 0.790084388185654, |
| "grad_norm": 0.4819045066833496, |
| "learning_rate": 0.00023747015780857005, |
| "loss": 1.383, |
| "step": 7490 |
| }, |
| { |
| "epoch": 0.7911392405063291, |
| "grad_norm": 0.49387362599372864, |
| "learning_rate": 0.00023419699230353144, |
| "loss": 1.3774, |
| "step": 7500 |
| }, |
| { |
| "epoch": 0.7921940928270043, |
| "grad_norm": 0.5370501279830933, |
| "learning_rate": 0.00023096894241437586, |
| "loss": 1.3828, |
| "step": 7510 |
| }, |
| { |
| "epoch": 0.7932489451476793, |
| "grad_norm": 0.47914108633995056, |
| "learning_rate": 0.00022778538629085056, |
| "loss": 1.3672, |
| "step": 7520 |
| }, |
| { |
| "epoch": 0.7943037974683544, |
| "grad_norm": 0.5119479298591614, |
| "learning_rate": 0.00022464571065396427, |
| "loss": 1.3694, |
| "step": 7530 |
| }, |
| { |
| "epoch": 0.7953586497890295, |
| "grad_norm": 0.5128620266914368, |
| "learning_rate": 0.00022154931067784521, |
| "loss": 1.3666, |
| "step": 7540 |
| }, |
| { |
| "epoch": 0.7964135021097046, |
| "grad_norm": 0.46946507692337036, |
| "learning_rate": 0.00021849558987322782, |
| "loss": 1.3642, |
| "step": 7550 |
| }, |
| { |
| "epoch": 0.7974683544303798, |
| "grad_norm": 0.5166003704071045, |
| "learning_rate": 0.0002154839599725452, |
| "loss": 1.365, |
| "step": 7560 |
| }, |
| { |
| "epoch": 0.7985232067510548, |
| "grad_norm": 0.5451986789703369, |
| "learning_rate": 0.00021251384081660544, |
| "loss": 1.3778, |
| "step": 7570 |
| }, |
| { |
| "epoch": 0.79957805907173, |
| "grad_norm": 0.4987885057926178, |
| "learning_rate": 0.0002095846602428303, |
| "loss": 1.3732, |
| "step": 7580 |
| }, |
| { |
| "epoch": 0.8006329113924051, |
| "grad_norm": 0.4632894694805145, |
| "learning_rate": 0.00020669585397503358, |
| "loss": 1.3643, |
| "step": 7590 |
| }, |
| { |
| "epoch": 0.8016877637130801, |
| "grad_norm": 0.4567081928253174, |
| "learning_rate": 0.0002038468655147195, |
| "loss": 1.3674, |
| "step": 7600 |
| }, |
| { |
| "epoch": 0.8027426160337553, |
| "grad_norm": 0.49346765875816345, |
| "learning_rate": 0.00020103714603387894, |
| "loss": 1.3815, |
| "step": 7610 |
| }, |
| { |
| "epoch": 0.8037974683544303, |
| "grad_norm": 0.45700404047966003, |
| "learning_rate": 0.00019826615426926338, |
| "loss": 1.3544, |
| "step": 7620 |
| }, |
| { |
| "epoch": 0.8048523206751055, |
| "grad_norm": 0.4595026671886444, |
| "learning_rate": 0.00019553335641811625, |
| "loss": 1.3763, |
| "step": 7630 |
| }, |
| { |
| "epoch": 0.8059071729957806, |
| "grad_norm": 0.5063329935073853, |
| "learning_rate": 0.0001928382260353415, |
| "loss": 1.3673, |
| "step": 7640 |
| }, |
| { |
| "epoch": 0.8069620253164557, |
| "grad_norm": 0.47150281071662903, |
| "learning_rate": 0.00019018024393208902, |
| "loss": 1.3843, |
| "step": 7650 |
| }, |
| { |
| "epoch": 0.8080168776371308, |
| "grad_norm": 0.4635440409183502, |
| "learning_rate": 0.00018755889807573872, |
| "loss": 1.3615, |
| "step": 7660 |
| }, |
| { |
| "epoch": 0.8090717299578059, |
| "grad_norm": 0.45574668049812317, |
| "learning_rate": 0.00018497368349126262, |
| "loss": 1.3672, |
| "step": 7670 |
| }, |
| { |
| "epoch": 0.810126582278481, |
| "grad_norm": 0.5359010696411133, |
| "learning_rate": 0.00018242410216394648, |
| "loss": 1.383, |
| "step": 7680 |
| }, |
| { |
| "epoch": 0.8111814345991561, |
| "grad_norm": 0.5147023797035217, |
| "learning_rate": 0.0001799096629434529, |
| "loss": 1.3556, |
| "step": 7690 |
| }, |
| { |
| "epoch": 0.8122362869198312, |
| "grad_norm": 0.46002885699272156, |
| "learning_rate": 0.00017742988144920578, |
| "loss": 1.3666, |
| "step": 7700 |
| }, |
| { |
| "epoch": 0.8132911392405063, |
| "grad_norm": 0.4675118625164032, |
| "learning_rate": 0.00017498427997707976, |
| "loss": 1.3633, |
| "step": 7710 |
| }, |
| { |
| "epoch": 0.8143459915611815, |
| "grad_norm": 0.5563496351242065, |
| "learning_rate": 0.00017257238740737548, |
| "loss": 1.3648, |
| "step": 7720 |
| }, |
| { |
| "epoch": 0.8154008438818565, |
| "grad_norm": 0.5589823722839355, |
| "learning_rate": 0.00017019373911406307, |
| "loss": 1.3735, |
| "step": 7730 |
| }, |
| { |
| "epoch": 0.8164556962025317, |
| "grad_norm": 0.46896493434906006, |
| "learning_rate": 0.000167847876875277, |
| "loss": 1.3721, |
| "step": 7740 |
| }, |
| { |
| "epoch": 0.8175105485232067, |
| "grad_norm": 0.45898881554603577, |
| "learning_rate": 0.00016553434878504428, |
| "loss": 1.3565, |
| "step": 7750 |
| }, |
| { |
| "epoch": 0.8185654008438819, |
| "grad_norm": 0.5108206868171692, |
| "learning_rate": 0.00016325270916622947, |
| "loss": 1.3584, |
| "step": 7760 |
| }, |
| { |
| "epoch": 0.819620253164557, |
| "grad_norm": 0.4540421962738037, |
| "learning_rate": 0.00016100251848467966, |
| "loss": 1.3679, |
| "step": 7770 |
| }, |
| { |
| "epoch": 0.820675105485232, |
| "grad_norm": 0.6645315885543823, |
| "learning_rate": 0.0001587833432645528, |
| "loss": 1.3602, |
| "step": 7780 |
| }, |
| { |
| "epoch": 0.8217299578059072, |
| "grad_norm": 0.5565598011016846, |
| "learning_rate": 0.00015659475600481292, |
| "loss": 1.3754, |
| "step": 7790 |
| }, |
| { |
| "epoch": 0.8227848101265823, |
| "grad_norm": 0.4732491075992584, |
| "learning_rate": 0.00015443633509687688, |
| "loss": 1.3637, |
| "step": 7800 |
| }, |
| { |
| "epoch": 0.8238396624472574, |
| "grad_norm": 0.4488430321216583, |
| "learning_rate": 0.00015230766474339536, |
| "loss": 1.363, |
| "step": 7810 |
| }, |
| { |
| "epoch": 0.8248945147679325, |
| "grad_norm": 0.4506990313529968, |
| "learning_rate": 0.00015020833487815416, |
| "loss": 1.3719, |
| "step": 7820 |
| }, |
| { |
| "epoch": 0.8259493670886076, |
| "grad_norm": 0.46290716528892517, |
| "learning_rate": 0.0001481379410870792, |
| "loss": 1.3612, |
| "step": 7830 |
| }, |
| { |
| "epoch": 0.8270042194092827, |
| "grad_norm": 0.47624319791793823, |
| "learning_rate": 0.00014609608453033013, |
| "loss": 1.3453, |
| "step": 7840 |
| }, |
| { |
| "epoch": 0.8280590717299579, |
| "grad_norm": 0.45356878638267517, |
| "learning_rate": 0.00014408237186546807, |
| "loss": 1.3565, |
| "step": 7850 |
| }, |
| { |
| "epoch": 0.8291139240506329, |
| "grad_norm": 0.46717569231987, |
| "learning_rate": 0.00014209641517168273, |
| "loss": 1.3433, |
| "step": 7860 |
| }, |
| { |
| "epoch": 0.830168776371308, |
| "grad_norm": 0.48989081382751465, |
| "learning_rate": 0.00014013783187506265, |
| "loss": 1.3578, |
| "step": 7870 |
| }, |
| { |
| "epoch": 0.8312236286919831, |
| "grad_norm": 0.529300332069397, |
| "learning_rate": 0.00013820624467489697, |
| "loss": 1.3766, |
| "step": 7880 |
| }, |
| { |
| "epoch": 0.8322784810126582, |
| "grad_norm": 0.47537773847579956, |
| "learning_rate": 0.00013630128147099213, |
| "loss": 1.3743, |
| "step": 7890 |
| }, |
| { |
| "epoch": 0.8333333333333334, |
| "grad_norm": 0.48015323281288147, |
| "learning_rate": 0.00013442257529199068, |
| "loss": 1.3561, |
| "step": 7900 |
| }, |
| { |
| "epoch": 0.8343881856540084, |
| "grad_norm": 0.5237522125244141, |
| "learning_rate": 0.00013256976422467803, |
| "loss": 1.3634, |
| "step": 7910 |
| }, |
| { |
| "epoch": 0.8354430379746836, |
| "grad_norm": 0.4523259997367859, |
| "learning_rate": 0.00013074249134426366, |
| "loss": 1.3622, |
| "step": 7920 |
| }, |
| { |
| "epoch": 0.8364978902953587, |
| "grad_norm": 0.5076009035110474, |
| "learning_rate": 0.0001289404046456233, |
| "loss": 1.3708, |
| "step": 7930 |
| }, |
| { |
| "epoch": 0.8375527426160337, |
| "grad_norm": 0.4758736789226532, |
| "learning_rate": 0.0001271631569754887, |
| "loss": 1.3642, |
| "step": 7940 |
| }, |
| { |
| "epoch": 0.8386075949367089, |
| "grad_norm": 0.4746805429458618, |
| "learning_rate": 0.0001254104059655723, |
| "loss": 1.3666, |
| "step": 7950 |
| }, |
| { |
| "epoch": 0.8396624472573839, |
| "grad_norm": 0.45755910873413086, |
| "learning_rate": 0.00012368181396661337, |
| "loss": 1.3526, |
| "step": 7960 |
| }, |
| { |
| "epoch": 0.8407172995780591, |
| "grad_norm": 0.5004385709762573, |
| "learning_rate": 0.00012197704798333364, |
| "loss": 1.3514, |
| "step": 7970 |
| }, |
| { |
| "epoch": 0.8417721518987342, |
| "grad_norm": 0.5192686319351196, |
| "learning_rate": 0.00012029577961028894, |
| "loss": 1.3512, |
| "step": 7980 |
| }, |
| { |
| "epoch": 0.8428270042194093, |
| "grad_norm": 0.48216772079467773, |
| "learning_rate": 0.00011863768496860542, |
| "loss": 1.3634, |
| "step": 7990 |
| }, |
| { |
| "epoch": 0.8438818565400844, |
| "grad_norm": 0.46114224195480347, |
| "learning_rate": 0.00011700244464358777, |
| "loss": 1.3595, |
| "step": 8000 |
| }, |
| { |
| "epoch": 0.8449367088607594, |
| "grad_norm": 0.45775875449180603, |
| "learning_rate": 0.00011538974362318715, |
| "loss": 1.3632, |
| "step": 8010 |
| }, |
| { |
| "epoch": 0.8459915611814346, |
| "grad_norm": 0.4667496979236603, |
| "learning_rate": 0.00011379927123731737, |
| "loss": 1.3565, |
| "step": 8020 |
| }, |
| { |
| "epoch": 0.8470464135021097, |
| "grad_norm": 0.4608895778656006, |
| "learning_rate": 0.0001122307210980077, |
| "loss": 1.3568, |
| "step": 8030 |
| }, |
| { |
| "epoch": 0.8481012658227848, |
| "grad_norm": 0.47877684235572815, |
| "learning_rate": 0.00011068379104038026, |
| "loss": 1.3704, |
| "step": 8040 |
| }, |
| { |
| "epoch": 0.8491561181434599, |
| "grad_norm": 0.457285612821579, |
| "learning_rate": 0.00010915818306444116, |
| "loss": 1.3466, |
| "step": 8050 |
| }, |
| { |
| "epoch": 0.8502109704641351, |
| "grad_norm": 0.5008485913276672, |
| "learning_rate": 0.00010765360327767384, |
| "loss": 1.3548, |
| "step": 8060 |
| }, |
| { |
| "epoch": 0.8512658227848101, |
| "grad_norm": 0.47175008058547974, |
| "learning_rate": 0.00010616976183842376, |
| "loss": 1.3633, |
| "step": 8070 |
| }, |
| { |
| "epoch": 0.8523206751054853, |
| "grad_norm": 0.4591042697429657, |
| "learning_rate": 0.00010470637290006365, |
| "loss": 1.3648, |
| "step": 8080 |
| }, |
| { |
| "epoch": 0.8533755274261603, |
| "grad_norm": 0.4671987295150757, |
| "learning_rate": 0.00010326315455592764, |
| "loss": 1.352, |
| "step": 8090 |
| }, |
| { |
| "epoch": 0.8544303797468354, |
| "grad_norm": 0.4841940104961395, |
| "learning_rate": 0.0001018398287850053, |
| "loss": 1.3435, |
| "step": 8100 |
| }, |
| { |
| "epoch": 0.8554852320675106, |
| "grad_norm": 0.4897977113723755, |
| "learning_rate": 0.00010043612139838357, |
| "loss": 1.3656, |
| "step": 8110 |
| }, |
| { |
| "epoch": 0.8565400843881856, |
| "grad_norm": 0.4758096933364868, |
| "learning_rate": 9.905176198642719e-05, |
| "loss": 1.3553, |
| "step": 8120 |
| }, |
| { |
| "epoch": 0.8575949367088608, |
| "grad_norm": 0.4628910720348358, |
| "learning_rate": 9.76864838666871e-05, |
| "loss": 1.3565, |
| "step": 8130 |
| }, |
| { |
| "epoch": 0.8586497890295358, |
| "grad_norm": 0.4522228538990021, |
| "learning_rate": 9.634002403252676e-05, |
| "loss": 1.3511, |
| "step": 8140 |
| }, |
| { |
| "epoch": 0.859704641350211, |
| "grad_norm": 0.47021564841270447, |
| "learning_rate": 9.501212310245681e-05, |
| "loss": 1.3488, |
| "step": 8150 |
| }, |
| { |
| "epoch": 0.8607594936708861, |
| "grad_norm": 0.49830883741378784, |
| "learning_rate": 9.370252527016777e-05, |
| "loss": 1.3601, |
| "step": 8160 |
| }, |
| { |
| "epoch": 0.8618143459915611, |
| "grad_norm": 0.47212862968444824, |
| "learning_rate": 9.241097825525163e-05, |
| "loss": 1.3512, |
| "step": 8170 |
| }, |
| { |
| "epoch": 0.8628691983122363, |
| "grad_norm": 0.48565971851348877, |
| "learning_rate": 9.113723325460276e-05, |
| "loss": 1.358, |
| "step": 8180 |
| }, |
| { |
| "epoch": 0.8639240506329114, |
| "grad_norm": 0.5443127155303955, |
| "learning_rate": 8.988104489448849e-05, |
| "loss": 1.3521, |
| "step": 8190 |
| }, |
| { |
| "epoch": 0.8649789029535865, |
| "grad_norm": 0.4680684506893158, |
| "learning_rate": 8.864217118328042e-05, |
| "loss": 1.3639, |
| "step": 8200 |
| }, |
| { |
| "epoch": 0.8660337552742616, |
| "grad_norm": 0.46079784631729126, |
| "learning_rate": 8.742037346483729e-05, |
| "loss": 1.3559, |
| "step": 8210 |
| }, |
| { |
| "epoch": 0.8670886075949367, |
| "grad_norm": 0.45251044631004333, |
| "learning_rate": 8.62154163725303e-05, |
| "loss": 1.3637, |
| "step": 8220 |
| }, |
| { |
| "epoch": 0.8681434599156118, |
| "grad_norm": 0.5130413770675659, |
| "learning_rate": 8.502706778390219e-05, |
| "loss": 1.3585, |
| "step": 8230 |
| }, |
| { |
| "epoch": 0.869198312236287, |
| "grad_norm": 0.4337594509124756, |
| "learning_rate": 8.38550987759513e-05, |
| "loss": 1.3566, |
| "step": 8240 |
| }, |
| { |
| "epoch": 0.870253164556962, |
| "grad_norm": 0.5292634963989258, |
| "learning_rate": 8.269928358103191e-05, |
| "loss": 1.3709, |
| "step": 8250 |
| }, |
| { |
| "epoch": 0.8713080168776371, |
| "grad_norm": 0.4459943473339081, |
| "learning_rate": 8.155939954336243e-05, |
| "loss": 1.3609, |
| "step": 8260 |
| }, |
| { |
| "epoch": 0.8723628691983122, |
| "grad_norm": 0.5037257075309753, |
| "learning_rate": 8.043522707613312e-05, |
| "loss": 1.3573, |
| "step": 8270 |
| }, |
| { |
| "epoch": 0.8734177215189873, |
| "grad_norm": 0.4748310446739197, |
| "learning_rate": 7.932654961920486e-05, |
| "loss": 1.3421, |
| "step": 8280 |
| }, |
| { |
| "epoch": 0.8744725738396625, |
| "grad_norm": 0.46188482642173767, |
| "learning_rate": 7.823315359739135e-05, |
| "loss": 1.3442, |
| "step": 8290 |
| }, |
| { |
| "epoch": 0.8755274261603375, |
| "grad_norm": 0.4867098331451416, |
| "learning_rate": 7.715482837931577e-05, |
| "loss": 1.3685, |
| "step": 8300 |
| }, |
| { |
| "epoch": 0.8765822784810127, |
| "grad_norm": 0.4996362328529358, |
| "learning_rate": 7.6091366236835e-05, |
| "loss": 1.3469, |
| "step": 8310 |
| }, |
| { |
| "epoch": 0.8776371308016878, |
| "grad_norm": 0.4916406273841858, |
| "learning_rate": 7.504256230502289e-05, |
| "loss": 1.3649, |
| "step": 8320 |
| }, |
| { |
| "epoch": 0.8786919831223629, |
| "grad_norm": 0.4626372754573822, |
| "learning_rate": 7.400821454270524e-05, |
| "loss": 1.3576, |
| "step": 8330 |
| }, |
| { |
| "epoch": 0.879746835443038, |
| "grad_norm": 0.45033547282218933, |
| "learning_rate": 7.29881236935386e-05, |
| "loss": 1.3472, |
| "step": 8340 |
| }, |
| { |
| "epoch": 0.880801687763713, |
| "grad_norm": 0.44288432598114014, |
| "learning_rate": 7.198209324762562e-05, |
| "loss": 1.3494, |
| "step": 8350 |
| }, |
| { |
| "epoch": 0.8818565400843882, |
| "grad_norm": 0.49059954285621643, |
| "learning_rate": 7.098992940365946e-05, |
| "loss": 1.3443, |
| "step": 8360 |
| }, |
| { |
| "epoch": 0.8829113924050633, |
| "grad_norm": 0.45661666989326477, |
| "learning_rate": 7.001144103159e-05, |
| "loss": 1.3537, |
| "step": 8370 |
| }, |
| { |
| "epoch": 0.8839662447257384, |
| "grad_norm": 0.4595849812030792, |
| "learning_rate": 6.904643963580461e-05, |
| "loss": 1.3609, |
| "step": 8380 |
| }, |
| { |
| "epoch": 0.8850210970464135, |
| "grad_norm": 0.44966819882392883, |
| "learning_rate": 6.809473931881644e-05, |
| "loss": 1.349, |
| "step": 8390 |
| }, |
| { |
| "epoch": 0.8860759493670886, |
| "grad_norm": 0.4483475983142853, |
| "learning_rate": 6.71561567454532e-05, |
| "loss": 1.3506, |
| "step": 8400 |
| }, |
| { |
| "epoch": 0.8871308016877637, |
| "grad_norm": 0.5414402484893799, |
| "learning_rate": 6.623051110753948e-05, |
| "loss": 1.3686, |
| "step": 8410 |
| }, |
| { |
| "epoch": 0.8881856540084389, |
| "grad_norm": 0.4634023606777191, |
| "learning_rate": 6.531762408906607e-05, |
| "loss": 1.3559, |
| "step": 8420 |
| }, |
| { |
| "epoch": 0.8892405063291139, |
| "grad_norm": 0.4461284875869751, |
| "learning_rate": 6.441731983183912e-05, |
| "loss": 1.3544, |
| "step": 8430 |
| }, |
| { |
| "epoch": 0.890295358649789, |
| "grad_norm": 0.45708733797073364, |
| "learning_rate": 6.352942490160292e-05, |
| "loss": 1.3451, |
| "step": 8440 |
| }, |
| { |
| "epoch": 0.8913502109704642, |
| "grad_norm": 0.4943016469478607, |
| "learning_rate": 6.265376825462966e-05, |
| "loss": 1.3544, |
| "step": 8450 |
| }, |
| { |
| "epoch": 0.8924050632911392, |
| "grad_norm": 0.470504492521286, |
| "learning_rate": 6.179018120476945e-05, |
| "loss": 1.3537, |
| "step": 8460 |
| }, |
| { |
| "epoch": 0.8934599156118144, |
| "grad_norm": 0.4554164409637451, |
| "learning_rate": 6.0938497390954946e-05, |
| "loss": 1.3508, |
| "step": 8470 |
| }, |
| { |
| "epoch": 0.8945147679324894, |
| "grad_norm": 0.453253835439682, |
| "learning_rate": 6.009855274515339e-05, |
| "loss": 1.3449, |
| "step": 8480 |
| }, |
| { |
| "epoch": 0.8955696202531646, |
| "grad_norm": 0.48623064160346985, |
| "learning_rate": 5.9270185460760735e-05, |
| "loss": 1.354, |
| "step": 8490 |
| }, |
| { |
| "epoch": 0.8966244725738397, |
| "grad_norm": 0.45357802510261536, |
| "learning_rate": 5.8453235961431225e-05, |
| "loss": 1.3415, |
| "step": 8500 |
| }, |
| { |
| "epoch": 0.8976793248945147, |
| "grad_norm": 0.4535787105560303, |
| "learning_rate": 5.764754687033678e-05, |
| "loss": 1.3523, |
| "step": 8510 |
| }, |
| { |
| "epoch": 0.8987341772151899, |
| "grad_norm": 0.4594683349132538, |
| "learning_rate": 5.6852962979849836e-05, |
| "loss": 1.3488, |
| "step": 8520 |
| }, |
| { |
| "epoch": 0.8997890295358649, |
| "grad_norm": 0.49994322657585144, |
| "learning_rate": 5.6069331221644284e-05, |
| "loss": 1.3642, |
| "step": 8530 |
| }, |
| { |
| "epoch": 0.9008438818565401, |
| "grad_norm": 0.4636819660663605, |
| "learning_rate": 5.529650063720842e-05, |
| "loss": 1.3577, |
| "step": 8540 |
| }, |
| { |
| "epoch": 0.9018987341772152, |
| "grad_norm": 0.4792076647281647, |
| "learning_rate": 5.453432234876445e-05, |
| "loss": 1.3498, |
| "step": 8550 |
| }, |
| { |
| "epoch": 0.9029535864978903, |
| "grad_norm": 0.4741894602775574, |
| "learning_rate": 5.37826495305886e-05, |
| "loss": 1.3411, |
| "step": 8560 |
| }, |
| { |
| "epoch": 0.9040084388185654, |
| "grad_norm": 0.5298281311988831, |
| "learning_rate": 5.304133738072674e-05, |
| "loss": 1.3623, |
| "step": 8570 |
| }, |
| { |
| "epoch": 0.9050632911392406, |
| "grad_norm": 0.5063627362251282, |
| "learning_rate": 5.2310243093099814e-05, |
| "loss": 1.3535, |
| "step": 8580 |
| }, |
| { |
| "epoch": 0.9061181434599156, |
| "grad_norm": 0.5597848892211914, |
| "learning_rate": 5.158922582999368e-05, |
| "loss": 1.3568, |
| "step": 8590 |
| }, |
| { |
| "epoch": 0.9071729957805907, |
| "grad_norm": 0.48414871096611023, |
| "learning_rate": 5.087814669492819e-05, |
| "loss": 1.3481, |
| "step": 8600 |
| }, |
| { |
| "epoch": 0.9082278481012658, |
| "grad_norm": 0.5020228028297424, |
| "learning_rate": 5.017686870590028e-05, |
| "loss": 1.3479, |
| "step": 8610 |
| }, |
| { |
| "epoch": 0.9092827004219409, |
| "grad_norm": 0.4592040777206421, |
| "learning_rate": 4.948525676899577e-05, |
| "loss": 1.3463, |
| "step": 8620 |
| }, |
| { |
| "epoch": 0.9103375527426161, |
| "grad_norm": 0.47056108713150024, |
| "learning_rate": 4.880317765236493e-05, |
| "loss": 1.3561, |
| "step": 8630 |
| }, |
| { |
| "epoch": 0.9113924050632911, |
| "grad_norm": 0.47921305894851685, |
| "learning_rate": 4.8130499960556755e-05, |
| "loss": 1.3355, |
| "step": 8640 |
| }, |
| { |
| "epoch": 0.9124472573839663, |
| "grad_norm": 0.4555310010910034, |
| "learning_rate": 4.746709410920699e-05, |
| "loss": 1.3422, |
| "step": 8650 |
| }, |
| { |
| "epoch": 0.9135021097046413, |
| "grad_norm": 0.5480925440788269, |
| "learning_rate": 4.681283230007507e-05, |
| "loss": 1.3452, |
| "step": 8660 |
| }, |
| { |
| "epoch": 0.9145569620253164, |
| "grad_norm": 0.4556386470794678, |
| "learning_rate": 4.616758849642509e-05, |
| "loss": 1.3525, |
| "step": 8670 |
| }, |
| { |
| "epoch": 0.9156118143459916, |
| "grad_norm": 0.45376330614089966, |
| "learning_rate": 4.553123839874615e-05, |
| "loss": 1.3588, |
| "step": 8680 |
| }, |
| { |
| "epoch": 0.9166666666666666, |
| "grad_norm": 0.48756328225135803, |
| "learning_rate": 4.490365942080736e-05, |
| "loss": 1.3523, |
| "step": 8690 |
| }, |
| { |
| "epoch": 0.9177215189873418, |
| "grad_norm": 0.4586666524410248, |
| "learning_rate": 4.428473066604285e-05, |
| "loss": 1.3498, |
| "step": 8700 |
| }, |
| { |
| "epoch": 0.9187763713080169, |
| "grad_norm": 0.4539191722869873, |
| "learning_rate": 4.367433290426233e-05, |
| "loss": 1.3505, |
| "step": 8710 |
| }, |
| { |
| "epoch": 0.919831223628692, |
| "grad_norm": 0.4817667603492737, |
| "learning_rate": 4.3072348548682595e-05, |
| "loss": 1.3431, |
| "step": 8720 |
| }, |
| { |
| "epoch": 0.9208860759493671, |
| "grad_norm": 0.46067652106285095, |
| "learning_rate": 4.247866163327575e-05, |
| "loss": 1.3548, |
| "step": 8730 |
| }, |
| { |
| "epoch": 0.9219409282700421, |
| "grad_norm": 0.4616350829601288, |
| "learning_rate": 4.1893157790429404e-05, |
| "loss": 1.3469, |
| "step": 8740 |
| }, |
| { |
| "epoch": 0.9229957805907173, |
| "grad_norm": 0.4541439712047577, |
| "learning_rate": 4.1315724228915066e-05, |
| "loss": 1.3421, |
| "step": 8750 |
| }, |
| { |
| "epoch": 0.9240506329113924, |
| "grad_norm": 0.5221729874610901, |
| "learning_rate": 4.074624971216005e-05, |
| "loss": 1.3293, |
| "step": 8760 |
| }, |
| { |
| "epoch": 0.9251054852320675, |
| "grad_norm": 0.4680933356285095, |
| "learning_rate": 4.018462453681889e-05, |
| "loss": 1.342, |
| "step": 8770 |
| }, |
| { |
| "epoch": 0.9261603375527426, |
| "grad_norm": 0.4517577886581421, |
| "learning_rate": 3.963074051164014e-05, |
| "loss": 1.351, |
| "step": 8780 |
| }, |
| { |
| "epoch": 0.9272151898734177, |
| "grad_norm": 0.4618259072303772, |
| "learning_rate": 3.908449093662446e-05, |
| "loss": 1.3435, |
| "step": 8790 |
| }, |
| { |
| "epoch": 0.9282700421940928, |
| "grad_norm": 0.45023104548454285, |
| "learning_rate": 3.854577058246998e-05, |
| "loss": 1.3425, |
| "step": 8800 |
| }, |
| { |
| "epoch": 0.929324894514768, |
| "grad_norm": 0.47846972942352295, |
| "learning_rate": 3.801447567030094e-05, |
| "loss": 1.3586, |
| "step": 8810 |
| }, |
| { |
| "epoch": 0.930379746835443, |
| "grad_norm": 0.45514824986457825, |
| "learning_rate": 3.7490503851675777e-05, |
| "loss": 1.3471, |
| "step": 8820 |
| }, |
| { |
| "epoch": 0.9314345991561181, |
| "grad_norm": 0.44830751419067383, |
| "learning_rate": 3.6973754188870806e-05, |
| "loss": 1.3653, |
| "step": 8830 |
| }, |
| { |
| "epoch": 0.9324894514767933, |
| "grad_norm": 0.465257465839386, |
| "learning_rate": 3.6464127135435536e-05, |
| "loss": 1.3491, |
| "step": 8840 |
| }, |
| { |
| "epoch": 0.9335443037974683, |
| "grad_norm": 0.5014391541481018, |
| "learning_rate": 3.596152451701616e-05, |
| "loss": 1.3464, |
| "step": 8850 |
| }, |
| { |
| "epoch": 0.9345991561181435, |
| "grad_norm": 0.48519137501716614, |
| "learning_rate": 3.5465849512443226e-05, |
| "loss": 1.3463, |
| "step": 8860 |
| }, |
| { |
| "epoch": 0.9356540084388185, |
| "grad_norm": 0.45895054936408997, |
| "learning_rate": 3.4977006635080086e-05, |
| "loss": 1.3504, |
| "step": 8870 |
| }, |
| { |
| "epoch": 0.9367088607594937, |
| "grad_norm": 0.4757814407348633, |
| "learning_rate": 3.449490171442838e-05, |
| "loss": 1.3539, |
| "step": 8880 |
| }, |
| { |
| "epoch": 0.9377637130801688, |
| "grad_norm": 0.46523842215538025, |
| "learning_rate": 3.401944187798702e-05, |
| "loss": 1.3522, |
| "step": 8890 |
| }, |
| { |
| "epoch": 0.9388185654008439, |
| "grad_norm": 0.447443425655365, |
| "learning_rate": 3.355053553336137e-05, |
| "loss": 1.3438, |
| "step": 8900 |
| }, |
| { |
| "epoch": 0.939873417721519, |
| "grad_norm": 0.47570958733558655, |
| "learning_rate": 3.308809235061882e-05, |
| "loss": 1.3425, |
| "step": 8910 |
| }, |
| { |
| "epoch": 0.9409282700421941, |
| "grad_norm": 0.45039355754852295, |
| "learning_rate": 3.263202324488772e-05, |
| "loss": 1.3494, |
| "step": 8920 |
| }, |
| { |
| "epoch": 0.9419831223628692, |
| "grad_norm": 0.45538634061813354, |
| "learning_rate": 3.218224035919609e-05, |
| "loss": 1.3456, |
| "step": 8930 |
| }, |
| { |
| "epoch": 0.9430379746835443, |
| "grad_norm": 0.47007620334625244, |
| "learning_rate": 3.173865704754688e-05, |
| "loss": 1.3475, |
| "step": 8940 |
| }, |
| { |
| "epoch": 0.9440928270042194, |
| "grad_norm": 0.4494016170501709, |
| "learning_rate": 3.130118785822657e-05, |
| "loss": 1.3498, |
| "step": 8950 |
| }, |
| { |
| "epoch": 0.9451476793248945, |
| "grad_norm": 0.46623754501342773, |
| "learning_rate": 3.08697485173437e-05, |
| "loss": 1.3525, |
| "step": 8960 |
| }, |
| { |
| "epoch": 0.9462025316455697, |
| "grad_norm": 0.5000026226043701, |
| "learning_rate": 3.0444255912594442e-05, |
| "loss": 1.3486, |
| "step": 8970 |
| }, |
| { |
| "epoch": 0.9472573839662447, |
| "grad_norm": 0.47525277733802795, |
| "learning_rate": 3.002462807725185e-05, |
| "loss": 1.3442, |
| "step": 8980 |
| }, |
| { |
| "epoch": 0.9483122362869199, |
| "grad_norm": 0.4763094186782837, |
| "learning_rate": 2.9610784174375868e-05, |
| "loss": 1.3515, |
| "step": 8990 |
| }, |
| { |
| "epoch": 0.9493670886075949, |
| "grad_norm": 0.45728081464767456, |
| "learning_rate": 2.920264448124087e-05, |
| "loss": 1.3449, |
| "step": 9000 |
| }, |
| { |
| "epoch": 0.95042194092827, |
| "grad_norm": 0.5230275988578796, |
| "learning_rate": 2.8800130373977936e-05, |
| "loss": 1.3412, |
| "step": 9010 |
| }, |
| { |
| "epoch": 0.9514767932489452, |
| "grad_norm": 0.45506682991981506, |
| "learning_rate": 2.84031643124288e-05, |
| "loss": 1.3435, |
| "step": 9020 |
| }, |
| { |
| "epoch": 0.9525316455696202, |
| "grad_norm": 0.46359187364578247, |
| "learning_rate": 2.8011669825208517e-05, |
| "loss": 1.3625, |
| "step": 9030 |
| }, |
| { |
| "epoch": 0.9535864978902954, |
| "grad_norm": 0.4452093541622162, |
| "learning_rate": 2.762557149497405e-05, |
| "loss": 1.3399, |
| "step": 9040 |
| }, |
| { |
| "epoch": 0.9546413502109705, |
| "grad_norm": 0.45948877930641174, |
| "learning_rate": 2.724479494389592e-05, |
| "loss": 1.3497, |
| "step": 9050 |
| }, |
| { |
| "epoch": 0.9556962025316456, |
| "grad_norm": 0.44907087087631226, |
| "learning_rate": 2.6869266819330058e-05, |
| "loss": 1.3511, |
| "step": 9060 |
| }, |
| { |
| "epoch": 0.9567510548523207, |
| "grad_norm": 0.45196837186813354, |
| "learning_rate": 2.6498914779687228e-05, |
| "loss": 1.3487, |
| "step": 9070 |
| }, |
| { |
| "epoch": 0.9578059071729957, |
| "grad_norm": 0.4547522962093353, |
| "learning_rate": 2.6133667480497115e-05, |
| "loss": 1.3515, |
| "step": 9080 |
| }, |
| { |
| "epoch": 0.9588607594936709, |
| "grad_norm": 0.46276381611824036, |
| "learning_rate": 2.5773454560664597e-05, |
| "loss": 1.3532, |
| "step": 9090 |
| }, |
| { |
| "epoch": 0.959915611814346, |
| "grad_norm": 0.4469272792339325, |
| "learning_rate": 2.541820662891541e-05, |
| "loss": 1.3316, |
| "step": 9100 |
| }, |
| { |
| "epoch": 0.9609704641350211, |
| "grad_norm": 0.45500412583351135, |
| "learning_rate": 2.5067855250428616e-05, |
| "loss": 1.3432, |
| "step": 9110 |
| }, |
| { |
| "epoch": 0.9620253164556962, |
| "grad_norm": 0.4534028470516205, |
| "learning_rate": 2.472233293365335e-05, |
| "loss": 1.3511, |
| "step": 9120 |
| }, |
| { |
| "epoch": 0.9630801687763713, |
| "grad_norm": 0.47841382026672363, |
| "learning_rate": 2.4381573117307307e-05, |
| "loss": 1.337, |
| "step": 9130 |
| }, |
| { |
| "epoch": 0.9641350210970464, |
| "grad_norm": 0.463321328163147, |
| "learning_rate": 2.4045510157554362e-05, |
| "loss": 1.3586, |
| "step": 9140 |
| }, |
| { |
| "epoch": 0.9651898734177216, |
| "grad_norm": 0.45994776487350464, |
| "learning_rate": 2.3714079315358985e-05, |
| "loss": 1.3446, |
| "step": 9150 |
| }, |
| { |
| "epoch": 0.9662447257383966, |
| "grad_norm": 0.46441754698753357, |
| "learning_rate": 2.338721674401494e-05, |
| "loss": 1.3521, |
| "step": 9160 |
| }, |
| { |
| "epoch": 0.9672995780590717, |
| "grad_norm": 0.4650691747665405, |
| "learning_rate": 2.30648594768459e-05, |
| "loss": 1.3502, |
| "step": 9170 |
| }, |
| { |
| "epoch": 0.9683544303797469, |
| "grad_norm": 0.44704753160476685, |
| "learning_rate": 2.2746945415075523e-05, |
| "loss": 1.3463, |
| "step": 9180 |
| }, |
| { |
| "epoch": 0.9694092827004219, |
| "grad_norm": 0.4620136320590973, |
| "learning_rate": 2.2433413315864803e-05, |
| "loss": 1.3418, |
| "step": 9190 |
| }, |
| { |
| "epoch": 0.9704641350210971, |
| "grad_norm": 0.4611254930496216, |
| "learning_rate": 2.2124202780514277e-05, |
| "loss": 1.3463, |
| "step": 9200 |
| }, |
| { |
| "epoch": 0.9715189873417721, |
| "grad_norm": 0.4542345702648163, |
| "learning_rate": 2.1819254242828815e-05, |
| "loss": 1.3471, |
| "step": 9210 |
| }, |
| { |
| "epoch": 0.9725738396624473, |
| "grad_norm": 0.4623969495296478, |
| "learning_rate": 2.151850895764285e-05, |
| "loss": 1.3437, |
| "step": 9220 |
| }, |
| { |
| "epoch": 0.9736286919831224, |
| "grad_norm": 0.4567524790763855, |
| "learning_rate": 2.12219089895037e-05, |
| "loss": 1.3354, |
| "step": 9230 |
| }, |
| { |
| "epoch": 0.9746835443037974, |
| "grad_norm": 0.45435529947280884, |
| "learning_rate": 2.092939720151092e-05, |
| "loss": 1.329, |
| "step": 9240 |
| }, |
| { |
| "epoch": 0.9757383966244726, |
| "grad_norm": 0.46444326639175415, |
| "learning_rate": 2.064091724430947e-05, |
| "loss": 1.3422, |
| "step": 9250 |
| }, |
| { |
| "epoch": 0.9767932489451476, |
| "grad_norm": 0.46743497252464294, |
| "learning_rate": 2.0356413545234603e-05, |
| "loss": 1.3389, |
| "step": 9260 |
| }, |
| { |
| "epoch": 0.9778481012658228, |
| "grad_norm": 0.4665004014968872, |
| "learning_rate": 2.0075831297606357e-05, |
| "loss": 1.3482, |
| "step": 9270 |
| }, |
| { |
| "epoch": 0.9789029535864979, |
| "grad_norm": 0.4589768946170807, |
| "learning_rate": 1.9799116450171627e-05, |
| "loss": 1.337, |
| "step": 9280 |
| }, |
| { |
| "epoch": 0.979957805907173, |
| "grad_norm": 0.4532953202724457, |
| "learning_rate": 1.952621569669175e-05, |
| "loss": 1.3465, |
| "step": 9290 |
| }, |
| { |
| "epoch": 0.9810126582278481, |
| "grad_norm": 0.4485061764717102, |
| "learning_rate": 1.9257076465673605e-05, |
| "loss": 1.3432, |
| "step": 9300 |
| }, |
| { |
| "epoch": 0.9820675105485233, |
| "grad_norm": 0.46737146377563477, |
| "learning_rate": 1.899164691024229e-05, |
| "loss": 1.3444, |
| "step": 9310 |
| }, |
| { |
| "epoch": 0.9831223628691983, |
| "grad_norm": 0.47384166717529297, |
| "learning_rate": 1.872987589815331e-05, |
| "loss": 1.3399, |
| "step": 9320 |
| }, |
| { |
| "epoch": 0.9841772151898734, |
| "grad_norm": 0.4618329107761383, |
| "learning_rate": 1.8471713001942538e-05, |
| "loss": 1.3527, |
| "step": 9330 |
| }, |
| { |
| "epoch": 0.9852320675105485, |
| "grad_norm": 0.44944530725479126, |
| "learning_rate": 1.8217108489211845e-05, |
| "loss": 1.35, |
| "step": 9340 |
| }, |
| { |
| "epoch": 0.9862869198312236, |
| "grad_norm": 0.444137841463089, |
| "learning_rate": 1.7966013313048696e-05, |
| "loss": 1.3594, |
| "step": 9350 |
| }, |
| { |
| "epoch": 0.9873417721518988, |
| "grad_norm": 0.457152783870697, |
| "learning_rate": 1.7718379102577752e-05, |
| "loss": 1.3519, |
| "step": 9360 |
| }, |
| { |
| "epoch": 0.9883966244725738, |
| "grad_norm": 0.5073128938674927, |
| "learning_rate": 1.7474158153642745e-05, |
| "loss": 1.3498, |
| "step": 9370 |
| }, |
| { |
| "epoch": 0.989451476793249, |
| "grad_norm": 0.49374303221702576, |
| "learning_rate": 1.7233303419616745e-05, |
| "loss": 1.3331, |
| "step": 9380 |
| }, |
| { |
| "epoch": 0.990506329113924, |
| "grad_norm": 0.4625350534915924, |
| "learning_rate": 1.699576850233916e-05, |
| "loss": 1.3397, |
| "step": 9390 |
| }, |
| { |
| "epoch": 0.9915611814345991, |
| "grad_norm": 0.4780026972293854, |
| "learning_rate": 1.6761507643177553e-05, |
| "loss": 1.3526, |
| "step": 9400 |
| }, |
| { |
| "epoch": 0.9926160337552743, |
| "grad_norm": 0.47076642513275146, |
| "learning_rate": 1.6530475714212752e-05, |
| "loss": 1.3287, |
| "step": 9410 |
| }, |
| { |
| "epoch": 0.9936708860759493, |
| "grad_norm": 0.444694459438324, |
| "learning_rate": 1.6302628209545423e-05, |
| "loss": 1.349, |
| "step": 9420 |
| }, |
| { |
| "epoch": 0.9947257383966245, |
| "grad_norm": 0.45865774154663086, |
| "learning_rate": 1.6077921236722464e-05, |
| "loss": 1.3475, |
| "step": 9430 |
| }, |
| { |
| "epoch": 0.9957805907172996, |
| "grad_norm": 0.4531383812427521, |
| "learning_rate": 1.5856311508281594e-05, |
| "loss": 1.3449, |
| "step": 9440 |
| }, |
| { |
| "epoch": 0.9968354430379747, |
| "grad_norm": 0.46567508578300476, |
| "learning_rate": 1.5637756333412454e-05, |
| "loss": 1.3555, |
| "step": 9450 |
| }, |
| { |
| "epoch": 0.9978902953586498, |
| "grad_norm": 0.4613323509693146, |
| "learning_rate": 1.542221360973268e-05, |
| "loss": 1.3376, |
| "step": 9460 |
| }, |
| { |
| "epoch": 0.9989451476793249, |
| "grad_norm": 0.46998530626296997, |
| "learning_rate": 1.5209641815177312e-05, |
| "loss": 1.3494, |
| "step": 9470 |
| }, |
| { |
| "epoch": 1.0, |
| "grad_norm": 1.2927305698394775, |
| "learning_rate": 1.5e-05, |
| "loss": 1.3386, |
| "step": 9480 |
| } |
| ], |
| "logging_steps": 10, |
| "max_steps": 9480, |
| "num_input_tokens_seen": 0, |
| "num_train_epochs": 1, |
| "save_steps": 1000, |
| "stateful_callbacks": { |
| "TrainerControl": { |
| "args": { |
| "should_epoch_stop": false, |
| "should_evaluate": false, |
| "should_log": false, |
| "should_save": true, |
| "should_training_stop": true |
| }, |
| "attributes": {} |
| } |
| }, |
| "total_flos": 1.832308198648013e+16, |
| "train_batch_size": 1024, |
| "trial_name": null, |
| "trial_params": null |
| } |