{
  "best_global_step": null,
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.1621621621621623,
  "eval_steps": 500,
  "global_step": 20000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.021621621621621623,
      "grad_norm": 43.9290657043457,
      "learning_rate": 1.9856576576576577e-05,
      "loss": 0.6499,
      "step": 200
    },
    {
      "epoch": 0.043243243243243246,
      "grad_norm": 13.308595657348633,
      "learning_rate": 1.9712432432432433e-05,
      "loss": 0.568,
      "step": 400
    },
    {
      "epoch": 0.06486486486486487,
      "grad_norm": 5.098663806915283,
      "learning_rate": 1.956828828828829e-05,
      "loss": 0.5371,
      "step": 600
    },
    {
      "epoch": 0.08648648648648649,
      "grad_norm": 9.126262664794922,
      "learning_rate": 1.9424144144144147e-05,
      "loss": 0.5181,
      "step": 800
    },
    {
      "epoch": 0.10810810810810811,
      "grad_norm": 15.761275291442871,
      "learning_rate": 1.9280000000000002e-05,
      "loss": 0.504,
      "step": 1000
    },
    {
      "epoch": 0.12972972972972974,
      "grad_norm": 33.255863189697266,
      "learning_rate": 1.9135855855855857e-05,
      "loss": 0.5017,
      "step": 1200
    },
    {
      "epoch": 0.15135135135135136,
      "grad_norm": 6.04021692276001,
      "learning_rate": 1.8991711711711712e-05,
      "loss": 0.468,
      "step": 1400
    },
    {
      "epoch": 0.17297297297297298,
      "grad_norm": 15.048690795898438,
      "learning_rate": 1.8847567567567568e-05,
      "loss": 0.5056,
      "step": 1600
    },
    {
      "epoch": 0.1945945945945946,
      "grad_norm": 22.5860538482666,
      "learning_rate": 1.8703423423423426e-05,
      "loss": 0.4601,
      "step": 1800
    },
    {
      "epoch": 0.21621621621621623,
      "grad_norm": 12.32684326171875,
      "learning_rate": 1.855927927927928e-05,
      "loss": 0.4401,
      "step": 2000
    },
    {
      "epoch": 0.23783783783783785,
      "grad_norm": 9.132744789123535,
      "learning_rate": 1.8415135135135137e-05,
      "loss": 0.4707,
      "step": 2200
    },
    {
      "epoch": 0.2594594594594595,
      "grad_norm": 10.921464920043945,
      "learning_rate": 1.8270990990990992e-05,
      "loss": 0.4676,
      "step": 2400
    },
    {
      "epoch": 0.2810810810810811,
      "grad_norm": 20.87602424621582,
      "learning_rate": 1.8126846846846847e-05,
      "loss": 0.469,
      "step": 2600
    },
    {
      "epoch": 0.3027027027027027,
      "grad_norm": 27.636375427246094,
      "learning_rate": 1.7982702702702702e-05,
      "loss": 0.4419,
      "step": 2800
    },
    {
      "epoch": 0.32432432432432434,
      "grad_norm": 8.047364234924316,
      "learning_rate": 1.783855855855856e-05,
      "loss": 0.4331,
      "step": 3000
    },
    {
      "epoch": 0.34594594594594597,
      "grad_norm": 14.144438743591309,
      "learning_rate": 1.7694414414414416e-05,
      "loss": 0.4444,
      "step": 3200
    },
    {
      "epoch": 0.3675675675675676,
      "grad_norm": 13.833978652954102,
      "learning_rate": 1.755027027027027e-05,
      "loss": 0.4269,
      "step": 3400
    },
    {
      "epoch": 0.3891891891891892,
      "grad_norm": 7.048819541931152,
      "learning_rate": 1.7406126126126127e-05,
      "loss": 0.4518,
      "step": 3600
    },
    {
      "epoch": 0.41081081081081083,
      "grad_norm": 16.350954055786133,
      "learning_rate": 1.7261981981981982e-05,
      "loss": 0.4306,
      "step": 3800
    },
    {
      "epoch": 0.43243243243243246,
      "grad_norm": 36.817256927490234,
      "learning_rate": 1.7117837837837837e-05,
      "loss": 0.3993,
      "step": 4000
    },
    {
      "epoch": 0.4540540540540541,
      "grad_norm": 6.8075151443481445,
      "learning_rate": 1.6973693693693696e-05,
      "loss": 0.4624,
      "step": 4200
    },
    {
      "epoch": 0.4756756756756757,
      "grad_norm": 22.440227508544922,
      "learning_rate": 1.682954954954955e-05,
      "loss": 0.4062,
      "step": 4400
    },
    {
      "epoch": 0.4972972972972973,
      "grad_norm": 27.693946838378906,
      "learning_rate": 1.6685405405405406e-05,
      "loss": 0.3986,
      "step": 4600
    },
    {
      "epoch": 0.518918918918919,
      "grad_norm": 5.609838485717773,
      "learning_rate": 1.6541261261261262e-05,
      "loss": 0.418,
      "step": 4800
    },
    {
      "epoch": 0.5405405405405406,
      "grad_norm": 9.340408325195312,
      "learning_rate": 1.6397117117117117e-05,
      "loss": 0.4292,
      "step": 5000
    },
    {
      "epoch": 0.5621621621621622,
      "grad_norm": 17.670503616333008,
      "learning_rate": 1.6252972972972972e-05,
      "loss": 0.4358,
      "step": 5200
    },
    {
      "epoch": 0.5837837837837838,
      "grad_norm": 22.615758895874023,
      "learning_rate": 1.610882882882883e-05,
      "loss": 0.4324,
      "step": 5400
    },
    {
      "epoch": 0.6054054054054054,
      "grad_norm": 25.58919906616211,
      "learning_rate": 1.5964684684684686e-05,
      "loss": 0.4338,
      "step": 5600
    },
    {
      "epoch": 0.6270270270270271,
      "grad_norm": 32.85356140136719,
      "learning_rate": 1.582054054054054e-05,
      "loss": 0.4226,
      "step": 5800
    },
    {
      "epoch": 0.6486486486486487,
      "grad_norm": 9.336725234985352,
      "learning_rate": 1.56763963963964e-05,
      "loss": 0.4391,
      "step": 6000
    },
    {
      "epoch": 0.6702702702702703,
      "grad_norm": 24.154613494873047,
      "learning_rate": 1.5532252252252252e-05,
      "loss": 0.3986,
      "step": 6200
    },
    {
      "epoch": 0.6918918918918919,
      "grad_norm": 18.917322158813477,
      "learning_rate": 1.5388108108108107e-05,
      "loss": 0.4107,
      "step": 6400
    },
    {
      "epoch": 0.7135135135135136,
      "grad_norm": 24.948320388793945,
      "learning_rate": 1.5243963963963966e-05,
      "loss": 0.4165,
      "step": 6600
    },
    {
      "epoch": 0.7351351351351352,
      "grad_norm": 14.24731731414795,
      "learning_rate": 1.5099819819819821e-05,
      "loss": 0.4294,
      "step": 6800
    },
    {
      "epoch": 0.7567567567567568,
      "grad_norm": 10.69514274597168,
      "learning_rate": 1.4955675675675676e-05,
      "loss": 0.4004,
      "step": 7000
    },
    {
      "epoch": 0.7783783783783784,
      "grad_norm": 5.057804107666016,
      "learning_rate": 1.4811531531531533e-05,
      "loss": 0.4235,
      "step": 7200
    },
    {
      "epoch": 0.8,
      "grad_norm": 19.741506576538086,
      "learning_rate": 1.4667387387387388e-05,
      "loss": 0.3834,
      "step": 7400
    },
    {
      "epoch": 0.8216216216216217,
      "grad_norm": 15.608851432800293,
      "learning_rate": 1.4523243243243244e-05,
      "loss": 0.4089,
      "step": 7600
    },
    {
      "epoch": 0.8432432432432433,
      "grad_norm": 8.335741996765137,
      "learning_rate": 1.43790990990991e-05,
      "loss": 0.4025,
      "step": 7800
    },
    {
      "epoch": 0.8648648648648649,
      "grad_norm": 8.1391019821167,
      "learning_rate": 1.4234954954954956e-05,
      "loss": 0.444,
      "step": 8000
    },
    {
      "epoch": 0.8864864864864865,
      "grad_norm": 28.06201934814453,
      "learning_rate": 1.4090810810810811e-05,
      "loss": 0.4113,
      "step": 8200
    },
    {
      "epoch": 0.9081081081081082,
      "grad_norm": 5.816445827484131,
      "learning_rate": 1.3946666666666668e-05,
      "loss": 0.4234,
      "step": 8400
    },
    {
      "epoch": 0.9297297297297298,
      "grad_norm": 22.189376831054688,
      "learning_rate": 1.3802522522522523e-05,
      "loss": 0.4147,
      "step": 8600
    },
    {
      "epoch": 0.9513513513513514,
      "grad_norm": 9.486166954040527,
      "learning_rate": 1.3658378378378379e-05,
      "loss": 0.4047,
      "step": 8800
    },
    {
      "epoch": 0.972972972972973,
      "grad_norm": 12.465399742126465,
      "learning_rate": 1.3514234234234236e-05,
      "loss": 0.4234,
      "step": 9000
    },
    {
      "epoch": 0.9945945945945946,
      "grad_norm": 18.71035385131836,
      "learning_rate": 1.337009009009009e-05,
      "loss": 0.4202,
      "step": 9200
    },
    {
      "epoch": 1.0162162162162163,
      "grad_norm": 18.901094436645508,
      "learning_rate": 1.3225945945945946e-05,
      "loss": 0.3622,
      "step": 9400
    },
    {
      "epoch": 1.037837837837838,
      "grad_norm": 15.318452835083008,
      "learning_rate": 1.3081801801801803e-05,
      "loss": 0.3792,
      "step": 9600
    },
    {
      "epoch": 1.0594594594594595,
      "grad_norm": 15.37670612335205,
      "learning_rate": 1.2937657657657658e-05,
      "loss": 0.3622,
      "step": 9800
    },
    {
      "epoch": 1.0810810810810811,
      "grad_norm": 16.18653106689453,
      "learning_rate": 1.2793513513513514e-05,
      "loss": 0.3763,
      "step": 10000
    },
    {
      "epoch": 1.1027027027027028,
      "grad_norm": 40.86188507080078,
      "learning_rate": 1.264936936936937e-05,
      "loss": 0.3574,
      "step": 10200
    },
    {
      "epoch": 1.1243243243243244,
      "grad_norm": 7.759640693664551,
      "learning_rate": 1.2505225225225226e-05,
      "loss": 0.3878,
      "step": 10400
    },
    {
      "epoch": 1.145945945945946,
      "grad_norm": 7.456469535827637,
      "learning_rate": 1.2361081081081081e-05,
      "loss": 0.3968,
      "step": 10600
    },
    {
      "epoch": 1.1675675675675676,
      "grad_norm": 18.799278259277344,
      "learning_rate": 1.2216936936936938e-05,
      "loss": 0.3725,
      "step": 10800
    },
    {
      "epoch": 1.1891891891891893,
      "grad_norm": 2.651585817337036,
      "learning_rate": 1.2072792792792793e-05,
      "loss": 0.3766,
      "step": 11000
    },
    {
      "epoch": 1.2108108108108109,
      "grad_norm": 11.979673385620117,
      "learning_rate": 1.1928648648648648e-05,
      "loss": 0.3632,
      "step": 11200
    },
    {
      "epoch": 1.2324324324324325,
      "grad_norm": 4.676784515380859,
      "learning_rate": 1.1784504504504505e-05,
      "loss": 0.3961,
      "step": 11400
    },
    {
      "epoch": 1.2540540540540541,
      "grad_norm": 9.577638626098633,
      "learning_rate": 1.164036036036036e-05,
      "loss": 0.3602,
      "step": 11600
    },
    {
      "epoch": 1.2756756756756757,
      "grad_norm": 9.80462646484375,
      "learning_rate": 1.1496216216216216e-05,
      "loss": 0.3479,
      "step": 11800
    },
    {
      "epoch": 1.2972972972972974,
      "grad_norm": 11.522858619689941,
      "learning_rate": 1.1352072072072073e-05,
      "loss": 0.407,
      "step": 12000
    },
    {
      "epoch": 1.318918918918919,
      "grad_norm": 3.6061971187591553,
      "learning_rate": 1.1207927927927928e-05,
      "loss": 0.3918,
      "step": 12200
    },
    {
      "epoch": 1.3405405405405406,
      "grad_norm": 15.432760238647461,
      "learning_rate": 1.1063783783783783e-05,
      "loss": 0.3726,
      "step": 12400
    },
    {
      "epoch": 1.3621621621621622,
      "grad_norm": 5.143666744232178,
      "learning_rate": 1.091963963963964e-05,
      "loss": 0.3504,
      "step": 12600
    },
    {
      "epoch": 1.3837837837837839,
      "grad_norm": 8.900772094726562,
      "learning_rate": 1.0775495495495495e-05,
      "loss": 0.357,
      "step": 12800
    },
    {
      "epoch": 1.4054054054054055,
      "grad_norm": 14.410099983215332,
      "learning_rate": 1.0631351351351354e-05,
      "loss": 0.3608,
      "step": 13000
    },
    {
      "epoch": 1.427027027027027,
      "grad_norm": 13.041916847229004,
      "learning_rate": 1.0487207207207208e-05,
      "loss": 0.3684,
      "step": 13200
    },
    {
      "epoch": 1.4486486486486487,
      "grad_norm": 16.890735626220703,
      "learning_rate": 1.0343063063063063e-05,
      "loss": 0.3835,
      "step": 13400
    },
    {
      "epoch": 1.4702702702702704,
      "grad_norm": 7.087887763977051,
      "learning_rate": 1.0198918918918922e-05,
      "loss": 0.4041,
      "step": 13600
    },
    {
      "epoch": 1.491891891891892,
      "grad_norm": 7.32821798324585,
      "learning_rate": 1.0054774774774775e-05,
      "loss": 0.3563,
      "step": 13800
    },
    {
      "epoch": 1.5135135135135136,
      "grad_norm": 22.392210006713867,
      "learning_rate": 9.91063063063063e-06,
      "loss": 0.3807,
      "step": 14000
    },
    {
      "epoch": 1.535135135135135,
      "grad_norm": 14.552515029907227,
      "learning_rate": 9.766486486486487e-06,
      "loss": 0.3539,
      "step": 14200
    },
    {
      "epoch": 1.5567567567567568,
      "grad_norm": 9.686298370361328,
      "learning_rate": 9.622342342342343e-06,
      "loss": 0.3971,
      "step": 14400
    },
    {
      "epoch": 1.5783783783783782,
      "grad_norm": 13.134624481201172,
      "learning_rate": 9.478198198198198e-06,
      "loss": 0.3957,
      "step": 14600
    },
    {
      "epoch": 1.6,
      "grad_norm": 35.519474029541016,
      "learning_rate": 9.334054054054055e-06,
      "loss": 0.3655,
      "step": 14800
    },
    {
      "epoch": 1.6216216216216215,
      "grad_norm": 14.94369125366211,
      "learning_rate": 9.189909909909912e-06,
      "loss": 0.3503,
      "step": 15000
    },
    {
      "epoch": 1.6432432432432433,
      "grad_norm": 3.6573474407196045,
      "learning_rate": 9.045765765765765e-06,
      "loss": 0.3883,
      "step": 15200
    },
    {
      "epoch": 1.6648648648648647,
      "grad_norm": 18.653778076171875,
      "learning_rate": 8.901621621621622e-06,
      "loss": 0.3425,
      "step": 15400
    },
    {
      "epoch": 1.6864864864864866,
      "grad_norm": 17.726455688476562,
      "learning_rate": 8.75747747747748e-06,
      "loss": 0.3816,
      "step": 15600
    },
    {
      "epoch": 1.708108108108108,
      "grad_norm": 15.91839599609375,
      "learning_rate": 8.613333333333333e-06,
      "loss": 0.3573,
      "step": 15800
    },
    {
      "epoch": 1.7297297297297298,
      "grad_norm": 20.901330947875977,
      "learning_rate": 8.46918918918919e-06,
      "loss": 0.3515,
      "step": 16000
    },
    {
      "epoch": 1.7513513513513512,
      "grad_norm": 12.939945220947266,
      "learning_rate": 8.325045045045047e-06,
      "loss": 0.3559,
      "step": 16200
    },
    {
      "epoch": 1.772972972972973,
      "grad_norm": 9.921557426452637,
      "learning_rate": 8.180900900900902e-06,
      "loss": 0.3844,
      "step": 16400
    },
    {
      "epoch": 1.7945945945945945,
      "grad_norm": 5.254030704498291,
      "learning_rate": 8.036756756756757e-06,
      "loss": 0.3567,
      "step": 16600
    },
    {
      "epoch": 1.8162162162162163,
      "grad_norm": 29.623857498168945,
      "learning_rate": 7.892612612612614e-06,
      "loss": 0.3364,
      "step": 16800
    },
    {
      "epoch": 1.8378378378378377,
      "grad_norm": 8.430907249450684,
      "learning_rate": 7.74846846846847e-06,
      "loss": 0.3742,
      "step": 17000
    },
    {
      "epoch": 1.8594594594594596,
      "grad_norm": 14.618759155273438,
      "learning_rate": 7.604324324324325e-06,
      "loss": 0.3624,
      "step": 17200
    },
    {
      "epoch": 1.881081081081081,
      "grad_norm": 20.811887741088867,
      "learning_rate": 7.460180180180181e-06,
      "loss": 0.3585,
      "step": 17400
    },
    {
      "epoch": 1.9027027027027028,
      "grad_norm": 29.710891723632812,
      "learning_rate": 7.316036036036037e-06,
      "loss": 0.3648,
      "step": 17600
    },
    {
      "epoch": 1.9243243243243242,
      "grad_norm": 7.511297702789307,
      "learning_rate": 7.171891891891892e-06,
      "loss": 0.364,
      "step": 17800
    },
    {
      "epoch": 1.945945945945946,
      "grad_norm": 5.749581813812256,
      "learning_rate": 7.027747747747748e-06,
      "loss": 0.3467,
      "step": 18000
    },
    {
      "epoch": 1.9675675675675675,
      "grad_norm": 13.77943229675293,
      "learning_rate": 6.883603603603605e-06,
      "loss": 0.3667,
      "step": 18200
    },
    {
      "epoch": 1.9891891891891893,
      "grad_norm": 17.615646362304688,
      "learning_rate": 6.7394594594594595e-06,
      "loss": 0.376,
      "step": 18400
    },
    {
      "epoch": 2.0108108108108107,
      "grad_norm": 5.2522501945495605,
      "learning_rate": 6.595315315315316e-06,
      "loss": 0.3473,
      "step": 18600
    },
    {
      "epoch": 2.0324324324324325,
      "grad_norm": 31.85638999938965,
      "learning_rate": 6.4511711711711725e-06,
      "loss": 0.2966,
      "step": 18800
    },
    {
      "epoch": 2.054054054054054,
      "grad_norm": 9.69465446472168,
      "learning_rate": 6.307027027027027e-06,
      "loss": 0.35,
      "step": 19000
    },
    {
      "epoch": 2.075675675675676,
      "grad_norm": 18.219141006469727,
      "learning_rate": 6.162882882882884e-06,
      "loss": 0.3532,
      "step": 19200
    },
    {
      "epoch": 2.097297297297297,
      "grad_norm": 3.134681224822998,
      "learning_rate": 6.01873873873874e-06,
      "loss": 0.3331,
      "step": 19400
    },
    {
      "epoch": 2.118918918918919,
      "grad_norm": 19.817319869995117,
      "learning_rate": 5.874594594594594e-06,
      "loss": 0.3099,
      "step": 19600
    },
    {
      "epoch": 2.1405405405405404,
      "grad_norm": 25.478727340698242,
      "learning_rate": 5.730450450450451e-06,
      "loss": 0.318,
      "step": 19800
    },
    {
      "epoch": 2.1621621621621623,
      "grad_norm": 15.869763374328613,
      "learning_rate": 5.5863063063063074e-06,
      "loss": 0.3395,
      "step": 20000
    }
  ],
  "logging_steps": 200,
  "max_steps": 27750,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 1000,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 4.4606231612227584e+17,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}