| { |
| "best_global_step": null, |
| "best_metric": null, |
| "best_model_checkpoint": null, |
| "epoch": 24.137931034482758, |
| "eval_steps": 50, |
| "global_step": 700, |
| "is_hyper_param_search": false, |
| "is_local_process_zero": true, |
| "is_world_process_zero": true, |
| "log_history": [ |
| { |
| "epoch": 0.034482758620689655, |
| "grad_norm": 4.955975532531738, |
| "learning_rate": 0.0, |
| "loss": 1.5752, |
| "step": 1 |
| }, |
| { |
| "epoch": 0.06896551724137931, |
| "grad_norm": 3.4605445861816406, |
| "learning_rate": 1.2500000000000002e-07, |
| "loss": 1.6145, |
| "step": 2 |
| }, |
| { |
| "epoch": 0.10344827586206896, |
| "grad_norm": 3.622607946395874, |
| "learning_rate": 2.5000000000000004e-07, |
| "loss": 1.6033, |
| "step": 3 |
| }, |
| { |
| "epoch": 0.13793103448275862, |
| "grad_norm": 6.3884453773498535, |
| "learning_rate": 3.75e-07, |
| "loss": 1.6219, |
| "step": 4 |
| }, |
| { |
| "epoch": 0.1724137931034483, |
| "grad_norm": 3.212383270263672, |
| "learning_rate": 5.000000000000001e-07, |
| "loss": 1.5958, |
| "step": 5 |
| }, |
| { |
| "epoch": 0.20689655172413793, |
| "grad_norm": 6.6238508224487305, |
| "learning_rate": 6.25e-07, |
| "loss": 1.6268, |
| "step": 6 |
| }, |
| { |
| "epoch": 0.2413793103448276, |
| "grad_norm": 7.565281391143799, |
| "learning_rate": 7.5e-07, |
| "loss": 1.6133, |
| "step": 7 |
| }, |
| { |
| "epoch": 0.27586206896551724, |
| "grad_norm": 11.01657772064209, |
| "learning_rate": 8.750000000000001e-07, |
| "loss": 1.6061, |
| "step": 8 |
| }, |
| { |
| "epoch": 0.3103448275862069, |
| "grad_norm": 8.828049659729004, |
| "learning_rate": 1.0000000000000002e-06, |
| "loss": 1.6352, |
| "step": 9 |
| }, |
| { |
| "epoch": 0.3448275862068966, |
| "grad_norm": 3.2700514793395996, |
| "learning_rate": 1.125e-06, |
| "loss": 1.5985, |
| "step": 10 |
| }, |
| { |
| "epoch": 0.3793103448275862, |
| "grad_norm": 7.4744086265563965, |
| "learning_rate": 1.25e-06, |
| "loss": 1.6075, |
| "step": 11 |
| }, |
| { |
| "epoch": 0.41379310344827586, |
| "grad_norm": 5.462789535522461, |
| "learning_rate": 1.3750000000000002e-06, |
| "loss": 1.6174, |
| "step": 12 |
| }, |
| { |
| "epoch": 0.4482758620689655, |
| "grad_norm": 3.593034267425537, |
| "learning_rate": 1.5e-06, |
| "loss": 1.6083, |
| "step": 13 |
| }, |
| { |
| "epoch": 0.4827586206896552, |
| "grad_norm": 4.305792331695557, |
| "learning_rate": 1.6250000000000001e-06, |
| "loss": 1.5997, |
| "step": 14 |
| }, |
| { |
| "epoch": 0.5172413793103449, |
| "grad_norm": 8.656575202941895, |
| "learning_rate": 1.7500000000000002e-06, |
| "loss": 1.6283, |
| "step": 15 |
| }, |
| { |
| "epoch": 0.5517241379310345, |
| "grad_norm": 6.6580491065979, |
| "learning_rate": 1.875e-06, |
| "loss": 1.5906, |
| "step": 16 |
| }, |
| { |
| "epoch": 0.5862068965517241, |
| "grad_norm": 1.8648769855499268, |
| "learning_rate": 2.0000000000000003e-06, |
| "loss": 1.5667, |
| "step": 17 |
| }, |
| { |
| "epoch": 0.6206896551724138, |
| "grad_norm": 8.883339881896973, |
| "learning_rate": 2.1250000000000004e-06, |
| "loss": 1.5999, |
| "step": 18 |
| }, |
| { |
| "epoch": 0.6551724137931034, |
| "grad_norm": 3.959343194961548, |
| "learning_rate": 2.25e-06, |
| "loss": 1.5751, |
| "step": 19 |
| }, |
| { |
| "epoch": 0.6896551724137931, |
| "grad_norm": 5.278771877288818, |
| "learning_rate": 2.375e-06, |
| "loss": 1.6216, |
| "step": 20 |
| }, |
| { |
| "epoch": 0.7241379310344828, |
| "grad_norm": 3.716949224472046, |
| "learning_rate": 2.5e-06, |
| "loss": 1.613, |
| "step": 21 |
| }, |
| { |
| "epoch": 0.7586206896551724, |
| "grad_norm": 1.6308438777923584, |
| "learning_rate": 2.625e-06, |
| "loss": 1.5822, |
| "step": 22 |
| }, |
| { |
| "epoch": 0.7931034482758621, |
| "grad_norm": 1.8407272100448608, |
| "learning_rate": 2.7500000000000004e-06, |
| "loss": 1.5369, |
| "step": 23 |
| }, |
| { |
| "epoch": 0.8275862068965517, |
| "grad_norm": 7.528758525848389, |
| "learning_rate": 2.8750000000000004e-06, |
| "loss": 1.584, |
| "step": 24 |
| }, |
| { |
| "epoch": 0.8620689655172413, |
| "grad_norm": 3.860356569290161, |
| "learning_rate": 3e-06, |
| "loss": 1.58, |
| "step": 25 |
| }, |
| { |
| "epoch": 0.896551724137931, |
| "grad_norm": 1.7416785955429077, |
| "learning_rate": 3.125e-06, |
| "loss": 1.5714, |
| "step": 26 |
| }, |
| { |
| "epoch": 0.9310344827586207, |
| "grad_norm": 4.024614334106445, |
| "learning_rate": 3.2500000000000002e-06, |
| "loss": 1.5905, |
| "step": 27 |
| }, |
| { |
| "epoch": 0.9655172413793104, |
| "grad_norm": 5.027279376983643, |
| "learning_rate": 3.3750000000000003e-06, |
| "loss": 1.5906, |
| "step": 28 |
| }, |
| { |
| "epoch": 1.0, |
| "grad_norm": 7.354084014892578, |
| "learning_rate": 3.5000000000000004e-06, |
| "loss": 1.5781, |
| "step": 29 |
| }, |
| { |
| "epoch": 1.0344827586206897, |
| "grad_norm": 1.950278401374817, |
| "learning_rate": 3.625e-06, |
| "loss": 1.5747, |
| "step": 30 |
| }, |
| { |
| "epoch": 1.0689655172413792, |
| "grad_norm": 3.3933162689208984, |
| "learning_rate": 3.75e-06, |
| "loss": 1.5642, |
| "step": 31 |
| }, |
| { |
| "epoch": 1.103448275862069, |
| "grad_norm": 4.551718235015869, |
| "learning_rate": 3.875e-06, |
| "loss": 1.5872, |
| "step": 32 |
| }, |
| { |
| "epoch": 1.1379310344827587, |
| "grad_norm": 3.4449899196624756, |
| "learning_rate": 4.000000000000001e-06, |
| "loss": 1.5755, |
| "step": 33 |
| }, |
| { |
| "epoch": 1.1724137931034484, |
| "grad_norm": 2.4862890243530273, |
| "learning_rate": 4.125e-06, |
| "loss": 1.5661, |
| "step": 34 |
| }, |
| { |
| "epoch": 1.206896551724138, |
| "grad_norm": 3.3560233116149902, |
| "learning_rate": 4.250000000000001e-06, |
| "loss": 1.5813, |
| "step": 35 |
| }, |
| { |
| "epoch": 1.2413793103448276, |
| "grad_norm": 1.7421871423721313, |
| "learning_rate": 4.375e-06, |
| "loss": 1.5653, |
| "step": 36 |
| }, |
| { |
| "epoch": 1.2758620689655173, |
| "grad_norm": 1.8098750114440918, |
| "learning_rate": 4.5e-06, |
| "loss": 1.5686, |
| "step": 37 |
| }, |
| { |
| "epoch": 1.3103448275862069, |
| "grad_norm": 5.115236759185791, |
| "learning_rate": 4.625e-06, |
| "loss": 1.5678, |
| "step": 38 |
| }, |
| { |
| "epoch": 1.3448275862068966, |
| "grad_norm": 3.044386625289917, |
| "learning_rate": 4.75e-06, |
| "loss": 1.5634, |
| "step": 39 |
| }, |
| { |
| "epoch": 1.3793103448275863, |
| "grad_norm": 5.8161139488220215, |
| "learning_rate": 4.875000000000001e-06, |
| "loss": 1.5846, |
| "step": 40 |
| }, |
| { |
| "epoch": 1.4137931034482758, |
| "grad_norm": 1.6947190761566162, |
| "learning_rate": 5e-06, |
| "loss": 1.5512, |
| "step": 41 |
| }, |
| { |
| "epoch": 1.4482758620689655, |
| "grad_norm": 3.2046549320220947, |
| "learning_rate": 5.125e-06, |
| "loss": 1.5579, |
| "step": 42 |
| }, |
| { |
| "epoch": 1.4827586206896552, |
| "grad_norm": 2.9457404613494873, |
| "learning_rate": 5.25e-06, |
| "loss": 1.5661, |
| "step": 43 |
| }, |
| { |
| "epoch": 1.5172413793103448, |
| "grad_norm": 1.604246735572815, |
| "learning_rate": 5.375e-06, |
| "loss": 1.5584, |
| "step": 44 |
| }, |
| { |
| "epoch": 1.5517241379310345, |
| "grad_norm": 1.433769702911377, |
| "learning_rate": 5.500000000000001e-06, |
| "loss": 1.5398, |
| "step": 45 |
| }, |
| { |
| "epoch": 1.5862068965517242, |
| "grad_norm": 1.4531933069229126, |
| "learning_rate": 5.625e-06, |
| "loss": 1.5459, |
| "step": 46 |
| }, |
| { |
| "epoch": 1.6206896551724137, |
| "grad_norm": 3.0887269973754883, |
| "learning_rate": 5.750000000000001e-06, |
| "loss": 1.5475, |
| "step": 47 |
| }, |
| { |
| "epoch": 1.6551724137931034, |
| "grad_norm": 1.7311522960662842, |
| "learning_rate": 5.875e-06, |
| "loss": 1.5395, |
| "step": 48 |
| }, |
| { |
| "epoch": 1.6896551724137931, |
| "grad_norm": 1.57200026512146, |
| "learning_rate": 6e-06, |
| "loss": 1.5355, |
| "step": 49 |
| }, |
| { |
| "epoch": 1.7241379310344827, |
| "grad_norm": 6.282898426055908, |
| "learning_rate": 6.125e-06, |
| "loss": 1.5479, |
| "step": 50 |
| }, |
| { |
| "epoch": 1.7586206896551724, |
| "grad_norm": 5.877266883850098, |
| "learning_rate": 6.25e-06, |
| "loss": 1.535, |
| "step": 51 |
| }, |
| { |
| "epoch": 1.793103448275862, |
| "grad_norm": 2.238494396209717, |
| "learning_rate": 6.375000000000001e-06, |
| "loss": 1.5439, |
| "step": 52 |
| }, |
| { |
| "epoch": 1.8275862068965516, |
| "grad_norm": 3.176295518875122, |
| "learning_rate": 6.5000000000000004e-06, |
| "loss": 1.5411, |
| "step": 53 |
| }, |
| { |
| "epoch": 1.8620689655172413, |
| "grad_norm": 1.5491082668304443, |
| "learning_rate": 6.625000000000001e-06, |
| "loss": 1.5279, |
| "step": 54 |
| }, |
| { |
| "epoch": 1.896551724137931, |
| "grad_norm": 2.956325054168701, |
| "learning_rate": 6.750000000000001e-06, |
| "loss": 1.5381, |
| "step": 55 |
| }, |
| { |
| "epoch": 1.9310344827586206, |
| "grad_norm": 5.207773685455322, |
| "learning_rate": 6.875000000000001e-06, |
| "loss": 1.5499, |
| "step": 56 |
| }, |
| { |
| "epoch": 1.9655172413793105, |
| "grad_norm": 1.360984444618225, |
| "learning_rate": 7.000000000000001e-06, |
| "loss": 1.5294, |
| "step": 57 |
| }, |
| { |
| "epoch": 2.0, |
| "grad_norm": 1.9358643293380737, |
| "learning_rate": 7.1249999999999995e-06, |
| "loss": 1.5492, |
| "step": 58 |
| }, |
| { |
| "epoch": 2.0344827586206895, |
| "grad_norm": 1.5760064125061035, |
| "learning_rate": 7.25e-06, |
| "loss": 1.5279, |
| "step": 59 |
| }, |
| { |
| "epoch": 2.0689655172413794, |
| "grad_norm": 2.5182933807373047, |
| "learning_rate": 7.375e-06, |
| "loss": 1.5272, |
| "step": 60 |
| }, |
| { |
| "epoch": 2.103448275862069, |
| "grad_norm": 1.9873909950256348, |
| "learning_rate": 7.5e-06, |
| "loss": 1.5298, |
| "step": 61 |
| }, |
| { |
| "epoch": 2.1379310344827585, |
| "grad_norm": 1.4847761392593384, |
| "learning_rate": 7.625e-06, |
| "loss": 1.5129, |
| "step": 62 |
| }, |
| { |
| "epoch": 2.1724137931034484, |
| "grad_norm": 1.4767472743988037, |
| "learning_rate": 7.75e-06, |
| "loss": 1.5255, |
| "step": 63 |
| }, |
| { |
| "epoch": 2.206896551724138, |
| "grad_norm": 4.426845550537109, |
| "learning_rate": 7.875e-06, |
| "loss": 1.5277, |
| "step": 64 |
| }, |
| { |
| "epoch": 2.2413793103448274, |
| "grad_norm": 2.6555216312408447, |
| "learning_rate": 8.000000000000001e-06, |
| "loss": 1.5278, |
| "step": 65 |
| }, |
| { |
| "epoch": 2.2758620689655173, |
| "grad_norm": 2.148672580718994, |
| "learning_rate": 8.125000000000001e-06, |
| "loss": 1.5202, |
| "step": 66 |
| }, |
| { |
| "epoch": 2.310344827586207, |
| "grad_norm": 2.529113531112671, |
| "learning_rate": 8.25e-06, |
| "loss": 1.533, |
| "step": 67 |
| }, |
| { |
| "epoch": 2.344827586206897, |
| "grad_norm": 2.6022942066192627, |
| "learning_rate": 8.375e-06, |
| "loss": 1.5221, |
| "step": 68 |
| }, |
| { |
| "epoch": 2.3793103448275863, |
| "grad_norm": 4.527515888214111, |
| "learning_rate": 8.500000000000002e-06, |
| "loss": 1.5143, |
| "step": 69 |
| }, |
| { |
| "epoch": 2.413793103448276, |
| "grad_norm": 3.554746627807617, |
| "learning_rate": 8.625e-06, |
| "loss": 1.5105, |
| "step": 70 |
| }, |
| { |
| "epoch": 2.4482758620689653, |
| "grad_norm": 2.6532673835754395, |
| "learning_rate": 8.75e-06, |
| "loss": 1.4933, |
| "step": 71 |
| }, |
| { |
| "epoch": 2.4827586206896552, |
| "grad_norm": 3.799637794494629, |
| "learning_rate": 8.875e-06, |
| "loss": 1.5067, |
| "step": 72 |
| }, |
| { |
| "epoch": 2.5172413793103448, |
| "grad_norm": 3.7445719242095947, |
| "learning_rate": 9e-06, |
| "loss": 1.5193, |
| "step": 73 |
| }, |
| { |
| "epoch": 2.5517241379310347, |
| "grad_norm": 1.5109844207763672, |
| "learning_rate": 9.125e-06, |
| "loss": 1.5219, |
| "step": 74 |
| }, |
| { |
| "epoch": 2.586206896551724, |
| "grad_norm": 1.2224950790405273, |
| "learning_rate": 9.25e-06, |
| "loss": 1.4971, |
| "step": 75 |
| }, |
| { |
| "epoch": 2.6206896551724137, |
| "grad_norm": 5.30598783493042, |
| "learning_rate": 9.375000000000001e-06, |
| "loss": 1.4901, |
| "step": 76 |
| }, |
| { |
| "epoch": 2.655172413793103, |
| "grad_norm": 1.4342738389968872, |
| "learning_rate": 9.5e-06, |
| "loss": 1.5118, |
| "step": 77 |
| }, |
| { |
| "epoch": 2.689655172413793, |
| "grad_norm": 4.92336368560791, |
| "learning_rate": 9.625e-06, |
| "loss": 1.4859, |
| "step": 78 |
| }, |
| { |
| "epoch": 2.7241379310344827, |
| "grad_norm": 10.244889259338379, |
| "learning_rate": 9.750000000000002e-06, |
| "loss": 1.5147, |
| "step": 79 |
| }, |
| { |
| "epoch": 2.7586206896551726, |
| "grad_norm": 5.509783744812012, |
| "learning_rate": 9.875000000000001e-06, |
| "loss": 1.5108, |
| "step": 80 |
| }, |
| { |
| "epoch": 2.793103448275862, |
| "grad_norm": 1.6187537908554077, |
| "learning_rate": 1e-05, |
| "loss": 1.4753, |
| "step": 81 |
| }, |
| { |
| "epoch": 2.8275862068965516, |
| "grad_norm": 1.2228670120239258, |
| "learning_rate": 1.0125e-05, |
| "loss": 1.5034, |
| "step": 82 |
| }, |
| { |
| "epoch": 2.862068965517241, |
| "grad_norm": 2.5575168132781982, |
| "learning_rate": 1.025e-05, |
| "loss": 1.515, |
| "step": 83 |
| }, |
| { |
| "epoch": 2.896551724137931, |
| "grad_norm": 2.906822919845581, |
| "learning_rate": 1.0375e-05, |
| "loss": 1.5243, |
| "step": 84 |
| }, |
| { |
| "epoch": 2.9310344827586206, |
| "grad_norm": 2.380941152572632, |
| "learning_rate": 1.05e-05, |
| "loss": 1.4967, |
| "step": 85 |
| }, |
| { |
| "epoch": 2.9655172413793105, |
| "grad_norm": 2.5720791816711426, |
| "learning_rate": 1.0625e-05, |
| "loss": 1.5065, |
| "step": 86 |
| }, |
| { |
| "epoch": 3.0, |
| "grad_norm": 2.6703929901123047, |
| "learning_rate": 1.075e-05, |
| "loss": 1.4984, |
| "step": 87 |
| }, |
| { |
| "epoch": 3.0344827586206895, |
| "grad_norm": 3.5005722045898438, |
| "learning_rate": 1.0875e-05, |
| "loss": 1.5122, |
| "step": 88 |
| }, |
| { |
| "epoch": 3.0689655172413794, |
| "grad_norm": 4.101701736450195, |
| "learning_rate": 1.1000000000000001e-05, |
| "loss": 1.5057, |
| "step": 89 |
| }, |
| { |
| "epoch": 3.103448275862069, |
| "grad_norm": 1.8032540082931519, |
| "learning_rate": 1.1125000000000001e-05, |
| "loss": 1.4953, |
| "step": 90 |
| }, |
| { |
| "epoch": 3.1379310344827585, |
| "grad_norm": 1.8906192779541016, |
| "learning_rate": 1.125e-05, |
| "loss": 1.4885, |
| "step": 91 |
| }, |
| { |
| "epoch": 3.1724137931034484, |
| "grad_norm": 4.843202114105225, |
| "learning_rate": 1.1375e-05, |
| "loss": 1.4819, |
| "step": 92 |
| }, |
| { |
| "epoch": 3.206896551724138, |
| "grad_norm": 2.6482224464416504, |
| "learning_rate": 1.1500000000000002e-05, |
| "loss": 1.4766, |
| "step": 93 |
| }, |
| { |
| "epoch": 3.2413793103448274, |
| "grad_norm": 2.58847975730896, |
| "learning_rate": 1.1625000000000001e-05, |
| "loss": 1.4883, |
| "step": 94 |
| }, |
| { |
| "epoch": 3.2758620689655173, |
| "grad_norm": 2.6886796951293945, |
| "learning_rate": 1.175e-05, |
| "loss": 1.4912, |
| "step": 95 |
| }, |
| { |
| "epoch": 3.310344827586207, |
| "grad_norm": 4.70560359954834, |
| "learning_rate": 1.1875e-05, |
| "loss": 1.4835, |
| "step": 96 |
| }, |
| { |
| "epoch": 3.344827586206897, |
| "grad_norm": 1.3306467533111572, |
| "learning_rate": 1.2e-05, |
| "loss": 1.4993, |
| "step": 97 |
| }, |
| { |
| "epoch": 3.3793103448275863, |
| "grad_norm": 6.546566009521484, |
| "learning_rate": 1.2125e-05, |
| "loss": 1.4564, |
| "step": 98 |
| }, |
| { |
| "epoch": 3.413793103448276, |
| "grad_norm": 2.113272190093994, |
| "learning_rate": 1.225e-05, |
| "loss": 1.4483, |
| "step": 99 |
| }, |
| { |
| "epoch": 3.4482758620689653, |
| "grad_norm": 3.0866124629974365, |
| "learning_rate": 1.2375000000000001e-05, |
| "loss": 1.4824, |
| "step": 100 |
| }, |
| { |
| "epoch": 3.4827586206896552, |
| "grad_norm": 1.7086529731750488, |
| "learning_rate": 1.25e-05, |
| "loss": 1.4845, |
| "step": 101 |
| }, |
| { |
| "epoch": 3.5172413793103448, |
| "grad_norm": 1.3498625755310059, |
| "learning_rate": 1.2625e-05, |
| "loss": 1.4632, |
| "step": 102 |
| }, |
| { |
| "epoch": 3.5517241379310347, |
| "grad_norm": 2.0302608013153076, |
| "learning_rate": 1.2750000000000002e-05, |
| "loss": 1.4623, |
| "step": 103 |
| }, |
| { |
| "epoch": 3.586206896551724, |
| "grad_norm": 1.4253138303756714, |
| "learning_rate": 1.2875000000000001e-05, |
| "loss": 1.4459, |
| "step": 104 |
| }, |
| { |
| "epoch": 3.6206896551724137, |
| "grad_norm": 2.8165297508239746, |
| "learning_rate": 1.3000000000000001e-05, |
| "loss": 1.448, |
| "step": 105 |
| }, |
| { |
| "epoch": 3.655172413793103, |
| "grad_norm": 2.0474743843078613, |
| "learning_rate": 1.3125e-05, |
| "loss": 1.4324, |
| "step": 106 |
| }, |
| { |
| "epoch": 3.689655172413793, |
| "grad_norm": 1.4272180795669556, |
| "learning_rate": 1.3250000000000002e-05, |
| "loss": 1.4328, |
| "step": 107 |
| }, |
| { |
| "epoch": 3.7241379310344827, |
| "grad_norm": 5.992876052856445, |
| "learning_rate": 1.3375000000000002e-05, |
| "loss": 1.4314, |
| "step": 108 |
| }, |
| { |
| "epoch": 3.7586206896551726, |
| "grad_norm": 3.8637657165527344, |
| "learning_rate": 1.3500000000000001e-05, |
| "loss": 1.4809, |
| "step": 109 |
| }, |
| { |
| "epoch": 3.793103448275862, |
| "grad_norm": 4.207685947418213, |
| "learning_rate": 1.3625e-05, |
| "loss": 1.4811, |
| "step": 110 |
| }, |
| { |
| "epoch": 3.8275862068965516, |
| "grad_norm": 1.545461654663086, |
| "learning_rate": 1.3750000000000002e-05, |
| "loss": 1.4942, |
| "step": 111 |
| }, |
| { |
| "epoch": 3.862068965517241, |
| "grad_norm": 1.6922414302825928, |
| "learning_rate": 1.3875000000000002e-05, |
| "loss": 1.4439, |
| "step": 112 |
| }, |
| { |
| "epoch": 3.896551724137931, |
| "grad_norm": 1.6125301122665405, |
| "learning_rate": 1.4000000000000001e-05, |
| "loss": 1.4745, |
| "step": 113 |
| }, |
| { |
| "epoch": 3.9310344827586206, |
| "grad_norm": 3.392108678817749, |
| "learning_rate": 1.4125e-05, |
| "loss": 1.4744, |
| "step": 114 |
| }, |
| { |
| "epoch": 3.9655172413793105, |
| "grad_norm": 2.7309863567352295, |
| "learning_rate": 1.4249999999999999e-05, |
| "loss": 1.4439, |
| "step": 115 |
| }, |
| { |
| "epoch": 4.0, |
| "grad_norm": 6.303447246551514, |
| "learning_rate": 1.4374999999999999e-05, |
| "loss": 1.461, |
| "step": 116 |
| }, |
| { |
| "epoch": 4.0344827586206895, |
| "grad_norm": 1.5163921117782593, |
| "learning_rate": 1.45e-05, |
| "loss": 1.4461, |
| "step": 117 |
| }, |
| { |
| "epoch": 4.068965517241379, |
| "grad_norm": 2.4618422985076904, |
| "learning_rate": 1.4625e-05, |
| "loss": 1.4361, |
| "step": 118 |
| }, |
| { |
| "epoch": 4.103448275862069, |
| "grad_norm": 2.786285877227783, |
| "learning_rate": 1.475e-05, |
| "loss": 1.4183, |
| "step": 119 |
| }, |
| { |
| "epoch": 4.137931034482759, |
| "grad_norm": 5.21203088760376, |
| "learning_rate": 1.4875e-05, |
| "loss": 1.4407, |
| "step": 120 |
| }, |
| { |
| "epoch": 4.172413793103448, |
| "grad_norm": 1.920630931854248, |
| "learning_rate": 1.5e-05, |
| "loss": 1.3928, |
| "step": 121 |
| }, |
| { |
| "epoch": 4.206896551724138, |
| "grad_norm": 2.916647434234619, |
| "learning_rate": 1.5125e-05, |
| "loss": 1.409, |
| "step": 122 |
| }, |
| { |
| "epoch": 4.241379310344827, |
| "grad_norm": 4.050491809844971, |
| "learning_rate": 1.525e-05, |
| "loss": 1.4555, |
| "step": 123 |
| }, |
| { |
| "epoch": 4.275862068965517, |
| "grad_norm": 3.479999303817749, |
| "learning_rate": 1.5375e-05, |
| "loss": 1.396, |
| "step": 124 |
| }, |
| { |
| "epoch": 4.310344827586207, |
| "grad_norm": 5.414281368255615, |
| "learning_rate": 1.55e-05, |
| "loss": 1.3829, |
| "step": 125 |
| }, |
| { |
| "epoch": 4.344827586206897, |
| "grad_norm": 4.517972946166992, |
| "learning_rate": 1.5625e-05, |
| "loss": 1.3182, |
| "step": 126 |
| }, |
| { |
| "epoch": 4.379310344827586, |
| "grad_norm": 10.26574993133545, |
| "learning_rate": 1.575e-05, |
| "loss": 1.4205, |
| "step": 127 |
| }, |
| { |
| "epoch": 4.413793103448276, |
| "grad_norm": 2.3254988193511963, |
| "learning_rate": 1.5875e-05, |
| "loss": 1.2768, |
| "step": 128 |
| }, |
| { |
| "epoch": 4.448275862068965, |
| "grad_norm": 5.00213098526001, |
| "learning_rate": 1.6000000000000003e-05, |
| "loss": 1.3419, |
| "step": 129 |
| }, |
| { |
| "epoch": 4.482758620689655, |
| "grad_norm": 11.136003494262695, |
| "learning_rate": 1.6125000000000002e-05, |
| "loss": 1.2928, |
| "step": 130 |
| }, |
| { |
| "epoch": 4.517241379310345, |
| "grad_norm": 6.113260269165039, |
| "learning_rate": 1.6250000000000002e-05, |
| "loss": 1.2989, |
| "step": 131 |
| }, |
| { |
| "epoch": 4.551724137931035, |
| "grad_norm": 5.029887676239014, |
| "learning_rate": 1.6375e-05, |
| "loss": 1.3367, |
| "step": 132 |
| }, |
| { |
| "epoch": 4.586206896551724, |
| "grad_norm": 15.060640335083008, |
| "learning_rate": 1.65e-05, |
| "loss": 1.4661, |
| "step": 133 |
| }, |
| { |
| "epoch": 4.620689655172414, |
| "grad_norm": 18.83147430419922, |
| "learning_rate": 1.6625e-05, |
| "loss": 1.5725, |
| "step": 134 |
| }, |
| { |
| "epoch": 4.655172413793103, |
| "grad_norm": 8.334407806396484, |
| "learning_rate": 1.675e-05, |
| "loss": 1.3389, |
| "step": 135 |
| }, |
| { |
| "epoch": 4.689655172413794, |
| "grad_norm": 5.727024555206299, |
| "learning_rate": 1.6875000000000004e-05, |
| "loss": 1.3305, |
| "step": 136 |
| }, |
| { |
| "epoch": 4.724137931034483, |
| "grad_norm": 6.0667009353637695, |
| "learning_rate": 1.7000000000000003e-05, |
| "loss": 1.2844, |
| "step": 137 |
| }, |
| { |
| "epoch": 4.758620689655173, |
| "grad_norm": 8.634448051452637, |
| "learning_rate": 1.7125000000000003e-05, |
| "loss": 1.2871, |
| "step": 138 |
| }, |
| { |
| "epoch": 4.793103448275862, |
| "grad_norm": 6.196165561676025, |
| "learning_rate": 1.725e-05, |
| "loss": 1.2925, |
| "step": 139 |
| }, |
| { |
| "epoch": 4.827586206896552, |
| "grad_norm": 4.187297821044922, |
| "learning_rate": 1.7375e-05, |
| "loss": 1.2614, |
| "step": 140 |
| }, |
| { |
| "epoch": 4.862068965517241, |
| "grad_norm": 4.917987823486328, |
| "learning_rate": 1.75e-05, |
| "loss": 1.2843, |
| "step": 141 |
| }, |
| { |
| "epoch": 4.896551724137931, |
| "grad_norm": 3.2452642917633057, |
| "learning_rate": 1.7625e-05, |
| "loss": 1.2741, |
| "step": 142 |
| }, |
| { |
| "epoch": 4.931034482758621, |
| "grad_norm": 6.927144527435303, |
| "learning_rate": 1.775e-05, |
| "loss": 1.2967, |
| "step": 143 |
| }, |
| { |
| "epoch": 4.9655172413793105, |
| "grad_norm": 4.876591682434082, |
| "learning_rate": 1.7875e-05, |
| "loss": 1.1916, |
| "step": 144 |
| }, |
| { |
| "epoch": 5.0, |
| "grad_norm": 3.315114736557007, |
| "learning_rate": 1.8e-05, |
| "loss": 1.1966, |
| "step": 145 |
| }, |
| { |
| "epoch": 5.0344827586206895, |
| "grad_norm": 4.320967674255371, |
| "learning_rate": 1.8125e-05, |
| "loss": 1.2583, |
| "step": 146 |
| }, |
| { |
| "epoch": 5.068965517241379, |
| "grad_norm": 3.7428784370422363, |
| "learning_rate": 1.825e-05, |
| "loss": 1.2523, |
| "step": 147 |
| }, |
| { |
| "epoch": 5.103448275862069, |
| "grad_norm": 4.975188255310059, |
| "learning_rate": 1.8375e-05, |
| "loss": 1.2224, |
| "step": 148 |
| }, |
| { |
| "epoch": 5.137931034482759, |
| "grad_norm": 5.255687713623047, |
| "learning_rate": 1.85e-05, |
| "loss": 1.1885, |
| "step": 149 |
| }, |
| { |
| "epoch": 5.172413793103448, |
| "grad_norm": 7.562132835388184, |
| "learning_rate": 1.8625000000000002e-05, |
| "loss": 1.1543, |
| "step": 150 |
| }, |
| { |
| "epoch": 5.206896551724138, |
| "grad_norm": 4.356451988220215, |
| "learning_rate": 1.8750000000000002e-05, |
| "loss": 1.2019, |
| "step": 151 |
| }, |
| { |
| "epoch": 5.241379310344827, |
| "grad_norm": 8.989137649536133, |
| "learning_rate": 1.8875e-05, |
| "loss": 1.378, |
| "step": 152 |
| }, |
| { |
| "epoch": 5.275862068965517, |
| "grad_norm": 2.729421615600586, |
| "learning_rate": 1.9e-05, |
| "loss": 1.1547, |
| "step": 153 |
| }, |
| { |
| "epoch": 5.310344827586207, |
| "grad_norm": 2.701746702194214, |
| "learning_rate": 1.9125e-05, |
| "loss": 1.184, |
| "step": 154 |
| }, |
| { |
| "epoch": 5.344827586206897, |
| "grad_norm": 4.610611915588379, |
| "learning_rate": 1.925e-05, |
| "loss": 1.2343, |
| "step": 155 |
| }, |
| { |
| "epoch": 5.379310344827586, |
| "grad_norm": 5.182763576507568, |
| "learning_rate": 1.9375e-05, |
| "loss": 1.189, |
| "step": 156 |
| }, |
| { |
| "epoch": 5.413793103448276, |
| "grad_norm": 5.774525165557861, |
| "learning_rate": 1.9500000000000003e-05, |
| "loss": 1.2143, |
| "step": 157 |
| }, |
| { |
| "epoch": 5.448275862068965, |
| "grad_norm": 3.1842565536499023, |
| "learning_rate": 1.9625000000000003e-05, |
| "loss": 1.1613, |
| "step": 158 |
| }, |
| { |
| "epoch": 5.482758620689655, |
| "grad_norm": 5.885169982910156, |
| "learning_rate": 1.9750000000000002e-05, |
| "loss": 1.24, |
| "step": 159 |
| }, |
| { |
| "epoch": 5.517241379310345, |
| "grad_norm": 8.06775951385498, |
| "learning_rate": 1.9875000000000002e-05, |
| "loss": 1.1973, |
| "step": 160 |
| }, |
| { |
| "epoch": 5.551724137931035, |
| "grad_norm": 3.398749589920044, |
| "learning_rate": 2e-05, |
| "loss": 1.1485, |
| "step": 161 |
| }, |
| { |
| "epoch": 5.586206896551724, |
| "grad_norm": 3.8745975494384766, |
| "learning_rate": 2.0125e-05, |
| "loss": 1.1218, |
| "step": 162 |
| }, |
| { |
| "epoch": 5.620689655172414, |
| "grad_norm": 6.615961074829102, |
| "learning_rate": 2.025e-05, |
| "loss": 1.1056, |
| "step": 163 |
| }, |
| { |
| "epoch": 5.655172413793103, |
| "grad_norm": 4.613058090209961, |
| "learning_rate": 2.0375e-05, |
| "loss": 1.0493, |
| "step": 164 |
| }, |
| { |
| "epoch": 5.689655172413794, |
| "grad_norm": 4.730791091918945, |
| "learning_rate": 2.05e-05, |
| "loss": 1.0686, |
| "step": 165 |
| }, |
| { |
| "epoch": 5.724137931034483, |
| "grad_norm": 6.086816310882568, |
| "learning_rate": 2.0625e-05, |
| "loss": 1.084, |
| "step": 166 |
| }, |
| { |
| "epoch": 5.758620689655173, |
| "grad_norm": 5.453216552734375, |
| "learning_rate": 2.075e-05, |
| "loss": 1.0637, |
| "step": 167 |
| }, |
| { |
| "epoch": 5.793103448275862, |
| "grad_norm": 4.013542652130127, |
| "learning_rate": 2.0875e-05, |
| "loss": 1.0962, |
| "step": 168 |
| }, |
| { |
| "epoch": 5.827586206896552, |
| "grad_norm": 3.824021816253662, |
| "learning_rate": 2.1e-05, |
| "loss": 1.0632, |
| "step": 169 |
| }, |
| { |
| "epoch": 5.862068965517241, |
| "grad_norm": 3.00637149810791, |
| "learning_rate": 2.1125000000000002e-05, |
| "loss": 1.0575, |
| "step": 170 |
| }, |
| { |
| "epoch": 5.896551724137931, |
| "grad_norm": 4.230837821960449, |
| "learning_rate": 2.125e-05, |
| "loss": 1.0095, |
| "step": 171 |
| }, |
| { |
| "epoch": 5.931034482758621, |
| "grad_norm": 4.6836371421813965, |
| "learning_rate": 2.1375e-05, |
| "loss": 1.1063, |
| "step": 172 |
| }, |
| { |
| "epoch": 5.9655172413793105, |
| "grad_norm": 3.750352382659912, |
| "learning_rate": 2.15e-05, |
| "loss": 1.1189, |
| "step": 173 |
| }, |
| { |
| "epoch": 6.0, |
| "grad_norm": 4.330487251281738, |
| "learning_rate": 2.1625e-05, |
| "loss": 1.0574, |
| "step": 174 |
| }, |
| { |
| "epoch": 6.0344827586206895, |
| "grad_norm": 5.845319747924805, |
| "learning_rate": 2.175e-05, |
| "loss": 1.08, |
| "step": 175 |
| }, |
| { |
| "epoch": 6.068965517241379, |
| "grad_norm": 3.027203321456909, |
| "learning_rate": 2.1875e-05, |
| "loss": 0.9774, |
| "step": 176 |
| }, |
| { |
| "epoch": 6.103448275862069, |
| "grad_norm": 3.455016851425171, |
| "learning_rate": 2.2000000000000003e-05, |
| "loss": 1.0531, |
| "step": 177 |
| }, |
| { |
| "epoch": 6.137931034482759, |
| "grad_norm": 3.488523483276367, |
| "learning_rate": 2.2125000000000002e-05, |
| "loss": 0.9497, |
| "step": 178 |
| }, |
| { |
| "epoch": 6.172413793103448, |
| "grad_norm": 3.371523141860962, |
| "learning_rate": 2.2250000000000002e-05, |
| "loss": 1.0177, |
| "step": 179 |
| }, |
| { |
| "epoch": 6.206896551724138, |
| "grad_norm": 5.724728107452393, |
| "learning_rate": 2.2375000000000002e-05, |
| "loss": 1.1333, |
| "step": 180 |
| }, |
| { |
| "epoch": 6.241379310344827, |
| "grad_norm": 3.2824714183807373, |
| "learning_rate": 2.25e-05, |
| "loss": 1.0047, |
| "step": 181 |
| }, |
| { |
| "epoch": 6.275862068965517, |
| "grad_norm": 7.460841178894043, |
| "learning_rate": 2.2625e-05, |
| "loss": 1.1217, |
| "step": 182 |
| }, |
| { |
| "epoch": 6.310344827586207, |
| "grad_norm": 4.130681037902832, |
| "learning_rate": 2.275e-05, |
| "loss": 1.066, |
| "step": 183 |
| }, |
| { |
| "epoch": 6.344827586206897, |
| "grad_norm": 3.287168025970459, |
| "learning_rate": 2.2875e-05, |
| "loss": 0.9983, |
| "step": 184 |
| }, |
| { |
| "epoch": 6.379310344827586, |
| "grad_norm": 4.445807456970215, |
| "learning_rate": 2.3000000000000003e-05, |
| "loss": 1.1108, |
| "step": 185 |
| }, |
| { |
| "epoch": 6.413793103448276, |
| "grad_norm": 4.80116081237793, |
| "learning_rate": 2.3125000000000003e-05, |
| "loss": 1.0195, |
| "step": 186 |
| }, |
| { |
| "epoch": 6.448275862068965, |
| "grad_norm": 3.579498529434204, |
| "learning_rate": 2.3250000000000003e-05, |
| "loss": 1.0323, |
| "step": 187 |
| }, |
| { |
| "epoch": 6.482758620689655, |
| "grad_norm": 3.3527798652648926, |
| "learning_rate": 2.3375000000000002e-05, |
| "loss": 0.976, |
| "step": 188 |
| }, |
| { |
| "epoch": 6.517241379310345, |
| "grad_norm": 2.929718017578125, |
| "learning_rate": 2.35e-05, |
| "loss": 0.9717, |
| "step": 189 |
| }, |
| { |
| "epoch": 6.551724137931035, |
| "grad_norm": 4.6633758544921875, |
| "learning_rate": 2.3624999999999998e-05, |
| "loss": 1.1071, |
| "step": 190 |
| }, |
| { |
| "epoch": 6.586206896551724, |
| "grad_norm": 4.373714923858643, |
| "learning_rate": 2.375e-05, |
| "loss": 0.9349, |
| "step": 191 |
| }, |
| { |
| "epoch": 6.620689655172414, |
| "grad_norm": 4.665045738220215, |
| "learning_rate": 2.3875e-05, |
| "loss": 0.9615, |
| "step": 192 |
| }, |
| { |
| "epoch": 6.655172413793103, |
| "grad_norm": 7.3372907638549805, |
| "learning_rate": 2.4e-05, |
| "loss": 0.9949, |
| "step": 193 |
| }, |
| { |
| "epoch": 6.689655172413794, |
| "grad_norm": 4.059077739715576, |
| "learning_rate": 2.4125e-05, |
| "loss": 0.9306, |
| "step": 194 |
| }, |
| { |
| "epoch": 6.724137931034483, |
| "grad_norm": 5.908609390258789, |
| "learning_rate": 2.425e-05, |
| "loss": 0.7885, |
| "step": 195 |
| }, |
| { |
| "epoch": 6.758620689655173, |
| "grad_norm": 6.472893238067627, |
| "learning_rate": 2.4375e-05, |
| "loss": 1.1013, |
| "step": 196 |
| }, |
| { |
| "epoch": 6.793103448275862, |
| "grad_norm": 4.9746575355529785, |
| "learning_rate": 2.45e-05, |
| "loss": 1.0222, |
| "step": 197 |
| }, |
| { |
| "epoch": 6.827586206896552, |
| "grad_norm": 5.179764270782471, |
| "learning_rate": 2.4625000000000002e-05, |
| "loss": 0.9958, |
| "step": 198 |
| }, |
| { |
| "epoch": 6.862068965517241, |
| "grad_norm": 7.71660041809082, |
| "learning_rate": 2.4750000000000002e-05, |
| "loss": 1.05, |
| "step": 199 |
| }, |
| { |
| "epoch": 6.896551724137931, |
| "grad_norm": 6.319756507873535, |
| "learning_rate": 2.4875e-05, |
| "loss": 1.0519, |
| "step": 200 |
| }, |
| { |
| "epoch": 6.931034482758621, |
| "grad_norm": 4.988058567047119, |
| "learning_rate": 2.5e-05, |
| "loss": 0.998, |
| "step": 201 |
| }, |
| { |
| "epoch": 6.9655172413793105, |
| "grad_norm": 5.239701747894287, |
| "learning_rate": 2.4999980961416097e-05, |
| "loss": 1.0276, |
| "step": 202 |
| }, |
| { |
| "epoch": 7.0, |
| "grad_norm": 9.979659080505371, |
| "learning_rate": 2.499992384572238e-05, |
| "loss": 1.005, |
| "step": 203 |
| }, |
| { |
| "epoch": 7.0344827586206895, |
| "grad_norm": 5.444819927215576, |
| "learning_rate": 2.4999828653092835e-05, |
| "loss": 1.0034, |
| "step": 204 |
| }, |
| { |
| "epoch": 7.068965517241379, |
| "grad_norm": 5.544732570648193, |
| "learning_rate": 2.4999695383817435e-05, |
| "loss": 1.0074, |
| "step": 205 |
| }, |
| { |
| "epoch": 7.103448275862069, |
| "grad_norm": 7.6183762550354, |
| "learning_rate": 2.499952403830214e-05, |
| "loss": 0.9798, |
| "step": 206 |
| }, |
| { |
| "epoch": 7.137931034482759, |
| "grad_norm": 4.80704927444458, |
| "learning_rate": 2.4999314617068904e-05, |
| "loss": 0.9429, |
| "step": 207 |
| }, |
| { |
| "epoch": 7.172413793103448, |
| "grad_norm": 5.107629776000977, |
| "learning_rate": 2.4999067120755652e-05, |
| "loss": 1.0054, |
| "step": 208 |
| }, |
| { |
| "epoch": 7.206896551724138, |
| "grad_norm": 5.443073272705078, |
| "learning_rate": 2.4998781550116305e-05, |
| "loss": 1.0603, |
| "step": 209 |
| }, |
| { |
| "epoch": 7.241379310344827, |
| "grad_norm": 2.814347267150879, |
| "learning_rate": 2.499845790602076e-05, |
| "loss": 1.0818, |
| "step": 210 |
| }, |
| { |
| "epoch": 7.275862068965517, |
| "grad_norm": 4.260725021362305, |
| "learning_rate": 2.4998096189454893e-05, |
| "loss": 0.9751, |
| "step": 211 |
| }, |
| { |
| "epoch": 7.310344827586207, |
| "grad_norm": 3.0248708724975586, |
| "learning_rate": 2.4997696401520555e-05, |
| "loss": 0.9156, |
| "step": 212 |
| }, |
| { |
| "epoch": 7.344827586206897, |
| "grad_norm": 6.022355079650879, |
| "learning_rate": 2.499725854343557e-05, |
| "loss": 0.9621, |
| "step": 213 |
| }, |
| { |
| "epoch": 7.379310344827586, |
| "grad_norm": 2.480070114135742, |
| "learning_rate": 2.4996782616533732e-05, |
| "loss": 0.9835, |
| "step": 214 |
| }, |
| { |
| "epoch": 7.413793103448276, |
| "grad_norm": 2.478194236755371, |
| "learning_rate": 2.499626862226479e-05, |
| "loss": 0.8196, |
| "step": 215 |
| }, |
| { |
| "epoch": 7.448275862068965, |
| "grad_norm": 2.9012348651885986, |
| "learning_rate": 2.4995716562194465e-05, |
| "loss": 0.9136, |
| "step": 216 |
| }, |
| { |
| "epoch": 7.482758620689655, |
| "grad_norm": 2.776332378387451, |
| "learning_rate": 2.499512643800443e-05, |
| "loss": 0.874, |
| "step": 217 |
| }, |
| { |
| "epoch": 7.517241379310345, |
| "grad_norm": 3.0665347576141357, |
| "learning_rate": 2.4994498251492302e-05, |
| "loss": 0.8381, |
| "step": 218 |
| }, |
| { |
| "epoch": 7.551724137931035, |
| "grad_norm": 3.6337053775787354, |
| "learning_rate": 2.4993832004571646e-05, |
| "loss": 0.9145, |
| "step": 219 |
| }, |
| { |
| "epoch": 7.586206896551724, |
| "grad_norm": 3.960299015045166, |
| "learning_rate": 2.4993127699271966e-05, |
| "loss": 0.8541, |
| "step": 220 |
| }, |
| { |
| "epoch": 7.620689655172414, |
| "grad_norm": 4.1685967445373535, |
| "learning_rate": 2.49923853377387e-05, |
| "loss": 0.9458, |
| "step": 221 |
| }, |
| { |
| "epoch": 7.655172413793103, |
| "grad_norm": 3.217474937438965, |
| "learning_rate": 2.4991604922233204e-05, |
| "loss": 0.842, |
| "step": 222 |
| }, |
| { |
| "epoch": 7.689655172413794, |
| "grad_norm": 3.9884676933288574, |
| "learning_rate": 2.4990786455132764e-05, |
| "loss": 0.8746, |
| "step": 223 |
| }, |
| { |
| "epoch": 7.724137931034483, |
| "grad_norm": 6.753876209259033, |
| "learning_rate": 2.4989929938930576e-05, |
| "loss": 0.8353, |
| "step": 224 |
| }, |
| { |
| "epoch": 7.758620689655173, |
| "grad_norm": 4.734915256500244, |
| "learning_rate": 2.498903537623573e-05, |
| "loss": 0.8488, |
| "step": 225 |
| }, |
| { |
| "epoch": 7.793103448275862, |
| "grad_norm": 2.8307461738586426, |
| "learning_rate": 2.4988102769773227e-05, |
| "loss": 0.8886, |
| "step": 226 |
| }, |
| { |
| "epoch": 7.827586206896552, |
| "grad_norm": 4.811133861541748, |
| "learning_rate": 2.4987132122383936e-05, |
| "loss": 0.8869, |
| "step": 227 |
| }, |
| { |
| "epoch": 7.862068965517241, |
| "grad_norm": 6.6021728515625, |
| "learning_rate": 2.4986123437024627e-05, |
| "loss": 1.0391, |
| "step": 228 |
| }, |
| { |
| "epoch": 7.896551724137931, |
| "grad_norm": 8.46661376953125, |
| "learning_rate": 2.4985076716767927e-05, |
| "loss": 0.9857, |
| "step": 229 |
| }, |
| { |
| "epoch": 7.931034482758621, |
| "grad_norm": 7.210445880889893, |
| "learning_rate": 2.4983991964802327e-05, |
| "loss": 0.8793, |
| "step": 230 |
| }, |
| { |
| "epoch": 7.9655172413793105, |
| "grad_norm": 4.752058506011963, |
| "learning_rate": 2.4982869184432174e-05, |
| "loss": 0.8634, |
| "step": 231 |
| }, |
| { |
| "epoch": 8.0, |
| "grad_norm": 8.460209846496582, |
| "learning_rate": 2.498170837907765e-05, |
| "loss": 0.9404, |
| "step": 232 |
| }, |
| { |
| "epoch": 8.03448275862069, |
| "grad_norm": 4.385580539703369, |
| "learning_rate": 2.4980509552274765e-05, |
| "loss": 0.8939, |
| "step": 233 |
| }, |
| { |
| "epoch": 8.068965517241379, |
| "grad_norm": 4.554064750671387, |
| "learning_rate": 2.4979272707675356e-05, |
| "loss": 0.8854, |
| "step": 234 |
| }, |
| { |
| "epoch": 8.10344827586207, |
| "grad_norm": 5.013895034790039, |
| "learning_rate": 2.497799784904707e-05, |
| "loss": 0.8524, |
| "step": 235 |
| }, |
| { |
| "epoch": 8.137931034482758, |
| "grad_norm": 2.559938669204712, |
| "learning_rate": 2.4976684980273338e-05, |
| "loss": 0.8763, |
| "step": 236 |
| }, |
| { |
| "epoch": 8.172413793103448, |
| "grad_norm": 2.896611452102661, |
| "learning_rate": 2.4975334105353396e-05, |
| "loss": 0.8625, |
| "step": 237 |
| }, |
| { |
| "epoch": 8.206896551724139, |
| "grad_norm": 3.1764471530914307, |
| "learning_rate": 2.497394522840224e-05, |
| "loss": 0.8599, |
| "step": 238 |
| }, |
| { |
| "epoch": 8.241379310344827, |
| "grad_norm": 2.926391363143921, |
| "learning_rate": 2.4972518353650626e-05, |
| "loss": 0.8583, |
| "step": 239 |
| }, |
| { |
| "epoch": 8.275862068965518, |
| "grad_norm": 3.4869906902313232, |
| "learning_rate": 2.497105348544507e-05, |
| "loss": 0.8417, |
| "step": 240 |
| }, |
| { |
| "epoch": 8.310344827586206, |
| "grad_norm": 5.803765296936035, |
| "learning_rate": 2.4969550628247805e-05, |
| "loss": 0.8914, |
| "step": 241 |
| }, |
| { |
| "epoch": 8.344827586206897, |
| "grad_norm": 3.910322904586792, |
| "learning_rate": 2.49680097866368e-05, |
| "loss": 0.8737, |
| "step": 242 |
| }, |
| { |
| "epoch": 8.379310344827585, |
| "grad_norm": 3.2522225379943848, |
| "learning_rate": 2.4966430965305727e-05, |
| "loss": 0.8915, |
| "step": 243 |
| }, |
| { |
| "epoch": 8.413793103448276, |
| "grad_norm": 3.7491557598114014, |
| "learning_rate": 2.4964814169063948e-05, |
| "loss": 0.9399, |
| "step": 244 |
| }, |
| { |
| "epoch": 8.448275862068966, |
| "grad_norm": 2.59580397605896, |
| "learning_rate": 2.4963159402836506e-05, |
| "loss": 0.9132, |
| "step": 245 |
| }, |
| { |
| "epoch": 8.482758620689655, |
| "grad_norm": 2.840510368347168, |
| "learning_rate": 2.49614666716641e-05, |
| "loss": 0.8135, |
| "step": 246 |
| }, |
| { |
| "epoch": 8.517241379310345, |
| "grad_norm": 6.813579082489014, |
| "learning_rate": 2.495973598070309e-05, |
| "loss": 0.8306, |
| "step": 247 |
| }, |
| { |
| "epoch": 8.551724137931034, |
| "grad_norm": 5.595628261566162, |
| "learning_rate": 2.4957967335225456e-05, |
| "loss": 0.9031, |
| "step": 248 |
| }, |
| { |
| "epoch": 8.586206896551724, |
| "grad_norm": 3.344548463821411, |
| "learning_rate": 2.4956160740618806e-05, |
| "loss": 0.8205, |
| "step": 249 |
| }, |
| { |
| "epoch": 8.620689655172415, |
| "grad_norm": 2.66579270362854, |
| "learning_rate": 2.495431620238633e-05, |
| "loss": 0.8087, |
| "step": 250 |
| }, |
| { |
| "epoch": 8.655172413793103, |
| "grad_norm": 3.5464556217193604, |
| "learning_rate": 2.495243372614682e-05, |
| "loss": 0.7636, |
| "step": 251 |
| }, |
| { |
| "epoch": 8.689655172413794, |
| "grad_norm": 5.313910484313965, |
| "learning_rate": 2.495051331763462e-05, |
| "loss": 0.8267, |
| "step": 252 |
| }, |
| { |
| "epoch": 8.724137931034482, |
| "grad_norm": 5.469503879547119, |
| "learning_rate": 2.494855498269963e-05, |
| "loss": 0.7766, |
| "step": 253 |
| }, |
| { |
| "epoch": 8.758620689655173, |
| "grad_norm": 4.186700820922852, |
| "learning_rate": 2.4946558727307277e-05, |
| "loss": 0.8521, |
| "step": 254 |
| }, |
| { |
| "epoch": 8.793103448275861, |
| "grad_norm": 3.4536843299865723, |
| "learning_rate": 2.4944524557538503e-05, |
| "loss": 0.7532, |
| "step": 255 |
| }, |
| { |
| "epoch": 8.827586206896552, |
| "grad_norm": 3.1947126388549805, |
| "learning_rate": 2.4942452479589735e-05, |
| "loss": 0.716, |
| "step": 256 |
| }, |
| { |
| "epoch": 8.862068965517242, |
| "grad_norm": 4.5636138916015625, |
| "learning_rate": 2.494034249977289e-05, |
| "loss": 0.9678, |
| "step": 257 |
| }, |
| { |
| "epoch": 8.89655172413793, |
| "grad_norm": 4.235171794891357, |
| "learning_rate": 2.4938194624515333e-05, |
| "loss": 0.9419, |
| "step": 258 |
| }, |
| { |
| "epoch": 8.931034482758621, |
| "grad_norm": 4.488134860992432, |
| "learning_rate": 2.4936008860359854e-05, |
| "loss": 0.9512, |
| "step": 259 |
| }, |
| { |
| "epoch": 8.96551724137931, |
| "grad_norm": 5.502348899841309, |
| "learning_rate": 2.4933785213964677e-05, |
| "loss": 0.8107, |
| "step": 260 |
| }, |
| { |
| "epoch": 9.0, |
| "grad_norm": 3.3806426525115967, |
| "learning_rate": 2.4931523692103418e-05, |
| "loss": 0.8593, |
| "step": 261 |
| }, |
| { |
| "epoch": 9.03448275862069, |
| "grad_norm": 5.362583160400391, |
| "learning_rate": 2.492922430166506e-05, |
| "loss": 0.9019, |
| "step": 262 |
| }, |
| { |
| "epoch": 9.068965517241379, |
| "grad_norm": 6.098759651184082, |
| "learning_rate": 2.4926887049653943e-05, |
| "loss": 0.9845, |
| "step": 263 |
| }, |
| { |
| "epoch": 9.10344827586207, |
| "grad_norm": 3.2926342487335205, |
| "learning_rate": 2.492451194318975e-05, |
| "loss": 0.8941, |
| "step": 264 |
| }, |
| { |
| "epoch": 9.137931034482758, |
| "grad_norm": 3.6239383220672607, |
| "learning_rate": 2.4922098989507454e-05, |
| "loss": 0.9016, |
| "step": 265 |
| }, |
| { |
| "epoch": 9.172413793103448, |
| "grad_norm": 2.945098400115967, |
| "learning_rate": 2.4919648195957344e-05, |
| "loss": 0.8249, |
| "step": 266 |
| }, |
| { |
| "epoch": 9.206896551724139, |
| "grad_norm": 5.63911247253418, |
| "learning_rate": 2.4917159570004954e-05, |
| "loss": 0.8786, |
| "step": 267 |
| }, |
| { |
| "epoch": 9.241379310344827, |
| "grad_norm": 4.707492351531982, |
| "learning_rate": 2.491463311923108e-05, |
| "loss": 0.9624, |
| "step": 268 |
| }, |
| { |
| "epoch": 9.275862068965518, |
| "grad_norm": 3.5989744663238525, |
| "learning_rate": 2.491206885133171e-05, |
| "loss": 0.7872, |
| "step": 269 |
| }, |
| { |
| "epoch": 9.310344827586206, |
| "grad_norm": 3.1965887546539307, |
| "learning_rate": 2.490946677411807e-05, |
| "loss": 0.8428, |
| "step": 270 |
| }, |
| { |
| "epoch": 9.344827586206897, |
| "grad_norm": 3.745168924331665, |
| "learning_rate": 2.4906826895516528e-05, |
| "loss": 0.8836, |
| "step": 271 |
| }, |
| { |
| "epoch": 9.379310344827585, |
| "grad_norm": 4.135939598083496, |
| "learning_rate": 2.490414922356861e-05, |
| "loss": 0.8569, |
| "step": 272 |
| }, |
| { |
| "epoch": 9.413793103448276, |
| "grad_norm": 3.0350985527038574, |
| "learning_rate": 2.4901433766430975e-05, |
| "loss": 0.8092, |
| "step": 273 |
| }, |
| { |
| "epoch": 9.448275862068966, |
| "grad_norm": 2.5709338188171387, |
| "learning_rate": 2.4898680532375374e-05, |
| "loss": 0.8073, |
| "step": 274 |
| }, |
| { |
| "epoch": 9.482758620689655, |
| "grad_norm": 3.360161542892456, |
| "learning_rate": 2.489588952978863e-05, |
| "loss": 0.7794, |
| "step": 275 |
| }, |
| { |
| "epoch": 9.517241379310345, |
| "grad_norm": 3.463740587234497, |
| "learning_rate": 2.4893060767172632e-05, |
| "loss": 0.8216, |
| "step": 276 |
| }, |
| { |
| "epoch": 9.551724137931034, |
| "grad_norm": 4.866174221038818, |
| "learning_rate": 2.489019425314427e-05, |
| "loss": 0.821, |
| "step": 277 |
| }, |
| { |
| "epoch": 9.586206896551724, |
| "grad_norm": 4.300073146820068, |
| "learning_rate": 2.4887289996435452e-05, |
| "loss": 0.8998, |
| "step": 278 |
| }, |
| { |
| "epoch": 9.620689655172415, |
| "grad_norm": 7.732598304748535, |
| "learning_rate": 2.4884348005893045e-05, |
| "loss": 0.8335, |
| "step": 279 |
| }, |
| { |
| "epoch": 9.655172413793103, |
| "grad_norm": 6.1583027839660645, |
| "learning_rate": 2.488136829047886e-05, |
| "loss": 0.8181, |
| "step": 280 |
| }, |
| { |
| "epoch": 9.689655172413794, |
| "grad_norm": 3.6511595249176025, |
| "learning_rate": 2.487835085926963e-05, |
| "loss": 0.8059, |
| "step": 281 |
| }, |
| { |
| "epoch": 9.724137931034482, |
| "grad_norm": 3.68314790725708, |
| "learning_rate": 2.487529572145697e-05, |
| "loss": 0.8394, |
| "step": 282 |
| }, |
| { |
| "epoch": 9.758620689655173, |
| "grad_norm": 3.6726181507110596, |
| "learning_rate": 2.4872202886347362e-05, |
| "loss": 0.7453, |
| "step": 283 |
| }, |
| { |
| "epoch": 9.793103448275861, |
| "grad_norm": 2.719984531402588, |
| "learning_rate": 2.486907236336212e-05, |
| "loss": 0.8272, |
| "step": 284 |
| }, |
| { |
| "epoch": 9.827586206896552, |
| "grad_norm": 3.7963995933532715, |
| "learning_rate": 2.4865904162037358e-05, |
| "loss": 0.8991, |
| "step": 285 |
| }, |
| { |
| "epoch": 9.862068965517242, |
| "grad_norm": 2.754535436630249, |
| "learning_rate": 2.4862698292023963e-05, |
| "loss": 0.7064, |
| "step": 286 |
| }, |
| { |
| "epoch": 9.89655172413793, |
| "grad_norm": 3.0903878211975098, |
| "learning_rate": 2.4859454763087577e-05, |
| "loss": 0.735, |
| "step": 287 |
| }, |
| { |
| "epoch": 9.931034482758621, |
| "grad_norm": 4.279144287109375, |
| "learning_rate": 2.4856173585108544e-05, |
| "loss": 0.8074, |
| "step": 288 |
| }, |
| { |
| "epoch": 9.96551724137931, |
| "grad_norm": 4.23798942565918, |
| "learning_rate": 2.4852854768081912e-05, |
| "loss": 0.7802, |
| "step": 289 |
| }, |
| { |
| "epoch": 10.0, |
| "grad_norm": 3.5624747276306152, |
| "learning_rate": 2.4849498322117364e-05, |
| "loss": 0.8029, |
| "step": 290 |
| }, |
| { |
| "epoch": 10.03448275862069, |
| "grad_norm": 2.764233350753784, |
| "learning_rate": 2.4846104257439222e-05, |
| "loss": 0.7497, |
| "step": 291 |
| }, |
| { |
| "epoch": 10.068965517241379, |
| "grad_norm": 4.359961032867432, |
| "learning_rate": 2.4842672584386396e-05, |
| "loss": 0.7794, |
| "step": 292 |
| }, |
| { |
| "epoch": 10.10344827586207, |
| "grad_norm": 5.876353740692139, |
| "learning_rate": 2.483920331341235e-05, |
| "loss": 0.8483, |
| "step": 293 |
| }, |
| { |
| "epoch": 10.137931034482758, |
| "grad_norm": 4.351016998291016, |
| "learning_rate": 2.4835696455085093e-05, |
| "loss": 0.7924, |
| "step": 294 |
| }, |
| { |
| "epoch": 10.172413793103448, |
| "grad_norm": 3.5949246883392334, |
| "learning_rate": 2.483215202008712e-05, |
| "loss": 0.8293, |
| "step": 295 |
| }, |
| { |
| "epoch": 10.206896551724139, |
| "grad_norm": 3.226196765899658, |
| "learning_rate": 2.4828570019215396e-05, |
| "loss": 0.7934, |
| "step": 296 |
| }, |
| { |
| "epoch": 10.241379310344827, |
| "grad_norm": 5.124762535095215, |
| "learning_rate": 2.4824950463381314e-05, |
| "loss": 0.8055, |
| "step": 297 |
| }, |
| { |
| "epoch": 10.275862068965518, |
| "grad_norm": 3.658576011657715, |
| "learning_rate": 2.482129336361067e-05, |
| "loss": 0.6952, |
| "step": 298 |
| }, |
| { |
| "epoch": 10.310344827586206, |
| "grad_norm": 3.146904945373535, |
| "learning_rate": 2.481759873104363e-05, |
| "loss": 0.714, |
| "step": 299 |
| }, |
| { |
| "epoch": 10.344827586206897, |
| "grad_norm": 2.9892313480377197, |
| "learning_rate": 2.4813866576934676e-05, |
| "loss": 0.7746, |
| "step": 300 |
| }, |
| { |
| "epoch": 10.379310344827585, |
| "grad_norm": 2.505178928375244, |
| "learning_rate": 2.4810096912652604e-05, |
| "loss": 0.6921, |
| "step": 301 |
| }, |
| { |
| "epoch": 10.413793103448276, |
| "grad_norm": 2.44994854927063, |
| "learning_rate": 2.480628974968046e-05, |
| "loss": 0.7294, |
| "step": 302 |
| }, |
| { |
| "epoch": 10.448275862068966, |
| "grad_norm": 3.606544256210327, |
| "learning_rate": 2.4802445099615525e-05, |
| "loss": 0.8161, |
| "step": 303 |
| }, |
| { |
| "epoch": 10.482758620689655, |
| "grad_norm": 3.289849042892456, |
| "learning_rate": 2.479856297416927e-05, |
| "loss": 0.7754, |
| "step": 304 |
| }, |
| { |
| "epoch": 10.517241379310345, |
| "grad_norm": 3.646949529647827, |
| "learning_rate": 2.4794643385167327e-05, |
| "loss": 0.8014, |
| "step": 305 |
| }, |
| { |
| "epoch": 10.551724137931034, |
| "grad_norm": 6.4262847900390625, |
| "learning_rate": 2.4790686344549436e-05, |
| "loss": 0.8158, |
| "step": 306 |
| }, |
| { |
| "epoch": 10.586206896551724, |
| "grad_norm": 3.1850662231445312, |
| "learning_rate": 2.478669186436943e-05, |
| "loss": 0.7479, |
| "step": 307 |
| }, |
| { |
| "epoch": 10.620689655172415, |
| "grad_norm": 3.0070602893829346, |
| "learning_rate": 2.478265995679519e-05, |
| "loss": 0.769, |
| "step": 308 |
| }, |
| { |
| "epoch": 10.655172413793103, |
| "grad_norm": 6.722799777984619, |
| "learning_rate": 2.4778590634108613e-05, |
| "loss": 0.7574, |
| "step": 309 |
| }, |
| { |
| "epoch": 10.689655172413794, |
| "grad_norm": 3.4408349990844727, |
| "learning_rate": 2.4774483908705546e-05, |
| "loss": 0.6743, |
| "step": 310 |
| }, |
| { |
| "epoch": 10.724137931034482, |
| "grad_norm": 2.932551383972168, |
| "learning_rate": 2.4770339793095802e-05, |
| "loss": 0.7759, |
| "step": 311 |
| }, |
| { |
| "epoch": 10.758620689655173, |
| "grad_norm": 3.669299602508545, |
| "learning_rate": 2.4766158299903062e-05, |
| "loss": 0.8693, |
| "step": 312 |
| }, |
| { |
| "epoch": 10.793103448275861, |
| "grad_norm": 3.167391061782837, |
| "learning_rate": 2.4761939441864895e-05, |
| "loss": 0.7359, |
| "step": 313 |
| }, |
| { |
| "epoch": 10.827586206896552, |
| "grad_norm": 2.894587993621826, |
| "learning_rate": 2.4757683231832662e-05, |
| "loss": 0.6821, |
| "step": 314 |
| }, |
| { |
| "epoch": 10.862068965517242, |
| "grad_norm": 3.0310933589935303, |
| "learning_rate": 2.4753389682771523e-05, |
| "loss": 0.7143, |
| "step": 315 |
| }, |
| { |
| "epoch": 10.89655172413793, |
| "grad_norm": 3.1652767658233643, |
| "learning_rate": 2.474905880776037e-05, |
| "loss": 0.7949, |
| "step": 316 |
| }, |
| { |
| "epoch": 10.931034482758621, |
| "grad_norm": 5.710330009460449, |
| "learning_rate": 2.47446906199918e-05, |
| "loss": 0.7518, |
| "step": 317 |
| }, |
| { |
| "epoch": 10.96551724137931, |
| "grad_norm": 5.877900123596191, |
| "learning_rate": 2.4740285132772072e-05, |
| "loss": 0.8513, |
| "step": 318 |
| }, |
| { |
| "epoch": 11.0, |
| "grad_norm": 3.645622730255127, |
| "learning_rate": 2.4735842359521064e-05, |
| "loss": 0.8153, |
| "step": 319 |
| }, |
| { |
| "epoch": 11.03448275862069, |
| "grad_norm": 3.9911906719207764, |
| "learning_rate": 2.4731362313772233e-05, |
| "loss": 0.7651, |
| "step": 320 |
| }, |
| { |
| "epoch": 11.068965517241379, |
| "grad_norm": 4.0215935707092285, |
| "learning_rate": 2.4726845009172572e-05, |
| "loss": 0.7985, |
| "step": 321 |
| }, |
| { |
| "epoch": 11.10344827586207, |
| "grad_norm": 2.402211904525757, |
| "learning_rate": 2.4722290459482578e-05, |
| "loss": 0.7054, |
| "step": 322 |
| }, |
| { |
| "epoch": 11.137931034482758, |
| "grad_norm": 2.782041549682617, |
| "learning_rate": 2.47176986785762e-05, |
| "loss": 0.7909, |
| "step": 323 |
| }, |
| { |
| "epoch": 11.172413793103448, |
| "grad_norm": 4.322781085968018, |
| "learning_rate": 2.47130696804408e-05, |
| "loss": 0.747, |
| "step": 324 |
| }, |
| { |
| "epoch": 11.206896551724139, |
| "grad_norm": 5.149921417236328, |
| "learning_rate": 2.47084034791771e-05, |
| "loss": 0.8314, |
| "step": 325 |
| }, |
| { |
| "epoch": 11.241379310344827, |
| "grad_norm": 3.920372486114502, |
| "learning_rate": 2.4703700088999167e-05, |
| "loss": 0.8623, |
| "step": 326 |
| }, |
| { |
| "epoch": 11.275862068965518, |
| "grad_norm": 2.6054768562316895, |
| "learning_rate": 2.4698959524234346e-05, |
| "loss": 0.7899, |
| "step": 327 |
| }, |
| { |
| "epoch": 11.310344827586206, |
| "grad_norm": 4.032412528991699, |
| "learning_rate": 2.4694181799323206e-05, |
| "loss": 0.8142, |
| "step": 328 |
| }, |
| { |
| "epoch": 11.344827586206897, |
| "grad_norm": 3.573333978652954, |
| "learning_rate": 2.468936692881954e-05, |
| "loss": 0.7309, |
| "step": 329 |
| }, |
| { |
| "epoch": 11.379310344827585, |
| "grad_norm": 7.383831977844238, |
| "learning_rate": 2.4684514927390274e-05, |
| "loss": 0.8518, |
| "step": 330 |
| }, |
| { |
| "epoch": 11.413793103448276, |
| "grad_norm": 3.8350508213043213, |
| "learning_rate": 2.4679625809815443e-05, |
| "loss": 0.7778, |
| "step": 331 |
| }, |
| { |
| "epoch": 11.448275862068966, |
| "grad_norm": 5.659679889678955, |
| "learning_rate": 2.467469959098815e-05, |
| "loss": 0.7818, |
| "step": 332 |
| }, |
| { |
| "epoch": 11.482758620689655, |
| "grad_norm": 6.893414497375488, |
| "learning_rate": 2.4669736285914505e-05, |
| "loss": 0.7871, |
| "step": 333 |
| }, |
| { |
| "epoch": 11.517241379310345, |
| "grad_norm": 3.4787838459014893, |
| "learning_rate": 2.4664735909713606e-05, |
| "loss": 0.6967, |
| "step": 334 |
| }, |
| { |
| "epoch": 11.551724137931034, |
| "grad_norm": 3.2118844985961914, |
| "learning_rate": 2.465969847761746e-05, |
| "loss": 0.6598, |
| "step": 335 |
| }, |
| { |
| "epoch": 11.586206896551724, |
| "grad_norm": 3.6841318607330322, |
| "learning_rate": 2.4654624004970957e-05, |
| "loss": 0.7323, |
| "step": 336 |
| }, |
| { |
| "epoch": 11.620689655172415, |
| "grad_norm": 4.46773624420166, |
| "learning_rate": 2.464951250723183e-05, |
| "loss": 0.6713, |
| "step": 337 |
| }, |
| { |
| "epoch": 11.655172413793103, |
| "grad_norm": 2.266669988632202, |
| "learning_rate": 2.4644363999970576e-05, |
| "loss": 0.6425, |
| "step": 338 |
| }, |
| { |
| "epoch": 11.689655172413794, |
| "grad_norm": 5.7526140213012695, |
| "learning_rate": 2.4639178498870452e-05, |
| "loss": 0.6836, |
| "step": 339 |
| }, |
| { |
| "epoch": 11.724137931034482, |
| "grad_norm": 5.038420677185059, |
| "learning_rate": 2.4633956019727385e-05, |
| "loss": 0.7308, |
| "step": 340 |
| }, |
| { |
| "epoch": 11.758620689655173, |
| "grad_norm": 5.116992473602295, |
| "learning_rate": 2.4628696578449956e-05, |
| "loss": 0.7882, |
| "step": 341 |
| }, |
| { |
| "epoch": 11.793103448275861, |
| "grad_norm": 4.496801376342773, |
| "learning_rate": 2.4623400191059335e-05, |
| "loss": 0.7327, |
| "step": 342 |
| }, |
| { |
| "epoch": 11.827586206896552, |
| "grad_norm": 5.261680603027344, |
| "learning_rate": 2.4618066873689238e-05, |
| "loss": 0.8085, |
| "step": 343 |
| }, |
| { |
| "epoch": 11.862068965517242, |
| "grad_norm": 4.4358367919921875, |
| "learning_rate": 2.461269664258587e-05, |
| "loss": 0.6914, |
| "step": 344 |
| }, |
| { |
| "epoch": 11.89655172413793, |
| "grad_norm": 3.8224196434020996, |
| "learning_rate": 2.4607289514107888e-05, |
| "loss": 0.6859, |
| "step": 345 |
| }, |
| { |
| "epoch": 11.931034482758621, |
| "grad_norm": 3.285635471343994, |
| "learning_rate": 2.460184550472635e-05, |
| "loss": 0.8106, |
| "step": 346 |
| }, |
| { |
| "epoch": 11.96551724137931, |
| "grad_norm": 4.5762939453125, |
| "learning_rate": 2.4596364631024643e-05, |
| "loss": 0.7377, |
| "step": 347 |
| }, |
| { |
| "epoch": 12.0, |
| "grad_norm": 3.780836820602417, |
| "learning_rate": 2.459084690969846e-05, |
| "loss": 0.707, |
| "step": 348 |
| }, |
| { |
| "epoch": 12.03448275862069, |
| "grad_norm": 4.315356731414795, |
| "learning_rate": 2.4585292357555746e-05, |
| "loss": 0.7106, |
| "step": 349 |
| }, |
| { |
| "epoch": 12.068965517241379, |
| "grad_norm": 3.3244175910949707, |
| "learning_rate": 2.457970099151662e-05, |
| "loss": 0.699, |
| "step": 350 |
| }, |
| { |
| "epoch": 12.10344827586207, |
| "grad_norm": 2.7675392627716064, |
| "learning_rate": 2.4574072828613354e-05, |
| "loss": 0.6546, |
| "step": 351 |
| }, |
| { |
| "epoch": 12.137931034482758, |
| "grad_norm": 3.6822872161865234, |
| "learning_rate": 2.4568407885990313e-05, |
| "loss": 0.6953, |
| "step": 352 |
| }, |
| { |
| "epoch": 12.172413793103448, |
| "grad_norm": 4.7930731773376465, |
| "learning_rate": 2.4562706180903894e-05, |
| "loss": 0.7863, |
| "step": 353 |
| }, |
| { |
| "epoch": 12.206896551724139, |
| "grad_norm": 3.7039031982421875, |
| "learning_rate": 2.4556967730722478e-05, |
| "loss": 0.7581, |
| "step": 354 |
| }, |
| { |
| "epoch": 12.241379310344827, |
| "grad_norm": 5.0022053718566895, |
| "learning_rate": 2.455119255292638e-05, |
| "loss": 0.8091, |
| "step": 355 |
| }, |
| { |
| "epoch": 12.275862068965518, |
| "grad_norm": 3.0869357585906982, |
| "learning_rate": 2.4545380665107786e-05, |
| "loss": 0.7677, |
| "step": 356 |
| }, |
| { |
| "epoch": 12.310344827586206, |
| "grad_norm": 3.7048325538635254, |
| "learning_rate": 2.453953208497073e-05, |
| "loss": 0.7886, |
| "step": 357 |
| }, |
| { |
| "epoch": 12.344827586206897, |
| "grad_norm": 4.7608561515808105, |
| "learning_rate": 2.4533646830330986e-05, |
| "loss": 0.9105, |
| "step": 358 |
| }, |
| { |
| "epoch": 12.379310344827585, |
| "grad_norm": 5.352680683135986, |
| "learning_rate": 2.452772491911607e-05, |
| "loss": 0.8114, |
| "step": 359 |
| }, |
| { |
| "epoch": 12.413793103448276, |
| "grad_norm": 8.703157424926758, |
| "learning_rate": 2.4521766369365142e-05, |
| "loss": 0.8285, |
| "step": 360 |
| }, |
| { |
| "epoch": 12.448275862068966, |
| "grad_norm": 3.2992141246795654, |
| "learning_rate": 2.4515771199228987e-05, |
| "loss": 0.7644, |
| "step": 361 |
| }, |
| { |
| "epoch": 12.482758620689655, |
| "grad_norm": 6.761632919311523, |
| "learning_rate": 2.450973942696993e-05, |
| "loss": 0.8596, |
| "step": 362 |
| }, |
| { |
| "epoch": 12.517241379310345, |
| "grad_norm": 4.267969608306885, |
| "learning_rate": 2.450367107096179e-05, |
| "loss": 0.7883, |
| "step": 363 |
| }, |
| { |
| "epoch": 12.551724137931034, |
| "grad_norm": 3.685096025466919, |
| "learning_rate": 2.449756614968984e-05, |
| "loss": 0.6954, |
| "step": 364 |
| }, |
| { |
| "epoch": 12.586206896551724, |
| "grad_norm": 3.5344960689544678, |
| "learning_rate": 2.449142468175072e-05, |
| "loss": 0.6647, |
| "step": 365 |
| }, |
| { |
| "epoch": 12.620689655172415, |
| "grad_norm": 4.839688777923584, |
| "learning_rate": 2.4485246685852413e-05, |
| "loss": 0.7058, |
| "step": 366 |
| }, |
| { |
| "epoch": 12.655172413793103, |
| "grad_norm": 3.455937623977661, |
| "learning_rate": 2.4479032180814166e-05, |
| "loss": 0.6476, |
| "step": 367 |
| }, |
| { |
| "epoch": 12.689655172413794, |
| "grad_norm": 4.819273948669434, |
| "learning_rate": 2.447278118556644e-05, |
| "loss": 0.6687, |
| "step": 368 |
| }, |
| { |
| "epoch": 12.724137931034482, |
| "grad_norm": 3.5306191444396973, |
| "learning_rate": 2.446649371915084e-05, |
| "loss": 0.709, |
| "step": 369 |
| }, |
| { |
| "epoch": 12.758620689655173, |
| "grad_norm": 5.589993953704834, |
| "learning_rate": 2.4460169800720095e-05, |
| "loss": 0.8271, |
| "step": 370 |
| }, |
| { |
| "epoch": 12.793103448275861, |
| "grad_norm": 2.941460132598877, |
| "learning_rate": 2.4453809449537947e-05, |
| "loss": 0.7747, |
| "step": 371 |
| }, |
| { |
| "epoch": 12.827586206896552, |
| "grad_norm": 2.6910247802734375, |
| "learning_rate": 2.4447412684979127e-05, |
| "loss": 0.6994, |
| "step": 372 |
| }, |
| { |
| "epoch": 12.862068965517242, |
| "grad_norm": 4.3834662437438965, |
| "learning_rate": 2.4440979526529295e-05, |
| "loss": 0.7782, |
| "step": 373 |
| }, |
| { |
| "epoch": 12.89655172413793, |
| "grad_norm": 5.008590221405029, |
| "learning_rate": 2.4434509993784972e-05, |
| "loss": 0.8679, |
| "step": 374 |
| }, |
| { |
| "epoch": 12.931034482758621, |
| "grad_norm": 3.8566648960113525, |
| "learning_rate": 2.4428004106453462e-05, |
| "loss": 0.8427, |
| "step": 375 |
| }, |
| { |
| "epoch": 12.96551724137931, |
| "grad_norm": 4.028674125671387, |
| "learning_rate": 2.4421461884352836e-05, |
| "loss": 0.7371, |
| "step": 376 |
| }, |
| { |
| "epoch": 13.0, |
| "grad_norm": 5.727315902709961, |
| "learning_rate": 2.4414883347411836e-05, |
| "loss": 0.7163, |
| "step": 377 |
| }, |
| { |
| "epoch": 13.03448275862069, |
| "grad_norm": 2.517099618911743, |
| "learning_rate": 2.440826851566983e-05, |
| "loss": 0.6465, |
| "step": 378 |
| }, |
| { |
| "epoch": 13.068965517241379, |
| "grad_norm": 2.401848316192627, |
| "learning_rate": 2.4401617409276735e-05, |
| "loss": 0.6307, |
| "step": 379 |
| }, |
| { |
| "epoch": 13.10344827586207, |
| "grad_norm": 3.5928995609283447, |
| "learning_rate": 2.439493004849298e-05, |
| "loss": 0.665, |
| "step": 380 |
| }, |
| { |
| "epoch": 13.137931034482758, |
| "grad_norm": 3.128108024597168, |
| "learning_rate": 2.438820645368942e-05, |
| "loss": 0.6785, |
| "step": 381 |
| }, |
| { |
| "epoch": 13.172413793103448, |
| "grad_norm": 2.5367393493652344, |
| "learning_rate": 2.4381446645347297e-05, |
| "loss": 0.6353, |
| "step": 382 |
| }, |
| { |
| "epoch": 13.206896551724139, |
| "grad_norm": 3.4342269897460938, |
| "learning_rate": 2.4374650644058156e-05, |
| "loss": 0.6067, |
| "step": 383 |
| }, |
| { |
| "epoch": 13.241379310344827, |
| "grad_norm": 2.9581339359283447, |
| "learning_rate": 2.43678184705238e-05, |
| "loss": 0.5938, |
| "step": 384 |
| }, |
| { |
| "epoch": 13.275862068965518, |
| "grad_norm": 4.147696018218994, |
| "learning_rate": 2.4360950145556208e-05, |
| "loss": 0.6053, |
| "step": 385 |
| }, |
| { |
| "epoch": 13.310344827586206, |
| "grad_norm": 3.097829580307007, |
| "learning_rate": 2.4354045690077492e-05, |
| "loss": 0.7284, |
| "step": 386 |
| }, |
| { |
| "epoch": 13.344827586206897, |
| "grad_norm": 3.5135653018951416, |
| "learning_rate": 2.4347105125119824e-05, |
| "loss": 0.6605, |
| "step": 387 |
| }, |
| { |
| "epoch": 13.379310344827585, |
| "grad_norm": 3.659688949584961, |
| "learning_rate": 2.4340128471825362e-05, |
| "loss": 0.6748, |
| "step": 388 |
| }, |
| { |
| "epoch": 13.413793103448276, |
| "grad_norm": 2.355313539505005, |
| "learning_rate": 2.4333115751446208e-05, |
| "loss": 0.6609, |
| "step": 389 |
| }, |
| { |
| "epoch": 13.448275862068966, |
| "grad_norm": 4.517484664916992, |
| "learning_rate": 2.4326066985344318e-05, |
| "loss": 0.6709, |
| "step": 390 |
| }, |
| { |
| "epoch": 13.482758620689655, |
| "grad_norm": 5.471557140350342, |
| "learning_rate": 2.4318982194991463e-05, |
| "loss": 0.7848, |
| "step": 391 |
| }, |
| { |
| "epoch": 13.517241379310345, |
| "grad_norm": 3.947722911834717, |
| "learning_rate": 2.4311861401969138e-05, |
| "loss": 0.7373, |
| "step": 392 |
| }, |
| { |
| "epoch": 13.551724137931034, |
| "grad_norm": 3.1544623374938965, |
| "learning_rate": 2.4304704627968515e-05, |
| "loss": 0.688, |
| "step": 393 |
| }, |
| { |
| "epoch": 13.586206896551724, |
| "grad_norm": 3.4366607666015625, |
| "learning_rate": 2.429751189479037e-05, |
| "loss": 0.6537, |
| "step": 394 |
| }, |
| { |
| "epoch": 13.620689655172415, |
| "grad_norm": 2.357426404953003, |
| "learning_rate": 2.429028322434501e-05, |
| "loss": 0.5864, |
| "step": 395 |
| }, |
| { |
| "epoch": 13.655172413793103, |
| "grad_norm": 2.7362890243530273, |
| "learning_rate": 2.4283018638652234e-05, |
| "loss": 0.6294, |
| "step": 396 |
| }, |
| { |
| "epoch": 13.689655172413794, |
| "grad_norm": 2.7302050590515137, |
| "learning_rate": 2.427571815984121e-05, |
| "loss": 0.6314, |
| "step": 397 |
| }, |
| { |
| "epoch": 13.724137931034482, |
| "grad_norm": 3.3525278568267822, |
| "learning_rate": 2.4268381810150474e-05, |
| "loss": 0.5765, |
| "step": 398 |
| }, |
| { |
| "epoch": 13.758620689655173, |
| "grad_norm": 5.29491662979126, |
| "learning_rate": 2.426100961192782e-05, |
| "loss": 0.6679, |
| "step": 399 |
| }, |
| { |
| "epoch": 13.793103448275861, |
| "grad_norm": 3.3575775623321533, |
| "learning_rate": 2.4253601587630236e-05, |
| "loss": 0.6177, |
| "step": 400 |
| }, |
| { |
| "epoch": 13.827586206896552, |
| "grad_norm": 3.106152057647705, |
| "learning_rate": 2.4246157759823855e-05, |
| "loss": 0.6268, |
| "step": 401 |
| }, |
| { |
| "epoch": 13.862068965517242, |
| "grad_norm": 3.3294615745544434, |
| "learning_rate": 2.4238678151183863e-05, |
| "loss": 0.6302, |
| "step": 402 |
| }, |
| { |
| "epoch": 13.89655172413793, |
| "grad_norm": 3.1822283267974854, |
| "learning_rate": 2.423116278449445e-05, |
| "loss": 0.626, |
| "step": 403 |
| }, |
| { |
| "epoch": 13.931034482758621, |
| "grad_norm": 2.9840409755706787, |
| "learning_rate": 2.4223611682648724e-05, |
| "loss": 0.6787, |
| "step": 404 |
| }, |
| { |
| "epoch": 13.96551724137931, |
| "grad_norm": 2.9849910736083984, |
| "learning_rate": 2.4216024868648644e-05, |
| "loss": 0.6664, |
| "step": 405 |
| }, |
| { |
| "epoch": 14.0, |
| "grad_norm": 4.7554731369018555, |
| "learning_rate": 2.4208402365604972e-05, |
| "loss": 0.6893, |
| "step": 406 |
| }, |
| { |
| "epoch": 14.03448275862069, |
| "grad_norm": 4.694082260131836, |
| "learning_rate": 2.420074419673717e-05, |
| "loss": 0.6614, |
| "step": 407 |
| }, |
| { |
| "epoch": 14.068965517241379, |
| "grad_norm": 4.404138088226318, |
| "learning_rate": 2.4193050385373344e-05, |
| "loss": 0.6874, |
| "step": 408 |
| }, |
| { |
| "epoch": 14.10344827586207, |
| "grad_norm": 3.568782091140747, |
| "learning_rate": 2.418532095495018e-05, |
| "loss": 0.6397, |
| "step": 409 |
| }, |
| { |
| "epoch": 14.137931034482758, |
| "grad_norm": 3.321381092071533, |
| "learning_rate": 2.417755592901287e-05, |
| "loss": 0.7479, |
| "step": 410 |
| }, |
| { |
| "epoch": 14.172413793103448, |
| "grad_norm": 2.899473190307617, |
| "learning_rate": 2.4169755331215023e-05, |
| "loss": 0.5792, |
| "step": 411 |
| }, |
| { |
| "epoch": 14.206896551724139, |
| "grad_norm": 5.696684837341309, |
| "learning_rate": 2.4161919185318617e-05, |
| "loss": 0.6417, |
| "step": 412 |
| }, |
| { |
| "epoch": 14.241379310344827, |
| "grad_norm": 3.3947043418884277, |
| "learning_rate": 2.4154047515193904e-05, |
| "loss": 0.596, |
| "step": 413 |
| }, |
| { |
| "epoch": 14.275862068965518, |
| "grad_norm": 4.2483229637146, |
| "learning_rate": 2.4146140344819363e-05, |
| "loss": 0.6651, |
| "step": 414 |
| }, |
| { |
| "epoch": 14.310344827586206, |
| "grad_norm": 4.062285423278809, |
| "learning_rate": 2.4138197698281606e-05, |
| "loss": 0.6853, |
| "step": 415 |
| }, |
| { |
| "epoch": 14.344827586206897, |
| "grad_norm": 3.0232627391815186, |
| "learning_rate": 2.413021959977531e-05, |
| "loss": 0.6696, |
| "step": 416 |
| }, |
| { |
| "epoch": 14.379310344827585, |
| "grad_norm": 2.5746848583221436, |
| "learning_rate": 2.4122206073603142e-05, |
| "loss": 0.7158, |
| "step": 417 |
| }, |
| { |
| "epoch": 14.413793103448276, |
| "grad_norm": 5.224025249481201, |
| "learning_rate": 2.4114157144175703e-05, |
| "loss": 0.7015, |
| "step": 418 |
| }, |
| { |
| "epoch": 14.448275862068966, |
| "grad_norm": 3.374368906021118, |
| "learning_rate": 2.4106072836011422e-05, |
| "loss": 0.6896, |
| "step": 419 |
| }, |
| { |
| "epoch": 14.482758620689655, |
| "grad_norm": 2.7048842906951904, |
| "learning_rate": 2.40979531737365e-05, |
| "loss": 0.6628, |
| "step": 420 |
| }, |
| { |
| "epoch": 14.517241379310345, |
| "grad_norm": 3.953042984008789, |
| "learning_rate": 2.4089798182084845e-05, |
| "loss": 0.6238, |
| "step": 421 |
| }, |
| { |
| "epoch": 14.551724137931034, |
| "grad_norm": 2.6758952140808105, |
| "learning_rate": 2.4081607885897966e-05, |
| "loss": 0.6396, |
| "step": 422 |
| }, |
| { |
| "epoch": 14.586206896551724, |
| "grad_norm": 3.827179193496704, |
| "learning_rate": 2.407338231012494e-05, |
| "loss": 0.6959, |
| "step": 423 |
| }, |
| { |
| "epoch": 14.620689655172415, |
| "grad_norm": 6.4605607986450195, |
| "learning_rate": 2.406512147982228e-05, |
| "loss": 0.7085, |
| "step": 424 |
| }, |
| { |
| "epoch": 14.655172413793103, |
| "grad_norm": 6.189979076385498, |
| "learning_rate": 2.4056825420153917e-05, |
| "loss": 0.7617, |
| "step": 425 |
| }, |
| { |
| "epoch": 14.689655172413794, |
| "grad_norm": 5.246057987213135, |
| "learning_rate": 2.4048494156391087e-05, |
| "loss": 0.7481, |
| "step": 426 |
| }, |
| { |
| "epoch": 14.724137931034482, |
| "grad_norm": 2.7838568687438965, |
| "learning_rate": 2.4040127713912264e-05, |
| "loss": 0.634, |
| "step": 427 |
| }, |
| { |
| "epoch": 14.758620689655173, |
| "grad_norm": 2.8083300590515137, |
| "learning_rate": 2.403172611820308e-05, |
| "loss": 0.6767, |
| "step": 428 |
| }, |
| { |
| "epoch": 14.793103448275861, |
| "grad_norm": 3.925990343093872, |
| "learning_rate": 2.4023289394856257e-05, |
| "loss": 0.7427, |
| "step": 429 |
| }, |
| { |
| "epoch": 14.827586206896552, |
| "grad_norm": 2.5433623790740967, |
| "learning_rate": 2.401481756957152e-05, |
| "loss": 0.6591, |
| "step": 430 |
| }, |
| { |
| "epoch": 14.862068965517242, |
| "grad_norm": 4.543763160705566, |
| "learning_rate": 2.4006310668155508e-05, |
| "loss": 0.6728, |
| "step": 431 |
| }, |
| { |
| "epoch": 14.89655172413793, |
| "grad_norm": 4.649848461151123, |
| "learning_rate": 2.3997768716521723e-05, |
| "loss": 0.741, |
| "step": 432 |
| }, |
| { |
| "epoch": 14.931034482758621, |
| "grad_norm": 3.5682265758514404, |
| "learning_rate": 2.398919174069043e-05, |
| "loss": 0.6855, |
| "step": 433 |
| }, |
| { |
| "epoch": 14.96551724137931, |
| "grad_norm": 2.9493958950042725, |
| "learning_rate": 2.398057976678859e-05, |
| "loss": 0.6561, |
| "step": 434 |
| }, |
| { |
| "epoch": 15.0, |
| "grad_norm": 2.569502115249634, |
| "learning_rate": 2.3971932821049765e-05, |
| "loss": 0.6723, |
| "step": 435 |
| }, |
| { |
| "epoch": 15.03448275862069, |
| "grad_norm": 2.2379672527313232, |
| "learning_rate": 2.396325092981405e-05, |
| "loss": 0.6323, |
| "step": 436 |
| }, |
| { |
| "epoch": 15.068965517241379, |
| "grad_norm": 3.066236972808838, |
| "learning_rate": 2.3954534119527996e-05, |
| "loss": 0.7536, |
| "step": 437 |
| }, |
| { |
| "epoch": 15.10344827586207, |
| "grad_norm": 3.0116641521453857, |
| "learning_rate": 2.3945782416744517e-05, |
| "loss": 0.7323, |
| "step": 438 |
| }, |
| { |
| "epoch": 15.137931034482758, |
| "grad_norm": 4.455348491668701, |
| "learning_rate": 2.3936995848122812e-05, |
| "loss": 0.6905, |
| "step": 439 |
| }, |
| { |
| "epoch": 15.172413793103448, |
| "grad_norm": 3.218564033508301, |
| "learning_rate": 2.3928174440428297e-05, |
| "loss": 0.663, |
| "step": 440 |
| }, |
| { |
| "epoch": 15.206896551724139, |
| "grad_norm": 3.6186459064483643, |
| "learning_rate": 2.391931822053251e-05, |
| "loss": 0.6548, |
| "step": 441 |
| }, |
| { |
| "epoch": 15.241379310344827, |
| "grad_norm": 2.9447264671325684, |
| "learning_rate": 2.3910427215413036e-05, |
| "loss": 0.6127, |
| "step": 442 |
| }, |
| { |
| "epoch": 15.275862068965518, |
| "grad_norm": 2.7375073432922363, |
| "learning_rate": 2.390150145215341e-05, |
| "loss": 0.6404, |
| "step": 443 |
| }, |
| { |
| "epoch": 15.310344827586206, |
| "grad_norm": 2.91684889793396, |
| "learning_rate": 2.3892540957943067e-05, |
| "loss": 0.629, |
| "step": 444 |
| }, |
| { |
| "epoch": 15.344827586206897, |
| "grad_norm": 4.3465399742126465, |
| "learning_rate": 2.3883545760077215e-05, |
| "loss": 0.7172, |
| "step": 445 |
| }, |
| { |
| "epoch": 15.379310344827585, |
| "grad_norm": 6.414719104766846, |
| "learning_rate": 2.3874515885956792e-05, |
| "loss": 0.6642, |
| "step": 446 |
| }, |
| { |
| "epoch": 15.413793103448276, |
| "grad_norm": 2.770498752593994, |
| "learning_rate": 2.386545136308836e-05, |
| "loss": 0.7017, |
| "step": 447 |
| }, |
| { |
| "epoch": 15.448275862068966, |
| "grad_norm": 3.6498284339904785, |
| "learning_rate": 2.3856352219084024e-05, |
| "loss": 0.6538, |
| "step": 448 |
| }, |
| { |
| "epoch": 15.482758620689655, |
| "grad_norm": 2.958104372024536, |
| "learning_rate": 2.384721848166136e-05, |
| "loss": 0.5846, |
| "step": 449 |
| }, |
| { |
| "epoch": 15.517241379310345, |
| "grad_norm": 2.3549644947052, |
| "learning_rate": 2.3838050178643312e-05, |
| "loss": 0.5435, |
| "step": 450 |
| }, |
| { |
| "epoch": 15.551724137931034, |
| "grad_norm": 3.659111261367798, |
| "learning_rate": 2.3828847337958126e-05, |
| "loss": 0.6394, |
| "step": 451 |
| }, |
| { |
| "epoch": 15.586206896551724, |
| "grad_norm": 2.7539238929748535, |
| "learning_rate": 2.3819609987639247e-05, |
| "loss": 0.6072, |
| "step": 452 |
| }, |
| { |
| "epoch": 15.620689655172415, |
| "grad_norm": 3.189476728439331, |
| "learning_rate": 2.3810338155825245e-05, |
| "loss": 0.6523, |
| "step": 453 |
| }, |
| { |
| "epoch": 15.655172413793103, |
| "grad_norm": 2.3295481204986572, |
| "learning_rate": 2.3801031870759732e-05, |
| "loss": 0.6303, |
| "step": 454 |
| }, |
| { |
| "epoch": 15.689655172413794, |
| "grad_norm": 2.7341103553771973, |
| "learning_rate": 2.379169116079126e-05, |
| "loss": 0.6822, |
| "step": 455 |
| }, |
| { |
| "epoch": 15.724137931034482, |
| "grad_norm": 2.9363160133361816, |
| "learning_rate": 2.378231605437326e-05, |
| "loss": 0.6329, |
| "step": 456 |
| }, |
| { |
| "epoch": 15.758620689655173, |
| "grad_norm": 5.169081211090088, |
| "learning_rate": 2.3772906580063924e-05, |
| "loss": 0.644, |
| "step": 457 |
| }, |
| { |
| "epoch": 15.793103448275861, |
| "grad_norm": 4.046895503997803, |
| "learning_rate": 2.3763462766526145e-05, |
| "loss": 0.6442, |
| "step": 458 |
| }, |
| { |
| "epoch": 15.827586206896552, |
| "grad_norm": 5.600619316101074, |
| "learning_rate": 2.3753984642527423e-05, |
| "loss": 0.6732, |
| "step": 459 |
| }, |
| { |
| "epoch": 15.862068965517242, |
| "grad_norm": 2.420551061630249, |
| "learning_rate": 2.3744472236939753e-05, |
| "loss": 0.5782, |
| "step": 460 |
| }, |
| { |
| "epoch": 15.89655172413793, |
| "grad_norm": 4.496841907501221, |
| "learning_rate": 2.3734925578739588e-05, |
| "loss": 0.6463, |
| "step": 461 |
| }, |
| { |
| "epoch": 15.931034482758621, |
| "grad_norm": 5.044814586639404, |
| "learning_rate": 2.3725344697007696e-05, |
| "loss": 0.5673, |
| "step": 462 |
| }, |
| { |
| "epoch": 15.96551724137931, |
| "grad_norm": 4.280508518218994, |
| "learning_rate": 2.3715729620929106e-05, |
| "loss": 0.5989, |
| "step": 463 |
| }, |
| { |
| "epoch": 16.0, |
| "grad_norm": 3.7099342346191406, |
| "learning_rate": 2.3706080379793007e-05, |
| "loss": 0.641, |
| "step": 464 |
| }, |
| { |
| "epoch": 16.03448275862069, |
| "grad_norm": 4.294964790344238, |
| "learning_rate": 2.3696397002992663e-05, |
| "loss": 0.6666, |
| "step": 465 |
| }, |
| { |
| "epoch": 16.06896551724138, |
| "grad_norm": 3.043022632598877, |
| "learning_rate": 2.3686679520025314e-05, |
| "loss": 0.6193, |
| "step": 466 |
| }, |
| { |
| "epoch": 16.103448275862068, |
| "grad_norm": 2.8048079013824463, |
| "learning_rate": 2.36769279604921e-05, |
| "loss": 0.6389, |
| "step": 467 |
| }, |
| { |
| "epoch": 16.137931034482758, |
| "grad_norm": 4.923339366912842, |
| "learning_rate": 2.366714235409797e-05, |
| "loss": 0.6676, |
| "step": 468 |
| }, |
| { |
| "epoch": 16.17241379310345, |
| "grad_norm": 2.6823081970214844, |
| "learning_rate": 2.365732273065157e-05, |
| "loss": 0.6144, |
| "step": 469 |
| }, |
| { |
| "epoch": 16.20689655172414, |
| "grad_norm": 2.74808406829834, |
| "learning_rate": 2.3647469120065177e-05, |
| "loss": 0.5502, |
| "step": 470 |
| }, |
| { |
| "epoch": 16.24137931034483, |
| "grad_norm": 4.341047286987305, |
| "learning_rate": 2.36375815523546e-05, |
| "loss": 0.6018, |
| "step": 471 |
| }, |
| { |
| "epoch": 16.275862068965516, |
| "grad_norm": 3.325765371322632, |
| "learning_rate": 2.3627660057639082e-05, |
| "loss": 0.6211, |
| "step": 472 |
| }, |
| { |
| "epoch": 16.310344827586206, |
| "grad_norm": 4.053094387054443, |
| "learning_rate": 2.361770466614122e-05, |
| "loss": 0.7121, |
| "step": 473 |
| }, |
| { |
| "epoch": 16.344827586206897, |
| "grad_norm": 3.4117484092712402, |
| "learning_rate": 2.3607715408186863e-05, |
| "loss": 0.6648, |
| "step": 474 |
| }, |
| { |
| "epoch": 16.379310344827587, |
| "grad_norm": 5.172271251678467, |
| "learning_rate": 2.3597692314205016e-05, |
| "loss": 0.659, |
| "step": 475 |
| }, |
| { |
| "epoch": 16.413793103448278, |
| "grad_norm": 3.7227883338928223, |
| "learning_rate": 2.358763541472777e-05, |
| "loss": 0.6244, |
| "step": 476 |
| }, |
| { |
| "epoch": 16.448275862068964, |
| "grad_norm": 2.213212013244629, |
| "learning_rate": 2.3577544740390184e-05, |
| "loss": 0.6075, |
| "step": 477 |
| }, |
| { |
| "epoch": 16.482758620689655, |
| "grad_norm": 2.4992291927337646, |
| "learning_rate": 2.35674203219302e-05, |
| "loss": 0.6339, |
| "step": 478 |
| }, |
| { |
| "epoch": 16.517241379310345, |
| "grad_norm": 5.619305610656738, |
| "learning_rate": 2.355726219018855e-05, |
| "loss": 0.6946, |
| "step": 479 |
| }, |
| { |
| "epoch": 16.551724137931036, |
| "grad_norm": 3.0107035636901855, |
| "learning_rate": 2.354707037610867e-05, |
| "loss": 0.6593, |
| "step": 480 |
| }, |
| { |
| "epoch": 16.586206896551722, |
| "grad_norm": 2.2236149311065674, |
| "learning_rate": 2.353684491073659e-05, |
| "loss": 0.615, |
| "step": 481 |
| }, |
| { |
| "epoch": 16.620689655172413, |
| "grad_norm": 2.139848470687866, |
| "learning_rate": 2.3526585825220848e-05, |
| "loss": 0.6075, |
| "step": 482 |
| }, |
| { |
| "epoch": 16.655172413793103, |
| "grad_norm": 2.2389163970947266, |
| "learning_rate": 2.35162931508124e-05, |
| "loss": 0.6533, |
| "step": 483 |
| }, |
| { |
| "epoch": 16.689655172413794, |
| "grad_norm": 3.55932879447937, |
| "learning_rate": 2.3505966918864525e-05, |
| "loss": 0.7435, |
| "step": 484 |
| }, |
| { |
| "epoch": 16.724137931034484, |
| "grad_norm": 2.86104416847229, |
| "learning_rate": 2.3495607160832707e-05, |
| "loss": 0.6867, |
| "step": 485 |
| }, |
| { |
| "epoch": 16.75862068965517, |
| "grad_norm": 3.4844839572906494, |
| "learning_rate": 2.3485213908274567e-05, |
| "loss": 0.571, |
| "step": 486 |
| }, |
| { |
| "epoch": 16.79310344827586, |
| "grad_norm": 2.288844108581543, |
| "learning_rate": 2.3474787192849756e-05, |
| "loss": 0.6348, |
| "step": 487 |
| }, |
| { |
| "epoch": 16.82758620689655, |
| "grad_norm": 2.6627936363220215, |
| "learning_rate": 2.346432704631986e-05, |
| "loss": 0.5483, |
| "step": 488 |
| }, |
| { |
| "epoch": 16.862068965517242, |
| "grad_norm": 3.744588851928711, |
| "learning_rate": 2.3453833500548295e-05, |
| "loss": 0.6306, |
| "step": 489 |
| }, |
| { |
| "epoch": 16.896551724137932, |
| "grad_norm": 2.5902559757232666, |
| "learning_rate": 2.3443306587500225e-05, |
| "loss": 0.6857, |
| "step": 490 |
| }, |
| { |
| "epoch": 16.93103448275862, |
| "grad_norm": 3.003737449645996, |
| "learning_rate": 2.3432746339242448e-05, |
| "loss": 0.6444, |
| "step": 491 |
| }, |
| { |
| "epoch": 16.96551724137931, |
| "grad_norm": 2.010629653930664, |
| "learning_rate": 2.342215278794332e-05, |
| "loss": 0.5778, |
| "step": 492 |
| }, |
| { |
| "epoch": 17.0, |
| "grad_norm": 3.528261423110962, |
| "learning_rate": 2.341152596587262e-05, |
| "loss": 0.6003, |
| "step": 493 |
| }, |
| { |
| "epoch": 17.03448275862069, |
| "grad_norm": 4.4283223152160645, |
| "learning_rate": 2.340086590540151e-05, |
| "loss": 0.6058, |
| "step": 494 |
| }, |
| { |
| "epoch": 17.06896551724138, |
| "grad_norm": 2.131556272506714, |
| "learning_rate": 2.339017263900237e-05, |
| "loss": 0.6174, |
| "step": 495 |
| }, |
| { |
| "epoch": 17.103448275862068, |
| "grad_norm": 3.118192672729492, |
| "learning_rate": 2.3379446199248747e-05, |
| "loss": 0.5973, |
| "step": 496 |
| }, |
| { |
| "epoch": 17.137931034482758, |
| "grad_norm": 4.915523052215576, |
| "learning_rate": 2.3368686618815238e-05, |
| "loss": 0.6529, |
| "step": 497 |
| }, |
| { |
| "epoch": 17.17241379310345, |
| "grad_norm": 3.0185019969940186, |
| "learning_rate": 2.335789393047739e-05, |
| "loss": 0.5984, |
| "step": 498 |
| }, |
| { |
| "epoch": 17.20689655172414, |
| "grad_norm": 2.78558087348938, |
| "learning_rate": 2.334706816711161e-05, |
| "loss": 0.6387, |
| "step": 499 |
| }, |
| { |
| "epoch": 17.24137931034483, |
| "grad_norm": 6.381213665008545, |
| "learning_rate": 2.3336209361695035e-05, |
| "loss": 0.6722, |
| "step": 500 |
| }, |
| { |
| "epoch": 17.275862068965516, |
| "grad_norm": 3.779954195022583, |
| "learning_rate": 2.3325317547305485e-05, |
| "loss": 0.6373, |
| "step": 501 |
| }, |
| { |
| "epoch": 17.310344827586206, |
| "grad_norm": 4.086780548095703, |
| "learning_rate": 2.3314392757121308e-05, |
| "loss": 0.6152, |
| "step": 502 |
| }, |
| { |
| "epoch": 17.344827586206897, |
| "grad_norm": 4.311843395233154, |
| "learning_rate": 2.3303435024421312e-05, |
| "loss": 0.6094, |
| "step": 503 |
| }, |
| { |
| "epoch": 17.379310344827587, |
| "grad_norm": 4.192368507385254, |
| "learning_rate": 2.3292444382584648e-05, |
| "loss": 0.6398, |
| "step": 504 |
| }, |
| { |
| "epoch": 17.413793103448278, |
| "grad_norm": 1.923520565032959, |
| "learning_rate": 2.328142086509072e-05, |
| "loss": 0.6108, |
| "step": 505 |
| }, |
| { |
| "epoch": 17.448275862068964, |
| "grad_norm": 3.6873779296875, |
| "learning_rate": 2.3270364505519073e-05, |
| "loss": 0.6146, |
| "step": 506 |
| }, |
| { |
| "epoch": 17.482758620689655, |
| "grad_norm": 2.5435547828674316, |
| "learning_rate": 2.32592753375493e-05, |
| "loss": 0.5429, |
| "step": 507 |
| }, |
| { |
| "epoch": 17.517241379310345, |
| "grad_norm": 4.662963390350342, |
| "learning_rate": 2.324815339496092e-05, |
| "loss": 0.5818, |
| "step": 508 |
| }, |
| { |
| "epoch": 17.551724137931036, |
| "grad_norm": 3.448092222213745, |
| "learning_rate": 2.3236998711633307e-05, |
| "loss": 0.5538, |
| "step": 509 |
| }, |
| { |
| "epoch": 17.586206896551722, |
| "grad_norm": 2.9241552352905273, |
| "learning_rate": 2.322581132154556e-05, |
| "loss": 0.5671, |
| "step": 510 |
| }, |
| { |
| "epoch": 17.620689655172413, |
| "grad_norm": 3.708261489868164, |
| "learning_rate": 2.3214591258776404e-05, |
| "loss": 0.5787, |
| "step": 511 |
| }, |
| { |
| "epoch": 17.655172413793103, |
| "grad_norm": 3.2863380908966064, |
| "learning_rate": 2.3203338557504105e-05, |
| "loss": 0.5985, |
| "step": 512 |
| }, |
| { |
| "epoch": 17.689655172413794, |
| "grad_norm": 2.276576042175293, |
| "learning_rate": 2.3192053252006335e-05, |
| "loss": 0.5104, |
| "step": 513 |
| }, |
| { |
| "epoch": 17.724137931034484, |
| "grad_norm": 4.1456780433654785, |
| "learning_rate": 2.3180735376660094e-05, |
| "loss": 0.5687, |
| "step": 514 |
| }, |
| { |
| "epoch": 17.75862068965517, |
| "grad_norm": 4.008676528930664, |
| "learning_rate": 2.3169384965941592e-05, |
| "loss": 0.6583, |
| "step": 515 |
| }, |
| { |
| "epoch": 17.79310344827586, |
| "grad_norm": 3.410106658935547, |
| "learning_rate": 2.3158002054426153e-05, |
| "loss": 0.6284, |
| "step": 516 |
| }, |
| { |
| "epoch": 17.82758620689655, |
| "grad_norm": 3.3623507022857666, |
| "learning_rate": 2.3146586676788095e-05, |
| "loss": 0.5826, |
| "step": 517 |
| }, |
| { |
| "epoch": 17.862068965517242, |
| "grad_norm": 2.8540868759155273, |
| "learning_rate": 2.3135138867800642e-05, |
| "loss": 0.6098, |
| "step": 518 |
| }, |
| { |
| "epoch": 17.896551724137932, |
| "grad_norm": 5.287265777587891, |
| "learning_rate": 2.3123658662335802e-05, |
| "loss": 0.6344, |
| "step": 519 |
| }, |
| { |
| "epoch": 17.93103448275862, |
| "grad_norm": 2.628873586654663, |
| "learning_rate": 2.311214609536427e-05, |
| "loss": 0.5716, |
| "step": 520 |
| }, |
| { |
| "epoch": 17.96551724137931, |
| "grad_norm": 3.3482089042663574, |
| "learning_rate": 2.3100601201955324e-05, |
| "loss": 0.6347, |
| "step": 521 |
| }, |
| { |
| "epoch": 18.0, |
| "grad_norm": 2.661550521850586, |
| "learning_rate": 2.308902401727672e-05, |
| "loss": 0.6066, |
| "step": 522 |
| }, |
| { |
| "epoch": 18.03448275862069, |
| "grad_norm": 3.0608065128326416, |
| "learning_rate": 2.3077414576594553e-05, |
| "loss": 0.639, |
| "step": 523 |
| }, |
| { |
| "epoch": 18.06896551724138, |
| "grad_norm": 5.210854530334473, |
| "learning_rate": 2.3065772915273203e-05, |
| "loss": 0.6772, |
| "step": 524 |
| }, |
| { |
| "epoch": 18.103448275862068, |
| "grad_norm": 3.0067954063415527, |
| "learning_rate": 2.305409906877519e-05, |
| "loss": 0.6215, |
| "step": 525 |
| }, |
| { |
| "epoch": 18.137931034482758, |
| "grad_norm": 3.596611261367798, |
| "learning_rate": 2.3042393072661074e-05, |
| "loss": 0.6174, |
| "step": 526 |
| }, |
| { |
| "epoch": 18.17241379310345, |
| "grad_norm": 2.643958806991577, |
| "learning_rate": 2.3030654962589346e-05, |
| "loss": 0.6218, |
| "step": 527 |
| }, |
| { |
| "epoch": 18.20689655172414, |
| "grad_norm": 2.740034818649292, |
| "learning_rate": 2.3018884774316328e-05, |
| "loss": 0.5306, |
| "step": 528 |
| }, |
| { |
| "epoch": 18.24137931034483, |
| "grad_norm": 2.9877166748046875, |
| "learning_rate": 2.3007082543696055e-05, |
| "loss": 0.573, |
| "step": 529 |
| }, |
| { |
| "epoch": 18.275862068965516, |
| "grad_norm": 2.206543445587158, |
| "learning_rate": 2.299524830668017e-05, |
| "loss": 0.5803, |
| "step": 530 |
| }, |
| { |
| "epoch": 18.310344827586206, |
| "grad_norm": 3.766697883605957, |
| "learning_rate": 2.2983382099317803e-05, |
| "loss": 0.7642, |
| "step": 531 |
| }, |
| { |
| "epoch": 18.344827586206897, |
| "grad_norm": 2.516557455062866, |
| "learning_rate": 2.2971483957755487e-05, |
| "loss": 0.588, |
| "step": 532 |
| }, |
| { |
| "epoch": 18.379310344827587, |
| "grad_norm": 5.181385517120361, |
| "learning_rate": 2.295955391823702e-05, |
| "loss": 0.7196, |
| "step": 533 |
| }, |
| { |
| "epoch": 18.413793103448278, |
| "grad_norm": 6.308480739593506, |
| "learning_rate": 2.2947592017103383e-05, |
| "loss": 0.7492, |
| "step": 534 |
| }, |
| { |
| "epoch": 18.448275862068964, |
| "grad_norm": 5.336180210113525, |
| "learning_rate": 2.2935598290792583e-05, |
| "loss": 0.6505, |
| "step": 535 |
| }, |
| { |
| "epoch": 18.482758620689655, |
| "grad_norm": 3.4817123413085938, |
| "learning_rate": 2.2923572775839603e-05, |
| "loss": 0.624, |
| "step": 536 |
| }, |
| { |
| "epoch": 18.517241379310345, |
| "grad_norm": 3.327723503112793, |
| "learning_rate": 2.2911515508876243e-05, |
| "loss": 0.5971, |
| "step": 537 |
| }, |
| { |
| "epoch": 18.551724137931036, |
| "grad_norm": 4.9538350105285645, |
| "learning_rate": 2.2899426526631033e-05, |
| "loss": 0.6098, |
| "step": 538 |
| }, |
| { |
| "epoch": 18.586206896551722, |
| "grad_norm": 4.590574264526367, |
| "learning_rate": 2.2887305865929104e-05, |
| "loss": 0.6799, |
| "step": 539 |
| }, |
| { |
| "epoch": 18.620689655172413, |
| "grad_norm": 3.821331262588501, |
| "learning_rate": 2.2875153563692094e-05, |
| "loss": 0.6897, |
| "step": 540 |
| }, |
| { |
| "epoch": 18.655172413793103, |
| "grad_norm": 3.8579297065734863, |
| "learning_rate": 2.286296965693802e-05, |
| "loss": 0.6482, |
| "step": 541 |
| }, |
| { |
| "epoch": 18.689655172413794, |
| "grad_norm": 6.096618175506592, |
| "learning_rate": 2.285075418278118e-05, |
| "loss": 0.6636, |
| "step": 542 |
| }, |
| { |
| "epoch": 18.724137931034484, |
| "grad_norm": 5.061996936798096, |
| "learning_rate": 2.283850717843202e-05, |
| "loss": 0.6584, |
| "step": 543 |
| }, |
| { |
| "epoch": 18.75862068965517, |
| "grad_norm": 3.448591709136963, |
| "learning_rate": 2.2826228681197047e-05, |
| "loss": 0.6224, |
| "step": 544 |
| }, |
| { |
| "epoch": 18.79310344827586, |
| "grad_norm": 5.197028160095215, |
| "learning_rate": 2.281391872847869e-05, |
| "loss": 0.5659, |
| "step": 545 |
| }, |
| { |
| "epoch": 18.82758620689655, |
| "grad_norm": 4.6534318923950195, |
| "learning_rate": 2.2801577357775193e-05, |
| "loss": 0.6029, |
| "step": 546 |
| }, |
| { |
| "epoch": 18.862068965517242, |
| "grad_norm": 4.554355144500732, |
| "learning_rate": 2.2789204606680524e-05, |
| "loss": 0.681, |
| "step": 547 |
| }, |
| { |
| "epoch": 18.896551724137932, |
| "grad_norm": 6.15927791595459, |
| "learning_rate": 2.2776800512884218e-05, |
| "loss": 0.7005, |
| "step": 548 |
| }, |
| { |
| "epoch": 18.93103448275862, |
| "grad_norm": 4.163325786590576, |
| "learning_rate": 2.2764365114171303e-05, |
| "loss": 0.6504, |
| "step": 549 |
| }, |
| { |
| "epoch": 18.96551724137931, |
| "grad_norm": 5.173964500427246, |
| "learning_rate": 2.2751898448422155e-05, |
| "loss": 0.6507, |
| "step": 550 |
| }, |
| { |
| "epoch": 19.0, |
| "grad_norm": 3.4791982173919678, |
| "learning_rate": 2.27394005536124e-05, |
| "loss": 0.6203, |
| "step": 551 |
| }, |
| { |
| "epoch": 19.03448275862069, |
| "grad_norm": 2.631436347961426, |
| "learning_rate": 2.2726871467812795e-05, |
| "loss": 0.6327, |
| "step": 552 |
| }, |
| { |
| "epoch": 19.06896551724138, |
| "grad_norm": 4.092041969299316, |
| "learning_rate": 2.2714311229189105e-05, |
| "loss": 0.649, |
| "step": 553 |
| }, |
| { |
| "epoch": 19.103448275862068, |
| "grad_norm": 3.6334736347198486, |
| "learning_rate": 2.2701719876002004e-05, |
| "loss": 0.6024, |
| "step": 554 |
| }, |
| { |
| "epoch": 19.137931034482758, |
| "grad_norm": 3.8364288806915283, |
| "learning_rate": 2.268909744660693e-05, |
| "loss": 0.5991, |
| "step": 555 |
| }, |
| { |
| "epoch": 19.17241379310345, |
| "grad_norm": 4.198896408081055, |
| "learning_rate": 2.267644397945399e-05, |
| "loss": 0.6068, |
| "step": 556 |
| }, |
| { |
| "epoch": 19.20689655172414, |
| "grad_norm": 3.1704394817352295, |
| "learning_rate": 2.2663759513087846e-05, |
| "loss": 0.6155, |
| "step": 557 |
| }, |
| { |
| "epoch": 19.24137931034483, |
| "grad_norm": 5.422239303588867, |
| "learning_rate": 2.2651044086147578e-05, |
| "loss": 0.6836, |
| "step": 558 |
| }, |
| { |
| "epoch": 19.275862068965516, |
| "grad_norm": 3.0339040756225586, |
| "learning_rate": 2.2638297737366583e-05, |
| "loss": 0.5523, |
| "step": 559 |
| }, |
| { |
| "epoch": 19.310344827586206, |
| "grad_norm": 2.4835903644561768, |
| "learning_rate": 2.2625520505572452e-05, |
| "loss": 0.6802, |
| "step": 560 |
| }, |
| { |
| "epoch": 19.344827586206897, |
| "grad_norm": 4.562428951263428, |
| "learning_rate": 2.2612712429686845e-05, |
| "loss": 0.7499, |
| "step": 561 |
| }, |
| { |
| "epoch": 19.379310344827587, |
| "grad_norm": 4.1088409423828125, |
| "learning_rate": 2.259987354872538e-05, |
| "loss": 0.6736, |
| "step": 562 |
| }, |
| { |
| "epoch": 19.413793103448278, |
| "grad_norm": 3.155371904373169, |
| "learning_rate": 2.2587003901797528e-05, |
| "loss": 0.6435, |
| "step": 563 |
| }, |
| { |
| "epoch": 19.448275862068964, |
| "grad_norm": 4.05208158493042, |
| "learning_rate": 2.2574103528106448e-05, |
| "loss": 0.6947, |
| "step": 564 |
| }, |
| { |
| "epoch": 19.482758620689655, |
| "grad_norm": 2.9079976081848145, |
| "learning_rate": 2.2561172466948925e-05, |
| "loss": 0.6085, |
| "step": 565 |
| }, |
| { |
| "epoch": 19.517241379310345, |
| "grad_norm": 2.7722418308258057, |
| "learning_rate": 2.2548210757715216e-05, |
| "loss": 0.6012, |
| "step": 566 |
| }, |
| { |
| "epoch": 19.551724137931036, |
| "grad_norm": 3.1863250732421875, |
| "learning_rate": 2.2535218439888933e-05, |
| "loss": 0.6131, |
| "step": 567 |
| }, |
| { |
| "epoch": 19.586206896551722, |
| "grad_norm": 4.646592140197754, |
| "learning_rate": 2.2522195553046926e-05, |
| "loss": 0.6314, |
| "step": 568 |
| }, |
| { |
| "epoch": 19.620689655172413, |
| "grad_norm": 5.8424577713012695, |
| "learning_rate": 2.2509142136859168e-05, |
| "loss": 0.6064, |
| "step": 569 |
| }, |
| { |
| "epoch": 19.655172413793103, |
| "grad_norm": 3.509737968444824, |
| "learning_rate": 2.2496058231088635e-05, |
| "loss": 0.5589, |
| "step": 570 |
| }, |
| { |
| "epoch": 19.689655172413794, |
| "grad_norm": 6.115649700164795, |
| "learning_rate": 2.248294387559116e-05, |
| "loss": 0.646, |
| "step": 571 |
| }, |
| { |
| "epoch": 19.724137931034484, |
| "grad_norm": 2.8004534244537354, |
| "learning_rate": 2.246979911031536e-05, |
| "loss": 0.5837, |
| "step": 572 |
| }, |
| { |
| "epoch": 19.75862068965517, |
| "grad_norm": 2.3154537677764893, |
| "learning_rate": 2.2456623975302454e-05, |
| "loss": 0.5972, |
| "step": 573 |
| }, |
| { |
| "epoch": 19.79310344827586, |
| "grad_norm": 2.849769115447998, |
| "learning_rate": 2.2443418510686198e-05, |
| "loss": 0.6159, |
| "step": 574 |
| }, |
| { |
| "epoch": 19.82758620689655, |
| "grad_norm": 2.4394686222076416, |
| "learning_rate": 2.2430182756692728e-05, |
| "loss": 0.5879, |
| "step": 575 |
| }, |
| { |
| "epoch": 19.862068965517242, |
| "grad_norm": 1.768813133239746, |
| "learning_rate": 2.2416916753640442e-05, |
| "loss": 0.5802, |
| "step": 576 |
| }, |
| { |
| "epoch": 19.896551724137932, |
| "grad_norm": 2.7482104301452637, |
| "learning_rate": 2.2403620541939884e-05, |
| "loss": 0.5606, |
| "step": 577 |
| }, |
| { |
| "epoch": 19.93103448275862, |
| "grad_norm": 1.9933055639266968, |
| "learning_rate": 2.2390294162093627e-05, |
| "loss": 0.5557, |
| "step": 578 |
| }, |
| { |
| "epoch": 19.96551724137931, |
| "grad_norm": 3.114577293395996, |
| "learning_rate": 2.2376937654696133e-05, |
| "loss": 0.5388, |
| "step": 579 |
| }, |
| { |
| "epoch": 20.0, |
| "grad_norm": 2.786837339401245, |
| "learning_rate": 2.2363551060433636e-05, |
| "loss": 0.5458, |
| "step": 580 |
| }, |
| { |
| "epoch": 20.03448275862069, |
| "grad_norm": 3.6182146072387695, |
| "learning_rate": 2.2350134420084023e-05, |
| "loss": 0.6075, |
| "step": 581 |
| }, |
| { |
| "epoch": 20.06896551724138, |
| "grad_norm": 4.18286657333374, |
| "learning_rate": 2.233668777451672e-05, |
| "loss": 0.6064, |
| "step": 582 |
| }, |
| { |
| "epoch": 20.103448275862068, |
| "grad_norm": 2.0372018814086914, |
| "learning_rate": 2.2323211164692526e-05, |
| "loss": 0.575, |
| "step": 583 |
| }, |
| { |
| "epoch": 20.137931034482758, |
| "grad_norm": 2.5157814025878906, |
| "learning_rate": 2.2309704631663536e-05, |
| "loss": 0.5397, |
| "step": 584 |
| }, |
| { |
| "epoch": 20.17241379310345, |
| "grad_norm": 2.8599259853363037, |
| "learning_rate": 2.2296168216573e-05, |
| "loss": 0.6183, |
| "step": 585 |
| }, |
| { |
| "epoch": 20.20689655172414, |
| "grad_norm": 2.6183788776397705, |
| "learning_rate": 2.2282601960655175e-05, |
| "loss": 0.5912, |
| "step": 586 |
| }, |
| { |
| "epoch": 20.24137931034483, |
| "grad_norm": 2.3617968559265137, |
| "learning_rate": 2.2269005905235234e-05, |
| "loss": 0.6095, |
| "step": 587 |
| }, |
| { |
| "epoch": 20.275862068965516, |
| "grad_norm": 2.041109323501587, |
| "learning_rate": 2.2255380091729124e-05, |
| "loss": 0.5709, |
| "step": 588 |
| }, |
| { |
| "epoch": 20.310344827586206, |
| "grad_norm": 2.473818778991699, |
| "learning_rate": 2.224172456164343e-05, |
| "loss": 0.622, |
| "step": 589 |
| }, |
| { |
| "epoch": 20.344827586206897, |
| "grad_norm": 4.0362324714660645, |
| "learning_rate": 2.2228039356575265e-05, |
| "loss": 0.6281, |
| "step": 590 |
| }, |
| { |
| "epoch": 20.379310344827587, |
| "grad_norm": 4.358520984649658, |
| "learning_rate": 2.221432451821214e-05, |
| "loss": 0.6563, |
| "step": 591 |
| }, |
| { |
| "epoch": 20.413793103448278, |
| "grad_norm": 2.3213906288146973, |
| "learning_rate": 2.2200580088331825e-05, |
| "loss": 0.577, |
| "step": 592 |
| }, |
| { |
| "epoch": 20.448275862068964, |
| "grad_norm": 4.955238342285156, |
| "learning_rate": 2.2186806108802248e-05, |
| "loss": 0.6418, |
| "step": 593 |
| }, |
| { |
| "epoch": 20.482758620689655, |
| "grad_norm": 5.53913688659668, |
| "learning_rate": 2.217300262158133e-05, |
| "loss": 0.5817, |
| "step": 594 |
| }, |
| { |
| "epoch": 20.517241379310345, |
| "grad_norm": 3.345353603363037, |
| "learning_rate": 2.215916966871689e-05, |
| "loss": 0.5873, |
| "step": 595 |
| }, |
| { |
| "epoch": 20.551724137931036, |
| "grad_norm": 3.257923126220703, |
| "learning_rate": 2.2145307292346502e-05, |
| "loss": 0.5968, |
| "step": 596 |
| }, |
| { |
| "epoch": 20.586206896551722, |
| "grad_norm": 3.00583553314209, |
| "learning_rate": 2.213141553469737e-05, |
| "loss": 0.5637, |
| "step": 597 |
| }, |
| { |
| "epoch": 20.620689655172413, |
| "grad_norm": 3.975980043411255, |
| "learning_rate": 2.211749443808619e-05, |
| "loss": 0.6064, |
| "step": 598 |
| }, |
| { |
| "epoch": 20.655172413793103, |
| "grad_norm": 4.438958644866943, |
| "learning_rate": 2.2103544044919045e-05, |
| "loss": 0.6115, |
| "step": 599 |
| }, |
| { |
| "epoch": 20.689655172413794, |
| "grad_norm": 5.422476768493652, |
| "learning_rate": 2.208956439769125e-05, |
| "loss": 0.6534, |
| "step": 600 |
| }, |
| { |
| "epoch": 20.724137931034484, |
| "grad_norm": 2.1388237476348877, |
| "learning_rate": 2.2075555538987227e-05, |
| "loss": 0.5648, |
| "step": 601 |
| }, |
| { |
| "epoch": 20.75862068965517, |
| "grad_norm": 3.457695245742798, |
| "learning_rate": 2.20615175114804e-05, |
| "loss": 0.53, |
| "step": 602 |
| }, |
| { |
| "epoch": 20.79310344827586, |
| "grad_norm": 4.380722999572754, |
| "learning_rate": 2.2047450357933032e-05, |
| "loss": 0.6238, |
| "step": 603 |
| }, |
| { |
| "epoch": 20.82758620689655, |
| "grad_norm": 3.4675967693328857, |
| "learning_rate": 2.2033354121196102e-05, |
| "loss": 0.594, |
| "step": 604 |
| }, |
| { |
| "epoch": 20.862068965517242, |
| "grad_norm": 6.335079669952393, |
| "learning_rate": 2.201922884420921e-05, |
| "loss": 0.5905, |
| "step": 605 |
| }, |
| { |
| "epoch": 20.896551724137932, |
| "grad_norm": 4.242478370666504, |
| "learning_rate": 2.200507457000039e-05, |
| "loss": 0.5376, |
| "step": 606 |
| }, |
| { |
| "epoch": 20.93103448275862, |
| "grad_norm": 2.4837803840637207, |
| "learning_rate": 2.1990891341686008e-05, |
| "loss": 0.5069, |
| "step": 607 |
| }, |
| { |
| "epoch": 20.96551724137931, |
| "grad_norm": 2.1897082328796387, |
| "learning_rate": 2.1976679202470654e-05, |
| "loss": 0.5472, |
| "step": 608 |
| }, |
| { |
| "epoch": 21.0, |
| "grad_norm": 2.4400973320007324, |
| "learning_rate": 2.1962438195646958e-05, |
| "loss": 0.547, |
| "step": 609 |
| }, |
| { |
| "epoch": 21.03448275862069, |
| "grad_norm": 4.650160789489746, |
| "learning_rate": 2.1948168364595497e-05, |
| "loss": 0.6903, |
| "step": 610 |
| }, |
| { |
| "epoch": 21.06896551724138, |
| "grad_norm": 3.0969042778015137, |
| "learning_rate": 2.1933869752784654e-05, |
| "loss": 0.5143, |
| "step": 611 |
| }, |
| { |
| "epoch": 21.103448275862068, |
| "grad_norm": 4.73034143447876, |
| "learning_rate": 2.1919542403770476e-05, |
| "loss": 0.6413, |
| "step": 612 |
| }, |
| { |
| "epoch": 21.137931034482758, |
| "grad_norm": 2.4023869037628174, |
| "learning_rate": 2.1905186361196556e-05, |
| "loss": 0.5607, |
| "step": 613 |
| }, |
| { |
| "epoch": 21.17241379310345, |
| "grad_norm": 3.85475754737854, |
| "learning_rate": 2.189080166879389e-05, |
| "loss": 0.6169, |
| "step": 614 |
| }, |
| { |
| "epoch": 21.20689655172414, |
| "grad_norm": 2.8691437244415283, |
| "learning_rate": 2.1876388370380745e-05, |
| "loss": 0.4884, |
| "step": 615 |
| }, |
| { |
| "epoch": 21.24137931034483, |
| "grad_norm": 2.3422751426696777, |
| "learning_rate": 2.186194650986253e-05, |
| "loss": 0.5269, |
| "step": 616 |
| }, |
| { |
| "epoch": 21.275862068965516, |
| "grad_norm": 3.4676578044891357, |
| "learning_rate": 2.184747613123165e-05, |
| "loss": 0.5517, |
| "step": 617 |
| }, |
| { |
| "epoch": 21.310344827586206, |
| "grad_norm": 3.613835573196411, |
| "learning_rate": 2.1832977278567394e-05, |
| "loss": 0.5919, |
| "step": 618 |
| }, |
| { |
| "epoch": 21.344827586206897, |
| "grad_norm": 3.2445290088653564, |
| "learning_rate": 2.181844999603578e-05, |
| "loss": 0.5624, |
| "step": 619 |
| }, |
| { |
| "epoch": 21.379310344827587, |
| "grad_norm": 3.5118792057037354, |
| "learning_rate": 2.1803894327889425e-05, |
| "loss": 0.587, |
| "step": 620 |
| }, |
| { |
| "epoch": 21.413793103448278, |
| "grad_norm": 5.919454574584961, |
| "learning_rate": 2.178931031846743e-05, |
| "loss": 0.6432, |
| "step": 621 |
| }, |
| { |
| "epoch": 21.448275862068964, |
| "grad_norm": 3.322352409362793, |
| "learning_rate": 2.1774698012195206e-05, |
| "loss": 0.6003, |
| "step": 622 |
| }, |
| { |
| "epoch": 21.482758620689655, |
| "grad_norm": 6.281351566314697, |
| "learning_rate": 2.1760057453584376e-05, |
| "loss": 0.5805, |
| "step": 623 |
| }, |
| { |
| "epoch": 21.517241379310345, |
| "grad_norm": 5.132048606872559, |
| "learning_rate": 2.1745388687232624e-05, |
| "loss": 0.5572, |
| "step": 624 |
| }, |
| { |
| "epoch": 21.551724137931036, |
| "grad_norm": 2.332777976989746, |
| "learning_rate": 2.1730691757823553e-05, |
| "loss": 0.5633, |
| "step": 625 |
| }, |
| { |
| "epoch": 21.586206896551722, |
| "grad_norm": 3.328850030899048, |
| "learning_rate": 2.171596671012655e-05, |
| "loss": 0.631, |
| "step": 626 |
| }, |
| { |
| "epoch": 21.620689655172413, |
| "grad_norm": 2.524472713470459, |
| "learning_rate": 2.1701213588996683e-05, |
| "loss": 0.5458, |
| "step": 627 |
| }, |
| { |
| "epoch": 21.655172413793103, |
| "grad_norm": 2.7530570030212402, |
| "learning_rate": 2.16864324393745e-05, |
| "loss": 0.5397, |
| "step": 628 |
| }, |
| { |
| "epoch": 21.689655172413794, |
| "grad_norm": 4.604249954223633, |
| "learning_rate": 2.1671623306285956e-05, |
| "loss": 0.5842, |
| "step": 629 |
| }, |
| { |
| "epoch": 21.724137931034484, |
| "grad_norm": 2.306530714035034, |
| "learning_rate": 2.1656786234842237e-05, |
| "loss": 0.5489, |
| "step": 630 |
| }, |
| { |
| "epoch": 21.75862068965517, |
| "grad_norm": 3.085984230041504, |
| "learning_rate": 2.1641921270239632e-05, |
| "loss": 0.715, |
| "step": 631 |
| }, |
| { |
| "epoch": 21.79310344827586, |
| "grad_norm": 3.6664793491363525, |
| "learning_rate": 2.1627028457759408e-05, |
| "loss": 0.5976, |
| "step": 632 |
| }, |
| { |
| "epoch": 21.82758620689655, |
| "grad_norm": 3.8308627605438232, |
| "learning_rate": 2.1612107842767647e-05, |
| "loss": 0.5413, |
| "step": 633 |
| }, |
| { |
| "epoch": 21.862068965517242, |
| "grad_norm": 3.766814947128296, |
| "learning_rate": 2.1597159470715133e-05, |
| "loss": 0.5849, |
| "step": 634 |
| }, |
| { |
| "epoch": 21.896551724137932, |
| "grad_norm": 4.085217475891113, |
| "learning_rate": 2.15821833871372e-05, |
| "loss": 0.6535, |
| "step": 635 |
| }, |
| { |
| "epoch": 21.93103448275862, |
| "grad_norm": 3.2848877906799316, |
| "learning_rate": 2.1567179637653594e-05, |
| "loss": 0.6572, |
| "step": 636 |
| }, |
| { |
| "epoch": 21.96551724137931, |
| "grad_norm": 4.021419048309326, |
| "learning_rate": 2.1552148267968347e-05, |
| "loss": 0.5827, |
| "step": 637 |
| }, |
| { |
| "epoch": 22.0, |
| "grad_norm": 3.385075569152832, |
| "learning_rate": 2.1537089323869604e-05, |
| "loss": 0.575, |
| "step": 638 |
| }, |
| { |
| "epoch": 22.03448275862069, |
| "grad_norm": 3.6210548877716064, |
| "learning_rate": 2.152200285122953e-05, |
| "loss": 0.5036, |
| "step": 639 |
| }, |
| { |
| "epoch": 22.06896551724138, |
| "grad_norm": 4.066530704498291, |
| "learning_rate": 2.1506888896004133e-05, |
| "loss": 0.5167, |
| "step": 640 |
| }, |
| { |
| "epoch": 22.103448275862068, |
| "grad_norm": 2.326347589492798, |
| "learning_rate": 2.1491747504233138e-05, |
| "loss": 0.5364, |
| "step": 641 |
| }, |
| { |
| "epoch": 22.137931034482758, |
| "grad_norm": 3.226869583129883, |
| "learning_rate": 2.147657872203986e-05, |
| "loss": 0.5291, |
| "step": 642 |
| }, |
| { |
| "epoch": 22.17241379310345, |
| "grad_norm": 2.8205795288085938, |
| "learning_rate": 2.1461382595631036e-05, |
| "loss": 0.5034, |
| "step": 643 |
| }, |
| { |
| "epoch": 22.20689655172414, |
| "grad_norm": 5.307386875152588, |
| "learning_rate": 2.14461591712967e-05, |
| "loss": 0.6833, |
| "step": 644 |
| }, |
| { |
| "epoch": 22.24137931034483, |
| "grad_norm": 3.1979541778564453, |
| "learning_rate": 2.1430908495410042e-05, |
| "loss": 0.5307, |
| "step": 645 |
| }, |
| { |
| "epoch": 22.275862068965516, |
| "grad_norm": 3.491663694381714, |
| "learning_rate": 2.1415630614427272e-05, |
| "loss": 0.5779, |
| "step": 646 |
| }, |
| { |
| "epoch": 22.310344827586206, |
| "grad_norm": 2.6055986881256104, |
| "learning_rate": 2.140032557488746e-05, |
| "loss": 0.5429, |
| "step": 647 |
| }, |
| { |
| "epoch": 22.344827586206897, |
| "grad_norm": 1.5949605703353882, |
| "learning_rate": 2.1384993423412407e-05, |
| "loss": 0.5109, |
| "step": 648 |
| }, |
| { |
| "epoch": 22.379310344827587, |
| "grad_norm": 3.324326515197754, |
| "learning_rate": 2.136963420670651e-05, |
| "loss": 0.5277, |
| "step": 649 |
| }, |
| { |
| "epoch": 22.413793103448278, |
| "grad_norm": 3.71690034866333, |
| "learning_rate": 2.135424797155661e-05, |
| "loss": 0.5901, |
| "step": 650 |
| }, |
| { |
| "epoch": 22.448275862068964, |
| "grad_norm": 3.789421319961548, |
| "learning_rate": 2.1338834764831845e-05, |
| "loss": 0.5424, |
| "step": 651 |
| }, |
| { |
| "epoch": 22.482758620689655, |
| "grad_norm": 3.0680930614471436, |
| "learning_rate": 2.1323394633483514e-05, |
| "loss": 0.6194, |
| "step": 652 |
| }, |
| { |
| "epoch": 22.517241379310345, |
| "grad_norm": 2.454967975616455, |
| "learning_rate": 2.1307927624544934e-05, |
| "loss": 0.5856, |
| "step": 653 |
| }, |
| { |
| "epoch": 22.551724137931036, |
| "grad_norm": 4.197128772735596, |
| "learning_rate": 2.1292433785131298e-05, |
| "loss": 0.6164, |
| "step": 654 |
| }, |
| { |
| "epoch": 22.586206896551722, |
| "grad_norm": 4.163417339324951, |
| "learning_rate": 2.1276913162439532e-05, |
| "loss": 0.5499, |
| "step": 655 |
| }, |
| { |
| "epoch": 22.620689655172413, |
| "grad_norm": 3.207831621170044, |
| "learning_rate": 2.1261365803748138e-05, |
| "loss": 0.5379, |
| "step": 656 |
| }, |
| { |
| "epoch": 22.655172413793103, |
| "grad_norm": 2.1474342346191406, |
| "learning_rate": 2.124579175641707e-05, |
| "loss": 0.5198, |
| "step": 657 |
| }, |
| { |
| "epoch": 22.689655172413794, |
| "grad_norm": 2.0802290439605713, |
| "learning_rate": 2.1230191067887574e-05, |
| "loss": 0.5988, |
| "step": 658 |
| }, |
| { |
| "epoch": 22.724137931034484, |
| "grad_norm": 2.383214235305786, |
| "learning_rate": 2.121456378568206e-05, |
| "loss": 0.5644, |
| "step": 659 |
| }, |
| { |
| "epoch": 22.75862068965517, |
| "grad_norm": 3.3427810668945312, |
| "learning_rate": 2.1198909957403928e-05, |
| "loss": 0.5382, |
| "step": 660 |
| }, |
| { |
| "epoch": 22.79310344827586, |
| "grad_norm": 2.118605136871338, |
| "learning_rate": 2.1183229630737467e-05, |
| "loss": 0.5364, |
| "step": 661 |
| }, |
| { |
| "epoch": 22.82758620689655, |
| "grad_norm": 2.1082427501678467, |
| "learning_rate": 2.1167522853447664e-05, |
| "loss": 0.5631, |
| "step": 662 |
| }, |
| { |
| "epoch": 22.862068965517242, |
| "grad_norm": 2.0310721397399902, |
| "learning_rate": 2.1151789673380086e-05, |
| "loss": 0.583, |
| "step": 663 |
| }, |
| { |
| "epoch": 22.896551724137932, |
| "grad_norm": 3.352940082550049, |
| "learning_rate": 2.113603013846073e-05, |
| "loss": 0.5694, |
| "step": 664 |
| }, |
| { |
| "epoch": 22.93103448275862, |
| "grad_norm": 2.5591657161712646, |
| "learning_rate": 2.1120244296695874e-05, |
| "loss": 0.5605, |
| "step": 665 |
| }, |
| { |
| "epoch": 22.96551724137931, |
| "grad_norm": 3.0828518867492676, |
| "learning_rate": 2.1104432196171924e-05, |
| "loss": 0.5105, |
| "step": 666 |
| }, |
| { |
| "epoch": 23.0, |
| "grad_norm": 4.050001621246338, |
| "learning_rate": 2.1088593885055288e-05, |
| "loss": 0.5483, |
| "step": 667 |
| }, |
| { |
| "epoch": 23.03448275862069, |
| "grad_norm": 3.4078168869018555, |
| "learning_rate": 2.1072729411592206e-05, |
| "loss": 0.5672, |
| "step": 668 |
| }, |
| { |
| "epoch": 23.06896551724138, |
| "grad_norm": 4.16298246383667, |
| "learning_rate": 2.105683882410861e-05, |
| "loss": 0.6305, |
| "step": 669 |
| }, |
| { |
| "epoch": 23.103448275862068, |
| "grad_norm": 4.402109146118164, |
| "learning_rate": 2.1040922171009993e-05, |
| "loss": 0.6278, |
| "step": 670 |
| }, |
| { |
| "epoch": 23.137931034482758, |
| "grad_norm": 2.614933729171753, |
| "learning_rate": 2.1024979500781232e-05, |
| "loss": 0.5573, |
| "step": 671 |
| }, |
| { |
| "epoch": 23.17241379310345, |
| "grad_norm": 5.566406726837158, |
| "learning_rate": 2.1009010861986476e-05, |
| "loss": 0.5987, |
| "step": 672 |
| }, |
| { |
| "epoch": 23.20689655172414, |
| "grad_norm": 3.687830686569214, |
| "learning_rate": 2.099301630326896e-05, |
| "loss": 0.5332, |
| "step": 673 |
| }, |
| { |
| "epoch": 23.24137931034483, |
| "grad_norm": 1.985662579536438, |
| "learning_rate": 2.0976995873350887e-05, |
| "loss": 0.5032, |
| "step": 674 |
| }, |
| { |
| "epoch": 23.275862068965516, |
| "grad_norm": 3.1905019283294678, |
| "learning_rate": 2.096094962103326e-05, |
| "loss": 0.5556, |
| "step": 675 |
| }, |
| { |
| "epoch": 23.310344827586206, |
| "grad_norm": 2.235889434814453, |
| "learning_rate": 2.0944877595195755e-05, |
| "loss": 0.5636, |
| "step": 676 |
| }, |
| { |
| "epoch": 23.344827586206897, |
| "grad_norm": 4.098913669586182, |
| "learning_rate": 2.092877984479654e-05, |
| "loss": 0.5974, |
| "step": 677 |
| }, |
| { |
| "epoch": 23.379310344827587, |
| "grad_norm": 5.365559101104736, |
| "learning_rate": 2.091265641887217e-05, |
| "loss": 0.5036, |
| "step": 678 |
| }, |
| { |
| "epoch": 23.413793103448278, |
| "grad_norm": 3.486875534057617, |
| "learning_rate": 2.089650736653738e-05, |
| "loss": 0.5835, |
| "step": 679 |
| }, |
| { |
| "epoch": 23.448275862068964, |
| "grad_norm": 6.258121967315674, |
| "learning_rate": 2.088033273698499e-05, |
| "loss": 0.6289, |
| "step": 680 |
| }, |
| { |
| "epoch": 23.482758620689655, |
| "grad_norm": 3.1548774242401123, |
| "learning_rate": 2.086413257948573e-05, |
| "loss": 0.4888, |
| "step": 681 |
| }, |
| { |
| "epoch": 23.517241379310345, |
| "grad_norm": 2.209894895553589, |
| "learning_rate": 2.0847906943388085e-05, |
| "loss": 0.4746, |
| "step": 682 |
| }, |
| { |
| "epoch": 23.551724137931036, |
| "grad_norm": 2.9352316856384277, |
| "learning_rate": 2.0831655878118155e-05, |
| "loss": 0.525, |
| "step": 683 |
| }, |
| { |
| "epoch": 23.586206896551722, |
| "grad_norm": 2.3592867851257324, |
| "learning_rate": 2.081537943317951e-05, |
| "loss": 0.4875, |
| "step": 684 |
| }, |
| { |
| "epoch": 23.620689655172413, |
| "grad_norm": 3.657504081726074, |
| "learning_rate": 2.0799077658153022e-05, |
| "loss": 0.554, |
| "step": 685 |
| }, |
| { |
| "epoch": 23.655172413793103, |
| "grad_norm": 6.461456298828125, |
| "learning_rate": 2.0782750602696722e-05, |
| "loss": 0.5205, |
| "step": 686 |
| }, |
| { |
| "epoch": 23.689655172413794, |
| "grad_norm": 4.881630897521973, |
| "learning_rate": 2.0766398316545648e-05, |
| "loss": 0.5587, |
| "step": 687 |
| }, |
| { |
| "epoch": 23.724137931034484, |
| "grad_norm": 2.7277519702911377, |
| "learning_rate": 2.0750020849511712e-05, |
| "loss": 0.546, |
| "step": 688 |
| }, |
| { |
| "epoch": 23.75862068965517, |
| "grad_norm": 4.6229071617126465, |
| "learning_rate": 2.0733618251483506e-05, |
| "loss": 0.5404, |
| "step": 689 |
| }, |
| { |
| "epoch": 23.79310344827586, |
| "grad_norm": 5.046594619750977, |
| "learning_rate": 2.07171905724262e-05, |
| "loss": 0.59, |
| "step": 690 |
| }, |
| { |
| "epoch": 23.82758620689655, |
| "grad_norm": 4.2928853034973145, |
| "learning_rate": 2.070073786238134e-05, |
| "loss": 0.5288, |
| "step": 691 |
| }, |
| { |
| "epoch": 23.862068965517242, |
| "grad_norm": 6.604857444763184, |
| "learning_rate": 2.0684260171466745e-05, |
| "loss": 0.6729, |
| "step": 692 |
| }, |
| { |
| "epoch": 23.896551724137932, |
| "grad_norm": 7.314076900482178, |
| "learning_rate": 2.066775754987632e-05, |
| "loss": 0.6542, |
| "step": 693 |
| }, |
| { |
| "epoch": 23.93103448275862, |
| "grad_norm": 3.7828738689422607, |
| "learning_rate": 2.0651230047879905e-05, |
| "loss": 0.6574, |
| "step": 694 |
| }, |
| { |
| "epoch": 23.96551724137931, |
| "grad_norm": 3.6089084148406982, |
| "learning_rate": 2.0634677715823137e-05, |
| "loss": 0.6718, |
| "step": 695 |
| }, |
| { |
| "epoch": 24.0, |
| "grad_norm": 3.206212043762207, |
| "learning_rate": 2.0618100604127295e-05, |
| "loss": 0.6149, |
| "step": 696 |
| }, |
| { |
| "epoch": 24.03448275862069, |
| "grad_norm": 2.9614017009735107, |
| "learning_rate": 2.0601498763289138e-05, |
| "loss": 0.6372, |
| "step": 697 |
| }, |
| { |
| "epoch": 24.06896551724138, |
| "grad_norm": 3.116159439086914, |
| "learning_rate": 2.058487224388075e-05, |
| "loss": 0.6268, |
| "step": 698 |
| }, |
| { |
| "epoch": 24.103448275862068, |
| "grad_norm": 3.259319543838501, |
| "learning_rate": 2.0568221096549384e-05, |
| "loss": 0.5631, |
| "step": 699 |
| }, |
| { |
| "epoch": 24.137931034482758, |
| "grad_norm": 2.3190183639526367, |
| "learning_rate": 2.0551545372017332e-05, |
| "loss": 0.5782, |
| "step": 700 |
| } |
| ], |
| "logging_steps": 1, |
| "max_steps": 2000, |
| "num_input_tokens_seen": 0, |
| "num_train_epochs": 69, |
| "save_steps": 200, |
| "stateful_callbacks": { |
| "TrainerControl": { |
| "args": { |
| "should_epoch_stop": false, |
| "should_evaluate": false, |
| "should_log": false, |
| "should_save": false, |
| "should_training_stop": false |
| }, |
| "attributes": {} |
| } |
| }, |
| "total_flos": 0.0, |
| "train_batch_size": 64, |
| "trial_name": null, |
| "trial_params": null |
| } |
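
The object above follows the layout written by the Hugging Face `Trainer` (`trainer_state.json`): top-level run metadata plus a `log_history` list whose entries carry `step`, `epoch`, `loss`, `learning_rate`, and `grad_norm`. Below is a minimal sketch, assuming a standard `trainer_state.json` on disk (the file path is hypothetical), showing one way to load the state with the standard library and plot the logged loss curve and learning-rate schedule; it is an illustration, not part of the checkpoint itself.

```python
# Sketch (assumption): parse a Hugging Face trainer_state.json and plot
# the training loss and learning-rate schedule over steps.
import json

import matplotlib.pyplot as plt

# Hypothetical path; point this at the checkpoint's trainer_state.json.
with open("trainer_state.json") as f:
    state = json.load(f)

# Keep only entries that logged a training loss (evaluation entries may differ).
logs = [entry for entry in state["log_history"] if "loss" in entry]
steps = [entry["step"] for entry in logs]
losses = [entry["loss"] for entry in logs]
lrs = [entry["learning_rate"] for entry in logs]

fig, (ax_loss, ax_lr) = plt.subplots(1, 2, figsize=(10, 4))
ax_loss.plot(steps, losses)
ax_loss.set_xlabel("step")
ax_loss.set_ylabel("training loss")
ax_lr.plot(steps, lrs)
ax_lr.set_xlabel("step")
ax_lr.set_ylabel("learning rate")
fig.tight_layout()
plt.show()
```

With the values logged here, such a plot would show the loss falling from roughly 0.8 toward 0.5-0.6 between steps 339 and 700 while the cosine-like learning-rate schedule decays slowly from about 2.46e-5 to 2.06e-5.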