{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.0,
  "eval_steps": 500,
  "global_step": 1052,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0019011406844106464,
      "grad_norm": 3.543563120944334,
      "learning_rate": 1.886792452830189e-07,
      "loss": 0.7171,
      "step": 1
    },
    {
      "epoch": 0.0038022813688212928,
      "grad_norm": 3.134528805708614,
      "learning_rate": 3.773584905660378e-07,
      "loss": 0.7077,
      "step": 2
    },
    {
      "epoch": 0.005703422053231939,
      "grad_norm": 3.3108474708387035,
      "learning_rate": 5.660377358490567e-07,
      "loss": 0.7009,
      "step": 3
    },
    {
      "epoch": 0.0076045627376425855,
      "grad_norm": 2.970033559560159,
      "learning_rate": 7.547169811320755e-07,
      "loss": 0.6726,
      "step": 4
    },
    {
      "epoch": 0.009505703422053232,
      "grad_norm": 3.294594738976327,
      "learning_rate": 9.433962264150944e-07,
      "loss": 0.7326,
      "step": 5
    },
    {
      "epoch": 0.011406844106463879,
      "grad_norm": 2.8820850963946385,
      "learning_rate": 1.1320754716981133e-06,
      "loss": 0.6952,
      "step": 6
    },
    {
      "epoch": 0.013307984790874524,
      "grad_norm": 3.11216614322685,
      "learning_rate": 1.3207547169811322e-06,
      "loss": 0.6842,
      "step": 7
    },
    {
      "epoch": 0.015209125475285171,
      "grad_norm": 2.8413364359091298,
      "learning_rate": 1.509433962264151e-06,
      "loss": 0.701,
      "step": 8
    },
    {
      "epoch": 0.017110266159695818,
      "grad_norm": 2.500072275053778,
      "learning_rate": 1.6981132075471698e-06,
      "loss": 0.6894,
      "step": 9
    },
    {
      "epoch": 0.019011406844106463,
      "grad_norm": 2.2440297191575618,
      "learning_rate": 1.8867924528301889e-06,
      "loss": 0.6889,
      "step": 10
    },
    {
      "epoch": 0.02091254752851711,
      "grad_norm": 1.5628470346670023,
      "learning_rate": 2.075471698113208e-06,
      "loss": 0.6417,
      "step": 11
    },
    {
      "epoch": 0.022813688212927757,
      "grad_norm": 1.650126576984669,
      "learning_rate": 2.2641509433962266e-06,
      "loss": 0.6228,
      "step": 12
    },
    {
      "epoch": 0.024714828897338403,
      "grad_norm": 1.5261236931507487,
      "learning_rate": 2.4528301886792453e-06,
      "loss": 0.5947,
      "step": 13
    },
    {
      "epoch": 0.026615969581749048,
      "grad_norm": 1.4501322657772775,
      "learning_rate": 2.6415094339622644e-06,
      "loss": 0.5985,
      "step": 14
    },
    {
      "epoch": 0.028517110266159697,
      "grad_norm": 1.9236590118956196,
      "learning_rate": 2.830188679245283e-06,
      "loss": 0.6004,
      "step": 15
    },
    {
      "epoch": 0.030418250950570342,
      "grad_norm": 2.377022025079658,
      "learning_rate": 3.018867924528302e-06,
      "loss": 0.5962,
      "step": 16
    },
    {
      "epoch": 0.03231939163498099,
      "grad_norm": 2.4182325229756896,
      "learning_rate": 3.207547169811321e-06,
      "loss": 0.5983,
      "step": 17
    },
    {
      "epoch": 0.034220532319391636,
      "grad_norm": 1.7564315589146346,
      "learning_rate": 3.3962264150943395e-06,
      "loss": 0.5933,
      "step": 18
    },
    {
      "epoch": 0.03612167300380228,
      "grad_norm": 1.5034120850992967,
      "learning_rate": 3.5849056603773586e-06,
      "loss": 0.5595,
      "step": 19
    },
    {
      "epoch": 0.03802281368821293,
      "grad_norm": 1.1443663493325327,
      "learning_rate": 3.7735849056603777e-06,
      "loss": 0.5655,
      "step": 20
    },
    {
      "epoch": 0.039923954372623575,
      "grad_norm": 1.306368639785732,
      "learning_rate": 3.962264150943396e-06,
      "loss": 0.5582,
      "step": 21
    },
    {
      "epoch": 0.04182509505703422,
      "grad_norm": 1.3612829543860225,
      "learning_rate": 4.150943396226416e-06,
      "loss": 0.596,
      "step": 22
    },
    {
      "epoch": 0.043726235741444866,
      "grad_norm": 1.3696510769631531,
      "learning_rate": 4.339622641509435e-06,
      "loss": 0.5652,
      "step": 23
    },
    {
      "epoch": 0.045627376425855515,
      "grad_norm": 1.1182366862548874,
      "learning_rate": 4.528301886792453e-06,
      "loss": 0.5654,
      "step": 24
    },
    {
      "epoch": 0.04752851711026616,
      "grad_norm": 1.0531881495744095,
      "learning_rate": 4.716981132075472e-06,
      "loss": 0.5514,
      "step": 25
    },
    {
      "epoch": 0.049429657794676805,
      "grad_norm": 1.078120653698867,
      "learning_rate": 4.905660377358491e-06,
      "loss": 0.5684,
      "step": 26
    },
    {
      "epoch": 0.051330798479087454,
      "grad_norm": 1.092617539125252,
      "learning_rate": 5.09433962264151e-06,
      "loss": 0.5321,
      "step": 27
    },
    {
      "epoch": 0.053231939163498096,
      "grad_norm": 1.0008404757563176,
      "learning_rate": 5.283018867924529e-06,
      "loss": 0.5336,
      "step": 28
    },
    {
      "epoch": 0.055133079847908745,
      "grad_norm": 1.065077496303526,
      "learning_rate": 5.4716981132075475e-06,
      "loss": 0.5252,
      "step": 29
    },
    {
      "epoch": 0.057034220532319393,
      "grad_norm": 0.9683072765449663,
      "learning_rate": 5.660377358490566e-06,
      "loss": 0.523,
      "step": 30
    },
    {
      "epoch": 0.058935361216730035,
      "grad_norm": 0.9737151907831615,
      "learning_rate": 5.849056603773585e-06,
      "loss": 0.5174,
      "step": 31
    },
    {
      "epoch": 0.060836501901140684,
      "grad_norm": 0.8595625736166826,
      "learning_rate": 6.037735849056604e-06,
      "loss": 0.4918,
      "step": 32
    },
    {
      "epoch": 0.06273764258555133,
      "grad_norm": 0.9531666162090434,
      "learning_rate": 6.226415094339623e-06,
      "loss": 0.5305,
      "step": 33
    },
    {
      "epoch": 0.06463878326996197,
      "grad_norm": 0.9599390305140895,
      "learning_rate": 6.415094339622642e-06,
      "loss": 0.5229,
      "step": 34
    },
    {
      "epoch": 0.06653992395437262,
      "grad_norm": 0.9678893644633788,
      "learning_rate": 6.60377358490566e-06,
      "loss": 0.5146,
      "step": 35
    },
    {
      "epoch": 0.06844106463878327,
      "grad_norm": 0.8794433545132875,
      "learning_rate": 6.792452830188679e-06,
      "loss": 0.5193,
      "step": 36
    },
    {
      "epoch": 0.07034220532319392,
      "grad_norm": 0.8246687040064605,
      "learning_rate": 6.981132075471699e-06,
      "loss": 0.4622,
      "step": 37
    },
    {
      "epoch": 0.07224334600760456,
      "grad_norm": 0.9916926582647634,
      "learning_rate": 7.169811320754717e-06,
      "loss": 0.5432,
      "step": 38
    },
    {
      "epoch": 0.0741444866920152,
      "grad_norm": 0.9183891509494178,
      "learning_rate": 7.358490566037736e-06,
      "loss": 0.541,
      "step": 39
    },
    {
      "epoch": 0.07604562737642585,
      "grad_norm": 0.9248266966777409,
      "learning_rate": 7.5471698113207555e-06,
      "loss": 0.4915,
      "step": 40
    },
    {
      "epoch": 0.0779467680608365,
      "grad_norm": 0.8615958555117602,
      "learning_rate": 7.735849056603775e-06,
      "loss": 0.4952,
      "step": 41
    },
    {
      "epoch": 0.07984790874524715,
      "grad_norm": 0.783476643264497,
      "learning_rate": 7.924528301886793e-06,
      "loss": 0.4852,
      "step": 42
    },
    {
      "epoch": 0.0817490494296578,
      "grad_norm": 1.0208336285932929,
      "learning_rate": 8.113207547169812e-06,
      "loss": 0.4829,
      "step": 43
    },
    {
      "epoch": 0.08365019011406843,
      "grad_norm": 0.8971167261659345,
      "learning_rate": 8.301886792452832e-06,
      "loss": 0.4609,
      "step": 44
    },
    {
      "epoch": 0.08555133079847908,
      "grad_norm": 0.8431239513709753,
      "learning_rate": 8.49056603773585e-06,
      "loss": 0.5068,
      "step": 45
    },
    {
      "epoch": 0.08745247148288973,
      "grad_norm": 0.890838996293819,
      "learning_rate": 8.67924528301887e-06,
      "loss": 0.4845,
      "step": 46
    },
    {
      "epoch": 0.08935361216730038,
      "grad_norm": 0.9263034110977246,
      "learning_rate": 8.867924528301887e-06,
      "loss": 0.5025,
      "step": 47
    },
    {
      "epoch": 0.09125475285171103,
      "grad_norm": 0.9938229828364126,
      "learning_rate": 9.056603773584907e-06,
      "loss": 0.4808,
      "step": 48
    },
    {
      "epoch": 0.09315589353612168,
      "grad_norm": 0.935880419411291,
      "learning_rate": 9.245283018867926e-06,
      "loss": 0.4871,
      "step": 49
    },
    {
      "epoch": 0.09505703422053231,
      "grad_norm": 0.790988585485088,
      "learning_rate": 9.433962264150944e-06,
      "loss": 0.4923,
      "step": 50
    },
    {
      "epoch": 0.09695817490494296,
      "grad_norm": 0.8056922969337292,
      "learning_rate": 9.622641509433963e-06,
      "loss": 0.4746,
      "step": 51
    },
    {
      "epoch": 0.09885931558935361,
      "grad_norm": 0.8910788436372384,
      "learning_rate": 9.811320754716981e-06,
      "loss": 0.4965,
      "step": 52
    },
    {
      "epoch": 0.10076045627376426,
      "grad_norm": 0.8093275547565477,
      "learning_rate": 1e-05,
      "loss": 0.4735,
      "step": 53
    },
    {
      "epoch": 0.10266159695817491,
      "grad_norm": 0.8305126433449763,
      "learning_rate": 1.018867924528302e-05,
      "loss": 0.4832,
      "step": 54
    },
    {
      "epoch": 0.10456273764258556,
      "grad_norm": 0.9018586424203268,
      "learning_rate": 1.0377358490566038e-05,
      "loss": 0.4861,
      "step": 55
    },
    {
      "epoch": 0.10646387832699619,
      "grad_norm": 0.8030227881166272,
      "learning_rate": 1.0566037735849058e-05,
      "loss": 0.4669,
      "step": 56
    },
    {
      "epoch": 0.10836501901140684,
      "grad_norm": 0.8688059999460508,
      "learning_rate": 1.0754716981132076e-05,
      "loss": 0.474,
      "step": 57
    },
    {
      "epoch": 0.11026615969581749,
      "grad_norm": 0.884082051502537,
      "learning_rate": 1.0943396226415095e-05,
      "loss": 0.4964,
      "step": 58
    },
    {
      "epoch": 0.11216730038022814,
      "grad_norm": 0.8603468446856929,
      "learning_rate": 1.1132075471698115e-05,
      "loss": 0.4911,
      "step": 59
    },
    {
      "epoch": 0.11406844106463879,
      "grad_norm": 0.9194854617964594,
      "learning_rate": 1.1320754716981132e-05,
      "loss": 0.4871,
      "step": 60
    },
    {
      "epoch": 0.11596958174904944,
      "grad_norm": 0.7479596062889228,
      "learning_rate": 1.1509433962264152e-05,
      "loss": 0.4724,
      "step": 61
    },
    {
      "epoch": 0.11787072243346007,
      "grad_norm": 0.9168272240022514,
      "learning_rate": 1.169811320754717e-05,
      "loss": 0.4846,
      "step": 62
    },
    {
      "epoch": 0.11977186311787072,
      "grad_norm": 0.7772705055695882,
      "learning_rate": 1.188679245283019e-05,
      "loss": 0.4675,
      "step": 63
    },
    {
      "epoch": 0.12167300380228137,
      "grad_norm": 0.8492075596943389,
      "learning_rate": 1.2075471698113209e-05,
      "loss": 0.5012,
      "step": 64
    },
    {
      "epoch": 0.12357414448669202,
      "grad_norm": 0.9187542228191987,
      "learning_rate": 1.2264150943396227e-05,
      "loss": 0.4783,
      "step": 65
    },
    {
      "epoch": 0.12547528517110265,
      "grad_norm": 0.8209561337987101,
      "learning_rate": 1.2452830188679246e-05,
      "loss": 0.4745,
      "step": 66
    },
    {
      "epoch": 0.12737642585551331,
      "grad_norm": 1.0331375378950258,
      "learning_rate": 1.2641509433962264e-05,
      "loss": 0.4576,
      "step": 67
    },
    {
      "epoch": 0.12927756653992395,
      "grad_norm": 0.8561415380736503,
      "learning_rate": 1.2830188679245283e-05,
      "loss": 0.4785,
      "step": 68
    },
    {
      "epoch": 0.1311787072243346,
      "grad_norm": 0.8721974164237442,
      "learning_rate": 1.3018867924528303e-05,
      "loss": 0.5083,
      "step": 69
    },
    {
      "epoch": 0.13307984790874525,
      "grad_norm": 0.8582491398721172,
      "learning_rate": 1.320754716981132e-05,
      "loss": 0.4797,
      "step": 70
    },
    {
      "epoch": 0.13498098859315588,
      "grad_norm": 0.8129924390816186,
      "learning_rate": 1.339622641509434e-05,
      "loss": 0.4716,
      "step": 71
    },
    {
      "epoch": 0.13688212927756654,
      "grad_norm": 0.8787816298845147,
      "learning_rate": 1.3584905660377358e-05,
      "loss": 0.4738,
      "step": 72
    },
    {
      "epoch": 0.13878326996197718,
      "grad_norm": 0.7959271442057968,
      "learning_rate": 1.3773584905660378e-05,
      "loss": 0.4687,
      "step": 73
    },
    {
      "epoch": 0.14068441064638784,
      "grad_norm": 0.8183530147710967,
      "learning_rate": 1.3962264150943397e-05,
      "loss": 0.4519,
      "step": 74
    },
    {
      "epoch": 0.14258555133079848,
      "grad_norm": 0.8683474851744627,
      "learning_rate": 1.4150943396226415e-05,
      "loss": 0.4622,
      "step": 75
    },
    {
      "epoch": 0.1444866920152091,
      "grad_norm": 0.8132767053539134,
      "learning_rate": 1.4339622641509435e-05,
      "loss": 0.4915,
      "step": 76
    },
    {
      "epoch": 0.14638783269961977,
      "grad_norm": 0.8721003041022382,
      "learning_rate": 1.4528301886792452e-05,
      "loss": 0.489,
      "step": 77
    },
    {
      "epoch": 0.1482889733840304,
      "grad_norm": 0.809088533507313,
      "learning_rate": 1.4716981132075472e-05,
      "loss": 0.4589,
      "step": 78
    },
    {
      "epoch": 0.15019011406844107,
      "grad_norm": 0.9463693163722353,
      "learning_rate": 1.4905660377358491e-05,
      "loss": 0.4909,
      "step": 79
    },
    {
      "epoch": 0.1520912547528517,
      "grad_norm": 0.8401227508892435,
      "learning_rate": 1.5094339622641511e-05,
      "loss": 0.4819,
      "step": 80
    },
    {
      "epoch": 0.15399239543726237,
      "grad_norm": 0.7573558483847573,
      "learning_rate": 1.5283018867924532e-05,
      "loss": 0.4864,
      "step": 81
    },
    {
      "epoch": 0.155893536121673,
      "grad_norm": 0.8034297103712562,
      "learning_rate": 1.547169811320755e-05,
      "loss": 0.4582,
      "step": 82
    },
    {
      "epoch": 0.15779467680608364,
      "grad_norm": 0.8057219910471038,
      "learning_rate": 1.5660377358490568e-05,
      "loss": 0.4622,
      "step": 83
    },
    {
      "epoch": 0.1596958174904943,
      "grad_norm": 0.8604450785087412,
      "learning_rate": 1.5849056603773586e-05,
      "loss": 0.4789,
      "step": 84
    },
    {
      "epoch": 0.16159695817490494,
      "grad_norm": 0.8614298024985783,
      "learning_rate": 1.6037735849056607e-05,
      "loss": 0.4821,
      "step": 85
    },
    {
      "epoch": 0.1634980988593156,
      "grad_norm": 0.8423889880865927,
      "learning_rate": 1.6226415094339625e-05,
      "loss": 0.4614,
      "step": 86
    },
    {
      "epoch": 0.16539923954372623,
      "grad_norm": 0.8780094614420434,
      "learning_rate": 1.6415094339622643e-05,
      "loss": 0.4929,
      "step": 87
    },
    {
      "epoch": 0.16730038022813687,
      "grad_norm": 0.8932071406355626,
      "learning_rate": 1.6603773584905664e-05,
      "loss": 0.4944,
      "step": 88
    },
    {
      "epoch": 0.16920152091254753,
      "grad_norm": 0.8698894319506517,
      "learning_rate": 1.679245283018868e-05,
      "loss": 0.458,
      "step": 89
    },
    {
      "epoch": 0.17110266159695817,
      "grad_norm": 0.8273288647325024,
      "learning_rate": 1.69811320754717e-05,
      "loss": 0.4843,
      "step": 90
    },
    {
      "epoch": 0.17300380228136883,
      "grad_norm": 0.9672158226835698,
      "learning_rate": 1.716981132075472e-05,
      "loss": 0.4518,
      "step": 91
    },
    {
      "epoch": 0.17490494296577946,
      "grad_norm": 0.9382506322184585,
      "learning_rate": 1.735849056603774e-05,
      "loss": 0.4847,
      "step": 92
    },
    {
      "epoch": 0.17680608365019013,
      "grad_norm": 0.8259074240501604,
      "learning_rate": 1.7547169811320756e-05,
      "loss": 0.4878,
      "step": 93
    },
    {
      "epoch": 0.17870722433460076,
      "grad_norm": 0.8340686193863727,
      "learning_rate": 1.7735849056603774e-05,
      "loss": 0.4844,
      "step": 94
    },
    {
      "epoch": 0.1806083650190114,
      "grad_norm": 1.1276776728267681,
      "learning_rate": 1.7924528301886795e-05,
      "loss": 0.483,
      "step": 95
    },
    {
      "epoch": 0.18250950570342206,
      "grad_norm": 0.7976657349377618,
      "learning_rate": 1.8113207547169813e-05,
      "loss": 0.4777,
      "step": 96
    },
    {
      "epoch": 0.1844106463878327,
      "grad_norm": 1.016020065605956,
      "learning_rate": 1.830188679245283e-05,
      "loss": 0.4865,
      "step": 97
    },
    {
      "epoch": 0.18631178707224336,
      "grad_norm": 0.8219823478258979,
      "learning_rate": 1.8490566037735852e-05,
      "loss": 0.4403,
      "step": 98
    },
    {
      "epoch": 0.188212927756654,
      "grad_norm": 0.9383751237837756,
      "learning_rate": 1.867924528301887e-05,
      "loss": 0.4697,
      "step": 99
    },
    {
      "epoch": 0.19011406844106463,
      "grad_norm": 0.8124290211581414,
      "learning_rate": 1.8867924528301888e-05,
      "loss": 0.4775,
      "step": 100
    },
    {
      "epoch": 0.1920152091254753,
      "grad_norm": 0.9480657539493688,
      "learning_rate": 1.905660377358491e-05,
      "loss": 0.5037,
      "step": 101
    },
    {
      "epoch": 0.19391634980988592,
      "grad_norm": 0.7955337600533607,
      "learning_rate": 1.9245283018867927e-05,
      "loss": 0.4565,
      "step": 102
    },
    {
      "epoch": 0.1958174904942966,
      "grad_norm": 0.8195478328055906,
      "learning_rate": 1.9433962264150945e-05,
      "loss": 0.4916,
      "step": 103
    },
    {
      "epoch": 0.19771863117870722,
      "grad_norm": 0.8649203105927161,
      "learning_rate": 1.9622641509433963e-05,
      "loss": 0.4566,
      "step": 104
    },
    {
      "epoch": 0.19961977186311788,
      "grad_norm": 0.8244948162415039,
      "learning_rate": 1.9811320754716984e-05,
      "loss": 0.4757,
      "step": 105
    },
    {
      "epoch": 0.20152091254752852,
      "grad_norm": 0.8512459881080684,
      "learning_rate": 2e-05,
      "loss": 0.4594,
      "step": 106
    },
    {
      "epoch": 0.20342205323193915,
      "grad_norm": 0.8603030063772141,
      "learning_rate": 1.9999944857420527e-05,
      "loss": 0.4604,
      "step": 107
    },
    {
      "epoch": 0.20532319391634982,
      "grad_norm": 0.8101932090308214,
      "learning_rate": 1.9999779430290247e-05,
      "loss": 0.4696,
      "step": 108
    },
    {
      "epoch": 0.20722433460076045,
      "grad_norm": 0.8657670824598397,
      "learning_rate": 1.9999503720433575e-05,
      "loss": 0.4871,
      "step": 109
    },
    {
      "epoch": 0.20912547528517111,
      "grad_norm": 1.0564412721571157,
      "learning_rate": 1.999911773089118e-05,
      "loss": 0.4738,
      "step": 110
    },
    {
      "epoch": 0.21102661596958175,
      "grad_norm": 0.8111442279627397,
      "learning_rate": 1.999862146591996e-05,
      "loss": 0.4412,
      "step": 111
    },
    {
      "epoch": 0.21292775665399238,
      "grad_norm": 0.9023909926836202,
      "learning_rate": 1.9998014930992976e-05,
      "loss": 0.4544,
      "step": 112
    },
    {
      "epoch": 0.21482889733840305,
      "grad_norm": 0.8647242957227482,
      "learning_rate": 1.9997298132799408e-05,
      "loss": 0.5023,
      "step": 113
    },
    {
      "epoch": 0.21673003802281368,
      "grad_norm": 0.8693318115962565,
      "learning_rate": 1.9996471079244477e-05,
      "loss": 0.4802,
      "step": 114
    },
    {
      "epoch": 0.21863117870722434,
      "grad_norm": 0.8315685175837906,
      "learning_rate": 1.999553377944936e-05,
      "loss": 0.4649,
      "step": 115
    },
    {
      "epoch": 0.22053231939163498,
      "grad_norm": 0.8217337708650966,
      "learning_rate": 1.9994486243751076e-05,
      "loss": 0.4638,
      "step": 116
    },
    {
      "epoch": 0.2224334600760456,
      "grad_norm": 0.8215361072497253,
      "learning_rate": 1.9993328483702393e-05,
      "loss": 0.4735,
      "step": 117
    },
    {
      "epoch": 0.22433460076045628,
      "grad_norm": 0.9717326448849636,
      "learning_rate": 1.999206051207169e-05,
      "loss": 0.4755,
      "step": 118
    },
    {
      "epoch": 0.2262357414448669,
      "grad_norm": 0.8207467916929897,
      "learning_rate": 1.9990682342842805e-05,
      "loss": 0.4857,
      "step": 119
    },
    {
      "epoch": 0.22813688212927757,
      "grad_norm": 0.7765902199955256,
      "learning_rate": 1.99891939912149e-05,
      "loss": 0.4503,
      "step": 120
    },
    {
      "epoch": 0.2300380228136882,
      "grad_norm": 0.9146208651198684,
      "learning_rate": 1.9987595473602292e-05,
      "loss": 0.4806,
      "step": 121
    },
    {
      "epoch": 0.23193916349809887,
      "grad_norm": 0.8945453784425959,
      "learning_rate": 1.9985886807634246e-05,
      "loss": 0.4955,
      "step": 122
    },
    {
      "epoch": 0.2338403041825095,
      "grad_norm": 0.8179514578584116,
      "learning_rate": 1.9984068012154824e-05,
      "loss": 0.4632,
      "step": 123
    },
    {
      "epoch": 0.23574144486692014,
      "grad_norm": 0.8655018624692868,
      "learning_rate": 1.9982139107222634e-05,
      "loss": 0.4525,
      "step": 124
    },
    {
      "epoch": 0.2376425855513308,
      "grad_norm": 0.9870629024761469,
      "learning_rate": 1.9980100114110637e-05,
      "loss": 0.448,
      "step": 125
    },
    {
      "epoch": 0.23954372623574144,
      "grad_norm": 0.9201115620315975,
      "learning_rate": 1.99779510553059e-05,
      "loss": 0.4445,
      "step": 126
    },
    {
      "epoch": 0.2414448669201521,
      "grad_norm": 0.8387445902611144,
      "learning_rate": 1.9975691954509347e-05,
      "loss": 0.4847,
      "step": 127
    },
    {
      "epoch": 0.24334600760456274,
      "grad_norm": 1.141613293216816,
      "learning_rate": 1.9973322836635517e-05,
      "loss": 0.4828,
      "step": 128
    },
    {
      "epoch": 0.24524714828897337,
      "grad_norm": 0.9008151424712199,
      "learning_rate": 1.997084372781226e-05,
      "loss": 0.5076,
      "step": 129
    },
    {
      "epoch": 0.24714828897338403,
      "grad_norm": 0.9345443014614474,
      "learning_rate": 1.9968254655380465e-05,
      "loss": 0.4807,
      "step": 130
    },
    {
      "epoch": 0.24904942965779467,
      "grad_norm": 0.7826130159992625,
      "learning_rate": 1.996555564789376e-05,
      "loss": 0.4752,
      "step": 131
    },
    {
      "epoch": 0.2509505703422053,
      "grad_norm": 0.9673240058147855,
      "learning_rate": 1.996274673511819e-05,
      "loss": 0.4391,
      "step": 132
    },
    {
      "epoch": 0.25285171102661597,
      "grad_norm": 0.7840806024885154,
      "learning_rate": 1.99598279480319e-05,
      "loss": 0.4469,
      "step": 133
    },
    {
      "epoch": 0.25475285171102663,
      "grad_norm": 0.9763249314425736,
      "learning_rate": 1.9956799318824776e-05,
      "loss": 0.4552,
      "step": 134
    },
    {
      "epoch": 0.25665399239543724,
      "grad_norm": 0.8207451143930588,
      "learning_rate": 1.99536608808981e-05,
      "loss": 0.4661,
      "step": 135
    },
    {
      "epoch": 0.2585551330798479,
      "grad_norm": 0.7544695434471612,
      "learning_rate": 1.995041266886419e-05,
      "loss": 0.4793,
      "step": 136
    },
    {
      "epoch": 0.26045627376425856,
      "grad_norm": 0.8075353846676147,
      "learning_rate": 1.9947054718545996e-05,
      "loss": 0.4833,
      "step": 137
    },
    {
      "epoch": 0.2623574144486692,
      "grad_norm": 0.7782912202996453,
      "learning_rate": 1.994358706697674e-05,
      "loss": 0.4511,
      "step": 138
    },
    {
      "epoch": 0.26425855513307983,
      "grad_norm": 0.8037210690044305,
      "learning_rate": 1.9940009752399462e-05,
      "loss": 0.4619,
      "step": 139
    },
    {
      "epoch": 0.2661596958174905,
      "grad_norm": 0.7770432450761198,
      "learning_rate": 1.9936322814266634e-05,
      "loss": 0.4786,
      "step": 140
    },
    {
      "epoch": 0.26806083650190116,
      "grad_norm": 0.8708708238278937,
      "learning_rate": 1.9932526293239713e-05,
      "loss": 0.4829,
      "step": 141
    },
    {
      "epoch": 0.26996197718631176,
      "grad_norm": 0.7747530790335395,
      "learning_rate": 1.9928620231188694e-05,
      "loss": 0.4374,
      "step": 142
    },
    {
      "epoch": 0.2718631178707224,
      "grad_norm": 0.8851217381425746,
      "learning_rate": 1.992460467119164e-05,
      "loss": 0.4742,
      "step": 143
    },
    {
      "epoch": 0.2737642585551331,
      "grad_norm": 0.7760629385373885,
      "learning_rate": 1.992047965753422e-05,
      "loss": 0.4891,
      "step": 144
    },
    {
      "epoch": 0.27566539923954375,
      "grad_norm": 0.7787196605973257,
      "learning_rate": 1.991624523570922e-05,
      "loss": 0.4963,
      "step": 145
    },
    {
      "epoch": 0.27756653992395436,
      "grad_norm": 0.9868328334059948,
      "learning_rate": 1.9911901452416012e-05,
      "loss": 0.4466,
      "step": 146
    },
    {
      "epoch": 0.279467680608365,
      "grad_norm": 0.8316020531291785,
      "learning_rate": 1.9907448355560094e-05,
      "loss": 0.4675,
      "step": 147
    },
    {
      "epoch": 0.2813688212927757,
      "grad_norm": 0.9281989085697481,
      "learning_rate": 1.9902885994252506e-05,
      "loss": 0.4439,
      "step": 148
    },
    {
      "epoch": 0.2832699619771863,
      "grad_norm": 0.7792207308402411,
      "learning_rate": 1.989821441880933e-05,
      "loss": 0.4612,
      "step": 149
    },
    {
      "epoch": 0.28517110266159695,
      "grad_norm": 0.8613005285946763,
      "learning_rate": 1.9893433680751105e-05,
      "loss": 0.4657,
      "step": 150
    },
    {
      "epoch": 0.2870722433460076,
      "grad_norm": 0.8210165583980534,
      "learning_rate": 1.9888543832802277e-05,
      "loss": 0.4263,
      "step": 151
    },
    {
      "epoch": 0.2889733840304182,
      "grad_norm": 0.7714803191377311,
      "learning_rate": 1.9883544928890612e-05,
      "loss": 0.4686,
      "step": 152
    },
    {
      "epoch": 0.2908745247148289,
      "grad_norm": 0.8250167834139593,
      "learning_rate": 1.9878437024146603e-05,
      "loss": 0.4971,
      "step": 153
    },
    {
      "epoch": 0.29277566539923955,
      "grad_norm": 0.7861740388487014,
      "learning_rate": 1.9873220174902857e-05,
      "loss": 0.4469,
      "step": 154
    },
    {
      "epoch": 0.2946768060836502,
      "grad_norm": 0.7249715138577719,
      "learning_rate": 1.986789443869348e-05,
      "loss": 0.4531,
      "step": 155
    },
    {
      "epoch": 0.2965779467680608,
      "grad_norm": 0.8583505069076927,
      "learning_rate": 1.9862459874253438e-05,
      "loss": 0.4442,
      "step": 156
    },
    {
      "epoch": 0.2984790874524715,
      "grad_norm": 0.7520251075717874,
      "learning_rate": 1.985691654151791e-05,
      "loss": 0.4491,
      "step": 157
    },
    {
      "epoch": 0.30038022813688214,
      "grad_norm": 0.7656881722680602,
      "learning_rate": 1.9851264501621635e-05,
      "loss": 0.4606,
      "step": 158
    },
    {
      "epoch": 0.30228136882129275,
      "grad_norm": 0.8379970347078813,
      "learning_rate": 1.984550381689822e-05,
      "loss": 0.4573,
      "step": 159
    },
    {
      "epoch": 0.3041825095057034,
      "grad_norm": 0.7817991925958266,
      "learning_rate": 1.983963455087946e-05,
      "loss": 0.466,
      "step": 160
    },
    {
      "epoch": 0.3060836501901141,
      "grad_norm": 0.8118856527010587,
      "learning_rate": 1.983365676829466e-05,
      "loss": 0.4583,
      "step": 161
    },
    {
      "epoch": 0.30798479087452474,
      "grad_norm": 0.7344147439952623,
      "learning_rate": 1.982757053506989e-05,
      "loss": 0.4425,
      "step": 162
    },
    {
      "epoch": 0.30988593155893535,
      "grad_norm": 0.8545451778644841,
      "learning_rate": 1.9821375918327268e-05,
      "loss": 0.4773,
      "step": 163
    },
    {
      "epoch": 0.311787072243346,
      "grad_norm": 0.7025581830676092,
      "learning_rate": 1.981507298638422e-05,
      "loss": 0.4396,
      "step": 164
    },
    {
      "epoch": 0.31368821292775667,
      "grad_norm": 0.8851349680194602,
      "learning_rate": 1.9808661808752735e-05,
      "loss": 0.4826,
      "step": 165
    },
    {
      "epoch": 0.3155893536121673,
      "grad_norm": 0.7681571587876704,
      "learning_rate": 1.980214245613858e-05,
      "loss": 0.452,
      "step": 166
    },
    {
      "epoch": 0.31749049429657794,
      "grad_norm": 0.7794414661400576,
      "learning_rate": 1.979551500044055e-05,
      "loss": 0.4538,
      "step": 167
    },
    {
      "epoch": 0.3193916349809886,
      "grad_norm": 0.8384271746956266,
      "learning_rate": 1.9788779514749635e-05,
      "loss": 0.4623,
      "step": 168
    },
    {
      "epoch": 0.32129277566539927,
      "grad_norm": 0.7718155189061099,
      "learning_rate": 1.978193607334826e-05,
      "loss": 0.4549,
      "step": 169
    },
    {
      "epoch": 0.3231939163498099,
      "grad_norm": 1.0138563545319552,
      "learning_rate": 1.977498475170941e-05,
      "loss": 0.4747,
      "step": 170
    },
    {
      "epoch": 0.32509505703422054,
      "grad_norm": 0.7458003820927573,
      "learning_rate": 1.9767925626495857e-05,
      "loss": 0.4549,
      "step": 171
    },
    {
      "epoch": 0.3269961977186312,
      "grad_norm": 0.866747938033783,
      "learning_rate": 1.9760758775559275e-05,
      "loss": 0.4583,
      "step": 172
    },
    {
      "epoch": 0.3288973384030418,
      "grad_norm": 0.9035479168656856,
      "learning_rate": 1.975348427793939e-05,
      "loss": 0.4779,
      "step": 173
    },
    {
      "epoch": 0.33079847908745247,
      "grad_norm": 0.7796735864025548,
      "learning_rate": 1.9746102213863113e-05,
      "loss": 0.4607,
      "step": 174
    },
    {
      "epoch": 0.33269961977186313,
      "grad_norm": 0.8684367382730057,
      "learning_rate": 1.973861266474366e-05,
      "loss": 0.4694,
      "step": 175
    },
    {
      "epoch": 0.33460076045627374,
      "grad_norm": 0.7162069876010442,
      "learning_rate": 1.9731015713179643e-05,
      "loss": 0.4761,
      "step": 176
    },
    {
      "epoch": 0.3365019011406844,
      "grad_norm": 0.8958659658064765,
      "learning_rate": 1.9723311442954163e-05,
      "loss": 0.4751,
      "step": 177
    },
    {
      "epoch": 0.33840304182509506,
      "grad_norm": 0.796005319276747,
      "learning_rate": 1.9715499939033883e-05,
      "loss": 0.4654,
      "step": 178
    },
    {
      "epoch": 0.3403041825095057,
      "grad_norm": 0.7888987138904092,
      "learning_rate": 1.9707581287568094e-05,
      "loss": 0.4318,
      "step": 179
    },
    {
      "epoch": 0.34220532319391633,
      "grad_norm": 0.8103807470308049,
      "learning_rate": 1.969955557588778e-05,
      "loss": 0.4298,
      "step": 180
    },
    {
      "epoch": 0.344106463878327,
      "grad_norm": 0.8058766719829449,
      "learning_rate": 1.9691422892504626e-05,
      "loss": 0.4603,
      "step": 181
    },
    {
      "epoch": 0.34600760456273766,
      "grad_norm": 0.880578279301494,
      "learning_rate": 1.968318332711006e-05,
      "loss": 0.4838,
      "step": 182
    },
    {
      "epoch": 0.34790874524714827,
      "grad_norm": 0.7804211767359951,
      "learning_rate": 1.9674836970574253e-05,
      "loss": 0.4704,
      "step": 183
    },
    {
      "epoch": 0.34980988593155893,
      "grad_norm": 0.8647656692828007,
      "learning_rate": 1.966638391494514e-05,
      "loss": 0.447,
      "step": 184
    },
    {
      "epoch": 0.3517110266159696,
      "grad_norm": 0.7506012812845948,
      "learning_rate": 1.9657824253447378e-05,
      "loss": 0.439,
      "step": 185
    },
    {
      "epoch": 0.35361216730038025,
      "grad_norm": 0.8291019824560986,
      "learning_rate": 1.9649158080481327e-05,
      "loss": 0.4631,
      "step": 186
    },
    {
      "epoch": 0.35551330798479086,
      "grad_norm": 0.8999892379585273,
      "learning_rate": 1.964038549162201e-05,
      "loss": 0.4459,
      "step": 187
    },
    {
      "epoch": 0.3574144486692015,
      "grad_norm": 0.7259750655060316,
      "learning_rate": 1.963150658361807e-05,
      "loss": 0.4624,
      "step": 188
    },
    {
      "epoch": 0.3593155893536122,
      "grad_norm": 0.9299533509835078,
      "learning_rate": 1.962252145439068e-05,
      "loss": 0.4292,
      "step": 189
    },
    {
      "epoch": 0.3612167300380228,
      "grad_norm": 0.7695909765978082,
      "learning_rate": 1.9613430203032486e-05,
      "loss": 0.4486,
      "step": 190
    },
    {
      "epoch": 0.36311787072243346,
      "grad_norm": 0.8339651919265445,
      "learning_rate": 1.9604232929806493e-05,
      "loss": 0.479,
      "step": 191
    },
    {
      "epoch": 0.3650190114068441,
      "grad_norm": 0.8620820612572161,
      "learning_rate": 1.9594929736144978e-05,
      "loss": 0.4531,
      "step": 192
    },
    {
      "epoch": 0.3669201520912547,
      "grad_norm": 0.7818932440900317,
      "learning_rate": 1.9585520724648354e-05,
      "loss": 0.4582,
      "step": 193
    },
    {
      "epoch": 0.3688212927756654,
      "grad_norm": 0.7856880577295874,
      "learning_rate": 1.957600599908406e-05,
      "loss": 0.4432,
      "step": 194
    },
    {
      "epoch": 0.37072243346007605,
      "grad_norm": 0.7866033571117518,
      "learning_rate": 1.95663856643854e-05,
      "loss": 0.438,
      "step": 195
    },
    {
      "epoch": 0.3726235741444867,
      "grad_norm": 0.7888771681083976,
      "learning_rate": 1.955665982665038e-05,
      "loss": 0.4561,
      "step": 196
    },
    {
      "epoch": 0.3745247148288973,
      "grad_norm": 0.8070561489572124,
      "learning_rate": 1.9546828593140565e-05,
      "loss": 0.4572,
      "step": 197
    },
    {
      "epoch": 0.376425855513308,
      "grad_norm": 0.8236325824013025,
      "learning_rate": 1.9536892072279863e-05,
      "loss": 0.4623,
      "step": 198
    },
    {
      "epoch": 0.37832699619771865,
      "grad_norm": 0.8313282577300511,
      "learning_rate": 1.9526850373653356e-05,
      "loss": 0.4654,
      "step": 199
    },
    {
      "epoch": 0.38022813688212925,
      "grad_norm": 0.7340343274836881,
      "learning_rate": 1.9516703608006074e-05,
      "loss": 0.421,
      "step": 200
    },
    {
      "epoch": 0.3821292775665399,
      "grad_norm": 0.7038043982976423,
      "learning_rate": 1.9506451887241787e-05,
      "loss": 0.4504,
      "step": 201
    },
    {
      "epoch": 0.3840304182509506,
      "grad_norm": 0.7768101022360497,
      "learning_rate": 1.949609532442176e-05,
      "loss": 0.4643,
      "step": 202
    },
    {
      "epoch": 0.38593155893536124,
      "grad_norm": 0.741342333060678,
      "learning_rate": 1.9485634033763507e-05,
      "loss": 0.4368,
      "step": 203
    },
    {
      "epoch": 0.38783269961977185,
      "grad_norm": 0.7427897459287397,
      "learning_rate": 1.9475068130639543e-05,
      "loss": 0.4419,
      "step": 204
    },
    {
      "epoch": 0.3897338403041825,
      "grad_norm": 0.6983318762383683,
      "learning_rate": 1.9464397731576093e-05,
      "loss": 0.4453,
      "step": 205
    },
    {
      "epoch": 0.3916349809885932,
      "grad_norm": 0.7493656228639105,
      "learning_rate": 1.945362295425183e-05,
      "loss": 0.4517,
      "step": 206
    },
    {
      "epoch": 0.3935361216730038,
      "grad_norm": 0.7082763028084567,
      "learning_rate": 1.944274391749655e-05,
      "loss": 0.4325,
      "step": 207
    },
    {
      "epoch": 0.39543726235741444,
      "grad_norm": 0.7503319428627745,
      "learning_rate": 1.9431760741289886e-05,
      "loss": 0.4393,
      "step": 208
    },
    {
      "epoch": 0.3973384030418251,
      "grad_norm": 0.7267299099234377,
      "learning_rate": 1.942067354675997e-05,
      "loss": 0.4668,
      "step": 209
    },
    {
      "epoch": 0.39923954372623577,
      "grad_norm": 0.7089870420627601,
      "learning_rate": 1.9409482456182105e-05,
      "loss": 0.4366,
      "step": 210
    },
    {
      "epoch": 0.4011406844106464,
      "grad_norm": 0.7198920468219249,
      "learning_rate": 1.939818759297741e-05,
      "loss": 0.4464,
      "step": 211
    },
    {
      "epoch": 0.40304182509505704,
      "grad_norm": 0.764699443358051,
      "learning_rate": 1.9386789081711465e-05,
      "loss": 0.4449,
      "step": 212
    },
    {
      "epoch": 0.4049429657794677,
      "grad_norm": 0.7075234071480062,
      "learning_rate": 1.9375287048092927e-05,
      "loss": 0.4367,
      "step": 213
    },
    {
      "epoch": 0.4068441064638783,
      "grad_norm": 0.8285295444420562,
      "learning_rate": 1.9363681618972166e-05,
      "loss": 0.4466,
      "step": 214
    },
    {
      "epoch": 0.40874524714828897,
      "grad_norm": 0.6996219305480876,
      "learning_rate": 1.9351972922339835e-05,
      "loss": 0.4604,
      "step": 215
    },
    {
      "epoch": 0.41064638783269963,
      "grad_norm": 0.7525127356378865,
      "learning_rate": 1.9340161087325483e-05,
      "loss": 0.4647,
      "step": 216
    },
    {
      "epoch": 0.41254752851711024,
      "grad_norm": 0.7093555482264836,
      "learning_rate": 1.9328246244196117e-05,
      "loss": 0.4385,
      "step": 217
    },
    {
      "epoch": 0.4144486692015209,
      "grad_norm": 0.7085059363822336,
      "learning_rate": 1.931622852435478e-05,
      "loss": 0.44,
      "step": 218
    },
    {
      "epoch": 0.41634980988593157,
      "grad_norm": 0.7119759966500714,
      "learning_rate": 1.930410806033908e-05,
      "loss": 0.4166,
      "step": 219
    },
    {
      "epoch": 0.41825095057034223,
      "grad_norm": 0.7432492847264208,
      "learning_rate": 1.929188498581975e-05,
      "loss": 0.4426,
      "step": 220
    },
    {
      "epoch": 0.42015209125475284,
      "grad_norm": 0.7289164737227367,
      "learning_rate": 1.9279559435599164e-05,
      "loss": 0.4501,
      "step": 221
    },
    {
      "epoch": 0.4220532319391635,
      "grad_norm": 0.7648856889387022,
      "learning_rate": 1.926713154560984e-05,
      "loss": 0.4603,
      "step": 222
    },
    {
      "epoch": 0.42395437262357416,
      "grad_norm": 0.7068605242151745,
      "learning_rate": 1.9254601452912972e-05,
      "loss": 0.4674,
      "step": 223
    },
    {
      "epoch": 0.42585551330798477,
      "grad_norm": 0.7728547793461618,
      "learning_rate": 1.924196929569688e-05,
      "loss": 0.4334,
      "step": 224
    },
    {
      "epoch": 0.42775665399239543,
      "grad_norm": 0.7691807673731599,
      "learning_rate": 1.922923521327551e-05,
      "loss": 0.4805,
      "step": 225
    },
    {
      "epoch": 0.4296577946768061,
      "grad_norm": 0.8343825595956904,
      "learning_rate": 1.9216399346086893e-05,
      "loss": 0.4637,
      "step": 226
    },
    {
      "epoch": 0.43155893536121676,
      "grad_norm": 0.7061671083817953,
      "learning_rate": 1.9203461835691596e-05,
      "loss": 0.4584,
      "step": 227
    },
    {
      "epoch": 0.43346007604562736,
      "grad_norm": 0.7608168770104771,
      "learning_rate": 1.9190422824771158e-05,
      "loss": 0.4445,
      "step": 228
    },
    {
      "epoch": 0.435361216730038,
      "grad_norm": 0.6935228961858564,
      "learning_rate": 1.9177282457126515e-05,
      "loss": 0.4372,
      "step": 229
    },
    {
      "epoch": 0.4372623574144487,
      "grad_norm": 0.7050972509876273,
      "learning_rate": 1.9164040877676425e-05,
      "loss": 0.4455,
      "step": 230
    },
    {
      "epoch": 0.4391634980988593,
      "grad_norm": 0.6897129028465291,
      "learning_rate": 1.9150698232455853e-05,
      "loss": 0.4068,
      "step": 231
    },
    {
      "epoch": 0.44106463878326996,
      "grad_norm": 0.6793264970416749,
      "learning_rate": 1.913725466861438e-05,
      "loss": 0.4561,
      "step": 232
    },
    {
      "epoch": 0.4429657794676806,
      "grad_norm": 0.7461308816890755,
      "learning_rate": 1.9123710334414552e-05,
      "loss": 0.4493,
      "step": 233
    },
    {
      "epoch": 0.4448669201520912,
      "grad_norm": 0.6813937107711601,
      "learning_rate": 1.911006537923029e-05,
      "loss": 0.4253,
      "step": 234
    },
    {
      "epoch": 0.4467680608365019,
      "grad_norm": 0.6905660080876765,
      "learning_rate": 1.9096319953545186e-05,
      "loss": 0.4193,
      "step": 235
    },
    {
      "epoch": 0.44866920152091255,
      "grad_norm": 0.7174972374532641,
      "learning_rate": 1.908247420895089e-05,
      "loss": 0.4379,
      "step": 236
    },
    {
      "epoch": 0.4505703422053232,
      "grad_norm": 0.7242256578318375,
      "learning_rate": 1.9068528298145418e-05,
      "loss": 0.4496,
      "step": 237
    },
    {
      "epoch": 0.4524714828897338,
      "grad_norm": 0.6895042981650908,
      "learning_rate": 1.905448237493147e-05,
      "loss": 0.4384,
      "step": 238
    },
    {
      "epoch": 0.4543726235741445,
      "grad_norm": 0.6791220075114859,
      "learning_rate": 1.9040336594214727e-05,
      "loss": 0.4484,
      "step": 239
    },
    {
      "epoch": 0.45627376425855515,
      "grad_norm": 0.7003014966030247,
      "learning_rate": 1.9026091112002163e-05,
      "loss": 0.427,
      "step": 240
    },
    {
      "epoch": 0.45817490494296575,
      "grad_norm": 0.6586180741828473,
      "learning_rate": 1.90117460854003e-05,
      "loss": 0.4246,
      "step": 241
    },
    {
      "epoch": 0.4600760456273764,
      "grad_norm": 0.7110182244210966,
      "learning_rate": 1.8997301672613496e-05,
      "loss": 0.4723,
      "step": 242
    },
    {
      "epoch": 0.4619771863117871,
      "grad_norm": 0.7131822770777457,
      "learning_rate": 1.8982758032942184e-05,
      "loss": 0.4748,
      "step": 243
    },
    {
      "epoch": 0.46387832699619774,
      "grad_norm": 0.6901119607064604,
      "learning_rate": 1.896811532678113e-05,
      "loss": 0.4406,
      "step": 244
    },
    {
      "epoch": 0.46577946768060835,
      "grad_norm": 0.6835566320746159,
      "learning_rate": 1.8953373715617646e-05,
      "loss": 0.4481,
      "step": 245
    },
    {
      "epoch": 0.467680608365019,
      "grad_norm": 0.7308432937998676,
      "learning_rate": 1.893853336202983e-05,
      "loss": 0.4376,
      "step": 246
    },
    {
      "epoch": 0.4695817490494297,
      "grad_norm": 0.6889791639845325,
      "learning_rate": 1.892359442968475e-05,
      "loss": 0.4329,
      "step": 247
    },
    {
      "epoch": 0.4714828897338403,
      "grad_norm": 0.7164505306991646,
      "learning_rate": 1.8908557083336668e-05,
      "loss": 0.4327,
      "step": 248
    },
    {
      "epoch": 0.47338403041825095,
      "grad_norm": 0.7312302969020609,
      "learning_rate": 1.889342148882519e-05,
      "loss": 0.4405,
      "step": 249
    },
    {
      "epoch": 0.4752851711026616,
      "grad_norm": 0.761124106282301,
      "learning_rate": 1.8878187813073465e-05,
      "loss": 0.4672,
      "step": 250
    },
    {
      "epoch": 0.47718631178707227,
      "grad_norm": 0.7278179694328336,
      "learning_rate": 1.886285622408633e-05,
      "loss": 0.4227,
      "step": 251
    },
    {
      "epoch": 0.4790874524714829,
      "grad_norm": 0.7704287962216506,
      "learning_rate": 1.8847426890948447e-05,
      "loss": 0.4455,
      "step": 252
    },
    {
      "epoch": 0.48098859315589354,
      "grad_norm": 0.7209104445830121,
      "learning_rate": 1.8831899983822475e-05,
      "loss": 0.4577,
      "step": 253
    },
    {
      "epoch": 0.4828897338403042,
      "grad_norm": 0.6778733331648699,
      "learning_rate": 1.8816275673947148e-05,
      "loss": 0.4299,
      "step": 254
    },
    {
      "epoch": 0.4847908745247148,
      "grad_norm": 0.7249026311202598,
      "learning_rate": 1.8800554133635417e-05,
      "loss": 0.4441,
      "step": 255
    },
    {
      "epoch": 0.4866920152091255,
      "grad_norm": 0.6775906288556909,
      "learning_rate": 1.8784735536272543e-05,
      "loss": 0.4306,
      "step": 256
    },
    {
      "epoch": 0.48859315589353614,
      "grad_norm": 0.7581151576054069,
      "learning_rate": 1.8768820056314173e-05,
      "loss": 0.4254,
      "step": 257
    },
    {
      "epoch": 0.49049429657794674,
      "grad_norm": 0.6985586620745072,
      "learning_rate": 1.875280786928444e-05,
      "loss": 0.4465,
      "step": 258
    },
    {
      "epoch": 0.4923954372623574,
      "grad_norm": 0.74518147583897,
      "learning_rate": 1.873669915177399e-05,
      "loss": 0.4618,
      "step": 259
    },
    {
      "epoch": 0.49429657794676807,
      "grad_norm": 0.7962046182231821,
      "learning_rate": 1.872049408143808e-05,
      "loss": 0.4157,
      "step": 260
    },
    {
      "epoch": 0.49619771863117873,
      "grad_norm": 0.6603320598031359,
      "learning_rate": 1.8704192836994578e-05,
      "loss": 0.4115,
      "step": 261
    },
    {
      "epoch": 0.49809885931558934,
      "grad_norm": 0.7481543196661218,
      "learning_rate": 1.8687795598222024e-05,
      "loss": 0.4267,
      "step": 262
    },
    {
      "epoch": 0.5,
      "grad_norm": 0.8103258071955443,
      "learning_rate": 1.8671302545957628e-05,
      "loss": 0.4419,
      "step": 263
    },
    {
      "epoch": 0.5019011406844106,
      "grad_norm": 0.6877165733471877,
      "learning_rate": 1.8654713862095272e-05,
      "loss": 0.4328,
      "step": 264
    },
    {
      "epoch": 0.5038022813688213,
      "grad_norm": 0.7268745662155378,
      "learning_rate": 1.8638029729583524e-05,
      "loss": 0.4486,
      "step": 265
    },
    {
      "epoch": 0.5057034220532319,
      "grad_norm": 0.7091890052470671,
      "learning_rate": 1.8621250332423603e-05,
      "loss": 0.437,
      "step": 266
    },
    {
      "epoch": 0.5076045627376425,
      "grad_norm": 0.719969767761705,
      "learning_rate": 1.860437585566736e-05,
      "loss": 0.4356,
      "step": 267
    },
    {
      "epoch": 0.5095057034220533,
      "grad_norm": 0.7366118794549946,
      "learning_rate": 1.8587406485415226e-05,
      "loss": 0.4415,
      "step": 268
    },
    {
      "epoch": 0.5114068441064639,
      "grad_norm": 0.7377096235856714,
      "learning_rate": 1.8570342408814173e-05,
      "loss": 0.4398,
      "step": 269
    },
    {
      "epoch": 0.5133079847908745,
      "grad_norm": 0.7217035154851869,
      "learning_rate": 1.855318381405564e-05,
      "loss": 0.456,
      "step": 270
    },
    {
      "epoch": 0.5152091254752852,
      "grad_norm": 0.7588098533941107,
      "learning_rate": 1.8535930890373467e-05,
      "loss": 0.4495,
      "step": 271
    },
    {
      "epoch": 0.5171102661596958,
      "grad_norm": 0.6791279635658835,
      "learning_rate": 1.8518583828041787e-05,
      "loss": 0.4421,
      "step": 272
    },
    {
      "epoch": 0.5190114068441065,
      "grad_norm": 0.758447807333382,
      "learning_rate": 1.8501142818372964e-05,
      "loss": 0.4456,
      "step": 273
    },
    {
      "epoch": 0.5209125475285171,
      "grad_norm": 0.7182320714542124,
      "learning_rate": 1.848360805371544e-05,
      "loss": 0.4619,
      "step": 274
    },
    {
      "epoch": 0.5228136882129277,
      "grad_norm": 0.6847555730257598,
      "learning_rate": 1.8465979727451653e-05,
      "loss": 0.4384,
      "step": 275
    },
    {
      "epoch": 0.5247148288973384,
      "grad_norm": 0.7288227148404307,
      "learning_rate": 1.8448258033995877e-05,
      "loss": 0.4327,
      "step": 276
    },
    {
      "epoch": 0.526615969581749,
      "grad_norm": 0.7431322533201045,
      "learning_rate": 1.8430443168792087e-05,
      "loss": 0.4277,
      "step": 277
    },
    {
      "epoch": 0.5285171102661597,
      "grad_norm": 0.6783505984701499,
      "learning_rate": 1.8412535328311813e-05,
      "loss": 0.4449,
      "step": 278
    },
    {
      "epoch": 0.5304182509505704,
      "grad_norm": 0.7315053231523375,
      "learning_rate": 1.8394534710051956e-05,
      "loss": 0.4476,
      "step": 279
    },
    {
      "epoch": 0.532319391634981,
      "grad_norm": 0.7132548586466316,
      "learning_rate": 1.8376441512532617e-05,
      "loss": 0.4629,
      "step": 280
    },
    {
      "epoch": 0.5342205323193916,
      "grad_norm": 0.672941355023213,
      "learning_rate": 1.835825593529492e-05,
      "loss": 0.4265,
      "step": 281
    },
    {
      "epoch": 0.5361216730038023,
      "grad_norm": 0.6417782063096907,
      "learning_rate": 1.833997817889878e-05,
      "loss": 0.4205,
      "step": 282
    },
    {
      "epoch": 0.5380228136882129,
      "grad_norm": 0.6672848322112241,
      "learning_rate": 1.8321608444920738e-05,
      "loss": 0.4093,
      "step": 283
    },
    {
      "epoch": 0.5399239543726235,
      "grad_norm": 0.7856005929857678,
      "learning_rate": 1.830314693595169e-05,
      "loss": 0.4383,
      "step": 284
    },
    {
      "epoch": 0.5418250950570342,
      "grad_norm": 0.6609973864820794,
      "learning_rate": 1.828459385559468e-05,
      "loss": 0.4235,
      "step": 285
    },
    {
      "epoch": 0.5437262357414449,
      "grad_norm": 0.6246108546856488,
      "learning_rate": 1.8265949408462657e-05,
      "loss": 0.4194,
      "step": 286
    },
    {
      "epoch": 0.5456273764258555,
      "grad_norm": 0.7028771617342063,
      "learning_rate": 1.8247213800176192e-05,
      "loss": 0.4564,
      "step": 287
    },
    {
      "epoch": 0.5475285171102662,
      "grad_norm": 0.6509975785392375,
      "learning_rate": 1.8228387237361245e-05,
      "loss": 0.4313,
      "step": 288
    },
    {
      "epoch": 0.5494296577946768,
      "grad_norm": 0.6573566464571174,
      "learning_rate": 1.8209469927646863e-05,
      "loss": 0.4152,
      "step": 289
    },
    {
      "epoch": 0.5513307984790875,
      "grad_norm": 0.67855442401745,
      "learning_rate": 1.8190462079662897e-05,
      "loss": 0.4604,
      "step": 290
    },
    {
      "epoch": 0.5532319391634981,
      "grad_norm": 0.7035055292001037,
      "learning_rate": 1.81713639030377e-05,
      "loss": 0.4344,
      "step": 291
    },
    {
      "epoch": 0.5551330798479087,
      "grad_norm": 0.7178034573292358,
      "learning_rate": 1.8152175608395814e-05,
      "loss": 0.4274,
      "step": 292
    },
    {
      "epoch": 0.5570342205323194,
      "grad_norm": 0.6981942435397704,
      "learning_rate": 1.8132897407355657e-05,
      "loss": 0.44,
      "step": 293
    },
    {
      "epoch": 0.55893536121673,
      "grad_norm": 0.7371595546689097,
      "learning_rate": 1.811352951252717e-05,
      "loss": 0.4154,
      "step": 294
    },
    {
      "epoch": 0.5608365019011406,
      "grad_norm": 0.6548294561791782,
      "learning_rate": 1.809407213750949e-05,
      "loss": 0.4271,
      "step": 295
    },
    {
      "epoch": 0.5627376425855514,
      "grad_norm": 0.7052940207042259,
      "learning_rate": 1.807452549688859e-05,
      "loss": 0.4534,
      "step": 296
    },
    {
      "epoch": 0.564638783269962,
      "grad_norm": 0.6630848357499377,
      "learning_rate": 1.8054889806234906e-05,
      "loss": 0.4509,
      "step": 297
    },
    {
      "epoch": 0.5665399239543726,
      "grad_norm": 0.6978435011312227,
      "learning_rate": 1.8035165282100963e-05,
      "loss": 0.4438,
      "step": 298
    },
    {
      "epoch": 0.5684410646387833,
      "grad_norm": 0.6929482083484854,
      "learning_rate": 1.8015352142018984e-05,
      "loss": 0.4243,
      "step": 299
    },
    {
      "epoch": 0.5703422053231939,
      "grad_norm": 0.6608586485170236,
      "learning_rate": 1.799545060449851e-05,
      "loss": 0.4196,
      "step": 300
    },
    {
      "epoch": 0.5722433460076045,
      "grad_norm": 0.6857921145457854,
      "learning_rate": 1.797546088902396e-05,
      "loss": 0.4388,
      "step": 301
    },
    {
      "epoch": 0.5741444866920152,
      "grad_norm": 0.6281151846143317,
      "learning_rate": 1.7955383216052224e-05,
      "loss": 0.4315,
      "step": 302
    },
    {
      "epoch": 0.5760456273764258,
      "grad_norm": 0.7624253550008769,
      "learning_rate": 1.7935217807010238e-05,
      "loss": 0.4252,
      "step": 303
    },
    {
      "epoch": 0.5779467680608364,
      "grad_norm": 0.7144847142436779,
      "learning_rate": 1.7914964884292543e-05,
      "loss": 0.4214,
      "step": 304
    },
    {
      "epoch": 0.5798479087452472,
      "grad_norm": 0.6732698532329222,
      "learning_rate": 1.7894624671258813e-05,
      "loss": 0.4302,
      "step": 305
    },
    {
      "epoch": 0.5817490494296578,
      "grad_norm": 0.6906696392017235,
      "learning_rate": 1.7874197392231414e-05,
      "loss": 0.4264,
      "step": 306
    },
    {
      "epoch": 0.5836501901140685,
      "grad_norm": 0.7370892471220831,
      "learning_rate": 1.7853683272492913e-05,
      "loss": 0.4589,
      "step": 307
    },
    {
      "epoch": 0.5855513307984791,
      "grad_norm": 0.7067051271575602,
      "learning_rate": 1.7833082538283615e-05,
      "loss": 0.4317,
      "step": 308
    },
    {
      "epoch": 0.5874524714828897,
      "grad_norm": 0.6569277613244852,
      "learning_rate": 1.7812395416799034e-05,
      "loss": 0.4177,
      "step": 309
    },
    {
      "epoch": 0.5893536121673004,
      "grad_norm": 0.7181388347261146,
      "learning_rate": 1.7791622136187422e-05,
      "loss": 0.4552,
      "step": 310
    },
    {
      "epoch": 0.591254752851711,
      "grad_norm": 0.722616922712641,
      "learning_rate": 1.7770762925547235e-05,
      "loss": 0.4517,
      "step": 311
    },
    {
      "epoch": 0.5931558935361216,
      "grad_norm": 0.7151748465204784,
      "learning_rate": 1.7749818014924612e-05,
      "loss": 0.4233,
      "step": 312
    },
    {
      "epoch": 0.5950570342205324,
      "grad_norm": 0.6878805663873514,
      "learning_rate": 1.7728787635310828e-05,
      "loss": 0.4221,
      "step": 313
    },
    {
      "epoch": 0.596958174904943,
      "grad_norm": 0.7045898884798314,
      "learning_rate": 1.770767201863976e-05,
      "loss": 0.4441,
      "step": 314
    },
    {
      "epoch": 0.5988593155893536,
      "grad_norm": 0.6620924930921461,
      "learning_rate": 1.7686471397785322e-05,
      "loss": 0.4192,
      "step": 315
    },
    {
      "epoch": 0.6007604562737643,
      "grad_norm": 0.7041981653346706,
      "learning_rate": 1.76651860065589e-05,
      "loss": 0.4093,
      "step": 316
    },
    {
      "epoch": 0.6026615969581749,
      "grad_norm": 0.7270853220895429,
      "learning_rate": 1.764381607970677e-05,
      "loss": 0.4299,
      "step": 317
    },
    {
      "epoch": 0.6045627376425855,
      "grad_norm": 0.6752083075012906,
      "learning_rate": 1.7622361852907506e-05,
      "loss": 0.4122,
      "step": 318
    },
    {
      "epoch": 0.6064638783269962,
      "grad_norm": 0.7619447399989167,
      "learning_rate": 1.760082356276939e-05,
      "loss": 0.4258,
      "step": 319
    },
    {
      "epoch": 0.6083650190114068,
      "grad_norm": 0.685983135852867,
      "learning_rate": 1.75792014468278e-05,
      "loss": 0.4285,
      "step": 320
    },
    {
      "epoch": 0.6102661596958175,
      "grad_norm": 0.74530013250665,
      "learning_rate": 1.7557495743542586e-05,
      "loss": 0.4513,
      "step": 321
    },
    {
      "epoch": 0.6121673003802282,
      "grad_norm": 0.7999895040442231,
      "learning_rate": 1.7535706692295436e-05,
      "loss": 0.4487,
      "step": 322
    },
    {
      "epoch": 0.6140684410646388,
      "grad_norm": 0.8028458295667564,
      "learning_rate": 1.7513834533387256e-05,
      "loss": 0.4176,
      "step": 323
    },
    {
      "epoch": 0.6159695817490495,
      "grad_norm": 0.7325132472330159,
      "learning_rate": 1.7491879508035488e-05,
      "loss": 0.4359,
      "step": 324
    },
    {
      "epoch": 0.6178707224334601,
      "grad_norm": 0.6773766146952694,
      "learning_rate": 1.746984185837149e-05,
      "loss": 0.4181,
      "step": 325
    },
    {
      "epoch": 0.6197718631178707,
      "grad_norm": 0.8627689991408384,
      "learning_rate": 1.744772182743782e-05,
      "loss": 0.4187,
      "step": 326
    },
    {
      "epoch": 0.6216730038022814,
      "grad_norm": 0.6285531419272437,
      "learning_rate": 1.7425519659185596e-05,
      "loss": 0.4277,
      "step": 327
    },
    {
      "epoch": 0.623574144486692,
      "grad_norm": 0.7705124950870843,
      "learning_rate": 1.740323559847179e-05,
      "loss": 0.4187,
      "step": 328
    },
    {
      "epoch": 0.6254752851711026,
      "grad_norm": 0.7451647218294556,
      "learning_rate": 1.738086989105651e-05,
      "loss": 0.401,
      "step": 329
    },
    {
      "epoch": 0.6273764258555133,
      "grad_norm": 0.7624223120181423,
      "learning_rate": 1.735842278360032e-05,
      "loss": 0.4677,
      "step": 330
    },
    {
      "epoch": 0.629277566539924,
      "grad_norm": 0.8557950129190357,
      "learning_rate": 1.73358945236615e-05,
      "loss": 0.4255,
      "step": 331
    },
    {
      "epoch": 0.6311787072243346,
      "grad_norm": 0.8612737232439938,
      "learning_rate": 1.7313285359693322e-05,
      "loss": 0.4481,
      "step": 332
    },
    {
      "epoch": 0.6330798479087453,
      "grad_norm": 0.8392054679152542,
      "learning_rate": 1.7290595541041312e-05,
      "loss": 0.4186,
      "step": 333
    },
    {
      "epoch": 0.6349809885931559,
      "grad_norm": 0.6903476908328429,
      "learning_rate": 1.7267825317940494e-05,
      "loss": 0.4415,
      "step": 334
    },
    {
      "epoch": 0.6368821292775665,
      "grad_norm": 0.6519650017429904,
      "learning_rate": 1.724497494151264e-05,
      "loss": 0.4152,
      "step": 335
    },
    {
      "epoch": 0.6387832699619772,
      "grad_norm": 0.7482521722237108,
      "learning_rate": 1.7222044663763484e-05,
      "loss": 0.439,
      "step": 336
    },
    {
      "epoch": 0.6406844106463878,
      "grad_norm": 0.7036723686764073,
      "learning_rate": 1.7199034737579962e-05,
      "loss": 0.4389,
      "step": 337
    },
    {
      "epoch": 0.6425855513307985,
      "grad_norm": 0.6775180477225546,
      "learning_rate": 1.7175945416727405e-05,
      "loss": 0.4508,
      "step": 338
    },
    {
      "epoch": 0.6444866920152091,
      "grad_norm": 0.7720138928395854,
      "learning_rate": 1.7152776955846768e-05,
      "loss": 0.414,
      "step": 339
    },
    {
      "epoch": 0.6463878326996197,
| "grad_norm": 0.6830735295751708, | |
| "learning_rate": 1.7129529610451775e-05, | |
| "loss": 0.4052, | |
| "step": 340 | |
| }, | |
| { | |
| "epoch": 0.6482889733840305, | |
| "grad_norm": 0.6262779081520732, | |
| "learning_rate": 1.7106203636926154e-05, | |
| "loss": 0.4309, | |
| "step": 341 | |
| }, | |
| { | |
| "epoch": 0.6501901140684411, | |
| "grad_norm": 0.8702165945015574, | |
| "learning_rate": 1.7082799292520767e-05, | |
| "loss": 0.4259, | |
| "step": 342 | |
| }, | |
| { | |
| "epoch": 0.6520912547528517, | |
| "grad_norm": 0.7134273616385337, | |
| "learning_rate": 1.7059316835350806e-05, | |
| "loss": 0.4462, | |
| "step": 343 | |
| }, | |
| { | |
| "epoch": 0.6539923954372624, | |
| "grad_norm": 0.78772087731039, | |
| "learning_rate": 1.7035756524392924e-05, | |
| "loss": 0.4186, | |
| "step": 344 | |
| }, | |
| { | |
| "epoch": 0.655893536121673, | |
| "grad_norm": 0.7605067325997421, | |
| "learning_rate": 1.7012118619482376e-05, | |
| "loss": 0.4462, | |
| "step": 345 | |
| }, | |
| { | |
| "epoch": 0.6577946768060836, | |
| "grad_norm": 0.6537472688400661, | |
| "learning_rate": 1.6988403381310177e-05, | |
| "loss": 0.4552, | |
| "step": 346 | |
| }, | |
| { | |
| "epoch": 0.6596958174904943, | |
| "grad_norm": 0.7145767092649539, | |
| "learning_rate": 1.696461107142021e-05, | |
| "loss": 0.4449, | |
| "step": 347 | |
| }, | |
| { | |
| "epoch": 0.6615969581749049, | |
| "grad_norm": 0.6803535095802667, | |
| "learning_rate": 1.6940741952206342e-05, | |
| "loss": 0.4221, | |
| "step": 348 | |
| }, | |
| { | |
| "epoch": 0.6634980988593155, | |
| "grad_norm": 0.7170135471845068, | |
| "learning_rate": 1.691679628690953e-05, | |
| "loss": 0.4354, | |
| "step": 349 | |
| }, | |
| { | |
| "epoch": 0.6653992395437263, | |
| "grad_norm": 0.7074011603582587, | |
| "learning_rate": 1.6892774339614927e-05, | |
| "loss": 0.4144, | |
| "step": 350 | |
| }, | |
| { | |
| "epoch": 0.6673003802281369, | |
| "grad_norm": 0.7685273398553027, | |
| "learning_rate": 1.686867637524896e-05, | |
| "loss": 0.4376, | |
| "step": 351 | |
| }, | |
| { | |
| "epoch": 0.6692015209125475, | |
| "grad_norm": 0.6937068173065084, | |
| "learning_rate": 1.6844502659576414e-05, | |
| "loss": 0.4229, | |
| "step": 352 | |
| }, | |
| { | |
| "epoch": 0.6711026615969582, | |
| "grad_norm": 0.724511172987768, | |
| "learning_rate": 1.6820253459197493e-05, | |
| "loss": 0.4561, | |
| "step": 353 | |
| }, | |
| { | |
| "epoch": 0.6730038022813688, | |
| "grad_norm": 0.7699832674704752, | |
| "learning_rate": 1.679592904154489e-05, | |
| "loss": 0.4276, | |
| "step": 354 | |
| }, | |
| { | |
| "epoch": 0.6749049429657795, | |
| "grad_norm": 0.7163882166589179, | |
| "learning_rate": 1.677152967488084e-05, | |
| "loss": 0.4368, | |
| "step": 355 | |
| }, | |
| { | |
| "epoch": 0.6768060836501901, | |
| "grad_norm": 0.7040660813806539, | |
| "learning_rate": 1.6747055628294134e-05, | |
| "loss": 0.4321, | |
| "step": 356 | |
| }, | |
| { | |
| "epoch": 0.6787072243346007, | |
| "grad_norm": 0.7628828572987904, | |
| "learning_rate": 1.6722507171697184e-05, | |
| "loss": 0.4478, | |
| "step": 357 | |
| }, | |
| { | |
| "epoch": 0.6806083650190115, | |
| "grad_norm": 0.7641482754411942, | |
| "learning_rate": 1.669788457582304e-05, | |
| "loss": 0.4463, | |
| "step": 358 | |
| }, | |
| { | |
| "epoch": 0.6825095057034221, | |
| "grad_norm": 0.7105385056334287, | |
| "learning_rate": 1.6673188112222394e-05, | |
| "loss": 0.4157, | |
| "step": 359 | |
| }, | |
| { | |
| "epoch": 0.6844106463878327, | |
| "grad_norm": 0.7098650082212842, | |
| "learning_rate": 1.6648418053260585e-05, | |
| "loss": 0.426, | |
| "step": 360 | |
| }, | |
| { | |
| "epoch": 0.6863117870722434, | |
| "grad_norm": 0.6903618883871796, | |
| "learning_rate": 1.6623574672114596e-05, | |
| "loss": 0.4279, | |
| "step": 361 | |
| }, | |
| { | |
| "epoch": 0.688212927756654, | |
| "grad_norm": 0.7177358451409801, | |
| "learning_rate": 1.6598658242770054e-05, | |
| "loss": 0.4401, | |
| "step": 362 | |
| }, | |
| { | |
| "epoch": 0.6901140684410646, | |
| "grad_norm": 0.7170418228678317, | |
| "learning_rate": 1.6573669040018202e-05, | |
| "loss": 0.4165, | |
| "step": 363 | |
| }, | |
| { | |
| "epoch": 0.6920152091254753, | |
| "grad_norm": 0.6963504093050362, | |
| "learning_rate": 1.6548607339452853e-05, | |
| "loss": 0.4213, | |
| "step": 364 | |
| }, | |
| { | |
| "epoch": 0.6939163498098859, | |
| "grad_norm": 0.702947847524009, | |
| "learning_rate": 1.652347341746737e-05, | |
| "loss": 0.4241, | |
| "step": 365 | |
| }, | |
| { | |
| "epoch": 0.6958174904942965, | |
| "grad_norm": 0.6790564252108022, | |
| "learning_rate": 1.6498267551251618e-05, | |
| "loss": 0.4099, | |
| "step": 366 | |
| }, | |
| { | |
| "epoch": 0.6977186311787072, | |
| "grad_norm": 0.6648262328005833, | |
| "learning_rate": 1.6472990018788884e-05, | |
| "loss": 0.4181, | |
| "step": 367 | |
| }, | |
| { | |
| "epoch": 0.6996197718631179, | |
| "grad_norm": 0.6311935103333491, | |
| "learning_rate": 1.644764109885284e-05, | |
| "loss": 0.4334, | |
| "step": 368 | |
| }, | |
| { | |
| "epoch": 0.7015209125475285, | |
| "grad_norm": 0.7258180072953032, | |
| "learning_rate": 1.642222107100446e-05, | |
| "loss": 0.4379, | |
| "step": 369 | |
| }, | |
| { | |
| "epoch": 0.7034220532319392, | |
| "grad_norm": 0.6814356082096739, | |
| "learning_rate": 1.6396730215588913e-05, | |
| "loss": 0.4212, | |
| "step": 370 | |
| }, | |
| { | |
| "epoch": 0.7053231939163498, | |
| "grad_norm": 0.6969049026642412, | |
| "learning_rate": 1.6371168813732514e-05, | |
| "loss": 0.4192, | |
| "step": 371 | |
| }, | |
| { | |
| "epoch": 0.7072243346007605, | |
| "grad_norm": 0.7238955470089643, | |
| "learning_rate": 1.6345537147339578e-05, | |
| "loss": 0.4229, | |
| "step": 372 | |
| }, | |
| { | |
| "epoch": 0.7091254752851711, | |
| "grad_norm": 0.6645932684376477, | |
| "learning_rate": 1.6319835499089358e-05, | |
| "loss": 0.4172, | |
| "step": 373 | |
| }, | |
| { | |
| "epoch": 0.7110266159695817, | |
| "grad_norm": 0.6520752661678441, | |
| "learning_rate": 1.6294064152432878e-05, | |
| "loss": 0.4132, | |
| "step": 374 | |
| }, | |
| { | |
| "epoch": 0.7129277566539924, | |
| "grad_norm": 0.6656271548921159, | |
| "learning_rate": 1.626822339158985e-05, | |
| "loss": 0.406, | |
| "step": 375 | |
| }, | |
| { | |
| "epoch": 0.714828897338403, | |
| "grad_norm": 0.6426577795182453, | |
| "learning_rate": 1.6242313501545522e-05, | |
| "loss": 0.4271, | |
| "step": 376 | |
| }, | |
| { | |
| "epoch": 0.7167300380228137, | |
| "grad_norm": 0.6975486917786486, | |
| "learning_rate": 1.621633476804752e-05, | |
| "loss": 0.4539, | |
| "step": 377 | |
| }, | |
| { | |
| "epoch": 0.7186311787072244, | |
| "grad_norm": 0.7188411134554173, | |
| "learning_rate": 1.6190287477602716e-05, | |
| "loss": 0.4246, | |
| "step": 378 | |
| }, | |
| { | |
| "epoch": 0.720532319391635, | |
| "grad_norm": 0.6227278521149572, | |
| "learning_rate": 1.6164171917474078e-05, | |
| "loss": 0.4095, | |
| "step": 379 | |
| }, | |
| { | |
| "epoch": 0.7224334600760456, | |
| "grad_norm": 0.7328421887539684, | |
| "learning_rate": 1.6137988375677466e-05, | |
| "loss": 0.4651, | |
| "step": 380 | |
| }, | |
| { | |
| "epoch": 0.7243346007604563, | |
| "grad_norm": 0.6168834956412362, | |
| "learning_rate": 1.6111737140978495e-05, | |
| "loss": 0.418, | |
| "step": 381 | |
| }, | |
| { | |
| "epoch": 0.7262357414448669, | |
| "grad_norm": 0.6333999746774848, | |
| "learning_rate": 1.6085418502889315e-05, | |
| "loss": 0.4338, | |
| "step": 382 | |
| }, | |
| { | |
| "epoch": 0.7281368821292775, | |
| "grad_norm": 0.7589289359383163, | |
| "learning_rate": 1.6059032751665454e-05, | |
| "loss": 0.4438, | |
| "step": 383 | |
| }, | |
| { | |
| "epoch": 0.7300380228136882, | |
| "grad_norm": 0.6393654757399255, | |
| "learning_rate": 1.6032580178302585e-05, | |
| "loss": 0.4529, | |
| "step": 384 | |
| }, | |
| { | |
| "epoch": 0.7319391634980988, | |
| "grad_norm": 0.6434787469137258, | |
| "learning_rate": 1.600606107453333e-05, | |
| "loss": 0.4192, | |
| "step": 385 | |
| }, | |
| { | |
| "epoch": 0.7338403041825095, | |
| "grad_norm": 0.6813780662350235, | |
| "learning_rate": 1.597947573282405e-05, | |
| "loss": 0.4223, | |
| "step": 386 | |
| }, | |
| { | |
| "epoch": 0.7357414448669202, | |
| "grad_norm": 0.6797400005408815, | |
| "learning_rate": 1.5952824446371608e-05, | |
| "loss": 0.4582, | |
| "step": 387 | |
| }, | |
| { | |
| "epoch": 0.7376425855513308, | |
| "grad_norm": 0.617341235227393, | |
| "learning_rate": 1.592610750910014e-05, | |
| "loss": 0.4158, | |
| "step": 388 | |
| }, | |
| { | |
| "epoch": 0.7395437262357415, | |
| "grad_norm": 0.6371479338615863, | |
| "learning_rate": 1.589932521565781e-05, | |
| "loss": 0.4147, | |
| "step": 389 | |
| }, | |
| { | |
| "epoch": 0.7414448669201521, | |
| "grad_norm": 0.6534769326243557, | |
| "learning_rate": 1.587247786141358e-05, | |
| "loss": 0.4328, | |
| "step": 390 | |
| }, | |
| { | |
| "epoch": 0.7433460076045627, | |
| "grad_norm": 0.6816947394928101, | |
| "learning_rate": 1.5845565742453906e-05, | |
| "loss": 0.4291, | |
| "step": 391 | |
| }, | |
| { | |
| "epoch": 0.7452471482889734, | |
| "grad_norm": 0.6351322615092592, | |
| "learning_rate": 1.581858915557953e-05, | |
| "loss": 0.444, | |
| "step": 392 | |
| }, | |
| { | |
| "epoch": 0.747148288973384, | |
| "grad_norm": 0.7041051882853031, | |
| "learning_rate": 1.5791548398302167e-05, | |
| "loss": 0.4255, | |
| "step": 393 | |
| }, | |
| { | |
| "epoch": 0.7490494296577946, | |
| "grad_norm": 0.6385489754211712, | |
| "learning_rate": 1.5764443768841234e-05, | |
| "loss": 0.4112, | |
| "step": 394 | |
| }, | |
| { | |
| "epoch": 0.7509505703422054, | |
| "grad_norm": 0.6992724135698174, | |
| "learning_rate": 1.5737275566120577e-05, | |
| "loss": 0.4433, | |
| "step": 395 | |
| }, | |
| { | |
| "epoch": 0.752851711026616, | |
| "grad_norm": 0.663720375692244, | |
| "learning_rate": 1.5710044089765144e-05, | |
| "loss": 0.4204, | |
| "step": 396 | |
| }, | |
| { | |
| "epoch": 0.7547528517110266, | |
| "grad_norm": 0.6347735133323371, | |
| "learning_rate": 1.5682749640097708e-05, | |
| "loss": 0.4086, | |
| "step": 397 | |
| }, | |
| { | |
| "epoch": 0.7566539923954373, | |
| "grad_norm": 0.6175999701268513, | |
| "learning_rate": 1.565539251813554e-05, | |
| "loss": 0.3903, | |
| "step": 398 | |
| }, | |
| { | |
| "epoch": 0.7585551330798479, | |
| "grad_norm": 0.7129526321685288, | |
| "learning_rate": 1.5627973025587093e-05, | |
| "loss": 0.4451, | |
| "step": 399 | |
| }, | |
| { | |
| "epoch": 0.7604562737642585, | |
| "grad_norm": 0.6449101388785314, | |
| "learning_rate": 1.560049146484868e-05, | |
| "loss": 0.4299, | |
| "step": 400 | |
| }, | |
| { | |
| "epoch": 0.7623574144486692, | |
| "grad_norm": 0.6444516226737715, | |
| "learning_rate": 1.5572948139001128e-05, | |
| "loss": 0.4342, | |
| "step": 401 | |
| }, | |
| { | |
| "epoch": 0.7642585551330798, | |
| "grad_norm": 0.6558594850124524, | |
| "learning_rate": 1.5545343351806443e-05, | |
| "loss": 0.4019, | |
| "step": 402 | |
| }, | |
| { | |
| "epoch": 0.7661596958174905, | |
| "grad_norm": 0.6806311087546817, | |
| "learning_rate": 1.551767740770446e-05, | |
| "loss": 0.4103, | |
| "step": 403 | |
| }, | |
| { | |
| "epoch": 0.7680608365019012, | |
| "grad_norm": 0.6396842682593787, | |
| "learning_rate": 1.5489950611809484e-05, | |
| "loss": 0.4178, | |
| "step": 404 | |
| }, | |
| { | |
| "epoch": 0.7699619771863118, | |
| "grad_norm": 0.7519663236857671, | |
| "learning_rate": 1.5462163269906928e-05, | |
| "loss": 0.4342, | |
| "step": 405 | |
| }, | |
| { | |
| "epoch": 0.7718631178707225, | |
| "grad_norm": 0.6552424954209749, | |
| "learning_rate": 1.5434315688449924e-05, | |
| "loss": 0.4488, | |
| "step": 406 | |
| }, | |
| { | |
| "epoch": 0.7737642585551331, | |
| "grad_norm": 0.6937977645953339, | |
| "learning_rate": 1.5406408174555978e-05, | |
| "loss": 0.4001, | |
| "step": 407 | |
| }, | |
| { | |
| "epoch": 0.7756653992395437, | |
| "grad_norm": 0.7794329997117471, | |
| "learning_rate": 1.5378441036003543e-05, | |
| "loss": 0.4427, | |
| "step": 408 | |
| }, | |
| { | |
| "epoch": 0.7775665399239544, | |
| "grad_norm": 0.6358150349928228, | |
| "learning_rate": 1.535041458122865e-05, | |
| "loss": 0.4006, | |
| "step": 409 | |
| }, | |
| { | |
| "epoch": 0.779467680608365, | |
| "grad_norm": 0.7674589573608444, | |
| "learning_rate": 1.5322329119321508e-05, | |
| "loss": 0.4115, | |
| "step": 410 | |
| }, | |
| { | |
| "epoch": 0.7813688212927756, | |
| "grad_norm": 0.6455232027663905, | |
| "learning_rate": 1.529418496002308e-05, | |
| "loss": 0.4209, | |
| "step": 411 | |
| }, | |
| { | |
| "epoch": 0.7832699619771863, | |
| "grad_norm": 0.6174759025618579, | |
| "learning_rate": 1.5265982413721662e-05, | |
| "loss": 0.3952, | |
| "step": 412 | |
| }, | |
| { | |
| "epoch": 0.785171102661597, | |
| "grad_norm": 0.7431314676516845, | |
| "learning_rate": 1.5237721791449497e-05, | |
| "loss": 0.4121, | |
| "step": 413 | |
| }, | |
| { | |
| "epoch": 0.7870722433460076, | |
| "grad_norm": 0.6796103385856099, | |
| "learning_rate": 1.5209403404879305e-05, | |
| "loss": 0.4307, | |
| "step": 414 | |
| }, | |
| { | |
| "epoch": 0.7889733840304183, | |
| "grad_norm": 0.6532811912078915, | |
| "learning_rate": 1.5181027566320858e-05, | |
| "loss": 0.4154, | |
| "step": 415 | |
| }, | |
| { | |
| "epoch": 0.7908745247148289, | |
| "grad_norm": 0.6805708206788301, | |
| "learning_rate": 1.5152594588717544e-05, | |
| "loss": 0.4399, | |
| "step": 416 | |
| }, | |
| { | |
| "epoch": 0.7927756653992395, | |
| "grad_norm": 0.6533397054385441, | |
| "learning_rate": 1.5124104785642909e-05, | |
| "loss": 0.4212, | |
| "step": 417 | |
| }, | |
| { | |
| "epoch": 0.7946768060836502, | |
| "grad_norm": 0.658584511203062, | |
| "learning_rate": 1.5095558471297196e-05, | |
| "loss": 0.4167, | |
| "step": 418 | |
| }, | |
| { | |
| "epoch": 0.7965779467680608, | |
| "grad_norm": 0.6630520720627732, | |
| "learning_rate": 1.5066955960503893e-05, | |
| "loss": 0.4257, | |
| "step": 419 | |
| }, | |
| { | |
| "epoch": 0.7984790874524715, | |
| "grad_norm": 0.6372129404140038, | |
| "learning_rate": 1.5038297568706244e-05, | |
| "loss": 0.4271, | |
| "step": 420 | |
| }, | |
| { | |
| "epoch": 0.8003802281368821, | |
| "grad_norm": 0.6400681464799695, | |
| "learning_rate": 1.5009583611963772e-05, | |
| "loss": 0.4345, | |
| "step": 421 | |
| }, | |
| { | |
| "epoch": 0.8022813688212928, | |
| "grad_norm": 0.6556610356736914, | |
| "learning_rate": 1.4980814406948806e-05, | |
| "loss": 0.4129, | |
| "step": 422 | |
| }, | |
| { | |
| "epoch": 0.8041825095057035, | |
| "grad_norm": 0.7029178472668136, | |
| "learning_rate": 1.4951990270942991e-05, | |
| "loss": 0.4071, | |
| "step": 423 | |
| }, | |
| { | |
| "epoch": 0.8060836501901141, | |
| "grad_norm": 0.6351537405211911, | |
| "learning_rate": 1.492311152183376e-05, | |
| "loss": 0.3886, | |
| "step": 424 | |
| }, | |
| { | |
| "epoch": 0.8079847908745247, | |
| "grad_norm": 0.6843158388460424, | |
| "learning_rate": 1.4894178478110856e-05, | |
| "loss": 0.4168, | |
| "step": 425 | |
| }, | |
| { | |
| "epoch": 0.8098859315589354, | |
| "grad_norm": 0.5743874395798579, | |
| "learning_rate": 1.4865191458862816e-05, | |
| "loss": 0.4087, | |
| "step": 426 | |
| }, | |
| { | |
| "epoch": 0.811787072243346, | |
| "grad_norm": 0.692396967419666, | |
| "learning_rate": 1.4836150783773442e-05, | |
| "loss": 0.4093, | |
| "step": 427 | |
| }, | |
| { | |
| "epoch": 0.8136882129277566, | |
| "grad_norm": 0.7031909774524918, | |
| "learning_rate": 1.4807056773118276e-05, | |
| "loss": 0.4476, | |
| "step": 428 | |
| }, | |
| { | |
| "epoch": 0.8155893536121673, | |
| "grad_norm": 0.6127007830700513, | |
| "learning_rate": 1.4777909747761085e-05, | |
| "loss": 0.3985, | |
| "step": 429 | |
| }, | |
| { | |
| "epoch": 0.8174904942965779, | |
| "grad_norm": 0.6420983422029166, | |
| "learning_rate": 1.4748710029150296e-05, | |
| "loss": 0.4265, | |
| "step": 430 | |
| }, | |
| { | |
| "epoch": 0.8193916349809885, | |
| "grad_norm": 0.6773693451005507, | |
| "learning_rate": 1.4719457939315468e-05, | |
| "loss": 0.4279, | |
| "step": 431 | |
| }, | |
| { | |
| "epoch": 0.8212927756653993, | |
| "grad_norm": 0.6717229130904546, | |
| "learning_rate": 1.4690153800863743e-05, | |
| "loss": 0.4074, | |
| "step": 432 | |
| }, | |
| { | |
| "epoch": 0.8231939163498099, | |
| "grad_norm": 0.6811614228148969, | |
| "learning_rate": 1.4660797936976278e-05, | |
| "loss": 0.4055, | |
| "step": 433 | |
| }, | |
| { | |
| "epoch": 0.8250950570342205, | |
| "grad_norm": 0.6278318563098005, | |
| "learning_rate": 1.4631390671404682e-05, | |
| "loss": 0.4153, | |
| "step": 434 | |
| }, | |
| { | |
| "epoch": 0.8269961977186312, | |
| "grad_norm": 0.6431306020762307, | |
| "learning_rate": 1.460193232846745e-05, | |
| "loss": 0.3992, | |
| "step": 435 | |
| }, | |
| { | |
| "epoch": 0.8288973384030418, | |
| "grad_norm": 0.6652935673846776, | |
| "learning_rate": 1.4572423233046386e-05, | |
| "loss": 0.427, | |
| "step": 436 | |
| }, | |
| { | |
| "epoch": 0.8307984790874525, | |
| "grad_norm": 0.6568821044706049, | |
| "learning_rate": 1.4542863710583022e-05, | |
| "loss": 0.4318, | |
| "step": 437 | |
| }, | |
| { | |
| "epoch": 0.8326996197718631, | |
| "grad_norm": 0.6307936715284781, | |
| "learning_rate": 1.4513254087075015e-05, | |
| "loss": 0.4122, | |
| "step": 438 | |
| }, | |
| { | |
| "epoch": 0.8346007604562737, | |
| "grad_norm": 0.6627313451613924, | |
| "learning_rate": 1.4483594689072571e-05, | |
| "loss": 0.4091, | |
| "step": 439 | |
| }, | |
| { | |
| "epoch": 0.8365019011406845, | |
| "grad_norm": 0.6300865030894374, | |
| "learning_rate": 1.4453885843674837e-05, | |
| "loss": 0.418, | |
| "step": 440 | |
| }, | |
| { | |
| "epoch": 0.8384030418250951, | |
| "grad_norm": 0.6615690412313698, | |
| "learning_rate": 1.4424127878526278e-05, | |
| "loss": 0.4268, | |
| "step": 441 | |
| }, | |
| { | |
| "epoch": 0.8403041825095057, | |
| "grad_norm": 0.626756825773454, | |
| "learning_rate": 1.4394321121813093e-05, | |
| "loss": 0.4193, | |
| "step": 442 | |
| }, | |
| { | |
| "epoch": 0.8422053231939164, | |
| "grad_norm": 0.6274644266595787, | |
| "learning_rate": 1.436446590225957e-05, | |
| "loss": 0.432, | |
| "step": 443 | |
| }, | |
| { | |
| "epoch": 0.844106463878327, | |
| "grad_norm": 0.6059230118627331, | |
| "learning_rate": 1.433456254912447e-05, | |
| "loss": 0.4052, | |
| "step": 444 | |
| }, | |
| { | |
| "epoch": 0.8460076045627376, | |
| "grad_norm": 0.6711500102524064, | |
| "learning_rate": 1.4304611392197399e-05, | |
| "loss": 0.412, | |
| "step": 445 | |
| }, | |
| { | |
| "epoch": 0.8479087452471483, | |
| "grad_norm": 0.6546345246712256, | |
| "learning_rate": 1.427461276179517e-05, | |
| "loss": 0.433, | |
| "step": 446 | |
| }, | |
| { | |
| "epoch": 0.8498098859315589, | |
| "grad_norm": 0.6467469429935729, | |
| "learning_rate": 1.4244566988758152e-05, | |
| "loss": 0.4187, | |
| "step": 447 | |
| }, | |
| { | |
| "epoch": 0.8517110266159695, | |
| "grad_norm": 0.6336841174243936, | |
| "learning_rate": 1.4214474404446633e-05, | |
| "loss": 0.4199, | |
| "step": 448 | |
| }, | |
| { | |
| "epoch": 0.8536121673003803, | |
| "grad_norm": 0.6284480342970814, | |
| "learning_rate": 1.4184335340737158e-05, | |
| "loss": 0.4202, | |
| "step": 449 | |
| }, | |
| { | |
| "epoch": 0.8555133079847909, | |
| "grad_norm": 0.6923171974945731, | |
| "learning_rate": 1.4154150130018867e-05, | |
| "loss": 0.4239, | |
| "step": 450 | |
| }, | |
| { | |
| "epoch": 0.8574144486692015, | |
| "grad_norm": 0.6444804505469319, | |
| "learning_rate": 1.4123919105189836e-05, | |
| "loss": 0.4308, | |
| "step": 451 | |
| }, | |
| { | |
| "epoch": 0.8593155893536122, | |
| "grad_norm": 0.6672469398131327, | |
| "learning_rate": 1.4093642599653406e-05, | |
| "loss": 0.4271, | |
| "step": 452 | |
| }, | |
| { | |
| "epoch": 0.8612167300380228, | |
| "grad_norm": 0.6035050075177592, | |
| "learning_rate": 1.40633209473145e-05, | |
| "loss": 0.4206, | |
| "step": 453 | |
| }, | |
| { | |
| "epoch": 0.8631178707224335, | |
| "grad_norm": 0.6768617758402847, | |
| "learning_rate": 1.4032954482575938e-05, | |
| "loss": 0.4257, | |
| "step": 454 | |
| }, | |
| { | |
| "epoch": 0.8650190114068441, | |
| "grad_norm": 0.6308728500144681, | |
| "learning_rate": 1.4002543540334766e-05, | |
| "loss": 0.4249, | |
| "step": 455 | |
| }, | |
| { | |
| "epoch": 0.8669201520912547, | |
| "grad_norm": 0.6691874910966634, | |
| "learning_rate": 1.3972088455978537e-05, | |
| "loss": 0.4233, | |
| "step": 456 | |
| }, | |
| { | |
| "epoch": 0.8688212927756654, | |
| "grad_norm": 0.6172045612665286, | |
| "learning_rate": 1.3941589565381635e-05, | |
| "loss": 0.4083, | |
| "step": 457 | |
| }, | |
| { | |
| "epoch": 0.870722433460076, | |
| "grad_norm": 0.6772164506260429, | |
| "learning_rate": 1.391104720490156e-05, | |
| "loss": 0.4528, | |
| "step": 458 | |
| }, | |
| { | |
| "epoch": 0.8726235741444867, | |
| "grad_norm": 0.6239977301880284, | |
| "learning_rate": 1.3880461711375224e-05, | |
| "loss": 0.3989, | |
| "step": 459 | |
| }, | |
| { | |
| "epoch": 0.8745247148288974, | |
| "grad_norm": 0.659481865459748, | |
| "learning_rate": 1.3849833422115221e-05, | |
| "loss": 0.4261, | |
| "step": 460 | |
| }, | |
| { | |
| "epoch": 0.876425855513308, | |
| "grad_norm": 0.6429441067016293, | |
| "learning_rate": 1.3819162674906134e-05, | |
| "loss": 0.4256, | |
| "step": 461 | |
| }, | |
| { | |
| "epoch": 0.8783269961977186, | |
| "grad_norm": 0.6357728227730678, | |
| "learning_rate": 1.378844980800078e-05, | |
| "loss": 0.4079, | |
| "step": 462 | |
| }, | |
| { | |
| "epoch": 0.8802281368821293, | |
| "grad_norm": 0.7250436583862916, | |
| "learning_rate": 1.3757695160116502e-05, | |
| "loss": 0.444, | |
| "step": 463 | |
| }, | |
| { | |
| "epoch": 0.8821292775665399, | |
| "grad_norm": 0.6398958091746487, | |
| "learning_rate": 1.3726899070431423e-05, | |
| "loss": 0.4103, | |
| "step": 464 | |
| }, | |
| { | |
| "epoch": 0.8840304182509505, | |
| "grad_norm": 0.6612576176418071, | |
| "learning_rate": 1.3696061878580707e-05, | |
| "loss": 0.421, | |
| "step": 465 | |
| }, | |
| { | |
| "epoch": 0.8859315589353612, | |
| "grad_norm": 0.6481963973738333, | |
| "learning_rate": 1.3665183924652817e-05, | |
| "loss": 0.4299, | |
| "step": 466 | |
| }, | |
| { | |
| "epoch": 0.8878326996197718, | |
| "grad_norm": 0.6098249998172416, | |
| "learning_rate": 1.3634265549185755e-05, | |
| "loss": 0.4, | |
| "step": 467 | |
| }, | |
| { | |
| "epoch": 0.8897338403041825, | |
| "grad_norm": 0.6530878808565888, | |
| "learning_rate": 1.3603307093163319e-05, | |
| "loss": 0.4443, | |
| "step": 468 | |
| }, | |
| { | |
| "epoch": 0.8916349809885932, | |
| "grad_norm": 0.679486653338678, | |
| "learning_rate": 1.3572308898011328e-05, | |
| "loss": 0.4035, | |
| "step": 469 | |
| }, | |
| { | |
| "epoch": 0.8935361216730038, | |
| "grad_norm": 0.6218670923193103, | |
| "learning_rate": 1.3541271305593878e-05, | |
| "loss": 0.4348, | |
| "step": 470 | |
| }, | |
| { | |
| "epoch": 0.8954372623574145, | |
| "grad_norm": 0.5980743675904272, | |
| "learning_rate": 1.3510194658209547e-05, | |
| "loss": 0.4222, | |
| "step": 471 | |
| }, | |
| { | |
| "epoch": 0.8973384030418251, | |
| "grad_norm": 0.647314843046179, | |
| "learning_rate": 1.3479079298587634e-05, | |
| "loss": 0.4059, | |
| "step": 472 | |
| }, | |
| { | |
| "epoch": 0.8992395437262357, | |
| "grad_norm": 0.5873313222766207, | |
| "learning_rate": 1.3447925569884374e-05, | |
| "loss": 0.4072, | |
| "step": 473 | |
| }, | |
| { | |
| "epoch": 0.9011406844106464, | |
| "grad_norm": 0.6537063310674617, | |
| "learning_rate": 1.3416733815679166e-05, | |
| "loss": 0.4189, | |
| "step": 474 | |
| }, | |
| { | |
| "epoch": 0.903041825095057, | |
| "grad_norm": 0.6754512865376269, | |
| "learning_rate": 1.3385504379970764e-05, | |
| "loss": 0.4295, | |
| "step": 475 | |
| }, | |
| { | |
| "epoch": 0.9049429657794676, | |
| "grad_norm": 0.6067247936922769, | |
| "learning_rate": 1.3354237607173494e-05, | |
| "loss": 0.4102, | |
| "step": 476 | |
| }, | |
| { | |
| "epoch": 0.9068441064638784, | |
| "grad_norm": 0.6833680813451695, | |
| "learning_rate": 1.3322933842113457e-05, | |
| "loss": 0.4017, | |
| "step": 477 | |
| }, | |
| { | |
| "epoch": 0.908745247148289, | |
| "grad_norm": 0.6186933482398198, | |
| "learning_rate": 1.3291593430024727e-05, | |
| "loss": 0.4154, | |
| "step": 478 | |
| }, | |
| { | |
| "epoch": 0.9106463878326996, | |
| "grad_norm": 0.6730039502740822, | |
| "learning_rate": 1.3260216716545534e-05, | |
| "loss": 0.4192, | |
| "step": 479 | |
| }, | |
| { | |
| "epoch": 0.9125475285171103, | |
| "grad_norm": 0.6291337336765166, | |
| "learning_rate": 1.3228804047714462e-05, | |
| "loss": 0.3971, | |
| "step": 480 | |
| }, | |
| { | |
| "epoch": 0.9144486692015209, | |
| "grad_norm": 0.6318840599894472, | |
| "learning_rate": 1.319735576996663e-05, | |
| "loss": 0.4342, | |
| "step": 481 | |
| }, | |
| { | |
| "epoch": 0.9163498098859315, | |
| "grad_norm": 0.6335710437189978, | |
| "learning_rate": 1.3165872230129869e-05, | |
| "loss": 0.4133, | |
| "step": 482 | |
| }, | |
| { | |
| "epoch": 0.9182509505703422, | |
| "grad_norm": 0.6480898780804585, | |
| "learning_rate": 1.3134353775420895e-05, | |
| "loss": 0.4194, | |
| "step": 483 | |
| }, | |
| { | |
| "epoch": 0.9201520912547528, | |
| "grad_norm": 0.6168591088733274, | |
| "learning_rate": 1.3102800753441488e-05, | |
| "loss": 0.4223, | |
| "step": 484 | |
| }, | |
| { | |
| "epoch": 0.9220532319391636, | |
| "grad_norm": 0.5977807382717967, | |
| "learning_rate": 1.3071213512174655e-05, | |
| "loss": 0.4077, | |
| "step": 485 | |
| }, | |
| { | |
| "epoch": 0.9239543726235742, | |
| "grad_norm": 0.6022104824691142, | |
| "learning_rate": 1.3039592399980785e-05, | |
| "loss": 0.3908, | |
| "step": 486 | |
| }, | |
| { | |
| "epoch": 0.9258555133079848, | |
| "grad_norm": 0.6723992717504055, | |
| "learning_rate": 1.3007937765593818e-05, | |
| "loss": 0.4449, | |
| "step": 487 | |
| }, | |
| { | |
| "epoch": 0.9277566539923955, | |
| "grad_norm": 0.6184905381045962, | |
| "learning_rate": 1.2976249958117395e-05, | |
| "loss": 0.4147, | |
| "step": 488 | |
| }, | |
| { | |
| "epoch": 0.9296577946768061, | |
| "grad_norm": 0.6530221821421701, | |
| "learning_rate": 1.2944529327021002e-05, | |
| "loss": 0.4003, | |
| "step": 489 | |
| }, | |
| { | |
| "epoch": 0.9315589353612167, | |
| "grad_norm": 0.5959923754829242, | |
| "learning_rate": 1.291277622213612e-05, | |
| "loss": 0.3984, | |
| "step": 490 | |
| }, | |
| { | |
| "epoch": 0.9334600760456274, | |
| "grad_norm": 0.6353819256338685, | |
| "learning_rate": 1.2880990993652379e-05, | |
| "loss": 0.4224, | |
| "step": 491 | |
| }, | |
| { | |
| "epoch": 0.935361216730038, | |
| "grad_norm": 0.638345220354351, | |
| "learning_rate": 1.2849173992113669e-05, | |
| "loss": 0.4204, | |
| "step": 492 | |
| }, | |
| { | |
| "epoch": 0.9372623574144486, | |
| "grad_norm": 0.584651569297595, | |
| "learning_rate": 1.2817325568414299e-05, | |
| "loss": 0.4214, | |
| "step": 493 | |
| }, | |
| { | |
| "epoch": 0.9391634980988594, | |
| "grad_norm": 0.7200159520003764, | |
| "learning_rate": 1.2785446073795118e-05, | |
| "loss": 0.4188, | |
| "step": 494 | |
| }, | |
| { | |
| "epoch": 0.94106463878327, | |
| "grad_norm": 0.6306709003285014, | |
| "learning_rate": 1.2753535859839638e-05, | |
| "loss": 0.4301, | |
| "step": 495 | |
| }, | |
| { | |
| "epoch": 0.9429657794676806, | |
| "grad_norm": 0.5893049294222169, | |
| "learning_rate": 1.272159527847016e-05, | |
| "loss": 0.4092, | |
| "step": 496 | |
| }, | |
| { | |
| "epoch": 0.9448669201520913, | |
| "grad_norm": 0.6795913949827043, | |
| "learning_rate": 1.2689624681943897e-05, | |
| "loss": 0.455, | |
| "step": 497 | |
| }, | |
| { | |
| "epoch": 0.9467680608365019, | |
| "grad_norm": 0.6761007540534718, | |
| "learning_rate": 1.2657624422849077e-05, | |
| "loss": 0.4255, | |
| "step": 498 | |
| }, | |
| { | |
| "epoch": 0.9486692015209125, | |
| "grad_norm": 0.6378491171589162, | |
| "learning_rate": 1.2625594854101066e-05, | |
| "loss": 0.4323, | |
| "step": 499 | |
| }, | |
| { | |
| "epoch": 0.9505703422053232, | |
| "grad_norm": 0.6108365423995037, | |
| "learning_rate": 1.2593536328938471e-05, | |
| "loss": 0.4134, | |
| "step": 500 | |
| }, | |
| { | |
| "epoch": 0.9524714828897338, | |
| "grad_norm": 0.6368086695926756, | |
| "learning_rate": 1.2561449200919253e-05, | |
| "loss": 0.3992, | |
| "step": 501 | |
| }, | |
| { | |
| "epoch": 0.9543726235741445, | |
| "grad_norm": 0.5833894134146728, | |
| "learning_rate": 1.2529333823916807e-05, | |
| "loss": 0.3976, | |
| "step": 502 | |
| }, | |
| { | |
| "epoch": 0.9562737642585551, | |
| "grad_norm": 0.6261483419708064, | |
| "learning_rate": 1.2497190552116082e-05, | |
| "loss": 0.4254, | |
| "step": 503 | |
| }, | |
| { | |
| "epoch": 0.9581749049429658, | |
| "grad_norm": 0.6178872314380964, | |
| "learning_rate": 1.2465019740009662e-05, | |
| "loss": 0.3992, | |
| "step": 504 | |
| }, | |
| { | |
| "epoch": 0.9600760456273765, | |
| "grad_norm": 0.6061484208731519, | |
| "learning_rate": 1.2432821742393854e-05, | |
| "loss": 0.4006, | |
| "step": 505 | |
| }, | |
| { | |
| "epoch": 0.9619771863117871, | |
| "grad_norm": 0.6113136199830722, | |
| "learning_rate": 1.2400596914364792e-05, | |
| "loss": 0.4139, | |
| "step": 506 | |
| }, | |
| { | |
| "epoch": 0.9638783269961977, | |
| "grad_norm": 0.6709960786631555, | |
| "learning_rate": 1.2368345611314508e-05, | |
| "loss": 0.4165, | |
| "step": 507 | |
| }, | |
| { | |
| "epoch": 0.9657794676806084, | |
| "grad_norm": 0.6242443663693101, | |
| "learning_rate": 1.2336068188927002e-05, | |
| "loss": 0.4053, | |
| "step": 508 | |
| }, | |
| { | |
| "epoch": 0.967680608365019, | |
| "grad_norm": 0.6236062339195279, | |
| "learning_rate": 1.2303765003174342e-05, | |
| "loss": 0.4278, | |
| "step": 509 | |
| }, | |
| { | |
| "epoch": 0.9695817490494296, | |
| "grad_norm": 0.6548211230430354, | |
| "learning_rate": 1.2271436410312727e-05, | |
| "loss": 0.4447, | |
| "step": 510 | |
| }, | |
| { | |
| "epoch": 0.9714828897338403, | |
| "grad_norm": 0.624074712698175, | |
| "learning_rate": 1.2239082766878557e-05, | |
| "loss": 0.4241, | |
| "step": 511 | |
| }, | |
| { | |
| "epoch": 0.973384030418251, | |
| "grad_norm": 0.7294034635221138, | |
| "learning_rate": 1.2206704429684504e-05, | |
| "loss": 0.4257, | |
| "step": 512 | |
| }, | |
| { | |
| "epoch": 0.9752851711026616, | |
| "grad_norm": 0.6187635573371786, | |
| "learning_rate": 1.2174301755815572e-05, | |
| "loss": 0.4113, | |
| "step": 513 | |
| }, | |
| { | |
| "epoch": 0.9771863117870723, | |
| "grad_norm": 0.6198785412272387, | |
| "learning_rate": 1.2141875102625166e-05, | |
| "loss": 0.4056, | |
| "step": 514 | |
| }, | |
| { | |
| "epoch": 0.9790874524714829, | |
| "grad_norm": 0.6369088801309587, | |
| "learning_rate": 1.2109424827731144e-05, | |
| "loss": 0.4215, | |
| "step": 515 | |
| }, | |
| { | |
| "epoch": 0.9809885931558935, | |
| "grad_norm": 0.6333190392505884, | |
| "learning_rate": 1.2076951289011884e-05, | |
| "loss": 0.4383, | |
| "step": 516 | |
| }, | |
| { | |
| "epoch": 0.9828897338403042, | |
| "grad_norm": 0.6541758625133649, | |
| "learning_rate": 1.204445484460232e-05, | |
| "loss": 0.4266, | |
| "step": 517 | |
| }, | |
| { | |
| "epoch": 0.9847908745247148, | |
| "grad_norm": 0.6704894802331391, | |
| "learning_rate": 1.2011935852890004e-05, | |
| "loss": 0.4192, | |
| "step": 518 | |
| }, | |
| { | |
| "epoch": 0.9866920152091255, | |
| "grad_norm": 0.6186995676417961, | |
| "learning_rate": 1.1979394672511156e-05, | |
| "loss": 0.3915, | |
| "step": 519 | |
| }, | |
| { | |
| "epoch": 0.9885931558935361, | |
| "grad_norm": 0.6303070165471784, | |
| "learning_rate": 1.19468316623467e-05, | |
| "loss": 0.4366, | |
| "step": 520 | |
| }, | |
| { | |
| "epoch": 0.9904942965779467, | |
| "grad_norm": 0.621326155702956, | |
| "learning_rate": 1.1914247181518312e-05, | |
| "loss": 0.4106, | |
| "step": 521 | |
| }, | |
| { | |
| "epoch": 0.9923954372623575, | |
| "grad_norm": 0.6929601033716066, | |
| "learning_rate": 1.1881641589384456e-05, | |
| "loss": 0.422, | |
| "step": 522 | |
| }, | |
| { | |
| "epoch": 0.9942965779467681, | |
| "grad_norm": 0.6386041656603102, | |
| "learning_rate": 1.1849015245536424e-05, | |
| "loss": 0.4326, | |
| "step": 523 | |
| }, | |
| { | |
| "epoch": 0.9961977186311787, | |
| "grad_norm": 0.6325536264265625, | |
| "learning_rate": 1.1816368509794365e-05, | |
| "loss": 0.4182, | |
| "step": 524 | |
| }, | |
| { | |
| "epoch": 0.9980988593155894, | |
| "grad_norm": 0.6673904035381814, | |
| "learning_rate": 1.1783701742203326e-05, | |
| "loss": 0.3813, | |
| "step": 525 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "grad_norm": 0.6142927864699834, | |
| "learning_rate": 1.1751015303029272e-05, | |
| "loss": 0.415, | |
| "step": 526 | |
| }, | |
| { | |
| "epoch": 1.0019011406844107, | |
| "grad_norm": 0.7567657954486408, | |
| "learning_rate": 1.1718309552755118e-05, | |
| "loss": 0.3148, | |
| "step": 527 | |
| }, | |
| { | |
| "epoch": 1.0038022813688212, | |
| "grad_norm": 0.7743039878469087, | |
| "learning_rate": 1.1685584852076746e-05, | |
| "loss": 0.3311, | |
| "step": 528 | |
| }, | |
| { | |
| "epoch": 1.005703422053232, | |
| "grad_norm": 0.6206147209102286, | |
| "learning_rate": 1.1652841561899042e-05, | |
| "loss": 0.3244, | |
| "step": 529 | |
| }, | |
| { | |
| "epoch": 1.0076045627376427, | |
| "grad_norm": 0.6328673371806786, | |
| "learning_rate": 1.1620080043331901e-05, | |
| "loss": 0.3118, | |
| "step": 530 | |
| }, | |
| { | |
| "epoch": 1.0095057034220531, | |
| "grad_norm": 0.804854643307543, | |
| "learning_rate": 1.1587300657686254e-05, | |
| "loss": 0.3327, | |
| "step": 531 | |
| }, | |
| { | |
| "epoch": 1.0114068441064639, | |
| "grad_norm": 0.7240334268453062, | |
| "learning_rate": 1.1554503766470069e-05, | |
| "loss": 0.3164, | |
| "step": 532 | |
| }, | |
| { | |
| "epoch": 1.0133079847908746, | |
| "grad_norm": 0.7818219706730486, | |
| "learning_rate": 1.1521689731384391e-05, | |
| "loss": 0.331, | |
| "step": 533 | |
| }, | |
| { | |
| "epoch": 1.015209125475285, | |
| "grad_norm": 0.7538362566630916, | |
| "learning_rate": 1.1488858914319321e-05, | |
| "loss": 0.3252, | |
| "step": 534 | |
| }, | |
| { | |
| "epoch": 1.0171102661596958, | |
| "grad_norm": 0.6712713612049159, | |
| "learning_rate": 1.1456011677350052e-05, | |
| "loss": 0.3032, | |
| "step": 535 | |
| }, | |
| { | |
| "epoch": 1.0190114068441065, | |
| "grad_norm": 0.6562029003399492, | |
| "learning_rate": 1.1423148382732854e-05, | |
| "loss": 0.3298, | |
| "step": 536 | |
| }, | |
| { | |
| "epoch": 1.020912547528517, | |
| "grad_norm": 0.706224794105384, | |
| "learning_rate": 1.1390269392901096e-05, | |
| "loss": 0.3147, | |
| "step": 537 | |
| }, | |
| { | |
| "epoch": 1.0228136882129277, | |
| "grad_norm": 0.6588696483189674, | |
| "learning_rate": 1.1357375070461241e-05, | |
| "loss": 0.3043, | |
| "step": 538 | |
| }, | |
| { | |
| "epoch": 1.0247148288973384, | |
| "grad_norm": 0.7123669769752703, | |
| "learning_rate": 1.1324465778188846e-05, | |
| "loss": 0.3305, | |
| "step": 539 | |
| }, | |
| { | |
| "epoch": 1.026615969581749, | |
| "grad_norm": 0.6669797984307142, | |
| "learning_rate": 1.1291541879024568e-05, | |
| "loss": 0.3297, | |
| "step": 540 | |
| }, | |
| { | |
| "epoch": 1.0285171102661597, | |
| "grad_norm": 0.6381973464071211, | |
| "learning_rate": 1.1258603736070145e-05, | |
| "loss": 0.3009, | |
| "step": 541 | |
| }, | |
| { | |
| "epoch": 1.0304182509505704, | |
| "grad_norm": 0.7940253991567974, | |
| "learning_rate": 1.1225651712584413e-05, | |
| "loss": 0.3195, | |
| "step": 542 | |
| }, | |
| { | |
| "epoch": 1.032319391634981, | |
| "grad_norm": 0.6714161551801561, | |
| "learning_rate": 1.1192686171979288e-05, | |
| "loss": 0.3226, | |
| "step": 543 | |
| }, | |
| { | |
| "epoch": 1.0342205323193916, | |
| "grad_norm": 0.7146160723111444, | |
| "learning_rate": 1.1159707477815756e-05, | |
| "loss": 0.3, | |
| "step": 544 | |
| }, | |
| { | |
| "epoch": 1.0361216730038023, | |
| "grad_norm": 0.6813394577831967, | |
| "learning_rate": 1.1126715993799875e-05, | |
| "loss": 0.3187, | |
| "step": 545 | |
| }, | |
| { | |
| "epoch": 1.038022813688213, | |
| "grad_norm": 0.6869823414888075, | |
| "learning_rate": 1.1093712083778748e-05, | |
| "loss": 0.3146, | |
| "step": 546 | |
| }, | |
| { | |
| "epoch": 1.0399239543726235, | |
| "grad_norm": 0.656230196105513, | |
| "learning_rate": 1.1060696111736515e-05, | |
| "loss": 0.3138, | |
| "step": 547 | |
| }, | |
| { | |
| "epoch": 1.0418250950570342, | |
| "grad_norm": 0.7109198393417229, | |
| "learning_rate": 1.1027668441790358e-05, | |
| "loss": 0.306, | |
| "step": 548 | |
| }, | |
| { | |
| "epoch": 1.043726235741445, | |
| "grad_norm": 0.6626014170019084, | |
| "learning_rate": 1.099462943818646e-05, | |
| "loss": 0.3216, | |
| "step": 549 | |
| }, | |
| { | |
| "epoch": 1.0456273764258555, | |
| "grad_norm": 0.6306791432051244, | |
| "learning_rate": 1.0961579465295987e-05, | |
| "loss": 0.2921, | |
| "step": 550 | |
| }, | |
| { | |
| "epoch": 1.0475285171102662, | |
| "grad_norm": 0.7489719529508805, | |
| "learning_rate": 1.0928518887611099e-05, | |
| "loss": 0.3156, | |
| "step": 551 | |
| }, | |
| { | |
| "epoch": 1.049429657794677, | |
| "grad_norm": 0.665698625880171, | |
| "learning_rate": 1.0895448069740902e-05, | |
| "loss": 0.3175, | |
| "step": 552 | |
| }, | |
| { | |
| "epoch": 1.0513307984790874, | |
| "grad_norm": 0.6519804753565112, | |
| "learning_rate": 1.0862367376407433e-05, | |
| "loss": 0.3039, | |
| "step": 553 | |
| }, | |
| { | |
| "epoch": 1.053231939163498, | |
| "grad_norm": 0.756179126442388, | |
| "learning_rate": 1.0829277172441648e-05, | |
| "loss": 0.314, | |
| "step": 554 | |
| }, | |
| { | |
| "epoch": 1.0551330798479088, | |
| "grad_norm": 0.6304165794242884, | |
| "learning_rate": 1.0796177822779384e-05, | |
| "loss": 0.3094, | |
| "step": 555 | |
| }, | |
| { | |
| "epoch": 1.0570342205323193, | |
| "grad_norm": 0.7018758569344249, | |
| "learning_rate": 1.0763069692457346e-05, | |
| "loss": 0.3096, | |
| "step": 556 | |
| }, | |
| { | |
| "epoch": 1.05893536121673, | |
| "grad_norm": 0.6881349412387706, | |
| "learning_rate": 1.0729953146609076e-05, | |
| "loss": 0.3326, | |
| "step": 557 | |
| }, | |
| { | |
| "epoch": 1.0608365019011408, | |
| "grad_norm": 0.6907287988770531, | |
| "learning_rate": 1.0696828550460928e-05, | |
| "loss": 0.3162, | |
| "step": 558 | |
| }, | |
| { | |
| "epoch": 1.0627376425855513, | |
| "grad_norm": 0.7493069779156201, | |
| "learning_rate": 1.0663696269328034e-05, | |
| "loss": 0.3257, | |
| "step": 559 | |
| }, | |
| { | |
| "epoch": 1.064638783269962, | |
| "grad_norm": 0.6071383227656758, | |
| "learning_rate": 1.0630556668610286e-05, | |
| "loss": 0.2886, | |
| "step": 560 | |
| }, | |
| { | |
| "epoch": 1.0665399239543727, | |
| "grad_norm": 0.694172432550109, | |
| "learning_rate": 1.059741011378829e-05, | |
| "loss": 0.304, | |
| "step": 561 | |
| }, | |
| { | |
| "epoch": 1.0684410646387832, | |
| "grad_norm": 0.6927845465044504, | |
| "learning_rate": 1.0564256970419367e-05, | |
| "loss": 0.3063, | |
| "step": 562 | |
| }, | |
| { | |
| "epoch": 1.070342205323194, | |
| "grad_norm": 0.6986357791718101, | |
| "learning_rate": 1.0531097604133473e-05, | |
| "loss": 0.309, | |
| "step": 563 | |
| }, | |
| { | |
| "epoch": 1.0722433460076046, | |
| "grad_norm": 0.6950021086160334, | |
| "learning_rate": 1.0497932380629207e-05, | |
| "loss": 0.3197, | |
| "step": 564 | |
| }, | |
| { | |
| "epoch": 1.0741444866920151, | |
| "grad_norm": 0.6403594433633433, | |
| "learning_rate": 1.0464761665669771e-05, | |
| "loss": 0.317, | |
| "step": 565 | |
| }, | |
| { | |
| "epoch": 1.0760456273764258, | |
| "grad_norm": 0.6779444307188064, | |
| "learning_rate": 1.0431585825078916e-05, | |
| "loss": 0.3124, | |
| "step": 566 | |
| }, | |
| { | |
| "epoch": 1.0779467680608366, | |
| "grad_norm": 0.7023183490397679, | |
| "learning_rate": 1.0398405224736927e-05, | |
| "loss": 0.3115, | |
| "step": 567 | |
| }, | |
| { | |
| "epoch": 1.079847908745247, | |
| "grad_norm": 0.6663870967494772, | |
| "learning_rate": 1.0365220230576592e-05, | |
| "loss": 0.3045, | |
| "step": 568 | |
| }, | |
| { | |
| "epoch": 1.0817490494296578, | |
| "grad_norm": 0.6771312090496145, | |
| "learning_rate": 1.0332031208579133e-05, | |
| "loss": 0.3143, | |
| "step": 569 | |
| }, | |
| { | |
| "epoch": 1.0836501901140685, | |
| "grad_norm": 0.6874639276333454, | |
| "learning_rate": 1.0298838524770212e-05, | |
| "loss": 0.3024, | |
| "step": 570 | |
| }, | |
| { | |
| "epoch": 1.085551330798479, | |
| "grad_norm": 0.6417290534195866, | |
| "learning_rate": 1.0265642545215872e-05, | |
| "loss": 0.3114, | |
| "step": 571 | |
| }, | |
| { | |
| "epoch": 1.0874524714828897, | |
| "grad_norm": 0.6629393985575456, | |
| "learning_rate": 1.0232443636018502e-05, | |
| "loss": 0.3264, | |
| "step": 572 | |
| }, | |
| { | |
| "epoch": 1.0893536121673004, | |
| "grad_norm": 0.6319408992869819, | |
| "learning_rate": 1.0199242163312794e-05, | |
| "loss": 0.2942, | |
| "step": 573 | |
| }, | |
| { | |
| "epoch": 1.091254752851711, | |
| "grad_norm": 0.6172840127538345, | |
| "learning_rate": 1.0166038493261723e-05, | |
| "loss": 0.294, | |
| "step": 574 | |
| }, | |
| { | |
| "epoch": 1.0931558935361216, | |
| "grad_norm": 0.7460037185243779, | |
| "learning_rate": 1.013283299205249e-05, | |
| "loss": 0.3068, | |
| "step": 575 | |
| }, | |
| { | |
| "epoch": 1.0950570342205324, | |
| "grad_norm": 0.6672993078508158, | |
| "learning_rate": 1.0099626025892491e-05, | |
| "loss": 0.3062, | |
| "step": 576 | |
| }, | |
| { | |
| "epoch": 1.0969581749049429, | |
| "grad_norm": 0.7139923948907059, | |
| "learning_rate": 1.0066417961005283e-05, | |
| "loss": 0.3207, | |
| "step": 577 | |
| }, | |
| { | |
| "epoch": 1.0988593155893536, | |
| "grad_norm": 0.7124981996044598, | |
| "learning_rate": 1.0033209163626539e-05, | |
| "loss": 0.3161, | |
| "step": 578 | |
| }, | |
| { | |
| "epoch": 1.1007604562737643, | |
| "grad_norm": 0.7106756511788879, | |
| "learning_rate": 1e-05, | |
| "loss": 0.3179, | |
| "step": 579 | |
| }, | |
| { | |
| "epoch": 1.102661596958175, | |
| "grad_norm": 0.6789498549631824, | |
| "learning_rate": 9.966790836373465e-06, | |
| "loss": 0.3052, | |
| "step": 580 | |
| }, | |
| { | |
| "epoch": 1.1045627376425855, | |
| "grad_norm": 0.6682232835476003, | |
| "learning_rate": 9.933582038994719e-06, | |
| "loss": 0.3034, | |
| "step": 581 | |
| }, | |
| { | |
| "epoch": 1.1064638783269962, | |
| "grad_norm": 0.6832289098173764, | |
| "learning_rate": 9.90037397410751e-06, | |
| "loss": 0.3276, | |
| "step": 582 | |
| }, | |
| { | |
| "epoch": 1.108365019011407, | |
| "grad_norm": 0.6333224045505158, | |
| "learning_rate": 9.867167007947511e-06, | |
| "loss": 0.3107, | |
| "step": 583 | |
| }, | |
| { | |
| "epoch": 1.1102661596958174, | |
| "grad_norm": 0.640215466998294, | |
| "learning_rate": 9.833961506738282e-06, | |
| "loss": 0.3043, | |
| "step": 584 | |
| }, | |
| { | |
| "epoch": 1.1121673003802282, | |
| "grad_norm": 0.6266766141725529, | |
| "learning_rate": 9.80075783668721e-06, | |
| "loss": 0.2944, | |
| "step": 585 | |
| }, | |
| { | |
| "epoch": 1.1140684410646389, | |
| "grad_norm": 0.7020252967068222, | |
| "learning_rate": 9.767556363981503e-06, | |
| "loss": 0.3196, | |
| "step": 586 | |
| }, | |
| { | |
| "epoch": 1.1159695817490494, | |
| "grad_norm": 0.7040123204276214, | |
| "learning_rate": 9.734357454784131e-06, | |
| "loss": 0.3214, | |
| "step": 587 | |
| }, | |
| { | |
| "epoch": 1.11787072243346, | |
| "grad_norm": 0.6863486730737147, | |
| "learning_rate": 9.701161475229791e-06, | |
| "loss": 0.3114, | |
| "step": 588 | |
| }, | |
| { | |
| "epoch": 1.1197718631178708, | |
| "grad_norm": 0.6912981815193899, | |
| "learning_rate": 9.66796879142087e-06, | |
| "loss": 0.3042, | |
| "step": 589 | |
| }, | |
| { | |
| "epoch": 1.1216730038022813, | |
| "grad_norm": 0.7384609271838191, | |
| "learning_rate": 9.634779769423412e-06, | |
| "loss": 0.2964, | |
| "step": 590 | |
| }, | |
| { | |
| "epoch": 1.123574144486692, | |
| "grad_norm": 0.6572458418372202, | |
| "learning_rate": 9.601594775263073e-06, | |
| "loss": 0.3138, | |
| "step": 591 | |
| }, | |
| { | |
| "epoch": 1.1254752851711027, | |
| "grad_norm": 0.6913471221584859, | |
| "learning_rate": 9.568414174921085e-06, | |
| "loss": 0.3206, | |
| "step": 592 | |
| }, | |
| { | |
| "epoch": 1.1273764258555132, | |
| "grad_norm": 0.6536339218884893, | |
| "learning_rate": 9.535238334330234e-06, | |
| "loss": 0.3106, | |
| "step": 593 | |
| }, | |
| { | |
| "epoch": 1.129277566539924, | |
| "grad_norm": 0.6752489169636481, | |
| "learning_rate": 9.502067619370794e-06, | |
| "loss": 0.3082, | |
| "step": 594 | |
| }, | |
| { | |
| "epoch": 1.1311787072243347, | |
| "grad_norm": 0.6389942772080103, | |
| "learning_rate": 9.468902395866532e-06, | |
| "loss": 0.3081, | |
| "step": 595 | |
| }, | |
| { | |
| "epoch": 1.1330798479087452, | |
| "grad_norm": 0.6822398263484467, | |
| "learning_rate": 9.435743029580638e-06, | |
| "loss": 0.309, | |
| "step": 596 | |
| }, | |
| { | |
| "epoch": 1.1349809885931559, | |
| "grad_norm": 0.6693269891103041, | |
| "learning_rate": 9.402589886211711e-06, | |
| "loss": 0.2971, | |
| "step": 597 | |
| }, | |
| { | |
| "epoch": 1.1368821292775666, | |
| "grad_norm": 0.7559474114725443, | |
| "learning_rate": 9.369443331389718e-06, | |
| "loss": 0.3141, | |
| "step": 598 | |
| }, | |
| { | |
| "epoch": 1.138783269961977, | |
| "grad_norm": 0.7532379721788304, | |
| "learning_rate": 9.336303730671968e-06, | |
| "loss": 0.316, | |
| "step": 599 | |
| }, | |
| { | |
| "epoch": 1.1406844106463878, | |
| "grad_norm": 0.6991651986355691, | |
| "learning_rate": 9.303171449539074e-06, | |
| "loss": 0.3116, | |
| "step": 600 | |
| }, | |
| { | |
| "epoch": 1.1425855513307985, | |
| "grad_norm": 0.6863333309884198, | |
| "learning_rate": 9.270046853390924e-06, | |
| "loss": 0.3013, | |
| "step": 601 | |
| }, | |
| { | |
| "epoch": 1.144486692015209, | |
| "grad_norm": 0.7094229250728489, | |
| "learning_rate": 9.236930307542654e-06, | |
| "loss": 0.3134, | |
| "step": 602 | |
| }, | |
| { | |
| "epoch": 1.1463878326996197, | |
| "grad_norm": 0.6351551403401836, | |
| "learning_rate": 9.203822177220621e-06, | |
| "loss": 0.2992, | |
| "step": 603 | |
| }, | |
| { | |
| "epoch": 1.1482889733840305, | |
| "grad_norm": 0.7260162424852228, | |
| "learning_rate": 9.170722827558357e-06, | |
| "loss": 0.3197, | |
| "step": 604 | |
| }, | |
| { | |
| "epoch": 1.1501901140684412, | |
| "grad_norm": 0.66039016261964, | |
| "learning_rate": 9.13763262359257e-06, | |
| "loss": 0.3167, | |
| "step": 605 | |
| }, | |
| { | |
| "epoch": 1.1520912547528517, | |
| "grad_norm": 0.6619132996772102, | |
| "learning_rate": 9.104551930259101e-06, | |
| "loss": 0.3094, | |
| "step": 606 | |
| }, | |
| { | |
| "epoch": 1.1539923954372624, | |
| "grad_norm": 0.742097466060349, | |
| "learning_rate": 9.071481112388905e-06, | |
| "loss": 0.3317, | |
| "step": 607 | |
| }, | |
| { | |
| "epoch": 1.1558935361216731, | |
| "grad_norm": 0.6255698154176806, | |
| "learning_rate": 9.038420534704015e-06, | |
| "loss": 0.3034, | |
| "step": 608 | |
| }, | |
| { | |
| "epoch": 1.1577946768060836, | |
| "grad_norm": 0.6817646505211483, | |
| "learning_rate": 9.005370561813545e-06, | |
| "loss": 0.3221, | |
| "step": 609 | |
| }, | |
| { | |
| "epoch": 1.1596958174904943, | |
| "grad_norm": 0.6620971817439955, | |
| "learning_rate": 8.972331558209644e-06, | |
| "loss": 0.3204, | |
| "step": 610 | |
| }, | |
| { | |
| "epoch": 1.161596958174905, | |
| "grad_norm": 0.6342403295083855, | |
| "learning_rate": 8.939303888263485e-06, | |
| "loss": 0.3085, | |
| "step": 611 | |
| }, | |
| { | |
| "epoch": 1.1634980988593155, | |
| "grad_norm": 0.6380039715231196, | |
| "learning_rate": 8.906287916221259e-06, | |
| "loss": 0.3179, | |
| "step": 612 | |
| }, | |
| { | |
| "epoch": 1.1653992395437263, | |
| "grad_norm": 0.675302620678565, | |
| "learning_rate": 8.873284006200129e-06, | |
| "loss": 0.3216, | |
| "step": 613 | |
| }, | |
| { | |
| "epoch": 1.167300380228137, | |
| "grad_norm": 0.6345731348769819, | |
| "learning_rate": 8.840292522184247e-06, | |
| "loss": 0.3004, | |
| "step": 614 | |
| }, | |
| { | |
| "epoch": 1.1692015209125475, | |
| "grad_norm": 0.6384933685857499, | |
| "learning_rate": 8.807313828020715e-06, | |
| "loss": 0.3284, | |
| "step": 615 | |
| }, | |
| { | |
| "epoch": 1.1711026615969582, | |
| "grad_norm": 0.6731293673288582, | |
| "learning_rate": 8.774348287415589e-06, | |
| "loss": 0.3156, | |
| "step": 616 | |
| }, | |
| { | |
| "epoch": 1.173003802281369, | |
| "grad_norm": 0.6942659262130042, | |
| "learning_rate": 8.74139626392986e-06, | |
| "loss": 0.327, | |
| "step": 617 | |
| }, | |
| { | |
| "epoch": 1.1749049429657794, | |
| "grad_norm": 0.6603235514347305, | |
| "learning_rate": 8.708458120975436e-06, | |
| "loss": 0.3279, | |
| "step": 618 | |
| }, | |
| { | |
| "epoch": 1.1768060836501901, | |
| "grad_norm": 0.6400317785473698, | |
| "learning_rate": 8.675534221811156e-06, | |
| "loss": 0.3038, | |
| "step": 619 | |
| }, | |
| { | |
| "epoch": 1.1787072243346008, | |
| "grad_norm": 0.6613562198466957, | |
| "learning_rate": 8.64262492953876e-06, | |
| "loss": 0.3032, | |
| "step": 620 | |
| }, | |
| { | |
| "epoch": 1.1806083650190113, | |
| "grad_norm": 0.6854120917680185, | |
| "learning_rate": 8.60973060709891e-06, | |
| "loss": 0.3359, | |
| "step": 621 | |
| }, | |
| { | |
| "epoch": 1.182509505703422, | |
| "grad_norm": 0.6447935546909752, | |
| "learning_rate": 8.576851617267151e-06, | |
| "loss": 0.3157, | |
| "step": 622 | |
| }, | |
| { | |
| "epoch": 1.1844106463878328, | |
| "grad_norm": 0.7476373991170864, | |
| "learning_rate": 8.543988322649954e-06, | |
| "loss": 0.3299, | |
| "step": 623 | |
| }, | |
| { | |
| "epoch": 1.1863117870722433, | |
| "grad_norm": 0.6912834675759985, | |
| "learning_rate": 8.511141085680684e-06, | |
| "loss": 0.3235, | |
| "step": 624 | |
| }, | |
| { | |
| "epoch": 1.188212927756654, | |
| "grad_norm": 0.6656061123071986, | |
| "learning_rate": 8.478310268615612e-06, | |
| "loss": 0.3101, | |
| "step": 625 | |
| }, | |
| { | |
| "epoch": 1.1901140684410647, | |
| "grad_norm": 0.6673575209674452, | |
| "learning_rate": 8.445496233529934e-06, | |
| "loss": 0.3158, | |
| "step": 626 | |
| }, | |
| { | |
| "epoch": 1.1920152091254752, | |
| "grad_norm": 0.7071630034315103, | |
| "learning_rate": 8.41269934231375e-06, | |
| "loss": 0.3233, | |
| "step": 627 | |
| }, | |
| { | |
| "epoch": 1.193916349809886, | |
| "grad_norm": 0.7359468132472458, | |
| "learning_rate": 8.3799199566681e-06, | |
| "loss": 0.2974, | |
| "step": 628 | |
| }, | |
| { | |
| "epoch": 1.1958174904942966, | |
| "grad_norm": 0.6162603743293579, | |
| "learning_rate": 8.34715843810096e-06, | |
| "loss": 0.2928, | |
| "step": 629 | |
| }, | |
| { | |
| "epoch": 1.1977186311787071, | |
| "grad_norm": 0.7052275817772723, | |
| "learning_rate": 8.314415147923254e-06, | |
| "loss": 0.3233, | |
| "step": 630 | |
| }, | |
| { | |
| "epoch": 1.1996197718631179, | |
| "grad_norm": 0.7370257259057312, | |
| "learning_rate": 8.281690447244887e-06, | |
| "loss": 0.2961, | |
| "step": 631 | |
| }, | |
| { | |
| "epoch": 1.2015209125475286, | |
| "grad_norm": 0.6812899958604767, | |
| "learning_rate": 8.248984696970732e-06, | |
| "loss": 0.3029, | |
| "step": 632 | |
| }, | |
| { | |
| "epoch": 1.203422053231939, | |
| "grad_norm": 0.7136678871929358, | |
| "learning_rate": 8.216298257796677e-06, | |
| "loss": 0.3116, | |
| "step": 633 | |
| }, | |
| { | |
| "epoch": 1.2053231939163498, | |
| "grad_norm": 0.7107107141560293, | |
| "learning_rate": 8.183631490205636e-06, | |
| "loss": 0.2996, | |
| "step": 634 | |
| }, | |
| { | |
| "epoch": 1.2072243346007605, | |
| "grad_norm": 0.6587104150936467, | |
| "learning_rate": 8.150984754463578e-06, | |
| "loss": 0.3198, | |
| "step": 635 | |
| }, | |
| { | |
| "epoch": 1.209125475285171, | |
| "grad_norm": 0.6341935123938901, | |
| "learning_rate": 8.118358410615545e-06, | |
| "loss": 0.3002, | |
| "step": 636 | |
| }, | |
| { | |
| "epoch": 1.2110266159695817, | |
| "grad_norm": 0.6660049033954356, | |
| "learning_rate": 8.08575281848169e-06, | |
| "loss": 0.2898, | |
| "step": 637 | |
| }, | |
| { | |
| "epoch": 1.2129277566539924, | |
| "grad_norm": 0.6715304185806915, | |
| "learning_rate": 8.0531683376533e-06, | |
| "loss": 0.2986, | |
| "step": 638 | |
| }, | |
| { | |
| "epoch": 1.214828897338403, | |
| "grad_norm": 0.6213458218094212, | |
| "learning_rate": 8.020605327488846e-06, | |
| "loss": 0.3118, | |
| "step": 639 | |
| }, | |
| { | |
| "epoch": 1.2167300380228137, | |
| "grad_norm": 0.6960407357391247, | |
| "learning_rate": 7.988064147110001e-06, | |
| "loss": 0.3124, | |
| "step": 640 | |
| }, | |
| { | |
| "epoch": 1.2186311787072244, | |
| "grad_norm": 0.6707064334641694, | |
| "learning_rate": 7.955545155397684e-06, | |
| "loss": 0.313, | |
| "step": 641 | |
| }, | |
| { | |
| "epoch": 1.2205323193916349, | |
| "grad_norm": 0.6782080740034353, | |
| "learning_rate": 7.923048710988119e-06, | |
| "loss": 0.3213, | |
| "step": 642 | |
| }, | |
| { | |
| "epoch": 1.2224334600760456, | |
| "grad_norm": 0.6619198859902301, | |
| "learning_rate": 7.890575172268858e-06, | |
| "loss": 0.3134, | |
| "step": 643 | |
| }, | |
| { | |
| "epoch": 1.2243346007604563, | |
| "grad_norm": 0.7035792732567658, | |
| "learning_rate": 7.858124897374837e-06, | |
| "loss": 0.3056, | |
| "step": 644 | |
| }, | |
| { | |
| "epoch": 1.2262357414448668, | |
| "grad_norm": 0.6751749687198899, | |
| "learning_rate": 7.825698244184432e-06, | |
| "loss": 0.2925, | |
| "step": 645 | |
| }, | |
| { | |
| "epoch": 1.2281368821292775, | |
| "grad_norm": 0.6936340270241994, | |
| "learning_rate": 7.7932955703155e-06, | |
| "loss": 0.3072, | |
| "step": 646 | |
| }, | |
| { | |
| "epoch": 1.2300380228136882, | |
| "grad_norm": 0.701858042273816, | |
| "learning_rate": 7.760917233121443e-06, | |
| "loss": 0.3108, | |
| "step": 647 | |
| }, | |
| { | |
| "epoch": 1.231939163498099, | |
| "grad_norm": 0.6840751166351406, | |
| "learning_rate": 7.728563589687275e-06, | |
| "loss": 0.3087, | |
| "step": 648 | |
| }, | |
| { | |
| "epoch": 1.2338403041825095, | |
| "grad_norm": 0.6807017456046729, | |
| "learning_rate": 7.696234996825663e-06, | |
| "loss": 0.3202, | |
| "step": 649 | |
| }, | |
| { | |
| "epoch": 1.2357414448669202, | |
| "grad_norm": 0.6386727556378318, | |
| "learning_rate": 7.663931811073003e-06, | |
| "loss": 0.3088, | |
| "step": 650 | |
| }, | |
| { | |
| "epoch": 1.2376425855513309, | |
| "grad_norm": 0.7009331833667488, | |
| "learning_rate": 7.631654388685496e-06, | |
| "loss": 0.3071, | |
| "step": 651 | |
| }, | |
| { | |
| "epoch": 1.2395437262357414, | |
| "grad_norm": 0.6827819538685319, | |
| "learning_rate": 7.599403085635208e-06, | |
| "loss": 0.3157, | |
| "step": 652 | |
| }, | |
| { | |
| "epoch": 1.241444866920152, | |
| "grad_norm": 0.6722582705746584, | |
| "learning_rate": 7.567178257606147e-06, | |
| "loss": 0.3191, | |
| "step": 653 | |
| }, | |
| { | |
| "epoch": 1.2433460076045628, | |
| "grad_norm": 0.6471922074596952, | |
| "learning_rate": 7.534980259990341e-06, | |
| "loss": 0.3027, | |
| "step": 654 | |
| }, | |
| { | |
| "epoch": 1.2452471482889733, | |
| "grad_norm": 0.6521812957226463, | |
| "learning_rate": 7.50280944788392e-06, | |
| "loss": 0.3002, | |
| "step": 655 | |
| }, | |
| { | |
| "epoch": 1.247148288973384, | |
| "grad_norm": 0.6558170066911624, | |
| "learning_rate": 7.470666176083193e-06, | |
| "loss": 0.3244, | |
| "step": 656 | |
| }, | |
| { | |
| "epoch": 1.2490494296577948, | |
| "grad_norm": 0.6942890145329433, | |
| "learning_rate": 7.438550799080746e-06, | |
| "loss": 0.3136, | |
| "step": 657 | |
| }, | |
| { | |
| "epoch": 1.2509505703422052, | |
| "grad_norm": 0.6464117784348532, | |
| "learning_rate": 7.40646367106153e-06, | |
| "loss": 0.3012, | |
| "step": 658 | |
| }, | |
| { | |
| "epoch": 1.252851711026616, | |
| "grad_norm": 0.6539911295397738, | |
| "learning_rate": 7.3744051458989395e-06, | |
| "loss": 0.3161, | |
| "step": 659 | |
| }, | |
| { | |
| "epoch": 1.2547528517110267, | |
| "grad_norm": 0.6822911716962985, | |
| "learning_rate": 7.342375577150928e-06, | |
| "loss": 0.3234, | |
| "step": 660 | |
| }, | |
| { | |
| "epoch": 1.2566539923954372, | |
| "grad_norm": 0.6803309835935368, | |
| "learning_rate": 7.310375318056107e-06, | |
| "loss": 0.3113, | |
| "step": 661 | |
| }, | |
| { | |
| "epoch": 1.258555133079848, | |
| "grad_norm": 0.6724876292154344, | |
| "learning_rate": 7.278404721529843e-06, | |
| "loss": 0.326, | |
| "step": 662 | |
| }, | |
| { | |
| "epoch": 1.2604562737642586, | |
| "grad_norm": 0.6972288244147081, | |
| "learning_rate": 7.246464140160365e-06, | |
| "loss": 0.3133, | |
| "step": 663 | |
| }, | |
| { | |
| "epoch": 1.2623574144486693, | |
| "grad_norm": 0.682580821472393, | |
| "learning_rate": 7.214553926204884e-06, | |
| "loss": 0.3085, | |
| "step": 664 | |
| }, | |
| { | |
| "epoch": 1.2642585551330798, | |
| "grad_norm": 0.6605431294186738, | |
| "learning_rate": 7.182674431585703e-06, | |
| "loss": 0.3052, | |
| "step": 665 | |
| }, | |
| { | |
| "epoch": 1.2661596958174905, | |
| "grad_norm": 0.6899447197752879, | |
| "learning_rate": 7.150826007886334e-06, | |
| "loss": 0.307, | |
| "step": 666 | |
| }, | |
| { | |
| "epoch": 1.2680608365019013, | |
| "grad_norm": 0.6517238815526328, | |
| "learning_rate": 7.119009006347625e-06, | |
| "loss": 0.2922, | |
| "step": 667 | |
| }, | |
| { | |
| "epoch": 1.2699619771863118, | |
| "grad_norm": 0.6913504174591502, | |
| "learning_rate": 7.087223777863883e-06, | |
| "loss": 0.3014, | |
| "step": 668 | |
| }, | |
| { | |
| "epoch": 1.2718631178707225, | |
| "grad_norm": 0.7227430784437066, | |
| "learning_rate": 7.055470672979003e-06, | |
| "loss": 0.3285, | |
| "step": 669 | |
| }, | |
| { | |
| "epoch": 1.2737642585551332, | |
| "grad_norm": 0.6515735781790528, | |
| "learning_rate": 7.023750041882609e-06, | |
| "loss": 0.3032, | |
| "step": 670 | |
| }, | |
| { | |
| "epoch": 1.2756653992395437, | |
| "grad_norm": 0.6625385323226395, | |
| "learning_rate": 6.992062234406185e-06, | |
| "loss": 0.3134, | |
| "step": 671 | |
| }, | |
| { | |
| "epoch": 1.2775665399239544, | |
| "grad_norm": 0.6708298325056333, | |
| "learning_rate": 6.960407600019217e-06, | |
| "loss": 0.3083, | |
| "step": 672 | |
| }, | |
| { | |
| "epoch": 1.2794676806083651, | |
| "grad_norm": 0.6612395435229904, | |
| "learning_rate": 6.9287864878253475e-06, | |
| "loss": 0.3091, | |
| "step": 673 | |
| }, | |
| { | |
| "epoch": 1.2813688212927756, | |
| "grad_norm": 0.6837151968592426, | |
| "learning_rate": 6.897199246558515e-06, | |
| "loss": 0.3118, | |
| "step": 674 | |
| }, | |
| { | |
| "epoch": 1.2832699619771863, | |
| "grad_norm": 0.6781150379200291, | |
| "learning_rate": 6.865646224579108e-06, | |
| "loss": 0.2988, | |
| "step": 675 | |
| }, | |
| { | |
| "epoch": 1.285171102661597, | |
| "grad_norm": 0.678566624722647, | |
| "learning_rate": 6.834127769870134e-06, | |
| "loss": 0.3076, | |
| "step": 676 | |
| }, | |
| { | |
| "epoch": 1.2870722433460076, | |
| "grad_norm": 0.6834768663413462, | |
| "learning_rate": 6.802644230033373e-06, | |
| "loss": 0.3116, | |
| "step": 677 | |
| }, | |
| { | |
| "epoch": 1.2889733840304183, | |
| "grad_norm": 0.6907952475147346, | |
| "learning_rate": 6.771195952285541e-06, | |
| "loss": 0.3016, | |
| "step": 678 | |
| }, | |
| { | |
| "epoch": 1.290874524714829, | |
| "grad_norm": 0.6818559560637484, | |
| "learning_rate": 6.739783283454469e-06, | |
| "loss": 0.312, | |
| "step": 679 | |
| }, | |
| { | |
| "epoch": 1.2927756653992395, | |
| "grad_norm": 0.6518931355826918, | |
| "learning_rate": 6.708406569975274e-06, | |
| "loss": 0.2842, | |
| "step": 680 | |
| }, | |
| { | |
| "epoch": 1.2946768060836502, | |
| "grad_norm": 0.7167944932770222, | |
| "learning_rate": 6.6770661578865444e-06, | |
| "loss": 0.3028, | |
| "step": 681 | |
| }, | |
| { | |
| "epoch": 1.296577946768061, | |
| "grad_norm": 0.6707479802381471, | |
| "learning_rate": 6.645762392826509e-06, | |
| "loss": 0.3107, | |
| "step": 682 | |
| }, | |
| { | |
| "epoch": 1.2984790874524714, | |
| "grad_norm": 0.6856140297972533, | |
| "learning_rate": 6.614495620029238e-06, | |
| "loss": 0.3181, | |
| "step": 683 | |
| }, | |
| { | |
| "epoch": 1.3003802281368821, | |
| "grad_norm": 0.6697515293848041, | |
| "learning_rate": 6.583266184320836e-06, | |
| "loss": 0.3002, | |
| "step": 684 | |
| }, | |
| { | |
| "epoch": 1.3022813688212929, | |
| "grad_norm": 0.6982390982595713, | |
| "learning_rate": 6.552074430115624e-06, | |
| "loss": 0.3197, | |
| "step": 685 | |
| }, | |
| { | |
| "epoch": 1.3041825095057034, | |
| "grad_norm": 0.6654665659226864, | |
| "learning_rate": 6.520920701412371e-06, | |
| "loss": 0.2871, | |
| "step": 686 | |
| }, | |
| { | |
| "epoch": 1.306083650190114, | |
| "grad_norm": 0.6732852242560583, | |
| "learning_rate": 6.489805341790456e-06, | |
| "loss": 0.3106, | |
| "step": 687 | |
| }, | |
| { | |
| "epoch": 1.3079847908745248, | |
| "grad_norm": 0.6801725587509859, | |
| "learning_rate": 6.458728694406124e-06, | |
| "loss": 0.3047, | |
| "step": 688 | |
| }, | |
| { | |
| "epoch": 1.3098859315589353, | |
| "grad_norm": 0.6381992924585771, | |
| "learning_rate": 6.427691101988673e-06, | |
| "loss": 0.3063, | |
| "step": 689 | |
| }, | |
| { | |
| "epoch": 1.311787072243346, | |
| "grad_norm": 0.6753229514508059, | |
| "learning_rate": 6.396692906836686e-06, | |
| "loss": 0.3282, | |
| "step": 690 | |
| }, | |
| { | |
| "epoch": 1.3136882129277567, | |
| "grad_norm": 0.6626489897159549, | |
| "learning_rate": 6.3657344508142495e-06, | |
| "loss": 0.2963, | |
| "step": 691 | |
| }, | |
| { | |
| "epoch": 1.3155893536121672, | |
| "grad_norm": 0.6471686520659756, | |
| "learning_rate": 6.334816075347185e-06, | |
| "loss": 0.2923, | |
| "step": 692 | |
| }, | |
| { | |
| "epoch": 1.317490494296578, | |
| "grad_norm": 0.6500781845590925, | |
| "learning_rate": 6.303938121419295e-06, | |
| "loss": 0.2913, | |
| "step": 693 | |
| }, | |
| { | |
| "epoch": 1.3193916349809887, | |
| "grad_norm": 0.6766896397454606, | |
| "learning_rate": 6.273100929568579e-06, | |
| "loss": 0.3126, | |
| "step": 694 | |
| }, | |
| { | |
| "epoch": 1.3212927756653992, | |
| "grad_norm": 0.6737617817741325, | |
| "learning_rate": 6.242304839883502e-06, | |
| "loss": 0.2986, | |
| "step": 695 | |
| }, | |
| { | |
| "epoch": 1.3231939163498099, | |
| "grad_norm": 0.6739470812402147, | |
| "learning_rate": 6.211550191999223e-06, | |
| "loss": 0.3147, | |
| "step": 696 | |
| }, | |
| { | |
| "epoch": 1.3250950570342206, | |
| "grad_norm": 0.6432620208901774, | |
| "learning_rate": 6.18083732509387e-06, | |
| "loss": 0.3257, | |
| "step": 697 | |
| }, | |
| { | |
| "epoch": 1.326996197718631, | |
| "grad_norm": 0.6636708200730211, | |
| "learning_rate": 6.150166577884781e-06, | |
| "loss": 0.307, | |
| "step": 698 | |
| }, | |
| { | |
| "epoch": 1.3288973384030418, | |
| "grad_norm": 0.7150140307986511, | |
| "learning_rate": 6.119538288624778e-06, | |
| "loss": 0.295, | |
| "step": 699 | |
| }, | |
| { | |
| "epoch": 1.3307984790874525, | |
| "grad_norm": 0.6788119585326947, | |
| "learning_rate": 6.088952795098442e-06, | |
| "loss": 0.3023, | |
| "step": 700 | |
| }, | |
| { | |
| "epoch": 1.332699619771863, | |
| "grad_norm": 0.6767763469477007, | |
| "learning_rate": 6.058410434618367e-06, | |
| "loss": 0.3141, | |
| "step": 701 | |
| }, | |
| { | |
| "epoch": 1.3346007604562737, | |
| "grad_norm": 0.6587977038584661, | |
| "learning_rate": 6.027911544021465e-06, | |
| "loss": 0.3066, | |
| "step": 702 | |
| }, | |
| { | |
| "epoch": 1.3365019011406845, | |
| "grad_norm": 0.6657335294839596, | |
| "learning_rate": 5.997456459665237e-06, | |
| "loss": 0.3085, | |
| "step": 703 | |
| }, | |
| { | |
| "epoch": 1.338403041825095, | |
| "grad_norm": 0.6641976710273393, | |
| "learning_rate": 5.967045517424062e-06, | |
| "loss": 0.3169, | |
| "step": 704 | |
| }, | |
| { | |
| "epoch": 1.3403041825095057, | |
| "grad_norm": 0.7804294419491767, | |
| "learning_rate": 5.936679052685505e-06, | |
| "loss": 0.324, | |
| "step": 705 | |
| }, | |
| { | |
| "epoch": 1.3422053231939164, | |
| "grad_norm": 0.7110396313542058, | |
| "learning_rate": 5.906357400346596e-06, | |
| "loss": 0.3162, | |
| "step": 706 | |
| }, | |
| { | |
| "epoch": 1.3441064638783269, | |
| "grad_norm": 0.6955774962264867, | |
| "learning_rate": 5.876080894810167e-06, | |
| "loss": 0.3092, | |
| "step": 707 | |
| }, | |
| { | |
| "epoch": 1.3460076045627376, | |
| "grad_norm": 0.7049142132137731, | |
| "learning_rate": 5.845849869981137e-06, | |
| "loss": 0.3061, | |
| "step": 708 | |
| }, | |
| { | |
| "epoch": 1.3479087452471483, | |
| "grad_norm": 0.7118716485906978, | |
| "learning_rate": 5.815664659262845e-06, | |
| "loss": 0.3267, | |
| "step": 709 | |
| }, | |
| { | |
| "epoch": 1.3498098859315588, | |
| "grad_norm": 0.6951159538135848, | |
| "learning_rate": 5.78552559555337e-06, | |
| "loss": 0.301, | |
| "step": 710 | |
| }, | |
| { | |
| "epoch": 1.3517110266159695, | |
| "grad_norm": 0.6691910954733171, | |
| "learning_rate": 5.755433011241851e-06, | |
| "loss": 0.3026, | |
| "step": 711 | |
| }, | |
| { | |
| "epoch": 1.3536121673003803, | |
| "grad_norm": 0.6732866257480521, | |
| "learning_rate": 5.725387238204831e-06, | |
| "loss": 0.3105, | |
| "step": 712 | |
| }, | |
| { | |
| "epoch": 1.3555133079847907, | |
| "grad_norm": 0.6711392963738159, | |
| "learning_rate": 5.695388607802603e-06, | |
| "loss": 0.3162, | |
| "step": 713 | |
| }, | |
| { | |
| "epoch": 1.3574144486692015, | |
| "grad_norm": 0.6601644189905524, | |
| "learning_rate": 5.665437450875534e-06, | |
| "loss": 0.3086, | |
| "step": 714 | |
| }, | |
| { | |
| "epoch": 1.3593155893536122, | |
| "grad_norm": 0.6943369058367233, | |
| "learning_rate": 5.635534097740435e-06, | |
| "loss": 0.3233, | |
| "step": 715 | |
| }, | |
| { | |
| "epoch": 1.3612167300380227, | |
| "grad_norm": 0.6691145578219352, | |
| "learning_rate": 5.605678878186911e-06, | |
| "loss": 0.3001, | |
| "step": 716 | |
| }, | |
| { | |
| "epoch": 1.3631178707224334, | |
| "grad_norm": 0.6797259168746197, | |
| "learning_rate": 5.575872121473722e-06, | |
| "loss": 0.2937, | |
| "step": 717 | |
| }, | |
| { | |
| "epoch": 1.3650190114068441, | |
| "grad_norm": 0.6909581483738534, | |
| "learning_rate": 5.546114156325166e-06, | |
| "loss": 0.3131, | |
| "step": 718 | |
| }, | |
| { | |
| "epoch": 1.3669201520912546, | |
| "grad_norm": 0.7071585971726193, | |
| "learning_rate": 5.516405310927431e-06, | |
| "loss": 0.3122, | |
| "step": 719 | |
| }, | |
| { | |
| "epoch": 1.3688212927756653, | |
| "grad_norm": 0.6754309768277333, | |
| "learning_rate": 5.4867459129249846e-06, | |
| "loss": 0.3031, | |
| "step": 720 | |
| }, | |
| { | |
| "epoch": 1.370722433460076, | |
| "grad_norm": 0.6809878902824876, | |
| "learning_rate": 5.4571362894169795e-06, | |
| "loss": 0.319, | |
| "step": 721 | |
| }, | |
| { | |
| "epoch": 1.3726235741444868, | |
| "grad_norm": 0.692552486237672, | |
| "learning_rate": 5.427576766953615e-06, | |
| "loss": 0.3065, | |
| "step": 722 | |
| }, | |
| { | |
| "epoch": 1.3745247148288973, | |
| "grad_norm": 0.6763669845690329, | |
| "learning_rate": 5.398067671532554e-06, | |
| "loss": 0.3102, | |
| "step": 723 | |
| }, | |
| { | |
| "epoch": 1.376425855513308, | |
| "grad_norm": 0.6855139704003235, | |
| "learning_rate": 5.368609328595323e-06, | |
| "loss": 0.2998, | |
| "step": 724 | |
| }, | |
| { | |
| "epoch": 1.3783269961977187, | |
| "grad_norm": 0.6396662407803108, | |
| "learning_rate": 5.339202063023727e-06, | |
| "loss": 0.2964, | |
| "step": 725 | |
| }, | |
| { | |
| "epoch": 1.3802281368821292, | |
| "grad_norm": 0.6657206756011729, | |
| "learning_rate": 5.309846199136258e-06, | |
| "loss": 0.3074, | |
| "step": 726 | |
| }, | |
| { | |
| "epoch": 1.38212927756654, | |
| "grad_norm": 0.6415665374182151, | |
| "learning_rate": 5.280542060684535e-06, | |
| "loss": 0.3129, | |
| "step": 727 | |
| }, | |
| { | |
| "epoch": 1.3840304182509506, | |
| "grad_norm": 0.6712903867782852, | |
| "learning_rate": 5.2512899708497086e-06, | |
| "loss": 0.2953, | |
| "step": 728 | |
| }, | |
| { | |
| "epoch": 1.3859315589353614, | |
| "grad_norm": 0.6327218749289589, | |
| "learning_rate": 5.222090252238916e-06, | |
| "loss": 0.3044, | |
| "step": 729 | |
| }, | |
| { | |
| "epoch": 1.3878326996197718, | |
| "grad_norm": 0.6540419946066243, | |
| "learning_rate": 5.192943226881724e-06, | |
| "loss": 0.2823, | |
| "step": 730 | |
| }, | |
| { | |
| "epoch": 1.3897338403041826, | |
| "grad_norm": 0.7238198585913332, | |
| "learning_rate": 5.163849216226562e-06, | |
| "loss": 0.3089, | |
| "step": 731 | |
| }, | |
| { | |
| "epoch": 1.3916349809885933, | |
| "grad_norm": 0.6734977277400134, | |
| "learning_rate": 5.134808541137183e-06, | |
| "loss": 0.3189, | |
| "step": 732 | |
| }, | |
| { | |
| "epoch": 1.3935361216730038, | |
| "grad_norm": 0.6671087719041168, | |
| "learning_rate": 5.105821521889147e-06, | |
| "loss": 0.3062, | |
| "step": 733 | |
| }, | |
| { | |
| "epoch": 1.3954372623574145, | |
| "grad_norm": 0.6464892330911215, | |
| "learning_rate": 5.076888478166247e-06, | |
| "loss": 0.3098, | |
| "step": 734 | |
| }, | |
| { | |
| "epoch": 1.3973384030418252, | |
| "grad_norm": 0.6535815686671727, | |
| "learning_rate": 5.048009729057012e-06, | |
| "loss": 0.312, | |
| "step": 735 | |
| }, | |
| { | |
| "epoch": 1.3992395437262357, | |
| "grad_norm": 0.6680122953376342, | |
| "learning_rate": 5.0191855930511946e-06, | |
| "loss": 0.3016, | |
| "step": 736 | |
| }, | |
| { | |
| "epoch": 1.4011406844106464, | |
| "grad_norm": 0.6730541904541806, | |
| "learning_rate": 4.990416388036233e-06, | |
| "loss": 0.3019, | |
| "step": 737 | |
| }, | |
| { | |
| "epoch": 1.4030418250950571, | |
| "grad_norm": 0.6685964253690054, | |
| "learning_rate": 4.961702431293759e-06, | |
| "loss": 0.3138, | |
| "step": 738 | |
| }, | |
| { | |
| "epoch": 1.4049429657794676, | |
| "grad_norm": 0.7112793728373952, | |
| "learning_rate": 4.933044039496107e-06, | |
| "loss": 0.3146, | |
| "step": 739 | |
| }, | |
| { | |
| "epoch": 1.4068441064638784, | |
| "grad_norm": 0.6842204679166234, | |
| "learning_rate": 4.904441528702806e-06, | |
| "loss": 0.3005, | |
| "step": 740 | |
| }, | |
| { | |
| "epoch": 1.408745247148289, | |
| "grad_norm": 0.7114788849177559, | |
| "learning_rate": 4.875895214357093e-06, | |
| "loss": 0.2945, | |
| "step": 741 | |
| }, | |
| { | |
| "epoch": 1.4106463878326996, | |
| "grad_norm": 0.6932958766371303, | |
| "learning_rate": 4.847405411282462e-06, | |
| "loss": 0.3192, | |
| "step": 742 | |
| }, | |
| { | |
| "epoch": 1.4125475285171103, | |
| "grad_norm": 0.6857521413046783, | |
| "learning_rate": 4.818972433679145e-06, | |
| "loss": 0.3109, | |
| "step": 743 | |
| }, | |
| { | |
| "epoch": 1.414448669201521, | |
| "grad_norm": 0.7455761250985122, | |
| "learning_rate": 4.790596595120699e-06, | |
| "loss": 0.3046, | |
| "step": 744 | |
| }, | |
| { | |
| "epoch": 1.4163498098859315, | |
| "grad_norm": 0.6751850997091181, | |
| "learning_rate": 4.762278208550505e-06, | |
| "loss": 0.2883, | |
| "step": 745 | |
| }, | |
| { | |
| "epoch": 1.4182509505703422, | |
| "grad_norm": 0.6596919818238839, | |
| "learning_rate": 4.734017586278337e-06, | |
| "loss": 0.2936, | |
| "step": 746 | |
| }, | |
| { | |
| "epoch": 1.420152091254753, | |
| "grad_norm": 0.6890595515887692, | |
| "learning_rate": 4.7058150399769245e-06, | |
| "loss": 0.313, | |
| "step": 747 | |
| }, | |
| { | |
| "epoch": 1.4220532319391634, | |
| "grad_norm": 0.6988083117077564, | |
| "learning_rate": 4.677670880678493e-06, | |
| "loss": 0.3071, | |
| "step": 748 | |
| }, | |
| { | |
| "epoch": 1.4239543726235742, | |
| "grad_norm": 0.664898922606292, | |
| "learning_rate": 4.649585418771348e-06, | |
| "loss": 0.3026, | |
| "step": 749 | |
| }, | |
| { | |
| "epoch": 1.4258555133079849, | |
| "grad_norm": 0.6904470633941515, | |
| "learning_rate": 4.621558963996458e-06, | |
| "loss": 0.3203, | |
| "step": 750 | |
| }, | |
| { | |
| "epoch": 1.4277566539923954, | |
| "grad_norm": 0.6958515487508979, | |
| "learning_rate": 4.593591825444028e-06, | |
| "loss": 0.2987, | |
| "step": 751 | |
| }, | |
| { | |
| "epoch": 1.429657794676806, | |
| "grad_norm": 0.6856176886111498, | |
| "learning_rate": 4.565684311550077e-06, | |
| "loss": 0.319, | |
| "step": 752 | |
| }, | |
| { | |
| "epoch": 1.4315589353612168, | |
| "grad_norm": 0.7017889511668561, | |
| "learning_rate": 4.537836730093077e-06, | |
| "loss": 0.3127, | |
| "step": 753 | |
| }, | |
| { | |
| "epoch": 1.4334600760456273, | |
| "grad_norm": 0.7011091674122127, | |
| "learning_rate": 4.510049388190518e-06, | |
| "loss": 0.3103, | |
| "step": 754 | |
| }, | |
| { | |
| "epoch": 1.435361216730038, | |
| "grad_norm": 0.7241449435024591, | |
| "learning_rate": 4.482322592295541e-06, | |
| "loss": 0.3105, | |
| "step": 755 | |
| }, | |
| { | |
| "epoch": 1.4372623574144487, | |
| "grad_norm": 0.6556599329014728, | |
| "learning_rate": 4.454656648193559e-06, | |
| "loss": 0.2924, | |
| "step": 756 | |
| }, | |
| { | |
| "epoch": 1.4391634980988592, | |
| "grad_norm": 0.6544953547441912, | |
| "learning_rate": 4.427051860998877e-06, | |
| "loss": 0.2925, | |
| "step": 757 | |
| }, | |
| { | |
| "epoch": 1.44106463878327, | |
| "grad_norm": 0.686007515123509, | |
| "learning_rate": 4.399508535151321e-06, | |
| "loss": 0.2891, | |
| "step": 758 | |
| }, | |
| { | |
| "epoch": 1.4429657794676807, | |
| "grad_norm": 0.6475224325223989, | |
| "learning_rate": 4.372026974412907e-06, | |
| "loss": 0.2809, | |
| "step": 759 | |
| }, | |
| { | |
| "epoch": 1.4448669201520912, | |
| "grad_norm": 0.6734382320420421, | |
| "learning_rate": 4.344607481864466e-06, | |
| "loss": 0.2967, | |
| "step": 760 | |
| }, | |
| { | |
| "epoch": 1.446768060836502, | |
| "grad_norm": 0.6751981990227319, | |
| "learning_rate": 4.317250359902295e-06, | |
| "loss": 0.3014, | |
| "step": 761 | |
| }, | |
| { | |
| "epoch": 1.4486692015209126, | |
| "grad_norm": 0.6770684845599902, | |
| "learning_rate": 4.2899559102348585e-06, | |
| "loss": 0.3123, | |
| "step": 762 | |
| }, | |
| { | |
| "epoch": 1.450570342205323, | |
| "grad_norm": 0.7574805618323069, | |
| "learning_rate": 4.262724433879427e-06, | |
| "loss": 0.3153, | |
| "step": 763 | |
| }, | |
| { | |
| "epoch": 1.4524714828897338, | |
| "grad_norm": 0.7454241178377625, | |
| "learning_rate": 4.235556231158765e-06, | |
| "loss": 0.3045, | |
| "step": 764 | |
| }, | |
| { | |
| "epoch": 1.4543726235741445, | |
| "grad_norm": 0.6486078025900676, | |
| "learning_rate": 4.208451601697836e-06, | |
| "loss": 0.2989, | |
| "step": 765 | |
| }, | |
| { | |
| "epoch": 1.456273764258555, | |
| "grad_norm": 0.6363946264816792, | |
| "learning_rate": 4.181410844420473e-06, | |
| "loss": 0.3025, | |
| "step": 766 | |
| }, | |
| { | |
| "epoch": 1.4581749049429658, | |
| "grad_norm": 0.6542860455199976, | |
| "learning_rate": 4.154434257546095e-06, | |
| "loss": 0.2755, | |
| "step": 767 | |
| }, | |
| { | |
| "epoch": 1.4600760456273765, | |
| "grad_norm": 0.6326434515736381, | |
| "learning_rate": 4.127522138586424e-06, | |
| "loss": 0.2982, | |
| "step": 768 | |
| }, | |
| { | |
| "epoch": 1.461977186311787, | |
| "grad_norm": 0.670937812039149, | |
| "learning_rate": 4.10067478434219e-06, | |
| "loss": 0.2921, | |
| "step": 769 | |
| }, | |
| { | |
| "epoch": 1.4638783269961977, | |
| "grad_norm": 0.6734977569004734, | |
| "learning_rate": 4.073892490899865e-06, | |
| "loss": 0.3049, | |
| "step": 770 | |
| }, | |
| { | |
| "epoch": 1.4657794676806084, | |
| "grad_norm": 0.624771468575235, | |
| "learning_rate": 4.047175553628397e-06, | |
| "loss": 0.291, | |
| "step": 771 | |
| }, | |
| { | |
| "epoch": 1.467680608365019, | |
| "grad_norm": 0.6874073660863155, | |
| "learning_rate": 4.020524267175954e-06, | |
| "loss": 0.3031, | |
| "step": 772 | |
| }, | |
| { | |
| "epoch": 1.4695817490494296, | |
| "grad_norm": 0.66534378666526, | |
| "learning_rate": 3.993938925466674e-06, | |
| "loss": 0.3001, | |
| "step": 773 | |
| }, | |
| { | |
| "epoch": 1.4714828897338403, | |
| "grad_norm": 0.6509645668183451, | |
| "learning_rate": 3.96741982169742e-06, | |
| "loss": 0.302, | |
| "step": 774 | |
| }, | |
| { | |
| "epoch": 1.4733840304182508, | |
| "grad_norm": 0.6565111641476009, | |
| "learning_rate": 3.9409672483345465e-06, | |
| "loss": 0.296, | |
| "step": 775 | |
| }, | |
| { | |
| "epoch": 1.4752851711026616, | |
| "grad_norm": 0.6625075265919935, | |
| "learning_rate": 3.914581497110684e-06, | |
| "loss": 0.2975, | |
| "step": 776 | |
| }, | |
| { | |
| "epoch": 1.4771863117870723, | |
| "grad_norm": 0.6515952077370761, | |
| "learning_rate": 3.888262859021508e-06, | |
| "loss": 0.2868, | |
| "step": 777 | |
| }, | |
| { | |
| "epoch": 1.4790874524714828, | |
| "grad_norm": 0.6581630519633069, | |
| "learning_rate": 3.862011624322534e-06, | |
| "loss": 0.291, | |
| "step": 778 | |
| }, | |
| { | |
| "epoch": 1.4809885931558935, | |
| "grad_norm": 0.7158398925167476, | |
| "learning_rate": 3.835828082525925e-06, | |
| "loss": 0.3141, | |
| "step": 779 | |
| }, | |
| { | |
| "epoch": 1.4828897338403042, | |
| "grad_norm": 0.7030272548472132, | |
| "learning_rate": 3.8097125223972864e-06, | |
| "loss": 0.3064, | |
| "step": 780 | |
| }, | |
| { | |
| "epoch": 1.4847908745247147, | |
| "grad_norm": 0.6631452328183778, | |
| "learning_rate": 3.7836652319524835e-06, | |
| "loss": 0.2879, | |
| "step": 781 | |
| }, | |
| { | |
| "epoch": 1.4866920152091254, | |
| "grad_norm": 0.6901200410909741, | |
| "learning_rate": 3.7576864984544814e-06, | |
| "loss": 0.3096, | |
| "step": 782 | |
| }, | |
| { | |
| "epoch": 1.4885931558935361, | |
| "grad_norm": 0.6555400734606364, | |
| "learning_rate": 3.73177660841015e-06, | |
| "loss": 0.3173, | |
| "step": 783 | |
| }, | |
| { | |
| "epoch": 1.4904942965779466, | |
| "grad_norm": 0.6626196708278583, | |
| "learning_rate": 3.7059358475671225e-06, | |
| "loss": 0.3142, | |
| "step": 784 | |
| }, | |
| { | |
| "epoch": 1.4923954372623573, | |
| "grad_norm": 0.635282459929513, | |
| "learning_rate": 3.680164500910646e-06, | |
| "loss": 0.2949, | |
| "step": 785 | |
| }, | |
| { | |
| "epoch": 1.494296577946768, | |
| "grad_norm": 0.644732909457007, | |
| "learning_rate": 3.654462852660423e-06, | |
| "loss": 0.3045, | |
| "step": 786 | |
| }, | |
| { | |
| "epoch": 1.4961977186311788, | |
| "grad_norm": 0.6812571652084998, | |
| "learning_rate": 3.6288311862674885e-06, | |
| "loss": 0.3234, | |
| "step": 787 | |
| }, | |
| { | |
| "epoch": 1.4980988593155893, | |
| "grad_norm": 0.654814950500561, | |
| "learning_rate": 3.6032697844110896e-06, | |
| "loss": 0.3019, | |
| "step": 788 | |
| }, | |
| { | |
| "epoch": 1.5, | |
| "grad_norm": 0.6712107027401386, | |
| "learning_rate": 3.5777789289955454e-06, | |
| "loss": 0.3097, | |
| "step": 789 | |
| }, | |
| { | |
| "epoch": 1.5019011406844105, | |
| "grad_norm": 0.623819495019153, | |
| "learning_rate": 3.5523589011471592e-06, | |
| "loss": 0.2844, | |
| "step": 790 | |
| }, | |
| { | |
| "epoch": 1.5038022813688214, | |
| "grad_norm": 0.7002158885169754, | |
| "learning_rate": 3.527009981211119e-06, | |
| "loss": 0.3103, | |
| "step": 791 | |
| }, | |
| { | |
| "epoch": 1.505703422053232, | |
| "grad_norm": 0.5815705094174439, | |
| "learning_rate": 3.5017324487483873e-06, | |
| "loss": 0.2876, | |
| "step": 792 | |
| }, | |
| { | |
| "epoch": 1.5076045627376424, | |
| "grad_norm": 0.6947919861245513, | |
| "learning_rate": 3.47652658253263e-06, | |
| "loss": 0.2991, | |
| "step": 793 | |
| }, | |
| { | |
| "epoch": 1.5095057034220534, | |
| "grad_norm": 0.6719058063673552, | |
| "learning_rate": 3.4513926605471504e-06, | |
| "loss": 0.283, | |
| "step": 794 | |
| }, | |
| { | |
| "epoch": 1.5114068441064639, | |
| "grad_norm": 0.6280729348443554, | |
| "learning_rate": 3.4263309599818017e-06, | |
| "loss": 0.2972, | |
| "step": 795 | |
| }, | |
| { | |
| "epoch": 1.5133079847908744, | |
| "grad_norm": 0.6527013372496685, | |
| "learning_rate": 3.4013417572299446e-06, | |
| "loss": 0.3158, | |
| "step": 796 | |
| }, | |
| { | |
| "epoch": 1.5152091254752853, | |
| "grad_norm": 0.6492582620327985, | |
| "learning_rate": 3.37642532788541e-06, | |
| "loss": 0.2954, | |
| "step": 797 | |
| }, | |
| { | |
| "epoch": 1.5171102661596958, | |
| "grad_norm": 0.6600811901345778, | |
| "learning_rate": 3.3515819467394184e-06, | |
| "loss": 0.3075, | |
| "step": 798 | |
| }, | |
| { | |
| "epoch": 1.5190114068441065, | |
| "grad_norm": 0.650843109106421, | |
| "learning_rate": 3.326811887777607e-06, | |
| "loss": 0.3073, | |
| "step": 799 | |
| }, | |
| { | |
| "epoch": 1.5209125475285172, | |
| "grad_norm": 0.662569518593219, | |
| "learning_rate": 3.3021154241769606e-06, | |
| "loss": 0.2977, | |
| "step": 800 | |
| }, | |
| { | |
| "epoch": 1.5228136882129277, | |
| "grad_norm": 0.6629225556528807, | |
| "learning_rate": 3.2774928283028153e-06, | |
| "loss": 0.3123, | |
| "step": 801 | |
| }, | |
| { | |
| "epoch": 1.5247148288973384, | |
| "grad_norm": 0.7156532708420646, | |
| "learning_rate": 3.2529443717058693e-06, | |
| "loss": 0.31, | |
| "step": 802 | |
| }, | |
| { | |
| "epoch": 1.5266159695817492, | |
| "grad_norm": 0.6589275384939368, | |
| "learning_rate": 3.228470325119164e-06, | |
| "loss": 0.3028, | |
| "step": 803 | |
| }, | |
| { | |
| "epoch": 1.5285171102661597, | |
| "grad_norm": 0.661104035867554, | |
| "learning_rate": 3.20407095845511e-06, | |
| "loss": 0.2935, | |
| "step": 804 | |
| }, | |
| { | |
| "epoch": 1.5304182509505704, | |
| "grad_norm": 0.6722621828203308, | |
| "learning_rate": 3.179746540802506e-06, | |
| "loss": 0.3147, | |
| "step": 805 | |
| }, | |
| { | |
| "epoch": 1.532319391634981, | |
| "grad_norm": 0.6229057909298685, | |
| "learning_rate": 3.155497340423588e-06, | |
| "loss": 0.2909, | |
| "step": 806 | |
| }, | |
| { | |
| "epoch": 1.5342205323193916, | |
| "grad_norm": 0.6416369260057041, | |
| "learning_rate": 3.1313236247510414e-06, | |
| "loss": 0.2951, | |
| "step": 807 | |
| }, | |
| { | |
| "epoch": 1.5361216730038023, | |
| "grad_norm": 0.6969803637712781, | |
| "learning_rate": 3.107225660385077e-06, | |
| "loss": 0.3178, | |
| "step": 808 | |
| }, | |
| { | |
| "epoch": 1.538022813688213, | |
| "grad_norm": 0.6567560318788205, | |
| "learning_rate": 3.0832037130904748e-06, | |
| "loss": 0.3028, | |
| "step": 809 | |
| }, | |
| { | |
| "epoch": 1.5399239543726235, | |
| "grad_norm": 0.6668044319752654, | |
| "learning_rate": 3.0592580477936606e-06, | |
| "loss": 0.2973, | |
| "step": 810 | |
| }, | |
| { | |
| "epoch": 1.5418250950570342, | |
| "grad_norm": 0.6928028816575115, | |
| "learning_rate": 3.035388928579792e-06, | |
| "loss": 0.3113, | |
| "step": 811 | |
| }, | |
| { | |
| "epoch": 1.543726235741445, | |
| "grad_norm": 0.6575707870893531, | |
| "learning_rate": 3.011596618689825e-06, | |
| "loss": 0.3023, | |
| "step": 812 | |
| }, | |
| { | |
| "epoch": 1.5456273764258555, | |
| "grad_norm": 0.6076672383662967, | |
| "learning_rate": 2.9878813805176252e-06, | |
| "loss": 0.293, | |
| "step": 813 | |
| }, | |
| { | |
| "epoch": 1.5475285171102662, | |
| "grad_norm": 0.6735163011272223, | |
| "learning_rate": 2.9642434756070793e-06, | |
| "loss": 0.2963, | |
| "step": 814 | |
| }, | |
| { | |
| "epoch": 1.549429657794677, | |
| "grad_norm": 0.6372809827293456, | |
| "learning_rate": 2.940683164649194e-06, | |
| "loss": 0.2893, | |
| "step": 815 | |
| }, | |
| { | |
| "epoch": 1.5513307984790874, | |
| "grad_norm": 0.6480494798119758, | |
| "learning_rate": 2.9172007074792342e-06, | |
| "loss": 0.2895, | |
| "step": 816 | |
| }, | |
| { | |
| "epoch": 1.553231939163498, | |
| "grad_norm": 0.6336859246685975, | |
| "learning_rate": 2.8937963630738517e-06, | |
| "loss": 0.2974, | |
| "step": 817 | |
| }, | |
| { | |
| "epoch": 1.5551330798479088, | |
| "grad_norm": 0.646376560598661, | |
| "learning_rate": 2.87047038954823e-06, | |
| "loss": 0.3124, | |
| "step": 818 | |
| }, | |
| { | |
| "epoch": 1.5570342205323193, | |
| "grad_norm": 0.6441926612495962, | |
| "learning_rate": 2.8472230441532365e-06, | |
| "loss": 0.3072, | |
| "step": 819 | |
| }, | |
| { | |
| "epoch": 1.55893536121673, | |
| "grad_norm": 0.6182777445924572, | |
| "learning_rate": 2.8240545832725963e-06, | |
| "loss": 0.2951, | |
| "step": 820 | |
| }, | |
| { | |
| "epoch": 1.5608365019011408, | |
| "grad_norm": 0.6588196679377797, | |
| "learning_rate": 2.8009652624200436e-06, | |
| "loss": 0.3106, | |
| "step": 821 | |
| }, | |
| { | |
| "epoch": 1.5627376425855513, | |
| "grad_norm": 0.6420349668469926, | |
| "learning_rate": 2.7779553362365184e-06, | |
| "loss": 0.2902, | |
| "step": 822 | |
| }, | |
| { | |
| "epoch": 1.564638783269962, | |
| "grad_norm": 0.7050276140268589, | |
| "learning_rate": 2.755025058487364e-06, | |
| "loss": 0.3078, | |
| "step": 823 | |
| }, | |
| { | |
| "epoch": 1.5665399239543727, | |
| "grad_norm": 0.6782848492605729, | |
| "learning_rate": 2.7321746820595084e-06, | |
| "loss": 0.3015, | |
| "step": 824 | |
| }, | |
| { | |
| "epoch": 1.5684410646387832, | |
| "grad_norm": 0.6537451645197219, | |
| "learning_rate": 2.709404458958693e-06, | |
| "loss": 0.2967, | |
| "step": 825 | |
| }, | |
| { | |
| "epoch": 1.570342205323194, | |
| "grad_norm": 0.6385190690216156, | |
| "learning_rate": 2.6867146403066833e-06, | |
| "loss": 0.2773, | |
| "step": 826 | |
| }, | |
| { | |
| "epoch": 1.5722433460076046, | |
| "grad_norm": 0.6267071551789702, | |
| "learning_rate": 2.6641054763385044e-06, | |
| "loss": 0.2945, | |
| "step": 827 | |
| }, | |
| { | |
| "epoch": 1.5741444866920151, | |
| "grad_norm": 0.6915097210035877, | |
| "learning_rate": 2.6415772163996845e-06, | |
| "loss": 0.2992, | |
| "step": 828 | |
| }, | |
| { | |
| "epoch": 1.5760456273764258, | |
| "grad_norm": 0.6924370009490849, | |
| "learning_rate": 2.619130108943494e-06, | |
| "loss": 0.3053, | |
| "step": 829 | |
| }, | |
| { | |
| "epoch": 1.5779467680608366, | |
| "grad_norm": 0.6638410265793022, | |
| "learning_rate": 2.5967644015282146e-06, | |
| "loss": 0.287, | |
| "step": 830 | |
| }, | |
| { | |
| "epoch": 1.579847908745247, | |
| "grad_norm": 0.667384809637936, | |
| "learning_rate": 2.5744803408144026e-06, | |
| "loss": 0.2899, | |
| "step": 831 | |
| }, | |
| { | |
| "epoch": 1.5817490494296578, | |
| "grad_norm": 0.6955198260393315, | |
| "learning_rate": 2.5522781725621814e-06, | |
| "loss": 0.3074, | |
| "step": 832 | |
| }, | |
| { | |
| "epoch": 1.5836501901140685, | |
| "grad_norm": 0.6665701889068537, | |
| "learning_rate": 2.530158141628515e-06, | |
| "loss": 0.3035, | |
| "step": 833 | |
| }, | |
| { | |
| "epoch": 1.585551330798479, | |
| "grad_norm": 0.6345048597672414, | |
| "learning_rate": 2.508120491964512e-06, | |
| "loss": 0.2967, | |
| "step": 834 | |
| }, | |
| { | |
| "epoch": 1.5874524714828897, | |
| "grad_norm": 0.644554287454183, | |
| "learning_rate": 2.486165466612751e-06, | |
| "loss": 0.292, | |
| "step": 835 | |
| }, | |
| { | |
| "epoch": 1.5893536121673004, | |
| "grad_norm": 0.6748126491410086, | |
| "learning_rate": 2.464293307704566e-06, | |
| "loss": 0.2982, | |
| "step": 836 | |
| }, | |
| { | |
| "epoch": 1.591254752851711, | |
| "grad_norm": 0.7206473644142021, | |
| "learning_rate": 2.4425042564574186e-06, | |
| "loss": 0.2942, | |
| "step": 837 | |
| }, | |
| { | |
| "epoch": 1.5931558935361216, | |
| "grad_norm": 0.6333835203266194, | |
| "learning_rate": 2.4207985531722034e-06, | |
| "loss": 0.291, | |
| "step": 838 | |
| }, | |
| { | |
| "epoch": 1.5950570342205324, | |
| "grad_norm": 0.6586139035136321, | |
| "learning_rate": 2.3991764372306113e-06, | |
| "loss": 0.2926, | |
| "step": 839 | |
| }, | |
| { | |
| "epoch": 1.5969581749049429, | |
| "grad_norm": 0.6198422953951707, | |
| "learning_rate": 2.377638147092497e-06, | |
| "loss": 0.2733, | |
| "step": 840 | |
| }, | |
| { | |
| "epoch": 1.5988593155893536, | |
| "grad_norm": 0.6465643678647274, | |
| "learning_rate": 2.3561839202932344e-06, | |
| "loss": 0.3002, | |
| "step": 841 | |
| }, | |
| { | |
| "epoch": 1.6007604562737643, | |
| "grad_norm": 0.6487573439149329, | |
| "learning_rate": 2.3348139934411008e-06, | |
| "loss": 0.2953, | |
| "step": 842 | |
| }, | |
| { | |
| "epoch": 1.6026615969581748, | |
| "grad_norm": 0.634571908953738, | |
| "learning_rate": 2.3135286022146785e-06, | |
| "loss": 0.2933, | |
| "step": 843 | |
| }, | |
| { | |
| "epoch": 1.6045627376425855, | |
| "grad_norm": 0.6186739519432609, | |
| "learning_rate": 2.292327981360245e-06, | |
| "loss": 0.2959, | |
| "step": 844 | |
| }, | |
| { | |
| "epoch": 1.6064638783269962, | |
| "grad_norm": 0.6559436146379715, | |
| "learning_rate": 2.271212364689176e-06, | |
| "loss": 0.2847, | |
| "step": 845 | |
| }, | |
| { | |
| "epoch": 1.6083650190114067, | |
| "grad_norm": 0.680603578954794, | |
| "learning_rate": 2.2501819850753925e-06, | |
| "loss": 0.3056, | |
| "step": 846 | |
| }, | |
| { | |
| "epoch": 1.6102661596958177, | |
| "grad_norm": 0.7078108527217358, | |
| "learning_rate": 2.229237074452768e-06, | |
| "loss": 0.3064, | |
| "step": 847 | |
| }, | |
| { | |
| "epoch": 1.6121673003802282, | |
| "grad_norm": 0.6416139090924158, | |
| "learning_rate": 2.2083778638125796e-06, | |
| "loss": 0.2993, | |
| "step": 848 | |
| }, | |
| { | |
| "epoch": 1.6140684410646386, | |
| "grad_norm": 0.6882556013614182, | |
| "learning_rate": 2.1876045832009694e-06, | |
| "loss": 0.297, | |
| "step": 849 | |
| }, | |
| { | |
| "epoch": 1.6159695817490496, | |
| "grad_norm": 0.6788523471948567, | |
| "learning_rate": 2.16691746171639e-06, | |
| "loss": 0.3019, | |
| "step": 850 | |
| }, | |
| { | |
| "epoch": 1.61787072243346, | |
| "grad_norm": 0.648498967010636, | |
| "learning_rate": 2.1463167275070863e-06, | |
| "loss": 0.3036, | |
| "step": 851 | |
| }, | |
| { | |
| "epoch": 1.6197718631178706, | |
| "grad_norm": 0.6402044905245188, | |
| "learning_rate": 2.125802607768588e-06, | |
| "loss": 0.2988, | |
| "step": 852 | |
| }, | |
| { | |
| "epoch": 1.6216730038022815, | |
| "grad_norm": 0.6523123304883696, | |
| "learning_rate": 2.1053753287411895e-06, | |
| "loss": 0.2836, | |
| "step": 853 | |
| }, | |
| { | |
| "epoch": 1.623574144486692, | |
| "grad_norm": 0.6436793000810468, | |
| "learning_rate": 2.08503511570746e-06, | |
| "loss": 0.2878, | |
| "step": 854 | |
| }, | |
| { | |
| "epoch": 1.6254752851711025, | |
| "grad_norm": 0.612088809565017, | |
| "learning_rate": 2.064782192989765e-06, | |
| "loss": 0.2801, | |
| "step": 855 | |
| }, | |
| { | |
| "epoch": 1.6273764258555135, | |
| "grad_norm": 0.6755744491564173, | |
| "learning_rate": 2.0446167839477815e-06, | |
| "loss": 0.2938, | |
| "step": 856 | |
| }, | |
| { | |
| "epoch": 1.629277566539924, | |
| "grad_norm": 0.6693714636704428, | |
| "learning_rate": 2.0245391109760437e-06, | |
| "loss": 0.3014, | |
| "step": 857 | |
| }, | |
| { | |
| "epoch": 1.6311787072243344, | |
| "grad_norm": 0.6762094541452331, | |
| "learning_rate": 2.0045493955014915e-06, | |
| "loss": 0.2976, | |
| "step": 858 | |
| }, | |
| { | |
| "epoch": 1.6330798479087454, | |
| "grad_norm": 0.6420570529962033, | |
| "learning_rate": 1.984647857981017e-06, | |
| "loss": 0.3105, | |
| "step": 859 | |
| }, | |
| { | |
| "epoch": 1.6349809885931559, | |
| "grad_norm": 0.6414184173615649, | |
| "learning_rate": 1.96483471789904e-06, | |
| "loss": 0.3165, | |
| "step": 860 | |
| }, | |
| { | |
| "epoch": 1.6368821292775664, | |
| "grad_norm": 0.661308224075709, | |
| "learning_rate": 1.9451101937650963e-06, | |
| "loss": 0.2971, | |
| "step": 861 | |
| }, | |
| { | |
| "epoch": 1.6387832699619773, | |
| "grad_norm": 0.6385561135024754, | |
| "learning_rate": 1.925474503111412e-06, | |
| "loss": 0.2896, | |
| "step": 862 | |
| }, | |
| { | |
| "epoch": 1.6406844106463878, | |
| "grad_norm": 0.6623238582184514, | |
| "learning_rate": 1.905927862490512e-06, | |
| "loss": 0.2959, | |
| "step": 863 | |
| }, | |
| { | |
| "epoch": 1.6425855513307985, | |
| "grad_norm": 0.647755847343158, | |
| "learning_rate": 1.8864704874728346e-06, | |
| "loss": 0.2861, | |
| "step": 864 | |
| }, | |
| { | |
| "epoch": 1.6444866920152093, | |
| "grad_norm": 0.7029499628462588, | |
| "learning_rate": 1.8671025926443464e-06, | |
| "loss": 0.3207, | |
| "step": 865 | |
| }, | |
| { | |
| "epoch": 1.6463878326996197, | |
| "grad_norm": 0.629971275407021, | |
| "learning_rate": 1.8478243916041882e-06, | |
| "loss": 0.2835, | |
| "step": 866 | |
| }, | |
| { | |
| "epoch": 1.6482889733840305, | |
| "grad_norm": 0.621473518544072, | |
| "learning_rate": 1.828636096962304e-06, | |
| "loss": 0.2887, | |
| "step": 867 | |
| }, | |
| { | |
| "epoch": 1.6501901140684412, | |
| "grad_norm": 0.6579997424197924, | |
| "learning_rate": 1.8095379203371044e-06, | |
| "loss": 0.2965, | |
| "step": 868 | |
| }, | |
| { | |
| "epoch": 1.6520912547528517, | |
| "grad_norm": 0.6423184913170983, | |
| "learning_rate": 1.7905300723531393e-06, | |
| "loss": 0.2953, | |
| "step": 869 | |
| }, | |
| { | |
| "epoch": 1.6539923954372624, | |
| "grad_norm": 0.6779690920582607, | |
| "learning_rate": 1.771612762638758e-06, | |
| "loss": 0.3019, | |
| "step": 870 | |
| }, | |
| { | |
| "epoch": 1.6558935361216731, | |
| "grad_norm": 0.6317715325420993, | |
| "learning_rate": 1.7527861998238094e-06, | |
| "loss": 0.2882, | |
| "step": 871 | |
| }, | |
| { | |
| "epoch": 1.6577946768060836, | |
| "grad_norm": 0.6635896514167742, | |
| "learning_rate": 1.7340505915373495e-06, | |
| "loss": 0.2915, | |
| "step": 872 | |
| }, | |
| { | |
| "epoch": 1.6596958174904943, | |
| "grad_norm": 0.6389155015173053, | |
| "learning_rate": 1.7154061444053239e-06, | |
| "loss": 0.304, | |
| "step": 873 | |
| }, | |
| { | |
| "epoch": 1.661596958174905, | |
| "grad_norm": 0.6355748131339661, | |
| "learning_rate": 1.6968530640483126e-06, | |
| "loss": 0.2876, | |
| "step": 874 | |
| }, | |
| { | |
| "epoch": 1.6634980988593155, | |
| "grad_norm": 0.626569334990134, | |
| "learning_rate": 1.6783915550792652e-06, | |
| "loss": 0.2806, | |
| "step": 875 | |
| }, | |
| { | |
| "epoch": 1.6653992395437263, | |
| "grad_norm": 0.6378035534667061, | |
| "learning_rate": 1.660021821101222e-06, | |
| "loss": 0.2738, | |
| "step": 876 | |
| }, | |
| { | |
| "epoch": 1.667300380228137, | |
| "grad_norm": 0.6103702721036466, | |
| "learning_rate": 1.6417440647050853e-06, | |
| "loss": 0.2835, | |
| "step": 877 | |
| }, | |
| { | |
| "epoch": 1.6692015209125475, | |
| "grad_norm": 0.6454622032705831, | |
| "learning_rate": 1.6235584874673848e-06, | |
| "loss": 0.3032, | |
| "step": 878 | |
| }, | |
| { | |
| "epoch": 1.6711026615969582, | |
| "grad_norm": 0.6279775693339934, | |
| "learning_rate": 1.6054652899480472e-06, | |
| "loss": 0.2787, | |
| "step": 879 | |
| }, | |
| { | |
| "epoch": 1.673003802281369, | |
| "grad_norm": 0.6146594568868697, | |
| "learning_rate": 1.587464671688187e-06, | |
| "loss": 0.2966, | |
| "step": 880 | |
| }, | |
| { | |
| "epoch": 1.6749049429657794, | |
| "grad_norm": 0.6583748227243941, | |
| "learning_rate": 1.5695568312079156e-06, | |
| "loss": 0.3, | |
| "step": 881 | |
| }, | |
| { | |
| "epoch": 1.6768060836501901, | |
| "grad_norm": 0.6838830989525052, | |
| "learning_rate": 1.5517419660041277e-06, | |
| "loss": 0.3021, | |
| "step": 882 | |
| }, | |
| { | |
| "epoch": 1.6787072243346008, | |
| "grad_norm": 0.648011554601945, | |
| "learning_rate": 1.534020272548349e-06, | |
| "loss": 0.301, | |
| "step": 883 | |
| }, | |
| { | |
| "epoch": 1.6806083650190113, | |
| "grad_norm": 0.6932730036739784, | |
| "learning_rate": 1.5163919462845622e-06, | |
| "loss": 0.2962, | |
| "step": 884 | |
| }, | |
| { | |
| "epoch": 1.682509505703422, | |
| "grad_norm": 0.6910740995373551, | |
| "learning_rate": 1.4988571816270402e-06, | |
| "loss": 0.3129, | |
| "step": 885 | |
| }, | |
| { | |
| "epoch": 1.6844106463878328, | |
| "grad_norm": 0.6498628982843386, | |
| "learning_rate": 1.4814161719582132e-06, | |
| "loss": 0.279, | |
| "step": 886 | |
| }, | |
| { | |
| "epoch": 1.6863117870722433, | |
| "grad_norm": 0.6787610014412867, | |
| "learning_rate": 1.4640691096265358e-06, | |
| "loss": 0.3031, | |
| "step": 887 | |
| }, | |
| { | |
| "epoch": 1.688212927756654, | |
| "grad_norm": 0.6607604852082093, | |
| "learning_rate": 1.4468161859443609e-06, | |
| "loss": 0.3005, | |
| "step": 888 | |
| }, | |
| { | |
| "epoch": 1.6901140684410647, | |
| "grad_norm": 0.646640747876956, | |
| "learning_rate": 1.4296575911858268e-06, | |
| "loss": 0.3025, | |
| "step": 889 | |
| }, | |
| { | |
| "epoch": 1.6920152091254752, | |
| "grad_norm": 0.6709325927148602, | |
| "learning_rate": 1.412593514584777e-06, | |
| "loss": 0.2952, | |
| "step": 890 | |
| }, | |
| { | |
| "epoch": 1.693916349809886, | |
| "grad_norm": 0.6751700217162946, | |
| "learning_rate": 1.3956241443326423e-06, | |
| "loss": 0.3063, | |
| "step": 891 | |
| }, | |
| { | |
| "epoch": 1.6958174904942966, | |
| "grad_norm": 0.6363991293695894, | |
| "learning_rate": 1.378749667576399e-06, | |
| "loss": 0.3121, | |
| "step": 892 | |
| }, | |
| { | |
| "epoch": 1.6977186311787071, | |
| "grad_norm": 0.6657969702233526, | |
| "learning_rate": 1.3619702704164783e-06, | |
| "loss": 0.3018, | |
| "step": 893 | |
| }, | |
| { | |
| "epoch": 1.6996197718631179, | |
| "grad_norm": 0.7025441059677974, | |
| "learning_rate": 1.3452861379047289e-06, | |
| "loss": 0.3082, | |
| "step": 894 | |
| }, | |
| { | |
| "epoch": 1.7015209125475286, | |
| "grad_norm": 0.640597351135649, | |
| "learning_rate": 1.3286974540423747e-06, | |
| "loss": 0.2914, | |
| "step": 895 | |
| }, | |
| { | |
| "epoch": 1.703422053231939, | |
| "grad_norm": 0.6825617327489302, | |
| "learning_rate": 1.3122044017779768e-06, | |
| "loss": 0.3006, | |
| "step": 896 | |
| }, | |
| { | |
| "epoch": 1.7053231939163498, | |
| "grad_norm": 0.6475020752585855, | |
| "learning_rate": 1.2958071630054214e-06, | |
| "loss": 0.3055, | |
| "step": 897 | |
| }, | |
| { | |
| "epoch": 1.7072243346007605, | |
| "grad_norm": 0.633383544424715, | |
| "learning_rate": 1.279505918561923e-06, | |
| "loss": 0.3002, | |
| "step": 898 | |
| }, | |
| { | |
| "epoch": 1.709125475285171, | |
| "grad_norm": 0.6699285348704596, | |
| "learning_rate": 1.2633008482260146e-06, | |
| "loss": 0.3084, | |
| "step": 899 | |
| }, | |
| { | |
| "epoch": 1.7110266159695817, | |
| "grad_norm": 0.6542176575384464, | |
| "learning_rate": 1.2471921307155655e-06, | |
| "loss": 0.2868, | |
| "step": 900 | |
| }, | |
| { | |
| "epoch": 1.7129277566539924, | |
| "grad_norm": 0.6389431114067012, | |
| "learning_rate": 1.2311799436858275e-06, | |
| "loss": 0.3013, | |
| "step": 901 | |
| }, | |
| { | |
| "epoch": 1.714828897338403, | |
| "grad_norm": 0.6358707984245675, | |
| "learning_rate": 1.2152644637274603e-06, | |
| "loss": 0.2935, | |
| "step": 902 | |
| }, | |
| { | |
| "epoch": 1.7167300380228137, | |
| "grad_norm": 0.6685040075734522, | |
| "learning_rate": 1.1994458663645836e-06, | |
| "loss": 0.3185, | |
| "step": 903 | |
| }, | |
| { | |
| "epoch": 1.7186311787072244, | |
| "grad_norm": 0.6512819358455129, | |
| "learning_rate": 1.1837243260528542e-06, | |
| "loss": 0.3018, | |
| "step": 904 | |
| }, | |
| { | |
| "epoch": 1.7205323193916349, | |
| "grad_norm": 0.6562207695245915, | |
| "learning_rate": 1.168100016177528e-06, | |
| "loss": 0.2994, | |
| "step": 905 | |
| }, | |
| { | |
| "epoch": 1.7224334600760456, | |
| "grad_norm": 0.6653912877064982, | |
| "learning_rate": 1.1525731090515536e-06, | |
| "loss": 0.295, | |
| "step": 906 | |
| }, | |
| { | |
| "epoch": 1.7243346007604563, | |
| "grad_norm": 0.6743835312599717, | |
| "learning_rate": 1.137143775913675e-06, | |
| "loss": 0.2998, | |
| "step": 907 | |
| }, | |
| { | |
| "epoch": 1.7262357414448668, | |
| "grad_norm": 0.6241478596632326, | |
| "learning_rate": 1.1218121869265365e-06, | |
| "loss": 0.3001, | |
| "step": 908 | |
| }, | |
| { | |
| "epoch": 1.7281368821292775, | |
| "grad_norm": 0.596358673278889, | |
| "learning_rate": 1.1065785111748117e-06, | |
| "loss": 0.274, | |
| "step": 909 | |
| }, | |
| { | |
| "epoch": 1.7300380228136882, | |
| "grad_norm": 0.6449168594422872, | |
| "learning_rate": 1.0914429166633355e-06, | |
| "loss": 0.288, | |
| "step": 910 | |
| }, | |
| { | |
| "epoch": 1.7319391634980987, | |
| "grad_norm": 0.6588828562278465, | |
| "learning_rate": 1.076405570315252e-06, | |
| "loss": 0.2998, | |
| "step": 911 | |
| }, | |
| { | |
| "epoch": 1.7338403041825095, | |
| "grad_norm": 0.6995753509739197, | |
| "learning_rate": 1.0614666379701732e-06, | |
| "loss": 0.292, | |
| "step": 912 | |
| }, | |
| { | |
| "epoch": 1.7357414448669202, | |
| "grad_norm": 0.6733881826858943, | |
| "learning_rate": 1.046626284382356e-06, | |
| "loss": 0.2958, | |
| "step": 913 | |
| }, | |
| { | |
| "epoch": 1.7376425855513307, | |
| "grad_norm": 0.6842998003017107, | |
| "learning_rate": 1.0318846732188737e-06, | |
| "loss": 0.3017, | |
| "step": 914 | |
| }, | |
| { | |
| "epoch": 1.7395437262357416, | |
| "grad_norm": 0.6232987641931355, | |
| "learning_rate": 1.017241967057816e-06, | |
| "loss": 0.2971, | |
| "step": 915 | |
| }, | |
| { | |
| "epoch": 1.741444866920152, | |
| "grad_norm": 0.6460854475268452, | |
| "learning_rate": 1.0026983273865055e-06, | |
| "loss": 0.294, | |
| "step": 916 | |
| }, | |
| { | |
| "epoch": 1.7433460076045626, | |
| "grad_norm": 0.630572397486309, | |
| "learning_rate": 9.882539145997027e-07, | |
| "loss": 0.2863, | |
| "step": 917 | |
| }, | |
| { | |
| "epoch": 1.7452471482889735, | |
| "grad_norm": 0.6824059027767321, | |
| "learning_rate": 9.739088879978409e-07, | |
| "loss": 0.3025, | |
| "step": 918 | |
| }, | |
| { | |
| "epoch": 1.747148288973384, | |
| "grad_norm": 0.627963754447401, | |
| "learning_rate": 9.59663405785277e-07, | |
| "loss": 0.2832, | |
| "step": 919 | |
| }, | |
| { | |
| "epoch": 1.7490494296577945, | |
| "grad_norm": 0.68493034609757, | |
| "learning_rate": 9.455176250685338e-07, | |
| "loss": 0.3093, | |
| "step": 920 | |
| }, | |
| { | |
| "epoch": 1.7509505703422055, | |
| "grad_norm": 0.6501488925521651, | |
| "learning_rate": 9.314717018545838e-07, | |
| "loss": 0.2934, | |
| "step": 921 | |
| }, | |
| { | |
| "epoch": 1.752851711026616, | |
| "grad_norm": 0.6841996769420994, | |
| "learning_rate": 9.17525791049112e-07, | |
| "loss": 0.2933, | |
| "step": 922 | |
| }, | |
| { | |
| "epoch": 1.7547528517110265, | |
| "grad_norm": 0.6678765571208509, | |
| "learning_rate": 9.036800464548157e-07, | |
| "loss": 0.2907, | |
| "step": 923 | |
| }, | |
| { | |
| "epoch": 1.7566539923954374, | |
| "grad_norm": 0.7108037605427074, | |
| "learning_rate": 8.899346207697135e-07, | |
| "loss": 0.2878, | |
| "step": 924 | |
| }, | |
| { | |
| "epoch": 1.758555133079848, | |
| "grad_norm": 0.6677849556128975, | |
| "learning_rate": 8.762896655854481e-07, | |
| "loss": 0.3198, | |
| "step": 925 | |
| }, | |
| { | |
| "epoch": 1.7604562737642584, | |
| "grad_norm": 0.6639677841117704, | |
| "learning_rate": 8.627453313856249e-07, | |
| "loss": 0.2989, | |
| "step": 926 | |
| }, | |
| { | |
| "epoch": 1.7623574144486693, | |
| "grad_norm": 0.6490695273031204, | |
| "learning_rate": 8.493017675441495e-07, | |
| "loss": 0.2981, | |
| "step": 927 | |
| }, | |
| { | |
| "epoch": 1.7642585551330798, | |
| "grad_norm": 0.7008658855496248, | |
| "learning_rate": 8.359591223235785e-07, | |
| "loss": 0.3111, | |
| "step": 928 | |
| }, | |
| { | |
| "epoch": 1.7661596958174905, | |
| "grad_norm": 0.6866567156881749, | |
| "learning_rate": 8.227175428734868e-07, | |
| "loss": 0.3106, | |
| "step": 929 | |
| }, | |
| { | |
| "epoch": 1.7680608365019013, | |
| "grad_norm": 0.6886379190760398, | |
| "learning_rate": 8.095771752288451e-07, | |
| "loss": 0.2966, | |
| "step": 930 | |
| }, | |
| { | |
| "epoch": 1.7699619771863118, | |
| "grad_norm": 0.6498944030044856, | |
| "learning_rate": 7.965381643084069e-07, | |
| "loss": 0.2884, | |
| "step": 931 | |
| }, | |
| { | |
| "epoch": 1.7718631178707225, | |
| "grad_norm": 0.6427191709264957, | |
| "learning_rate": 7.83600653913108e-07, | |
| "loss": 0.2952, | |
| "step": 932 | |
| }, | |
| { | |
| "epoch": 1.7737642585551332, | |
| "grad_norm": 0.6459065397761733, | |
| "learning_rate": 7.707647867244927e-07, | |
| "loss": 0.2841, | |
| "step": 933 | |
| }, | |
| { | |
| "epoch": 1.7756653992395437, | |
| "grad_norm": 0.6606508930704461, | |
| "learning_rate": 7.580307043031232e-07, | |
| "loss": 0.3016, | |
| "step": 934 | |
| }, | |
| { | |
| "epoch": 1.7775665399239544, | |
| "grad_norm": 0.6824845019659308, | |
| "learning_rate": 7.453985470870284e-07, | |
| "loss": 0.2952, | |
| "step": 935 | |
| }, | |
| { | |
| "epoch": 1.7794676806083651, | |
| "grad_norm": 0.6620033004208168, | |
| "learning_rate": 7.328684543901598e-07, | |
| "loss": 0.3031, | |
| "step": 936 | |
| }, | |
| { | |
| "epoch": 1.7813688212927756, | |
| "grad_norm": 0.6477002490125038, | |
| "learning_rate": 7.204405644008416e-07, | |
| "loss": 0.2867, | |
| "step": 937 | |
| }, | |
| { | |
| "epoch": 1.7832699619771863, | |
| "grad_norm": 0.6483735735140436, | |
| "learning_rate": 7.081150141802518e-07, | |
| "loss": 0.2879, | |
| "step": 938 | |
| }, | |
| { | |
| "epoch": 1.785171102661597, | |
| "grad_norm": 0.6272847209383634, | |
| "learning_rate": 6.958919396609231e-07, | |
| "loss": 0.2963, | |
| "step": 939 | |
| }, | |
| { | |
| "epoch": 1.7870722433460076, | |
| "grad_norm": 0.6420742935773867, | |
| "learning_rate": 6.837714756452241e-07, | |
| "loss": 0.2916, | |
| "step": 940 | |
| }, | |
| { | |
| "epoch": 1.7889733840304183, | |
| "grad_norm": 0.6395398251864199, | |
| "learning_rate": 6.717537558038845e-07, | |
| "loss": 0.3008, | |
| "step": 941 | |
| }, | |
| { | |
| "epoch": 1.790874524714829, | |
| "grad_norm": 0.6631351455513448, | |
| "learning_rate": 6.598389126745209e-07, | |
| "loss": 0.3055, | |
| "step": 942 | |
| }, | |
| { | |
| "epoch": 1.7927756653992395, | |
| "grad_norm": 0.6174789070839137, | |
| "learning_rate": 6.480270776601682e-07, | |
| "loss": 0.2844, | |
| "step": 943 | |
| }, | |
| { | |
| "epoch": 1.7946768060836502, | |
| "grad_norm": 0.6737785440199602, | |
| "learning_rate": 6.36318381027835e-07, | |
| "loss": 0.2972, | |
| "step": 944 | |
| }, | |
| { | |
| "epoch": 1.796577946768061, | |
| "grad_norm": 0.6503599690690047, | |
| "learning_rate": 6.247129519070728e-07, | |
| "loss": 0.2855, | |
| "step": 945 | |
| }, | |
| { | |
| "epoch": 1.7984790874524714, | |
| "grad_norm": 0.6621637148105989, | |
| "learning_rate": 6.132109182885382e-07, | |
| "loss": 0.2951, | |
| "step": 946 | |
| }, | |
| { | |
| "epoch": 1.8003802281368821, | |
| "grad_norm": 0.656301130268409, | |
| "learning_rate": 6.018124070225928e-07, | |
| "loss": 0.2857, | |
| "step": 947 | |
| }, | |
| { | |
| "epoch": 1.8022813688212929, | |
| "grad_norm": 0.616497778574551, | |
| "learning_rate": 5.905175438178979e-07, | |
| "loss": 0.297, | |
| "step": 948 | |
| }, | |
| { | |
| "epoch": 1.8041825095057034, | |
| "grad_norm": 0.6348171512051156, | |
| "learning_rate": 5.793264532400311e-07, | |
| "loss": 0.3067, | |
| "step": 949 | |
| }, | |
| { | |
| "epoch": 1.806083650190114, | |
| "grad_norm": 0.643847754842476, | |
| "learning_rate": 5.68239258710116e-07, | |
| "loss": 0.2842, | |
| "step": 950 | |
| }, | |
| { | |
| "epoch": 1.8079847908745248, | |
| "grad_norm": 0.6495570370837512, | |
| "learning_rate": 5.572560825034523e-07, | |
| "loss": 0.2899, | |
| "step": 951 | |
| }, | |
| { | |
| "epoch": 1.8098859315589353, | |
| "grad_norm": 0.6792066758557851, | |
| "learning_rate": 5.463770457481732e-07, | |
| "loss": 0.304, | |
| "step": 952 | |
| }, | |
| { | |
| "epoch": 1.811787072243346, | |
| "grad_norm": 0.6444184836884931, | |
| "learning_rate": 5.35602268423906e-07, | |
| "loss": 0.3062, | |
| "step": 953 | |
| }, | |
| { | |
| "epoch": 1.8136882129277567, | |
| "grad_norm": 0.6779495232301359, | |
| "learning_rate": 5.249318693604577e-07, | |
| "loss": 0.3056, | |
| "step": 954 | |
| }, | |
| { | |
| "epoch": 1.8155893536121672, | |
| "grad_norm": 0.6637906350436148, | |
| "learning_rate": 5.143659662364931e-07, | |
| "loss": 0.3091, | |
| "step": 955 | |
| }, | |
| { | |
| "epoch": 1.817490494296578, | |
| "grad_norm": 0.665516411756311, | |
| "learning_rate": 5.039046755782417e-07, | |
| "loss": 0.278, | |
| "step": 956 | |
| }, | |
| { | |
| "epoch": 1.8193916349809887, | |
| "grad_norm": 0.6380985438566863, | |
| "learning_rate": 4.935481127582131e-07, | |
| "loss": 0.2836, | |
| "step": 957 | |
| }, | |
| { | |
| "epoch": 1.8212927756653992, | |
| "grad_norm": 0.6916665265872712, | |
| "learning_rate": 4.83296391993926e-07, | |
| "loss": 0.3116, | |
| "step": 958 | |
| }, | |
| { | |
| "epoch": 1.8231939163498099, | |
| "grad_norm": 0.6270186459380254, | |
| "learning_rate": 4.7314962634664616e-07, | |
| "loss": 0.2946, | |
| "step": 959 | |
| }, | |
| { | |
| "epoch": 1.8250950570342206, | |
| "grad_norm": 0.6414055679246391, | |
| "learning_rate": 4.631079277201389e-07, | |
| "loss": 0.2813, | |
| "step": 960 | |
| }, | |
| { | |
| "epoch": 1.826996197718631, | |
| "grad_norm": 0.6404613601514051, | |
| "learning_rate": 4.5317140685943726e-07, | |
| "loss": 0.296, | |
| "step": 961 | |
| }, | |
| { | |
| "epoch": 1.8288973384030418, | |
| "grad_norm": 0.7649854660647702, | |
| "learning_rate": 4.433401733496201e-07, | |
| "loss": 0.2988, | |
| "step": 962 | |
| }, | |
| { | |
| "epoch": 1.8307984790874525, | |
| "grad_norm": 0.6697104390550309, | |
| "learning_rate": 4.3361433561460274e-07, | |
| "loss": 0.302, | |
| "step": 963 | |
| }, | |
| { | |
| "epoch": 1.832699619771863, | |
| "grad_norm": 0.6571923674918133, | |
| "learning_rate": 4.2399400091594154e-07, | |
| "loss": 0.2802, | |
| "step": 964 | |
| }, | |
| { | |
| "epoch": 1.8346007604562737, | |
| "grad_norm": 0.6161933054558018, | |
| "learning_rate": 4.14479275351648e-07, | |
| "loss": 0.2721, | |
| "step": 965 | |
| }, | |
| { | |
| "epoch": 1.8365019011406845, | |
| "grad_norm": 0.6272929945233245, | |
| "learning_rate": 4.0507026385502747e-07, | |
| "loss": 0.2843, | |
| "step": 966 | |
| }, | |
| { | |
| "epoch": 1.838403041825095, | |
| "grad_norm": 0.6312964955141961, | |
| "learning_rate": 3.9576707019350903e-07, | |
| "loss": 0.2949, | |
| "step": 967 | |
| }, | |
| { | |
| "epoch": 1.8403041825095057, | |
| "grad_norm": 0.7182327459174982, | |
| "learning_rate": 3.865697969675164e-07, | |
| "loss": 0.3104, | |
| "step": 968 | |
| }, | |
| { | |
| "epoch": 1.8422053231939164, | |
| "grad_norm": 0.6617625816751216, | |
| "learning_rate": 3.7747854560931996e-07, | |
| "loss": 0.2934, | |
| "step": 969 | |
| }, | |
| { | |
| "epoch": 1.8441064638783269, | |
| "grad_norm": 0.6357456704190849, | |
| "learning_rate": 3.684934163819309e-07, | |
| "loss": 0.299, | |
| "step": 970 | |
| }, | |
| { | |
| "epoch": 1.8460076045627376, | |
| "grad_norm": 0.6454151454404484, | |
| "learning_rate": 3.596145083779912e-07, | |
| "loss": 0.2774, | |
| "step": 971 | |
| }, | |
| { | |
| "epoch": 1.8479087452471483, | |
| "grad_norm": 0.6509123231700535, | |
| "learning_rate": 3.508419195186774e-07, | |
| "loss": 0.2835, | |
| "step": 972 | |
| }, | |
| { | |
| "epoch": 1.8498098859315588, | |
| "grad_norm": 0.6341447708992041, | |
| "learning_rate": 3.421757465526243e-07, | |
| "loss": 0.296, | |
| "step": 973 | |
| }, | |
| { | |
| "epoch": 1.8517110266159695, | |
| "grad_norm": 0.6887603534419984, | |
| "learning_rate": 3.33616085054862e-07, | |
| "loss": 0.2985, | |
| "step": 974 | |
| }, | |
| { | |
| "epoch": 1.8536121673003803, | |
| "grad_norm": 0.654825898828302, | |
| "learning_rate": 3.2516302942574794e-07, | |
| "loss": 0.3047, | |
| "step": 975 | |
| }, | |
| { | |
| "epoch": 1.8555133079847907, | |
| "grad_norm": 0.6550270073433818, | |
| "learning_rate": 3.1681667288994353e-07, | |
| "loss": 0.2917, | |
| "step": 976 | |
| }, | |
| { | |
| "epoch": 1.8574144486692015, | |
| "grad_norm": 0.637979330404123, | |
| "learning_rate": 3.0857710749537585e-07, | |
| "loss": 0.2842, | |
| "step": 977 | |
| }, | |
| { | |
| "epoch": 1.8593155893536122, | |
| "grad_norm": 0.6504109130841377, | |
| "learning_rate": 3.0044442411222066e-07, | |
| "loss": 0.2805, | |
| "step": 978 | |
| }, | |
| { | |
| "epoch": 1.8612167300380227, | |
| "grad_norm": 0.657142617893898, | |
| "learning_rate": 2.9241871243190555e-07, | |
| "loss": 0.296, | |
| "step": 979 | |
| }, | |
| { | |
| "epoch": 1.8631178707224336, | |
| "grad_norm": 0.6065612306854088, | |
| "learning_rate": 2.845000609661208e-07, | |
| "loss": 0.2803, | |
| "step": 980 | |
| }, | |
| { | |
| "epoch": 1.8650190114068441, | |
| "grad_norm": 0.6522428493288547, | |
| "learning_rate": 2.7668855704583997e-07, | |
| "loss": 0.2911, | |
| "step": 981 | |
| }, | |
| { | |
| "epoch": 1.8669201520912546, | |
| "grad_norm": 0.6338524303700772, | |
| "learning_rate": 2.689842868203563e-07, | |
| "loss": 0.2823, | |
| "step": 982 | |
| }, | |
| { | |
| "epoch": 1.8688212927756656, | |
| "grad_norm": 0.6982484407721956, | |
| "learning_rate": 2.6138733525633896e-07, | |
| "loss": 0.2979, | |
| "step": 983 | |
| }, | |
| { | |
| "epoch": 1.870722433460076, | |
| "grad_norm": 0.6458416142163295, | |
| "learning_rate": 2.5389778613688744e-07, | |
| "loss": 0.2946, | |
| "step": 984 | |
| }, | |
| { | |
| "epoch": 1.8726235741444865, | |
| "grad_norm": 0.6333421517785925, | |
| "learning_rate": 2.46515722060614e-07, | |
| "loss": 0.2917, | |
| "step": 985 | |
| }, | |
| { | |
| "epoch": 1.8745247148288975, | |
| "grad_norm": 0.614631821322944, | |
| "learning_rate": 2.392412244407294e-07, | |
| "loss": 0.2633, | |
| "step": 986 | |
| }, | |
| { | |
| "epoch": 1.876425855513308, | |
| "grad_norm": 0.6476929927138576, | |
| "learning_rate": 2.3207437350414418e-07, | |
| "loss": 0.2832, | |
| "step": 987 | |
| }, | |
| { | |
| "epoch": 1.8783269961977185, | |
| "grad_norm": 0.6464938459083758, | |
| "learning_rate": 2.2501524829059208e-07, | |
| "loss": 0.286, | |
| "step": 988 | |
| }, | |
| { | |
| "epoch": 1.8802281368821294, | |
| "grad_norm": 0.6957620871282872, | |
| "learning_rate": 2.180639266517448e-07, | |
| "loss": 0.292, | |
| "step": 989 | |
| }, | |
| { | |
| "epoch": 1.88212927756654, | |
| "grad_norm": 0.6758010937956551, | |
| "learning_rate": 2.1122048525036409e-07, | |
| "loss": 0.299, | |
| "step": 990 | |
| }, | |
| { | |
| "epoch": 1.8840304182509504, | |
| "grad_norm": 0.6461531213152573, | |
| "learning_rate": 2.0448499955945223e-07, | |
| "loss": 0.2916, | |
| "step": 991 | |
| }, | |
| { | |
| "epoch": 1.8859315589353614, | |
| "grad_norm": 0.6923506127509973, | |
| "learning_rate": 1.9785754386142164e-07, | |
| "loss": 0.3041, | |
| "step": 992 | |
| }, | |
| { | |
| "epoch": 1.8878326996197718, | |
| "grad_norm": 0.6388219612898095, | |
| "learning_rate": 1.9133819124727003e-07, | |
| "loss": 0.2905, | |
| "step": 993 | |
| }, | |
| { | |
| "epoch": 1.8897338403041823, | |
| "grad_norm": 0.6841759938255663, | |
| "learning_rate": 1.8492701361578326e-07, | |
| "loss": 0.3013, | |
| "step": 994 | |
| }, | |
| { | |
| "epoch": 1.8916349809885933, | |
| "grad_norm": 0.6267566323225999, | |
| "learning_rate": 1.7862408167273472e-07, | |
| "loss": 0.2933, | |
| "step": 995 | |
| }, | |
| { | |
| "epoch": 1.8935361216730038, | |
| "grad_norm": 0.661276053498803, | |
| "learning_rate": 1.724294649301095e-07, | |
| "loss": 0.3092, | |
| "step": 996 | |
| }, | |
| { | |
| "epoch": 1.8954372623574145, | |
| "grad_norm": 0.6517586538205721, | |
| "learning_rate": 1.6634323170533928e-07, | |
| "loss": 0.2987, | |
| "step": 997 | |
| }, | |
| { | |
| "epoch": 1.8973384030418252, | |
| "grad_norm": 0.64482375721809, | |
| "learning_rate": 1.6036544912054087e-07, | |
| "loss": 0.2997, | |
| "step": 998 | |
| }, | |
| { | |
| "epoch": 1.8992395437262357, | |
| "grad_norm": 0.6589055856098988, | |
| "learning_rate": 1.544961831017855e-07, | |
| "loss": 0.2906, | |
| "step": 999 | |
| }, | |
| { | |
| "epoch": 1.9011406844106464, | |
| "grad_norm": 0.6626207488682027, | |
| "learning_rate": 1.487354983783673e-07, | |
| "loss": 0.3051, | |
| "step": 1000 | |
| }, | |
| { | |
| "epoch": 1.9030418250950571, | |
| "grad_norm": 0.6335880944031885, | |
| "learning_rate": 1.430834584820895e-07, | |
| "loss": 0.286, | |
| "step": 1001 | |
| }, | |
| { | |
| "epoch": 1.9049429657794676, | |
| "grad_norm": 0.6460463217877473, | |
| "learning_rate": 1.375401257465625e-07, | |
| "loss": 0.2936, | |
| "step": 1002 | |
| }, | |
| { | |
| "epoch": 1.9068441064638784, | |
| "grad_norm": 0.6424730269635128, | |
| "learning_rate": 1.3210556130652031e-07, | |
| "loss": 0.2931, | |
| "step": 1003 | |
| }, | |
| { | |
| "epoch": 1.908745247148289, | |
| "grad_norm": 0.6455228366759804, | |
| "learning_rate": 1.2677982509714415e-07, | |
| "loss": 0.2869, | |
| "step": 1004 | |
| }, | |
| { | |
| "epoch": 1.9106463878326996, | |
| "grad_norm": 0.6264726515900757, | |
| "learning_rate": 1.2156297585339872e-07, | |
| "loss": 0.2821, | |
| "step": 1005 | |
| }, | |
| { | |
| "epoch": 1.9125475285171103, | |
| "grad_norm": 0.728749939279872, | |
| "learning_rate": 1.1645507110938925e-07, | |
| "loss": 0.2891, | |
| "step": 1006 | |
| }, | |
| { | |
| "epoch": 1.914448669201521, | |
| "grad_norm": 0.6399487727701857, | |
| "learning_rate": 1.1145616719772545e-07, | |
| "loss": 0.2954, | |
| "step": 1007 | |
| }, | |
| { | |
| "epoch": 1.9163498098859315, | |
| "grad_norm": 0.6432091953282726, | |
| "learning_rate": 1.0656631924889749e-07, | |
| "loss": 0.2918, | |
| "step": 1008 | |
| }, | |
| { | |
| "epoch": 1.9182509505703422, | |
| "grad_norm": 0.6247745645489756, | |
| "learning_rate": 1.0178558119067316e-07, | |
| "loss": 0.2965, | |
| "step": 1009 | |
| }, | |
| { | |
| "epoch": 1.920152091254753, | |
| "grad_norm": 0.6688688203903079, | |
| "learning_rate": 9.711400574749507e-08, | |
| "loss": 0.2984, | |
| "step": 1010 | |
| }, | |
| { | |
| "epoch": 1.9220532319391634, | |
| "grad_norm": 0.6242621997077458, | |
| "learning_rate": 9.255164443990994e-08, | |
| "loss": 0.2898, | |
| "step": 1011 | |
| }, | |
| { | |
| "epoch": 1.9239543726235742, | |
| "grad_norm": 0.6711509572905329, | |
| "learning_rate": 8.809854758399017e-08, | |
| "loss": 0.3007, | |
| "step": 1012 | |
| }, | |
| { | |
| "epoch": 1.9258555133079849, | |
| "grad_norm": 0.6225231432258058, | |
| "learning_rate": 8.375476429078543e-08, | |
| "loss": 0.2984, | |
| "step": 1013 | |
| }, | |
| { | |
| "epoch": 1.9277566539923954, | |
| "grad_norm": 0.6421689282394204, | |
| "learning_rate": 7.952034246577977e-08, | |
| "loss": 0.2934, | |
| "step": 1014 | |
| }, | |
| { | |
| "epoch": 1.929657794676806, | |
| "grad_norm": 0.6329121751911602, | |
| "learning_rate": 7.539532880836087e-08, | |
| "loss": 0.2967, | |
| "step": 1015 | |
| }, | |
| { | |
| "epoch": 1.9315589353612168, | |
| "grad_norm": 0.6282030528306141, | |
| "learning_rate": 7.137976881130826e-08, | |
| "loss": 0.3008, | |
| "step": 1016 | |
| }, | |
| { | |
| "epoch": 1.9334600760456273, | |
| "grad_norm": 0.6669160927439524, | |
| "learning_rate": 6.747370676028819e-08, | |
| "loss": 0.303, | |
| "step": 1017 | |
| }, | |
| { | |
| "epoch": 1.935361216730038, | |
| "grad_norm": 0.6618011089673258, | |
| "learning_rate": 6.367718573336845e-08, | |
| "loss": 0.2866, | |
| "step": 1018 | |
| }, | |
| { | |
| "epoch": 1.9372623574144487, | |
| "grad_norm": 0.6905695452390231, | |
| "learning_rate": 5.999024760054095e-08, | |
| "loss": 0.3046, | |
| "step": 1019 | |
| }, | |
| { | |
| "epoch": 1.9391634980988592, | |
| "grad_norm": 0.6426986598068631, | |
| "learning_rate": 5.641293302326323e-08, | |
| "loss": 0.2895, | |
| "step": 1020 | |
| }, | |
| { | |
| "epoch": 1.94106463878327, | |
| "grad_norm": 0.6984568584805342, | |
| "learning_rate": 5.2945281454003236e-08, | |
| "loss": 0.302, | |
| "step": 1021 | |
| }, | |
| { | |
| "epoch": 1.9429657794676807, | |
| "grad_norm": 0.6698410818193015, | |
| "learning_rate": 4.958733113581415e-08, | |
| "loss": 0.3084, | |
| "step": 1022 | |
| }, | |
| { | |
| "epoch": 1.9448669201520912, | |
| "grad_norm": 0.6319688367087622, | |
| "learning_rate": 4.6339119101902475e-08, | |
| "loss": 0.2887, | |
| "step": 1023 | |
| }, | |
| { | |
| "epoch": 1.946768060836502, | |
| "grad_norm": 0.655799051759164, | |
| "learning_rate": 4.320068117522835e-08, | |
| "loss": 0.3017, | |
| "step": 1024 | |
| }, | |
| { | |
| "epoch": 1.9486692015209126, | |
| "grad_norm": 0.6592681210985964, | |
| "learning_rate": 4.0172051968101474e-08, | |
| "loss": 0.283, | |
| "step": 1025 | |
| }, | |
| { | |
| "epoch": 1.950570342205323, | |
| "grad_norm": 0.603320461185307, | |
| "learning_rate": 3.7253264881809137e-08, | |
| "loss": 0.2793, | |
| "step": 1026 | |
| }, | |
| { | |
| "epoch": 1.9524714828897338, | |
| "grad_norm": 0.6232649505568311, | |
| "learning_rate": 3.4444352106242086e-08, | |
| "loss": 0.2887, | |
| "step": 1027 | |
| }, | |
| { | |
| "epoch": 1.9543726235741445, | |
| "grad_norm": 0.6397692202615624, | |
| "learning_rate": 3.174534461953593e-08, | |
| "loss": 0.2825, | |
| "step": 1028 | |
| }, | |
| { | |
| "epoch": 1.956273764258555, | |
| "grad_norm": 0.6462927726031551, | |
| "learning_rate": 2.915627218774142e-08, | |
| "loss": 0.2843, | |
| "step": 1029 | |
| }, | |
| { | |
| "epoch": 1.9581749049429658, | |
| "grad_norm": 0.7007015347093432, | |
| "learning_rate": 2.667716336448356e-08, | |
| "loss": 0.3012, | |
| "step": 1030 | |
| }, | |
| { | |
| "epoch": 1.9600760456273765, | |
| "grad_norm": 0.6270719702435925, | |
| "learning_rate": 2.430804549065302e-08, | |
| "loss": 0.3073, | |
| "step": 1031 | |
| }, | |
| { | |
| "epoch": 1.961977186311787, | |
| "grad_norm": 0.6651423444611577, | |
| "learning_rate": 2.2048944694104123e-08, | |
| "loss": 0.3045, | |
| "step": 1032 | |
| }, | |
| { | |
| "epoch": 1.9638783269961977, | |
| "grad_norm": 0.695944835631861, | |
| "learning_rate": 1.989988588936509e-08, | |
| "loss": 0.3172, | |
| "step": 1033 | |
| }, | |
| { | |
| "epoch": 1.9657794676806084, | |
| "grad_norm": 0.6631941833855483, | |
| "learning_rate": 1.7860892777367133e-08, | |
| "loss": 0.2979, | |
| "step": 1034 | |
| }, | |
| { | |
| "epoch": 1.967680608365019, | |
| "grad_norm": 0.6310626037881104, | |
| "learning_rate": 1.5931987845176912e-08, | |
| "loss": 0.2711, | |
| "step": 1035 | |
| }, | |
| { | |
| "epoch": 1.9695817490494296, | |
| "grad_norm": 0.646056718183438, | |
| "learning_rate": 1.411319236575337e-08, | |
| "loss": 0.3012, | |
| "step": 1036 | |
| }, | |
| { | |
| "epoch": 1.9714828897338403, | |
| "grad_norm": 0.65086493647846, | |
| "learning_rate": 1.2404526397711281e-08, | |
| "loss": 0.3025, | |
| "step": 1037 | |
| }, | |
| { | |
| "epoch": 1.9733840304182508, | |
| "grad_norm": 0.6495836115699344, | |
| "learning_rate": 1.0806008785100297e-08, | |
| "loss": 0.2852, | |
| "step": 1038 | |
| }, | |
| { | |
| "epoch": 1.9752851711026616, | |
| "grad_norm": 0.7378789473301116, | |
| "learning_rate": 9.317657157197347e-09, | |
| "loss": 0.3174, | |
| "step": 1039 | |
| }, | |
| { | |
| "epoch": 1.9771863117870723, | |
| "grad_norm": 0.6835003277534576, | |
| "learning_rate": 7.93948792831234e-09, | |
| "loss": 0.2918, | |
| "step": 1040 | |
| }, | |
| { | |
| "epoch": 1.9790874524714828, | |
| "grad_norm": 0.6527036232069688, | |
| "learning_rate": 6.671516297606095e-09, | |
| "loss": 0.2948, | |
| "step": 1041 | |
| }, | |
| { | |
| "epoch": 1.9809885931558935, | |
| "grad_norm": 0.6492166321981369, | |
| "learning_rate": 5.513756248924917e-09, | |
| "loss": 0.2942, | |
| "step": 1042 | |
| }, | |
| { | |
| "epoch": 1.9828897338403042, | |
| "grad_norm": 0.6398411142650093, | |
| "learning_rate": 4.466220550641831e-09, | |
| "loss": 0.2956, | |
| "step": 1043 | |
| }, | |
| { | |
| "epoch": 1.9847908745247147, | |
| "grad_norm": 0.6851683646003736, | |
| "learning_rate": 3.528920755523357e-09, | |
| "loss": 0.2993, | |
| "step": 1044 | |
| }, | |
| { | |
| "epoch": 1.9866920152091256, | |
| "grad_norm": 0.6618944159829543, | |
| "learning_rate": 2.701867200592956e-09, | |
| "loss": 0.2963, | |
| "step": 1045 | |
| }, | |
| { | |
| "epoch": 1.9885931558935361, | |
| "grad_norm": 0.642524251150367, | |
| "learning_rate": 1.9850690070266633e-09, | |
| "loss": 0.2987, | |
| "step": 1046 | |
| }, | |
| { | |
| "epoch": 1.9904942965779466, | |
| "grad_norm": 0.6779610839057724, | |
| "learning_rate": 1.378534080042071e-09, | |
| "loss": 0.2955, | |
| "step": 1047 | |
| }, | |
| { | |
| "epoch": 1.9923954372623576, | |
| "grad_norm": 0.6483718998108124, | |
| "learning_rate": 8.822691088195001e-10, | |
| "loss": 0.2869, | |
| "step": 1048 | |
| }, | |
| { | |
| "epoch": 1.994296577946768, | |
| "grad_norm": 0.6521070029739895, | |
| "learning_rate": 4.962795664265052e-10, | |
| "loss": 0.2844, | |
| "step": 1049 | |
| }, | |
| { | |
| "epoch": 1.9961977186311786, | |
| "grad_norm": 0.6314016099340298, | |
| "learning_rate": 2.2056970975459223e-10, | |
| "loss": 0.2809, | |
| "step": 1050 | |
| }, | |
| { | |
| "epoch": 1.9980988593155895, | |
| "grad_norm": 0.6551900164096042, | |
| "learning_rate": 5.514257947369928e-11, | |
| "loss": 0.2965, | |
| "step": 1051 | |
| }, | |
| { | |
| "epoch": 2.0, | |
| "grad_norm": 0.6298562155658421, | |
| "learning_rate": 0.0, | |
| "loss": 0.2972, | |
| "step": 1052 | |
| }, | |
| { | |
| "epoch": 2.0, | |
| "step": 1052, | |
| "total_flos": 2.1834932150075392e+17, | |
| "train_loss": 0.37792977690696716, | |
| "train_runtime": 2639.5671, | |
| "train_samples_per_second": 51.014, | |
| "train_steps_per_second": 0.399 | |
| } | |
| ], | |
| "logging_steps": 1, | |
| "max_steps": 1052, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 2, | |
| "save_steps": 500, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": true | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 2.1834932150075392e+17, | |
| "train_batch_size": 4, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |
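
The structure above is the serialized `trainer_state.json` that the Hugging Face `Trainer` writes alongside its checkpoints: a `log_history` array of per-step records (`epoch`, `step`, `loss`, `learning_rate`, `grad_norm`), a final summary record (`train_loss`, `train_runtime`, throughput), and top-level run metadata (`max_steps`, `total_flos`, callback state). As a minimal sketch of how such a file can be inspected (the file path and the choice of what to print are illustrative assumptions, not part of the original log), the per-step loss curve can be pulled out with nothing more than the standard library:

```python
import json

# Load the serialized trainer state (the path is an assumption for illustration).
with open("trainer_state.json") as f:
    state = json.load(f)

# Per-step records carry "loss" and "learning_rate"; the last entry is instead
# the run summary ("train_loss", "train_runtime", ...), so filter it out.
logs = [e for e in state["log_history"] if "loss" in e and "learning_rate" in e]
summary = state["log_history"][-1]

steps = [e["step"] for e in logs]
losses = [e["loss"] for e in logs]

print(f"steps logged: {len(steps)} (max_steps={state['max_steps']})")
print(f"first logged loss: {losses[0]:.4f}, last logged loss: {losses[-1]:.4f}")
print(f"reported average train_loss: {summary.get('train_loss')}")
```

Filtering on the presence of the `loss` key keeps the per-step records and drops the final summary entry, which reports the run-level average as `train_loss` rather than a step loss.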