{
  "best_global_step": null,
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.0,
  "eval_steps": 0,
  "global_step": 461,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0021691973969631237,
      "grad_norm": 2.11081600189209,
      "learning_rate": 1e-05,
      "loss": 2.6121,
      "step": 1
    },
    {
      "epoch": 0.004338394793926247,
      "grad_norm": 1.9794774055480957,
      "learning_rate": 9.97830802603037e-06,
      "loss": 2.5875,
      "step": 2
    },
    {
      "epoch": 0.006507592190889371,
      "grad_norm": 1.8341801166534424,
      "learning_rate": 9.956616052060738e-06,
      "loss": 2.6118,
      "step": 3
    },
    {
      "epoch": 0.008676789587852495,
      "grad_norm": 1.5984541177749634,
      "learning_rate": 9.934924078091108e-06,
      "loss": 2.5194,
      "step": 4
    },
    {
      "epoch": 0.010845986984815618,
      "grad_norm": 1.3061403036117554,
      "learning_rate": 9.913232104121477e-06,
      "loss": 2.4602,
      "step": 5
    },
    {
      "epoch": 0.013015184381778741,
      "grad_norm": 1.0785109996795654,
      "learning_rate": 9.891540130151845e-06,
      "loss": 2.4176,
      "step": 6
    },
    {
      "epoch": 0.015184381778741865,
      "grad_norm": 0.8170204758644104,
      "learning_rate": 9.869848156182214e-06,
      "loss": 2.315,
      "step": 7
    },
    {
      "epoch": 0.01735357917570499,
      "grad_norm": 0.7127103209495544,
      "learning_rate": 9.848156182212582e-06,
      "loss": 2.2658,
      "step": 8
    },
    {
      "epoch": 0.019522776572668113,
      "grad_norm": 0.7081116437911987,
      "learning_rate": 9.82646420824295e-06,
      "loss": 2.281,
      "step": 9
    },
    {
      "epoch": 0.021691973969631236,
      "grad_norm": 0.768051028251648,
      "learning_rate": 9.80477223427332e-06,
      "loss": 2.2928,
      "step": 10
    },
    {
      "epoch": 0.02386117136659436,
      "grad_norm": 0.8635964393615723,
      "learning_rate": 9.783080260303689e-06,
      "loss": 2.2291,
      "step": 11
    },
    {
      "epoch": 0.026030368763557483,
      "grad_norm": 0.8809370398521423,
      "learning_rate": 9.761388286334057e-06,
      "loss": 2.2048,
      "step": 12
    },
    {
      "epoch": 0.028199566160520606,
      "grad_norm": 0.908088207244873,
      "learning_rate": 9.739696312364426e-06,
      "loss": 2.1956,
      "step": 13
    },
    {
      "epoch": 0.03036876355748373,
      "grad_norm": 1.0039963722229004,
      "learning_rate": 9.718004338394794e-06,
      "loss": 2.1148,
      "step": 14
    },
    {
      "epoch": 0.03253796095444685,
      "grad_norm": 0.907769501209259,
      "learning_rate": 9.696312364425163e-06,
      "loss": 2.0764,
      "step": 15
    },
    {
      "epoch": 0.03470715835140998,
      "grad_norm": 0.8381527066230774,
      "learning_rate": 9.674620390455533e-06,
      "loss": 2.0732,
      "step": 16
    },
    {
      "epoch": 0.0368763557483731,
      "grad_norm": 0.7855611443519592,
      "learning_rate": 9.6529284164859e-06,
      "loss": 1.9714,
      "step": 17
    },
    {
      "epoch": 0.039045553145336226,
      "grad_norm": 0.7366162538528442,
      "learning_rate": 9.63123644251627e-06,
      "loss": 1.965,
      "step": 18
    },
    {
      "epoch": 0.04121475054229935,
      "grad_norm": 0.6809542179107666,
      "learning_rate": 9.60954446854664e-06,
      "loss": 2.0019,
      "step": 19
    },
    {
      "epoch": 0.04338394793926247,
      "grad_norm": 0.6486644744873047,
      "learning_rate": 9.587852494577007e-06,
      "loss": 1.9383,
      "step": 20
    },
    {
      "epoch": 0.0455531453362256,
      "grad_norm": 0.6221187710762024,
      "learning_rate": 9.566160520607377e-06,
      "loss": 1.883,
      "step": 21
    },
    {
      "epoch": 0.04772234273318872,
      "grad_norm": 0.5827253460884094,
      "learning_rate": 9.544468546637745e-06,
      "loss": 1.8966,
      "step": 22
    },
    {
      "epoch": 0.049891540130151846,
      "grad_norm": 0.5862624049186707,
      "learning_rate": 9.522776572668114e-06,
      "loss": 1.8187,
      "step": 23
    },
    {
      "epoch": 0.052060737527114966,
      "grad_norm": 0.6064738035202026,
      "learning_rate": 9.501084598698482e-06,
      "loss": 1.8233,
      "step": 24
    },
    {
      "epoch": 0.05422993492407809,
      "grad_norm": 0.6191762685775757,
      "learning_rate": 9.479392624728851e-06,
      "loss": 1.7323,
      "step": 25
    },
    {
      "epoch": 0.05639913232104121,
      "grad_norm": 0.6317607164382935,
      "learning_rate": 9.457700650759219e-06,
      "loss": 1.7398,
      "step": 26
    },
    {
      "epoch": 0.05856832971800434,
      "grad_norm": 0.6248988509178162,
      "learning_rate": 9.436008676789589e-06,
      "loss": 1.7171,
      "step": 27
    },
    {
      "epoch": 0.06073752711496746,
      "grad_norm": 0.6342098712921143,
      "learning_rate": 9.414316702819958e-06,
      "loss": 1.6891,
      "step": 28
    },
    {
      "epoch": 0.06290672451193059,
      "grad_norm": 0.6058349609375,
      "learning_rate": 9.392624728850326e-06,
      "loss": 1.6624,
      "step": 29
    },
    {
      "epoch": 0.0650759219088937,
      "grad_norm": 0.5988097786903381,
      "learning_rate": 9.370932754880695e-06,
      "loss": 1.6438,
      "step": 30
    },
    {
      "epoch": 0.06724511930585683,
      "grad_norm": 0.6042107939720154,
      "learning_rate": 9.349240780911065e-06,
      "loss": 1.5956,
      "step": 31
    },
    {
      "epoch": 0.06941431670281996,
      "grad_norm": 0.5972896218299866,
      "learning_rate": 9.327548806941433e-06,
      "loss": 1.5142,
      "step": 32
    },
    {
      "epoch": 0.07158351409978309,
      "grad_norm": 0.5832527875900269,
      "learning_rate": 9.305856832971802e-06,
      "loss": 1.5384,
      "step": 33
    },
    {
      "epoch": 0.0737527114967462,
      "grad_norm": 0.5889501571655273,
      "learning_rate": 9.28416485900217e-06,
      "loss": 1.5183,
      "step": 34
    },
    {
      "epoch": 0.07592190889370933,
      "grad_norm": 0.622042715549469,
      "learning_rate": 9.26247288503254e-06,
      "loss": 1.4945,
      "step": 35
    },
    {
      "epoch": 0.07809110629067245,
      "grad_norm": 0.630580723285675,
      "learning_rate": 9.240780911062907e-06,
      "loss": 1.4925,
      "step": 36
    },
    {
      "epoch": 0.08026030368763558,
      "grad_norm": 0.5893872976303101,
      "learning_rate": 9.219088937093276e-06,
      "loss": 1.4306,
      "step": 37
    },
    {
      "epoch": 0.0824295010845987,
      "grad_norm": 0.5707374811172485,
      "learning_rate": 9.197396963123644e-06,
      "loss": 1.418,
      "step": 38
    },
    {
      "epoch": 0.08459869848156182,
      "grad_norm": 0.5506007075309753,
      "learning_rate": 9.175704989154014e-06,
      "loss": 1.3998,
      "step": 39
    },
    {
      "epoch": 0.08676789587852494,
      "grad_norm": 0.5373205542564392,
      "learning_rate": 9.154013015184382e-06,
      "loss": 1.3508,
      "step": 40
    },
    {
      "epoch": 0.08893709327548807,
      "grad_norm": 0.5845463871955872,
      "learning_rate": 9.132321041214751e-06,
      "loss": 1.3387,
      "step": 41
    },
    {
      "epoch": 0.0911062906724512,
      "grad_norm": 0.6560288071632385,
      "learning_rate": 9.11062906724512e-06,
      "loss": 1.3078,
      "step": 42
    },
    {
      "epoch": 0.09327548806941431,
      "grad_norm": 0.580771267414093,
      "learning_rate": 9.088937093275488e-06,
      "loss": 1.309,
      "step": 43
    },
    {
      "epoch": 0.09544468546637744,
      "grad_norm": 0.5993460416793823,
      "learning_rate": 9.067245119305858e-06,
      "loss": 1.3005,
      "step": 44
    },
    {
      "epoch": 0.09761388286334056,
      "grad_norm": 0.6303313374519348,
      "learning_rate": 9.045553145336227e-06,
      "loss": 1.2776,
      "step": 45
    },
    {
      "epoch": 0.09978308026030369,
      "grad_norm": 0.5471832156181335,
      "learning_rate": 9.023861171366595e-06,
      "loss": 1.2588,
      "step": 46
    },
    {
      "epoch": 0.1019522776572668,
      "grad_norm": 0.5459536910057068,
      "learning_rate": 9.002169197396964e-06,
      "loss": 1.2094,
      "step": 47
    },
    {
      "epoch": 0.10412147505422993,
      "grad_norm": 0.5408421754837036,
      "learning_rate": 8.980477223427332e-06,
      "loss": 1.2355,
      "step": 48
    },
    {
      "epoch": 0.10629067245119306,
      "grad_norm": 0.48613736033439636,
      "learning_rate": 8.958785249457702e-06,
      "loss": 1.1594,
      "step": 49
    },
    {
      "epoch": 0.10845986984815618,
      "grad_norm": 0.4992137849330902,
      "learning_rate": 8.93709327548807e-06,
      "loss": 1.1853,
      "step": 50
    },
    {
      "epoch": 0.11062906724511931,
      "grad_norm": 0.4883258044719696,
      "learning_rate": 8.915401301518439e-06,
      "loss": 1.1467,
      "step": 51
    },
    {
      "epoch": 0.11279826464208242,
      "grad_norm": 0.48137181997299194,
      "learning_rate": 8.893709327548807e-06,
      "loss": 1.1541,
      "step": 52
    },
    {
      "epoch": 0.11496746203904555,
      "grad_norm": 0.4401426911354065,
      "learning_rate": 8.872017353579176e-06,
      "loss": 1.1112,
      "step": 53
    },
    {
      "epoch": 0.11713665943600868,
      "grad_norm": 0.4389711320400238,
      "learning_rate": 8.850325379609546e-06,
      "loss": 1.0961,
      "step": 54
    },
    {
      "epoch": 0.1193058568329718,
      "grad_norm": 0.439217746257782,
      "learning_rate": 8.828633405639913e-06,
      "loss": 1.0838,
      "step": 55
    },
    {
      "epoch": 0.12147505422993492,
      "grad_norm": 0.521327555179596,
      "learning_rate": 8.806941431670283e-06,
      "loss": 1.0813,
      "step": 56
    },
    {
      "epoch": 0.12364425162689804,
      "grad_norm": 0.4604964256286621,
      "learning_rate": 8.785249457700652e-06,
      "loss": 1.0857,
      "step": 57
    },
    {
      "epoch": 0.12581344902386118,
      "grad_norm": 0.42932406067848206,
      "learning_rate": 8.76355748373102e-06,
      "loss": 1.0513,
      "step": 58
    },
    {
      "epoch": 0.1279826464208243,
      "grad_norm": 0.43107494711875916,
      "learning_rate": 8.74186550976139e-06,
      "loss": 1.0488,
      "step": 59
    },
    {
      "epoch": 0.1301518438177874,
      "grad_norm": 0.42236247658729553,
      "learning_rate": 8.720173535791757e-06,
      "loss": 1.028,
      "step": 60
    },
    {
      "epoch": 0.13232104121475055,
      "grad_norm": 0.43343520164489746,
      "learning_rate": 8.698481561822127e-06,
      "loss": 1.0272,
      "step": 61
    },
    {
      "epoch": 0.13449023861171366,
      "grad_norm": 0.48012059926986694,
      "learning_rate": 8.676789587852495e-06,
      "loss": 1.0099,
      "step": 62
    },
    {
      "epoch": 0.13665943600867678,
      "grad_norm": 0.42098668217658997,
      "learning_rate": 8.655097613882864e-06,
      "loss": 1.0158,
      "step": 63
    },
    {
      "epoch": 0.13882863340563992,
      "grad_norm": 0.4227031171321869,
      "learning_rate": 8.633405639913232e-06,
      "loss": 0.9815,
      "step": 64
    },
    {
      "epoch": 0.14099783080260303,
      "grad_norm": 0.41489773988723755,
      "learning_rate": 8.611713665943601e-06,
      "loss": 0.9635,
      "step": 65
    },
    {
      "epoch": 0.14316702819956617,
      "grad_norm": 0.40091952681541443,
      "learning_rate": 8.59002169197397e-06,
      "loss": 0.9528,
      "step": 66
    },
    {
      "epoch": 0.14533622559652928,
      "grad_norm": 0.4547802805900574,
      "learning_rate": 8.568329718004339e-06,
      "loss": 0.9726,
      "step": 67
    },
    {
      "epoch": 0.1475054229934924,
      "grad_norm": 0.4234392046928406,
      "learning_rate": 8.546637744034708e-06,
      "loss": 0.962,
      "step": 68
    },
    {
      "epoch": 0.14967462039045554,
      "grad_norm": 0.38503268361091614,
      "learning_rate": 8.524945770065076e-06,
      "loss": 0.9463,
      "step": 69
    },
    {
      "epoch": 0.15184381778741865,
      "grad_norm": 0.4524126350879669,
      "learning_rate": 8.503253796095445e-06,
      "loss": 0.9061,
      "step": 70
    },
    {
      "epoch": 0.1540130151843818,
      "grad_norm": 0.40082183480262756,
      "learning_rate": 8.481561822125815e-06,
      "loss": 0.9034,
      "step": 71
    },
    {
      "epoch": 0.1561822125813449,
      "grad_norm": 0.41073668003082275,
      "learning_rate": 8.459869848156183e-06,
      "loss": 0.8961,
      "step": 72
    },
    {
      "epoch": 0.15835140997830802,
      "grad_norm": 0.5651035308837891,
      "learning_rate": 8.438177874186552e-06,
      "loss": 0.8851,
      "step": 73
    },
    {
      "epoch": 0.16052060737527116,
      "grad_norm": 0.44745904207229614,
      "learning_rate": 8.416485900216922e-06,
      "loss": 0.9203,
      "step": 74
    },
    {
      "epoch": 0.16268980477223427,
      "grad_norm": 0.4688916504383087,
      "learning_rate": 8.39479392624729e-06,
      "loss": 0.9449,
      "step": 75
    },
    {
      "epoch": 0.1648590021691974,
      "grad_norm": 0.386109322309494,
      "learning_rate": 8.373101952277657e-06,
      "loss": 0.868,
      "step": 76
    },
    {
      "epoch": 0.16702819956616052,
      "grad_norm": 0.47646722197532654,
      "learning_rate": 8.351409978308027e-06,
      "loss": 0.8853,
      "step": 77
    },
    {
      "epoch": 0.16919739696312364,
      "grad_norm": 0.38544556498527527,
      "learning_rate": 8.329718004338394e-06,
      "loss": 0.8703,
      "step": 78
    },
    {
      "epoch": 0.17136659436008678,
      "grad_norm": 0.5066248178482056,
      "learning_rate": 8.308026030368764e-06,
      "loss": 0.8963,
      "step": 79
    },
    {
      "epoch": 0.1735357917570499,
      "grad_norm": 0.471044659614563,
      "learning_rate": 8.286334056399133e-06,
      "loss": 0.8547,
      "step": 80
    },
    {
      "epoch": 0.175704989154013,
      "grad_norm": 0.4496097266674042,
      "learning_rate": 8.264642082429501e-06,
      "loss": 0.8556,
      "step": 81
    },
    {
      "epoch": 0.17787418655097614,
      "grad_norm": 0.40527230501174927,
      "learning_rate": 8.24295010845987e-06,
      "loss": 0.8756,
      "step": 82
    },
    {
      "epoch": 0.18004338394793926,
      "grad_norm": 0.40289977192878723,
      "learning_rate": 8.22125813449024e-06,
      "loss": 0.8412,
      "step": 83
    },
    {
      "epoch": 0.1822125813449024,
      "grad_norm": 0.3807680606842041,
      "learning_rate": 8.199566160520608e-06,
      "loss": 0.8241,
      "step": 84
    },
    {
      "epoch": 0.1843817787418655,
      "grad_norm": 0.4011147618293762,
      "learning_rate": 8.177874186550977e-06,
      "loss": 0.8308,
      "step": 85
    },
    {
      "epoch": 0.18655097613882862,
      "grad_norm": 0.6492615938186646,
      "learning_rate": 8.156182212581345e-06,
      "loss": 0.8091,
      "step": 86
    },
    {
      "epoch": 0.18872017353579176,
      "grad_norm": 0.45752060413360596,
      "learning_rate": 8.134490238611715e-06,
      "loss": 0.86,
      "step": 87
    },
    {
      "epoch": 0.19088937093275488,
      "grad_norm": 0.39894232153892517,
      "learning_rate": 8.112798264642084e-06,
      "loss": 0.8087,
      "step": 88
    },
    {
      "epoch": 0.19305856832971802,
      "grad_norm": 0.4044681787490845,
      "learning_rate": 8.091106290672452e-06,
      "loss": 0.8287,
      "step": 89
    },
    {
      "epoch": 0.19522776572668113,
      "grad_norm": 0.5479378700256348,
      "learning_rate": 8.06941431670282e-06,
      "loss": 0.7788,
      "step": 90
    },
    {
      "epoch": 0.19739696312364424,
      "grad_norm": 0.5609095096588135,
      "learning_rate": 8.04772234273319e-06,
      "loss": 0.8196,
      "step": 91
    },
    {
      "epoch": 0.19956616052060738,
      "grad_norm": 0.3801177144050598,
      "learning_rate": 8.026030368763557e-06,
      "loss": 0.7683,
      "step": 92
    },
    {
      "epoch": 0.2017353579175705,
      "grad_norm": 0.38417235016822815,
      "learning_rate": 8.004338394793926e-06,
      "loss": 0.7955,
      "step": 93
    },
    {
      "epoch": 0.2039045553145336,
      "grad_norm": 0.6401964426040649,
      "learning_rate": 7.982646420824296e-06,
      "loss": 0.7861,
      "step": 94
    },
    {
      "epoch": 0.20607375271149675,
      "grad_norm": 0.4698239266872406,
      "learning_rate": 7.960954446854664e-06,
      "loss": 0.7872,
      "step": 95
    },
    {
      "epoch": 0.20824295010845986,
      "grad_norm": 0.45705363154411316,
      "learning_rate": 7.939262472885033e-06,
      "loss": 0.7865,
      "step": 96
    },
    {
      "epoch": 0.210412147505423,
      "grad_norm": 0.4010639488697052,
      "learning_rate": 7.917570498915403e-06,
      "loss": 0.7641,
      "step": 97
    },
    {
      "epoch": 0.21258134490238612,
      "grad_norm": 0.40273797512054443,
      "learning_rate": 7.89587852494577e-06,
      "loss": 0.7719,
      "step": 98
    },
    {
      "epoch": 0.21475054229934923,
      "grad_norm": 0.4527304470539093,
      "learning_rate": 7.87418655097614e-06,
      "loss": 0.8008,
      "step": 99
    },
    {
      "epoch": 0.21691973969631237,
      "grad_norm": 0.432674765586853,
      "learning_rate": 7.85249457700651e-06,
      "loss": 0.7426,
      "step": 100
    },
    {
      "epoch": 0.21908893709327548,
      "grad_norm": 0.4265141785144806,
      "learning_rate": 7.830802603036877e-06,
      "loss": 0.7713,
      "step": 101
    },
    {
      "epoch": 0.22125813449023862,
      "grad_norm": 0.4253065288066864,
      "learning_rate": 7.809110629067247e-06,
      "loss": 0.7484,
      "step": 102
    },
    {
      "epoch": 0.22342733188720174,
      "grad_norm": 0.44442278146743774,
      "learning_rate": 7.787418655097614e-06,
      "loss": 0.763,
      "step": 103
    },
    {
      "epoch": 0.22559652928416485,
      "grad_norm": 0.47105884552001953,
      "learning_rate": 7.765726681127982e-06,
      "loss": 0.7703,
      "step": 104
    },
    {
      "epoch": 0.227765726681128,
      "grad_norm": 0.41451483964920044,
      "learning_rate": 7.744034707158352e-06,
      "loss": 0.7558,
      "step": 105
    },
    {
      "epoch": 0.2299349240780911,
      "grad_norm": 0.4369511604309082,
      "learning_rate": 7.722342733188721e-06,
      "loss": 0.7592,
      "step": 106
    },
    {
      "epoch": 0.23210412147505424,
      "grad_norm": 0.4827646017074585,
      "learning_rate": 7.700650759219089e-06,
      "loss": 0.753,
      "step": 107
    },
    {
      "epoch": 0.23427331887201736,
      "grad_norm": 0.39963749051094055,
      "learning_rate": 7.678958785249458e-06,
      "loss": 0.7333,
      "step": 108
    },
    {
      "epoch": 0.23644251626898047,
      "grad_norm": 0.4362649917602539,
      "learning_rate": 7.657266811279828e-06,
      "loss": 0.7392,
      "step": 109
    },
    {
      "epoch": 0.2386117136659436,
      "grad_norm": 0.4753969609737396,
      "learning_rate": 7.635574837310196e-06,
      "loss": 0.7411,
      "step": 110
    },
    {
      "epoch": 0.24078091106290672,
      "grad_norm": 0.3958611488342285,
      "learning_rate": 7.613882863340565e-06,
      "loss": 0.7125,
      "step": 111
    },
    {
      "epoch": 0.24295010845986983,
      "grad_norm": 0.4026221036911011,
      "learning_rate": 7.592190889370933e-06,
      "loss": 0.7434,
      "step": 112
    },
    {
      "epoch": 0.24511930585683298,
      "grad_norm": 0.672747790813446,
      "learning_rate": 7.570498915401302e-06,
      "loss": 0.702,
      "step": 113
    },
    {
      "epoch": 0.2472885032537961,
      "grad_norm": 0.39787012338638306,
      "learning_rate": 7.548806941431671e-06,
      "loss": 0.7329,
      "step": 114
    },
    {
      "epoch": 0.24945770065075923,
      "grad_norm": 0.4380010664463043,
      "learning_rate": 7.527114967462039e-06,
      "loss": 0.7281,
      "step": 115
    },
    {
      "epoch": 0.25162689804772237,
      "grad_norm": 0.41002675890922546,
      "learning_rate": 7.505422993492408e-06,
      "loss": 0.7229,
      "step": 116
    },
    {
      "epoch": 0.25379609544468545,
      "grad_norm": 0.41404595971107483,
      "learning_rate": 7.483731019522778e-06,
      "loss": 0.7177,
      "step": 117
    },
    {
      "epoch": 0.2559652928416486,
      "grad_norm": 0.4458067715167999,
      "learning_rate": 7.4620390455531455e-06,
      "loss": 0.7226,
      "step": 118
    },
    {
      "epoch": 0.25813449023861174,
      "grad_norm": 0.3937883973121643,
      "learning_rate": 7.440347071583515e-06,
      "loss": 0.7109,
      "step": 119
    },
    {
      "epoch": 0.2603036876355748,
      "grad_norm": 0.42459914088249207,
      "learning_rate": 7.418655097613884e-06,
      "loss": 0.7292,
      "step": 120
    },
    {
      "epoch": 0.26247288503253796,
      "grad_norm": 0.47104501724243164,
      "learning_rate": 7.396963123644252e-06,
      "loss": 0.7018,
      "step": 121
    },
    {
      "epoch": 0.2646420824295011,
      "grad_norm": 0.413194477558136,
      "learning_rate": 7.375271149674621e-06,
      "loss": 0.6862,
      "step": 122
    },
    {
      "epoch": 0.2668112798264642,
      "grad_norm": 0.44947072863578796,
      "learning_rate": 7.35357917570499e-06,
      "loss": 0.7134,
      "step": 123
    },
    {
      "epoch": 0.26898047722342733,
      "grad_norm": 0.4308297038078308,
      "learning_rate": 7.331887201735358e-06,
      "loss": 0.7214,
      "step": 124
    },
    {
      "epoch": 0.27114967462039047,
      "grad_norm": 0.48938828706741333,
      "learning_rate": 7.310195227765728e-06,
      "loss": 0.7065,
      "step": 125
    },
    {
      "epoch": 0.27331887201735355,
      "grad_norm": 0.4659271538257599,
      "learning_rate": 7.288503253796096e-06,
      "loss": 0.717,
      "step": 126
    },
    {
      "epoch": 0.2754880694143167,
      "grad_norm": 0.43395182490348816,
      "learning_rate": 7.266811279826465e-06,
      "loss": 0.7137,
      "step": 127
    },
    {
      "epoch": 0.27765726681127983,
      "grad_norm": 0.4359440505504608,
      "learning_rate": 7.2451193058568335e-06,
      "loss": 0.6858,
      "step": 128
    },
    {
      "epoch": 0.279826464208243,
      "grad_norm": 0.6049463152885437,
      "learning_rate": 7.223427331887203e-06,
      "loss": 0.7262,
      "step": 129
    },
    {
      "epoch": 0.28199566160520606,
      "grad_norm": 0.9202204942703247,
      "learning_rate": 7.201735357917571e-06,
      "loss": 0.7012,
      "step": 130
    },
    {
      "epoch": 0.2841648590021692,
      "grad_norm": 0.4885154664516449,
      "learning_rate": 7.18004338394794e-06,
      "loss": 0.7016,
      "step": 131
    },
    {
      "epoch": 0.28633405639913234,
      "grad_norm": 0.5771850943565369,
      "learning_rate": 7.158351409978309e-06,
      "loss": 0.7069,
      "step": 132
    },
    {
      "epoch": 0.2885032537960954,
      "grad_norm": 0.4814640283584595,
      "learning_rate": 7.1366594360086775e-06,
      "loss": 0.7129,
      "step": 133
    },
    {
      "epoch": 0.29067245119305857,
      "grad_norm": 0.4861976206302643,
      "learning_rate": 7.114967462039046e-06,
      "loss": 0.6926,
      "step": 134
    },
    {
      "epoch": 0.2928416485900217,
      "grad_norm": 0.43924835324287415,
      "learning_rate": 7.093275488069415e-06,
      "loss": 0.6729,
      "step": 135
    },
    {
      "epoch": 0.2950108459869848,
      "grad_norm": 0.44776543974876404,
      "learning_rate": 7.071583514099783e-06,
      "loss": 0.69,
      "step": 136
    },
    {
      "epoch": 0.29718004338394793,
      "grad_norm": 0.48698529601097107,
      "learning_rate": 7.049891540130153e-06,
      "loss": 0.6707,
      "step": 137
    },
    {
      "epoch": 0.2993492407809111,
      "grad_norm": 0.49502941966056824,
      "learning_rate": 7.028199566160521e-06,
      "loss": 0.6705,
      "step": 138
    },
    {
      "epoch": 0.30151843817787416,
      "grad_norm": 0.43487516045570374,
      "learning_rate": 7.00650759219089e-06,
      "loss": 0.6468,
      "step": 139
    },
    {
      "epoch": 0.3036876355748373,
      "grad_norm": 0.45082563161849976,
      "learning_rate": 6.984815618221259e-06,
      "loss": 0.6882,
      "step": 140
    },
    {
      "epoch": 0.30585683297180044,
      "grad_norm": 0.5704577565193176,
      "learning_rate": 6.963123644251627e-06,
      "loss": 0.6546,
      "step": 141
    },
    {
      "epoch": 0.3080260303687636,
      "grad_norm": 0.5590646266937256,
      "learning_rate": 6.941431670281996e-06,
      "loss": 0.691,
      "step": 142
    },
    {
      "epoch": 0.31019522776572667,
      "grad_norm": 0.4791410565376282,
      "learning_rate": 6.9197396963123654e-06,
      "loss": 0.6327,
      "step": 143
    },
    {
      "epoch": 0.3123644251626898,
      "grad_norm": 0.435273140668869,
      "learning_rate": 6.898047722342733e-06,
      "loss": 0.675,
      "step": 144
    },
    {
      "epoch": 0.31453362255965295,
      "grad_norm": 0.5272928476333618,
      "learning_rate": 6.876355748373103e-06,
      "loss": 0.6683,
      "step": 145
    },
    {
      "epoch": 0.31670281995661603,
      "grad_norm": 0.5004708766937256,
      "learning_rate": 6.854663774403471e-06,
      "loss": 0.6518,
      "step": 146
    },
    {
      "epoch": 0.3188720173535792,
      "grad_norm": 0.45495614409446716,
      "learning_rate": 6.83297180043384e-06,
      "loss": 0.6576,
      "step": 147
    },
    {
      "epoch": 0.3210412147505423,
      "grad_norm": 0.48196735978126526,
      "learning_rate": 6.8112798264642086e-06,
      "loss": 0.6389,
      "step": 148
    },
    {
      "epoch": 0.3232104121475054,
      "grad_norm": 0.4797559082508087,
      "learning_rate": 6.789587852494578e-06,
      "loss": 0.6677,
      "step": 149
    },
    {
      "epoch": 0.32537960954446854,
      "grad_norm": 0.4930654764175415,
      "learning_rate": 6.767895878524946e-06,
      "loss": 0.6437,
      "step": 150
    },
    {
      "epoch": 0.3275488069414317,
      "grad_norm": 0.45326152443885803,
      "learning_rate": 6.746203904555315e-06,
      "loss": 0.6616,
      "step": 151
    },
    {
      "epoch": 0.3297180043383948,
      "grad_norm": 0.48853611946105957,
      "learning_rate": 6.724511930585684e-06,
      "loss": 0.6473,
      "step": 152
    },
    {
      "epoch": 0.3318872017353579,
      "grad_norm": 0.4732632040977478,
      "learning_rate": 6.7028199566160526e-06,
      "loss": 0.6396,
      "step": 153
    },
    {
      "epoch": 0.33405639913232105,
      "grad_norm": 0.4409032464027405,
      "learning_rate": 6.681127982646421e-06,
      "loss": 0.6507,
      "step": 154
    },
    {
      "epoch": 0.3362255965292842,
      "grad_norm": 0.4853736162185669,
      "learning_rate": 6.659436008676791e-06,
      "loss": 0.6495,
      "step": 155
    },
    {
      "epoch": 0.3383947939262473,
      "grad_norm": 0.5413075685501099,
      "learning_rate": 6.6377440347071584e-06,
      "loss": 0.6719,
      "step": 156
    },
    {
      "epoch": 0.3405639913232104,
      "grad_norm": 0.4533112943172455,
      "learning_rate": 6.616052060737528e-06,
      "loss": 0.6358,
      "step": 157
    },
    {
      "epoch": 0.34273318872017355,
      "grad_norm": 0.5843056440353394,
      "learning_rate": 6.594360086767897e-06,
      "loss": 0.6724,
      "step": 158
    },
    {
      "epoch": 0.34490238611713664,
      "grad_norm": 0.48672884702682495,
      "learning_rate": 6.572668112798265e-06,
      "loss": 0.6449,
      "step": 159
    },
    {
      "epoch": 0.3470715835140998,
      "grad_norm": 0.6113792657852173,
      "learning_rate": 6.550976138828634e-06,
      "loss": 0.6297,
      "step": 160
    },
    {
      "epoch": 0.3492407809110629,
      "grad_norm": 0.5860591530799866,
      "learning_rate": 6.529284164859002e-06,
      "loss": 0.6617,
      "step": 161
    },
    {
      "epoch": 0.351409978308026,
      "grad_norm": 0.5638704299926758,
      "learning_rate": 6.507592190889371e-06,
      "loss": 0.6225,
      "step": 162
    },
    {
      "epoch": 0.35357917570498915,
      "grad_norm": 0.5348539352416992,
      "learning_rate": 6.4859002169197405e-06,
      "loss": 0.6351,
      "step": 163
    },
    {
      "epoch": 0.3557483731019523,
      "grad_norm": 0.4822864830493927,
      "learning_rate": 6.464208242950108e-06,
      "loss": 0.6315,
      "step": 164
    },
    {
      "epoch": 0.3579175704989154,
      "grad_norm": 0.5151847004890442,
      "learning_rate": 6.442516268980478e-06,
      "loss": 0.6311,
      "step": 165
    },
    {
      "epoch": 0.3600867678958785,
      "grad_norm": 0.4632371962070465,
      "learning_rate": 6.420824295010846e-06,
      "loss": 0.6515,
      "step": 166
    },
    {
      "epoch": 0.36225596529284165,
      "grad_norm": 0.4700559675693512,
      "learning_rate": 6.399132321041215e-06,
      "loss": 0.6218,
      "step": 167
    },
    {
      "epoch": 0.3644251626898048,
      "grad_norm": 0.5487787127494812,
      "learning_rate": 6.377440347071584e-06,
      "loss": 0.6485,
      "step": 168
    },
    {
      "epoch": 0.3665943600867679,
      "grad_norm": 0.5952288508415222,
      "learning_rate": 6.355748373101953e-06,
      "loss": 0.6263,
      "step": 169
    },
    {
      "epoch": 0.368763557483731,
      "grad_norm": 0.4692787230014801,
      "learning_rate": 6.334056399132321e-06,
      "loss": 0.637,
      "step": 170
    },
    {
      "epoch": 0.37093275488069416,
      "grad_norm": 0.48934659361839294,
      "learning_rate": 6.31236442516269e-06,
      "loss": 0.6177,
      "step": 171
    },
    {
      "epoch": 0.37310195227765725,
      "grad_norm": 0.6284322142601013,
      "learning_rate": 6.29067245119306e-06,
      "loss": 0.6533,
      "step": 172
    },
    {
      "epoch": 0.3752711496746204,
      "grad_norm": 0.583229660987854,
      "learning_rate": 6.268980477223428e-06,
      "loss": 0.6468,
      "step": 173
    },
    {
      "epoch": 0.3774403470715835,
      "grad_norm": 0.5373253226280212,
      "learning_rate": 6.247288503253796e-06,
      "loss": 0.6219,
      "step": 174
    },
    {
      "epoch": 0.3796095444685466,
      "grad_norm": 0.5065279603004456,
      "learning_rate": 6.225596529284166e-06,
      "loss": 0.609,
      "step": 175
    },
    {
      "epoch": 0.38177874186550975,
      "grad_norm": 0.4818476438522339,
      "learning_rate": 6.2039045553145335e-06,
      "loss": 0.6203,
      "step": 176
    },
    {
      "epoch": 0.3839479392624729,
      "grad_norm": 0.47112759947776794,
      "learning_rate": 6.182212581344903e-06,
      "loss": 0.6315,
      "step": 177
    },
    {
      "epoch": 0.38611713665943603,
      "grad_norm": 0.5504185557365417,
      "learning_rate": 6.1605206073752725e-06,
      "loss": 0.6318,
      "step": 178
    },
    {
      "epoch": 0.3882863340563991,
      "grad_norm": 0.5567037463188171,
      "learning_rate": 6.13882863340564e-06,
      "loss": 0.6104,
      "step": 179
    },
    {
      "epoch": 0.39045553145336226,
      "grad_norm": 0.4654327630996704,
      "learning_rate": 6.11713665943601e-06,
      "loss": 0.619,
      "step": 180
    },
    {
      "epoch": 0.3926247288503254,
      "grad_norm": 0.47351258993148804,
      "learning_rate": 6.095444685466378e-06,
      "loss": 0.6206,
      "step": 181
    },
    {
      "epoch": 0.3947939262472885,
      "grad_norm": 0.47146716713905334,
      "learning_rate": 6.073752711496746e-06,
      "loss": 0.6111,
      "step": 182
    },
    {
      "epoch": 0.3969631236442516,
      "grad_norm": 0.49498575925827026,
      "learning_rate": 6.052060737527116e-06,
      "loss": 0.6179,
      "step": 183
    },
    {
      "epoch": 0.39913232104121477,
      "grad_norm": 0.5393364429473877,
      "learning_rate": 6.030368763557483e-06,
      "loss": 0.6331,
      "step": 184
    },
    {
      "epoch": 0.40130151843817785,
      "grad_norm": 0.5126506686210632,
      "learning_rate": 6.008676789587853e-06,
      "loss": 0.6143,
      "step": 185
    },
    {
      "epoch": 0.403470715835141,
      "grad_norm": 0.6334784626960754,
      "learning_rate": 5.986984815618222e-06,
      "loss": 0.6322,
      "step": 186
    },
    {
      "epoch": 0.40563991323210413,
      "grad_norm": 0.5737828016281128,
      "learning_rate": 5.96529284164859e-06,
      "loss": 0.6206,
      "step": 187
    },
    {
      "epoch": 0.4078091106290672,
      "grad_norm": 0.5050855875015259,
      "learning_rate": 5.943600867678959e-06,
      "loss": 0.6262,
      "step": 188
    },
    {
      "epoch": 0.40997830802603036,
      "grad_norm": 0.4655757546424866,
      "learning_rate": 5.921908893709328e-06,
      "loss": 0.6062,
      "step": 189
    },
    {
      "epoch": 0.4121475054229935,
      "grad_norm": 0.7657625675201416,
      "learning_rate": 5.900216919739696e-06,
      "loss": 0.6278,
      "step": 190
    },
    {
      "epoch": 0.41431670281995664,
      "grad_norm": 0.4740203619003296,
      "learning_rate": 5.8785249457700655e-06,
      "loss": 0.6125,
      "step": 191
    },
    {
      "epoch": 0.4164859002169197,
      "grad_norm": 0.47104138135910034,
      "learning_rate": 5.856832971800435e-06,
      "loss": 0.6181,
      "step": 192
    },
    {
      "epoch": 0.41865509761388287,
      "grad_norm": 0.6184363961219788,
      "learning_rate": 5.835140997830803e-06,
      "loss": 0.6227,
      "step": 193
    },
    {
      "epoch": 0.420824295010846,
      "grad_norm": 0.6750501990318298,
      "learning_rate": 5.813449023861172e-06,
      "loss": 0.6407,
      "step": 194
    },
    {
      "epoch": 0.4229934924078091,
      "grad_norm": 0.6018952131271362,
      "learning_rate": 5.791757049891541e-06,
      "loss": 0.5914,
      "step": 195
    },
    {
      "epoch": 0.42516268980477223,
      "grad_norm": 0.5619893670082092,
      "learning_rate": 5.770065075921909e-06,
      "loss": 0.6222,
      "step": 196
    },
    {
      "epoch": 0.42733188720173537,
      "grad_norm": 0.4996378719806671,
      "learning_rate": 5.748373101952278e-06,
      "loss": 0.6076,
      "step": 197
    },
    {
      "epoch": 0.42950108459869846,
      "grad_norm": 0.47961318492889404,
      "learning_rate": 5.7266811279826476e-06,
      "loss": 0.6102,
      "step": 198
    },
    {
      "epoch": 0.4316702819956616,
      "grad_norm": 0.6786702871322632,
      "learning_rate": 5.704989154013015e-06,
      "loss": 0.6353,
      "step": 199
    },
    {
      "epoch": 0.43383947939262474,
      "grad_norm": 0.6170738935470581,
      "learning_rate": 5.683297180043385e-06,
      "loss": 0.6038,
      "step": 200
    },
    {
      "epoch": 0.4360086767895879,
      "grad_norm": 0.5487593412399292,
      "learning_rate": 5.6616052060737535e-06,
      "loss": 0.5929,
      "step": 201
    },
    {
      "epoch": 0.43817787418655096,
      "grad_norm": 0.593626856803894,
      "learning_rate": 5.639913232104122e-06,
      "loss": 0.5977,
      "step": 202
    },
    {
      "epoch": 0.4403470715835141,
      "grad_norm": 0.7302510738372803,
      "learning_rate": 5.618221258134491e-06,
      "loss": 0.5996,
      "step": 203
    },
    {
      "epoch": 0.44251626898047725,
      "grad_norm": 0.5379276275634766,
      "learning_rate": 5.59652928416486e-06,
      "loss": 0.6119,
      "step": 204
    },
    {
      "epoch": 0.44468546637744033,
      "grad_norm": 0.6861233711242676,
      "learning_rate": 5.574837310195228e-06,
      "loss": 0.6119,
      "step": 205
    },
    {
      "epoch": 0.44685466377440347,
      "grad_norm": 0.5536229014396667,
      "learning_rate": 5.5531453362255974e-06,
      "loss": 0.5926,
      "step": 206
    },
    {
      "epoch": 0.4490238611713666,
      "grad_norm": 0.5160278677940369,
      "learning_rate": 5.531453362255966e-06,
      "loss": 0.6013,
      "step": 207
    },
    {
      "epoch": 0.4511930585683297,
      "grad_norm": 0.5844294428825378,
      "learning_rate": 5.509761388286335e-06,
      "loss": 0.6044,
      "step": 208
    },
    {
      "epoch": 0.45336225596529284,
      "grad_norm": 0.6622424721717834,
      "learning_rate": 5.488069414316703e-06,
      "loss": 0.6005,
      "step": 209
    },
    {
      "epoch": 0.455531453362256,
      "grad_norm": 0.5025097727775574,
      "learning_rate": 5.466377440347071e-06,
      "loss": 0.5802,
      "step": 210
    },
    {
      "epoch": 0.45770065075921906,
      "grad_norm": 0.5540115237236023,
      "learning_rate": 5.444685466377441e-06,
      "loss": 0.5932,
      "step": 211
    },
    {
      "epoch": 0.4598698481561822,
      "grad_norm": 0.5804458856582642,
      "learning_rate": 5.42299349240781e-06,
      "loss": 0.5987,
      "step": 212
    },
    {
      "epoch": 0.46203904555314534,
      "grad_norm": 0.5122389793395996,
      "learning_rate": 5.401301518438178e-06,
      "loss": 0.5916,
      "step": 213
    },
    {
      "epoch": 0.4642082429501085,
      "grad_norm": 0.5717864632606506,
      "learning_rate": 5.379609544468547e-06,
      "loss": 0.5948,
      "step": 214
    },
    {
      "epoch": 0.46637744034707157,
      "grad_norm": 0.6112474799156189,
      "learning_rate": 5.357917570498916e-06,
      "loss": 0.6016,
      "step": 215
    },
    {
      "epoch": 0.4685466377440347,
      "grad_norm": 0.58445805311203,
      "learning_rate": 5.3362255965292846e-06,
      "loss": 0.5943,
      "step": 216
    },
    {
      "epoch": 0.47071583514099785,
      "grad_norm": 0.5126850605010986,
      "learning_rate": 5.314533622559653e-06,
      "loss": 0.6058,
      "step": 217
    },
    {
      "epoch": 0.47288503253796094,
      "grad_norm": 0.498695969581604,
      "learning_rate": 5.292841648590023e-06,
      "loss": 0.5867,
      "step": 218
    },
    {
      "epoch": 0.4750542299349241,
      "grad_norm": 0.5060663223266602,
      "learning_rate": 5.2711496746203904e-06,
      "loss": 0.6161,
      "step": 219
    },
    {
      "epoch": 0.4772234273318872,
      "grad_norm": 0.49926185607910156,
      "learning_rate": 5.24945770065076e-06,
      "loss": 0.594,
      "step": 220
    },
    {
      "epoch": 0.4793926247288503,
      "grad_norm": 0.7231149673461914,
      "learning_rate": 5.2277657266811285e-06,
      "loss": 0.6506,
      "step": 221
    },
    {
      "epoch": 0.48156182212581344,
      "grad_norm": 0.501641571521759,
      "learning_rate": 5.206073752711497e-06,
      "loss": 0.6031,
      "step": 222
    },
    {
      "epoch": 0.4837310195227766,
      "grad_norm": 0.5351401567459106,
      "learning_rate": 5.184381778741866e-06,
      "loss": 0.5957,
      "step": 223
    },
    {
      "epoch": 0.48590021691973967,
      "grad_norm": 0.4861268997192383,
      "learning_rate": 5.162689804772235e-06,
      "loss": 0.6109,
      "step": 224
    },
    {
      "epoch": 0.4880694143167028,
      "grad_norm": 0.4920121133327484,
      "learning_rate": 5.140997830802603e-06,
      "loss": 0.5931,
      "step": 225
    },
    {
      "epoch": 0.49023861171366595,
      "grad_norm": 0.5210174322128296,
      "learning_rate": 5.1193058568329725e-06,
      "loss": 0.6122,
      "step": 226
    },
    {
      "epoch": 0.4924078091106291,
      "grad_norm": 0.7002459764480591,
      "learning_rate": 5.097613882863341e-06,
      "loss": 0.603,
      "step": 227
    },
    {
      "epoch": 0.4945770065075922,
      "grad_norm": 0.4967700242996216,
      "learning_rate": 5.07592190889371e-06,
      "loss": 0.5675,
      "step": 228
    },
    {
      "epoch": 0.4967462039045553,
      "grad_norm": 0.49998047947883606,
      "learning_rate": 5.054229934924078e-06,
      "loss": 0.5885,
      "step": 229
    },
    {
      "epoch": 0.49891540130151846,
      "grad_norm": 0.46569979190826416,
      "learning_rate": 5.032537960954448e-06,
      "loss": 0.588,
      "step": 230
    },
    {
      "epoch": 0.5010845986984815,
      "grad_norm": 0.518514096736908,
      "learning_rate": 5.010845986984816e-06,
      "loss": 0.5795,
      "step": 231
    },
    {
      "epoch": 0.5032537960954447,
      "grad_norm": 0.5044274926185608,
      "learning_rate": 4.989154013015185e-06,
      "loss": 0.5777,
      "step": 232
    },
    {
      "epoch": 0.5054229934924078,
      "grad_norm": 0.5333325862884521,
      "learning_rate": 4.967462039045554e-06,
      "loss": 0.5902,
      "step": 233
    },
    {
      "epoch": 0.5075921908893709,
      "grad_norm": 0.666131854057312,
      "learning_rate": 4.945770065075922e-06,
      "loss": 0.5735,
      "step": 234
    },
    {
      "epoch": 0.5097613882863341,
      "grad_norm": 0.5070677995681763,
      "learning_rate": 4.924078091106291e-06,
      "loss": 0.5677,
      "step": 235
    },
    {
      "epoch": 0.5119305856832972,
      "grad_norm": 0.5434484481811523,
      "learning_rate": 4.90238611713666e-06,
      "loss": 0.5984,
      "step": 236
    },
    {
      "epoch": 0.5140997830802603,
      "grad_norm": 0.5604612231254578,
      "learning_rate": 4.880694143167028e-06,
      "loss": 0.5915,
      "step": 237
    },
    {
      "epoch": 0.5162689804772235,
      "grad_norm": 0.5144227147102356,
      "learning_rate": 4.859002169197397e-06,
      "loss": 0.5765,
      "step": 238
    },
    {
      "epoch": 0.5184381778741866,
      "grad_norm": 0.4857323169708252,
      "learning_rate": 4.837310195227766e-06,
      "loss": 0.5722,
      "step": 239
    },
    {
      "epoch": 0.5206073752711496,
      "grad_norm": 0.4755032956600189,
      "learning_rate": 4.815618221258135e-06,
      "loss": 0.5749,
      "step": 240
    },
    {
      "epoch": 0.5227765726681128,
      "grad_norm": 0.587493896484375,
      "learning_rate": 4.793926247288504e-06,
      "loss": 0.5981,
      "step": 241
    },
    {
      "epoch": 0.5249457700650759,
      "grad_norm": 0.5679769515991211,
      "learning_rate": 4.772234273318872e-06,
      "loss": 0.5884,
      "step": 242
    },
    {
      "epoch": 0.527114967462039,
      "grad_norm": 0.498691588640213,
      "learning_rate": 4.750542299349241e-06,
      "loss": 0.5809,
      "step": 243
    },
    {
      "epoch": 0.5292841648590022,
      "grad_norm": 0.675168514251709,
      "learning_rate": 4.7288503253796095e-06,
      "loss": 0.5874,
      "step": 244
    },
    {
      "epoch": 0.5314533622559653,
      "grad_norm": 0.5249897241592407,
      "learning_rate": 4.707158351409979e-06,
      "loss": 0.5878,
      "step": 245
    },
    {
      "epoch": 0.5336225596529284,
      "grad_norm": 0.48491179943084717,
      "learning_rate": 4.685466377440348e-06,
      "loss": 0.5743,
      "step": 246
    },
    {
      "epoch": 0.5357917570498916,
      "grad_norm": 0.5798050761222839,
      "learning_rate": 4.663774403470716e-06,
      "loss": 0.5682,
      "step": 247
    },
    {
      "epoch": 0.5379609544468547,
      "grad_norm": 0.5193563103675842,
      "learning_rate": 4.642082429501085e-06,
      "loss": 0.5743,
      "step": 248
    },
    {
      "epoch": 0.5401301518438177,
      "grad_norm": 0.5050124526023865,
      "learning_rate": 4.6203904555314535e-06,
      "loss": 0.5823,
      "step": 249
    },
    {
      "epoch": 0.5422993492407809,
      "grad_norm": 0.5084236264228821,
      "learning_rate": 4.598698481561822e-06,
      "loss": 0.5865,
      "step": 250
    },
    {
      "epoch": 0.544468546637744,
      "grad_norm": 0.5621696710586548,
      "learning_rate": 4.577006507592191e-06,
      "loss": 0.6023,
      "step": 251
    },
    {
      "epoch": 0.5466377440347071,
      "grad_norm": 0.4698992669582367,
      "learning_rate": 4.55531453362256e-06,
      "loss": 0.5835,
      "step": 252
    },
    {
      "epoch": 0.5488069414316703,
      "grad_norm": 0.5616809129714966,
      "learning_rate": 4.533622559652929e-06,
      "loss": 0.559,
      "step": 253
    },
    {
      "epoch": 0.5509761388286334,
      "grad_norm": 0.5029488801956177,
      "learning_rate": 4.5119305856832975e-06,
      "loss": 0.5693,
      "step": 254
    },
    {
      "epoch": 0.5531453362255966,
      "grad_norm": 0.47164446115493774,
      "learning_rate": 4.490238611713666e-06,
      "loss": 0.564,
      "step": 255
    },
    {
      "epoch": 0.5553145336225597,
      "grad_norm": 0.49710217118263245,
      "learning_rate": 4.468546637744035e-06,
      "loss": 0.5826,
      "step": 256
    },
    {
      "epoch": 0.5574837310195228,
      "grad_norm": 0.48587456345558167,
      "learning_rate": 4.446854663774403e-06,
      "loss": 0.5816,
      "step": 257
    },
    {
      "epoch": 0.559652928416486,
      "grad_norm": 0.5498842597007751,
      "learning_rate": 4.425162689804773e-06,
      "loss": 0.5736,
      "step": 258
    },
    {
      "epoch": 0.561822125813449,
      "grad_norm": 0.4747694432735443,
      "learning_rate": 4.4034707158351415e-06,
      "loss": 0.574,
      "step": 259
    },
    {
      "epoch": 0.5639913232104121,
      "grad_norm": 0.9344505667686462,
      "learning_rate": 4.38177874186551e-06,
      "loss": 0.5957,
      "step": 260
    },
    {
      "epoch": 0.5661605206073753,
      "grad_norm": 0.5000290274620056,
      "learning_rate": 4.360086767895879e-06,
      "loss": 0.5774,
      "step": 261
    },
    {
      "epoch": 0.5683297180043384,
      "grad_norm": 0.5080724358558655,
      "learning_rate": 4.338394793926247e-06,
      "loss": 0.5688,
      "step": 262
    },
    {
      "epoch": 0.5704989154013015,
      "grad_norm": 0.5054919123649597,
      "learning_rate": 4.316702819956616e-06,
      "loss": 0.5834,
      "step": 263
    },
    {
      "epoch": 0.5726681127982647,
      "grad_norm": 0.5046693682670593,
      "learning_rate": 4.295010845986985e-06,
      "loss": 0.5687,
      "step": 264
    },
    {
      "epoch": 0.5748373101952278,
      "grad_norm": 0.5170451998710632,
      "learning_rate": 4.273318872017354e-06,
      "loss": 0.5497,
      "step": 265
    },
    {
      "epoch": 0.5770065075921909,
      "grad_norm": 0.5297961235046387,
      "learning_rate": 4.251626898047723e-06,
      "loss": 0.6013,
      "step": 266
    },
    {
      "epoch": 0.579175704989154,
      "grad_norm": 0.4973108172416687,
      "learning_rate": 4.229934924078091e-06,
      "loss": 0.5544,
      "step": 267
    },
    {
      "epoch": 0.5813449023861171,
      "grad_norm": 0.6242494583129883,
      "learning_rate": 4.208242950108461e-06,
      "loss": 0.5917,
      "step": 268
    },
    {
      "epoch": 0.5835140997830802,
      "grad_norm": 0.5227379202842712,
      "learning_rate": 4.186550976138829e-06,
      "loss": 0.5709,
      "step": 269
    },
    {
      "epoch": 0.5856832971800434,
      "grad_norm": 0.527258574962616,
      "learning_rate": 4.164859002169197e-06,
      "loss": 0.5746,
      "step": 270
    },
    {
      "epoch": 0.5878524945770065,
      "grad_norm": 0.5078297257423401,
      "learning_rate": 4.143167028199567e-06,
      "loss": 0.5569,
      "step": 271
    },
    {
      "epoch": 0.5900216919739696,
      "grad_norm": 0.7073683738708496,
      "learning_rate": 4.121475054229935e-06,
      "loss": 0.5806,
      "step": 272
    },
    {
      "epoch": 0.5921908893709328,
      "grad_norm": 0.6542203426361084,
      "learning_rate": 4.099783080260304e-06,
      "loss": 0.5963,
      "step": 273
    },
    {
      "epoch": 0.5943600867678959,
      "grad_norm": 0.4986463189125061,
      "learning_rate": 4.078091106290673e-06,
      "loss": 0.5786,
      "step": 274
    },
    {
      "epoch": 0.596529284164859,
      "grad_norm": 0.4862741231918335,
      "learning_rate": 4.056399132321042e-06,
      "loss": 0.5508,
      "step": 275
    },
    {
      "epoch": 0.5986984815618221,
      "grad_norm": 0.5407434105873108,
      "learning_rate": 4.03470715835141e-06,
      "loss": 0.5586,
      "step": 276
    },
    {
      "epoch": 0.6008676789587852,
      "grad_norm": 0.513647198677063,
      "learning_rate": 4.0130151843817785e-06,
      "loss": 0.5653,
      "step": 277
    },
    {
      "epoch": 0.6030368763557483,
      "grad_norm": 0.4941583573818207,
      "learning_rate": 3.991323210412148e-06,
      "loss": 0.5724,
      "step": 278
    },
    {
      "epoch": 0.6052060737527115,
      "grad_norm": 0.5022282600402832,
      "learning_rate": 3.9696312364425166e-06,
      "loss": 0.5597,
      "step": 279
    },
    {
      "epoch": 0.6073752711496746,
      "grad_norm": 0.5344776511192322,
      "learning_rate": 3.947939262472885e-06,
      "loss": 0.576,
      "step": 280
    },
    {
      "epoch": 0.6095444685466378,
      "grad_norm": 0.8711801171302795,
      "learning_rate": 3.926247288503255e-06,
      "loss": 0.5653,
      "step": 281
    },
    {
      "epoch": 0.6117136659436009,
      "grad_norm": 0.6406508684158325,
      "learning_rate": 3.904555314533623e-06,
      "loss": 0.5612,
      "step": 282
    },
    {
      "epoch": 0.613882863340564,
      "grad_norm": 0.5289674997329712,
      "learning_rate": 3.882863340563991e-06,
      "loss": 0.5553,
      "step": 283
    },
    {
      "epoch": 0.6160520607375272,
      "grad_norm": 0.49930331110954285,
      "learning_rate": 3.8611713665943606e-06,
      "loss": 0.5661,
      "step": 284
    },
    {
      "epoch": 0.6182212581344902,
      "grad_norm": 0.49601882696151733,
      "learning_rate": 3.839479392624729e-06,
      "loss": 0.5613,
      "step": 285
    },
    {
      "epoch": 0.6203904555314533,
      "grad_norm": 0.5551280975341797,
      "learning_rate": 3.817787418655098e-06,
      "loss": 0.5692,
      "step": 286
    },
    {
      "epoch": 0.6225596529284165,
      "grad_norm": 0.5268258452415466,
      "learning_rate": 3.7960954446854664e-06,
      "loss": 0.5653,
      "step": 287
    },
    {
      "epoch": 0.6247288503253796,
      "grad_norm": 0.5202245712280273,
      "learning_rate": 3.7744034707158355e-06,
      "loss": 0.5623,
      "step": 288
    },
    {
      "epoch": 0.6268980477223427,
      "grad_norm": 0.7391339540481567,
      "learning_rate": 3.752711496746204e-06,
      "loss": 0.5485,
      "step": 289
    },
    {
      "epoch": 0.6290672451193059,
      "grad_norm": 0.7942306995391846,
      "learning_rate": 3.7310195227765728e-06,
      "loss": 0.5693,
      "step": 290
    },
    {
      "epoch": 0.631236442516269,
      "grad_norm": 0.5168442130088806,
      "learning_rate": 3.709327548806942e-06,
      "loss": 0.563,
      "step": 291
    },
    {
      "epoch": 0.6334056399132321,
      "grad_norm": 0.5481964349746704,
      "learning_rate": 3.6876355748373104e-06,
      "loss": 0.5694,
      "step": 292
    },
    {
      "epoch": 0.6355748373101953,
      "grad_norm": 0.7699911594390869,
      "learning_rate": 3.665943600867679e-06,
      "loss": 0.602,
      "step": 293
    },
    {
      "epoch": 0.6377440347071583,
      "grad_norm": 0.5012184381484985,
      "learning_rate": 3.644251626898048e-06,
      "loss": 0.5566,
      "step": 294
    },
    {
      "epoch": 0.6399132321041214,
      "grad_norm": 0.5150504112243652,
      "learning_rate": 3.6225596529284167e-06,
      "loss": 0.5575,
      "step": 295
    },
    {
      "epoch": 0.6420824295010846,
      "grad_norm": 0.4825197160243988,
      "learning_rate": 3.6008676789587854e-06,
      "loss": 0.578,
      "step": 296
    },
    {
      "epoch": 0.6442516268980477,
      "grad_norm": 0.5708716511726379,
      "learning_rate": 3.5791757049891544e-06,
      "loss": 0.5748,
      "step": 297
    },
    {
      "epoch": 0.6464208242950108,
      "grad_norm": 0.49083611369132996,
      "learning_rate": 3.557483731019523e-06,
      "loss": 0.5656,
      "step": 298
    },
    {
      "epoch": 0.648590021691974,
      "grad_norm": 0.5466710329055786,
      "learning_rate": 3.5357917570498917e-06,
      "loss": 0.5808,
      "step": 299
    },
    {
      "epoch": 0.6507592190889371,
      "grad_norm": 0.6208142042160034,
      "learning_rate": 3.5140997830802603e-06,
      "loss": 0.5665,
      "step": 300
    },
    {
      "epoch": 0.6529284164859002,
      "grad_norm": 0.4832100570201874,
      "learning_rate": 3.4924078091106293e-06,
      "loss": 0.572,
      "step": 301
    },
    {
      "epoch": 0.6550976138828634,
      "grad_norm": 0.5140089392662048,
      "learning_rate": 3.470715835140998e-06,
      "loss": 0.5565,
      "step": 302
    },
    {
      "epoch": 0.6572668112798264,
      "grad_norm": 0.5033791661262512,
      "learning_rate": 3.4490238611713666e-06,
      "loss": 0.5522,
      "step": 303
    },
    {
      "epoch": 0.6594360086767896,
      "grad_norm": 0.4576267600059509,
      "learning_rate": 3.4273318872017357e-06,
      "loss": 0.5511,
      "step": 304
    },
    {
      "epoch": 0.6616052060737527,
      "grad_norm": 0.5113222002983093,
      "learning_rate": 3.4056399132321043e-06,
      "loss": 0.5736,
      "step": 305
    },
    {
      "epoch": 0.6637744034707158,
      "grad_norm": 0.528408408164978,
      "learning_rate": 3.383947939262473e-06,
      "loss": 0.5679,
      "step": 306
    },
    {
      "epoch": 0.665943600867679,
      "grad_norm": 0.4761233925819397,
      "learning_rate": 3.362255965292842e-06,
      "loss": 0.535,
      "step": 307
    },
    {
      "epoch": 0.6681127982646421,
      "grad_norm": 0.4666268527507782,
      "learning_rate": 3.3405639913232106e-06,
      "loss": 0.5731,
      "step": 308
    },
    {
      "epoch": 0.6702819956616052,
      "grad_norm": 0.5518378615379333,
      "learning_rate": 3.3188720173535792e-06,
      "loss": 0.5679,
      "step": 309
    },
    {
      "epoch": 0.6724511930585684,
      "grad_norm": 0.4833706021308899,
      "learning_rate": 3.2971800433839487e-06,
      "loss": 0.5615,
      "step": 310
    },
    {
      "epoch": 0.6746203904555315,
      "grad_norm": 0.5420160889625549,
      "learning_rate": 3.275488069414317e-06,
      "loss": 0.5529,
      "step": 311
    },
    {
      "epoch": 0.6767895878524945,
      "grad_norm": 0.6026318669319153,
      "learning_rate": 3.2537960954446855e-06,
      "loss": 0.5478,
      "step": 312
    },
    {
      "epoch": 0.6789587852494577,
      "grad_norm": 0.5486297607421875,
      "learning_rate": 3.232104121475054e-06,
      "loss": 0.5619,
      "step": 313
    },
    {
      "epoch": 0.6811279826464208,
      "grad_norm": 0.49950018525123596,
      "learning_rate": 3.210412147505423e-06,
      "loss": 0.5539,
      "step": 314
    },
    {
      "epoch": 0.6832971800433839,
      "grad_norm": 0.4778575301170349,
      "learning_rate": 3.188720173535792e-06,
      "loss": 0.546,
      "step": 315
    },
    {
      "epoch": 0.6854663774403471,
      "grad_norm": 0.4857572317123413,
      "learning_rate": 3.1670281995661605e-06,
      "loss": 0.5502,
      "step": 316
    },
    {
      "epoch": 0.6876355748373102,
      "grad_norm": 0.5437939167022705,
      "learning_rate": 3.14533622559653e-06,
      "loss": 0.5448,
      "step": 317
    },
    {
      "epoch": 0.6898047722342733,
      "grad_norm": 0.6015022397041321,
      "learning_rate": 3.123644251626898e-06,
      "loss": 0.5562,
      "step": 318
    },
    {
      "epoch": 0.6919739696312365,
      "grad_norm": 0.5425083041191101,
      "learning_rate": 3.1019522776572668e-06,
      "loss": 0.5496,
      "step": 319
    },
    {
      "epoch": 0.6941431670281996,
      "grad_norm": 0.4970496594905853,
      "learning_rate": 3.0802603036876362e-06,
      "loss": 0.5577,
      "step": 320
    },
    {
      "epoch": 0.6963123644251626,
      "grad_norm": 0.5393553972244263,
      "learning_rate": 3.058568329718005e-06,
      "loss": 0.5665,
      "step": 321
    },
    {
      "epoch": 0.6984815618221258,
      "grad_norm": 0.5788228511810303,
      "learning_rate": 3.036876355748373e-06,
      "loss": 0.5525,
      "step": 322
    },
    {
      "epoch": 0.7006507592190889,
      "grad_norm": 0.582302987575531,
      "learning_rate": 3.0151843817787417e-06,
      "loss": 0.5337,
      "step": 323
    },
    {
      "epoch": 0.702819956616052,
      "grad_norm": 0.5455271601676941,
      "learning_rate": 2.993492407809111e-06,
      "loss": 0.567,
      "step": 324
    },
    {
      "epoch": 0.7049891540130152,
      "grad_norm": 0.5106908679008484,
      "learning_rate": 2.9718004338394794e-06,
      "loss": 0.5572,
      "step": 325
    },
    {
      "epoch": 0.7071583514099783,
      "grad_norm": 0.5830085277557373,
      "learning_rate": 2.950108459869848e-06,
      "loss": 0.5799,
      "step": 326
    },
    {
      "epoch": 0.7093275488069414,
      "grad_norm": 0.543565571308136,
      "learning_rate": 2.9284164859002175e-06,
      "loss": 0.5486,
      "step": 327
    },
    {
      "epoch": 0.7114967462039046,
      "grad_norm": 0.6363551616668701,
      "learning_rate": 2.906724511930586e-06,
      "loss": 0.5589,
      "step": 328
    },
    {
      "epoch": 0.7136659436008677,
      "grad_norm": 0.5492388606071472,
      "learning_rate": 2.8850325379609543e-06,
      "loss": 0.5439,
      "step": 329
    },
    {
      "epoch": 0.7158351409978309,
      "grad_norm": 0.48855558037757874,
      "learning_rate": 2.8633405639913238e-06,
      "loss": 0.5342,
      "step": 330
    },
    {
      "epoch": 0.7180043383947939,
      "grad_norm": 0.5338431000709534,
      "learning_rate": 2.8416485900216924e-06,
      "loss": 0.5313,
      "step": 331
    },
    {
      "epoch": 0.720173535791757,
      "grad_norm": 0.5648824572563171,
      "learning_rate": 2.819956616052061e-06,
      "loss": 0.5521,
      "step": 332
    },
    {
      "epoch": 0.7223427331887202,
      "grad_norm": 0.5221781730651855,
      "learning_rate": 2.79826464208243e-06,
      "loss": 0.5445,
      "step": 333
    },
    {
      "epoch": 0.7245119305856833,
      "grad_norm": 0.4889802634716034,
      "learning_rate": 2.7765726681127987e-06,
      "loss": 0.5588,
      "step": 334
    },
    {
      "epoch": 0.7266811279826464,
      "grad_norm": 0.4981290400028229,
      "learning_rate": 2.7548806941431673e-06,
      "loss": 0.5709,
      "step": 335
    },
    {
      "epoch": 0.7288503253796096,
      "grad_norm": 0.5129498243331909,
      "learning_rate": 2.7331887201735356e-06,
      "loss": 0.5592,
      "step": 336
    },
    {
      "epoch": 0.7310195227765727,
      "grad_norm": 0.5097779631614685,
      "learning_rate": 2.711496746203905e-06,
      "loss": 0.5608,
      "step": 337
    },
    {
      "epoch": 0.7331887201735358,
      "grad_norm": 0.49256396293640137,
      "learning_rate": 2.6898047722342737e-06,
      "loss": 0.547,
      "step": 338
    },
    {
      "epoch": 0.735357917570499,
      "grad_norm": 0.49484625458717346,
      "learning_rate": 2.6681127982646423e-06,
      "loss": 0.5605,
      "step": 339
    },
    {
      "epoch": 0.737527114967462,
      "grad_norm": 0.4865284264087677,
      "learning_rate": 2.6464208242950113e-06,
      "loss": 0.552,
| "step": 340 | |
| }, | |
| { | |
| "epoch": 0.7396963123644251, | |
| "grad_norm": 0.5712233185768127, | |
| "learning_rate": 2.62472885032538e-06, | |
| "loss": 0.5229, | |
| "step": 341 | |
| }, | |
| { | |
| "epoch": 0.7418655097613883, | |
| "grad_norm": 0.4782271981239319, | |
| "learning_rate": 2.6030368763557486e-06, | |
| "loss": 0.5409, | |
| "step": 342 | |
| }, | |
| { | |
| "epoch": 0.7440347071583514, | |
| "grad_norm": 0.48679468035697937, | |
| "learning_rate": 2.5813449023861176e-06, | |
| "loss": 0.5502, | |
| "step": 343 | |
| }, | |
| { | |
| "epoch": 0.7462039045553145, | |
| "grad_norm": 0.49135392904281616, | |
| "learning_rate": 2.5596529284164863e-06, | |
| "loss": 0.552, | |
| "step": 344 | |
| }, | |
| { | |
| "epoch": 0.7483731019522777, | |
| "grad_norm": 0.4905093014240265, | |
| "learning_rate": 2.537960954446855e-06, | |
| "loss": 0.5388, | |
| "step": 345 | |
| }, | |
| { | |
| "epoch": 0.7505422993492408, | |
| "grad_norm": 0.4936610758304596, | |
| "learning_rate": 2.516268980477224e-06, | |
| "loss": 0.5408, | |
| "step": 346 | |
| }, | |
| { | |
| "epoch": 0.7527114967462039, | |
| "grad_norm": 0.4581211805343628, | |
| "learning_rate": 2.4945770065075926e-06, | |
| "loss": 0.5438, | |
| "step": 347 | |
| }, | |
| { | |
| "epoch": 0.754880694143167, | |
| "grad_norm": 0.9855303764343262, | |
| "learning_rate": 2.472885032537961e-06, | |
| "loss": 0.5422, | |
| "step": 348 | |
| }, | |
| { | |
| "epoch": 0.7570498915401301, | |
| "grad_norm": 0.4950404465198517, | |
| "learning_rate": 2.45119305856833e-06, | |
| "loss": 0.5488, | |
| "step": 349 | |
| }, | |
| { | |
| "epoch": 0.7592190889370932, | |
| "grad_norm": 0.5436333417892456, | |
| "learning_rate": 2.4295010845986985e-06, | |
| "loss": 0.529, | |
| "step": 350 | |
| }, | |
| { | |
| "epoch": 0.7613882863340564, | |
| "grad_norm": 0.6923372149467468, | |
| "learning_rate": 2.4078091106290675e-06, | |
| "loss": 0.566, | |
| "step": 351 | |
| }, | |
| { | |
| "epoch": 0.7635574837310195, | |
| "grad_norm": 0.4927901327610016, | |
| "learning_rate": 2.386117136659436e-06, | |
| "loss": 0.5445, | |
| "step": 352 | |
| }, | |
| { | |
| "epoch": 0.7657266811279827, | |
| "grad_norm": 0.6745809316635132, | |
| "learning_rate": 2.3644251626898048e-06, | |
| "loss": 0.5644, | |
| "step": 353 | |
| }, | |
| { | |
| "epoch": 0.7678958785249458, | |
| "grad_norm": 0.5441464185714722, | |
| "learning_rate": 2.342733188720174e-06, | |
| "loss": 0.5481, | |
| "step": 354 | |
| }, | |
| { | |
| "epoch": 0.7700650759219089, | |
| "grad_norm": 0.5015382170677185, | |
| "learning_rate": 2.3210412147505424e-06, | |
| "loss": 0.5632, | |
| "step": 355 | |
| }, | |
| { | |
| "epoch": 0.7722342733188721, | |
| "grad_norm": 0.48177745938301086, | |
| "learning_rate": 2.299349240780911e-06, | |
| "loss": 0.5435, | |
| "step": 356 | |
| }, | |
| { | |
| "epoch": 0.7744034707158352, | |
| "grad_norm": 0.6477603316307068, | |
| "learning_rate": 2.27765726681128e-06, | |
| "loss": 0.5488, | |
| "step": 357 | |
| }, | |
| { | |
| "epoch": 0.7765726681127982, | |
| "grad_norm": 0.49363404512405396, | |
| "learning_rate": 2.2559652928416487e-06, | |
| "loss": 0.5359, | |
| "step": 358 | |
| }, | |
| { | |
| "epoch": 0.7787418655097614, | |
| "grad_norm": 0.49064791202545166, | |
| "learning_rate": 2.2342733188720174e-06, | |
| "loss": 0.5708, | |
| "step": 359 | |
| }, | |
| { | |
| "epoch": 0.7809110629067245, | |
| "grad_norm": 0.5506601929664612, | |
| "learning_rate": 2.2125813449023864e-06, | |
| "loss": 0.5638, | |
| "step": 360 | |
| }, | |
| { | |
| "epoch": 0.7830802603036876, | |
| "grad_norm": 0.49600258469581604, | |
| "learning_rate": 2.190889370932755e-06, | |
| "loss": 0.5578, | |
| "step": 361 | |
| }, | |
| { | |
| "epoch": 0.7852494577006508, | |
| "grad_norm": 0.6045665740966797, | |
| "learning_rate": 2.1691973969631237e-06, | |
| "loss": 0.5387, | |
| "step": 362 | |
| }, | |
| { | |
| "epoch": 0.7874186550976139, | |
| "grad_norm": 0.4785294234752655, | |
| "learning_rate": 2.1475054229934923e-06, | |
| "loss": 0.5461, | |
| "step": 363 | |
| }, | |
| { | |
| "epoch": 0.789587852494577, | |
| "grad_norm": 0.505807101726532, | |
| "learning_rate": 2.1258134490238614e-06, | |
| "loss": 0.5421, | |
| "step": 364 | |
| }, | |
| { | |
| "epoch": 0.7917570498915402, | |
| "grad_norm": 0.5481471419334412, | |
| "learning_rate": 2.1041214750542304e-06, | |
| "loss": 0.5359, | |
| "step": 365 | |
| }, | |
| { | |
| "epoch": 0.7939262472885033, | |
| "grad_norm": 0.5260441899299622, | |
| "learning_rate": 2.0824295010845986e-06, | |
| "loss": 0.5801, | |
| "step": 366 | |
| }, | |
| { | |
| "epoch": 0.7960954446854663, | |
| "grad_norm": 0.5684512257575989, | |
| "learning_rate": 2.0607375271149677e-06, | |
| "loss": 0.5317, | |
| "step": 367 | |
| }, | |
| { | |
| "epoch": 0.7982646420824295, | |
| "grad_norm": 0.5323051810264587, | |
| "learning_rate": 2.0390455531453363e-06, | |
| "loss": 0.5669, | |
| "step": 368 | |
| }, | |
| { | |
| "epoch": 0.8004338394793926, | |
| "grad_norm": 0.6611142158508301, | |
| "learning_rate": 2.017353579175705e-06, | |
| "loss": 0.5582, | |
| "step": 369 | |
| }, | |
| { | |
| "epoch": 0.8026030368763557, | |
| "grad_norm": 0.47256988286972046, | |
| "learning_rate": 1.995661605206074e-06, | |
| "loss": 0.5276, | |
| "step": 370 | |
| }, | |
| { | |
| "epoch": 0.8047722342733189, | |
| "grad_norm": 0.6162890195846558, | |
| "learning_rate": 1.9739696312364426e-06, | |
| "loss": 0.5517, | |
| "step": 371 | |
| }, | |
| { | |
| "epoch": 0.806941431670282, | |
| "grad_norm": 0.5171302556991577, | |
| "learning_rate": 1.9522776572668117e-06, | |
| "loss": 0.5467, | |
| "step": 372 | |
| }, | |
| { | |
| "epoch": 0.8091106290672451, | |
| "grad_norm": 0.5168177485466003, | |
| "learning_rate": 1.9305856832971803e-06, | |
| "loss": 0.537, | |
| "step": 373 | |
| }, | |
| { | |
| "epoch": 0.8112798264642083, | |
| "grad_norm": 0.49433693289756775, | |
| "learning_rate": 1.908893709327549e-06, | |
| "loss": 0.5441, | |
| "step": 374 | |
| }, | |
| { | |
| "epoch": 0.8134490238611713, | |
| "grad_norm": 0.6254696249961853, | |
| "learning_rate": 1.8872017353579177e-06, | |
| "loss": 0.568, | |
| "step": 375 | |
| }, | |
| { | |
| "epoch": 0.8156182212581344, | |
| "grad_norm": 0.4846223294734955, | |
| "learning_rate": 1.8655097613882864e-06, | |
| "loss": 0.5314, | |
| "step": 376 | |
| }, | |
| { | |
| "epoch": 0.8177874186550976, | |
| "grad_norm": 0.5695744156837463, | |
| "learning_rate": 1.8438177874186552e-06, | |
| "loss": 0.5556, | |
| "step": 377 | |
| }, | |
| { | |
| "epoch": 0.8199566160520607, | |
| "grad_norm": 0.5135949850082397, | |
| "learning_rate": 1.822125813449024e-06, | |
| "loss": 0.5513, | |
| "step": 378 | |
| }, | |
| { | |
| "epoch": 0.8221258134490239, | |
| "grad_norm": 0.5039001107215881, | |
| "learning_rate": 1.8004338394793927e-06, | |
| "loss": 0.5395, | |
| "step": 379 | |
| }, | |
| { | |
| "epoch": 0.824295010845987, | |
| "grad_norm": 0.5153144598007202, | |
| "learning_rate": 1.7787418655097615e-06, | |
| "loss": 0.5499, | |
| "step": 380 | |
| }, | |
| { | |
| "epoch": 0.8264642082429501, | |
| "grad_norm": 0.6042090058326721, | |
| "learning_rate": 1.7570498915401301e-06, | |
| "loss": 0.561, | |
| "step": 381 | |
| }, | |
| { | |
| "epoch": 0.8286334056399133, | |
| "grad_norm": 0.5201963186264038, | |
| "learning_rate": 1.735357917570499e-06, | |
| "loss": 0.5514, | |
| "step": 382 | |
| }, | |
| { | |
| "epoch": 0.8308026030368764, | |
| "grad_norm": 0.7226424217224121, | |
| "learning_rate": 1.7136659436008678e-06, | |
| "loss": 0.5481, | |
| "step": 383 | |
| }, | |
| { | |
| "epoch": 0.8329718004338394, | |
| "grad_norm": 0.49008411169052124, | |
| "learning_rate": 1.6919739696312365e-06, | |
| "loss": 0.5577, | |
| "step": 384 | |
| }, | |
| { | |
| "epoch": 0.8351409978308026, | |
| "grad_norm": 0.5384876728057861, | |
| "learning_rate": 1.6702819956616053e-06, | |
| "loss": 0.5744, | |
| "step": 385 | |
| }, | |
| { | |
| "epoch": 0.8373101952277657, | |
| "grad_norm": 0.6859086751937866, | |
| "learning_rate": 1.6485900216919743e-06, | |
| "loss": 0.5495, | |
| "step": 386 | |
| }, | |
| { | |
| "epoch": 0.8394793926247288, | |
| "grad_norm": 0.5295915007591248, | |
| "learning_rate": 1.6268980477223428e-06, | |
| "loss": 0.5555, | |
| "step": 387 | |
| }, | |
| { | |
| "epoch": 0.841648590021692, | |
| "grad_norm": 0.49512577056884766, | |
| "learning_rate": 1.6052060737527116e-06, | |
| "loss": 0.5336, | |
| "step": 388 | |
| }, | |
| { | |
| "epoch": 0.8438177874186551, | |
| "grad_norm": 0.5301417112350464, | |
| "learning_rate": 1.5835140997830802e-06, | |
| "loss": 0.5366, | |
| "step": 389 | |
| }, | |
| { | |
| "epoch": 0.8459869848156182, | |
| "grad_norm": 0.5286414623260498, | |
| "learning_rate": 1.561822125813449e-06, | |
| "loss": 0.5301, | |
| "step": 390 | |
| }, | |
| { | |
| "epoch": 0.8481561822125814, | |
| "grad_norm": 0.4798189699649811, | |
| "learning_rate": 1.5401301518438181e-06, | |
| "loss": 0.5529, | |
| "step": 391 | |
| }, | |
| { | |
| "epoch": 0.8503253796095445, | |
| "grad_norm": 0.5175362825393677, | |
| "learning_rate": 1.5184381778741865e-06, | |
| "loss": 0.5418, | |
| "step": 392 | |
| }, | |
| { | |
| "epoch": 0.8524945770065075, | |
| "grad_norm": 0.5054755210876465, | |
| "learning_rate": 1.4967462039045556e-06, | |
| "loss": 0.5471, | |
| "step": 393 | |
| }, | |
| { | |
| "epoch": 0.8546637744034707, | |
| "grad_norm": 0.48426923155784607, | |
| "learning_rate": 1.475054229934924e-06, | |
| "loss": 0.543, | |
| "step": 394 | |
| }, | |
| { | |
| "epoch": 0.8568329718004338, | |
| "grad_norm": 0.49632081389427185, | |
| "learning_rate": 1.453362255965293e-06, | |
| "loss": 0.5336, | |
| "step": 395 | |
| }, | |
| { | |
| "epoch": 0.8590021691973969, | |
| "grad_norm": 0.5040681958198547, | |
| "learning_rate": 1.4316702819956619e-06, | |
| "loss": 0.5334, | |
| "step": 396 | |
| }, | |
| { | |
| "epoch": 0.8611713665943601, | |
| "grad_norm": 0.5700734853744507, | |
| "learning_rate": 1.4099783080260305e-06, | |
| "loss": 0.5365, | |
| "step": 397 | |
| }, | |
| { | |
| "epoch": 0.8633405639913232, | |
| "grad_norm": 0.5232958197593689, | |
| "learning_rate": 1.3882863340563994e-06, | |
| "loss": 0.5372, | |
| "step": 398 | |
| }, | |
| { | |
| "epoch": 0.8655097613882863, | |
| "grad_norm": 0.5089038610458374, | |
| "learning_rate": 1.3665943600867678e-06, | |
| "loss": 0.5431, | |
| "step": 399 | |
| }, | |
| { | |
| "epoch": 0.8676789587852495, | |
| "grad_norm": 0.5343663692474365, | |
| "learning_rate": 1.3449023861171368e-06, | |
| "loss": 0.5505, | |
| "step": 400 | |
| }, | |
| { | |
| "epoch": 0.8698481561822126, | |
| "grad_norm": 0.7061499953269958, | |
| "learning_rate": 1.3232104121475057e-06, | |
| "loss": 0.5394, | |
| "step": 401 | |
| }, | |
| { | |
| "epoch": 0.8720173535791758, | |
| "grad_norm": 0.5129175186157227, | |
| "learning_rate": 1.3015184381778743e-06, | |
| "loss": 0.5515, | |
| "step": 402 | |
| }, | |
| { | |
| "epoch": 0.8741865509761388, | |
| "grad_norm": 0.6809354424476624, | |
| "learning_rate": 1.2798264642082431e-06, | |
| "loss": 0.5453, | |
| "step": 403 | |
| }, | |
| { | |
| "epoch": 0.8763557483731019, | |
| "grad_norm": 0.5095650553703308, | |
| "learning_rate": 1.258134490238612e-06, | |
| "loss": 0.5559, | |
| "step": 404 | |
| }, | |
| { | |
| "epoch": 0.8785249457700651, | |
| "grad_norm": 0.707577645778656, | |
| "learning_rate": 1.2364425162689806e-06, | |
| "loss": 0.5318, | |
| "step": 405 | |
| }, | |
| { | |
| "epoch": 0.8806941431670282, | |
| "grad_norm": 0.5874403119087219, | |
| "learning_rate": 1.2147505422993492e-06, | |
| "loss": 0.5481, | |
| "step": 406 | |
| }, | |
| { | |
| "epoch": 0.8828633405639913, | |
| "grad_norm": 0.4959114193916321, | |
| "learning_rate": 1.193058568329718e-06, | |
| "loss": 0.5394, | |
| "step": 407 | |
| }, | |
| { | |
| "epoch": 0.8850325379609545, | |
| "grad_norm": 0.5033082365989685, | |
| "learning_rate": 1.171366594360087e-06, | |
| "loss": 0.5644, | |
| "step": 408 | |
| }, | |
| { | |
| "epoch": 0.8872017353579176, | |
| "grad_norm": 0.5417726635932922, | |
| "learning_rate": 1.1496746203904555e-06, | |
| "loss": 0.5209, | |
| "step": 409 | |
| }, | |
| { | |
| "epoch": 0.8893709327548807, | |
| "grad_norm": 0.488228440284729, | |
| "learning_rate": 1.1279826464208244e-06, | |
| "loss": 0.5406, | |
| "step": 410 | |
| }, | |
| { | |
| "epoch": 0.8915401301518439, | |
| "grad_norm": 0.9152227640151978, | |
| "learning_rate": 1.1062906724511932e-06, | |
| "loss": 0.532, | |
| "step": 411 | |
| }, | |
| { | |
| "epoch": 0.8937093275488069, | |
| "grad_norm": 0.5113642811775208, | |
| "learning_rate": 1.0845986984815618e-06, | |
| "loss": 0.5476, | |
| "step": 412 | |
| }, | |
| { | |
| "epoch": 0.89587852494577, | |
| "grad_norm": 0.5312101244926453, | |
| "learning_rate": 1.0629067245119307e-06, | |
| "loss": 0.5368, | |
| "step": 413 | |
| }, | |
| { | |
| "epoch": 0.8980477223427332, | |
| "grad_norm": 0.589953601360321, | |
| "learning_rate": 1.0412147505422993e-06, | |
| "loss": 0.5381, | |
| "step": 414 | |
| }, | |
| { | |
| "epoch": 0.9002169197396963, | |
| "grad_norm": 0.5381547212600708, | |
| "learning_rate": 1.0195227765726681e-06, | |
| "loss": 0.5492, | |
| "step": 415 | |
| }, | |
| { | |
| "epoch": 0.9023861171366594, | |
| "grad_norm": 0.5344692468643188, | |
| "learning_rate": 9.97830802603037e-07, | |
| "loss": 0.5507, | |
| "step": 416 | |
| }, | |
| { | |
| "epoch": 0.9045553145336226, | |
| "grad_norm": 0.48827818036079407, | |
| "learning_rate": 9.761388286334058e-07, | |
| "loss": 0.5365, | |
| "step": 417 | |
| }, | |
| { | |
| "epoch": 0.9067245119305857, | |
| "grad_norm": 0.5503534078598022, | |
| "learning_rate": 9.544468546637745e-07, | |
| "loss": 0.531, | |
| "step": 418 | |
| }, | |
| { | |
| "epoch": 0.9088937093275488, | |
| "grad_norm": 0.5214491486549377, | |
| "learning_rate": 9.327548806941432e-07, | |
| "loss": 0.5492, | |
| "step": 419 | |
| }, | |
| { | |
| "epoch": 0.911062906724512, | |
| "grad_norm": 0.5597646236419678, | |
| "learning_rate": 9.11062906724512e-07, | |
| "loss": 0.5273, | |
| "step": 420 | |
| }, | |
| { | |
| "epoch": 0.913232104121475, | |
| "grad_norm": 0.4814266264438629, | |
| "learning_rate": 8.893709327548808e-07, | |
| "loss": 0.5467, | |
| "step": 421 | |
| }, | |
| { | |
| "epoch": 0.9154013015184381, | |
| "grad_norm": 0.5103883147239685, | |
| "learning_rate": 8.676789587852495e-07, | |
| "loss": 0.5514, | |
| "step": 422 | |
| }, | |
| { | |
| "epoch": 0.9175704989154013, | |
| "grad_norm": 0.4807824492454529, | |
| "learning_rate": 8.459869848156182e-07, | |
| "loss": 0.5377, | |
| "step": 423 | |
| }, | |
| { | |
| "epoch": 0.9197396963123644, | |
| "grad_norm": 0.49137192964553833, | |
| "learning_rate": 8.242950108459872e-07, | |
| "loss": 0.5384, | |
| "step": 424 | |
| }, | |
| { | |
| "epoch": 0.9219088937093276, | |
| "grad_norm": 0.47830575704574585, | |
| "learning_rate": 8.026030368763558e-07, | |
| "loss": 0.5458, | |
| "step": 425 | |
| }, | |
| { | |
| "epoch": 0.9240780911062907, | |
| "grad_norm": 0.5331500768661499, | |
| "learning_rate": 7.809110629067245e-07, | |
| "loss": 0.514, | |
| "step": 426 | |
| }, | |
| { | |
| "epoch": 0.9262472885032538, | |
| "grad_norm": 0.4643237888813019, | |
| "learning_rate": 7.592190889370933e-07, | |
| "loss": 0.5404, | |
| "step": 427 | |
| }, | |
| { | |
| "epoch": 0.928416485900217, | |
| "grad_norm": 0.49474000930786133, | |
| "learning_rate": 7.37527114967462e-07, | |
| "loss": 0.5687, | |
| "step": 428 | |
| }, | |
| { | |
| "epoch": 0.93058568329718, | |
| "grad_norm": 0.551901638507843, | |
| "learning_rate": 7.158351409978309e-07, | |
| "loss": 0.5485, | |
| "step": 429 | |
| }, | |
| { | |
| "epoch": 0.9327548806941431, | |
| "grad_norm": 0.5091038346290588, | |
| "learning_rate": 6.941431670281997e-07, | |
| "loss": 0.545, | |
| "step": 430 | |
| }, | |
| { | |
| "epoch": 0.9349240780911063, | |
| "grad_norm": 0.4894218444824219, | |
| "learning_rate": 6.724511930585684e-07, | |
| "loss": 0.5391, | |
| "step": 431 | |
| }, | |
| { | |
| "epoch": 0.9370932754880694, | |
| "grad_norm": 0.5032333731651306, | |
| "learning_rate": 6.507592190889371e-07, | |
| "loss": 0.5404, | |
| "step": 432 | |
| }, | |
| { | |
| "epoch": 0.9392624728850325, | |
| "grad_norm": 0.5365532040596008, | |
| "learning_rate": 6.29067245119306e-07, | |
| "loss": 0.524, | |
| "step": 433 | |
| }, | |
| { | |
| "epoch": 0.9414316702819957, | |
| "grad_norm": 0.47491738200187683, | |
| "learning_rate": 6.073752711496746e-07, | |
| "loss": 0.5451, | |
| "step": 434 | |
| }, | |
| { | |
| "epoch": 0.9436008676789588, | |
| "grad_norm": 0.4964900314807892, | |
| "learning_rate": 5.856832971800435e-07, | |
| "loss": 0.5465, | |
| "step": 435 | |
| }, | |
| { | |
| "epoch": 0.9457700650759219, | |
| "grad_norm": 0.4745555520057678, | |
| "learning_rate": 5.639913232104122e-07, | |
| "loss": 0.5432, | |
| "step": 436 | |
| }, | |
| { | |
| "epoch": 0.9479392624728851, | |
| "grad_norm": 0.6818556785583496, | |
| "learning_rate": 5.422993492407809e-07, | |
| "loss": 0.5382, | |
| "step": 437 | |
| }, | |
| { | |
| "epoch": 0.9501084598698482, | |
| "grad_norm": 0.5324418544769287, | |
| "learning_rate": 5.206073752711497e-07, | |
| "loss": 0.5733, | |
| "step": 438 | |
| }, | |
| { | |
| "epoch": 0.9522776572668112, | |
| "grad_norm": 0.5001943111419678, | |
| "learning_rate": 4.989154013015185e-07, | |
| "loss": 0.5331, | |
| "step": 439 | |
| }, | |
| { | |
| "epoch": 0.9544468546637744, | |
| "grad_norm": 0.5757445096969604, | |
| "learning_rate": 4.772234273318872e-07, | |
| "loss": 0.5383, | |
| "step": 440 | |
| }, | |
| { | |
| "epoch": 0.9566160520607375, | |
| "grad_norm": 0.6072180271148682, | |
| "learning_rate": 4.55531453362256e-07, | |
| "loss": 0.5509, | |
| "step": 441 | |
| }, | |
| { | |
| "epoch": 0.9587852494577006, | |
| "grad_norm": 0.5225549936294556, | |
| "learning_rate": 4.3383947939262475e-07, | |
| "loss": 0.5279, | |
| "step": 442 | |
| }, | |
| { | |
| "epoch": 0.9609544468546638, | |
| "grad_norm": 0.4668637216091156, | |
| "learning_rate": 4.121475054229936e-07, | |
| "loss": 0.5488, | |
| "step": 443 | |
| }, | |
| { | |
| "epoch": 0.9631236442516269, | |
| "grad_norm": 0.46520882844924927, | |
| "learning_rate": 3.9045553145336227e-07, | |
| "loss": 0.5272, | |
| "step": 444 | |
| }, | |
| { | |
| "epoch": 0.96529284164859, | |
| "grad_norm": 0.45933347940444946, | |
| "learning_rate": 3.68763557483731e-07, | |
| "loss": 0.5526, | |
| "step": 445 | |
| }, | |
| { | |
| "epoch": 0.9674620390455532, | |
| "grad_norm": 0.6567378044128418, | |
| "learning_rate": 3.4707158351409984e-07, | |
| "loss": 0.5387, | |
| "step": 446 | |
| }, | |
| { | |
| "epoch": 0.9696312364425163, | |
| "grad_norm": 0.49450600147247314, | |
| "learning_rate": 3.2537960954446857e-07, | |
| "loss": 0.5329, | |
| "step": 447 | |
| }, | |
| { | |
| "epoch": 0.9718004338394793, | |
| "grad_norm": 0.5677459239959717, | |
| "learning_rate": 3.036876355748373e-07, | |
| "loss": 0.5459, | |
| "step": 448 | |
| }, | |
| { | |
| "epoch": 0.9739696312364425, | |
| "grad_norm": 0.4713030755519867, | |
| "learning_rate": 2.819956616052061e-07, | |
| "loss": 0.5394, | |
| "step": 449 | |
| }, | |
| { | |
| "epoch": 0.9761388286334056, | |
| "grad_norm": 0.5460206866264343, | |
| "learning_rate": 2.6030368763557483e-07, | |
| "loss": 0.5318, | |
| "step": 450 | |
| }, | |
| { | |
| "epoch": 0.9783080260303688, | |
| "grad_norm": 0.4780767560005188, | |
| "learning_rate": 2.386117136659436e-07, | |
| "loss": 0.5407, | |
| "step": 451 | |
| }, | |
| { | |
| "epoch": 0.9804772234273319, | |
| "grad_norm": 0.4803195297718048, | |
| "learning_rate": 2.1691973969631237e-07, | |
| "loss": 0.5199, | |
| "step": 452 | |
| }, | |
| { | |
| "epoch": 0.982646420824295, | |
| "grad_norm": 0.5350447297096252, | |
| "learning_rate": 1.9522776572668113e-07, | |
| "loss": 0.5434, | |
| "step": 453 | |
| }, | |
| { | |
| "epoch": 0.9848156182212582, | |
| "grad_norm": 0.5030990839004517, | |
| "learning_rate": 1.7353579175704992e-07, | |
| "loss": 0.5368, | |
| "step": 454 | |
| }, | |
| { | |
| "epoch": 0.9869848156182213, | |
| "grad_norm": 0.4834834933280945, | |
| "learning_rate": 1.5184381778741865e-07, | |
| "loss": 0.5386, | |
| "step": 455 | |
| }, | |
| { | |
| "epoch": 0.9891540130151844, | |
| "grad_norm": 0.5929272174835205, | |
| "learning_rate": 1.3015184381778741e-07, | |
| "loss": 0.5452, | |
| "step": 456 | |
| }, | |
| { | |
| "epoch": 0.9913232104121475, | |
| "grad_norm": 0.4731636047363281, | |
| "learning_rate": 1.0845986984815619e-07, | |
| "loss": 0.5294, | |
| "step": 457 | |
| }, | |
| { | |
| "epoch": 0.9934924078091106, | |
| "grad_norm": 0.48876383900642395, | |
| "learning_rate": 8.676789587852496e-08, | |
| "loss": 0.539, | |
| "step": 458 | |
| }, | |
| { | |
| "epoch": 0.9956616052060737, | |
| "grad_norm": 0.4793936610221863, | |
| "learning_rate": 6.507592190889371e-08, | |
| "loss": 0.5496, | |
| "step": 459 | |
| }, | |
| { | |
| "epoch": 0.9978308026030369, | |
| "grad_norm": 0.46681129932403564, | |
| "learning_rate": 4.338394793926248e-08, | |
| "loss": 0.5283, | |
| "step": 460 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "grad_norm": 0.5591140389442444, | |
| "learning_rate": 2.169197396963124e-08, | |
| "loss": 0.5435, | |
| "step": 461 | |
| } | |
| ], | |
| "logging_steps": 1.0, | |
| "max_steps": 461, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 1, | |
| "save_steps": 0, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": true | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 8.281141529490227e+17, | |
| "train_batch_size": 1, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |
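
The JSON above is a complete, well-formed state file of the kind the Hugging Face `transformers` Trainer writes as `trainer_state.json`: 461 logged steps for a one-epoch run (`max_steps` = 461, `num_train_epochs` = 1), with the learning rate stepping down linearly to ~2.17e-08 at the final step and the loss flattening around 0.53–0.55 over the last stretch of the log. As a minimal sketch of how one might consume a file like this, the Python below loads it, prints a short summary, and checks the logged rates against a warmup-free linear-decay schedule. The filename is an assumption, and only fields visible in the JSON above are used.

```python
# Minimal sketch: parse a trainer_state.json like the one above.
# Assumptions: the file is saved locally as "trainer_state.json" (hypothetical
# path), and every log_history entry carries "step", "loss", and
# "learning_rate", as in the log shown here.
import json

with open("trainer_state.json") as f:
    state = json.load(f)

history = state["log_history"]
steps = [entry["step"] for entry in history]
losses = [entry["loss"] for entry in history]
lrs = [entry["learning_rate"] for entry in history]

print(f"logged steps : {len(history)} (max_steps={state['max_steps']})")
print(f"final loss   : {losses[-1]:.4f} at step {steps[-1]}")

# The logged rates are consistent with a warmup-free linear schedule:
#   lr(step) = lr_0 * (1 - (step - 1) / max_steps)
lr_0, max_steps = lrs[0], state["max_steps"]
fit = [lr_0 * (1 - (s - 1) / max_steps) for s in steps]
max_dev = max(abs(a - b) for a, b in zip(lrs, fit))
print(f"max deviation from linear-decay fit: {max_dev:.3e}")
```

Reading the state file rather than scraping console output keeps the summary reproducible, and the same loop extends naturally to plotting `loss` or `grad_norm` against `step`.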