{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 3.0,
  "eval_steps": 500,
  "global_step": 624,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.004807692307692308,
      "grad_norm": 6.990724563598633,
      "learning_rate": 1.5873015873015874e-07,
      "loss": 1.0623,
      "step": 1
    },
    {
      "epoch": 0.009615384615384616,
      "grad_norm": 6.919309139251709,
      "learning_rate": 3.174603174603175e-07,
      "loss": 1.0437,
      "step": 2
    },
    {
      "epoch": 0.014423076923076924,
      "grad_norm": 6.894330978393555,
      "learning_rate": 4.7619047619047623e-07,
      "loss": 1.047,
      "step": 3
    },
    {
      "epoch": 0.019230769230769232,
      "grad_norm": 6.92576265335083,
      "learning_rate": 6.34920634920635e-07,
      "loss": 1.042,
      "step": 4
    },
    {
      "epoch": 0.02403846153846154,
      "grad_norm": 6.855586051940918,
      "learning_rate": 7.936507936507937e-07,
      "loss": 1.0384,
      "step": 5
    },
    {
      "epoch": 0.028846153846153848,
      "grad_norm": 6.824655532836914,
      "learning_rate": 9.523809523809525e-07,
      "loss": 1.0474,
      "step": 6
    },
    {
      "epoch": 0.03365384615384615,
      "grad_norm": 6.360400199890137,
      "learning_rate": 1.111111111111111e-06,
      "loss": 1.0354,
      "step": 7
    },
    {
      "epoch": 0.038461538461538464,
      "grad_norm": 6.237353324890137,
      "learning_rate": 1.26984126984127e-06,
      "loss": 1.035,
      "step": 8
    },
    {
      "epoch": 0.04326923076923077,
      "grad_norm": 4.962989330291748,
      "learning_rate": 1.4285714285714286e-06,
      "loss": 0.986,
      "step": 9
    },
    {
      "epoch": 0.04807692307692308,
      "grad_norm": 4.90160608291626,
      "learning_rate": 1.5873015873015873e-06,
      "loss": 0.9925,
      "step": 10
    },
    {
      "epoch": 0.052884615384615384,
      "grad_norm": 4.697347640991211,
      "learning_rate": 1.746031746031746e-06,
      "loss": 0.9938,
      "step": 11
    },
    {
      "epoch": 0.057692307692307696,
      "grad_norm": 2.8192811012268066,
      "learning_rate": 1.904761904761905e-06,
      "loss": 0.9378,
      "step": 12
    },
    {
      "epoch": 0.0625,
      "grad_norm": 2.878005027770996,
      "learning_rate": 2.0634920634920634e-06,
      "loss": 0.9478,
      "step": 13
    },
    {
      "epoch": 0.0673076923076923,
      "grad_norm": 2.691462993621826,
      "learning_rate": 2.222222222222222e-06,
      "loss": 0.9575,
      "step": 14
    },
    {
      "epoch": 0.07211538461538461,
      "grad_norm": 2.599451780319214,
      "learning_rate": 2.380952380952381e-06,
      "loss": 0.9319,
      "step": 15
    },
    {
      "epoch": 0.07692307692307693,
      "grad_norm": 3.288738489151001,
      "learning_rate": 2.53968253968254e-06,
      "loss": 0.9198,
      "step": 16
    },
    {
      "epoch": 0.08173076923076923,
      "grad_norm": 3.5585649013519287,
      "learning_rate": 2.6984126984126986e-06,
      "loss": 0.8854,
      "step": 17
    },
    {
      "epoch": 0.08653846153846154,
      "grad_norm": 3.675098419189453,
      "learning_rate": 2.8571428571428573e-06,
      "loss": 0.9028,
      "step": 18
    },
    {
      "epoch": 0.09134615384615384,
      "grad_norm": 3.505143642425537,
      "learning_rate": 3.015873015873016e-06,
      "loss": 0.9001,
      "step": 19
    },
    {
      "epoch": 0.09615384615384616,
      "grad_norm": 3.0340216159820557,
      "learning_rate": 3.1746031746031746e-06,
      "loss": 0.8766,
      "step": 20
    },
    {
      "epoch": 0.10096153846153846,
      "grad_norm": 2.239123821258545,
      "learning_rate": 3.3333333333333333e-06,
      "loss": 0.8773,
      "step": 21
    },
    {
      "epoch": 0.10576923076923077,
      "grad_norm": 1.5766472816467285,
      "learning_rate": 3.492063492063492e-06,
      "loss": 0.8311,
      "step": 22
    },
    {
      "epoch": 0.11057692307692307,
      "grad_norm": 1.567060112953186,
      "learning_rate": 3.6507936507936507e-06,
      "loss": 0.8303,
      "step": 23
    },
    {
      "epoch": 0.11538461538461539,
      "grad_norm": 1.373935580253601,
      "learning_rate": 3.80952380952381e-06,
      "loss": 0.8011,
      "step": 24
    },
    {
      "epoch": 0.1201923076923077,
      "grad_norm": 1.1732724905014038,
      "learning_rate": 3.968253968253968e-06,
      "loss": 0.7878,
      "step": 25
    },
    {
      "epoch": 0.125,
      "grad_norm": 1.0626819133758545,
      "learning_rate": 4.126984126984127e-06,
      "loss": 0.78,
      "step": 26
    },
    {
      "epoch": 0.12980769230769232,
      "grad_norm": 1.0781354904174805,
      "learning_rate": 4.2857142857142855e-06,
      "loss": 0.7852,
      "step": 27
    },
    {
      "epoch": 0.1346153846153846,
      "grad_norm": 1.006543517112732,
      "learning_rate": 4.444444444444444e-06,
      "loss": 0.7954,
      "step": 28
    },
    {
      "epoch": 0.13942307692307693,
      "grad_norm": 0.9152565598487854,
      "learning_rate": 4.603174603174604e-06,
      "loss": 0.7573,
      "step": 29
    },
    {
      "epoch": 0.14423076923076922,
      "grad_norm": 0.8509926199913025,
      "learning_rate": 4.761904761904762e-06,
      "loss": 0.7572,
      "step": 30
    },
    {
      "epoch": 0.14903846153846154,
      "grad_norm": 0.8057727217674255,
      "learning_rate": 4.920634920634921e-06,
      "loss": 0.7536,
      "step": 31
    },
    {
      "epoch": 0.15384615384615385,
      "grad_norm": 0.8267956376075745,
      "learning_rate": 5.07936507936508e-06,
      "loss": 0.7441,
      "step": 32
    },
    {
      "epoch": 0.15865384615384615,
      "grad_norm": 0.8982202410697937,
      "learning_rate": 5.2380952380952384e-06,
      "loss": 0.7398,
      "step": 33
    },
    {
      "epoch": 0.16346153846153846,
      "grad_norm": 0.7668877840042114,
      "learning_rate": 5.396825396825397e-06,
      "loss": 0.7356,
      "step": 34
    },
    {
      "epoch": 0.16826923076923078,
      "grad_norm": 0.775622546672821,
      "learning_rate": 5.555555555555557e-06,
      "loss": 0.722,
      "step": 35
    },
    {
      "epoch": 0.17307692307692307,
      "grad_norm": 0.7095115780830383,
      "learning_rate": 5.7142857142857145e-06,
      "loss": 0.7061,
      "step": 36
    },
    {
      "epoch": 0.1778846153846154,
      "grad_norm": 0.7553266882896423,
      "learning_rate": 5.873015873015874e-06,
      "loss": 0.7194,
      "step": 37
    },
    {
      "epoch": 0.18269230769230768,
      "grad_norm": 0.7487539052963257,
      "learning_rate": 6.031746031746032e-06,
      "loss": 0.7101,
      "step": 38
    },
    {
      "epoch": 0.1875,
      "grad_norm": 0.7720729112625122,
      "learning_rate": 6.1904761904761914e-06,
      "loss": 0.7186,
      "step": 39
    },
    {
      "epoch": 0.19230769230769232,
      "grad_norm": 0.7665765881538391,
      "learning_rate": 6.349206349206349e-06,
      "loss": 0.7262,
      "step": 40
    },
    {
      "epoch": 0.1971153846153846,
      "grad_norm": 0.8309563398361206,
      "learning_rate": 6.507936507936509e-06,
      "loss": 0.7232,
      "step": 41
    },
    {
      "epoch": 0.20192307692307693,
      "grad_norm": 0.8082911372184753,
      "learning_rate": 6.666666666666667e-06,
      "loss": 0.7006,
      "step": 42
    },
    {
      "epoch": 0.20673076923076922,
      "grad_norm": 0.6936203837394714,
      "learning_rate": 6.825396825396826e-06,
      "loss": 0.7036,
      "step": 43
    },
    {
      "epoch": 0.21153846153846154,
      "grad_norm": 0.8248268365859985,
      "learning_rate": 6.984126984126984e-06,
      "loss": 0.7022,
      "step": 44
    },
    {
      "epoch": 0.21634615384615385,
      "grad_norm": 0.7197801470756531,
      "learning_rate": 7.1428571428571436e-06,
      "loss": 0.6632,
      "step": 45
    },
    {
      "epoch": 0.22115384615384615,
      "grad_norm": 0.5920101404190063,
      "learning_rate": 7.301587301587301e-06,
      "loss": 0.682,
      "step": 46
    },
    {
      "epoch": 0.22596153846153846,
      "grad_norm": 0.6113859415054321,
      "learning_rate": 7.460317460317461e-06,
      "loss": 0.6929,
      "step": 47
    },
    {
      "epoch": 0.23076923076923078,
      "grad_norm": 0.6709004640579224,
      "learning_rate": 7.61904761904762e-06,
      "loss": 0.6728,
      "step": 48
    },
    {
      "epoch": 0.23557692307692307,
      "grad_norm": 0.6471027731895447,
      "learning_rate": 7.77777777777778e-06,
      "loss": 0.6722,
      "step": 49
    },
    {
      "epoch": 0.2403846153846154,
      "grad_norm": 0.5812222361564636,
      "learning_rate": 7.936507936507936e-06,
      "loss": 0.6787,
      "step": 50
    },
    {
      "epoch": 0.24519230769230768,
      "grad_norm": 0.7254865765571594,
      "learning_rate": 8.095238095238097e-06,
      "loss": 0.6809,
      "step": 51
    },
    {
      "epoch": 0.25,
      "grad_norm": 0.5452060103416443,
      "learning_rate": 8.253968253968254e-06,
      "loss": 0.6658,
      "step": 52
    },
    {
      "epoch": 0.2548076923076923,
      "grad_norm": 0.6729876399040222,
      "learning_rate": 8.412698412698414e-06,
      "loss": 0.6777,
      "step": 53
    },
    {
      "epoch": 0.25961538461538464,
      "grad_norm": 0.5397999286651611,
      "learning_rate": 8.571428571428571e-06,
      "loss": 0.6713,
      "step": 54
    },
    {
      "epoch": 0.2644230769230769,
      "grad_norm": 0.6134487390518188,
      "learning_rate": 8.730158730158731e-06,
      "loss": 0.6668,
      "step": 55
    },
    {
      "epoch": 0.2692307692307692,
      "grad_norm": 0.5382984280586243,
      "learning_rate": 8.888888888888888e-06,
      "loss": 0.6697,
      "step": 56
    },
    {
      "epoch": 0.27403846153846156,
      "grad_norm": 0.6409310102462769,
      "learning_rate": 9.047619047619049e-06,
      "loss": 0.6725,
      "step": 57
    },
    {
      "epoch": 0.27884615384615385,
      "grad_norm": 0.4940313398838043,
      "learning_rate": 9.206349206349207e-06,
      "loss": 0.6738,
      "step": 58
    },
    {
      "epoch": 0.28365384615384615,
      "grad_norm": 0.5982186198234558,
      "learning_rate": 9.365079365079366e-06,
      "loss": 0.6625,
      "step": 59
    },
    {
      "epoch": 0.28846153846153844,
      "grad_norm": 0.6377073526382446,
      "learning_rate": 9.523809523809525e-06,
      "loss": 0.6605,
      "step": 60
    },
    {
      "epoch": 0.2932692307692308,
      "grad_norm": 0.6730812191963196,
      "learning_rate": 9.682539682539683e-06,
      "loss": 0.6589,
      "step": 61
    },
    {
      "epoch": 0.2980769230769231,
      "grad_norm": 0.7052279710769653,
      "learning_rate": 9.841269841269842e-06,
      "loss": 0.6504,
      "step": 62
    },
    {
      "epoch": 0.30288461538461536,
      "grad_norm": 0.5276140570640564,
      "learning_rate": 1e-05,
      "loss": 0.6623,
      "step": 63
    },
    {
      "epoch": 0.3076923076923077,
      "grad_norm": 0.7044535279273987,
      "learning_rate": 9.99992160057155e-06,
      "loss": 0.6383,
      "step": 64
    },
    {
      "epoch": 0.3125,
      "grad_norm": 0.7368455529212952,
      "learning_rate": 9.999686404744782e-06,
      "loss": 0.6447,
      "step": 65
    },
    {
      "epoch": 0.3173076923076923,
      "grad_norm": 0.7093029618263245,
      "learning_rate": 9.999294419895389e-06,
      "loss": 0.6673,
      "step": 66
    },
    {
      "epoch": 0.32211538461538464,
      "grad_norm": 0.7653289437294006,
      "learning_rate": 9.998745658315924e-06,
      "loss": 0.6437,
      "step": 67
    },
    {
      "epoch": 0.3269230769230769,
      "grad_norm": 0.6811711192131042,
      "learning_rate": 9.998040137215423e-06,
      "loss": 0.656,
      "step": 68
    },
    {
      "epoch": 0.3317307692307692,
      "grad_norm": 0.6195499897003174,
      "learning_rate": 9.99717787871887e-06,
      "loss": 0.6473,
      "step": 69
    },
    {
      "epoch": 0.33653846153846156,
      "grad_norm": 0.7382338047027588,
      "learning_rate": 9.99615890986649e-06,
      "loss": 0.6484,
      "step": 70
    },
    {
      "epoch": 0.34134615384615385,
      "grad_norm": 0.7722101807594299,
      "learning_rate": 9.994983262612916e-06,
      "loss": 0.6508,
      "step": 71
    },
    {
      "epoch": 0.34615384615384615,
      "grad_norm": 0.7530032396316528,
      "learning_rate": 9.993650973826177e-06,
      "loss": 0.6346,
      "step": 72
    },
    {
      "epoch": 0.35096153846153844,
      "grad_norm": 0.7273198962211609,
      "learning_rate": 9.992162085286543e-06,
      "loss": 0.6456,
      "step": 73
    },
    {
      "epoch": 0.3557692307692308,
      "grad_norm": 0.7155181765556335,
      "learning_rate": 9.990516643685222e-06,
      "loss": 0.6553,
      "step": 74
    },
    {
      "epoch": 0.3605769230769231,
      "grad_norm": 0.7894483804702759,
      "learning_rate": 9.988714700622882e-06,
      "loss": 0.648,
      "step": 75
    },
    {
      "epoch": 0.36538461538461536,
      "grad_norm": 0.6426685452461243,
      "learning_rate": 9.986756312608048e-06,
      "loss": 0.6498,
      "step": 76
    },
    {
      "epoch": 0.3701923076923077,
      "grad_norm": 0.6765972375869751,
      "learning_rate": 9.98464154105532e-06,
      "loss": 0.6478,
      "step": 77
    },
    {
      "epoch": 0.375,
      "grad_norm": 0.6547264456748962,
      "learning_rate": 9.982370452283451e-06,
      "loss": 0.628,
      "step": 78
    },
    {
      "epoch": 0.3798076923076923,
      "grad_norm": 0.6896611452102661,
      "learning_rate": 9.979943117513265e-06,
      "loss": 0.6204,
      "step": 79
    },
    {
      "epoch": 0.38461538461538464,
      "grad_norm": 0.7491808533668518,
      "learning_rate": 9.977359612865424e-06,
      "loss": 0.6376,
      "step": 80
    },
    {
      "epoch": 0.3894230769230769,
      "grad_norm": 0.6002772450447083,
      "learning_rate": 9.974620019358046e-06,
      "loss": 0.6378,
      "step": 81
    },
    {
      "epoch": 0.3942307692307692,
      "grad_norm": 0.7602054476737976,
      "learning_rate": 9.971724422904154e-06,
      "loss": 0.643,
      "step": 82
    },
    {
      "epoch": 0.39903846153846156,
      "grad_norm": 0.5507690906524658,
      "learning_rate": 9.968672914308995e-06,
      "loss": 0.6332,
      "step": 83
    },
    {
      "epoch": 0.40384615384615385,
      "grad_norm": 0.6860342025756836,
      "learning_rate": 9.965465589267176e-06,
      "loss": 0.6489,
      "step": 84
    },
    {
      "epoch": 0.40865384615384615,
      "grad_norm": 0.5931215286254883,
      "learning_rate": 9.96210254835968e-06,
      "loss": 0.6425,
      "step": 85
    },
    {
      "epoch": 0.41346153846153844,
      "grad_norm": 0.5528920292854309,
      "learning_rate": 9.9585838970507e-06,
      "loss": 0.6361,
      "step": 86
    },
    {
      "epoch": 0.4182692307692308,
      "grad_norm": 0.6603521704673767,
      "learning_rate": 9.954909745684339e-06,
      "loss": 0.63,
      "step": 87
    },
    {
      "epoch": 0.4230769230769231,
      "grad_norm": 0.6493014693260193,
      "learning_rate": 9.951080209481138e-06,
      "loss": 0.6187,
      "step": 88
    },
    {
      "epoch": 0.42788461538461536,
      "grad_norm": 0.5607807040214539,
      "learning_rate": 9.947095408534483e-06,
      "loss": 0.6459,
      "step": 89
    },
    {
      "epoch": 0.4326923076923077,
      "grad_norm": 0.6842950582504272,
      "learning_rate": 9.94295546780682e-06,
      "loss": 0.6315,
      "step": 90
    },
    {
      "epoch": 0.4375,
      "grad_norm": 0.7010789513587952,
      "learning_rate": 9.93866051712574e-06,
      "loss": 0.6353,
      "step": 91
    },
    {
      "epoch": 0.4423076923076923,
      "grad_norm": 0.573545515537262,
      "learning_rate": 9.934210691179918e-06,
      "loss": 0.6149,
      "step": 92
    },
    {
      "epoch": 0.44711538461538464,
      "grad_norm": 0.5375520586967468,
      "learning_rate": 9.929606129514875e-06,
      "loss": 0.6227,
      "step": 93
    },
    {
      "epoch": 0.4519230769230769,
      "grad_norm": 0.6587491035461426,
      "learning_rate": 9.924846976528618e-06,
      "loss": 0.6297,
      "step": 94
    },
    {
      "epoch": 0.4567307692307692,
      "grad_norm": 0.6161583662033081,
      "learning_rate": 9.919933381467088e-06,
      "loss": 0.6421,
      "step": 95
    },
    {
      "epoch": 0.46153846153846156,
      "grad_norm": 0.5526056885719299,
      "learning_rate": 9.91486549841951e-06,
      "loss": 0.6506,
      "step": 96
    },
    {
      "epoch": 0.46634615384615385,
      "grad_norm": 0.6603134870529175,
      "learning_rate": 9.909643486313533e-06,
      "loss": 0.639,
      "step": 97
    },
    {
      "epoch": 0.47115384615384615,
      "grad_norm": 0.5784550309181213,
      "learning_rate": 9.904267508910269e-06,
      "loss": 0.6307,
      "step": 98
    },
    {
      "epoch": 0.47596153846153844,
      "grad_norm": 0.5557315945625305,
      "learning_rate": 9.898737734799134e-06,
      "loss": 0.6163,
      "step": 99
    },
    {
      "epoch": 0.4807692307692308,
      "grad_norm": 0.6521722078323364,
      "learning_rate": 9.89305433739258e-06,
      "loss": 0.6352,
      "step": 100
    },
    {
      "epoch": 0.4855769230769231,
      "grad_norm": 0.6033216118812561,
      "learning_rate": 9.887217494920655e-06,
      "loss": 0.6253,
      "step": 101
    },
    {
      "epoch": 0.49038461538461536,
      "grad_norm": 0.5049378871917725,
      "learning_rate": 9.881227390425404e-06,
      "loss": 0.6315,
      "step": 102
    },
    {
      "epoch": 0.4951923076923077,
      "grad_norm": 0.5810129046440125,
      "learning_rate": 9.875084211755127e-06,
      "loss": 0.6303,
      "step": 103
    },
    {
      "epoch": 0.5,
      "grad_norm": 0.5270701050758362,
      "learning_rate": 9.868788151558513e-06,
      "loss": 0.6275,
      "step": 104
    },
    {
      "epoch": 0.5048076923076923,
      "grad_norm": 0.5791574120521545,
      "learning_rate": 9.862339407278564e-06,
      "loss": 0.6364,
      "step": 105
    },
    {
      "epoch": 0.5096153846153846,
      "grad_norm": 0.5724871158599854,
      "learning_rate": 9.855738181146427e-06,
      "loss": 0.6222,
      "step": 106
    },
    {
      "epoch": 0.5144230769230769,
      "grad_norm": 0.5011044144630432,
      "learning_rate": 9.848984680175049e-06,
      "loss": 0.6216,
      "step": 107
    },
    {
      "epoch": 0.5192307692307693,
      "grad_norm": 0.5154832601547241,
      "learning_rate": 9.84207911615267e-06,
      "loss": 0.6224,
      "step": 108
    },
    {
      "epoch": 0.5240384615384616,
      "grad_norm": 0.5105087757110596,
      "learning_rate": 9.835021705636201e-06,
      "loss": 0.6259,
      "step": 109
    },
    {
      "epoch": 0.5288461538461539,
      "grad_norm": 0.66715407371521,
      "learning_rate": 9.827812669944423e-06,
      "loss": 0.6245,
      "step": 110
    },
    {
      "epoch": 0.5336538461538461,
      "grad_norm": 0.5680528879165649,
      "learning_rate": 9.82045223515105e-06,
      "loss": 0.6264,
      "step": 111
    },
    {
      "epoch": 0.5384615384615384,
      "grad_norm": 0.6406980156898499,
      "learning_rate": 9.812940632077629e-06,
      "loss": 0.6216,
      "step": 112
    },
    {
      "epoch": 0.5432692307692307,
      "grad_norm": 0.7648453116416931,
      "learning_rate": 9.805278096286318e-06,
      "loss": 0.6225,
      "step": 113
    },
    {
      "epoch": 0.5480769230769231,
      "grad_norm": 0.6609866619110107,
      "learning_rate": 9.797464868072489e-06,
      "loss": 0.6414,
      "step": 114
    },
    {
      "epoch": 0.5528846153846154,
      "grad_norm": 0.6097372174263,
      "learning_rate": 9.789501192457188e-06,
      "loss": 0.6301,
      "step": 115
    },
    {
      "epoch": 0.5576923076923077,
      "grad_norm": 0.6364119648933411,
      "learning_rate": 9.781387319179465e-06,
      "loss": 0.6302,
      "step": 116
    },
    {
      "epoch": 0.5625,
      "grad_norm": 0.5458793640136719,
      "learning_rate": 9.773123502688532e-06,
      "loss": 0.6119,
      "step": 117
    },
    {
      "epoch": 0.5673076923076923,
      "grad_norm": 0.7278120517730713,
      "learning_rate": 9.764710002135784e-06,
      "loss": 0.6298,
      "step": 118
    },
    {
      "epoch": 0.5721153846153846,
      "grad_norm": 0.5531219244003296,
      "learning_rate": 9.756147081366673e-06,
      "loss": 0.6174,
      "step": 119
    },
    {
      "epoch": 0.5769230769230769,
      "grad_norm": 0.6038543581962585,
      "learning_rate": 9.747435008912438e-06,
      "loss": 0.6192,
      "step": 120
    },
    {
      "epoch": 0.5817307692307693,
      "grad_norm": 0.8131069540977478,
      "learning_rate": 9.73857405798168e-06,
      "loss": 0.6198,
      "step": 121
    },
    {
      "epoch": 0.5865384615384616,
      "grad_norm": 0.5778982043266296,
      "learning_rate": 9.729564506451791e-06,
      "loss": 0.6205,
      "step": 122
    },
    {
      "epoch": 0.5913461538461539,
      "grad_norm": 0.842856764793396,
      "learning_rate": 9.720406636860252e-06,
      "loss": 0.6289,
      "step": 123
    },
    {
      "epoch": 0.5961538461538461,
      "grad_norm": 0.6863536238670349,
      "learning_rate": 9.711100736395758e-06,
      "loss": 0.6168,
      "step": 124
    },
    {
      "epoch": 0.6009615384615384,
      "grad_norm": 0.6230315566062927,
      "learning_rate": 9.70164709688922e-06,
      "loss": 0.6125,
      "step": 125
    },
    {
      "epoch": 0.6057692307692307,
      "grad_norm": 0.788438618183136,
      "learning_rate": 9.69204601480461e-06,
      "loss": 0.6296,
      "step": 126
    },
    {
      "epoch": 0.6105769230769231,
      "grad_norm": 0.6000380516052246,
      "learning_rate": 9.682297791229668e-06,
      "loss": 0.6296,
      "step": 127
    },
    {
      "epoch": 0.6153846153846154,
      "grad_norm": 0.73893141746521,
      "learning_rate": 9.67240273186646e-06,
      "loss": 0.6083,
      "step": 128
    },
    {
      "epoch": 0.6201923076923077,
      "grad_norm": 0.6741968393325806,
      "learning_rate": 9.66236114702178e-06,
      "loss": 0.6141,
      "step": 129
    },
    {
      "epoch": 0.625,
      "grad_norm": 0.7334549427032471,
      "learning_rate": 9.652173351597435e-06,
      "loss": 0.6052,
      "step": 130
    },
    {
      "epoch": 0.6298076923076923,
      "grad_norm": 0.6906297206878662,
      "learning_rate": 9.641839665080363e-06,
      "loss": 0.6196,
      "step": 131
    },
    {
      "epoch": 0.6346153846153846,
      "grad_norm": 0.7519117593765259,
      "learning_rate": 9.631360411532609e-06,
      "loss": 0.6209,
      "step": 132
    },
    {
      "epoch": 0.6394230769230769,
      "grad_norm": 0.6346924304962158,
      "learning_rate": 9.620735919581168e-06,
      "loss": 0.624,
      "step": 133
    },
    {
      "epoch": 0.6442307692307693,
      "grad_norm": 0.6513266563415527,
      "learning_rate": 9.609966522407678e-06,
      "loss": 0.6294,
      "step": 134
    },
    {
      "epoch": 0.6490384615384616,
      "grad_norm": 0.719011664390564,
      "learning_rate": 9.599052557737973e-06,
      "loss": 0.6323,
      "step": 135
    },
    {
      "epoch": 0.6538461538461539,
      "grad_norm": 0.5536561012268066,
      "learning_rate": 9.58799436783149e-06,
      "loss": 0.6081,
      "step": 136
    },
    {
      "epoch": 0.6586538461538461,
      "grad_norm": 0.6664816737174988,
      "learning_rate": 9.576792299470537e-06,
      "loss": 0.6234,
      "step": 137
    },
    {
      "epoch": 0.6634615384615384,
      "grad_norm": 0.6060488820075989,
      "learning_rate": 9.565446703949417e-06,
      "loss": 0.609,
      "step": 138
    },
    {
      "epoch": 0.6682692307692307,
      "grad_norm": 0.7735944390296936,
      "learning_rate": 9.55395793706341e-06,
      "loss": 0.609,
      "step": 139
    },
    {
      "epoch": 0.6730769230769231,
      "grad_norm": 0.5819251537322998,
      "learning_rate": 9.542326359097619e-06,
      "loss": 0.6116,
      "step": 140
    },
    {
      "epoch": 0.6778846153846154,
      "grad_norm": 0.8322558403015137,
      "learning_rate": 9.530552334815672e-06,
      "loss": 0.6071,
      "step": 141
    },
    {
      "epoch": 0.6826923076923077,
      "grad_norm": 0.744692862033844,
      "learning_rate": 9.518636233448276e-06,
      "loss": 0.5932,
      "step": 142
    },
    {
      "epoch": 0.6875,
      "grad_norm": 0.9699648022651672,
      "learning_rate": 9.506578428681648e-06,
      "loss": 0.6119,
      "step": 143
    },
    {
      "epoch": 0.6923076923076923,
      "grad_norm": 0.621521532535553,
      "learning_rate": 9.494379298645788e-06,
      "loss": 0.6121,
      "step": 144
    },
    {
      "epoch": 0.6971153846153846,
      "grad_norm": 1.0709632635116577,
      "learning_rate": 9.482039225902623e-06,
      "loss": 0.6101,
      "step": 145
    },
    {
      "epoch": 0.7019230769230769,
      "grad_norm": 0.5138935446739197,
      "learning_rate": 9.469558597434018e-06,
      "loss": 0.6283,
      "step": 146
    },
    {
      "epoch": 0.7067307692307693,
      "grad_norm": 0.8398244976997375,
      "learning_rate": 9.456937804629623e-06,
      "loss": 0.6,
      "step": 147
    },
    {
      "epoch": 0.7115384615384616,
      "grad_norm": 0.7468331456184387,
      "learning_rate": 9.444177243274619e-06,
      "loss": 0.6124,
      "step": 148
    },
    {
      "epoch": 0.7163461538461539,
      "grad_norm": 0.7130693793296814,
      "learning_rate": 9.43127731353729e-06,
      "loss": 0.6195,
      "step": 149
    },
    {
      "epoch": 0.7211538461538461,
      "grad_norm": 0.766042172908783,
      "learning_rate": 9.418238419956484e-06,
      "loss": 0.6052,
      "step": 150
    },
    {
      "epoch": 0.7259615384615384,
      "grad_norm": 0.8716906309127808,
      "learning_rate": 9.405060971428924e-06,
      "loss": 0.6209,
      "step": 151
    },
    {
      "epoch": 0.7307692307692307,
      "grad_norm": 0.6835882663726807,
      "learning_rate": 9.391745381196382e-06,
      "loss": 0.6174,
      "step": 152
    },
    {
      "epoch": 0.7355769230769231,
      "grad_norm": 0.7342336177825928,
      "learning_rate": 9.378292066832723e-06,
      "loss": 0.6243,
      "step": 153
    },
    {
      "epoch": 0.7403846153846154,
      "grad_norm": 0.6497460007667542,
      "learning_rate": 9.364701450230813e-06,
      "loss": 0.6313,
      "step": 154
    },
    {
      "epoch": 0.7451923076923077,
      "grad_norm": 0.6471742391586304,
      "learning_rate": 9.350973957589278e-06,
      "loss": 0.6124,
      "step": 155
    },
    {
      "epoch": 0.75,
      "grad_norm": 0.6364895105361938,
      "learning_rate": 9.33711001939915e-06,
      "loss": 0.614,
      "step": 156
    },
    {
      "epoch": 0.7548076923076923,
      "grad_norm": 0.6573180556297302,
      "learning_rate": 9.32311007043036e-06,
      "loss": 0.6309,
      "step": 157
    },
    {
      "epoch": 0.7596153846153846,
      "grad_norm": 0.6328399777412415,
      "learning_rate": 9.30897454971811e-06,
      "loss": 0.6101,
      "step": 158
    },
    {
      "epoch": 0.7644230769230769,
      "grad_norm": 0.6596732139587402,
      "learning_rate": 9.294703900549096e-06,
      "loss": 0.6244,
      "step": 159
    },
    {
      "epoch": 0.7692307692307693,
      "grad_norm": 0.6779056787490845,
      "learning_rate": 9.280298570447612e-06,
      "loss": 0.6095,
      "step": 160
    },
    {
      "epoch": 0.7740384615384616,
      "grad_norm": 0.6027641892433167,
      "learning_rate": 9.265759011161519e-06,
      "loss": 0.5958,
      "step": 161
    },
    {
      "epoch": 0.7788461538461539,
      "grad_norm": 0.7004779577255249,
      "learning_rate": 9.251085678648072e-06,
      "loss": 0.6221,
      "step": 162
    },
    {
      "epoch": 0.7836538461538461,
      "grad_norm": 0.7348073124885559,
      "learning_rate": 9.236279033059622e-06,
      "loss": 0.6226,
      "step": 163
    },
    {
      "epoch": 0.7884615384615384,
      "grad_norm": 0.5637420415878296,
      "learning_rate": 9.221339538729191e-06,
      "loss": 0.601,
      "step": 164
    },
    {
      "epoch": 0.7932692307692307,
      "grad_norm": 0.8568174839019775,
      "learning_rate": 9.206267664155906e-06,
      "loss": 0.6081,
      "step": 165
    },
    {
      "epoch": 0.7980769230769231,
      "grad_norm": 0.7954565286636353,
      "learning_rate": 9.191063881990308e-06,
      "loss": 0.6032,
      "step": 166
    },
    {
      "epoch": 0.8028846153846154,
      "grad_norm": 0.6611636877059937,
      "learning_rate": 9.17572866901953e-06,
      "loss": 0.6101,
      "step": 167
    },
    {
      "epoch": 0.8076923076923077,
      "grad_norm": 0.9244260787963867,
      "learning_rate": 9.160262506152343e-06,
      "loss": 0.6015,
      "step": 168
    },
    {
      "epoch": 0.8125,
      "grad_norm": 0.6074010133743286,
      "learning_rate": 9.14466587840408e-06,
      "loss": 0.6106,
      "step": 169
    },
    {
      "epoch": 0.8173076923076923,
      "grad_norm": 0.6845681071281433,
      "learning_rate": 9.12893927488142e-06,
      "loss": 0.6012,
      "step": 170
    },
    {
      "epoch": 0.8221153846153846,
      "grad_norm": 0.5826325416564941,
      "learning_rate": 9.113083188767057e-06,
      "loss": 0.6137,
      "step": 171
    },
    {
      "epoch": 0.8269230769230769,
      "grad_norm": 0.5240793228149414,
      "learning_rate": 9.097098117304223e-06,
      "loss": 0.6131,
      "step": 172
    },
    {
      "epoch": 0.8317307692307693,
      "grad_norm": 0.5843315124511719,
      "learning_rate": 9.08098456178111e-06,
      "loss": 0.6072,
      "step": 173
    },
    {
      "epoch": 0.8365384615384616,
      "grad_norm": 0.5391135215759277,
      "learning_rate": 9.064743027515127e-06,
      "loss": 0.6068,
      "step": 174
    },
    {
      "epoch": 0.8413461538461539,
      "grad_norm": 0.6380318403244019,
      "learning_rate": 9.048374023837086e-06,
      "loss": 0.6035,
      "step": 175
    },
    {
      "epoch": 0.8461538461538461,
      "grad_norm": 0.5520167350769043,
      "learning_rate": 9.03187806407519e-06,
      "loss": 0.6131,
      "step": 176
    },
    {
      "epoch": 0.8509615384615384,
      "grad_norm": 0.5716068744659424,
      "learning_rate": 9.015255665538972e-06,
      "loss": 0.6138,
      "step": 177
    },
    {
      "epoch": 0.8557692307692307,
      "grad_norm": 0.598839282989502,
      "learning_rate": 8.998507349503048e-06,
      "loss": 0.5959,
      "step": 178
    },
    {
      "epoch": 0.8605769230769231,
      "grad_norm": 0.6445143818855286,
      "learning_rate": 8.981633641190779e-06,
      "loss": 0.6076,
      "step": 179
    },
    {
      "epoch": 0.8653846153846154,
      "grad_norm": 0.6508511304855347,
      "learning_rate": 8.964635069757803e-06,
      "loss": 0.6021,
      "step": 180
    },
    {
      "epoch": 0.8701923076923077,
      "grad_norm": 0.6267576813697815,
      "learning_rate": 8.94751216827543e-06,
      "loss": 0.5993,
      "step": 181
    },
    {
      "epoch": 0.875,
      "grad_norm": 0.6657792925834656,
      "learning_rate": 8.930265473713939e-06,
      "loss": 0.601,
      "step": 182
    },
    {
      "epoch": 0.8798076923076923,
      "grad_norm": 0.6445341110229492,
      "learning_rate": 8.912895526925726e-06,
      "loss": 0.6018,
      "step": 183
    },
    {
      "epoch": 0.8846153846153846,
      "grad_norm": 0.506033718585968,
      "learning_rate": 8.895402872628352e-06,
      "loss": 0.6029,
      "step": 184
    },
    {
      "epoch": 0.8894230769230769,
      "grad_norm": 0.6721712350845337,
      "learning_rate": 8.87778805938746e-06,
      "loss": 0.5957,
      "step": 185
    },
    {
      "epoch": 0.8942307692307693,
      "grad_norm": 0.5404098033905029,
      "learning_rate": 8.86005163959956e-06,
      "loss": 0.601,
      "step": 186
    },
    {
      "epoch": 0.8990384615384616,
      "grad_norm": 0.6862823367118835,
      "learning_rate": 8.842194169474727e-06,
      "loss": 0.5961,
      "step": 187
    },
    {
      "epoch": 0.9038461538461539,
      "grad_norm": 0.6001689434051514,
      "learning_rate": 8.824216209019139e-06,
      "loss": 0.6023,
      "step": 188
    },
    {
      "epoch": 0.9086538461538461,
      "grad_norm": 0.5968487858772278,
      "learning_rate": 8.806118322017525e-06,
      "loss": 0.6087,
      "step": 189
    },
    {
      "epoch": 0.9134615384615384,
      "grad_norm": 0.5125237703323364,
      "learning_rate": 8.787901076015487e-06,
      "loss": 0.5929,
      "step": 190
    },
    {
      "epoch": 0.9182692307692307,
      "grad_norm": 0.7270171046257019,
      "learning_rate": 8.769565042301692e-06,
      "loss": 0.6209,
      "step": 191
    },
    {
      "epoch": 0.9230769230769231,
      "grad_norm": 0.5782671570777893,
      "learning_rate": 8.751110795889966e-06,
      "loss": 0.5954,
      "step": 192
    },
    {
      "epoch": 0.9278846153846154,
      "grad_norm": 0.5898118615150452,
      "learning_rate": 8.732538915501257e-06,
      "loss": 0.5993,
      "step": 193
    },
    {
      "epoch": 0.9326923076923077,
      "grad_norm": 0.5937283635139465,
      "learning_rate": 8.71384998354549e-06,
      "loss": 0.614,
      "step": 194
    },
    {
      "epoch": 0.9375,
      "grad_norm": 0.6111398935317993,
      "learning_rate": 8.695044586103297e-06,
      "loss": 0.605,
      "step": 195
    },
    {
      "epoch": 0.9423076923076923,
      "grad_norm": 0.592433750629425,
      "learning_rate": 8.676123312907641e-06,
      "loss": 0.6137,
      "step": 196
    },
    {
      "epoch": 0.9471153846153846,
      "grad_norm": 0.579230546951294,
      "learning_rate": 8.657086757325328e-06,
      "loss": 0.5918,
      "step": 197
    },
    {
      "epoch": 0.9519230769230769,
      "grad_norm": 0.7127505540847778,
      "learning_rate": 8.637935516338384e-06,
      "loss": 0.6034,
      "step": 198
    },
    {
      "epoch": 0.9567307692307693,
      "grad_norm": 0.6220248341560364,
      "learning_rate": 8.61867019052535e-06,
      "loss": 0.6043,
      "step": 199
    },
    {
      "epoch": 0.9615384615384616,
      "grad_norm": 0.523986279964447,
      "learning_rate": 8.599291384042442e-06,
      "loss": 0.5896,
      "step": 200
    },
    {
      "epoch": 0.9663461538461539,
      "grad_norm": 0.7416500449180603,
      "learning_rate": 8.579799704604597e-06,
      "loss": 0.5966,
      "step": 201
    },
    {
      "epoch": 0.9711538461538461,
      "grad_norm": 0.6234787106513977,
      "learning_rate": 8.560195763466428e-06,
      "loss": 0.6022,
      "step": 202
    },
    {
      "epoch": 0.9759615384615384,
      "grad_norm": 0.608817458152771,
      "learning_rate": 8.540480175403045e-06,
      "loss": 0.6001,
      "step": 203
    },
    {
      "epoch": 0.9807692307692307,
      "grad_norm": 0.5969523191452026,
      "learning_rate": 8.520653558690785e-06,
      "loss": 0.6143,
      "step": 204
    },
    {
      "epoch": 0.9855769230769231,
      "grad_norm": 0.6564033031463623,
      "learning_rate": 8.500716535087815e-06,
      "loss": 0.5862,
      "step": 205
    },
    {
      "epoch": 0.9903846153846154,
      "grad_norm": 0.5331311821937561,
      "learning_rate": 8.480669729814635e-06,
      "loss": 0.6073,
      "step": 206
    },
    {
      "epoch": 0.9951923076923077,
      "grad_norm": 0.5960302948951721,
      "learning_rate": 8.460513771534475e-06,
      "loss": 0.5938,
      "step": 207
    },
    {
      "epoch": 1.0,
      "grad_norm": 0.5379106402397156,
      "learning_rate": 8.440249292333583e-06,
      "loss": 0.6125,
      "step": 208
    },
    {
      "epoch": 1.0048076923076923,
      "grad_norm": 0.6571820378303528,
      "learning_rate": 8.41987692770139e-06,
      "loss": 0.5833,
      "step": 209
    },
    {
      "epoch": 1.0096153846153846,
      "grad_norm": 0.5532863736152649,
      "learning_rate": 8.399397316510596e-06,
      "loss": 0.5801,
      "step": 210
    },
    {
      "epoch": 1.0144230769230769,
      "grad_norm": 0.5147875547409058,
      "learning_rate": 8.378811100997122e-06,
      "loss": 0.5696,
      "step": 211
    },
    {
      "epoch": 1.0192307692307692,
      "grad_norm": 0.5540103316307068,
      "learning_rate": 8.358118926739984e-06,
      "loss": 0.578,
      "step": 212
    },
    {
      "epoch": 1.0240384615384615,
      "grad_norm": 0.5395882725715637,
      "learning_rate": 8.337321442641036e-06,
      "loss": 0.5836,
      "step": 213
    },
    {
      "epoch": 1.0288461538461537,
      "grad_norm": 0.5106703639030457,
      "learning_rate": 8.316419300904622e-06,
      "loss": 0.5531,
      "step": 214
    },
    {
      "epoch": 1.0336538461538463,
      "grad_norm": 0.570037841796875,
      "learning_rate": 8.295413157017127e-06,
      "loss": 0.5619,
      "step": 215
    },
    {
      "epoch": 1.0384615384615385,
      "grad_norm": 0.5976242423057556,
      "learning_rate": 8.274303669726427e-06,
      "loss": 0.559,
      "step": 216
    },
    {
      "epoch": 1.0432692307692308,
      "grad_norm": 0.5582316517829895,
      "learning_rate": 8.25309150102121e-06,
      "loss": 0.5656,
      "step": 217
    },
    {
      "epoch": 1.0480769230769231,
      "grad_norm": 0.535399854183197,
      "learning_rate": 8.231777316110245e-06,
      "loss": 0.5519,
      "step": 218
    },
    {
      "epoch": 1.0528846153846154,
      "grad_norm": 0.6076732277870178,
      "learning_rate": 8.210361783401491e-06,
      "loss": 0.5601,
      "step": 219
    },
    {
      "epoch": 1.0576923076923077,
      "grad_norm": 0.5462629199028015,
      "learning_rate": 8.188845574481162e-06,
      "loss": 0.5777,
      "step": 220
    },
    {
      "epoch": 1.0625,
      "grad_norm": 0.6692743301391602,
      "learning_rate": 8.167229364092648e-06,
      "loss": 0.5874,
      "step": 221
    },
    {
      "epoch": 1.0673076923076923,
      "grad_norm": 0.5311057567596436,
      "learning_rate": 8.145513830115367e-06,
      "loss": 0.5846,
      "step": 222
    },
    {
      "epoch": 1.0721153846153846,
      "grad_norm": 0.5106709599494934,
      "learning_rate": 8.1236996535435e-06,
      "loss": 0.5643,
      "step": 223
    },
    {
      "epoch": 1.0769230769230769,
      "grad_norm": 0.5355270504951477,
      "learning_rate": 8.101787518464634e-06,
      "loss": 0.5621,
      "step": 224
    },
    {
      "epoch": 1.0817307692307692,
      "grad_norm": 0.5128294825553894,
      "learning_rate": 8.079778112038318e-06,
      "loss": 0.5695,
      "step": 225
    },
    {
      "epoch": 1.0865384615384615,
      "grad_norm": 0.5155346989631653,
      "learning_rate": 8.057672124474508e-06,
      "loss": 0.5589,
      "step": 226
    },
    {
      "epoch": 1.0913461538461537,
      "grad_norm": 0.6576012372970581,
      "learning_rate": 8.035470249011916e-06,
      "loss": 0.5612,
      "step": 227
    },
    {
      "epoch": 1.0961538461538463,
      "grad_norm": 0.5001193284988403,
      "learning_rate": 8.013173181896283e-06,
      "loss": 0.5701,
      "step": 228
    },
    {
      "epoch": 1.1009615384615385,
      "grad_norm": 0.6337225437164307,
      "learning_rate": 7.990781622358535e-06,
      "loss": 0.558,
      "step": 229
    },
    {
      "epoch": 1.1057692307692308,
      "grad_norm": 0.622628390789032,
      "learning_rate": 7.968296272592862e-06,
      "loss": 0.5489,
      "step": 230
    },
    {
      "epoch": 1.1105769230769231,
      "grad_norm": 0.5139206051826477,
      "learning_rate": 7.945717837734688e-06,
      "loss": 0.544,
      "step": 231
    },
    {
      "epoch": 1.1153846153846154,
      "grad_norm": 0.4990007281303406,
      "learning_rate": 7.923047025838573e-06,
      "loss": 0.5771,
      "step": 232
    },
    {
      "epoch": 1.1201923076923077,
      "grad_norm": 0.43576645851135254,
      "learning_rate": 7.900284547855992e-06,
      "loss": 0.5668,
      "step": 233
    },
    {
      "epoch": 1.125,
      "grad_norm": 0.4898163378238678,
      "learning_rate": 7.87743111761305e-06,
      "loss": 0.5659,
      "step": 234
    },
    {
      "epoch": 1.1298076923076923,
      "grad_norm": 0.44757646322250366,
      "learning_rate": 7.8544874517881e-06,
      "loss": 0.5384,
      "step": 235
    },
    {
      "epoch": 1.1346153846153846,
      "grad_norm": 0.5017294883728027,
      "learning_rate": 7.831454269889251e-06,
      "loss": 0.5636,
      "step": 236
    },
    {
      "epoch": 1.1394230769230769,
      "grad_norm": 0.5470489263534546,
      "learning_rate": 7.808332294231824e-06,
      "loss": 0.5653,
      "step": 237
    },
    {
      "epoch": 1.1442307692307692,
      "grad_norm": 0.5034846067428589,
      "learning_rate": 7.785122249915688e-06,
      "loss": 0.5591,
      "step": 238
    },
    {
      "epoch": 1.1490384615384615,
      "grad_norm": 0.5333671569824219,
      "learning_rate": 7.76182486480253e-06,
      "loss": 0.5473,
      "step": 239
    },
    {
      "epoch": 1.1538461538461537,
      "grad_norm": 0.5511570572853088,
      "learning_rate": 7.738440869493018e-06,
      "loss": 0.5551,
      "step": 240
    },
    {
      "epoch": 1.1586538461538463,
      "grad_norm": 0.4971109628677368,
      "learning_rate": 7.714970997303898e-06,
      "loss": 0.5444,
      "step": 241
    },
    {
      "epoch": 1.1634615384615385,
      "grad_norm": 0.5407963991165161,
      "learning_rate": 7.691415984244998e-06,
      "loss": 0.5681,
      "step": 242
    },
    {
      "epoch": 1.1682692307692308,
      "grad_norm": 0.4940282106399536,
      "learning_rate": 7.667776568996143e-06,
      "loss": 0.5734,
      "step": 243
    },
    {
      "epoch": 1.1730769230769231,
      "grad_norm": 0.46702679991722107,
      "learning_rate": 7.64405349288399e-06,
      "loss": 0.5527,
      "step": 244
    },
    {
      "epoch": 1.1778846153846154,
      "grad_norm": 0.49713486433029175,
      "learning_rate": 7.62024749985878e-06,
      "loss": 0.5618,
      "step": 245
    },
    {
      "epoch": 1.1826923076923077,
      "grad_norm": 0.5031074285507202,
      "learning_rate": 7.596359336471015e-06,
      "loss": 0.546,
      "step": 246
    },
    {
      "epoch": 1.1875,
      "grad_norm": 0.5209451913833618,
      "learning_rate": 7.572389751848037e-06,
      "loss": 0.5599,
      "step": 247
    },
    {
      "epoch": 1.1923076923076923,
      "grad_norm": 0.5612902045249939,
      "learning_rate": 7.548339497670538e-06,
      "loss": 0.5537,
      "step": 248
    },
    {
      "epoch": 1.1971153846153846,
      "grad_norm": 0.595409631729126,
      "learning_rate": 7.524209328148995e-06,
      "loss": 0.5488,
      "step": 249
    },
    {
      "epoch": 1.2019230769230769,
      "grad_norm": 0.5032594203948975,
      "learning_rate": 7.500000000000001e-06,
      "loss": 0.5835,
      "step": 250
    },
    {
      "epoch": 1.2067307692307692,
      "grad_norm": 0.5198405385017395,
      "learning_rate": 7.4757122724225575e-06,
      "loss": 0.5783,
      "step": 251
    },
    {
      "epoch": 1.2115384615384615,
      "grad_norm": 0.552277147769928,
      "learning_rate": 7.451346907074245e-06,
      "loss": 0.5609,
      "step": 252
    },
    {
      "epoch": 1.2163461538461537,
      "grad_norm": 0.554760217666626,
      "learning_rate": 7.426904668047352e-06,
      "loss": 0.5678,
      "step": 253
    },
    {
      "epoch": 1.2211538461538463,
      "grad_norm": 0.49330809712409973,
      "learning_rate": 7.40238632184491e-06,
      "loss": 0.5779,
      "step": 254
    },
    {
      "epoch": 1.2259615384615385,
      "grad_norm": 0.6228109002113342,
      "learning_rate": 7.377792637356644e-06,
      "loss": 0.5496,
      "step": 255
    },
    {
      "epoch": 1.2307692307692308,
      "grad_norm": 0.4928394854068756,
      "learning_rate": 7.35312438583488e-06,
      "loss": 0.5928,
      "step": 256
    },
    {
      "epoch": 1.2355769230769231,
      "grad_norm": 0.5220669507980347,
      "learning_rate": 7.3283823408703466e-06,
      "loss": 0.5657,
      "step": 257
    },
    {
      "epoch": 1.2403846153846154,
      "grad_norm": 0.5418768525123596,
      "learning_rate": 7.303567278367918e-06,
      "loss": 0.5493,
      "step": 258
    },
    {
      "epoch": 1.2451923076923077,
      "grad_norm": 0.6059012413024902,
      "learning_rate": 7.278679976522279e-06,
      "loss": 0.5781,
      "step": 259
    },
    {
      "epoch": 1.25,
      "grad_norm": 0.5703076720237732,
      "learning_rate": 7.253721215793528e-06,
      "loss": 0.5766,
      "step": 260
    },
    {
      "epoch": 1.2548076923076923,
      "grad_norm": 0.7210759520530701,
      "learning_rate": 7.2286917788826926e-06,
      "loss": 0.5951,
      "step": 261
    },
    {
      "epoch": 1.2596153846153846,
      "grad_norm": 0.49044269323349,
      "learning_rate": 7.203592450707193e-06,
      "loss": 0.5625,
      "step": 262
    },
    {
      "epoch": 1.2644230769230769,
      "grad_norm": 0.5832895636558533,
      "learning_rate": 7.178424018376224e-06,
      "loss": 0.5594,
      "step": 263
    },
    {
      "epoch": 1.2692307692307692,
      "grad_norm": 0.5973503589630127,
      "learning_rate": 7.153187271166071e-06,
      "loss": 0.5629,
      "step": 264
    },
    {
      "epoch": 1.2740384615384617,
      "grad_norm": 0.5439810752868652,
      "learning_rate": 7.127883000495353e-06,
      "loss": 0.5548,
      "step": 265
    },
    {
      "epoch": 1.2788461538461537,
      "grad_norm": 0.5828291177749634,
      "learning_rate": 7.102511999900213e-06,
      "loss": 0.5533,
      "step": 266
    },
    {
      "epoch": 1.2836538461538463,
      "grad_norm": 0.601917028427124,
      "learning_rate": 7.0770750650094335e-06,
      "loss": 0.5673,
      "step": 267
    },
    {
      "epoch": 1.2884615384615383,
      "grad_norm": 0.6008758544921875,
      "learning_rate": 7.051572993519474e-06,
      "loss": 0.557,
      "step": 268
    },
    {
      "epoch": 1.2932692307692308,
      "grad_norm": 0.49223729968070984,
      "learning_rate": 7.026006585169467e-06,
      "loss": 0.5623,
      "step": 269
    },
    {
      "epoch": 1.2980769230769231,
      "grad_norm": 0.6068645119667053,
      "learning_rate": 7.0003766417161335e-06,
      "loss": 0.5706,
      "step": 270
    },
    {
      "epoch": 1.3028846153846154,
      "grad_norm": 0.5903732776641846,
      "learning_rate": 6.974683966908642e-06,
      "loss": 0.5625,
      "step": 271
    },
    {
      "epoch": 1.3076923076923077,
      "grad_norm": 0.550208568572998,
      "learning_rate": 6.948929366463397e-06,
      "loss": 0.5873,
      "step": 272
    },
    {
      "epoch": 1.3125,
      "grad_norm": 0.4981112480163574,
      "learning_rate": 6.923113648038784e-06,
      "loss": 0.5586,
      "step": 273
    },
    {
      "epoch": 1.3173076923076923,
      "grad_norm": 0.48777687549591064,
      "learning_rate": 6.897237621209831e-06,
      "loss": 0.5635,
      "step": 274
    },
    {
      "epoch": 1.3221153846153846,
      "grad_norm": 0.540177047252655,
      "learning_rate": 6.87130209744282e-06,
      "loss": 0.5619,
      "step": 275
    },
    {
      "epoch": 1.3269230769230769,
      "grad_norm": 0.49552926421165466,
      "learning_rate": 6.845307890069851e-06,
      "loss": 0.5506,
      "step": 276
    },
    {
      "epoch": 1.3317307692307692,
      "grad_norm": 0.6340379118919373,
      "learning_rate": 6.8192558142633215e-06,
      "loss": 0.5794,
      "step": 277
    },
    {
      "epoch": 1.3365384615384617,
      "grad_norm": 0.4628427028656006,
      "learning_rate": 6.7931466870103735e-06,
      "loss": 0.5692,
      "step": 278
    },
    {
      "epoch": 1.3413461538461537,
      "grad_norm": 0.5693328380584717,
      "learning_rate": 6.766981327087271e-06,
      "loss": 0.5617,
      "step": 279
    },
    {
      "epoch": 1.3461538461538463,
      "grad_norm": 0.564184308052063,
      "learning_rate": 6.740760555033715e-06,
      "loss": 0.5501,
      "step": 280
    },
    {
      "epoch": 1.3509615384615383,
      "grad_norm": 0.4685192406177521,
      "learning_rate": 6.714485193127126e-06,
      "loss": 0.5469,
      "step": 281
    },
    {
      "epoch": 1.3557692307692308,
      "grad_norm": 0.5547739863395691,
      "learning_rate": 6.688156065356845e-06,
      "loss": 0.5787,
      "step": 282
    },
    {
      "epoch": 1.3605769230769231,
      "grad_norm": 0.5170398354530334,
      "learning_rate": 6.6617739973982985e-06,
      "loss": 0.5526,
      "step": 283
    },
    {
      "epoch": 1.3653846153846154,
      "grad_norm": 0.5516389608383179,
      "learning_rate": 6.635339816587109e-06,
      "loss": 0.5769,
      "step": 284
    },
    {
      "epoch": 1.3701923076923077,
      "grad_norm": 0.6596104502677917,
      "learning_rate": 6.60885435189314e-06,
      "loss": 0.5687,
      "step": 285
    },
    {
      "epoch": 1.375,
      "grad_norm": 0.5153890252113342,
      "learning_rate": 6.582318433894513e-06,
      "loss": 0.5623,
      "step": 286
    },
    {
      "epoch": 1.3798076923076923,
      "grad_norm": 0.7115325927734375,
      "learning_rate": 6.555732894751548e-06,
      "loss": 0.5588,
      "step": 287
    },
    {
      "epoch": 1.3846153846153846,
      "grad_norm": 0.5545114278793335,
      "learning_rate": 6.529098568180672e-06,
      "loss": 0.5605,
      "step": 288
    },
    {
      "epoch": 1.3894230769230769,
      "grad_norm": 0.617314338684082,
      "learning_rate": 6.502416289428282e-06,
      "loss": 0.5768,
      "step": 289
    },
    {
      "epoch": 1.3942307692307692,
      "grad_norm": 0.6576439142227173,
      "learning_rate": 6.475686895244534e-06,
      "loss": 0.5544,
      "step": 290
    },
    {
      "epoch": 1.3990384615384617,
      "grad_norm": 0.5532417893409729,
      "learning_rate": 6.448911223857124e-06,
      "loss": 0.5457,
      "step": 291
    },
    {
      "epoch": 1.4038461538461537,
      "grad_norm": 0.7050262689590454,
      "learning_rate": 6.422090114944982e-06,
      "loss": 0.5703,
      "step": 292
    },
    {
      "epoch": 1.4086538461538463,
      "grad_norm": 0.5105507969856262,
      "learning_rate": 6.3952244096119535e-06,
      "loss": 0.5502,
      "step": 293
    },
    {
      "epoch": 1.4134615384615383,
      "grad_norm": 0.6280075907707214,
      "learning_rate": 6.368314950360416e-06,
      "loss": 0.572,
      "step": 294
    },
    {
      "epoch": 1.4182692307692308,
      "grad_norm": 0.5413298606872559,
      "learning_rate": 6.341362581064856e-06,
      "loss": 0.5599,
      "step": 295
    },
    {
      "epoch": 1.4230769230769231,
      "grad_norm": 0.5309041738510132,
      "learning_rate": 6.314368146945418e-06,
      "loss": 0.5496,
      "step": 296
    },
    {
      "epoch": 1.4278846153846154,
      "grad_norm": 0.585471510887146,
      "learning_rate": 6.28733249454138e-06,
      "loss": 0.5519,
      "step": 297
    },
    {
      "epoch": 1.4326923076923077,
      "grad_norm": 0.4619200825691223,
      "learning_rate": 6.260256471684622e-06,
      "loss": 0.562,
      "step": 298
    },
    {
      "epoch": 1.4375,
      "grad_norm": 0.568500816822052,
      "learning_rate": 6.233140927473033e-06,
      "loss": 0.5404,
      "step": 299
    },
    {
      "epoch": 1.4423076923076923,
      "grad_norm": 0.5464410781860352,
      "learning_rate": 6.205986712243876e-06,
      "loss": 0.5724,
      "step": 300
    },
    {
      "epoch": 1.4471153846153846,
      "grad_norm": 0.5567378401756287,
      "learning_rate": 6.178794677547138e-06,
      "loss": 0.5841,
      "step": 301
    },
    {
      "epoch": 1.4519230769230769,
      "grad_norm": 0.5256141424179077,
      "learning_rate": 6.151565676118805e-06,
      "loss": 0.556,
      "step": 302
    },
    {
      "epoch": 1.4567307692307692,
      "grad_norm": 0.5142210721969604,
      "learning_rate": 6.124300561854139e-06,
      "loss": 0.5667,
      "step": 303
    },
    {
      "epoch": 1.4615384615384617,
      "grad_norm": 0.5487565398216248,
      "learning_rate": 6.097000189780893e-06,
      "loss": 0.5557,
      "step": 304
    },
    {
      "epoch": 1.4663461538461537,
      "grad_norm": 0.5075297951698303,
      "learning_rate": 6.0696654160324875e-06,
      "loss": 0.5737,
      "step": 305
    },
    {
      "epoch": 1.4711538461538463,
      "grad_norm": 0.5169580578804016,
      "learning_rate": 6.042297097821184e-06,
      "loss": 0.5487,
      "step": 306
    },
    {
      "epoch": 1.4759615384615383,
      "grad_norm": 0.5064544677734375,
      "learning_rate": 6.014896093411181e-06,
      "loss": 0.5471,
      "step": 307
    },
    {
      "epoch": 1.4807692307692308,
      "grad_norm": 0.4726231098175049,
      "learning_rate": 5.987463262091715e-06,
      "loss": 0.544,
      "step": 308
    },
    {
      "epoch": 1.4855769230769231,
      "grad_norm": 0.5868953466415405,
      "learning_rate": 5.959999464150101e-06,
      "loss": 0.5579,
      "step": 309
    },
    {
      "epoch": 1.4903846153846154,
      "grad_norm": 0.5180251598358154,
      "learning_rate": 5.932505560844766e-06,
      "loss": 0.5547,
      "step": 310
    },
    {
      "epoch": 1.4951923076923077,
      "grad_norm": 0.5364190936088562,
      "learning_rate": 5.904982414378233e-06,
      "loss": 0.5838,
      "step": 311
    },
    {
      "epoch": 1.5,
      "grad_norm": 0.5316298007965088,
      "learning_rate": 5.877430887870081e-06,
      "loss": 0.5821,
      "step": 312
    },
    {
      "epoch": 1.5048076923076923,
      "grad_norm": 0.4852922558784485,
      "learning_rate": 5.849851845329884e-06,
      "loss": 0.5718,
      "step": 313
    },
    {
      "epoch": 1.5096153846153846,
      "grad_norm": 0.4950406551361084,
      "learning_rate": 5.822246151630109e-06,
      "loss": 0.5475,
      "step": 314
    },
    {
      "epoch": 1.5144230769230769,
      "grad_norm": 0.5721108317375183,
      "learning_rate": 5.794614672479e-06,
      "loss": 0.5682,
      "step": 315
    },
    {
      "epoch": 1.5192307692307692,
      "grad_norm": 0.43430256843566895,
      "learning_rate": 5.766958274393428e-06,
      "loss": 0.5723,
      "step": 316
    },
    {
      "epoch": 1.5240384615384617,
      "grad_norm": 0.5114768147468567,
      "learning_rate": 5.739277824671711e-06,
      "loss": 0.5724,
      "step": 317
    },
    {
      "epoch": 1.5288461538461537,
      "grad_norm": 0.5058174133300781,
      "learning_rate": 5.711574191366427e-06,
      "loss": 0.5636,
      "step": 318
    },
    {
      "epoch": 1.5336538461538463,
      "grad_norm": 0.4865401089191437,
      "learning_rate": 5.683848243257181e-06,
      "loss": 0.5444,
      "step": 319
    },
    {
      "epoch": 1.5384615384615383,
      "grad_norm": 0.5734021067619324,
      "learning_rate": 5.656100849823366e-06,
      "loss": 0.5703,
      "step": 320
    },
    {
      "epoch": 1.5432692307692308,
      "grad_norm": 0.4867899715900421,
      "learning_rate": 5.628332881216899e-06,
      "loss": 0.5728,
      "step": 321
    },
    {
      "epoch": 1.5480769230769231,
      "grad_norm": 0.46812713146209717,
      "learning_rate": 5.600545208234927e-06,
      "loss": 0.5624,
      "step": 322
    },
    {
      "epoch": 1.5528846153846154,
      "grad_norm": 0.5345550775527954,
      "learning_rate": 5.57273870229252e-06,
      "loss": 0.5487,
      "step": 323
    },
    {
      "epoch": 1.5576923076923077,
      "grad_norm": 0.45916569232940674,
      "learning_rate": 5.544914235395347e-06,
      "loss": 0.551,
      "step": 324
    },
    {
      "epoch": 1.5625,
      "grad_norm": 0.4594832956790924,
      "learning_rate": 5.517072680112332e-06,
      "loss": 0.5619,
      "step": 325
    },
    {
      "epoch": 1.5673076923076923,
      "grad_norm": 0.5421361923217773,
      "learning_rate": 5.4892149095482815e-06,
      "loss": 0.5509,
      "step": 326
    },
    {
      "epoch": 1.5721153846153846,
      "grad_norm": 0.4083757996559143,
      "learning_rate": 5.46134179731651e-06,
      "loss": 0.567,
      "step": 327
    },
    {
      "epoch": 1.5769230769230769,
      "grad_norm": 0.4842531085014343,
      "learning_rate": 5.4334542175114495e-06,
      "loss": 0.5572,
      "step": 328
    },
    {
      "epoch": 1.5817307692307692,
      "grad_norm": 0.5053375363349915,
      "learning_rate": 5.40555304468122e-06,
      "loss": 0.5653,
      "step": 329
    },
    {
      "epoch": 1.5865384615384617,
      "grad_norm": 0.5320326685905457,
      "learning_rate": 5.377639153800229e-06,
      "loss": 0.5595,
      "step": 330
    },
    {
      "epoch": 1.5913461538461537,
      "grad_norm": 0.502543568611145,
      "learning_rate": 5.34971342024171e-06,
      "loss": 0.572,
      "step": 331
    },
    {
      "epoch": 1.5961538461538463,
      "grad_norm": 0.47758862376213074,
      "learning_rate": 5.321776719750283e-06,
      "loss": 0.5742,
      "step": 332
    },
    {
      "epoch": 1.6009615384615383,
      "grad_norm": 0.49216747283935547,
      "learning_rate": 5.29382992841449e-06,
      "loss": 0.5618,
      "step": 333
    },
    {
      "epoch": 1.6057692307692308,
      "grad_norm": 0.5157171487808228,
      "learning_rate": 5.265873922639315e-06,
      "loss": 0.5513,
      "step": 334
    },
    {
      "epoch": 1.6105769230769231,
      "grad_norm": 0.48715752363204956,
      "learning_rate": 5.237909579118713e-06,
      "loss": 0.5626,
      "step": 335
    },
    {
      "epoch": 1.6153846153846154,
      "grad_norm": 0.48291143774986267,
      "learning_rate": 5.209937774808098e-06,
      "loss": 0.559,
      "step": 336
    },
    {
      "epoch": 1.6201923076923077,
      "grad_norm": 0.5301973819732666,
      "learning_rate": 5.181959386896862e-06,
      "loss": 0.5612,
      "step": 337
    },
    {
      "epoch": 1.625,
      "grad_norm": 0.4594566226005554,
      "learning_rate": 5.153975292780852e-06,
      "loss": 0.5637,
      "step": 338
    },
    {
      "epoch": 1.6298076923076923,
      "grad_norm": 0.4743099510669708,
      "learning_rate": 5.125986370034862e-06,
      "loss": 0.568,
      "step": 339
    },
    {
      "epoch": 1.6346153846153846,
      "grad_norm": 0.46242398023605347,
      "learning_rate": 5.097993496385112e-06,
      "loss": 0.5612,
      "step": 340
    },
    {
      "epoch": 1.6394230769230769,
      "grad_norm": 0.45778384804725647,
      "learning_rate": 5.069997549681718e-06,
      "loss": 0.5608,
      "step": 341
    },
    {
      "epoch": 1.6442307692307692,
      "grad_norm": 0.47530800104141235,
      "learning_rate": 5.041999407871168e-06,
      "loss": 0.5687,
      "step": 342
    },
    {
      "epoch": 1.6490384615384617,
      "grad_norm": 0.5340310335159302,
      "learning_rate": 5.01399994896879e-06,
      "loss": 0.5452,
      "step": 343
    },
| { | |
| "epoch": 1.6538461538461537, | |
| "grad_norm": 0.43904221057891846, | |
| "learning_rate": 4.986000051031212e-06, | |
| "loss": 0.5535, | |
| "step": 344 | |
| }, | |
| { | |
| "epoch": 1.6586538461538463, | |
| "grad_norm": 0.49031099677085876, | |
| "learning_rate": 4.958000592128834e-06, | |
| "loss": 0.534, | |
| "step": 345 | |
| }, | |
| { | |
| "epoch": 1.6634615384615383, | |
| "grad_norm": 0.4739511013031006, | |
| "learning_rate": 4.930002450318282e-06, | |
| "loss": 0.5489, | |
| "step": 346 | |
| }, | |
| { | |
| "epoch": 1.6682692307692308, | |
| "grad_norm": 0.4124240577220917, | |
| "learning_rate": 4.9020065036148885e-06, | |
| "loss": 0.5575, | |
| "step": 347 | |
| }, | |
| { | |
| "epoch": 1.6730769230769231, | |
| "grad_norm": 0.4636637270450592, | |
| "learning_rate": 4.874013629965138e-06, | |
| "loss": 0.5614, | |
| "step": 348 | |
| }, | |
| { | |
| "epoch": 1.6778846153846154, | |
| "grad_norm": 0.4664590060710907, | |
| "learning_rate": 4.846024707219149e-06, | |
| "loss": 0.5743, | |
| "step": 349 | |
| }, | |
| { | |
| "epoch": 1.6826923076923077, | |
| "grad_norm": 0.41771262884140015, | |
| "learning_rate": 4.818040613103139e-06, | |
| "loss": 0.5555, | |
| "step": 350 | |
| }, | |
| { | |
| "epoch": 1.6875, | |
| "grad_norm": 0.46983399987220764, | |
| "learning_rate": 4.790062225191902e-06, | |
| "loss": 0.5587, | |
| "step": 351 | |
| }, | |
| { | |
| "epoch": 1.6923076923076923, | |
| "grad_norm": 0.4576813578605652, | |
| "learning_rate": 4.762090420881289e-06, | |
| "loss": 0.5577, | |
| "step": 352 | |
| }, | |
| { | |
| "epoch": 1.6971153846153846, | |
| "grad_norm": 0.4811893403530121, | |
| "learning_rate": 4.734126077360685e-06, | |
| "loss": 0.5537, | |
| "step": 353 | |
| }, | |
| { | |
| "epoch": 1.7019230769230769, | |
| "grad_norm": 0.4063519239425659, | |
| "learning_rate": 4.706170071585513e-06, | |
| "loss": 0.5662, | |
| "step": 354 | |
| }, | |
| { | |
| "epoch": 1.7067307692307692, | |
| "grad_norm": 0.4614051580429077, | |
| "learning_rate": 4.678223280249718e-06, | |
| "loss": 0.5503, | |
| "step": 355 | |
| }, | |
| { | |
| "epoch": 1.7115384615384617, | |
| "grad_norm": 0.43803513050079346, | |
| "learning_rate": 4.650286579758291e-06, | |
| "loss": 0.5652, | |
| "step": 356 | |
| }, | |
| { | |
| "epoch": 1.7163461538461537, | |
| "grad_norm": 0.42544105648994446, | |
| "learning_rate": 4.622360846199772e-06, | |
| "loss": 0.5431, | |
| "step": 357 | |
| }, | |
| { | |
| "epoch": 1.7211538461538463, | |
| "grad_norm": 0.39839082956314087, | |
| "learning_rate": 4.594446955318781e-06, | |
| "loss": 0.555, | |
| "step": 358 | |
| }, | |
| { | |
| "epoch": 1.7259615384615383, | |
| "grad_norm": 0.38914695382118225, | |
| "learning_rate": 4.566545782488554e-06, | |
| "loss": 0.5623, | |
| "step": 359 | |
| }, | |
| { | |
| "epoch": 1.7307692307692308, | |
| "grad_norm": 0.410859078168869, | |
| "learning_rate": 4.53865820268349e-06, | |
| "loss": 0.5626, | |
| "step": 360 | |
| }, | |
| { | |
| "epoch": 1.7355769230769231, | |
| "grad_norm": 0.4289568364620209, | |
| "learning_rate": 4.510785090451719e-06, | |
| "loss": 0.5659, | |
| "step": 361 | |
| }, | |
| { | |
| "epoch": 1.7403846153846154, | |
| "grad_norm": 0.4343273639678955, | |
| "learning_rate": 4.482927319887669e-06, | |
| "loss": 0.5543, | |
| "step": 362 | |
| }, | |
| { | |
| "epoch": 1.7451923076923077, | |
| "grad_norm": 0.41860419511795044, | |
| "learning_rate": 4.455085764604653e-06, | |
| "loss": 0.5501, | |
| "step": 363 | |
| }, | |
| { | |
| "epoch": 1.75, | |
| "grad_norm": 0.4254235625267029, | |
| "learning_rate": 4.427261297707482e-06, | |
| "loss": 0.5478, | |
| "step": 364 | |
| }, | |
| { | |
| "epoch": 1.7548076923076923, | |
| "grad_norm": 0.420207142829895, | |
| "learning_rate": 4.399454791765076e-06, | |
| "loss": 0.5683, | |
| "step": 365 | |
| }, | |
| { | |
| "epoch": 1.7596153846153846, | |
| "grad_norm": 0.4237710237503052, | |
| "learning_rate": 4.371667118783101e-06, | |
| "loss": 0.5551, | |
| "step": 366 | |
| }, | |
| { | |
| "epoch": 1.7644230769230769, | |
| "grad_norm": 0.42474254965782166, | |
| "learning_rate": 4.343899150176635e-06, | |
| "loss": 0.5508, | |
| "step": 367 | |
| }, | |
| { | |
| "epoch": 1.7692307692307692, | |
| "grad_norm": 0.4262840449810028, | |
| "learning_rate": 4.316151756742821e-06, | |
| "loss": 0.5545, | |
| "step": 368 | |
| }, | |
| { | |
| "epoch": 1.7740384615384617, | |
| "grad_norm": 0.4713822603225708, | |
| "learning_rate": 4.2884258086335755e-06, | |
| "loss": 0.5632, | |
| "step": 369 | |
| }, | |
| { | |
| "epoch": 1.7788461538461537, | |
| "grad_norm": 0.465378999710083, | |
| "learning_rate": 4.26072217532829e-06, | |
| "loss": 0.5706, | |
| "step": 370 | |
| }, | |
| { | |
| "epoch": 1.7836538461538463, | |
| "grad_norm": 0.4259505271911621, | |
| "learning_rate": 4.233041725606573e-06, | |
| "loss": 0.5653, | |
| "step": 371 | |
| }, | |
| { | |
| "epoch": 1.7884615384615383, | |
| "grad_norm": 0.4873887300491333, | |
| "learning_rate": 4.205385327521002e-06, | |
| "loss": 0.5547, | |
| "step": 372 | |
| }, | |
| { | |
| "epoch": 1.7932692307692308, | |
| "grad_norm": 0.5444599390029907, | |
| "learning_rate": 4.177753848369892e-06, | |
| "loss": 0.5635, | |
| "step": 373 | |
| }, | |
| { | |
| "epoch": 1.7980769230769231, | |
| "grad_norm": 0.4417326748371124, | |
| "learning_rate": 4.1501481546701185e-06, | |
| "loss": 0.5542, | |
| "step": 374 | |
| }, | |
| { | |
| "epoch": 1.8028846153846154, | |
| "grad_norm": 0.42065057158470154, | |
| "learning_rate": 4.12256911212992e-06, | |
| "loss": 0.5712, | |
| "step": 375 | |
| }, | |
| { | |
| "epoch": 1.8076923076923077, | |
| "grad_norm": 0.4802209436893463, | |
| "learning_rate": 4.095017585621767e-06, | |
| "loss": 0.5762, | |
| "step": 376 | |
| }, | |
| { | |
| "epoch": 1.8125, | |
| "grad_norm": 0.47133877873420715, | |
| "learning_rate": 4.067494439155236e-06, | |
| "loss": 0.5706, | |
| "step": 377 | |
| }, | |
| { | |
| "epoch": 1.8173076923076923, | |
| "grad_norm": 0.42150482535362244, | |
| "learning_rate": 4.0400005358499e-06, | |
| "loss": 0.5549, | |
| "step": 378 | |
| }, | |
| { | |
| "epoch": 1.8221153846153846, | |
| "grad_norm": 0.48820599913597107, | |
| "learning_rate": 4.012536737908288e-06, | |
| "loss": 0.5646, | |
| "step": 379 | |
| }, | |
| { | |
| "epoch": 1.8269230769230769, | |
| "grad_norm": 0.46589240431785583, | |
| "learning_rate": 3.985103906588821e-06, | |
| "loss": 0.5521, | |
| "step": 380 | |
| }, | |
| { | |
| "epoch": 1.8317307692307692, | |
| "grad_norm": 0.43386465311050415, | |
| "learning_rate": 3.957702902178816e-06, | |
| "loss": 0.5603, | |
| "step": 381 | |
| }, | |
| { | |
| "epoch": 1.8365384615384617, | |
| "grad_norm": 0.438164085149765, | |
| "learning_rate": 3.930334583967514e-06, | |
| "loss": 0.5547, | |
| "step": 382 | |
| }, | |
| { | |
| "epoch": 1.8413461538461537, | |
| "grad_norm": 0.43723925948143005, | |
| "learning_rate": 3.902999810219109e-06, | |
| "loss": 0.5277, | |
| "step": 383 | |
| }, | |
| { | |
| "epoch": 1.8461538461538463, | |
| "grad_norm": 0.4327050745487213, | |
| "learning_rate": 3.875699438145862e-06, | |
| "loss": 0.5426, | |
| "step": 384 | |
| }, | |
| { | |
| "epoch": 1.8509615384615383, | |
| "grad_norm": 0.4460081160068512, | |
| "learning_rate": 3.8484343238811976e-06, | |
| "loss": 0.5478, | |
| "step": 385 | |
| }, | |
| { | |
| "epoch": 1.8557692307692308, | |
| "grad_norm": 0.40871721506118774, | |
| "learning_rate": 3.821205322452863e-06, | |
| "loss": 0.5674, | |
| "step": 386 | |
| }, | |
| { | |
| "epoch": 1.8605769230769231, | |
| "grad_norm": 0.39245539903640747, | |
| "learning_rate": 3.794013287756125e-06, | |
| "loss": 0.5587, | |
| "step": 387 | |
| }, | |
| { | |
| "epoch": 1.8653846153846154, | |
| "grad_norm": 0.4072204828262329, | |
| "learning_rate": 3.766859072526969e-06, | |
| "loss": 0.5503, | |
| "step": 388 | |
| }, | |
| { | |
| "epoch": 1.8701923076923077, | |
| "grad_norm": 0.4407781958580017, | |
| "learning_rate": 3.7397435283153795e-06, | |
| "loss": 0.5612, | |
| "step": 389 | |
| }, | |
| { | |
| "epoch": 1.875, | |
| "grad_norm": 0.4392929673194885, | |
| "learning_rate": 3.712667505458622e-06, | |
| "loss": 0.5543, | |
| "step": 390 | |
| }, | |
| { | |
| "epoch": 1.8798076923076923, | |
| "grad_norm": 0.4082246720790863, | |
| "learning_rate": 3.685631853054583e-06, | |
| "loss": 0.5384, | |
| "step": 391 | |
| }, | |
| { | |
| "epoch": 1.8846153846153846, | |
| "grad_norm": 0.40940192341804504, | |
| "learning_rate": 3.658637418935146e-06, | |
| "loss": 0.5586, | |
| "step": 392 | |
| }, | |
| { | |
| "epoch": 1.8894230769230769, | |
| "grad_norm": 0.4302951395511627, | |
| "learning_rate": 3.6316850496395863e-06, | |
| "loss": 0.5541, | |
| "step": 393 | |
| }, | |
| { | |
| "epoch": 1.8942307692307692, | |
| "grad_norm": 0.472651869058609, | |
| "learning_rate": 3.6047755903880478e-06, | |
| "loss": 0.5523, | |
| "step": 394 | |
| }, | |
| { | |
| "epoch": 1.8990384615384617, | |
| "grad_norm": 0.4774600863456726, | |
| "learning_rate": 3.577909885055019e-06, | |
| "loss": 0.5551, | |
| "step": 395 | |
| }, | |
| { | |
| "epoch": 1.9038461538461537, | |
| "grad_norm": 0.40797290205955505, | |
| "learning_rate": 3.5510887761428764e-06, | |
| "loss": 0.5675, | |
| "step": 396 | |
| }, | |
| { | |
| "epoch": 1.9086538461538463, | |
| "grad_norm": 0.4044354557991028, | |
| "learning_rate": 3.524313104755468e-06, | |
| "loss": 0.5804, | |
| "step": 397 | |
| }, | |
| { | |
| "epoch": 1.9134615384615383, | |
| "grad_norm": 0.48422014713287354, | |
| "learning_rate": 3.4975837105717203e-06, | |
| "loss": 0.5551, | |
| "step": 398 | |
| }, | |
| { | |
| "epoch": 1.9182692307692308, | |
| "grad_norm": 0.4386497735977173, | |
| "learning_rate": 3.4709014318193298e-06, | |
| "loss": 0.5637, | |
| "step": 399 | |
| }, | |
| { | |
| "epoch": 1.9230769230769231, | |
| "grad_norm": 0.41975468397140503, | |
| "learning_rate": 3.4442671052484545e-06, | |
| "loss": 0.5714, | |
| "step": 400 | |
| }, | |
| { | |
| "epoch": 1.9278846153846154, | |
| "grad_norm": 0.37439560890197754, | |
| "learning_rate": 3.4176815661054884e-06, | |
| "loss": 0.5422, | |
| "step": 401 | |
| }, | |
| { | |
| "epoch": 1.9326923076923077, | |
| "grad_norm": 0.40816617012023926, | |
| "learning_rate": 3.3911456481068613e-06, | |
| "loss": 0.5438, | |
| "step": 402 | |
| }, | |
| { | |
| "epoch": 1.9375, | |
| "grad_norm": 0.47742047905921936, | |
| "learning_rate": 3.3646601834128924e-06, | |
| "loss": 0.5584, | |
| "step": 403 | |
| }, | |
| { | |
| "epoch": 1.9423076923076923, | |
| "grad_norm": 0.44102102518081665, | |
| "learning_rate": 3.3382260026017027e-06, | |
| "loss": 0.5481, | |
| "step": 404 | |
| }, | |
| { | |
| "epoch": 1.9471153846153846, | |
| "grad_norm": 0.39150795340538025, | |
| "learning_rate": 3.311843934643157e-06, | |
| "loss": 0.5545, | |
| "step": 405 | |
| }, | |
| { | |
| "epoch": 1.9519230769230769, | |
| "grad_norm": 0.4709937572479248, | |
| "learning_rate": 3.2855148068728753e-06, | |
| "loss": 0.5654, | |
| "step": 406 | |
| }, | |
| { | |
| "epoch": 1.9567307692307692, | |
| "grad_norm": 0.4761818051338196, | |
| "learning_rate": 3.2592394449662867e-06, | |
| "loss": 0.5343, | |
| "step": 407 | |
| }, | |
| { | |
| "epoch": 1.9615384615384617, | |
| "grad_norm": 0.44496825337409973, | |
| "learning_rate": 3.233018672912731e-06, | |
| "loss": 0.5527, | |
| "step": 408 | |
| }, | |
| { | |
| "epoch": 1.9663461538461537, | |
| "grad_norm": 0.5426850318908691, | |
| "learning_rate": 3.2068533129896273e-06, | |
| "loss": 0.5473, | |
| "step": 409 | |
| }, | |
| { | |
| "epoch": 1.9711538461538463, | |
| "grad_norm": 0.499978244304657, | |
| "learning_rate": 3.1807441857366798e-06, | |
| "loss": 0.5656, | |
| "step": 410 | |
| }, | |
| { | |
| "epoch": 1.9759615384615383, | |
| "grad_norm": 0.41915661096572876, | |
| "learning_rate": 3.1546921099301507e-06, | |
| "loss": 0.5797, | |
| "step": 411 | |
| }, | |
| { | |
| "epoch": 1.9807692307692308, | |
| "grad_norm": 0.4491116404533386, | |
| "learning_rate": 3.1286979025571817e-06, | |
| "loss": 0.5662, | |
| "step": 412 | |
| }, | |
| { | |
| "epoch": 1.9855769230769231, | |
| "grad_norm": 0.42887210845947266, | |
| "learning_rate": 3.1027623787901706e-06, | |
| "loss": 0.5593, | |
| "step": 413 | |
| }, | |
| { | |
| "epoch": 1.9903846153846154, | |
| "grad_norm": 0.445030152797699, | |
| "learning_rate": 3.076886351961217e-06, | |
| "loss": 0.547, | |
| "step": 414 | |
| }, | |
| { | |
| "epoch": 1.9951923076923077, | |
| "grad_norm": 0.47931942343711853, | |
| "learning_rate": 3.0510706335366034e-06, | |
| "loss": 0.5684, | |
| "step": 415 | |
| }, | |
| { | |
| "epoch": 2.0, | |
| "grad_norm": 0.4522424638271332, | |
| "learning_rate": 3.02531603309136e-06, | |
| "loss": 0.5519, | |
| "step": 416 | |
| }, | |
| { | |
| "epoch": 2.0048076923076925, | |
| "grad_norm": 0.42797061800956726, | |
| "learning_rate": 2.9996233582838686e-06, | |
| "loss": 0.5399, | |
| "step": 417 | |
| }, | |
| { | |
| "epoch": 2.0096153846153846, | |
| "grad_norm": 0.46265673637390137, | |
| "learning_rate": 2.973993414830534e-06, | |
| "loss": 0.5101, | |
| "step": 418 | |
| }, | |
| { | |
| "epoch": 2.014423076923077, | |
| "grad_norm": 0.46875470876693726, | |
| "learning_rate": 2.948427006480528e-06, | |
| "loss": 0.5288, | |
| "step": 419 | |
| }, | |
| { | |
| "epoch": 2.019230769230769, | |
| "grad_norm": 0.3783641755580902, | |
| "learning_rate": 2.9229249349905686e-06, | |
| "loss": 0.5254, | |
| "step": 420 | |
| }, | |
| { | |
| "epoch": 2.0240384615384617, | |
| "grad_norm": 0.4915563464164734, | |
| "learning_rate": 2.897488000099788e-06, | |
| "loss": 0.5348, | |
| "step": 421 | |
| }, | |
| { | |
| "epoch": 2.0288461538461537, | |
| "grad_norm": 0.46728116273880005, | |
| "learning_rate": 2.8721169995046503e-06, | |
| "loss": 0.5388, | |
| "step": 422 | |
| }, | |
| { | |
| "epoch": 2.0336538461538463, | |
| "grad_norm": 0.4580628275871277, | |
| "learning_rate": 2.846812728833931e-06, | |
| "loss": 0.5255, | |
| "step": 423 | |
| }, | |
| { | |
| "epoch": 2.0384615384615383, | |
| "grad_norm": 0.42225995659828186, | |
| "learning_rate": 2.8215759816237748e-06, | |
| "loss": 0.5206, | |
| "step": 424 | |
| }, | |
| { | |
| "epoch": 2.043269230769231, | |
| "grad_norm": 0.4477675259113312, | |
| "learning_rate": 2.796407549292809e-06, | |
| "loss": 0.503, | |
| "step": 425 | |
| }, | |
| { | |
| "epoch": 2.048076923076923, | |
| "grad_norm": 0.4319400489330292, | |
| "learning_rate": 2.771308221117309e-06, | |
| "loss": 0.5187, | |
| "step": 426 | |
| }, | |
| { | |
| "epoch": 2.0528846153846154, | |
| "grad_norm": 0.4639637768268585, | |
| "learning_rate": 2.7462787842064753e-06, | |
| "loss": 0.5327, | |
| "step": 427 | |
| }, | |
| { | |
| "epoch": 2.0576923076923075, | |
| "grad_norm": 0.3952256739139557, | |
| "learning_rate": 2.7213200234777215e-06, | |
| "loss": 0.5132, | |
| "step": 428 | |
| }, | |
| { | |
| "epoch": 2.0625, | |
| "grad_norm": 0.38108691573143005, | |
| "learning_rate": 2.696432721632082e-06, | |
| "loss": 0.5536, | |
| "step": 429 | |
| }, | |
| { | |
| "epoch": 2.0673076923076925, | |
| "grad_norm": 0.3831665515899658, | |
| "learning_rate": 2.671617659129655e-06, | |
| "loss": 0.5069, | |
| "step": 430 | |
| }, | |
| { | |
| "epoch": 2.0721153846153846, | |
| "grad_norm": 0.38643160462379456, | |
| "learning_rate": 2.646875614165121e-06, | |
| "loss": 0.5222, | |
| "step": 431 | |
| }, | |
| { | |
| "epoch": 2.076923076923077, | |
| "grad_norm": 0.40774914622306824, | |
| "learning_rate": 2.6222073626433587e-06, | |
| "loss": 0.5208, | |
| "step": 432 | |
| }, | |
| { | |
| "epoch": 2.081730769230769, | |
| "grad_norm": 0.40251675248146057, | |
| "learning_rate": 2.597613678155092e-06, | |
| "loss": 0.513, | |
| "step": 433 | |
| }, | |
| { | |
| "epoch": 2.0865384615384617, | |
| "grad_norm": 0.38830575346946716, | |
| "learning_rate": 2.573095331952646e-06, | |
| "loss": 0.5445, | |
| "step": 434 | |
| }, | |
| { | |
| "epoch": 2.0913461538461537, | |
| "grad_norm": 0.4013608694076538, | |
| "learning_rate": 2.5486530929257574e-06, | |
| "loss": 0.5285, | |
| "step": 435 | |
| }, | |
| { | |
| "epoch": 2.0961538461538463, | |
| "grad_norm": 0.3629082441329956, | |
| "learning_rate": 2.5242877275774446e-06, | |
| "loss": 0.532, | |
| "step": 436 | |
| }, | |
| { | |
| "epoch": 2.1009615384615383, | |
| "grad_norm": 0.40173375606536865, | |
| "learning_rate": 2.5000000000000015e-06, | |
| "loss": 0.529, | |
| "step": 437 | |
| }, | |
| { | |
| "epoch": 2.105769230769231, | |
| "grad_norm": 0.3868687152862549, | |
| "learning_rate": 2.475790671851007e-06, | |
| "loss": 0.5278, | |
| "step": 438 | |
| }, | |
| { | |
| "epoch": 2.110576923076923, | |
| "grad_norm": 0.41604334115982056, | |
| "learning_rate": 2.4516605023294626e-06, | |
| "loss": 0.5284, | |
| "step": 439 | |
| }, | |
| { | |
| "epoch": 2.1153846153846154, | |
| "grad_norm": 0.43302401900291443, | |
| "learning_rate": 2.4276102481519655e-06, | |
| "loss": 0.5286, | |
| "step": 440 | |
| }, | |
| { | |
| "epoch": 2.1201923076923075, | |
| "grad_norm": 0.36202147603034973, | |
| "learning_rate": 2.403640663528986e-06, | |
| "loss": 0.5415, | |
| "step": 441 | |
| }, | |
| { | |
| "epoch": 2.125, | |
| "grad_norm": 0.36743971705436707, | |
| "learning_rate": 2.379752500141222e-06, | |
| "loss": 0.5429, | |
| "step": 442 | |
| }, | |
| { | |
| "epoch": 2.1298076923076925, | |
| "grad_norm": 0.4273446202278137, | |
| "learning_rate": 2.355946507116012e-06, | |
| "loss": 0.5329, | |
| "step": 443 | |
| }, | |
| { | |
| "epoch": 2.1346153846153846, | |
| "grad_norm": 0.4194796681404114, | |
| "learning_rate": 2.332223431003859e-06, | |
| "loss": 0.547, | |
| "step": 444 | |
| }, | |
| { | |
| "epoch": 2.139423076923077, | |
| "grad_norm": 0.3628264367580414, | |
| "learning_rate": 2.3085840157550036e-06, | |
| "loss": 0.5254, | |
| "step": 445 | |
| }, | |
| { | |
| "epoch": 2.144230769230769, | |
| "grad_norm": 0.3705046772956848, | |
| "learning_rate": 2.2850290026961032e-06, | |
| "loss": 0.5359, | |
| "step": 446 | |
| }, | |
| { | |
| "epoch": 2.1490384615384617, | |
| "grad_norm": 0.39662665128707886, | |
| "learning_rate": 2.2615591305069846e-06, | |
| "loss": 0.5121, | |
| "step": 447 | |
| }, | |
| { | |
| "epoch": 2.1538461538461537, | |
| "grad_norm": 0.3908136785030365, | |
| "learning_rate": 2.238175135197471e-06, | |
| "loss": 0.5438, | |
| "step": 448 | |
| }, | |
| { | |
| "epoch": 2.1586538461538463, | |
| "grad_norm": 0.36550912261009216, | |
| "learning_rate": 2.2148777500843125e-06, | |
| "loss": 0.5169, | |
| "step": 449 | |
| }, | |
| { | |
| "epoch": 2.1634615384615383, | |
| "grad_norm": 0.39665716886520386, | |
| "learning_rate": 2.1916677057681786e-06, | |
| "loss": 0.4957, | |
| "step": 450 | |
| }, | |
| { | |
| "epoch": 2.168269230769231, | |
| "grad_norm": 0.3526061475276947, | |
| "learning_rate": 2.1685457301107506e-06, | |
| "loss": 0.5355, | |
| "step": 451 | |
| }, | |
| { | |
| "epoch": 2.173076923076923, | |
| "grad_norm": 0.3616856336593628, | |
| "learning_rate": 2.145512548211902e-06, | |
| "loss": 0.5136, | |
| "step": 452 | |
| }, | |
| { | |
| "epoch": 2.1778846153846154, | |
| "grad_norm": 0.36202114820480347, | |
| "learning_rate": 2.1225688823869494e-06, | |
| "loss": 0.5107, | |
| "step": 453 | |
| }, | |
| { | |
| "epoch": 2.1826923076923075, | |
| "grad_norm": 0.38488027453422546, | |
| "learning_rate": 2.09971545214401e-06, | |
| "loss": 0.5279, | |
| "step": 454 | |
| }, | |
| { | |
| "epoch": 2.1875, | |
| "grad_norm": 0.38552579283714294, | |
| "learning_rate": 2.0769529741614297e-06, | |
| "loss": 0.5216, | |
| "step": 455 | |
| }, | |
| { | |
| "epoch": 2.1923076923076925, | |
| "grad_norm": 0.40065112709999084, | |
| "learning_rate": 2.054282162265313e-06, | |
| "loss": 0.5092, | |
| "step": 456 | |
| }, | |
| { | |
| "epoch": 2.1971153846153846, | |
| "grad_norm": 0.3517535924911499, | |
| "learning_rate": 2.0317037274071412e-06, | |
| "loss": 0.5175, | |
| "step": 457 | |
| }, | |
| { | |
| "epoch": 2.201923076923077, | |
| "grad_norm": 0.3569267988204956, | |
| "learning_rate": 2.009218377641466e-06, | |
| "loss": 0.5361, | |
| "step": 458 | |
| }, | |
| { | |
| "epoch": 2.206730769230769, | |
| "grad_norm": 0.3930048942565918, | |
| "learning_rate": 1.9868268181037186e-06, | |
| "loss": 0.5252, | |
| "step": 459 | |
| }, | |
| { | |
| "epoch": 2.2115384615384617, | |
| "grad_norm": 0.37880074977874756, | |
| "learning_rate": 1.964529750988086e-06, | |
| "loss": 0.5251, | |
| "step": 460 | |
| }, | |
| { | |
| "epoch": 2.2163461538461537, | |
| "grad_norm": 0.38495028018951416, | |
| "learning_rate": 1.9423278755254933e-06, | |
| "loss": 0.532, | |
| "step": 461 | |
| }, | |
| { | |
| "epoch": 2.2211538461538463, | |
| "grad_norm": 0.3525002598762512, | |
| "learning_rate": 1.9202218879616824e-06, | |
| "loss": 0.5215, | |
| "step": 462 | |
| }, | |
| { | |
| "epoch": 2.2259615384615383, | |
| "grad_norm": 0.3603316843509674, | |
| "learning_rate": 1.8982124815353665e-06, | |
| "loss": 0.5221, | |
| "step": 463 | |
| }, | |
| { | |
| "epoch": 2.230769230769231, | |
| "grad_norm": 0.35701784491539, | |
| "learning_rate": 1.8763003464565022e-06, | |
| "loss": 0.5117, | |
| "step": 464 | |
| }, | |
| { | |
| "epoch": 2.235576923076923, | |
| "grad_norm": 0.3503670394420624, | |
| "learning_rate": 1.854486169884635e-06, | |
| "loss": 0.5457, | |
| "step": 465 | |
| }, | |
| { | |
| "epoch": 2.2403846153846154, | |
| "grad_norm": 0.3608977198600769, | |
| "learning_rate": 1.8327706359073526e-06, | |
| "loss": 0.5061, | |
| "step": 466 | |
| }, | |
| { | |
| "epoch": 2.2451923076923075, | |
| "grad_norm": 0.3715517222881317, | |
| "learning_rate": 1.8111544255188402e-06, | |
| "loss": 0.5429, | |
| "step": 467 | |
| }, | |
| { | |
| "epoch": 2.25, | |
| "grad_norm": 0.393916517496109, | |
| "learning_rate": 1.7896382165985094e-06, | |
| "loss": 0.5272, | |
| "step": 468 | |
| }, | |
| { | |
| "epoch": 2.2548076923076925, | |
| "grad_norm": 0.36094385385513306, | |
| "learning_rate": 1.768222683889757e-06, | |
| "loss": 0.5299, | |
| "step": 469 | |
| }, | |
| { | |
| "epoch": 2.2596153846153846, | |
| "grad_norm": 0.3601950407028198, | |
| "learning_rate": 1.746908498978791e-06, | |
| "loss": 0.5251, | |
| "step": 470 | |
| }, | |
| { | |
| "epoch": 2.264423076923077, | |
| "grad_norm": 0.3479694426059723, | |
| "learning_rate": 1.7256963302735752e-06, | |
| "loss": 0.5398, | |
| "step": 471 | |
| }, | |
| { | |
| "epoch": 2.269230769230769, | |
| "grad_norm": 0.3505624532699585, | |
| "learning_rate": 1.7045868429828745e-06, | |
| "loss": 0.4987, | |
| "step": 472 | |
| }, | |
| { | |
| "epoch": 2.2740384615384617, | |
| "grad_norm": 0.3886091411113739, | |
| "learning_rate": 1.6835806990953802e-06, | |
| "loss": 0.5267, | |
| "step": 473 | |
| }, | |
| { | |
| "epoch": 2.2788461538461537, | |
| "grad_norm": 0.37276989221572876, | |
| "learning_rate": 1.6626785573589667e-06, | |
| "loss": 0.5153, | |
| "step": 474 | |
| }, | |
| { | |
| "epoch": 2.2836538461538463, | |
| "grad_norm": 0.3589704930782318, | |
| "learning_rate": 1.6418810732600177e-06, | |
| "loss": 0.513, | |
| "step": 475 | |
| }, | |
| { | |
| "epoch": 2.2884615384615383, | |
| "grad_norm": 0.3904787302017212, | |
| "learning_rate": 1.6211888990028785e-06, | |
| "loss": 0.5202, | |
| "step": 476 | |
| }, | |
| { | |
| "epoch": 2.293269230769231, | |
| "grad_norm": 0.3644522726535797, | |
| "learning_rate": 1.6006026834894068e-06, | |
| "loss": 0.521, | |
| "step": 477 | |
| }, | |
| { | |
| "epoch": 2.298076923076923, | |
| "grad_norm": 0.3736514151096344, | |
| "learning_rate": 1.5801230722986104e-06, | |
| "loss": 0.5056, | |
| "step": 478 | |
| }, | |
| { | |
| "epoch": 2.3028846153846154, | |
| "grad_norm": 0.35614803433418274, | |
| "learning_rate": 1.5597507076664187e-06, | |
| "loss": 0.5307, | |
| "step": 479 | |
| }, | |
| { | |
| "epoch": 2.3076923076923075, | |
| "grad_norm": 0.35307708382606506, | |
| "learning_rate": 1.5394862284655266e-06, | |
| "loss": 0.5279, | |
| "step": 480 | |
| }, | |
| { | |
| "epoch": 2.3125, | |
| "grad_norm": 0.37877628207206726, | |
| "learning_rate": 1.5193302701853674e-06, | |
| "loss": 0.5286, | |
| "step": 481 | |
| }, | |
| { | |
| "epoch": 2.3173076923076925, | |
| "grad_norm": 0.35032036900520325, | |
| "learning_rate": 1.499283464912188e-06, | |
| "loss": 0.5401, | |
| "step": 482 | |
| }, | |
| { | |
| "epoch": 2.3221153846153846, | |
| "grad_norm": 0.3594701886177063, | |
| "learning_rate": 1.4793464413092161e-06, | |
| "loss": 0.5239, | |
| "step": 483 | |
| }, | |
| { | |
| "epoch": 2.326923076923077, | |
| "grad_norm": 0.35576295852661133, | |
| "learning_rate": 1.459519824596956e-06, | |
| "loss": 0.5325, | |
| "step": 484 | |
| }, | |
| { | |
| "epoch": 2.331730769230769, | |
| "grad_norm": 0.38037362694740295, | |
| "learning_rate": 1.4398042365335745e-06, | |
| "loss": 0.5181, | |
| "step": 485 | |
| }, | |
| { | |
| "epoch": 2.3365384615384617, | |
| "grad_norm": 0.3730925917625427, | |
| "learning_rate": 1.4202002953954042e-06, | |
| "loss": 0.5153, | |
| "step": 486 | |
| }, | |
| { | |
| "epoch": 2.3413461538461537, | |
| "grad_norm": 0.3507210612297058, | |
| "learning_rate": 1.4007086159575595e-06, | |
| "loss": 0.5119, | |
| "step": 487 | |
| }, | |
| { | |
| "epoch": 2.3461538461538463, | |
| "grad_norm": 0.3383399248123169, | |
| "learning_rate": 1.3813298094746491e-06, | |
| "loss": 0.5458, | |
| "step": 488 | |
| }, | |
| { | |
| "epoch": 2.3509615384615383, | |
| "grad_norm": 0.34395211935043335, | |
| "learning_rate": 1.362064483661617e-06, | |
| "loss": 0.5318, | |
| "step": 489 | |
| }, | |
| { | |
| "epoch": 2.355769230769231, | |
| "grad_norm": 0.3459123373031616, | |
| "learning_rate": 1.3429132426746743e-06, | |
| "loss": 0.5194, | |
| "step": 490 | |
| }, | |
| { | |
| "epoch": 2.360576923076923, | |
| "grad_norm": 0.38007426261901855, | |
| "learning_rate": 1.3238766870923592e-06, | |
| "loss": 0.5311, | |
| "step": 491 | |
| }, | |
| { | |
| "epoch": 2.3653846153846154, | |
| "grad_norm": 0.3795618414878845, | |
| "learning_rate": 1.3049554138967052e-06, | |
| "loss": 0.5362, | |
| "step": 492 | |
| }, | |
| { | |
| "epoch": 2.3701923076923075, | |
| "grad_norm": 0.3444887101650238, | |
| "learning_rate": 1.286150016454511e-06, | |
| "loss": 0.5263, | |
| "step": 493 | |
| }, | |
| { | |
| "epoch": 2.375, | |
| "grad_norm": 0.39823612570762634, | |
| "learning_rate": 1.267461084498744e-06, | |
| "loss": 0.5171, | |
| "step": 494 | |
| }, | |
| { | |
| "epoch": 2.3798076923076925, | |
| "grad_norm": 0.3500751852989197, | |
| "learning_rate": 1.2488892041100364e-06, | |
| "loss": 0.5099, | |
| "step": 495 | |
| }, | |
| { | |
| "epoch": 2.3846153846153846, | |
| "grad_norm": 0.34816107153892517, | |
| "learning_rate": 1.2304349576983094e-06, | |
| "loss": 0.5343, | |
| "step": 496 | |
| }, | |
| { | |
| "epoch": 2.389423076923077, | |
| "grad_norm": 0.3754243552684784, | |
| "learning_rate": 1.2120989239845149e-06, | |
| "loss": 0.5265, | |
| "step": 497 | |
| }, | |
| { | |
| "epoch": 2.394230769230769, | |
| "grad_norm": 0.32973939180374146, | |
| "learning_rate": 1.1938816779824753e-06, | |
| "loss": 0.5437, | |
| "step": 498 | |
| }, | |
| { | |
| "epoch": 2.3990384615384617, | |
| "grad_norm": 0.37367406487464905, | |
| "learning_rate": 1.1757837909808628e-06, | |
| "loss": 0.5212, | |
| "step": 499 | |
| }, | |
| { | |
| "epoch": 2.4038461538461537, | |
| "grad_norm": 0.34787219762802124, | |
| "learning_rate": 1.157805830525275e-06, | |
| "loss": 0.5225, | |
| "step": 500 | |
| }, | |
| { | |
| "epoch": 2.4086538461538463, | |
| "grad_norm": 0.3542671799659729, | |
| "learning_rate": 1.1399483604004403e-06, | |
| "loss": 0.5301, | |
| "step": 501 | |
| }, | |
| { | |
| "epoch": 2.4134615384615383, | |
| "grad_norm": 0.36626604199409485, | |
| "learning_rate": 1.1222119406125426e-06, | |
| "loss": 0.5234, | |
| "step": 502 | |
| }, | |
| { | |
| "epoch": 2.418269230769231, | |
| "grad_norm": 0.3557561933994293, | |
| "learning_rate": 1.1045971273716476e-06, | |
| "loss": 0.5316, | |
| "step": 503 | |
| }, | |
| { | |
| "epoch": 2.423076923076923, | |
| "grad_norm": 0.3680206835269928, | |
| "learning_rate": 1.0871044730742752e-06, | |
| "loss": 0.5261, | |
| "step": 504 | |
| }, | |
| { | |
| "epoch": 2.4278846153846154, | |
| "grad_norm": 0.39090457558631897, | |
| "learning_rate": 1.0697345262860638e-06, | |
| "loss": 0.5279, | |
| "step": 505 | |
| }, | |
| { | |
| "epoch": 2.4326923076923075, | |
| "grad_norm": 0.33717867732048035, | |
| "learning_rate": 1.0524878317245713e-06, | |
| "loss": 0.5218, | |
| "step": 506 | |
| }, | |
| { | |
| "epoch": 2.4375, | |
| "grad_norm": 0.357800155878067, | |
| "learning_rate": 1.0353649302421982e-06, | |
| "loss": 0.5227, | |
| "step": 507 | |
| }, | |
| { | |
| "epoch": 2.4423076923076925, | |
| "grad_norm": 0.34802302718162537, | |
| "learning_rate": 1.0183663588092214e-06, | |
| "loss": 0.5566, | |
| "step": 508 | |
| }, | |
| { | |
| "epoch": 2.4471153846153846, | |
| "grad_norm": 0.3453739583492279, | |
| "learning_rate": 1.0014926504969535e-06, | |
| "loss": 0.5259, | |
| "step": 509 | |
| }, | |
| { | |
| "epoch": 2.451923076923077, | |
| "grad_norm": 0.3763096034526825, | |
| "learning_rate": 9.847443344610296e-07, | |
| "loss": 0.5201, | |
| "step": 510 | |
| }, | |
| { | |
| "epoch": 2.456730769230769, | |
| "grad_norm": 0.3687525689601898, | |
| "learning_rate": 9.681219359248106e-07, | |
| "loss": 0.5224, | |
| "step": 511 | |
| }, | |
| { | |
| "epoch": 2.4615384615384617, | |
| "grad_norm": 0.3717356026172638, | |
| "learning_rate": 9.516259761629148e-07, | |
| "loss": 0.5145, | |
| "step": 512 | |
| }, | |
| { | |
| "epoch": 2.4663461538461537, | |
| "grad_norm": 0.36599400639533997, | |
| "learning_rate": 9.352569724848715e-07, | |
| "loss": 0.5086, | |
| "step": 513 | |
| }, | |
| { | |
| "epoch": 2.4711538461538463, | |
| "grad_norm": 0.33455225825309753, | |
| "learning_rate": 9.190154382188921e-07, | |
| "loss": 0.5148, | |
| "step": 514 | |
| }, | |
| { | |
| "epoch": 2.4759615384615383, | |
| "grad_norm": 0.3153638243675232, | |
| "learning_rate": 9.029018826957775e-07, | |
| "loss": 0.5286, | |
| "step": 515 | |
| }, | |
| { | |
| "epoch": 2.480769230769231, | |
| "grad_norm": 0.3458288609981537, | |
| "learning_rate": 8.86916811232944e-07, | |
| "loss": 0.5337, | |
| "step": 516 | |
| }, | |
| { | |
| "epoch": 2.485576923076923, | |
| "grad_norm": 0.33229777216911316, | |
| "learning_rate": 8.710607251185799e-07, | |
| "loss": 0.5443, | |
| "step": 517 | |
| }, | |
| { | |
| "epoch": 2.4903846153846154, | |
| "grad_norm": 0.3534226417541504, | |
| "learning_rate": 8.553341215959215e-07, | |
| "loss": 0.5391, | |
| "step": 518 | |
| }, | |
| { | |
| "epoch": 2.4951923076923075, | |
| "grad_norm": 0.3250564634799957, | |
| "learning_rate": 8.397374938476594e-07, | |
| "loss": 0.5482, | |
| "step": 519 | |
| }, | |
| { | |
| "epoch": 2.5, | |
| "grad_norm": 0.3450680375099182, | |
| "learning_rate": 8.242713309804729e-07, | |
| "loss": 0.5215, | |
| "step": 520 | |
| }, | |
| { | |
| "epoch": 2.5048076923076925, | |
| "grad_norm": 0.3476369380950928, | |
| "learning_rate": 8.089361180096927e-07, | |
| "loss": 0.5308, | |
| "step": 521 | |
| }, | |
| { | |
| "epoch": 2.5096153846153846, | |
| "grad_norm": 0.35948845744132996, | |
| "learning_rate": 7.937323358440935e-07, | |
| "loss": 0.5019, | |
| "step": 522 | |
| }, | |
| { | |
| "epoch": 2.5144230769230766, | |
| "grad_norm": 0.3242650628089905, | |
| "learning_rate": 7.786604612708093e-07, | |
| "loss": 0.5317, | |
| "step": 523 | |
| }, | |
| { | |
| "epoch": 2.519230769230769, | |
| "grad_norm": 0.3232422173023224, | |
| "learning_rate": 7.637209669403789e-07, | |
| "loss": 0.5165, | |
| "step": 524 | |
| }, | |
| { | |
| "epoch": 2.5240384615384617, | |
| "grad_norm": 0.32376691699028015, | |
| "learning_rate": 7.489143213519301e-07, | |
| "loss": 0.5199, | |
| "step": 525 | |
| }, | |
| { | |
| "epoch": 2.5288461538461537, | |
| "grad_norm": 0.3350430130958557, | |
| "learning_rate": 7.342409888384816e-07, | |
| "loss": 0.5087, | |
| "step": 526 | |
| }, | |
| { | |
| "epoch": 2.5336538461538463, | |
| "grad_norm": 0.3153380751609802, | |
| "learning_rate": 7.197014295523879e-07, | |
| "loss": 0.5377, | |
| "step": 527 | |
| }, | |
| { | |
| "epoch": 2.5384615384615383, | |
| "grad_norm": 0.36297881603240967, | |
| "learning_rate": 7.052960994509056e-07, | |
| "loss": 0.5238, | |
| "step": 528 | |
| }, | |
| { | |
| "epoch": 2.543269230769231, | |
| "grad_norm": 0.34906795620918274, | |
| "learning_rate": 6.910254502818914e-07, | |
| "loss": 0.5119, | |
| "step": 529 | |
| }, | |
| { | |
| "epoch": 2.5480769230769234, | |
| "grad_norm": 0.3447514474391937, | |
| "learning_rate": 6.768899295696413e-07, | |
| "loss": 0.5261, | |
| "step": 530 | |
| }, | |
| { | |
| "epoch": 2.5528846153846154, | |
| "grad_norm": 0.3420708477497101, | |
| "learning_rate": 6.628899806008515e-07, | |
| "loss": 0.5201, | |
| "step": 531 | |
| }, | |
| { | |
| "epoch": 2.5576923076923075, | |
| "grad_norm": 0.35410743951797485, | |
| "learning_rate": 6.490260424107231e-07, | |
| "loss": 0.5136, | |
| "step": 532 | |
| }, | |
| { | |
| "epoch": 2.5625, | |
| "grad_norm": 0.3542673885822296, | |
| "learning_rate": 6.352985497691883e-07, | |
| "loss": 0.5212, | |
| "step": 533 | |
| }, | |
| { | |
| "epoch": 2.5673076923076925, | |
| "grad_norm": 0.35093072056770325, | |
| "learning_rate": 6.217079331672777e-07, | |
| "loss": 0.5121, | |
| "step": 534 | |
| }, | |
| { | |
| "epoch": 2.5721153846153846, | |
| "grad_norm": 0.33335307240486145, | |
| "learning_rate": 6.082546188036204e-07, | |
| "loss": 0.5007, | |
| "step": 535 | |
| }, | |
| { | |
| "epoch": 2.5769230769230766, | |
| "grad_norm": 0.3272242546081543, | |
| "learning_rate": 5.949390285710777e-07, | |
| "loss": 0.5443, | |
| "step": 536 | |
| }, | |
| { | |
| "epoch": 2.581730769230769, | |
| "grad_norm": 0.34555184841156006, | |
| "learning_rate": 5.817615800435167e-07, | |
| "loss": 0.5255, | |
| "step": 537 | |
| }, | |
| { | |
| "epoch": 2.5865384615384617, | |
| "grad_norm": 0.3510143458843231, | |
| "learning_rate": 5.687226864627115e-07, | |
| "loss": 0.5238, | |
| "step": 538 | |
| }, | |
| { | |
| "epoch": 2.5913461538461537, | |
| "grad_norm": 0.35968631505966187, | |
| "learning_rate": 5.558227567253832e-07, | |
| "loss": 0.5186, | |
| "step": 539 | |
| }, | |
| { | |
| "epoch": 2.5961538461538463, | |
| "grad_norm": 0.35358065366744995, | |
| "learning_rate": 5.430621953703785e-07, | |
| "loss": 0.5132, | |
| "step": 540 | |
| }, | |
| { | |
| "epoch": 2.6009615384615383, | |
| "grad_norm": 0.34705501794815063, | |
| "learning_rate": 5.304414025659832e-07, | |
| "loss": 0.5418, | |
| "step": 541 | |
| }, | |
| { | |
| "epoch": 2.605769230769231, | |
| "grad_norm": 0.34814560413360596, | |
| "learning_rate": 5.179607740973764e-07, | |
| "loss": 0.5217, | |
| "step": 542 | |
| }, | |
| { | |
| "epoch": 2.6105769230769234, | |
| "grad_norm": 0.32552292943000793, | |
| "learning_rate": 5.056207013542131e-07, | |
| "loss": 0.5224, | |
| "step": 543 | |
| }, | |
| { | |
| "epoch": 2.6153846153846154, | |
| "grad_norm": 0.3452615737915039, | |
| "learning_rate": 4.934215713183527e-07, | |
| "loss": 0.5226, | |
| "step": 544 | |
| }, | |
| { | |
| "epoch": 2.6201923076923075, | |
| "grad_norm": 0.3437098264694214, | |
| "learning_rate": 4.813637665517251e-07, | |
| "loss": 0.5258, | |
| "step": 545 | |
| }, | |
| { | |
| "epoch": 2.625, | |
| "grad_norm": 0.36558979749679565, | |
| "learning_rate": 4.6944766518432936e-07, | |
| "loss": 0.5137, | |
| "step": 546 | |
| }, | |
| { | |
| "epoch": 2.6298076923076925, | |
| "grad_norm": 0.33913153409957886, | |
| "learning_rate": 4.576736409023813e-07, | |
| "loss": 0.508, | |
| "step": 547 | |
| }, | |
| { | |
| "epoch": 2.6346153846153846, | |
| "grad_norm": 0.35266298055648804, | |
| "learning_rate": 4.460420629365919e-07, | |
| "loss": 0.5167, | |
| "step": 548 | |
| }, | |
| { | |
| "epoch": 2.6394230769230766, | |
| "grad_norm": 0.3532564640045166, | |
| "learning_rate": 4.3455329605058436e-07, | |
| "loss": 0.5145, | |
| "step": 549 | |
| }, | |
| { | |
| "epoch": 2.644230769230769, | |
| "grad_norm": 0.3402523994445801, | |
| "learning_rate": 4.232077005294638e-07, | |
| "loss": 0.5237, | |
| "step": 550 | |
| }, | |
| { | |
| "epoch": 2.6490384615384617, | |
| "grad_norm": 0.3120058476924896, | |
| "learning_rate": 4.120056321685101e-07, | |
| "loss": 0.562, | |
| "step": 551 | |
| }, | |
| { | |
| "epoch": 2.6538461538461537, | |
| "grad_norm": 0.33833712339401245, | |
| "learning_rate": 4.009474422620269e-07, | |
| "loss": 0.5257, | |
| "step": 552 | |
| }, | |
| { | |
| "epoch": 2.6586538461538463, | |
| "grad_norm": 0.333404004573822, | |
| "learning_rate": 3.900334775923237e-07, | |
| "loss": 0.5115, | |
| "step": 553 | |
| }, | |
| { | |
| "epoch": 2.6634615384615383, | |
| "grad_norm": 0.32522115111351013, | |
| "learning_rate": 3.7926408041883355e-07, | |
| "loss": 0.5286, | |
| "step": 554 | |
| }, | |
| { | |
| "epoch": 2.668269230769231, | |
| "grad_norm": 0.3394847810268402, | |
| "learning_rate": 3.6863958846739213e-07, | |
| "loss": 0.5195, | |
| "step": 555 | |
| }, | |
| { | |
| "epoch": 2.6730769230769234, | |
| "grad_norm": 0.3137637674808502, | |
| "learning_rate": 3.581603349196372e-07, | |
| "loss": 0.5339, | |
| "step": 556 | |
| }, | |
| { | |
| "epoch": 2.6778846153846154, | |
| "grad_norm": 0.35012125968933105, | |
| "learning_rate": 3.4782664840256387e-07, | |
| "loss": 0.5054, | |
| "step": 557 | |
| }, | |
| { | |
| "epoch": 2.6826923076923075, | |
| "grad_norm": 0.3344334065914154, | |
| "learning_rate": 3.3763885297822153e-07, | |
| "loss": 0.5303, | |
| "step": 558 | |
| }, | |
| { | |
| "epoch": 2.6875, | |
| "grad_norm": 0.3342319428920746, | |
| "learning_rate": 3.275972681335421e-07, | |
| "loss": 0.512, | |
| "step": 559 | |
| }, | |
| { | |
| "epoch": 2.6923076923076925, | |
| "grad_norm": 0.32932013273239136, | |
| "learning_rate": 3.1770220877033243e-07, | |
| "loss": 0.5354, | |
| "step": 560 | |
| }, | |
| { | |
| "epoch": 2.6971153846153846, | |
| "grad_norm": 0.34048202633857727, | |
| "learning_rate": 3.0795398519539113e-07, | |
| "loss": 0.5216, | |
| "step": 561 | |
| }, | |
| { | |
| "epoch": 2.7019230769230766, | |
| "grad_norm": 0.3443983495235443, | |
| "learning_rate": 2.9835290311078123e-07, | |
| "loss": 0.5131, | |
| "step": 562 | |
| }, | |
| { | |
| "epoch": 2.706730769230769, | |
| "grad_norm": 0.34546706080436707, | |
| "learning_rate": 2.888992636042437e-07, | |
| "loss": 0.4995, | |
| "step": 563 | |
| }, | |
| { | |
| "epoch": 2.7115384615384617, | |
| "grad_norm": 0.32637590169906616, | |
| "learning_rate": 2.7959336313974847e-07, | |
| "loss": 0.524, | |
| "step": 564 | |
| }, | |
| { | |
| "epoch": 2.7163461538461537, | |
| "grad_norm": 0.32179778814315796, | |
| "learning_rate": 2.704354935482095e-07, | |
| "loss": 0.5369, | |
| "step": 565 | |
| }, | |
| { | |
| "epoch": 2.7211538461538463, | |
| "grad_norm": 0.3366025388240814, | |
| "learning_rate": 2.6142594201832183e-07, | |
| "loss": 0.5021, | |
| "step": 566 | |
| }, | |
| { | |
| "epoch": 2.7259615384615383, | |
| "grad_norm": 0.3441389501094818, | |
| "learning_rate": 2.525649910875627e-07, | |
| "loss": 0.522, | |
| "step": 567 | |
| }, | |
| { | |
| "epoch": 2.730769230769231, | |
| "grad_norm": 0.3519365191459656, | |
| "learning_rate": 2.438529186333288e-07, | |
| "loss": 0.5234, | |
| "step": 568 | |
| }, | |
| { | |
| "epoch": 2.7355769230769234, | |
| "grad_norm": 0.330476313829422, | |
| "learning_rate": 2.3528999786421758e-07, | |
| "loss": 0.5309, | |
| "step": 569 | |
| }, | |
| { | |
| "epoch": 2.7403846153846154, | |
| "grad_norm": 0.3406146466732025, | |
| "learning_rate": 2.2687649731146844e-07, | |
| "loss": 0.5279, | |
| "step": 570 | |
| }, | |
| { | |
| "epoch": 2.7451923076923075, | |
| "grad_norm": 0.3351764678955078, | |
| "learning_rate": 2.1861268082053466e-07, | |
| "loss": 0.5275, | |
| "step": 571 | |
| }, | |
| { | |
| "epoch": 2.75, | |
| "grad_norm": 0.37317317724227905, | |
| "learning_rate": 2.104988075428127e-07, | |
| "loss": 0.5285, | |
| "step": 572 | |
| }, | |
| { | |
| "epoch": 2.7548076923076925, | |
| "grad_norm": 0.3595113158226013, | |
| "learning_rate": 2.0253513192751374e-07, | |
| "loss": 0.5287, | |
| "step": 573 | |
| }, | |
| { | |
| "epoch": 2.7596153846153846, | |
| "grad_norm": 0.33269202709198, | |
| "learning_rate": 1.947219037136827e-07, | |
| "loss": 0.5151, | |
| "step": 574 | |
| }, | |
| { | |
| "epoch": 2.7644230769230766, | |
| "grad_norm": 0.341153085231781, | |
| "learning_rate": 1.8705936792237255e-07, | |
| "loss": 0.5243, | |
| "step": 575 | |
| }, | |
| { | |
| "epoch": 2.769230769230769, | |
| "grad_norm": 0.3603745102882385, | |
| "learning_rate": 1.7954776484895188e-07, | |
| "loss": 0.5228, | |
| "step": 576 | |
| }, | |
| { | |
| "epoch": 2.7740384615384617, | |
| "grad_norm": 0.3396835923194885, | |
| "learning_rate": 1.7218733005557707e-07, | |
| "loss": 0.5073, | |
| "step": 577 | |
| }, | |
| { | |
| "epoch": 2.7788461538461537, | |
| "grad_norm": 0.3207492530345917, | |
| "learning_rate": 1.6497829436380009e-07, | |
| "loss": 0.5266, | |
| "step": 578 | |
| }, | |
| { | |
| "epoch": 2.7836538461538463, | |
| "grad_norm": 0.3595437705516815, | |
| "learning_rate": 1.5792088384733174e-07, | |
| "loss": 0.5045, | |
| "step": 579 | |
| }, | |
| { | |
| "epoch": 2.7884615384615383, | |
| "grad_norm": 0.3366645574569702, | |
| "learning_rate": 1.510153198249531e-07, | |
| "loss": 0.5434, | |
| "step": 580 | |
| }, | |
| { | |
| "epoch": 2.793269230769231, | |
| "grad_norm": 0.3330222964286804, | |
| "learning_rate": 1.4426181885357215e-07, | |
| "loss": 0.5246, | |
| "step": 581 | |
| }, | |
| { | |
| "epoch": 2.7980769230769234, | |
| "grad_norm": 0.3313714861869812, | |
| "learning_rate": 1.376605927214364e-07, | |
| "loss": 0.5196, | |
| "step": 582 | |
| }, | |
| { | |
| "epoch": 2.8028846153846154, | |
| "grad_norm": 0.3222265839576721, | |
| "learning_rate": 1.312118484414876e-07, | |
| "loss": 0.5153, | |
| "step": 583 | |
| }, | |
| { | |
| "epoch": 2.8076923076923075, | |
| "grad_norm": 0.3020241856575012, | |
| "learning_rate": 1.2491578824487204e-07, | |
| "loss": 0.5489, | |
| "step": 584 | |
| }, | |
| { | |
| "epoch": 2.8125, | |
| "grad_norm": 0.3178803026676178, | |
| "learning_rate": 1.1877260957459835e-07, | |
| "loss": 0.5352, | |
| "step": 585 | |
| }, | |
| { | |
| "epoch": 2.8173076923076925, | |
| "grad_norm": 0.3120872676372528, | |
| "learning_rate": 1.1278250507934518e-07, | |
| "loss": 0.5294, | |
| "step": 586 | |
| }, | |
| { | |
| "epoch": 2.8221153846153846, | |
| "grad_norm": 0.3329028785228729, | |
| "learning_rate": 1.0694566260742001e-07, | |
| "loss": 0.5307, | |
| "step": 587 | |
| }, | |
| { | |
| "epoch": 2.8269230769230766, | |
| "grad_norm": 0.3385623097419739, | |
| "learning_rate": 1.0126226520086823e-07, | |
| "loss": 0.524, | |
| "step": 588 | |
| }, | |
| { | |
| "epoch": 2.831730769230769, | |
| "grad_norm": 0.3274289071559906, | |
| "learning_rate": 9.573249108973281e-08, | |
| "loss": 0.5196, | |
| "step": 589 | |
| }, | |
| { | |
| "epoch": 2.8365384615384617, | |
| "grad_norm": 0.31717026233673096, | |
| "learning_rate": 9.035651368646647e-08, | |
| "loss": 0.5163, | |
| "step": 590 | |
| }, | |
| { | |
| "epoch": 2.8413461538461537, | |
| "grad_norm": 0.3221368193626404, | |
| "learning_rate": 8.513450158049109e-08, | |
| "loss": 0.5256, | |
| "step": 591 | |
| }, | |
| { | |
| "epoch": 2.8461538461538463, | |
| "grad_norm": 0.3442428708076477, | |
| "learning_rate": 8.006661853291298e-08, | |
| "loss": 0.5197, | |
| "step": 592 | |
| }, | |
| { | |
| "epoch": 2.8509615384615383, | |
| "grad_norm": 0.33662882447242737, | |
| "learning_rate": 7.515302347138486e-08, | |
| "loss": 0.5188, | |
| "step": 593 | |
| }, | |
| { | |
| "epoch": 2.855769230769231, | |
| "grad_norm": 0.33708903193473816, | |
| "learning_rate": 7.03938704851248e-08, | |
| "loss": 0.5268, | |
| "step": 594 | |
| }, | |
| { | |
| "epoch": 2.8605769230769234, | |
| "grad_norm": 0.3152363896369934, | |
| "learning_rate": 6.578930882008283e-08, | |
| "loss": 0.5295, | |
| "step": 595 | |
| }, | |
| { | |
| "epoch": 2.8653846153846154, | |
| "grad_norm": 0.32401999831199646, | |
| "learning_rate": 6.133948287426028e-08, | |
| "loss": 0.511, | |
| "step": 596 | |
| }, | |
| { | |
| "epoch": 2.8701923076923075, | |
| "grad_norm": 0.32977667450904846, | |
| "learning_rate": 5.704453219318118e-08, | |
| "loss": 0.514, | |
| "step": 597 | |
| }, | |
| { | |
| "epoch": 2.875, | |
| "grad_norm": 0.32670727372169495, | |
| "learning_rate": 5.2904591465516855e-08, | |
| "loss": 0.514, | |
| "step": 598 | |
| }, | |
| { | |
| "epoch": 2.8798076923076925, | |
| "grad_norm": 0.32648882269859314, | |
| "learning_rate": 4.891979051886153e-08, | |
| "loss": 0.5181, | |
| "step": 599 | |
| }, | |
| { | |
| "epoch": 2.8846153846153846, | |
| "grad_norm": 0.324532687664032, | |
| "learning_rate": 4.509025431566283e-08, | |
| "loss": 0.5221, | |
| "step": 600 | |
| }, | |
| { | |
| "epoch": 2.8894230769230766, | |
| "grad_norm": 0.31154200434684753, | |
| "learning_rate": 4.141610294930043e-08, | |
| "loss": 0.5212, | |
| "step": 601 | |
| }, | |
| { | |
| "epoch": 2.894230769230769, | |
| "grad_norm": 0.3022911250591278, | |
| "learning_rate": 3.7897451640321326e-08, | |
| "loss": 0.5227, | |
| "step": 602 | |
| }, | |
| { | |
| "epoch": 2.8990384615384617, | |
| "grad_norm": 0.3582252860069275, | |
| "learning_rate": 3.4534410732825485e-08, | |
| "loss": 0.5237, | |
| "step": 603 | |
| }, | |
| { | |
| "epoch": 2.9038461538461537, | |
| "grad_norm": 0.3342374265193939, | |
| "learning_rate": 3.1327085691006954e-08, | |
| "loss": 0.5086, | |
| "step": 604 | |
| }, | |
| { | |
| "epoch": 2.9086538461538463, | |
| "grad_norm": 0.35545840859413147, | |
| "learning_rate": 2.8275577095846495e-08, | |
| "loss": 0.5204, | |
| "step": 605 | |
| }, | |
| { | |
| "epoch": 2.9134615384615383, | |
| "grad_norm": 0.37066027522087097, | |
| "learning_rate": 2.5379980641955792e-08, | |
| "loss": 0.519, | |
| "step": 606 | |
| }, | |
| { | |
| "epoch": 2.918269230769231, | |
| "grad_norm": 0.3329092860221863, | |
| "learning_rate": 2.264038713457706e-08, | |
| "loss": 0.5424, | |
| "step": 607 | |
| }, | |
| { | |
| "epoch": 2.9230769230769234, | |
| "grad_norm": 0.34132009744644165, | |
| "learning_rate": 2.0056882486736982e-08, | |
| "loss": 0.5209, | |
| "step": 608 | |
| }, | |
| { | |
| "epoch": 2.9278846153846154, | |
| "grad_norm": 0.32534360885620117, | |
| "learning_rate": 1.762954771655001e-08, | |
| "loss": 0.5298, | |
| "step": 609 | |
| }, | |
| { | |
| "epoch": 2.9326923076923075, | |
| "grad_norm": 0.3154315948486328, | |
| "learning_rate": 1.5358458944680356e-08, | |
| "loss": 0.5075, | |
| "step": 610 | |
| }, | |
| { | |
| "epoch": 2.9375, | |
| "grad_norm": 0.3233172595500946, | |
| "learning_rate": 1.3243687391952809e-08, | |
| "loss": 0.5264, | |
| "step": 611 | |
| }, | |
| { | |
| "epoch": 2.9423076923076925, | |
| "grad_norm": 0.3091405928134918, | |
| "learning_rate": 1.1285299377118974e-08, | |
| "loss": 0.5431, | |
| "step": 612 | |
| }, | |
| { | |
| "epoch": 2.9471153846153846, | |
| "grad_norm": 0.32396966218948364, | |
| "learning_rate": 9.48335631477948e-09, | |
| "loss": 0.5235, | |
| "step": 613 | |
| }, | |
| { | |
| "epoch": 2.9519230769230766, | |
| "grad_norm": 0.3247375786304474, | |
| "learning_rate": 7.837914713457184e-09, | |
| "loss": 0.53, | |
| "step": 614 | |
| }, | |
| { | |
| "epoch": 2.956730769230769, | |
| "grad_norm": 0.31406140327453613, | |
| "learning_rate": 6.349026173824713e-09, | |
| "loss": 0.517, | |
| "step": 615 | |
| }, | |
| { | |
| "epoch": 2.9615384615384617, | |
| "grad_norm": 0.3556351661682129, | |
| "learning_rate": 5.016737387085191e-09, | |
| "loss": 0.5218, | |
| "step": 616 | |
| }, | |
| { | |
| "epoch": 2.9663461538461537, | |
| "grad_norm": 0.34563007950782776, | |
| "learning_rate": 3.841090133511749e-09, | |
| "loss": 0.5199, | |
| "step": 617 | |
| }, | |
| { | |
| "epoch": 2.9711538461538463, | |
| "grad_norm": 0.33256563544273376, | |
| "learning_rate": 2.8221212811324616e-09, | |
| "loss": 0.511, | |
| "step": 618 | |
| }, | |
| { | |
| "epoch": 2.9759615384615383, | |
| "grad_norm": 0.32485440373420715, | |
| "learning_rate": 1.959862784577937e-09, | |
| "loss": 0.5198, | |
| "step": 619 | |
| }, | |
| { | |
| "epoch": 2.980769230769231, | |
| "grad_norm": 0.30697503685951233, | |
| "learning_rate": 1.2543416840771206e-09, | |
| "loss": 0.5484, | |
| "step": 620 | |
| }, | |
| { | |
| "epoch": 2.9855769230769234, | |
| "grad_norm": 0.3317164182662964, | |
| "learning_rate": 7.055801046113031e-10, | |
| "loss": 0.5138, | |
| "step": 621 | |
| }, | |
| { | |
| "epoch": 2.9903846153846154, | |
| "grad_norm": 0.3181401789188385, | |
| "learning_rate": 3.1359525521801326e-10, | |
| "loss": 0.5243, | |
| "step": 622 | |
| }, | |
| { | |
| "epoch": 2.9951923076923075, | |
| "grad_norm": 0.32515648007392883, | |
| "learning_rate": 7.839942845144777e-11, | |
| "loss": 0.5262, | |
| "step": 623 | |
| }, | |
| { | |
| "epoch": 3.0, | |
| "grad_norm": 0.3372482359409332, | |
| "learning_rate": 0.0, | |
| "loss": 0.5101, | |
| "step": 624 | |
| }, | |
| { | |
| "epoch": 3.0, | |
| "step": 624, | |
| "total_flos": 1149901297156096.0, | |
| "train_loss": 0.5865381705837373, | |
| "train_runtime": 8382.2551, | |
| "train_samples_per_second": 7.124, | |
| "train_steps_per_second": 0.074 | |
| } | |
| ], | |
| "logging_steps": 1, | |
| "max_steps": 624, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 3, | |
| "save_steps": 100, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": true | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 1149901297156096.0, | |
| "train_batch_size": 1, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |
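
The JSON above is a Hugging Face Trainer `trainer_state.json`: per-step records under `log_history`, closed out by a run summary (no `loss` key, but `train_loss`, `train_runtime`, and throughput figures) and the trainer's bookkeeping fields. Below is a minimal sketch for consuming such a file using only the standard library; the filename `trainer_state.json` is an assumption, so point it at wherever this file is saved.

```python
import json

# Load the trainer state; the path is an assumption -- adjust as needed.
with open("trainer_state.json") as f:
    state = json.load(f)

# Per-step records carry a "loss" key; the final entry is a run summary
# (train_loss, train_runtime, ...) without one, so filter it out.
steps = [r for r in state["log_history"] if "loss" in r]

print(f"steps logged : {len(steps)}")
print(f"first loss   : {steps[0]['loss']:.4f} (step {steps[0]['step']})")
print(f"last loss    : {steps[-1]['loss']:.4f} (step {steps[-1]['step']})")
print(f"peak lr      : {max(r['learning_rate'] for r in steps):.3e}")
print(f"mean loss    : {sum(r['loss'] for r in steps) / len(steps):.4f}")
```

Run against this file, a sketch like this would confirm what the raw columns already suggest: `learning_rate` decays smoothly from about 6.1e-06 at step 303 to exactly 0.0 at step 624, consistent with a cosine schedule, while the loss drifts from roughly 0.56 down to about 0.52 over the same span.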