{
  "best_global_step": null,
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 3.0,
  "eval_steps": 500,
  "global_step": 519,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.005780346820809248,
      "grad_norm": 21.87793783020131,
      "learning_rate": 0.0,
      "loss": 3.0384,
      "step": 1
    },
    {
      "epoch": 0.011560693641618497,
      "grad_norm": 25.944153812411226,
      "learning_rate": 1.9230769230769234e-07,
      "loss": 2.7475,
      "step": 2
    },
    {
      "epoch": 0.017341040462427744,
      "grad_norm": 27.600907958149808,
      "learning_rate": 3.846153846153847e-07,
      "loss": 4.4341,
      "step": 3
    },
    {
      "epoch": 0.023121387283236993,
      "grad_norm": 32.91981305085188,
      "learning_rate": 5.76923076923077e-07,
      "loss": 6.5326,
      "step": 4
    },
    {
      "epoch": 0.028901734104046242,
      "grad_norm": 28.298528074059195,
      "learning_rate": 7.692307692307694e-07,
      "loss": 4.6837,
      "step": 5
    },
    {
      "epoch": 0.03468208092485549,
      "grad_norm": 28.4809265754676,
      "learning_rate": 9.615384615384617e-07,
      "loss": 4.2992,
      "step": 6
    },
    {
      "epoch": 0.04046242774566474,
      "grad_norm": 34.294499257509685,
      "learning_rate": 1.153846153846154e-06,
      "loss": 6.3554,
      "step": 7
    },
    {
      "epoch": 0.046242774566473986,
      "grad_norm": 32.40670348934399,
      "learning_rate": 1.3461538461538462e-06,
      "loss": 5.5468,
      "step": 8
    },
    {
      "epoch": 0.05202312138728324,
      "grad_norm": 31.088077207788306,
      "learning_rate": 1.5384615384615387e-06,
      "loss": 4.7881,
      "step": 9
    },
    {
      "epoch": 0.057803468208092484,
      "grad_norm": 27.103526225977465,
      "learning_rate": 1.7307692307692308e-06,
      "loss": 4.2773,
      "step": 10
    },
    {
      "epoch": 0.06358381502890173,
      "grad_norm": 23.372545017917634,
      "learning_rate": 1.9230769230769234e-06,
      "loss": 3.7714,
      "step": 11
    },
    {
      "epoch": 0.06936416184971098,
      "grad_norm": 29.536841037137425,
      "learning_rate": 2.1153846153846155e-06,
      "loss": 5.4037,
      "step": 12
    },
    {
      "epoch": 0.07514450867052024,
      "grad_norm": 20.51058072309702,
      "learning_rate": 2.307692307692308e-06,
      "loss": 3.0428,
      "step": 13
    },
    {
      "epoch": 0.08092485549132948,
      "grad_norm": 25.851758056606016,
      "learning_rate": 2.5e-06,
      "loss": 3.5816,
      "step": 14
    },
    {
      "epoch": 0.08670520231213873,
      "grad_norm": 15.01324797937803,
      "learning_rate": 2.6923076923076923e-06,
      "loss": 2.3105,
      "step": 15
    },
    {
      "epoch": 0.09248554913294797,
      "grad_norm": 15.199500088532252,
      "learning_rate": 2.8846153846153845e-06,
      "loss": 2.5381,
      "step": 16
    },
    {
      "epoch": 0.09826589595375723,
      "grad_norm": 17.505903657285067,
      "learning_rate": 3.0769230769230774e-06,
      "loss": 2.9574,
      "step": 17
    },
    {
      "epoch": 0.10404624277456648,
      "grad_norm": 11.528249965912817,
      "learning_rate": 3.2692307692307696e-06,
      "loss": 2.3289,
      "step": 18
    },
    {
      "epoch": 0.10982658959537572,
      "grad_norm": 12.880494070804799,
      "learning_rate": 3.4615384615384617e-06,
      "loss": 2.6934,
      "step": 19
    },
    {
      "epoch": 0.11560693641618497,
      "grad_norm": 10.865402934129182,
      "learning_rate": 3.653846153846154e-06,
      "loss": 2.6421,
      "step": 20
    },
    {
      "epoch": 0.12138728323699421,
      "grad_norm": 7.125972199900619,
      "learning_rate": 3.846153846153847e-06,
      "loss": 2.5033,
      "step": 21
    },
    {
      "epoch": 0.12716763005780346,
      "grad_norm": 11.21541490251159,
      "learning_rate": 4.0384615384615385e-06,
      "loss": 2.6345,
      "step": 22
    },
    {
      "epoch": 0.1329479768786127,
      "grad_norm": 4.100350430185937,
      "learning_rate": 4.230769230769231e-06,
      "loss": 1.9498,
      "step": 23
    },
    {
      "epoch": 0.13872832369942195,
      "grad_norm": 2.2057204886805226,
      "learning_rate": 4.423076923076924e-06,
      "loss": 1.0249,
      "step": 24
    },
    {
      "epoch": 0.14450867052023122,
      "grad_norm": 3.0429202636869386,
      "learning_rate": 4.615384615384616e-06,
      "loss": 1.68,
      "step": 25
    },
    {
      "epoch": 0.15028901734104047,
      "grad_norm": 5.131215855608062,
      "learning_rate": 4.807692307692308e-06,
      "loss": 2.6619,
      "step": 26
    },
    {
      "epoch": 0.15606936416184972,
      "grad_norm": 3.4088527219076052,
      "learning_rate": 5e-06,
      "loss": 1.8795,
      "step": 27
    },
    {
      "epoch": 0.16184971098265896,
      "grad_norm": 3.7165759547218675,
      "learning_rate": 5.192307692307693e-06,
      "loss": 2.1434,
      "step": 28
    },
    {
      "epoch": 0.1676300578034682,
      "grad_norm": 3.4769214622128977,
      "learning_rate": 5.384615384615385e-06,
      "loss": 1.9582,
      "step": 29
    },
    {
      "epoch": 0.17341040462427745,
      "grad_norm": 3.4755064358560963,
      "learning_rate": 5.576923076923077e-06,
      "loss": 1.7078,
      "step": 30
    },
    {
      "epoch": 0.1791907514450867,
      "grad_norm": 2.346759845137902,
      "learning_rate": 5.769230769230769e-06,
      "loss": 1.3841,
      "step": 31
    },
    {
      "epoch": 0.18497109826589594,
      "grad_norm": 3.3589744625467217,
      "learning_rate": 5.961538461538462e-06,
      "loss": 1.9876,
      "step": 32
    },
    {
      "epoch": 0.1907514450867052,
      "grad_norm": 2.23530410646522,
      "learning_rate": 6.153846153846155e-06,
      "loss": 1.3731,
      "step": 33
    },
    {
      "epoch": 0.19653179190751446,
      "grad_norm": 3.0334549128243062,
      "learning_rate": 6.3461538461538466e-06,
      "loss": 1.954,
      "step": 34
    },
    {
      "epoch": 0.2023121387283237,
      "grad_norm": 3.019944847537477,
      "learning_rate": 6.538461538461539e-06,
      "loss": 1.7887,
      "step": 35
    },
    {
      "epoch": 0.20809248554913296,
      "grad_norm": 2.574674373885689,
      "learning_rate": 6.730769230769232e-06,
      "loss": 1.3396,
      "step": 36
    },
    {
      "epoch": 0.2138728323699422,
      "grad_norm": 1.6788041080361362,
      "learning_rate": 6.923076923076923e-06,
      "loss": 1.1171,
      "step": 37
    },
    {
      "epoch": 0.21965317919075145,
      "grad_norm": 2.223469814132116,
      "learning_rate": 7.115384615384616e-06,
      "loss": 1.1395,
      "step": 38
    },
    {
      "epoch": 0.2254335260115607,
      "grad_norm": 2.1461305027339996,
      "learning_rate": 7.307692307692308e-06,
      "loss": 1.2949,
      "step": 39
    },
    {
      "epoch": 0.23121387283236994,
      "grad_norm": 1.8459121298686243,
      "learning_rate": 7.500000000000001e-06,
      "loss": 1.2421,
      "step": 40
    },
    {
      "epoch": 0.23699421965317918,
      "grad_norm": 1.6284680007294197,
      "learning_rate": 7.692307692307694e-06,
      "loss": 1.2065,
      "step": 41
    },
    {
      "epoch": 0.24277456647398843,
      "grad_norm": 2.600840218113501,
      "learning_rate": 7.884615384615384e-06,
      "loss": 1.5263,
      "step": 42
    },
    {
      "epoch": 0.24855491329479767,
      "grad_norm": 2.4077378791650097,
      "learning_rate": 8.076923076923077e-06,
      "loss": 1.6776,
      "step": 43
    },
    {
      "epoch": 0.2543352601156069,
      "grad_norm": 1.7255161535686105,
      "learning_rate": 8.26923076923077e-06,
      "loss": 1.0938,
      "step": 44
    },
    {
      "epoch": 0.26011560693641617,
      "grad_norm": 1.4160107000643258,
      "learning_rate": 8.461538461538462e-06,
      "loss": 1.0143,
      "step": 45
    },
    {
      "epoch": 0.2658959537572254,
      "grad_norm": 1.5785981094401695,
      "learning_rate": 8.653846153846155e-06,
      "loss": 1.1311,
      "step": 46
    },
    {
      "epoch": 0.27167630057803466,
      "grad_norm": 1.6456550398187915,
      "learning_rate": 8.846153846153847e-06,
      "loss": 1.1797,
      "step": 47
    },
    {
      "epoch": 0.2774566473988439,
      "grad_norm": 1.043459304370755,
      "learning_rate": 9.03846153846154e-06,
      "loss": 0.7448,
      "step": 48
    },
    {
      "epoch": 0.2832369942196532,
      "grad_norm": 1.219858313334948,
      "learning_rate": 9.230769230769232e-06,
      "loss": 0.8615,
      "step": 49
    },
    {
      "epoch": 0.28901734104046245,
      "grad_norm": 1.8504734290476341,
      "learning_rate": 9.423076923076923e-06,
      "loss": 1.0442,
      "step": 50
    },
    {
      "epoch": 0.2947976878612717,
      "grad_norm": 1.6671704729926984,
      "learning_rate": 9.615384615384616e-06,
      "loss": 1.1062,
      "step": 51
    },
    {
      "epoch": 0.30057803468208094,
      "grad_norm": 1.7440575528412874,
      "learning_rate": 9.807692307692308e-06,
      "loss": 0.9787,
      "step": 52
    },
    {
      "epoch": 0.3063583815028902,
      "grad_norm": 1.6865691956435074,
      "learning_rate": 1e-05,
      "loss": 1.039,
      "step": 53
    },
    {
      "epoch": 0.31213872832369943,
      "grad_norm": 1.8681738071024672,
      "learning_rate": 9.99988686307906e-06,
      "loss": 1.1171,
      "step": 54
    },
    {
      "epoch": 0.3179190751445087,
      "grad_norm": 1.4603859563880623,
      "learning_rate": 9.999547457436221e-06,
      "loss": 1.1437,
      "step": 55
    },
    {
      "epoch": 0.3236994219653179,
      "grad_norm": 1.8133100631538102,
      "learning_rate": 9.99898179843121e-06,
      "loss": 1.2624,
      "step": 56
    },
    {
      "epoch": 0.32947976878612717,
      "grad_norm": 0.9630648626847151,
      "learning_rate": 9.998189911662793e-06,
      "loss": 0.7498,
      "step": 57
    },
    {
      "epoch": 0.3352601156069364,
      "grad_norm": 1.9231249269543056,
      "learning_rate": 9.99717183296762e-06,
      "loss": 1.4657,
      "step": 58
    },
    {
      "epoch": 0.34104046242774566,
      "grad_norm": 1.362454107646963,
      "learning_rate": 9.995927608418612e-06,
      "loss": 0.9301,
      "step": 59
    },
    {
      "epoch": 0.3468208092485549,
      "grad_norm": 1.5299236968171044,
      "learning_rate": 9.994457294322858e-06,
      "loss": 1.0736,
      "step": 60
    },
    {
      "epoch": 0.35260115606936415,
      "grad_norm": 1.256654893455161,
      "learning_rate": 9.992760957219083e-06,
      "loss": 0.8821,
      "step": 61
    },
    {
      "epoch": 0.3583815028901734,
      "grad_norm": 1.1404370436709175,
      "learning_rate": 9.99083867387463e-06,
      "loss": 0.8719,
      "step": 62
    },
    {
      "epoch": 0.36416184971098264,
      "grad_norm": 1.3053446137350901,
      "learning_rate": 9.988690531281988e-06,
      "loss": 0.9312,
      "step": 63
    },
    {
      "epoch": 0.3699421965317919,
      "grad_norm": 1.227248883019738,
      "learning_rate": 9.986316626654851e-06,
      "loss": 0.976,
      "step": 64
    },
    {
      "epoch": 0.37572254335260113,
      "grad_norm": 1.129527697823112,
      "learning_rate": 9.983717067423721e-06,
      "loss": 1.0485,
      "step": 65
    },
    {
      "epoch": 0.3815028901734104,
      "grad_norm": 2.2554059120706835,
      "learning_rate": 9.980891971231052e-06,
      "loss": 1.2171,
      "step": 66
    },
    {
      "epoch": 0.3872832369942196,
      "grad_norm": 0.904872887481673,
      "learning_rate": 9.977841465925918e-06,
      "loss": 0.7695,
      "step": 67
    },
    {
      "epoch": 0.3930635838150289,
      "grad_norm": 1.0481280098300976,
      "learning_rate": 9.974565689558228e-06,
      "loss": 0.9209,
      "step": 68
    },
    {
      "epoch": 0.3988439306358382,
      "grad_norm": 1.2781650561787112,
      "learning_rate": 9.971064790372484e-06,
      "loss": 1.1006,
      "step": 69
    },
    {
      "epoch": 0.4046242774566474,
      "grad_norm": 0.8969433268648974,
      "learning_rate": 9.967338926801066e-06,
      "loss": 0.7209,
      "step": 70
    },
    {
      "epoch": 0.41040462427745666,
      "grad_norm": 1.1393852014047097,
      "learning_rate": 9.963388267457071e-06,
      "loss": 0.7685,
      "step": 71
    },
    {
      "epoch": 0.4161849710982659,
      "grad_norm": 0.8409711904675974,
      "learning_rate": 9.959212991126668e-06,
      "loss": 0.5886,
      "step": 72
    },
    {
      "epoch": 0.42196531791907516,
      "grad_norm": 1.0097041809739957,
      "learning_rate": 9.954813286761021e-06,
      "loss": 0.8177,
      "step": 73
    },
    {
      "epoch": 0.4277456647398844,
      "grad_norm": 0.8674783887677202,
      "learning_rate": 9.950189353467735e-06,
      "loss": 0.6911,
      "step": 74
    },
    {
      "epoch": 0.43352601156069365,
      "grad_norm": 1.0032789649476923,
      "learning_rate": 9.945341400501838e-06,
      "loss": 0.9463,
      "step": 75
    },
    {
      "epoch": 0.4393063583815029,
      "grad_norm": 1.0495612381066386,
      "learning_rate": 9.940269647256319e-06,
      "loss": 0.9201,
      "step": 76
    },
    {
      "epoch": 0.44508670520231214,
      "grad_norm": 0.7000102566512686,
      "learning_rate": 9.934974323252195e-06,
      "loss": 0.633,
      "step": 77
    },
    {
      "epoch": 0.4508670520231214,
      "grad_norm": 0.8404727978210973,
      "learning_rate": 9.929455668128129e-06,
      "loss": 0.7939,
      "step": 78
    },
    {
      "epoch": 0.45664739884393063,
      "grad_norm": 0.8979481667152188,
      "learning_rate": 9.923713931629582e-06,
      "loss": 0.7995,
      "step": 79
    },
    {
      "epoch": 0.4624277456647399,
      "grad_norm": 1.006209086886219,
      "learning_rate": 9.917749373597506e-06,
      "loss": 0.9377,
      "step": 80
    },
    {
      "epoch": 0.4682080924855491,
      "grad_norm": 1.3079350852166611,
      "learning_rate": 9.911562263956593e-06,
      "loss": 0.8293,
      "step": 81
    },
    {
      "epoch": 0.47398843930635837,
      "grad_norm": 1.0302107201825823,
      "learning_rate": 9.90515288270306e-06,
      "loss": 0.8907,
      "step": 82
    },
    {
      "epoch": 0.4797687861271676,
      "grad_norm": 0.8512910211686245,
      "learning_rate": 9.898521519891968e-06,
      "loss": 0.6677,
      "step": 83
    },
    {
      "epoch": 0.48554913294797686,
      "grad_norm": 1.4034538610622003,
      "learning_rate": 9.891668475624106e-06,
      "loss": 1.011,
      "step": 84
    },
    {
      "epoch": 0.4913294797687861,
      "grad_norm": 1.1133640764754114,
      "learning_rate": 9.884594060032407e-06,
      "loss": 0.9868,
      "step": 85
    },
    {
      "epoch": 0.49710982658959535,
      "grad_norm": 0.8832987768292198,
      "learning_rate": 9.877298593267906e-06,
      "loss": 0.7435,
      "step": 86
    },
    {
      "epoch": 0.5028901734104047,
      "grad_norm": 0.9793843183088854,
      "learning_rate": 9.869782405485267e-06,
      "loss": 0.9289,
      "step": 87
    },
    {
      "epoch": 0.5086705202312138,
      "grad_norm": 0.9159241257550397,
      "learning_rate": 9.862045836827821e-06,
      "loss": 0.704,
      "step": 88
    },
    {
      "epoch": 0.5144508670520231,
      "grad_norm": 0.8669077131194463,
      "learning_rate": 9.854089237412194e-06,
      "loss": 0.7518,
      "step": 89
    },
    {
      "epoch": 0.5202312138728323,
      "grad_norm": 0.9177313381025624,
      "learning_rate": 9.84591296731245e-06,
      "loss": 0.6925,
      "step": 90
    },
    {
      "epoch": 0.5260115606936416,
      "grad_norm": 0.7724908538126755,
      "learning_rate": 9.837517396543799e-06,
      "loss": 0.6358,
      "step": 91
    },
    {
      "epoch": 0.5317919075144508,
      "grad_norm": 1.6095008914897926,
      "learning_rate": 9.82890290504585e-06,
      "loss": 0.9213,
      "step": 92
    },
    {
      "epoch": 0.5375722543352601,
      "grad_norm": 0.9894464003328666,
      "learning_rate": 9.82006988266542e-06,
      "loss": 0.8624,
      "step": 93
    },
    {
      "epoch": 0.5433526011560693,
      "grad_norm": 0.7889218642430385,
      "learning_rate": 9.811018729138892e-06,
      "loss": 0.7508,
      "step": 94
    },
    {
      "epoch": 0.5491329479768786,
      "grad_norm": 1.0722731395513763,
      "learning_rate": 9.801749854074122e-06,
      "loss": 0.7923,
      "step": 95
    },
    {
      "epoch": 0.5549132947976878,
      "grad_norm": 1.0436817532057514,
      "learning_rate": 9.792263676931906e-06,
      "loss": 0.7715,
      "step": 96
    },
    {
      "epoch": 0.5606936416184971,
      "grad_norm": 1.7989526448931117,
      "learning_rate": 9.78256062700699e-06,
      "loss": 1.2198,
      "step": 97
    },
    {
      "epoch": 0.5664739884393064,
      "grad_norm": 1.0674303681403066,
      "learning_rate": 9.772641143408652e-06,
      "loss": 0.7883,
      "step": 98
    },
    {
      "epoch": 0.5722543352601156,
      "grad_norm": 0.9353488034991134,
      "learning_rate": 9.762505675040826e-06,
      "loss": 0.7686,
      "step": 99
    },
    {
      "epoch": 0.5780346820809249,
      "grad_norm": 1.1674201445045589,
      "learning_rate": 9.752154680581783e-06,
      "loss": 0.995,
      "step": 100
    },
    {
      "epoch": 0.5838150289017341,
      "grad_norm": 0.9466404615356364,
      "learning_rate": 9.741588628463384e-06,
      "loss": 0.8552,
      "step": 101
    },
    {
      "epoch": 0.5895953757225434,
      "grad_norm": 0.7681659885617845,
      "learning_rate": 9.730807996849864e-06,
      "loss": 0.6232,
      "step": 102
    },
    {
      "epoch": 0.5953757225433526,
      "grad_norm": 1.0787984383542042,
      "learning_rate": 9.719813273616216e-06,
      "loss": 0.9384,
      "step": 103
    },
    {
      "epoch": 0.6011560693641619,
      "grad_norm": 1.163335897640102,
      "learning_rate": 9.70860495632609e-06,
      "loss": 1.0792,
      "step": 104
    },
    {
      "epoch": 0.6069364161849711,
      "grad_norm": 1.131274450256789,
      "learning_rate": 9.697183552209289e-06,
      "loss": 0.8781,
      "step": 105
    },
    {
      "epoch": 0.6127167630057804,
      "grad_norm": 1.0095856194543398,
      "learning_rate": 9.68554957813881e-06,
      "loss": 0.8006,
      "step": 106
    },
    {
      "epoch": 0.6184971098265896,
      "grad_norm": 0.835619336255323,
      "learning_rate": 9.673703560607459e-06,
      "loss": 0.6371,
      "step": 107
    },
    {
      "epoch": 0.6242774566473989,
      "grad_norm": 0.7655234428141927,
      "learning_rate": 9.661646035704009e-06,
      "loss": 0.5344,
      "step": 108
    },
    {
      "epoch": 0.630057803468208,
      "grad_norm": 0.9520070072269193,
      "learning_rate": 9.649377549088962e-06,
      "loss": 0.8094,
      "step": 109
    },
    {
      "epoch": 0.6358381502890174,
      "grad_norm": 0.9429087170034833,
      "learning_rate": 9.636898655969837e-06,
      "loss": 0.7412,
      "step": 110
    },
    {
      "epoch": 0.6416184971098265,
      "grad_norm": 0.9882443151038491,
      "learning_rate": 9.62420992107605e-06,
      "loss": 0.864,
      "step": 111
    },
    {
      "epoch": 0.6473988439306358,
      "grad_norm": 0.9980586995279209,
      "learning_rate": 9.61131191863336e-06,
      "loss": 0.883,
      "step": 112
    },
    {
      "epoch": 0.653179190751445,
      "grad_norm": 0.8143392097546589,
      "learning_rate": 9.598205232337882e-06,
      "loss": 0.593,
      "step": 113
    },
    {
      "epoch": 0.6589595375722543,
      "grad_norm": 0.8759873715277585,
      "learning_rate": 9.584890455329667e-06,
      "loss": 0.7435,
      "step": 114
    },
    {
      "epoch": 0.6647398843930635,
      "grad_norm": 0.787760238489214,
      "learning_rate": 9.571368190165865e-06,
      "loss": 0.6313,
      "step": 115
    },
    {
      "epoch": 0.6705202312138728,
      "grad_norm": 0.7813584309524949,
      "learning_rate": 9.557639048793453e-06,
      "loss": 0.5329,
      "step": 116
    },
    {
      "epoch": 0.6763005780346821,
      "grad_norm": 0.810792296710866,
      "learning_rate": 9.543703652521543e-06,
      "loss": 0.6099,
      "step": 117
    },
    {
      "epoch": 0.6820809248554913,
      "grad_norm": 0.8539955958073703,
      "learning_rate": 9.52956263199327e-06,
      "loss": 0.7579,
      "step": 118
    },
    {
      "epoch": 0.6878612716763006,
      "grad_norm": 0.973039298272916,
      "learning_rate": 9.515216627157238e-06,
      "loss": 0.7422,
      "step": 119
    },
    {
      "epoch": 0.6936416184971098,
      "grad_norm": 0.8789927069109355,
      "learning_rate": 9.500666287238573e-06,
      "loss": 0.7703,
      "step": 120
    },
    {
      "epoch": 0.6994219653179191,
      "grad_norm": 0.8280305404585776,
      "learning_rate": 9.485912270709542e-06,
      "loss": 0.5447,
      "step": 121
    },
    {
      "epoch": 0.7052023121387283,
      "grad_norm": 0.8306195346423975,
      "learning_rate": 9.470955245259742e-06,
      "loss": 0.55,
      "step": 122
    },
    {
      "epoch": 0.7109826589595376,
      "grad_norm": 0.8569125077265108,
      "learning_rate": 9.455795887765896e-06,
      "loss": 0.7363,
      "step": 123
    },
    {
      "epoch": 0.7167630057803468,
      "grad_norm": 0.8571238902612902,
      "learning_rate": 9.440434884261216e-06,
      "loss": 0.6441,
      "step": 124
    },
    {
      "epoch": 0.7225433526011561,
      "grad_norm": 0.8036597031305626,
      "learning_rate": 9.424872929904359e-06,
      "loss": 0.6873,
      "step": 125
    },
    {
      "epoch": 0.7283236994219653,
      "grad_norm": 0.9237720491861983,
      "learning_rate": 9.409110728947964e-06,
      "loss": 0.7905,
      "step": 126
    },
    {
      "epoch": 0.7341040462427746,
      "grad_norm": 0.9973443566645532,
      "learning_rate": 9.393148994706785e-06,
      "loss": 0.4917,
      "step": 127
    },
    {
      "epoch": 0.7398843930635838,
      "grad_norm": 1.0006509192435291,
      "learning_rate": 9.376988449525405e-06,
      "loss": 0.8303,
      "step": 128
    },
    {
      "epoch": 0.7456647398843931,
      "grad_norm": 0.9847352318496524,
      "learning_rate": 9.360629824745558e-06,
      "loss": 0.8072,
      "step": 129
    },
    {
      "epoch": 0.7514450867052023,
      "grad_norm": 0.9057800124682823,
      "learning_rate": 9.344073860673016e-06,
      "loss": 0.7909,
      "step": 130
    },
    {
      "epoch": 0.7572254335260116,
      "grad_norm": 0.751051206899431,
      "learning_rate": 9.327321306544097e-06,
      "loss": 0.6188,
      "step": 131
    },
    {
      "epoch": 0.7630057803468208,
      "grad_norm": 0.7918961715149377,
      "learning_rate": 9.310372920491761e-06,
      "loss": 0.5697,
      "step": 132
    },
    {
      "epoch": 0.7687861271676301,
      "grad_norm": 0.8912684513395774,
      "learning_rate": 9.293229469511293e-06,
      "loss": 0.7477,
      "step": 133
    },
    {
      "epoch": 0.7745664739884393,
      "grad_norm": 0.6633704205093338,
      "learning_rate": 9.275891729425595e-06,
      "loss": 0.5831,
      "step": 134
    },
    {
      "epoch": 0.7803468208092486,
      "grad_norm": 0.8899258986178338,
      "learning_rate": 9.25836048485008e-06,
      "loss": 0.7249,
      "step": 135
    },
    {
      "epoch": 0.7861271676300579,
      "grad_norm": 0.7490887846503816,
      "learning_rate": 9.240636529157158e-06,
      "loss": 0.6534,
      "step": 136
    },
    {
      "epoch": 0.791907514450867,
      "grad_norm": 1.3911616822761665,
      "learning_rate": 9.22272066444034e-06,
      "loss": 1.1257,
      "step": 137
    },
    {
      "epoch": 0.7976878612716763,
      "grad_norm": 0.8807323209346695,
      "learning_rate": 9.204613701477935e-06,
      "loss": 0.5506,
      "step": 138
    },
    {
      "epoch": 0.8034682080924855,
      "grad_norm": 1.2063249188448404,
      "learning_rate": 9.186316459696359e-06,
      "loss": 1.0353,
      "step": 139
    },
    {
      "epoch": 0.8092485549132948,
      "grad_norm": 0.780960970851516,
      "learning_rate": 9.167829767133047e-06,
      "loss": 0.6501,
      "step": 140
    },
    {
      "epoch": 0.815028901734104,
      "grad_norm": 1.0465279583342073,
      "learning_rate": 9.149154460398993e-06,
      "loss": 1.0094,
      "step": 141
    },
    {
      "epoch": 0.8208092485549133,
      "grad_norm": 0.8493522401893265,
      "learning_rate": 9.130291384640873e-06,
      "loss": 0.6026,
      "step": 142
    },
    {
      "epoch": 0.8265895953757225,
      "grad_norm": 1.0520248613531298,
      "learning_rate": 9.111241393502814e-06,
      "loss": 0.9218,
      "step": 143
    },
    {
      "epoch": 0.8323699421965318,
      "grad_norm": 0.8062150660900307,
      "learning_rate": 9.092005349087754e-06,
      "loss": 0.5588,
      "step": 144
    },
    {
      "epoch": 0.838150289017341,
      "grad_norm": 1.1800864030349798,
      "learning_rate": 9.072584121918426e-06,
      "loss": 0.8445,
      "step": 145
    },
    {
      "epoch": 0.8439306358381503,
      "grad_norm": 1.204141856153108,
      "learning_rate": 9.052978590897964e-06,
      "loss": 0.8043,
      "step": 146
    },
    {
      "epoch": 0.8497109826589595,
      "grad_norm": 0.8162766248184291,
      "learning_rate": 9.033189643270139e-06,
      "loss": 0.6661,
      "step": 147
    },
    {
      "epoch": 0.8554913294797688,
      "grad_norm": 1.0427374615350444,
      "learning_rate": 9.013218174579189e-06,
      "loss": 0.8038,
      "step": 148
    },
    {
      "epoch": 0.861271676300578,
      "grad_norm": 0.9354749929394746,
      "learning_rate": 8.993065088629304e-06,
      "loss": 0.7768,
      "step": 149
    },
    {
      "epoch": 0.8670520231213873,
      "grad_norm": 0.9287721633918639,
      "learning_rate": 8.972731297443722e-06,
      "loss": 0.731,
      "step": 150
    },
    {
      "epoch": 0.8728323699421965,
      "grad_norm": 0.9230583505002304,
      "learning_rate": 8.95221772122345e-06,
      "loss": 0.6313,
      "step": 151
    },
    {
      "epoch": 0.8786127167630058,
      "grad_norm": 0.9349454478962814,
      "learning_rate": 8.931525288305633e-06,
      "loss": 0.8083,
      "step": 152
    },
    {
      "epoch": 0.884393063583815,
      "grad_norm": 0.913057550393148,
      "learning_rate": 8.910654935121528e-06,
      "loss": 0.7647,
      "step": 153
    },
    {
      "epoch": 0.8901734104046243,
      "grad_norm": 0.7348197855556597,
      "learning_rate": 8.889607606154132e-06,
      "loss": 0.5773,
      "step": 154
    },
    {
      "epoch": 0.8959537572254336,
      "grad_norm": 0.7867588221031618,
      "learning_rate": 8.868384253895445e-06,
      "loss": 0.7065,
      "step": 155
    },
    {
      "epoch": 0.9017341040462428,
      "grad_norm": 0.8228491320872959,
      "learning_rate": 8.846985838803357e-06,
      "loss": 0.7271,
      "step": 156
    },
    {
      "epoch": 0.9075144508670521,
      "grad_norm": 1.3053568158332713,
      "learning_rate": 8.825413329258187e-06,
      "loss": 0.9825,
      "step": 157
    },
    {
      "epoch": 0.9132947976878613,
      "grad_norm": 1.0945947320675422,
      "learning_rate": 8.803667701518857e-06,
      "loss": 0.9142,
      "step": 158
    },
    {
      "epoch": 0.9190751445086706,
      "grad_norm": 0.9764593529348718,
      "learning_rate": 8.781749939678712e-06,
      "loss": 0.7848,
      "step": 159
    },
    {
      "epoch": 0.9248554913294798,
      "grad_norm": 0.6941029727730416,
      "learning_rate": 8.759661035620992e-06,
      "loss": 0.5887,
      "step": 160
    },
    {
      "epoch": 0.930635838150289,
      "grad_norm": 0.7925590817706383,
      "learning_rate": 8.73740198897393e-06,
      "loss": 0.6934,
      "step": 161
    },
    {
      "epoch": 0.9364161849710982,
      "grad_norm": 0.7656873002992562,
      "learning_rate": 8.714973807065525e-06,
      "loss": 0.6853,
      "step": 162
    },
    {
      "epoch": 0.9421965317919075,
      "grad_norm": 1.1161472512069044,
      "learning_rate": 8.69237750487796e-06,
      "loss": 0.853,
      "step": 163
    },
    {
      "epoch": 0.9479768786127167,
      "grad_norm": 0.9749363576847155,
      "learning_rate": 8.669614105001652e-06,
      "loss": 0.8424,
      "step": 164
    },
    {
      "epoch": 0.953757225433526,
      "grad_norm": 0.8325202621241643,
      "learning_rate": 8.646684637588992e-06,
      "loss": 0.8077,
      "step": 165
    },
    {
      "epoch": 0.9595375722543352,
      "grad_norm": 0.9783393336805141,
      "learning_rate": 8.623590140307715e-06,
      "loss": 0.7791,
      "step": 166
    },
    {
      "epoch": 0.9653179190751445,
      "grad_norm": 0.8080744052126594,
      "learning_rate": 8.600331658293948e-06,
      "loss": 0.6518,
      "step": 167
    },
    {
      "epoch": 0.9710982658959537,
      "grad_norm": 1.1474927961761585,
      "learning_rate": 8.576910244104905e-06,
      "loss": 0.8716,
      "step": 168
    },
    {
      "epoch": 0.976878612716763,
      "grad_norm": 0.9719680493249003,
      "learning_rate": 8.553326957671264e-06,
      "loss": 0.8288,
      "step": 169
    },
    {
      "epoch": 0.9826589595375722,
      "grad_norm": 0.8706832814278217,
      "learning_rate": 8.529582866249187e-06,
      "loss": 0.592,
      "step": 170
    },
    {
      "epoch": 0.9884393063583815,
      "grad_norm": 0.929595095010918,
      "learning_rate": 8.50567904437203e-06,
      "loss": 0.7816,
      "step": 171
    },
    {
      "epoch": 0.9942196531791907,
      "grad_norm": 0.7657958882007067,
      "learning_rate": 8.48161657380172e-06,
      "loss": 0.4877,
      "step": 172
    },
    {
      "epoch": 1.0,
      "grad_norm": 1.1436020534304703,
      "learning_rate": 8.457396543479787e-06,
      "loss": 0.8026,
      "step": 173
    },
    {
      "epoch": 1.0057803468208093,
      "grad_norm": 0.7855851232058355,
      "learning_rate": 8.433020049478093e-06,
      "loss": 0.5871,
      "step": 174
    },
    {
      "epoch": 1.0115606936416186,
      "grad_norm": 0.7646534714069891,
      "learning_rate": 8.408488194949229e-06,
      "loss": 0.6586,
      "step": 175
    },
    {
      "epoch": 1.0173410404624277,
      "grad_norm": 0.8230539636052089,
      "learning_rate": 8.383802090076589e-06,
      "loss": 0.6081,
      "step": 176
    },
    {
      "epoch": 1.023121387283237,
      "grad_norm": 0.9538635888680288,
      "learning_rate": 8.358962852024128e-06,
      "loss": 0.7103,
      "step": 177
    },
    {
      "epoch": 1.0289017341040463,
      "grad_norm": 0.9031383623963593,
      "learning_rate": 8.333971604885817e-06,
      "loss": 0.5453,
      "step": 178
    },
    {
      "epoch": 1.0346820809248556,
      "grad_norm": 0.8947067399258345,
      "learning_rate": 8.308829479634753e-06,
      "loss": 0.6581,
      "step": 179
    },
    {
      "epoch": 1.0404624277456647,
      "grad_norm": 0.7906077257603986,
      "learning_rate": 8.283537614071987e-06,
      "loss": 0.6724,
      "step": 180
    },
    {
      "epoch": 1.046242774566474,
      "grad_norm": 0.9275191131931334,
      "learning_rate": 8.258097152775045e-06,
      "loss": 0.6303,
      "step": 181
    },
    {
      "epoch": 1.0520231213872833,
      "grad_norm": 1.0819371958975426,
      "learning_rate": 8.232509247046106e-06,
      "loss": 0.7358,
      "step": 182
    },
    {
      "epoch": 1.0578034682080926,
      "grad_norm": 0.9172402539596392,
      "learning_rate": 8.206775054859914e-06,
      "loss": 0.6278,
      "step": 183
    },
    {
      "epoch": 1.0635838150289016,
      "grad_norm": 0.7024408514337848,
      "learning_rate": 8.180895740811381e-06,
      "loss": 0.5198,
      "step": 184
    },
    {
      "epoch": 1.069364161849711,
      "grad_norm": 0.6236935477897357,
      "learning_rate": 8.154872476062868e-06,
      "loss": 0.4613,
      "step": 185
    },
    {
      "epoch": 1.0751445086705202,
      "grad_norm": 0.8702316152168408,
      "learning_rate": 8.128706438291193e-06,
      "loss": 0.5644,
      "step": 186
    },
    {
      "epoch": 1.0809248554913296,
      "grad_norm": 0.8493352120847084,
      "learning_rate": 8.102398811634338e-06,
      "loss": 0.6492,
      "step": 187
    },
    {
      "epoch": 1.0867052023121386,
      "grad_norm": 0.77177764951958,
      "learning_rate": 8.075950786637847e-06,
      "loss": 0.6817,
      "step": 188
    },
    {
      "epoch": 1.092485549132948,
      "grad_norm": 0.7604368990026783,
      "learning_rate": 8.049363560200972e-06,
      "loss": 0.4197,
      "step": 189
    },
    {
      "epoch": 1.0982658959537572,
      "grad_norm": 0.7547200781480733,
      "learning_rate": 8.022638335522484e-06,
      "loss": 0.4693,
      "step": 190
    },
    {
      "epoch": 1.1040462427745665,
      "grad_norm": 0.8363425113790128,
      "learning_rate": 7.995776322046236e-06,
      "loss": 0.5314,
      "step": 191
    },
    {
      "epoch": 1.1098265895953756,
      "grad_norm": 0.7732661043588892,
      "learning_rate": 7.968778735406426e-06,
      "loss": 0.5379,
      "step": 192
    },
    {
      "epoch": 1.115606936416185,
      "grad_norm": 0.7255116220582496,
      "learning_rate": 7.941646797372584e-06,
      "loss": 0.5787,
      "step": 193
    },
    {
      "epoch": 1.1213872832369942,
      "grad_norm": 0.7847798977214011,
      "learning_rate": 7.914381735794282e-06,
      "loss": 0.6169,
      "step": 194
    },
    {
      "epoch": 1.1271676300578035,
      "grad_norm": 0.6595825059371818,
      "learning_rate": 7.886984784545565e-06,
      "loss": 0.4591,
      "step": 195
    },
    {
      "epoch": 1.1329479768786128,
      "grad_norm": 0.7278796876236617,
      "learning_rate": 7.859457183469119e-06,
      "loss": 0.5845,
      "step": 196
    },
    {
      "epoch": 1.138728323699422,
      "grad_norm": 0.7528110685404278,
      "learning_rate": 7.831800178320153e-06,
      "loss": 0.5812,
      "step": 197
    },
    {
      "epoch": 1.1445086705202312,
      "grad_norm": 0.890267436582,
      "learning_rate": 7.804015020710028e-06,
      "loss": 0.6572,
      "step": 198
    },
    {
      "epoch": 1.1502890173410405,
      "grad_norm": 0.8254440289935021,
      "learning_rate": 7.776102968049616e-06,
      "loss": 0.6755,
      "step": 199
    },
    {
      "epoch": 1.1560693641618498,
      "grad_norm": 0.6507477500486915,
      "learning_rate": 7.748065283492397e-06,
      "loss": 0.554,
      "step": 200
    },
    {
      "epoch": 1.1618497109826589,
      "grad_norm": 0.6283177198784258,
      "learning_rate": 7.719903235877289e-06,
      "loss": 0.5776,
      "step": 201
    },
    {
      "epoch": 1.1676300578034682,
      "grad_norm": 0.8918223872817956,
      "learning_rate": 7.691618099671235e-06,
      "loss": 0.8012,
      "step": 202
    },
    {
      "epoch": 1.1734104046242775,
      "grad_norm": 0.9344902364876053,
      "learning_rate": 7.663211154911523e-06,
      "loss": 0.8115,
      "step": 203
    },
    {
      "epoch": 1.1791907514450868,
      "grad_norm": 0.7794391355469076,
      "learning_rate": 7.634683687147857e-06,
      "loss": 0.6142,
      "step": 204
    },
    {
      "epoch": 1.1849710982658959,
      "grad_norm": 0.8659277724734541,
      "learning_rate": 7.606036987384185e-06,
      "loss": 0.7304,
      "step": 205
    },
    {
      "epoch": 1.1907514450867052,
      "grad_norm": 0.9502452996067565,
      "learning_rate": 7.577272352020269e-06,
      "loss": 0.8352,
      "step": 206
    },
    {
      "epoch": 1.1965317919075145,
      "grad_norm": 0.7759351158211815,
      "learning_rate": 7.5483910827930186e-06,
      "loss": 0.645,
      "step": 207
    },
    {
      "epoch": 1.2023121387283238,
      "grad_norm": 0.6696108650382663,
      "learning_rate": 7.519394486717583e-06,
      "loss": 0.5238,
      "step": 208
    },
    {
      "epoch": 1.208092485549133,
      "grad_norm": 0.8786759679795678,
      "learning_rate": 7.4902838760282024e-06,
      "loss": 0.74,
      "step": 209
    },
    {
      "epoch": 1.2138728323699421,
      "grad_norm": 0.7066794429138108,
      "learning_rate": 7.461060568118822e-06,
      "loss": 0.5637,
      "step": 210
    },
    {
      "epoch": 1.2196531791907514,
      "grad_norm": 0.7820317919250585,
      "learning_rate": 7.43172588548347e-06,
      "loss": 0.6129,
      "step": 211
    },
    {
      "epoch": 1.2254335260115607,
      "grad_norm": 0.8213464787047494,
      "learning_rate": 7.402281155656415e-06,
      "loss": 0.7279,
      "step": 212
    },
    {
      "epoch": 1.2312138728323698,
      "grad_norm": 1.2811020609856818,
      "learning_rate": 7.3727277111520864e-06,
      "loss": 1.027,
      "step": 213
    },
    {
      "epoch": 1.2369942196531791,
      "grad_norm": 0.7203326251914638,
      "learning_rate": 7.343066889404769e-06,
      "loss": 0.5618,
      "step": 214
    },
    {
      "epoch": 1.2427745664739884,
      "grad_norm": 0.7490042739512657,
      "learning_rate": 7.313300032708081e-06,
      "loss": 0.4959,
      "step": 215
    },
    {
      "epoch": 1.2485549132947977,
      "grad_norm": 0.8330199251865329,
      "learning_rate": 7.283428488154227e-06,
      "loss": 0.6419,
      "step": 216
    },
    {
      "epoch": 1.254335260115607,
      "grad_norm": 0.8109431295258052,
      "learning_rate": 7.253453607573037e-06,
      "loss": 0.5864,
      "step": 217
    },
    {
      "epoch": 1.260115606936416,
      "grad_norm": 0.6789438861293353,
      "learning_rate": 7.223376747470792e-06,
      "loss": 0.5748,
      "step": 218
    },
    {
      "epoch": 1.2658959537572254,
      "grad_norm": 0.7905554276439895,
      "learning_rate": 7.193199268968825e-06,
      "loss": 0.4877,
      "step": 219
    },
    {
      "epoch": 1.2716763005780347,
      "grad_norm": 0.7672450268723763,
      "learning_rate": 7.162922537741937e-06,
      "loss": 0.488,
      "step": 220
    },
    {
      "epoch": 1.2774566473988438,
      "grad_norm": 0.7113314734848389,
      "learning_rate": 7.1325479239565875e-06,
      "loss": 0.496,
      "step": 221
    },
    {
      "epoch": 1.2832369942196533,
      "grad_norm": 0.775371647892662,
      "learning_rate": 7.102076802208887e-06,
      "loss": 0.5555,
      "step": 222
    },
    {
      "epoch": 1.2890173410404624,
      "grad_norm": 0.8155548293800693,
      "learning_rate": 7.071510551462395e-06,
      "loss": 0.6871,
      "step": 223
    },
    {
      "epoch": 1.2947976878612717,
      "grad_norm": 0.869775047071274,
      "learning_rate": 7.040850554985706e-06,
      "loss": 0.6442,
      "step": 224
    },
    {
      "epoch": 1.300578034682081,
      "grad_norm": 0.8896294697712317,
      "learning_rate": 7.01009820028986e-06,
      "loss": 0.6255,
      "step": 225
    },
    {
      "epoch": 1.30635838150289,
      "grad_norm": 0.8155789387389749,
      "learning_rate": 6.9792548790655465e-06,
      "loss": 0.7973,
      "step": 226
    },
    {
      "epoch": 1.3121387283236994,
      "grad_norm": 0.7230343705020649,
      "learning_rate": 6.948321987120122e-06,
      "loss": 0.5421,
      "step": 227
    },
    {
      "epoch": 1.3179190751445087,
      "grad_norm": 0.7291631821760047,
      "learning_rate": 6.9173009243144485e-06,
      "loss": 0.431,
      "step": 228
    },
    {
      "epoch": 1.323699421965318,
      "grad_norm": 0.7287156885787672,
      "learning_rate": 6.886193094499537e-06,
      "loss": 0.558,
      "step": 229
    },
    {
      "epoch": 1.3294797687861273,
      "grad_norm": 1.0724029075135588,
      "learning_rate": 6.854999905453022e-06,
      "loss": 0.5622,
      "step": 230
    },
    {
      "epoch": 1.3352601156069364,
      "grad_norm": 0.8822778651565606,
      "learning_rate": 6.823722768815446e-06,
      "loss": 0.6397,
      "step": 231
    },
    {
      "epoch": 1.3410404624277457,
      "grad_norm": 0.7985828585892485,
      "learning_rate": 6.792363100026383e-06,
      "loss": 0.6297,
      "step": 232
    },
    {
      "epoch": 1.346820809248555,
      "grad_norm": 0.670328316646438,
      "learning_rate": 6.760922318260384e-06,
      "loss": 0.6461,
      "step": 233
    },
    {
      "epoch": 1.352601156069364,
      "grad_norm": 0.7789795421340042,
      "learning_rate": 6.729401846362743e-06,
      "loss": 0.6102,
      "step": 234
    },
    {
      "epoch": 1.3583815028901733,
      "grad_norm": 1.05298645942576,
      "learning_rate": 6.697803110785115e-06,
      "loss": 0.7174,
      "step": 235
    },
    {
      "epoch": 1.3641618497109826,
      "grad_norm": 0.6391669736154241,
      "learning_rate": 6.666127541520958e-06,
      "loss": 0.4992,
      "step": 236
    },
    {
      "epoch": 1.369942196531792,
      "grad_norm": 0.8465130878040216,
      "learning_rate": 6.634376572040826e-06,
      "loss": 0.5653,
      "step": 237
    },
    {
      "epoch": 1.3757225433526012,
      "grad_norm": 0.8552412125002662,
      "learning_rate": 6.602551639227486e-06,
      "loss": 0.4559,
      "step": 238
    },
    {
      "epoch": 1.3815028901734103,
      "grad_norm": 1.0924656658631755,
      "learning_rate": 6.570654183310901e-06,
      "loss": 0.8661,
      "step": 239
    },
    {
      "epoch": 1.3872832369942196,
      "grad_norm": 0.7794676129362818,
      "learning_rate": 6.538685647803049e-06,
      "loss": 0.6934,
      "step": 240
    },
    {
      "epoch": 1.393063583815029,
      "grad_norm": 0.7171760865328232,
      "learning_rate": 6.506647479432604e-06,
      "loss": 0.5452,
      "step": 241
    },
    {
      "epoch": 1.3988439306358382,
      "grad_norm": 0.8512263697543654,
      "learning_rate": 6.474541128079452e-06,
      "loss": 0.539,
      "step": 242
    },
    {
      "epoch": 1.4046242774566475,
      "grad_norm": 0.7418655135071943,
      "learning_rate": 6.442368046709087e-06,
      "loss": 0.5844,
      "step": 243
    },
    {
      "epoch": 1.4104046242774566,
      "grad_norm": 0.7995078110330098,
      "learning_rate": 6.410129691306855e-06,
      "loss": 0.4744,
      "step": 244
    },
    {
      "epoch": 1.416184971098266,
      "grad_norm": 0.9331606469598925,
      "learning_rate": 6.377827520812061e-06,
      "loss": 0.7489,
      "step": 245
    },
    {
      "epoch": 1.4219653179190752,
      "grad_norm": 0.8268022509923469,
      "learning_rate": 6.34546299705195e-06,
      "loss": 0.7182,
      "step": 246
    },
    {
      "epoch": 1.4277456647398843,
      "grad_norm": 0.6686099679963994,
      "learning_rate": 6.31303758467555e-06,
      "loss": 0.4366,
      "step": 247
    },
    {
      "epoch": 1.4335260115606936,
      "grad_norm": 0.8037659222821476,
      "learning_rate": 6.280552751087384e-06,
      "loss": 0.6675,
      "step": 248
    },
    {
      "epoch": 1.439306358381503,
      "grad_norm": 0.9009423195262899,
      "learning_rate": 6.248009966381074e-06,
      "loss": 0.6042,
      "step": 249
    },
    {
      "epoch": 1.4450867052023122,
      "grad_norm": 0.9903928033155953,
      "learning_rate": 6.215410703272805e-06,
      "loss": 0.7219,
      "step": 250
    },
    {
      "epoch": 1.4508670520231215,
      "grad_norm": 0.6591691197785381,
      "learning_rate": 6.182756437034677e-06,
      "loss": 0.4372,
      "step": 251
    },
    {
      "epoch": 1.4566473988439306,
      "grad_norm": 0.695635459295935,
      "learning_rate": 6.150048645427944e-06,
      "loss": 0.496,
      "step": 252
    },
    {
      "epoch": 1.4624277456647399,
      "grad_norm": 0.5945863816394287,
      "learning_rate": 6.117288808636141e-06,
      "loss": 0.5174,
      "step": 253
    },
    {
      "epoch": 1.4682080924855492,
      "grad_norm": 0.7602116565399197,
      "learning_rate": 6.0844784091980955e-06,
      "loss": 0.645,
      "step": 254
    },
    {
      "epoch": 1.4739884393063583,
      "grad_norm": 0.7090698522334424,
      "learning_rate": 6.05161893194083e-06,
      "loss": 0.5536,
      "step": 255
    },
    {
      "epoch": 1.4797687861271676,
      "grad_norm": 0.725729229031602,
      "learning_rate": 6.018711863912381e-06,
      "loss": 0.6477,
      "step": 256
    },
    {
      "epoch": 1.4855491329479769,
      "grad_norm": 0.6357974105573619,
      "learning_rate": 5.985758694314486e-06,
      "loss": 0.5217,
      "step": 257
    },
    {
      "epoch": 1.4913294797687862,
      "grad_norm": 1.0443530505670549,
      "learning_rate": 5.9527609144352075e-06,
      "loss": 0.6152,
      "step": 258
    },
    {
      "epoch": 1.4971098265895955,
      "grad_norm": 0.8202669154391847,
      "learning_rate": 5.9197200175814275e-06,
      "loss": 0.5029,
      "step": 259
    },
    {
      "epoch": 1.5028901734104045,
      "grad_norm": 0.6862183739194273,
      "learning_rate": 5.8866374990112785e-06,
      "loss": 0.5836,
      "step": 260
    },
    {
      "epoch": 1.5086705202312138,
      "grad_norm": 0.898525799970463,
      "learning_rate": 5.853514855866481e-06,
      "loss": 0.8393,
      "step": 261
    },
    {
      "epoch": 1.5144508670520231,
      "grad_norm": 0.9820808227078155,
      "learning_rate": 5.8203535871045735e-06,
      "loss": 0.6995,
      "step": 262
    },
    {
      "epoch": 1.5202312138728322,
      "grad_norm": 0.7519418179376166,
      "learning_rate": 5.787155193431095e-06,
      "loss": 0.5861,
      "step": 263
    },
    {
      "epoch": 1.5260115606936417,
      "grad_norm": 0.9393739826771281,
      "learning_rate": 5.75392117723166e-06,
      "loss": 0.5841,
      "step": 264
    },
    {
      "epoch": 1.5317919075144508,
      "grad_norm": 0.7596859297052929,
      "learning_rate": 5.7206530425039785e-06,
      "loss": 0.5483,
      "step": 265
    },
    {
      "epoch": 1.5375722543352601,
      "grad_norm": 0.7596810301974631,
      "learning_rate": 5.687352294789776e-06,
      "loss": 0.601,
      "step": 266
    },
    {
      "epoch": 1.5433526011560694,
      "grad_norm": 0.8936156970474353,
      "learning_rate": 5.654020441106682e-06,
      "loss": 0.8358,
      "step": 267
    },
    {
      "epoch": 1.5491329479768785,
      "grad_norm": 0.8101430405700003,
      "learning_rate": 5.620658989880011e-06,
      "loss": 0.5428,
      "step": 268
    },
    {
      "epoch": 1.5549132947976878,
      "grad_norm": 0.7262001629259932,
      "learning_rate": 5.587269450874513e-06,
      "loss": 0.4783,
      "step": 269
    },
    {
      "epoch": 1.560693641618497,
      "grad_norm": 0.8075662054671324,
      "learning_rate": 5.5538533351260395e-06,
      "loss": 0.6543,
      "step": 270
    },
    {
      "epoch": 1.5664739884393064,
      "grad_norm": 1.0619586513294879,
      "learning_rate": 5.52041215487317e-06,
      "loss": 0.6379,
      "step": 271
    },
    {
      "epoch": 1.5722543352601157,
      "grad_norm": 0.6915488345799145,
      "learning_rate": 5.486947423488774e-06,
      "loss": 0.5354,
      "step": 272
    },
    {
      "epoch": 1.5780346820809248,
      "grad_norm": 0.9595574551452887,
      "learning_rate": 5.453460655411515e-06,
      "loss": 0.6359,
      "step": 273
    },
    {
      "epoch": 1.583815028901734,
      "grad_norm": 0.6746217408473637,
      "learning_rate": 5.4199533660773276e-06,
      "loss": 0.442,
      "step": 274
    },
    {
      "epoch": 1.5895953757225434,
      "grad_norm": 0.6875447362464598,
      "learning_rate": 5.3864270718508305e-06,
      "loss": 0.6612,
      "step": 275
    },
    {
      "epoch": 1.5953757225433525,
      "grad_norm": 1.0006017308905897,
      "learning_rate": 5.352883289956701e-06,
      "loss": 0.7303,
      "step": 276
    },
    {
      "epoch": 1.601156069364162,
      "grad_norm": 0.8962934512743116,
      "learning_rate": 5.319323538411021e-06,
      "loss": 0.6552,
      "step": 277
    },
    {
      "epoch": 1.606936416184971,
      "grad_norm": 1.3267780546969365,
      "learning_rate": 5.285749335952573e-06,
      "loss": 0.7785,
      "step": 278
    },
    {
      "epoch": 1.6127167630057804,
      "grad_norm": 0.9202137432547965,
      "learning_rate": 5.252162201974112e-06,
      "loss": 0.6865,
      "step": 279
    },
    {
      "epoch": 1.6184971098265897,
      "grad_norm": 1.0125080800080466,
      "learning_rate": 5.218563656453609e-06,
      "loss": 0.645,
      "step": 280
    },
    {
      "epoch": 1.6242774566473988,
      "grad_norm": 0.7442509258922682,
      "learning_rate": 5.184955219885457e-06,
      "loss": 0.512,
      "step": 281
    },
    {
      "epoch": 1.630057803468208,
      "grad_norm": 0.9033544155384335,
      "learning_rate": 5.15133841321167e-06,
      "loss": 0.694,
      "step": 282
    },
    {
      "epoch": 1.6358381502890174,
      "grad_norm": 0.6900225057997373,
      "learning_rate": 5.117714757753045e-06,
      "loss": 0.4881,
      "step": 283
    },
    {
      "epoch": 1.6416184971098264,
      "grad_norm": 0.6742757865306379,
      "learning_rate": 5.084085775140324e-06,
      "loss": 0.5103,
      "step": 284
    },
    {
      "epoch": 1.647398843930636,
      "grad_norm": 0.7059532473644158,
      "learning_rate": 5.050452987245325e-06,
      "loss": 0.4349,
      "step": 285
    },
    {
      "epoch": 1.653179190751445,
      "grad_norm": 0.6564844507129209,
      "learning_rate": 5.016817916112075e-06,
      "loss": 0.5836,
      "step": 286
    },
    {
      "epoch": 1.6589595375722543,
      "grad_norm": 0.6951495372130232,
      "learning_rate": 4.9831820838879255e-06,
      "loss": 0.624,
      "step": 287
    },
    {
      "epoch": 1.6647398843930636,
      "grad_norm": 0.8002765691543589,
      "learning_rate": 4.949547012754676e-06,
      "loss": 0.6247,
      "step": 288
    },
    {
      "epoch": 1.6705202312138727,
      "grad_norm": 0.6945205161978173,
      "learning_rate": 4.915914224859677e-06,
      "loss": 0.5317,
      "step": 289
    },
    {
      "epoch": 1.6763005780346822,
      "grad_norm": 1.0063599327693165,
      "learning_rate": 4.882285242246958e-06,
      "loss": 0.8422,
      "step": 290
    },
    {
      "epoch": 1.6820809248554913,
      "grad_norm": 0.687707175040322,
      "learning_rate": 4.848661586788334e-06,
      "loss": 0.6278,
      "step": 291
    },
    {
      "epoch": 1.6878612716763006,
      "grad_norm": 0.894386154392975,
      "learning_rate": 4.815044780114544e-06,
      "loss": 0.7546,
      "step": 292
    },
    {
      "epoch": 1.69364161849711,
      "grad_norm": 0.6603346113796635,
      "learning_rate": 4.781436343546392e-06,
      "loss": 0.6156,
      "step": 293
    },
    {
      "epoch": 1.699421965317919,
      "grad_norm": 0.886454781643024,
      "learning_rate": 4.7478377980258885e-06,
      "loss": 0.5466,
      "step": 294
    },
    {
      "epoch": 1.7052023121387283,
      "grad_norm": 0.5996464506334955,
      "learning_rate": 4.714250664047428e-06,
      "loss": 0.5173,
      "step": 295
    },
    {
      "epoch": 1.7109826589595376,
      "grad_norm": 0.74394974483986,
      "learning_rate": 4.68067646158898e-06,
      "loss": 0.5619,
      "step": 296
    },
    {
      "epoch": 1.7167630057803467,
      "grad_norm": 0.8641214674891129,
      "learning_rate": 4.647116710043302e-06,
      "loss": 0.5851,
      "step": 297
    },
    {
      "epoch": 1.7225433526011562,
      "grad_norm": 0.6473738175450051,
      "learning_rate": 4.613572928149172e-06,
      "loss": 0.5937,
      "step": 298
    },
    {
      "epoch": 1.7283236994219653,
      "grad_norm": 0.7911023989217947,
      "learning_rate": 4.580046633922675e-06,
      "loss": 0.6134,
      "step": 299
    },
    {
      "epoch": 1.7341040462427746,
      "grad_norm": 0.7199248015046684,
      "learning_rate": 4.546539344588486e-06,
      "loss": 0.4924,
      "step": 300
    },
    {
      "epoch": 1.739884393063584,
      "grad_norm": 0.8611464036629595,
      "learning_rate": 4.513052576511227e-06,
      "loss": 0.5567,
      "step": 301
    },
    {
      "epoch": 1.745664739884393,
      "grad_norm": 0.9153747256071443,
      "learning_rate": 4.47958784512683e-06,
      "loss": 0.5906,
      "step": 302
    },
    {
      "epoch": 1.7514450867052023,
      "grad_norm": 0.5570439958075847,
      "learning_rate": 4.446146664873961e-06,
      "loss": 0.5195,
      "step": 303
    },
    {
      "epoch": 1.7572254335260116,
      "grad_norm": 0.9281845417649991,
      "learning_rate": 4.41273054912549e-06,
      "loss": 0.6441,
      "step": 304
    },
    {
      "epoch": 1.7630057803468207,
      "grad_norm": 0.788201691868334,
      "learning_rate": 4.379341010119992e-06,
      "loss": 0.6661,
      "step": 305
    },
    {
      "epoch": 1.7687861271676302,
      "grad_norm": 0.9529288524795544,
      "learning_rate": 4.34597955889332e-06,
      "loss": 0.7531,
      "step": 306
    },
    {
      "epoch": 1.7745664739884393,
      "grad_norm": 0.7779671173146703,
      "learning_rate": 4.312647705210226e-06,
      "loss": 0.6333,
      "step": 307
    },
    {
      "epoch": 1.7803468208092486,
      "grad_norm": 0.932290786792518,
      "learning_rate": 4.279346957496023e-06,
      "loss": 0.6065,
      "step": 308
    },
    {
      "epoch": 1.7861271676300579,
      "grad_norm": 0.8791757542275365,
      "learning_rate": 4.246078822768339e-06,
      "loss": 0.593,
      "step": 309
    },
    {
      "epoch": 1.791907514450867,
      "grad_norm": 0.8229021097129374,
      "learning_rate": 4.212844806568906e-06,
      "loss": 0.7998,
      "step": 310
    },
    {
      "epoch": 1.7976878612716765,
      "grad_norm": 0.7027823693838428,
      "learning_rate": 4.17964641289543e-06,
      "loss": 0.4884,
      "step": 311
    },
    {
      "epoch": 1.8034682080924855,
      "grad_norm": 0.5638484231065025,
      "learning_rate": 4.1464851441335215e-06,
      "loss": 0.5413,
      "step": 312
    },
    {
      "epoch": 1.8092485549132948,
      "grad_norm": 0.7504131374172438,
      "learning_rate": 4.113362500988722e-06,
      "loss": 0.5272,
      "step": 313
    },
    {
      "epoch": 1.8150289017341041,
      "grad_norm": 0.6740181052595745,
      "learning_rate": 4.080279982418574e-06,
      "loss": 0.5272,
      "step": 314
    },
    {
      "epoch": 1.8208092485549132,
      "grad_norm": 0.6590261922608894,
      "learning_rate": 4.047239085564794e-06,
      "loss": 0.5185,
      "step": 315
    },
    {
      "epoch": 1.8265895953757225,
      "grad_norm": 0.6354968708763732,
      "learning_rate": 4.014241305685514e-06,
      "loss": 0.5437,
      "step": 316
    },
    {
      "epoch": 1.8323699421965318,
      "grad_norm": 0.6088112809354961,
      "learning_rate": 3.98128813608762e-06,
      "loss": 0.4712,
      "step": 317
    },
    {
      "epoch": 1.838150289017341,
      "grad_norm": 0.6374602552721457,
      "learning_rate": 3.948381068059171e-06,
      "loss": 0.5713,
      "step": 318
    },
    {
      "epoch": 1.8439306358381504,
      "grad_norm": 0.7213250401566385,
      "learning_rate": 3.915521590801907e-06,
      "loss": 0.4117,
      "step": 319
    },
    {
      "epoch": 1.8497109826589595,
      "grad_norm": 0.720008546371465,
      "learning_rate": 3.88271119136386e-06,
      "loss": 0.5622,
      "step": 320
    },
    {
      "epoch": 1.8554913294797688,
      "grad_norm": 0.8620941640593333,
      "learning_rate": 3.849951354572057e-06,
      "loss": 0.7968,
      "step": 321
    },
    {
      "epoch": 1.861271676300578,
      "grad_norm": 1.056732583118651,
      "learning_rate": 3.817243562965324e-06,
      "loss": 0.6582,
      "step": 322
    },
    {
      "epoch": 1.8670520231213872,
      "grad_norm": 0.6578509759019269,
      "learning_rate": 3.7845892967271963e-06,
      "loss": 0.541,
      "step": 323
    },
    {
      "epoch": 1.8728323699421965,
      "grad_norm": 0.6057121810732851,
      "learning_rate": 3.7519900336189265e-06,
      "loss": 0.4909,
      "step": 324
    },
    {
      "epoch": 1.8786127167630058,
      "grad_norm": 0.7726511998943779,
      "learning_rate": 3.7194472489126176e-06,
      "loss": 0.6899,
      "step": 325
    },
    {
      "epoch": 1.8843930635838149,
      "grad_norm": 0.8109214205884281,
      "learning_rate": 3.686962415324452e-06,
      "loss": 0.5595,
      "step": 326
    },
    {
      "epoch": 1.8901734104046244,
      "grad_norm": 1.0646480130164524,
      "learning_rate": 3.6545370029480515e-06,
      "loss": 0.606,
      "step": 327
    },
    {
      "epoch": 1.8959537572254335,
      "grad_norm": 0.705969500214819,
      "learning_rate": 3.6221724791879406e-06,
      "loss": 0.7158,
      "step": 328
    },
    {
      "epoch": 1.9017341040462428,
      "grad_norm": 0.8864721580498878,
      "learning_rate": 3.5898703086931474e-06,
      "loss": 0.7594,
      "step": 329
    },
    {
      "epoch": 1.907514450867052,
      "grad_norm": 0.7557686350987431,
      "learning_rate": 3.557631953290914e-06,
      "loss": 0.4859,
      "step": 330
    },
    {
      "epoch": 1.9132947976878611,
      "grad_norm": 1.0351089698366276,
      "learning_rate": 3.5254588719205494e-06,
      "loss": 0.7508,
      "step": 331
    },
    {
      "epoch": 1.9190751445086707,
      "grad_norm": 0.7085730332833243,
      "learning_rate": 3.4933525205673977e-06,
      "loss": 0.6455,
      "step": 332
    },
    {
      "epoch": 1.9248554913294798,
      "grad_norm": 0.9200031613758001,
      "learning_rate": 3.461314352196952e-06,
      "loss": 0.5611,
      "step": 333
    },
    {
      "epoch": 1.930635838150289,
      "grad_norm": 0.6429597743080722,
      "learning_rate": 3.429345816689101e-06,
      "loss": 0.4856,
      "step": 334
    },
    {
      "epoch": 1.9364161849710984,
      "grad_norm": 0.9298164493352193,
      "learning_rate": 3.397448360772516e-06,
      "loss": 0.5274,
      "step": 335
    },
    {
      "epoch": 1.9421965317919074,
      "grad_norm": 0.614550273953037,
      "learning_rate": 3.365623427959175e-06,
      "loss": 0.6099,
      "step": 336
    },
    {
      "epoch": 1.9479768786127167,
      "grad_norm": 0.8692548756868597,
      "learning_rate": 3.3338724584790427e-06,
      "loss": 0.6163,
      "step": 337
    },
    {
      "epoch": 1.953757225433526,
      "grad_norm": 0.7544063125807522,
      "learning_rate": 3.302196889214886e-06,
      "loss": 0.6332,
      "step": 338
    },
    {
      "epoch": 1.9595375722543351,
      "grad_norm": 0.794867920805677,
      "learning_rate": 3.270598153637259e-06,
      "loss": 0.6922,
      "step": 339
    },
    {
      "epoch": 1.9653179190751446,
      "grad_norm": 0.7609575009320511,
      "learning_rate": 3.239077681739618e-06,
      "loss": 0.5322,
      "step": 340
    },
    {
      "epoch": 1.9710982658959537,
      "grad_norm": 0.6900161454249018,
      "learning_rate": 3.2076368999736175e-06,
      "loss": 0.5402,
      "step": 341
    },
    {
      "epoch": 1.976878612716763,
      "grad_norm": 0.8506561948618055,
| "learning_rate": 3.176277231184556e-06, | |
| "loss": 0.6172, | |
| "step": 342 | |
| }, | |
| { | |
| "epoch": 1.9826589595375723, | |
| "grad_norm": 0.6157710849420072, | |
| "learning_rate": 3.14500009454698e-06, | |
| "loss": 0.5905, | |
| "step": 343 | |
| }, | |
| { | |
| "epoch": 1.9884393063583814, | |
| "grad_norm": 0.6934477411408183, | |
| "learning_rate": 3.1138069055004628e-06, | |
| "loss": 0.5682, | |
| "step": 344 | |
| }, | |
| { | |
| "epoch": 1.9942196531791907, | |
| "grad_norm": 0.7277401390122208, | |
| "learning_rate": 3.0826990756855528e-06, | |
| "loss": 0.5831, | |
| "step": 345 | |
| }, | |
| { | |
| "epoch": 2.0, | |
| "grad_norm": 0.7255445232592516, | |
| "learning_rate": 3.0516780128798794e-06, | |
| "loss": 0.4161, | |
| "step": 346 | |
| }, | |
| { | |
| "epoch": 2.005780346820809, | |
| "grad_norm": 0.7986659641333982, | |
| "learning_rate": 3.020745120934455e-06, | |
| "loss": 0.6286, | |
| "step": 347 | |
| }, | |
| { | |
| "epoch": 2.0115606936416186, | |
| "grad_norm": 0.6679979342656628, | |
| "learning_rate": 2.989901799710142e-06, | |
| "loss": 0.4853, | |
| "step": 348 | |
| }, | |
| { | |
| "epoch": 2.0173410404624277, | |
| "grad_norm": 0.9986811632523198, | |
| "learning_rate": 2.9591494450142957e-06, | |
| "loss": 0.6472, | |
| "step": 349 | |
| }, | |
| { | |
| "epoch": 2.023121387283237, | |
| "grad_norm": 0.8311828194343122, | |
| "learning_rate": 2.9284894485376057e-06, | |
| "loss": 0.4739, | |
| "step": 350 | |
| }, | |
| { | |
| "epoch": 2.0289017341040463, | |
| "grad_norm": 0.6678215245683566, | |
| "learning_rate": 2.8979231977911127e-06, | |
| "loss": 0.3969, | |
| "step": 351 | |
| }, | |
| { | |
| "epoch": 2.0346820809248554, | |
| "grad_norm": 0.6708618173706526, | |
| "learning_rate": 2.8674520760434145e-06, | |
| "loss": 0.6153, | |
| "step": 352 | |
| }, | |
| { | |
| "epoch": 2.040462427745665, | |
| "grad_norm": 0.8515187777361045, | |
| "learning_rate": 2.8370774622580644e-06, | |
| "loss": 0.5254, | |
| "step": 353 | |
| }, | |
| { | |
| "epoch": 2.046242774566474, | |
| "grad_norm": 0.6016593646695987, | |
| "learning_rate": 2.806800731031176e-06, | |
| "loss": 0.4817, | |
| "step": 354 | |
| }, | |
| { | |
| "epoch": 2.052023121387283, | |
| "grad_norm": 0.7719152929747317, | |
| "learning_rate": 2.7766232525292104e-06, | |
| "loss": 0.5555, | |
| "step": 355 | |
| }, | |
| { | |
| "epoch": 2.0578034682080926, | |
| "grad_norm": 0.6612185750070453, | |
| "learning_rate": 2.746546392426963e-06, | |
| "loss": 0.5369, | |
| "step": 356 | |
| }, | |
| { | |
| "epoch": 2.0635838150289016, | |
| "grad_norm": 0.7359611638945163, | |
| "learning_rate": 2.7165715118457735e-06, | |
| "loss": 0.5443, | |
| "step": 357 | |
| }, | |
| { | |
| "epoch": 2.069364161849711, | |
| "grad_norm": 0.7388558303950356, | |
| "learning_rate": 2.6866999672919198e-06, | |
| "loss": 0.5849, | |
| "step": 358 | |
| }, | |
| { | |
| "epoch": 2.0751445086705202, | |
| "grad_norm": 0.8550602709603156, | |
| "learning_rate": 2.656933110595233e-06, | |
| "loss": 0.6346, | |
| "step": 359 | |
| }, | |
| { | |
| "epoch": 2.0809248554913293, | |
| "grad_norm": 0.6813466801259701, | |
| "learning_rate": 2.6272722888479152e-06, | |
| "loss": 0.4533, | |
| "step": 360 | |
| }, | |
| { | |
| "epoch": 2.086705202312139, | |
| "grad_norm": 0.9184018851746331, | |
| "learning_rate": 2.5977188443435874e-06, | |
| "loss": 0.5008, | |
| "step": 361 | |
| }, | |
| { | |
| "epoch": 2.092485549132948, | |
| "grad_norm": 0.8469746620559047, | |
| "learning_rate": 2.5682741145165325e-06, | |
| "loss": 0.5568, | |
| "step": 362 | |
| }, | |
| { | |
| "epoch": 2.098265895953757, | |
| "grad_norm": 0.8248199191803897, | |
| "learning_rate": 2.53893943188118e-06, | |
| "loss": 0.5211, | |
| "step": 363 | |
| }, | |
| { | |
| "epoch": 2.1040462427745665, | |
| "grad_norm": 0.6499874475711263, | |
| "learning_rate": 2.5097161239717975e-06, | |
| "loss": 0.5555, | |
| "step": 364 | |
| }, | |
| { | |
| "epoch": 2.1098265895953756, | |
| "grad_norm": 0.7813430444756865, | |
| "learning_rate": 2.4806055132824186e-06, | |
| "loss": 0.6461, | |
| "step": 365 | |
| }, | |
| { | |
| "epoch": 2.115606936416185, | |
| "grad_norm": 0.84725387994917, | |
| "learning_rate": 2.4516089172069852e-06, | |
| "loss": 0.496, | |
| "step": 366 | |
| }, | |
| { | |
| "epoch": 2.121387283236994, | |
| "grad_norm": 0.873004350480058, | |
| "learning_rate": 2.422727647979734e-06, | |
| "loss": 0.6613, | |
| "step": 367 | |
| }, | |
| { | |
| "epoch": 2.1271676300578033, | |
| "grad_norm": 0.675120156410227, | |
| "learning_rate": 2.393963012615817e-06, | |
| "loss": 0.4495, | |
| "step": 368 | |
| }, | |
| { | |
| "epoch": 2.132947976878613, | |
| "grad_norm": 0.7490115944986748, | |
| "learning_rate": 2.365316312852144e-06, | |
| "loss": 0.5333, | |
| "step": 369 | |
| }, | |
| { | |
| "epoch": 2.138728323699422, | |
| "grad_norm": 0.6570964764736189, | |
| "learning_rate": 2.336788845088478e-06, | |
| "loss": 0.523, | |
| "step": 370 | |
| }, | |
| { | |
| "epoch": 2.1445086705202314, | |
| "grad_norm": 1.5378979727083335, | |
| "learning_rate": 2.308381900328767e-06, | |
| "loss": 0.4837, | |
| "step": 371 | |
| }, | |
| { | |
| "epoch": 2.1502890173410405, | |
| "grad_norm": 0.8452714858786144, | |
| "learning_rate": 2.2800967641227127e-06, | |
| "loss": 0.4845, | |
| "step": 372 | |
| }, | |
| { | |
| "epoch": 2.1560693641618496, | |
| "grad_norm": 0.7851766322063914, | |
| "learning_rate": 2.2519347165076067e-06, | |
| "loss": 0.5861, | |
| "step": 373 | |
| }, | |
| { | |
| "epoch": 2.161849710982659, | |
| "grad_norm": 0.7653788839856677, | |
| "learning_rate": 2.223897031950386e-06, | |
| "loss": 0.4178, | |
| "step": 374 | |
| }, | |
| { | |
| "epoch": 2.167630057803468, | |
| "grad_norm": 0.7712566207030392, | |
| "learning_rate": 2.195984979289974e-06, | |
| "loss": 0.4706, | |
| "step": 375 | |
| }, | |
| { | |
| "epoch": 2.1734104046242773, | |
| "grad_norm": 0.6331367645198177, | |
| "learning_rate": 2.1681998216798476e-06, | |
| "loss": 0.4277, | |
| "step": 376 | |
| }, | |
| { | |
| "epoch": 2.179190751445087, | |
| "grad_norm": 0.6644887559230318, | |
| "learning_rate": 2.140542816530882e-06, | |
| "loss": 0.4447, | |
| "step": 377 | |
| }, | |
| { | |
| "epoch": 2.184971098265896, | |
| "grad_norm": 0.6124699875496277, | |
| "learning_rate": 2.1130152154544346e-06, | |
| "loss": 0.5207, | |
| "step": 378 | |
| }, | |
| { | |
| "epoch": 2.1907514450867054, | |
| "grad_norm": 0.6887031182813522, | |
| "learning_rate": 2.0856182642057182e-06, | |
| "loss": 0.5315, | |
| "step": 379 | |
| }, | |
| { | |
| "epoch": 2.1965317919075145, | |
| "grad_norm": 0.67468259231777, | |
| "learning_rate": 2.058353202627417e-06, | |
| "loss": 0.4879, | |
| "step": 380 | |
| }, | |
| { | |
| "epoch": 2.2023121387283235, | |
| "grad_norm": 0.7788323939968849, | |
| "learning_rate": 2.0312212645935755e-06, | |
| "loss": 0.497, | |
| "step": 381 | |
| }, | |
| { | |
| "epoch": 2.208092485549133, | |
| "grad_norm": 0.6709526713707947, | |
| "learning_rate": 2.0042236779537668e-06, | |
| "loss": 0.6146, | |
| "step": 382 | |
| }, | |
| { | |
| "epoch": 2.213872832369942, | |
| "grad_norm": 0.6359997471362626, | |
| "learning_rate": 1.977361664477518e-06, | |
| "loss": 0.5428, | |
| "step": 383 | |
| }, | |
| { | |
| "epoch": 2.2196531791907512, | |
| "grad_norm": 0.8088756710901137, | |
| "learning_rate": 1.950636439799029e-06, | |
| "loss": 0.5739, | |
| "step": 384 | |
| }, | |
| { | |
| "epoch": 2.2254335260115607, | |
| "grad_norm": 0.670342241899264, | |
| "learning_rate": 1.924049213362153e-06, | |
| "loss": 0.4031, | |
| "step": 385 | |
| }, | |
| { | |
| "epoch": 2.23121387283237, | |
| "grad_norm": 0.8131170961600367, | |
| "learning_rate": 1.8976011883656632e-06, | |
| "loss": 0.5596, | |
| "step": 386 | |
| }, | |
| { | |
| "epoch": 2.2369942196531793, | |
| "grad_norm": 0.6561035218947915, | |
| "learning_rate": 1.8712935617088067e-06, | |
| "loss": 0.5486, | |
| "step": 387 | |
| }, | |
| { | |
| "epoch": 2.2427745664739884, | |
| "grad_norm": 0.6820151386870905, | |
| "learning_rate": 1.8451275239371337e-06, | |
| "loss": 0.6458, | |
| "step": 388 | |
| }, | |
| { | |
| "epoch": 2.2485549132947975, | |
| "grad_norm": 0.6098685514723682, | |
| "learning_rate": 1.8191042591886198e-06, | |
| "loss": 0.4581, | |
| "step": 389 | |
| }, | |
| { | |
| "epoch": 2.254335260115607, | |
| "grad_norm": 0.8452109569036743, | |
| "learning_rate": 1.7932249451400863e-06, | |
| "loss": 0.6904, | |
| "step": 390 | |
| }, | |
| { | |
| "epoch": 2.260115606936416, | |
| "grad_norm": 0.6659828594692649, | |
| "learning_rate": 1.767490752953896e-06, | |
| "loss": 0.3941, | |
| "step": 391 | |
| }, | |
| { | |
| "epoch": 2.2658959537572256, | |
| "grad_norm": 0.5756879764229126, | |
| "learning_rate": 1.7419028472249566e-06, | |
| "loss": 0.5507, | |
| "step": 392 | |
| }, | |
| { | |
| "epoch": 2.2716763005780347, | |
| "grad_norm": 0.6604711511382437, | |
| "learning_rate": 1.7164623859280144e-06, | |
| "loss": 0.5287, | |
| "step": 393 | |
| }, | |
| { | |
| "epoch": 2.277456647398844, | |
| "grad_norm": 1.5345932250020973, | |
| "learning_rate": 1.6911705203652506e-06, | |
| "loss": 0.5368, | |
| "step": 394 | |
| }, | |
| { | |
| "epoch": 2.2832369942196533, | |
| "grad_norm": 0.6641692298107834, | |
| "learning_rate": 1.6660283951141847e-06, | |
| "loss": 0.4684, | |
| "step": 395 | |
| }, | |
| { | |
| "epoch": 2.2890173410404624, | |
| "grad_norm": 0.8484405008890961, | |
| "learning_rate": 1.641037147975872e-06, | |
| "loss": 0.5993, | |
| "step": 396 | |
| }, | |
| { | |
| "epoch": 2.294797687861272, | |
| "grad_norm": 0.9366024809353254, | |
| "learning_rate": 1.616197909923412e-06, | |
| "loss": 0.5794, | |
| "step": 397 | |
| }, | |
| { | |
| "epoch": 2.300578034682081, | |
| "grad_norm": 0.5673626418707731, | |
| "learning_rate": 1.591511805050772e-06, | |
| "loss": 0.4806, | |
| "step": 398 | |
| }, | |
| { | |
| "epoch": 2.30635838150289, | |
| "grad_norm": 0.7242857939505446, | |
| "learning_rate": 1.5669799505219069e-06, | |
| "loss": 0.502, | |
| "step": 399 | |
| }, | |
| { | |
| "epoch": 2.3121387283236996, | |
| "grad_norm": 0.6060165227720642, | |
| "learning_rate": 1.542603456520214e-06, | |
| "loss": 0.4041, | |
| "step": 400 | |
| }, | |
| { | |
| "epoch": 2.3179190751445087, | |
| "grad_norm": 0.6560686130548388, | |
| "learning_rate": 1.5183834261982804e-06, | |
| "loss": 0.5262, | |
| "step": 401 | |
| }, | |
| { | |
| "epoch": 2.3236994219653178, | |
| "grad_norm": 0.6541682337960161, | |
| "learning_rate": 1.4943209556279698e-06, | |
| "loss": 0.5337, | |
| "step": 402 | |
| }, | |
| { | |
| "epoch": 2.3294797687861273, | |
| "grad_norm": 0.6410584716703046, | |
| "learning_rate": 1.4704171337508144e-06, | |
| "loss": 0.5113, | |
| "step": 403 | |
| }, | |
| { | |
| "epoch": 2.3352601156069364, | |
| "grad_norm": 0.688298063944939, | |
| "learning_rate": 1.4466730423287385e-06, | |
| "loss": 0.5875, | |
| "step": 404 | |
| }, | |
| { | |
| "epoch": 2.3410404624277454, | |
| "grad_norm": 0.6724254637374641, | |
| "learning_rate": 1.423089755895095e-06, | |
| "loss": 0.396, | |
| "step": 405 | |
| }, | |
| { | |
| "epoch": 2.346820809248555, | |
| "grad_norm": 0.5864952096498138, | |
| "learning_rate": 1.399668341706053e-06, | |
| "loss": 0.4239, | |
| "step": 406 | |
| }, | |
| { | |
| "epoch": 2.352601156069364, | |
| "grad_norm": 0.6178918581100669, | |
| "learning_rate": 1.3764098596922865e-06, | |
| "loss": 0.5631, | |
| "step": 407 | |
| }, | |
| { | |
| "epoch": 2.3583815028901736, | |
| "grad_norm": 0.9247148328484425, | |
| "learning_rate": 1.3533153624110097e-06, | |
| "loss": 0.4963, | |
| "step": 408 | |
| }, | |
| { | |
| "epoch": 2.3641618497109826, | |
| "grad_norm": 0.8654766383717402, | |
| "learning_rate": 1.3303858949983495e-06, | |
| "loss": 0.5263, | |
| "step": 409 | |
| }, | |
| { | |
| "epoch": 2.3699421965317917, | |
| "grad_norm": 0.7195607163743138, | |
| "learning_rate": 1.3076224951220413e-06, | |
| "loss": 0.6329, | |
| "step": 410 | |
| }, | |
| { | |
| "epoch": 2.3757225433526012, | |
| "grad_norm": 0.6949276909817108, | |
| "learning_rate": 1.2850261929344748e-06, | |
| "loss": 0.4078, | |
| "step": 411 | |
| }, | |
| { | |
| "epoch": 2.3815028901734103, | |
| "grad_norm": 0.6664530713874662, | |
| "learning_rate": 1.2625980110260711e-06, | |
| "loss": 0.5412, | |
| "step": 412 | |
| }, | |
| { | |
| "epoch": 2.38728323699422, | |
| "grad_norm": 0.7267558282861785, | |
| "learning_rate": 1.2403389643790086e-06, | |
| "loss": 0.4662, | |
| "step": 413 | |
| }, | |
| { | |
| "epoch": 2.393063583815029, | |
| "grad_norm": 0.6623588377036678, | |
| "learning_rate": 1.2182500603212882e-06, | |
| "loss": 0.4992, | |
| "step": 414 | |
| }, | |
| { | |
| "epoch": 2.398843930635838, | |
| "grad_norm": 0.6718558580372862, | |
| "learning_rate": 1.1963322984811454e-06, | |
| "loss": 0.5274, | |
| "step": 415 | |
| }, | |
| { | |
| "epoch": 2.4046242774566475, | |
| "grad_norm": 0.9631367487372555, | |
| "learning_rate": 1.1745866707418146e-06, | |
| "loss": 0.643, | |
| "step": 416 | |
| }, | |
| { | |
| "epoch": 2.4104046242774566, | |
| "grad_norm": 0.7063167334871303, | |
| "learning_rate": 1.1530141611966438e-06, | |
| "loss": 0.4825, | |
| "step": 417 | |
| }, | |
| { | |
| "epoch": 2.416184971098266, | |
| "grad_norm": 0.7292394956959912, | |
| "learning_rate": 1.1316157461045553e-06, | |
| "loss": 0.6665, | |
| "step": 418 | |
| }, | |
| { | |
| "epoch": 2.421965317919075, | |
| "grad_norm": 0.926762344420454, | |
| "learning_rate": 1.1103923938458677e-06, | |
| "loss": 0.6534, | |
| "step": 419 | |
| }, | |
| { | |
| "epoch": 2.4277456647398843, | |
| "grad_norm": 0.6784779321154496, | |
| "learning_rate": 1.0893450648784736e-06, | |
| "loss": 0.5104, | |
| "step": 420 | |
| }, | |
| { | |
| "epoch": 2.433526011560694, | |
| "grad_norm": 0.7772613387332611, | |
| "learning_rate": 1.0684747116943683e-06, | |
| "loss": 0.3761, | |
| "step": 421 | |
| }, | |
| { | |
| "epoch": 2.439306358381503, | |
| "grad_norm": 0.6162486774128405, | |
| "learning_rate": 1.04778227877655e-06, | |
| "loss": 0.4899, | |
| "step": 422 | |
| }, | |
| { | |
| "epoch": 2.445086705202312, | |
| "grad_norm": 0.961701488339332, | |
| "learning_rate": 1.0272687025562794e-06, | |
| "loss": 0.6117, | |
| "step": 423 | |
| }, | |
| { | |
| "epoch": 2.4508670520231215, | |
| "grad_norm": 0.6313815028131994, | |
| "learning_rate": 1.006934911370696e-06, | |
| "loss": 0.4986, | |
| "step": 424 | |
| }, | |
| { | |
| "epoch": 2.4566473988439306, | |
| "grad_norm": 0.6603889532264356, | |
| "learning_rate": 9.867818254208122e-07, | |
| "loss": 0.5369, | |
| "step": 425 | |
| }, | |
| { | |
| "epoch": 2.4624277456647397, | |
| "grad_norm": 0.8843950594070151, | |
| "learning_rate": 9.668103567298615e-07, | |
| "loss": 0.4081, | |
| "step": 426 | |
| }, | |
| { | |
| "epoch": 2.468208092485549, | |
| "grad_norm": 0.5618293210160273, | |
| "learning_rate": 9.470214091020358e-07, | |
| "loss": 0.4613, | |
| "step": 427 | |
| }, | |
| { | |
| "epoch": 2.4739884393063583, | |
| "grad_norm": 0.7055186578758241, | |
| "learning_rate": 9.274158780815767e-07, | |
| "loss": 0.6163, | |
| "step": 428 | |
| }, | |
| { | |
| "epoch": 2.479768786127168, | |
| "grad_norm": 0.5685837831584806, | |
| "learning_rate": 9.079946509122473e-07, | |
| "loss": 0.3661, | |
| "step": 429 | |
| }, | |
| { | |
| "epoch": 2.485549132947977, | |
| "grad_norm": 0.784214174921459, | |
| "learning_rate": 8.887586064971859e-07, | |
| "loss": 0.6345, | |
| "step": 430 | |
| }, | |
| { | |
| "epoch": 2.491329479768786, | |
| "grad_norm": 0.6305599218370685, | |
| "learning_rate": 8.697086153591289e-07, | |
| "loss": 0.416, | |
| "step": 431 | |
| }, | |
| { | |
| "epoch": 2.4971098265895955, | |
| "grad_norm": 0.6688718546399536, | |
| "learning_rate": 8.508455396010096e-07, | |
| "loss": 0.4818, | |
| "step": 432 | |
| }, | |
| { | |
| "epoch": 2.5028901734104045, | |
| "grad_norm": 0.6161066570746109, | |
| "learning_rate": 8.321702328669534e-07, | |
| "loss": 0.5282, | |
| "step": 433 | |
| }, | |
| { | |
| "epoch": 2.508670520231214, | |
| "grad_norm": 0.6566268049892484, | |
| "learning_rate": 8.136835403036413e-07, | |
| "loss": 0.4428, | |
| "step": 434 | |
| }, | |
| { | |
| "epoch": 2.514450867052023, | |
| "grad_norm": 1.006353487708078, | |
| "learning_rate": 7.95386298522065e-07, | |
| "loss": 0.5345, | |
| "step": 435 | |
| }, | |
| { | |
| "epoch": 2.520231213872832, | |
| "grad_norm": 0.6413665105044725, | |
| "learning_rate": 7.772793355596597e-07, | |
| "loss": 0.4737, | |
| "step": 436 | |
| }, | |
| { | |
| "epoch": 2.5260115606936417, | |
| "grad_norm": 0.6293528837134872, | |
| "learning_rate": 7.593634708428438e-07, | |
| "loss": 0.5381, | |
| "step": 437 | |
| }, | |
| { | |
| "epoch": 2.531791907514451, | |
| "grad_norm": 0.6738700311860045, | |
| "learning_rate": 7.416395151499223e-07, | |
| "loss": 0.5094, | |
| "step": 438 | |
| }, | |
| { | |
| "epoch": 2.5375722543352603, | |
| "grad_norm": 0.6898932232980165, | |
| "learning_rate": 7.241082705744057e-07, | |
| "loss": 0.4759, | |
| "step": 439 | |
| }, | |
| { | |
| "epoch": 2.5433526011560694, | |
| "grad_norm": 0.5745027363154075, | |
| "learning_rate": 7.067705304887074e-07, | |
| "loss": 0.5029, | |
| "step": 440 | |
| }, | |
| { | |
| "epoch": 2.5491329479768785, | |
| "grad_norm": 0.6453821301523448, | |
| "learning_rate": 6.896270795082394e-07, | |
| "loss": 0.5204, | |
| "step": 441 | |
| }, | |
| { | |
| "epoch": 2.5549132947976876, | |
| "grad_norm": 0.6922347786512014, | |
| "learning_rate": 6.726786934559048e-07, | |
| "loss": 0.3755, | |
| "step": 442 | |
| }, | |
| { | |
| "epoch": 2.560693641618497, | |
| "grad_norm": 0.6866975025738358, | |
| "learning_rate": 6.559261393269872e-07, | |
| "loss": 0.5367, | |
| "step": 443 | |
| }, | |
| { | |
| "epoch": 2.5664739884393066, | |
| "grad_norm": 0.6700088622432993, | |
| "learning_rate": 6.39370175254444e-07, | |
| "loss": 0.3622, | |
| "step": 444 | |
| }, | |
| { | |
| "epoch": 2.5722543352601157, | |
| "grad_norm": 0.5402057213183034, | |
| "learning_rate": 6.230115504745954e-07, | |
| "loss": 0.3243, | |
| "step": 445 | |
| }, | |
| { | |
| "epoch": 2.578034682080925, | |
| "grad_norm": 0.7350281504880568, | |
| "learning_rate": 6.06851005293217e-07, | |
| "loss": 0.5981, | |
| "step": 446 | |
| }, | |
| { | |
| "epoch": 2.583815028901734, | |
| "grad_norm": 0.6703540902498925, | |
| "learning_rate": 5.908892710520375e-07, | |
| "loss": 0.4066, | |
| "step": 447 | |
| }, | |
| { | |
| "epoch": 2.5895953757225434, | |
| "grad_norm": 0.7258590657543835, | |
| "learning_rate": 5.75127070095643e-07, | |
| "loss": 0.4559, | |
| "step": 448 | |
| }, | |
| { | |
| "epoch": 2.5953757225433525, | |
| "grad_norm": 0.6704672360240732, | |
| "learning_rate": 5.595651157387855e-07, | |
| "loss": 0.4375, | |
| "step": 449 | |
| }, | |
| { | |
| "epoch": 2.601156069364162, | |
| "grad_norm": 0.6579385787797936, | |
| "learning_rate": 5.442041122341057e-07, | |
| "loss": 0.5388, | |
| "step": 450 | |
| }, | |
| { | |
| "epoch": 2.606936416184971, | |
| "grad_norm": 0.8553046424907753, | |
| "learning_rate": 5.290447547402594e-07, | |
| "loss": 0.7152, | |
| "step": 451 | |
| }, | |
| { | |
| "epoch": 2.61271676300578, | |
| "grad_norm": 0.6877484771956439, | |
| "learning_rate": 5.14087729290459e-07, | |
| "loss": 0.5358, | |
| "step": 452 | |
| }, | |
| { | |
| "epoch": 2.6184971098265897, | |
| "grad_norm": 0.6498973479008244, | |
| "learning_rate": 4.993337127614273e-07, | |
| "loss": 0.4932, | |
| "step": 453 | |
| }, | |
| { | |
| "epoch": 2.6242774566473988, | |
| "grad_norm": 0.770132063847202, | |
| "learning_rate": 4.847833728427636e-07, | |
| "loss": 0.6222, | |
| "step": 454 | |
| }, | |
| { | |
| "epoch": 2.6300578034682083, | |
| "grad_norm": 0.7044746862566583, | |
| "learning_rate": 4.7043736800673254e-07, | |
| "loss": 0.3938, | |
| "step": 455 | |
| }, | |
| { | |
| "epoch": 2.6358381502890174, | |
| "grad_norm": 0.6702798408184912, | |
| "learning_rate": 4.5629634747845764e-07, | |
| "loss": 0.4261, | |
| "step": 456 | |
| }, | |
| { | |
| "epoch": 2.6416184971098264, | |
| "grad_norm": 1.1172345643942705, | |
| "learning_rate": 4.423609512065485e-07, | |
| "loss": 0.6234, | |
| "step": 457 | |
| }, | |
| { | |
| "epoch": 2.647398843930636, | |
| "grad_norm": 0.7659921270327145, | |
| "learning_rate": 4.2863180983413744e-07, | |
| "loss": 0.6352, | |
| "step": 458 | |
| }, | |
| { | |
| "epoch": 2.653179190751445, | |
| "grad_norm": 0.7276018590380591, | |
| "learning_rate": 4.1510954467033457e-07, | |
| "loss": 0.5297, | |
| "step": 459 | |
| }, | |
| { | |
| "epoch": 2.6589595375722546, | |
| "grad_norm": 0.6190001997300761, | |
| "learning_rate": 4.0179476766211865e-07, | |
| "loss": 0.5252, | |
| "step": 460 | |
| }, | |
| { | |
| "epoch": 2.6647398843930636, | |
| "grad_norm": 0.6344762030667028, | |
| "learning_rate": 3.8868808136663995e-07, | |
| "loss": 0.5306, | |
| "step": 461 | |
| }, | |
| { | |
| "epoch": 2.6705202312138727, | |
| "grad_norm": 0.5759828123397588, | |
| "learning_rate": 3.757900789239516e-07, | |
| "loss": 0.5379, | |
| "step": 462 | |
| }, | |
| { | |
| "epoch": 2.6763005780346822, | |
| "grad_norm": 0.6866121028719112, | |
| "learning_rate": 3.631013440301645e-07, | |
| "loss": 0.4335, | |
| "step": 463 | |
| }, | |
| { | |
| "epoch": 2.6820809248554913, | |
| "grad_norm": 0.6463962239601395, | |
| "learning_rate": 3.50622450911039e-07, | |
| "loss": 0.5133, | |
| "step": 464 | |
| }, | |
| { | |
| "epoch": 2.687861271676301, | |
| "grad_norm": 0.653588946548295, | |
| "learning_rate": 3.383539642959915e-07, | |
| "loss": 0.5014, | |
| "step": 465 | |
| }, | |
| { | |
| "epoch": 2.69364161849711, | |
| "grad_norm": 0.7652801505954091, | |
| "learning_rate": 3.262964393925433e-07, | |
| "loss": 0.4342, | |
| "step": 466 | |
| }, | |
| { | |
| "epoch": 2.699421965317919, | |
| "grad_norm": 0.722324748328315, | |
| "learning_rate": 3.144504218611899e-07, | |
| "loss": 0.4172, | |
| "step": 467 | |
| }, | |
| { | |
| "epoch": 2.705202312138728, | |
| "grad_norm": 0.7449073337272253, | |
| "learning_rate": 3.028164477907125e-07, | |
| "loss": 0.5408, | |
| "step": 468 | |
| }, | |
| { | |
| "epoch": 2.7109826589595376, | |
| "grad_norm": 1.2797339305753486, | |
| "learning_rate": 2.913950436739116e-07, | |
| "loss": 0.5264, | |
| "step": 469 | |
| }, | |
| { | |
| "epoch": 2.7167630057803467, | |
| "grad_norm": 0.8483318778291473, | |
| "learning_rate": 2.8018672638378486e-07, | |
| "loss": 0.5537, | |
| "step": 470 | |
| }, | |
| { | |
| "epoch": 2.722543352601156, | |
| "grad_norm": 0.6475041734782216, | |
| "learning_rate": 2.6919200315013606e-07, | |
| "loss": 0.598, | |
| "step": 471 | |
| }, | |
| { | |
| "epoch": 2.7283236994219653, | |
| "grad_norm": 0.6088717228752758, | |
| "learning_rate": 2.5841137153661765e-07, | |
| "loss": 0.3216, | |
| "step": 472 | |
| }, | |
| { | |
| "epoch": 2.7341040462427744, | |
| "grad_norm": 0.6831655278694656, | |
| "learning_rate": 2.4784531941821675e-07, | |
| "loss": 0.5108, | |
| "step": 473 | |
| }, | |
| { | |
| "epoch": 2.739884393063584, | |
| "grad_norm": 0.7006518455929439, | |
| "learning_rate": 2.3749432495917546e-07, | |
| "loss": 0.4978, | |
| "step": 474 | |
| }, | |
| { | |
| "epoch": 2.745664739884393, | |
| "grad_norm": 0.6297202502249554, | |
| "learning_rate": 2.2735885659134927e-07, | |
| "loss": 0.4321, | |
| "step": 475 | |
| }, | |
| { | |
| "epoch": 2.7514450867052025, | |
| "grad_norm": 0.6678663798391579, | |
| "learning_rate": 2.1743937299301242e-07, | |
| "loss": 0.5351, | |
| "step": 476 | |
| }, | |
| { | |
| "epoch": 2.7572254335260116, | |
| "grad_norm": 0.6561058848775081, | |
| "learning_rate": 2.0773632306809622e-07, | |
| "loss": 0.519, | |
| "step": 477 | |
| }, | |
| { | |
| "epoch": 2.7630057803468207, | |
| "grad_norm": 0.7009614607634279, | |
| "learning_rate": 1.9825014592587844e-07, | |
| "loss": 0.4933, | |
| "step": 478 | |
| }, | |
| { | |
| "epoch": 2.76878612716763, | |
| "grad_norm": 0.642529585526624, | |
| "learning_rate": 1.889812708611083e-07, | |
| "loss": 0.4666, | |
| "step": 479 | |
| }, | |
| { | |
| "epoch": 2.7745664739884393, | |
| "grad_norm": 0.7789251421262561, | |
| "learning_rate": 1.7993011733458077e-07, | |
| "loss": 0.5676, | |
| "step": 480 | |
| }, | |
| { | |
| "epoch": 2.7803468208092488, | |
| "grad_norm": 0.7662588228210795, | |
| "learning_rate": 1.7109709495415073e-07, | |
| "loss": 0.5911, | |
| "step": 481 | |
| }, | |
| { | |
| "epoch": 2.786127167630058, | |
| "grad_norm": 0.6573273387890879, | |
| "learning_rate": 1.624826034562016e-07, | |
| "loss": 0.4786, | |
| "step": 482 | |
| }, | |
| { | |
| "epoch": 2.791907514450867, | |
| "grad_norm": 0.7395909205331832, | |
| "learning_rate": 1.5408703268754988e-07, | |
| "loss": 0.4978, | |
| "step": 483 | |
| }, | |
| { | |
| "epoch": 2.7976878612716765, | |
| "grad_norm": 0.7598293296000616, | |
| "learning_rate": 1.459107625878059e-07, | |
| "loss": 0.7086, | |
| "step": 484 | |
| }, | |
| { | |
| "epoch": 2.8034682080924855, | |
| "grad_norm": 0.8008269666978963, | |
| "learning_rate": 1.3795416317218036e-07, | |
| "loss": 0.441, | |
| "step": 485 | |
| }, | |
| { | |
| "epoch": 2.809248554913295, | |
| "grad_norm": 0.607269773087072, | |
| "learning_rate": 1.3021759451473548e-07, | |
| "loss": 0.4577, | |
| "step": 486 | |
| }, | |
| { | |
| "epoch": 2.815028901734104, | |
| "grad_norm": 0.8080424613928077, | |
| "learning_rate": 1.2270140673209473e-07, | |
| "loss": 0.4894, | |
| "step": 487 | |
| }, | |
| { | |
| "epoch": 2.820809248554913, | |
| "grad_norm": 0.6835264471975695, | |
| "learning_rate": 1.1540593996759441e-07, | |
| "loss": 0.4831, | |
| "step": 488 | |
| }, | |
| { | |
| "epoch": 2.8265895953757223, | |
| "grad_norm": 0.5617365597730085, | |
| "learning_rate": 1.0833152437589423e-07, | |
| "loss": 0.4427, | |
| "step": 489 | |
| }, | |
| { | |
| "epoch": 2.832369942196532, | |
| "grad_norm": 0.740772247640691, | |
| "learning_rate": 1.0147848010803319e-07, | |
| "loss": 0.6623, | |
| "step": 490 | |
| }, | |
| { | |
| "epoch": 2.838150289017341, | |
| "grad_norm": 0.6572245995068094, | |
| "learning_rate": 9.484711729694229e-08, | |
| "loss": 0.349, | |
| "step": 491 | |
| }, | |
| { | |
| "epoch": 2.8439306358381504, | |
| "grad_norm": 0.5552515983374141, | |
| "learning_rate": 8.8437736043408e-08, | |
| "loss": 0.3654, | |
| "step": 492 | |
| }, | |
| { | |
| "epoch": 2.8497109826589595, | |
| "grad_norm": 0.8034542181899899, | |
| "learning_rate": 8.225062640249636e-08, | |
| "loss": 0.5365, | |
| "step": 493 | |
| }, | |
| { | |
| "epoch": 2.8554913294797686, | |
| "grad_norm": 0.8168265074856756, | |
| "learning_rate": 7.628606837041974e-08, | |
| "loss": 0.5066, | |
| "step": 494 | |
| }, | |
| { | |
| "epoch": 2.861271676300578, | |
| "grad_norm": 0.7056216331593352, | |
| "learning_rate": 7.05443318718707e-08, | |
| "loss": 0.5115, | |
| "step": 495 | |
| }, | |
| { | |
| "epoch": 2.867052023121387, | |
| "grad_norm": 0.5871150000820912, | |
| "learning_rate": 6.502567674780524e-08, | |
| "loss": 0.4359, | |
| "step": 496 | |
| }, | |
| { | |
| "epoch": 2.8728323699421967, | |
| "grad_norm": 0.7322597178201294, | |
| "learning_rate": 5.973035274368266e-08, | |
| "loss": 0.5456, | |
| "step": 497 | |
| }, | |
| { | |
| "epoch": 2.878612716763006, | |
| "grad_norm": 0.8233622518580206, | |
| "learning_rate": 5.465859949816299e-08, | |
| "loss": 0.4307, | |
| "step": 498 | |
| }, | |
| { | |
| "epoch": 2.884393063583815, | |
| "grad_norm": 0.5931005955566576, | |
| "learning_rate": 4.981064653226564e-08, | |
| "loss": 0.4317, | |
| "step": 499 | |
| }, | |
| { | |
| "epoch": 2.8901734104046244, | |
| "grad_norm": 0.6617780255291629, | |
| "learning_rate": 4.5186713238979385e-08, | |
| "loss": 0.4231, | |
| "step": 500 | |
| }, | |
| { | |
| "epoch": 2.8959537572254335, | |
| "grad_norm": 0.7058701623939151, | |
| "learning_rate": 4.078700887333365e-08, | |
| "loss": 0.4358, | |
| "step": 501 | |
| }, | |
| { | |
| "epoch": 2.901734104046243, | |
| "grad_norm": 0.8650529123196282, | |
| "learning_rate": 3.6611732542931044e-08, | |
| "loss": 0.4707, | |
| "step": 502 | |
| }, | |
| { | |
| "epoch": 2.907514450867052, | |
| "grad_norm": 0.9022908071705149, | |
| "learning_rate": 3.266107319893463e-08, | |
| "loss": 0.58, | |
| "step": 503 | |
| }, | |
| { | |
| "epoch": 2.913294797687861, | |
| "grad_norm": 0.6494927730691019, | |
| "learning_rate": 2.89352096275175e-08, | |
| "loss": 0.4633, | |
| "step": 504 | |
| }, | |
| { | |
| "epoch": 2.9190751445086707, | |
| "grad_norm": 0.7088859627525681, | |
| "learning_rate": 2.5434310441773135e-08, | |
| "loss": 0.4791, | |
| "step": 505 | |
| }, | |
| { | |
| "epoch": 2.9248554913294798, | |
| "grad_norm": 0.5591513082758558, | |
| "learning_rate": 2.2158534074083193e-08, | |
| "loss": 0.5138, | |
| "step": 506 | |
| }, | |
| { | |
| "epoch": 2.9306358381502893, | |
| "grad_norm": 0.5945985036565976, | |
| "learning_rate": 1.910802876894824e-08, | |
| "loss": 0.4747, | |
| "step": 507 | |
| }, | |
| { | |
| "epoch": 2.9364161849710984, | |
| "grad_norm": 0.8039612887428521, | |
| "learning_rate": 1.6282932576279775e-08, | |
| "loss": 0.5382, | |
| "step": 508 | |
| }, | |
| { | |
| "epoch": 2.9421965317919074, | |
| "grad_norm": 0.6149926780604529, | |
| "learning_rate": 1.3683373345150796e-08, | |
| "loss": 0.4795, | |
| "step": 509 | |
| }, | |
| { | |
| "epoch": 2.9479768786127165, | |
| "grad_norm": 0.6444388317466117, | |
| "learning_rate": 1.1309468718013194e-08, | |
| "loss": 0.3789, | |
| "step": 510 | |
| }, | |
| { | |
| "epoch": 2.953757225433526, | |
| "grad_norm": 0.5826132360555071, | |
| "learning_rate": 9.16132612537035e-09, | |
| "loss": 0.5196, | |
| "step": 511 | |
| }, | |
| { | |
| "epoch": 2.959537572254335, | |
| "grad_norm": 0.8093687170769972, | |
| "learning_rate": 7.2390427809176934e-09, | |
| "loss": 0.5751, | |
| "step": 512 | |
| }, | |
| { | |
| "epoch": 2.9653179190751446, | |
| "grad_norm": 0.8287185571079843, | |
| "learning_rate": 5.542705677143434e-09, | |
| "loss": 0.7386, | |
| "step": 513 | |
| }, | |
| { | |
| "epoch": 2.9710982658959537, | |
| "grad_norm": 0.6711414362177306, | |
| "learning_rate": 4.072391581388946e-09, | |
| "loss": 0.4838, | |
| "step": 514 | |
| }, | |
| { | |
| "epoch": 2.976878612716763, | |
| "grad_norm": 0.6330747713176181, | |
| "learning_rate": 2.8281670323798693e-09, | |
| "loss": 0.442, | |
| "step": 515 | |
| }, | |
| { | |
| "epoch": 2.9826589595375723, | |
| "grad_norm": 0.5833032431838548, | |
| "learning_rate": 1.8100883372085266e-09, | |
| "loss": 0.4679, | |
| "step": 516 | |
| }, | |
| { | |
| "epoch": 2.9884393063583814, | |
| "grad_norm": 0.6448004089646303, | |
| "learning_rate": 1.0182015687909552e-09, | |
| "loss": 0.5389, | |
| "step": 517 | |
| }, | |
| { | |
| "epoch": 2.994219653179191, | |
| "grad_norm": 0.6724467232133748, | |
| "learning_rate": 4.5254256377913474e-10, | |
| "loss": 0.5033, | |
| "step": 518 | |
| }, | |
| { | |
| "epoch": 3.0, | |
| "grad_norm": 0.6000692854824138, | |
| "learning_rate": 1.1313692094117034e-10, | |
| "loss": 0.4207, | |
| "step": 519 | |
| }, | |
| { | |
| "epoch": 3.0, | |
| "step": 519, | |
| "total_flos": 64152198111232.0, | |
| "train_loss": 0.8013873731929213, | |
| "train_runtime": 3362.3689, | |
| "train_samples_per_second": 1.852, | |
| "train_steps_per_second": 0.154 | |
| } | |
| ], | |
| "logging_steps": 1, | |
| "max_steps": 519, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 3, | |
| "save_steps": 500, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": true | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 64152198111232.0, | |
| "train_batch_size": 1, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |
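
The file above follows the layout of a `trainer_state.json` written by the Hugging Face `Trainer`: per-step records in `log_history` (each with `epoch`, `step`, `loss`, `learning_rate`, and `grad_norm`), a final summary entry carrying `train_loss` and runtime statistics instead of `loss`, and top-level run metadata. As a minimal sketch of how such a file can be inspected, the Python snippet below loads it and plots the loss curve and learning-rate schedule; the filename `trainer_state.json` is an assumption (adjust the path to wherever the checkpoint was saved), and the summary entry is filtered out by checking for the `loss` key.

```python
import json

import matplotlib.pyplot as plt

# Load the trainer state exported by the Hugging Face Trainer.
# "trainer_state.json" is the conventional filename; adjust the path as needed.
with open("trainer_state.json") as f:
    state = json.load(f)

# Keep only the per-step records: the final summary entry reports
# "train_loss" / "train_runtime" rather than a per-step "loss".
records = [r for r in state["log_history"] if "loss" in r]

steps = [r["step"] for r in records]
losses = [r["loss"] for r in records]
lrs = [r["learning_rate"] for r in records]

# Plot the training loss and the learning-rate schedule side by side.
fig, (ax_loss, ax_lr) = plt.subplots(1, 2, figsize=(10, 4))
ax_loss.plot(steps, losses)
ax_loss.set(xlabel="step", ylabel="loss", title="Training loss")
ax_lr.plot(steps, lrs)
ax_lr.set(xlabel="step", ylabel="learning rate", title="LR schedule")
fig.tight_layout()
plt.show()
```

For this particular run, the plot would show the loss falling from the ~3-6 range at the first steps to roughly 0.4-0.6 by epoch 3, alongside a learning rate that warms up early and then decays smoothly toward zero at `max_steps` (519).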