{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.10024330900243308,
  "eval_steps": 500,
  "global_step": 1030,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 9.732360097323601e-05,
      "grad_norm": 16.13226202148005,
      "learning_rate": 3.2362459546925574e-08,
      "loss": 1.1997,
      "step": 1
    },
    {
      "epoch": 0.00019464720194647202,
      "grad_norm": 15.765097511926365,
      "learning_rate": 6.472491909385115e-08,
      "loss": 1.384,
      "step": 2
    },
    {
      "epoch": 0.00029197080291970805,
      "grad_norm": 16.64113665586635,
      "learning_rate": 9.70873786407767e-08,
      "loss": 1.2291,
      "step": 3
    },
    {
      "epoch": 0.00038929440389294404,
      "grad_norm": 20.34864047521242,
      "learning_rate": 1.294498381877023e-07,
      "loss": 0.9025,
      "step": 4
    },
    {
      "epoch": 0.00048661800486618007,
      "grad_norm": 28.710711096046108,
      "learning_rate": 1.6181229773462782e-07,
      "loss": 1.0305,
      "step": 5
    },
    {
      "epoch": 0.0005839416058394161,
      "grad_norm": 21.945801992582915,
      "learning_rate": 1.941747572815534e-07,
      "loss": 1.0979,
      "step": 6
    },
    {
      "epoch": 0.0006812652068126521,
      "grad_norm": 23.947905905644966,
      "learning_rate": 2.26537216828479e-07,
      "loss": 1.1909,
      "step": 7
    },
    {
      "epoch": 0.0007785888077858881,
      "grad_norm": 19.835016686730835,
      "learning_rate": 2.588996763754046e-07,
      "loss": 1.2083,
      "step": 8
    },
    {
      "epoch": 0.0008759124087591241,
      "grad_norm": 16.926846352507788,
      "learning_rate": 2.9126213592233014e-07,
      "loss": 1.2369,
      "step": 9
    },
    {
      "epoch": 0.0009732360097323601,
      "grad_norm": 21.349924470647284,
      "learning_rate": 3.2362459546925565e-07,
      "loss": 1.0052,
      "step": 10
    },
    {
      "epoch": 0.0010705596107055961,
      "grad_norm": 25.127579741628022,
      "learning_rate": 3.5598705501618125e-07,
      "loss": 1.2631,
      "step": 11
    },
    {
      "epoch": 0.0011678832116788322,
      "grad_norm": 12.524049131196549,
      "learning_rate": 3.883495145631068e-07,
      "loss": 1.0884,
      "step": 12
    },
    {
      "epoch": 0.001265206812652068,
      "grad_norm": 20.706648432487587,
      "learning_rate": 4.207119741100324e-07,
      "loss": 1.1469,
      "step": 13
    },
    {
      "epoch": 0.0013625304136253042,
      "grad_norm": 17.655230197318655,
      "learning_rate": 4.53074433656958e-07,
      "loss": 1.2922,
      "step": 14
    },
    {
      "epoch": 0.00145985401459854,
      "grad_norm": 16.550170455008725,
      "learning_rate": 4.854368932038835e-07,
      "loss": 1.1792,
      "step": 15
    },
    {
      "epoch": 0.0015571776155717761,
      "grad_norm": 24.456798845425887,
      "learning_rate": 5.177993527508092e-07,
      "loss": 1.0804,
      "step": 16
    },
    {
      "epoch": 0.0016545012165450122,
      "grad_norm": 14.659117460865279,
      "learning_rate": 5.501618122977346e-07,
      "loss": 1.0973,
      "step": 17
    },
    {
      "epoch": 0.0017518248175182481,
      "grad_norm": 15.324823146378344,
      "learning_rate": 5.825242718446603e-07,
      "loss": 0.9791,
      "step": 18
    },
    {
      "epoch": 0.0018491484184914842,
      "grad_norm": 12.483869597287145,
      "learning_rate": 6.148867313915858e-07,
      "loss": 1.0829,
      "step": 19
    },
    {
      "epoch": 0.0019464720194647203,
      "grad_norm": 11.921211994178957,
      "learning_rate": 6.472491909385113e-07,
      "loss": 0.6862,
      "step": 20
    },
    {
      "epoch": 0.0020437956204379564,
      "grad_norm": 14.53279456676939,
      "learning_rate": 6.79611650485437e-07,
      "loss": 0.7814,
      "step": 21
    },
    {
      "epoch": 0.0021411192214111923,
      "grad_norm": 15.68359520937104,
      "learning_rate": 7.119741100323625e-07,
      "loss": 0.883,
      "step": 22
    },
    {
      "epoch": 0.002238442822384428,
      "grad_norm": 14.062468532950906,
      "learning_rate": 7.443365695792882e-07,
      "loss": 1.0087,
      "step": 23
    },
    {
      "epoch": 0.0023357664233576644,
      "grad_norm": 11.150778403716444,
      "learning_rate": 7.766990291262136e-07,
      "loss": 0.4884,
      "step": 24
    },
    {
      "epoch": 0.0024330900243309003,
      "grad_norm": 7.740982223602688,
      "learning_rate": 8.090614886731392e-07,
      "loss": 0.8543,
      "step": 25
    },
    {
      "epoch": 0.002530413625304136,
      "grad_norm": 6.4338060169141915,
      "learning_rate": 8.414239482200648e-07,
      "loss": 0.7948,
      "step": 26
    },
    {
      "epoch": 0.002627737226277372,
      "grad_norm": 6.227022582398367,
      "learning_rate": 8.737864077669904e-07,
      "loss": 0.7814,
      "step": 27
    },
    {
      "epoch": 0.0027250608272506084,
      "grad_norm": 7.989531820662516,
      "learning_rate": 9.06148867313916e-07,
      "loss": 0.5645,
      "step": 28
    },
    {
      "epoch": 0.0028223844282238442,
      "grad_norm": 6.4745089193753,
      "learning_rate": 9.385113268608415e-07,
      "loss": 0.6802,
      "step": 29
    },
    {
      "epoch": 0.00291970802919708,
      "grad_norm": 8.23650018531745,
      "learning_rate": 9.70873786407767e-07,
      "loss": 0.6218,
      "step": 30
    },
    {
      "epoch": 0.0030170316301703164,
      "grad_norm": 4.915479010119541,
      "learning_rate": 1.0032362459546926e-06,
      "loss": 0.8879,
      "step": 31
    },
    {
      "epoch": 0.0031143552311435523,
      "grad_norm": 4.288138757396447,
      "learning_rate": 1.0355987055016184e-06,
      "loss": 0.5917,
      "step": 32
    },
    {
      "epoch": 0.003211678832116788,
      "grad_norm": 4.230901102531741,
      "learning_rate": 1.0679611650485437e-06,
      "loss": 0.7373,
      "step": 33
    },
    {
      "epoch": 0.0033090024330900245,
      "grad_norm": 4.714303656539792,
      "learning_rate": 1.1003236245954693e-06,
      "loss": 0.5886,
      "step": 34
    },
    {
      "epoch": 0.0034063260340632603,
      "grad_norm": 4.1204943469600925,
      "learning_rate": 1.132686084142395e-06,
      "loss": 0.5991,
      "step": 35
    },
    {
      "epoch": 0.0035036496350364962,
      "grad_norm": 3.124375547961107,
      "learning_rate": 1.1650485436893206e-06,
      "loss": 0.432,
      "step": 36
    },
    {
      "epoch": 0.0036009732360097325,
      "grad_norm": 3.741153837090354,
      "learning_rate": 1.197411003236246e-06,
      "loss": 0.6379,
      "step": 37
    },
    {
      "epoch": 0.0036982968369829684,
      "grad_norm": 3.7740270813504506,
      "learning_rate": 1.2297734627831717e-06,
      "loss": 0.5595,
      "step": 38
    },
    {
      "epoch": 0.0037956204379562043,
      "grad_norm": 4.783986424289694,
      "learning_rate": 1.2621359223300972e-06,
      "loss": 0.8717,
      "step": 39
    },
    {
      "epoch": 0.0038929440389294406,
      "grad_norm": 4.242597978097827,
      "learning_rate": 1.2944983818770226e-06,
      "loss": 0.6632,
      "step": 40
    },
    {
      "epoch": 0.0039902676399026765,
      "grad_norm": 4.309602952976607,
      "learning_rate": 1.3268608414239483e-06,
      "loss": 0.7191,
      "step": 41
    },
    {
      "epoch": 0.004087591240875913,
      "grad_norm": 4.136462382872819,
      "learning_rate": 1.359223300970874e-06,
      "loss": 0.6782,
      "step": 42
    },
    {
      "epoch": 0.004184914841849148,
      "grad_norm": 4.2148643401229,
      "learning_rate": 1.3915857605177997e-06,
      "loss": 0.8932,
      "step": 43
    },
    {
      "epoch": 0.0042822384428223845,
      "grad_norm": 3.829331188520966,
      "learning_rate": 1.423948220064725e-06,
      "loss": 0.4697,
      "step": 44
    },
    {
      "epoch": 0.004379562043795621,
      "grad_norm": 3.4564347781684557,
      "learning_rate": 1.4563106796116506e-06,
      "loss": 0.3377,
      "step": 45
    },
    {
      "epoch": 0.004476885644768856,
      "grad_norm": 3.319649807488789,
      "learning_rate": 1.4886731391585763e-06,
      "loss": 0.4589,
      "step": 46
    },
    {
      "epoch": 0.0045742092457420926,
      "grad_norm": 3.8856546910308034,
      "learning_rate": 1.5210355987055017e-06,
      "loss": 0.8413,
      "step": 47
    },
    {
      "epoch": 0.004671532846715329,
      "grad_norm": 3.7955924171570605,
      "learning_rate": 1.5533980582524272e-06,
      "loss": 0.588,
      "step": 48
    },
    {
      "epoch": 0.004768856447688564,
      "grad_norm": 4.5762685715882805,
      "learning_rate": 1.585760517799353e-06,
      "loss": 0.6472,
      "step": 49
    },
    {
      "epoch": 0.004866180048661801,
      "grad_norm": 4.284420204063246,
      "learning_rate": 1.6181229773462783e-06,
      "loss": 0.5233,
      "step": 50
    },
    {
      "epoch": 0.004963503649635037,
      "grad_norm": 4.0399534913964645,
      "learning_rate": 1.650485436893204e-06,
      "loss": 0.6737,
      "step": 51
    },
    {
      "epoch": 0.005060827250608272,
      "grad_norm": 4.850258079033273,
      "learning_rate": 1.6828478964401297e-06,
      "loss": 0.5017,
      "step": 52
    },
    {
      "epoch": 0.005158150851581509,
      "grad_norm": 3.289730774319516,
      "learning_rate": 1.715210355987055e-06,
      "loss": 0.6378,
      "step": 53
    },
    {
      "epoch": 0.005255474452554744,
      "grad_norm": 3.116783938182044,
      "learning_rate": 1.7475728155339808e-06,
      "loss": 0.5681,
      "step": 54
    },
    {
      "epoch": 0.00535279805352798,
      "grad_norm": 3.5896487509946677,
      "learning_rate": 1.7799352750809063e-06,
      "loss": 0.5222,
      "step": 55
    },
    {
      "epoch": 0.005450121654501217,
      "grad_norm": 3.3627737905222146,
      "learning_rate": 1.812297734627832e-06,
      "loss": 0.351,
      "step": 56
    },
    {
      "epoch": 0.005547445255474452,
      "grad_norm": 3.405981770724818,
      "learning_rate": 1.8446601941747574e-06,
      "loss": 0.5832,
      "step": 57
    },
    {
      "epoch": 0.0056447688564476885,
      "grad_norm": 3.231134680455488,
      "learning_rate": 1.877022653721683e-06,
      "loss": 0.5558,
      "step": 58
    },
    {
      "epoch": 0.005742092457420925,
      "grad_norm": 4.2963387449464605,
      "learning_rate": 1.9093851132686085e-06,
      "loss": 0.7544,
      "step": 59
    },
    {
      "epoch": 0.00583941605839416,
      "grad_norm": 3.3678084152804315,
      "learning_rate": 1.941747572815534e-06,
      "loss": 0.554,
      "step": 60
    },
    {
      "epoch": 0.0059367396593673965,
      "grad_norm": 3.635756089652443,
      "learning_rate": 1.9741100323624596e-06,
      "loss": 0.5312,
      "step": 61
    },
    {
      "epoch": 0.006034063260340633,
      "grad_norm": 3.91764256649437,
      "learning_rate": 2.006472491909385e-06,
      "loss": 0.4329,
      "step": 62
    },
    {
      "epoch": 0.006131386861313868,
      "grad_norm": 3.4866607421863565,
      "learning_rate": 2.0388349514563107e-06,
      "loss": 0.4453,
      "step": 63
    },
    {
      "epoch": 0.006228710462287105,
      "grad_norm": 2.9369425143161147,
      "learning_rate": 2.0711974110032367e-06,
      "loss": 0.467,
      "step": 64
    },
    {
      "epoch": 0.006326034063260341,
      "grad_norm": 3.0906723589687024,
      "learning_rate": 2.103559870550162e-06,
      "loss": 0.3917,
      "step": 65
    },
    {
      "epoch": 0.006423357664233576,
      "grad_norm": 3.5121616512799747,
      "learning_rate": 2.1359223300970874e-06,
      "loss": 0.6428,
      "step": 66
    },
    {
      "epoch": 0.006520681265206813,
      "grad_norm": 3.470270871630247,
      "learning_rate": 2.1682847896440134e-06,
      "loss": 0.586,
      "step": 67
    },
    {
      "epoch": 0.006618004866180049,
      "grad_norm": 2.8689679430782498,
      "learning_rate": 2.2006472491909385e-06,
      "loss": 0.2938,
      "step": 68
    },
    {
      "epoch": 0.006715328467153284,
      "grad_norm": 4.115573400175418,
      "learning_rate": 2.2330097087378645e-06,
      "loss": 0.3855,
      "step": 69
    },
    {
      "epoch": 0.006812652068126521,
      "grad_norm": 3.903319335204406,
      "learning_rate": 2.26537216828479e-06,
      "loss": 0.6272,
      "step": 70
    },
    {
      "epoch": 0.006909975669099757,
      "grad_norm": 2.649165320750572,
      "learning_rate": 2.297734627831715e-06,
      "loss": 0.5229,
      "step": 71
    },
    {
      "epoch": 0.0070072992700729924,
      "grad_norm": 2.8543884488184235,
      "learning_rate": 2.330097087378641e-06,
      "loss": 0.4006,
      "step": 72
    },
    {
      "epoch": 0.007104622871046229,
      "grad_norm": 2.9817247056794134,
      "learning_rate": 2.3624595469255667e-06,
      "loss": 0.2331,
      "step": 73
    },
    {
      "epoch": 0.007201946472019465,
      "grad_norm": 3.592880940053797,
      "learning_rate": 2.394822006472492e-06,
      "loss": 0.4889,
      "step": 74
    },
    {
      "epoch": 0.0072992700729927005,
      "grad_norm": 2.89844013224274,
      "learning_rate": 2.427184466019418e-06,
      "loss": 0.4711,
      "step": 75
    },
    {
      "epoch": 0.007396593673965937,
      "grad_norm": 2.6071345596032134,
      "learning_rate": 2.4595469255663434e-06,
      "loss": 0.4844,
      "step": 76
    },
    {
      "epoch": 0.007493917274939173,
      "grad_norm": 2.9053930844585776,
      "learning_rate": 2.491909385113269e-06,
      "loss": 0.5163,
      "step": 77
    },
    {
      "epoch": 0.0075912408759124085,
      "grad_norm": 3.4016540038418115,
      "learning_rate": 2.5242718446601945e-06,
      "loss": 0.5852,
      "step": 78
    },
    {
      "epoch": 0.007688564476885645,
      "grad_norm": 2.7133170026932887,
      "learning_rate": 2.55663430420712e-06,
      "loss": 0.4934,
      "step": 79
    },
    {
      "epoch": 0.007785888077858881,
      "grad_norm": 3.2321439410345585,
      "learning_rate": 2.588996763754045e-06,
      "loss": 0.62,
      "step": 80
    },
    {
      "epoch": 0.007883211678832117,
      "grad_norm": 2.6835948161160545,
      "learning_rate": 2.621359223300971e-06,
      "loss": 0.4689,
      "step": 81
    },
    {
      "epoch": 0.007980535279805353,
      "grad_norm": 4.716894934604404,
      "learning_rate": 2.6537216828478967e-06,
      "loss": 0.3364,
      "step": 82
    },
    {
      "epoch": 0.00807785888077859,
      "grad_norm": 2.6507857723180646,
      "learning_rate": 2.686084142394822e-06,
      "loss": 0.3785,
      "step": 83
    },
    {
      "epoch": 0.008175182481751826,
      "grad_norm": 2.356714630861861,
      "learning_rate": 2.718446601941748e-06,
      "loss": 0.2591,
      "step": 84
    },
    {
      "epoch": 0.00827250608272506,
      "grad_norm": 2.755477478688418,
      "learning_rate": 2.7508090614886734e-06,
      "loss": 0.4762,
      "step": 85
    },
    {
      "epoch": 0.008369829683698296,
      "grad_norm": 3.7771581783688837,
      "learning_rate": 2.7831715210355993e-06,
      "loss": 0.4627,
      "step": 86
    },
    {
      "epoch": 0.008467153284671533,
      "grad_norm": 2.8568450908810257,
      "learning_rate": 2.8155339805825245e-06,
      "loss": 0.4322,
      "step": 87
    },
    {
      "epoch": 0.008564476885644769,
      "grad_norm": 2.914756058289183,
      "learning_rate": 2.84789644012945e-06,
      "loss": 0.4835,
      "step": 88
    },
    {
      "epoch": 0.008661800486618005,
      "grad_norm": 2.414182197047686,
      "learning_rate": 2.880258899676376e-06,
      "loss": 0.493,
      "step": 89
    },
    {
      "epoch": 0.008759124087591242,
      "grad_norm": 2.8597853736106975,
      "learning_rate": 2.912621359223301e-06,
      "loss": 0.6063,
      "step": 90
    },
    {
      "epoch": 0.008856447688564476,
      "grad_norm": 2.4567808863650007,
      "learning_rate": 2.9449838187702267e-06,
      "loss": 0.5874,
      "step": 91
    },
    {
      "epoch": 0.008953771289537713,
      "grad_norm": 2.819434031784131,
      "learning_rate": 2.9773462783171527e-06,
      "loss": 0.552,
      "step": 92
    },
    {
      "epoch": 0.009051094890510949,
      "grad_norm": 1.9840396387462764,
      "learning_rate": 3.0097087378640778e-06,
      "loss": 0.3736,
      "step": 93
    },
    {
      "epoch": 0.009148418491484185,
      "grad_norm": 2.52047300259283,
      "learning_rate": 3.0420711974110033e-06,
      "loss": 0.407,
      "step": 94
    },
    {
      "epoch": 0.009245742092457421,
      "grad_norm": 3.140839526692518,
      "learning_rate": 3.0744336569579293e-06,
      "loss": 0.6513,
      "step": 95
    },
    {
      "epoch": 0.009343065693430658,
      "grad_norm": 3.1368865731879554,
      "learning_rate": 3.1067961165048544e-06,
      "loss": 0.4804,
      "step": 96
    },
    {
      "epoch": 0.009440389294403892,
      "grad_norm": 2.6987222968513196,
      "learning_rate": 3.13915857605178e-06,
      "loss": 0.4228,
      "step": 97
    },
    {
      "epoch": 0.009537712895377129,
      "grad_norm": 2.5779408707034026,
      "learning_rate": 3.171521035598706e-06,
      "loss": 0.4654,
      "step": 98
    },
    {
      "epoch": 0.009635036496350365,
      "grad_norm": 2.5189587792888934,
      "learning_rate": 3.2038834951456315e-06,
      "loss": 0.5465,
      "step": 99
    },
    {
      "epoch": 0.009732360097323601,
      "grad_norm": 2.457408493992738,
      "learning_rate": 3.2362459546925567e-06,
      "loss": 0.5077,
      "step": 100
    },
    {
      "epoch": 0.009829683698296838,
      "grad_norm": 2.445932328031196,
      "learning_rate": 3.2686084142394826e-06,
      "loss": 0.492,
      "step": 101
    },
    {
      "epoch": 0.009927007299270074,
      "grad_norm": 2.3199141960061915,
      "learning_rate": 3.300970873786408e-06,
      "loss": 0.4432,
      "step": 102
    },
    {
      "epoch": 0.010024330900243308,
      "grad_norm": 3.88769555780582,
      "learning_rate": 3.3333333333333333e-06,
      "loss": 0.3684,
      "step": 103
    },
    {
      "epoch": 0.010121654501216545,
      "grad_norm": 2.63905676146042,
      "learning_rate": 3.3656957928802593e-06,
      "loss": 0.4238,
      "step": 104
    },
    {
      "epoch": 0.010218978102189781,
      "grad_norm": 3.0073749174392885,
      "learning_rate": 3.398058252427185e-06,
      "loss": 0.4655,
      "step": 105
    },
    {
      "epoch": 0.010316301703163017,
      "grad_norm": 2.613524831872459,
      "learning_rate": 3.43042071197411e-06,
      "loss": 0.4948,
      "step": 106
    },
    {
      "epoch": 0.010413625304136254,
      "grad_norm": 2.4293628733346764,
      "learning_rate": 3.462783171521036e-06,
      "loss": 0.3717,
      "step": 107
    },
    {
      "epoch": 0.010510948905109488,
      "grad_norm": 3.3036504610837016,
      "learning_rate": 3.4951456310679615e-06,
      "loss": 0.4939,
      "step": 108
    },
    {
      "epoch": 0.010608272506082725,
      "grad_norm": 2.6808221933664846,
      "learning_rate": 3.5275080906148866e-06,
      "loss": 0.4809,
      "step": 109
    },
    {
      "epoch": 0.01070559610705596,
      "grad_norm": 2.853958419293739,
      "learning_rate": 3.5598705501618126e-06,
      "loss": 0.4066,
      "step": 110
    },
    {
      "epoch": 0.010802919708029197,
      "grad_norm": 5.3412930378250145,
      "learning_rate": 3.592233009708738e-06,
      "loss": 0.3599,
      "step": 111
    },
    {
      "epoch": 0.010900243309002433,
      "grad_norm": 2.983669976646381,
      "learning_rate": 3.624595469255664e-06,
      "loss": 0.6187,
      "step": 112
    },
    {
      "epoch": 0.01099756690997567,
      "grad_norm": 3.388543821878077,
      "learning_rate": 3.6569579288025893e-06,
      "loss": 0.717,
      "step": 113
    },
    {
      "epoch": 0.011094890510948904,
      "grad_norm": 3.0720120062792127,
      "learning_rate": 3.689320388349515e-06,
      "loss": 0.5057,
      "step": 114
    },
    {
      "epoch": 0.01119221411192214,
      "grad_norm": 2.521868238475485,
      "learning_rate": 3.721682847896441e-06,
      "loss": 0.4308,
      "step": 115
    },
    {
      "epoch": 0.011289537712895377,
      "grad_norm": 2.641085251645149,
      "learning_rate": 3.754045307443366e-06,
      "loss": 0.4047,
      "step": 116
    },
    {
      "epoch": 0.011386861313868613,
      "grad_norm": 2.6936547530255828,
      "learning_rate": 3.7864077669902915e-06,
      "loss": 0.5548,
      "step": 117
    },
    {
      "epoch": 0.01148418491484185,
      "grad_norm": 5.599830434139348,
      "learning_rate": 3.818770226537217e-06,
      "loss": 0.5338,
      "step": 118
    },
    {
      "epoch": 0.011581508515815086,
      "grad_norm": 2.6372065340185378,
      "learning_rate": 3.851132686084142e-06,
      "loss": 0.4833,
      "step": 119
    },
    {
      "epoch": 0.01167883211678832,
      "grad_norm": 2.555049765563167,
      "learning_rate": 3.883495145631068e-06,
      "loss": 0.4295,
      "step": 120
    },
    {
      "epoch": 0.011776155717761557,
      "grad_norm": 2.22725048478721,
      "learning_rate": 3.915857605177994e-06,
      "loss": 0.4074,
      "step": 121
    },
    {
      "epoch": 0.011873479318734793,
      "grad_norm": 3.0093045583939984,
      "learning_rate": 3.948220064724919e-06,
      "loss": 0.7168,
      "step": 122
    },
    {
      "epoch": 0.01197080291970803,
      "grad_norm": 2.8800338131191223,
      "learning_rate": 3.980582524271845e-06,
      "loss": 0.3826,
      "step": 123
    },
    {
      "epoch": 0.012068126520681266,
      "grad_norm": 2.3197904571086974,
      "learning_rate": 4.01294498381877e-06,
      "loss": 0.2584,
      "step": 124
    },
    {
      "epoch": 0.012165450121654502,
      "grad_norm": 2.929540360888414,
      "learning_rate": 4.045307443365696e-06,
      "loss": 0.4617,
      "step": 125
    },
    {
      "epoch": 0.012262773722627737,
      "grad_norm": 2.5602803735383137,
      "learning_rate": 4.0776699029126215e-06,
      "loss": 0.2561,
      "step": 126
    },
    {
      "epoch": 0.012360097323600973,
      "grad_norm": 2.676345297957673,
      "learning_rate": 4.1100323624595475e-06,
      "loss": 0.2996,
      "step": 127
    },
    {
      "epoch": 0.01245742092457421,
      "grad_norm": 1.9047794610871986,
      "learning_rate": 4.1423948220064734e-06,
      "loss": 0.3475,
      "step": 128
    },
    {
      "epoch": 0.012554744525547445,
      "grad_norm": 2.9014607006450555,
      "learning_rate": 4.1747572815533986e-06,
      "loss": 0.4748,
      "step": 129
    },
    {
      "epoch": 0.012652068126520682,
      "grad_norm": 2.2992367182815987,
      "learning_rate": 4.207119741100324e-06,
      "loss": 0.3465,
      "step": 130
    },
    {
      "epoch": 0.012749391727493918,
      "grad_norm": 2.668874383033437,
      "learning_rate": 4.23948220064725e-06,
      "loss": 0.6119,
      "step": 131
    },
    {
      "epoch": 0.012846715328467153,
      "grad_norm": 2.69106703615133,
      "learning_rate": 4.271844660194175e-06,
      "loss": 0.4743,
      "step": 132
    },
    {
      "epoch": 0.012944038929440389,
      "grad_norm": 2.972314561813759,
      "learning_rate": 4.304207119741101e-06,
      "loss": 0.5766,
      "step": 133
    },
    {
      "epoch": 0.013041362530413625,
      "grad_norm": 2.7487017428059635,
      "learning_rate": 4.336569579288027e-06,
      "loss": 0.5818,
      "step": 134
    },
    {
      "epoch": 0.013138686131386862,
      "grad_norm": 3.1117207482379663,
      "learning_rate": 4.368932038834952e-06,
      "loss": 0.6468,
      "step": 135
    },
    {
      "epoch": 0.013236009732360098,
      "grad_norm": 2.781796948090657,
      "learning_rate": 4.401294498381877e-06,
      "loss": 0.7209,
      "step": 136
    },
    {
      "epoch": 0.013333333333333334,
      "grad_norm": 2.5480533986327556,
      "learning_rate": 4.433656957928803e-06,
      "loss": 0.5907,
      "step": 137
    },
    {
      "epoch": 0.013430656934306569,
      "grad_norm": 2.054397852683208,
      "learning_rate": 4.466019417475729e-06,
      "loss": 0.4079,
      "step": 138
    },
    {
      "epoch": 0.013527980535279805,
      "grad_norm": 2.2564046621809037,
      "learning_rate": 4.498381877022654e-06,
      "loss": 0.4,
      "step": 139
    },
    {
      "epoch": 0.013625304136253041,
      "grad_norm": 2.8739841159071022,
      "learning_rate": 4.53074433656958e-06,
      "loss": 0.5819,
      "step": 140
    },
    {
      "epoch": 0.013722627737226278,
      "grad_norm": 2.6418540847993657,
      "learning_rate": 4.563106796116505e-06,
      "loss": 0.589,
      "step": 141
    },
    {
      "epoch": 0.013819951338199514,
      "grad_norm": 2.431908870746442,
      "learning_rate": 4.59546925566343e-06,
      "loss": 0.5468,
      "step": 142
    },
    {
      "epoch": 0.013917274939172749,
      "grad_norm": 4.44933942542394,
      "learning_rate": 4.627831715210356e-06,
      "loss": 0.3846,
      "step": 143
    },
    {
      "epoch": 0.014014598540145985,
      "grad_norm": 2.2469929628351126,
      "learning_rate": 4.660194174757282e-06,
      "loss": 0.3047,
      "step": 144
    },
    {
      "epoch": 0.014111922141119221,
      "grad_norm": 2.8361034502388205,
      "learning_rate": 4.6925566343042074e-06,
      "loss": 0.4186,
      "step": 145
    },
    {
      "epoch": 0.014209245742092457,
      "grad_norm": 2.485184255788147,
      "learning_rate": 4.724919093851133e-06,
      "loss": 0.455,
      "step": 146
    },
    {
      "epoch": 0.014306569343065694,
      "grad_norm": 2.677307495548506,
      "learning_rate": 4.7572815533980585e-06,
      "loss": 0.6346,
      "step": 147
    },
    {
      "epoch": 0.01440389294403893,
      "grad_norm": 2.9440091029213034,
      "learning_rate": 4.789644012944984e-06,
      "loss": 0.4961,
      "step": 148
    },
    {
      "epoch": 0.014501216545012165,
      "grad_norm": 2.6810327828724723,
      "learning_rate": 4.82200647249191e-06,
      "loss": 0.3754,
      "step": 149
    },
    {
      "epoch": 0.014598540145985401,
      "grad_norm": 2.519257002697837,
      "learning_rate": 4.854368932038836e-06,
      "loss": 0.249,
      "step": 150
    },
    {
      "epoch": 0.014695863746958637,
      "grad_norm": 2.8041238457488578,
      "learning_rate": 4.886731391585761e-06,
      "loss": 0.3117,
      "step": 151
    },
    {
      "epoch": 0.014793187347931874,
      "grad_norm": 2.363481194731433,
      "learning_rate": 4.919093851132687e-06,
      "loss": 0.3325,
      "step": 152
    },
    {
      "epoch": 0.01489051094890511,
      "grad_norm": 3.078347599868747,
      "learning_rate": 4.951456310679612e-06,
      "loss": 0.3569,
      "step": 153
    },
    {
      "epoch": 0.014987834549878346,
      "grad_norm": 3.2926461094535515,
      "learning_rate": 4.983818770226538e-06,
      "loss": 0.716,
      "step": 154
    },
    {
      "epoch": 0.01508515815085158,
      "grad_norm": 2.340052421830345,
      "learning_rate": 5.016181229773464e-06,
      "loss": 0.2642,
      "step": 155
    },
    {
      "epoch": 0.015182481751824817,
      "grad_norm": 1.8915730140906823,
      "learning_rate": 5.048543689320389e-06,
      "loss": 0.3523,
      "step": 156
    },
    {
      "epoch": 0.015279805352798053,
      "grad_norm": 4.2448533254564484,
      "learning_rate": 5.080906148867314e-06,
      "loss": 0.5185,
      "step": 157
    },
    {
      "epoch": 0.01537712895377129,
      "grad_norm": 2.1172922256300333,
      "learning_rate": 5.11326860841424e-06,
      "loss": 0.3341,
      "step": 158
    },
    {
      "epoch": 0.015474452554744526,
      "grad_norm": 2.7414250631657113,
      "learning_rate": 5.145631067961165e-06,
      "loss": 0.5965,
      "step": 159
    },
    {
      "epoch": 0.015571776155717762,
      "grad_norm": 1.977804344185745,
      "learning_rate": 5.17799352750809e-06,
      "loss": 0.239,
      "step": 160
    },
    {
      "epoch": 0.015669099756690997,
      "grad_norm": 2.771807640315191,
      "learning_rate": 5.210355987055017e-06,
      "loss": 0.4122,
      "step": 161
    },
    {
      "epoch": 0.015766423357664233,
      "grad_norm": 1.9977073642008174,
      "learning_rate": 5.242718446601942e-06,
      "loss": 0.3423,
      "step": 162
    },
    {
      "epoch": 0.01586374695863747,
      "grad_norm": 3.222730527079622,
      "learning_rate": 5.275080906148867e-06,
      "loss": 0.5647,
      "step": 163
    },
    {
      "epoch": 0.015961070559610706,
      "grad_norm": 2.95441646694508,
      "learning_rate": 5.307443365695793e-06,
      "loss": 0.5198,
      "step": 164
    },
    {
      "epoch": 0.016058394160583942,
      "grad_norm": 2.3346384576429116,
      "learning_rate": 5.3398058252427185e-06,
      "loss": 0.3516,
      "step": 165
    },
    {
      "epoch": 0.01615571776155718,
      "grad_norm": 2.089159587923689,
      "learning_rate": 5.372168284789644e-06,
      "loss": 0.3704,
      "step": 166
    },
    {
      "epoch": 0.016253041362530415,
      "grad_norm": 2.8135820638465088,
      "learning_rate": 5.4045307443365705e-06,
      "loss": 0.3729,
      "step": 167
    },
    {
      "epoch": 0.01635036496350365,
      "grad_norm": 2.991259557993277,
      "learning_rate": 5.436893203883496e-06,
      "loss": 0.5622,
      "step": 168
    },
    {
      "epoch": 0.016447688564476887,
      "grad_norm": 3.1512644455187857,
      "learning_rate": 5.4692556634304216e-06,
      "loss": 0.5915,
      "step": 169
    },
    {
      "epoch": 0.01654501216545012,
      "grad_norm": 2.616126184062516,
      "learning_rate": 5.501618122977347e-06,
      "loss": 0.4252,
      "step": 170
    },
    {
      "epoch": 0.016642335766423356,
      "grad_norm": 1.9958281517625203,
      "learning_rate": 5.533980582524272e-06,
      "loss": 0.3704,
      "step": 171
    },
    {
      "epoch": 0.016739659367396593,
      "grad_norm": 2.470731302334384,
      "learning_rate": 5.566343042071199e-06,
      "loss": 0.4373,
      "step": 172
    },
    {
      "epoch": 0.01683698296836983,
      "grad_norm": 2.583270308023139,
      "learning_rate": 5.598705501618124e-06,
      "loss": 0.4125,
      "step": 173
    },
    {
      "epoch": 0.016934306569343065,
      "grad_norm": 1.9644684632241667,
      "learning_rate": 5.631067961165049e-06,
      "loss": 0.3522,
      "step": 174
    },
    {
      "epoch": 0.0170316301703163,
      "grad_norm": 2.4207097357376046,
      "learning_rate": 5.663430420711975e-06,
      "loss": 0.3579,
      "step": 175
    },
    {
      "epoch": 0.017128953771289538,
      "grad_norm": 2.3511041847292034,
      "learning_rate": 5.6957928802589e-06,
      "loss": 0.5412,
      "step": 176
    },
    {
      "epoch": 0.017226277372262774,
      "grad_norm": 2.274427899539275,
      "learning_rate": 5.728155339805825e-06,
      "loss": 0.5353,
      "step": 177
    },
    {
      "epoch": 0.01732360097323601,
      "grad_norm": 2.133749284526256,
      "learning_rate": 5.760517799352752e-06,
      "loss": 0.4392,
      "step": 178
    },
    {
      "epoch": 0.017420924574209247,
      "grad_norm": 2.3097462109285787,
      "learning_rate": 5.792880258899677e-06,
      "loss": 0.4442,
      "step": 179
    },
    {
      "epoch": 0.017518248175182483,
      "grad_norm": 2.2128802818602056,
      "learning_rate": 5.825242718446602e-06,
      "loss": 0.5635,
      "step": 180
    },
    {
      "epoch": 0.017615571776155716,
      "grad_norm": 2.103405792854256,
      "learning_rate": 5.857605177993528e-06,
      "loss": 0.4533,
      "step": 181
    },
    {
      "epoch": 0.017712895377128952,
      "grad_norm": 2.0565661990183597,
      "learning_rate": 5.889967637540453e-06,
      "loss": 0.3806,
      "step": 182
    },
    {
      "epoch": 0.01781021897810219,
      "grad_norm": 2.179649872267064,
      "learning_rate": 5.9223300970873785e-06,
      "loss": 0.3842,
      "step": 183
    },
    {
      "epoch": 0.017907542579075425,
      "grad_norm": 3.8333244047199146,
      "learning_rate": 5.954692556634305e-06,
      "loss": 0.3876,
      "step": 184
    },
    {
      "epoch": 0.01800486618004866,
      "grad_norm": 2.2893517217095716,
      "learning_rate": 5.9870550161812304e-06,
      "loss": 0.4781,
      "step": 185
    },
    {
      "epoch": 0.018102189781021898,
      "grad_norm": 1.6022498167897639,
      "learning_rate": 6.0194174757281556e-06,
      "loss": 0.2306,
      "step": 186
    },
    {
      "epoch": 0.018199513381995134,
      "grad_norm": 2.32863493589546,
      "learning_rate": 6.0517799352750815e-06,
      "loss": 0.5139,
      "step": 187
    },
    {
      "epoch": 0.01829683698296837,
      "grad_norm": 2.0789478938631314,
      "learning_rate": 6.084142394822007e-06,
      "loss": 0.2824,
      "step": 188
    },
    {
      "epoch": 0.018394160583941607,
      "grad_norm": 1.7544615955949223,
      "learning_rate": 6.116504854368932e-06,
      "loss": 0.4172,
      "step": 189
    },
    {
      "epoch": 0.018491484184914843,
      "grad_norm": 1.931043696572374,
      "learning_rate": 6.148867313915859e-06,
      "loss": 0.3584,
      "step": 190
    },
    {
      "epoch": 0.01858880778588808,
      "grad_norm": 2.467258437370788,
      "learning_rate": 6.181229773462784e-06,
      "loss": 0.462,
      "step": 191
    },
    {
      "epoch": 0.018686131386861315,
      "grad_norm": 2.1541091684996965,
      "learning_rate": 6.213592233009709e-06,
      "loss": 0.3967,
      "step": 192
    },
    {
      "epoch": 0.01878345498783455,
      "grad_norm": 2.2330486922808395,
      "learning_rate": 6.245954692556635e-06,
      "loss": 0.5316,
      "step": 193
    },
    {
      "epoch": 0.018880778588807785,
      "grad_norm": 2.3498262097642395,
      "learning_rate": 6.27831715210356e-06,
      "loss": 0.4815,
      "step": 194
    },
    {
      "epoch": 0.01897810218978102,
      "grad_norm": 1.7045092076002246,
      "learning_rate": 6.310679611650487e-06,
      "loss": 0.3,
      "step": 195
    },
    {
      "epoch": 0.019075425790754257,
      "grad_norm": 2.5703331850837023,
      "learning_rate": 6.343042071197412e-06,
      "loss": 0.4143,
      "step": 196
    },
    {
      "epoch": 0.019172749391727494,
      "grad_norm": 2.6940646171495133,
      "learning_rate": 6.375404530744337e-06,
      "loss": 0.5463,
      "step": 197
    },
    {
      "epoch": 0.01927007299270073,
      "grad_norm": 2.4185580273524847,
      "learning_rate": 6.407766990291263e-06,
      "loss": 0.5215,
      "step": 198
    },
    {
      "epoch": 0.019367396593673966,
      "grad_norm": 2.6509824694985946,
      "learning_rate": 6.440129449838188e-06,
      "loss": 0.5286,
      "step": 199
    },
    {
      "epoch": 0.019464720194647202,
      "grad_norm": 2.4807219128312767,
      "learning_rate": 6.472491909385113e-06,
      "loss": 0.3996,
      "step": 200
    },
    {
      "epoch": 0.01956204379562044,
      "grad_norm": 2.651883834043772,
      "learning_rate": 6.50485436893204e-06,
      "loss": 0.3499,
      "step": 201
    },
    {
      "epoch": 0.019659367396593675,
      "grad_norm": 2.670759179984812,
      "learning_rate": 6.537216828478965e-06,
      "loss": 0.552,
      "step": 202
    },
    {
      "epoch": 0.01975669099756691,
      "grad_norm": 2.51305850829245,
      "learning_rate": 6.56957928802589e-06,
      "loss": 0.3806,
      "step": 203
    },
    {
      "epoch": 0.019854014598540148,
      "grad_norm": 2.435954851305265,
      "learning_rate": 6.601941747572816e-06,
      "loss": 0.6093,
      "step": 204
    },
    {
      "epoch": 0.01995133819951338,
      "grad_norm": 2.091315833022872,
      "learning_rate": 6.6343042071197415e-06,
      "loss": 0.3573,
      "step": 205
    },
    {
      "epoch": 0.020048661800486617,
      "grad_norm": 2.205515437184344,
      "learning_rate": 6.666666666666667e-06,
      "loss": 0.2892,
      "step": 206
    },
    {
      "epoch": 0.020145985401459853,
      "grad_norm": 2.314981932930035,
      "learning_rate": 6.6990291262135935e-06,
      "loss": 0.4184,
      "step": 207
    },
    {
      "epoch": 0.02024330900243309,
      "grad_norm": 1.9102474885146974,
      "learning_rate": 6.731391585760519e-06,
      "loss": 0.2287,
      "step": 208
    },
    {
      "epoch": 0.020340632603406326,
      "grad_norm": 1.9408029275065433,
      "learning_rate": 6.763754045307444e-06,
      "loss": 0.3958,
      "step": 209
    },
    {
      "epoch": 0.020437956204379562,
      "grad_norm": 2.1006467731485823,
      "learning_rate": 6.79611650485437e-06,
      "loss": 0.3764,
      "step": 210
    },
    {
      "epoch": 0.0205352798053528,
      "grad_norm": 2.0927447282795146,
      "learning_rate": 6.828478964401295e-06,
      "loss": 0.531,
      "step": 211
    },
    {
      "epoch": 0.020632603406326035,
      "grad_norm": 3.4830081465453633,
      "learning_rate": 6.86084142394822e-06,
      "loss": 0.4887,
      "step": 212
    },
    {
      "epoch": 0.02072992700729927,
      "grad_norm": 2.253360993066953,
      "learning_rate": 6.893203883495147e-06,
      "loss": 0.4587,
      "step": 213
    },
    {
      "epoch": 0.020827250608272507,
      "grad_norm": 3.3751096354443852,
      "learning_rate": 6.925566343042072e-06,
      "loss": 0.3427,
      "step": 214
    },
    {
      "epoch": 0.020924574209245744,
      "grad_norm": 1.9729713112803993,
      "learning_rate": 6.957928802588997e-06,
      "loss": 0.384,
      "step": 215
    },
    {
      "epoch": 0.021021897810218976,
      "grad_norm": 2.761285167796522,
      "learning_rate": 6.990291262135923e-06,
      "loss": 0.3512,
      "step": 216
    },
    {
      "epoch": 0.021119221411192213,
      "grad_norm": 2.431882400442612,
      "learning_rate": 7.022653721682848e-06,
      "loss": 0.3971,
      "step": 217
    },
    {
      "epoch": 0.02121654501216545,
      "grad_norm": 3.659254877088116,
      "learning_rate": 7.055016181229773e-06,
      "loss": 0.4115,
      "step": 218
    },
    {
      "epoch": 0.021313868613138685,
      "grad_norm": 2.5501534359714655,
      "learning_rate": 7.0873786407767e-06,
      "loss": 0.4963,
      "step": 219
    },
    {
      "epoch": 0.02141119221411192,
      "grad_norm": 4.296894309260591,
      "learning_rate": 7.119741100323625e-06,
      "loss": 0.5203,
      "step": 220
    },
    {
      "epoch": 0.021508515815085158,
      "grad_norm": 2.5489854552137237,
      "learning_rate": 7.152103559870551e-06,
      "loss": 0.4343,
      "step": 221
    },
    {
      "epoch": 0.021605839416058394,
      "grad_norm": 2.00955207958064,
      "learning_rate": 7.184466019417476e-06,
      "loss": 0.3603,
      "step": 222
    },
    {
      "epoch": 0.02170316301703163,
      "grad_norm": 2.2675038932590224,
      "learning_rate": 7.2168284789644015e-06,
      "loss": 0.3968,
      "step": 223
    },
    {
      "epoch": 0.021800486618004867,
      "grad_norm": 2.4690586331753277,
      "learning_rate": 7.249190938511328e-06,
      "loss": 0.5883,
      "step": 224
    },
    {
      "epoch": 0.021897810218978103,
      "grad_norm": 2.141328682063472,
      "learning_rate": 7.2815533980582534e-06,
      "loss": 0.3547,
      "step": 225
    },
    {
      "epoch": 0.02199513381995134,
      "grad_norm": 2.223927434368622,
      "learning_rate": 7.3139158576051786e-06,
      "loss": 0.5031,
      "step": 226
    },
    {
      "epoch": 0.022092457420924576,
      "grad_norm": 2.8602320319532346,
      "learning_rate": 7.3462783171521046e-06,
      "loss": 0.4226,
      "step": 227
    },
    {
      "epoch": 0.02218978102189781,
      "grad_norm": 2.8852449405031835,
      "learning_rate": 7.37864077669903e-06,
      "loss": 0.4298,
      "step": 228
    },
    {
      "epoch": 0.022287104622871045,
      "grad_norm": 1.7370522944561966,
      "learning_rate": 7.411003236245955e-06,
      "loss": 0.3827,
      "step": 229
    },
    {
      "epoch": 0.02238442822384428,
      "grad_norm": 2.3907908463140584,
      "learning_rate": 7.443365695792882e-06,
      "loss": 0.4139,
      "step": 230
    },
    {
      "epoch": 0.022481751824817518,
      "grad_norm": 2.27581306432663,
      "learning_rate": 7.475728155339807e-06,
      "loss": 0.4736,
      "step": 231
    },
    {
      "epoch": 0.022579075425790754,
      "grad_norm": 2.1861094823645675,
      "learning_rate": 7.508090614886732e-06,
      "loss": 0.4809,
      "step": 232
    },
    {
      "epoch": 0.02267639902676399,
      "grad_norm": 1.9626208371421419,
      "learning_rate": 7.540453074433658e-06,
      "loss": 0.3436,
      "step": 233
    },
    {
      "epoch": 0.022773722627737226,
      "grad_norm": 1.7092390993202267,
      "learning_rate": 7.572815533980583e-06,
      "loss": 0.3224,
      "step": 234
    },
    {
      "epoch": 0.022871046228710463,
      "grad_norm": 3.0168693228526546,
      "learning_rate": 7.605177993527508e-06,
      "loss": 0.6366,
      "step": 235
    },
    {
      "epoch": 0.0229683698296837,
      "grad_norm": 2.424919921496664,
      "learning_rate": 7.637540453074434e-06,
      "loss": 0.4483,
      "step": 236
    },
    {
      "epoch": 0.023065693430656935,
      "grad_norm": 2.4586833984787626,
      "learning_rate": 7.66990291262136e-06,
      "loss": 0.4031,
      "step": 237
    },
    {
      "epoch": 0.02316301703163017,
      "grad_norm": 2.092010230715883,
      "learning_rate": 7.702265372168284e-06,
      "loss": 0.4257,
      "step": 238
    },
    {
      "epoch": 0.023260340632603408,
      "grad_norm": 2.3360188447701655,
      "learning_rate": 7.734627831715211e-06,
      "loss": 0.4684,
      "step": 239
    },
    {
      "epoch": 0.02335766423357664,
      "grad_norm": 2.087175894606599,
      "learning_rate": 7.766990291262136e-06,
      "loss": 0.4272,
      "step": 240
    },
    {
      "epoch": 0.023454987834549877,
      "grad_norm": 2.598684557686617,
      "learning_rate": 7.799352750809061e-06,
      "loss": 0.5401,
      "step": 241
    },
    {
      "epoch": 0.023552311435523113,
      "grad_norm": 2.025117037181364,
      "learning_rate": 7.831715210355988e-06,
      "loss": 0.372,
      "step": 242
    },
    {
      "epoch": 0.02364963503649635,
      "grad_norm": 2.2467324584398405,
      "learning_rate": 7.864077669902913e-06,
      "loss": 0.5891,
      "step": 243
    },
    {
      "epoch": 0.023746958637469586,
      "grad_norm": 2.38036373195977,
      "learning_rate": 7.896440129449839e-06,
      "loss": 0.5133,
      "step": 244
    },
    {
      "epoch": 0.023844282238442822,
      "grad_norm": 2.052700924442009,
      "learning_rate": 7.928802588996765e-06,
      "loss": 0.5161,
      "step": 245
    },
    {
      "epoch": 0.02394160583941606,
      "grad_norm": 3.4299018810240254,
      "learning_rate": 7.96116504854369e-06,
      "loss": 0.5314,
      "step": 246
    },
    {
      "epoch": 0.024038929440389295,
      "grad_norm": 1.3903956706369247,
      "learning_rate": 7.993527508090616e-06,
      "loss": 0.3539,
      "step": 247
    },
    {
      "epoch": 0.02413625304136253,
      "grad_norm": 2.4599878810180873,
      "learning_rate": 8.02588996763754e-06,
      "loss": 0.4876,
      "step": 248
    },
    {
      "epoch": 0.024233576642335768,
      "grad_norm": 2.4053308291912083,
      "learning_rate": 8.058252427184466e-06,
      "loss": 0.5185,
      "step": 249
    },
    {
      "epoch": 0.024330900243309004,
      "grad_norm": 1.6624263546342495,
      "learning_rate": 8.090614886731393e-06,
      "loss": 0.2909,
      "step": 250
    },
    {
      "epoch": 0.024428223844282237,
      "grad_norm": 2.4091367373679597,
      "learning_rate": 8.122977346278318e-06,
      "loss": 0.6192,
      "step": 251
    },
    {
      "epoch": 0.024525547445255473,
      "grad_norm": 2.4595313520548427,
      "learning_rate": 8.155339805825243e-06,
      "loss": 0.3444,
      "step": 252
    },
    {
      "epoch": 0.02462287104622871,
      "grad_norm": 2.3200411140153174,
      "learning_rate": 8.18770226537217e-06,
      "loss": 0.6112,
      "step": 253
    },
    {
      "epoch": 0.024720194647201946,
      "grad_norm": 2.029624875741936,
      "learning_rate": 8.220064724919095e-06,
      "loss": 0.4524,
      "step": 254
    },
    {
      "epoch": 0.024817518248175182,
      "grad_norm": 1.8862765408033388,
      "learning_rate": 8.25242718446602e-06,
      "loss": 0.2173,
      "step": 255
    },
    {
      "epoch": 0.02491484184914842,
      "grad_norm": 2.575687620331568,
      "learning_rate": 8.284789644012947e-06,
      "loss": 0.4599,
      "step": 256
    },
    {
      "epoch": 0.025012165450121655,
      "grad_norm": 2.373530485379713,
      "learning_rate": 8.317152103559872e-06,
      "loss": 0.5326,
      "step": 257
    },
    {
      "epoch": 0.02510948905109489,
      "grad_norm": 2.4086353319447262,
      "learning_rate": 8.349514563106797e-06,
      "loss": 0.6275,
      "step": 258
    },
    {
      "epoch": 0.025206812652068127,
      "grad_norm": 2.1075725625285697,
      "learning_rate": 8.381877022653722e-06,
      "loss": 0.44,
      "step": 259
    },
    {
      "epoch": 0.025304136253041364,
      "grad_norm": 2.0285700798989614,
      "learning_rate": 8.414239482200647e-06,
      "loss": 0.3489,
      "step": 260
    },
    {
      "epoch": 0.0254014598540146,
      "grad_norm": 2.5592973746241,
      "learning_rate": 8.446601941747573e-06,
      "loss": 0.4403,
      "step": 261
    },
    {
      "epoch": 0.025498783454987836,
      "grad_norm": 2.470930078509074,
      "learning_rate": 8.4789644012945e-06,
      "loss": 0.4985,
      "step": 262
    },
    {
      "epoch": 0.02559610705596107,
      "grad_norm": 2.099638103909556,
      "learning_rate": 8.511326860841424e-06,
      "loss": 0.4194,
      "step": 263
    },
    {
      "epoch": 0.025693430656934305,
      "grad_norm": 1.6030834140551835,
      "learning_rate": 8.54368932038835e-06,
      "loss": 0.3382,
      "step": 264
    },
    {
      "epoch": 0.02579075425790754,
      "grad_norm": 1.8960928547169034,
      "learning_rate": 8.576051779935276e-06,
      "loss": 0.2838,
      "step": 265
    },
    {
      "epoch": 0.025888077858880778,
      "grad_norm": 2.4306930963261966,
      "learning_rate": 8.608414239482202e-06,
      "loss": 0.4956,
      "step": 266
    },
    {
      "epoch": 0.025985401459854014,
      "grad_norm": 2.374430136325354,
      "learning_rate": 8.640776699029127e-06,
      "loss": 0.5083,
      "step": 267
    },
    {
      "epoch": 0.02608272506082725,
      "grad_norm": 2.410095115145934,
      "learning_rate": 8.673139158576054e-06,
      "loss": 0.4247,
      "step": 268
    },
    {
      "epoch": 0.026180048661800487,
      "grad_norm": 2.41271065696519,
      "learning_rate": 8.705501618122979e-06,
      "loss": 0.6946,
      "step": 269
    },
    {
      "epoch": 0.026277372262773723,
      "grad_norm": 1.752688930628829,
      "learning_rate": 8.737864077669904e-06,
      "loss": 0.2662,
      "step": 270
    },
    {
      "epoch": 0.02637469586374696,
      "grad_norm": 1.9842034213162434,
      "learning_rate": 8.770226537216829e-06,
      "loss": 0.3611,
      "step": 271
    },
    {
      "epoch": 0.026472019464720196,
      "grad_norm": 2.4137979998327497,
      "learning_rate": 8.802588996763754e-06,
      "loss": 0.501,
      "step": 272
    },
    {
      "epoch": 0.026569343065693432,
      "grad_norm": 2.929650064864996,
      "learning_rate": 8.834951456310681e-06,
      "loss": 0.6153,
      "step": 273
    },
    {
      "epoch": 0.02666666666666667,
      "grad_norm": 2.281738020025263,
      "learning_rate": 8.867313915857606e-06,
      "loss": 0.5395,
      "step": 274
    },
    {
      "epoch": 0.0267639902676399,
      "grad_norm": 2.1406726692627975,
      "learning_rate": 8.899676375404531e-06,
      "loss": 0.4039,
      "step": 275
    },
    {
      "epoch": 0.026861313868613138,
      "grad_norm": 3.2366954201371523,
      "learning_rate": 8.932038834951458e-06,
      "loss": 0.5414,
      "step": 276
    },
    {
      "epoch": 0.026958637469586374,
      "grad_norm": 2.1900667662872513,
      "learning_rate": 8.964401294498383e-06,
      "loss": 0.3815,
      "step": 277
    },
    {
      "epoch": 0.02705596107055961,
      "grad_norm": 2.5301939091612216,
      "learning_rate": 8.996763754045308e-06,
      "loss": 0.8016,
      "step": 278
    },
    {
      "epoch": 0.027153284671532846,
      "grad_norm": 2.2552758985680907,
      "learning_rate": 9.029126213592233e-06,
      "loss": 0.4133,
      "step": 279
    },
    {
      "epoch": 0.027250608272506083,
      "grad_norm": 2.309545536997134,
      "learning_rate": 9.06148867313916e-06,
      "loss": 0.5346,
      "step": 280
    },
    {
      "epoch": 0.02734793187347932,
      "grad_norm": 2.585578916644781,
      "learning_rate": 9.093851132686085e-06,
      "loss": 0.407,
      "step": 281
    },
    {
      "epoch": 0.027445255474452555,
      "grad_norm": 1.8503464194025006,
      "learning_rate": 9.12621359223301e-06,
      "loss": 0.4674,
      "step": 282
    },
    {
      "epoch": 0.02754257907542579,
      "grad_norm": 2.431490115980846,
      "learning_rate": 9.158576051779936e-06,
      "loss": 0.6026,
      "step": 283
    },
    {
      "epoch": 0.027639902676399028,
      "grad_norm": 1.916233248702735,
      "learning_rate": 9.19093851132686e-06,
      "loss": 0.4949,
      "step": 284
    },
    {
      "epoch": 0.027737226277372264,
      "grad_norm": 2.2160236640245072,
      "learning_rate": 9.223300970873788e-06,
      "loss": 0.4765,
      "step": 285
    },
    {
      "epoch": 0.027834549878345497,
      "grad_norm": 2.0764827118780143,
      "learning_rate": 9.255663430420713e-06,
      "loss": 0.472,
      "step": 286
    },
    {
      "epoch": 0.027931873479318733,
      "grad_norm": 2.638286661284288,
      "learning_rate": 9.288025889967638e-06,
      "loss": 0.6312,
      "step": 287
    },
    {
      "epoch": 0.02802919708029197,
      "grad_norm": 1.940011273577467,
      "learning_rate": 9.320388349514565e-06,
      "loss": 0.4555,
      "step": 288
    },
    {
      "epoch": 0.028126520681265206,
      "grad_norm": 1.8760624736314784,
      "learning_rate": 9.35275080906149e-06,
      "loss": 0.3625,
      "step": 289
    },
    {
      "epoch": 0.028223844282238442,
      "grad_norm": 1.3468692859077058,
      "learning_rate": 9.385113268608415e-06,
      "loss": 0.2442,
      "step": 290
    },
    {
      "epoch": 0.02832116788321168,
      "grad_norm": 2.1497394847504014,
      "learning_rate": 9.41747572815534e-06,
      "loss": 0.5227,
      "step": 291
    },
    {
      "epoch": 0.028418491484184915,
      "grad_norm": 2.1233743171190014,
      "learning_rate": 9.449838187702267e-06,
      "loss": 0.6184,
      "step": 292
    },
    {
      "epoch": 0.02851581508515815,
      "grad_norm": 2.337806183860394,
      "learning_rate": 9.482200647249192e-06,
      "loss": 0.5491,
      "step": 293
    },
    {
      "epoch": 0.028613138686131388,
      "grad_norm": 2.015000594070385,
      "learning_rate": 9.514563106796117e-06,
      "loss": 0.5137,
      "step": 294
    },
    {
      "epoch": 0.028710462287104624,
      "grad_norm": 2.0267324830753766,
      "learning_rate": 9.546925566343042e-06,
      "loss": 0.4117,
      "step": 295
    },
    {
      "epoch": 0.02880778588807786,
      "grad_norm": 1.732639028192012,
      "learning_rate": 9.579288025889967e-06,
      "loss": 0.3156,
      "step": 296
    },
    {
      "epoch": 0.028905109489051097,
      "grad_norm": 2.1204056159243923,
      "learning_rate": 9.611650485436894e-06,
      "loss": 0.6056,
      "step": 297
    },
    {
      "epoch": 0.02900243309002433,
      "grad_norm": 1.7868071753968195,
      "learning_rate": 9.64401294498382e-06,
      "loss": 0.3417,
      "step": 298
    },
    {
      "epoch": 0.029099756690997566,
      "grad_norm": 1.9477439300595292,
      "learning_rate": 9.676375404530746e-06,
      "loss": 0.3631,
      "step": 299
    },
    {
      "epoch": 0.029197080291970802,
      "grad_norm": 1.7688147839655162,
      "learning_rate": 9.708737864077671e-06,
      "loss": 0.3605,
      "step": 300
    },
    {
      "epoch": 0.029294403892944038,
      "grad_norm": 1.9162335597538034,
      "learning_rate": 9.741100323624596e-06,
      "loss": 0.2498,
      "step": 301
    },
    {
      "epoch": 0.029391727493917275,
      "grad_norm": 2.9282579520055756,
      "learning_rate": 9.773462783171522e-06,
      "loss": 0.4286,
      "step": 302
    },
    {
      "epoch": 0.02948905109489051,
      "grad_norm": 1.9744499285549086,
      "learning_rate": 9.805825242718447e-06,
      "loss": 0.3391,
      "step": 303
    },
    {
      "epoch": 0.029586374695863747,
      "grad_norm": 2.2116032868392455,
      "learning_rate": 9.838187702265373e-06,
      "loss": 0.3414,
      "step": 304
    },
    {
      "epoch": 0.029683698296836983,
      "grad_norm": 1.9159144570242486,
      "learning_rate": 9.870550161812299e-06,
      "loss": 0.2915,
      "step": 305
    },
    {
      "epoch": 0.02978102189781022,
      "grad_norm": 2.671718838238437,
      "learning_rate": 9.902912621359224e-06,
      "loss": 0.79,
      "step": 306
    },
    {
      "epoch": 0.029878345498783456,
      "grad_norm": 2.093937424199301,
      "learning_rate": 9.935275080906149e-06,
      "loss": 0.576,
      "step": 307
    },
    {
      "epoch": 0.029975669099756692,
      "grad_norm": 1.895574286512308,
      "learning_rate": 9.967637540453076e-06,
      "loss": 0.4223,
      "step": 308
    },
    {
      "epoch": 0.03007299270072993,
      "grad_norm": 2.142643554578675,
      "learning_rate": 1e-05,
      "loss": 0.4719,
      "step": 309
    },
    {
      "epoch": 0.03017031630170316,
      "grad_norm": 2.2548613483238378,
      "learning_rate": 9.999999751573464e-06,
      "loss": 0.5547,
      "step": 310
    },
    {
      "epoch": 0.030267639902676398,
      "grad_norm": 2.375146158639999,
      "learning_rate": 9.99999900629388e-06,
      "loss": 0.3864,
      "step": 311
    },
    {
      "epoch": 0.030364963503649634,
      "grad_norm": 1.558937895217452,
      "learning_rate": 9.99999776416132e-06,
      "loss": 0.3409,
      "step": 312
    },
    {
      "epoch": 0.03046228710462287,
      "grad_norm": 2.7508940543848115,
      "learning_rate": 9.99999602517591e-06,
      "loss": 0.3641,
      "step": 313
    },
    {
      "epoch": 0.030559610705596107,
      "grad_norm": 2.228096737889712,
      "learning_rate": 9.99999378933782e-06,
      "loss": 0.6464,
      "step": 314
    },
    {
      "epoch": 0.030656934306569343,
      "grad_norm": 1.5612763763472,
      "learning_rate": 9.999991056647274e-06,
      "loss": 0.3124,
      "step": 315
    },
    {
      "epoch": 0.03075425790754258,
      "grad_norm": 2.3203527787434104,
      "learning_rate": 9.999987827104544e-06,
      "loss": 0.5893,
      "step": 316
    },
    {
      "epoch": 0.030851581508515816,
      "grad_norm": 1.8472611567410342,
      "learning_rate": 9.999984100709951e-06,
      "loss": 0.3732,
      "step": 317
    },
    {
      "epoch": 0.030948905109489052,
      "grad_norm": 2.269778108549014,
      "learning_rate": 9.999979877463866e-06,
      "loss": 0.5537,
      "step": 318
    },
    {
      "epoch": 0.03104622871046229,
      "grad_norm": 2.381498581134022,
      "learning_rate": 9.999975157366705e-06,
      "loss": 0.7179,
      "step": 319
    },
    {
      "epoch": 0.031143552311435525,
      "grad_norm": 1.7030655036823346,
      "learning_rate": 9.99996994041894e-06,
      "loss": 0.4256,
      "step": 320
    },
    {
      "epoch": 0.031240875912408757,
      "grad_norm": 1.8361141038730153,
      "learning_rate": 9.999964226621089e-06,
      "loss": 0.4648,
      "step": 321
    },
    {
      "epoch": 0.031338199513381994,
      "grad_norm": 1.7985459229558753,
      "learning_rate": 9.99995801597372e-06,
      "loss": 0.3031,
      "step": 322
    },
    {
      "epoch": 0.031435523114355234,
      "grad_norm": 2.4309020119442915,
      "learning_rate": 9.99995130847745e-06,
      "loss": 0.5011,
      "step": 323
    },
    {
      "epoch": 0.031532846715328466,
      "grad_norm": 2.048514022969095,
      "learning_rate": 9.999944104132944e-06,
      "loss": 0.6152,
      "step": 324
    },
    {
      "epoch": 0.031630170316301706,
      "grad_norm": 1.8892667320795724,
      "learning_rate": 9.99993640294092e-06,
      "loss": 0.4738,
      "step": 325
    },
    {
      "epoch": 0.03172749391727494,
      "grad_norm": 2.081179331819785,
      "learning_rate": 9.999928204902141e-06,
      "loss": 0.5192,
      "step": 326
    },
    {
      "epoch": 0.03182481751824817,
      "grad_norm": 2.410280889073595,
      "learning_rate": 9.999919510017424e-06,
      "loss": 0.3314,
      "step": 327
    },
    {
      "epoch": 0.03192214111922141,
      "grad_norm": 1.663034255724975,
      "learning_rate": 9.999910318287632e-06,
      "loss": 0.3342,
      "step": 328
    },
    {
      "epoch": 0.032019464720194644,
      "grad_norm": 1.7874391345352068,
      "learning_rate": 9.999900629713679e-06,
      "loss": 0.3189,
      "step": 329
    },
    {
      "epoch": 0.032116788321167884,
      "grad_norm": 2.1098429973097805,
      "learning_rate": 9.999890444296528e-06,
      "loss": 0.4561,
      "step": 330
    },
    {
      "epoch": 0.03221411192214112,
      "grad_norm": 2.4678279265353558,
      "learning_rate": 9.999879762037187e-06,
      "loss": 0.5831,
      "step": 331
    },
    {
      "epoch": 0.03231143552311436,
      "grad_norm": 1.6643716587630457,
      "learning_rate": 9.999868582936726e-06,
      "loss": 0.4371,
      "step": 332
    },
    {
      "epoch": 0.03240875912408759,
      "grad_norm": 2.088466639768523,
      "learning_rate": 9.999856906996246e-06,
      "loss": 0.3904,
      "step": 333
    },
    {
      "epoch": 0.03250608272506083,
      "grad_norm": 2.0023651443392256,
      "learning_rate": 9.999844734216914e-06,
      "loss": 0.4802,
      "step": 334
    },
    {
      "epoch": 0.03260340632603406,
      "grad_norm": 2.161282844007076,
      "learning_rate": 9.99983206459994e-06,
      "loss": 0.5187,
      "step": 335
    },
    {
      "epoch": 0.0327007299270073,
      "grad_norm": 2.10212671583593,
      "learning_rate": 9.999818898146576e-06,
      "loss": 0.4618,
      "step": 336
    },
    {
      "epoch": 0.032798053527980535,
      "grad_norm": 2.2142899508809286,
      "learning_rate": 9.999805234858137e-06,
      "loss": 0.2387,
      "step": 337
    },
    {
      "epoch": 0.032895377128953775,
      "grad_norm": 2.1084763484693023,
      "learning_rate": 9.999791074735981e-06,
      "loss": 0.5652,
      "step": 338
    },
    {
| "epoch": 0.03299270072992701, | |
| "grad_norm": 2.261838498017328, | |
| "learning_rate": 9.99977641778151e-06, | |
| "loss": 0.7224, | |
| "step": 339 | |
| }, | |
| { | |
| "epoch": 0.03309002433090024, | |
| "grad_norm": 1.612816030006559, | |
| "learning_rate": 9.999761263996184e-06, | |
| "loss": 0.377, | |
| "step": 340 | |
| }, | |
| { | |
| "epoch": 0.03318734793187348, | |
| "grad_norm": 2.1209830295615832, | |
| "learning_rate": 9.999745613381507e-06, | |
| "loss": 0.614, | |
| "step": 341 | |
| }, | |
| { | |
| "epoch": 0.03328467153284671, | |
| "grad_norm": 1.7938764015879674, | |
| "learning_rate": 9.999729465939036e-06, | |
| "loss": 0.3983, | |
| "step": 342 | |
| }, | |
| { | |
| "epoch": 0.03338199513381995, | |
| "grad_norm": 1.943418875698731, | |
| "learning_rate": 9.999712821670375e-06, | |
| "loss": 0.4708, | |
| "step": 343 | |
| }, | |
| { | |
| "epoch": 0.033479318734793186, | |
| "grad_norm": 1.9787546900237571, | |
| "learning_rate": 9.99969568057718e-06, | |
| "loss": 0.578, | |
| "step": 344 | |
| }, | |
| { | |
| "epoch": 0.033576642335766425, | |
| "grad_norm": 1.4798263726328331, | |
| "learning_rate": 9.99967804266115e-06, | |
| "loss": 0.394, | |
| "step": 345 | |
| }, | |
| { | |
| "epoch": 0.03367396593673966, | |
| "grad_norm": 2.1936298043995484, | |
| "learning_rate": 9.99965990792404e-06, | |
| "loss": 0.6316, | |
| "step": 346 | |
| }, | |
| { | |
| "epoch": 0.0337712895377129, | |
| "grad_norm": 2.2799650780195133, | |
| "learning_rate": 9.99964127636765e-06, | |
| "loss": 0.3985, | |
| "step": 347 | |
| }, | |
| { | |
| "epoch": 0.03386861313868613, | |
| "grad_norm": 1.8519049219191819, | |
| "learning_rate": 9.999622147993837e-06, | |
| "loss": 0.3853, | |
| "step": 348 | |
| }, | |
| { | |
| "epoch": 0.03396593673965937, | |
| "grad_norm": 1.5111895282974241, | |
| "learning_rate": 9.999602522804497e-06, | |
| "loss": 0.4201, | |
| "step": 349 | |
| }, | |
| { | |
| "epoch": 0.0340632603406326, | |
| "grad_norm": 1.8605769784283237, | |
| "learning_rate": 9.99958240080158e-06, | |
| "loss": 0.5225, | |
| "step": 350 | |
| }, | |
| { | |
| "epoch": 0.034160583941605836, | |
| "grad_norm": 1.6063240538866903, | |
| "learning_rate": 9.999561781987087e-06, | |
| "loss": 0.3165, | |
| "step": 351 | |
| }, | |
| { | |
| "epoch": 0.034257907542579076, | |
| "grad_norm": 1.4751976204077173, | |
| "learning_rate": 9.999540666363068e-06, | |
| "loss": 0.3156, | |
| "step": 352 | |
| }, | |
| { | |
| "epoch": 0.03435523114355231, | |
| "grad_norm": 2.1029966771511757, | |
| "learning_rate": 9.99951905393162e-06, | |
| "loss": 0.5336, | |
| "step": 353 | |
| }, | |
| { | |
| "epoch": 0.03445255474452555, | |
| "grad_norm": 2.1419054642874267, | |
| "learning_rate": 9.99949694469489e-06, | |
| "loss": 0.5253, | |
| "step": 354 | |
| }, | |
| { | |
| "epoch": 0.03454987834549878, | |
| "grad_norm": 2.169397271826959, | |
| "learning_rate": 9.999474338655075e-06, | |
| "loss": 0.5567, | |
| "step": 355 | |
| }, | |
| { | |
| "epoch": 0.03464720194647202, | |
| "grad_norm": 2.2972412855327797, | |
| "learning_rate": 9.999451235814422e-06, | |
| "loss": 0.5233, | |
| "step": 356 | |
| }, | |
| { | |
| "epoch": 0.034744525547445254, | |
| "grad_norm": 1.830377999961128, | |
| "learning_rate": 9.999427636175228e-06, | |
| "loss": 0.4297, | |
| "step": 357 | |
| }, | |
| { | |
| "epoch": 0.034841849148418494, | |
| "grad_norm": 2.1217123292302875, | |
| "learning_rate": 9.999403539739837e-06, | |
| "loss": 0.3605, | |
| "step": 358 | |
| }, | |
| { | |
| "epoch": 0.03493917274939173, | |
| "grad_norm": 2.001599625802253, | |
| "learning_rate": 9.999378946510642e-06, | |
| "loss": 0.5237, | |
| "step": 359 | |
| }, | |
| { | |
| "epoch": 0.035036496350364967, | |
| "grad_norm": 1.6719956399048532, | |
| "learning_rate": 9.99935385649009e-06, | |
| "loss": 0.424, | |
| "step": 360 | |
| }, | |
| { | |
| "epoch": 0.0351338199513382, | |
| "grad_norm": 1.5962062682133515, | |
| "learning_rate": 9.99932826968067e-06, | |
| "loss": 0.4228, | |
| "step": 361 | |
| }, | |
| { | |
| "epoch": 0.03523114355231143, | |
| "grad_norm": 1.9754274750693919, | |
| "learning_rate": 9.999302186084929e-06, | |
| "loss": 0.4333, | |
| "step": 362 | |
| }, | |
| { | |
| "epoch": 0.03532846715328467, | |
| "grad_norm": 1.8248617929879183, | |
| "learning_rate": 9.999275605705457e-06, | |
| "loss": 0.4985, | |
| "step": 363 | |
| }, | |
| { | |
| "epoch": 0.035425790754257905, | |
| "grad_norm": 2.5923075514224982, | |
| "learning_rate": 9.999248528544895e-06, | |
| "loss": 0.4829, | |
| "step": 364 | |
| }, | |
| { | |
| "epoch": 0.035523114355231145, | |
| "grad_norm": 1.9900801938638135, | |
| "learning_rate": 9.999220954605932e-06, | |
| "loss": 0.587, | |
| "step": 365 | |
| }, | |
| { | |
| "epoch": 0.03562043795620438, | |
| "grad_norm": 1.731558772897005, | |
| "learning_rate": 9.999192883891314e-06, | |
| "loss": 0.3299, | |
| "step": 366 | |
| }, | |
| { | |
| "epoch": 0.03571776155717762, | |
| "grad_norm": 2.339577788711278, | |
| "learning_rate": 9.999164316403823e-06, | |
| "loss": 0.4845, | |
| "step": 367 | |
| }, | |
| { | |
| "epoch": 0.03581508515815085, | |
| "grad_norm": 1.9784113864985955, | |
| "learning_rate": 9.999135252146302e-06, | |
| "loss": 0.5776, | |
| "step": 368 | |
| }, | |
| { | |
| "epoch": 0.03591240875912409, | |
| "grad_norm": 1.5555461256937277, | |
| "learning_rate": 9.999105691121638e-06, | |
| "loss": 0.3563, | |
| "step": 369 | |
| }, | |
| { | |
| "epoch": 0.03600973236009732, | |
| "grad_norm": 1.7905677559908044, | |
| "learning_rate": 9.99907563333277e-06, | |
| "loss": 0.546, | |
| "step": 370 | |
| }, | |
| { | |
| "epoch": 0.03610705596107056, | |
| "grad_norm": 2.0490894714600287, | |
| "learning_rate": 9.999045078782684e-06, | |
| "loss": 0.6836, | |
| "step": 371 | |
| }, | |
| { | |
| "epoch": 0.036204379562043795, | |
| "grad_norm": 2.216601446334751, | |
| "learning_rate": 9.999014027474413e-06, | |
| "loss": 0.5237, | |
| "step": 372 | |
| }, | |
| { | |
| "epoch": 0.036301703163017035, | |
| "grad_norm": 1.5937926342815392, | |
| "learning_rate": 9.998982479411047e-06, | |
| "loss": 0.3539, | |
| "step": 373 | |
| }, | |
| { | |
| "epoch": 0.03639902676399027, | |
| "grad_norm": 2.3941848280266864, | |
| "learning_rate": 9.99895043459572e-06, | |
| "loss": 0.6249, | |
| "step": 374 | |
| }, | |
| { | |
| "epoch": 0.0364963503649635, | |
| "grad_norm": 2.072859669066288, | |
| "learning_rate": 9.998917893031615e-06, | |
| "loss": 0.5415, | |
| "step": 375 | |
| }, | |
| { | |
| "epoch": 0.03659367396593674, | |
| "grad_norm": 1.670908711065728, | |
| "learning_rate": 9.998884854721968e-06, | |
| "loss": 0.3034, | |
| "step": 376 | |
| }, | |
| { | |
| "epoch": 0.03669099756690997, | |
| "grad_norm": 1.9880303784818283, | |
| "learning_rate": 9.998851319670057e-06, | |
| "loss": 0.5025, | |
| "step": 377 | |
| }, | |
| { | |
| "epoch": 0.03678832116788321, | |
| "grad_norm": 1.3517666701087396, | |
| "learning_rate": 9.99881728787922e-06, | |
| "loss": 0.2775, | |
| "step": 378 | |
| }, | |
| { | |
| "epoch": 0.036885644768856446, | |
| "grad_norm": 1.8952553535268069, | |
| "learning_rate": 9.998782759352839e-06, | |
| "loss": 0.5306, | |
| "step": 379 | |
| }, | |
| { | |
| "epoch": 0.036982968369829686, | |
| "grad_norm": 1.8730537486024816, | |
| "learning_rate": 9.998747734094338e-06, | |
| "loss": 0.386, | |
| "step": 380 | |
| }, | |
| { | |
| "epoch": 0.03708029197080292, | |
| "grad_norm": 2.058996056292158, | |
| "learning_rate": 9.998712212107205e-06, | |
| "loss": 0.5641, | |
| "step": 381 | |
| }, | |
| { | |
| "epoch": 0.03717761557177616, | |
| "grad_norm": 1.9837834234853275, | |
| "learning_rate": 9.998676193394966e-06, | |
| "loss": 0.2628, | |
| "step": 382 | |
| }, | |
| { | |
| "epoch": 0.03727493917274939, | |
| "grad_norm": 2.189700953999047, | |
| "learning_rate": 9.998639677961203e-06, | |
| "loss": 0.6024, | |
| "step": 383 | |
| }, | |
| { | |
| "epoch": 0.03737226277372263, | |
| "grad_norm": 2.060696593716547, | |
| "learning_rate": 9.99860266580954e-06, | |
| "loss": 0.5377, | |
| "step": 384 | |
| }, | |
| { | |
| "epoch": 0.037469586374695864, | |
| "grad_norm": 2.0831966609629227, | |
| "learning_rate": 9.99856515694366e-06, | |
| "loss": 0.5063, | |
| "step": 385 | |
| }, | |
| { | |
| "epoch": 0.0375669099756691, | |
| "grad_norm": 2.2950496556846227, | |
| "learning_rate": 9.998527151367288e-06, | |
| "loss": 0.6484, | |
| "step": 386 | |
| }, | |
| { | |
| "epoch": 0.037664233576642336, | |
| "grad_norm": 2.2597922123273873, | |
| "learning_rate": 9.9984886490842e-06, | |
| "loss": 0.6617, | |
| "step": 387 | |
| }, | |
| { | |
| "epoch": 0.03776155717761557, | |
| "grad_norm": 2.071575887731456, | |
| "learning_rate": 9.99844965009822e-06, | |
| "loss": 0.5405, | |
| "step": 388 | |
| }, | |
| { | |
| "epoch": 0.03785888077858881, | |
| "grad_norm": 2.004249587957457, | |
| "learning_rate": 9.99841015441323e-06, | |
| "loss": 0.4306, | |
| "step": 389 | |
| }, | |
| { | |
| "epoch": 0.03795620437956204, | |
| "grad_norm": 1.9297023880727862, | |
| "learning_rate": 9.99837016203315e-06, | |
| "loss": 0.4083, | |
| "step": 390 | |
| }, | |
| { | |
| "epoch": 0.03805352798053528, | |
| "grad_norm": 2.001337081282171, | |
| "learning_rate": 9.998329672961952e-06, | |
| "loss": 0.4999, | |
| "step": 391 | |
| }, | |
| { | |
| "epoch": 0.038150851581508514, | |
| "grad_norm": 1.7630230797021285, | |
| "learning_rate": 9.998288687203665e-06, | |
| "loss": 0.4267, | |
| "step": 392 | |
| }, | |
| { | |
| "epoch": 0.038248175182481754, | |
| "grad_norm": 1.4413546421147376, | |
| "learning_rate": 9.998247204762358e-06, | |
| "loss": 0.3028, | |
| "step": 393 | |
| }, | |
| { | |
| "epoch": 0.03834549878345499, | |
| "grad_norm": 2.032450629241147, | |
| "learning_rate": 9.998205225642154e-06, | |
| "loss": 0.4216, | |
| "step": 394 | |
| }, | |
| { | |
| "epoch": 0.03844282238442823, | |
| "grad_norm": 1.8288270303352272, | |
| "learning_rate": 9.998162749847224e-06, | |
| "loss": 0.451, | |
| "step": 395 | |
| }, | |
| { | |
| "epoch": 0.03854014598540146, | |
| "grad_norm": 1.5869427581540143, | |
| "learning_rate": 9.998119777381791e-06, | |
| "loss": 0.4896, | |
| "step": 396 | |
| }, | |
| { | |
| "epoch": 0.03863746958637469, | |
| "grad_norm": 1.9312614168983935, | |
| "learning_rate": 9.998076308250122e-06, | |
| "loss": 0.351, | |
| "step": 397 | |
| }, | |
| { | |
| "epoch": 0.03873479318734793, | |
| "grad_norm": 2.182734939846557, | |
| "learning_rate": 9.99803234245654e-06, | |
| "loss": 0.4456, | |
| "step": 398 | |
| }, | |
| { | |
| "epoch": 0.038832116788321165, | |
| "grad_norm": 1.6075130172605856, | |
| "learning_rate": 9.997987880005412e-06, | |
| "loss": 0.3333, | |
| "step": 399 | |
| }, | |
| { | |
| "epoch": 0.038929440389294405, | |
| "grad_norm": 2.0206579020801048, | |
| "learning_rate": 9.997942920901154e-06, | |
| "loss": 0.4662, | |
| "step": 400 | |
| }, | |
| { | |
| "epoch": 0.03902676399026764, | |
| "grad_norm": 2.0019154912621246, | |
| "learning_rate": 9.997897465148236e-06, | |
| "loss": 0.588, | |
| "step": 401 | |
| }, | |
| { | |
| "epoch": 0.03912408759124088, | |
| "grad_norm": 1.9556688755730123, | |
| "learning_rate": 9.997851512751178e-06, | |
| "loss": 0.5364, | |
| "step": 402 | |
| }, | |
| { | |
| "epoch": 0.03922141119221411, | |
| "grad_norm": 2.1735940620422687, | |
| "learning_rate": 9.997805063714541e-06, | |
| "loss": 0.4155, | |
| "step": 403 | |
| }, | |
| { | |
| "epoch": 0.03931873479318735, | |
| "grad_norm": 1.893104755523836, | |
| "learning_rate": 9.997758118042945e-06, | |
| "loss": 0.2835, | |
| "step": 404 | |
| }, | |
| { | |
| "epoch": 0.03941605839416058, | |
| "grad_norm": 1.892857392200546, | |
| "learning_rate": 9.99771067574105e-06, | |
| "loss": 0.317, | |
| "step": 405 | |
| }, | |
| { | |
| "epoch": 0.03951338199513382, | |
| "grad_norm": 2.194365925195629, | |
| "learning_rate": 9.997662736813575e-06, | |
| "loss": 0.5972, | |
| "step": 406 | |
| }, | |
| { | |
| "epoch": 0.039610705596107056, | |
| "grad_norm": 2.3359516870584547, | |
| "learning_rate": 9.997614301265281e-06, | |
| "loss": 0.3505, | |
| "step": 407 | |
| }, | |
| { | |
| "epoch": 0.039708029197080295, | |
| "grad_norm": 1.8041349283411827, | |
| "learning_rate": 9.997565369100983e-06, | |
| "loss": 0.4003, | |
| "step": 408 | |
| }, | |
| { | |
| "epoch": 0.03980535279805353, | |
| "grad_norm": 2.2199870140108273, | |
| "learning_rate": 9.997515940325542e-06, | |
| "loss": 0.4428, | |
| "step": 409 | |
| }, | |
| { | |
| "epoch": 0.03990267639902676, | |
| "grad_norm": 2.193796849633566, | |
| "learning_rate": 9.997466014943871e-06, | |
| "loss": 0.3906, | |
| "step": 410 | |
| }, | |
| { | |
| "epoch": 0.04, | |
| "grad_norm": 2.7309920828616168, | |
| "learning_rate": 9.99741559296093e-06, | |
| "loss": 0.6283, | |
| "step": 411 | |
| }, | |
| { | |
| "epoch": 0.040097323600973234, | |
| "grad_norm": 2.220745639846989, | |
| "learning_rate": 9.99736467438173e-06, | |
| "loss": 0.4568, | |
| "step": 412 | |
| }, | |
| { | |
| "epoch": 0.04019464720194647, | |
| "grad_norm": 1.905067765139487, | |
| "learning_rate": 9.99731325921133e-06, | |
| "loss": 0.3198, | |
| "step": 413 | |
| }, | |
| { | |
| "epoch": 0.040291970802919706, | |
| "grad_norm": 2.0461180940034116, | |
| "learning_rate": 9.997261347454841e-06, | |
| "loss": 0.3783, | |
| "step": 414 | |
| }, | |
| { | |
| "epoch": 0.040389294403892946, | |
| "grad_norm": 1.9732614929529544, | |
| "learning_rate": 9.99720893911742e-06, | |
| "loss": 0.5211, | |
| "step": 415 | |
| }, | |
| { | |
| "epoch": 0.04048661800486618, | |
| "grad_norm": 2.341156401873798, | |
| "learning_rate": 9.997156034204276e-06, | |
| "loss": 0.5094, | |
| "step": 416 | |
| }, | |
| { | |
| "epoch": 0.04058394160583942, | |
| "grad_norm": 2.2588135503158138, | |
| "learning_rate": 9.997102632720664e-06, | |
| "loss": 0.591, | |
| "step": 417 | |
| }, | |
| { | |
| "epoch": 0.04068126520681265, | |
| "grad_norm": 2.187795564574772, | |
| "learning_rate": 9.997048734671893e-06, | |
| "loss": 0.3811, | |
| "step": 418 | |
| }, | |
| { | |
| "epoch": 0.04077858880778589, | |
| "grad_norm": 2.2570398189900938, | |
| "learning_rate": 9.996994340063314e-06, | |
| "loss": 0.4494, | |
| "step": 419 | |
| }, | |
| { | |
| "epoch": 0.040875912408759124, | |
| "grad_norm": 2.3267878846596597, | |
| "learning_rate": 9.996939448900341e-06, | |
| "loss": 0.5254, | |
| "step": 420 | |
| }, | |
| { | |
| "epoch": 0.04097323600973236, | |
| "grad_norm": 1.9149387144635641, | |
| "learning_rate": 9.99688406118842e-06, | |
| "loss": 0.4281, | |
| "step": 421 | |
| }, | |
| { | |
| "epoch": 0.0410705596107056, | |
| "grad_norm": 2.4052095021382285, | |
| "learning_rate": 9.996828176933062e-06, | |
| "loss": 0.61, | |
| "step": 422 | |
| }, | |
| { | |
| "epoch": 0.04116788321167883, | |
| "grad_norm": 2.8744864627123237, | |
| "learning_rate": 9.996771796139814e-06, | |
| "loss": 0.4708, | |
| "step": 423 | |
| }, | |
| { | |
| "epoch": 0.04126520681265207, | |
| "grad_norm": 2.0334953222734513, | |
| "learning_rate": 9.996714918814284e-06, | |
| "loss": 0.2697, | |
| "step": 424 | |
| }, | |
| { | |
| "epoch": 0.0413625304136253, | |
| "grad_norm": 2.1314093477075486, | |
| "learning_rate": 9.996657544962119e-06, | |
| "loss": 0.3026, | |
| "step": 425 | |
| }, | |
| { | |
| "epoch": 0.04145985401459854, | |
| "grad_norm": 1.7241742631767316, | |
| "learning_rate": 9.996599674589022e-06, | |
| "loss": 0.3624, | |
| "step": 426 | |
| }, | |
| { | |
| "epoch": 0.041557177615571775, | |
| "grad_norm": 2.417754377928955, | |
| "learning_rate": 9.996541307700746e-06, | |
| "loss": 0.6682, | |
| "step": 427 | |
| }, | |
| { | |
| "epoch": 0.041654501216545015, | |
| "grad_norm": 2.2126055245100256, | |
| "learning_rate": 9.99648244430309e-06, | |
| "loss": 0.3705, | |
| "step": 428 | |
| }, | |
| { | |
| "epoch": 0.04175182481751825, | |
| "grad_norm": 1.8224510106748588, | |
| "learning_rate": 9.996423084401901e-06, | |
| "loss": 0.4318, | |
| "step": 429 | |
| }, | |
| { | |
| "epoch": 0.04184914841849149, | |
| "grad_norm": 1.6786428352287364, | |
| "learning_rate": 9.996363228003079e-06, | |
| "loss": 0.4662, | |
| "step": 430 | |
| }, | |
| { | |
| "epoch": 0.04194647201946472, | |
| "grad_norm": 1.9342922605897592, | |
| "learning_rate": 9.99630287511257e-06, | |
| "loss": 0.4874, | |
| "step": 431 | |
| }, | |
| { | |
| "epoch": 0.04204379562043795, | |
| "grad_norm": 1.9444011100602645, | |
| "learning_rate": 9.996242025736377e-06, | |
| "loss": 0.3711, | |
| "step": 432 | |
| }, | |
| { | |
| "epoch": 0.04214111922141119, | |
| "grad_norm": 3.114184163688958, | |
| "learning_rate": 9.99618067988054e-06, | |
| "loss": 0.5342, | |
| "step": 433 | |
| }, | |
| { | |
| "epoch": 0.042238442822384425, | |
| "grad_norm": 1.993932460938173, | |
| "learning_rate": 9.99611883755116e-06, | |
| "loss": 0.465, | |
| "step": 434 | |
| }, | |
| { | |
| "epoch": 0.042335766423357665, | |
| "grad_norm": 1.5062408953506277, | |
| "learning_rate": 9.99605649875438e-06, | |
| "loss": 0.3862, | |
| "step": 435 | |
| }, | |
| { | |
| "epoch": 0.0424330900243309, | |
| "grad_norm": 2.5287447175721733, | |
| "learning_rate": 9.995993663496394e-06, | |
| "loss": 0.5638, | |
| "step": 436 | |
| }, | |
| { | |
| "epoch": 0.04253041362530414, | |
| "grad_norm": 1.7215400937807486, | |
| "learning_rate": 9.995930331783448e-06, | |
| "loss": 0.3507, | |
| "step": 437 | |
| }, | |
| { | |
| "epoch": 0.04262773722627737, | |
| "grad_norm": 1.5105936757865817, | |
| "learning_rate": 9.995866503621834e-06, | |
| "loss": 0.4086, | |
| "step": 438 | |
| }, | |
| { | |
| "epoch": 0.04272506082725061, | |
| "grad_norm": 1.828501540310894, | |
| "learning_rate": 9.995802179017893e-06, | |
| "loss": 0.3477, | |
| "step": 439 | |
| }, | |
| { | |
| "epoch": 0.04282238442822384, | |
| "grad_norm": 1.6658361590948114, | |
| "learning_rate": 9.995737357978022e-06, | |
| "loss": 0.4006, | |
| "step": 440 | |
| }, | |
| { | |
| "epoch": 0.04291970802919708, | |
| "grad_norm": 1.6434395036324305, | |
| "learning_rate": 9.995672040508656e-06, | |
| "loss": 0.4349, | |
| "step": 441 | |
| }, | |
| { | |
| "epoch": 0.043017031630170316, | |
| "grad_norm": 1.9913424027071125, | |
| "learning_rate": 9.99560622661629e-06, | |
| "loss": 0.3415, | |
| "step": 442 | |
| }, | |
| { | |
| "epoch": 0.043114355231143556, | |
| "grad_norm": 1.6487474195389296, | |
| "learning_rate": 9.995539916307463e-06, | |
| "loss": 0.4804, | |
| "step": 443 | |
| }, | |
| { | |
| "epoch": 0.04321167883211679, | |
| "grad_norm": 1.4861266391850032, | |
| "learning_rate": 9.995473109588764e-06, | |
| "loss": 0.411, | |
| "step": 444 | |
| }, | |
| { | |
| "epoch": 0.04330900243309002, | |
| "grad_norm": 1.4390762643228305, | |
| "learning_rate": 9.995405806466831e-06, | |
| "loss": 0.3806, | |
| "step": 445 | |
| }, | |
| { | |
| "epoch": 0.04340632603406326, | |
| "grad_norm": 1.7775332171720517, | |
| "learning_rate": 9.995338006948353e-06, | |
| "loss": 0.3332, | |
| "step": 446 | |
| }, | |
| { | |
| "epoch": 0.043503649635036494, | |
| "grad_norm": 1.7312883283317864, | |
| "learning_rate": 9.995269711040067e-06, | |
| "loss": 0.2736, | |
| "step": 447 | |
| }, | |
| { | |
| "epoch": 0.043600973236009734, | |
| "grad_norm": 1.7973901424872405, | |
| "learning_rate": 9.995200918748759e-06, | |
| "loss": 0.5597, | |
| "step": 448 | |
| }, | |
| { | |
| "epoch": 0.04369829683698297, | |
| "grad_norm": 2.0409413301370334, | |
| "learning_rate": 9.995131630081265e-06, | |
| "loss": 0.6045, | |
| "step": 449 | |
| }, | |
| { | |
| "epoch": 0.043795620437956206, | |
| "grad_norm": 3.2708903670147347, | |
| "learning_rate": 9.995061845044473e-06, | |
| "loss": 0.6245, | |
| "step": 450 | |
| }, | |
| { | |
| "epoch": 0.04389294403892944, | |
| "grad_norm": 1.744466889932859, | |
| "learning_rate": 9.994991563645314e-06, | |
| "loss": 0.4129, | |
| "step": 451 | |
| }, | |
| { | |
| "epoch": 0.04399026763990268, | |
| "grad_norm": 1.8775864246251477, | |
| "learning_rate": 9.994920785890771e-06, | |
| "loss": 0.414, | |
| "step": 452 | |
| }, | |
| { | |
| "epoch": 0.04408759124087591, | |
| "grad_norm": 1.3868286948878126, | |
| "learning_rate": 9.994849511787881e-06, | |
| "loss": 0.3164, | |
| "step": 453 | |
| }, | |
| { | |
| "epoch": 0.04418491484184915, | |
| "grad_norm": 1.6888257223301795, | |
| "learning_rate": 9.994777741343727e-06, | |
| "loss": 0.3241, | |
| "step": 454 | |
| }, | |
| { | |
| "epoch": 0.044282238442822384, | |
| "grad_norm": 1.5029594314338663, | |
| "learning_rate": 9.994705474565436e-06, | |
| "loss": 0.4148, | |
| "step": 455 | |
| }, | |
| { | |
| "epoch": 0.04437956204379562, | |
| "grad_norm": 1.7159996915963702, | |
| "learning_rate": 9.994632711460193e-06, | |
| "loss": 0.3387, | |
| "step": 456 | |
| }, | |
| { | |
| "epoch": 0.04447688564476886, | |
| "grad_norm": 1.7717997513120352, | |
| "learning_rate": 9.994559452035228e-06, | |
| "loss": 0.4547, | |
| "step": 457 | |
| }, | |
| { | |
| "epoch": 0.04457420924574209, | |
| "grad_norm": 1.887765282184233, | |
| "learning_rate": 9.99448569629782e-06, | |
| "loss": 0.5919, | |
| "step": 458 | |
| }, | |
| { | |
| "epoch": 0.04467153284671533, | |
| "grad_norm": 2.0151049512314585, | |
| "learning_rate": 9.994411444255298e-06, | |
| "loss": 0.4556, | |
| "step": 459 | |
| }, | |
| { | |
| "epoch": 0.04476885644768856, | |
| "grad_norm": 1.5706463359289826, | |
| "learning_rate": 9.994336695915041e-06, | |
| "loss": 0.3443, | |
| "step": 460 | |
| }, | |
| { | |
| "epoch": 0.0448661800486618, | |
| "grad_norm": 1.9067884841542395, | |
| "learning_rate": 9.994261451284477e-06, | |
| "loss": 0.5862, | |
| "step": 461 | |
| }, | |
| { | |
| "epoch": 0.044963503649635035, | |
| "grad_norm": 1.7346846845298518, | |
| "learning_rate": 9.994185710371083e-06, | |
| "loss": 0.3588, | |
| "step": 462 | |
| }, | |
| { | |
| "epoch": 0.045060827250608275, | |
| "grad_norm": 1.5593715629463312, | |
| "learning_rate": 9.994109473182385e-06, | |
| "loss": 0.2891, | |
| "step": 463 | |
| }, | |
| { | |
| "epoch": 0.04515815085158151, | |
| "grad_norm": 2.326736753149576, | |
| "learning_rate": 9.994032739725959e-06, | |
| "loss": 0.6517, | |
| "step": 464 | |
| }, | |
| { | |
| "epoch": 0.04525547445255475, | |
| "grad_norm": 2.2142852132770305, | |
| "learning_rate": 9.99395551000943e-06, | |
| "loss": 0.3571, | |
| "step": 465 | |
| }, | |
| { | |
| "epoch": 0.04535279805352798, | |
| "grad_norm": 1.7351954813390544, | |
| "learning_rate": 9.993877784040474e-06, | |
| "loss": 0.3849, | |
| "step": 466 | |
| }, | |
| { | |
| "epoch": 0.04545012165450121, | |
| "grad_norm": 1.3962336815381617, | |
| "learning_rate": 9.993799561826811e-06, | |
| "loss": 0.311, | |
| "step": 467 | |
| }, | |
| { | |
| "epoch": 0.04554744525547445, | |
| "grad_norm": 1.878958465421645, | |
| "learning_rate": 9.993720843376216e-06, | |
| "loss": 0.5602, | |
| "step": 468 | |
| }, | |
| { | |
| "epoch": 0.045644768856447686, | |
| "grad_norm": 1.519160992933857, | |
| "learning_rate": 9.993641628696513e-06, | |
| "loss": 0.2379, | |
| "step": 469 | |
| }, | |
| { | |
| "epoch": 0.045742092457420926, | |
| "grad_norm": 2.5345930464298885, | |
| "learning_rate": 9.99356191779557e-06, | |
| "loss": 0.4239, | |
| "step": 470 | |
| }, | |
| { | |
| "epoch": 0.04583941605839416, | |
| "grad_norm": 1.3153911718041251, | |
| "learning_rate": 9.993481710681314e-06, | |
| "loss": 0.3454, | |
| "step": 471 | |
| }, | |
| { | |
| "epoch": 0.0459367396593674, | |
| "grad_norm": 2.16208125563947, | |
| "learning_rate": 9.993401007361707e-06, | |
| "loss": 0.5386, | |
| "step": 472 | |
| }, | |
| { | |
| "epoch": 0.04603406326034063, | |
| "grad_norm": 1.8150842593472827, | |
| "learning_rate": 9.993319807844775e-06, | |
| "loss": 0.3077, | |
| "step": 473 | |
| }, | |
| { | |
| "epoch": 0.04613138686131387, | |
| "grad_norm": 1.6656864462678063, | |
| "learning_rate": 9.993238112138584e-06, | |
| "loss": 0.4927, | |
| "step": 474 | |
| }, | |
| { | |
| "epoch": 0.046228710462287104, | |
| "grad_norm": 1.3429917702468868, | |
| "learning_rate": 9.993155920251252e-06, | |
| "loss": 0.2433, | |
| "step": 475 | |
| }, | |
| { | |
| "epoch": 0.04632603406326034, | |
| "grad_norm": 1.3651155739367906, | |
| "learning_rate": 9.993073232190949e-06, | |
| "loss": 0.2947, | |
| "step": 476 | |
| }, | |
| { | |
| "epoch": 0.046423357664233576, | |
| "grad_norm": 1.7815516701613203, | |
| "learning_rate": 9.992990047965887e-06, | |
| "loss": 0.5372, | |
| "step": 477 | |
| }, | |
| { | |
| "epoch": 0.046520681265206816, | |
| "grad_norm": 1.846696342179327, | |
| "learning_rate": 9.992906367584337e-06, | |
| "loss": 0.5127, | |
| "step": 478 | |
| }, | |
| { | |
| "epoch": 0.04661800486618005, | |
| "grad_norm": 1.7511253825578088, | |
| "learning_rate": 9.992822191054612e-06, | |
| "loss": 0.4074, | |
| "step": 479 | |
| }, | |
| { | |
| "epoch": 0.04671532846715328, | |
| "grad_norm": 1.8105635986872588, | |
| "learning_rate": 9.992737518385076e-06, | |
| "loss": 0.4998, | |
| "step": 480 | |
| }, | |
| { | |
| "epoch": 0.04681265206812652, | |
| "grad_norm": 2.2743597617900746, | |
| "learning_rate": 9.992652349584147e-06, | |
| "loss": 0.6249, | |
| "step": 481 | |
| }, | |
| { | |
| "epoch": 0.046909975669099754, | |
| "grad_norm": 1.93948496382319, | |
| "learning_rate": 9.992566684660282e-06, | |
| "loss": 0.5411, | |
| "step": 482 | |
| }, | |
| { | |
| "epoch": 0.047007299270072994, | |
| "grad_norm": 1.4073760716303516, | |
| "learning_rate": 9.992480523621999e-06, | |
| "loss": 0.3506, | |
| "step": 483 | |
| }, | |
| { | |
| "epoch": 0.04710462287104623, | |
| "grad_norm": 1.388293079160528, | |
| "learning_rate": 9.992393866477856e-06, | |
| "loss": 0.3304, | |
| "step": 484 | |
| }, | |
| { | |
| "epoch": 0.04720194647201947, | |
| "grad_norm": 2.082643572745618, | |
| "learning_rate": 9.992306713236467e-06, | |
| "loss": 0.5653, | |
| "step": 485 | |
| }, | |
| { | |
| "epoch": 0.0472992700729927, | |
| "grad_norm": 1.7104664332606834, | |
| "learning_rate": 9.992219063906492e-06, | |
| "loss": 0.3317, | |
| "step": 486 | |
| }, | |
| { | |
| "epoch": 0.04739659367396594, | |
| "grad_norm": 1.7575848919482624, | |
| "learning_rate": 9.992130918496638e-06, | |
| "loss": 0.4109, | |
| "step": 487 | |
| }, | |
| { | |
| "epoch": 0.04749391727493917, | |
| "grad_norm": 1.7351379091271637, | |
| "learning_rate": 9.992042277015668e-06, | |
| "loss": 0.5065, | |
| "step": 488 | |
| }, | |
| { | |
| "epoch": 0.04759124087591241, | |
| "grad_norm": 1.4444570948381004, | |
| "learning_rate": 9.991953139472387e-06, | |
| "loss": 0.4023, | |
| "step": 489 | |
| }, | |
| { | |
| "epoch": 0.047688564476885645, | |
| "grad_norm": 1.4697709289140384, | |
| "learning_rate": 9.991863505875656e-06, | |
| "loss": 0.3364, | |
| "step": 490 | |
| }, | |
| { | |
| "epoch": 0.04778588807785888, | |
| "grad_norm": 1.9428205960506804, | |
| "learning_rate": 9.99177337623438e-06, | |
| "loss": 0.4303, | |
| "step": 491 | |
| }, | |
| { | |
| "epoch": 0.04788321167883212, | |
| "grad_norm": 1.931152158561148, | |
| "learning_rate": 9.991682750557516e-06, | |
| "loss": 0.2857, | |
| "step": 492 | |
| }, | |
| { | |
| "epoch": 0.04798053527980535, | |
| "grad_norm": 1.9301394655308035, | |
| "learning_rate": 9.991591628854067e-06, | |
| "loss": 0.5998, | |
| "step": 493 | |
| }, | |
| { | |
| "epoch": 0.04807785888077859, | |
| "grad_norm": 1.7788293016868693, | |
| "learning_rate": 9.99150001113309e-06, | |
| "loss": 0.4595, | |
| "step": 494 | |
| }, | |
| { | |
| "epoch": 0.04817518248175182, | |
| "grad_norm": 2.0641225732440134, | |
| "learning_rate": 9.99140789740369e-06, | |
| "loss": 0.3848, | |
| "step": 495 | |
| }, | |
| { | |
| "epoch": 0.04827250608272506, | |
| "grad_norm": 2.2832955373527044, | |
| "learning_rate": 9.99131528767502e-06, | |
| "loss": 0.6396, | |
| "step": 496 | |
| }, | |
| { | |
| "epoch": 0.048369829683698295, | |
| "grad_norm": 1.6658790952812916, | |
| "learning_rate": 9.99122218195628e-06, | |
| "loss": 0.5429, | |
| "step": 497 | |
| }, | |
| { | |
| "epoch": 0.048467153284671535, | |
| "grad_norm": 1.6568038302360257, | |
| "learning_rate": 9.991128580256725e-06, | |
| "loss": 0.4532, | |
| "step": 498 | |
| }, | |
| { | |
| "epoch": 0.04856447688564477, | |
| "grad_norm": 1.8451659374514144, | |
| "learning_rate": 9.991034482585656e-06, | |
| "loss": 0.5845, | |
| "step": 499 | |
| }, | |
| { | |
| "epoch": 0.04866180048661801, | |
| "grad_norm": 1.9103948838029656, | |
| "learning_rate": 9.99093988895242e-06, | |
| "loss": 0.5508, | |
| "step": 500 | |
| }, | |
| { | |
| "epoch": 0.04875912408759124, | |
| "grad_norm": 1.9691733858712537, | |
| "learning_rate": 9.990844799366422e-06, | |
| "loss": 0.6374, | |
| "step": 501 | |
| }, | |
| { | |
| "epoch": 0.048856447688564474, | |
| "grad_norm": 2.1278472226161846, | |
| "learning_rate": 9.990749213837108e-06, | |
| "loss": 0.572, | |
| "step": 502 | |
| }, | |
| { | |
| "epoch": 0.04895377128953771, | |
| "grad_norm": 1.9704028865885994, | |
| "learning_rate": 9.990653132373977e-06, | |
| "loss": 0.6282, | |
| "step": 503 | |
| }, | |
| { | |
| "epoch": 0.049051094890510946, | |
| "grad_norm": 1.8965741341561362, | |
| "learning_rate": 9.990556554986577e-06, | |
| "loss": 0.5749, | |
| "step": 504 | |
| }, | |
| { | |
| "epoch": 0.049148418491484186, | |
| "grad_norm": 1.5425018763105707, | |
| "learning_rate": 9.990459481684504e-06, | |
| "loss": 0.4236, | |
| "step": 505 | |
| }, | |
| { | |
| "epoch": 0.04924574209245742, | |
| "grad_norm": 1.736669998068125, | |
| "learning_rate": 9.990361912477405e-06, | |
| "loss": 0.4275, | |
| "step": 506 | |
| }, | |
| { | |
| "epoch": 0.04934306569343066, | |
| "grad_norm": 2.049335776858506, | |
| "learning_rate": 9.990263847374976e-06, | |
| "loss": 0.6897, | |
| "step": 507 | |
| }, | |
| { | |
| "epoch": 0.04944038929440389, | |
| "grad_norm": 1.8544975871268152, | |
| "learning_rate": 9.990165286386961e-06, | |
| "loss": 0.4811, | |
| "step": 508 | |
| }, | |
| { | |
| "epoch": 0.04953771289537713, | |
| "grad_norm": 1.5709178763522822, | |
| "learning_rate": 9.990066229523155e-06, | |
| "loss": 0.4585, | |
| "step": 509 | |
| }, | |
| { | |
| "epoch": 0.049635036496350364, | |
| "grad_norm": 2.1410068811754153, | |
| "learning_rate": 9.989966676793399e-06, | |
| "loss": 0.4773, | |
| "step": 510 | |
| }, | |
| { | |
| "epoch": 0.049732360097323604, | |
| "grad_norm": 1.760724042734433, | |
| "learning_rate": 9.989866628207589e-06, | |
| "loss": 0.3144, | |
| "step": 511 | |
| }, | |
| { | |
| "epoch": 0.04982968369829684, | |
| "grad_norm": 1.8521560168370175, | |
| "learning_rate": 9.989766083775662e-06, | |
| "loss": 0.4656, | |
| "step": 512 | |
| }, | |
| { | |
| "epoch": 0.049927007299270076, | |
| "grad_norm": 1.544987615640627, | |
| "learning_rate": 9.989665043507616e-06, | |
| "loss": 0.4089, | |
| "step": 513 | |
| }, | |
| { | |
| "epoch": 0.05002433090024331, | |
| "grad_norm": 1.9122960249889975, | |
| "learning_rate": 9.989563507413487e-06, | |
| "loss": 0.4535, | |
| "step": 514 | |
| }, | |
| { | |
| "epoch": 0.05012165450121654, | |
| "grad_norm": 1.5187134098621655, | |
| "learning_rate": 9.989461475503363e-06, | |
| "loss": 0.31, | |
| "step": 515 | |
| }, | |
| { | |
| "epoch": 0.05021897810218978, | |
| "grad_norm": 1.562160455050312, | |
| "learning_rate": 9.989358947787389e-06, | |
| "loss": 0.4009, | |
| "step": 516 | |
| }, | |
| { | |
| "epoch": 0.050316301703163015, | |
| "grad_norm": 1.738084966314413, | |
| "learning_rate": 9.989255924275746e-06, | |
| "loss": 0.4723, | |
| "step": 517 | |
| }, | |
| { | |
| "epoch": 0.050413625304136254, | |
| "grad_norm": 2.156580581755068, | |
| "learning_rate": 9.989152404978678e-06, | |
| "loss": 0.4407, | |
| "step": 518 | |
| }, | |
| { | |
| "epoch": 0.05051094890510949, | |
| "grad_norm": 1.8652302207700793, | |
| "learning_rate": 9.989048389906469e-06, | |
| "loss": 0.587, | |
| "step": 519 | |
| }, | |
| { | |
| "epoch": 0.05060827250608273, | |
| "grad_norm": 1.5934369396830426, | |
| "learning_rate": 9.988943879069452e-06, | |
| "loss": 0.3961, | |
| "step": 520 | |
| }, | |
| { | |
| "epoch": 0.05070559610705596, | |
| "grad_norm": 1.4294562647861604, | |
| "learning_rate": 9.988838872478017e-06, | |
| "loss": 0.3382, | |
| "step": 521 | |
| }, | |
| { | |
| "epoch": 0.0508029197080292, | |
| "grad_norm": 1.5693240874435923, | |
| "learning_rate": 9.988733370142598e-06, | |
| "loss": 0.3876, | |
| "step": 522 | |
| }, | |
| { | |
| "epoch": 0.05090024330900243, | |
| "grad_norm": 1.6720738515514542, | |
| "learning_rate": 9.988627372073678e-06, | |
| "loss": 0.448, | |
| "step": 523 | |
| }, | |
| { | |
| "epoch": 0.05099756690997567, | |
| "grad_norm": 2.0438207304961367, | |
| "learning_rate": 9.988520878281787e-06, | |
| "loss": 0.5724, | |
| "step": 524 | |
| }, | |
| { | |
| "epoch": 0.051094890510948905, | |
| "grad_norm": 2.0003463921985456, | |
| "learning_rate": 9.988413888777512e-06, | |
| "loss": 0.4506, | |
| "step": 525 | |
| }, | |
| { | |
| "epoch": 0.05119221411192214, | |
| "grad_norm": 2.11812759304704, | |
| "learning_rate": 9.988306403571482e-06, | |
| "loss": 0.757, | |
| "step": 526 | |
| }, | |
| { | |
| "epoch": 0.05128953771289538, | |
| "grad_norm": 1.5594386055307068, | |
| "learning_rate": 9.98819842267438e-06, | |
| "loss": 0.4145, | |
| "step": 527 | |
| }, | |
| { | |
| "epoch": 0.05138686131386861, | |
| "grad_norm": 1.917978943216931, | |
| "learning_rate": 9.988089946096933e-06, | |
| "loss": 0.5363, | |
| "step": 528 | |
| }, | |
| { | |
| "epoch": 0.05148418491484185, | |
| "grad_norm": 1.3212282063862113, | |
| "learning_rate": 9.987980973849924e-06, | |
| "loss": 0.3132, | |
| "step": 529 | |
| }, | |
| { | |
| "epoch": 0.05158150851581508, | |
| "grad_norm": 1.2285769982465171, | |
| "learning_rate": 9.987871505944177e-06, | |
| "loss": 0.2287, | |
| "step": 530 | |
| }, | |
| { | |
| "epoch": 0.05167883211678832, | |
| "grad_norm": 1.849610792922833, | |
| "learning_rate": 9.987761542390574e-06, | |
| "loss": 0.6487, | |
| "step": 531 | |
| }, | |
| { | |
| "epoch": 0.051776155717761556, | |
| "grad_norm": 1.158461389164102, | |
| "learning_rate": 9.987651083200044e-06, | |
| "loss": 0.2111, | |
| "step": 532 | |
| }, | |
| { | |
| "epoch": 0.051873479318734796, | |
| "grad_norm": 1.8450520976911682, | |
| "learning_rate": 9.987540128383556e-06, | |
| "loss": 0.5579, | |
| "step": 533 | |
| }, | |
| { | |
| "epoch": 0.05197080291970803, | |
| "grad_norm": 1.9047794610871986, | |
| "learning_rate": 9.98742867795214e-06, | |
| "loss": 0.4542, | |
| "step": 534 | |
| }, | |
| { | |
| "epoch": 0.05206812652068127, | |
| "grad_norm": 1.5564676952152843, | |
| "learning_rate": 9.987316731916872e-06, | |
| "loss": 0.4467, | |
| "step": 535 | |
| }, | |
| { | |
| "epoch": 0.0521654501216545, | |
| "grad_norm": 1.403952395827601, | |
| "learning_rate": 9.987204290288876e-06, | |
| "loss": 0.3761, | |
| "step": 536 | |
| }, | |
| { | |
| "epoch": 0.052262773722627734, | |
| "grad_norm": 1.948151749349848, | |
| "learning_rate": 9.987091353079323e-06, | |
| "loss": 0.5782, | |
| "step": 537 | |
| }, | |
| { | |
| "epoch": 0.052360097323600974, | |
| "grad_norm": 1.6211222818460531, | |
| "learning_rate": 9.986977920299437e-06, | |
| "loss": 0.4047, | |
| "step": 538 | |
| }, | |
| { | |
| "epoch": 0.052457420924574207, | |
| "grad_norm": 1.4911900726837217, | |
| "learning_rate": 9.986863991960491e-06, | |
| "loss": 0.3817, | |
| "step": 539 | |
| }, | |
| { | |
| "epoch": 0.052554744525547446, | |
| "grad_norm": 1.530872687739145, | |
| "learning_rate": 9.986749568073804e-06, | |
| "loss": 0.4639, | |
| "step": 540 | |
| }, | |
| { | |
| "epoch": 0.05265206812652068, | |
| "grad_norm": 1.766399180057757, | |
| "learning_rate": 9.986634648650746e-06, | |
| "loss": 0.5132, | |
| "step": 541 | |
| }, | |
| { | |
| "epoch": 0.05274939172749392, | |
| "grad_norm": 1.7318370911583716, | |
| "learning_rate": 9.98651923370274e-06, | |
| "loss": 0.5845, | |
| "step": 542 | |
| }, | |
| { | |
| "epoch": 0.05284671532846715, | |
| "grad_norm": 1.4523428175637472, | |
| "learning_rate": 9.986403323241252e-06, | |
| "loss": 0.3817, | |
| "step": 543 | |
| }, | |
| { | |
| "epoch": 0.05294403892944039, | |
| "grad_norm": 1.3085205057626972, | |
| "learning_rate": 9.9862869172778e-06, | |
| "loss": 0.294, | |
| "step": 544 | |
| }, | |
| { | |
| "epoch": 0.053041362530413624, | |
| "grad_norm": 1.749260064779093, | |
| "learning_rate": 9.986170015823953e-06, | |
| "loss": 0.3885, | |
| "step": 545 | |
| }, | |
| { | |
| "epoch": 0.053138686131386864, | |
| "grad_norm": 1.9224820302612053, | |
| "learning_rate": 9.986052618891326e-06, | |
| "loss": 0.5841, | |
| "step": 546 | |
| }, | |
| { | |
| "epoch": 0.0532360097323601, | |
| "grad_norm": 1.6019594770490224, | |
| "learning_rate": 9.985934726491587e-06, | |
| "loss": 0.5602, | |
| "step": 547 | |
| }, | |
| { | |
| "epoch": 0.05333333333333334, | |
| "grad_norm": 1.63788543125369, | |
| "learning_rate": 9.98581633863645e-06, | |
| "loss": 0.4913, | |
| "step": 548 | |
| }, | |
| { | |
| "epoch": 0.05343065693430657, | |
| "grad_norm": 1.7751230304686407, | |
| "learning_rate": 9.985697455337677e-06, | |
| "loss": 0.4575, | |
| "step": 549 | |
| }, | |
| { | |
| "epoch": 0.0535279805352798, | |
| "grad_norm": 1.4813830287768246, | |
| "learning_rate": 9.985578076607086e-06, | |
| "loss": 0.2811, | |
| "step": 550 | |
| }, | |
| { | |
| "epoch": 0.05362530413625304, | |
| "grad_norm": 1.8047180833743464, | |
| "learning_rate": 9.985458202456534e-06, | |
| "loss": 0.5564, | |
| "step": 551 | |
| }, | |
| { | |
| "epoch": 0.053722627737226275, | |
| "grad_norm": 1.4776771818705197, | |
| "learning_rate": 9.985337832897938e-06, | |
| "loss": 0.2842, | |
| "step": 552 | |
| }, | |
| { | |
| "epoch": 0.053819951338199515, | |
| "grad_norm": 1.800973083472876, | |
| "learning_rate": 9.985216967943256e-06, | |
| "loss": 0.4017, | |
| "step": 553 | |
| }, | |
| { | |
| "epoch": 0.05391727493917275, | |
| "grad_norm": 1.4167019147788764, | |
| "learning_rate": 9.985095607604502e-06, | |
| "loss": 0.2676, | |
| "step": 554 | |
| }, | |
| { | |
| "epoch": 0.05401459854014599, | |
| "grad_norm": 1.462279330828973, | |
| "learning_rate": 9.984973751893732e-06, | |
| "loss": 0.342, | |
| "step": 555 | |
| }, | |
| { | |
| "epoch": 0.05411192214111922, | |
| "grad_norm": 1.7941608662857766, | |
| "learning_rate": 9.984851400823056e-06, | |
| "loss": 0.4851, | |
| "step": 556 | |
| }, | |
| { | |
| "epoch": 0.05420924574209246, | |
| "grad_norm": 1.865163176610701, | |
| "learning_rate": 9.984728554404632e-06, | |
| "loss": 0.5938, | |
| "step": 557 | |
| }, | |
| { | |
| "epoch": 0.05430656934306569, | |
| "grad_norm": 1.9578700904261006, | |
| "learning_rate": 9.984605212650669e-06, | |
| "loss": 0.5846, | |
| "step": 558 | |
| }, | |
| { | |
| "epoch": 0.05440389294403893, | |
| "grad_norm": 1.7615345522382602, | |
| "learning_rate": 9.98448137557342e-06, | |
| "loss": 0.5517, | |
| "step": 559 | |
| }, | |
| { | |
| "epoch": 0.054501216545012166, | |
| "grad_norm": 1.7987507193579173, | |
| "learning_rate": 9.984357043185195e-06, | |
| "loss": 0.4511, | |
| "step": 560 | |
| }, | |
| { | |
| "epoch": 0.0545985401459854, | |
| "grad_norm": 1.8966136067258859, | |
| "learning_rate": 9.984232215498347e-06, | |
| "loss": 0.3339, | |
| "step": 561 | |
| }, | |
| { | |
| "epoch": 0.05469586374695864, | |
| "grad_norm": 1.760439118311743, | |
| "learning_rate": 9.98410689252528e-06, | |
| "loss": 0.4797, | |
| "step": 562 | |
| }, | |
| { | |
| "epoch": 0.05479318734793187, | |
| "grad_norm": 1.7467534741216573, | |
| "learning_rate": 9.983981074278448e-06, | |
| "loss": 0.3854, | |
| "step": 563 | |
| }, | |
| { | |
| "epoch": 0.05489051094890511, | |
| "grad_norm": 1.638747457914032, | |
| "learning_rate": 9.983854760770353e-06, | |
| "loss": 0.3215, | |
| "step": 564 | |
| }, | |
| { | |
| "epoch": 0.054987834549878344, | |
| "grad_norm": 1.565721167011275, | |
| "learning_rate": 9.983727952013546e-06, | |
| "loss": 0.3573, | |
| "step": 565 | |
| }, | |
| { | |
| "epoch": 0.05508515815085158, | |
| "grad_norm": 1.819373023432736, | |
| "learning_rate": 9.98360064802063e-06, | |
| "loss": 0.304, | |
| "step": 566 | |
| }, | |
| { | |
| "epoch": 0.055182481751824816, | |
| "grad_norm": 2.219648367380945, | |
| "learning_rate": 9.983472848804254e-06, | |
| "loss": 0.7398, | |
| "step": 567 | |
| }, | |
| { | |
| "epoch": 0.055279805352798056, | |
| "grad_norm": 1.7935096739228122, | |
| "learning_rate": 9.98334455437712e-06, | |
| "loss": 0.3257, | |
| "step": 568 | |
| }, | |
| { | |
| "epoch": 0.05537712895377129, | |
| "grad_norm": 2.085379879601924, | |
| "learning_rate": 9.983215764751971e-06, | |
| "loss": 0.3477, | |
| "step": 569 | |
| }, | |
| { | |
| "epoch": 0.05547445255474453, | |
| "grad_norm": 1.528881264990704, | |
| "learning_rate": 9.98308647994161e-06, | |
| "loss": 0.4173, | |
| "step": 570 | |
| }, | |
| { | |
| "epoch": 0.05557177615571776, | |
| "grad_norm": 1.282510416609492, | |
| "learning_rate": 9.982956699958883e-06, | |
| "loss": 0.3513, | |
| "step": 571 | |
| }, | |
| { | |
| "epoch": 0.055669099756690994, | |
| "grad_norm": 1.6035600811723405, | |
| "learning_rate": 9.982826424816688e-06, | |
| "loss": 0.3318, | |
| "step": 572 | |
| }, | |
| { | |
| "epoch": 0.055766423357664234, | |
| "grad_norm": 1.9455996381881653, | |
| "learning_rate": 9.982695654527966e-06, | |
| "loss": 0.4991, | |
| "step": 573 | |
| }, | |
| { | |
| "epoch": 0.05586374695863747, | |
| "grad_norm": 1.8397262762514839, | |
| "learning_rate": 9.982564389105714e-06, | |
| "loss": 0.345, | |
| "step": 574 | |
| }, | |
| { | |
| "epoch": 0.05596107055961071, | |
| "grad_norm": 1.7997461351876956, | |
| "learning_rate": 9.982432628562978e-06, | |
| "loss": 0.5384, | |
| "step": 575 | |
| }, | |
| { | |
| "epoch": 0.05605839416058394, | |
| "grad_norm": 1.6246101205121968, | |
| "learning_rate": 9.982300372912848e-06, | |
| "loss": 0.5499, | |
| "step": 576 | |
| }, | |
| { | |
| "epoch": 0.05615571776155718, | |
| "grad_norm": 1.9184631207748861, | |
| "learning_rate": 9.982167622168467e-06, | |
| "loss": 0.449, | |
| "step": 577 | |
| }, | |
| { | |
| "epoch": 0.05625304136253041, | |
| "grad_norm": 1.5368079698239796, | |
| "learning_rate": 9.982034376343029e-06, | |
| "loss": 0.3311, | |
| "step": 578 | |
| }, | |
| { | |
| "epoch": 0.05635036496350365, | |
| "grad_norm": 1.9061539422519105, | |
| "learning_rate": 9.98190063544977e-06, | |
| "loss": 0.4182, | |
| "step": 579 | |
| }, | |
| { | |
| "epoch": 0.056447688564476885, | |
| "grad_norm": 1.6727227174184238, | |
| "learning_rate": 9.981766399501984e-06, | |
| "loss": 0.482, | |
| "step": 580 | |
| }, | |
| { | |
| "epoch": 0.056545012165450124, | |
| "grad_norm": 1.8546055763617424, | |
| "learning_rate": 9.98163166851301e-06, | |
| "loss": 0.5758, | |
| "step": 581 | |
| }, | |
| { | |
| "epoch": 0.05664233576642336, | |
| "grad_norm": 2.0350303098403706, | |
| "learning_rate": 9.981496442496234e-06, | |
| "loss": 0.5236, | |
| "step": 582 | |
| }, | |
| { | |
| "epoch": 0.0567396593673966, | |
| "grad_norm": 1.3907379790284926, | |
| "learning_rate": 9.981360721465095e-06, | |
| "loss": 0.3375, | |
| "step": 583 | |
| }, | |
| { | |
| "epoch": 0.05683698296836983, | |
| "grad_norm": 2.0168702766261486, | |
| "learning_rate": 9.98122450543308e-06, | |
| "loss": 0.595, | |
| "step": 584 | |
| }, | |
| { | |
| "epoch": 0.05693430656934306, | |
| "grad_norm": 1.7248754760467295, | |
| "learning_rate": 9.981087794413722e-06, | |
| "loss": 0.3747, | |
| "step": 585 | |
| }, | |
| { | |
| "epoch": 0.0570316301703163, | |
| "grad_norm": 1.8918865818240052, | |
| "learning_rate": 9.98095058842061e-06, | |
| "loss": 0.5805, | |
| "step": 586 | |
| }, | |
| { | |
| "epoch": 0.057128953771289535, | |
| "grad_norm": 1.8691153689026438, | |
| "learning_rate": 9.980812887467377e-06, | |
| "loss": 0.3451, | |
| "step": 587 | |
| }, | |
| { | |
| "epoch": 0.057226277372262775, | |
| "grad_norm": 1.7475224395533677, | |
| "learning_rate": 9.980674691567705e-06, | |
| "loss": 0.2789, | |
| "step": 588 | |
| }, | |
| { | |
| "epoch": 0.05732360097323601, | |
| "grad_norm": 1.876124489873064, | |
| "learning_rate": 9.980536000735328e-06, | |
| "loss": 0.5917, | |
| "step": 589 | |
| }, | |
| { | |
| "epoch": 0.05742092457420925, | |
| "grad_norm": 1.6438847446693803, | |
| "learning_rate": 9.980396814984025e-06, | |
| "loss": 0.3063, | |
| "step": 590 | |
| }, | |
| { | |
| "epoch": 0.05751824817518248, | |
| "grad_norm": 1.7609146888426583, | |
| "learning_rate": 9.980257134327634e-06, | |
| "loss": 0.4177, | |
| "step": 591 | |
| }, | |
| { | |
| "epoch": 0.05761557177615572, | |
| "grad_norm": 3.1047413099950445, | |
| "learning_rate": 9.980116958780027e-06, | |
| "loss": 0.2793, | |
| "step": 592 | |
| }, | |
| { | |
| "epoch": 0.05771289537712895, | |
| "grad_norm": 1.3365913263494138, | |
| "learning_rate": 9.979976288355137e-06, | |
| "loss": 0.2754, | |
| "step": 593 | |
| }, | |
| { | |
| "epoch": 0.05781021897810219, | |
| "grad_norm": 1.7378721977452198, | |
| "learning_rate": 9.979835123066943e-06, | |
| "loss": 0.4156, | |
| "step": 594 | |
| }, | |
| { | |
| "epoch": 0.057907542579075426, | |
| "grad_norm": 1.7652517953930271, | |
| "learning_rate": 9.979693462929472e-06, | |
| "loss": 0.3768, | |
| "step": 595 | |
| }, | |
| { | |
| "epoch": 0.05800486618004866, | |
| "grad_norm": 2.4155692425963675, | |
| "learning_rate": 9.979551307956801e-06, | |
| "loss": 0.6409, | |
| "step": 596 | |
| }, | |
| { | |
| "epoch": 0.0581021897810219, | |
| "grad_norm": 2.2339995809091913, | |
| "learning_rate": 9.979408658163055e-06, | |
| "loss": 0.3134, | |
| "step": 597 | |
| }, | |
| { | |
| "epoch": 0.05819951338199513, | |
| "grad_norm": 1.9788468018769068, | |
| "learning_rate": 9.97926551356241e-06, | |
| "loss": 0.2509, | |
| "step": 598 | |
| }, | |
| { | |
| "epoch": 0.05829683698296837, | |
| "grad_norm": 4.0668515887714385, | |
| "learning_rate": 9.979121874169091e-06, | |
| "loss": 0.3322, | |
| "step": 599 | |
| }, | |
| { | |
| "epoch": 0.058394160583941604, | |
| "grad_norm": 2.0552497355613264, | |
| "learning_rate": 9.97897773999737e-06, | |
| "loss": 0.2732, | |
| "step": 600 | |
| }, | |
| { | |
| "epoch": 0.058491484184914844, | |
| "grad_norm": 1.7372746328291984, | |
| "learning_rate": 9.978833111061573e-06, | |
| "loss": 0.3021, | |
| "step": 601 | |
| }, | |
| { | |
| "epoch": 0.058588807785888077, | |
| "grad_norm": 1.8426989129405926, | |
| "learning_rate": 9.978687987376067e-06, | |
| "loss": 0.3147, | |
| "step": 602 | |
| }, | |
| { | |
| "epoch": 0.058686131386861316, | |
| "grad_norm": 1.456816302033054, | |
| "learning_rate": 9.978542368955278e-06, | |
| "loss": 0.3669, | |
| "step": 603 | |
| }, | |
| { | |
| "epoch": 0.05878345498783455, | |
| "grad_norm": 2.1398878847147973, | |
| "learning_rate": 9.978396255813672e-06, | |
| "loss": 0.457, | |
| "step": 604 | |
| }, | |
| { | |
| "epoch": 0.05888077858880779, | |
| "grad_norm": 1.860652260495742, | |
| "learning_rate": 9.978249647965769e-06, | |
| "loss": 0.5567, | |
| "step": 605 | |
| }, | |
| { | |
| "epoch": 0.05897810218978102, | |
| "grad_norm": 1.7559525207322428, | |
| "learning_rate": 9.97810254542614e-06, | |
| "loss": 0.439, | |
| "step": 606 | |
| }, | |
| { | |
| "epoch": 0.059075425790754255, | |
| "grad_norm": 1.4912680944094816, | |
| "learning_rate": 9.977954948209402e-06, | |
| "loss": 0.4431, | |
| "step": 607 | |
| }, | |
| { | |
| "epoch": 0.059172749391727494, | |
| "grad_norm": 1.766690700595448, | |
| "learning_rate": 9.97780685633022e-06, | |
| "loss": 0.3187, | |
| "step": 608 | |
| }, | |
| { | |
| "epoch": 0.05927007299270073, | |
| "grad_norm": 2.169180646458804, | |
| "learning_rate": 9.977658269803312e-06, | |
| "loss": 0.5042, | |
| "step": 609 | |
| }, | |
| { | |
| "epoch": 0.05936739659367397, | |
| "grad_norm": 1.623119439845207, | |
| "learning_rate": 9.977509188643441e-06, | |
| "loss": 0.3632, | |
| "step": 610 | |
| }, | |
| { | |
| "epoch": 0.0594647201946472, | |
| "grad_norm": 2.0976883017366226, | |
| "learning_rate": 9.977359612865424e-06, | |
| "loss": 0.6465, | |
| "step": 611 | |
| }, | |
| { | |
| "epoch": 0.05956204379562044, | |
| "grad_norm": 1.59126192242755, | |
| "learning_rate": 9.977209542484123e-06, | |
| "loss": 0.4335, | |
| "step": 612 | |
| }, | |
| { | |
| "epoch": 0.05965936739659367, | |
| "grad_norm": 1.6532378246551842, | |
| "learning_rate": 9.97705897751445e-06, | |
| "loss": 0.3462, | |
| "step": 613 | |
| }, | |
| { | |
| "epoch": 0.05975669099756691, | |
| "grad_norm": 1.6478059833585124, | |
| "learning_rate": 9.976907917971365e-06, | |
| "loss": 0.4063, | |
| "step": 614 | |
| }, | |
| { | |
| "epoch": 0.059854014598540145, | |
| "grad_norm": 1.750559308727237, | |
| "learning_rate": 9.976756363869884e-06, | |
| "loss": 0.5062, | |
| "step": 615 | |
| }, | |
| { | |
| "epoch": 0.059951338199513385, | |
| "grad_norm": 1.6400113365898012, | |
| "learning_rate": 9.976604315225063e-06, | |
| "loss": 0.3699, | |
| "step": 616 | |
| }, | |
| { | |
| "epoch": 0.06004866180048662, | |
| "grad_norm": 1.5449283565959169, | |
| "learning_rate": 9.976451772052013e-06, | |
| "loss": 0.3635, | |
| "step": 617 | |
| }, | |
| { | |
| "epoch": 0.06014598540145986, | |
| "grad_norm": 1.3799772345005799, | |
| "learning_rate": 9.97629873436589e-06, | |
| "loss": 0.2747, | |
| "step": 618 | |
| }, | |
| { | |
| "epoch": 0.06024330900243309, | |
| "grad_norm": 1.9454941262632244, | |
| "learning_rate": 9.976145202181905e-06, | |
| "loss": 0.4963, | |
| "step": 619 | |
| }, | |
| { | |
| "epoch": 0.06034063260340632, | |
| "grad_norm": 1.5274916477255973, | |
| "learning_rate": 9.975991175515311e-06, | |
| "loss": 0.3348, | |
| "step": 620 | |
| }, | |
| { | |
| "epoch": 0.06043795620437956, | |
| "grad_norm": 1.9623540496009142, | |
| "learning_rate": 9.975836654381416e-06, | |
| "loss": 0.5373, | |
| "step": 621 | |
| }, | |
| { | |
| "epoch": 0.060535279805352796, | |
| "grad_norm": 1.4248144765181632, | |
| "learning_rate": 9.975681638795575e-06, | |
| "loss": 0.3137, | |
| "step": 622 | |
| }, | |
| { | |
| "epoch": 0.060632603406326036, | |
| "grad_norm": 1.4366236793713136, | |
| "learning_rate": 9.975526128773192e-06, | |
| "loss": 0.3519, | |
| "step": 623 | |
| }, | |
| { | |
| "epoch": 0.06072992700729927, | |
| "grad_norm": 1.8458441140553945, | |
| "learning_rate": 9.97537012432972e-06, | |
| "loss": 0.3937, | |
| "step": 624 | |
| }, | |
| { | |
| "epoch": 0.06082725060827251, | |
| "grad_norm": 1.868271580826056, | |
| "learning_rate": 9.975213625480658e-06, | |
| "loss": 0.4567, | |
| "step": 625 | |
| }, | |
| { | |
| "epoch": 0.06092457420924574, | |
| "grad_norm": 2.4613001964869223, | |
| "learning_rate": 9.97505663224156e-06, | |
| "loss": 0.5607, | |
| "step": 626 | |
| }, | |
| { | |
| "epoch": 0.06102189781021898, | |
| "grad_norm": 1.6709839772769468, | |
| "learning_rate": 9.974899144628027e-06, | |
| "loss": 0.3233, | |
| "step": 627 | |
| }, | |
| { | |
| "epoch": 0.061119221411192214, | |
| "grad_norm": 1.8046591620263965, | |
| "learning_rate": 9.97474116265571e-06, | |
| "loss": 0.3929, | |
| "step": 628 | |
| }, | |
| { | |
| "epoch": 0.06121654501216545, | |
| "grad_norm": 1.7182161369033975, | |
| "learning_rate": 9.974582686340304e-06, | |
| "loss": 0.3804, | |
| "step": 629 | |
| }, | |
| { | |
| "epoch": 0.061313868613138686, | |
| "grad_norm": 2.435940855169524, | |
| "learning_rate": 9.974423715697558e-06, | |
| "loss": 0.7453, | |
| "step": 630 | |
| }, | |
| { | |
| "epoch": 0.06141119221411192, | |
| "grad_norm": 1.401143104634322, | |
| "learning_rate": 9.974264250743272e-06, | |
| "loss": 0.306, | |
| "step": 631 | |
| }, | |
| { | |
| "epoch": 0.06150851581508516, | |
| "grad_norm": 1.540550326071636, | |
| "learning_rate": 9.97410429149329e-06, | |
| "loss": 0.3582, | |
| "step": 632 | |
| }, | |
| { | |
| "epoch": 0.06160583941605839, | |
| "grad_norm": 4.038520112503673, | |
| "learning_rate": 9.973943837963507e-06, | |
| "loss": 0.2688, | |
| "step": 633 | |
| }, | |
| { | |
| "epoch": 0.06170316301703163, | |
| "grad_norm": 2.032927304778425, | |
| "learning_rate": 9.973782890169867e-06, | |
| "loss": 0.6952, | |
| "step": 634 | |
| }, | |
| { | |
| "epoch": 0.061800486618004864, | |
| "grad_norm": 1.5242884680104736, | |
| "learning_rate": 9.973621448128364e-06, | |
| "loss": 0.3957, | |
| "step": 635 | |
| }, | |
| { | |
| "epoch": 0.061897810218978104, | |
| "grad_norm": 1.599953340803732, | |
| "learning_rate": 9.973459511855042e-06, | |
| "loss": 0.3783, | |
| "step": 636 | |
| }, | |
| { | |
| "epoch": 0.06199513381995134, | |
| "grad_norm": 2.1886899708740697, | |
| "learning_rate": 9.973297081365988e-06, | |
| "loss": 0.5426, | |
| "step": 637 | |
| }, | |
| { | |
| "epoch": 0.06209245742092458, | |
| "grad_norm": 1.363421719809718, | |
| "learning_rate": 9.973134156677349e-06, | |
| "loss": 0.2707, | |
| "step": 638 | |
| }, | |
| { | |
| "epoch": 0.06218978102189781, | |
| "grad_norm": 1.883218491971664, | |
| "learning_rate": 9.972970737805312e-06, | |
| "loss": 0.543, | |
| "step": 639 | |
| }, | |
| { | |
| "epoch": 0.06228710462287105, | |
| "grad_norm": 1.6336178778276322, | |
| "learning_rate": 9.972806824766117e-06, | |
| "loss": 0.4833, | |
| "step": 640 | |
| }, | |
| { | |
| "epoch": 0.06238442822384428, | |
| "grad_norm": 1.74145478719615, | |
| "learning_rate": 9.972642417576049e-06, | |
| "loss": 0.5456, | |
| "step": 641 | |
| }, | |
| { | |
| "epoch": 0.062481751824817515, | |
| "grad_norm": 1.3939447959630629, | |
| "learning_rate": 9.972477516251448e-06, | |
| "loss": 0.2935, | |
| "step": 642 | |
| }, | |
| { | |
| "epoch": 0.06257907542579075, | |
| "grad_norm": 1.9741261661680443, | |
| "learning_rate": 9.9723121208087e-06, | |
| "loss": 0.4377, | |
| "step": 643 | |
| }, | |
| { | |
| "epoch": 0.06267639902676399, | |
| "grad_norm": 2.214700253529172, | |
| "learning_rate": 9.972146231264242e-06, | |
| "loss": 0.6711, | |
| "step": 644 | |
| }, | |
| { | |
| "epoch": 0.06277372262773723, | |
| "grad_norm": 1.7399845992974294, | |
| "learning_rate": 9.971979847634554e-06, | |
| "loss": 0.5327, | |
| "step": 645 | |
| }, | |
| { | |
| "epoch": 0.06287104622871047, | |
| "grad_norm": 1.3552365502663122, | |
| "learning_rate": 9.971812969936174e-06, | |
| "loss": 0.3553, | |
| "step": 646 | |
| }, | |
| { | |
| "epoch": 0.06296836982968369, | |
| "grad_norm": 1.8378075997453163, | |
| "learning_rate": 9.971645598185685e-06, | |
| "loss": 0.3709, | |
| "step": 647 | |
| }, | |
| { | |
| "epoch": 0.06306569343065693, | |
| "grad_norm": 1.7441350204189767, | |
| "learning_rate": 9.971477732399714e-06, | |
| "loss": 0.489, | |
| "step": 648 | |
| }, | |
| { | |
| "epoch": 0.06316301703163017, | |
| "grad_norm": 2.083031963167252, | |
| "learning_rate": 9.971309372594947e-06, | |
| "loss": 0.6196, | |
| "step": 649 | |
| }, | |
| { | |
| "epoch": 0.06326034063260341, | |
| "grad_norm": 1.5678236487001533, | |
| "learning_rate": 9.971140518788112e-06, | |
| "loss": 0.3202, | |
| "step": 650 | |
| }, | |
| { | |
| "epoch": 0.06335766423357664, | |
| "grad_norm": 1.7281008810115812, | |
| "learning_rate": 9.970971170995988e-06, | |
| "loss": 0.4169, | |
| "step": 651 | |
| }, | |
| { | |
| "epoch": 0.06345498783454988, | |
| "grad_norm": 1.5626981990993993, | |
| "learning_rate": 9.970801329235402e-06, | |
| "loss": 0.4238, | |
| "step": 652 | |
| }, | |
| { | |
| "epoch": 0.06355231143552312, | |
| "grad_norm": 1.5338214380715702, | |
| "learning_rate": 9.970630993523234e-06, | |
| "loss": 0.278, | |
| "step": 653 | |
| }, | |
| { | |
| "epoch": 0.06364963503649634, | |
| "grad_norm": 1.7806299033721755, | |
| "learning_rate": 9.970460163876409e-06, | |
| "loss": 0.5649, | |
| "step": 654 | |
| }, | |
| { | |
| "epoch": 0.06374695863746958, | |
| "grad_norm": 1.9349681554929028, | |
| "learning_rate": 9.9702888403119e-06, | |
| "loss": 0.3297, | |
| "step": 655 | |
| }, | |
| { | |
| "epoch": 0.06384428223844282, | |
| "grad_norm": 1.4947723050696704, | |
| "learning_rate": 9.970117022846736e-06, | |
| "loss": 0.4077, | |
| "step": 656 | |
| }, | |
| { | |
| "epoch": 0.06394160583941606, | |
| "grad_norm": 1.5696774237596223, | |
| "learning_rate": 9.96994471149799e-06, | |
| "loss": 0.4681, | |
| "step": 657 | |
| }, | |
| { | |
| "epoch": 0.06403892944038929, | |
| "grad_norm": 1.7662095984112474, | |
| "learning_rate": 9.969771906282781e-06, | |
| "loss": 0.539, | |
| "step": 658 | |
| }, | |
| { | |
| "epoch": 0.06413625304136253, | |
| "grad_norm": 2.926336951253308, | |
| "learning_rate": 9.969598607218285e-06, | |
| "loss": 0.4196, | |
| "step": 659 | |
| }, | |
| { | |
| "epoch": 0.06423357664233577, | |
| "grad_norm": 3.148192138198314, | |
| "learning_rate": 9.96942481432172e-06, | |
| "loss": 0.4827, | |
| "step": 660 | |
| }, | |
| { | |
| "epoch": 0.06433090024330901, | |
| "grad_norm": 1.790436662552377, | |
| "learning_rate": 9.969250527610356e-06, | |
| "loss": 0.4972, | |
| "step": 661 | |
| }, | |
| { | |
| "epoch": 0.06442822384428223, | |
| "grad_norm": 1.4712739725679773, | |
| "learning_rate": 9.969075747101514e-06, | |
| "loss": 0.4112, | |
| "step": 662 | |
| }, | |
| { | |
| "epoch": 0.06452554744525547, | |
| "grad_norm": 1.4521996617982842, | |
| "learning_rate": 9.96890047281256e-06, | |
| "loss": 0.3729, | |
| "step": 663 | |
| }, | |
| { | |
| "epoch": 0.06462287104622871, | |
| "grad_norm": 1.5457088814513262, | |
| "learning_rate": 9.96872470476091e-06, | |
| "loss": 0.4294, | |
| "step": 664 | |
| }, | |
| { | |
| "epoch": 0.06472019464720194, | |
| "grad_norm": 1.7644033340951866, | |
| "learning_rate": 9.968548442964034e-06, | |
| "loss": 0.4487, | |
| "step": 665 | |
| }, | |
| { | |
| "epoch": 0.06481751824817518, | |
| "grad_norm": 1.632555708701406, | |
| "learning_rate": 9.968371687439446e-06, | |
| "loss": 0.3929, | |
| "step": 666 | |
| }, | |
| { | |
| "epoch": 0.06491484184914842, | |
| "grad_norm": 1.8990302396780172, | |
| "learning_rate": 9.968194438204708e-06, | |
| "loss": 0.4101, | |
| "step": 667 | |
| }, | |
| { | |
| "epoch": 0.06501216545012166, | |
| "grad_norm": 2.092762728551112, | |
| "learning_rate": 9.968016695277436e-06, | |
| "loss": 0.5712, | |
| "step": 668 | |
| }, | |
| { | |
| "epoch": 0.06510948905109488, | |
| "grad_norm": 1.5876668887386824, | |
| "learning_rate": 9.967838458675292e-06, | |
| "loss": 0.494, | |
| "step": 669 | |
| }, | |
| { | |
| "epoch": 0.06520681265206812, | |
| "grad_norm": 1.7536517597940893, | |
| "learning_rate": 9.967659728415985e-06, | |
| "loss": 0.6121, | |
| "step": 670 | |
| }, | |
| { | |
| "epoch": 0.06530413625304136, | |
| "grad_norm": 1.9021294255711243, | |
| "learning_rate": 9.96748050451728e-06, | |
| "loss": 0.3634, | |
| "step": 671 | |
| }, | |
| { | |
| "epoch": 0.0654014598540146, | |
| "grad_norm": 1.4457078547633553, | |
| "learning_rate": 9.96730078699698e-06, | |
| "loss": 0.4586, | |
| "step": 672 | |
| }, | |
| { | |
| "epoch": 0.06549878345498783, | |
| "grad_norm": 1.6474950184261972, | |
| "learning_rate": 9.967120575872952e-06, | |
| "loss": 0.5028, | |
| "step": 673 | |
| }, | |
| { | |
| "epoch": 0.06559610705596107, | |
| "grad_norm": 1.9901979572232373, | |
| "learning_rate": 9.966939871163098e-06, | |
| "loss": 0.6986, | |
| "step": 674 | |
| }, | |
| { | |
| "epoch": 0.06569343065693431, | |
| "grad_norm": 1.3671458210722949, | |
| "learning_rate": 9.966758672885375e-06, | |
| "loss": 0.3945, | |
| "step": 675 | |
| }, | |
| { | |
| "epoch": 0.06579075425790755, | |
| "grad_norm": 1.8371332697903162, | |
| "learning_rate": 9.96657698105779e-06, | |
| "loss": 0.6782, | |
| "step": 676 | |
| }, | |
| { | |
| "epoch": 0.06588807785888078, | |
| "grad_norm": 1.1955013749239556, | |
| "learning_rate": 9.966394795698397e-06, | |
| "loss": 0.242, | |
| "step": 677 | |
| }, | |
| { | |
| "epoch": 0.06598540145985402, | |
| "grad_norm": 1.5330975344313047, | |
| "learning_rate": 9.966212116825302e-06, | |
| "loss": 0.4351, | |
| "step": 678 | |
| }, | |
| { | |
| "epoch": 0.06608272506082725, | |
| "grad_norm": 1.539581985713935, | |
| "learning_rate": 9.966028944456657e-06, | |
| "loss": 0.3512, | |
| "step": 679 | |
| }, | |
| { | |
| "epoch": 0.06618004866180048, | |
| "grad_norm": 1.9573455375443363, | |
| "learning_rate": 9.965845278610661e-06, | |
| "loss": 0.4859, | |
| "step": 680 | |
| }, | |
| { | |
| "epoch": 0.06627737226277372, | |
| "grad_norm": 1.8387055004344444, | |
| "learning_rate": 9.96566111930557e-06, | |
| "loss": 0.3831, | |
| "step": 681 | |
| }, | |
| { | |
| "epoch": 0.06637469586374696, | |
| "grad_norm": 1.7056154014174738, | |
| "learning_rate": 9.96547646655968e-06, | |
| "loss": 0.4675, | |
| "step": 682 | |
| }, | |
| { | |
| "epoch": 0.0664720194647202, | |
| "grad_norm": 1.881602931580563, | |
| "learning_rate": 9.965291320391342e-06, | |
| "loss": 0.5955, | |
| "step": 683 | |
| }, | |
| { | |
| "epoch": 0.06656934306569343, | |
| "grad_norm": 2.9885065529853416, | |
| "learning_rate": 9.965105680818955e-06, | |
| "loss": 0.393, | |
| "step": 684 | |
| }, | |
| { | |
| "epoch": 0.06666666666666667, | |
| "grad_norm": 1.7363492709096229, | |
| "learning_rate": 9.964919547860963e-06, | |
| "loss": 0.4903, | |
| "step": 685 | |
| }, | |
| { | |
| "epoch": 0.0667639902676399, | |
| "grad_norm": 1.8182376939684146, | |
| "learning_rate": 9.964732921535863e-06, | |
| "loss": 0.5443, | |
| "step": 686 | |
| }, | |
| { | |
| "epoch": 0.06686131386861315, | |
| "grad_norm": 1.6914779965026407, | |
| "learning_rate": 9.964545801862202e-06, | |
| "loss": 0.5119, | |
| "step": 687 | |
| }, | |
| { | |
| "epoch": 0.06695863746958637, | |
| "grad_norm": 1.2736843314571082, | |
| "learning_rate": 9.964358188858573e-06, | |
| "loss": 0.2495, | |
| "step": 688 | |
| }, | |
| { | |
| "epoch": 0.06705596107055961, | |
| "grad_norm": 1.5831736266585599, | |
| "learning_rate": 9.96417008254362e-06, | |
| "loss": 0.4489, | |
| "step": 689 | |
| }, | |
| { | |
| "epoch": 0.06715328467153285, | |
| "grad_norm": 2.2148297560046224, | |
| "learning_rate": 9.963981482936034e-06, | |
| "loss": 0.5415, | |
| "step": 690 | |
| }, | |
| { | |
| "epoch": 0.06725060827250608, | |
| "grad_norm": 1.5025934211262992, | |
| "learning_rate": 9.963792390054558e-06, | |
| "loss": 0.3903, | |
| "step": 691 | |
| }, | |
| { | |
| "epoch": 0.06734793187347932, | |
| "grad_norm": 1.4602374679322867, | |
| "learning_rate": 9.96360280391798e-06, | |
| "loss": 0.3199, | |
| "step": 692 | |
| }, | |
| { | |
| "epoch": 0.06744525547445256, | |
| "grad_norm": 1.5813416284844282, | |
| "learning_rate": 9.963412724545142e-06, | |
| "loss": 0.3213, | |
| "step": 693 | |
| }, | |
| { | |
| "epoch": 0.0675425790754258, | |
| "grad_norm": 1.246883512769049, | |
| "learning_rate": 9.96322215195493e-06, | |
| "loss": 0.2644, | |
| "step": 694 | |
| }, | |
| { | |
| "epoch": 0.06763990267639902, | |
| "grad_norm": 1.7094335347253355, | |
| "learning_rate": 9.963031086166282e-06, | |
| "loss": 0.4761, | |
| "step": 695 | |
| }, | |
| { | |
| "epoch": 0.06773722627737226, | |
| "grad_norm": 1.6516611118524773, | |
| "learning_rate": 9.962839527198184e-06, | |
| "loss": 0.4823, | |
| "step": 696 | |
| }, | |
| { | |
| "epoch": 0.0678345498783455, | |
| "grad_norm": 1.3531669839243998, | |
| "learning_rate": 9.962647475069672e-06, | |
| "loss": 0.4272, | |
| "step": 697 | |
| }, | |
| { | |
| "epoch": 0.06793187347931874, | |
| "grad_norm": 1.9430916606586504, | |
| "learning_rate": 9.962454929799829e-06, | |
| "loss": 0.5776, | |
| "step": 698 | |
| }, | |
| { | |
| "epoch": 0.06802919708029197, | |
| "grad_norm": 1.8772536403383466, | |
| "learning_rate": 9.962261891407792e-06, | |
| "loss": 0.6338, | |
| "step": 699 | |
| }, | |
| { | |
| "epoch": 0.0681265206812652, | |
| "grad_norm": 1.3972932620324034, | |
| "learning_rate": 9.96206835991274e-06, | |
| "loss": 0.3671, | |
| "step": 700 | |
| }, | |
| { | |
| "epoch": 0.06822384428223845, | |
| "grad_norm": 1.287329601381866, | |
| "learning_rate": 9.961874335333904e-06, | |
| "loss": 0.2744, | |
| "step": 701 | |
| }, | |
| { | |
| "epoch": 0.06832116788321167, | |
| "grad_norm": 1.5600519457751545, | |
| "learning_rate": 9.961679817690566e-06, | |
| "loss": 0.4433, | |
| "step": 702 | |
| }, | |
| { | |
| "epoch": 0.06841849148418491, | |
| "grad_norm": 1.3898736874388666, | |
| "learning_rate": 9.961484807002056e-06, | |
| "loss": 0.4197, | |
| "step": 703 | |
| }, | |
| { | |
| "epoch": 0.06851581508515815, | |
| "grad_norm": 1.672202746628868, | |
| "learning_rate": 9.961289303287749e-06, | |
| "loss": 0.4601, | |
| "step": 704 | |
| }, | |
| { | |
| "epoch": 0.06861313868613139, | |
| "grad_norm": 1.7427655274680753, | |
| "learning_rate": 9.961093306567076e-06, | |
| "loss": 0.5845, | |
| "step": 705 | |
| }, | |
| { | |
| "epoch": 0.06871046228710462, | |
| "grad_norm": 1.794570108008766, | |
| "learning_rate": 9.960896816859512e-06, | |
| "loss": 0.3459, | |
| "step": 706 | |
| }, | |
| { | |
| "epoch": 0.06880778588807786, | |
| "grad_norm": 1.6024314197975584, | |
| "learning_rate": 9.960699834184582e-06, | |
| "loss": 0.4441, | |
| "step": 707 | |
| }, | |
| { | |
| "epoch": 0.0689051094890511, | |
| "grad_norm": 1.619306935418848, | |
| "learning_rate": 9.960502358561858e-06, | |
| "loss": 0.4647, | |
| "step": 708 | |
| }, | |
| { | |
| "epoch": 0.06900243309002434, | |
| "grad_norm": 1.5009190604836247, | |
| "learning_rate": 9.960304390010968e-06, | |
| "loss": 0.373, | |
| "step": 709 | |
| }, | |
| { | |
| "epoch": 0.06909975669099756, | |
| "grad_norm": 1.8613999824223078, | |
| "learning_rate": 9.960105928551583e-06, | |
| "loss": 0.3926, | |
| "step": 710 | |
| }, | |
| { | |
| "epoch": 0.0691970802919708, | |
| "grad_norm": 2.8907340364253757, | |
| "learning_rate": 9.959906974203422e-06, | |
| "loss": 0.5451, | |
| "step": 711 | |
| }, | |
| { | |
| "epoch": 0.06929440389294404, | |
| "grad_norm": 1.826374356881247, | |
| "learning_rate": 9.959707526986256e-06, | |
| "loss": 0.4341, | |
| "step": 712 | |
| }, | |
| { | |
| "epoch": 0.06939172749391727, | |
| "grad_norm": 2.5001373253299133, | |
| "learning_rate": 9.959507586919903e-06, | |
| "loss": 0.6643, | |
| "step": 713 | |
| }, | |
| { | |
| "epoch": 0.06948905109489051, | |
| "grad_norm": 1.769427365923108, | |
| "learning_rate": 9.959307154024234e-06, | |
| "loss": 0.5431, | |
| "step": 714 | |
| }, | |
| { | |
| "epoch": 0.06958637469586375, | |
| "grad_norm": 2.3285358245695322, | |
| "learning_rate": 9.959106228319166e-06, | |
| "loss": 0.5274, | |
| "step": 715 | |
| }, | |
| { | |
| "epoch": 0.06968369829683699, | |
| "grad_norm": 1.4070234926508725, | |
| "learning_rate": 9.958904809824663e-06, | |
| "loss": 0.3257, | |
| "step": 716 | |
| }, | |
| { | |
| "epoch": 0.06978102189781021, | |
| "grad_norm": 1.9284568290872997, | |
| "learning_rate": 9.958702898560742e-06, | |
| "loss": 0.5648, | |
| "step": 717 | |
| }, | |
| { | |
| "epoch": 0.06987834549878345, | |
| "grad_norm": 2.092543866644565, | |
| "learning_rate": 9.958500494547465e-06, | |
| "loss": 0.6256, | |
| "step": 718 | |
| }, | |
| { | |
| "epoch": 0.0699756690997567, | |
| "grad_norm": 1.5948763588365042, | |
| "learning_rate": 9.958297597804947e-06, | |
| "loss": 0.4011, | |
| "step": 719 | |
| }, | |
| { | |
| "epoch": 0.07007299270072993, | |
| "grad_norm": 1.2246362905267065, | |
| "learning_rate": 9.958094208353348e-06, | |
| "loss": 0.2444, | |
| "step": 720 | |
| }, | |
| { | |
| "epoch": 0.07017031630170316, | |
| "grad_norm": 1.2302916868666773, | |
| "learning_rate": 9.95789032621288e-06, | |
| "loss": 0.3191, | |
| "step": 721 | |
| }, | |
| { | |
| "epoch": 0.0702676399026764, | |
| "grad_norm": 1.5504396768673763, | |
| "learning_rate": 9.957685951403803e-06, | |
| "loss": 0.3112, | |
| "step": 722 | |
| }, | |
| { | |
| "epoch": 0.07036496350364964, | |
| "grad_norm": 2.1205819146422438, | |
| "learning_rate": 9.957481083946427e-06, | |
| "loss": 0.3453, | |
| "step": 723 | |
| }, | |
| { | |
| "epoch": 0.07046228710462286, | |
| "grad_norm": 2.048519725880563, | |
| "learning_rate": 9.957275723861108e-06, | |
| "loss": 0.5266, | |
| "step": 724 | |
| }, | |
| { | |
| "epoch": 0.0705596107055961, | |
| "grad_norm": 1.4453693275620771, | |
| "learning_rate": 9.957069871168253e-06, | |
| "loss": 0.3082, | |
| "step": 725 | |
| }, | |
| { | |
| "epoch": 0.07065693430656934, | |
| "grad_norm": 1.8824931146868138, | |
| "learning_rate": 9.956863525888318e-06, | |
| "loss": 0.588, | |
| "step": 726 | |
| }, | |
| { | |
| "epoch": 0.07075425790754258, | |
| "grad_norm": 1.6143333569692804, | |
| "learning_rate": 9.956656688041807e-06, | |
| "loss": 0.4126, | |
| "step": 727 | |
| }, | |
| { | |
| "epoch": 0.07085158150851581, | |
| "grad_norm": 1.7905307392122496, | |
| "learning_rate": 9.956449357649276e-06, | |
| "loss": 0.521, | |
| "step": 728 | |
| }, | |
| { | |
| "epoch": 0.07094890510948905, | |
| "grad_norm": 1.3295021098228834, | |
| "learning_rate": 9.956241534731325e-06, | |
| "loss": 0.31, | |
| "step": 729 | |
| }, | |
| { | |
| "epoch": 0.07104622871046229, | |
| "grad_norm": 1.5783278835300563, | |
| "learning_rate": 9.956033219308607e-06, | |
| "loss": 0.3091, | |
| "step": 730 | |
| }, | |
| { | |
| "epoch": 0.07114355231143553, | |
| "grad_norm": 1.9905003004076265, | |
| "learning_rate": 9.955824411401822e-06, | |
| "loss": 0.3843, | |
| "step": 731 | |
| }, | |
| { | |
| "epoch": 0.07124087591240875, | |
| "grad_norm": 1.7644558301646922, | |
| "learning_rate": 9.955615111031717e-06, | |
| "loss": 0.4288, | |
| "step": 732 | |
| }, | |
| { | |
| "epoch": 0.071338199513382, | |
| "grad_norm": 1.5922207695027908, | |
| "learning_rate": 9.955405318219096e-06, | |
| "loss": 0.4767, | |
| "step": 733 | |
| }, | |
| { | |
| "epoch": 0.07143552311435523, | |
| "grad_norm": 1.7054240956141933, | |
| "learning_rate": 9.955195032984798e-06, | |
| "loss": 0.4082, | |
| "step": 734 | |
| }, | |
| { | |
| "epoch": 0.07153284671532846, | |
| "grad_norm": 1.3954970063738148, | |
| "learning_rate": 9.954984255349729e-06, | |
| "loss": 0.318, | |
| "step": 735 | |
| }, | |
| { | |
| "epoch": 0.0716301703163017, | |
| "grad_norm": 1.7287069268697828, | |
| "learning_rate": 9.954772985334825e-06, | |
| "loss": 0.4998, | |
| "step": 736 | |
| }, | |
| { | |
| "epoch": 0.07172749391727494, | |
| "grad_norm": 1.4535895804720915, | |
| "learning_rate": 9.954561222961086e-06, | |
| "loss": 0.2489, | |
| "step": 737 | |
| }, | |
| { | |
| "epoch": 0.07182481751824818, | |
| "grad_norm": 1.7113518757446542, | |
| "learning_rate": 9.954348968249552e-06, | |
| "loss": 0.4578, | |
| "step": 738 | |
| }, | |
| { | |
| "epoch": 0.0719221411192214, | |
| "grad_norm": 1.6741613993254088, | |
| "learning_rate": 9.954136221221316e-06, | |
| "loss": 0.4907, | |
| "step": 739 | |
| }, | |
| { | |
| "epoch": 0.07201946472019465, | |
| "grad_norm": 1.590982465166657, | |
| "learning_rate": 9.95392298189752e-06, | |
| "loss": 0.4116, | |
| "step": 740 | |
| }, | |
| { | |
| "epoch": 0.07211678832116789, | |
| "grad_norm": 1.422974716648181, | |
| "learning_rate": 9.953709250299351e-06, | |
| "loss": 0.3501, | |
| "step": 741 | |
| }, | |
| { | |
| "epoch": 0.07221411192214112, | |
| "grad_norm": 1.8424007198547667, | |
| "learning_rate": 9.953495026448048e-06, | |
| "loss": 0.5647, | |
| "step": 742 | |
| }, | |
| { | |
| "epoch": 0.07231143552311435, | |
| "grad_norm": 1.6572484299897867, | |
| "learning_rate": 9.953280310364902e-06, | |
| "loss": 0.3937, | |
| "step": 743 | |
| }, | |
| { | |
| "epoch": 0.07240875912408759, | |
| "grad_norm": 1.6027770112754065, | |
| "learning_rate": 9.953065102071245e-06, | |
| "loss": 0.3845, | |
| "step": 744 | |
| }, | |
| { | |
| "epoch": 0.07250608272506083, | |
| "grad_norm": 1.3618658637431431, | |
| "learning_rate": 9.952849401588464e-06, | |
| "loss": 0.3946, | |
| "step": 745 | |
| }, | |
| { | |
| "epoch": 0.07260340632603407, | |
| "grad_norm": 1.63075572158439, | |
| "learning_rate": 9.952633208937997e-06, | |
| "loss": 0.4506, | |
| "step": 746 | |
| }, | |
| { | |
| "epoch": 0.0727007299270073, | |
| "grad_norm": 1.483187632244976, | |
| "learning_rate": 9.95241652414132e-06, | |
| "loss": 0.3908, | |
| "step": 747 | |
| }, | |
| { | |
| "epoch": 0.07279805352798054, | |
| "grad_norm": 2.147960263046311, | |
| "learning_rate": 9.952199347219972e-06, | |
| "loss": 0.5249, | |
| "step": 748 | |
| }, | |
| { | |
| "epoch": 0.07289537712895378, | |
| "grad_norm": 1.5046941105429004, | |
| "learning_rate": 9.951981678195529e-06, | |
| "loss": 0.3592, | |
| "step": 749 | |
| }, | |
| { | |
| "epoch": 0.072992700729927, | |
| "grad_norm": 1.1457618113072725, | |
| "learning_rate": 9.951763517089624e-06, | |
| "loss": 0.2197, | |
| "step": 750 | |
| }, | |
| { | |
| "epoch": 0.07309002433090024, | |
| "grad_norm": 1.9275946136488011, | |
| "learning_rate": 9.951544863923934e-06, | |
| "loss": 0.5692, | |
| "step": 751 | |
| }, | |
| { | |
| "epoch": 0.07318734793187348, | |
| "grad_norm": 1.9590929330277462, | |
| "learning_rate": 9.95132571872019e-06, | |
| "loss": 0.7243, | |
| "step": 752 | |
| }, | |
| { | |
| "epoch": 0.07328467153284672, | |
| "grad_norm": 2.1368780826391283, | |
| "learning_rate": 9.951106081500162e-06, | |
| "loss": 0.7601, | |
| "step": 753 | |
| }, | |
| { | |
| "epoch": 0.07338199513381995, | |
| "grad_norm": 2.0085695969306396, | |
| "learning_rate": 9.950885952285682e-06, | |
| "loss": 0.5541, | |
| "step": 754 | |
| }, | |
| { | |
| "epoch": 0.07347931873479319, | |
| "grad_norm": 1.9283983503616706, | |
| "learning_rate": 9.950665331098622e-06, | |
| "loss": 0.3832, | |
| "step": 755 | |
| }, | |
| { | |
| "epoch": 0.07357664233576643, | |
| "grad_norm": 1.4173732379297153, | |
| "learning_rate": 9.950444217960902e-06, | |
| "loss": 0.379, | |
| "step": 756 | |
| }, | |
| { | |
| "epoch": 0.07367396593673967, | |
| "grad_norm": 1.5015176407129935, | |
| "learning_rate": 9.9502226128945e-06, | |
| "loss": 0.4696, | |
| "step": 757 | |
| }, | |
| { | |
| "epoch": 0.07377128953771289, | |
| "grad_norm": 1.6746905852394565, | |
| "learning_rate": 9.950000515921434e-06, | |
| "loss": 0.2984, | |
| "step": 758 | |
| }, | |
| { | |
| "epoch": 0.07386861313868613, | |
| "grad_norm": 1.4429847737048944, | |
| "learning_rate": 9.949777927063776e-06, | |
| "loss": 0.3748, | |
| "step": 759 | |
| }, | |
| { | |
| "epoch": 0.07396593673965937, | |
| "grad_norm": 1.1895632638034424, | |
| "learning_rate": 9.94955484634364e-06, | |
| "loss": 0.3014, | |
| "step": 760 | |
| }, | |
| { | |
| "epoch": 0.0740632603406326, | |
| "grad_norm": 1.5497241513071458, | |
| "learning_rate": 9.949331273783198e-06, | |
| "loss": 0.5458, | |
| "step": 761 | |
| }, | |
| { | |
| "epoch": 0.07416058394160584, | |
| "grad_norm": 1.5531214201672936, | |
| "learning_rate": 9.949107209404664e-06, | |
| "loss": 0.4575, | |
| "step": 762 | |
| }, | |
| { | |
| "epoch": 0.07425790754257908, | |
| "grad_norm": 1.3336107839559097, | |
| "learning_rate": 9.948882653230306e-06, | |
| "loss": 0.4227, | |
| "step": 763 | |
| }, | |
| { | |
| "epoch": 0.07435523114355232, | |
| "grad_norm": 1.7418209768074853, | |
| "learning_rate": 9.948657605282437e-06, | |
| "loss": 0.659, | |
| "step": 764 | |
| }, | |
| { | |
| "epoch": 0.07445255474452554, | |
| "grad_norm": 1.462439433090815, | |
| "learning_rate": 9.94843206558342e-06, | |
| "loss": 0.445, | |
| "step": 765 | |
| }, | |
| { | |
| "epoch": 0.07454987834549878, | |
| "grad_norm": 1.0856086178050317, | |
| "learning_rate": 9.948206034155666e-06, | |
| "loss": 0.2245, | |
| "step": 766 | |
| }, | |
| { | |
| "epoch": 0.07464720194647202, | |
| "grad_norm": 1.458503858496447, | |
| "learning_rate": 9.947979511021638e-06, | |
| "loss": 0.3009, | |
| "step": 767 | |
| }, | |
| { | |
| "epoch": 0.07474452554744526, | |
| "grad_norm": 1.1921292471996519, | |
| "learning_rate": 9.947752496203844e-06, | |
| "loss": 0.2988, | |
| "step": 768 | |
| }, | |
| { | |
| "epoch": 0.07484184914841849, | |
| "grad_norm": 1.6693024138876786, | |
| "learning_rate": 9.947524989724844e-06, | |
| "loss": 0.4783, | |
| "step": 769 | |
| }, | |
| { | |
| "epoch": 0.07493917274939173, | |
| "grad_norm": 1.4928671202909605, | |
| "learning_rate": 9.947296991607244e-06, | |
| "loss": 0.4161, | |
| "step": 770 | |
| }, | |
| { | |
| "epoch": 0.07503649635036497, | |
| "grad_norm": 1.4549005796935413, | |
| "learning_rate": 9.947068501873702e-06, | |
| "loss": 0.4186, | |
| "step": 771 | |
| }, | |
| { | |
| "epoch": 0.0751338199513382, | |
| "grad_norm": 1.7544781744298734, | |
| "learning_rate": 9.946839520546923e-06, | |
| "loss": 0.5593, | |
| "step": 772 | |
| }, | |
| { | |
| "epoch": 0.07523114355231143, | |
| "grad_norm": 1.561541454027553, | |
| "learning_rate": 9.946610047649659e-06, | |
| "loss": 0.5097, | |
| "step": 773 | |
| }, | |
| { | |
| "epoch": 0.07532846715328467, | |
| "grad_norm": 1.598616630831168, | |
| "learning_rate": 9.946380083204714e-06, | |
| "loss": 0.3744, | |
| "step": 774 | |
| }, | |
| { | |
| "epoch": 0.07542579075425791, | |
| "grad_norm": 1.6915556597188157, | |
| "learning_rate": 9.94614962723494e-06, | |
| "loss": 0.439, | |
| "step": 775 | |
| }, | |
| { | |
| "epoch": 0.07552311435523114, | |
| "grad_norm": 1.220024420697048, | |
| "learning_rate": 9.945918679763237e-06, | |
| "loss": 0.2339, | |
| "step": 776 | |
| }, | |
| { | |
| "epoch": 0.07562043795620438, | |
| "grad_norm": 1.6061445238682988, | |
| "learning_rate": 9.945687240812556e-06, | |
| "loss": 0.4493, | |
| "step": 777 | |
| }, | |
| { | |
| "epoch": 0.07571776155717762, | |
| "grad_norm": 1.400813806243779, | |
| "learning_rate": 9.945455310405895e-06, | |
| "loss": 0.4513, | |
| "step": 778 | |
| }, | |
| { | |
| "epoch": 0.07581508515815086, | |
| "grad_norm": 1.753751480308555, | |
| "learning_rate": 9.945222888566298e-06, | |
| "loss": 0.5379, | |
| "step": 779 | |
| }, | |
| { | |
| "epoch": 0.07591240875912408, | |
| "grad_norm": 1.4421667558329163, | |
| "learning_rate": 9.944989975316862e-06, | |
| "loss": 0.4118, | |
| "step": 780 | |
| }, | |
| { | |
| "epoch": 0.07600973236009732, | |
| "grad_norm": 1.4411974086247974, | |
| "learning_rate": 9.944756570680733e-06, | |
| "loss": 0.3295, | |
| "step": 781 | |
| }, | |
| { | |
| "epoch": 0.07610705596107056, | |
| "grad_norm": 1.5545586767450623, | |
| "learning_rate": 9.944522674681107e-06, | |
| "loss": 0.4146, | |
| "step": 782 | |
| }, | |
| { | |
| "epoch": 0.07620437956204379, | |
| "grad_norm": 2.0019900434858084, | |
| "learning_rate": 9.944288287341222e-06, | |
| "loss": 0.4945, | |
| "step": 783 | |
| }, | |
| { | |
| "epoch": 0.07630170316301703, | |
| "grad_norm": 1.5834930071710975, | |
| "learning_rate": 9.944053408684371e-06, | |
| "loss": 0.3781, | |
| "step": 784 | |
| }, | |
| { | |
| "epoch": 0.07639902676399027, | |
| "grad_norm": 1.5272521164667598, | |
| "learning_rate": 9.943818038733894e-06, | |
| "loss": 0.3865, | |
| "step": 785 | |
| }, | |
| { | |
| "epoch": 0.07649635036496351, | |
| "grad_norm": 1.8005925077547513, | |
| "learning_rate": 9.94358217751318e-06, | |
| "loss": 0.3951, | |
| "step": 786 | |
| }, | |
| { | |
| "epoch": 0.07659367396593673, | |
| "grad_norm": 2.0471085276865995, | |
| "learning_rate": 9.943345825045664e-06, | |
| "loss": 0.6391, | |
| "step": 787 | |
| }, | |
| { | |
| "epoch": 0.07669099756690997, | |
| "grad_norm": 1.7893386028077656, | |
| "learning_rate": 9.943108981354839e-06, | |
| "loss": 0.6373, | |
| "step": 788 | |
| }, | |
| { | |
| "epoch": 0.07678832116788321, | |
| "grad_norm": 1.6529186502183046, | |
| "learning_rate": 9.942871646464234e-06, | |
| "loss": 0.4901, | |
| "step": 789 | |
| }, | |
| { | |
| "epoch": 0.07688564476885645, | |
| "grad_norm": 1.8449837387732961, | |
| "learning_rate": 9.942633820397436e-06, | |
| "loss": 0.4444, | |
| "step": 790 | |
| }, | |
| { | |
| "epoch": 0.07698296836982968, | |
| "grad_norm": 1.5278738521461448, | |
| "learning_rate": 9.942395503178077e-06, | |
| "loss": 0.3701, | |
| "step": 791 | |
| }, | |
| { | |
| "epoch": 0.07708029197080292, | |
| "grad_norm": 1.8197808533034088, | |
| "learning_rate": 9.942156694829838e-06, | |
| "loss": 0.6142, | |
| "step": 792 | |
| }, | |
| { | |
| "epoch": 0.07717761557177616, | |
| "grad_norm": 1.8496691201700692, | |
| "learning_rate": 9.941917395376452e-06, | |
| "loss": 0.2021, | |
| "step": 793 | |
| }, | |
| { | |
| "epoch": 0.07727493917274939, | |
| "grad_norm": 1.8762664332677217, | |
| "learning_rate": 9.941677604841696e-06, | |
| "loss": 0.6742, | |
| "step": 794 | |
| }, | |
| { | |
| "epoch": 0.07737226277372262, | |
| "grad_norm": 1.5933514264940258, | |
| "learning_rate": 9.9414373232494e-06, | |
| "loss": 0.5156, | |
| "step": 795 | |
| }, | |
| { | |
| "epoch": 0.07746958637469586, | |
| "grad_norm": 1.538651154827247, | |
| "learning_rate": 9.94119655062344e-06, | |
| "loss": 0.446, | |
| "step": 796 | |
| }, | |
| { | |
| "epoch": 0.0775669099756691, | |
| "grad_norm": 3.7300878200470926, | |
| "learning_rate": 9.94095528698774e-06, | |
| "loss": 0.2745, | |
| "step": 797 | |
| }, | |
| { | |
| "epoch": 0.07766423357664233, | |
| "grad_norm": 1.685774804326696, | |
| "learning_rate": 9.940713532366277e-06, | |
| "loss": 0.4236, | |
| "step": 798 | |
| }, | |
| { | |
| "epoch": 0.07776155717761557, | |
| "grad_norm": 1.2528388212678458, | |
| "learning_rate": 9.940471286783074e-06, | |
| "loss": 0.308, | |
| "step": 799 | |
| }, | |
| { | |
| "epoch": 0.07785888077858881, | |
| "grad_norm": 1.5082779398207746, | |
| "learning_rate": 9.940228550262203e-06, | |
| "loss": 0.4925, | |
| "step": 800 | |
| }, | |
| { | |
| "epoch": 0.07795620437956205, | |
| "grad_norm": 1.544326069333433, | |
| "learning_rate": 9.939985322827784e-06, | |
| "loss": 0.4341, | |
| "step": 801 | |
| }, | |
| { | |
| "epoch": 0.07805352798053528, | |
| "grad_norm": 1.4959220289677864, | |
| "learning_rate": 9.939741604503987e-06, | |
| "loss": 0.4548, | |
| "step": 802 | |
| }, | |
| { | |
| "epoch": 0.07815085158150852, | |
| "grad_norm": 1.682287714178995, | |
| "learning_rate": 9.93949739531503e-06, | |
| "loss": 0.5277, | |
| "step": 803 | |
| }, | |
| { | |
| "epoch": 0.07824817518248176, | |
| "grad_norm": 1.6519496438708445, | |
| "learning_rate": 9.93925269528518e-06, | |
| "loss": 0.3074, | |
| "step": 804 | |
| }, | |
| { | |
| "epoch": 0.07834549878345498, | |
| "grad_norm": 1.4379883641500402, | |
| "learning_rate": 9.939007504438756e-06, | |
| "loss": 0.3069, | |
| "step": 805 | |
| }, | |
| { | |
| "epoch": 0.07844282238442822, | |
| "grad_norm": 2.0644552037743793, | |
| "learning_rate": 9.93876182280012e-06, | |
| "loss": 0.4479, | |
| "step": 806 | |
| }, | |
| { | |
| "epoch": 0.07854014598540146, | |
| "grad_norm": 1.4791313310441092, | |
| "learning_rate": 9.938515650393685e-06, | |
| "loss": 0.4255, | |
| "step": 807 | |
| }, | |
| { | |
| "epoch": 0.0786374695863747, | |
| "grad_norm": 1.4280736600967436, | |
| "learning_rate": 9.938268987243914e-06, | |
| "loss": 0.466, | |
| "step": 808 | |
| }, | |
| { | |
| "epoch": 0.07873479318734793, | |
| "grad_norm": 1.610976672135659, | |
| "learning_rate": 9.93802183337532e-06, | |
| "loss": 0.4327, | |
| "step": 809 | |
| }, | |
| { | |
| "epoch": 0.07883211678832117, | |
| "grad_norm": 1.5447130604673693, | |
| "learning_rate": 9.93777418881246e-06, | |
| "loss": 0.4931, | |
| "step": 810 | |
| }, | |
| { | |
| "epoch": 0.0789294403892944, | |
| "grad_norm": 1.3831325957946852, | |
| "learning_rate": 9.937526053579944e-06, | |
| "loss": 0.3877, | |
| "step": 811 | |
| }, | |
| { | |
| "epoch": 0.07902676399026765, | |
| "grad_norm": 1.4247112282736865, | |
| "learning_rate": 9.93727742770243e-06, | |
| "loss": 0.4168, | |
| "step": 812 | |
| }, | |
| { | |
| "epoch": 0.07912408759124087, | |
| "grad_norm": 1.5074130304911886, | |
| "learning_rate": 9.937028311204624e-06, | |
| "loss": 0.4747, | |
| "step": 813 | |
| }, | |
| { | |
| "epoch": 0.07922141119221411, | |
| "grad_norm": 1.4955958242475926, | |
| "learning_rate": 9.936778704111278e-06, | |
| "loss": 0.2999, | |
| "step": 814 | |
| }, | |
| { | |
| "epoch": 0.07931873479318735, | |
| "grad_norm": 1.6038468607718186, | |
| "learning_rate": 9.9365286064472e-06, | |
| "loss": 0.4897, | |
| "step": 815 | |
| }, | |
| { | |
| "epoch": 0.07941605839416059, | |
| "grad_norm": 1.8040845780349017, | |
| "learning_rate": 9.93627801823724e-06, | |
| "loss": 0.6413, | |
| "step": 816 | |
| }, | |
| { | |
| "epoch": 0.07951338199513382, | |
| "grad_norm": 1.4598215502284355, | |
| "learning_rate": 9.936026939506298e-06, | |
| "loss": 0.3687, | |
| "step": 817 | |
| }, | |
| { | |
| "epoch": 0.07961070559610706, | |
| "grad_norm": 1.340412030499075, | |
| "learning_rate": 9.935775370279324e-06, | |
| "loss": 0.3833, | |
| "step": 818 | |
| }, | |
| { | |
| "epoch": 0.0797080291970803, | |
| "grad_norm": 1.6913032059853774, | |
| "learning_rate": 9.935523310581318e-06, | |
| "loss": 0.5857, | |
| "step": 819 | |
| }, | |
| { | |
| "epoch": 0.07980535279805352, | |
| "grad_norm": 1.9970663728185467, | |
| "learning_rate": 9.93527076043733e-06, | |
| "loss": 0.6843, | |
| "step": 820 | |
| }, | |
| { | |
| "epoch": 0.07990267639902676, | |
| "grad_norm": 1.4408921562941295, | |
| "learning_rate": 9.93501771987245e-06, | |
| "loss": 0.4385, | |
| "step": 821 | |
| }, | |
| { | |
| "epoch": 0.08, | |
| "grad_norm": 1.5184490203891443, | |
| "learning_rate": 9.934764188911827e-06, | |
| "loss": 0.4708, | |
| "step": 822 | |
| }, | |
| { | |
| "epoch": 0.08009732360097324, | |
| "grad_norm": 1.8501562903086661, | |
| "learning_rate": 9.934510167580654e-06, | |
| "loss": 0.6431, | |
| "step": 823 | |
| }, | |
| { | |
| "epoch": 0.08019464720194647, | |
| "grad_norm": 1.6997829158405129, | |
| "learning_rate": 9.934255655904172e-06, | |
| "loss": 0.5188, | |
| "step": 824 | |
| }, | |
| { | |
| "epoch": 0.08029197080291971, | |
| "grad_norm": 1.8510241792275326, | |
| "learning_rate": 9.934000653907674e-06, | |
| "loss": 0.5457, | |
| "step": 825 | |
| }, | |
| { | |
| "epoch": 0.08038929440389295, | |
| "grad_norm": 1.6853569692908912, | |
| "learning_rate": 9.933745161616498e-06, | |
| "loss": 0.5062, | |
| "step": 826 | |
| }, | |
| { | |
| "epoch": 0.08048661800486619, | |
| "grad_norm": 1.3066104263898661, | |
| "learning_rate": 9.93348917905603e-06, | |
| "loss": 0.404, | |
| "step": 827 | |
| }, | |
| { | |
| "epoch": 0.08058394160583941, | |
| "grad_norm": 1.2788244408859646, | |
| "learning_rate": 9.933232706251712e-06, | |
| "loss": 0.3253, | |
| "step": 828 | |
| }, | |
| { | |
| "epoch": 0.08068126520681265, | |
| "grad_norm": 2.2690800072126325, | |
| "learning_rate": 9.932975743229027e-06, | |
| "loss": 0.3405, | |
| "step": 829 | |
| }, | |
| { | |
| "epoch": 0.08077858880778589, | |
| "grad_norm": 1.9113871035353245, | |
| "learning_rate": 9.932718290013512e-06, | |
| "loss": 0.5989, | |
| "step": 830 | |
| }, | |
| { | |
| "epoch": 0.08087591240875912, | |
| "grad_norm": 1.3655256798283997, | |
| "learning_rate": 9.932460346630748e-06, | |
| "loss": 0.2942, | |
| "step": 831 | |
| }, | |
| { | |
| "epoch": 0.08097323600973236, | |
| "grad_norm": 1.5234864838378999, | |
| "learning_rate": 9.932201913106366e-06, | |
| "loss": 0.3913, | |
| "step": 832 | |
| }, | |
| { | |
| "epoch": 0.0810705596107056, | |
| "grad_norm": 1.3752195876516826, | |
| "learning_rate": 9.93194298946605e-06, | |
| "loss": 0.3293, | |
| "step": 833 | |
| }, | |
| { | |
| "epoch": 0.08116788321167884, | |
| "grad_norm": 1.4842622412969824, | |
| "learning_rate": 9.931683575735527e-06, | |
| "loss": 0.4157, | |
| "step": 834 | |
| }, | |
| { | |
| "epoch": 0.08126520681265206, | |
| "grad_norm": 4.003685207313109, | |
| "learning_rate": 9.931423671940577e-06, | |
| "loss": 0.3276, | |
| "step": 835 | |
| }, | |
| { | |
| "epoch": 0.0813625304136253, | |
| "grad_norm": 1.509943035011216, | |
| "learning_rate": 9.931163278107023e-06, | |
| "loss": 0.4045, | |
| "step": 836 | |
| }, | |
| { | |
| "epoch": 0.08145985401459854, | |
| "grad_norm": 1.4382523765338775, | |
| "learning_rate": 9.930902394260746e-06, | |
| "loss": 0.2709, | |
| "step": 837 | |
| }, | |
| { | |
| "epoch": 0.08155717761557178, | |
| "grad_norm": 1.4492711471586157, | |
| "learning_rate": 9.930641020427665e-06, | |
| "loss": 0.3957, | |
| "step": 838 | |
| }, | |
| { | |
| "epoch": 0.08165450121654501, | |
| "grad_norm": 1.7428876214187694, | |
| "learning_rate": 9.930379156633758e-06, | |
| "loss": 0.5257, | |
| "step": 839 | |
| }, | |
| { | |
| "epoch": 0.08175182481751825, | |
| "grad_norm": 1.5652514836380926, | |
| "learning_rate": 9.930116802905042e-06, | |
| "loss": 0.4948, | |
| "step": 840 | |
| }, | |
| { | |
| "epoch": 0.08184914841849149, | |
| "grad_norm": 2.4133112951540494, | |
| "learning_rate": 9.929853959267589e-06, | |
| "loss": 0.5455, | |
| "step": 841 | |
| }, | |
| { | |
| "epoch": 0.08194647201946471, | |
| "grad_norm": 1.4309460046419233, | |
| "learning_rate": 9.929590625747518e-06, | |
| "loss": 0.4057, | |
| "step": 842 | |
| }, | |
| { | |
| "epoch": 0.08204379562043795, | |
| "grad_norm": 1.0450296792009146, | |
| "learning_rate": 9.929326802370995e-06, | |
| "loss": 0.2332, | |
| "step": 843 | |
| }, | |
| { | |
| "epoch": 0.0821411192214112, | |
| "grad_norm": 1.1201933325217828, | |
| "learning_rate": 9.92906248916424e-06, | |
| "loss": 0.3264, | |
| "step": 844 | |
| }, | |
| { | |
| "epoch": 0.08223844282238443, | |
| "grad_norm": 1.6243579769967154, | |
| "learning_rate": 9.928797686153515e-06, | |
| "loss": 0.5385, | |
| "step": 845 | |
| }, | |
| { | |
| "epoch": 0.08233576642335766, | |
| "grad_norm": 1.3496069901220336, | |
| "learning_rate": 9.928532393365136e-06, | |
| "loss": 0.3875, | |
| "step": 846 | |
| }, | |
| { | |
| "epoch": 0.0824330900243309, | |
| "grad_norm": 1.4862888245769246, | |
| "learning_rate": 9.928266610825462e-06, | |
| "loss": 0.4493, | |
| "step": 847 | |
| }, | |
| { | |
| "epoch": 0.08253041362530414, | |
| "grad_norm": 1.8305160014899666, | |
| "learning_rate": 9.928000338560906e-06, | |
| "loss": 0.4582, | |
| "step": 848 | |
| }, | |
| { | |
| "epoch": 0.08262773722627738, | |
| "grad_norm": 1.642584946989029, | |
| "learning_rate": 9.927733576597926e-06, | |
| "loss": 0.3347, | |
| "step": 849 | |
| }, | |
| { | |
| "epoch": 0.0827250608272506, | |
| "grad_norm": 1.5413363162928122, | |
| "learning_rate": 9.927466324963033e-06, | |
| "loss": 0.4607, | |
| "step": 850 | |
| }, | |
| { | |
| "epoch": 0.08282238442822384, | |
| "grad_norm": 1.7093263469236866, | |
| "learning_rate": 9.927198583682784e-06, | |
| "loss": 0.5706, | |
| "step": 851 | |
| }, | |
| { | |
| "epoch": 0.08291970802919708, | |
| "grad_norm": 1.531714933227777, | |
| "learning_rate": 9.926930352783781e-06, | |
| "loss": 0.533, | |
| "step": 852 | |
| }, | |
| { | |
| "epoch": 0.08301703163017031, | |
| "grad_norm": 1.8181822267445191, | |
| "learning_rate": 9.926661632292683e-06, | |
| "loss": 0.5946, | |
| "step": 853 | |
| }, | |
| { | |
| "epoch": 0.08311435523114355, | |
| "grad_norm": 1.8304662465930317, | |
| "learning_rate": 9.926392422236189e-06, | |
| "loss": 0.3746, | |
| "step": 854 | |
| }, | |
| { | |
| "epoch": 0.08321167883211679, | |
| "grad_norm": 1.3135536142885351, | |
| "learning_rate": 9.926122722641051e-06, | |
| "loss": 0.429, | |
| "step": 855 | |
| }, | |
| { | |
| "epoch": 0.08330900243309003, | |
| "grad_norm": 1.714390027755308, | |
| "learning_rate": 9.925852533534071e-06, | |
| "loss": 0.6806, | |
| "step": 856 | |
| }, | |
| { | |
| "epoch": 0.08340632603406326, | |
| "grad_norm": 1.3399957064659453, | |
| "learning_rate": 9.925581854942099e-06, | |
| "loss": 0.2824, | |
| "step": 857 | |
| }, | |
| { | |
| "epoch": 0.0835036496350365, | |
| "grad_norm": 1.3705351036499993, | |
| "learning_rate": 9.925310686892026e-06, | |
| "loss": 0.3085, | |
| "step": 858 | |
| }, | |
| { | |
| "epoch": 0.08360097323600973, | |
| "grad_norm": 1.5064665959171673, | |
| "learning_rate": 9.925039029410807e-06, | |
| "loss": 0.4445, | |
| "step": 859 | |
| }, | |
| { | |
| "epoch": 0.08369829683698297, | |
| "grad_norm": 1.725614330530946, | |
| "learning_rate": 9.924766882525433e-06, | |
| "loss": 0.4704, | |
| "step": 860 | |
| }, | |
| { | |
| "epoch": 0.0837956204379562, | |
| "grad_norm": 1.765372064078189, | |
| "learning_rate": 9.924494246262944e-06, | |
| "loss": 0.6383, | |
| "step": 861 | |
| }, | |
| { | |
| "epoch": 0.08389294403892944, | |
| "grad_norm": 2.085503007877936, | |
| "learning_rate": 9.924221120650434e-06, | |
| "loss": 0.296, | |
| "step": 862 | |
| }, | |
| { | |
| "epoch": 0.08399026763990268, | |
| "grad_norm": 1.7898541160892734, | |
| "learning_rate": 9.923947505715046e-06, | |
| "loss": 0.5991, | |
| "step": 863 | |
| }, | |
| { | |
| "epoch": 0.0840875912408759, | |
| "grad_norm": 1.6476104975968628, | |
| "learning_rate": 9.923673401483968e-06, | |
| "loss": 0.4734, | |
| "step": 864 | |
| }, | |
| { | |
| "epoch": 0.08418491484184915, | |
| "grad_norm": 1.5502768976775265, | |
| "learning_rate": 9.923398807984439e-06, | |
| "loss": 0.2764, | |
| "step": 865 | |
| }, | |
| { | |
| "epoch": 0.08428223844282239, | |
| "grad_norm": 1.2398437846135097, | |
| "learning_rate": 9.923123725243744e-06, | |
| "loss": 0.2705, | |
| "step": 866 | |
| }, | |
| { | |
| "epoch": 0.08437956204379563, | |
| "grad_norm": 1.5290591078236662, | |
| "learning_rate": 9.922848153289217e-06, | |
| "loss": 0.4228, | |
| "step": 867 | |
| }, | |
| { | |
| "epoch": 0.08447688564476885, | |
| "grad_norm": 1.134889947118225, | |
| "learning_rate": 9.922572092148244e-06, | |
| "loss": 0.2953, | |
| "step": 868 | |
| }, | |
| { | |
| "epoch": 0.08457420924574209, | |
| "grad_norm": 1.6307620082274505, | |
| "learning_rate": 9.922295541848257e-06, | |
| "loss": 0.3363, | |
| "step": 869 | |
| }, | |
| { | |
| "epoch": 0.08467153284671533, | |
| "grad_norm": 1.373015271795792, | |
| "learning_rate": 9.922018502416736e-06, | |
| "loss": 0.3593, | |
| "step": 870 | |
| }, | |
| { | |
| "epoch": 0.08476885644768857, | |
| "grad_norm": 1.7500724096304088, | |
| "learning_rate": 9.921740973881211e-06, | |
| "loss": 0.5236, | |
| "step": 871 | |
| }, | |
| { | |
| "epoch": 0.0848661800486618, | |
| "grad_norm": 1.6167507595463353, | |
| "learning_rate": 9.92146295626926e-06, | |
| "loss": 0.5138, | |
| "step": 872 | |
| }, | |
| { | |
| "epoch": 0.08496350364963504, | |
| "grad_norm": 1.0398007401901226, | |
| "learning_rate": 9.92118444960851e-06, | |
| "loss": 0.295, | |
| "step": 873 | |
| }, | |
| { | |
| "epoch": 0.08506082725060828, | |
| "grad_norm": 1.4140920056378707, | |
| "learning_rate": 9.920905453926637e-06, | |
| "loss": 0.4192, | |
| "step": 874 | |
| }, | |
| { | |
| "epoch": 0.0851581508515815, | |
| "grad_norm": 1.8785238213855096, | |
| "learning_rate": 9.920625969251365e-06, | |
| "loss": 0.4228, | |
| "step": 875 | |
| }, | |
| { | |
| "epoch": 0.08525547445255474, | |
| "grad_norm": 1.719991686268608, | |
| "learning_rate": 9.920345995610465e-06, | |
| "loss": 0.5026, | |
| "step": 876 | |
| }, | |
| { | |
| "epoch": 0.08535279805352798, | |
| "grad_norm": 1.7112372148926476, | |
| "learning_rate": 9.92006553303176e-06, | |
| "loss": 0.3157, | |
| "step": 877 | |
| }, | |
| { | |
| "epoch": 0.08545012165450122, | |
| "grad_norm": 2.5105720144829116, | |
| "learning_rate": 9.919784581543117e-06, | |
| "loss": 0.4777, | |
| "step": 878 | |
| }, | |
| { | |
| "epoch": 0.08554744525547445, | |
| "grad_norm": 1.42848630379055, | |
| "learning_rate": 9.919503141172458e-06, | |
| "loss": 0.3998, | |
| "step": 879 | |
| }, | |
| { | |
| "epoch": 0.08564476885644769, | |
| "grad_norm": 1.4246136626839867, | |
| "learning_rate": 9.919221211947748e-06, | |
| "loss": 0.4415, | |
| "step": 880 | |
| }, | |
| { | |
| "epoch": 0.08574209245742093, | |
| "grad_norm": 1.939970471855472, | |
| "learning_rate": 9.918938793897002e-06, | |
| "loss": 0.5887, | |
| "step": 881 | |
| }, | |
| { | |
| "epoch": 0.08583941605839417, | |
| "grad_norm": 1.5467402852284964, | |
| "learning_rate": 9.918655887048285e-06, | |
| "loss": 0.3726, | |
| "step": 882 | |
| }, | |
| { | |
| "epoch": 0.08593673965936739, | |
| "grad_norm": 1.6261636529000345, | |
| "learning_rate": 9.918372491429708e-06, | |
| "loss": 0.3382, | |
| "step": 883 | |
| }, | |
| { | |
| "epoch": 0.08603406326034063, | |
| "grad_norm": 1.4859289768748727, | |
| "learning_rate": 9.918088607069434e-06, | |
| "loss": 0.4837, | |
| "step": 884 | |
| }, | |
| { | |
| "epoch": 0.08613138686131387, | |
| "grad_norm": 1.8534453271170916, | |
| "learning_rate": 9.917804233995673e-06, | |
| "loss": 0.5948, | |
| "step": 885 | |
| }, | |
| { | |
| "epoch": 0.08622871046228711, | |
| "grad_norm": 1.3491809126204122, | |
| "learning_rate": 9.917519372236684e-06, | |
| "loss": 0.381, | |
| "step": 886 | |
| }, | |
| { | |
| "epoch": 0.08632603406326034, | |
| "grad_norm": 1.4913268478302555, | |
| "learning_rate": 9.91723402182077e-06, | |
| "loss": 0.2872, | |
| "step": 887 | |
| }, | |
| { | |
| "epoch": 0.08642335766423358, | |
| "grad_norm": 1.5345667515291348, | |
| "learning_rate": 9.916948182776289e-06, | |
| "loss": 0.4426, | |
| "step": 888 | |
| }, | |
| { | |
| "epoch": 0.08652068126520682, | |
| "grad_norm": 1.9142340135608018, | |
| "learning_rate": 9.916661855131646e-06, | |
| "loss": 0.467, | |
| "step": 889 | |
| }, | |
| { | |
| "epoch": 0.08661800486618004, | |
| "grad_norm": 1.7451883652681546, | |
| "learning_rate": 9.916375038915291e-06, | |
| "loss": 0.3579, | |
| "step": 890 | |
| }, | |
| { | |
| "epoch": 0.08671532846715328, | |
| "grad_norm": 3.3675828599824618, | |
| "learning_rate": 9.916087734155728e-06, | |
| "loss": 0.3965, | |
| "step": 891 | |
| }, | |
| { | |
| "epoch": 0.08681265206812652, | |
| "grad_norm": 1.6430989821947144, | |
| "learning_rate": 9.915799940881504e-06, | |
| "loss": 0.5089, | |
| "step": 892 | |
| }, | |
| { | |
| "epoch": 0.08690997566909976, | |
| "grad_norm": 1.8434153107573372, | |
| "learning_rate": 9.915511659121219e-06, | |
| "loss": 0.6513, | |
| "step": 893 | |
| }, | |
| { | |
| "epoch": 0.08700729927007299, | |
| "grad_norm": 1.7259560464984558, | |
| "learning_rate": 9.91522288890352e-06, | |
| "loss": 0.5963, | |
| "step": 894 | |
| }, | |
| { | |
| "epoch": 0.08710462287104623, | |
| "grad_norm": 1.4417036209809253, | |
| "learning_rate": 9.9149336302571e-06, | |
| "loss": 0.4076, | |
| "step": 895 | |
| }, | |
| { | |
| "epoch": 0.08720194647201947, | |
| "grad_norm": 1.4565626930182671, | |
| "learning_rate": 9.914643883210704e-06, | |
| "loss": 0.3548, | |
| "step": 896 | |
| }, | |
| { | |
| "epoch": 0.08729927007299271, | |
| "grad_norm": 1.8286482885292266, | |
| "learning_rate": 9.914353647793126e-06, | |
| "loss": 0.5158, | |
| "step": 897 | |
| }, | |
| { | |
| "epoch": 0.08739659367396593, | |
| "grad_norm": 1.573235746781315, | |
| "learning_rate": 9.914062924033204e-06, | |
| "loss": 0.4804, | |
| "step": 898 | |
| }, | |
| { | |
| "epoch": 0.08749391727493917, | |
| "grad_norm": 1.7725042500734154, | |
| "learning_rate": 9.91377171195983e-06, | |
| "loss": 0.4037, | |
| "step": 899 | |
| }, | |
| { | |
| "epoch": 0.08759124087591241, | |
| "grad_norm": 1.5572801757524644, | |
| "learning_rate": 9.913480011601939e-06, | |
| "loss": 0.2757, | |
| "step": 900 | |
| }, | |
| { | |
| "epoch": 0.08768856447688564, | |
| "grad_norm": 1.690990088453521, | |
| "learning_rate": 9.91318782298852e-06, | |
| "loss": 0.624, | |
| "step": 901 | |
| }, | |
| { | |
| "epoch": 0.08778588807785888, | |
| "grad_norm": 1.5797017595834213, | |
| "learning_rate": 9.912895146148609e-06, | |
| "loss": 0.418, | |
| "step": 902 | |
| }, | |
| { | |
| "epoch": 0.08788321167883212, | |
| "grad_norm": 1.722754374021215, | |
| "learning_rate": 9.912601981111287e-06, | |
| "loss": 0.5991, | |
| "step": 903 | |
| }, | |
| { | |
| "epoch": 0.08798053527980536, | |
| "grad_norm": 1.2395740583484196, | |
| "learning_rate": 9.912308327905683e-06, | |
| "loss": 0.3632, | |
| "step": 904 | |
| }, | |
| { | |
| "epoch": 0.08807785888077858, | |
| "grad_norm": 1.8637568028899596, | |
| "learning_rate": 9.912014186560985e-06, | |
| "loss": 0.5766, | |
| "step": 905 | |
| }, | |
| { | |
| "epoch": 0.08817518248175182, | |
| "grad_norm": 1.8489319991981024, | |
| "learning_rate": 9.911719557106418e-06, | |
| "loss": 0.6834, | |
| "step": 906 | |
| }, | |
| { | |
| "epoch": 0.08827250608272506, | |
| "grad_norm": 1.6692858460733677, | |
| "learning_rate": 9.911424439571258e-06, | |
| "loss": 0.5067, | |
| "step": 907 | |
| }, | |
| { | |
| "epoch": 0.0883698296836983, | |
| "grad_norm": 1.4727605888984552, | |
| "learning_rate": 9.911128833984834e-06, | |
| "loss": 0.3141, | |
| "step": 908 | |
| }, | |
| { | |
| "epoch": 0.08846715328467153, | |
| "grad_norm": 1.644393806422472, | |
| "learning_rate": 9.910832740376518e-06, | |
| "loss": 0.4599, | |
| "step": 909 | |
| }, | |
| { | |
| "epoch": 0.08856447688564477, | |
| "grad_norm": 1.730275300452632, | |
| "learning_rate": 9.910536158775734e-06, | |
| "loss": 0.3908, | |
| "step": 910 | |
| }, | |
| { | |
| "epoch": 0.08866180048661801, | |
| "grad_norm": 1.7281903494262714, | |
| "learning_rate": 9.910239089211955e-06, | |
| "loss": 0.5919, | |
| "step": 911 | |
| }, | |
| { | |
| "epoch": 0.08875912408759123, | |
| "grad_norm": 1.7234172913238917, | |
| "learning_rate": 9.909941531714699e-06, | |
| "loss": 0.609, | |
| "step": 912 | |
| }, | |
| { | |
| "epoch": 0.08885644768856447, | |
| "grad_norm": 1.4594702058569258, | |
| "learning_rate": 9.909643486313533e-06, | |
| "loss": 0.4399, | |
| "step": 913 | |
| }, | |
| { | |
| "epoch": 0.08895377128953771, | |
| "grad_norm": 1.4625782448468165, | |
| "learning_rate": 9.90934495303808e-06, | |
| "loss": 0.4011, | |
| "step": 914 | |
| }, | |
| { | |
| "epoch": 0.08905109489051095, | |
| "grad_norm": 1.7262645481609784, | |
| "learning_rate": 9.909045931918e-06, | |
| "loss": 0.4992, | |
| "step": 915 | |
| }, | |
| { | |
| "epoch": 0.08914841849148418, | |
| "grad_norm": 1.6255222361700263, | |
| "learning_rate": 9.908746422983007e-06, | |
| "loss": 0.4909, | |
| "step": 916 | |
| }, | |
| { | |
| "epoch": 0.08924574209245742, | |
| "grad_norm": 1.7512982185254946, | |
| "learning_rate": 9.908446426262865e-06, | |
| "loss": 0.5527, | |
| "step": 917 | |
| }, | |
| { | |
| "epoch": 0.08934306569343066, | |
| "grad_norm": 1.617605772613541, | |
| "learning_rate": 9.908145941787386e-06, | |
| "loss": 0.3228, | |
| "step": 918 | |
| }, | |
| { | |
| "epoch": 0.0894403892944039, | |
| "grad_norm": 1.489706963519404, | |
| "learning_rate": 9.907844969586427e-06, | |
| "loss": 0.4838, | |
| "step": 919 | |
| }, | |
| { | |
| "epoch": 0.08953771289537713, | |
| "grad_norm": 1.193837371345013, | |
| "learning_rate": 9.907543509689896e-06, | |
| "loss": 0.284, | |
| "step": 920 | |
| }, | |
| { | |
| "epoch": 0.08963503649635036, | |
| "grad_norm": 1.5855787651349198, | |
| "learning_rate": 9.907241562127752e-06, | |
| "loss": 0.4641, | |
| "step": 921 | |
| }, | |
| { | |
| "epoch": 0.0897323600973236, | |
| "grad_norm": 1.2401284480478103, | |
| "learning_rate": 9.906939126929998e-06, | |
| "loss": 0.246, | |
| "step": 922 | |
| }, | |
| { | |
| "epoch": 0.08982968369829683, | |
| "grad_norm": 1.503842201355298, | |
| "learning_rate": 9.906636204126685e-06, | |
| "loss": 0.4031, | |
| "step": 923 | |
| }, | |
| { | |
| "epoch": 0.08992700729927007, | |
| "grad_norm": 1.9138265658958267, | |
| "learning_rate": 9.906332793747917e-06, | |
| "loss": 0.587, | |
| "step": 924 | |
| }, | |
| { | |
| "epoch": 0.09002433090024331, | |
| "grad_norm": 1.5381184892388742, | |
| "learning_rate": 9.906028895823844e-06, | |
| "loss": 0.4119, | |
| "step": 925 | |
| }, | |
| { | |
| "epoch": 0.09012165450121655, | |
| "grad_norm": 1.5769181877690257, | |
| "learning_rate": 9.905724510384664e-06, | |
| "loss": 0.4071, | |
| "step": 926 | |
| }, | |
| { | |
| "epoch": 0.09021897810218978, | |
| "grad_norm": 1.4644408625641083, | |
| "learning_rate": 9.905419637460625e-06, | |
| "loss": 0.3656, | |
| "step": 927 | |
| }, | |
| { | |
| "epoch": 0.09031630170316302, | |
| "grad_norm": 2.043739071504731, | |
| "learning_rate": 9.90511427708202e-06, | |
| "loss": 0.6317, | |
| "step": 928 | |
| }, | |
| { | |
| "epoch": 0.09041362530413626, | |
| "grad_norm": 1.8397228419915481, | |
| "learning_rate": 9.904808429279195e-06, | |
| "loss": 0.6656, | |
| "step": 929 | |
| }, | |
| { | |
| "epoch": 0.0905109489051095, | |
| "grad_norm": 1.6689588837493128, | |
| "learning_rate": 9.904502094082542e-06, | |
| "loss": 0.4603, | |
| "step": 930 | |
| }, | |
| { | |
| "epoch": 0.09060827250608272, | |
| "grad_norm": 1.7157610479724803, | |
| "learning_rate": 9.9041952715225e-06, | |
| "loss": 0.3566, | |
| "step": 931 | |
| }, | |
| { | |
| "epoch": 0.09070559610705596, | |
| "grad_norm": 1.5797548847560638, | |
| "learning_rate": 9.90388796162956e-06, | |
| "loss": 0.527, | |
| "step": 932 | |
| }, | |
| { | |
| "epoch": 0.0908029197080292, | |
| "grad_norm": 1.3861944362556795, | |
| "learning_rate": 9.903580164434262e-06, | |
| "loss": 0.3555, | |
| "step": 933 | |
| }, | |
| { | |
| "epoch": 0.09090024330900243, | |
| "grad_norm": 1.4873043668950738, | |
| "learning_rate": 9.903271879967185e-06, | |
| "loss": 0.3606, | |
| "step": 934 | |
| }, | |
| { | |
| "epoch": 0.09099756690997567, | |
| "grad_norm": 1.5471770637050817, | |
| "learning_rate": 9.90296310825897e-06, | |
| "loss": 0.5407, | |
| "step": 935 | |
| }, | |
| { | |
| "epoch": 0.0910948905109489, | |
| "grad_norm": 1.7410898214633266, | |
| "learning_rate": 9.902653849340296e-06, | |
| "loss": 0.5604, | |
| "step": 936 | |
| }, | |
| { | |
| "epoch": 0.09119221411192215, | |
| "grad_norm": 1.490257412993615, | |
| "learning_rate": 9.902344103241897e-06, | |
| "loss": 0.4293, | |
| "step": 937 | |
| }, | |
| { | |
| "epoch": 0.09128953771289537, | |
| "grad_norm": 1.3076716120407041, | |
| "learning_rate": 9.90203386999455e-06, | |
| "loss": 0.4311, | |
| "step": 938 | |
| }, | |
| { | |
| "epoch": 0.09138686131386861, | |
| "grad_norm": 1.63883307554104, | |
| "learning_rate": 9.901723149629085e-06, | |
| "loss": 0.5026, | |
| "step": 939 | |
| }, | |
| { | |
| "epoch": 0.09148418491484185, | |
| "grad_norm": 1.460694807977355, | |
| "learning_rate": 9.901411942176377e-06, | |
| "loss": 0.4449, | |
| "step": 940 | |
| }, | |
| { | |
| "epoch": 0.09158150851581509, | |
| "grad_norm": 1.631318499416747, | |
| "learning_rate": 9.901100247667352e-06, | |
| "loss": 0.4762, | |
| "step": 941 | |
| }, | |
| { | |
| "epoch": 0.09167883211678832, | |
| "grad_norm": 1.472942456024595, | |
| "learning_rate": 9.900788066132982e-06, | |
| "loss": 0.4208, | |
| "step": 942 | |
| }, | |
| { | |
| "epoch": 0.09177615571776156, | |
| "grad_norm": 1.9471723252943203, | |
| "learning_rate": 9.900475397604292e-06, | |
| "loss": 0.4887, | |
| "step": 943 | |
| }, | |
| { | |
| "epoch": 0.0918734793187348, | |
| "grad_norm": 1.4192635165617975, | |
| "learning_rate": 9.900162242112348e-06, | |
| "loss": 0.4753, | |
| "step": 944 | |
| }, | |
| { | |
| "epoch": 0.09197080291970802, | |
| "grad_norm": 1.7864248496903834, | |
| "learning_rate": 9.89984859968827e-06, | |
| "loss": 0.6063, | |
| "step": 945 | |
| }, | |
| { | |
| "epoch": 0.09206812652068126, | |
| "grad_norm": 1.402919088092856, | |
| "learning_rate": 9.899534470363225e-06, | |
| "loss": 0.3561, | |
| "step": 946 | |
| }, | |
| { | |
| "epoch": 0.0921654501216545, | |
| "grad_norm": 1.15011785152118, | |
| "learning_rate": 9.89921985416843e-06, | |
| "loss": 0.2605, | |
| "step": 947 | |
| }, | |
| { | |
| "epoch": 0.09226277372262774, | |
| "grad_norm": 1.2940536511249239, | |
| "learning_rate": 9.898904751135145e-06, | |
| "loss": 0.2503, | |
| "step": 948 | |
| }, | |
| { | |
| "epoch": 0.09236009732360097, | |
| "grad_norm": 1.5093308152075566, | |
| "learning_rate": 9.898589161294684e-06, | |
| "loss": 0.4185, | |
| "step": 949 | |
| }, | |
| { | |
| "epoch": 0.09245742092457421, | |
| "grad_norm": 1.5826010349075055, | |
| "learning_rate": 9.898273084678406e-06, | |
| "loss": 0.536, | |
| "step": 950 | |
| }, | |
| { | |
| "epoch": 0.09255474452554745, | |
| "grad_norm": 1.5672518381317015, | |
| "learning_rate": 9.897956521317724e-06, | |
| "loss": 0.5068, | |
| "step": 951 | |
| }, | |
| { | |
| "epoch": 0.09265206812652069, | |
| "grad_norm": 1.784767292144658, | |
| "learning_rate": 9.89763947124409e-06, | |
| "loss": 0.6601, | |
| "step": 952 | |
| }, | |
| { | |
| "epoch": 0.09274939172749391, | |
| "grad_norm": 1.620681747107968, | |
| "learning_rate": 9.897321934489011e-06, | |
| "loss": 0.5402, | |
| "step": 953 | |
| }, | |
| { | |
| "epoch": 0.09284671532846715, | |
| "grad_norm": 1.7479722673062432, | |
| "learning_rate": 9.897003911084042e-06, | |
| "loss": 0.6593, | |
| "step": 954 | |
| }, | |
| { | |
| "epoch": 0.09294403892944039, | |
| "grad_norm": 1.6618363798373263, | |
| "learning_rate": 9.896685401060783e-06, | |
| "loss": 0.6086, | |
| "step": 955 | |
| }, | |
| { | |
| "epoch": 0.09304136253041363, | |
| "grad_norm": 1.3782603882872615, | |
| "learning_rate": 9.896366404450888e-06, | |
| "loss": 0.3431, | |
| "step": 956 | |
| }, | |
| { | |
| "epoch": 0.09313868613138686, | |
| "grad_norm": 1.6607836446620106, | |
| "learning_rate": 9.896046921286053e-06, | |
| "loss": 0.4015, | |
| "step": 957 | |
| }, | |
| { | |
| "epoch": 0.0932360097323601, | |
| "grad_norm": 1.372535143543006, | |
| "learning_rate": 9.895726951598026e-06, | |
| "loss": 0.3627, | |
| "step": 958 | |
| }, | |
| { | |
| "epoch": 0.09333333333333334, | |
| "grad_norm": 1.965175835699204, | |
| "learning_rate": 9.895406495418602e-06, | |
| "loss": 0.434, | |
| "step": 959 | |
| }, | |
| { | |
| "epoch": 0.09343065693430656, | |
| "grad_norm": 1.6072227382486934, | |
| "learning_rate": 9.895085552779626e-06, | |
| "loss": 0.3666, | |
| "step": 960 | |
| }, | |
| { | |
| "epoch": 0.0935279805352798, | |
| "grad_norm": 1.8680414138630521, | |
| "learning_rate": 9.894764123712991e-06, | |
| "loss": 0.6182, | |
| "step": 961 | |
| }, | |
| { | |
| "epoch": 0.09362530413625304, | |
| "grad_norm": 1.7249394724081422, | |
| "learning_rate": 9.894442208250636e-06, | |
| "loss": 0.569, | |
| "step": 962 | |
| }, | |
| { | |
| "epoch": 0.09372262773722628, | |
| "grad_norm": 1.7887658285510963, | |
| "learning_rate": 9.894119806424549e-06, | |
| "loss": 0.4825, | |
| "step": 963 | |
| }, | |
| { | |
| "epoch": 0.09381995133819951, | |
| "grad_norm": 1.4470695743772581, | |
| "learning_rate": 9.89379691826677e-06, | |
| "loss": 0.4036, | |
| "step": 964 | |
| }, | |
| { | |
| "epoch": 0.09391727493917275, | |
| "grad_norm": 1.739037372856574, | |
| "learning_rate": 9.893473543809383e-06, | |
| "loss": 0.3734, | |
| "step": 965 | |
| }, | |
| { | |
| "epoch": 0.09401459854014599, | |
| "grad_norm": 1.2401623802615098, | |
| "learning_rate": 9.893149683084522e-06, | |
| "loss": 0.2892, | |
| "step": 966 | |
| }, | |
| { | |
| "epoch": 0.09411192214111923, | |
| "grad_norm": 1.632367817316159, | |
| "learning_rate": 9.892825336124369e-06, | |
| "loss": 0.3324, | |
| "step": 967 | |
| }, | |
| { | |
| "epoch": 0.09420924574209245, | |
| "grad_norm": 1.4553279790204596, | |
| "learning_rate": 9.892500502961156e-06, | |
| "loss": 0.4518, | |
| "step": 968 | |
| }, | |
| { | |
| "epoch": 0.0943065693430657, | |
| "grad_norm": 2.0184949211791867, | |
| "learning_rate": 9.892175183627161e-06, | |
| "loss": 0.496, | |
| "step": 969 | |
| }, | |
| { | |
| "epoch": 0.09440389294403893, | |
| "grad_norm": 1.3847811204395728, | |
| "learning_rate": 9.89184937815471e-06, | |
| "loss": 0.3908, | |
| "step": 970 | |
| }, | |
| { | |
| "epoch": 0.09450121654501216, | |
| "grad_norm": 1.7325451795183482, | |
| "learning_rate": 9.89152308657618e-06, | |
| "loss": 0.5813, | |
| "step": 971 | |
| }, | |
| { | |
| "epoch": 0.0945985401459854, | |
| "grad_norm": 1.3485480854398895, | |
| "learning_rate": 9.891196308923994e-06, | |
| "loss": 0.2773, | |
| "step": 972 | |
| }, | |
| { | |
| "epoch": 0.09469586374695864, | |
| "grad_norm": 1.6137214411092917, | |
| "learning_rate": 9.890869045230625e-06, | |
| "loss": 0.573, | |
| "step": 973 | |
| }, | |
| { | |
| "epoch": 0.09479318734793188, | |
| "grad_norm": 1.8098732560393935, | |
| "learning_rate": 9.890541295528593e-06, | |
| "loss": 0.5765, | |
| "step": 974 | |
| }, | |
| { | |
| "epoch": 0.0948905109489051, | |
| "grad_norm": 1.7169741386061155, | |
| "learning_rate": 9.890213059850467e-06, | |
| "loss": 0.5463, | |
| "step": 975 | |
| }, | |
| { | |
| "epoch": 0.09498783454987834, | |
| "grad_norm": 1.6226425677233698, | |
| "learning_rate": 9.889884338228861e-06, | |
| "loss": 0.459, | |
| "step": 976 | |
| }, | |
| { | |
| "epoch": 0.09508515815085158, | |
| "grad_norm": 1.5712338302132318, | |
| "learning_rate": 9.889555130696445e-06, | |
| "loss": 0.2926, | |
| "step": 977 | |
| }, | |
| { | |
| "epoch": 0.09518248175182482, | |
| "grad_norm": 2.368668096329164, | |
| "learning_rate": 9.88922543728593e-06, | |
| "loss": 0.4602, | |
| "step": 978 | |
| }, | |
| { | |
| "epoch": 0.09527980535279805, | |
| "grad_norm": 1.5481463515619227, | |
| "learning_rate": 9.888895258030077e-06, | |
| "loss": 0.382, | |
| "step": 979 | |
| }, | |
| { | |
| "epoch": 0.09537712895377129, | |
| "grad_norm": 1.5566394762827083, | |
| "learning_rate": 9.888564592961698e-06, | |
| "loss": 0.4432, | |
| "step": 980 | |
| }, | |
| { | |
| "epoch": 0.09547445255474453, | |
| "grad_norm": 1.2929219586068095, | |
| "learning_rate": 9.888233442113651e-06, | |
| "loss": 0.2986, | |
| "step": 981 | |
| }, | |
| { | |
| "epoch": 0.09557177615571776, | |
| "grad_norm": 1.7926346211976876, | |
| "learning_rate": 9.887901805518841e-06, | |
| "loss": 0.4536, | |
| "step": 982 | |
| }, | |
| { | |
| "epoch": 0.095669099756691, | |
| "grad_norm": 1.5810862037952855, | |
| "learning_rate": 9.887569683210225e-06, | |
| "loss": 0.5143, | |
| "step": 983 | |
| }, | |
| { | |
| "epoch": 0.09576642335766423, | |
| "grad_norm": 1.486412737689962, | |
| "learning_rate": 9.887237075220805e-06, | |
| "loss": 0.4422, | |
| "step": 984 | |
| }, | |
| { | |
| "epoch": 0.09586374695863747, | |
| "grad_norm": 1.5634292890846626, | |
| "learning_rate": 9.886903981583633e-06, | |
| "loss": 0.5158, | |
| "step": 985 | |
| }, | |
| { | |
| "epoch": 0.0959610705596107, | |
| "grad_norm": 1.4911106877832496, | |
| "learning_rate": 9.88657040233181e-06, | |
| "loss": 0.3584, | |
| "step": 986 | |
| }, | |
| { | |
| "epoch": 0.09605839416058394, | |
| "grad_norm": 1.8920202230134835, | |
| "learning_rate": 9.886236337498481e-06, | |
| "loss": 0.7059, | |
| "step": 987 | |
| }, | |
| { | |
| "epoch": 0.09615571776155718, | |
| "grad_norm": 1.9765830057761664, | |
| "learning_rate": 9.885901787116844e-06, | |
| "loss": 0.3363, | |
| "step": 988 | |
| }, | |
| { | |
| "epoch": 0.09625304136253042, | |
| "grad_norm": 1.7412713212065478, | |
| "learning_rate": 9.885566751220144e-06, | |
| "loss": 0.6238, | |
| "step": 989 | |
| }, | |
| { | |
| "epoch": 0.09635036496350365, | |
| "grad_norm": 1.4558500764026314, | |
| "learning_rate": 9.885231229841675e-06, | |
| "loss": 0.5033, | |
| "step": 990 | |
| }, | |
| { | |
| "epoch": 0.09644768856447689, | |
| "grad_norm": 1.5722863237428275, | |
| "learning_rate": 9.884895223014772e-06, | |
| "loss": 0.3026, | |
| "step": 991 | |
| }, | |
| { | |
| "epoch": 0.09654501216545013, | |
| "grad_norm": 1.7850396516814273, | |
| "learning_rate": 9.88455873077283e-06, | |
| "loss": 0.6797, | |
| "step": 992 | |
| }, | |
| { | |
| "epoch": 0.09664233576642335, | |
| "grad_norm": 1.5907642595826164, | |
| "learning_rate": 9.884221753149286e-06, | |
| "loss": 0.5051, | |
| "step": 993 | |
| }, | |
| { | |
| "epoch": 0.09673965936739659, | |
| "grad_norm": 1.383326117178851, | |
| "learning_rate": 9.883884290177623e-06, | |
| "loss": 0.394, | |
| "step": 994 | |
| }, | |
| { | |
| "epoch": 0.09683698296836983, | |
| "grad_norm": 1.5330791836349085, | |
| "learning_rate": 9.883546341891375e-06, | |
| "loss": 0.4531, | |
| "step": 995 | |
| }, | |
| { | |
| "epoch": 0.09693430656934307, | |
| "grad_norm": 1.3858453283442664, | |
| "learning_rate": 9.883207908324126e-06, | |
| "loss": 0.4674, | |
| "step": 996 | |
| }, | |
| { | |
| "epoch": 0.0970316301703163, | |
| "grad_norm": 1.2633519423598012, | |
| "learning_rate": 9.882868989509507e-06, | |
| "loss": 0.3053, | |
| "step": 997 | |
| }, | |
| { | |
| "epoch": 0.09712895377128954, | |
| "grad_norm": 1.5725755469000553, | |
| "learning_rate": 9.882529585481194e-06, | |
| "loss": 0.5382, | |
| "step": 998 | |
| }, | |
| { | |
| "epoch": 0.09722627737226278, | |
| "grad_norm": 1.594807816051373, | |
| "learning_rate": 9.882189696272916e-06, | |
| "loss": 0.5027, | |
| "step": 999 | |
| }, | |
| { | |
| "epoch": 0.09732360097323602, | |
| "grad_norm": 1.7855937930735857, | |
| "learning_rate": 9.881849321918446e-06, | |
| "loss": 0.6336, | |
| "step": 1000 | |
| }, | |
| { | |
| "epoch": 0.09742092457420924, | |
| "grad_norm": 1.8161736452208326, | |
| "learning_rate": 9.88150846245161e-06, | |
| "loss": 0.5432, | |
| "step": 1001 | |
| }, | |
| { | |
| "epoch": 0.09751824817518248, | |
| "grad_norm": 1.2323791206307224, | |
| "learning_rate": 9.881167117906276e-06, | |
| "loss": 0.3361, | |
| "step": 1002 | |
| }, | |
| { | |
| "epoch": 0.09761557177615572, | |
| "grad_norm": 1.6720448345305876, | |
| "learning_rate": 9.880825288316367e-06, | |
| "loss": 0.3583, | |
| "step": 1003 | |
| }, | |
| { | |
| "epoch": 0.09771289537712895, | |
| "grad_norm": 1.408364549926656, | |
| "learning_rate": 9.880482973715846e-06, | |
| "loss": 0.3847, | |
| "step": 1004 | |
| }, | |
| { | |
| "epoch": 0.09781021897810219, | |
| "grad_norm": 1.493256031544701, | |
| "learning_rate": 9.880140174138735e-06, | |
| "loss": 0.3611, | |
| "step": 1005 | |
| }, | |
| { | |
| "epoch": 0.09790754257907543, | |
| "grad_norm": 1.3658283125944337, | |
| "learning_rate": 9.879796889619093e-06, | |
| "loss": 0.3555, | |
| "step": 1006 | |
| }, | |
| { | |
| "epoch": 0.09800486618004867, | |
| "grad_norm": 1.7346143127846696, | |
| "learning_rate": 9.879453120191037e-06, | |
| "loss": 0.5028, | |
| "step": 1007 | |
| }, | |
| { | |
| "epoch": 0.09810218978102189, | |
| "grad_norm": 1.9094090784905724, | |
| "learning_rate": 9.879108865888724e-06, | |
| "loss": 0.4799, | |
| "step": 1008 | |
| }, | |
| { | |
| "epoch": 0.09819951338199513, | |
| "grad_norm": 1.1235415223499565, | |
| "learning_rate": 9.878764126746364e-06, | |
| "loss": 0.2181, | |
| "step": 1009 | |
| }, | |
| { | |
| "epoch": 0.09829683698296837, | |
| "grad_norm": 1.494557121918356, | |
| "learning_rate": 9.878418902798215e-06, | |
| "loss": 0.4548, | |
| "step": 1010 | |
| }, | |
| { | |
| "epoch": 0.09839416058394161, | |
| "grad_norm": 1.5340021274706077, | |
| "learning_rate": 9.87807319407858e-06, | |
| "loss": 0.4952, | |
| "step": 1011 | |
| }, | |
| { | |
| "epoch": 0.09849148418491484, | |
| "grad_norm": 1.2523545024978981, | |
| "learning_rate": 9.877727000621815e-06, | |
| "loss": 0.2887, | |
| "step": 1012 | |
| }, | |
| { | |
| "epoch": 0.09858880778588808, | |
| "grad_norm": 1.424446798325285, | |
| "learning_rate": 9.877380322462317e-06, | |
| "loss": 0.3628, | |
| "step": 1013 | |
| }, | |
| { | |
| "epoch": 0.09868613138686132, | |
| "grad_norm": 1.6382574528105933, | |
| "learning_rate": 9.877033159634542e-06, | |
| "loss": 0.5396, | |
| "step": 1014 | |
| }, | |
| { | |
| "epoch": 0.09878345498783454, | |
| "grad_norm": 1.544256440771578, | |
| "learning_rate": 9.876685512172982e-06, | |
| "loss": 0.4031, | |
| "step": 1015 | |
| }, | |
| { | |
| "epoch": 0.09888077858880778, | |
| "grad_norm": 1.620162733287423, | |
| "learning_rate": 9.876337380112185e-06, | |
| "loss": 0.4925, | |
| "step": 1016 | |
| }, | |
| { | |
| "epoch": 0.09897810218978102, | |
| "grad_norm": 1.6140460771461889, | |
| "learning_rate": 9.875988763486746e-06, | |
| "loss": 0.5549, | |
| "step": 1017 | |
| }, | |
| { | |
| "epoch": 0.09907542579075426, | |
| "grad_norm": 1.6187864498320685, | |
| "learning_rate": 9.875639662331307e-06, | |
| "loss": 0.5034, | |
| "step": 1018 | |
| }, | |
| { | |
| "epoch": 0.09917274939172749, | |
| "grad_norm": 1.249422512171971, | |
| "learning_rate": 9.875290076680557e-06, | |
| "loss": 0.236, | |
| "step": 1019 | |
| }, | |
| { | |
| "epoch": 0.09927007299270073, | |
| "grad_norm": 1.5835572971087337, | |
| "learning_rate": 9.874940006569236e-06, | |
| "loss": 0.5309, | |
| "step": 1020 | |
| }, | |
| { | |
| "epoch": 0.09936739659367397, | |
| "grad_norm": 0.8658795502351594, | |
| "learning_rate": 9.874589452032131e-06, | |
| "loss": 0.1911, | |
| "step": 1021 | |
| }, | |
| { | |
| "epoch": 0.09946472019464721, | |
| "grad_norm": 1.3171385587421753, | |
| "learning_rate": 9.874238413104076e-06, | |
| "loss": 0.3486, | |
| "step": 1022 | |
| }, | |
| { | |
| "epoch": 0.09956204379562043, | |
| "grad_norm": 1.4498439375980756, | |
| "learning_rate": 9.873886889819953e-06, | |
| "loss": 0.1986, | |
| "step": 1023 | |
| }, | |
| { | |
| "epoch": 0.09965936739659367, | |
| "grad_norm": 1.5991307847988792, | |
| "learning_rate": 9.873534882214692e-06, | |
| "loss": 0.6397, | |
| "step": 1024 | |
| }, | |
| { | |
| "epoch": 0.09975669099756691, | |
| "grad_norm": 1.6135151765084201, | |
| "learning_rate": 9.873182390323277e-06, | |
| "loss": 0.4338, | |
| "step": 1025 | |
| }, | |
| { | |
| "epoch": 0.09985401459854015, | |
| "grad_norm": 1.465261170994732, | |
| "learning_rate": 9.872829414180733e-06, | |
| "loss": 0.4692, | |
| "step": 1026 | |
| }, | |
| { | |
| "epoch": 0.09995133819951338, | |
| "grad_norm": 1.6964068418559575, | |
| "learning_rate": 9.872475953822134e-06, | |
| "loss": 0.4763, | |
| "step": 1027 | |
| }, | |
| { | |
| "epoch": 0.10004866180048662, | |
| "grad_norm": 1.5209137969308788, | |
| "learning_rate": 9.872122009282604e-06, | |
| "loss": 0.4266, | |
| "step": 1028 | |
| }, | |
| { | |
| "epoch": 0.10014598540145986, | |
| "grad_norm": 1.4495568716439686, | |
| "learning_rate": 9.871767580597316e-06, | |
| "loss": 0.4087, | |
| "step": 1029 | |
| }, | |
| { | |
| "epoch": 0.10024330900243308, | |
| "grad_norm": 1.344434785457905, | |
| "learning_rate": 9.871412667801488e-06, | |
| "loss": 0.3797, | |
| "step": 1030 | |
| } | |
| ], | |
| "logging_steps": 1.0, | |
| "max_steps": 10275, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 1, | |
| "save_steps": 515, | |
| "total_flos": 131710810677248.0, | |
| "train_batch_size": 2, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |