| { | |
| "best_metric": null, | |
| "best_model_checkpoint": null, | |
| "epoch": 2.994263862332696, | |
| "eval_steps": 500, | |
| "global_step": 522, | |
| "is_hyper_param_search": false, | |
| "is_local_process_zero": true, | |
| "is_world_process_zero": true, | |
| "log_history": [ | |
| { | |
| "epoch": 0.0057361376673040155, | |
| "grad_norm": 5.963601607855415, | |
| "learning_rate": 3.773584905660378e-07, | |
| "loss": 0.8872, | |
| "step": 1 | |
| }, | |
| { | |
| "epoch": 0.011472275334608031, | |
| "grad_norm": 6.333703884001195, | |
| "learning_rate": 7.547169811320755e-07, | |
| "loss": 0.9165, | |
| "step": 2 | |
| }, | |
| { | |
| "epoch": 0.017208413001912046, | |
| "grad_norm": 6.257855070229476, | |
| "learning_rate": 1.1320754716981133e-06, | |
| "loss": 0.9418, | |
| "step": 3 | |
| }, | |
| { | |
| "epoch": 0.022944550669216062, | |
| "grad_norm": 5.956524172527457, | |
| "learning_rate": 1.509433962264151e-06, | |
| "loss": 0.9044, | |
| "step": 4 | |
| }, | |
| { | |
| "epoch": 0.028680688336520075, | |
| "grad_norm": 5.705771711698959, | |
| "learning_rate": 1.8867924528301889e-06, | |
| "loss": 0.8938, | |
| "step": 5 | |
| }, | |
| { | |
| "epoch": 0.03441682600382409, | |
| "grad_norm": 5.8492677879174275, | |
| "learning_rate": 2.2641509433962266e-06, | |
| "loss": 0.8815, | |
| "step": 6 | |
| }, | |
| { | |
| "epoch": 0.040152963671128104, | |
| "grad_norm": 4.727616456076574, | |
| "learning_rate": 2.6415094339622644e-06, | |
| "loss": 0.8718, | |
| "step": 7 | |
| }, | |
| { | |
| "epoch": 0.045889101338432124, | |
| "grad_norm": 2.6576351181409055, | |
| "learning_rate": 3.018867924528302e-06, | |
| "loss": 0.8163, | |
| "step": 8 | |
| }, | |
| { | |
| "epoch": 0.05162523900573614, | |
| "grad_norm": 2.444013780371823, | |
| "learning_rate": 3.3962264150943395e-06, | |
| "loss": 0.7805, | |
| "step": 9 | |
| }, | |
| { | |
| "epoch": 0.05736137667304015, | |
| "grad_norm": 2.190871953398052, | |
| "learning_rate": 3.7735849056603777e-06, | |
| "loss": 0.8185, | |
| "step": 10 | |
| }, | |
| { | |
| "epoch": 0.06309751434034416, | |
| "grad_norm": 3.5229712196521215, | |
| "learning_rate": 4.150943396226416e-06, | |
| "loss": 0.7837, | |
| "step": 11 | |
| }, | |
| { | |
| "epoch": 0.06883365200764818, | |
| "grad_norm": 3.726026713435741, | |
| "learning_rate": 4.528301886792453e-06, | |
| "loss": 0.733, | |
| "step": 12 | |
| }, | |
| { | |
| "epoch": 0.0745697896749522, | |
| "grad_norm": 4.108103957749213, | |
| "learning_rate": 4.905660377358491e-06, | |
| "loss": 0.7995, | |
| "step": 13 | |
| }, | |
| { | |
| "epoch": 0.08030592734225621, | |
| "grad_norm": 3.669645529053506, | |
| "learning_rate": 5.283018867924529e-06, | |
| "loss": 0.7584, | |
| "step": 14 | |
| }, | |
| { | |
| "epoch": 0.08604206500956023, | |
| "grad_norm": 2.952547188045258, | |
| "learning_rate": 5.660377358490566e-06, | |
| "loss": 0.7447, | |
| "step": 15 | |
| }, | |
| { | |
| "epoch": 0.09177820267686425, | |
| "grad_norm": 2.398455043486125, | |
| "learning_rate": 6.037735849056604e-06, | |
| "loss": 0.74, | |
| "step": 16 | |
| }, | |
| { | |
| "epoch": 0.09751434034416825, | |
| "grad_norm": 1.829688656727974, | |
| "learning_rate": 6.415094339622642e-06, | |
| "loss": 0.7052, | |
| "step": 17 | |
| }, | |
| { | |
| "epoch": 0.10325047801147227, | |
| "grad_norm": 1.7012819538641355, | |
| "learning_rate": 6.792452830188679e-06, | |
| "loss": 0.7006, | |
| "step": 18 | |
| }, | |
| { | |
| "epoch": 0.1089866156787763, | |
| "grad_norm": 1.5287783620949145, | |
| "learning_rate": 7.169811320754717e-06, | |
| "loss": 0.6299, | |
| "step": 19 | |
| }, | |
| { | |
| "epoch": 0.1147227533460803, | |
| "grad_norm": 1.6890797388961225, | |
| "learning_rate": 7.5471698113207555e-06, | |
| "loss": 0.6694, | |
| "step": 20 | |
| }, | |
| { | |
| "epoch": 0.12045889101338432, | |
| "grad_norm": 1.5848500020422387, | |
| "learning_rate": 7.924528301886793e-06, | |
| "loss": 0.6481, | |
| "step": 21 | |
| }, | |
| { | |
| "epoch": 0.12619502868068833, | |
| "grad_norm": 1.2651922200399228, | |
| "learning_rate": 8.301886792452832e-06, | |
| "loss": 0.643, | |
| "step": 22 | |
| }, | |
| { | |
| "epoch": 0.13193116634799235, | |
| "grad_norm": 1.1704521558620569, | |
| "learning_rate": 8.67924528301887e-06, | |
| "loss": 0.6076, | |
| "step": 23 | |
| }, | |
| { | |
| "epoch": 0.13766730401529637, | |
| "grad_norm": 1.211023371219137, | |
| "learning_rate": 9.056603773584907e-06, | |
| "loss": 0.6383, | |
| "step": 24 | |
| }, | |
| { | |
| "epoch": 0.14340344168260039, | |
| "grad_norm": 1.0419000696186218, | |
| "learning_rate": 9.433962264150944e-06, | |
| "loss": 0.6244, | |
| "step": 25 | |
| }, | |
| { | |
| "epoch": 0.1491395793499044, | |
| "grad_norm": 1.0173864861869972, | |
| "learning_rate": 9.811320754716981e-06, | |
| "loss": 0.6012, | |
| "step": 26 | |
| }, | |
| { | |
| "epoch": 0.15487571701720843, | |
| "grad_norm": 1.1096264531149542, | |
| "learning_rate": 1.018867924528302e-05, | |
| "loss": 0.6421, | |
| "step": 27 | |
| }, | |
| { | |
| "epoch": 0.16061185468451242, | |
| "grad_norm": 0.9517151993809669, | |
| "learning_rate": 1.0566037735849058e-05, | |
| "loss": 0.6205, | |
| "step": 28 | |
| }, | |
| { | |
| "epoch": 0.16634799235181644, | |
| "grad_norm": 0.7532883579581415, | |
| "learning_rate": 1.0943396226415095e-05, | |
| "loss": 0.6032, | |
| "step": 29 | |
| }, | |
| { | |
| "epoch": 0.17208413001912046, | |
| "grad_norm": 0.8143750313574308, | |
| "learning_rate": 1.1320754716981132e-05, | |
| "loss": 0.6013, | |
| "step": 30 | |
| }, | |
| { | |
| "epoch": 0.17782026768642448, | |
| "grad_norm": 0.8204477456168644, | |
| "learning_rate": 1.169811320754717e-05, | |
| "loss": 0.5721, | |
| "step": 31 | |
| }, | |
| { | |
| "epoch": 0.1835564053537285, | |
| "grad_norm": 0.7275017869334657, | |
| "learning_rate": 1.2075471698113209e-05, | |
| "loss": 0.5571, | |
| "step": 32 | |
| }, | |
| { | |
| "epoch": 0.18929254302103252, | |
| "grad_norm": 0.6610654194161306, | |
| "learning_rate": 1.2452830188679246e-05, | |
| "loss": 0.5833, | |
| "step": 33 | |
| }, | |
| { | |
| "epoch": 0.1950286806883365, | |
| "grad_norm": 0.6495136985020462, | |
| "learning_rate": 1.2830188679245283e-05, | |
| "loss": 0.6104, | |
| "step": 34 | |
| }, | |
| { | |
| "epoch": 0.20076481835564053, | |
| "grad_norm": 0.6949029178393563, | |
| "learning_rate": 1.320754716981132e-05, | |
| "loss": 0.5837, | |
| "step": 35 | |
| }, | |
| { | |
| "epoch": 0.20650095602294455, | |
| "grad_norm": 0.6489044947838399, | |
| "learning_rate": 1.3584905660377358e-05, | |
| "loss": 0.5722, | |
| "step": 36 | |
| }, | |
| { | |
| "epoch": 0.21223709369024857, | |
| "grad_norm": 0.5408166196721413, | |
| "learning_rate": 1.3962264150943397e-05, | |
| "loss": 0.5711, | |
| "step": 37 | |
| }, | |
| { | |
| "epoch": 0.2179732313575526, | |
| "grad_norm": 0.7281447634302773, | |
| "learning_rate": 1.4339622641509435e-05, | |
| "loss": 0.5892, | |
| "step": 38 | |
| }, | |
| { | |
| "epoch": 0.2237093690248566, | |
| "grad_norm": 0.6166840085962729, | |
| "learning_rate": 1.4716981132075472e-05, | |
| "loss": 0.5924, | |
| "step": 39 | |
| }, | |
| { | |
| "epoch": 0.2294455066921606, | |
| "grad_norm": 0.6040727735751141, | |
| "learning_rate": 1.5094339622641511e-05, | |
| "loss": 0.5446, | |
| "step": 40 | |
| }, | |
| { | |
| "epoch": 0.23518164435946462, | |
| "grad_norm": 0.6273113840367965, | |
| "learning_rate": 1.547169811320755e-05, | |
| "loss": 0.5982, | |
| "step": 41 | |
| }, | |
| { | |
| "epoch": 0.24091778202676864, | |
| "grad_norm": 0.6363659081534939, | |
| "learning_rate": 1.5849056603773586e-05, | |
| "loss": 0.5336, | |
| "step": 42 | |
| }, | |
| { | |
| "epoch": 0.24665391969407266, | |
| "grad_norm": 0.6615133633303231, | |
| "learning_rate": 1.6226415094339625e-05, | |
| "loss": 0.5879, | |
| "step": 43 | |
| }, | |
| { | |
| "epoch": 0.25239005736137665, | |
| "grad_norm": 0.8414155664286366, | |
| "learning_rate": 1.6603773584905664e-05, | |
| "loss": 0.5854, | |
| "step": 44 | |
| }, | |
| { | |
| "epoch": 0.25812619502868067, | |
| "grad_norm": 0.6770822965707434, | |
| "learning_rate": 1.69811320754717e-05, | |
| "loss": 0.5684, | |
| "step": 45 | |
| }, | |
| { | |
| "epoch": 0.2638623326959847, | |
| "grad_norm": 0.562245948225568, | |
| "learning_rate": 1.735849056603774e-05, | |
| "loss": 0.5275, | |
| "step": 46 | |
| }, | |
| { | |
| "epoch": 0.2695984703632887, | |
| "grad_norm": 0.7981141857389045, | |
| "learning_rate": 1.7735849056603774e-05, | |
| "loss": 0.5689, | |
| "step": 47 | |
| }, | |
| { | |
| "epoch": 0.27533460803059273, | |
| "grad_norm": 0.5837129815497581, | |
| "learning_rate": 1.8113207547169813e-05, | |
| "loss": 0.5544, | |
| "step": 48 | |
| }, | |
| { | |
| "epoch": 0.28107074569789675, | |
| "grad_norm": 0.6069344726727888, | |
| "learning_rate": 1.8490566037735852e-05, | |
| "loss": 0.5788, | |
| "step": 49 | |
| }, | |
| { | |
| "epoch": 0.28680688336520077, | |
| "grad_norm": 0.5964572258571391, | |
| "learning_rate": 1.8867924528301888e-05, | |
| "loss": 0.5352, | |
| "step": 50 | |
| }, | |
| { | |
| "epoch": 0.2925430210325048, | |
| "grad_norm": 4.690516652663125, | |
| "learning_rate": 1.9245283018867927e-05, | |
| "loss": 0.5248, | |
| "step": 51 | |
| }, | |
| { | |
| "epoch": 0.2982791586998088, | |
| "grad_norm": 0.8872735078180753, | |
| "learning_rate": 1.9622641509433963e-05, | |
| "loss": 0.5665, | |
| "step": 52 | |
| }, | |
| { | |
| "epoch": 0.30401529636711283, | |
| "grad_norm": 0.5723834462499225, | |
| "learning_rate": 2e-05, | |
| "loss": 0.5539, | |
| "step": 53 | |
| }, | |
| { | |
| "epoch": 0.30975143403441685, | |
| "grad_norm": 0.6524504508920996, | |
| "learning_rate": 1.999977565187699e-05, | |
| "loss": 0.5232, | |
| "step": 54 | |
| }, | |
| { | |
| "epoch": 0.3154875717017208, | |
| "grad_norm": 0.6468217495098721, | |
| "learning_rate": 1.9999102617574366e-05, | |
| "loss": 0.5387, | |
| "step": 55 | |
| }, | |
| { | |
| "epoch": 0.32122370936902483, | |
| "grad_norm": 0.6744222328381954, | |
| "learning_rate": 1.9997980927290928e-05, | |
| "loss": 0.5461, | |
| "step": 56 | |
| }, | |
| { | |
| "epoch": 0.32695984703632885, | |
| "grad_norm": 0.7945647096558187, | |
| "learning_rate": 1.9996410631356496e-05, | |
| "loss": 0.5501, | |
| "step": 57 | |
| }, | |
| { | |
| "epoch": 0.3326959847036329, | |
| "grad_norm": 0.583537979504518, | |
| "learning_rate": 1.9994391800229666e-05, | |
| "loss": 0.5475, | |
| "step": 58 | |
| }, | |
| { | |
| "epoch": 0.3384321223709369, | |
| "grad_norm": 0.7688713046343357, | |
| "learning_rate": 1.999192452449463e-05, | |
| "loss": 0.5341, | |
| "step": 59 | |
| }, | |
| { | |
| "epoch": 0.3441682600382409, | |
| "grad_norm": 0.6225461236118923, | |
| "learning_rate": 1.9989008914857115e-05, | |
| "loss": 0.5215, | |
| "step": 60 | |
| }, | |
| { | |
| "epoch": 0.34990439770554493, | |
| "grad_norm": 0.7556986010825483, | |
| "learning_rate": 1.998564510213944e-05, | |
| "loss": 0.5485, | |
| "step": 61 | |
| }, | |
| { | |
| "epoch": 0.35564053537284895, | |
| "grad_norm": 0.6466498188523241, | |
| "learning_rate": 1.998183323727462e-05, | |
| "loss": 0.5472, | |
| "step": 62 | |
| }, | |
| { | |
| "epoch": 0.361376673040153, | |
| "grad_norm": 0.6498839386655383, | |
| "learning_rate": 1.9977573491299597e-05, | |
| "loss": 0.5458, | |
| "step": 63 | |
| }, | |
| { | |
| "epoch": 0.367112810707457, | |
| "grad_norm": 0.7893514486845328, | |
| "learning_rate": 1.9972866055347572e-05, | |
| "loss": 0.5268, | |
| "step": 64 | |
| }, | |
| { | |
| "epoch": 0.372848948374761, | |
| "grad_norm": 0.7956885248224083, | |
| "learning_rate": 1.996771114063943e-05, | |
| "loss": 0.5388, | |
| "step": 65 | |
| }, | |
| { | |
| "epoch": 0.37858508604206503, | |
| "grad_norm": 0.8314332190289494, | |
| "learning_rate": 1.9962108978474265e-05, | |
| "loss": 0.5431, | |
| "step": 66 | |
| }, | |
| { | |
| "epoch": 0.384321223709369, | |
| "grad_norm": 0.8426823703479597, | |
| "learning_rate": 1.9956059820218982e-05, | |
| "loss": 0.5151, | |
| "step": 67 | |
| }, | |
| { | |
| "epoch": 0.390057361376673, | |
| "grad_norm": 0.7757061162171306, | |
| "learning_rate": 1.9949563937297045e-05, | |
| "loss": 0.5268, | |
| "step": 68 | |
| }, | |
| { | |
| "epoch": 0.39579349904397704, | |
| "grad_norm": 2.4653347776714294, | |
| "learning_rate": 1.9942621621176283e-05, | |
| "loss": 0.5349, | |
| "step": 69 | |
| }, | |
| { | |
| "epoch": 0.40152963671128106, | |
| "grad_norm": 0.8709833395015452, | |
| "learning_rate": 1.993523318335581e-05, | |
| "loss": 0.4825, | |
| "step": 70 | |
| }, | |
| { | |
| "epoch": 0.4072657743785851, | |
| "grad_norm": 9.961649749835258, | |
| "learning_rate": 1.9927398955352062e-05, | |
| "loss": 0.5363, | |
| "step": 71 | |
| }, | |
| { | |
| "epoch": 0.4130019120458891, | |
| "grad_norm": 0.9561541581658524, | |
| "learning_rate": 1.9919119288683908e-05, | |
| "loss": 0.504, | |
| "step": 72 | |
| }, | |
| { | |
| "epoch": 0.4187380497131931, | |
| "grad_norm": 0.680622575434205, | |
| "learning_rate": 1.991039455485688e-05, | |
| "loss": 0.5364, | |
| "step": 73 | |
| }, | |
| { | |
| "epoch": 0.42447418738049714, | |
| "grad_norm": 0.8693452396686029, | |
| "learning_rate": 1.990122514534651e-05, | |
| "loss": 0.5331, | |
| "step": 74 | |
| }, | |
| { | |
| "epoch": 0.43021032504780116, | |
| "grad_norm": 0.7743910198858713, | |
| "learning_rate": 1.9891611471580767e-05, | |
| "loss": 0.5496, | |
| "step": 75 | |
| }, | |
| { | |
| "epoch": 0.4359464627151052, | |
| "grad_norm": 0.8765835472970227, | |
| "learning_rate": 1.9881553964921574e-05, | |
| "loss": 0.5313, | |
| "step": 76 | |
| }, | |
| { | |
| "epoch": 0.4416826003824092, | |
| "grad_norm": 0.7689840181875675, | |
| "learning_rate": 1.987105307664549e-05, | |
| "loss": 0.544, | |
| "step": 77 | |
| }, | |
| { | |
| "epoch": 0.4474187380497132, | |
| "grad_norm": 0.6520437505199477, | |
| "learning_rate": 1.9860109277923417e-05, | |
| "loss": 0.5314, | |
| "step": 78 | |
| }, | |
| { | |
| "epoch": 0.45315487571701724, | |
| "grad_norm": 0.8673404414655712, | |
| "learning_rate": 1.9848723059799508e-05, | |
| "loss": 0.5429, | |
| "step": 79 | |
| }, | |
| { | |
| "epoch": 0.4588910133843212, | |
| "grad_norm": 0.6109886586522215, | |
| "learning_rate": 1.983689493316909e-05, | |
| "loss": 0.5223, | |
| "step": 80 | |
| }, | |
| { | |
| "epoch": 0.4646271510516252, | |
| "grad_norm": 0.7103480782598592, | |
| "learning_rate": 1.982462542875576e-05, | |
| "loss": 0.5277, | |
| "step": 81 | |
| }, | |
| { | |
| "epoch": 0.47036328871892924, | |
| "grad_norm": 0.6705871728122585, | |
| "learning_rate": 1.9811915097087587e-05, | |
| "loss": 0.5496, | |
| "step": 82 | |
| }, | |
| { | |
| "epoch": 0.47609942638623326, | |
| "grad_norm": 0.7785921338601758, | |
| "learning_rate": 1.9798764508472373e-05, | |
| "loss": 0.5234, | |
| "step": 83 | |
| }, | |
| { | |
| "epoch": 0.4818355640535373, | |
| "grad_norm": 0.7439955422623991, | |
| "learning_rate": 1.9785174252972092e-05, | |
| "loss": 0.5361, | |
| "step": 84 | |
| }, | |
| { | |
| "epoch": 0.4875717017208413, | |
| "grad_norm": 0.797568241880013, | |
| "learning_rate": 1.9771144940376413e-05, | |
| "loss": 0.5214, | |
| "step": 85 | |
| }, | |
| { | |
| "epoch": 0.4933078393881453, | |
| "grad_norm": 0.6230345386937433, | |
| "learning_rate": 1.9756677200175316e-05, | |
| "loss": 0.5261, | |
| "step": 86 | |
| }, | |
| { | |
| "epoch": 0.49904397705544934, | |
| "grad_norm": 0.7261095628950723, | |
| "learning_rate": 1.974177168153088e-05, | |
| "loss": 0.5485, | |
| "step": 87 | |
| }, | |
| { | |
| "epoch": 0.5047801147227533, | |
| "grad_norm": 0.5411293935713085, | |
| "learning_rate": 1.972642905324813e-05, | |
| "loss": 0.5281, | |
| "step": 88 | |
| }, | |
| { | |
| "epoch": 0.5105162523900574, | |
| "grad_norm": 0.6298816468045366, | |
| "learning_rate": 1.971065000374504e-05, | |
| "loss": 0.5372, | |
| "step": 89 | |
| }, | |
| { | |
| "epoch": 0.5162523900573613, | |
| "grad_norm": 0.4908410042540129, | |
| "learning_rate": 1.969443524102163e-05, | |
| "loss": 0.5429, | |
| "step": 90 | |
| }, | |
| { | |
| "epoch": 0.5219885277246654, | |
| "grad_norm": 0.6557029434687354, | |
| "learning_rate": 1.967778549262822e-05, | |
| "loss": 0.497, | |
| "step": 91 | |
| }, | |
| { | |
| "epoch": 0.5277246653919694, | |
| "grad_norm": 0.5620880029295425, | |
| "learning_rate": 1.9660701505632773e-05, | |
| "loss": 0.514, | |
| "step": 92 | |
| }, | |
| { | |
| "epoch": 0.5334608030592735, | |
| "grad_norm": 0.6884951285963853, | |
| "learning_rate": 1.9643184046587373e-05, | |
| "loss": 0.5239, | |
| "step": 93 | |
| }, | |
| { | |
| "epoch": 0.5391969407265774, | |
| "grad_norm": 0.6604434604547247, | |
| "learning_rate": 1.962523390149382e-05, | |
| "loss": 0.5207, | |
| "step": 94 | |
| }, | |
| { | |
| "epoch": 0.5449330783938815, | |
| "grad_norm": 0.7336652529710372, | |
| "learning_rate": 1.9606851875768404e-05, | |
| "loss": 0.5331, | |
| "step": 95 | |
| }, | |
| { | |
| "epoch": 0.5506692160611855, | |
| "grad_norm": 0.6669732222358924, | |
| "learning_rate": 1.9588038794205705e-05, | |
| "loss": 0.5323, | |
| "step": 96 | |
| }, | |
| { | |
| "epoch": 0.5564053537284895, | |
| "grad_norm": 0.6137757201982459, | |
| "learning_rate": 1.9568795500941635e-05, | |
| "loss": 0.5023, | |
| "step": 97 | |
| }, | |
| { | |
| "epoch": 0.5621414913957935, | |
| "grad_norm": 0.6272322569320127, | |
| "learning_rate": 1.9549122859415538e-05, | |
| "loss": 0.5576, | |
| "step": 98 | |
| }, | |
| { | |
| "epoch": 0.5678776290630975, | |
| "grad_norm": 0.5620575180386782, | |
| "learning_rate": 1.9529021752331455e-05, | |
| "loss": 0.5186, | |
| "step": 99 | |
| }, | |
| { | |
| "epoch": 0.5736137667304015, | |
| "grad_norm": 0.5651887008267796, | |
| "learning_rate": 1.9508493081618515e-05, | |
| "loss": 0.5105, | |
| "step": 100 | |
| }, | |
| { | |
| "epoch": 0.5793499043977055, | |
| "grad_norm": 0.5618076420744421, | |
| "learning_rate": 1.9487537768390465e-05, | |
| "loss": 0.5406, | |
| "step": 101 | |
| }, | |
| { | |
| "epoch": 0.5850860420650096, | |
| "grad_norm": 0.7480174415686112, | |
| "learning_rate": 1.9466156752904344e-05, | |
| "loss": 0.5351, | |
| "step": 102 | |
| }, | |
| { | |
| "epoch": 0.5908221797323135, | |
| "grad_norm": 0.507106651617815, | |
| "learning_rate": 1.944435099451829e-05, | |
| "loss": 0.5266, | |
| "step": 103 | |
| }, | |
| { | |
| "epoch": 0.5965583173996176, | |
| "grad_norm": 0.7081327554157436, | |
| "learning_rate": 1.94221214716485e-05, | |
| "loss": 0.5181, | |
| "step": 104 | |
| }, | |
| { | |
| "epoch": 0.6022944550669216, | |
| "grad_norm": 0.5232017157016485, | |
| "learning_rate": 1.939946918172531e-05, | |
| "loss": 0.5114, | |
| "step": 105 | |
| }, | |
| { | |
| "epoch": 0.6080305927342257, | |
| "grad_norm": 0.5400711534025171, | |
| "learning_rate": 1.9376395141148475e-05, | |
| "loss": 0.538, | |
| "step": 106 | |
| }, | |
| { | |
| "epoch": 0.6137667304015296, | |
| "grad_norm": 0.5469717806295151, | |
| "learning_rate": 1.9352900385241534e-05, | |
| "loss": 0.5237, | |
| "step": 107 | |
| }, | |
| { | |
| "epoch": 0.6195028680688337, | |
| "grad_norm": 0.44841617252688876, | |
| "learning_rate": 1.932898596820536e-05, | |
| "loss": 0.5315, | |
| "step": 108 | |
| }, | |
| { | |
| "epoch": 0.6252390057361377, | |
| "grad_norm": 0.49862802027395625, | |
| "learning_rate": 1.9304652963070868e-05, | |
| "loss": 0.4925, | |
| "step": 109 | |
| }, | |
| { | |
| "epoch": 0.6309751434034416, | |
| "grad_norm": 0.5410083146463058, | |
| "learning_rate": 1.9279902461650866e-05, | |
| "loss": 0.5355, | |
| "step": 110 | |
| }, | |
| { | |
| "epoch": 0.6367112810707457, | |
| "grad_norm": 0.529469928044582, | |
| "learning_rate": 1.925473557449106e-05, | |
| "loss": 0.5368, | |
| "step": 111 | |
| }, | |
| { | |
| "epoch": 0.6424474187380497, | |
| "grad_norm": 0.5510618931046878, | |
| "learning_rate": 1.9229153430820232e-05, | |
| "loss": 0.5431, | |
| "step": 112 | |
| }, | |
| { | |
| "epoch": 0.6481835564053537, | |
| "grad_norm": 0.5320575572101697, | |
| "learning_rate": 1.920315717849956e-05, | |
| "loss": 0.5043, | |
| "step": 113 | |
| }, | |
| { | |
| "epoch": 0.6539196940726577, | |
| "grad_norm": 0.573924225545293, | |
| "learning_rate": 1.917674798397113e-05, | |
| "loss": 0.5187, | |
| "step": 114 | |
| }, | |
| { | |
| "epoch": 0.6596558317399618, | |
| "grad_norm": 0.5581615340473121, | |
| "learning_rate": 1.914992703220559e-05, | |
| "loss": 0.5333, | |
| "step": 115 | |
| }, | |
| { | |
| "epoch": 0.6653919694072657, | |
| "grad_norm": 0.5234992447081256, | |
| "learning_rate": 1.9122695526648968e-05, | |
| "loss": 0.4942, | |
| "step": 116 | |
| }, | |
| { | |
| "epoch": 0.6711281070745698, | |
| "grad_norm": 0.5885953422973085, | |
| "learning_rate": 1.9095054689168707e-05, | |
| "loss": 0.5195, | |
| "step": 117 | |
| }, | |
| { | |
| "epoch": 0.6768642447418738, | |
| "grad_norm": 0.5297830313866135, | |
| "learning_rate": 1.9067005759998797e-05, | |
| "loss": 0.5423, | |
| "step": 118 | |
| }, | |
| { | |
| "epoch": 0.6826003824091779, | |
| "grad_norm": 0.5428666103723754, | |
| "learning_rate": 1.903854999768417e-05, | |
| "loss": 0.5347, | |
| "step": 119 | |
| }, | |
| { | |
| "epoch": 0.6883365200764818, | |
| "grad_norm": 0.6320535906206065, | |
| "learning_rate": 1.900968867902419e-05, | |
| "loss": 0.5044, | |
| "step": 120 | |
| }, | |
| { | |
| "epoch": 0.6940726577437859, | |
| "grad_norm": 0.5752079115380615, | |
| "learning_rate": 1.8980423099015402e-05, | |
| "loss": 0.5411, | |
| "step": 121 | |
| }, | |
| { | |
| "epoch": 0.6998087954110899, | |
| "grad_norm": 0.5816603517245035, | |
| "learning_rate": 1.8950754570793384e-05, | |
| "loss": 0.4981, | |
| "step": 122 | |
| }, | |
| { | |
| "epoch": 0.7055449330783938, | |
| "grad_norm": 0.5761254539263562, | |
| "learning_rate": 1.8920684425573865e-05, | |
| "loss": 0.5067, | |
| "step": 123 | |
| }, | |
| { | |
| "epoch": 0.7112810707456979, | |
| "grad_norm": 0.5639138995670413, | |
| "learning_rate": 1.8890214012592977e-05, | |
| "loss": 0.5475, | |
| "step": 124 | |
| }, | |
| { | |
| "epoch": 0.7170172084130019, | |
| "grad_norm": 0.5486937201510219, | |
| "learning_rate": 1.88593446990467e-05, | |
| "loss": 0.5377, | |
| "step": 125 | |
| }, | |
| { | |
| "epoch": 0.722753346080306, | |
| "grad_norm": 0.5333390387558036, | |
| "learning_rate": 1.8828077870029554e-05, | |
| "loss": 0.5067, | |
| "step": 126 | |
| }, | |
| { | |
| "epoch": 0.7284894837476099, | |
| "grad_norm": 0.5407197244868929, | |
| "learning_rate": 1.8796414928472417e-05, | |
| "loss": 0.5061, | |
| "step": 127 | |
| }, | |
| { | |
| "epoch": 0.734225621414914, | |
| "grad_norm": 0.5094464024381937, | |
| "learning_rate": 1.876435729507959e-05, | |
| "loss": 0.5222, | |
| "step": 128 | |
| }, | |
| { | |
| "epoch": 0.739961759082218, | |
| "grad_norm": 0.5448472035851418, | |
| "learning_rate": 1.873190640826505e-05, | |
| "loss": 0.5283, | |
| "step": 129 | |
| }, | |
| { | |
| "epoch": 0.745697896749522, | |
| "grad_norm": 0.5221064620617264, | |
| "learning_rate": 1.8699063724087905e-05, | |
| "loss": 0.533, | |
| "step": 130 | |
| }, | |
| { | |
| "epoch": 0.751434034416826, | |
| "grad_norm": 0.594443241675541, | |
| "learning_rate": 1.8665830716187064e-05, | |
| "loss": 0.4986, | |
| "step": 131 | |
| }, | |
| { | |
| "epoch": 0.7571701720841301, | |
| "grad_norm": 0.5508840932918168, | |
| "learning_rate": 1.8632208875715122e-05, | |
| "loss": 0.5052, | |
| "step": 132 | |
| }, | |
| { | |
| "epoch": 0.762906309751434, | |
| "grad_norm": 0.6745209149751551, | |
| "learning_rate": 1.8598199711271433e-05, | |
| "loss": 0.4989, | |
| "step": 133 | |
| }, | |
| { | |
| "epoch": 0.768642447418738, | |
| "grad_norm": 0.5833244098215166, | |
| "learning_rate": 1.856380474883444e-05, | |
| "loss": 0.4998, | |
| "step": 134 | |
| }, | |
| { | |
| "epoch": 0.7743785850860421, | |
| "grad_norm": 0.6164702485478607, | |
| "learning_rate": 1.85290255316932e-05, | |
| "loss": 0.496, | |
| "step": 135 | |
| }, | |
| { | |
| "epoch": 0.780114722753346, | |
| "grad_norm": 0.6451445851922506, | |
| "learning_rate": 1.8493863620378123e-05, | |
| "loss": 0.5092, | |
| "step": 136 | |
| }, | |
| { | |
| "epoch": 0.7858508604206501, | |
| "grad_norm": 0.49991490314972736, | |
| "learning_rate": 1.8458320592590976e-05, | |
| "loss": 0.4953, | |
| "step": 137 | |
| }, | |
| { | |
| "epoch": 0.7915869980879541, | |
| "grad_norm": 0.656124424238937, | |
| "learning_rate": 1.8422398043134068e-05, | |
| "loss": 0.5071, | |
| "step": 138 | |
| }, | |
| { | |
| "epoch": 0.7973231357552581, | |
| "grad_norm": 0.6566346424212979, | |
| "learning_rate": 1.8386097583838714e-05, | |
| "loss": 0.5308, | |
| "step": 139 | |
| }, | |
| { | |
| "epoch": 0.8030592734225621, | |
| "grad_norm": 0.6522233157913299, | |
| "learning_rate": 1.834942084349289e-05, | |
| "loss": 0.5547, | |
| "step": 140 | |
| }, | |
| { | |
| "epoch": 0.8087954110898662, | |
| "grad_norm": 0.6353529825016769, | |
| "learning_rate": 1.8312369467768168e-05, | |
| "loss": 0.5046, | |
| "step": 141 | |
| }, | |
| { | |
| "epoch": 0.8145315487571702, | |
| "grad_norm": 0.6249530767355247, | |
| "learning_rate": 1.827494511914587e-05, | |
| "loss": 0.5352, | |
| "step": 142 | |
| }, | |
| { | |
| "epoch": 0.8202676864244742, | |
| "grad_norm": 0.6385861110529865, | |
| "learning_rate": 1.8237149476842472e-05, | |
| "loss": 0.5029, | |
| "step": 143 | |
| }, | |
| { | |
| "epoch": 0.8260038240917782, | |
| "grad_norm": 0.5607581636914353, | |
| "learning_rate": 1.8198984236734246e-05, | |
| "loss": 0.5287, | |
| "step": 144 | |
| }, | |
| { | |
| "epoch": 0.8317399617590823, | |
| "grad_norm": 0.6261652427463332, | |
| "learning_rate": 1.8160451111281202e-05, | |
| "loss": 0.5114, | |
| "step": 145 | |
| }, | |
| { | |
| "epoch": 0.8374760994263862, | |
| "grad_norm": 0.6528446431728675, | |
| "learning_rate": 1.81215518294502e-05, | |
| "loss": 0.5188, | |
| "step": 146 | |
| }, | |
| { | |
| "epoch": 0.8432122370936902, | |
| "grad_norm": 0.5907898971360636, | |
| "learning_rate": 1.808228813663742e-05, | |
| "loss": 0.5028, | |
| "step": 147 | |
| }, | |
| { | |
| "epoch": 0.8489483747609943, | |
| "grad_norm": 0.6694368765652683, | |
| "learning_rate": 1.8042661794590023e-05, | |
| "loss": 0.5054, | |
| "step": 148 | |
| }, | |
| { | |
| "epoch": 0.8546845124282982, | |
| "grad_norm": 0.596432065572989, | |
| "learning_rate": 1.8002674581327096e-05, | |
| "loss": 0.49, | |
| "step": 149 | |
| }, | |
| { | |
| "epoch": 0.8604206500956023, | |
| "grad_norm": 0.666622846786942, | |
| "learning_rate": 1.7962328291059886e-05, | |
| "loss": 0.5096, | |
| "step": 150 | |
| }, | |
| { | |
| "epoch": 0.8661567877629063, | |
| "grad_norm": 0.673682475957719, | |
| "learning_rate": 1.7921624734111292e-05, | |
| "loss": 0.5218, | |
| "step": 151 | |
| }, | |
| { | |
| "epoch": 0.8718929254302104, | |
| "grad_norm": 0.5462997037842608, | |
| "learning_rate": 1.7880565736834642e-05, | |
| "loss": 0.4693, | |
| "step": 152 | |
| }, | |
| { | |
| "epoch": 0.8776290630975143, | |
| "grad_norm": 0.5908658328573468, | |
| "learning_rate": 1.783915314153172e-05, | |
| "loss": 0.5269, | |
| "step": 153 | |
| }, | |
| { | |
| "epoch": 0.8833652007648184, | |
| "grad_norm": 0.6605932505871055, | |
| "learning_rate": 1.7797388806370132e-05, | |
| "loss": 0.4939, | |
| "step": 154 | |
| }, | |
| { | |
| "epoch": 0.8891013384321224, | |
| "grad_norm": 0.586405871875466, | |
| "learning_rate": 1.7755274605299924e-05, | |
| "loss": 0.512, | |
| "step": 155 | |
| }, | |
| { | |
| "epoch": 0.8948374760994264, | |
| "grad_norm": 0.6667799308198323, | |
| "learning_rate": 1.7712812427969485e-05, | |
| "loss": 0.5177, | |
| "step": 156 | |
| }, | |
| { | |
| "epoch": 0.9005736137667304, | |
| "grad_norm": 0.5579157809901777, | |
| "learning_rate": 1.7670004179640773e-05, | |
| "loss": 0.4961, | |
| "step": 157 | |
| }, | |
| { | |
| "epoch": 0.9063097514340345, | |
| "grad_norm": 0.6568561151022876, | |
| "learning_rate": 1.762685178110382e-05, | |
| "loss": 0.5299, | |
| "step": 158 | |
| }, | |
| { | |
| "epoch": 0.9120458891013384, | |
| "grad_norm": 0.5190724568456453, | |
| "learning_rate": 1.7583357168590552e-05, | |
| "loss": 0.496, | |
| "step": 159 | |
| }, | |
| { | |
| "epoch": 0.9177820267686424, | |
| "grad_norm": 0.6188233552623875, | |
| "learning_rate": 1.75395222936879e-05, | |
| "loss": 0.5033, | |
| "step": 160 | |
| }, | |
| { | |
| "epoch": 0.9235181644359465, | |
| "grad_norm": 0.48950716483283546, | |
| "learning_rate": 1.7495349123250242e-05, | |
| "loss": 0.5201, | |
| "step": 161 | |
| }, | |
| { | |
| "epoch": 0.9292543021032504, | |
| "grad_norm": 0.6250127202556996, | |
| "learning_rate": 1.7450839639311164e-05, | |
| "loss": 0.4674, | |
| "step": 162 | |
| }, | |
| { | |
| "epoch": 0.9349904397705545, | |
| "grad_norm": 0.5109679077677434, | |
| "learning_rate": 1.7405995838994495e-05, | |
| "loss": 0.5182, | |
| "step": 163 | |
| }, | |
| { | |
| "epoch": 0.9407265774378585, | |
| "grad_norm": 0.5858835531348051, | |
| "learning_rate": 1.7360819734424718e-05, | |
| "loss": 0.518, | |
| "step": 164 | |
| }, | |
| { | |
| "epoch": 0.9464627151051626, | |
| "grad_norm": 0.5788419193604027, | |
| "learning_rate": 1.731531335263669e-05, | |
| "loss": 0.5223, | |
| "step": 165 | |
| }, | |
| { | |
| "epoch": 0.9521988527724665, | |
| "grad_norm": 0.5789436218944392, | |
| "learning_rate": 1.7269478735484682e-05, | |
| "loss": 0.4855, | |
| "step": 166 | |
| }, | |
| { | |
| "epoch": 0.9579349904397706, | |
| "grad_norm": 0.5208960460250472, | |
| "learning_rate": 1.7223317939550756e-05, | |
| "loss": 0.4881, | |
| "step": 167 | |
| }, | |
| { | |
| "epoch": 0.9636711281070746, | |
| "grad_norm": 0.5099943351497956, | |
| "learning_rate": 1.7176833036052495e-05, | |
| "loss": 0.4835, | |
| "step": 168 | |
| }, | |
| { | |
| "epoch": 0.9694072657743786, | |
| "grad_norm": 0.5897069652807404, | |
| "learning_rate": 1.713002611075007e-05, | |
| "loss": 0.507, | |
| "step": 169 | |
| }, | |
| { | |
| "epoch": 0.9751434034416826, | |
| "grad_norm": 0.564216052819639, | |
| "learning_rate": 1.708289926385265e-05, | |
| "loss": 0.4979, | |
| "step": 170 | |
| }, | |
| { | |
| "epoch": 0.9808795411089866, | |
| "grad_norm": 0.5326107874470021, | |
| "learning_rate": 1.703545460992416e-05, | |
| "loss": 0.5437, | |
| "step": 171 | |
| }, | |
| { | |
| "epoch": 0.9866156787762906, | |
| "grad_norm": 0.5578118857749461, | |
| "learning_rate": 1.698769427778842e-05, | |
| "loss": 0.497, | |
| "step": 172 | |
| }, | |
| { | |
| "epoch": 0.9923518164435946, | |
| "grad_norm": 0.5684954816297212, | |
| "learning_rate": 1.693962041043359e-05, | |
| "loss": 0.5283, | |
| "step": 173 | |
| }, | |
| { | |
| "epoch": 0.9980879541108987, | |
| "grad_norm": 0.5422375671625977, | |
| "learning_rate": 1.6891235164916066e-05, | |
| "loss": 0.5188, | |
| "step": 174 | |
| }, | |
| { | |
| "epoch": 1.0038240917782026, | |
| "grad_norm": 0.8788446159577279, | |
| "learning_rate": 1.684254071226364e-05, | |
| "loss": 0.726, | |
| "step": 175 | |
| }, | |
| { | |
| "epoch": 1.0095602294455066, | |
| "grad_norm": 0.6280093992507375, | |
| "learning_rate": 1.679353923737813e-05, | |
| "loss": 0.4404, | |
| "step": 176 | |
| }, | |
| { | |
| "epoch": 1.0152963671128108, | |
| "grad_norm": 0.6259953429427509, | |
| "learning_rate": 1.674423293893731e-05, | |
| "loss": 0.4864, | |
| "step": 177 | |
| }, | |
| { | |
| "epoch": 1.0210325047801148, | |
| "grad_norm": 0.5979650245228064, | |
| "learning_rate": 1.6694624029296288e-05, | |
| "loss": 0.4239, | |
| "step": 178 | |
| }, | |
| { | |
| "epoch": 1.0267686424474187, | |
| "grad_norm": 0.6847243364062234, | |
| "learning_rate": 1.664471473438822e-05, | |
| "loss": 0.419, | |
| "step": 179 | |
| }, | |
| { | |
| "epoch": 1.0325047801147227, | |
| "grad_norm": 0.5744272580087806, | |
| "learning_rate": 1.6594507293624426e-05, | |
| "loss": 0.4517, | |
| "step": 180 | |
| }, | |
| { | |
| "epoch": 1.0382409177820269, | |
| "grad_norm": 0.5902807866572417, | |
| "learning_rate": 1.6544003959793925e-05, | |
| "loss": 0.3741, | |
| "step": 181 | |
| }, | |
| { | |
| "epoch": 1.0439770554493308, | |
| "grad_norm": 0.6855325601836614, | |
| "learning_rate": 1.6493206998962353e-05, | |
| "loss": 0.4579, | |
| "step": 182 | |
| }, | |
| { | |
| "epoch": 1.0497131931166348, | |
| "grad_norm": 0.5940167639272883, | |
| "learning_rate": 1.6442118690370272e-05, | |
| "loss": 0.4572, | |
| "step": 183 | |
| }, | |
| { | |
| "epoch": 1.0554493307839388, | |
| "grad_norm": 0.5956164862309621, | |
| "learning_rate": 1.639074132633091e-05, | |
| "loss": 0.4421, | |
| "step": 184 | |
| }, | |
| { | |
| "epoch": 1.0611854684512427, | |
| "grad_norm": 0.610351381314269, | |
| "learning_rate": 1.6339077212127294e-05, | |
| "loss": 0.4006, | |
| "step": 185 | |
| }, | |
| { | |
| "epoch": 1.066921606118547, | |
| "grad_norm": 0.6717976632223687, | |
| "learning_rate": 1.628712866590885e-05, | |
| "loss": 0.4775, | |
| "step": 186 | |
| }, | |
| { | |
| "epoch": 1.0726577437858509, | |
| "grad_norm": 0.6986452772586962, | |
| "learning_rate": 1.6234898018587336e-05, | |
| "loss": 0.4448, | |
| "step": 187 | |
| }, | |
| { | |
| "epoch": 1.0783938814531548, | |
| "grad_norm": 0.6297000888622394, | |
| "learning_rate": 1.6182387613732295e-05, | |
| "loss": 0.4897, | |
| "step": 188 | |
| }, | |
| { | |
| "epoch": 1.0841300191204588, | |
| "grad_norm": 0.4850479578653087, | |
| "learning_rate": 1.6129599807465873e-05, | |
| "loss": 0.3727, | |
| "step": 189 | |
| }, | |
| { | |
| "epoch": 1.089866156787763, | |
| "grad_norm": 0.6942735667602296, | |
| "learning_rate": 1.607653696835713e-05, | |
| "loss": 0.5011, | |
| "step": 190 | |
| }, | |
| { | |
| "epoch": 1.095602294455067, | |
| "grad_norm": 0.546352054633847, | |
| "learning_rate": 1.602320147731573e-05, | |
| "loss": 0.4083, | |
| "step": 191 | |
| }, | |
| { | |
| "epoch": 1.101338432122371, | |
| "grad_norm": 0.497459437509141, | |
| "learning_rate": 1.596959572748514e-05, | |
| "loss": 0.4053, | |
| "step": 192 | |
| }, | |
| { | |
| "epoch": 1.107074569789675, | |
| "grad_norm": 0.5915697095267101, | |
| "learning_rate": 1.5915722124135227e-05, | |
| "loss": 0.4421, | |
| "step": 193 | |
| }, | |
| { | |
| "epoch": 1.1128107074569789, | |
| "grad_norm": 0.4822188270663418, | |
| "learning_rate": 1.586158308455435e-05, | |
| "loss": 0.4458, | |
| "step": 194 | |
| }, | |
| { | |
| "epoch": 1.118546845124283, | |
| "grad_norm": 0.586482650803536, | |
| "learning_rate": 1.580718103794089e-05, | |
| "loss": 0.4402, | |
| "step": 195 | |
| }, | |
| { | |
| "epoch": 1.124282982791587, | |
| "grad_norm": 0.5577297526984457, | |
| "learning_rate": 1.5752518425294258e-05, | |
| "loss": 0.3781, | |
| "step": 196 | |
| }, | |
| { | |
| "epoch": 1.130019120458891, | |
| "grad_norm": 0.5870354491149415, | |
| "learning_rate": 1.569759769930537e-05, | |
| "loss": 0.5061, | |
| "step": 197 | |
| }, | |
| { | |
| "epoch": 1.135755258126195, | |
| "grad_norm": 0.6371195649882143, | |
| "learning_rate": 1.564242132424657e-05, | |
| "loss": 0.4414, | |
| "step": 198 | |
| }, | |
| { | |
| "epoch": 1.1414913957934991, | |
| "grad_norm": 0.4589988573509597, | |
| "learning_rate": 1.5586991775861103e-05, | |
| "loss": 0.3934, | |
| "step": 199 | |
| }, | |
| { | |
| "epoch": 1.147227533460803, | |
| "grad_norm": 0.5493331894713434, | |
| "learning_rate": 1.5531311541251995e-05, | |
| "loss": 0.4114, | |
| "step": 200 | |
| }, | |
| { | |
| "epoch": 1.152963671128107, | |
| "grad_norm": 0.5138358962197389, | |
| "learning_rate": 1.5475383118770473e-05, | |
| "loss": 0.4159, | |
| "step": 201 | |
| }, | |
| { | |
| "epoch": 1.158699808795411, | |
| "grad_norm": 0.6607382125199128, | |
| "learning_rate": 1.5419209017903855e-05, | |
| "loss": 0.4859, | |
| "step": 202 | |
| }, | |
| { | |
| "epoch": 1.1644359464627152, | |
| "grad_norm": 0.476778446402774, | |
| "learning_rate": 1.536279175916296e-05, | |
| "loss": 0.402, | |
| "step": 203 | |
| }, | |
| { | |
| "epoch": 1.1701720841300192, | |
| "grad_norm": 0.662594438635697, | |
| "learning_rate": 1.5306133873969008e-05, | |
| "loss": 0.4958, | |
| "step": 204 | |
| }, | |
| { | |
| "epoch": 1.1759082217973231, | |
| "grad_norm": 0.49342011546672193, | |
| "learning_rate": 1.5249237904540041e-05, | |
| "loss": 0.4438, | |
| "step": 205 | |
| }, | |
| { | |
| "epoch": 1.181644359464627, | |
| "grad_norm": 0.56704289045039, | |
| "learning_rate": 1.519210640377685e-05, | |
| "loss": 0.4239, | |
| "step": 206 | |
| }, | |
| { | |
| "epoch": 1.1873804971319313, | |
| "grad_norm": 0.5047410935107941, | |
| "learning_rate": 1.513474193514842e-05, | |
| "loss": 0.4288, | |
| "step": 207 | |
| }, | |
| { | |
| "epoch": 1.1931166347992352, | |
| "grad_norm": 0.5926076517541518, | |
| "learning_rate": 1.5077147072576932e-05, | |
| "loss": 0.4277, | |
| "step": 208 | |
| }, | |
| { | |
| "epoch": 1.1988527724665392, | |
| "grad_norm": 0.49311177861969074, | |
| "learning_rate": 1.5019324400322244e-05, | |
| "loss": 0.4359, | |
| "step": 209 | |
| }, | |
| { | |
| "epoch": 1.2045889101338432, | |
| "grad_norm": 0.5662399638263612, | |
| "learning_rate": 1.4961276512865954e-05, | |
| "loss": 0.4269, | |
| "step": 210 | |
| }, | |
| { | |
| "epoch": 1.2103250478011471, | |
| "grad_norm": 0.4759114943666039, | |
| "learning_rate": 1.4903006014794983e-05, | |
| "loss": 0.4509, | |
| "step": 211 | |
| }, | |
| { | |
| "epoch": 1.2160611854684513, | |
| "grad_norm": 0.6159634050140991, | |
| "learning_rate": 1.4844515520684703e-05, | |
| "loss": 0.443, | |
| "step": 212 | |
| }, | |
| { | |
| "epoch": 1.2217973231357553, | |
| "grad_norm": 0.5145704198069356, | |
| "learning_rate": 1.4785807654981627e-05, | |
| "loss": 0.4079, | |
| "step": 213 | |
| }, | |
| { | |
| "epoch": 1.2275334608030593, | |
| "grad_norm": 0.5006839310785222, | |
| "learning_rate": 1.4726885051885654e-05, | |
| "loss": 0.4102, | |
| "step": 214 | |
| }, | |
| { | |
| "epoch": 1.2332695984703632, | |
| "grad_norm": 0.6434261075702461, | |
| "learning_rate": 1.4667750355231863e-05, | |
| "loss": 0.4577, | |
| "step": 215 | |
| }, | |
| { | |
| "epoch": 1.2390057361376674, | |
| "grad_norm": 0.49583426427426724, | |
| "learning_rate": 1.4608406218371894e-05, | |
| "loss": 0.4538, | |
| "step": 216 | |
| }, | |
| { | |
| "epoch": 1.2447418738049714, | |
| "grad_norm": 0.5367458222855271, | |
| "learning_rate": 1.4548855304054888e-05, | |
| "loss": 0.3844, | |
| "step": 217 | |
| }, | |
| { | |
| "epoch": 1.2504780114722753, | |
| "grad_norm": 0.5081216759568395, | |
| "learning_rate": 1.4489100284308018e-05, | |
| "loss": 0.3947, | |
| "step": 218 | |
| }, | |
| { | |
| "epoch": 1.2562141491395793, | |
| "grad_norm": 0.585650284147855, | |
| "learning_rate": 1.4429143840316586e-05, | |
| "loss": 0.4645, | |
| "step": 219 | |
| }, | |
| { | |
| "epoch": 1.2619502868068833, | |
| "grad_norm": 0.5171763518723317, | |
| "learning_rate": 1.4368988662303733e-05, | |
| "loss": 0.452, | |
| "step": 220 | |
| }, | |
| { | |
| "epoch": 1.2676864244741874, | |
| "grad_norm": 0.48039143238596627, | |
| "learning_rate": 1.4308637449409705e-05, | |
| "loss": 0.441, | |
| "step": 221 | |
| }, | |
| { | |
| "epoch": 1.2734225621414914, | |
| "grad_norm": 0.5684128265489994, | |
| "learning_rate": 1.4248092909570774e-05, | |
| "loss": 0.457, | |
| "step": 222 | |
| }, | |
| { | |
| "epoch": 1.2791586998087954, | |
| "grad_norm": 0.6202786095376327, | |
| "learning_rate": 1.4187357759397716e-05, | |
| "loss": 0.474, | |
| "step": 223 | |
| }, | |
| { | |
| "epoch": 1.2848948374760996, | |
| "grad_norm": 0.5132214354576209, | |
| "learning_rate": 1.4126434724053915e-05, | |
| "loss": 0.4418, | |
| "step": 224 | |
| }, | |
| { | |
| "epoch": 1.2906309751434035, | |
| "grad_norm": 0.47833838706648946, | |
| "learning_rate": 1.4065326537133094e-05, | |
| "loss": 0.4143, | |
| "step": 225 | |
| }, | |
| { | |
| "epoch": 1.2963671128107075, | |
| "grad_norm": 0.5126041137931057, | |
| "learning_rate": 1.4004035940536671e-05, | |
| "loss": 0.4301, | |
| "step": 226 | |
| }, | |
| { | |
| "epoch": 1.3021032504780115, | |
| "grad_norm": 0.4906072135255895, | |
| "learning_rate": 1.39425656843507e-05, | |
| "loss": 0.439, | |
| "step": 227 | |
| }, | |
| { | |
| "epoch": 1.3078393881453154, | |
| "grad_norm": 0.5366969388908639, | |
| "learning_rate": 1.3880918526722497e-05, | |
| "loss": 0.4653, | |
| "step": 228 | |
| }, | |
| { | |
| "epoch": 1.3135755258126194, | |
| "grad_norm": 0.5481791341858512, | |
| "learning_rate": 1.3819097233736888e-05, | |
| "loss": 0.4895, | |
| "step": 229 | |
| }, | |
| { | |
| "epoch": 1.3193116634799236, | |
| "grad_norm": 0.46387604676620864, | |
| "learning_rate": 1.3757104579292082e-05, | |
| "loss": 0.407, | |
| "step": 230 | |
| }, | |
| { | |
| "epoch": 1.3250478011472275, | |
| "grad_norm": 0.47558528540904305, | |
| "learning_rate": 1.3694943344975214e-05, | |
| "loss": 0.3897, | |
| "step": 231 | |
| }, | |
| { | |
| "epoch": 1.3307839388145315, | |
| "grad_norm": 0.4776725124785085, | |
| "learning_rate": 1.3632616319937522e-05, | |
| "loss": 0.4569, | |
| "step": 232 | |
| }, | |
| { | |
| "epoch": 1.3365200764818357, | |
| "grad_norm": 0.5538110118526167, | |
| "learning_rate": 1.3570126300769233e-05, | |
| "loss": 0.4563, | |
| "step": 233 | |
| }, | |
| { | |
| "epoch": 1.3422562141491396, | |
| "grad_norm": 0.463107688708888, | |
| "learning_rate": 1.3507476091374042e-05, | |
| "loss": 0.454, | |
| "step": 234 | |
| }, | |
| { | |
| "epoch": 1.3479923518164436, | |
| "grad_norm": 0.5020783938759849, | |
| "learning_rate": 1.344466850284333e-05, | |
| "loss": 0.4314, | |
| "step": 235 | |
| }, | |
| { | |
| "epoch": 1.3537284894837476, | |
| "grad_norm": 0.6036785088586939, | |
| "learning_rate": 1.3381706353330015e-05, | |
| "loss": 0.4783, | |
| "step": 236 | |
| }, | |
| { | |
| "epoch": 1.3594646271510515, | |
| "grad_norm": 0.45597006215248087, | |
| "learning_rate": 1.331859246792211e-05, | |
| "loss": 0.3658, | |
| "step": 237 | |
| }, | |
| { | |
| "epoch": 1.3652007648183555, | |
| "grad_norm": 0.6491828764472608, | |
| "learning_rate": 1.3255329678515959e-05, | |
| "loss": 0.456, | |
| "step": 238 | |
| }, | |
| { | |
| "epoch": 1.3709369024856597, | |
| "grad_norm": 0.5148839976999712, | |
| "learning_rate": 1.3191920823689178e-05, | |
| "loss": 0.4056, | |
| "step": 239 | |
| }, | |
| { | |
| "epoch": 1.3766730401529637, | |
| "grad_norm": 0.5798134301190937, | |
| "learning_rate": 1.3128368748573272e-05, | |
| "loss": 0.4938, | |
| "step": 240 | |
| }, | |
| { | |
| "epoch": 1.3824091778202676, | |
| "grad_norm": 0.5118762044788047, | |
| "learning_rate": 1.3064676304726001e-05, | |
| "loss": 0.4354, | |
| "step": 241 | |
| }, | |
| { | |
| "epoch": 1.3881453154875718, | |
| "grad_norm": 0.5579407020665195, | |
| "learning_rate": 1.300084635000341e-05, | |
| "loss": 0.4009, | |
| "step": 242 | |
| }, | |
| { | |
| "epoch": 1.3938814531548758, | |
| "grad_norm": 0.47914049556136357, | |
| "learning_rate": 1.2936881748431601e-05, | |
| "loss": 0.4139, | |
| "step": 243 | |
| }, | |
| { | |
| "epoch": 1.3996175908221797, | |
| "grad_norm": 0.6781741684919476, | |
| "learning_rate": 1.287278537007824e-05, | |
| "loss": 0.4831, | |
| "step": 244 | |
| }, | |
| { | |
| "epoch": 1.4053537284894837, | |
| "grad_norm": 0.47111455679765774, | |
| "learning_rate": 1.280856009092376e-05, | |
| "loss": 0.4599, | |
| "step": 245 | |
| }, | |
| { | |
| "epoch": 1.4110898661567877, | |
| "grad_norm": 0.46738967278396626, | |
| "learning_rate": 1.2744208792732324e-05, | |
| "loss": 0.4387, | |
| "step": 246 | |
| }, | |
| { | |
| "epoch": 1.4168260038240919, | |
| "grad_norm": 0.5304710050520607, | |
| "learning_rate": 1.267973436292253e-05, | |
| "loss": 0.4513, | |
| "step": 247 | |
| }, | |
| { | |
| "epoch": 1.4225621414913958, | |
| "grad_norm": 0.43485659330912546, | |
| "learning_rate": 1.2615139694437835e-05, | |
| "loss": 0.4107, | |
| "step": 248 | |
| }, | |
| { | |
| "epoch": 1.4282982791586998, | |
| "grad_norm": 0.4874553497224479, | |
| "learning_rate": 1.2550427685616767e-05, | |
| "loss": 0.4504, | |
| "step": 249 | |
| }, | |
| { | |
| "epoch": 1.4340344168260037, | |
| "grad_norm": 0.47534151482068415, | |
| "learning_rate": 1.2485601240062868e-05, | |
| "loss": 0.4142, | |
| "step": 250 | |
| }, | |
| { | |
| "epoch": 1.439770554493308, | |
| "grad_norm": 0.49136255110854465, | |
| "learning_rate": 1.2420663266514419e-05, | |
| "loss": 0.4523, | |
| "step": 251 | |
| }, | |
| { | |
| "epoch": 1.445506692160612, | |
| "grad_norm": 0.4859920796473984, | |
| "learning_rate": 1.2355616678713909e-05, | |
| "loss": 0.4543, | |
| "step": 252 | |
| }, | |
| { | |
| "epoch": 1.4512428298279159, | |
| "grad_norm": 0.4106107196876608, | |
| "learning_rate": 1.229046439527732e-05, | |
| "loss": 0.3931, | |
| "step": 253 | |
| }, | |
| { | |
| "epoch": 1.4569789674952198, | |
| "grad_norm": 0.4971544355198163, | |
| "learning_rate": 1.2225209339563144e-05, | |
| "loss": 0.44, | |
| "step": 254 | |
| }, | |
| { | |
| "epoch": 1.4627151051625238, | |
| "grad_norm": 0.4521844159684417, | |
| "learning_rate": 1.2159854439541245e-05, | |
| "loss": 0.4551, | |
| "step": 255 | |
| }, | |
| { | |
| "epoch": 1.468451242829828, | |
| "grad_norm": 0.45723395845362985, | |
| "learning_rate": 1.2094402627661447e-05, | |
| "loss": 0.443, | |
| "step": 256 | |
| }, | |
| { | |
| "epoch": 1.474187380497132, | |
| "grad_norm": 0.4826661110709425, | |
| "learning_rate": 1.2028856840721975e-05, | |
| "loss": 0.4656, | |
| "step": 257 | |
| }, | |
| { | |
| "epoch": 1.479923518164436, | |
| "grad_norm": 0.39565718965895136, | |
| "learning_rate": 1.1963220019737691e-05, | |
| "loss": 0.4234, | |
| "step": 258 | |
| }, | |
| { | |
| "epoch": 1.48565965583174, | |
| "grad_norm": 0.47431335698220256, | |
| "learning_rate": 1.1897495109808108e-05, | |
| "loss": 0.4543, | |
| "step": 259 | |
| }, | |
| { | |
| "epoch": 1.491395793499044, | |
| "grad_norm": 0.4507720979158589, | |
| "learning_rate": 1.1831685059985263e-05, | |
| "loss": 0.4546, | |
| "step": 260 | |
| }, | |
| { | |
| "epoch": 1.497131931166348, | |
| "grad_norm": 0.40102046460904306, | |
| "learning_rate": 1.1765792823141385e-05, | |
| "loss": 0.4111, | |
| "step": 261 | |
| }, | |
| { | |
| "epoch": 1.502868068833652, | |
| "grad_norm": 0.4589104612978405, | |
| "learning_rate": 1.169982135583641e-05, | |
| "loss": 0.4134, | |
| "step": 262 | |
| }, | |
| { | |
| "epoch": 1.508604206500956, | |
| "grad_norm": 0.4683338597962924, | |
| "learning_rate": 1.1633773618185302e-05, | |
| "loss": 0.4397, | |
| "step": 263 | |
| }, | |
| { | |
| "epoch": 1.51434034416826, | |
| "grad_norm": 0.4558640349219329, | |
| "learning_rate": 1.1567652573725263e-05, | |
| "loss": 0.4484, | |
| "step": 264 | |
| }, | |
| { | |
| "epoch": 1.520076481835564, | |
| "grad_norm": 0.439951790644891, | |
| "learning_rate": 1.1501461189282734e-05, | |
| "loss": 0.4027, | |
| "step": 265 | |
| }, | |
| { | |
| "epoch": 1.525812619502868, | |
| "grad_norm": 0.44164693690672613, | |
| "learning_rate": 1.1435202434840287e-05, | |
| "loss": 0.4266, | |
| "step": 266 | |
| }, | |
| { | |
| "epoch": 1.5315487571701722, | |
| "grad_norm": 0.43551426817891264, | |
| "learning_rate": 1.136887928340336e-05, | |
| "loss": 0.4279, | |
| "step": 267 | |
| }, | |
| { | |
| "epoch": 1.5372848948374762, | |
| "grad_norm": 0.460751533150955, | |
| "learning_rate": 1.1302494710866859e-05, | |
| "loss": 0.4179, | |
| "step": 268 | |
| }, | |
| { | |
| "epoch": 1.5430210325047802, | |
| "grad_norm": 0.4746552779750239, | |
| "learning_rate": 1.1236051695881634e-05, | |
| "loss": 0.4769, | |
| "step": 269 | |
| }, | |
| { | |
| "epoch": 1.5487571701720841, | |
| "grad_norm": 0.47105209380963015, | |
| "learning_rate": 1.1169553219720828e-05, | |
| "loss": 0.4599, | |
| "step": 270 | |
| }, | |
| { | |
| "epoch": 1.554493307839388, | |
| "grad_norm": 0.4566600321680249, | |
| "learning_rate": 1.1103002266146096e-05, | |
| "loss": 0.4515, | |
| "step": 271 | |
| }, | |
| { | |
| "epoch": 1.560229445506692, | |
| "grad_norm": 0.387915706277622, | |
| "learning_rate": 1.103640182127375e-05, | |
| "loss": 0.4265, | |
| "step": 272 | |
| }, | |
| { | |
| "epoch": 1.565965583173996, | |
| "grad_norm": 0.44038850987015166, | |
| "learning_rate": 1.0969754873440743e-05, | |
| "loss": 0.4486, | |
| "step": 273 | |
| }, | |
| { | |
| "epoch": 1.5717017208413002, | |
| "grad_norm": 0.46869796330160723, | |
| "learning_rate": 1.0903064413070611e-05, | |
| "loss": 0.4834, | |
| "step": 274 | |
| }, | |
| { | |
| "epoch": 1.5774378585086042, | |
| "grad_norm": 0.40500478010238716, | |
| "learning_rate": 1.0836333432539272e-05, | |
| "loss": 0.4084, | |
| "step": 275 | |
| }, | |
| { | |
| "epoch": 1.5831739961759084, | |
| "grad_norm": 0.39581081249275896, | |
| "learning_rate": 1.076956492604077e-05, | |
| "loss": 0.4574, | |
| "step": 276 | |
| }, | |
| { | |
| "epoch": 1.5889101338432123, | |
| "grad_norm": 0.4621337562923899, | |
| "learning_rate": 1.070276188945293e-05, | |
| "loss": 0.44, | |
| "step": 277 | |
| }, | |
| { | |
| "epoch": 1.5946462715105163, | |
| "grad_norm": 0.403297544236656, | |
| "learning_rate": 1.0635927320202928e-05, | |
| "loss": 0.428, | |
| "step": 278 | |
| }, | |
| { | |
| "epoch": 1.6003824091778203, | |
| "grad_norm": 0.40429520535585933, | |
| "learning_rate": 1.0569064217132791e-05, | |
| "loss": 0.3999, | |
| "step": 279 | |
| }, | |
| { | |
| "epoch": 1.6061185468451242, | |
| "grad_norm": 0.44992301434912374, | |
| "learning_rate": 1.0502175580364857e-05, | |
| "loss": 0.4629, | |
| "step": 280 | |
| }, | |
| { | |
| "epoch": 1.6118546845124282, | |
| "grad_norm": 0.5317926764729383, | |
| "learning_rate": 1.0435264411167148e-05, | |
| "loss": 0.426, | |
| "step": 281 | |
| }, | |
| { | |
| "epoch": 1.6175908221797322, | |
| "grad_norm": 0.43995618220520716, | |
| "learning_rate": 1.036833371181871e-05, | |
| "loss": 0.4835, | |
| "step": 282 | |
| }, | |
| { | |
| "epoch": 1.6233269598470363, | |
| "grad_norm": 0.401770871391019, | |
| "learning_rate": 1.0301386485474888e-05, | |
| "loss": 0.4303, | |
| "step": 283 | |
| }, | |
| { | |
| "epoch": 1.6290630975143403, | |
| "grad_norm": 0.3924022810984957, | |
| "learning_rate": 1.0234425736032607e-05, | |
| "loss": 0.4528, | |
| "step": 284 | |
| }, | |
| { | |
| "epoch": 1.6347992351816445, | |
| "grad_norm": 0.43649719543388227, | |
| "learning_rate": 1.016745446799555e-05, | |
| "loss": 0.4054, | |
| "step": 285 | |
| }, | |
| { | |
| "epoch": 1.6405353728489485, | |
| "grad_norm": 0.44190590472557323, | |
| "learning_rate": 1.010047568633938e-05, | |
| "loss": 0.4622, | |
| "step": 286 | |
| }, | |
| { | |
| "epoch": 1.6462715105162524, | |
| "grad_norm": 0.4138119608613635, | |
| "learning_rate": 1.0033492396376879e-05, | |
| "loss": 0.4443, | |
| "step": 287 | |
| }, | |
| { | |
| "epoch": 1.6520076481835564, | |
| "grad_norm": 0.410370717741024, | |
| "learning_rate": 9.966507603623125e-06, | |
| "loss": 0.4255, | |
| "step": 288 | |
| }, | |
| { | |
| "epoch": 1.6577437858508604, | |
| "grad_norm": 0.3851173486609221, | |
| "learning_rate": 9.899524313660623e-06, | |
| "loss": 0.439, | |
| "step": 289 | |
| }, | |
| { | |
| "epoch": 1.6634799235181643, | |
| "grad_norm": 0.38567722494532186, | |
| "learning_rate": 9.832545532004454e-06, | |
| "loss": 0.4339, | |
| "step": 290 | |
| }, | |
| { | |
| "epoch": 1.6692160611854685, | |
| "grad_norm": 0.3955204462600041, | |
| "learning_rate": 9.765574263967397e-06, | |
| "loss": 0.3977, | |
| "step": 291 | |
| }, | |
| { | |
| "epoch": 1.6749521988527725, | |
| "grad_norm": 0.42445834780458475, | |
| "learning_rate": 9.698613514525117e-06, | |
| "loss": 0.422, | |
| "step": 292 | |
| }, | |
| { | |
| "epoch": 1.6806883365200764, | |
| "grad_norm": 0.44194756806146035, | |
| "learning_rate": 9.631666288181293e-06, | |
| "loss": 0.4903, | |
| "step": 293 | |
| }, | |
| { | |
| "epoch": 1.6864244741873806, | |
| "grad_norm": 0.46162371183733075, | |
| "learning_rate": 9.564735588832857e-06, | |
| "loss": 0.5333, | |
| "step": 294 | |
| }, | |
| { | |
| "epoch": 1.6921606118546846, | |
| "grad_norm": 0.4051316346724794, | |
| "learning_rate": 9.497824419635145e-06, | |
| "loss": 0.3805, | |
| "step": 295 | |
| }, | |
| { | |
| "epoch": 1.6978967495219885, | |
| "grad_norm": 0.3960838463524368, | |
| "learning_rate": 9.430935782867214e-06, | |
| "loss": 0.4379, | |
| "step": 296 | |
| }, | |
| { | |
| "epoch": 1.7036328871892925, | |
| "grad_norm": 0.40765156595116686, | |
| "learning_rate": 9.364072679797074e-06, | |
| "loss": 0.439, | |
| "step": 297 | |
| }, | |
| { | |
| "epoch": 1.7093690248565965, | |
| "grad_norm": 0.4258417890684251, | |
| "learning_rate": 9.297238110547075e-06, | |
| "loss": 0.4748, | |
| "step": 298 | |
| }, | |
| { | |
| "epoch": 1.7151051625239004, | |
| "grad_norm": 0.398962552995342, | |
| "learning_rate": 9.230435073959232e-06, | |
| "loss": 0.4381, | |
| "step": 299 | |
| }, | |
| { | |
| "epoch": 1.7208413001912046, | |
| "grad_norm": 0.38078824772489206, | |
| "learning_rate": 9.163666567460735e-06, | |
| "loss": 0.3894, | |
| "step": 300 | |
| }, | |
| { | |
| "epoch": 1.7265774378585086, | |
| "grad_norm": 0.4002207799336185, | |
| "learning_rate": 9.096935586929392e-06, | |
| "loss": 0.4284, | |
| "step": 301 | |
| }, | |
| { | |
| "epoch": 1.7323135755258128, | |
| "grad_norm": 0.4385360843975406, | |
| "learning_rate": 9.030245126559262e-06, | |
| "loss": 0.4843, | |
| "step": 302 | |
| }, | |
| { | |
| "epoch": 1.7380497131931167, | |
| "grad_norm": 0.402310432836796, | |
| "learning_rate": 8.963598178726254e-06, | |
| "loss": 0.3751, | |
| "step": 303 | |
| }, | |
| { | |
| "epoch": 1.7437858508604207, | |
| "grad_norm": 0.438445094499713, | |
| "learning_rate": 8.896997733853904e-06, | |
| "loss": 0.4418, | |
| "step": 304 | |
| }, | |
| { | |
| "epoch": 1.7495219885277247, | |
| "grad_norm": 0.440914495029627, | |
| "learning_rate": 8.830446780279175e-06, | |
| "loss": 0.4854, | |
| "step": 305 | |
| }, | |
| { | |
| "epoch": 1.7552581261950286, | |
| "grad_norm": 0.38274381913523997, | |
| "learning_rate": 8.763948304118368e-06, | |
| "loss": 0.4132, | |
| "step": 306 | |
| }, | |
| { | |
| "epoch": 1.7609942638623326, | |
| "grad_norm": 0.46352350104923684, | |
| "learning_rate": 8.697505289133146e-06, | |
| "loss": 0.4512, | |
| "step": 307 | |
| }, | |
| { | |
| "epoch": 1.7667304015296366, | |
| "grad_norm": 0.3717919590029904, | |
| "learning_rate": 8.631120716596642e-06, | |
| "loss": 0.3694, | |
| "step": 308 | |
| }, | |
| { | |
| "epoch": 1.7724665391969407, | |
| "grad_norm": 0.3770809977975791, | |
| "learning_rate": 8.564797565159715e-06, | |
| "loss": 0.4099, | |
| "step": 309 | |
| }, | |
| { | |
| "epoch": 1.7782026768642447, | |
| "grad_norm": 0.430430532759824, | |
| "learning_rate": 8.498538810717267e-06, | |
| "loss": 0.4327, | |
| "step": 310 | |
| }, | |
| { | |
| "epoch": 1.783938814531549, | |
| "grad_norm": 0.4097548018133223, | |
| "learning_rate": 8.432347426274739e-06, | |
| "loss": 0.4541, | |
| "step": 311 | |
| }, | |
| { | |
| "epoch": 1.7896749521988529, | |
| "grad_norm": 0.4332198498570345, | |
| "learning_rate": 8.366226381814698e-06, | |
| "loss": 0.4592, | |
| "step": 312 | |
| }, | |
| { | |
| "epoch": 1.7954110898661568, | |
| "grad_norm": 0.45602674402620935, | |
| "learning_rate": 8.300178644163593e-06, | |
| "loss": 0.4062, | |
| "step": 313 | |
| }, | |
| { | |
| "epoch": 1.8011472275334608, | |
| "grad_norm": 0.4580406538791895, | |
| "learning_rate": 8.234207176858615e-06, | |
| "loss": 0.4568, | |
| "step": 314 | |
| }, | |
| { | |
| "epoch": 1.8068833652007648, | |
| "grad_norm": 0.39852421924765624, | |
| "learning_rate": 8.168314940014742e-06, | |
| "loss": 0.4277, | |
| "step": 315 | |
| }, | |
| { | |
| "epoch": 1.8126195028680687, | |
| "grad_norm": 0.4439622106678351, | |
| "learning_rate": 8.102504890191892e-06, | |
| "loss": 0.46, | |
| "step": 316 | |
| }, | |
| { | |
| "epoch": 1.8183556405353727, | |
| "grad_norm": 0.3778756198759015, | |
| "learning_rate": 8.036779980262312e-06, | |
| "loss": 0.4223, | |
| "step": 317 | |
| }, | |
| { | |
| "epoch": 1.8240917782026769, | |
| "grad_norm": 0.40171442544483804, | |
| "learning_rate": 7.971143159278025e-06, | |
| "loss": 0.4132, | |
| "step": 318 | |
| }, | |
| { | |
| "epoch": 1.8298279158699808, | |
| "grad_norm": 0.41526173759534935, | |
| "learning_rate": 7.905597372338558e-06, | |
| "loss": 0.4391, | |
| "step": 319 | |
| }, | |
| { | |
| "epoch": 1.835564053537285, | |
| "grad_norm": 0.38573779193525365, | |
| "learning_rate": 7.840145560458756e-06, | |
| "loss": 0.3864, | |
| "step": 320 | |
| }, | |
| { | |
| "epoch": 1.841300191204589, | |
| "grad_norm": 0.41638087217030073, | |
| "learning_rate": 7.774790660436857e-06, | |
| "loss": 0.414, | |
| "step": 321 | |
| }, | |
| { | |
| "epoch": 1.847036328871893, | |
| "grad_norm": 0.4526590955714858, | |
| "learning_rate": 7.709535604722685e-06, | |
| "loss": 0.4179, | |
| "step": 322 | |
| }, | |
| { | |
| "epoch": 1.852772466539197, | |
| "grad_norm": 0.4080092443695909, | |
| "learning_rate": 7.644383321286095e-06, | |
| "loss": 0.4631, | |
| "step": 323 | |
| }, | |
| { | |
| "epoch": 1.8585086042065009, | |
| "grad_norm": 0.38618254565446664, | |
| "learning_rate": 7.579336733485584e-06, | |
| "loss": 0.452, | |
| "step": 324 | |
| }, | |
| { | |
| "epoch": 1.8642447418738048, | |
| "grad_norm": 0.374394051360769, | |
| "learning_rate": 7.514398759937135e-06, | |
| "loss": 0.4218, | |
| "step": 325 | |
| }, | |
| { | |
| "epoch": 1.869980879541109, | |
| "grad_norm": 0.44912456810666246, | |
| "learning_rate": 7.449572314383237e-06, | |
| "loss": 0.4457, | |
| "step": 326 | |
| }, | |
| { | |
| "epoch": 1.875717017208413, | |
| "grad_norm": 0.4076165446267555, | |
| "learning_rate": 7.384860305562171e-06, | |
| "loss": 0.4096, | |
| "step": 327 | |
| }, | |
| { | |
| "epoch": 1.8814531548757172, | |
| "grad_norm": 0.41473117525412695, | |
| "learning_rate": 7.320265637077473e-06, | |
| "loss": 0.4833, | |
| "step": 328 | |
| }, | |
| { | |
| "epoch": 1.8871892925430211, | |
| "grad_norm": 0.37006808150559667, | |
| "learning_rate": 7.255791207267679e-06, | |
| "loss": 0.3945, | |
| "step": 329 | |
| }, | |
| { | |
| "epoch": 1.892925430210325, | |
| "grad_norm": 0.3956745154722134, | |
| "learning_rate": 7.191439909076243e-06, | |
| "loss": 0.4165, | |
| "step": 330 | |
| }, | |
| { | |
| "epoch": 1.898661567877629, | |
| "grad_norm": 0.40315743805579995, | |
| "learning_rate": 7.127214629921765e-06, | |
| "loss": 0.408, | |
| "step": 331 | |
| }, | |
| { | |
| "epoch": 1.904397705544933, | |
| "grad_norm": 0.40167879385408956, | |
| "learning_rate": 7.0631182515684e-06, | |
| "loss": 0.4069, | |
| "step": 332 | |
| }, | |
| { | |
| "epoch": 1.910133843212237, | |
| "grad_norm": 0.4110824483433327, | |
| "learning_rate": 6.999153649996595e-06, | |
| "loss": 0.4033, | |
| "step": 333 | |
| }, | |
| { | |
| "epoch": 1.915869980879541, | |
| "grad_norm": 0.4023551235142921, | |
| "learning_rate": 6.935323695274002e-06, | |
| "loss": 0.4244, | |
| "step": 334 | |
| }, | |
| { | |
| "epoch": 1.9216061185468452, | |
| "grad_norm": 0.4076645867761283, | |
| "learning_rate": 6.871631251426729e-06, | |
| "loss": 0.4935, | |
| "step": 335 | |
| }, | |
| { | |
| "epoch": 1.9273422562141491, | |
| "grad_norm": 0.4040181892237224, | |
| "learning_rate": 6.808079176310826e-06, | |
| "loss": 0.3796, | |
| "step": 336 | |
| }, | |
| { | |
| "epoch": 1.9330783938814533, | |
| "grad_norm": 0.4188015305713764, | |
| "learning_rate": 6.744670321484044e-06, | |
| "loss": 0.406, | |
| "step": 337 | |
| }, | |
| { | |
| "epoch": 1.9388145315487573, | |
| "grad_norm": 0.4143577555520623, | |
| "learning_rate": 6.681407532077895e-06, | |
| "loss": 0.4365, | |
| "step": 338 | |
| }, | |
| { | |
| "epoch": 1.9445506692160612, | |
| "grad_norm": 0.3764270258745421, | |
| "learning_rate": 6.618293646669987e-06, | |
| "loss": 0.4366, | |
| "step": 339 | |
| }, | |
| { | |
| "epoch": 1.9502868068833652, | |
| "grad_norm": 0.4210114400843068, | |
| "learning_rate": 6.555331497156671e-06, | |
| "loss": 0.4551, | |
| "step": 340 | |
| }, | |
| { | |
| "epoch": 1.9560229445506692, | |
| "grad_norm": 0.4453880959527477, | |
| "learning_rate": 6.492523908625958e-06, | |
| "loss": 0.4564, | |
| "step": 341 | |
| }, | |
| { | |
| "epoch": 1.9617590822179731, | |
| "grad_norm": 0.4037182452331645, | |
| "learning_rate": 6.429873699230771e-06, | |
| "loss": 0.4411, | |
| "step": 342 | |
| }, | |
| { | |
| "epoch": 1.967495219885277, | |
| "grad_norm": 0.3808388760596802, | |
| "learning_rate": 6.3673836800624775e-06, | |
| "loss": 0.3952, | |
| "step": 343 | |
| }, | |
| { | |
| "epoch": 1.9732313575525813, | |
| "grad_norm": 0.41924994933550513, | |
| "learning_rate": 6.30505665502479e-06, | |
| "loss": 0.3802, | |
| "step": 344 | |
| }, | |
| { | |
| "epoch": 1.9789674952198852, | |
| "grad_norm": 0.44573552239332564, | |
| "learning_rate": 6.242895420707917e-06, | |
| "loss": 0.4741, | |
| "step": 345 | |
| }, | |
| { | |
| "epoch": 1.9847036328871894, | |
| "grad_norm": 0.44311634814108447, | |
| "learning_rate": 6.180902766263113e-06, | |
| "loss": 0.4568, | |
| "step": 346 | |
| }, | |
| { | |
| "epoch": 1.9904397705544934, | |
| "grad_norm": 0.403141283403809, | |
| "learning_rate": 6.119081473277502e-06, | |
| "loss": 0.4439, | |
| "step": 347 | |
| }, | |
| { | |
| "epoch": 1.9961759082217974, | |
| "grad_norm": 0.3696751925678653, | |
| "learning_rate": 6.057434315649304e-06, | |
| "loss": 0.4076, | |
| "step": 348 | |
| }, | |
| { | |
| "epoch": 2.0019120458891013, | |
| "grad_norm": 0.8972308979080702, | |
| "learning_rate": 5.9959640594633304e-06, | |
| "loss": 0.6431, | |
| "step": 349 | |
| }, | |
| { | |
| "epoch": 2.0076481835564053, | |
| "grad_norm": 0.4952120180229906, | |
| "learning_rate": 5.934673462866907e-06, | |
| "loss": 0.4089, | |
| "step": 350 | |
| }, | |
| { | |
| "epoch": 2.0133843212237093, | |
| "grad_norm": 0.5208407249986332, | |
| "learning_rate": 5.873565275946088e-06, | |
| "loss": 0.3998, | |
| "step": 351 | |
| }, | |
| { | |
| "epoch": 2.019120458891013, | |
| "grad_norm": 0.5507106804898475, | |
| "learning_rate": 5.812642240602289e-06, | |
| "loss": 0.3835, | |
| "step": 352 | |
| }, | |
| { | |
| "epoch": 2.024856596558317, | |
| "grad_norm": 0.5141610472881658, | |
| "learning_rate": 5.7519070904292255e-06, | |
| "loss": 0.3859, | |
| "step": 353 | |
| }, | |
| { | |
| "epoch": 2.0305927342256216, | |
| "grad_norm": 0.46442820428267456, | |
| "learning_rate": 5.6913625505902966e-06, | |
| "loss": 0.364, | |
| "step": 354 | |
| }, | |
| { | |
| "epoch": 2.0363288718929256, | |
| "grad_norm": 0.4645855145414816, | |
| "learning_rate": 5.631011337696272e-06, | |
| "loss": 0.3577, | |
| "step": 355 | |
| }, | |
| { | |
| "epoch": 2.0420650095602295, | |
| "grad_norm": 0.5129160739607502, | |
| "learning_rate": 5.570856159683418e-06, | |
| "loss": 0.3708, | |
| "step": 356 | |
| }, | |
| { | |
| "epoch": 2.0478011472275335, | |
| "grad_norm": 0.44092823234948325, | |
| "learning_rate": 5.510899715691984e-06, | |
| "loss": 0.3954, | |
| "step": 357 | |
| }, | |
| { | |
| "epoch": 2.0535372848948374, | |
| "grad_norm": 0.4307147154928634, | |
| "learning_rate": 5.451144695945116e-06, | |
| "loss": 0.3767, | |
| "step": 358 | |
| }, | |
| { | |
| "epoch": 2.0592734225621414, | |
| "grad_norm": 0.3954871188814476, | |
| "learning_rate": 5.391593781628109e-06, | |
| "loss": 0.3096, | |
| "step": 359 | |
| }, | |
| { | |
| "epoch": 2.0650095602294454, | |
| "grad_norm": 0.45723009765562855, | |
| "learning_rate": 5.332249644768142e-06, | |
| "loss": 0.3795, | |
| "step": 360 | |
| }, | |
| { | |
| "epoch": 2.0707456978967493, | |
| "grad_norm": 0.41297055229228974, | |
| "learning_rate": 5.273114948114346e-06, | |
| "loss": 0.3177, | |
| "step": 361 | |
| }, | |
| { | |
| "epoch": 2.0764818355640537, | |
| "grad_norm": 0.4106872355240582, | |
| "learning_rate": 5.214192345018374e-06, | |
| "loss": 0.3515, | |
| "step": 362 | |
| }, | |
| { | |
| "epoch": 2.0822179732313577, | |
| "grad_norm": 0.42883512067861784, | |
| "learning_rate": 5.1554844793153005e-06, | |
| "loss": 0.4205, | |
| "step": 363 | |
| }, | |
| { | |
| "epoch": 2.0879541108986617, | |
| "grad_norm": 0.3815636168034508, | |
| "learning_rate": 5.096993985205023e-06, | |
| "loss": 0.3495, | |
| "step": 364 | |
| }, | |
| { | |
| "epoch": 2.0936902485659656, | |
| "grad_norm": 0.40415485231377263, | |
| "learning_rate": 5.038723487134049e-06, | |
| "loss": 0.3969, | |
| "step": 365 | |
| }, | |
| { | |
| "epoch": 2.0994263862332696, | |
| "grad_norm": 0.3948215113870869, | |
| "learning_rate": 4.9806755996777565e-06, | |
| "loss": 0.37, | |
| "step": 366 | |
| }, | |
| { | |
| "epoch": 2.1051625239005736, | |
| "grad_norm": 0.3961343148482674, | |
| "learning_rate": 4.92285292742307e-06, | |
| "loss": 0.4183, | |
| "step": 367 | |
| }, | |
| { | |
| "epoch": 2.1108986615678775, | |
| "grad_norm": 0.35475813700193604, | |
| "learning_rate": 4.865258064851579e-06, | |
| "loss": 0.3405, | |
| "step": 368 | |
| }, | |
| { | |
| "epoch": 2.1166347992351815, | |
| "grad_norm": 0.38458880165760984, | |
| "learning_rate": 4.807893596223152e-06, | |
| "loss": 0.3702, | |
| "step": 369 | |
| }, | |
| { | |
| "epoch": 2.1223709369024855, | |
| "grad_norm": 0.4018427850015987, | |
| "learning_rate": 4.75076209545996e-06, | |
| "loss": 0.3882, | |
| "step": 370 | |
| }, | |
| { | |
| "epoch": 2.12810707456979, | |
| "grad_norm": 0.3649236891798529, | |
| "learning_rate": 4.693866126030995e-06, | |
| "loss": 0.3192, | |
| "step": 371 | |
| }, | |
| { | |
| "epoch": 2.133843212237094, | |
| "grad_norm": 0.4081333849276713, | |
| "learning_rate": 4.637208240837042e-06, | |
| "loss": 0.3954, | |
| "step": 372 | |
| }, | |
| { | |
| "epoch": 2.139579349904398, | |
| "grad_norm": 0.35987845861598494, | |
| "learning_rate": 4.580790982096149e-06, | |
| "loss": 0.3096, | |
| "step": 373 | |
| }, | |
| { | |
| "epoch": 2.1453154875717018, | |
| "grad_norm": 0.430627026676002, | |
| "learning_rate": 4.5246168812295286e-06, | |
| "loss": 0.4717, | |
| "step": 374 | |
| }, | |
| { | |
| "epoch": 2.1510516252390057, | |
| "grad_norm": 0.3338158346155251, | |
| "learning_rate": 4.468688458748006e-06, | |
| "loss": 0.2981, | |
| "step": 375 | |
| }, | |
| { | |
| "epoch": 2.1567877629063097, | |
| "grad_norm": 0.38106855044885324, | |
| "learning_rate": 4.4130082241388974e-06, | |
| "loss": 0.3767, | |
| "step": 376 | |
| }, | |
| { | |
| "epoch": 2.1625239005736137, | |
| "grad_norm": 0.36059174781408515, | |
| "learning_rate": 4.357578675753432e-06, | |
| "loss": 0.3631, | |
| "step": 377 | |
| }, | |
| { | |
| "epoch": 2.1682600382409176, | |
| "grad_norm": 0.3882154722055546, | |
| "learning_rate": 4.302402300694636e-06, | |
| "loss": 0.3738, | |
| "step": 378 | |
| }, | |
| { | |
| "epoch": 2.173996175908222, | |
| "grad_norm": 0.3978520905363434, | |
| "learning_rate": 4.247481574705744e-06, | |
| "loss": 0.3309, | |
| "step": 379 | |
| }, | |
| { | |
| "epoch": 2.179732313575526, | |
| "grad_norm": 0.3930085794069971, | |
| "learning_rate": 4.192818962059112e-06, | |
| "loss": 0.3395, | |
| "step": 380 | |
| }, | |
| { | |
| "epoch": 2.18546845124283, | |
| "grad_norm": 0.3679820549907332, | |
| "learning_rate": 4.138416915445656e-06, | |
| "loss": 0.3521, | |
| "step": 381 | |
| }, | |
| { | |
| "epoch": 2.191204588910134, | |
| "grad_norm": 0.3851345683393216, | |
| "learning_rate": 4.084277875864776e-06, | |
| "loss": 0.379, | |
| "step": 382 | |
| }, | |
| { | |
| "epoch": 2.196940726577438, | |
| "grad_norm": 0.44781870089318787, | |
| "learning_rate": 4.030404272514864e-06, | |
| "loss": 0.3803, | |
| "step": 383 | |
| }, | |
| { | |
| "epoch": 2.202676864244742, | |
| "grad_norm": 0.39666830508510403, | |
| "learning_rate": 3.97679852268427e-06, | |
| "loss": 0.3622, | |
| "step": 384 | |
| }, | |
| { | |
| "epoch": 2.208413001912046, | |
| "grad_norm": 0.3976428991458035, | |
| "learning_rate": 3.923463031642873e-06, | |
| "loss": 0.3673, | |
| "step": 385 | |
| }, | |
| { | |
| "epoch": 2.21414913957935, | |
| "grad_norm": 0.34487643216663577, | |
| "learning_rate": 3.870400192534128e-06, | |
| "loss": 0.3647, | |
| "step": 386 | |
| }, | |
| { | |
| "epoch": 2.2198852772466537, | |
| "grad_norm": 0.37349012951330834, | |
| "learning_rate": 3.81761238626771e-06, | |
| "loss": 0.3822, | |
| "step": 387 | |
| }, | |
| { | |
| "epoch": 2.2256214149139577, | |
| "grad_norm": 0.40320901857132385, | |
| "learning_rate": 3.7651019814126656e-06, | |
| "loss": 0.4045, | |
| "step": 388 | |
| }, | |
| { | |
| "epoch": 2.231357552581262, | |
| "grad_norm": 0.38833879539410954, | |
| "learning_rate": 3.712871334091154e-06, | |
| "loss": 0.3614, | |
| "step": 389 | |
| }, | |
| { | |
| "epoch": 2.237093690248566, | |
| "grad_norm": 0.37829756392287156, | |
| "learning_rate": 3.6609227878727062e-06, | |
| "loss": 0.3271, | |
| "step": 390 | |
| }, | |
| { | |
| "epoch": 2.24282982791587, | |
| "grad_norm": 0.3579239446318294, | |
| "learning_rate": 3.609258673669097e-06, | |
| "loss": 0.3428, | |
| "step": 391 | |
| }, | |
| { | |
| "epoch": 2.248565965583174, | |
| "grad_norm": 0.3686157523103381, | |
| "learning_rate": 3.5578813096297293e-06, | |
| "loss": 0.3861, | |
| "step": 392 | |
| }, | |
| { | |
| "epoch": 2.254302103250478, | |
| "grad_norm": 0.4030570129715361, | |
| "learning_rate": 3.5067930010376484e-06, | |
| "loss": 0.42, | |
| "step": 393 | |
| }, | |
| { | |
| "epoch": 2.260038240917782, | |
| "grad_norm": 0.37486030062507975, | |
| "learning_rate": 3.4559960402060764e-06, | |
| "loss": 0.3634, | |
| "step": 394 | |
| }, | |
| { | |
| "epoch": 2.265774378585086, | |
| "grad_norm": 0.398111741855188, | |
| "learning_rate": 3.4054927063755793e-06, | |
| "loss": 0.4019, | |
| "step": 395 | |
| }, | |
| { | |
| "epoch": 2.27151051625239, | |
| "grad_norm": 0.3742173600629292, | |
| "learning_rate": 3.355285265611784e-06, | |
| "loss": 0.3547, | |
| "step": 396 | |
| }, | |
| { | |
| "epoch": 2.2772466539196943, | |
| "grad_norm": 0.4098405397196824, | |
| "learning_rate": 3.3053759707037116e-06, | |
| "loss": 0.3624, | |
| "step": 397 | |
| }, | |
| { | |
| "epoch": 2.2829827915869982, | |
| "grad_norm": 0.42987789489488126, | |
| "learning_rate": 3.2557670610626924e-06, | |
| "loss": 0.3771, | |
| "step": 398 | |
| }, | |
| { | |
| "epoch": 2.288718929254302, | |
| "grad_norm": 0.37950071404433056, | |
| "learning_rate": 3.2064607626218737e-06, | |
| "loss": 0.3793, | |
| "step": 399 | |
| }, | |
| { | |
| "epoch": 2.294455066921606, | |
| "grad_norm": 0.38352933857672544, | |
| "learning_rate": 3.157459287736362e-06, | |
| "loss": 0.3784, | |
| "step": 400 | |
| }, | |
| { | |
| "epoch": 2.30019120458891, | |
| "grad_norm": 0.3915772618856507, | |
| "learning_rate": 3.1087648350839382e-06, | |
| "loss": 0.3892, | |
| "step": 401 | |
| }, | |
| { | |
| "epoch": 2.305927342256214, | |
| "grad_norm": 0.38035400732236047, | |
| "learning_rate": 3.0603795895664125e-06, | |
| "loss": 0.3596, | |
| "step": 402 | |
| }, | |
| { | |
| "epoch": 2.311663479923518, | |
| "grad_norm": 0.3966721118627521, | |
| "learning_rate": 3.0123057222115835e-06, | |
| "loss": 0.3933, | |
| "step": 403 | |
| }, | |
| { | |
| "epoch": 2.317399617590822, | |
| "grad_norm": 0.3928138522564511, | |
| "learning_rate": 2.9645453900758415e-06, | |
| "loss": 0.3858, | |
| "step": 404 | |
| }, | |
| { | |
| "epoch": 2.323135755258126, | |
| "grad_norm": 0.3574946707024923, | |
| "learning_rate": 2.9171007361473512e-06, | |
| "loss": 0.3503, | |
| "step": 405 | |
| }, | |
| { | |
| "epoch": 2.3288718929254304, | |
| "grad_norm": 0.3638977416533278, | |
| "learning_rate": 2.869973889249933e-06, | |
| "loss": 0.3211, | |
| "step": 406 | |
| }, | |
| { | |
| "epoch": 2.3346080305927344, | |
| "grad_norm": 0.396313353596241, | |
| "learning_rate": 2.8231669639475068e-06, | |
| "loss": 0.3283, | |
| "step": 407 | |
| }, | |
| { | |
| "epoch": 2.3403441682600383, | |
| "grad_norm": 0.4350084113143233, | |
| "learning_rate": 2.7766820604492474e-06, | |
| "loss": 0.3694, | |
| "step": 408 | |
| }, | |
| { | |
| "epoch": 2.3460803059273423, | |
| "grad_norm": 0.36417177098203485, | |
| "learning_rate": 2.7305212645153213e-06, | |
| "loss": 0.3277, | |
| "step": 409 | |
| }, | |
| { | |
| "epoch": 2.3518164435946463, | |
| "grad_norm": 0.36228834197930426, | |
| "learning_rate": 2.6846866473633126e-06, | |
| "loss": 0.3648, | |
| "step": 410 | |
| }, | |
| { | |
| "epoch": 2.35755258126195, | |
| "grad_norm": 0.37217108995589493, | |
| "learning_rate": 2.6391802655752853e-06, | |
| "loss": 0.3553, | |
| "step": 411 | |
| }, | |
| { | |
| "epoch": 2.363288718929254, | |
| "grad_norm": 0.3796921652078883, | |
| "learning_rate": 2.594004161005511e-06, | |
| "loss": 0.3953, | |
| "step": 412 | |
| }, | |
| { | |
| "epoch": 2.369024856596558, | |
| "grad_norm": 0.37234421223477726, | |
| "learning_rate": 2.5491603606888384e-06, | |
| "loss": 0.3431, | |
| "step": 413 | |
| }, | |
| { | |
| "epoch": 2.3747609942638626, | |
| "grad_norm": 0.3479981334692919, | |
| "learning_rate": 2.50465087674976e-06, | |
| "loss": 0.3587, | |
| "step": 414 | |
| }, | |
| { | |
| "epoch": 2.3804971319311665, | |
| "grad_norm": 0.37874728957661286, | |
| "learning_rate": 2.460477706312103e-06, | |
| "loss": 0.3871, | |
| "step": 415 | |
| }, | |
| { | |
| "epoch": 2.3862332695984705, | |
| "grad_norm": 0.3570282223249849, | |
| "learning_rate": 2.4166428314094514e-06, | |
| "loss": 0.3796, | |
| "step": 416 | |
| }, | |
| { | |
| "epoch": 2.3919694072657744, | |
| "grad_norm": 0.3547259024756538, | |
| "learning_rate": 2.373148218896182e-06, | |
| "loss": 0.3723, | |
| "step": 417 | |
| }, | |
| { | |
| "epoch": 2.3977055449330784, | |
| "grad_norm": 0.36243719147680803, | |
| "learning_rate": 2.32999582035923e-06, | |
| "loss": 0.375, | |
| "step": 418 | |
| }, | |
| { | |
| "epoch": 2.4034416826003824, | |
| "grad_norm": 0.3397982054868222, | |
| "learning_rate": 2.2871875720305158e-06, | |
| "loss": 0.359, | |
| "step": 419 | |
| }, | |
| { | |
| "epoch": 2.4091778202676863, | |
| "grad_norm": 0.3706667244009237, | |
| "learning_rate": 2.244725394700079e-06, | |
| "loss": 0.4019, | |
| "step": 420 | |
| }, | |
| { | |
| "epoch": 2.4149139579349903, | |
| "grad_norm": 0.3521855735970329, | |
| "learning_rate": 2.2026111936298687e-06, | |
| "loss": 0.339, | |
| "step": 421 | |
| }, | |
| { | |
| "epoch": 2.4206500956022943, | |
| "grad_norm": 0.36197027269129106, | |
| "learning_rate": 2.160846858468285e-06, | |
| "loss": 0.4068, | |
| "step": 422 | |
| }, | |
| { | |
| "epoch": 2.4263862332695982, | |
| "grad_norm": 0.3486550600296997, | |
| "learning_rate": 2.119434263165361e-06, | |
| "loss": 0.3472, | |
| "step": 423 | |
| }, | |
| { | |
| "epoch": 2.4321223709369026, | |
| "grad_norm": 0.36592179113764045, | |
| "learning_rate": 2.078375265888707e-06, | |
| "loss": 0.341, | |
| "step": 424 | |
| }, | |
| { | |
| "epoch": 2.4378585086042066, | |
| "grad_norm": 0.381502441618795, | |
| "learning_rate": 2.0376717089401166e-06, | |
| "loss": 0.3826, | |
| "step": 425 | |
| }, | |
| { | |
| "epoch": 2.4435946462715106, | |
| "grad_norm": 0.3690496531978919, | |
| "learning_rate": 1.9973254186729084e-06, | |
| "loss": 0.3562, | |
| "step": 426 | |
| }, | |
| { | |
| "epoch": 2.4493307839388145, | |
| "grad_norm": 0.37416288175512286, | |
| "learning_rate": 1.9573382054099786e-06, | |
| "loss": 0.4103, | |
| "step": 427 | |
| }, | |
| { | |
| "epoch": 2.4550669216061185, | |
| "grad_norm": 0.35395175975410714, | |
| "learning_rate": 1.917711863362581e-06, | |
| "loss": 0.3632, | |
| "step": 428 | |
| }, | |
| { | |
| "epoch": 2.4608030592734225, | |
| "grad_norm": 0.36581947179840435, | |
| "learning_rate": 1.8784481705498014e-06, | |
| "loss": 0.4023, | |
| "step": 429 | |
| }, | |
| { | |
| "epoch": 2.4665391969407264, | |
| "grad_norm": 0.33261262472276076, | |
| "learning_rate": 1.8395488887188007e-06, | |
| "loss": 0.3394, | |
| "step": 430 | |
| }, | |
| { | |
| "epoch": 2.472275334608031, | |
| "grad_norm": 0.3731396992459488, | |
| "learning_rate": 1.8010157632657544e-06, | |
| "loss": 0.3784, | |
| "step": 431 | |
| }, | |
| { | |
| "epoch": 2.478011472275335, | |
| "grad_norm": 0.33337120708056256, | |
| "learning_rate": 1.7628505231575321e-06, | |
| "loss": 0.372, | |
| "step": 432 | |
| }, | |
| { | |
| "epoch": 2.4837476099426388, | |
| "grad_norm": 0.3638826195090011, | |
| "learning_rate": 1.7250548808541324e-06, | |
| "loss": 0.4246, | |
| "step": 433 | |
| }, | |
| { | |
| "epoch": 2.4894837476099427, | |
| "grad_norm": 0.36142571281527636, | |
| "learning_rate": 1.687630532231833e-06, | |
| "loss": 0.3674, | |
| "step": 434 | |
| }, | |
| { | |
| "epoch": 2.4952198852772467, | |
| "grad_norm": 0.41140218172654, | |
| "learning_rate": 1.6505791565071139e-06, | |
| "loss": 0.3625, | |
| "step": 435 | |
| }, | |
| { | |
| "epoch": 2.5009560229445507, | |
| "grad_norm": 0.3451954148555015, | |
| "learning_rate": 1.6139024161612882e-06, | |
| "loss": 0.3604, | |
| "step": 436 | |
| }, | |
| { | |
| "epoch": 2.5066921606118546, | |
| "grad_norm": 0.32260290363728555, | |
| "learning_rate": 1.577601956865934e-06, | |
| "loss": 0.3679, | |
| "step": 437 | |
| }, | |
| { | |
| "epoch": 2.5124282982791586, | |
| "grad_norm": 0.34462182092709037, | |
| "learning_rate": 1.5416794074090258e-06, | |
| "loss": 0.3741, | |
| "step": 438 | |
| }, | |
| { | |
| "epoch": 2.5181644359464626, | |
| "grad_norm": 0.4730909294396266, | |
| "learning_rate": 1.5061363796218787e-06, | |
| "loss": 0.3667, | |
| "step": 439 | |
| }, | |
| { | |
| "epoch": 2.5239005736137665, | |
| "grad_norm": 0.3857798475766756, | |
| "learning_rate": 1.470974468306804e-06, | |
| "loss": 0.4242, | |
| "step": 440 | |
| }, | |
| { | |
| "epoch": 2.5296367112810705, | |
| "grad_norm": 0.3620511836720628, | |
| "learning_rate": 1.4361952511655618e-06, | |
| "loss": 0.3954, | |
| "step": 441 | |
| }, | |
| { | |
| "epoch": 2.535372848948375, | |
| "grad_norm": 0.330256905865385, | |
| "learning_rate": 1.4018002887285687e-06, | |
| "loss": 0.3261, | |
| "step": 442 | |
| }, | |
| { | |
| "epoch": 2.541108986615679, | |
| "grad_norm": 0.3714557217698261, | |
| "learning_rate": 1.3677911242848807e-06, | |
| "loss": 0.3538, | |
| "step": 443 | |
| }, | |
| { | |
| "epoch": 2.546845124282983, | |
| "grad_norm": 0.35180843150904273, | |
| "learning_rate": 1.334169283812936e-06, | |
| "loss": 0.3534, | |
| "step": 444 | |
| }, | |
| { | |
| "epoch": 2.552581261950287, | |
| "grad_norm": 0.3461429617840913, | |
| "learning_rate": 1.300936275912098e-06, | |
| "loss": 0.3934, | |
| "step": 445 | |
| }, | |
| { | |
| "epoch": 2.5583173996175907, | |
| "grad_norm": 0.3481927841057214, | |
| "learning_rate": 1.2680935917349524e-06, | |
| "loss": 0.3627, | |
| "step": 446 | |
| }, | |
| { | |
| "epoch": 2.5640535372848947, | |
| "grad_norm": 0.323481747190712, | |
| "learning_rate": 1.2356427049204122e-06, | |
| "loss": 0.3545, | |
| "step": 447 | |
| }, | |
| { | |
| "epoch": 2.569789674952199, | |
| "grad_norm": 0.3437913936704319, | |
| "learning_rate": 1.2035850715275865e-06, | |
| "loss": 0.3776, | |
| "step": 448 | |
| }, | |
| { | |
| "epoch": 2.575525812619503, | |
| "grad_norm": 0.3480873955124251, | |
| "learning_rate": 1.1719221299704497e-06, | |
| "loss": 0.3266, | |
| "step": 449 | |
| }, | |
| { | |
| "epoch": 2.581261950286807, | |
| "grad_norm": 0.35378460832555164, | |
| "learning_rate": 1.1406553009533028e-06, | |
| "loss": 0.3243, | |
| "step": 450 | |
| }, | |
| { | |
| "epoch": 2.586998087954111, | |
| "grad_norm": 0.3763656152527704, | |
| "learning_rate": 1.1097859874070294e-06, | |
| "loss": 0.4062, | |
| "step": 451 | |
| }, | |
| { | |
| "epoch": 2.592734225621415, | |
| "grad_norm": 0.3569062431922278, | |
| "learning_rate": 1.0793155744261352e-06, | |
| "loss": 0.3596, | |
| "step": 452 | |
| }, | |
| { | |
| "epoch": 2.598470363288719, | |
| "grad_norm": 0.3428907211304672, | |
| "learning_rate": 1.0492454292066178e-06, | |
| "loss": 0.3834, | |
| "step": 453 | |
| }, | |
| { | |
| "epoch": 2.604206500956023, | |
| "grad_norm": 0.33957138193960923, | |
| "learning_rate": 1.0195769009845992e-06, | |
| "loss": 0.3485, | |
| "step": 454 | |
| }, | |
| { | |
| "epoch": 2.609942638623327, | |
| "grad_norm": 0.3506861012073475, | |
| "learning_rate": 9.903113209758098e-07, | |
| "loss": 0.401, | |
| "step": 455 | |
| }, | |
| { | |
| "epoch": 2.615678776290631, | |
| "grad_norm": 0.35438156085082345, | |
| "learning_rate": 9.614500023158335e-07, | |
| "loss": 0.3622, | |
| "step": 456 | |
| }, | |
| { | |
| "epoch": 2.621414913957935, | |
| "grad_norm": 0.3235710698929606, | |
| "learning_rate": 9.329942400012059e-07, | |
| "loss": 0.3383, | |
| "step": 457 | |
| }, | |
| { | |
| "epoch": 2.6271510516252388, | |
| "grad_norm": 0.32641057302722415, | |
| "learning_rate": 9.049453108312967e-07, | |
| "loss": 0.3342, | |
| "step": 458 | |
| }, | |
| { | |
| "epoch": 2.632887189292543, | |
| "grad_norm": 0.3738134966287739, | |
| "learning_rate": 8.773044733510338e-07, | |
| "loss": 0.3916, | |
| "step": 459 | |
| }, | |
| { | |
| "epoch": 2.638623326959847, | |
| "grad_norm": 0.37276281277800666, | |
| "learning_rate": 8.50072967794413e-07, | |
| "loss": 0.398, | |
| "step": 460 | |
| }, | |
| { | |
| "epoch": 2.644359464627151, | |
| "grad_norm": 0.35059967338877185, | |
| "learning_rate": 8.232520160288704e-07, | |
| "loss": 0.3666, | |
| "step": 461 | |
| }, | |
| { | |
| "epoch": 2.650095602294455, | |
| "grad_norm": 0.34073212808078274, | |
| "learning_rate": 7.96842821500442e-07, | |
| "loss": 0.3533, | |
| "step": 462 | |
| }, | |
| { | |
| "epoch": 2.655831739961759, | |
| "grad_norm": 0.3648080091480956, | |
| "learning_rate": 7.708465691797718e-07, | |
| "loss": 0.3904, | |
| "step": 463 | |
| }, | |
| { | |
| "epoch": 2.661567877629063, | |
| "grad_norm": 0.3662958371430221, | |
| "learning_rate": 7.452644255089425e-07, | |
| "loss": 0.366, | |
| "step": 464 | |
| }, | |
| { | |
| "epoch": 2.667304015296367, | |
| "grad_norm": 0.33496912547986984, | |
| "learning_rate": 7.20097538349136e-07, | |
| "loss": 0.3518, | |
| "step": 465 | |
| }, | |
| { | |
| "epoch": 2.6730401529636714, | |
| "grad_norm": 0.3447271042188862, | |
| "learning_rate": 6.953470369291349e-07, | |
| "loss": 0.3531, | |
| "step": 466 | |
| }, | |
| { | |
| "epoch": 2.6787762906309753, | |
| "grad_norm": 0.3460068944711527, | |
| "learning_rate": 6.710140317946424e-07, | |
| "loss": 0.3756, | |
| "step": 467 | |
| }, | |
| { | |
| "epoch": 2.6845124282982793, | |
| "grad_norm": 0.34479127909993756, | |
| "learning_rate": 6.470996147584684e-07, | |
| "loss": 0.3952, | |
| "step": 468 | |
| }, | |
| { | |
| "epoch": 2.6902485659655833, | |
| "grad_norm": 0.3555895183537466, | |
| "learning_rate": 6.236048588515242e-07, | |
| "loss": 0.3534, | |
| "step": 469 | |
| }, | |
| { | |
| "epoch": 2.6959847036328872, | |
| "grad_norm": 0.36902112669931375, | |
| "learning_rate": 6.005308182746906e-07, | |
| "loss": 0.3591, | |
| "step": 470 | |
| }, | |
| { | |
| "epoch": 2.701720841300191, | |
| "grad_norm": 0.35072958260638964, | |
| "learning_rate": 5.778785283515054e-07, | |
| "loss": 0.3582, | |
| "step": 471 | |
| }, | |
| { | |
| "epoch": 2.707456978967495, | |
| "grad_norm": 0.3435970056521967, | |
| "learning_rate": 5.556490054817132e-07, | |
| "loss": 0.3988, | |
| "step": 472 | |
| }, | |
| { | |
| "epoch": 2.713193116634799, | |
| "grad_norm": 0.3360949092846702, | |
| "learning_rate": 5.33843247095659e-07, | |
| "loss": 0.3857, | |
| "step": 473 | |
| }, | |
| { | |
| "epoch": 2.718929254302103, | |
| "grad_norm": 0.36687321532000483, | |
| "learning_rate": 5.124622316095384e-07, | |
| "loss": 0.398, | |
| "step": 474 | |
| }, | |
| { | |
| "epoch": 2.724665391969407, | |
| "grad_norm": 0.34930058861444474, | |
| "learning_rate": 4.91506918381488e-07, | |
| "loss": 0.3443, | |
| "step": 475 | |
| }, | |
| { | |
| "epoch": 2.730401529636711, | |
| "grad_norm": 0.3562939165406292, | |
| "learning_rate": 4.709782476685476e-07, | |
| "loss": 0.393, | |
| "step": 476 | |
| }, | |
| { | |
| "epoch": 2.7361376673040154, | |
| "grad_norm": 0.3394312815950415, | |
| "learning_rate": 4.508771405844636e-07, | |
| "loss": 0.363, | |
| "step": 477 | |
| }, | |
| { | |
| "epoch": 2.7418738049713194, | |
| "grad_norm": 0.3132735001523765, | |
| "learning_rate": 4.3120449905836746e-07, | |
| "loss": 0.2888, | |
| "step": 478 | |
| }, | |
| { | |
| "epoch": 2.7476099426386233, | |
| "grad_norm": 0.35656415221445675, | |
| "learning_rate": 4.1196120579429786e-07, | |
| "loss": 0.369, | |
| "step": 479 | |
| }, | |
| { | |
| "epoch": 2.7533460803059273, | |
| "grad_norm": 0.32703889632124866, | |
| "learning_rate": 3.931481242315993e-07, | |
| "loss": 0.3687, | |
| "step": 480 | |
| }, | |
| { | |
| "epoch": 2.7590822179732313, | |
| "grad_norm": 0.3301574712781911, | |
| "learning_rate": 3.747660985061785e-07, | |
| "loss": 0.3511, | |
| "step": 481 | |
| }, | |
| { | |
| "epoch": 2.7648183556405352, | |
| "grad_norm": 0.3445182321765397, | |
| "learning_rate": 3.5681595341263144e-07, | |
| "loss": 0.3887, | |
| "step": 482 | |
| }, | |
| { | |
| "epoch": 2.7705544933078396, | |
| "grad_norm": 0.35436336092040566, | |
| "learning_rate": 3.392984943672273e-07, | |
| "loss": 0.3865, | |
| "step": 483 | |
| }, | |
| { | |
| "epoch": 2.7762906309751436, | |
| "grad_norm": 0.3384259106811793, | |
| "learning_rate": 3.2221450737178083e-07, | |
| "loss": 0.3501, | |
| "step": 484 | |
| }, | |
| { | |
| "epoch": 2.7820267686424476, | |
| "grad_norm": 0.3247213284472023, | |
| "learning_rate": 3.055647589783717e-07, | |
| "loss": 0.3548, | |
| "step": 485 | |
| }, | |
| { | |
| "epoch": 2.7877629063097515, | |
| "grad_norm": 0.3557473549812145, | |
| "learning_rate": 2.8934999625496287e-07, | |
| "loss": 0.3662, | |
| "step": 486 | |
| }, | |
| { | |
| "epoch": 2.7934990439770555, | |
| "grad_norm": 0.3086740514405346, | |
| "learning_rate": 2.735709467518699e-07, | |
| "loss": 0.3535, | |
| "step": 487 | |
| }, | |
| { | |
| "epoch": 2.7992351816443595, | |
| "grad_norm": 0.332762209409699, | |
| "learning_rate": 2.5822831846912035e-07, | |
| "loss": 0.3995, | |
| "step": 488 | |
| }, | |
| { | |
| "epoch": 2.8049713193116634, | |
| "grad_norm": 0.3355698798094922, | |
| "learning_rate": 2.4332279982468454e-07, | |
| "loss": 0.35, | |
| "step": 489 | |
| }, | |
| { | |
| "epoch": 2.8107074569789674, | |
| "grad_norm": 0.33988196442756863, | |
| "learning_rate": 2.2885505962359055e-07, | |
| "loss": 0.3727, | |
| "step": 490 | |
| }, | |
| { | |
| "epoch": 2.8164435946462714, | |
| "grad_norm": 0.3528168842787923, | |
| "learning_rate": 2.1482574702790804e-07, | |
| "loss": 0.3947, | |
| "step": 491 | |
| }, | |
| { | |
| "epoch": 2.8221797323135753, | |
| "grad_norm": 0.3440922584757431, | |
| "learning_rate": 2.0123549152762823e-07, | |
| "loss": 0.386, | |
| "step": 492 | |
| }, | |
| { | |
| "epoch": 2.8279158699808793, | |
| "grad_norm": 0.34807725449747917, | |
| "learning_rate": 1.8808490291241433e-07, | |
| "loss": 0.4096, | |
| "step": 493 | |
| }, | |
| { | |
| "epoch": 2.8336520076481837, | |
| "grad_norm": 0.3305293457677881, | |
| "learning_rate": 1.7537457124423896e-07, | |
| "loss": 0.3414, | |
| "step": 494 | |
| }, | |
| { | |
| "epoch": 2.8393881453154877, | |
| "grad_norm": 0.36760723971309167, | |
| "learning_rate": 1.631050668309131e-07, | |
| "loss": 0.3896, | |
| "step": 495 | |
| }, | |
| { | |
| "epoch": 2.8451242829827916, | |
| "grad_norm": 0.31341641748111104, | |
| "learning_rate": 1.5127694020049432e-07, | |
| "loss": 0.3823, | |
| "step": 496 | |
| }, | |
| { | |
| "epoch": 2.8508604206500956, | |
| "grad_norm": 0.34699149322701534, | |
| "learning_rate": 1.3989072207658328e-07, | |
| "loss": 0.3725, | |
| "step": 497 | |
| }, | |
| { | |
| "epoch": 2.8565965583173996, | |
| "grad_norm": 0.315099034771401, | |
| "learning_rate": 1.2894692335451376e-07, | |
| "loss": 0.2904, | |
| "step": 498 | |
| }, | |
| { | |
| "epoch": 2.8623326959847035, | |
| "grad_norm": 0.3665483830887578, | |
| "learning_rate": 1.1844603507842667e-07, | |
| "loss": 0.4438, | |
| "step": 499 | |
| }, | |
| { | |
| "epoch": 2.8680688336520075, | |
| "grad_norm": 0.32419747257007453, | |
| "learning_rate": 1.0838852841923541e-07, | |
| "loss": 0.3444, | |
| "step": 500 | |
| }, | |
| { | |
| "epoch": 2.873804971319312, | |
| "grad_norm": 0.33442199544484646, | |
| "learning_rate": 9.877485465349057e-08, | |
| "loss": 0.3635, | |
| "step": 501 | |
| }, | |
| { | |
| "epoch": 2.879541108986616, | |
| "grad_norm": 0.34177643056980267, | |
| "learning_rate": 8.960544514312275e-08, | |
| "loss": 0.3544, | |
| "step": 502 | |
| }, | |
| { | |
| "epoch": 2.88527724665392, | |
| "grad_norm": 0.3616719577615873, | |
| "learning_rate": 8.088071131609587e-08, | |
| "loss": 0.3823, | |
| "step": 503 | |
| }, | |
| { | |
| "epoch": 2.891013384321224, | |
| "grad_norm": 0.3351305518870644, | |
| "learning_rate": 7.260104464793971e-08, | |
| "loss": 0.3123, | |
| "step": 504 | |
| }, | |
| { | |
| "epoch": 2.8967495219885278, | |
| "grad_norm": 0.31649142659834123, | |
| "learning_rate": 6.476681664419171e-08, | |
| "loss": 0.329, | |
| "step": 505 | |
| }, | |
| { | |
| "epoch": 2.9024856596558317, | |
| "grad_norm": 0.34029420103528796, | |
| "learning_rate": 5.737837882371921e-08, | |
| "loss": 0.4483, | |
| "step": 506 | |
| }, | |
| { | |
| "epoch": 2.9082217973231357, | |
| "grad_norm": 0.35364341776084457, | |
| "learning_rate": 5.0436062702956536e-08, | |
| "loss": 0.4067, | |
| "step": 507 | |
| }, | |
| { | |
| "epoch": 2.9139579349904396, | |
| "grad_norm": 0.32827906264765444, | |
| "learning_rate": 4.394017978101905e-08, | |
| "loss": 0.3275, | |
| "step": 508 | |
| }, | |
| { | |
| "epoch": 2.9196940726577436, | |
| "grad_norm": 0.35463880013349575, | |
| "learning_rate": 3.789102152573665e-08, | |
| "loss": 0.3467, | |
| "step": 509 | |
| }, | |
| { | |
| "epoch": 2.9254302103250476, | |
| "grad_norm": 0.3610584326561177, | |
| "learning_rate": 3.228885936056858e-08, | |
| "loss": 0.3955, | |
| "step": 510 | |
| }, | |
| { | |
| "epoch": 2.9311663479923515, | |
| "grad_norm": 0.337230906489088, | |
| "learning_rate": 2.7133944652429912e-08, | |
| "loss": 0.3531, | |
| "step": 511 | |
| }, | |
| { | |
| "epoch": 2.936902485659656, | |
| "grad_norm": 0.33412720856216777, | |
| "learning_rate": 2.242650870040497e-08, | |
| "loss": 0.368, | |
| "step": 512 | |
| }, | |
| { | |
| "epoch": 2.94263862332696, | |
| "grad_norm": 0.3286692837778851, | |
| "learning_rate": 1.8166762725381205e-08, | |
| "loss": 0.3218, | |
| "step": 513 | |
| }, | |
| { | |
| "epoch": 2.948374760994264, | |
| "grad_norm": 0.32866930833280994, | |
| "learning_rate": 1.4354897860558992e-08, | |
| "loss": 0.3885, | |
| "step": 514 | |
| }, | |
| { | |
| "epoch": 2.954110898661568, | |
| "grad_norm": 0.34657396795048584, | |
| "learning_rate": 1.099108514288627e-08, | |
| "loss": 0.3649, | |
| "step": 515 | |
| }, | |
| { | |
| "epoch": 2.959847036328872, | |
| "grad_norm": 0.36485883198814917, | |
| "learning_rate": 8.075475505373576e-09, | |
| "loss": 0.3413, | |
| "step": 516 | |
| }, | |
| { | |
| "epoch": 2.9655831739961758, | |
| "grad_norm": 0.34064688921305847, | |
| "learning_rate": 5.608199770334999e-09, | |
| "loss": 0.337, | |
| "step": 517 | |
| }, | |
| { | |
| "epoch": 2.97131931166348, | |
| "grad_norm": 0.34881696844494053, | |
| "learning_rate": 3.5893686435028995e-09, | |
| "loss": 0.4078, | |
| "step": 518 | |
| }, | |
| { | |
| "epoch": 2.977055449330784, | |
| "grad_norm": 0.3403683643532366, | |
| "learning_rate": 2.019072709074088e-09, | |
| "loss": 0.3268, | |
| "step": 519 | |
| }, | |
| { | |
| "epoch": 2.982791586998088, | |
| "grad_norm": 0.3337616239063328, | |
| "learning_rate": 8.973824256364172e-10, | |
| "loss": 0.3836, | |
| "step": 520 | |
| }, | |
| { | |
| "epoch": 2.988527724665392, | |
| "grad_norm": 0.34240634016525656, | |
| "learning_rate": 2.2434812301352915e-10, | |
| "loss": 0.3802, | |
| "step": 521 | |
| }, | |
| { | |
| "epoch": 2.994263862332696, | |
| "grad_norm": 0.3228196417505892, | |
| "learning_rate": 0.0, | |
| "loss": 0.3441, | |
| "step": 522 | |
| }, | |
| { | |
| "epoch": 2.994263862332696, | |
| "step": 522, | |
| "total_flos": 5.620362022127534e+17, | |
| "train_loss": 0.45750690551324824, | |
| "train_runtime": 8551.5314, | |
| "train_samples_per_second": 5.862, | |
| "train_steps_per_second": 0.061 | |
| } | |
| ], | |
| "logging_steps": 1.0, | |
| "max_steps": 522, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 3, | |
| "save_steps": 500, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": true | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 5.620362022127534e+17, | |
| "train_batch_size": 1, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |
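| | |

The trainer state above is plain JSON once the table wrapping is removed. As a minimal sketch (not part of the original file), the snippet below shows one way the `log_history` could be loaded and summarized with the Python standard library; the path `trainer_state.json` is an assumed filename for wherever this state was saved.

```python
# Minimal sketch: load a saved trainer_state.json and summarize its log_history.
# Assumes the JSON above is stored at the hypothetical path "trainer_state.json".
import json

with open("trainer_state.json") as f:
    state = json.load(f)

# Per-step records carry "loss"; the final summary record carries "train_loss".
steps = [entry for entry in state["log_history"] if "loss" in entry]
print(f"logged steps: {len(steps)}")
print(f"last logged loss: {steps[-1]['loss']}")
print(f"mean logged loss: {sum(e['loss'] for e in steps) / len(steps):.4f}")

summary = state["log_history"][-1]
print(f"reported train_loss: {summary.get('train_loss')}")
print(f"train runtime (s): {summary.get('train_runtime')}")
```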