{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.9984871406959153,
  "eval_steps": 42,
  "global_step": 165,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.006051437216338881,
      "grad_norm": 322108055.4732022,
      "learning_rate": 4.000000000000001e-06,
      "loss": 1.5249,
      "step": 1
    },
    {
      "epoch": 0.012102874432677761,
      "grad_norm": 355567958.2983011,
      "learning_rate": 8.000000000000001e-06,
      "loss": 1.5229,
      "step": 2
    },
    {
      "epoch": 0.018154311649016642,
      "grad_norm": 423.8526182225412,
      "learning_rate": 1.2e-05,
      "loss": 1.5354,
      "step": 3
    },
    {
      "epoch": 0.024205748865355523,
      "grad_norm": 277.5486933849194,
      "learning_rate": 1.6000000000000003e-05,
      "loss": 1.4714,
      "step": 4
    },
    {
      "epoch": 0.030257186081694403,
      "grad_norm": 273.8460168069521,
      "learning_rate": 2e-05,
      "loss": 1.3484,
      "step": 5
    },
    {
      "epoch": 0.036308623298033284,
      "grad_norm": 112.32279225722033,
      "learning_rate": 1.9998072404820648e-05,
      "loss": 1.1769,
      "step": 6
    },
    {
      "epoch": 0.04236006051437216,
      "grad_norm": 218.62284085206207,
      "learning_rate": 1.9992290362407232e-05,
      "loss": 1.1161,
      "step": 7
    },
    {
      "epoch": 0.048411497730711045,
      "grad_norm": 19.085494659650685,
      "learning_rate": 1.998265610184716e-05,
      "loss": 1.0773,
      "step": 8
    },
    {
      "epoch": 0.05446293494704992,
      "grad_norm": 4.403325468378393,
      "learning_rate": 1.9969173337331283e-05,
      "loss": 1.0162,
      "step": 9
    },
    {
      "epoch": 0.060514372163388806,
      "grad_norm": 5.142034077751713,
      "learning_rate": 1.995184726672197e-05,
      "loss": 1.0376,
      "step": 10
    },
    {
      "epoch": 0.06656580937972768,
      "grad_norm": 4.287306300328979,
      "learning_rate": 1.9930684569549265e-05,
      "loss": 1.0029,
      "step": 11
    },
    {
      "epoch": 0.07261724659606657,
      "grad_norm": 3.9972486646227705,
      "learning_rate": 1.990569340443577e-05,
      "loss": 1.0038,
      "step": 12
    },
    {
      "epoch": 0.07866868381240545,
      "grad_norm": 3.590191395959283,
      "learning_rate": 1.9876883405951378e-05,
      "loss": 0.9865,
      "step": 13
    },
    {
      "epoch": 0.08472012102874432,
      "grad_norm": 2.7816117361481307,
      "learning_rate": 1.9844265680898917e-05,
      "loss": 0.9811,
      "step": 14
    },
    {
      "epoch": 0.0907715582450832,
      "grad_norm": 3.579650046953478,
      "learning_rate": 1.9807852804032306e-05,
      "loss": 0.9568,
      "step": 15
    },
    {
      "epoch": 0.09682299546142209,
      "grad_norm": 2.8686935445684334,
      "learning_rate": 1.9767658813208725e-05,
      "loss": 0.9674,
      "step": 16
    },
    {
      "epoch": 0.10287443267776097,
      "grad_norm": 3.6458707387233273,
      "learning_rate": 1.9723699203976768e-05,
      "loss": 0.9359,
      "step": 17
    },
    {
      "epoch": 0.10892586989409984,
      "grad_norm": 2.883416632767117,
      "learning_rate": 1.96759909236026e-05,
      "loss": 0.9651,
      "step": 18
    },
    {
      "epoch": 0.11497730711043873,
      "grad_norm": 2.8600382067348784,
      "learning_rate": 1.9624552364536472e-05,
      "loss": 0.9438,
      "step": 19
    },
    {
      "epoch": 0.12102874432677761,
      "grad_norm": 2.884830361179161,
      "learning_rate": 1.956940335732209e-05,
      "loss": 0.9554,
      "step": 20
    },
    {
      "epoch": 0.12708018154311648,
      "grad_norm": 2.476836373159168,
      "learning_rate": 1.9510565162951538e-05,
      "loss": 0.947,
      "step": 21
    },
    {
      "epoch": 0.13313161875945537,
      "grad_norm": 2.944165972805459,
      "learning_rate": 1.944806046466878e-05,
      "loss": 0.9527,
      "step": 22
    },
    {
      "epoch": 0.13918305597579425,
      "grad_norm": 2.3742259285155614,
      "learning_rate": 1.9381913359224844e-05,
      "loss": 0.902,
      "step": 23
    },
    {
      "epoch": 0.14523449319213314,
      "grad_norm": 3.0336829637888365,
      "learning_rate": 1.9312149347588035e-05,
      "loss": 0.915,
      "step": 24
    },
    {
      "epoch": 0.15128593040847202,
      "grad_norm": 2.6677796541697467,
      "learning_rate": 1.9238795325112867e-05,
      "loss": 0.953,
      "step": 25
    },
    {
      "epoch": 0.1573373676248109,
      "grad_norm": 2.309703771715573,
      "learning_rate": 1.916187957117136e-05,
      "loss": 0.8936,
      "step": 26
    },
    {
      "epoch": 0.16338880484114976,
      "grad_norm": 2.629974494687599,
      "learning_rate": 1.9081431738250815e-05,
      "loss": 0.9079,
      "step": 27
    },
    {
      "epoch": 0.16944024205748864,
      "grad_norm": 2.5585105826631103,
      "learning_rate": 1.8997482840522218e-05,
      "loss": 0.889,
      "step": 28
    },
    {
      "epoch": 0.17549167927382753,
      "grad_norm": 2.47972204943994,
      "learning_rate": 1.891006524188368e-05,
      "loss": 0.9031,
      "step": 29
    },
    {
      "epoch": 0.1815431164901664,
      "grad_norm": 2.321681224058349,
      "learning_rate": 1.881921264348355e-05,
      "loss": 0.8974,
      "step": 30
    },
    {
      "epoch": 0.1875945537065053,
      "grad_norm": 2.332021782915067,
      "learning_rate": 1.8724960070727974e-05,
      "loss": 0.8621,
      "step": 31
    },
    {
      "epoch": 0.19364599092284418,
      "grad_norm": 2.2862950649440172,
      "learning_rate": 1.862734385977792e-05,
      "loss": 0.8973,
      "step": 32
    },
    {
      "epoch": 0.19969742813918306,
      "grad_norm": 2.29932921318996,
      "learning_rate": 1.8526401643540924e-05,
      "loss": 0.8894,
      "step": 33
    },
    {
      "epoch": 0.20574886535552195,
      "grad_norm": 2.3159798136981187,
      "learning_rate": 1.8422172337162865e-05,
      "loss": 0.9114,
      "step": 34
    },
    {
      "epoch": 0.2118003025718608,
      "grad_norm": 2.3198744636701965,
      "learning_rate": 1.8314696123025456e-05,
      "loss": 0.8793,
      "step": 35
    },
    {
      "epoch": 0.2178517397881997,
      "grad_norm": 2.434268725037439,
      "learning_rate": 1.8204014435255136e-05,
      "loss": 0.8814,
      "step": 36
    },
    {
      "epoch": 0.22390317700453857,
      "grad_norm": 2.405117668347263,
      "learning_rate": 1.8090169943749477e-05,
      "loss": 0.8571,
      "step": 37
    },
    {
      "epoch": 0.22995461422087746,
      "grad_norm": 2.342704896759622,
      "learning_rate": 1.797320653772707e-05,
      "loss": 0.8755,
      "step": 38
    },
    {
      "epoch": 0.23600605143721634,
      "grad_norm": 2.417838836739983,
      "learning_rate": 1.785316930880745e-05,
      "loss": 0.9081,
      "step": 39
    },
    {
      "epoch": 0.24205748865355523,
      "grad_norm": 2.2078057388117562,
      "learning_rate": 1.773010453362737e-05,
      "loss": 0.8563,
      "step": 40
    },
    {
      "epoch": 0.2481089258698941,
      "grad_norm": 98.18997315518052,
      "learning_rate": 1.7604059656000313e-05,
      "loss": 0.8809,
      "step": 41
    },
    {
      "epoch": 0.25416036308623297,
      "grad_norm": 5.131232569983683,
      "learning_rate": 1.747508326862597e-05,
      "loss": 0.8612,
      "step": 42
    },
    {
      "epoch": 0.25416036308623297,
      "eval_loss": 0.8720352053642273,
      "eval_runtime": 18.6299,
      "eval_samples_per_second": 227.054,
      "eval_steps_per_second": 3.596,
      "step": 42
    },
    {
      "epoch": 0.26021180030257185,
      "grad_norm": 2.64020419160499,
      "learning_rate": 1.7343225094356857e-05,
      "loss": 0.8713,
      "step": 43
    },
    {
      "epoch": 0.26626323751891073,
      "grad_norm": 2.055772213227932,
      "learning_rate": 1.720853596702919e-05,
      "loss": 0.8599,
      "step": 44
    },
    {
      "epoch": 0.2723146747352496,
      "grad_norm": 2.391193584495332,
      "learning_rate": 1.7071067811865477e-05,
      "loss": 0.8716,
      "step": 45
    },
    {
      "epoch": 0.2783661119515885,
      "grad_norm": 2.3262868956430935,
      "learning_rate": 1.6930873625456362e-05,
      "loss": 0.8696,
      "step": 46
    },
    {
      "epoch": 0.2844175491679274,
      "grad_norm": 2.4150484400617063,
      "learning_rate": 1.678800745532942e-05,
      "loss": 0.8601,
      "step": 47
    },
    {
      "epoch": 0.29046898638426627,
      "grad_norm": 2.1160799680175355,
      "learning_rate": 1.664252437911282e-05,
      "loss": 0.8683,
      "step": 48
    },
    {
      "epoch": 0.29652042360060515,
      "grad_norm": 2.141034341547075,
      "learning_rate": 1.6494480483301836e-05,
      "loss": 0.8488,
      "step": 49
    },
    {
      "epoch": 0.30257186081694404,
      "grad_norm": 2.2446379091355326,
      "learning_rate": 1.6343932841636455e-05,
      "loss": 0.8498,
      "step": 50
    },
    {
      "epoch": 0.3086232980332829,
      "grad_norm": 1.8953751487603734,
      "learning_rate": 1.6190939493098344e-05,
      "loss": 0.8436,
      "step": 51
    },
    {
      "epoch": 0.3146747352496218,
      "grad_norm": 2.3495639918564706,
      "learning_rate": 1.6035559419535714e-05,
      "loss": 0.8514,
      "step": 52
    },
    {
      "epoch": 0.3207261724659607,
      "grad_norm": 2.089228593794166,
      "learning_rate": 1.5877852522924733e-05,
      "loss": 0.8186,
      "step": 53
    },
    {
      "epoch": 0.3267776096822995,
      "grad_norm": 1.9846619076339054,
      "learning_rate": 1.5717879602276123e-05,
      "loss": 0.8099,
      "step": 54
    },
    {
      "epoch": 0.3328290468986384,
      "grad_norm": 2.1032076715725228,
      "learning_rate": 1.5555702330196024e-05,
      "loss": 0.8257,
      "step": 55
    },
    {
      "epoch": 0.3388804841149773,
      "grad_norm": 1.9288875770005138,
      "learning_rate": 1.5391383229110005e-05,
      "loss": 0.8272,
      "step": 56
    },
    {
      "epoch": 0.34493192133131617,
      "grad_norm": 2.0377322835567218,
      "learning_rate": 1.5224985647159489e-05,
      "loss": 0.842,
      "step": 57
    },
    {
      "epoch": 0.35098335854765506,
      "grad_norm": 1.9170865504462093,
      "learning_rate": 1.5056573733779848e-05,
      "loss": 0.8247,
      "step": 58
    },
    {
      "epoch": 0.35703479576399394,
      "grad_norm": 1.9956821044576363,
      "learning_rate": 1.4886212414969551e-05,
      "loss": 0.819,
      "step": 59
    },
    {
      "epoch": 0.3630862329803328,
      "grad_norm": 18.62969206014239,
      "learning_rate": 1.4713967368259981e-05,
      "loss": 0.8437,
      "step": 60
    },
    {
      "epoch": 0.3691376701966717,
      "grad_norm": 3.367255140934318,
      "learning_rate": 1.4539904997395468e-05,
      "loss": 0.8367,
      "step": 61
    },
    {
      "epoch": 0.3751891074130106,
      "grad_norm": 2.5151226639465265,
      "learning_rate": 1.436409240673342e-05,
      "loss": 0.8425,
      "step": 62
    },
    {
      "epoch": 0.3812405446293495,
      "grad_norm": 1.9600047451684992,
      "learning_rate": 1.4186597375374283e-05,
      "loss": 0.8658,
      "step": 63
    },
    {
      "epoch": 0.38729198184568836,
      "grad_norm": 2.4222060882815892,
      "learning_rate": 1.4007488331031409e-05,
      "loss": 0.8161,
      "step": 64
    },
    {
      "epoch": 0.39334341906202724,
      "grad_norm": 2.2477181276420173,
      "learning_rate": 1.3826834323650899e-05,
      "loss": 0.8656,
      "step": 65
    },
    {
      "epoch": 0.39939485627836613,
      "grad_norm": 1.8742784721591048,
      "learning_rate": 1.3644704998791501e-05,
      "loss": 0.809,
      "step": 66
    },
    {
      "epoch": 0.405446293494705,
      "grad_norm": 2.1877492110983243,
      "learning_rate": 1.346117057077493e-05,
      "loss": 0.8128,
      "step": 67
    },
    {
      "epoch": 0.4114977307110439,
      "grad_norm": 2.5045082263088756,
      "learning_rate": 1.3276301795616937e-05,
      "loss": 0.8306,
      "step": 68
    },
    {
      "epoch": 0.4175491679273828,
      "grad_norm": 9.178317432612376,
      "learning_rate": 1.3090169943749475e-05,
      "loss": 0.8362,
      "step": 69
    },
    {
      "epoch": 0.4236006051437216,
      "grad_norm": 1.9516499520055601,
      "learning_rate": 1.2902846772544625e-05,
      "loss": 0.8416,
      "step": 70
    },
    {
      "epoch": 0.4296520423600605,
      "grad_norm": 2.056957203256862,
      "learning_rate": 1.2714404498650743e-05,
      "loss": 0.8175,
      "step": 71
    },
    {
      "epoch": 0.4357034795763994,
      "grad_norm": 1.7996994310274146,
      "learning_rate": 1.252491577015158e-05,
      "loss": 0.8221,
      "step": 72
    },
    {
      "epoch": 0.44175491679273826,
      "grad_norm": 2.0147397373742324,
      "learning_rate": 1.2334453638559057e-05,
      "loss": 0.8303,
      "step": 73
    },
    {
      "epoch": 0.44780635400907715,
      "grad_norm": 1.927010852316909,
      "learning_rate": 1.2143091530650508e-05,
      "loss": 0.8164,
      "step": 74
    },
    {
      "epoch": 0.45385779122541603,
      "grad_norm": 1.8247659306832096,
      "learning_rate": 1.1950903220161286e-05,
      "loss": 0.8155,
      "step": 75
    },
    {
      "epoch": 0.4599092284417549,
      "grad_norm": 1.8869494426432945,
      "learning_rate": 1.1757962799343548e-05,
      "loss": 0.7975,
      "step": 76
    },
    {
      "epoch": 0.4659606656580938,
      "grad_norm": 1.803043385328956,
      "learning_rate": 1.156434465040231e-05,
      "loss": 0.7827,
      "step": 77
    },
    {
      "epoch": 0.4720121028744327,
      "grad_norm": 2.217030000986005,
      "learning_rate": 1.1370123416819683e-05,
      "loss": 0.8282,
      "step": 78
    },
    {
      "epoch": 0.47806354009077157,
      "grad_norm": 1.901237420277299,
      "learning_rate": 1.1175373974578378e-05,
      "loss": 0.8325,
      "step": 79
    },
    {
      "epoch": 0.48411497730711045,
      "grad_norm": 1.7707085530448825,
      "learning_rate": 1.098017140329561e-05,
      "loss": 0.8161,
      "step": 80
    },
    {
      "epoch": 0.49016641452344933,
      "grad_norm": 1.8472271061761294,
      "learning_rate": 1.0784590957278452e-05,
      "loss": 0.8233,
      "step": 81
    },
    {
      "epoch": 0.4962178517397882,
      "grad_norm": 1.7924659284438058,
      "learning_rate": 1.058870803651189e-05,
      "loss": 0.8014,
      "step": 82
    },
    {
      "epoch": 0.5022692889561271,
      "grad_norm": 1.8913059036929798,
      "learning_rate": 1.0392598157590687e-05,
      "loss": 0.8398,
      "step": 83
    },
    {
      "epoch": 0.5083207261724659,
      "grad_norm": 2.0067650663505376,
      "learning_rate": 1.0196336924606282e-05,
      "loss": 0.8251,
      "step": 84
    },
    {
      "epoch": 0.5083207261724659,
      "eval_loss": 0.8105137348175049,
      "eval_runtime": 15.8887,
      "eval_samples_per_second": 266.227,
      "eval_steps_per_second": 4.217,
      "step": 84
    },
    {
      "epoch": 0.5143721633888049,
      "grad_norm": 1.8193592150156823,
      "learning_rate": 1e-05,
      "loss": 0.8059,
      "step": 85
    },
    {
      "epoch": 0.5204236006051437,
      "grad_norm": 1.7674018898267811,
      "learning_rate": 9.80366307539372e-06,
      "loss": 0.7919,
      "step": 86
    },
    {
      "epoch": 0.5264750378214826,
      "grad_norm": 1.821253850079311,
      "learning_rate": 9.607401842409318e-06,
      "loss": 0.8175,
      "step": 87
    },
    {
      "epoch": 0.5325264750378215,
      "grad_norm": 1.7701691651746592,
      "learning_rate": 9.41129196348811e-06,
      "loss": 0.8067,
      "step": 88
    },
    {
      "epoch": 0.5385779122541604,
      "grad_norm": 1.8706627989987974,
      "learning_rate": 9.215409042721553e-06,
      "loss": 0.8277,
      "step": 89
    },
    {
      "epoch": 0.5446293494704992,
      "grad_norm": 1.7301034510569795,
      "learning_rate": 9.019828596704394e-06,
      "loss": 0.7623,
      "step": 90
    },
    {
      "epoch": 0.5506807866868382,
      "grad_norm": 1.8109305813892944,
      "learning_rate": 8.824626025421625e-06,
      "loss": 0.7968,
      "step": 91
    },
    {
      "epoch": 0.556732223903177,
      "grad_norm": 1.7916083013522495,
      "learning_rate": 8.629876583180322e-06,
      "loss": 0.7906,
      "step": 92
    },
    {
      "epoch": 0.5627836611195158,
      "grad_norm": 1.723041951018134,
      "learning_rate": 8.43565534959769e-06,
      "loss": 0.7923,
      "step": 93
    },
    {
      "epoch": 0.5688350983358548,
      "grad_norm": 1.906015357724372,
      "learning_rate": 8.242037200656455e-06,
      "loss": 0.8064,
      "step": 94
    },
    {
      "epoch": 0.5748865355521936,
      "grad_norm": 1.7522630769630916,
      "learning_rate": 8.04909677983872e-06,
      "loss": 0.7945,
      "step": 95
    },
    {
      "epoch": 0.5809379727685325,
      "grad_norm": 1.7968696551333927,
      "learning_rate": 7.856908469349495e-06,
      "loss": 0.7894,
      "step": 96
    },
    {
      "epoch": 0.5869894099848714,
      "grad_norm": 1.8942858022507922,
      "learning_rate": 7.66554636144095e-06,
      "loss": 0.8282,
      "step": 97
    },
    {
      "epoch": 0.5930408472012103,
      "grad_norm": 1.8099448490609888,
      "learning_rate": 7.4750842298484205e-06,
      "loss": 0.8045,
      "step": 98
    },
    {
      "epoch": 0.5990922844175491,
      "grad_norm": 1.777972803175514,
      "learning_rate": 7.285595501349259e-06,
      "loss": 0.8025,
      "step": 99
    },
    {
      "epoch": 0.6051437216338881,
      "grad_norm": 1.783144416540341,
      "learning_rate": 7.097153227455379e-06,
      "loss": 0.8243,
      "step": 100
    },
    {
      "epoch": 0.6111951588502269,
      "grad_norm": 1.7896936879748115,
      "learning_rate": 6.909830056250527e-06,
      "loss": 0.8098,
      "step": 101
    },
    {
      "epoch": 0.6172465960665658,
      "grad_norm": 1.7287133254606566,
      "learning_rate": 6.723698204383067e-06,
      "loss": 0.7946,
      "step": 102
    },
    {
      "epoch": 0.6232980332829047,
      "grad_norm": 1.8036680843631874,
      "learning_rate": 6.538829429225068e-06,
      "loss": 0.7762,
      "step": 103
    },
    {
      "epoch": 0.6293494704992436,
      "grad_norm": 1.8545633375284538,
      "learning_rate": 6.355295001208504e-06,
      "loss": 0.8059,
      "step": 104
    },
    {
      "epoch": 0.6354009077155824,
      "grad_norm": 1.865666266019574,
      "learning_rate": 6.173165676349103e-06,
      "loss": 0.7859,
      "step": 105
    },
    {
      "epoch": 0.6414523449319214,
      "grad_norm": 1.6652988720333,
      "learning_rate": 5.9925116689685925e-06,
      "loss": 0.767,
      "step": 106
    },
    {
      "epoch": 0.6475037821482602,
      "grad_norm": 1.761164487737613,
      "learning_rate": 5.813402624625722e-06,
      "loss": 0.7818,
      "step": 107
    },
    {
      "epoch": 0.653555219364599,
      "grad_norm": 1.8325436940760458,
      "learning_rate": 5.635907593266578e-06,
      "loss": 0.7797,
      "step": 108
    },
    {
      "epoch": 0.659606656580938,
      "grad_norm": 1.7458851309984722,
      "learning_rate": 5.460095002604533e-06,
      "loss": 0.7929,
      "step": 109
    },
    {
      "epoch": 0.6656580937972768,
      "grad_norm": 1.7511730155909075,
      "learning_rate": 5.286032631740023e-06,
      "loss": 0.8146,
      "step": 110
    },
    {
      "epoch": 0.6717095310136157,
      "grad_norm": 1.6942068341047123,
      "learning_rate": 5.1137875850304545e-06,
      "loss": 0.7714,
      "step": 111
    },
    {
      "epoch": 0.6777609682299546,
      "grad_norm": 1.7566917917926772,
      "learning_rate": 4.943426266220156e-06,
      "loss": 0.7956,
      "step": 112
    },
    {
      "epoch": 0.6838124054462935,
      "grad_norm": 1.713757862020219,
      "learning_rate": 4.775014352840512e-06,
      "loss": 0.7862,
      "step": 113
    },
    {
      "epoch": 0.6898638426626323,
      "grad_norm": 1.7331168505190726,
      "learning_rate": 4.608616770889998e-06,
      "loss": 0.7804,
      "step": 114
    },
    {
      "epoch": 0.6959152798789713,
      "grad_norm": 1.8104874747906492,
      "learning_rate": 4.444297669803981e-06,
      "loss": 0.8187,
      "step": 115
    },
    {
      "epoch": 0.7019667170953101,
      "grad_norm": 1.6744618936972424,
      "learning_rate": 4.282120397723879e-06,
      "loss": 0.7704,
      "step": 116
    },
    {
      "epoch": 0.708018154311649,
      "grad_norm": 1.7261151885353918,
      "learning_rate": 4.12214747707527e-06,
      "loss": 0.7902,
      "step": 117
    },
    {
      "epoch": 0.7140695915279879,
      "grad_norm": 1.7369036940485543,
      "learning_rate": 3.964440580464286e-06,
      "loss": 0.8147,
      "step": 118
    },
    {
      "epoch": 0.7201210287443268,
      "grad_norm": 1.7029028180642818,
      "learning_rate": 3.8090605069016596e-06,
      "loss": 0.8068,
      "step": 119
    },
    {
      "epoch": 0.7261724659606656,
      "grad_norm": 1.7441391740410612,
      "learning_rate": 3.6560671583635467e-06,
      "loss": 0.8065,
      "step": 120
    },
    {
      "epoch": 0.7322239031770046,
      "grad_norm": 1.7387201084449406,
      "learning_rate": 3.505519516698165e-06,
      "loss": 0.8015,
      "step": 121
    },
    {
      "epoch": 0.7382753403933434,
      "grad_norm": 1.697622312997327,
      "learning_rate": 3.3574756208871862e-06,
      "loss": 0.7832,
      "step": 122
    },
    {
      "epoch": 0.7443267776096822,
      "grad_norm": 1.731136127294693,
      "learning_rate": 3.2119925446705824e-06,
      "loss": 0.79,
      "step": 123
    },
    {
      "epoch": 0.7503782148260212,
      "grad_norm": 1.6365994311291872,
      "learning_rate": 3.069126374543643e-06,
      "loss": 0.7865,
      "step": 124
    },
    {
      "epoch": 0.75642965204236,
      "grad_norm": 1.6359863467519138,
      "learning_rate": 2.9289321881345257e-06,
      "loss": 0.7668,
      "step": 125
    },
    {
      "epoch": 0.762481089258699,
      "grad_norm": 1.709421331560918,
      "learning_rate": 2.791464032970812e-06,
      "loss": 0.7812,
      "step": 126
    },
    {
      "epoch": 0.762481089258699,
      "eval_loss": 0.7835145592689514,
      "eval_runtime": 15.87,
      "eval_samples_per_second": 266.541,
      "eval_steps_per_second": 4.222,
      "step": 126
    },
    {
      "epoch": 0.7685325264750378,
      "grad_norm": 1.7259657772685697,
      "learning_rate": 2.656774905643147e-06,
      "loss": 0.7763,
      "step": 127
    },
    {
      "epoch": 0.7745839636913767,
      "grad_norm": 1.663157745899699,
      "learning_rate": 2.5249167313740307e-06,
      "loss": 0.7886,
      "step": 128
    },
    {
      "epoch": 0.7806354009077155,
      "grad_norm": 1.7071181408125617,
      "learning_rate": 2.395940343999691e-06,
      "loss": 0.7974,
      "step": 129
    },
    {
      "epoch": 0.7866868381240545,
      "grad_norm": 1.7398875364830548,
      "learning_rate": 2.26989546637263e-06,
      "loss": 0.7922,
      "step": 130
    },
    {
      "epoch": 0.7927382753403933,
      "grad_norm": 1.7262145967298637,
      "learning_rate": 2.146830691192553e-06,
      "loss": 0.7857,
      "step": 131
    },
    {
      "epoch": 0.7987897125567323,
      "grad_norm": 1.7599571064642554,
      "learning_rate": 2.02679346227293e-06,
      "loss": 0.8111,
      "step": 132
    },
    {
      "epoch": 0.8048411497730711,
      "grad_norm": 1.764758571264166,
      "learning_rate": 1.9098300562505266e-06,
      "loss": 0.7884,
      "step": 133
    },
    {
      "epoch": 0.81089258698941,
      "grad_norm": 1.666273478784506,
      "learning_rate": 1.7959855647448642e-06,
      "loss": 0.7669,
      "step": 134
    },
    {
      "epoch": 0.8169440242057489,
      "grad_norm": 1.644818476714854,
      "learning_rate": 1.6853038769745466e-06,
      "loss": 0.7879,
      "step": 135
    },
    {
      "epoch": 0.8229954614220878,
      "grad_norm": 1.6371021662929024,
      "learning_rate": 1.577827662837136e-06,
      "loss": 0.7672,
      "step": 136
    },
    {
      "epoch": 0.8290468986384266,
      "grad_norm": 1.7096483505883402,
      "learning_rate": 1.4735983564590784e-06,
      "loss": 0.7785,
      "step": 137
    },
    {
      "epoch": 0.8350983358547656,
      "grad_norm": 1.637601012545836,
      "learning_rate": 1.3726561402220818e-06,
      "loss": 0.7837,
      "step": 138
    },
    {
      "epoch": 0.8411497730711044,
      "grad_norm": 1.6793180233293152,
      "learning_rate": 1.2750399292720284e-06,
      "loss": 0.7769,
      "step": 139
    },
    {
      "epoch": 0.8472012102874432,
      "grad_norm": 1.6978760771073718,
      "learning_rate": 1.1807873565164507e-06,
      "loss": 0.7658,
      "step": 140
    },
    {
      "epoch": 0.8532526475037822,
      "grad_norm": 1.6749246259946589,
      "learning_rate": 1.0899347581163222e-06,
      "loss": 0.7711,
      "step": 141
    },
    {
      "epoch": 0.859304084720121,
      "grad_norm": 1.6758050707650343,
      "learning_rate": 1.0025171594777872e-06,
      "loss": 0.7686,
      "step": 142
    },
    {
      "epoch": 0.8653555219364599,
      "grad_norm": 1.7468688446067502,
      "learning_rate": 9.185682617491865e-07,
      "loss": 0.8124,
      "step": 143
    },
    {
      "epoch": 0.8714069591527988,
      "grad_norm": 1.6696901769364607,
      "learning_rate": 8.381204288286415e-07,
      "loss": 0.7753,
      "step": 144
    },
    {
      "epoch": 0.8774583963691377,
      "grad_norm": 1.7545342313458392,
      "learning_rate": 7.612046748871327e-07,
      "loss": 0.7826,
      "step": 145
    },
    {
      "epoch": 0.8835098335854765,
      "grad_norm": 1.720087490611314,
      "learning_rate": 6.878506524119644e-07,
      "loss": 0.7841,
      "step": 146
    },
    {
      "epoch": 0.8895612708018155,
      "grad_norm": 1.7154929931979837,
      "learning_rate": 6.180866407751595e-07,
      "loss": 0.8065,
      "step": 147
    },
    {
      "epoch": 0.8956127080181543,
      "grad_norm": 1.6938285717094959,
      "learning_rate": 5.519395353312195e-07,
      "loss": 0.7599,
      "step": 148
    },
    {
      "epoch": 0.9016641452344932,
      "grad_norm": 1.7221938520673765,
      "learning_rate": 4.894348370484648e-07,
      "loss": 0.7921,
      "step": 149
    },
    {
      "epoch": 0.9077155824508321,
      "grad_norm": 1.7085140075465948,
      "learning_rate": 4.305966426779118e-07,
      "loss": 0.7883,
      "step": 150
    },
    {
      "epoch": 0.913767019667171,
      "grad_norm": 1.6823778148451014,
      "learning_rate": 3.7544763546352834e-07,
      "loss": 0.762,
      "step": 151
    },
    {
      "epoch": 0.9198184568835098,
      "grad_norm": 1.6556587985252844,
      "learning_rate": 3.2400907639740243e-07,
      "loss": 0.7801,
      "step": 152
    },
    {
      "epoch": 0.9258698940998488,
      "grad_norm": 1.682814477644372,
      "learning_rate": 2.7630079602323447e-07,
      "loss": 0.7711,
      "step": 153
    },
    {
      "epoch": 0.9319213313161876,
      "grad_norm": 1.6477141580840275,
      "learning_rate": 2.3234118679127615e-07,
      "loss": 0.7615,
      "step": 154
    },
    {
      "epoch": 0.9379727685325264,
      "grad_norm": 1.7152855010369743,
      "learning_rate": 1.921471959676957e-07,
      "loss": 0.8066,
      "step": 155
    },
    {
      "epoch": 0.9440242057488654,
      "grad_norm": 1.7156519942956723,
      "learning_rate": 1.5573431910108404e-07,
      "loss": 0.7754,
      "step": 156
    },
    {
      "epoch": 0.9500756429652042,
      "grad_norm": 1.6843373848088317,
      "learning_rate": 1.231165940486234e-07,
      "loss": 0.7766,
      "step": 157
    },
    {
      "epoch": 0.9561270801815431,
      "grad_norm": 1.6626529442380376,
      "learning_rate": 9.43065955642275e-08,
      "loss": 0.7709,
      "step": 158
    },
    {
      "epoch": 0.962178517397882,
      "grad_norm": 1.648515446524827,
      "learning_rate": 6.931543045073708e-08,
      "loss": 0.7766,
      "step": 159
    },
    {
      "epoch": 0.9682299546142209,
      "grad_norm": 1.666828705980742,
      "learning_rate": 4.815273327803183e-08,
      "loss": 0.7756,
      "step": 160
    },
    {
      "epoch": 0.9742813918305597,
      "grad_norm": 1.6783538075057698,
      "learning_rate": 3.082666266872036e-08,
      "loss": 0.7934,
      "step": 161
    },
    {
      "epoch": 0.9803328290468987,
      "grad_norm": 1.6237786363290807,
      "learning_rate": 1.7343898152841765e-08,
      "loss": 0.7769,
      "step": 162
    },
    {
      "epoch": 0.9863842662632375,
      "grad_norm": 1.619244118831079,
      "learning_rate": 7.70963759277099e-09,
      "loss": 0.7587,
      "step": 163
    },
    {
      "epoch": 0.9924357034795764,
      "grad_norm": 1.7166293751335602,
      "learning_rate": 1.9275951793518154e-09,
      "loss": 0.7877,
      "step": 164
    },
    {
      "epoch": 0.9984871406959153,
      "grad_norm": 1.7273091506087734,
      "learning_rate": 0.0,
      "loss": 0.7917,
      "step": 165
    },
    {
      "epoch": 0.9984871406959153,
      "step": 165,
      "total_flos": 104016591716352.0,
      "train_loss": 0.8553301778706637,
      "train_runtime": 654.3507,
      "train_samples_per_second": 64.649,
      "train_steps_per_second": 0.252
    }
  ],
  "logging_steps": 1.0,
  "max_steps": 165,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 42,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 104016591716352.0,
  "train_batch_size": 2,
  "trial_name": null,
  "trial_params": null
}