{
  "best_global_step": null,
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 3.0,
  "eval_steps": 500,
  "global_step": 12021,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
| "log_history": [ | |
| { | |
| "epoch": 0.012478163214374845, | |
| "grad_norm": 4.458792209625244, | |
| "learning_rate": 6.786703601108035e-07, | |
| "loss": 0.5794, | |
| "step": 50 | |
| }, | |
| { | |
| "epoch": 0.02495632642874969, | |
| "grad_norm": 3.041719436645508, | |
| "learning_rate": 1.371191135734072e-06, | |
| "loss": 0.5188, | |
| "step": 100 | |
| }, | |
| { | |
| "epoch": 0.037434489643124534, | |
| "grad_norm": 10.141822814941406, | |
| "learning_rate": 2.0637119113573407e-06, | |
| "loss": 0.5702, | |
| "step": 150 | |
| }, | |
| { | |
| "epoch": 0.04991265285749938, | |
| "grad_norm": 4.34501838684082, | |
| "learning_rate": 2.7562326869806094e-06, | |
| "loss": 0.5027, | |
| "step": 200 | |
| }, | |
| { | |
| "epoch": 0.062390816071874224, | |
| "grad_norm": 6.478577613830566, | |
| "learning_rate": 3.448753462603878e-06, | |
| "loss": 0.5349, | |
| "step": 250 | |
| }, | |
| { | |
| "epoch": 0.07486897928624907, | |
| "grad_norm": 4.510005950927734, | |
| "learning_rate": 4.141274238227147e-06, | |
| "loss": 0.5024, | |
| "step": 300 | |
| }, | |
| { | |
| "epoch": 0.0873471425006239, | |
| "grad_norm": 4.612236976623535, | |
| "learning_rate": 4.833795013850416e-06, | |
| "loss": 0.4693, | |
| "step": 350 | |
| }, | |
| { | |
| "epoch": 0.09982530571499876, | |
| "grad_norm": 12.051955223083496, | |
| "learning_rate": 5.526315789473685e-06, | |
| "loss": 0.5274, | |
| "step": 400 | |
| }, | |
| { | |
| "epoch": 0.1123034689293736, | |
| "grad_norm": 11.082642555236816, | |
| "learning_rate": 6.218836565096953e-06, | |
| "loss": 0.4646, | |
| "step": 450 | |
| }, | |
| { | |
| "epoch": 0.12478163214374845, | |
| "grad_norm": 5.821896553039551, | |
| "learning_rate": 6.911357340720221e-06, | |
| "loss": 0.4762, | |
| "step": 500 | |
| }, | |
| { | |
| "epoch": 0.13725979535812327, | |
| "grad_norm": 10.287369728088379, | |
| "learning_rate": 7.6038781163434905e-06, | |
| "loss": 0.5373, | |
| "step": 550 | |
| }, | |
| { | |
| "epoch": 0.14973795857249814, | |
| "grad_norm": 6.837547779083252, | |
| "learning_rate": 8.29639889196676e-06, | |
| "loss": 0.4746, | |
| "step": 600 | |
| }, | |
| { | |
| "epoch": 0.16221612178687297, | |
| "grad_norm": 5.090405464172363, | |
| "learning_rate": 8.988919667590029e-06, | |
| "loss": 0.4974, | |
| "step": 650 | |
| }, | |
| { | |
| "epoch": 0.1746942850012478, | |
| "grad_norm": 7.571693420410156, | |
| "learning_rate": 9.681440443213298e-06, | |
| "loss": 0.4859, | |
| "step": 700 | |
| }, | |
| { | |
| "epoch": 0.18717244821562265, | |
| "grad_norm": 5.327805042266846, | |
| "learning_rate": 9.999859108359658e-06, | |
| "loss": 0.4728, | |
| "step": 750 | |
| }, | |
| { | |
| "epoch": 0.19965061142999752, | |
| "grad_norm": 8.96234130859375, | |
| "learning_rate": 9.998854158364854e-06, | |
| "loss": 0.4707, | |
| "step": 800 | |
| }, | |
| { | |
| "epoch": 0.21212877464437235, | |
| "grad_norm": 6.393317222595215, | |
| "learning_rate": 9.996883105864235e-06, | |
| "loss": 0.4647, | |
| "step": 850 | |
| }, | |
| { | |
| "epoch": 0.2246069378587472, | |
| "grad_norm": 6.38814115524292, | |
| "learning_rate": 9.993946331792856e-06, | |
| "loss": 0.4589, | |
| "step": 900 | |
| }, | |
| { | |
| "epoch": 0.23708510107312203, | |
| "grad_norm": 16.268198013305664, | |
| "learning_rate": 9.990044403725743e-06, | |
| "loss": 0.4048, | |
| "step": 950 | |
| }, | |
| { | |
| "epoch": 0.2495632642874969, | |
| "grad_norm": 3.3333048820495605, | |
| "learning_rate": 9.985178075768209e-06, | |
| "loss": 0.5081, | |
| "step": 1000 | |
| }, | |
| { | |
| "epoch": 0.2620414275018717, | |
| "grad_norm": 10.300507545471191, | |
| "learning_rate": 9.979348288410111e-06, | |
| "loss": 0.4608, | |
| "step": 1050 | |
| }, | |
| { | |
| "epoch": 0.27451959071624654, | |
| "grad_norm": 6.351011276245117, | |
| "learning_rate": 9.97255616834409e-06, | |
| "loss": 0.4638, | |
| "step": 1100 | |
| }, | |
| { | |
| "epoch": 0.28699775393062144, | |
| "grad_norm": 7.574777603149414, | |
| "learning_rate": 9.964803028247807e-06, | |
| "loss": 0.427, | |
| "step": 1150 | |
| }, | |
| { | |
| "epoch": 0.2994759171449963, | |
| "grad_norm": 12.513327598571777, | |
| "learning_rate": 9.95609036653027e-06, | |
| "loss": 0.451, | |
| "step": 1200 | |
| }, | |
| { | |
| "epoch": 0.3119540803593711, | |
| "grad_norm": 7.875536918640137, | |
| "learning_rate": 9.946419867042226e-06, | |
| "loss": 0.4535, | |
| "step": 1250 | |
| }, | |
| { | |
| "epoch": 0.32443224357374595, | |
| "grad_norm": 3.6338303089141846, | |
| "learning_rate": 9.935793398750741e-06, | |
| "loss": 0.4618, | |
| "step": 1300 | |
| }, | |
| { | |
| "epoch": 0.3369104067881208, | |
| "grad_norm": 122.68341827392578, | |
| "learning_rate": 9.924213015377991e-06, | |
| "loss": 0.4842, | |
| "step": 1350 | |
| }, | |
| { | |
| "epoch": 0.3493885700024956, | |
| "grad_norm": 14.782501220703125, | |
| "learning_rate": 9.911680955004353e-06, | |
| "loss": 0.4984, | |
| "step": 1400 | |
| }, | |
| { | |
| "epoch": 0.36186673321687046, | |
| "grad_norm": 8.127388000488281, | |
| "learning_rate": 9.89819963963586e-06, | |
| "loss": 0.5211, | |
| "step": 1450 | |
| }, | |
| { | |
| "epoch": 0.3743448964312453, | |
| "grad_norm": 9.87590503692627, | |
| "learning_rate": 9.883771674736113e-06, | |
| "loss": 0.4535, | |
| "step": 1500 | |
| }, | |
| { | |
| "epoch": 0.38682305964562014, | |
| "grad_norm": 9.47072696685791, | |
| "learning_rate": 9.868399848722738e-06, | |
| "loss": 0.4273, | |
| "step": 1550 | |
| }, | |
| { | |
| "epoch": 0.39930122285999503, | |
| "grad_norm": 7.544781684875488, | |
| "learning_rate": 9.852087132428483e-06, | |
| "loss": 0.4068, | |
| "step": 1600 | |
| }, | |
| { | |
| "epoch": 0.41177938607436987, | |
| "grad_norm": 8.554336547851562, | |
| "learning_rate": 9.834836678527055e-06, | |
| "loss": 0.4562, | |
| "step": 1650 | |
| }, | |
| { | |
| "epoch": 0.4242575492887447, | |
| "grad_norm": 11.270395278930664, | |
| "learning_rate": 9.816651820923832e-06, | |
| "loss": 0.4057, | |
| "step": 1700 | |
| }, | |
| { | |
| "epoch": 0.43673571250311954, | |
| "grad_norm": 4.841072082519531, | |
| "learning_rate": 9.797536074111523e-06, | |
| "loss": 0.5079, | |
| "step": 1750 | |
| }, | |
| { | |
| "epoch": 0.4492138757174944, | |
| "grad_norm": 10.340139389038086, | |
| "learning_rate": 9.777493132490945e-06, | |
| "loss": 0.4431, | |
| "step": 1800 | |
| }, | |
| { | |
| "epoch": 0.4616920389318692, | |
| "grad_norm": 4.847078800201416, | |
| "learning_rate": 9.75652686965703e-06, | |
| "loss": 0.4961, | |
| "step": 1850 | |
| }, | |
| { | |
| "epoch": 0.47417020214624406, | |
| "grad_norm": 3.77082896232605, | |
| "learning_rate": 9.734641337650186e-06, | |
| "loss": 0.3592, | |
| "step": 1900 | |
| }, | |
| { | |
| "epoch": 0.4866483653606189, | |
| "grad_norm": 11.575835227966309, | |
| "learning_rate": 9.711840766173195e-06, | |
| "loss": 0.493, | |
| "step": 1950 | |
| }, | |
| { | |
| "epoch": 0.4991265285749938, | |
| "grad_norm": 8.841684341430664, | |
| "learning_rate": 9.688129561773742e-06, | |
| "loss": 0.4283, | |
| "step": 2000 | |
| }, | |
| { | |
| "epoch": 0.5116046917893686, | |
| "grad_norm": 14.447687149047852, | |
| "learning_rate": 9.663512306992797e-06, | |
| "loss": 0.4683, | |
| "step": 2050 | |
| }, | |
| { | |
| "epoch": 0.5240828550037434, | |
| "grad_norm": 5.287663459777832, | |
| "learning_rate": 9.637993759478972e-06, | |
| "loss": 0.4147, | |
| "step": 2100 | |
| }, | |
| { | |
| "epoch": 0.5365610182181183, | |
| "grad_norm": 5.344033718109131, | |
| "learning_rate": 9.611578851069023e-06, | |
| "loss": 0.4346, | |
| "step": 2150 | |
| }, | |
| { | |
| "epoch": 0.5490391814324931, | |
| "grad_norm": 12.01088809967041, | |
| "learning_rate": 9.584272686834716e-06, | |
| "loss": 0.364, | |
| "step": 2200 | |
| }, | |
| { | |
| "epoch": 0.561517344646868, | |
| "grad_norm": 4.283223628997803, | |
| "learning_rate": 9.55608054409618e-06, | |
| "loss": 0.4039, | |
| "step": 2250 | |
| }, | |
| { | |
| "epoch": 0.5739955078612429, | |
| "grad_norm": 5.19553804397583, | |
| "learning_rate": 9.527007871402002e-06, | |
| "loss": 0.4427, | |
| "step": 2300 | |
| }, | |
| { | |
| "epoch": 0.5864736710756177, | |
| "grad_norm": 13.137829780578613, | |
| "learning_rate": 9.4970602874762e-06, | |
| "loss": 0.515, | |
| "step": 2350 | |
| }, | |
| { | |
| "epoch": 0.5989518342899925, | |
| "grad_norm": 9.434691429138184, | |
| "learning_rate": 9.466243580132336e-06, | |
| "loss": 0.3969, | |
| "step": 2400 | |
| }, | |
| { | |
| "epoch": 0.6114299975043673, | |
| "grad_norm": 6.398521423339844, | |
| "learning_rate": 9.434563705154922e-06, | |
| "loss": 0.4726, | |
| "step": 2450 | |
| }, | |
| { | |
| "epoch": 0.6239081607187422, | |
| "grad_norm": 11.770699501037598, | |
| "learning_rate": 9.402026785148385e-06, | |
| "loss": 0.4433, | |
| "step": 2500 | |
| }, | |
| { | |
| "epoch": 0.636386323933117, | |
| "grad_norm": 3.9562103748321533, | |
| "learning_rate": 9.368639108353781e-06, | |
| "loss": 0.3859, | |
| "step": 2550 | |
| }, | |
| { | |
| "epoch": 0.6488644871474919, | |
| "grad_norm": 8.58365535736084, | |
| "learning_rate": 9.3344071274335e-06, | |
| "loss": 0.5342, | |
| "step": 2600 | |
| }, | |
| { | |
| "epoch": 0.6613426503618667, | |
| "grad_norm": 5.637104511260986, | |
| "learning_rate": 9.29933745822419e-06, | |
| "loss": 0.3778, | |
| "step": 2650 | |
| }, | |
| { | |
| "epoch": 0.6738208135762416, | |
| "grad_norm": 7.481651306152344, | |
| "learning_rate": 9.263436878458151e-06, | |
| "loss": 0.4751, | |
| "step": 2700 | |
| }, | |
| { | |
| "epoch": 0.6862989767906165, | |
| "grad_norm": 5.351173400878906, | |
| "learning_rate": 9.226712326453446e-06, | |
| "loss": 0.4493, | |
| "step": 2750 | |
| }, | |
| { | |
| "epoch": 0.6987771400049912, | |
| "grad_norm": 7.108247756958008, | |
| "learning_rate": 9.189170899772949e-06, | |
| "loss": 0.4515, | |
| "step": 2800 | |
| }, | |
| { | |
| "epoch": 0.7112553032193661, | |
| "grad_norm": 8.506830215454102, | |
| "learning_rate": 9.150819853852656e-06, | |
| "loss": 0.3848, | |
| "step": 2850 | |
| }, | |
| { | |
| "epoch": 0.7237334664337409, | |
| "grad_norm": 5.16771936416626, | |
| "learning_rate": 9.111666600599455e-06, | |
| "loss": 0.4331, | |
| "step": 2900 | |
| }, | |
| { | |
| "epoch": 0.7362116296481158, | |
| "grad_norm": 6.397897243499756, | |
| "learning_rate": 9.071718706958662e-06, | |
| "loss": 0.374, | |
| "step": 2950 | |
| }, | |
| { | |
| "epoch": 0.7486897928624906, | |
| "grad_norm": 11.78195571899414, | |
| "learning_rate": 9.030983893451612e-06, | |
| "loss": 0.415, | |
| "step": 3000 | |
| }, | |
| { | |
| "epoch": 0.7611679560768655, | |
| "grad_norm": 16.80957794189453, | |
| "learning_rate": 8.989470032683534e-06, | |
| "loss": 0.3573, | |
| "step": 3050 | |
| }, | |
| { | |
| "epoch": 0.7736461192912403, | |
| "grad_norm": 7.505095958709717, | |
| "learning_rate": 8.947185147822064e-06, | |
| "loss": 0.4102, | |
| "step": 3100 | |
| }, | |
| { | |
| "epoch": 0.7861242825056152, | |
| "grad_norm": 8.942618370056152, | |
| "learning_rate": 8.904137411046656e-06, | |
| "loss": 0.385, | |
| "step": 3150 | |
| }, | |
| { | |
| "epoch": 0.7986024457199901, | |
| "grad_norm": 12.872928619384766, | |
| "learning_rate": 8.860335141969173e-06, | |
| "loss": 0.4918, | |
| "step": 3200 | |
| }, | |
| { | |
| "epoch": 0.8110806089343648, | |
| "grad_norm": 7.009009838104248, | |
| "learning_rate": 8.815786806026008e-06, | |
| "loss": 0.3944, | |
| "step": 3250 | |
| }, | |
| { | |
| "epoch": 0.8235587721487397, | |
| "grad_norm": 9.012943267822266, | |
| "learning_rate": 8.77050101284201e-06, | |
| "loss": 0.4019, | |
| "step": 3300 | |
| }, | |
| { | |
| "epoch": 0.8360369353631145, | |
| "grad_norm": 4.293410301208496, | |
| "learning_rate": 8.72448651456655e-06, | |
| "loss": 0.4727, | |
| "step": 3350 | |
| }, | |
| { | |
| "epoch": 0.8485150985774894, | |
| "grad_norm": 6.573951721191406, | |
| "learning_rate": 8.677752204182038e-06, | |
| "loss": 0.3932, | |
| "step": 3400 | |
| }, | |
| { | |
| "epoch": 0.8609932617918642, | |
| "grad_norm": 7.871060848236084, | |
| "learning_rate": 8.630307113785214e-06, | |
| "loss": 0.4232, | |
| "step": 3450 | |
| }, | |
| { | |
| "epoch": 0.8734714250062391, | |
| "grad_norm": 6.599925994873047, | |
| "learning_rate": 8.582160412841578e-06, | |
| "loss": 0.4354, | |
| "step": 3500 | |
| }, | |
| { | |
| "epoch": 0.8859495882206139, | |
| "grad_norm": 8.174042701721191, | |
| "learning_rate": 8.533321406413238e-06, | |
| "loss": 0.4637, | |
| "step": 3550 | |
| }, | |
| { | |
| "epoch": 0.8984277514349888, | |
| "grad_norm": 11.967002868652344, | |
| "learning_rate": 8.48379953336058e-06, | |
| "loss": 0.3552, | |
| "step": 3600 | |
| }, | |
| { | |
| "epoch": 0.9109059146493637, | |
| "grad_norm": 21.131765365600586, | |
| "learning_rate": 8.433604364518067e-06, | |
| "loss": 0.3665, | |
| "step": 3650 | |
| }, | |
| { | |
| "epoch": 0.9233840778637384, | |
| "grad_norm": 7.3277907371521, | |
| "learning_rate": 8.38274560084453e-06, | |
| "loss": 0.4076, | |
| "step": 3700 | |
| }, | |
| { | |
| "epoch": 0.9358622410781133, | |
| "grad_norm": 6.7681732177734375, | |
| "learning_rate": 8.331233071548318e-06, | |
| "loss": 0.4331, | |
| "step": 3750 | |
| }, | |
| { | |
| "epoch": 0.9483404042924881, | |
| "grad_norm": 5.008552551269531, | |
| "learning_rate": 8.27907673218766e-06, | |
| "loss": 0.4162, | |
| "step": 3800 | |
| }, | |
| { | |
| "epoch": 0.960818567506863, | |
| "grad_norm": 11.283330917358398, | |
| "learning_rate": 8.226286662746594e-06, | |
| "loss": 0.3277, | |
| "step": 3850 | |
| }, | |
| { | |
| "epoch": 0.9732967307212378, | |
| "grad_norm": 7.3569746017456055, | |
| "learning_rate": 8.172873065686877e-06, | |
| "loss": 0.4533, | |
| "step": 3900 | |
| }, | |
| { | |
| "epoch": 0.9857748939356127, | |
| "grad_norm": 15.550161361694336, | |
| "learning_rate": 8.1188462639762e-06, | |
| "loss": 0.4304, | |
| "step": 3950 | |
| }, | |
| { | |
| "epoch": 0.9982530571499876, | |
| "grad_norm": 9.424723625183105, | |
| "learning_rate": 8.06421669909312e-06, | |
| "loss": 0.4069, | |
| "step": 4000 | |
| }, | |
| { | |
| "epoch": 1.0107312203643624, | |
| "grad_norm": 5.943562030792236, | |
| "learning_rate": 8.008994929009097e-06, | |
| "loss": 0.3779, | |
| "step": 4050 | |
| }, | |
| { | |
| "epoch": 1.0232093835787373, | |
| "grad_norm": 13.873172760009766, | |
| "learning_rate": 7.953191626148002e-06, | |
| "loss": 0.3955, | |
| "step": 4100 | |
| }, | |
| { | |
| "epoch": 1.0356875467931121, | |
| "grad_norm": 6.041378974914551, | |
| "learning_rate": 7.896817575323516e-06, | |
| "loss": 0.3335, | |
| "step": 4150 | |
| }, | |
| { | |
| "epoch": 1.0481657100074868, | |
| "grad_norm": 9.297985076904297, | |
| "learning_rate": 7.839883671654814e-06, | |
| "loss": 0.3622, | |
| "step": 4200 | |
| }, | |
| { | |
| "epoch": 1.0606438732218617, | |
| "grad_norm": 4.703738689422607, | |
| "learning_rate": 7.7824009184609e-06, | |
| "loss": 0.3698, | |
| "step": 4250 | |
| }, | |
| { | |
| "epoch": 1.0731220364362366, | |
| "grad_norm": 8.91132926940918, | |
| "learning_rate": 7.724380425134082e-06, | |
| "loss": 0.2974, | |
| "step": 4300 | |
| }, | |
| { | |
| "epoch": 1.0856001996506115, | |
| "grad_norm": 9.209162712097168, | |
| "learning_rate": 7.66583340499289e-06, | |
| "loss": 0.4056, | |
| "step": 4350 | |
| }, | |
| { | |
| "epoch": 1.0980783628649862, | |
| "grad_norm": 6.217947959899902, | |
| "learning_rate": 7.606771173114962e-06, | |
| "loss": 0.4447, | |
| "step": 4400 | |
| }, | |
| { | |
| "epoch": 1.110556526079361, | |
| "grad_norm": 7.217406272888184, | |
| "learning_rate": 7.547205144150211e-06, | |
| "loss": 0.3444, | |
| "step": 4450 | |
| }, | |
| { | |
| "epoch": 1.123034689293736, | |
| "grad_norm": 9.686402320861816, | |
| "learning_rate": 7.487146830114797e-06, | |
| "loss": 0.39, | |
| "step": 4500 | |
| }, | |
| { | |
| "epoch": 1.1355128525081108, | |
| "grad_norm": 7.340874671936035, | |
| "learning_rate": 7.426607838166252e-06, | |
| "loss": 0.3825, | |
| "step": 4550 | |
| }, | |
| { | |
| "epoch": 1.1479910157224857, | |
| "grad_norm": 24.946033477783203, | |
| "learning_rate": 7.36559986836022e-06, | |
| "loss": 0.3891, | |
| "step": 4600 | |
| }, | |
| { | |
| "epoch": 1.1604691789368604, | |
| "grad_norm": 9.491501808166504, | |
| "learning_rate": 7.304134711389256e-06, | |
| "loss": 0.3215, | |
| "step": 4650 | |
| }, | |
| { | |
| "epoch": 1.1729473421512353, | |
| "grad_norm": 7.190339088439941, | |
| "learning_rate": 7.242224246304095e-06, | |
| "loss": 0.3614, | |
| "step": 4700 | |
| }, | |
| { | |
| "epoch": 1.1854255053656102, | |
| "grad_norm": 32.30070877075195, | |
| "learning_rate": 7.179880438217854e-06, | |
| "loss": 0.3325, | |
| "step": 4750 | |
| }, | |
| { | |
| "epoch": 1.197903668579985, | |
| "grad_norm": 8.440774917602539, | |
| "learning_rate": 7.1171153359935945e-06, | |
| "loss": 0.3708, | |
| "step": 4800 | |
| }, | |
| { | |
| "epoch": 1.21038183179436, | |
| "grad_norm": 7.894869804382324, | |
| "learning_rate": 7.0539410699156995e-06, | |
| "loss": 0.3683, | |
| "step": 4850 | |
| }, | |
| { | |
| "epoch": 1.2228599950087347, | |
| "grad_norm": 10.961115837097168, | |
| "learning_rate": 6.990369849345519e-06, | |
| "loss": 0.3572, | |
| "step": 4900 | |
| }, | |
| { | |
| "epoch": 1.2353381582231096, | |
| "grad_norm": 7.400118350982666, | |
| "learning_rate": 6.926413960361734e-06, | |
| "loss": 0.2907, | |
| "step": 4950 | |
| }, | |
| { | |
| "epoch": 1.2478163214374844, | |
| "grad_norm": 7.078188419342041, | |
| "learning_rate": 6.862085763385882e-06, | |
| "loss": 0.3524, | |
| "step": 5000 | |
| }, | |
| { | |
| "epoch": 1.2602944846518591, | |
| "grad_norm": 7.620141983032227, | |
| "learning_rate": 6.797397690793528e-06, | |
| "loss": 0.3271, | |
| "step": 5050 | |
| }, | |
| { | |
| "epoch": 1.272772647866234, | |
| "grad_norm": 7.046457290649414, | |
| "learning_rate": 6.732362244511523e-06, | |
| "loss": 0.4414, | |
| "step": 5100 | |
| }, | |
| { | |
| "epoch": 1.285250811080609, | |
| "grad_norm": 15.27204704284668, | |
| "learning_rate": 6.666991993601821e-06, | |
| "loss": 0.297, | |
| "step": 5150 | |
| }, | |
| { | |
| "epoch": 1.2977289742949838, | |
| "grad_norm": 10.220624923706055, | |
| "learning_rate": 6.601299571832325e-06, | |
| "loss": 0.3015, | |
| "step": 5200 | |
| }, | |
| { | |
| "epoch": 1.3102071375093587, | |
| "grad_norm": 12.588732719421387, | |
| "learning_rate": 6.535297675235221e-06, | |
| "loss": 0.3472, | |
| "step": 5250 | |
| }, | |
| { | |
| "epoch": 1.3226853007237334, | |
| "grad_norm": 10.140027046203613, | |
| "learning_rate": 6.46899905965328e-06, | |
| "loss": 0.3691, | |
| "step": 5300 | |
| }, | |
| { | |
| "epoch": 1.3351634639381083, | |
| "grad_norm": 7.856444358825684, | |
| "learning_rate": 6.402416538274609e-06, | |
| "loss": 0.3839, | |
| "step": 5350 | |
| }, | |
| { | |
| "epoch": 1.3476416271524831, | |
| "grad_norm": 7.6511712074279785, | |
| "learning_rate": 6.3355629791563085e-06, | |
| "loss": 0.361, | |
| "step": 5400 | |
| }, | |
| { | |
| "epoch": 1.360119790366858, | |
| "grad_norm": 4.789505481719971, | |
| "learning_rate": 6.2684513027375336e-06, | |
| "loss": 0.4478, | |
| "step": 5450 | |
| }, | |
| { | |
| "epoch": 1.372597953581233, | |
| "grad_norm": 7.044528007507324, | |
| "learning_rate": 6.201094479342419e-06, | |
| "loss": 0.3199, | |
| "step": 5500 | |
| }, | |
| { | |
| "epoch": 1.3850761167956076, | |
| "grad_norm": 8.48816967010498, | |
| "learning_rate": 6.133505526673387e-06, | |
| "loss": 0.3087, | |
| "step": 5550 | |
| }, | |
| { | |
| "epoch": 1.3975542800099825, | |
| "grad_norm": 11.27566146850586, | |
| "learning_rate": 6.065697507295267e-06, | |
| "loss": 0.3392, | |
| "step": 5600 | |
| }, | |
| { | |
| "epoch": 1.4100324432243574, | |
| "grad_norm": 8.946170806884766, | |
| "learning_rate": 5.997683526110779e-06, | |
| "loss": 0.3173, | |
| "step": 5650 | |
| }, | |
| { | |
| "epoch": 1.4225106064387323, | |
| "grad_norm": 14.0136137008667, | |
| "learning_rate": 5.929476727827801e-06, | |
| "loss": 0.3957, | |
| "step": 5700 | |
| }, | |
| { | |
| "epoch": 1.4349887696531072, | |
| "grad_norm": 8.724675178527832, | |
| "learning_rate": 5.861090294418973e-06, | |
| "loss": 0.4072, | |
| "step": 5750 | |
| }, | |
| { | |
| "epoch": 1.4474669328674818, | |
| "grad_norm": 7.454153537750244, | |
| "learning_rate": 5.792537442574076e-06, | |
| "loss": 0.3562, | |
| "step": 5800 | |
| }, | |
| { | |
| "epoch": 1.4599450960818567, | |
| "grad_norm": 8.835644721984863, | |
| "learning_rate": 5.723831421145718e-06, | |
| "loss": 0.3093, | |
| "step": 5850 | |
| }, | |
| { | |
| "epoch": 1.4724232592962316, | |
| "grad_norm": 26.4691219329834, | |
| "learning_rate": 5.6549855085887925e-06, | |
| "loss": 0.3797, | |
| "step": 5900 | |
| }, | |
| { | |
| "epoch": 1.4849014225106063, | |
| "grad_norm": 7.992363452911377, | |
| "learning_rate": 5.586013010394226e-06, | |
| "loss": 0.2999, | |
| "step": 5950 | |
| }, | |
| { | |
| "epoch": 1.4973795857249814, | |
| "grad_norm": 8.84736156463623, | |
| "learning_rate": 5.5169272565174866e-06, | |
| "loss": 0.3515, | |
| "step": 6000 | |
| }, | |
| { | |
| "epoch": 1.509857748939356, | |
| "grad_norm": 14.053621292114258, | |
| "learning_rate": 5.447741598802384e-06, | |
| "loss": 0.3372, | |
| "step": 6050 | |
| }, | |
| { | |
| "epoch": 1.522335912153731, | |
| "grad_norm": 12.153364181518555, | |
| "learning_rate": 5.378469408400618e-06, | |
| "loss": 0.3291, | |
| "step": 6100 | |
| }, | |
| { | |
| "epoch": 1.5348140753681059, | |
| "grad_norm": 8.285240173339844, | |
| "learning_rate": 5.309124073187613e-06, | |
| "loss": 0.3676, | |
| "step": 6150 | |
| }, | |
| { | |
| "epoch": 1.5472922385824806, | |
| "grad_norm": 11.175989151000977, | |
| "learning_rate": 5.239718995175107e-06, | |
| "loss": 0.3833, | |
| "step": 6200 | |
| }, | |
| { | |
| "epoch": 1.5597704017968557, | |
| "grad_norm": 7.0034003257751465, | |
| "learning_rate": 5.170267587921021e-06, | |
| "loss": 0.3275, | |
| "step": 6250 | |
| }, | |
| { | |
| "epoch": 1.5722485650112303, | |
| "grad_norm": 14.088438987731934, | |
| "learning_rate": 5.100783273937084e-06, | |
| "loss": 0.3754, | |
| "step": 6300 | |
| }, | |
| { | |
| "epoch": 1.5847267282256052, | |
| "grad_norm": 7.637153625488281, | |
| "learning_rate": 5.031279482094741e-06, | |
| "loss": 0.2582, | |
| "step": 6350 | |
| }, | |
| { | |
| "epoch": 1.5972048914399801, | |
| "grad_norm": 16.597148895263672, | |
| "learning_rate": 4.961769645029815e-06, | |
| "loss": 0.3279, | |
| "step": 6400 | |
| }, | |
| { | |
| "epoch": 1.6096830546543548, | |
| "grad_norm": 5.401224613189697, | |
| "learning_rate": 4.892267196546468e-06, | |
| "loss": 0.3662, | |
| "step": 6450 | |
| }, | |
| { | |
| "epoch": 1.6221612178687297, | |
| "grad_norm": 17.39789390563965, | |
| "learning_rate": 4.822785569020898e-06, | |
| "loss": 0.3882, | |
| "step": 6500 | |
| }, | |
| { | |
| "epoch": 1.6346393810831046, | |
| "grad_norm": 7.322461128234863, | |
| "learning_rate": 4.753338190805354e-06, | |
| "loss": 0.3656, | |
| "step": 6550 | |
| }, | |
| { | |
| "epoch": 1.6471175442974793, | |
| "grad_norm": 11.221035957336426, | |
| "learning_rate": 4.683938483632893e-06, | |
| "loss": 0.3346, | |
| "step": 6600 | |
| }, | |
| { | |
| "epoch": 1.6595957075118544, | |
| "grad_norm": 12.337651252746582, | |
| "learning_rate": 4.6145998600234405e-06, | |
| "loss": 0.2959, | |
| "step": 6650 | |
| }, | |
| { | |
| "epoch": 1.672073870726229, | |
| "grad_norm": 9.140985488891602, | |
| "learning_rate": 4.545335720691619e-06, | |
| "loss": 0.2888, | |
| "step": 6700 | |
| }, | |
| { | |
| "epoch": 1.684552033940604, | |
| "grad_norm": 12.107025146484375, | |
| "learning_rate": 4.476159451956864e-06, | |
| "loss": 0.3458, | |
| "step": 6750 | |
| }, | |
| { | |
| "epoch": 1.6970301971549788, | |
| "grad_norm": 12.792327880859375, | |
| "learning_rate": 4.407084423156318e-06, | |
| "loss": 0.3031, | |
| "step": 6800 | |
| }, | |
| { | |
| "epoch": 1.7095083603693535, | |
| "grad_norm": 7.57142448425293, | |
| "learning_rate": 4.338123984061008e-06, | |
| "loss": 0.3177, | |
| "step": 6850 | |
| }, | |
| { | |
| "epoch": 1.7219865235837286, | |
| "grad_norm": 13.280131340026855, | |
| "learning_rate": 4.269291462295805e-06, | |
| "loss": 0.3068, | |
| "step": 6900 | |
| }, | |
| { | |
| "epoch": 1.7344646867981033, | |
| "grad_norm": 19.578649520874023, | |
| "learning_rate": 4.200600160763666e-06, | |
| "loss": 0.2977, | |
| "step": 6950 | |
| }, | |
| { | |
| "epoch": 1.7469428500124782, | |
| "grad_norm": 13.223540306091309, | |
| "learning_rate": 4.1320633550746464e-06, | |
| "loss": 0.3542, | |
| "step": 7000 | |
| }, | |
| { | |
| "epoch": 1.759421013226853, | |
| "grad_norm": 13.519241333007812, | |
| "learning_rate": 4.063694290980198e-06, | |
| "loss": 0.2883, | |
| "step": 7050 | |
| }, | |
| { | |
| "epoch": 1.7718991764412277, | |
| "grad_norm": 14.22149658203125, | |
| "learning_rate": 3.995506181813225e-06, | |
| "loss": 0.2897, | |
| "step": 7100 | |
| }, | |
| { | |
| "epoch": 1.7843773396556029, | |
| "grad_norm": 15.721160888671875, | |
| "learning_rate": 3.927512205934419e-06, | |
| "loss": 0.383, | |
| "step": 7150 | |
| }, | |
| { | |
| "epoch": 1.7968555028699775, | |
| "grad_norm": 8.207934379577637, | |
| "learning_rate": 3.859725504185337e-06, | |
| "loss": 0.2854, | |
| "step": 7200 | |
| }, | |
| { | |
| "epoch": 1.8093336660843524, | |
| "grad_norm": 13.97625732421875, | |
| "learning_rate": 3.7921591773487496e-06, | |
| "loss": 0.296, | |
| "step": 7250 | |
| }, | |
| { | |
| "epoch": 1.8218118292987273, | |
| "grad_norm": 12.828845024108887, | |
| "learning_rate": 3.7248262836167133e-06, | |
| "loss": 0.2767, | |
| "step": 7300 | |
| }, | |
| { | |
| "epoch": 1.834289992513102, | |
| "grad_norm": 12.510878562927246, | |
| "learning_rate": 3.6577398360668858e-06, | |
| "loss": 0.3011, | |
| "step": 7350 | |
| }, | |
| { | |
| "epoch": 1.8467681557274769, | |
| "grad_norm": 9.349883079528809, | |
| "learning_rate": 3.5909128001475523e-06, | |
| "loss": 0.4221, | |
| "step": 7400 | |
| }, | |
| { | |
| "epoch": 1.8592463189418518, | |
| "grad_norm": 6.913302898406982, | |
| "learning_rate": 3.5243580911718666e-06, | |
| "loss": 0.3221, | |
| "step": 7450 | |
| }, | |
| { | |
| "epoch": 1.8717244821562264, | |
| "grad_norm": 7.532046318054199, | |
| "learning_rate": 3.45808857182177e-06, | |
| "loss": 0.2691, | |
| "step": 7500 | |
| }, | |
| { | |
| "epoch": 1.8842026453706016, | |
| "grad_norm": 6.2996296882629395, | |
| "learning_rate": 3.3921170496620926e-06, | |
| "loss": 0.3504, | |
| "step": 7550 | |
| }, | |
| { | |
| "epoch": 1.8966808085849762, | |
| "grad_norm": 13.062787055969238, | |
| "learning_rate": 3.3264562746652963e-06, | |
| "loss": 0.2446, | |
| "step": 7600 | |
| }, | |
| { | |
| "epoch": 1.9091589717993511, | |
| "grad_norm": 16.110904693603516, | |
| "learning_rate": 3.261118936747362e-06, | |
| "loss": 0.3326, | |
| "step": 7650 | |
| }, | |
| { | |
| "epoch": 1.921637135013726, | |
| "grad_norm": 13.394048690795898, | |
| "learning_rate": 3.1961176633152657e-06, | |
| "loss": 0.3525, | |
| "step": 7700 | |
| }, | |
| { | |
| "epoch": 1.9341152982281007, | |
| "grad_norm": 9.24908447265625, | |
| "learning_rate": 3.131465016826556e-06, | |
| "loss": 0.3851, | |
| "step": 7750 | |
| }, | |
| { | |
| "epoch": 1.9465934614424758, | |
| "grad_norm": 6.787208080291748, | |
| "learning_rate": 3.067173492361465e-06, | |
| "loss": 0.3336, | |
| "step": 7800 | |
| }, | |
| { | |
| "epoch": 1.9590716246568505, | |
| "grad_norm": 7.182611465454102, | |
| "learning_rate": 3.003255515208052e-06, | |
| "loss": 0.332, | |
| "step": 7850 | |
| }, | |
| { | |
| "epoch": 1.9715497878712254, | |
| "grad_norm": 12.668556213378906, | |
| "learning_rate": 2.9397234384608347e-06, | |
| "loss": 0.3065, | |
| "step": 7900 | |
| }, | |
| { | |
| "epoch": 1.9840279510856003, | |
| "grad_norm": 8.110651969909668, | |
| "learning_rate": 2.8765895406333688e-06, | |
| "loss": 0.3478, | |
| "step": 7950 | |
| }, | |
| { | |
| "epoch": 1.996506114299975, | |
| "grad_norm": 7.546565532684326, | |
| "learning_rate": 2.8138660232852413e-06, | |
| "loss": 0.2889, | |
| "step": 8000 | |
| }, | |
| { | |
| "epoch": 2.00898427751435, | |
| "grad_norm": 9.974661827087402, | |
| "learning_rate": 2.751565008663948e-06, | |
| "loss": 0.2843, | |
| "step": 8050 | |
| }, | |
| { | |
| "epoch": 2.0214624407287247, | |
| "grad_norm": 17.447463989257812, | |
| "learning_rate": 2.6896985373620765e-06, | |
| "loss": 0.186, | |
| "step": 8100 | |
| }, | |
| { | |
| "epoch": 2.0339406039430994, | |
| "grad_norm": 10.162267684936523, | |
| "learning_rate": 2.6282785659902933e-06, | |
| "loss": 0.1835, | |
| "step": 8150 | |
| }, | |
| { | |
| "epoch": 2.0464187671574745, | |
| "grad_norm": 58.194740295410156, | |
| "learning_rate": 2.567316964866552e-06, | |
| "loss": 0.2702, | |
| "step": 8200 | |
| }, | |
| { | |
| "epoch": 2.058896930371849, | |
| "grad_norm": 15.600176811218262, | |
| "learning_rate": 2.506825515721969e-06, | |
| "loss": 0.2462, | |
| "step": 8250 | |
| }, | |
| { | |
| "epoch": 2.0713750935862243, | |
| "grad_norm": 8.652205467224121, | |
| "learning_rate": 2.4468159094238447e-06, | |
| "loss": 0.183, | |
| "step": 8300 | |
| }, | |
| { | |
| "epoch": 2.083853256800599, | |
| "grad_norm": 22.57020378112793, | |
| "learning_rate": 2.3872997437162163e-06, | |
| "loss": 0.1453, | |
| "step": 8350 | |
| }, | |
| { | |
| "epoch": 2.0963314200149736, | |
| "grad_norm": 19.869333267211914, | |
| "learning_rate": 2.32828852097843e-06, | |
| "loss": 0.1631, | |
| "step": 8400 | |
| }, | |
| { | |
| "epoch": 2.1088095832293487, | |
| "grad_norm": 32.57496643066406, | |
| "learning_rate": 2.2697936460021323e-06, | |
| "loss": 0.3009, | |
| "step": 8450 | |
| }, | |
| { | |
| "epoch": 2.1212877464437234, | |
| "grad_norm": 13.605443954467773, | |
| "learning_rate": 2.211826423787134e-06, | |
| "loss": 0.204, | |
| "step": 8500 | |
| }, | |
| { | |
| "epoch": 2.1337659096580985, | |
| "grad_norm": 13.936535835266113, | |
| "learning_rate": 2.1543980573565375e-06, | |
| "loss": 0.2448, | |
| "step": 8550 | |
| }, | |
| { | |
| "epoch": 2.146244072872473, | |
| "grad_norm": 38.21134948730469, | |
| "learning_rate": 2.0975196455915986e-06, | |
| "loss": 0.1699, | |
| "step": 8600 | |
| }, | |
| { | |
| "epoch": 2.158722236086848, | |
| "grad_norm": 32.29157638549805, | |
| "learning_rate": 2.041202181086696e-06, | |
| "loss": 0.1233, | |
| "step": 8650 | |
| }, | |
| { | |
| "epoch": 2.171200399301223, | |
| "grad_norm": 19.12054443359375, | |
| "learning_rate": 1.985456548024859e-06, | |
| "loss": 0.1861, | |
| "step": 8700 | |
| }, | |
| { | |
| "epoch": 2.1836785625155977, | |
| "grad_norm": 44.0279655456543, | |
| "learning_rate": 1.9302935200742274e-06, | |
| "loss": 0.188, | |
| "step": 8750 | |
| }, | |
| { | |
| "epoch": 2.1961567257299723, | |
| "grad_norm": 19.22915267944336, | |
| "learning_rate": 1.8757237583058874e-06, | |
| "loss": 0.1243, | |
| "step": 8800 | |
| }, | |
| { | |
| "epoch": 2.2086348889443475, | |
| "grad_norm": 15.253983497619629, | |
| "learning_rate": 1.8217578091334581e-06, | |
| "loss": 0.1867, | |
| "step": 8850 | |
| }, | |
| { | |
| "epoch": 2.221113052158722, | |
| "grad_norm": 7.043644905090332, | |
| "learning_rate": 1.7684061022748488e-06, | |
| "loss": 0.2013, | |
| "step": 8900 | |
| }, | |
| { | |
| "epoch": 2.2335912153730972, | |
| "grad_norm": 26.98235511779785, | |
| "learning_rate": 1.7156789487365517e-06, | |
| "loss": 0.1564, | |
| "step": 8950 | |
| }, | |
| { | |
| "epoch": 2.246069378587472, | |
| "grad_norm": 10.906401634216309, | |
| "learning_rate": 1.66358653882089e-06, | |
| "loss": 0.2835, | |
| "step": 9000 | |
| }, | |
| { | |
| "epoch": 2.2585475418018466, | |
| "grad_norm": 19.038333892822266, | |
| "learning_rate": 1.6121389401566e-06, | |
| "loss": 0.1398, | |
| "step": 9050 | |
| }, | |
| { | |
| "epoch": 2.2710257050162217, | |
| "grad_norm": 18.82282066345215, | |
| "learning_rate": 1.5613460957531006e-06, | |
| "loss": 0.0903, | |
| "step": 9100 | |
| }, | |
| { | |
| "epoch": 2.2835038682305964, | |
| "grad_norm": 16.515575408935547, | |
| "learning_rate": 1.5112178220788659e-06, | |
| "loss": 0.2314, | |
| "step": 9150 | |
| }, | |
| { | |
| "epoch": 2.2959820314449715, | |
| "grad_norm": 30.675432205200195, | |
| "learning_rate": 1.4617638071642465e-06, | |
| "loss": 0.1902, | |
| "step": 9200 | |
| }, | |
| { | |
| "epoch": 2.308460194659346, | |
| "grad_norm": 20.497865676879883, | |
| "learning_rate": 1.4129936087291185e-06, | |
| "loss": 0.1729, | |
| "step": 9250 | |
| }, | |
| { | |
| "epoch": 2.320938357873721, | |
| "grad_norm": 51.38520812988281, | |
| "learning_rate": 1.3649166523357032e-06, | |
| "loss": 0.2189, | |
| "step": 9300 | |
| }, | |
| { | |
| "epoch": 2.333416521088096, | |
| "grad_norm": 32.2943229675293, | |
| "learning_rate": 1.3175422295669465e-06, | |
| "loss": 0.1505, | |
| "step": 9350 | |
| }, | |
| { | |
| "epoch": 2.3458946843024706, | |
| "grad_norm": 19.56814193725586, | |
| "learning_rate": 1.270879496230778e-06, | |
| "loss": 0.2142, | |
| "step": 9400 | |
| }, | |
| { | |
| "epoch": 2.3583728475168453, | |
| "grad_norm": 26.465316772460938, | |
| "learning_rate": 1.2249374705906213e-06, | |
| "loss": 0.2112, | |
| "step": 9450 | |
| }, | |
| { | |
| "epoch": 2.3708510107312204, | |
| "grad_norm": 12.468720436096191, | |
| "learning_rate": 1.1797250316224735e-06, | |
| "loss": 0.2482, | |
| "step": 9500 | |
| }, | |
| { | |
| "epoch": 2.383329173945595, | |
| "grad_norm": 20.90337562561035, | |
| "learning_rate": 1.1352509172989178e-06, | |
| "loss": 0.1546, | |
| "step": 9550 | |
| }, | |
| { | |
| "epoch": 2.39580733715997, | |
| "grad_norm": 13.431262016296387, | |
| "learning_rate": 1.0915237229003773e-06, | |
| "loss": 0.2152, | |
| "step": 9600 | |
| }, | |
| { | |
| "epoch": 2.408285500374345, | |
| "grad_norm": 14.718096733093262, | |
| "learning_rate": 1.0485518993539557e-06, | |
| "loss": 0.1225, | |
| "step": 9650 | |
| }, | |
| { | |
| "epoch": 2.42076366358872, | |
| "grad_norm": 33.32410430908203, | |
| "learning_rate": 1.0063437516001618e-06, | |
| "loss": 0.1854, | |
| "step": 9700 | |
| }, | |
| { | |
| "epoch": 2.4332418268030946, | |
| "grad_norm": 18.032928466796875, | |
| "learning_rate": 9.649074369878642e-07, | |
| "loss": 0.1911, | |
| "step": 9750 | |
| }, | |
| { | |
| "epoch": 2.4457199900174693, | |
| "grad_norm": 42.814205169677734, | |
| "learning_rate": 9.242509636977587e-07, | |
| "loss": 0.1137, | |
| "step": 9800 | |
| }, | |
| { | |
| "epoch": 2.4581981532318444, | |
| "grad_norm": 19.89804458618164, | |
| "learning_rate": 8.843821891946741e-07, | |
| "loss": 0.1548, | |
| "step": 9850 | |
| }, | |
| { | |
| "epoch": 2.470676316446219, | |
| "grad_norm": 20.889028549194336, | |
| "learning_rate": 8.453088187089947e-07, | |
| "loss": 0.139, | |
| "step": 9900 | |
| }, | |
| { | |
| "epoch": 2.4831544796605938, | |
| "grad_norm": 12.794198036193848, | |
| "learning_rate": 8.070384037475143e-07, | |
| "loss": 0.1279, | |
| "step": 9950 | |
| }, | |
| { | |
| "epoch": 2.495632642874969, | |
| "grad_norm": 11.888481140136719, | |
| "learning_rate": 7.695783406339924e-07, | |
| "loss": 0.2145, | |
| "step": 10000 | |
| }, | |
| { | |
| "epoch": 2.5081108060893436, | |
| "grad_norm": 14.029563903808594, | |
| "learning_rate": 7.329358690797117e-07, | |
| "loss": 0.1323, | |
| "step": 10050 | |
| }, | |
| { | |
| "epoch": 2.5205889693037182, | |
| "grad_norm": 19.522018432617188, | |
| "learning_rate": 6.971180707842922e-07, | |
| "loss": 0.158, | |
| "step": 10100 | |
| }, | |
| { | |
| "epoch": 2.5330671325180933, | |
| "grad_norm": 28.709199905395508, | |
| "learning_rate": 6.621318680670463e-07, | |
| "loss": 0.2427, | |
| "step": 10150 | |
| }, | |
| { | |
| "epoch": 2.545545295732468, | |
| "grad_norm": 13.266064643859863, | |
| "learning_rate": 6.2798402252914e-07, | |
| "loss": 0.2196, | |
| "step": 10200 | |
| }, | |
| { | |
| "epoch": 2.558023458946843, | |
| "grad_norm": 18.30403709411621, | |
| "learning_rate": 5.946811337468183e-07, | |
| "loss": 0.1252, | |
| "step": 10250 | |
| }, | |
| { | |
| "epoch": 2.570501622161218, | |
| "grad_norm": 16.022560119628906, | |
| "learning_rate": 5.622296379959313e-07, | |
| "loss": 0.1611, | |
| "step": 10300 | |
| }, | |
| { | |
| "epoch": 2.582979785375593, | |
| "grad_norm": 18.459980010986328, | |
| "learning_rate": 5.306358070080336e-07, | |
| "loss": 0.1977, | |
| "step": 10350 | |
| }, | |
| { | |
| "epoch": 2.5954579485899676, | |
| "grad_norm": 15.34239387512207, | |
| "learning_rate": 4.999057467582774e-07, | |
| "loss": 0.0865, | |
| "step": 10400 | |
| }, | |
| { | |
| "epoch": 2.6079361118043423, | |
| "grad_norm": 17.409448623657227, | |
| "learning_rate": 4.7004539628534627e-07, | |
| "loss": 0.1527, | |
| "step": 10450 | |
| }, | |
| { | |
| "epoch": 2.6204142750187174, | |
| "grad_norm": 17.405305862426758, | |
| "learning_rate": 4.4106052654364296e-07, | |
| "loss": 0.1455, | |
| "step": 10500 | |
| }, | |
| { | |
| "epoch": 2.632892438233092, | |
| "grad_norm": 16.738950729370117, | |
| "learning_rate": 4.1295673928796906e-07, | |
| "loss": 0.142, | |
| "step": 10550 | |
| }, | |
| { | |
| "epoch": 2.6453706014474667, | |
| "grad_norm": 19.793495178222656, | |
| "learning_rate": 3.8573946599090417e-07, | |
| "loss": 0.0825, | |
| "step": 10600 | |
| }, | |
| { | |
| "epoch": 2.657848764661842, | |
| "grad_norm": 19.56559181213379, | |
| "learning_rate": 3.5941396679309405e-07, | |
| "loss": 0.1802, | |
| "step": 10650 | |
| }, | |
| { | |
| "epoch": 2.6703269278762165, | |
| "grad_norm": 13.598167419433594, | |
| "learning_rate": 3.339853294866469e-07, | |
| "loss": 0.0642, | |
| "step": 10700 | |
| }, | |
| { | |
| "epoch": 2.6828050910905916, | |
| "grad_norm": 23.991165161132812, | |
| "learning_rate": 3.094584685318458e-07, | |
| "loss": 0.1971, | |
| "step": 10750 | |
| }, | |
| { | |
| "epoch": 2.6952832543049663, | |
| "grad_norm": 17.074853897094727, | |
| "learning_rate": 2.858381241073538e-07, | |
| "loss": 0.1918, | |
| "step": 10800 | |
| }, | |
| { | |
| "epoch": 2.7077614175193414, | |
| "grad_norm": 19.827077865600586, | |
| "learning_rate": 2.631288611941096e-07, | |
| "loss": 0.2273, | |
| "step": 10850 | |
| }, | |
| { | |
| "epoch": 2.720239580733716, | |
| "grad_norm": 14.52009105682373, | |
| "learning_rate": 2.4133506869307043e-07, | |
| "loss": 0.1404, | |
| "step": 10900 | |
| }, | |
| { | |
| "epoch": 2.7327177439480907, | |
| "grad_norm": 12.993230819702148, | |
| "learning_rate": 2.2046095857699623e-07, | |
| "loss": 0.1339, | |
| "step": 10950 | |
| }, | |
| { | |
| "epoch": 2.745195907162466, | |
| "grad_norm": 40.58110427856445, | |
| "learning_rate": 2.0051056507642164e-07, | |
| "loss": 0.1198, | |
| "step": 11000 | |
| }, | |
| { | |
| "epoch": 2.7576740703768405, | |
| "grad_norm": 24.50381088256836, | |
| "learning_rate": 1.8148774389998626e-07, | |
| "loss": 0.1825, | |
| "step": 11050 | |
| }, | |
| { | |
| "epoch": 2.770152233591215, | |
| "grad_norm": 24.359201431274414, | |
| "learning_rate": 1.633961714892529e-07, | |
| "loss": 0.1136, | |
| "step": 11100 | |
| }, | |
| { | |
| "epoch": 2.7826303968055903, | |
| "grad_norm": 36.12211227416992, | |
| "learning_rate": 1.462393443081894e-07, | |
| "loss": 0.1704, | |
| "step": 11150 | |
| }, | |
| { | |
| "epoch": 2.795108560019965, | |
| "grad_norm": 39.766082763671875, | |
| "learning_rate": 1.300205781674202e-07, | |
| "loss": 0.2094, | |
| "step": 11200 | |
| }, | |
| { | |
| "epoch": 2.8075867232343397, | |
| "grad_norm": 22.819293975830078, | |
| "learning_rate": 1.1474300758340052e-07, | |
| "loss": 0.0836, | |
| "step": 11250 | |
| }, | |
| { | |
| "epoch": 2.820064886448715, | |
| "grad_norm": 13.354026794433594, | |
| "learning_rate": 1.0040958517261945e-07, | |
| "loss": 0.146, | |
| "step": 11300 | |
| }, | |
| { | |
| "epoch": 2.8325430496630895, | |
| "grad_norm": 33.82267379760742, | |
| "learning_rate": 8.702308108097024e-08, | |
| "loss": 0.1747, | |
| "step": 11350 | |
| }, | |
| { | |
| "epoch": 2.8450212128774646, | |
| "grad_norm": 26.619991302490234, | |
| "learning_rate": 7.458608244837073e-08, | |
| "loss": 0.1173, | |
| "step": 11400 | |
| }, | |
| { | |
| "epoch": 2.8574993760918392, | |
| "grad_norm": 18.810808181762695, | |
| "learning_rate": 6.31009929087678e-08, | |
| "loss": 0.1694, | |
| "step": 11450 | |
| }, | |
| { | |
| "epoch": 2.8699775393062144, | |
| "grad_norm": 15.145133972167969, | |
| "learning_rate": 5.257003212559453e-08, | |
| "loss": 0.1515, | |
| "step": 11500 | |
| }, | |
| { | |
| "epoch": 2.882455702520589, | |
| "grad_norm": 36.28367233276367, | |
| "learning_rate": 4.299523536278938e-08, | |
| "loss": 0.0779, | |
| "step": 11550 | |
| }, | |
| { | |
| "epoch": 2.8949338657349637, | |
| "grad_norm": 21.905363082885742, | |
| "learning_rate": 3.437845309144983e-08, | |
| "loss": 0.0904, | |
| "step": 11600 | |
| }, | |
| { | |
| "epoch": 2.907412028949339, | |
| "grad_norm": 23.147872924804688, | |
| "learning_rate": 2.6721350632205044e-08, | |
| "loss": 0.2007, | |
| "step": 11650 | |
| }, | |
| { | |
| "epoch": 2.9198901921637135, | |
| "grad_norm": 33.980567932128906, | |
| "learning_rate": 2.0025407833362265e-08, | |
| "loss": 0.1442, | |
| "step": 11700 | |
| }, | |
| { | |
| "epoch": 2.932368355378088, | |
| "grad_norm": 33.668724060058594, | |
| "learning_rate": 1.4291918784910542e-08, | |
| "loss": 0.1689, | |
| "step": 11750 | |
| }, | |
| { | |
| "epoch": 2.9448465185924633, | |
| "grad_norm": 41.63605499267578, | |
| "learning_rate": 9.521991568414158e-09, | |
| "loss": 0.1675, | |
| "step": 11800 | |
| }, | |
| { | |
| "epoch": 2.957324681806838, | |
| "grad_norm": 27.84828758239746, | |
| "learning_rate": 5.7165480428611475e-09, | |
| "loss": 0.201, | |
| "step": 11850 | |
| }, | |
| { | |
| "epoch": 2.9698028450212126, | |
| "grad_norm": 20.36393165588379, | |
| "learning_rate": 2.8763236665002582e-09, | |
| "loss": 0.195, | |
| "step": 11900 | |
| }, | |
| { | |
| "epoch": 2.9822810082355877, | |
| "grad_norm": 10.760637283325195, | |
| "learning_rate": 1.001867354702979e-09, | |
| "loss": 0.1057, | |
| "step": 11950 | |
| }, | |
| { | |
| "epoch": 2.994759171449963, | |
| "grad_norm": 24.99573516845703, | |
| "learning_rate": 9.354137387729278e-11, | |
| "loss": 0.1154, | |
| "step": 12000 | |
| } | |
| ], | |
| "logging_steps": 50, | |
| "max_steps": 12021, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 3, | |
| "save_steps": 500, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": true | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 8.960860132044595e+16, | |
| "train_batch_size": 8, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |