{
  "best_global_step": null,
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 3.0,
  "eval_steps": 500,
  "global_step": 74523,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.020128014170121975,
      "grad_norm": 4.302577018737793,
      "learning_rate": 7.94643264495525e-05,
      "loss": 3.4263,
      "step": 500
    },
    {
      "epoch": 0.04025602834024395,
      "grad_norm": 4.915486812591553,
      "learning_rate": 7.892757940501591e-05,
      "loss": 3.0255,
      "step": 1000
    },
    {
      "epoch": 0.06038404251036593,
      "grad_norm": 6.236612796783447,
      "learning_rate": 7.839083236047933e-05,
      "loss": 2.8404,
      "step": 1500
    },
    {
      "epoch": 0.0805120566804879,
      "grad_norm": 8.648529052734375,
      "learning_rate": 7.785408531594274e-05,
      "loss": 2.5965,
      "step": 2000
    },
    {
      "epoch": 0.10064007085060987,
      "grad_norm": 9.566611289978027,
      "learning_rate": 7.731733827140614e-05,
      "loss": 2.3771,
      "step": 2500
    },
    {
      "epoch": 0.12076808502073186,
      "grad_norm": 11.978983879089355,
      "learning_rate": 7.678059122686957e-05,
      "loss": 2.2719,
      "step": 3000
    },
    {
      "epoch": 0.14089609919085383,
      "grad_norm": 9.598978996276855,
      "learning_rate": 7.624384418233297e-05,
      "loss": 2.1877,
      "step": 3500
    },
    {
      "epoch": 0.1610241133609758,
      "grad_norm": 13.149515151977539,
      "learning_rate": 7.57070971377964e-05,
      "loss": 2.1664,
      "step": 4000
    },
    {
      "epoch": 0.18115212753109777,
      "grad_norm": 7.942020416259766,
      "learning_rate": 7.51703500932598e-05,
      "loss": 2.0778,
      "step": 4500
    },
    {
      "epoch": 0.20128014170121974,
      "grad_norm": 9.4037504196167,
      "learning_rate": 7.463360304872321e-05,
      "loss": 2.064,
      "step": 5000
    },
    {
      "epoch": 0.22140815587134174,
      "grad_norm": 8.469627380371094,
      "learning_rate": 7.409685600418663e-05,
      "loss": 1.9481,
      "step": 5500
    },
    {
      "epoch": 0.24153617004146372,
      "grad_norm": 13.245635032653809,
      "learning_rate": 7.356010895965004e-05,
      "loss": 1.9162,
      "step": 6000
    },
    {
      "epoch": 0.26166418421158566,
      "grad_norm": 14.120226860046387,
      "learning_rate": 7.302336191511346e-05,
      "loss": 1.8887,
      "step": 6500
    },
    {
      "epoch": 0.28179219838170766,
      "grad_norm": 10.026687622070312,
      "learning_rate": 7.248661487057687e-05,
      "loss": 1.8418,
      "step": 7000
    },
    {
      "epoch": 0.30192021255182966,
      "grad_norm": 11.791897773742676,
      "learning_rate": 7.194986782604029e-05,
      "loss": 1.8123,
      "step": 7500
    },
    {
      "epoch": 0.3220482267219516,
      "grad_norm": 8.655497550964355,
      "learning_rate": 7.141312078150371e-05,
      "loss": 1.7765,
      "step": 8000
    },
    {
      "epoch": 0.3421762408920736,
      "grad_norm": 10.105185508728027,
      "learning_rate": 7.087637373696712e-05,
      "loss": 1.7373,
      "step": 8500
    },
    {
      "epoch": 0.36230425506219555,
      "grad_norm": 10.553141593933105,
      "learning_rate": 7.033962669243054e-05,
      "loss": 1.7426,
      "step": 9000
    },
    {
      "epoch": 0.38243226923231755,
      "grad_norm": 10.911526679992676,
      "learning_rate": 6.980287964789395e-05,
      "loss": 1.666,
      "step": 9500
    },
    {
      "epoch": 0.4025602834024395,
      "grad_norm": 14.893563270568848,
      "learning_rate": 6.926613260335736e-05,
      "loss": 1.664,
      "step": 10000
    },
    {
      "epoch": 0.4226882975725615,
      "grad_norm": 11.424589157104492,
      "learning_rate": 6.872938555882078e-05,
      "loss": 1.6313,
      "step": 10500
    },
    {
      "epoch": 0.4428163117426835,
      "grad_norm": 11.619711875915527,
      "learning_rate": 6.819263851428418e-05,
      "loss": 1.6002,
      "step": 11000
    },
    {
      "epoch": 0.46294432591280543,
      "grad_norm": 10.775858879089355,
      "learning_rate": 6.76558914697476e-05,
      "loss": 1.5659,
      "step": 11500
    },
    {
      "epoch": 0.48307234008292743,
      "grad_norm": 10.806571960449219,
      "learning_rate": 6.711914442521101e-05,
      "loss": 1.5198,
      "step": 12000
    },
    {
      "epoch": 0.5032003542530494,
      "grad_norm": 9.920073509216309,
      "learning_rate": 6.658239738067442e-05,
      "loss": 1.4948,
      "step": 12500
    },
    {
      "epoch": 0.5233283684231713,
      "grad_norm": 18.91466522216797,
      "learning_rate": 6.604565033613784e-05,
      "loss": 1.4626,
      "step": 13000
    },
    {
      "epoch": 0.5434563825932933,
      "grad_norm": 13.546745300292969,
      "learning_rate": 6.550890329160125e-05,
      "loss": 1.4395,
      "step": 13500
    },
    {
      "epoch": 0.5635843967634153,
      "grad_norm": 17.364229202270508,
      "learning_rate": 6.497215624706467e-05,
      "loss": 1.4607,
      "step": 14000
    },
    {
      "epoch": 0.5837124109335373,
      "grad_norm": 6.96090841293335,
      "learning_rate": 6.443540920252808e-05,
      "loss": 1.4053,
      "step": 14500
    },
    {
      "epoch": 0.6038404251036593,
      "grad_norm": 11.06962776184082,
      "learning_rate": 6.389866215799149e-05,
      "loss": 1.3991,
      "step": 15000
    },
    {
      "epoch": 0.6239684392737812,
      "grad_norm": 7.865960597991943,
      "learning_rate": 6.336191511345491e-05,
      "loss": 1.3924,
      "step": 15500
    },
    {
      "epoch": 0.6440964534439032,
      "grad_norm": 11.192516326904297,
      "learning_rate": 6.282516806891833e-05,
      "loss": 1.3526,
      "step": 16000
    },
    {
      "epoch": 0.6642244676140252,
      "grad_norm": 16.678586959838867,
      "learning_rate": 6.228842102438174e-05,
      "loss": 1.3077,
      "step": 16500
    },
    {
      "epoch": 0.6843524817841472,
      "grad_norm": 10.391363143920898,
      "learning_rate": 6.175167397984516e-05,
      "loss": 1.2956,
      "step": 17000
    },
    {
      "epoch": 0.7044804959542692,
      "grad_norm": 14.913769721984863,
      "learning_rate": 6.121492693530857e-05,
      "loss": 1.2836,
      "step": 17500
    },
    {
      "epoch": 0.7246085101243911,
      "grad_norm": 8.673684120178223,
      "learning_rate": 6.067817989077198e-05,
      "loss": 1.3183,
      "step": 18000
    },
    {
      "epoch": 0.7447365242945131,
      "grad_norm": 16.5867977142334,
      "learning_rate": 6.0141432846235395e-05,
      "loss": 1.2462,
      "step": 18500
    },
    {
      "epoch": 0.7648645384646351,
      "grad_norm": 10.297107696533203,
      "learning_rate": 5.960468580169881e-05,
      "loss": 1.2573,
      "step": 19000
    },
    {
      "epoch": 0.7849925526347571,
      "grad_norm": 11.904420852661133,
      "learning_rate": 5.9067938757162224e-05,
      "loss": 1.2147,
      "step": 19500
    },
    {
      "epoch": 0.805120566804879,
      "grad_norm": 11.955267906188965,
      "learning_rate": 5.853119171262564e-05,
      "loss": 1.2139,
      "step": 20000
    },
    {
      "epoch": 0.825248580975001,
      "grad_norm": 15.51834487915039,
      "learning_rate": 5.7994444668089053e-05,
      "loss": 1.1777,
      "step": 20500
    },
    {
      "epoch": 0.845376595145123,
      "grad_norm": 18.978769302368164,
      "learning_rate": 5.745769762355246e-05,
      "loss": 1.18,
      "step": 21000
    },
    {
      "epoch": 0.865504609315245,
      "grad_norm": 10.92957592010498,
      "learning_rate": 5.692095057901588e-05,
      "loss": 1.1882,
      "step": 21500
    },
    {
      "epoch": 0.885632623485367,
      "grad_norm": 13.877032279968262,
      "learning_rate": 5.638420353447929e-05,
      "loss": 1.1327,
      "step": 22000
    },
    {
      "epoch": 0.9057606376554889,
      "grad_norm": 10.083035469055176,
      "learning_rate": 5.584745648994271e-05,
      "loss": 1.1209,
      "step": 22500
    },
    {
      "epoch": 0.9258886518256109,
      "grad_norm": 11.10254955291748,
      "learning_rate": 5.531070944540612e-05,
      "loss": 1.1248,
      "step": 23000
    },
    {
      "epoch": 0.9460166659957329,
      "grad_norm": 10.709171295166016,
      "learning_rate": 5.4773962400869534e-05,
      "loss": 1.1159,
      "step": 23500
    },
    {
      "epoch": 0.9661446801658549,
      "grad_norm": 6.209949016571045,
      "learning_rate": 5.423721535633295e-05,
      "loss": 1.1064,
      "step": 24000
    },
    {
      "epoch": 0.9862726943359769,
      "grad_norm": 9.580244064331055,
      "learning_rate": 5.370046831179636e-05,
      "loss": 1.099,
      "step": 24500
    },
    {
      "epoch": 1.0064007085060989,
      "grad_norm": 7.485245704650879,
      "learning_rate": 5.316372126725978e-05,
      "loss": 1.0562,
      "step": 25000
    },
    {
      "epoch": 1.0265287226762208,
      "grad_norm": 12.7084321975708,
      "learning_rate": 5.262697422272319e-05,
      "loss": 1.0241,
      "step": 25500
    },
    {
      "epoch": 1.0466567368463426,
      "grad_norm": 11.621143341064453,
      "learning_rate": 5.20902271781866e-05,
      "loss": 0.9977,
      "step": 26000
    },
    {
      "epoch": 1.0667847510164647,
      "grad_norm": 11.446859359741211,
      "learning_rate": 5.155348013365002e-05,
      "loss": 0.9963,
      "step": 26500
    },
    {
      "epoch": 1.0869127651865866,
      "grad_norm": 18.473350524902344,
      "learning_rate": 5.101673308911343e-05,
      "loss": 1.0238,
      "step": 27000
    },
    {
      "epoch": 1.1070407793567087,
      "grad_norm": 9.876648902893066,
      "learning_rate": 5.047998604457685e-05,
      "loss": 0.9826,
      "step": 27500
    },
    {
      "epoch": 1.1271687935268306,
      "grad_norm": 8.748170852661133,
      "learning_rate": 4.994323900004026e-05,
      "loss": 0.9525,
      "step": 28000
    },
    {
      "epoch": 1.1472968076969525,
      "grad_norm": 15.020739555358887,
      "learning_rate": 4.940649195550367e-05,
      "loss": 0.9606,
      "step": 28500
    },
    {
      "epoch": 1.1674248218670746,
      "grad_norm": 12.53282642364502,
      "learning_rate": 4.886974491096709e-05,
      "loss": 0.9428,
      "step": 29000
    },
    {
      "epoch": 1.1875528360371965,
      "grad_norm": 22.736968994140625,
      "learning_rate": 4.83329978664305e-05,
      "loss": 0.9665,
      "step": 29500
    },
    {
      "epoch": 1.2076808502073186,
      "grad_norm": 10.772764205932617,
      "learning_rate": 4.779625082189392e-05,
      "loss": 0.9221,
      "step": 30000
    },
    {
      "epoch": 1.2278088643774405,
      "grad_norm": 19.697633743286133,
      "learning_rate": 4.725950377735733e-05,
      "loss": 0.9411,
      "step": 30500
    },
    {
      "epoch": 1.2479368785475624,
      "grad_norm": 12.230613708496094,
      "learning_rate": 4.672275673282074e-05,
      "loss": 0.9224,
      "step": 31000
    },
    {
      "epoch": 1.2680648927176845,
      "grad_norm": 12.695961952209473,
      "learning_rate": 4.618600968828416e-05,
      "loss": 0.911,
      "step": 31500
    },
    {
      "epoch": 1.2881929068878064,
      "grad_norm": 10.137328147888184,
      "learning_rate": 4.564926264374757e-05,
      "loss": 0.8909,
      "step": 32000
    },
    {
      "epoch": 1.3083209210579283,
      "grad_norm": 14.675042152404785,
      "learning_rate": 4.511251559921099e-05,
      "loss": 0.8834,
      "step": 32500
    },
    {
      "epoch": 1.3284489352280504,
      "grad_norm": 9.264530181884766,
      "learning_rate": 4.4575768554674396e-05,
      "loss": 0.8855,
      "step": 33000
    },
    {
      "epoch": 1.3485769493981723,
      "grad_norm": 18.74481201171875,
      "learning_rate": 4.403902151013781e-05,
      "loss": 0.8788,
      "step": 33500
    },
    {
      "epoch": 1.3687049635682944,
      "grad_norm": 8.973100662231445,
      "learning_rate": 4.350227446560123e-05,
      "loss": 0.863,
      "step": 34000
    },
    {
      "epoch": 1.3888329777384163,
      "grad_norm": 7.543423175811768,
      "learning_rate": 4.296552742106464e-05,
      "loss": 0.888,
      "step": 34500
    },
    {
      "epoch": 1.4089609919085384,
      "grad_norm": 11.946640014648438,
      "learning_rate": 4.242878037652806e-05,
      "loss": 0.8757,
      "step": 35000
    },
    {
      "epoch": 1.4290890060786603,
      "grad_norm": 13.646244049072266,
      "learning_rate": 4.189203333199147e-05,
      "loss": 0.848,
      "step": 35500
    },
    {
      "epoch": 1.4492170202487822,
      "grad_norm": 12.406970977783203,
      "learning_rate": 4.1355286287454876e-05,
      "loss": 0.8628,
      "step": 36000
    },
    {
      "epoch": 1.4693450344189043,
      "grad_norm": 8.028214454650879,
      "learning_rate": 4.08185392429183e-05,
      "loss": 0.8727,
      "step": 36500
    },
    {
      "epoch": 1.4894730485890262,
      "grad_norm": 13.901910781860352,
      "learning_rate": 4.0281792198381705e-05,
      "loss": 0.8496,
      "step": 37000
    },
    {
      "epoch": 1.509601062759148,
      "grad_norm": 7.470578193664551,
      "learning_rate": 3.974504515384512e-05,
      "loss": 0.8504,
      "step": 37500
    },
    {
      "epoch": 1.5297290769292702,
      "grad_norm": 7.381893634796143,
      "learning_rate": 3.920829810930854e-05,
      "loss": 0.8462,
      "step": 38000
    },
    {
      "epoch": 1.5498570910993923,
      "grad_norm": 8.749372482299805,
      "learning_rate": 3.8671551064771956e-05,
      "loss": 0.805,
      "step": 38500
    },
    {
      "epoch": 1.569985105269514,
      "grad_norm": 15.834980964660645,
      "learning_rate": 3.813480402023537e-05,
      "loss": 0.815,
      "step": 39000
    },
    {
      "epoch": 1.590113119439636,
      "grad_norm": 8.48942756652832,
      "learning_rate": 3.7598056975698785e-05,
      "loss": 0.811,
      "step": 39500
    },
    {
      "epoch": 1.6102411336097582,
      "grad_norm": 17.017009735107422,
      "learning_rate": 3.706130993116219e-05,
      "loss": 0.8117,
      "step": 40000
    },
    {
      "epoch": 1.63036914777988,
      "grad_norm": 10.621646881103516,
      "learning_rate": 3.652456288662561e-05,
      "loss": 0.794,
      "step": 40500
    },
    {
      "epoch": 1.650497161950002,
      "grad_norm": 12.966856002807617,
      "learning_rate": 3.598781584208902e-05,
      "loss": 0.8064,
      "step": 41000
    },
    {
      "epoch": 1.670625176120124,
      "grad_norm": 5.664041519165039,
      "learning_rate": 3.5451068797552436e-05,
      "loss": 0.8125,
      "step": 41500
    },
    {
      "epoch": 1.690753190290246,
      "grad_norm": 9.313810348510742,
      "learning_rate": 3.491432175301585e-05,
      "loss": 0.8086,
      "step": 42000
    },
    {
      "epoch": 1.7108812044603678,
      "grad_norm": 7.844326019287109,
      "learning_rate": 3.4377574708479265e-05,
      "loss": 0.7769,
      "step": 42500
    },
    {
      "epoch": 1.73100921863049,
      "grad_norm": 11.637877464294434,
      "learning_rate": 3.384082766394268e-05,
      "loss": 0.7954,
      "step": 43000
    },
    {
      "epoch": 1.7511372328006118,
      "grad_norm": 10.841877937316895,
      "learning_rate": 3.3304080619406094e-05,
      "loss": 0.7756,
      "step": 43500
    },
    {
      "epoch": 1.7712652469707337,
      "grad_norm": 8.754426002502441,
      "learning_rate": 3.276733357486951e-05,
      "loss": 0.7891,
      "step": 44000
    },
    {
      "epoch": 1.7913932611408558,
      "grad_norm": 7.297679901123047,
      "learning_rate": 3.223058653033292e-05,
      "loss": 0.7789,
      "step": 44500
    },
    {
      "epoch": 1.811521275310978,
      "grad_norm": 11.88018798828125,
      "learning_rate": 3.169383948579634e-05,
      "loss": 0.7613,
      "step": 45000
    },
    {
      "epoch": 1.8316492894810998,
      "grad_norm": 13.821844100952148,
      "learning_rate": 3.1157092441259745e-05,
      "loss": 0.7653,
      "step": 45500
    },
    {
      "epoch": 1.8517773036512217,
      "grad_norm": 12.671835899353027,
      "learning_rate": 3.062034539672316e-05,
      "loss": 0.7838,
      "step": 46000
    },
    {
      "epoch": 1.8719053178213438,
      "grad_norm": 12.358460426330566,
      "learning_rate": 3.0083598352186574e-05,
      "loss": 0.7845,
      "step": 46500
    },
    {
      "epoch": 1.8920333319914657,
      "grad_norm": 11.423474311828613,
      "learning_rate": 2.9546851307649992e-05,
      "loss": 0.757,
      "step": 47000
    },
    {
      "epoch": 1.9121613461615876,
      "grad_norm": 13.875541687011719,
      "learning_rate": 2.9010104263113407e-05,
      "loss": 0.776,
      "step": 47500
    },
    {
      "epoch": 1.9322893603317097,
      "grad_norm": 8.722099304199219,
      "learning_rate": 2.8473357218576815e-05,
      "loss": 0.7385,
      "step": 48000
    },
    {
      "epoch": 1.9524173745018316,
      "grad_norm": 18.633831024169922,
      "learning_rate": 2.793661017404023e-05,
      "loss": 0.7401,
      "step": 48500
    },
    {
      "epoch": 1.9725453886719535,
      "grad_norm": 8.491524696350098,
      "learning_rate": 2.7399863129503647e-05,
      "loss": 0.7236,
      "step": 49000
    },
    {
      "epoch": 1.9926734028420756,
      "grad_norm": 3.7893919944763184,
      "learning_rate": 2.686311608496706e-05,
      "loss": 0.7242,
      "step": 49500
    },
    {
      "epoch": 2.0128014170121977,
      "grad_norm": 8.858596801757812,
      "learning_rate": 2.6326369040430476e-05,
      "loss": 0.6995,
      "step": 50000
    },
    {
      "epoch": 2.0329294311823194,
      "grad_norm": 9.343021392822266,
      "learning_rate": 2.5789621995893884e-05,
      "loss": 0.6833,
      "step": 50500
    },
    {
      "epoch": 2.0530574453524415,
      "grad_norm": 11.943695068359375,
      "learning_rate": 2.5252874951357302e-05,
      "loss": 0.6796,
      "step": 51000
    },
    {
      "epoch": 2.0731854595225636,
      "grad_norm": 8.132844924926758,
      "learning_rate": 2.4716127906820716e-05,
      "loss": 0.6655,
      "step": 51500
    },
    {
      "epoch": 2.0933134736926853,
      "grad_norm": 14.167099952697754,
      "learning_rate": 2.417938086228413e-05,
      "loss": 0.6843,
      "step": 52000
    },
    {
      "epoch": 2.1134414878628074,
      "grad_norm": 7.767176628112793,
      "learning_rate": 2.3642633817747545e-05,
      "loss": 0.6785,
      "step": 52500
    },
    {
      "epoch": 2.1335695020329295,
      "grad_norm": 12.753235816955566,
      "learning_rate": 2.3105886773210956e-05,
      "loss": 0.6591,
      "step": 53000
    },
    {
      "epoch": 2.1536975162030516,
      "grad_norm": 15.783185005187988,
      "learning_rate": 2.256913972867437e-05,
      "loss": 0.6455,
      "step": 53500
    },
    {
      "epoch": 2.1738255303731733,
      "grad_norm": 17.965497970581055,
      "learning_rate": 2.2032392684137785e-05,
      "loss": 0.6674,
      "step": 54000
    },
    {
      "epoch": 2.1939535445432954,
      "grad_norm": 18.289630889892578,
      "learning_rate": 2.14956456396012e-05,
      "loss": 0.6584,
      "step": 54500
    },
    {
      "epoch": 2.2140815587134175,
      "grad_norm": 17.599037170410156,
      "learning_rate": 2.0958898595064615e-05,
      "loss": 0.6432,
      "step": 55000
    },
    {
      "epoch": 2.234209572883539,
      "grad_norm": 8.277852058410645,
      "learning_rate": 2.042215155052803e-05,
      "loss": 0.6373,
      "step": 55500
    },
    {
      "epoch": 2.2543375870536613,
      "grad_norm": 11.451165199279785,
      "learning_rate": 1.988540450599144e-05,
      "loss": 0.651,
      "step": 56000
    },
    {
      "epoch": 2.2744656012237834,
      "grad_norm": 7.643725872039795,
      "learning_rate": 1.9348657461454855e-05,
      "loss": 0.6928,
      "step": 56500
    },
    {
      "epoch": 2.294593615393905,
      "grad_norm": 11.526747703552246,
      "learning_rate": 1.881191041691827e-05,
      "loss": 0.6581,
      "step": 57000
    },
    {
      "epoch": 2.314721629564027,
      "grad_norm": 19.298818588256836,
      "learning_rate": 1.827516337238168e-05,
      "loss": 0.625,
      "step": 57500
    },
    {
      "epoch": 2.3348496437341493,
      "grad_norm": 14.259527206420898,
      "learning_rate": 1.7738416327845095e-05,
      "loss": 0.6402,
      "step": 58000
    },
    {
      "epoch": 2.3549776579042714,
      "grad_norm": 11.155771255493164,
      "learning_rate": 1.720166928330851e-05,
      "loss": 0.6362,
      "step": 58500
    },
    {
      "epoch": 2.375105672074393,
      "grad_norm": 18.191736221313477,
      "learning_rate": 1.6664922238771924e-05,
      "loss": 0.6282,
      "step": 59000
    },
    {
      "epoch": 2.395233686244515,
      "grad_norm": 14.766573905944824,
      "learning_rate": 1.612817519423534e-05,
      "loss": 0.6315,
      "step": 59500
    },
    {
      "epoch": 2.4153617004146373,
      "grad_norm": 13.01074504852295,
      "learning_rate": 1.5591428149698753e-05,
      "loss": 0.6321,
      "step": 60000
    },
    {
      "epoch": 2.435489714584759,
      "grad_norm": 8.57907485961914,
      "learning_rate": 1.5054681105162166e-05,
      "loss": 0.6049,
      "step": 60500
    },
    {
      "epoch": 2.455617728754881,
      "grad_norm": 9.666173934936523,
      "learning_rate": 1.451793406062558e-05,
      "loss": 0.6389,
      "step": 61000
    },
    {
      "epoch": 2.475745742925003,
      "grad_norm": 11.621274948120117,
      "learning_rate": 1.3981187016088993e-05,
      "loss": 0.6134,
      "step": 61500
    },
    {
      "epoch": 2.495873757095125,
      "grad_norm": 6.330214500427246,
      "learning_rate": 1.3444439971552408e-05,
      "loss": 0.6183,
      "step": 62000
    },
    {
      "epoch": 2.516001771265247,
      "grad_norm": 5.045897006988525,
      "learning_rate": 1.2907692927015822e-05,
      "loss": 0.6184,
      "step": 62500
    },
    {
      "epoch": 2.536129785435369,
      "grad_norm": 5.904106140136719,
      "learning_rate": 1.2370945882479235e-05,
      "loss": 0.6352,
      "step": 63000
    },
    {
      "epoch": 2.5562577996054907,
      "grad_norm": 8.777071952819824,
      "learning_rate": 1.183419883794265e-05,
      "loss": 0.6343,
      "step": 63500
    },
    {
      "epoch": 2.576385813775613,
      "grad_norm": 14.809232711791992,
      "learning_rate": 1.1297451793406064e-05,
      "loss": 0.635,
      "step": 64000
    },
    {
      "epoch": 2.596513827945735,
      "grad_norm": 9.59287166595459,
      "learning_rate": 1.0760704748869477e-05,
      "loss": 0.6307,
      "step": 64500
    },
    {
      "epoch": 2.6166418421158566,
      "grad_norm": 4.077784538269043,
      "learning_rate": 1.0223957704332891e-05,
      "loss": 0.614,
      "step": 65000
    },
    {
      "epoch": 2.6367698562859787,
      "grad_norm": 17.284059524536133,
      "learning_rate": 9.687210659796306e-06,
      "loss": 0.6556,
      "step": 65500
    },
    {
      "epoch": 2.656897870456101,
      "grad_norm": 13.630504608154297,
      "learning_rate": 9.150463615259719e-06,
      "loss": 0.5926,
      "step": 66000
    },
    {
      "epoch": 2.677025884626223,
      "grad_norm": 12.159080505371094,
      "learning_rate": 8.613716570723133e-06,
      "loss": 0.6097,
      "step": 66500
    },
    {
      "epoch": 2.6971538987963446,
      "grad_norm": 11.280355453491211,
      "learning_rate": 8.076969526186546e-06,
      "loss": 0.614,
      "step": 67000
    },
    {
      "epoch": 2.7172819129664667,
      "grad_norm": 16.78717613220215,
      "learning_rate": 7.540222481649961e-06,
      "loss": 0.6232,
      "step": 67500
    },
    {
      "epoch": 2.737409927136589,
      "grad_norm": 10.830788612365723,
      "learning_rate": 7.003475437113375e-06,
      "loss": 0.5923,
      "step": 68000
    },
    {
      "epoch": 2.757537941306711,
      "grad_norm": 6.981812000274658,
      "learning_rate": 6.466728392576789e-06,
      "loss": 0.5949,
      "step": 68500
    },
    {
      "epoch": 2.7776659554768326,
      "grad_norm": 9.136080741882324,
      "learning_rate": 5.929981348040202e-06,
      "loss": 0.6128,
      "step": 69000
    },
    {
      "epoch": 2.7977939696469547,
      "grad_norm": 10.217459678649902,
      "learning_rate": 5.393234303503617e-06,
      "loss": 0.5998,
      "step": 69500
    },
    {
      "epoch": 2.817921983817077,
      "grad_norm": 10.00894832611084,
      "learning_rate": 4.8564872589670306e-06,
      "loss": 0.5922,
      "step": 70000
    },
    {
      "epoch": 2.8380499979871985,
      "grad_norm": 12.962599754333496,
      "learning_rate": 4.319740214430444e-06,
      "loss": 0.5883,
      "step": 70500
    },
    {
      "epoch": 2.8581780121573206,
      "grad_norm": 13.627707481384277,
      "learning_rate": 3.7829931698938588e-06,
      "loss": 0.5994,
      "step": 71000
    },
    {
      "epoch": 2.8783060263274427,
      "grad_norm": 11.855643272399902,
      "learning_rate": 3.2462461253572724e-06,
      "loss": 0.6179,
      "step": 71500
    },
    {
      "epoch": 2.8984340404975644,
      "grad_norm": 9.207647323608398,
      "learning_rate": 2.7094990808206865e-06,
      "loss": 0.6169,
      "step": 72000
    },
    {
      "epoch": 2.9185620546676865,
      "grad_norm": 13.871038436889648,
      "learning_rate": 2.1727520362841e-06,
      "loss": 0.6037,
      "step": 72500
    },
    {
      "epoch": 2.9386900688378086,
      "grad_norm": 10.001399040222168,
      "learning_rate": 1.6360049917475143e-06,
      "loss": 0.619,
      "step": 73000
    },
    {
      "epoch": 2.9588180830079303,
      "grad_norm": 15.267190933227539,
      "learning_rate": 1.0992579472109282e-06,
      "loss": 0.6055,
      "step": 73500
    },
    {
      "epoch": 2.9789460971780524,
      "grad_norm": 3.722136974334717,
      "learning_rate": 5.625109026743423e-07,
      "loss": 0.591,
      "step": 74000
    },
    {
      "epoch": 2.9990741113481745,
      "grad_norm": 9.667593955993652,
      "learning_rate": 2.576385813775613e-08,
      "loss": 0.6077,
      "step": 74500
    },
    {
      "epoch": 3.0,
      "step": 74523,
      "total_flos": 1.6739482297393152e+16,
      "train_loss": 1.0437313645233104,
      "train_runtime": 12605.9624,
      "train_samples_per_second": 94.587,
      "train_steps_per_second": 5.912
    }
  ],
  "logging_steps": 500,
  "max_steps": 74523,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": false,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 1.6739482297393152e+16,
  "train_batch_size": 16,
  "trial_name": null,
  "trial_params": null
}