{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.0,
  "eval_steps": 500,
  "global_step": 1869,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0005350454788657035,
      "grad_norm": 23.14862717209723,
      "learning_rate": 5.3475935828877005e-08,
      "loss": 1.3341,
      "step": 1
    },
    {
      "epoch": 0.002675227394328518,
      "grad_norm": 23.77620185109602,
      "learning_rate": 2.6737967914438503e-07,
      "loss": 1.355,
      "step": 5
    },
    {
      "epoch": 0.005350454788657036,
      "grad_norm": 14.708477613885714,
      "learning_rate": 5.347593582887701e-07,
      "loss": 1.3095,
      "step": 10
    },
    {
      "epoch": 0.008025682182985553,
      "grad_norm": 12.246536127500569,
      "learning_rate": 8.021390374331551e-07,
      "loss": 1.1577,
      "step": 15
    },
    {
      "epoch": 0.010700909577314071,
      "grad_norm": 9.616225734354739,
      "learning_rate": 1.0695187165775401e-06,
      "loss": 1.042,
      "step": 20
    },
    {
      "epoch": 0.01337613697164259,
      "grad_norm": 3.4924370385559005,
      "learning_rate": 1.3368983957219254e-06,
      "loss": 0.9362,
      "step": 25
    },
    {
      "epoch": 0.016051364365971106,
      "grad_norm": 3.5822272920001237,
      "learning_rate": 1.6042780748663103e-06,
      "loss": 0.9197,
      "step": 30
    },
    {
      "epoch": 0.018726591760299626,
      "grad_norm": 2.9720674905316393,
      "learning_rate": 1.8716577540106954e-06,
      "loss": 0.8822,
      "step": 35
    },
    {
      "epoch": 0.021401819154628143,
      "grad_norm": 3.0124107376781093,
      "learning_rate": 2.1390374331550802e-06,
      "loss": 0.8698,
      "step": 40
    },
    {
      "epoch": 0.024077046548956663,
      "grad_norm": 2.963506013876851,
      "learning_rate": 2.4064171122994653e-06,
      "loss": 0.8525,
      "step": 45
    },
    {
      "epoch": 0.02675227394328518,
      "grad_norm": 3.160528243293502,
      "learning_rate": 2.673796791443851e-06,
      "loss": 0.849,
      "step": 50
    },
    {
      "epoch": 0.029427501337613696,
      "grad_norm": 2.8952214901225934,
      "learning_rate": 2.9411764705882355e-06,
      "loss": 0.8312,
      "step": 55
    },
    {
      "epoch": 0.03210272873194221,
      "grad_norm": 3.173564660838415,
      "learning_rate": 3.2085561497326205e-06,
      "loss": 0.8168,
      "step": 60
    },
    {
      "epoch": 0.034777956126270736,
      "grad_norm": 3.0478716727585176,
      "learning_rate": 3.4759358288770056e-06,
      "loss": 0.818,
      "step": 65
    },
    {
      "epoch": 0.03745318352059925,
      "grad_norm": 3.194717995617946,
      "learning_rate": 3.7433155080213907e-06,
      "loss": 0.7971,
      "step": 70
    },
    {
      "epoch": 0.04012841091492777,
      "grad_norm": 3.1820878704526296,
      "learning_rate": 4.010695187165775e-06,
      "loss": 0.7929,
      "step": 75
    },
    {
      "epoch": 0.042803638309256285,
      "grad_norm": 3.228418173848044,
      "learning_rate": 4.2780748663101604e-06,
      "loss": 0.7792,
      "step": 80
    },
    {
      "epoch": 0.0454788657035848,
      "grad_norm": 3.384522613177732,
      "learning_rate": 4.5454545454545455e-06,
      "loss": 0.7775,
      "step": 85
    },
    {
      "epoch": 0.048154093097913325,
      "grad_norm": 3.183805408679174,
      "learning_rate": 4.812834224598931e-06,
      "loss": 0.7577,
      "step": 90
    },
    {
      "epoch": 0.05082932049224184,
      "grad_norm": 3.4760481232015388,
      "learning_rate": 5.0802139037433165e-06,
      "loss": 0.7563,
      "step": 95
    },
    {
      "epoch": 0.05350454788657036,
      "grad_norm": 3.1673853649146277,
      "learning_rate": 5.347593582887702e-06,
      "loss": 0.749,
      "step": 100
    },
    {
      "epoch": 0.056179775280898875,
      "grad_norm": 3.0292770505818254,
      "learning_rate": 5.614973262032086e-06,
      "loss": 0.7446,
      "step": 105
    },
    {
      "epoch": 0.05885500267522739,
      "grad_norm": 3.116848479877184,
      "learning_rate": 5.882352941176471e-06,
      "loss": 0.7439,
      "step": 110
    },
    {
      "epoch": 0.061530230069555915,
      "grad_norm": 3.2398528780308067,
      "learning_rate": 6.149732620320856e-06,
      "loss": 0.7466,
      "step": 115
    },
    {
      "epoch": 0.06420545746388442,
      "grad_norm": 3.303341515373773,
      "learning_rate": 6.417112299465241e-06,
      "loss": 0.7311,
      "step": 120
    },
    {
      "epoch": 0.06688068485821295,
      "grad_norm": 3.29094819560745,
      "learning_rate": 6.684491978609626e-06,
      "loss": 0.7417,
      "step": 125
    },
    {
      "epoch": 0.06955591225254147,
      "grad_norm": 3.0583806564933314,
      "learning_rate": 6.951871657754011e-06,
      "loss": 0.7215,
      "step": 130
    },
    {
      "epoch": 0.07223113964686999,
      "grad_norm": 2.922761971640661,
      "learning_rate": 7.219251336898396e-06,
      "loss": 0.7148,
      "step": 135
    },
    {
      "epoch": 0.0749063670411985,
      "grad_norm": 3.0911163153076058,
      "learning_rate": 7.486631016042781e-06,
      "loss": 0.7383,
      "step": 140
    },
    {
      "epoch": 0.07758159443552702,
      "grad_norm": 2.8281929004877924,
      "learning_rate": 7.754010695187166e-06,
      "loss": 0.7043,
      "step": 145
    },
    {
      "epoch": 0.08025682182985554,
      "grad_norm": 3.1040357514576953,
      "learning_rate": 8.02139037433155e-06,
      "loss": 0.7151,
      "step": 150
    },
    {
      "epoch": 0.08293204922418405,
      "grad_norm": 3.1558916042229908,
      "learning_rate": 8.288770053475937e-06,
      "loss": 0.7143,
      "step": 155
    },
    {
      "epoch": 0.08560727661851257,
      "grad_norm": 2.943346250498008,
      "learning_rate": 8.556149732620321e-06,
      "loss": 0.7013,
      "step": 160
    },
    {
      "epoch": 0.08828250401284109,
      "grad_norm": 3.00338601363791,
      "learning_rate": 8.823529411764707e-06,
      "loss": 0.7168,
      "step": 165
    },
    {
      "epoch": 0.0909577314071696,
      "grad_norm": 2.9568937036058505,
      "learning_rate": 9.090909090909091e-06,
      "loss": 0.7077,
      "step": 170
    },
    {
      "epoch": 0.09363295880149813,
      "grad_norm": 3.0004482748153056,
      "learning_rate": 9.358288770053477e-06,
      "loss": 0.7111,
      "step": 175
    },
    {
      "epoch": 0.09630818619582665,
      "grad_norm": 2.7753342166626074,
      "learning_rate": 9.625668449197861e-06,
      "loss": 0.7216,
      "step": 180
    },
    {
      "epoch": 0.09898341359015517,
      "grad_norm": 2.9030858254206264,
      "learning_rate": 9.893048128342247e-06,
      "loss": 0.7118,
      "step": 185
    },
    {
      "epoch": 0.10165864098448368,
      "grad_norm": 2.685107361232292,
      "learning_rate": 9.999921507322408e-06,
      "loss": 0.7021,
      "step": 190
    },
    {
      "epoch": 0.1043338683788122,
      "grad_norm": 2.685192038995974,
      "learning_rate": 9.999441838772916e-06,
      "loss": 0.6988,
      "step": 195
    },
    {
      "epoch": 0.10700909577314072,
      "grad_norm": 2.918955400249994,
      "learning_rate": 9.99852615049999e-06,
      "loss": 0.6967,
      "step": 200
    },
    {
      "epoch": 0.10968432316746923,
      "grad_norm": 2.869330230423005,
      "learning_rate": 9.997174522364177e-06,
      "loss": 0.6947,
      "step": 205
    },
    {
      "epoch": 0.11235955056179775,
      "grad_norm": 2.703271557308526,
      "learning_rate": 9.995387072245939e-06,
      "loss": 0.7111,
      "step": 210
    },
    {
      "epoch": 0.11503477795612627,
      "grad_norm": 2.747549810108466,
      "learning_rate": 9.993163956035381e-06,
      "loss": 0.7059,
      "step": 215
    },
    {
      "epoch": 0.11771000535045478,
      "grad_norm": 3.111810523251162,
      "learning_rate": 9.990505367618647e-06,
      "loss": 0.7081,
      "step": 220
    },
    {
      "epoch": 0.12038523274478331,
      "grad_norm": 2.59798000022919,
      "learning_rate": 9.987411538861023e-06,
      "loss": 0.6963,
      "step": 225
    },
    {
      "epoch": 0.12306046013911183,
      "grad_norm": 2.6949538130277695,
      "learning_rate": 9.9838827395867e-06,
      "loss": 0.6935,
      "step": 230
    },
    {
      "epoch": 0.12573568753344033,
      "grad_norm": 2.628051172115821,
      "learning_rate": 9.979919277555247e-06,
      "loss": 0.6869,
      "step": 235
    },
    {
      "epoch": 0.12841091492776885,
      "grad_norm": 2.5260508130338133,
      "learning_rate": 9.97552149843478e-06,
      "loss": 0.6975,
      "step": 240
    },
    {
      "epoch": 0.13108614232209737,
      "grad_norm": 2.6307934786366896,
      "learning_rate": 9.970689785771798e-06,
      "loss": 0.6972,
      "step": 245
    },
    {
      "epoch": 0.1337613697164259,
      "grad_norm": 2.9060873285695155,
      "learning_rate": 9.96542456095775e-06,
      "loss": 0.6914,
      "step": 250
    },
    {
      "epoch": 0.13643659711075443,
      "grad_norm": 2.758259640760469,
      "learning_rate": 9.95972628319227e-06,
      "loss": 0.6899,
      "step": 255
    },
    {
      "epoch": 0.13911182450508294,
      "grad_norm": 2.686451625364134,
      "learning_rate": 9.953595449443134e-06,
      "loss": 0.6975,
      "step": 260
    },
    {
      "epoch": 0.14178705189941146,
      "grad_norm": 2.654857011678671,
      "learning_rate": 9.947032594402917e-06,
      "loss": 0.6882,
      "step": 265
    },
    {
      "epoch": 0.14446227929373998,
      "grad_norm": 3.0168306473010813,
      "learning_rate": 9.940038290442362e-06,
      "loss": 0.6798,
      "step": 270
    },
    {
      "epoch": 0.1471375066880685,
      "grad_norm": 2.581123619862379,
      "learning_rate": 9.932613147560464e-06,
      "loss": 0.6996,
      "step": 275
    },
    {
      "epoch": 0.149812734082397,
      "grad_norm": 2.5125742789046988,
      "learning_rate": 9.924757813331256e-06,
      "loss": 0.6792,
      "step": 280
    },
    {
      "epoch": 0.15248796147672553,
      "grad_norm": 2.6254290649886274,
      "learning_rate": 9.916472972847353e-06,
      "loss": 0.681,
      "step": 285
    },
    {
      "epoch": 0.15516318887105404,
      "grad_norm": 2.5307860850663197,
      "learning_rate": 9.907759348660186e-06,
      "loss": 0.6597,
      "step": 290
    },
    {
      "epoch": 0.15783841626538256,
      "grad_norm": 2.7548412137167997,
      "learning_rate": 9.898617700716988e-06,
      "loss": 0.6708,
      "step": 295
    },
    {
      "epoch": 0.16051364365971107,
      "grad_norm": 2.5617275751142894,
      "learning_rate": 9.889048826294527e-06,
      "loss": 0.6816,
      "step": 300
    },
    {
      "epoch": 0.1631888710540396,
      "grad_norm": 2.454236841573611,
      "learning_rate": 9.879053559929556e-06,
      "loss": 0.6685,
      "step": 305
    },
    {
      "epoch": 0.1658640984483681,
      "grad_norm": 2.522665651038106,
      "learning_rate": 9.868632773346044e-06,
      "loss": 0.6689,
      "step": 310
    },
    {
      "epoch": 0.16853932584269662,
      "grad_norm": 2.531527428246295,
      "learning_rate": 9.857787375379144e-06,
      "loss": 0.6681,
      "step": 315
    },
    {
      "epoch": 0.17121455323702514,
      "grad_norm": 2.4693271769445175,
      "learning_rate": 9.84651831189593e-06,
      "loss": 0.6616,
      "step": 320
    },
    {
      "epoch": 0.17388978063135366,
      "grad_norm": 2.537474607100158,
      "learning_rate": 9.834826565712901e-06,
      "loss": 0.6613,
      "step": 325
    },
    {
      "epoch": 0.17656500802568217,
      "grad_norm": 2.908331796282016,
      "learning_rate": 9.822713156510278e-06,
      "loss": 0.6542,
      "step": 330
    },
    {
      "epoch": 0.1792402354200107,
      "grad_norm": 2.536053924127175,
      "learning_rate": 9.81017914074306e-06,
      "loss": 0.6596,
      "step": 335
    },
    {
      "epoch": 0.1819154628143392,
      "grad_norm": 2.392506050203404,
      "learning_rate": 9.797225611548896e-06,
      "loss": 0.6466,
      "step": 340
    },
    {
      "epoch": 0.18459069020866772,
      "grad_norm": 2.4001855751228414,
      "learning_rate": 9.783853698652737e-06,
      "loss": 0.6726,
      "step": 345
    },
    {
      "epoch": 0.18726591760299627,
      "grad_norm": 2.4631052280555163,
      "learning_rate": 9.770064568268329e-06,
      "loss": 0.6531,
      "step": 350
    },
    {
      "epoch": 0.18994114499732478,
      "grad_norm": 2.5426722525248384,
      "learning_rate": 9.75585942299648e-06,
      "loss": 0.6506,
      "step": 355
    },
    {
      "epoch": 0.1926163723916533,
      "grad_norm": 2.5531515576743997,
      "learning_rate": 9.741239501720197e-06,
      "loss": 0.6687,
      "step": 360
    },
    {
      "epoch": 0.19529159978598182,
      "grad_norm": 2.5933299113098416,
      "learning_rate": 9.726206079496619e-06,
      "loss": 0.649,
      "step": 365
    },
    {
      "epoch": 0.19796682718031033,
      "grad_norm": 2.556848619149725,
      "learning_rate": 9.71076046744583e-06,
      "loss": 0.6557,
      "step": 370
    },
    {
      "epoch": 0.20064205457463885,
      "grad_norm": 2.5209850469412944,
      "learning_rate": 9.694904012636509e-06,
      "loss": 0.6386,
      "step": 375
    },
    {
      "epoch": 0.20331728196896737,
      "grad_norm": 2.630494859882968,
      "learning_rate": 9.678638097968435e-06,
      "loss": 0.6565,
      "step": 380
    },
    {
      "epoch": 0.20599250936329588,
      "grad_norm": 2.4636175372087408,
      "learning_rate": 9.661964142051896e-06,
      "loss": 0.6592,
      "step": 385
    },
    {
      "epoch": 0.2086677367576244,
      "grad_norm": 2.608708318283724,
      "learning_rate": 9.644883599083959e-06,
      "loss": 0.6462,
      "step": 390
    },
    {
      "epoch": 0.21134296415195292,
      "grad_norm": 2.4564835809094094,
      "learning_rate": 9.627397958721638e-06,
      "loss": 0.6303,
      "step": 395
    },
    {
      "epoch": 0.21401819154628143,
      "grad_norm": 2.628178024205104,
      "learning_rate": 9.609508745951988e-06,
      "loss": 0.6531,
      "step": 400
    },
    {
      "epoch": 0.21669341894060995,
      "grad_norm": 2.3740161535009086,
      "learning_rate": 9.591217520959095e-06,
      "loss": 0.6378,
      "step": 405
    },
    {
      "epoch": 0.21936864633493847,
      "grad_norm": 2.543725289784257,
      "learning_rate": 9.572525878988014e-06,
      "loss": 0.6499,
      "step": 410
    },
    {
      "epoch": 0.22204387372926698,
      "grad_norm": 2.500580635281771,
      "learning_rate": 9.55343545020564e-06,
      "loss": 0.6584,
      "step": 415
    },
    {
      "epoch": 0.2247191011235955,
      "grad_norm": 2.505489876568994,
      "learning_rate": 9.533947899558521e-06,
      "loss": 0.613,
      "step": 420
    },
    {
      "epoch": 0.22739432851792402,
      "grad_norm": 2.3654360169031095,
      "learning_rate": 9.514064926627684e-06,
      "loss": 0.6378,
      "step": 425
    },
    {
      "epoch": 0.23006955591225253,
      "grad_norm": 2.4607999942944696,
      "learning_rate": 9.49378826548037e-06,
      "loss": 0.6315,
      "step": 430
    },
    {
      "epoch": 0.23274478330658105,
      "grad_norm": 2.5106286517681893,
      "learning_rate": 9.473119684518834e-06,
      "loss": 0.6093,
      "step": 435
    },
    {
      "epoch": 0.23542001070090957,
      "grad_norm": 2.3439624704485684,
      "learning_rate": 9.452060986326088e-06,
      "loss": 0.6158,
      "step": 440
    },
    {
      "epoch": 0.23809523809523808,
      "grad_norm": 2.5382097517829103,
      "learning_rate": 9.430614007508712e-06,
      "loss": 0.6404,
      "step": 445
    },
    {
      "epoch": 0.24077046548956663,
      "grad_norm": 2.3306242351666304,
      "learning_rate": 9.408780618536664e-06,
      "loss": 0.6287,
      "step": 450
    },
    {
      "epoch": 0.24344569288389514,
      "grad_norm": 2.4713046988918976,
      "learning_rate": 9.386562723580155e-06,
      "loss": 0.639,
      "step": 455
    },
    {
      "epoch": 0.24612092027822366,
      "grad_norm": 2.504614194214681,
      "learning_rate": 9.363962260343577e-06,
      "loss": 0.6267,
      "step": 460
    },
    {
      "epoch": 0.24879614767255218,
      "grad_norm": 2.401453830090896,
      "learning_rate": 9.340981199896515e-06,
      "loss": 0.6193,
      "step": 465
    },
    {
      "epoch": 0.25147137506688066,
      "grad_norm": 2.529978330035686,
      "learning_rate": 9.317621546501827e-06,
      "loss": 0.6306,
      "step": 470
    },
    {
      "epoch": 0.2541466024612092,
      "grad_norm": 2.4063433996440384,
      "learning_rate": 9.293885337440869e-06,
      "loss": 0.6245,
      "step": 475
    },
    {
      "epoch": 0.2568218298555377,
      "grad_norm": 2.4727301792403638,
      "learning_rate": 9.26977464283579e-06,
      "loss": 0.6218,
      "step": 480
    },
    {
      "epoch": 0.2594970572498662,
      "grad_norm": 2.5289972466377906,
      "learning_rate": 9.245291565469007e-06,
      "loss": 0.5927,
      "step": 485
    },
    {
      "epoch": 0.26217228464419473,
      "grad_norm": 2.4439695346708765,
      "learning_rate": 9.220438240599813e-06,
      "loss": 0.6207,
      "step": 490
    },
    {
      "epoch": 0.26484751203852325,
      "grad_norm": 2.5321941557330914,
      "learning_rate": 9.19521683577814e-06,
      "loss": 0.6036,
      "step": 495
    },
    {
      "epoch": 0.2675227394328518,
      "grad_norm": 2.6711898378305117,
      "learning_rate": 9.169629550655532e-06,
      "loss": 0.6146,
      "step": 500
    },
    {
      "epoch": 0.27019796682718034,
      "grad_norm": 2.656233077443392,
      "learning_rate": 9.143678616793299e-06,
      "loss": 0.5851,
      "step": 505
    },
    {
      "epoch": 0.27287319422150885,
      "grad_norm": 2.4406113611708684,
      "learning_rate": 9.117366297467899e-06,
      "loss": 0.6103,
      "step": 510
    },
    {
      "epoch": 0.27554842161583737,
      "grad_norm": 2.4790430946398656,
      "learning_rate": 9.090694887473539e-06,
      "loss": 0.6112,
      "step": 515
    },
    {
      "epoch": 0.2782236490101659,
      "grad_norm": 2.543863410127765,
      "learning_rate": 9.063666712922054e-06,
      "loss": 0.5993,
      "step": 520
    },
    {
      "epoch": 0.2808988764044944,
      "grad_norm": 2.4241050160355586,
      "learning_rate": 9.036284131040027e-06,
      "loss": 0.5915,
      "step": 525
    },
    {
      "epoch": 0.2835741037988229,
      "grad_norm": 2.4022785872888774,
      "learning_rate": 9.008549529963202e-06,
      "loss": 0.5786,
      "step": 530
    },
    {
      "epoch": 0.28624933119315144,
      "grad_norm": 2.426209034424006,
      "learning_rate": 8.98046532852822e-06,
      "loss": 0.6024,
      "step": 535
    },
    {
      "epoch": 0.28892455858747995,
      "grad_norm": 2.576690122159131,
      "learning_rate": 8.952033976061651e-06,
      "loss": 0.604,
      "step": 540
    },
    {
      "epoch": 0.29159978598180847,
      "grad_norm": 2.3829850107227957,
      "learning_rate": 8.923257952166391e-06,
      "loss": 0.605,
      "step": 545
    },
    {
      "epoch": 0.294275013376137,
      "grad_norm": 2.4150085111527386,
      "learning_rate": 8.894139766505391e-06,
      "loss": 0.5758,
      "step": 550
    },
    {
      "epoch": 0.2969502407704655,
      "grad_norm": 2.4302320504990322,
      "learning_rate": 8.864681958582795e-06,
      "loss": 0.5879,
      "step": 555
    },
    {
      "epoch": 0.299625468164794,
      "grad_norm": 2.3024260962101106,
      "learning_rate": 8.834887097522452e-06,
      "loss": 0.5811,
      "step": 560
    },
    {
      "epoch": 0.30230069555912253,
      "grad_norm": 2.658836269132944,
      "learning_rate": 8.80475778184386e-06,
      "loss": 0.5935,
      "step": 565
    },
    {
      "epoch": 0.30497592295345105,
      "grad_norm": 2.3872106733021194,
      "learning_rate": 8.774296639235527e-06,
      "loss": 0.5822,
      "step": 570
    },
    {
      "epoch": 0.30765115034777957,
      "grad_norm": 2.471165118682251,
      "learning_rate": 8.743506326325814e-06,
      "loss": 0.59,
      "step": 575
    },
    {
      "epoch": 0.3103263777421081,
      "grad_norm": 2.4868757117121727,
      "learning_rate": 8.712389528451236e-06,
      "loss": 0.5883,
      "step": 580
    },
    {
      "epoch": 0.3130016051364366,
      "grad_norm": 2.4138883516606224,
      "learning_rate": 8.680948959422266e-06,
      "loss": 0.5742,
      "step": 585
    },
    {
      "epoch": 0.3156768325307651,
      "grad_norm": 2.507100593913838,
      "learning_rate": 8.649187361286641e-06,
      "loss": 0.5898,
      "step": 590
    },
    {
      "epoch": 0.31835205992509363,
      "grad_norm": 2.4100835783974643,
      "learning_rate": 8.617107504090239e-06,
      "loss": 0.5697,
      "step": 595
    },
    {
      "epoch": 0.32102728731942215,
      "grad_norm": 3.2433002290475343,
      "learning_rate": 8.584712185635477e-06,
      "loss": 0.5831,
      "step": 600
    },
    {
      "epoch": 0.32370251471375067,
      "grad_norm": 2.3571626089226743,
      "learning_rate": 8.552004231237308e-06,
      "loss": 0.5565,
      "step": 605
    },
    {
      "epoch": 0.3263777421080792,
      "grad_norm": 2.4172002061114326,
      "learning_rate": 8.518986493476819e-06,
      "loss": 0.5624,
      "step": 610
    },
    {
      "epoch": 0.3290529695024077,
      "grad_norm": 2.3953355643764143,
      "learning_rate": 8.485661851952443e-06,
      "loss": 0.5765,
      "step": 615
    },
    {
      "epoch": 0.3317281968967362,
      "grad_norm": 2.286011388064113,
      "learning_rate": 8.452033213028822e-06,
      "loss": 0.5658,
      "step": 620
    },
    {
      "epoch": 0.33440342429106473,
      "grad_norm": 2.648351244045331,
      "learning_rate": 8.418103509583323e-06,
      "loss": 0.5648,
      "step": 625
    },
    {
      "epoch": 0.33707865168539325,
      "grad_norm": 2.5355298785191813,
      "learning_rate": 8.383875700750272e-06,
      "loss": 0.5607,
      "step": 630
    },
    {
      "epoch": 0.33975387907972177,
      "grad_norm": 2.317259690798318,
      "learning_rate": 8.349352771662848e-06,
      "loss": 0.5779,
      "step": 635
    },
    {
      "epoch": 0.3424291064740503,
      "grad_norm": 2.2963059012347946,
      "learning_rate": 8.314537733192762e-06,
      "loss": 0.5791,
      "step": 640
    },
    {
      "epoch": 0.3451043338683788,
      "grad_norm": 2.3822664292618714,
      "learning_rate": 8.279433621687658e-06,
      "loss": 0.5664,
      "step": 645
    },
    {
      "epoch": 0.3477795612627073,
      "grad_norm": 2.405118103743639,
      "learning_rate": 8.2440434987063e-06,
      "loss": 0.5886,
      "step": 650
    },
    {
      "epoch": 0.35045478865703583,
      "grad_norm": 2.4776915022762425,
      "learning_rate": 8.208370450751568e-06,
      "loss": 0.5581,
      "step": 655
    },
    {
      "epoch": 0.35313001605136435,
      "grad_norm": 2.2672895652439675,
      "learning_rate": 8.172417589001275e-06,
      "loss": 0.5775,
      "step": 660
    },
    {
      "epoch": 0.35580524344569286,
      "grad_norm": 2.3099031427696533,
      "learning_rate": 8.136188049036817e-06,
      "loss": 0.5547,
      "step": 665
    },
    {
      "epoch": 0.3584804708400214,
      "grad_norm": 2.3577031703387408,
      "learning_rate": 8.099684990569717e-06,
      "loss": 0.5659,
      "step": 670
    },
    {
      "epoch": 0.3611556982343499,
      "grad_norm": 2.3991198530773383,
      "learning_rate": 8.06291159716606e-06,
      "loss": 0.5619,
      "step": 675
    },
    {
      "epoch": 0.3638309256286784,
      "grad_norm": 2.5099299299975297,
      "learning_rate": 8.025871075968828e-06,
      "loss": 0.5435,
      "step": 680
    },
    {
      "epoch": 0.36650615302300693,
      "grad_norm": 2.4698105344888814,
      "learning_rate": 7.988566657418202e-06,
      "loss": 0.5406,
      "step": 685
    },
    {
      "epoch": 0.36918138041733545,
      "grad_norm": 2.4691995327954577,
      "learning_rate": 7.951001594969827e-06,
      "loss": 0.5299,
      "step": 690
    },
    {
      "epoch": 0.37185660781166396,
      "grad_norm": 2.4264629578672716,
      "learning_rate": 7.91317916481106e-06,
      "loss": 0.5576,
      "step": 695
    },
    {
      "epoch": 0.37453183520599254,
      "grad_norm": 2.551658003203302,
      "learning_rate": 7.875102665575241e-06,
      "loss": 0.543,
      "step": 700
    },
    {
      "epoch": 0.37720706260032105,
      "grad_norm": 6.107040089103125,
      "learning_rate": 7.83677541805401e-06,
      "loss": 0.5466,
      "step": 705
    },
    {
      "epoch": 0.37988228999464957,
      "grad_norm": 2.579913197271776,
      "learning_rate": 7.798200764907691e-06,
      "loss": 0.5392,
      "step": 710
    },
    {
      "epoch": 0.3825575173889781,
      "grad_norm": 2.401363945793991,
      "learning_rate": 7.759382070373755e-06,
      "loss": 0.5441,
      "step": 715
    },
    {
      "epoch": 0.3852327447833066,
      "grad_norm": 2.3279189361371366,
      "learning_rate": 7.720322719973433e-06,
      "loss": 0.546,
      "step": 720
    },
    {
      "epoch": 0.3879079721776351,
      "grad_norm": 2.5184518351120855,
      "learning_rate": 7.68102612021643e-06,
      "loss": 0.5433,
      "step": 725
    },
    {
      "epoch": 0.39058319957196364,
      "grad_norm": 2.3841613948426184,
      "learning_rate": 7.641495698303844e-06,
      "loss": 0.5291,
      "step": 730
    },
    {
      "epoch": 0.39325842696629215,
      "grad_norm": 2.342459814244885,
      "learning_rate": 7.60173490182926e-06,
      "loss": 0.5101,
      "step": 735
    },
    {
      "epoch": 0.39593365436062067,
      "grad_norm": 2.492506659424794,
      "learning_rate": 7.5617471984780885e-06,
      "loss": 0.5454,
      "step": 740
    },
    {
      "epoch": 0.3986088817549492,
      "grad_norm": 2.515570336738518,
      "learning_rate": 7.521536075725106e-06,
      "loss": 0.5367,
      "step": 745
    },
    {
      "epoch": 0.4012841091492777,
      "grad_norm": 2.460454444661722,
      "learning_rate": 7.481105040530334e-06,
      "loss": 0.5207,
      "step": 750
    },
    {
      "epoch": 0.4039593365436062,
      "grad_norm": 2.4664528062162603,
      "learning_rate": 7.440457619033155e-06,
      "loss": 0.5368,
      "step": 755
    },
    {
      "epoch": 0.40663456393793473,
      "grad_norm": 2.458451790530426,
      "learning_rate": 7.3995973562448065e-06,
      "loss": 0.541,
      "step": 760
    },
    {
      "epoch": 0.40930979133226325,
      "grad_norm": 2.388965484607956,
      "learning_rate": 7.358527815739192e-06,
      "loss": 0.5263,
      "step": 765
    },
    {
      "epoch": 0.41198501872659177,
      "grad_norm": 2.350342470688958,
      "learning_rate": 7.317252579342096e-06,
      "loss": 0.5189,
      "step": 770
    },
    {
      "epoch": 0.4146602461209203,
      "grad_norm": 2.334939004267835,
      "learning_rate": 7.275775246818802e-06,
      "loss": 0.5355,
      "step": 775
    },
    {
      "epoch": 0.4173354735152488,
      "grad_norm": 2.466335488768805,
      "learning_rate": 7.23409943556014e-06,
      "loss": 0.5411,
      "step": 780
    },
    {
      "epoch": 0.4200107009095773,
      "grad_norm": 2.3840732023117575,
      "learning_rate": 7.192228780266997e-06,
      "loss": 0.5103,
      "step": 785
    },
    {
      "epoch": 0.42268592830390583,
      "grad_norm": 2.2931802265917054,
      "learning_rate": 7.150166932633328e-06,
      "loss": 0.5204,
      "step": 790
    },
    {
      "epoch": 0.42536115569823435,
      "grad_norm": 2.4464517807290287,
      "learning_rate": 7.1079175610276775e-06,
      "loss": 0.5145,
      "step": 795
    },
    {
      "epoch": 0.42803638309256287,
      "grad_norm": 3.062201453967221,
      "learning_rate": 7.065484350173242e-06,
      "loss": 0.5089,
      "step": 800
    },
    {
      "epoch": 0.4307116104868914,
      "grad_norm": 2.3975551612190507,
      "learning_rate": 7.022871000826519e-06,
      "loss": 0.518,
      "step": 805
    },
    {
      "epoch": 0.4333868378812199,
      "grad_norm": 2.393531302974144,
      "learning_rate": 6.980081229454545e-06,
      "loss": 0.5296,
      "step": 810
    },
    {
      "epoch": 0.4360620652755484,
      "grad_norm": 2.3389658387842145,
      "learning_rate": 6.937118767910771e-06,
      "loss": 0.5225,
      "step": 815
    },
    {
      "epoch": 0.43873729266987693,
      "grad_norm": 2.304590616472174,
      "learning_rate": 6.893987363109595e-06,
      "loss": 0.5174,
      "step": 820
    },
    {
      "epoch": 0.44141252006420545,
      "grad_norm": 2.2893102909832703,
      "learning_rate": 6.850690776699574e-06,
      "loss": 0.5157,
      "step": 825
    },
    {
      "epoch": 0.44408774745853397,
      "grad_norm": 2.3528835058549693,
      "learning_rate": 6.807232784735363e-06,
      "loss": 0.5186,
      "step": 830
    },
    {
      "epoch": 0.4467629748528625,
      "grad_norm": 2.3960495391474916,
      "learning_rate": 6.763617177348394e-06,
      "loss": 0.494,
      "step": 835
    },
    {
      "epoch": 0.449438202247191,
      "grad_norm": 2.391730693829397,
      "learning_rate": 6.719847758416316e-06,
      "loss": 0.4952,
      "step": 840
    },
    {
      "epoch": 0.4521134296415195,
      "grad_norm": 2.3102028249018027,
      "learning_rate": 6.675928345231248e-06,
      "loss": 0.4945,
      "step": 845
    },
    {
      "epoch": 0.45478865703584803,
      "grad_norm": 2.340089688669794,
      "learning_rate": 6.631862768166861e-06,
      "loss": 0.5206,
      "step": 850
    },
    {
      "epoch": 0.45746388443017655,
      "grad_norm": 2.3869663318928818,
      "learning_rate": 6.587654870344318e-06,
      "loss": 0.4904,
      "step": 855
    },
    {
      "epoch": 0.46013911182450506,
      "grad_norm": 2.3186471369565016,
      "learning_rate": 6.543308507297094e-06,
      "loss": 0.4955,
      "step": 860
    },
    {
      "epoch": 0.4628143392188336,
      "grad_norm": 2.348787884210591,
      "learning_rate": 6.498827546634733e-06,
      "loss": 0.4857,
      "step": 865
    },
    {
      "epoch": 0.4654895666131621,
      "grad_norm": 2.3022368035042313,
      "learning_rate": 6.454215867705526e-06,
      "loss": 0.5017,
      "step": 870
    },
    {
      "epoch": 0.4681647940074906,
      "grad_norm": 2.2741836629628804,
      "learning_rate": 6.409477361258188e-06,
      "loss": 0.5068,
      "step": 875
    },
    {
      "epoch": 0.47084002140181913,
      "grad_norm": 2.360316687024575,
      "learning_rate": 6.364615929102531e-06,
      "loss": 0.494,
      "step": 880
    },
    {
      "epoch": 0.47351524879614765,
      "grad_norm": 2.379176924172674,
      "learning_rate": 6.319635483769164e-06,
      "loss": 0.4897,
      "step": 885
    },
    {
      "epoch": 0.47619047619047616,
      "grad_norm": 2.3319354135997843,
      "learning_rate": 6.274539948168279e-06,
      "loss": 0.5032,
      "step": 890
    },
    {
      "epoch": 0.47886570358480474,
      "grad_norm": 2.3550908404304067,
      "learning_rate": 6.229333255247511e-06,
      "loss": 0.4881,
      "step": 895
    },
    {
      "epoch": 0.48154093097913325,
      "grad_norm": 2.3143929315173044,
      "learning_rate": 6.184019347648939e-06,
      "loss": 0.4836,
      "step": 900
    },
    {
      "epoch": 0.48421615837346177,
      "grad_norm": 2.48991053989977,
      "learning_rate": 6.138602177365218e-06,
      "loss": 0.4929,
      "step": 905
    },
    {
      "epoch": 0.4868913857677903,
      "grad_norm": 2.3913793797288783,
      "learning_rate": 6.093085705394934e-06,
      "loss": 0.4693,
      "step": 910
    },
    {
      "epoch": 0.4895666131621188,
      "grad_norm": 2.2543254040593763,
      "learning_rate": 6.04747390139713e-06,
      "loss": 0.4743,
      "step": 915
    },
    {
      "epoch": 0.4922418405564473,
      "grad_norm": 2.348339630964883,
      "learning_rate": 6.001770743345108e-06,
      "loss": 0.4823,
      "step": 920
    },
    {
      "epoch": 0.49491706795077584,
      "grad_norm": 2.2449795227494005,
      "learning_rate": 5.9559802171794955e-06,
      "loss": 0.4846,
      "step": 925
    },
    {
      "epoch": 0.49759229534510435,
      "grad_norm": 2.241213857361928,
      "learning_rate": 5.9101063164606165e-06,
      "loss": 0.4805,
      "step": 930
    },
    {
      "epoch": 0.5002675227394329,
      "grad_norm": 2.4169638599052523,
      "learning_rate": 5.864153042020191e-06,
      "loss": 0.4691,
      "step": 935
    },
    {
      "epoch": 0.5029427501337613,
      "grad_norm": 2.25529384343331,
      "learning_rate": 5.818124401612416e-06,
      "loss": 0.4899,
      "step": 940
    },
    {
      "epoch": 0.5056179775280899,
      "grad_norm": 2.383950239796953,
      "learning_rate": 5.7720244095644305e-06,
      "loss": 0.4741,
      "step": 945
    },
    {
      "epoch": 0.5082932049224184,
      "grad_norm": 2.502424957725384,
      "learning_rate": 5.725857086426216e-06,
      "loss": 0.4771,
      "step": 950
    },
    {
      "epoch": 0.5109684323167469,
      "grad_norm": 2.3038774447416808,
      "learning_rate": 5.679626458619947e-06,
      "loss": 0.4703,
      "step": 955
    },
    {
      "epoch": 0.5136436597110754,
      "grad_norm": 2.346899662548698,
      "learning_rate": 5.633336558088823e-06,
      "loss": 0.4887,
      "step": 960
    },
    {
      "epoch": 0.516318887105404,
      "grad_norm": 2.2355161245769843,
      "learning_rate": 5.586991421945445e-06,
      "loss": 0.4656,
      "step": 965
    },
    {
      "epoch": 0.5189941144997324,
      "grad_norm": 2.3292739097458153,
      "learning_rate": 5.540595092119709e-06,
      "loss": 0.4585,
      "step": 970
    },
    {
      "epoch": 0.521669341894061,
      "grad_norm": 2.4214982730972414,
      "learning_rate": 5.494151615006307e-06,
      "loss": 0.4694,
      "step": 975
    },
    {
      "epoch": 0.5243445692883895,
      "grad_norm": 2.3116711052429353,
      "learning_rate": 5.44766504111181e-06,
      "loss": 0.4527,
      "step": 980
    },
    {
      "epoch": 0.527019796682718,
      "grad_norm": 2.36653247169498,
      "learning_rate": 5.401139424701427e-06,
      "loss": 0.4663,
      "step": 985
    },
    {
      "epoch": 0.5296950240770465,
      "grad_norm": 2.41785279589817,
      "learning_rate": 5.354578823445404e-06,
      "loss": 0.4645,
      "step": 990
    },
    {
      "epoch": 0.5323702514713751,
      "grad_norm": 2.2795559605311935,
      "learning_rate": 5.307987298065145e-06,
      "loss": 0.4688,
      "step": 995
    },
    {
      "epoch": 0.5350454788657036,
      "grad_norm": 2.420758151365417,
      "learning_rate": 5.26136891197906e-06,
      "loss": 0.4772,
      "step": 1000
    },
    {
      "epoch": 0.5377207062600321,
      "grad_norm": 2.286837120031368,
      "learning_rate": 5.214727730948181e-06,
      "loss": 0.4579,
      "step": 1005
    },
    {
      "epoch": 0.5403959336543607,
      "grad_norm": 2.5357314724486684,
      "learning_rate": 5.1680678227215705e-06,
      "loss": 0.4448,
      "step": 1010
    },
    {
      "epoch": 0.5430711610486891,
      "grad_norm": 2.4433154714045737,
      "learning_rate": 5.121393256681561e-06,
      "loss": 0.4512,
      "step": 1015
    },
    {
      "epoch": 0.5457463884430177,
      "grad_norm": 2.3443908369260584,
      "learning_rate": 5.07470810348884e-06,
      "loss": 0.445,
      "step": 1020
    },
    {
      "epoch": 0.5484216158373462,
      "grad_norm": 2.341118659715022,
      "learning_rate": 5.02801643472745e-06,
      "loss": 0.4547,
      "step": 1025
    },
    {
      "epoch": 0.5510968432316747,
      "grad_norm": 2.5523488397688445,
      "learning_rate": 4.98132232254967e-06,
      "loss": 0.4551,
      "step": 1030
    },
    {
      "epoch": 0.5537720706260032,
      "grad_norm": 2.341888099144205,
      "learning_rate": 4.934629839320885e-06,
      "loss": 0.447,
      "step": 1035
    },
    {
      "epoch": 0.5564472980203318,
      "grad_norm": 2.367969391536302,
      "learning_rate": 4.88794305726441e-06,
      "loss": 0.4562,
      "step": 1040
    },
    {
      "epoch": 0.5591225254146602,
      "grad_norm": 2.4141297608989505,
      "learning_rate": 4.841266048106343e-06,
      "loss": 0.4588,
      "step": 1045
    },
    {
      "epoch": 0.5617977528089888,
      "grad_norm": 2.2942217661884463,
      "learning_rate": 4.794602882720448e-06,
      "loss": 0.4519,
      "step": 1050
    },
    {
      "epoch": 0.5644729802033173,
      "grad_norm": 2.4388470304507184,
      "learning_rate": 4.747957630773124e-06,
      "loss": 0.4381,
      "step": 1055
    },
    {
      "epoch": 0.5671482075976458,
      "grad_norm": 2.426362017801771,
      "learning_rate": 4.701334360368473e-06,
      "loss": 0.4389,
      "step": 1060
    },
    {
      "epoch": 0.5698234349919743,
      "grad_norm": 2.187091633024644,
      "learning_rate": 4.654737137693508e-06,
      "loss": 0.4443,
      "step": 1065
    },
    {
      "epoch": 0.5724986623863029,
      "grad_norm": 2.2703979543148227,
      "learning_rate": 4.6081700266635195e-06,
      "loss": 0.4495,
      "step": 1070
    },
    {
      "epoch": 0.5751738897806313,
      "grad_norm": 2.583776287215632,
      "learning_rate": 4.561637088567654e-06,
      "loss": 0.4461,
      "step": 1075
    },
    {
      "epoch": 0.5778491171749599,
      "grad_norm": 2.2365322610979295,
      "learning_rate": 4.51514238171471e-06,
      "loss": 0.4529,
      "step": 1080
    },
    {
      "epoch": 0.5805243445692884,
      "grad_norm": 2.180037600496683,
      "learning_rate": 4.468689961079195e-06,
      "loss": 0.4323,
      "step": 1085
    },
    {
      "epoch": 0.5831995719636169,
      "grad_norm": 2.081559354724233,
      "learning_rate": 4.4222838779476866e-06,
      "loss": 0.4214,
      "step": 1090
    },
    {
      "epoch": 0.5858747993579454,
      "grad_norm": 2.3076982125413372,
      "learning_rate": 4.375928179565494e-06,
      "loss": 0.4468,
      "step": 1095
    },
    {
      "epoch": 0.588550026752274,
      "grad_norm": 2.283053955784425,
      "learning_rate": 4.329626908783685e-06,
      "loss": 0.4417,
      "step": 1100
    },
    {
      "epoch": 0.5912252541466024,
      "grad_norm": 2.254830049137223,
      "learning_rate": 4.2833841037065e-06,
      "loss": 0.4423,
      "step": 1105
    },
    {
      "epoch": 0.593900481540931,
      "grad_norm": 2.3109882572246687,
      "learning_rate": 4.237203797339169e-06,
      "loss": 0.4431,
      "step": 1110
    },
    {
      "epoch": 0.5965757089352595,
      "grad_norm": 2.3244488841789597,
      "learning_rate": 4.191090017236177e-06,
      "loss": 0.4283,
      "step": 1115
    },
    {
      "epoch": 0.599250936329588,
      "grad_norm": 2.257838237616692,
      "learning_rate": 4.145046785150013e-06,
      "loss": 0.433,
      "step": 1120
    },
    {
      "epoch": 0.6019261637239165,
      "grad_norm": 2.1584220334955293,
      "learning_rate": 4.09907811668041e-06,
      "loss": 0.4162,
      "step": 1125
    },
    {
      "epoch": 0.6046013911182451,
      "grad_norm": 2.298272375780168,
      "learning_rate": 4.0531880209241356e-06,
      "loss": 0.426,
      "step": 1130
    },
    {
      "epoch": 0.6072766185125735,
      "grad_norm": 2.215799524593221,
      "learning_rate": 4.0073805001253405e-06,
      "loss": 0.4326,
      "step": 1135
    },
    {
      "epoch": 0.6099518459069021,
      "grad_norm": 2.2218734417372414,
      "learning_rate": 3.961659549326512e-06,
      "loss": 0.4227,
      "step": 1140
    },
    {
      "epoch": 0.6126270733012306,
      "grad_norm": 2.208228128926267,
      "learning_rate": 3.916029156020044e-06,
      "loss": 0.4341,
      "step": 1145
    },
    {
      "epoch": 0.6153023006955591,
      "grad_norm": 2.306119953673732,
      "learning_rate": 3.870493299800484e-06,
      "loss": 0.4173,
      "step": 1150
    },
    {
      "epoch": 0.6179775280898876,
      "grad_norm": 2.1515931379516977,
      "learning_rate": 3.82505595201745e-06,
      "loss": 0.4133,
      "step": 1155
    },
    {
      "epoch": 0.6206527554842162,
      "grad_norm": 2.2524460175233503,
      "learning_rate": 3.7797210754292766e-06,
      "loss": 0.4162,
      "step": 1160
    },
    {
      "epoch": 0.6233279828785446,
      "grad_norm": 2.301799592604272,
      "learning_rate": 3.7344926238574074e-06,
      "loss": 0.4006,
      "step": 1165
    },
    {
      "epoch": 0.6260032102728732,
      "grad_norm": 2.1524308460470527,
      "learning_rate": 3.6893745418415692e-06,
      "loss": 0.4263,
      "step": 1170
    },
    {
      "epoch": 0.6286784376672017,
      "grad_norm": 2.2748089398628597,
      "learning_rate": 3.6443707642957526e-06,
      "loss": 0.4205,
      "step": 1175
    },
    {
      "epoch": 0.6313536650615302,
      "grad_norm": 2.2999788424441974,
      "learning_rate": 3.5994852161650386e-06,
      "loss": 0.4114,
      "step": 1180
    },
    {
      "epoch": 0.6340288924558587,
      "grad_norm": 2.250922812696413,
      "learning_rate": 3.5547218120832807e-06,
      "loss": 0.4272,
      "step": 1185
    },
    {
      "epoch": 0.6367041198501873,
      "grad_norm": 2.199876026491523,
      "learning_rate": 3.5100844560317028e-06,
      "loss": 0.4263,
      "step": 1190
    },
    {
      "epoch": 0.6393793472445158,
      "grad_norm": 2.4231067314017607,
      "learning_rate": 3.465577040998417e-06,
      "loss": 0.4081,
      "step": 1195
    },
    {
      "epoch": 0.6420545746388443,
      "grad_norm": 2.329769969490132,
      "learning_rate": 3.4212034486388972e-06,
      "loss": 0.4162,
      "step": 1200
    },
    {
      "epoch": 0.6447298020331729,
      "grad_norm": 2.2358270007097603,
      "learning_rate": 3.376967548937457e-06,
      "loss": 0.4104,
      "step": 1205
    },
    {
      "epoch": 0.6474050294275013,
      "grad_norm": 2.232847334212105,
      "learning_rate": 3.332873199869719e-06,
      "loss": 0.4224,
      "step": 1210
    },
    {
      "epoch": 0.6500802568218299,
      "grad_norm": 2.2160061126201525,
      "learning_rate": 3.2889242470661553e-06,
      "loss": 0.4225,
      "step": 1215
    },
    {
      "epoch": 0.6527554842161584,
      "grad_norm": 2.336455552359997,
      "learning_rate": 3.245124523476699e-06,
      "loss": 0.413,
      "step": 1220
    },
    {
      "epoch": 0.6554307116104869,
      "grad_norm": 2.180496144947931,
      "learning_rate": 3.2014778490364484e-06,
      "loss": 0.4101,
      "step": 1225
    },
    {
      "epoch": 0.6581059390048154,
      "grad_norm": 2.208527174103595,
      "learning_rate": 3.157988030332526e-06,
      "loss": 0.4028,
      "step": 1230
    },
    {
      "epoch": 0.660781166399144,
      "grad_norm": 2.1964688689281195,
      "learning_rate": 3.1146588602720884e-06,
      "loss": 0.3957,
      "step": 1235
    },
    {
      "epoch": 0.6634563937934724,
      "grad_norm": 2.259624100422769,
      "learning_rate": 3.0714941177515307e-06,
      "loss": 0.4073,
      "step": 1240
    },
    {
      "epoch": 0.666131621187801,
      "grad_norm": 2.193290344140555,
      "learning_rate": 3.0284975673269175e-06,
      "loss": 0.4093,
      "step": 1245
    },
    {
      "epoch": 0.6688068485821295,
      "grad_norm": 2.1954151969989586,
      "learning_rate": 2.9856729588856615e-06,
      "loss": 0.4117,
      "step": 1250
    },
    {
      "epoch": 0.671482075976458,
      "grad_norm": 2.2138053404473195,
      "learning_rate": 2.9430240273194844e-06,
      "loss": 0.4082,
      "step": 1255
    },
    {
      "epoch": 0.6741573033707865,
      "grad_norm": 2.257221465226075,
      "learning_rate": 2.9005544921986774e-06,
      "loss": 0.4069,
      "step": 1260
    },
    {
      "epoch": 0.6768325307651151,
      "grad_norm": 2.1775802118360277,
      "learning_rate": 2.858268057447712e-06,
      "loss": 0.3973,
      "step": 1265
    },
    {
      "epoch": 0.6795077581594435,
      "grad_norm": 2.436885814724992,
      "learning_rate": 2.8161684110221987e-06,
      "loss": 0.403,
      "step": 1270
    },
    {
      "epoch": 0.6821829855537721,
      "grad_norm": 2.2209153390752077,
      "learning_rate": 2.7742592245872523e-06,
      "loss": 0.4023,
      "step": 1275
    },
    {
      "epoch": 0.6848582129481006,
      "grad_norm": 2.2109764032679537,
      "learning_rate": 2.7325441531972685e-06,
      "loss": 0.3994,
      "step": 1280
    },
    {
      "epoch": 0.6875334403424291,
      "grad_norm": 2.1795809319816333,
      "learning_rate": 2.691026834977161e-06,
      "loss": 0.3986,
      "step": 1285
    },
    {
      "epoch": 0.6902086677367576,
      "grad_norm": 2.2391940715660676,
      "learning_rate": 2.649710890805055e-06,
      "loss": 0.3935,
      "step": 1290
    },
    {
      "epoch": 0.6928838951310862,
      "grad_norm": 2.2035616343677322,
      "learning_rate": 2.6085999239965094e-06,
      "loss": 0.387,
      "step": 1295
    },
    {
      "epoch": 0.6955591225254146,
      "grad_norm": 2.22131504261394,
      "learning_rate": 2.567697519990249e-06,
      "loss": 0.3973,
      "step": 1300
    },
    {
      "epoch": 0.6982343499197432,
      "grad_norm": 2.098207954779385,
      "learning_rate": 2.52700724603547e-06,
      "loss": 0.3871,
      "step": 1305
    },
    {
      "epoch": 0.7009095773140717,
      "grad_norm": 2.2983786280371348,
      "learning_rate": 2.4865326508807274e-06,
      "loss": 0.3977,
      "step": 1310
    },
    {
      "epoch": 0.7035848047084002,
      "grad_norm": 2.319405000990969,
      "learning_rate": 2.446277264464431e-06,
      "loss": 0.4034,
      "step": 1315
    },
    {
      "epoch": 0.7062600321027287,
      "grad_norm": 2.109060490520534,
      "learning_rate": 2.406244597606994e-06,
      "loss": 0.3811,
      "step": 1320
    },
    {
      "epoch": 0.7089352594970573,
      "grad_norm": 2.177607834388425,
      "learning_rate": 2.3664381417046362e-06,
      "loss": 0.3786,
      "step": 1325
    },
    {
      "epoch": 0.7116104868913857,
      "grad_norm": 2.1527244194926785,
      "learning_rate": 2.3268613684248846e-06,
      "loss": 0.3747,
      "step": 1330
    },
    {
      "epoch": 0.7142857142857143,
      "grad_norm": 2.205331372771754,
      "learning_rate": 2.287517729403802e-06,
      "loss": 0.4021,
      "step": 1335
    },
    {
      "epoch": 0.7169609416800428,
      "grad_norm": 2.248309322312752,
      "learning_rate": 2.2484106559449527e-06,
      "loss": 0.3822,
      "step": 1340
    },
    {
      "epoch": 0.7196361690743713,
      "grad_norm": 2.253569974888193,
      "learning_rate": 2.2095435587201487e-06,
      "loss": 0.3933,
      "step": 1345
    },
    {
      "epoch": 0.7223113964686998,
      "grad_norm": 2.1475317223597834,
      "learning_rate": 2.1709198274719908e-06,
      "loss": 0.383,
      "step": 1350
    },
    {
      "epoch": 0.7249866238630284,
      "grad_norm": 2.156341132086858,
      "learning_rate": 2.1325428307182357e-06,
      "loss": 0.3937,
      "step": 1355
    },
    {
      "epoch": 0.7276618512573568,
      "grad_norm": 2.3908806324651497,
      "learning_rate": 2.0944159154580225e-06,
      "loss": 0.384,
      "step": 1360
    },
    {
      "epoch": 0.7303370786516854,
      "grad_norm": 2.1302675049467847,
      "learning_rate": 2.056542406879957e-06,
      "loss": 0.3985,
      "step": 1365
    },
    {
      "epoch": 0.7330123060460139,
      "grad_norm": 2.1818319060840143,
      "learning_rate": 2.018925608072118e-06,
      "loss": 0.3911,
      "step": 1370
    },
    {
      "epoch": 0.7356875334403424,
      "grad_norm": 2.3780828415180326,
      "learning_rate": 1.981568799733979e-06,
      "loss": 0.3948,
      "step": 1375
    },
    {
      "epoch": 0.7383627608346709,
      "grad_norm": 2.1564398789314896,
      "learning_rate": 1.9444752398902874e-06,
      "loss": 0.393,
      "step": 1380
    },
    {
      "epoch": 0.7410379882289995,
      "grad_norm": 2.225312239443466,
      "learning_rate": 1.907648163606925e-06,
      "loss": 0.3797,
      "step": 1385
    },
    {
      "epoch": 0.7437132156233279,
      "grad_norm": 2.1676481802841594,
      "learning_rate": 1.871090782708756e-06,
      "loss": 0.3717,
      "step": 1390
    },
    {
      "epoch": 0.7463884430176565,
      "grad_norm": 2.1201954498907543,
      "learning_rate": 1.834806285499519e-06,
      "loss": 0.3847,
      "step": 1395
    },
    {
      "epoch": 0.7490636704119851,
      "grad_norm": 2.0493679026239997,
      "learning_rate": 1.7987978364837649e-06,
      "loss": 0.3783,
      "step": 1400
    },
    {
      "epoch": 0.7517388978063135,
      "grad_norm": 2.240394083021461,
      "learning_rate": 1.7630685760908623e-06,
      "loss": 0.3787,
      "step": 1405
    },
    {
      "epoch": 0.7544141252006421,
      "grad_norm": 2.2507322502146,
      "learning_rate": 1.727621620401112e-06,
      "loss": 0.3839,
      "step": 1410
    },
    {
      "epoch": 0.7570893525949706,
      "grad_norm": 2.050018308953964,
      "learning_rate": 1.6924600608739843e-06,
      "loss": 0.3805,
      "step": 1415
    },
    {
      "epoch": 0.7597645799892991,
      "grad_norm": 2.216364526482266,
      "learning_rate": 1.6575869640784998e-06,
      "loss": 0.3874,
      "step": 1420
    },
    {
      "epoch": 0.7624398073836276,
      "grad_norm": 2.415575248697165,
      "learning_rate": 1.6230053714257821e-06,
      "loss": 0.3813,
      "step": 1425
    },
    {
      "epoch": 0.7651150347779562,
      "grad_norm": 2.2306031322832034,
      "learning_rate": 1.588718298903803e-06,
      "loss": 0.3864,
      "step": 1430
    },
    {
      "epoch": 0.7677902621722846,
      "grad_norm": 2.111010889830229,
      "learning_rate": 1.554728736814356e-06,
      "loss": 0.3802,
      "step": 1435
    },
    {
      "epoch": 0.7704654895666132,
      "grad_norm": 2.121564095465321,
      "learning_rate": 1.5210396495122481e-06,
      "loss": 0.3754,
      "step": 1440
    },
    {
      "epoch": 0.7731407169609417,
      "grad_norm": 2.120725109177726,
      "learning_rate": 1.4876539751467806e-06,
      "loss": 0.3689,
      "step": 1445
    },
    {
      "epoch": 0.7758159443552702,
      "grad_norm": 2.3772037200731435,
      "learning_rate": 1.45457462540549e-06,
      "loss": 0.3814,
      "step": 1450
    },
    {
      "epoch": 0.7784911717495987,
      "grad_norm": 2.2229585889968937,
      "learning_rate": 1.4218044852602176e-06,
      "loss": 0.3647,
      "step": 1455
    },
    {
      "epoch": 0.7811663991439273,
      "grad_norm": 2.1273873598379405,
      "learning_rate": 1.3893464127154976e-06,
      "loss": 0.3641,
      "step": 1460
    },
    {
      "epoch": 0.7838416265382557,
      "grad_norm": 2.238369501132279,
      "learning_rate": 1.3572032385592999e-06,
      "loss": 0.3736,
      "step": 1465
    },
    {
      "epoch": 0.7865168539325843,
      "grad_norm": 2.367411256466404,
      "learning_rate": 1.325377766116146e-06,
      "loss": 0.3695,
      "step": 1470
    },
    {
      "epoch": 0.7891920813269128,
      "grad_norm": 2.154614847558025,
      "learning_rate": 1.293872771002625e-06,
      "loss": 0.3668,
      "step": 1475
    },
    {
      "epoch": 0.7918673087212413,
      "grad_norm": 2.0973822868623198,
      "learning_rate": 1.2626910008853154e-06,
      "loss": 0.3605,
      "step": 1480
    },
    {
      "epoch": 0.7945425361155698,
      "grad_norm": 2.2044842996768637,
      "learning_rate": 1.231835175241155e-06,
      "loss": 0.3608,
      "step": 1485
    },
    {
      "epoch": 0.7972177635098984,
      "grad_norm": 2.1338982634951984,
      "learning_rate": 1.2013079851202642e-06,
      "loss": 0.3633,
      "step": 1490
    },
    {
      "epoch": 0.7998929909042268,
      "grad_norm": 2.26851173489404,
      "learning_rate": 1.1711120929112507e-06,
      "loss": 0.369,
      "step": 1495
    },
    {
      "epoch": 0.8025682182985554,
      "grad_norm": 2.1372863507277753,
      "learning_rate": 1.141250132109009e-06,
      "loss": 0.3828,
      "step": 1500
    },
    {
      "epoch": 0.8052434456928839,
      "grad_norm": 2.1821710728521206,
      "learning_rate": 1.1117247070850534e-06,
      "loss": 0.3751,
      "step": 1505
    },
    {
      "epoch": 0.8079186730872124,
      "grad_norm": 2.289464427582727,
      "learning_rate": 1.0825383928603656e-06,
      "loss": 0.3689,
      "step": 1510
    },
    {
      "epoch": 0.8105939004815409,
      "grad_norm": 2.144427258095427,
      "learning_rate": 1.0536937348808341e-06,
      "loss": 0.3625,
      "step": 1515
    },
    {
      "epoch": 0.8132691278758695,
      "grad_norm": 2.1622070889222695,
      "learning_rate": 1.0251932487952437e-06,
      "loss": 0.3724,
      "step": 1520
    },
    {
      "epoch": 0.8159443552701979,
      "grad_norm": 2.343707532992404,
      "learning_rate": 9.97039420235884e-07,
      "loss": 0.3685,
      "step": 1525
    },
    {
      "epoch": 0.8186195826645265,
      "grad_norm": 2.044139501535542,
      "learning_rate": 9.692347046017647e-07,
      "loss": 0.3593,
      "step": 1530
    },
    {
      "epoch": 0.821294810058855,
      "grad_norm": 2.070900945818719,
      "learning_rate": 9.417815268444719e-07,
      "loss": 0.3795,
      "step": 1535
    },
    {
      "epoch": 0.8239700374531835,
      "grad_norm": 2.202544122311645,
      "learning_rate": 9.146822812566819e-07,
      "loss": 0.3628,
      "step": 1540
    },
    {
      "epoch": 0.826645264847512,
      "grad_norm": 2.2552592796377535,
      "learning_rate": 8.879393312633405e-07,
      "loss": 0.3626,
      "step": 1545
    },
    {
      "epoch": 0.8293204922418406,
      "grad_norm": 2.056735418503588,
      "learning_rate": 8.615550092155478e-07,
      "loss": 0.3633,
      "step": 1550
    },
    {
      "epoch": 0.831995719636169,
      "grad_norm": 2.2605962525988486,
      "learning_rate": 8.355316161871369e-07,
      "loss": 0.3576,
      "step": 1555
    },
    {
      "epoch": 0.8346709470304976,
      "grad_norm": 1.9325644963312696,
      "learning_rate": 8.098714217739928e-07,
      "loss": 0.3592,
      "step": 1560
    },
    {
      "epoch": 0.8373461744248261,
      "grad_norm": 2.062402471762311,
      "learning_rate": 7.845766638961172e-07,
      "loss": 0.3471,
      "step": 1565
    },
    {
      "epoch": 0.8400214018191546,
      "grad_norm": 1.881539114622424,
      "learning_rate": 7.596495486024402e-07,
      "loss": 0.3514,
      "step": 1570
    },
    {
      "epoch": 0.8426966292134831,
      "grad_norm": 1.950588612928137,
      "learning_rate": 7.350922498784335e-07,
      "loss": 0.353,
      "step": 1575
    },
    {
      "epoch": 0.8453718566078117,
      "grad_norm": 2.1988421073847575,
      "learning_rate": 7.109069094565024e-07,
      "loss": 0.3681,
      "step": 1580
    },
    {
      "epoch": 0.8480470840021401,
      "grad_norm": 2.113659026819843,
      "learning_rate": 6.870956366291998e-07,
      "loss": 0.3544,
      "step": 1585
    },
    {
      "epoch": 0.8507223113964687,
      "grad_norm": 2.33180266595057,
      "learning_rate": 6.636605080652686e-07,
      "loss": 0.3724,
      "step": 1590
    },
    {
      "epoch": 0.8533975387907973,
      "grad_norm": 1.867424119206801,
      "learning_rate": 6.406035676285244e-07,
      "loss": 0.362,
      "step": 1595
    },
    {
      "epoch": 0.8560727661851257,
      "grad_norm": 2.0665126499518767,
      "learning_rate": 6.179268261996052e-07,
      "loss": 0.3595,
      "step": 1600
    },
    {
      "epoch": 0.8587479935794543,
      "grad_norm": 1.9884903895103176,
      "learning_rate": 5.956322615005928e-07,
      "loss": 0.3575,
      "step": 1605
    },
    {
      "epoch": 0.8614232209737828,
      "grad_norm": 2.2045330930282083,
      "learning_rate": 5.737218179225318e-07,
      "loss": 0.3606,
      "step": 1610
    },
    {
      "epoch": 0.8640984483681113,
      "grad_norm": 2.055450620418203,
      "learning_rate": 5.521974063558477e-07,
      "loss": 0.3674,
      "step": 1615
    },
    {
      "epoch": 0.8667736757624398,
      "grad_norm": 1.9535432905188745,
      "learning_rate": 5.310609040236963e-07,
      "loss": 0.3554,
      "step": 1620
    },
    {
      "epoch": 0.8694489031567684,
      "grad_norm": 2.119370192450794,
      "learning_rate": 5.103141543182389e-07,
      "loss": 0.3526,
      "step": 1625
    },
    {
      "epoch": 0.8721241305510968,
      "grad_norm": 1.954481761832286,
      "learning_rate": 4.89958966639878e-07,
      "loss": 0.3481,
      "step": 1630
    },
    {
      "epoch": 0.8747993579454254,
      "grad_norm": 2.133312045046774,
      "learning_rate": 4.6999711623944787e-07,
      "loss": 0.3472,
      "step": 1635
    },
    {
      "epoch": 0.8774745853397539,
      "grad_norm": 2.180255754156536,
      "learning_rate": 4.504303440633928e-07,
      "loss": 0.3511,
      "step": 1640
    },
    {
      "epoch": 0.8801498127340824,
      "grad_norm": 2.1358891158089612,
      "learning_rate": 4.3126035660193076e-07,
      "loss": 0.3571,
      "step": 1645
    },
    {
      "epoch": 0.8828250401284109,
      "grad_norm": 1.84315836717447,
      "learning_rate": 4.124888257402243e-07,
      "loss": 0.3624,
      "step": 1650
    },
    {
      "epoch": 0.8855002675227395,
      "grad_norm": 2.0554826944518663,
      "learning_rate": 3.9411738861256934e-07,
      "loss": 0.3579,
      "step": 1655
    },
    {
      "epoch": 0.8881754949170679,
      "grad_norm": 2.4601678197896897,
      "learning_rate": 3.7614764745961377e-07,
      "loss": 0.3531,
      "step": 1660
    },
    {
      "epoch": 0.8908507223113965,
      "grad_norm": 2.012754969035083,
      "learning_rate": 3.585811694886232e-07,
      "loss": 0.349,
      "step": 1665
    },
    {
      "epoch": 0.893525949705725,
      "grad_norm": 2.0385968561479237,
      "learning_rate": 3.4141948673679593e-07,
      "loss": 0.3616,
      "step": 1670
    },
    {
      "epoch": 0.8962011771000535,
      "grad_norm": 2.244778290107756,
      "learning_rate": 3.2466409593764734e-07,
      "loss": 0.3564,
      "step": 1675
    },
    {
      "epoch": 0.898876404494382,
      "grad_norm": 1.9854065627744804,
      "learning_rate": 3.083164583904802e-07,
      "loss": 0.3612,
      "step": 1680
    },
    {
      "epoch": 0.9015516318887106,
      "grad_norm": 1.9752335861803505,
      "learning_rate": 2.923779998329318e-07,
      "loss": 0.3564,
      "step": 1685
    },
    {
      "epoch": 0.904226859283039,
      "grad_norm": 2.158162194461494,
      "learning_rate": 2.76850110316636e-07,
      "loss": 0.3529,
      "step": 1690
    },
    {
      "epoch": 0.9069020866773676,
      "grad_norm": 2.0891285570667555,
      "learning_rate": 2.617341440859883e-07,
| "loss": 0.3492, | |
| "step": 1695 | |
| }, | |
| { | |
| "epoch": 0.9095773140716961, | |
| "grad_norm": 2.0909554368961816, | |
| "learning_rate": 2.470314194600376e-07, | |
| "loss": 0.3468, | |
| "step": 1700 | |
| }, | |
| { | |
| "epoch": 0.9122525414660246, | |
| "grad_norm": 2.1078161689569015, | |
| "learning_rate": 2.3274321871751436e-07, | |
| "loss": 0.3461, | |
| "step": 1705 | |
| }, | |
| { | |
| "epoch": 0.9149277688603531, | |
| "grad_norm": 2.2260290109873155, | |
| "learning_rate": 2.1887078798499272e-07, | |
| "loss": 0.3422, | |
| "step": 1710 | |
| }, | |
| { | |
| "epoch": 0.9176029962546817, | |
| "grad_norm": 1.962831389446335, | |
| "learning_rate": 2.0541533712821527e-07, | |
| "loss": 0.3519, | |
| "step": 1715 | |
| }, | |
| { | |
| "epoch": 0.9202782236490101, | |
| "grad_norm": 1.930590364778258, | |
| "learning_rate": 1.923780396465741e-07, | |
| "loss": 0.3495, | |
| "step": 1720 | |
| }, | |
| { | |
| "epoch": 0.9229534510433387, | |
| "grad_norm": 2.021344220414449, | |
| "learning_rate": 1.7976003257076823e-07, | |
| "loss": 0.3488, | |
| "step": 1725 | |
| }, | |
| { | |
| "epoch": 0.9256286784376672, | |
| "grad_norm": 2.247214559592625, | |
| "learning_rate": 1.6756241636363413e-07, | |
| "loss": 0.3559, | |
| "step": 1730 | |
| }, | |
| { | |
| "epoch": 0.9283039058319957, | |
| "grad_norm": 2.134980518478257, | |
| "learning_rate": 1.557862548241762e-07, | |
| "loss": 0.3489, | |
| "step": 1735 | |
| }, | |
| { | |
| "epoch": 0.9309791332263242, | |
| "grad_norm": 2.0441140794451655, | |
| "learning_rate": 1.4443257499478447e-07, | |
| "loss": 0.3642, | |
| "step": 1740 | |
| }, | |
| { | |
| "epoch": 0.9336543606206528, | |
| "grad_norm": 2.109259605080949, | |
| "learning_rate": 1.3350236707166508e-07, | |
| "loss": 0.3587, | |
| "step": 1745 | |
| }, | |
| { | |
| "epoch": 0.9363295880149812, | |
| "grad_norm": 1.9915781898230962, | |
| "learning_rate": 1.229965843184805e-07, | |
| "loss": 0.3525, | |
| "step": 1750 | |
| }, | |
| { | |
| "epoch": 0.9390048154093098, | |
| "grad_norm": 2.127228440866396, | |
| "learning_rate": 1.1291614298321097e-07, | |
| "loss": 0.3432, | |
| "step": 1755 | |
| }, | |
| { | |
| "epoch": 0.9416800428036383, | |
| "grad_norm": 2.0576461493114833, | |
| "learning_rate": 1.0326192221824738e-07, | |
| "loss": 0.3535, | |
| "step": 1760 | |
| }, | |
| { | |
| "epoch": 0.9443552701979668, | |
| "grad_norm": 1.9060966657777367, | |
| "learning_rate": 9.403476400371425e-08, | |
| "loss": 0.357, | |
| "step": 1765 | |
| }, | |
| { | |
| "epoch": 0.9470304975922953, | |
| "grad_norm": 2.1105664370498967, | |
| "learning_rate": 8.523547307404179e-08, | |
| "loss": 0.3522, | |
| "step": 1770 | |
| }, | |
| { | |
| "epoch": 0.9497057249866239, | |
| "grad_norm": 1.9675175424050448, | |
| "learning_rate": 7.686481684777758e-08, | |
| "loss": 0.3507, | |
| "step": 1775 | |
| }, | |
| { | |
| "epoch": 0.9523809523809523, | |
| "grad_norm": 2.042054668983028, | |
| "learning_rate": 6.89235253606596e-08, | |
| "loss": 0.3433, | |
| "step": 1780 | |
| }, | |
| { | |
| "epoch": 0.9550561797752809, | |
| "grad_norm": 2.1508400346866097, | |
| "learning_rate": 6.141229120194714e-08, | |
| "loss": 0.339, | |
| "step": 1785 | |
| }, | |
| { | |
| "epoch": 0.9577314071696095, | |
| "grad_norm": 2.2052606813491997, | |
| "learning_rate": 5.4331769454016306e-08, | |
| "loss": 0.3517, | |
| "step": 1790 | |
| }, | |
| { | |
| "epoch": 0.9604066345639379, | |
| "grad_norm": 2.198267503179661, | |
| "learning_rate": 4.76825776352291e-08, | |
| "loss": 0.3551, | |
| "step": 1795 | |
| }, | |
| { | |
| "epoch": 0.9630818619582665, | |
| "grad_norm": 1.908998457696475, | |
| "learning_rate": 4.1465295646076484e-08, | |
| "loss": 0.3478, | |
| "step": 1800 | |
| }, | |
| { | |
| "epoch": 0.965757089352595, | |
| "grad_norm": 1.9536183637810103, | |
| "learning_rate": 3.568046571860384e-08, | |
| "loss": 0.3491, | |
| "step": 1805 | |
| }, | |
| { | |
| "epoch": 0.9684323167469235, | |
| "grad_norm": 1.9559281422900325, | |
| "learning_rate": 3.0328592369120443e-08, | |
| "loss": 0.3458, | |
| "step": 1810 | |
| }, | |
| { | |
| "epoch": 0.971107544141252, | |
| "grad_norm": 2.0469193106737666, | |
| "learning_rate": 2.541014235419914e-08, | |
| "loss": 0.3499, | |
| "step": 1815 | |
| }, | |
| { | |
| "epoch": 0.9737827715355806, | |
| "grad_norm": 2.1719220701561417, | |
| "learning_rate": 2.0925544629967763e-08, | |
| "loss": 0.3513, | |
| "step": 1820 | |
| }, | |
| { | |
| "epoch": 0.976457998929909, | |
| "grad_norm": 2.0542316026685015, | |
| "learning_rate": 1.6875190314700197e-08, | |
| "loss": 0.3469, | |
| "step": 1825 | |
| }, | |
| { | |
| "epoch": 0.9791332263242376, | |
| "grad_norm": 1.9390094223227383, | |
| "learning_rate": 1.3259432654703641e-08, | |
| "loss": 0.3597, | |
| "step": 1830 | |
| }, | |
| { | |
| "epoch": 0.9818084537185661, | |
| "grad_norm": 1.9765858839925157, | |
| "learning_rate": 1.0078586993511052e-08, | |
| "loss": 0.3415, | |
| "step": 1835 | |
| }, | |
| { | |
| "epoch": 0.9844836811128946, | |
| "grad_norm": 2.124520750341455, | |
| "learning_rate": 7.332930744380906e-09, | |
| "loss": 0.3558, | |
| "step": 1840 | |
| }, | |
| { | |
| "epoch": 0.9871589085072231, | |
| "grad_norm": 2.046502566885212, | |
| "learning_rate": 5.0227033660987804e-09, | |
| "loss": 0.3552, | |
| "step": 1845 | |
| }, | |
| { | |
| "epoch": 0.9898341359015517, | |
| "grad_norm": 2.166766019345835, | |
| "learning_rate": 3.1481063420985e-09, | |
| "loss": 0.3501, | |
| "step": 1850 | |
| }, | |
| { | |
| "epoch": 0.9925093632958801, | |
| "grad_norm": 2.1135732207205677, | |
| "learning_rate": 1.7093031628850899e-09, | |
| "loss": 0.342, | |
| "step": 1855 | |
| }, | |
| { | |
| "epoch": 0.9951845906902087, | |
| "grad_norm": 2.0348406821892095, | |
| "learning_rate": 7.064193117806151e-10, | |
| "loss": 0.3518, | |
| "step": 1860 | |
| }, | |
| { | |
| "epoch": 0.9978598180845372, | |
| "grad_norm": 2.0634139080985343, | |
| "learning_rate": 1.3954225397516673e-10, | |
| "loss": 0.3575, | |
| "step": 1865 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "eval_runtime": 3.3837, | |
| "eval_samples_per_second": 2.955, | |
| "eval_steps_per_second": 0.887, | |
| "step": 1869 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "step": 1869, | |
| "total_flos": 195665288232960.0, | |
| "train_loss": 0.5137065978787556, | |
| "train_runtime": 16737.1252, | |
| "train_samples_per_second": 1.786, | |
| "train_steps_per_second": 0.112 | |
| } | |
| ], | |
| "logging_steps": 5, | |
| "max_steps": 1869, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 1, | |
| "save_steps": 100, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": true | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 195665288232960.0, | |
| "train_batch_size": 4, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |
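
The state above is just data, but it is easy to consume programmatically. Below is a minimal sketch of how one might load it and plot the loss curve and learning-rate schedule; it assumes the JSON is saved under the Trainer's default file name `trainer_state.json` and that `matplotlib` is installed — both are assumptions, not part of the log itself.

```python
# Minimal sketch: read a Hugging Face Trainer state file like the one above
# and plot its training loss and learning-rate schedule.
import json

import matplotlib.pyplot as plt

# "trainer_state.json" is the Trainer's default output name (assumed here).
with open("trainer_state.json") as f:
    state = json.load(f)

# Keep only per-step training records: they carry a "loss" key, while the
# eval record and the final aggregate summary in "log_history" do not.
train_logs = [e for e in state["log_history"] if "loss" in e]

steps = [e["step"] for e in train_logs]
losses = [e["loss"] for e in train_logs]
lrs = [e["learning_rate"] for e in train_logs]

fig, (ax_loss, ax_lr) = plt.subplots(1, 2, figsize=(10, 4))
ax_loss.plot(steps, losses)
ax_loss.set_xlabel("step")
ax_loss.set_ylabel("training loss")
ax_lr.plot(steps, lrs)
ax_lr.set_xlabel("step")
ax_lr.set_ylabel("learning rate")
fig.tight_layout()
plt.show()
```

For this particular run the plot would show the warmup-then-decay shape already visible in the raw numbers: the learning rate climbs early on, then falls smoothly to near zero by the final step (1.4e-10 at step 1865 of 1869), while the loss drops from about 1.33 to about 0.35.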