| { | |
| "best_global_step": null, | |
| "best_metric": null, | |
| "best_model_checkpoint": null, | |
| "epoch": 2.0, | |
| "eval_steps": 500, | |
| "global_step": 11670, | |
| "is_hyper_param_search": false, | |
| "is_local_process_zero": true, | |
| "is_world_process_zero": true, | |
| "log_history": [ | |
| { | |
| "epoch": 0.001713796058269066, | |
| "grad_norm": 3.5197439193725586, | |
| "learning_rate": 4.9974293059125964e-05, | |
| "loss": 3.229, | |
| "step": 10 | |
| }, | |
| { | |
| "epoch": 0.003427592116538132, | |
| "grad_norm": 1.788411259651184, | |
| "learning_rate": 4.994572979148815e-05, | |
| "loss": 1.4466, | |
| "step": 20 | |
| }, | |
| { | |
| "epoch": 0.005141388174807198, | |
| "grad_norm": 1.3185161352157593, | |
| "learning_rate": 4.991716652385033e-05, | |
| "loss": 1.0682, | |
| "step": 30 | |
| }, | |
| { | |
| "epoch": 0.006855184233076264, | |
| "grad_norm": 0.9748584628105164, | |
| "learning_rate": 4.9888603256212515e-05, | |
| "loss": 0.9275, | |
| "step": 40 | |
| }, | |
| { | |
| "epoch": 0.00856898029134533, | |
| "grad_norm": 0.9990955591201782, | |
| "learning_rate": 4.9860039988574694e-05, | |
| "loss": 0.8359, | |
| "step": 50 | |
| }, | |
| { | |
| "epoch": 0.010282776349614395, | |
| "grad_norm": 1.0820547342300415, | |
| "learning_rate": 4.9831476720936873e-05, | |
| "loss": 0.7742, | |
| "step": 60 | |
| }, | |
| { | |
| "epoch": 0.011996572407883462, | |
| "grad_norm": 1.0009771585464478, | |
| "learning_rate": 4.980291345329906e-05, | |
| "loss": 0.7429, | |
| "step": 70 | |
| }, | |
| { | |
| "epoch": 0.013710368466152529, | |
| "grad_norm": 0.8651469349861145, | |
| "learning_rate": 4.977435018566124e-05, | |
| "loss": 0.7075, | |
| "step": 80 | |
| }, | |
| { | |
| "epoch": 0.015424164524421594, | |
| "grad_norm": 1.003004789352417, | |
| "learning_rate": 4.9745786918023425e-05, | |
| "loss": 0.7025, | |
| "step": 90 | |
| }, | |
| { | |
| "epoch": 0.01713796058269066, | |
| "grad_norm": 1.044442057609558, | |
| "learning_rate": 4.971722365038561e-05, | |
| "loss": 0.6876, | |
| "step": 100 | |
| }, | |
| { | |
| "epoch": 0.018851756640959727, | |
| "grad_norm": 0.912819504737854, | |
| "learning_rate": 4.968866038274779e-05, | |
| "loss": 0.6582, | |
| "step": 110 | |
| }, | |
| { | |
| "epoch": 0.02056555269922879, | |
| "grad_norm": 0.7841666340827942, | |
| "learning_rate": 4.966009711510997e-05, | |
| "loss": 0.6521, | |
| "step": 120 | |
| }, | |
| { | |
| "epoch": 0.022279348757497857, | |
| "grad_norm": 0.8431994915008545, | |
| "learning_rate": 4.9631533847472155e-05, | |
| "loss": 0.6473, | |
| "step": 130 | |
| }, | |
| { | |
| "epoch": 0.023993144815766924, | |
| "grad_norm": 0.854539692401886, | |
| "learning_rate": 4.9602970579834334e-05, | |
| "loss": 0.6686, | |
| "step": 140 | |
| }, | |
| { | |
| "epoch": 0.02570694087403599, | |
| "grad_norm": 0.8027311563491821, | |
| "learning_rate": 4.957440731219652e-05, | |
| "loss": 0.6357, | |
| "step": 150 | |
| }, | |
| { | |
| "epoch": 0.027420736932305057, | |
| "grad_norm": 0.7218838930130005, | |
| "learning_rate": 4.95458440445587e-05, | |
| "loss": 0.636, | |
| "step": 160 | |
| }, | |
| { | |
| "epoch": 0.02913453299057412, | |
| "grad_norm": 0.8693987727165222, | |
| "learning_rate": 4.9517280776920886e-05, | |
| "loss": 0.6319, | |
| "step": 170 | |
| }, | |
| { | |
| "epoch": 0.030848329048843187, | |
| "grad_norm": 0.7060816884040833, | |
| "learning_rate": 4.9488717509283065e-05, | |
| "loss": 0.6215, | |
| "step": 180 | |
| }, | |
| { | |
| "epoch": 0.032562125107112254, | |
| "grad_norm": 0.8361769914627075, | |
| "learning_rate": 4.9460154241645244e-05, | |
| "loss": 0.6304, | |
| "step": 190 | |
| }, | |
| { | |
| "epoch": 0.03427592116538132, | |
| "grad_norm": 1.2885167598724365, | |
| "learning_rate": 4.943159097400743e-05, | |
| "loss": 0.5948, | |
| "step": 200 | |
| }, | |
| { | |
| "epoch": 0.03598971722365039, | |
| "grad_norm": 0.8330972790718079, | |
| "learning_rate": 4.940302770636961e-05, | |
| "loss": 0.5624, | |
| "step": 210 | |
| }, | |
| { | |
| "epoch": 0.037703513281919454, | |
| "grad_norm": 0.8592369556427002, | |
| "learning_rate": 4.9374464438731795e-05, | |
| "loss": 0.6062, | |
| "step": 220 | |
| }, | |
| { | |
| "epoch": 0.03941730934018852, | |
| "grad_norm": 0.6641691327095032, | |
| "learning_rate": 4.9345901171093975e-05, | |
| "loss": 0.5822, | |
| "step": 230 | |
| }, | |
| { | |
| "epoch": 0.04113110539845758, | |
| "grad_norm": 0.7218849062919617, | |
| "learning_rate": 4.9317337903456154e-05, | |
| "loss": 0.5898, | |
| "step": 240 | |
| }, | |
| { | |
| "epoch": 0.04284490145672665, | |
| "grad_norm": 0.7507635951042175, | |
| "learning_rate": 4.928877463581834e-05, | |
| "loss": 0.5637, | |
| "step": 250 | |
| }, | |
| { | |
| "epoch": 0.044558697514995714, | |
| "grad_norm": 0.6507437229156494, | |
| "learning_rate": 4.926021136818052e-05, | |
| "loss": 0.5765, | |
| "step": 260 | |
| }, | |
| { | |
| "epoch": 0.04627249357326478, | |
| "grad_norm": 0.6712321043014526, | |
| "learning_rate": 4.9231648100542705e-05, | |
| "loss": 0.5637, | |
| "step": 270 | |
| }, | |
| { | |
| "epoch": 0.04798628963153385, | |
| "grad_norm": 0.8501739501953125, | |
| "learning_rate": 4.920308483290489e-05, | |
| "loss": 0.5514, | |
| "step": 280 | |
| }, | |
| { | |
| "epoch": 0.049700085689802914, | |
| "grad_norm": 0.7565245032310486, | |
| "learning_rate": 4.917452156526707e-05, | |
| "loss": 0.5569, | |
| "step": 290 | |
| }, | |
| { | |
| "epoch": 0.05141388174807198, | |
| "grad_norm": 0.7239087224006653, | |
| "learning_rate": 4.914595829762925e-05, | |
| "loss": 0.5468, | |
| "step": 300 | |
| }, | |
| { | |
| "epoch": 0.05312767780634105, | |
| "grad_norm": 0.7810051441192627, | |
| "learning_rate": 4.911739502999143e-05, | |
| "loss": 0.5516, | |
| "step": 310 | |
| }, | |
| { | |
| "epoch": 0.054841473864610114, | |
| "grad_norm": 0.9277384877204895, | |
| "learning_rate": 4.9088831762353615e-05, | |
| "loss": 0.5593, | |
| "step": 320 | |
| }, | |
| { | |
| "epoch": 0.056555269922879174, | |
| "grad_norm": 0.7718018293380737, | |
| "learning_rate": 4.90602684947158e-05, | |
| "loss": 0.5402, | |
| "step": 330 | |
| }, | |
| { | |
| "epoch": 0.05826906598114824, | |
| "grad_norm": 0.6351784467697144, | |
| "learning_rate": 4.903170522707798e-05, | |
| "loss": 0.5221, | |
| "step": 340 | |
| }, | |
| { | |
| "epoch": 0.05998286203941731, | |
| "grad_norm": 0.7788419723510742, | |
| "learning_rate": 4.9003141959440166e-05, | |
| "loss": 0.5186, | |
| "step": 350 | |
| }, | |
| { | |
| "epoch": 0.061696658097686374, | |
| "grad_norm": 0.7004103064537048, | |
| "learning_rate": 4.897457869180234e-05, | |
| "loss": 0.5543, | |
| "step": 360 | |
| }, | |
| { | |
| "epoch": 0.06341045415595545, | |
| "grad_norm": 0.6299038529396057, | |
| "learning_rate": 4.8946015424164525e-05, | |
| "loss": 0.5385, | |
| "step": 370 | |
| }, | |
| { | |
| "epoch": 0.06512425021422451, | |
| "grad_norm": 0.6537446975708008, | |
| "learning_rate": 4.891745215652671e-05, | |
| "loss": 0.5068, | |
| "step": 380 | |
| }, | |
| { | |
| "epoch": 0.06683804627249357, | |
| "grad_norm": 0.7129578590393066, | |
| "learning_rate": 4.888888888888889e-05, | |
| "loss": 0.5386, | |
| "step": 390 | |
| }, | |
| { | |
| "epoch": 0.06855184233076264, | |
| "grad_norm": 0.6344906687736511, | |
| "learning_rate": 4.8860325621251076e-05, | |
| "loss": 0.5402, | |
| "step": 400 | |
| }, | |
| { | |
| "epoch": 0.0702656383890317, | |
| "grad_norm": 0.7094561457633972, | |
| "learning_rate": 4.8831762353613255e-05, | |
| "loss": 0.5, | |
| "step": 410 | |
| }, | |
| { | |
| "epoch": 0.07197943444730077, | |
| "grad_norm": 0.7106287479400635, | |
| "learning_rate": 4.8803199085975434e-05, | |
| "loss": 0.5181, | |
| "step": 420 | |
| }, | |
| { | |
| "epoch": 0.07369323050556983, | |
| "grad_norm": 0.6683750748634338, | |
| "learning_rate": 4.877463581833762e-05, | |
| "loss": 0.525, | |
| "step": 430 | |
| }, | |
| { | |
| "epoch": 0.07540702656383891, | |
| "grad_norm": 0.6109952330589294, | |
| "learning_rate": 4.87460725506998e-05, | |
| "loss": 0.5109, | |
| "step": 440 | |
| }, | |
| { | |
| "epoch": 0.07712082262210797, | |
| "grad_norm": 0.6429896950721741, | |
| "learning_rate": 4.8717509283061986e-05, | |
| "loss": 0.5295, | |
| "step": 450 | |
| }, | |
| { | |
| "epoch": 0.07883461868037704, | |
| "grad_norm": 0.6576216816902161, | |
| "learning_rate": 4.868894601542417e-05, | |
| "loss": 0.5145, | |
| "step": 460 | |
| }, | |
| { | |
| "epoch": 0.0805484147386461, | |
| "grad_norm": 0.7273991107940674, | |
| "learning_rate": 4.866038274778635e-05, | |
| "loss": 0.512, | |
| "step": 470 | |
| }, | |
| { | |
| "epoch": 0.08226221079691516, | |
| "grad_norm": 0.652603268623352, | |
| "learning_rate": 4.863181948014853e-05, | |
| "loss": 0.521, | |
| "step": 480 | |
| }, | |
| { | |
| "epoch": 0.08397600685518423, | |
| "grad_norm": 0.7902228832244873, | |
| "learning_rate": 4.860325621251071e-05, | |
| "loss": 0.5337, | |
| "step": 490 | |
| }, | |
| { | |
| "epoch": 0.0856898029134533, | |
| "grad_norm": 0.6212602257728577, | |
| "learning_rate": 4.8574692944872895e-05, | |
| "loss": 0.5135, | |
| "step": 500 | |
| }, | |
| { | |
| "epoch": 0.08740359897172237, | |
| "grad_norm": 0.6101359724998474, | |
| "learning_rate": 4.854612967723508e-05, | |
| "loss": 0.4812, | |
| "step": 510 | |
| }, | |
| { | |
| "epoch": 0.08911739502999143, | |
| "grad_norm": 0.623332142829895, | |
| "learning_rate": 4.851756640959726e-05, | |
| "loss": 0.4979, | |
| "step": 520 | |
| }, | |
| { | |
| "epoch": 0.0908311910882605, | |
| "grad_norm": 0.6756040453910828, | |
| "learning_rate": 4.848900314195945e-05, | |
| "loss": 0.488, | |
| "step": 530 | |
| }, | |
| { | |
| "epoch": 0.09254498714652956, | |
| "grad_norm": 0.6245468854904175, | |
| "learning_rate": 4.8460439874321626e-05, | |
| "loss": 0.4887, | |
| "step": 540 | |
| }, | |
| { | |
| "epoch": 0.09425878320479864, | |
| "grad_norm": 0.6777324676513672, | |
| "learning_rate": 4.8431876606683805e-05, | |
| "loss": 0.518, | |
| "step": 550 | |
| }, | |
| { | |
| "epoch": 0.0959725792630677, | |
| "grad_norm": 0.6594008207321167, | |
| "learning_rate": 4.840331333904599e-05, | |
| "loss": 0.5027, | |
| "step": 560 | |
| }, | |
| { | |
| "epoch": 0.09768637532133675, | |
| "grad_norm": 0.6377647519111633, | |
| "learning_rate": 4.837475007140817e-05, | |
| "loss": 0.4977, | |
| "step": 570 | |
| }, | |
| { | |
| "epoch": 0.09940017137960583, | |
| "grad_norm": 0.637972354888916, | |
| "learning_rate": 4.8346186803770356e-05, | |
| "loss": 0.5055, | |
| "step": 580 | |
| }, | |
| { | |
| "epoch": 0.10111396743787489, | |
| "grad_norm": 0.5835251808166504, | |
| "learning_rate": 4.8317623536132536e-05, | |
| "loss": 0.4866, | |
| "step": 590 | |
| }, | |
| { | |
| "epoch": 0.10282776349614396, | |
| "grad_norm": 0.5735305547714233, | |
| "learning_rate": 4.8289060268494715e-05, | |
| "loss": 0.4788, | |
| "step": 600 | |
| }, | |
| { | |
| "epoch": 0.10454155955441302, | |
| "grad_norm": 0.643041729927063, | |
| "learning_rate": 4.82604970008569e-05, | |
| "loss": 0.4675, | |
| "step": 610 | |
| }, | |
| { | |
| "epoch": 0.1062553556126821, | |
| "grad_norm": 0.6759035587310791, | |
| "learning_rate": 4.823193373321908e-05, | |
| "loss": 0.4864, | |
| "step": 620 | |
| }, | |
| { | |
| "epoch": 0.10796915167095116, | |
| "grad_norm": 0.6026252508163452, | |
| "learning_rate": 4.8203370465581266e-05, | |
| "loss": 0.5074, | |
| "step": 630 | |
| }, | |
| { | |
| "epoch": 0.10968294772922023, | |
| "grad_norm": 0.6817514300346375, | |
| "learning_rate": 4.8174807197943445e-05, | |
| "loss": 0.4844, | |
| "step": 640 | |
| }, | |
| { | |
| "epoch": 0.11139674378748929, | |
| "grad_norm": 0.5522242188453674, | |
| "learning_rate": 4.814624393030563e-05, | |
| "loss": 0.5001, | |
| "step": 650 | |
| }, | |
| { | |
| "epoch": 0.11311053984575835, | |
| "grad_norm": 0.5717945098876953, | |
| "learning_rate": 4.811768066266781e-05, | |
| "loss": 0.4829, | |
| "step": 660 | |
| }, | |
| { | |
| "epoch": 0.11482433590402742, | |
| "grad_norm": 0.5866938233375549, | |
| "learning_rate": 4.808911739502999e-05, | |
| "loss": 0.5096, | |
| "step": 670 | |
| }, | |
| { | |
| "epoch": 0.11653813196229648, | |
| "grad_norm": 0.5821776390075684, | |
| "learning_rate": 4.8060554127392176e-05, | |
| "loss": 0.49, | |
| "step": 680 | |
| }, | |
| { | |
| "epoch": 0.11825192802056556, | |
| "grad_norm": 0.6838080883026123, | |
| "learning_rate": 4.8031990859754355e-05, | |
| "loss": 0.4756, | |
| "step": 690 | |
| }, | |
| { | |
| "epoch": 0.11996572407883462, | |
| "grad_norm": 0.5338395237922668, | |
| "learning_rate": 4.800342759211654e-05, | |
| "loss": 0.4788, | |
| "step": 700 | |
| }, | |
| { | |
| "epoch": 0.12167952013710369, | |
| "grad_norm": 0.6005649566650391, | |
| "learning_rate": 4.797486432447873e-05, | |
| "loss": 0.476, | |
| "step": 710 | |
| }, | |
| { | |
| "epoch": 0.12339331619537275, | |
| "grad_norm": 0.5598942637443542, | |
| "learning_rate": 4.7946301056840906e-05, | |
| "loss": 0.4721, | |
| "step": 720 | |
| }, | |
| { | |
| "epoch": 0.12510711225364182, | |
| "grad_norm": 0.6488958597183228, | |
| "learning_rate": 4.7917737789203086e-05, | |
| "loss": 0.4608, | |
| "step": 730 | |
| }, | |
| { | |
| "epoch": 0.1268209083119109, | |
| "grad_norm": 0.6719090342521667, | |
| "learning_rate": 4.788917452156527e-05, | |
| "loss": 0.4972, | |
| "step": 740 | |
| }, | |
| { | |
| "epoch": 0.12853470437017994, | |
| "grad_norm": 0.722967803478241, | |
| "learning_rate": 4.786061125392745e-05, | |
| "loss": 0.4663, | |
| "step": 750 | |
| }, | |
| { | |
| "epoch": 0.13024850042844902, | |
| "grad_norm": 0.6082452535629272, | |
| "learning_rate": 4.783204798628964e-05, | |
| "loss": 0.4922, | |
| "step": 760 | |
| }, | |
| { | |
| "epoch": 0.1319622964867181, | |
| "grad_norm": 0.5927706956863403, | |
| "learning_rate": 4.7803484718651816e-05, | |
| "loss": 0.4894, | |
| "step": 770 | |
| }, | |
| { | |
| "epoch": 0.13367609254498714, | |
| "grad_norm": 0.5587890148162842, | |
| "learning_rate": 4.7774921451013995e-05, | |
| "loss": 0.4895, | |
| "step": 780 | |
| }, | |
| { | |
| "epoch": 0.1353898886032562, | |
| "grad_norm": 0.6011940240859985, | |
| "learning_rate": 4.774635818337618e-05, | |
| "loss": 0.4714, | |
| "step": 790 | |
| }, | |
| { | |
| "epoch": 0.13710368466152528, | |
| "grad_norm": 0.6309077143669128, | |
| "learning_rate": 4.771779491573836e-05, | |
| "loss": 0.4896, | |
| "step": 800 | |
| }, | |
| { | |
| "epoch": 0.13881748071979436, | |
| "grad_norm": 0.5483425855636597, | |
| "learning_rate": 4.7689231648100547e-05, | |
| "loss": 0.4906, | |
| "step": 810 | |
| }, | |
| { | |
| "epoch": 0.1405312767780634, | |
| "grad_norm": 0.6498242020606995, | |
| "learning_rate": 4.7660668380462726e-05, | |
| "loss": 0.4845, | |
| "step": 820 | |
| }, | |
| { | |
| "epoch": 0.14224507283633248, | |
| "grad_norm": 0.6233829259872437, | |
| "learning_rate": 4.763210511282491e-05, | |
| "loss": 0.4587, | |
| "step": 830 | |
| }, | |
| { | |
| "epoch": 0.14395886889460155, | |
| "grad_norm": 0.586863100528717, | |
| "learning_rate": 4.760354184518709e-05, | |
| "loss": 0.4577, | |
| "step": 840 | |
| }, | |
| { | |
| "epoch": 0.1456726649528706, | |
| "grad_norm": 0.5762030482292175, | |
| "learning_rate": 4.757497857754927e-05, | |
| "loss": 0.4692, | |
| "step": 850 | |
| }, | |
| { | |
| "epoch": 0.14738646101113967, | |
| "grad_norm": 0.5599485635757446, | |
| "learning_rate": 4.7546415309911456e-05, | |
| "loss": 0.4843, | |
| "step": 860 | |
| }, | |
| { | |
| "epoch": 0.14910025706940874, | |
| "grad_norm": 0.5596937537193298, | |
| "learning_rate": 4.7517852042273636e-05, | |
| "loss": 0.4819, | |
| "step": 870 | |
| }, | |
| { | |
| "epoch": 0.15081405312767782, | |
| "grad_norm": 0.6436209678649902, | |
| "learning_rate": 4.748928877463582e-05, | |
| "loss": 0.4549, | |
| "step": 880 | |
| }, | |
| { | |
| "epoch": 0.15252784918594686, | |
| "grad_norm": 0.6014467477798462, | |
| "learning_rate": 4.746072550699801e-05, | |
| "loss": 0.4606, | |
| "step": 890 | |
| }, | |
| { | |
| "epoch": 0.15424164524421594, | |
| "grad_norm": 0.6054000854492188, | |
| "learning_rate": 4.743216223936019e-05, | |
| "loss": 0.4556, | |
| "step": 900 | |
| }, | |
| { | |
| "epoch": 0.155955441302485, | |
| "grad_norm": 0.5195685029029846, | |
| "learning_rate": 4.7403598971722366e-05, | |
| "loss": 0.4582, | |
| "step": 910 | |
| }, | |
| { | |
| "epoch": 0.15766923736075408, | |
| "grad_norm": 0.6742907762527466, | |
| "learning_rate": 4.7375035704084545e-05, | |
| "loss": 0.4666, | |
| "step": 920 | |
| }, | |
| { | |
| "epoch": 0.15938303341902313, | |
| "grad_norm": 0.8041543960571289, | |
| "learning_rate": 4.734647243644673e-05, | |
| "loss": 0.4533, | |
| "step": 930 | |
| }, | |
| { | |
| "epoch": 0.1610968294772922, | |
| "grad_norm": 0.5654326677322388, | |
| "learning_rate": 4.731790916880892e-05, | |
| "loss": 0.4534, | |
| "step": 940 | |
| }, | |
| { | |
| "epoch": 0.16281062553556128, | |
| "grad_norm": 0.5578976273536682, | |
| "learning_rate": 4.7289345901171097e-05, | |
| "loss": 0.4593, | |
| "step": 950 | |
| }, | |
| { | |
| "epoch": 0.16452442159383032, | |
| "grad_norm": 0.5508532524108887, | |
| "learning_rate": 4.726078263353328e-05, | |
| "loss": 0.4528, | |
| "step": 960 | |
| }, | |
| { | |
| "epoch": 0.1662382176520994, | |
| "grad_norm": 0.5675921440124512, | |
| "learning_rate": 4.7232219365895455e-05, | |
| "loss": 0.4556, | |
| "step": 970 | |
| }, | |
| { | |
| "epoch": 0.16795201371036847, | |
| "grad_norm": 0.5847021341323853, | |
| "learning_rate": 4.720365609825764e-05, | |
| "loss": 0.4573, | |
| "step": 980 | |
| }, | |
| { | |
| "epoch": 0.16966580976863754, | |
| "grad_norm": 0.6159939765930176, | |
| "learning_rate": 4.717509283061983e-05, | |
| "loss": 0.4497, | |
| "step": 990 | |
| }, | |
| { | |
| "epoch": 0.1713796058269066, | |
| "grad_norm": 0.5292489528656006, | |
| "learning_rate": 4.7146529562982006e-05, | |
| "loss": 0.466, | |
| "step": 1000 | |
| }, | |
| { | |
| "epoch": 0.17309340188517566, | |
| "grad_norm": 0.5373420715332031, | |
| "learning_rate": 4.711796629534419e-05, | |
| "loss": 0.4543, | |
| "step": 1010 | |
| }, | |
| { | |
| "epoch": 0.17480719794344474, | |
| "grad_norm": 0.5367053747177124, | |
| "learning_rate": 4.708940302770637e-05, | |
| "loss": 0.4555, | |
| "step": 1020 | |
| }, | |
| { | |
| "epoch": 0.17652099400171378, | |
| "grad_norm": 0.5302269458770752, | |
| "learning_rate": 4.706083976006855e-05, | |
| "loss": 0.4526, | |
| "step": 1030 | |
| }, | |
| { | |
| "epoch": 0.17823479005998286, | |
| "grad_norm": 0.5884714126586914, | |
| "learning_rate": 4.703227649243074e-05, | |
| "loss": 0.4533, | |
| "step": 1040 | |
| }, | |
| { | |
| "epoch": 0.17994858611825193, | |
| "grad_norm": 0.5478530526161194, | |
| "learning_rate": 4.7003713224792916e-05, | |
| "loss": 0.454, | |
| "step": 1050 | |
| }, | |
| { | |
| "epoch": 0.181662382176521, | |
| "grad_norm": 0.5300725698471069, | |
| "learning_rate": 4.69751499571551e-05, | |
| "loss": 0.4413, | |
| "step": 1060 | |
| }, | |
| { | |
| "epoch": 0.18337617823479005, | |
| "grad_norm": 0.5762424468994141, | |
| "learning_rate": 4.694658668951729e-05, | |
| "loss": 0.472, | |
| "step": 1070 | |
| }, | |
| { | |
| "epoch": 0.18508997429305912, | |
| "grad_norm": 0.5916193127632141, | |
| "learning_rate": 4.691802342187947e-05, | |
| "loss": 0.4465, | |
| "step": 1080 | |
| }, | |
| { | |
| "epoch": 0.1868037703513282, | |
| "grad_norm": 0.5725514888763428, | |
| "learning_rate": 4.6889460154241646e-05, | |
| "loss": 0.4694, | |
| "step": 1090 | |
| }, | |
| { | |
| "epoch": 0.18851756640959727, | |
| "grad_norm": 0.6294386386871338, | |
| "learning_rate": 4.6860896886603826e-05, | |
| "loss": 0.4513, | |
| "step": 1100 | |
| }, | |
| { | |
| "epoch": 0.19023136246786632, | |
| "grad_norm": 0.5739086270332336, | |
| "learning_rate": 4.683233361896601e-05, | |
| "loss": 0.4769, | |
| "step": 1110 | |
| }, | |
| { | |
| "epoch": 0.1919451585261354, | |
| "grad_norm": 0.5288265347480774, | |
| "learning_rate": 4.68037703513282e-05, | |
| "loss": 0.4506, | |
| "step": 1120 | |
| }, | |
| { | |
| "epoch": 0.19365895458440446, | |
| "grad_norm": 0.5287862420082092, | |
| "learning_rate": 4.677520708369038e-05, | |
| "loss": 0.436, | |
| "step": 1130 | |
| }, | |
| { | |
| "epoch": 0.1953727506426735, | |
| "grad_norm": 0.6703059077262878, | |
| "learning_rate": 4.674664381605256e-05, | |
| "loss": 0.447, | |
| "step": 1140 | |
| }, | |
| { | |
| "epoch": 0.19708654670094258, | |
| "grad_norm": 0.5269696116447449, | |
| "learning_rate": 4.6718080548414735e-05, | |
| "loss": 0.4742, | |
| "step": 1150 | |
| }, | |
| { | |
| "epoch": 0.19880034275921166, | |
| "grad_norm": 0.5767235159873962, | |
| "learning_rate": 4.668951728077692e-05, | |
| "loss": 0.4422, | |
| "step": 1160 | |
| }, | |
| { | |
| "epoch": 0.20051413881748073, | |
| "grad_norm": 0.5331224799156189, | |
| "learning_rate": 4.666095401313911e-05, | |
| "loss": 0.4441, | |
| "step": 1170 | |
| }, | |
| { | |
| "epoch": 0.20222793487574978, | |
| "grad_norm": 0.6412308216094971, | |
| "learning_rate": 4.663239074550129e-05, | |
| "loss": 0.4354, | |
| "step": 1180 | |
| }, | |
| { | |
| "epoch": 0.20394173093401885, | |
| "grad_norm": 0.519543468952179, | |
| "learning_rate": 4.660382747786347e-05, | |
| "loss": 0.4587, | |
| "step": 1190 | |
| }, | |
| { | |
| "epoch": 0.20565552699228792, | |
| "grad_norm": 0.5464423298835754, | |
| "learning_rate": 4.657526421022565e-05, | |
| "loss": 0.445, | |
| "step": 1200 | |
| }, | |
| { | |
| "epoch": 0.207369323050557, | |
| "grad_norm": 0.5126811861991882, | |
| "learning_rate": 4.654670094258783e-05, | |
| "loss": 0.4336, | |
| "step": 1210 | |
| }, | |
| { | |
| "epoch": 0.20908311910882604, | |
| "grad_norm": 0.49116766452789307, | |
| "learning_rate": 4.651813767495002e-05, | |
| "loss": 0.4542, | |
| "step": 1220 | |
| }, | |
| { | |
| "epoch": 0.21079691516709512, | |
| "grad_norm": 0.562121570110321, | |
| "learning_rate": 4.6489574407312196e-05, | |
| "loss": 0.447, | |
| "step": 1230 | |
| }, | |
| { | |
| "epoch": 0.2125107112253642, | |
| "grad_norm": 0.5438272356987, | |
| "learning_rate": 4.646101113967438e-05, | |
| "loss": 0.4473, | |
| "step": 1240 | |
| }, | |
| { | |
| "epoch": 0.21422450728363324, | |
| "grad_norm": 0.5589945316314697, | |
| "learning_rate": 4.643244787203656e-05, | |
| "loss": 0.4365, | |
| "step": 1250 | |
| }, | |
| { | |
| "epoch": 0.2159383033419023, | |
| "grad_norm": 0.49309206008911133, | |
| "learning_rate": 4.640388460439875e-05, | |
| "loss": 0.4549, | |
| "step": 1260 | |
| }, | |
| { | |
| "epoch": 0.21765209940017138, | |
| "grad_norm": 0.5735007524490356, | |
| "learning_rate": 4.637532133676093e-05, | |
| "loss": 0.434, | |
| "step": 1270 | |
| }, | |
| { | |
| "epoch": 0.21936589545844046, | |
| "grad_norm": 0.5194084048271179, | |
| "learning_rate": 4.6346758069123106e-05, | |
| "loss": 0.4324, | |
| "step": 1280 | |
| }, | |
| { | |
| "epoch": 0.2210796915167095, | |
| "grad_norm": 0.5877565741539001, | |
| "learning_rate": 4.631819480148529e-05, | |
| "loss": 0.4328, | |
| "step": 1290 | |
| }, | |
| { | |
| "epoch": 0.22279348757497858, | |
| "grad_norm": 0.6073413491249084, | |
| "learning_rate": 4.628963153384747e-05, | |
| "loss": 0.4413, | |
| "step": 1300 | |
| }, | |
| { | |
| "epoch": 0.22450728363324765, | |
| "grad_norm": 0.5142035484313965, | |
| "learning_rate": 4.626106826620966e-05, | |
| "loss": 0.4277, | |
| "step": 1310 | |
| }, | |
| { | |
| "epoch": 0.2262210796915167, | |
| "grad_norm": 0.6134445071220398, | |
| "learning_rate": 4.6232504998571843e-05, | |
| "loss": 0.4399, | |
| "step": 1320 | |
| }, | |
| { | |
| "epoch": 0.22793487574978577, | |
| "grad_norm": 0.5537267327308655, | |
| "learning_rate": 4.620394173093402e-05, | |
| "loss": 0.4227, | |
| "step": 1330 | |
| }, | |
| { | |
| "epoch": 0.22964867180805484, | |
| "grad_norm": 0.5451083779335022, | |
| "learning_rate": 4.61753784632962e-05, | |
| "loss": 0.4462, | |
| "step": 1340 | |
| }, | |
| { | |
| "epoch": 0.23136246786632392, | |
| "grad_norm": 0.5471403002738953, | |
| "learning_rate": 4.614681519565839e-05, | |
| "loss": 0.425, | |
| "step": 1350 | |
| }, | |
| { | |
| "epoch": 0.23307626392459296, | |
| "grad_norm": 0.5239073634147644, | |
| "learning_rate": 4.611825192802057e-05, | |
| "loss": 0.4396, | |
| "step": 1360 | |
| }, | |
| { | |
| "epoch": 0.23479005998286204, | |
| "grad_norm": 0.5054474472999573, | |
| "learning_rate": 4.608968866038275e-05, | |
| "loss": 0.4185, | |
| "step": 1370 | |
| }, | |
| { | |
| "epoch": 0.2365038560411311, | |
| "grad_norm": 0.5151476860046387, | |
| "learning_rate": 4.606112539274493e-05, | |
| "loss": 0.4284, | |
| "step": 1380 | |
| }, | |
| { | |
| "epoch": 0.23821765209940018, | |
| "grad_norm": 0.5287209749221802, | |
| "learning_rate": 4.603256212510711e-05, | |
| "loss": 0.4349, | |
| "step": 1390 | |
| }, | |
| { | |
| "epoch": 0.23993144815766923, | |
| "grad_norm": 0.5235722661018372, | |
| "learning_rate": 4.60039988574693e-05, | |
| "loss": 0.4283, | |
| "step": 1400 | |
| }, | |
| { | |
| "epoch": 0.2416452442159383, | |
| "grad_norm": 0.5603968501091003, | |
| "learning_rate": 4.597543558983148e-05, | |
| "loss": 0.4428, | |
| "step": 1410 | |
| }, | |
| { | |
| "epoch": 0.24335904027420738, | |
| "grad_norm": 0.54140704870224, | |
| "learning_rate": 4.594687232219366e-05, | |
| "loss": 0.4289, | |
| "step": 1420 | |
| }, | |
| { | |
| "epoch": 0.24507283633247642, | |
| "grad_norm": 0.5573848485946655, | |
| "learning_rate": 4.591830905455584e-05, | |
| "loss": 0.4377, | |
| "step": 1430 | |
| }, | |
| { | |
| "epoch": 0.2467866323907455, | |
| "grad_norm": 0.5520525574684143, | |
| "learning_rate": 4.588974578691803e-05, | |
| "loss": 0.4365, | |
| "step": 1440 | |
| }, | |
| { | |
| "epoch": 0.24850042844901457, | |
| "grad_norm": 0.6135875582695007, | |
| "learning_rate": 4.586118251928021e-05, | |
| "loss": 0.4573, | |
| "step": 1450 | |
| }, | |
| { | |
| "epoch": 0.25021422450728364, | |
| "grad_norm": 0.5709384679794312, | |
| "learning_rate": 4.583261925164239e-05, | |
| "loss": 0.4293, | |
| "step": 1460 | |
| }, | |
| { | |
| "epoch": 0.2519280205655527, | |
| "grad_norm": 0.49724119901657104, | |
| "learning_rate": 4.580405598400457e-05, | |
| "loss": 0.4393, | |
| "step": 1470 | |
| }, | |
| { | |
| "epoch": 0.2536418166238218, | |
| "grad_norm": 0.5525110363960266, | |
| "learning_rate": 4.577549271636675e-05, | |
| "loss": 0.4281, | |
| "step": 1480 | |
| }, | |
| { | |
| "epoch": 0.25535561268209084, | |
| "grad_norm": 0.5513701438903809, | |
| "learning_rate": 4.574692944872894e-05, | |
| "loss": 0.4395, | |
| "step": 1490 | |
| }, | |
| { | |
| "epoch": 0.2570694087403599, | |
| "grad_norm": 0.4892454743385315, | |
| "learning_rate": 4.5718366181091124e-05, | |
| "loss": 0.4315, | |
| "step": 1500 | |
| }, | |
| { | |
| "epoch": 0.258783204798629, | |
| "grad_norm": 0.5723627209663391, | |
| "learning_rate": 4.56898029134533e-05, | |
| "loss": 0.4402, | |
| "step": 1510 | |
| }, | |
| { | |
| "epoch": 0.26049700085689803, | |
| "grad_norm": 0.5150974988937378, | |
| "learning_rate": 4.566123964581548e-05, | |
| "loss": 0.4367, | |
| "step": 1520 | |
| }, | |
| { | |
| "epoch": 0.2622107969151671, | |
| "grad_norm": 0.5223197340965271, | |
| "learning_rate": 4.563267637817766e-05, | |
| "loss": 0.4217, | |
| "step": 1530 | |
| }, | |
| { | |
| "epoch": 0.2639245929734362, | |
| "grad_norm": 0.4551326036453247, | |
| "learning_rate": 4.560411311053985e-05, | |
| "loss": 0.4222, | |
| "step": 1540 | |
| }, | |
| { | |
| "epoch": 0.2656383890317052, | |
| "grad_norm": 0.5828386545181274, | |
| "learning_rate": 4.5575549842902034e-05, | |
| "loss": 0.4416, | |
| "step": 1550 | |
| }, | |
| { | |
| "epoch": 0.26735218508997427, | |
| "grad_norm": 0.5964780449867249, | |
| "learning_rate": 4.554698657526421e-05, | |
| "loss": 0.4306, | |
| "step": 1560 | |
| }, | |
| { | |
| "epoch": 0.26906598114824337, | |
| "grad_norm": 0.5186784267425537, | |
| "learning_rate": 4.55184233076264e-05, | |
| "loss": 0.4263, | |
| "step": 1570 | |
| }, | |
| { | |
| "epoch": 0.2707797772065124, | |
| "grad_norm": 0.49257805943489075, | |
| "learning_rate": 4.548986003998857e-05, | |
| "loss": 0.4066, | |
| "step": 1580 | |
| }, | |
| { | |
| "epoch": 0.27249357326478146, | |
| "grad_norm": 0.5377402901649475, | |
| "learning_rate": 4.546129677235076e-05, | |
| "loss": 0.4257, | |
| "step": 1590 | |
| }, | |
| { | |
| "epoch": 0.27420736932305056, | |
| "grad_norm": 0.4789324104785919, | |
| "learning_rate": 4.543273350471294e-05, | |
| "loss": 0.444, | |
| "step": 1600 | |
| }, | |
| { | |
| "epoch": 0.2759211653813196, | |
| "grad_norm": 0.5270596742630005, | |
| "learning_rate": 4.540417023707512e-05, | |
| "loss": 0.4391, | |
| "step": 1610 | |
| }, | |
| { | |
| "epoch": 0.2776349614395887, | |
| "grad_norm": 0.5666368007659912, | |
| "learning_rate": 4.537560696943731e-05, | |
| "loss": 0.4238, | |
| "step": 1620 | |
| }, | |
| { | |
| "epoch": 0.27934875749785776, | |
| "grad_norm": 0.5089669823646545, | |
| "learning_rate": 4.534704370179949e-05, | |
| "loss": 0.4322, | |
| "step": 1630 | |
| }, | |
| { | |
| "epoch": 0.2810625535561268, | |
| "grad_norm": 0.5666882991790771, | |
| "learning_rate": 4.531848043416167e-05, | |
| "loss": 0.4238, | |
| "step": 1640 | |
| }, | |
| { | |
| "epoch": 0.2827763496143959, | |
| "grad_norm": 0.48932990431785583, | |
| "learning_rate": 4.528991716652385e-05, | |
| "loss": 0.4275, | |
| "step": 1650 | |
| }, | |
| { | |
| "epoch": 0.28449014567266495, | |
| "grad_norm": 0.5295417904853821, | |
| "learning_rate": 4.526135389888603e-05, | |
| "loss": 0.4321, | |
| "step": 1660 | |
| }, | |
| { | |
| "epoch": 0.286203941730934, | |
| "grad_norm": 0.5406290888786316, | |
| "learning_rate": 4.523279063124822e-05, | |
| "loss": 0.4351, | |
| "step": 1670 | |
| }, | |
| { | |
| "epoch": 0.2879177377892031, | |
| "grad_norm": 0.515723466873169, | |
| "learning_rate": 4.5204227363610404e-05, | |
| "loss": 0.4359, | |
| "step": 1680 | |
| }, | |
| { | |
| "epoch": 0.28963153384747214, | |
| "grad_norm": 0.6831620335578918, | |
| "learning_rate": 4.5175664095972584e-05, | |
| "loss": 0.4173, | |
| "step": 1690 | |
| }, | |
| { | |
| "epoch": 0.2913453299057412, | |
| "grad_norm": 0.4642072319984436, | |
| "learning_rate": 4.514710082833476e-05, | |
| "loss": 0.4148, | |
| "step": 1700 | |
| }, | |
| { | |
| "epoch": 0.2930591259640103, | |
| "grad_norm": 0.5784996747970581, | |
| "learning_rate": 4.511853756069694e-05, | |
| "loss": 0.4344, | |
| "step": 1710 | |
| }, | |
| { | |
| "epoch": 0.29477292202227934, | |
| "grad_norm": 0.46822839975357056, | |
| "learning_rate": 4.508997429305913e-05, | |
| "loss": 0.4192, | |
| "step": 1720 | |
| }, | |
| { | |
| "epoch": 0.29648671808054844, | |
| "grad_norm": 0.5901206731796265, | |
| "learning_rate": 4.5061411025421314e-05, | |
| "loss": 0.4224, | |
| "step": 1730 | |
| }, | |
| { | |
| "epoch": 0.2982005141388175, | |
| "grad_norm": 0.5184633135795593, | |
| "learning_rate": 4.503284775778349e-05, | |
| "loss": 0.4459, | |
| "step": 1740 | |
| }, | |
| { | |
| "epoch": 0.29991431019708653, | |
| "grad_norm": 0.47953465580940247, | |
| "learning_rate": 4.500428449014568e-05, | |
| "loss": 0.4075, | |
| "step": 1750 | |
| }, | |
| { | |
| "epoch": 0.30162810625535563, | |
| "grad_norm": 0.508063554763794, | |
| "learning_rate": 4.497572122250785e-05, | |
| "loss": 0.4201, | |
| "step": 1760 | |
| }, | |
| { | |
| "epoch": 0.3033419023136247, | |
| "grad_norm": 0.5239994525909424, | |
| "learning_rate": 4.494715795487004e-05, | |
| "loss": 0.4097, | |
| "step": 1770 | |
| }, | |
| { | |
| "epoch": 0.3050556983718937, | |
| "grad_norm": 0.47601497173309326, | |
| "learning_rate": 4.4918594687232224e-05, | |
| "loss": 0.4176, | |
| "step": 1780 | |
| }, | |
| { | |
| "epoch": 0.3067694944301628, | |
| "grad_norm": 0.5476956367492676, | |
| "learning_rate": 4.48900314195944e-05, | |
| "loss": 0.4325, | |
| "step": 1790 | |
| }, | |
| { | |
| "epoch": 0.30848329048843187, | |
| "grad_norm": 0.5456777215003967, | |
| "learning_rate": 4.486146815195659e-05, | |
| "loss": 0.4202, | |
| "step": 1800 | |
| }, | |
| { | |
| "epoch": 0.3101970865467009, | |
| "grad_norm": 0.5144990086555481, | |
| "learning_rate": 4.483290488431877e-05, | |
| "loss": 0.4349, | |
| "step": 1810 | |
| }, | |
| { | |
| "epoch": 0.31191088260497, | |
| "grad_norm": 0.483378142118454, | |
| "learning_rate": 4.480434161668095e-05, | |
| "loss": 0.4414, | |
| "step": 1820 | |
| }, | |
| { | |
| "epoch": 0.31362467866323906, | |
| "grad_norm": 0.542691707611084, | |
| "learning_rate": 4.4775778349043134e-05, | |
| "loss": 0.4298, | |
| "step": 1830 | |
| }, | |
| { | |
| "epoch": 0.31533847472150817, | |
| "grad_norm": 0.49622848629951477, | |
| "learning_rate": 4.474721508140531e-05, | |
| "loss": 0.4151, | |
| "step": 1840 | |
| }, | |
| { | |
| "epoch": 0.3170522707797772, | |
| "grad_norm": 0.48325487971305847, | |
| "learning_rate": 4.47186518137675e-05, | |
| "loss": 0.4111, | |
| "step": 1850 | |
| }, | |
| { | |
| "epoch": 0.31876606683804626, | |
| "grad_norm": 0.47797560691833496, | |
| "learning_rate": 4.469008854612968e-05, | |
| "loss": 0.4374, | |
| "step": 1860 | |
| }, | |
| { | |
| "epoch": 0.32047986289631536, | |
| "grad_norm": 0.48413246870040894, | |
| "learning_rate": 4.4661525278491864e-05, | |
| "loss": 0.4144, | |
| "step": 1870 | |
| }, | |
| { | |
| "epoch": 0.3221936589545844, | |
| "grad_norm": 0.48628735542297363, | |
| "learning_rate": 4.463296201085404e-05, | |
| "loss": 0.4199, | |
| "step": 1880 | |
| }, | |
| { | |
| "epoch": 0.32390745501285345, | |
| "grad_norm": 0.5147850513458252, | |
| "learning_rate": 4.460439874321622e-05, | |
| "loss": 0.4185, | |
| "step": 1890 | |
| }, | |
| { | |
| "epoch": 0.32562125107112255, | |
| "grad_norm": 0.5466386079788208, | |
| "learning_rate": 4.457583547557841e-05, | |
| "loss": 0.4086, | |
| "step": 1900 | |
| }, | |
| { | |
| "epoch": 0.3273350471293916, | |
| "grad_norm": 0.5725340247154236, | |
| "learning_rate": 4.454727220794059e-05, | |
| "loss": 0.4036, | |
| "step": 1910 | |
| }, | |
| { | |
| "epoch": 0.32904884318766064, | |
| "grad_norm": 0.5826147794723511, | |
| "learning_rate": 4.4518708940302774e-05, | |
| "loss": 0.4045, | |
| "step": 1920 | |
| }, | |
| { | |
| "epoch": 0.33076263924592975, | |
| "grad_norm": 0.5249732136726379, | |
| "learning_rate": 4.449014567266496e-05, | |
| "loss": 0.4046, | |
| "step": 1930 | |
| }, | |
| { | |
| "epoch": 0.3324764353041988, | |
| "grad_norm": 0.49851375818252563, | |
| "learning_rate": 4.446158240502714e-05, | |
| "loss": 0.4303, | |
| "step": 1940 | |
| }, | |
| { | |
| "epoch": 0.3341902313624679, | |
| "grad_norm": 0.49083590507507324, | |
| "learning_rate": 4.443301913738932e-05, | |
| "loss": 0.4139, | |
| "step": 1950 | |
| }, | |
| { | |
| "epoch": 0.33590402742073694, | |
| "grad_norm": 0.4749123454093933, | |
| "learning_rate": 4.44044558697515e-05, | |
| "loss": 0.4178, | |
| "step": 1960 | |
| }, | |
| { | |
| "epoch": 0.337617823479006, | |
| "grad_norm": 0.474155455827713, | |
| "learning_rate": 4.4375892602113684e-05, | |
| "loss": 0.4198, | |
| "step": 1970 | |
| }, | |
| { | |
| "epoch": 0.3393316195372751, | |
| "grad_norm": 0.48373639583587646, | |
| "learning_rate": 4.434732933447587e-05, | |
| "loss": 0.4289, | |
| "step": 1980 | |
| }, | |
| { | |
| "epoch": 0.34104541559554413, | |
| "grad_norm": 0.4969842731952667, | |
| "learning_rate": 4.431876606683805e-05, | |
| "loss": 0.4071, | |
| "step": 1990 | |
| }, | |
| { | |
| "epoch": 0.3427592116538132, | |
| "grad_norm": 0.4806075990200043, | |
| "learning_rate": 4.429020279920023e-05, | |
| "loss": 0.4005, | |
| "step": 2000 | |
| }, | |
| { | |
| "epoch": 0.3444730077120823, | |
| "grad_norm": 0.5082671642303467, | |
| "learning_rate": 4.4261639531562414e-05, | |
| "loss": 0.4015, | |
| "step": 2010 | |
| }, | |
| { | |
| "epoch": 0.3461868037703513, | |
| "grad_norm": 0.530406653881073, | |
| "learning_rate": 4.423307626392459e-05, | |
| "loss": 0.4169, | |
| "step": 2020 | |
| }, | |
| { | |
| "epoch": 0.34790059982862037, | |
| "grad_norm": 0.4801930785179138, | |
| "learning_rate": 4.420451299628678e-05, | |
| "loss": 0.4212, | |
| "step": 2030 | |
| }, | |
| { | |
| "epoch": 0.3496143958868895, | |
| "grad_norm": 0.4946390688419342, | |
| "learning_rate": 4.417594972864896e-05, | |
| "loss": 0.4171, | |
| "step": 2040 | |
| }, | |
| { | |
| "epoch": 0.3513281919451585, | |
| "grad_norm": 0.4361880123615265, | |
| "learning_rate": 4.4147386461011145e-05, | |
| "loss": 0.4207, | |
| "step": 2050 | |
| }, | |
| { | |
| "epoch": 0.35304198800342756, | |
| "grad_norm": 0.48112979531288147, | |
| "learning_rate": 4.4118823193373324e-05, | |
| "loss": 0.4189, | |
| "step": 2060 | |
| }, | |
| { | |
| "epoch": 0.35475578406169667, | |
| "grad_norm": 0.49672552943229675, | |
| "learning_rate": 4.40902599257355e-05, | |
| "loss": 0.4298, | |
| "step": 2070 | |
| }, | |
| { | |
| "epoch": 0.3564695801199657, | |
| "grad_norm": 0.4827049970626831, | |
| "learning_rate": 4.406169665809769e-05, | |
| "loss": 0.4138, | |
| "step": 2080 | |
| }, | |
| { | |
| "epoch": 0.3581833761782348, | |
| "grad_norm": 0.49597135186195374, | |
| "learning_rate": 4.403313339045987e-05, | |
| "loss": 0.4109, | |
| "step": 2090 | |
| }, | |
| { | |
| "epoch": 0.35989717223650386, | |
| "grad_norm": 0.5000253915786743, | |
| "learning_rate": 4.4004570122822054e-05, | |
| "loss": 0.4219, | |
| "step": 2100 | |
| }, | |
| { | |
| "epoch": 0.3616109682947729, | |
| "grad_norm": 0.5186734795570374, | |
| "learning_rate": 4.397600685518424e-05, | |
| "loss": 0.4304, | |
| "step": 2110 | |
| }, | |
| { | |
| "epoch": 0.363324764353042, | |
| "grad_norm": 0.5129287242889404, | |
| "learning_rate": 4.394744358754642e-05, | |
| "loss": 0.416, | |
| "step": 2120 | |
| }, | |
| { | |
| "epoch": 0.36503856041131105, | |
| "grad_norm": 0.5247070789337158, | |
| "learning_rate": 4.39188803199086e-05, | |
| "loss": 0.414, | |
| "step": 2130 | |
| }, | |
| { | |
| "epoch": 0.3667523564695801, | |
| "grad_norm": 0.4930817186832428, | |
| "learning_rate": 4.389031705227078e-05, | |
| "loss": 0.3926, | |
| "step": 2140 | |
| }, | |
| { | |
| "epoch": 0.3684661525278492, | |
| "grad_norm": 0.49000611901283264, | |
| "learning_rate": 4.3861753784632964e-05, | |
| "loss": 0.4318, | |
| "step": 2150 | |
| }, | |
| { | |
| "epoch": 0.37017994858611825, | |
| "grad_norm": 0.4887852966785431, | |
| "learning_rate": 4.383319051699515e-05, | |
| "loss": 0.4099, | |
| "step": 2160 | |
| }, | |
| { | |
| "epoch": 0.3718937446443873, | |
| "grad_norm": 0.49520814418792725, | |
| "learning_rate": 4.380462724935733e-05, | |
| "loss": 0.4026, | |
| "step": 2170 | |
| }, | |
| { | |
| "epoch": 0.3736075407026564, | |
| "grad_norm": 0.5411486029624939, | |
| "learning_rate": 4.377606398171951e-05, | |
| "loss": 0.4369, | |
| "step": 2180 | |
| }, | |
| { | |
| "epoch": 0.37532133676092544, | |
| "grad_norm": 0.5404260158538818, | |
| "learning_rate": 4.374750071408169e-05, | |
| "loss": 0.4014, | |
| "step": 2190 | |
| }, | |
| { | |
| "epoch": 0.37703513281919454, | |
| "grad_norm": 0.4977183938026428, | |
| "learning_rate": 4.3718937446443874e-05, | |
| "loss": 0.4055, | |
| "step": 2200 | |
| }, | |
| { | |
| "epoch": 0.3787489288774636, | |
| "grad_norm": 0.5122350454330444, | |
| "learning_rate": 4.369037417880606e-05, | |
| "loss": 0.414, | |
| "step": 2210 | |
| }, | |
| { | |
| "epoch": 0.38046272493573263, | |
| "grad_norm": 0.46838364005088806, | |
| "learning_rate": 4.366181091116824e-05, | |
| "loss": 0.3943, | |
| "step": 2220 | |
| }, | |
| { | |
| "epoch": 0.38217652099400173, | |
| "grad_norm": 0.5282347798347473, | |
| "learning_rate": 4.3633247643530425e-05, | |
| "loss": 0.4083, | |
| "step": 2230 | |
| }, | |
| { | |
| "epoch": 0.3838903170522708, | |
| "grad_norm": 0.5209845304489136, | |
| "learning_rate": 4.3604684375892604e-05, | |
| "loss": 0.4067, | |
| "step": 2240 | |
| }, | |
| { | |
| "epoch": 0.3856041131105398, | |
| "grad_norm": 0.47322601079940796, | |
| "learning_rate": 4.3576121108254783e-05, | |
| "loss": 0.3973, | |
| "step": 2250 | |
| }, | |
| { | |
| "epoch": 0.3873179091688089, | |
| "grad_norm": 0.48601487278938293, | |
| "learning_rate": 4.354755784061697e-05, | |
| "loss": 0.3997, | |
| "step": 2260 | |
| }, | |
| { | |
| "epoch": 0.389031705227078, | |
| "grad_norm": 0.5330342054367065, | |
| "learning_rate": 4.351899457297915e-05, | |
| "loss": 0.4055, | |
| "step": 2270 | |
| }, | |
| { | |
| "epoch": 0.390745501285347, | |
| "grad_norm": 0.5453196167945862, | |
| "learning_rate": 4.3490431305341335e-05, | |
| "loss": 0.4296, | |
| "step": 2280 | |
| }, | |
| { | |
| "epoch": 0.3924592973436161, | |
| "grad_norm": 0.4657805860042572, | |
| "learning_rate": 4.346186803770352e-05, | |
| "loss": 0.4034, | |
| "step": 2290 | |
| }, | |
| { | |
| "epoch": 0.39417309340188517, | |
| "grad_norm": 0.5053611993789673, | |
| "learning_rate": 4.34333047700657e-05, | |
| "loss": 0.3915, | |
| "step": 2300 | |
| }, | |
| { | |
| "epoch": 0.39588688946015427, | |
| "grad_norm": 0.46003392338752747, | |
| "learning_rate": 4.340474150242788e-05, | |
| "loss": 0.4058, | |
| "step": 2310 | |
| }, | |
| { | |
| "epoch": 0.3976006855184233, | |
| "grad_norm": 0.5221563577651978, | |
| "learning_rate": 4.337617823479006e-05, | |
| "loss": 0.3929, | |
| "step": 2320 | |
| }, | |
| { | |
| "epoch": 0.39931448157669236, | |
| "grad_norm": 0.5037152767181396, | |
| "learning_rate": 4.3347614967152244e-05, | |
| "loss": 0.407, | |
| "step": 2330 | |
| }, | |
| { | |
| "epoch": 0.40102827763496146, | |
| "grad_norm": 0.4151383340358734, | |
| "learning_rate": 4.331905169951443e-05, | |
| "loss": 0.4127, | |
| "step": 2340 | |
| }, | |
| { | |
| "epoch": 0.4027420736932305, | |
| "grad_norm": 0.45278656482696533, | |
| "learning_rate": 4.329048843187661e-05, | |
| "loss": 0.3931, | |
| "step": 2350 | |
| }, | |
| { | |
| "epoch": 0.40445586975149955, | |
| "grad_norm": 0.4748023748397827, | |
| "learning_rate": 4.3261925164238796e-05, | |
| "loss": 0.4079, | |
| "step": 2360 | |
| }, | |
| { | |
| "epoch": 0.40616966580976865, | |
| "grad_norm": 0.4828534424304962, | |
| "learning_rate": 4.323336189660097e-05, | |
| "loss": 0.4322, | |
| "step": 2370 | |
| }, | |
| { | |
| "epoch": 0.4078834618680377, | |
| "grad_norm": 0.4558246433734894, | |
| "learning_rate": 4.3204798628963154e-05, | |
| "loss": 0.4125, | |
| "step": 2380 | |
| }, | |
| { | |
| "epoch": 0.40959725792630675, | |
| "grad_norm": 0.4439447522163391, | |
| "learning_rate": 4.317623536132534e-05, | |
| "loss": 0.4051, | |
| "step": 2390 | |
| }, | |
| { | |
| "epoch": 0.41131105398457585, | |
| "grad_norm": 0.5642324090003967, | |
| "learning_rate": 4.314767209368752e-05, | |
| "loss": 0.4008, | |
| "step": 2400 | |
| }, | |
| { | |
| "epoch": 0.4130248500428449, | |
| "grad_norm": 0.4604392945766449, | |
| "learning_rate": 4.3119108826049705e-05, | |
| "loss": 0.4047, | |
| "step": 2410 | |
| }, | |
| { | |
| "epoch": 0.414738646101114, | |
| "grad_norm": 0.45364564657211304, | |
| "learning_rate": 4.3090545558411885e-05, | |
| "loss": 0.3789, | |
| "step": 2420 | |
| }, | |
| { | |
| "epoch": 0.41645244215938304, | |
| "grad_norm": 0.47339892387390137, | |
| "learning_rate": 4.3061982290774064e-05, | |
| "loss": 0.3985, | |
| "step": 2430 | |
| }, | |
| { | |
| "epoch": 0.4181662382176521, | |
| "grad_norm": 0.4818185269832611, | |
| "learning_rate": 4.303341902313625e-05, | |
| "loss": 0.4123, | |
| "step": 2440 | |
| }, | |
| { | |
| "epoch": 0.4198800342759212, | |
| "grad_norm": 0.4778873324394226, | |
| "learning_rate": 4.300485575549843e-05, | |
| "loss": 0.3915, | |
| "step": 2450 | |
| }, | |
| { | |
| "epoch": 0.42159383033419023, | |
| "grad_norm": 0.4846336841583252, | |
| "learning_rate": 4.2976292487860615e-05, | |
| "loss": 0.42, | |
| "step": 2460 | |
| }, | |
| { | |
| "epoch": 0.4233076263924593, | |
| "grad_norm": 0.5048242211341858, | |
| "learning_rate": 4.2947729220222794e-05, | |
| "loss": 0.3926, | |
| "step": 2470 | |
| }, | |
| { | |
| "epoch": 0.4250214224507284, | |
| "grad_norm": 0.5179741382598877, | |
| "learning_rate": 4.291916595258498e-05, | |
| "loss": 0.4047, | |
| "step": 2480 | |
| }, | |
| { | |
| "epoch": 0.4267352185089974, | |
| "grad_norm": 0.4281986653804779, | |
| "learning_rate": 4.289060268494716e-05, | |
| "loss": 0.3953, | |
| "step": 2490 | |
| }, | |
| { | |
| "epoch": 0.4284490145672665, | |
| "grad_norm": 0.47407853603363037, | |
| "learning_rate": 4.286203941730934e-05, | |
| "loss": 0.4099, | |
| "step": 2500 | |
| }, | |
| { | |
| "epoch": 0.4301628106255356, | |
| "grad_norm": 0.4928293228149414, | |
| "learning_rate": 4.2833476149671525e-05, | |
| "loss": 0.4039, | |
| "step": 2510 | |
| }, | |
| { | |
| "epoch": 0.4318766066838046, | |
| "grad_norm": 0.45097365975379944, | |
| "learning_rate": 4.2804912882033704e-05, | |
| "loss": 0.401, | |
| "step": 2520 | |
| }, | |
| { | |
| "epoch": 0.43359040274207367, | |
| "grad_norm": 0.5395087003707886, | |
| "learning_rate": 4.277634961439589e-05, | |
| "loss": 0.4043, | |
| "step": 2530 | |
| }, | |
| { | |
| "epoch": 0.43530419880034277, | |
| "grad_norm": 0.48334208130836487, | |
| "learning_rate": 4.2747786346758076e-05, | |
| "loss": 0.4065, | |
| "step": 2540 | |
| }, | |
| { | |
| "epoch": 0.4370179948586118, | |
| "grad_norm": 0.4694972336292267, | |
| "learning_rate": 4.271922307912025e-05, | |
| "loss": 0.4004, | |
| "step": 2550 | |
| }, | |
| { | |
| "epoch": 0.4387317909168809, | |
| "grad_norm": 0.45093536376953125, | |
| "learning_rate": 4.2690659811482435e-05, | |
| "loss": 0.3972, | |
| "step": 2560 | |
| }, | |
| { | |
| "epoch": 0.44044558697514996, | |
| "grad_norm": 0.606305718421936, | |
| "learning_rate": 4.2662096543844614e-05, | |
| "loss": 0.4081, | |
| "step": 2570 | |
| }, | |
| { | |
| "epoch": 0.442159383033419, | |
| "grad_norm": 0.5000024437904358, | |
| "learning_rate": 4.26335332762068e-05, | |
| "loss": 0.4007, | |
| "step": 2580 | |
| }, | |
| { | |
| "epoch": 0.4438731790916881, | |
| "grad_norm": 0.5056565403938293, | |
| "learning_rate": 4.2604970008568986e-05, | |
| "loss": 0.4004, | |
| "step": 2590 | |
| }, | |
| { | |
| "epoch": 0.44558697514995715, | |
| "grad_norm": 0.5103461146354675, | |
| "learning_rate": 4.2576406740931165e-05, | |
| "loss": 0.4072, | |
| "step": 2600 | |
| }, | |
| { | |
| "epoch": 0.4473007712082262, | |
| "grad_norm": 0.5315241813659668, | |
| "learning_rate": 4.2547843473293344e-05, | |
| "loss": 0.4081, | |
| "step": 2610 | |
| }, | |
| { | |
| "epoch": 0.4490145672664953, | |
| "grad_norm": 0.51112961769104, | |
| "learning_rate": 4.251928020565553e-05, | |
| "loss": 0.3999, | |
| "step": 2620 | |
| }, | |
| { | |
| "epoch": 0.45072836332476435, | |
| "grad_norm": 0.5215428471565247, | |
| "learning_rate": 4.249071693801771e-05, | |
| "loss": 0.4108, | |
| "step": 2630 | |
| }, | |
| { | |
| "epoch": 0.4524421593830334, | |
| "grad_norm": 0.43863195180892944, | |
| "learning_rate": 4.2462153670379896e-05, | |
| "loss": 0.3979, | |
| "step": 2640 | |
| }, | |
| { | |
| "epoch": 0.4541559554413025, | |
| "grad_norm": 0.4583381116390228, | |
| "learning_rate": 4.2433590402742075e-05, | |
| "loss": 0.4118, | |
| "step": 2650 | |
| }, | |
| { | |
| "epoch": 0.45586975149957154, | |
| "grad_norm": 0.4846024811267853, | |
| "learning_rate": 4.240502713510426e-05, | |
| "loss": 0.4165, | |
| "step": 2660 | |
| }, | |
| { | |
| "epoch": 0.45758354755784064, | |
| "grad_norm": 0.5279461741447449, | |
| "learning_rate": 4.237646386746644e-05, | |
| "loss": 0.4105, | |
| "step": 2670 | |
| }, | |
| { | |
| "epoch": 0.4592973436161097, | |
| "grad_norm": 0.4175077974796295, | |
| "learning_rate": 4.234790059982862e-05, | |
| "loss": 0.3939, | |
| "step": 2680 | |
| }, | |
| { | |
| "epoch": 0.46101113967437873, | |
| "grad_norm": 0.5505842566490173, | |
| "learning_rate": 4.2319337332190805e-05, | |
| "loss": 0.4048, | |
| "step": 2690 | |
| }, | |
| { | |
| "epoch": 0.46272493573264784, | |
| "grad_norm": 0.4925336539745331, | |
| "learning_rate": 4.2290774064552985e-05, | |
| "loss": 0.4104, | |
| "step": 2700 | |
| }, | |
| { | |
| "epoch": 0.4644387317909169, | |
| "grad_norm": 0.5095216035842896, | |
| "learning_rate": 4.226221079691517e-05, | |
| "loss": 0.4181, | |
| "step": 2710 | |
| }, | |
| { | |
| "epoch": 0.4661525278491859, | |
| "grad_norm": 0.5652039647102356, | |
| "learning_rate": 4.223364752927736e-05, | |
| "loss": 0.3889, | |
| "step": 2720 | |
| }, | |
| { | |
| "epoch": 0.46786632390745503, | |
| "grad_norm": 0.49469706416130066, | |
| "learning_rate": 4.2205084261639536e-05, | |
| "loss": 0.4209, | |
| "step": 2730 | |
| }, | |
| { | |
| "epoch": 0.4695801199657241, | |
| "grad_norm": 0.42379167675971985, | |
| "learning_rate": 4.2176520994001715e-05, | |
| "loss": 0.3982, | |
| "step": 2740 | |
| }, | |
| { | |
| "epoch": 0.4712939160239931, | |
| "grad_norm": 0.5219408273696899, | |
| "learning_rate": 4.2147957726363894e-05, | |
| "loss": 0.3772, | |
| "step": 2750 | |
| }, | |
| { | |
| "epoch": 0.4730077120822622, | |
| "grad_norm": 0.4595051407814026, | |
| "learning_rate": 4.211939445872608e-05, | |
| "loss": 0.3932, | |
| "step": 2760 | |
| }, | |
| { | |
| "epoch": 0.47472150814053127, | |
| "grad_norm": 0.4492412805557251, | |
| "learning_rate": 4.2090831191088266e-05, | |
| "loss": 0.408, | |
| "step": 2770 | |
| }, | |
| { | |
| "epoch": 0.47643530419880037, | |
| "grad_norm": 0.5153499841690063, | |
| "learning_rate": 4.2062267923450446e-05, | |
| "loss": 0.3902, | |
| "step": 2780 | |
| }, | |
| { | |
| "epoch": 0.4781491002570694, | |
| "grad_norm": 0.49975866079330444, | |
| "learning_rate": 4.2033704655812625e-05, | |
| "loss": 0.3845, | |
| "step": 2790 | |
| }, | |
| { | |
| "epoch": 0.47986289631533846, | |
| "grad_norm": 0.4870286285877228, | |
| "learning_rate": 4.2005141388174804e-05, | |
| "loss": 0.4034, | |
| "step": 2800 | |
| }, | |
| { | |
| "epoch": 0.48157669237360756, | |
| "grad_norm": 0.4432912766933441, | |
| "learning_rate": 4.197657812053699e-05, | |
| "loss": 0.3981, | |
| "step": 2810 | |
| }, | |
| { | |
| "epoch": 0.4832904884318766, | |
| "grad_norm": 0.3999868929386139, | |
| "learning_rate": 4.1948014852899176e-05, | |
| "loss": 0.374, | |
| "step": 2820 | |
| }, | |
| { | |
| "epoch": 0.48500428449014565, | |
| "grad_norm": 0.5054958462715149, | |
| "learning_rate": 4.1919451585261355e-05, | |
| "loss": 0.4125, | |
| "step": 2830 | |
| }, | |
| { | |
| "epoch": 0.48671808054841476, | |
| "grad_norm": 0.4698047935962677, | |
| "learning_rate": 4.189088831762354e-05, | |
| "loss": 0.3797, | |
| "step": 2840 | |
| }, | |
| { | |
| "epoch": 0.4884318766066838, | |
| "grad_norm": 0.45830556750297546, | |
| "learning_rate": 4.186232504998572e-05, | |
| "loss": 0.4044, | |
| "step": 2850 | |
| }, | |
| { | |
| "epoch": 0.49014567266495285, | |
| "grad_norm": 0.509774386882782, | |
| "learning_rate": 4.18337617823479e-05, | |
| "loss": 0.3836, | |
| "step": 2860 | |
| }, | |
| { | |
| "epoch": 0.49185946872322195, | |
| "grad_norm": 0.49995678663253784, | |
| "learning_rate": 4.1805198514710086e-05, | |
| "loss": 0.3799, | |
| "step": 2870 | |
| }, | |
| { | |
| "epoch": 0.493573264781491, | |
| "grad_norm": 0.49276742339134216, | |
| "learning_rate": 4.1776635247072265e-05, | |
| "loss": 0.4116, | |
| "step": 2880 | |
| }, | |
| { | |
| "epoch": 0.4952870608397601, | |
| "grad_norm": 0.4521953761577606, | |
| "learning_rate": 4.174807197943445e-05, | |
| "loss": 0.3889, | |
| "step": 2890 | |
| }, | |
| { | |
| "epoch": 0.49700085689802914, | |
| "grad_norm": 0.5152191519737244, | |
| "learning_rate": 4.171950871179663e-05, | |
| "loss": 0.3982, | |
| "step": 2900 | |
| }, | |
| { | |
| "epoch": 0.4987146529562982, | |
| "grad_norm": 0.4679267406463623, | |
| "learning_rate": 4.1690945444158816e-05, | |
| "loss": 0.3947, | |
| "step": 2910 | |
| }, | |
| { | |
| "epoch": 0.5004284490145673, | |
| "grad_norm": 0.45497140288352966, | |
| "learning_rate": 4.1662382176520996e-05, | |
| "loss": 0.3759, | |
| "step": 2920 | |
| }, | |
| { | |
| "epoch": 0.5021422450728363, | |
| "grad_norm": 0.5409018993377686, | |
| "learning_rate": 4.1633818908883175e-05, | |
| "loss": 0.4003, | |
| "step": 2930 | |
| }, | |
| { | |
| "epoch": 0.5038560411311054, | |
| "grad_norm": 0.4464910328388214, | |
| "learning_rate": 4.160525564124536e-05, | |
| "loss": 0.3915, | |
| "step": 2940 | |
| }, | |
| { | |
| "epoch": 0.5055698371893744, | |
| "grad_norm": 0.5109448432922363, | |
| "learning_rate": 4.157669237360755e-05, | |
| "loss": 0.3709, | |
| "step": 2950 | |
| }, | |
| { | |
| "epoch": 0.5072836332476436, | |
| "grad_norm": 0.4784081280231476, | |
| "learning_rate": 4.1548129105969726e-05, | |
| "loss": 0.3896, | |
| "step": 2960 | |
| }, | |
| { | |
| "epoch": 0.5089974293059126, | |
| "grad_norm": 0.4331073462963104, | |
| "learning_rate": 4.1519565838331905e-05, | |
| "loss": 0.3739, | |
| "step": 2970 | |
| }, | |
| { | |
| "epoch": 0.5107112253641817, | |
| "grad_norm": 0.43807411193847656, | |
| "learning_rate": 4.1491002570694085e-05, | |
| "loss": 0.3891, | |
| "step": 2980 | |
| }, | |
| { | |
| "epoch": 0.5124250214224507, | |
| "grad_norm": 0.4344954788684845, | |
| "learning_rate": 4.146243930305627e-05, | |
| "loss": 0.4073, | |
| "step": 2990 | |
| }, | |
| { | |
| "epoch": 0.5141388174807198, | |
| "grad_norm": 0.45541128516197205, | |
| "learning_rate": 4.1433876035418457e-05, | |
| "loss": 0.4017, | |
| "step": 3000 | |
| }, | |
| { | |
| "epoch": 0.5158526135389888, | |
| "grad_norm": 0.4948163330554962, | |
| "learning_rate": 4.1405312767780636e-05, | |
| "loss": 0.3836, | |
| "step": 3010 | |
| }, | |
| { | |
| "epoch": 0.517566409597258, | |
| "grad_norm": 0.5308042764663696, | |
| "learning_rate": 4.137674950014282e-05, | |
| "loss": 0.3793, | |
| "step": 3020 | |
| }, | |
| { | |
| "epoch": 0.519280205655527, | |
| "grad_norm": 0.46851393580436707, | |
| "learning_rate": 4.1348186232505e-05, | |
| "loss": 0.3849, | |
| "step": 3030 | |
| }, | |
| { | |
| "epoch": 0.5209940017137961, | |
| "grad_norm": 0.482305645942688, | |
| "learning_rate": 4.131962296486718e-05, | |
| "loss": 0.3998, | |
| "step": 3040 | |
| }, | |
| { | |
| "epoch": 0.5227077977720651, | |
| "grad_norm": 0.41640397906303406, | |
| "learning_rate": 4.1291059697229366e-05, | |
| "loss": 0.3817, | |
| "step": 3050 | |
| }, | |
| { | |
| "epoch": 0.5244215938303342, | |
| "grad_norm": 0.46141761541366577, | |
| "learning_rate": 4.1262496429591546e-05, | |
| "loss": 0.3731, | |
| "step": 3060 | |
| }, | |
| { | |
| "epoch": 0.5261353898886033, | |
| "grad_norm": 0.46264177560806274, | |
| "learning_rate": 4.123393316195373e-05, | |
| "loss": 0.381, | |
| "step": 3070 | |
| }, | |
| { | |
| "epoch": 0.5278491859468724, | |
| "grad_norm": 0.4680739641189575, | |
| "learning_rate": 4.120536989431591e-05, | |
| "loss": 0.3817, | |
| "step": 3080 | |
| }, | |
| { | |
| "epoch": 0.5295629820051414, | |
| "grad_norm": 0.4986321032047272, | |
| "learning_rate": 4.11768066266781e-05, | |
| "loss": 0.3897, | |
| "step": 3090 | |
| }, | |
| { | |
| "epoch": 0.5312767780634104, | |
| "grad_norm": 0.4697842597961426, | |
| "learning_rate": 4.1148243359040276e-05, | |
| "loss": 0.3843, | |
| "step": 3100 | |
| }, | |
| { | |
| "epoch": 0.5329905741216795, | |
| "grad_norm": 0.4521206319332123, | |
| "learning_rate": 4.1119680091402455e-05, | |
| "loss": 0.3856, | |
| "step": 3110 | |
| }, | |
| { | |
| "epoch": 0.5347043701799485, | |
| "grad_norm": 0.46120381355285645, | |
| "learning_rate": 4.109111682376464e-05, | |
| "loss": 0.3976, | |
| "step": 3120 | |
| }, | |
| { | |
| "epoch": 0.5364181662382177, | |
| "grad_norm": 0.48239853978157043, | |
| "learning_rate": 4.106255355612682e-05, | |
| "loss": 0.3949, | |
| "step": 3130 | |
| }, | |
| { | |
| "epoch": 0.5381319622964867, | |
| "grad_norm": 0.4508815109729767, | |
| "learning_rate": 4.1033990288489007e-05, | |
| "loss": 0.4015, | |
| "step": 3140 | |
| }, | |
| { | |
| "epoch": 0.5398457583547558, | |
| "grad_norm": 0.4042247533798218, | |
| "learning_rate": 4.100542702085119e-05, | |
| "loss": 0.4032, | |
| "step": 3150 | |
| }, | |
| { | |
| "epoch": 0.5415595544130248, | |
| "grad_norm": 0.49115559458732605, | |
| "learning_rate": 4.0976863753213365e-05, | |
| "loss": 0.3847, | |
| "step": 3160 | |
| }, | |
| { | |
| "epoch": 0.5432733504712939, | |
| "grad_norm": 0.4663139581680298, | |
| "learning_rate": 4.094830048557555e-05, | |
| "loss": 0.3991, | |
| "step": 3170 | |
| }, | |
| { | |
| "epoch": 0.5449871465295629, | |
| "grad_norm": 0.5062701106071472, | |
| "learning_rate": 4.091973721793773e-05, | |
| "loss": 0.3901, | |
| "step": 3180 | |
| }, | |
| { | |
| "epoch": 0.5467009425878321, | |
| "grad_norm": 0.49512970447540283, | |
| "learning_rate": 4.0891173950299916e-05, | |
| "loss": 0.3921, | |
| "step": 3190 | |
| }, | |
| { | |
| "epoch": 0.5484147386461011, | |
| "grad_norm": 0.6591855883598328, | |
| "learning_rate": 4.08626106826621e-05, | |
| "loss": 0.3768, | |
| "step": 3200 | |
| }, | |
| { | |
| "epoch": 0.5501285347043702, | |
| "grad_norm": 0.45946204662323, | |
| "learning_rate": 4.083404741502428e-05, | |
| "loss": 0.3922, | |
| "step": 3210 | |
| }, | |
| { | |
| "epoch": 0.5518423307626392, | |
| "grad_norm": 0.4640927314758301, | |
| "learning_rate": 4.080548414738646e-05, | |
| "loss": 0.379, | |
| "step": 3220 | |
| }, | |
| { | |
| "epoch": 0.5535561268209083, | |
| "grad_norm": 0.4628278911113739, | |
| "learning_rate": 4.077692087974865e-05, | |
| "loss": 0.4021, | |
| "step": 3230 | |
| }, | |
| { | |
| "epoch": 0.5552699228791774, | |
| "grad_norm": 0.49231502413749695, | |
| "learning_rate": 4.0748357612110826e-05, | |
| "loss": 0.3959, | |
| "step": 3240 | |
| }, | |
| { | |
| "epoch": 0.5569837189374465, | |
| "grad_norm": 0.48861929774284363, | |
| "learning_rate": 4.071979434447301e-05, | |
| "loss": 0.3883, | |
| "step": 3250 | |
| }, | |
| { | |
| "epoch": 0.5586975149957155, | |
| "grad_norm": 0.5438745617866516, | |
| "learning_rate": 4.069123107683519e-05, | |
| "loss": 0.3985, | |
| "step": 3260 | |
| }, | |
| { | |
| "epoch": 0.5604113110539846, | |
| "grad_norm": 0.5093244314193726, | |
| "learning_rate": 4.066266780919738e-05, | |
| "loss": 0.3536, | |
| "step": 3270 | |
| }, | |
| { | |
| "epoch": 0.5621251071122536, | |
| "grad_norm": 0.43954500555992126, | |
| "learning_rate": 4.0634104541559556e-05, | |
| "loss": 0.3826, | |
| "step": 3280 | |
| }, | |
| { | |
| "epoch": 0.5638389031705227, | |
| "grad_norm": 0.5128121376037598, | |
| "learning_rate": 4.0605541273921736e-05, | |
| "loss": 0.4037, | |
| "step": 3290 | |
| }, | |
| { | |
| "epoch": 0.5655526992287918, | |
| "grad_norm": 0.4617377519607544, | |
| "learning_rate": 4.057697800628392e-05, | |
| "loss": 0.4009, | |
| "step": 3300 | |
| }, | |
| { | |
| "epoch": 0.5672664952870609, | |
| "grad_norm": 0.46787700057029724, | |
| "learning_rate": 4.05484147386461e-05, | |
| "loss": 0.3989, | |
| "step": 3310 | |
| }, | |
| { | |
| "epoch": 0.5689802913453299, | |
| "grad_norm": 0.5328530669212341, | |
| "learning_rate": 4.051985147100829e-05, | |
| "loss": 0.3814, | |
| "step": 3320 | |
| }, | |
| { | |
| "epoch": 0.570694087403599, | |
| "grad_norm": 0.4385411739349365, | |
| "learning_rate": 4.049128820337047e-05, | |
| "loss": 0.3681, | |
| "step": 3330 | |
| }, | |
| { | |
| "epoch": 0.572407883461868, | |
| "grad_norm": 0.45673176646232605, | |
| "learning_rate": 4.0462724935732645e-05, | |
| "loss": 0.3829, | |
| "step": 3340 | |
| }, | |
| { | |
| "epoch": 0.5741216795201372, | |
| "grad_norm": 0.45383307337760925, | |
| "learning_rate": 4.043416166809483e-05, | |
| "loss": 0.3747, | |
| "step": 3350 | |
| }, | |
| { | |
| "epoch": 0.5758354755784062, | |
| "grad_norm": 0.4405433237552643, | |
| "learning_rate": 4.040559840045701e-05, | |
| "loss": 0.3827, | |
| "step": 3360 | |
| }, | |
| { | |
| "epoch": 0.5775492716366752, | |
| "grad_norm": 0.45892763137817383, | |
| "learning_rate": 4.03770351328192e-05, | |
| "loss": 0.3781, | |
| "step": 3370 | |
| }, | |
| { | |
| "epoch": 0.5792630676949443, | |
| "grad_norm": 0.48304322361946106, | |
| "learning_rate": 4.034847186518138e-05, | |
| "loss": 0.398, | |
| "step": 3380 | |
| }, | |
| { | |
| "epoch": 0.5809768637532133, | |
| "grad_norm": 0.4620954692363739, | |
| "learning_rate": 4.031990859754356e-05, | |
| "loss": 0.3927, | |
| "step": 3390 | |
| }, | |
| { | |
| "epoch": 0.5826906598114824, | |
| "grad_norm": 0.4997206926345825, | |
| "learning_rate": 4.029134532990574e-05, | |
| "loss": 0.383, | |
| "step": 3400 | |
| }, | |
| { | |
| "epoch": 0.5844044558697515, | |
| "grad_norm": 0.5515158176422119, | |
| "learning_rate": 4.026278206226792e-05, | |
| "loss": 0.3983, | |
| "step": 3410 | |
| }, | |
| { | |
| "epoch": 0.5861182519280206, | |
| "grad_norm": 0.5057517290115356, | |
| "learning_rate": 4.0234218794630106e-05, | |
| "loss": 0.4052, | |
| "step": 3420 | |
| }, | |
| { | |
| "epoch": 0.5878320479862896, | |
| "grad_norm": 0.4466065764427185, | |
| "learning_rate": 4.020565552699229e-05, | |
| "loss": 0.4019, | |
| "step": 3430 | |
| }, | |
| { | |
| "epoch": 0.5895458440445587, | |
| "grad_norm": 0.44154247641563416, | |
| "learning_rate": 4.017709225935447e-05, | |
| "loss": 0.382, | |
| "step": 3440 | |
| }, | |
| { | |
| "epoch": 0.5912596401028277, | |
| "grad_norm": 0.46753990650177, | |
| "learning_rate": 4.014852899171666e-05, | |
| "loss": 0.383, | |
| "step": 3450 | |
| }, | |
| { | |
| "epoch": 0.5929734361610969, | |
| "grad_norm": 0.4521337151527405, | |
| "learning_rate": 4.011996572407884e-05, | |
| "loss": 0.3822, | |
| "step": 3460 | |
| }, | |
| { | |
| "epoch": 0.5946872322193659, | |
| "grad_norm": 0.46324121952056885, | |
| "learning_rate": 4.0091402456441016e-05, | |
| "loss": 0.3954, | |
| "step": 3470 | |
| }, | |
| { | |
| "epoch": 0.596401028277635, | |
| "grad_norm": 0.499546617269516, | |
| "learning_rate": 4.00628391888032e-05, | |
| "loss": 0.4129, | |
| "step": 3480 | |
| }, | |
| { | |
| "epoch": 0.598114824335904, | |
| "grad_norm": 0.44482311606407166, | |
| "learning_rate": 4.003427592116538e-05, | |
| "loss": 0.392, | |
| "step": 3490 | |
| }, | |
| { | |
| "epoch": 0.5998286203941731, | |
| "grad_norm": 0.5121467113494873, | |
| "learning_rate": 4.000571265352757e-05, | |
| "loss": 0.3792, | |
| "step": 3500 | |
| }, | |
| { | |
| "epoch": 0.6015424164524421, | |
| "grad_norm": 0.4905322790145874, | |
| "learning_rate": 3.997714938588975e-05, | |
| "loss": 0.3858, | |
| "step": 3510 | |
| }, | |
| { | |
| "epoch": 0.6032562125107113, | |
| "grad_norm": 0.4642052948474884, | |
| "learning_rate": 3.994858611825193e-05, | |
| "loss": 0.3872, | |
| "step": 3520 | |
| }, | |
| { | |
| "epoch": 0.6049700085689803, | |
| "grad_norm": 0.47320064902305603, | |
| "learning_rate": 3.992002285061411e-05, | |
| "loss": 0.3881, | |
| "step": 3530 | |
| }, | |
| { | |
| "epoch": 0.6066838046272494, | |
| "grad_norm": 0.45698079466819763, | |
| "learning_rate": 3.989145958297629e-05, | |
| "loss": 0.3894, | |
| "step": 3540 | |
| }, | |
| { | |
| "epoch": 0.6083976006855184, | |
| "grad_norm": 0.4781307876110077, | |
| "learning_rate": 3.986289631533848e-05, | |
| "loss": 0.3826, | |
| "step": 3550 | |
| }, | |
| { | |
| "epoch": 0.6101113967437874, | |
| "grad_norm": 0.48909029364585876, | |
| "learning_rate": 3.983433304770066e-05, | |
| "loss": 0.3853, | |
| "step": 3560 | |
| }, | |
| { | |
| "epoch": 0.6118251928020566, | |
| "grad_norm": 0.45835670828819275, | |
| "learning_rate": 3.980576978006284e-05, | |
| "loss": 0.3905, | |
| "step": 3570 | |
| }, | |
| { | |
| "epoch": 0.6135389888603257, | |
| "grad_norm": 0.4872204065322876, | |
| "learning_rate": 3.977720651242502e-05, | |
| "loss": 0.3856, | |
| "step": 3580 | |
| }, | |
| { | |
| "epoch": 0.6152527849185947, | |
| "grad_norm": 0.4030555486679077, | |
| "learning_rate": 3.97486432447872e-05, | |
| "loss": 0.3713, | |
| "step": 3590 | |
| }, | |
| { | |
| "epoch": 0.6169665809768637, | |
| "grad_norm": 0.5011148452758789, | |
| "learning_rate": 3.972007997714939e-05, | |
| "loss": 0.3918, | |
| "step": 3600 | |
| }, | |
| { | |
| "epoch": 0.6186803770351328, | |
| "grad_norm": 0.4357930123806, | |
| "learning_rate": 3.969151670951157e-05, | |
| "loss": 0.3775, | |
| "step": 3610 | |
| }, | |
| { | |
| "epoch": 0.6203941730934018, | |
| "grad_norm": 0.47152426838874817, | |
| "learning_rate": 3.966295344187375e-05, | |
| "loss": 0.3727, | |
| "step": 3620 | |
| }, | |
| { | |
| "epoch": 0.622107969151671, | |
| "grad_norm": 0.4790559709072113, | |
| "learning_rate": 3.963439017423594e-05, | |
| "loss": 0.3844, | |
| "step": 3630 | |
| }, | |
| { | |
| "epoch": 0.62382176520994, | |
| "grad_norm": 0.43179333209991455, | |
| "learning_rate": 3.960582690659812e-05, | |
| "loss": 0.3849, | |
| "step": 3640 | |
| }, | |
| { | |
| "epoch": 0.6255355612682091, | |
| "grad_norm": 0.4304300546646118, | |
| "learning_rate": 3.95772636389603e-05, | |
| "loss": 0.3839, | |
| "step": 3650 | |
| }, | |
| { | |
| "epoch": 0.6272493573264781, | |
| "grad_norm": 0.4341460168361664, | |
| "learning_rate": 3.954870037132248e-05, | |
| "loss": 0.3855, | |
| "step": 3660 | |
| }, | |
| { | |
| "epoch": 0.6289631533847472, | |
| "grad_norm": 0.4285462200641632, | |
| "learning_rate": 3.952013710368466e-05, | |
| "loss": 0.3673, | |
| "step": 3670 | |
| }, | |
| { | |
| "epoch": 0.6306769494430163, | |
| "grad_norm": 0.49335575103759766, | |
| "learning_rate": 3.949157383604685e-05, | |
| "loss": 0.4034, | |
| "step": 3680 | |
| }, | |
| { | |
| "epoch": 0.6323907455012854, | |
| "grad_norm": 0.44308072328567505, | |
| "learning_rate": 3.946301056840903e-05, | |
| "loss": 0.3802, | |
| "step": 3690 | |
| }, | |
| { | |
| "epoch": 0.6341045415595544, | |
| "grad_norm": 0.4259876012802124, | |
| "learning_rate": 3.943444730077121e-05, | |
| "loss": 0.3807, | |
| "step": 3700 | |
| }, | |
| { | |
| "epoch": 0.6358183376178235, | |
| "grad_norm": 0.4075721204280853, | |
| "learning_rate": 3.940588403313339e-05, | |
| "loss": 0.3853, | |
| "step": 3710 | |
| }, | |
| { | |
| "epoch": 0.6375321336760925, | |
| "grad_norm": 0.4169134199619293, | |
| "learning_rate": 3.937732076549557e-05, | |
| "loss": 0.3715, | |
| "step": 3720 | |
| }, | |
| { | |
| "epoch": 0.6392459297343616, | |
| "grad_norm": 0.45992928743362427, | |
| "learning_rate": 3.934875749785776e-05, | |
| "loss": 0.3808, | |
| "step": 3730 | |
| }, | |
| { | |
| "epoch": 0.6409597257926307, | |
| "grad_norm": 0.42471954226493835, | |
| "learning_rate": 3.932019423021994e-05, | |
| "loss": 0.3902, | |
| "step": 3740 | |
| }, | |
| { | |
| "epoch": 0.6426735218508998, | |
| "grad_norm": 0.4044492840766907, | |
| "learning_rate": 3.929163096258212e-05, | |
| "loss": 0.3775, | |
| "step": 3750 | |
| }, | |
| { | |
| "epoch": 0.6443873179091688, | |
| "grad_norm": 0.41978341341018677, | |
| "learning_rate": 3.92630676949443e-05, | |
| "loss": 0.3626, | |
| "step": 3760 | |
| }, | |
| { | |
| "epoch": 0.6461011139674379, | |
| "grad_norm": 0.4912661910057068, | |
| "learning_rate": 3.923450442730648e-05, | |
| "loss": 0.3698, | |
| "step": 3770 | |
| }, | |
| { | |
| "epoch": 0.6478149100257069, | |
| "grad_norm": 0.44264456629753113, | |
| "learning_rate": 3.920594115966867e-05, | |
| "loss": 0.3761, | |
| "step": 3780 | |
| }, | |
| { | |
| "epoch": 0.6495287060839761, | |
| "grad_norm": 0.45757827162742615, | |
| "learning_rate": 3.9177377892030847e-05, | |
| "loss": 0.3851, | |
| "step": 3790 | |
| }, | |
| { | |
| "epoch": 0.6512425021422451, | |
| "grad_norm": 0.5428141355514526, | |
| "learning_rate": 3.914881462439303e-05, | |
| "loss": 0.397, | |
| "step": 3800 | |
| }, | |
| { | |
| "epoch": 0.6529562982005142, | |
| "grad_norm": 0.45041322708129883, | |
| "learning_rate": 3.912025135675522e-05, | |
| "loss": 0.3966, | |
| "step": 3810 | |
| }, | |
| { | |
| "epoch": 0.6546700942587832, | |
| "grad_norm": 0.4285386800765991, | |
| "learning_rate": 3.90916880891174e-05, | |
| "loss": 0.3889, | |
| "step": 3820 | |
| }, | |
| { | |
| "epoch": 0.6563838903170522, | |
| "grad_norm": 0.45799657702445984, | |
| "learning_rate": 3.906312482147958e-05, | |
| "loss": 0.4048, | |
| "step": 3830 | |
| }, | |
| { | |
| "epoch": 0.6580976863753213, | |
| "grad_norm": 0.445076048374176, | |
| "learning_rate": 3.9034561553841756e-05, | |
| "loss": 0.3675, | |
| "step": 3840 | |
| }, | |
| { | |
| "epoch": 0.6598114824335904, | |
| "grad_norm": 0.4635215103626251, | |
| "learning_rate": 3.900599828620394e-05, | |
| "loss": 0.3991, | |
| "step": 3850 | |
| }, | |
| { | |
| "epoch": 0.6615252784918595, | |
| "grad_norm": 0.42482051253318787, | |
| "learning_rate": 3.897743501856613e-05, | |
| "loss": 0.3899, | |
| "step": 3860 | |
| }, | |
| { | |
| "epoch": 0.6632390745501285, | |
| "grad_norm": 0.4092475175857544, | |
| "learning_rate": 3.894887175092831e-05, | |
| "loss": 0.382, | |
| "step": 3870 | |
| }, | |
| { | |
| "epoch": 0.6649528706083976, | |
| "grad_norm": 0.4773096740245819, | |
| "learning_rate": 3.8920308483290494e-05, | |
| "loss": 0.3721, | |
| "step": 3880 | |
| }, | |
| { | |
| "epoch": 0.6666666666666666, | |
| "grad_norm": 0.46646052598953247, | |
| "learning_rate": 3.889174521565267e-05, | |
| "loss": 0.3584, | |
| "step": 3890 | |
| }, | |
| { | |
| "epoch": 0.6683804627249358, | |
| "grad_norm": 0.4377121925354004, | |
| "learning_rate": 3.886318194801485e-05, | |
| "loss": 0.3715, | |
| "step": 3900 | |
| }, | |
| { | |
| "epoch": 0.6700942587832048, | |
| "grad_norm": 0.434952974319458, | |
| "learning_rate": 3.883461868037704e-05, | |
| "loss": 0.3874, | |
| "step": 3910 | |
| }, | |
| { | |
| "epoch": 0.6718080548414739, | |
| "grad_norm": 0.44789665937423706, | |
| "learning_rate": 3.880605541273922e-05, | |
| "loss": 0.3832, | |
| "step": 3920 | |
| }, | |
| { | |
| "epoch": 0.6735218508997429, | |
| "grad_norm": 0.4606776833534241, | |
| "learning_rate": 3.87774921451014e-05, | |
| "loss": 0.3864, | |
| "step": 3930 | |
| }, | |
| { | |
| "epoch": 0.675235646958012, | |
| "grad_norm": 0.4751772880554199, | |
| "learning_rate": 3.874892887746359e-05, | |
| "loss": 0.3901, | |
| "step": 3940 | |
| }, | |
| { | |
| "epoch": 0.676949443016281, | |
| "grad_norm": 0.4270033538341522, | |
| "learning_rate": 3.872036560982576e-05, | |
| "loss": 0.3792, | |
| "step": 3950 | |
| }, | |
| { | |
| "epoch": 0.6786632390745502, | |
| "grad_norm": 0.36948758363723755, | |
| "learning_rate": 3.869180234218795e-05, | |
| "loss": 0.3615, | |
| "step": 3960 | |
| }, | |
| { | |
| "epoch": 0.6803770351328192, | |
| "grad_norm": 0.40578505396842957, | |
| "learning_rate": 3.866323907455013e-05, | |
| "loss": 0.3905, | |
| "step": 3970 | |
| }, | |
| { | |
| "epoch": 0.6820908311910883, | |
| "grad_norm": 0.43324077129364014, | |
| "learning_rate": 3.863467580691231e-05, | |
| "loss": 0.3976, | |
| "step": 3980 | |
| }, | |
| { | |
| "epoch": 0.6838046272493573, | |
| "grad_norm": 0.4060199558734894, | |
| "learning_rate": 3.86061125392745e-05, | |
| "loss": 0.384, | |
| "step": 3990 | |
| }, | |
| { | |
| "epoch": 0.6855184233076264, | |
| "grad_norm": 0.41697928309440613, | |
| "learning_rate": 3.857754927163668e-05, | |
| "loss": 0.3907, | |
| "step": 4000 | |
| }, | |
| { | |
| "epoch": 0.6872322193658955, | |
| "grad_norm": 0.43197640776634216, | |
| "learning_rate": 3.854898600399886e-05, | |
| "loss": 0.3644, | |
| "step": 4010 | |
| }, | |
| { | |
| "epoch": 0.6889460154241646, | |
| "grad_norm": 0.4213910400867462, | |
| "learning_rate": 3.852042273636104e-05, | |
| "loss": 0.3828, | |
| "step": 4020 | |
| }, | |
| { | |
| "epoch": 0.6906598114824336, | |
| "grad_norm": 0.45907771587371826, | |
| "learning_rate": 3.849185946872322e-05, | |
| "loss": 0.3796, | |
| "step": 4030 | |
| }, | |
| { | |
| "epoch": 0.6923736075407027, | |
| "grad_norm": 0.4346909523010254, | |
| "learning_rate": 3.846329620108541e-05, | |
| "loss": 0.3819, | |
| "step": 4040 | |
| }, | |
| { | |
| "epoch": 0.6940874035989717, | |
| "grad_norm": 0.4609411060810089, | |
| "learning_rate": 3.843473293344759e-05, | |
| "loss": 0.3678, | |
| "step": 4050 | |
| }, | |
| { | |
| "epoch": 0.6958011996572407, | |
| "grad_norm": 0.4279443323612213, | |
| "learning_rate": 3.8406169665809774e-05, | |
| "loss": 0.3646, | |
| "step": 4060 | |
| }, | |
| { | |
| "epoch": 0.6975149957155099, | |
| "grad_norm": 0.42237308621406555, | |
| "learning_rate": 3.837760639817195e-05, | |
| "loss": 0.3764, | |
| "step": 4070 | |
| }, | |
| { | |
| "epoch": 0.699228791773779, | |
| "grad_norm": 0.40210121870040894, | |
| "learning_rate": 3.834904313053413e-05, | |
| "loss": 0.3715, | |
| "step": 4080 | |
| }, | |
| { | |
| "epoch": 0.700942587832048, | |
| "grad_norm": 0.5159941911697388, | |
| "learning_rate": 3.832047986289632e-05, | |
| "loss": 0.3653, | |
| "step": 4090 | |
| }, | |
| { | |
| "epoch": 0.702656383890317, | |
| "grad_norm": 0.39419978857040405, | |
| "learning_rate": 3.82919165952585e-05, | |
| "loss": 0.3966, | |
| "step": 4100 | |
| }, | |
| { | |
| "epoch": 0.7043701799485861, | |
| "grad_norm": 0.459267795085907, | |
| "learning_rate": 3.8263353327620684e-05, | |
| "loss": 0.3784, | |
| "step": 4110 | |
| }, | |
| { | |
| "epoch": 0.7060839760068551, | |
| "grad_norm": 0.46595674753189087, | |
| "learning_rate": 3.823479005998286e-05, | |
| "loss": 0.3841, | |
| "step": 4120 | |
| }, | |
| { | |
| "epoch": 0.7077977720651243, | |
| "grad_norm": 0.42522045969963074, | |
| "learning_rate": 3.820622679234505e-05, | |
| "loss": 0.3847, | |
| "step": 4130 | |
| }, | |
| { | |
| "epoch": 0.7095115681233933, | |
| "grad_norm": 0.4188786745071411, | |
| "learning_rate": 3.817766352470723e-05, | |
| "loss": 0.3744, | |
| "step": 4140 | |
| }, | |
| { | |
| "epoch": 0.7112253641816624, | |
| "grad_norm": 0.46960318088531494, | |
| "learning_rate": 3.814910025706941e-05, | |
| "loss": 0.3566, | |
| "step": 4150 | |
| }, | |
| { | |
| "epoch": 0.7129391602399314, | |
| "grad_norm": 0.4544188380241394, | |
| "learning_rate": 3.8120536989431594e-05, | |
| "loss": 0.3798, | |
| "step": 4160 | |
| }, | |
| { | |
| "epoch": 0.7146529562982005, | |
| "grad_norm": 0.44834500551223755, | |
| "learning_rate": 3.809197372179378e-05, | |
| "loss": 0.377, | |
| "step": 4170 | |
| }, | |
| { | |
| "epoch": 0.7163667523564696, | |
| "grad_norm": 0.4910108149051666, | |
| "learning_rate": 3.806341045415596e-05, | |
| "loss": 0.3867, | |
| "step": 4180 | |
| }, | |
| { | |
| "epoch": 0.7180805484147387, | |
| "grad_norm": 0.4388539493083954, | |
| "learning_rate": 3.803484718651814e-05, | |
| "loss": 0.3739, | |
| "step": 4190 | |
| }, | |
| { | |
| "epoch": 0.7197943444730077, | |
| "grad_norm": 0.4148114323616028, | |
| "learning_rate": 3.800628391888032e-05, | |
| "loss": 0.3592, | |
| "step": 4200 | |
| }, | |
| { | |
| "epoch": 0.7215081405312768, | |
| "grad_norm": 0.399849534034729, | |
| "learning_rate": 3.79777206512425e-05, | |
| "loss": 0.3705, | |
| "step": 4210 | |
| }, | |
| { | |
| "epoch": 0.7232219365895458, | |
| "grad_norm": 0.4451843202114105, | |
| "learning_rate": 3.794915738360469e-05, | |
| "loss": 0.3772, | |
| "step": 4220 | |
| }, | |
| { | |
| "epoch": 0.7249357326478149, | |
| "grad_norm": 0.4321479797363281, | |
| "learning_rate": 3.792059411596687e-05, | |
| "loss": 0.3654, | |
| "step": 4230 | |
| }, | |
| { | |
| "epoch": 0.726649528706084, | |
| "grad_norm": 0.444069504737854, | |
| "learning_rate": 3.7892030848329055e-05, | |
| "loss": 0.3614, | |
| "step": 4240 | |
| }, | |
| { | |
| "epoch": 0.7283633247643531, | |
| "grad_norm": 0.4706447720527649, | |
| "learning_rate": 3.7863467580691234e-05, | |
| "loss": 0.3809, | |
| "step": 4250 | |
| }, | |
| { | |
| "epoch": 0.7300771208226221, | |
| "grad_norm": 0.3922620713710785, | |
| "learning_rate": 3.783490431305341e-05, | |
| "loss": 0.3925, | |
| "step": 4260 | |
| }, | |
| { | |
| "epoch": 0.7317909168808912, | |
| "grad_norm": 0.4120844602584839, | |
| "learning_rate": 3.78063410454156e-05, | |
| "loss": 0.3617, | |
| "step": 4270 | |
| }, | |
| { | |
| "epoch": 0.7335047129391602, | |
| "grad_norm": 0.4326859414577484, | |
| "learning_rate": 3.777777777777778e-05, | |
| "loss": 0.3647, | |
| "step": 4280 | |
| }, | |
| { | |
| "epoch": 0.7352185089974294, | |
| "grad_norm": 0.40129154920578003, | |
| "learning_rate": 3.7749214510139964e-05, | |
| "loss": 0.3914, | |
| "step": 4290 | |
| }, | |
| { | |
| "epoch": 0.7369323050556984, | |
| "grad_norm": 0.480270117521286, | |
| "learning_rate": 3.7720651242502143e-05, | |
| "loss": 0.3774, | |
| "step": 4300 | |
| }, | |
| { | |
| "epoch": 0.7386461011139674, | |
| "grad_norm": 0.42428138852119446, | |
| "learning_rate": 3.769208797486433e-05, | |
| "loss": 0.3895, | |
| "step": 4310 | |
| }, | |
| { | |
| "epoch": 0.7403598971722365, | |
| "grad_norm": 0.4401046931743622, | |
| "learning_rate": 3.766352470722651e-05, | |
| "loss": 0.3877, | |
| "step": 4320 | |
| }, | |
| { | |
| "epoch": 0.7420736932305055, | |
| "grad_norm": 0.44101765751838684, | |
| "learning_rate": 3.763496143958869e-05, | |
| "loss": 0.3568, | |
| "step": 4330 | |
| }, | |
| { | |
| "epoch": 0.7437874892887746, | |
| "grad_norm": 0.46002933382987976, | |
| "learning_rate": 3.7606398171950874e-05, | |
| "loss": 0.3699, | |
| "step": 4340 | |
| }, | |
| { | |
| "epoch": 0.7455012853470437, | |
| "grad_norm": 0.40729400515556335, | |
| "learning_rate": 3.757783490431305e-05, | |
| "loss": 0.3693, | |
| "step": 4350 | |
| }, | |
| { | |
| "epoch": 0.7472150814053128, | |
| "grad_norm": 0.39425283670425415, | |
| "learning_rate": 3.754927163667524e-05, | |
| "loss": 0.392, | |
| "step": 4360 | |
| }, | |
| { | |
| "epoch": 0.7489288774635818, | |
| "grad_norm": 0.44731876254081726, | |
| "learning_rate": 3.752070836903742e-05, | |
| "loss": 0.38, | |
| "step": 4370 | |
| }, | |
| { | |
| "epoch": 0.7506426735218509, | |
| "grad_norm": 0.4442377984523773, | |
| "learning_rate": 3.74921451013996e-05, | |
| "loss": 0.3871, | |
| "step": 4380 | |
| }, | |
| { | |
| "epoch": 0.7523564695801199, | |
| "grad_norm": 0.42616844177246094, | |
| "learning_rate": 3.7463581833761784e-05, | |
| "loss": 0.3663, | |
| "step": 4390 | |
| }, | |
| { | |
| "epoch": 0.7540702656383891, | |
| "grad_norm": 0.44392848014831543, | |
| "learning_rate": 3.743501856612396e-05, | |
| "loss": 0.3666, | |
| "step": 4400 | |
| }, | |
| { | |
| "epoch": 0.7557840616966581, | |
| "grad_norm": 0.41869884729385376, | |
| "learning_rate": 3.740645529848615e-05, | |
| "loss": 0.3715, | |
| "step": 4410 | |
| }, | |
| { | |
| "epoch": 0.7574978577549272, | |
| "grad_norm": 0.5005581378936768, | |
| "learning_rate": 3.7377892030848335e-05, | |
| "loss": 0.3933, | |
| "step": 4420 | |
| }, | |
| { | |
| "epoch": 0.7592116538131962, | |
| "grad_norm": 0.42626211047172546, | |
| "learning_rate": 3.7349328763210514e-05, | |
| "loss": 0.396, | |
| "step": 4430 | |
| }, | |
| { | |
| "epoch": 0.7609254498714653, | |
| "grad_norm": 0.4285553991794586, | |
| "learning_rate": 3.7320765495572693e-05, | |
| "loss": 0.3865, | |
| "step": 4440 | |
| }, | |
| { | |
| "epoch": 0.7626392459297343, | |
| "grad_norm": 0.44240063428878784, | |
| "learning_rate": 3.729220222793487e-05, | |
| "loss": 0.4041, | |
| "step": 4450 | |
| }, | |
| { | |
| "epoch": 0.7643530419880035, | |
| "grad_norm": 0.44902411103248596, | |
| "learning_rate": 3.726363896029706e-05, | |
| "loss": 0.3851, | |
| "step": 4460 | |
| }, | |
| { | |
| "epoch": 0.7660668380462725, | |
| "grad_norm": 0.4316774904727936, | |
| "learning_rate": 3.7235075692659245e-05, | |
| "loss": 0.3647, | |
| "step": 4470 | |
| }, | |
| { | |
| "epoch": 0.7677806341045416, | |
| "grad_norm": 0.4804839491844177, | |
| "learning_rate": 3.7206512425021424e-05, | |
| "loss": 0.3811, | |
| "step": 4480 | |
| }, | |
| { | |
| "epoch": 0.7694944301628106, | |
| "grad_norm": 0.44255098700523376, | |
| "learning_rate": 3.717794915738361e-05, | |
| "loss": 0.3731, | |
| "step": 4490 | |
| }, | |
| { | |
| "epoch": 0.7712082262210797, | |
| "grad_norm": 0.47301185131073, | |
| "learning_rate": 3.714938588974579e-05, | |
| "loss": 0.3777, | |
| "step": 4500 | |
| }, | |
| { | |
| "epoch": 0.7729220222793488, | |
| "grad_norm": 0.4279269874095917, | |
| "learning_rate": 3.712082262210797e-05, | |
| "loss": 0.3839, | |
| "step": 4510 | |
| }, | |
| { | |
| "epoch": 0.7746358183376179, | |
| "grad_norm": 0.47981521487236023, | |
| "learning_rate": 3.7092259354470154e-05, | |
| "loss": 0.366, | |
| "step": 4520 | |
| }, | |
| { | |
| "epoch": 0.7763496143958869, | |
| "grad_norm": 0.4463767111301422, | |
| "learning_rate": 3.7063696086832334e-05, | |
| "loss": 0.387, | |
| "step": 4530 | |
| }, | |
| { | |
| "epoch": 0.778063410454156, | |
| "grad_norm": 0.4834374487400055, | |
| "learning_rate": 3.703513281919452e-05, | |
| "loss": 0.3689, | |
| "step": 4540 | |
| }, | |
| { | |
| "epoch": 0.779777206512425, | |
| "grad_norm": 0.4108034074306488, | |
| "learning_rate": 3.7006569551556706e-05, | |
| "loss": 0.3528, | |
| "step": 4550 | |
| }, | |
| { | |
| "epoch": 0.781491002570694, | |
| "grad_norm": 0.46595698595046997, | |
| "learning_rate": 3.697800628391888e-05, | |
| "loss": 0.3776, | |
| "step": 4560 | |
| }, | |
| { | |
| "epoch": 0.7832047986289632, | |
| "grad_norm": 0.556064784526825, | |
| "learning_rate": 3.6949443016281064e-05, | |
| "loss": 0.3689, | |
| "step": 4570 | |
| }, | |
| { | |
| "epoch": 0.7849185946872322, | |
| "grad_norm": 0.49466294050216675, | |
| "learning_rate": 3.6920879748643243e-05, | |
| "loss": 0.3773, | |
| "step": 4580 | |
| }, | |
| { | |
| "epoch": 0.7866323907455013, | |
| "grad_norm": 0.43849724531173706, | |
| "learning_rate": 3.689231648100543e-05, | |
| "loss": 0.3786, | |
| "step": 4590 | |
| }, | |
| { | |
| "epoch": 0.7883461868037703, | |
| "grad_norm": 0.4461914598941803, | |
| "learning_rate": 3.6863753213367615e-05, | |
| "loss": 0.3731, | |
| "step": 4600 | |
| }, | |
| { | |
| "epoch": 0.7900599828620394, | |
| "grad_norm": 0.39898931980133057, | |
| "learning_rate": 3.6835189945729795e-05, | |
| "loss": 0.3694, | |
| "step": 4610 | |
| }, | |
| { | |
| "epoch": 0.7917737789203085, | |
| "grad_norm": 0.4689505398273468, | |
| "learning_rate": 3.6806626678091974e-05, | |
| "loss": 0.3742, | |
| "step": 4620 | |
| }, | |
| { | |
| "epoch": 0.7934875749785776, | |
| "grad_norm": 0.4229956269264221, | |
| "learning_rate": 3.677806341045415e-05, | |
| "loss": 0.3465, | |
| "step": 4630 | |
| }, | |
| { | |
| "epoch": 0.7952013710368466, | |
| "grad_norm": 0.4562397301197052, | |
| "learning_rate": 3.674950014281634e-05, | |
| "loss": 0.3684, | |
| "step": 4640 | |
| }, | |
| { | |
| "epoch": 0.7969151670951157, | |
| "grad_norm": 0.3931344151496887, | |
| "learning_rate": 3.6720936875178525e-05, | |
| "loss": 0.3704, | |
| "step": 4650 | |
| }, | |
| { | |
| "epoch": 0.7986289631533847, | |
| "grad_norm": 0.42493489384651184, | |
| "learning_rate": 3.6692373607540704e-05, | |
| "loss": 0.3837, | |
| "step": 4660 | |
| }, | |
| { | |
| "epoch": 0.8003427592116538, | |
| "grad_norm": 0.42890891432762146, | |
| "learning_rate": 3.666381033990289e-05, | |
| "loss": 0.37, | |
| "step": 4670 | |
| }, | |
| { | |
| "epoch": 0.8020565552699229, | |
| "grad_norm": 0.3955785930156708, | |
| "learning_rate": 3.663524707226507e-05, | |
| "loss": 0.3758, | |
| "step": 4680 | |
| }, | |
| { | |
| "epoch": 0.803770351328192, | |
| "grad_norm": 0.40773460268974304, | |
| "learning_rate": 3.660668380462725e-05, | |
| "loss": 0.3697, | |
| "step": 4690 | |
| }, | |
| { | |
| "epoch": 0.805484147386461, | |
| "grad_norm": 0.45977774262428284, | |
| "learning_rate": 3.6578120536989435e-05, | |
| "loss": 0.371, | |
| "step": 4700 | |
| }, | |
| { | |
| "epoch": 0.8071979434447301, | |
| "grad_norm": 0.44712209701538086, | |
| "learning_rate": 3.6549557269351614e-05, | |
| "loss": 0.3744, | |
| "step": 4710 | |
| }, | |
| { | |
| "epoch": 0.8089117395029991, | |
| "grad_norm": 0.4498061239719391, | |
| "learning_rate": 3.65209940017138e-05, | |
| "loss": 0.356, | |
| "step": 4720 | |
| }, | |
| { | |
| "epoch": 0.8106255355612683, | |
| "grad_norm": 0.42666059732437134, | |
| "learning_rate": 3.649243073407598e-05, | |
| "loss": 0.3752, | |
| "step": 4730 | |
| }, | |
| { | |
| "epoch": 0.8123393316195373, | |
| "grad_norm": 0.46936237812042236, | |
| "learning_rate": 3.646386746643816e-05, | |
| "loss": 0.349, | |
| "step": 4740 | |
| }, | |
| { | |
| "epoch": 0.8140531276778064, | |
| "grad_norm": 0.4472278654575348, | |
| "learning_rate": 3.6435304198800345e-05, | |
| "loss": 0.3826, | |
| "step": 4750 | |
| }, | |
| { | |
| "epoch": 0.8157669237360754, | |
| "grad_norm": 0.4189947545528412, | |
| "learning_rate": 3.6406740931162524e-05, | |
| "loss": 0.359, | |
| "step": 4760 | |
| }, | |
| { | |
| "epoch": 0.8174807197943444, | |
| "grad_norm": 0.43510058522224426, | |
| "learning_rate": 3.637817766352471e-05, | |
| "loss": 0.3757, | |
| "step": 4770 | |
| }, | |
| { | |
| "epoch": 0.8191945158526135, | |
| "grad_norm": 0.4573146104812622, | |
| "learning_rate": 3.634961439588689e-05, | |
| "loss": 0.3666, | |
| "step": 4780 | |
| }, | |
| { | |
| "epoch": 0.8209083119108826, | |
| "grad_norm": 0.4547922909259796, | |
| "learning_rate": 3.6321051128249075e-05, | |
| "loss": 0.3763, | |
| "step": 4790 | |
| }, | |
| { | |
| "epoch": 0.8226221079691517, | |
| "grad_norm": 0.4520827829837799, | |
| "learning_rate": 3.6292487860611254e-05, | |
| "loss": 0.3812, | |
| "step": 4800 | |
| }, | |
| { | |
| "epoch": 0.8243359040274207, | |
| "grad_norm": 0.4624135196208954, | |
| "learning_rate": 3.6263924592973434e-05, | |
| "loss": 0.3747, | |
| "step": 4810 | |
| }, | |
| { | |
| "epoch": 0.8260497000856898, | |
| "grad_norm": 0.4581402838230133, | |
| "learning_rate": 3.623536132533562e-05, | |
| "loss": 0.3758, | |
| "step": 4820 | |
| }, | |
| { | |
| "epoch": 0.8277634961439588, | |
| "grad_norm": 0.4327716827392578, | |
| "learning_rate": 3.6206798057697806e-05, | |
| "loss": 0.3677, | |
| "step": 4830 | |
| }, | |
| { | |
| "epoch": 0.829477292202228, | |
| "grad_norm": 0.41706621646881104, | |
| "learning_rate": 3.6178234790059985e-05, | |
| "loss": 0.3699, | |
| "step": 4840 | |
| }, | |
| { | |
| "epoch": 0.831191088260497, | |
| "grad_norm": 0.48654213547706604, | |
| "learning_rate": 3.614967152242217e-05, | |
| "loss": 0.363, | |
| "step": 4850 | |
| }, | |
| { | |
| "epoch": 0.8329048843187661, | |
| "grad_norm": 0.9540205001831055, | |
| "learning_rate": 3.612110825478435e-05, | |
| "loss": 0.3613, | |
| "step": 4860 | |
| }, | |
| { | |
| "epoch": 0.8346186803770351, | |
| "grad_norm": 0.43841347098350525, | |
| "learning_rate": 3.609254498714653e-05, | |
| "loss": 0.3692, | |
| "step": 4870 | |
| }, | |
| { | |
| "epoch": 0.8363324764353042, | |
| "grad_norm": 0.49240532517433167, | |
| "learning_rate": 3.6063981719508715e-05, | |
| "loss": 0.3826, | |
| "step": 4880 | |
| }, | |
| { | |
| "epoch": 0.8380462724935732, | |
| "grad_norm": 0.4343222379684448, | |
| "learning_rate": 3.6035418451870895e-05, | |
| "loss": 0.3619, | |
| "step": 4890 | |
| }, | |
| { | |
| "epoch": 0.8397600685518424, | |
| "grad_norm": 0.3697074353694916, | |
| "learning_rate": 3.600685518423308e-05, | |
| "loss": 0.3573, | |
| "step": 4900 | |
| }, | |
| { | |
| "epoch": 0.8414738646101114, | |
| "grad_norm": 0.44506195187568665, | |
| "learning_rate": 3.597829191659526e-05, | |
| "loss": 0.3831, | |
| "step": 4910 | |
| }, | |
| { | |
| "epoch": 0.8431876606683805, | |
| "grad_norm": 0.49516722559928894, | |
| "learning_rate": 3.5949728648957446e-05, | |
| "loss": 0.3707, | |
| "step": 4920 | |
| }, | |
| { | |
| "epoch": 0.8449014567266495, | |
| "grad_norm": 0.43179890513420105, | |
| "learning_rate": 3.5921165381319625e-05, | |
| "loss": 0.3754, | |
| "step": 4930 | |
| }, | |
| { | |
| "epoch": 0.8466152527849186, | |
| "grad_norm": 0.4354996681213379, | |
| "learning_rate": 3.5892602113681804e-05, | |
| "loss": 0.376, | |
| "step": 4940 | |
| }, | |
| { | |
| "epoch": 0.8483290488431876, | |
| "grad_norm": 0.4149874150753021, | |
| "learning_rate": 3.586403884604399e-05, | |
| "loss": 0.3712, | |
| "step": 4950 | |
| }, | |
| { | |
| "epoch": 0.8500428449014568, | |
| "grad_norm": 0.40607717633247375, | |
| "learning_rate": 3.583547557840617e-05, | |
| "loss": 0.3775, | |
| "step": 4960 | |
| }, | |
| { | |
| "epoch": 0.8517566409597258, | |
| "grad_norm": 0.39906200766563416, | |
| "learning_rate": 3.5806912310768356e-05, | |
| "loss": 0.3667, | |
| "step": 4970 | |
| }, | |
| { | |
| "epoch": 0.8534704370179949, | |
| "grad_norm": 0.42885473370552063, | |
| "learning_rate": 3.5778349043130535e-05, | |
| "loss": 0.3736, | |
| "step": 4980 | |
| }, | |
| { | |
| "epoch": 0.8551842330762639, | |
| "grad_norm": 0.45682159066200256, | |
| "learning_rate": 3.5749785775492714e-05, | |
| "loss": 0.3758, | |
| "step": 4990 | |
| }, | |
| { | |
| "epoch": 0.856898029134533, | |
| "grad_norm": 0.4330593943595886, | |
| "learning_rate": 3.57212225078549e-05, | |
| "loss": 0.351, | |
| "step": 5000 | |
| }, | |
| { | |
| "epoch": 0.8586118251928021, | |
| "grad_norm": 0.48278284072875977, | |
| "learning_rate": 3.569265924021708e-05, | |
| "loss": 0.3764, | |
| "step": 5010 | |
| }, | |
| { | |
| "epoch": 0.8603256212510711, | |
| "grad_norm": 0.4309553802013397, | |
| "learning_rate": 3.5664095972579265e-05, | |
| "loss": 0.3537, | |
| "step": 5020 | |
| }, | |
| { | |
| "epoch": 0.8620394173093402, | |
| "grad_norm": 0.43522948026657104, | |
| "learning_rate": 3.563553270494145e-05, | |
| "loss": 0.3737, | |
| "step": 5030 | |
| }, | |
| { | |
| "epoch": 0.8637532133676092, | |
| "grad_norm": 0.41307273507118225, | |
| "learning_rate": 3.560696943730363e-05, | |
| "loss": 0.3751, | |
| "step": 5040 | |
| }, | |
| { | |
| "epoch": 0.8654670094258783, | |
| "grad_norm": 0.4293232262134552, | |
| "learning_rate": 3.557840616966581e-05, | |
| "loss": 0.3806, | |
| "step": 5050 | |
| }, | |
| { | |
| "epoch": 0.8671808054841473, | |
| "grad_norm": 0.46006080508232117, | |
| "learning_rate": 3.554984290202799e-05, | |
| "loss": 0.3597, | |
| "step": 5060 | |
| }, | |
| { | |
| "epoch": 0.8688946015424165, | |
| "grad_norm": 0.439104825258255, | |
| "learning_rate": 3.5521279634390175e-05, | |
| "loss": 0.3648, | |
| "step": 5070 | |
| }, | |
| { | |
| "epoch": 0.8706083976006855, | |
| "grad_norm": 0.39455896615982056, | |
| "learning_rate": 3.549271636675236e-05, | |
| "loss": 0.3671, | |
| "step": 5080 | |
| }, | |
| { | |
| "epoch": 0.8723221936589546, | |
| "grad_norm": 0.41576287150382996, | |
| "learning_rate": 3.546415309911454e-05, | |
| "loss": 0.3744, | |
| "step": 5090 | |
| }, | |
| { | |
| "epoch": 0.8740359897172236, | |
| "grad_norm": 0.43900826573371887, | |
| "learning_rate": 3.5435589831476726e-05, | |
| "loss": 0.3693, | |
| "step": 5100 | |
| }, | |
| { | |
| "epoch": 0.8757497857754927, | |
| "grad_norm": 0.4696173369884491, | |
| "learning_rate": 3.5407026563838906e-05, | |
| "loss": 0.3708, | |
| "step": 5110 | |
| }, | |
| { | |
| "epoch": 0.8774635818337618, | |
| "grad_norm": 0.436869740486145, | |
| "learning_rate": 3.5378463296201085e-05, | |
| "loss": 0.3684, | |
| "step": 5120 | |
| }, | |
| { | |
| "epoch": 0.8791773778920309, | |
| "grad_norm": 0.4194677770137787, | |
| "learning_rate": 3.534990002856327e-05, | |
| "loss": 0.3685, | |
| "step": 5130 | |
| }, | |
| { | |
| "epoch": 0.8808911739502999, | |
| "grad_norm": 0.41912123560905457, | |
| "learning_rate": 3.532133676092545e-05, | |
| "loss": 0.376, | |
| "step": 5140 | |
| }, | |
| { | |
| "epoch": 0.882604970008569, | |
| "grad_norm": 0.410951167345047, | |
| "learning_rate": 3.5292773493287636e-05, | |
| "loss": 0.3623, | |
| "step": 5150 | |
| }, | |
| { | |
| "epoch": 0.884318766066838, | |
| "grad_norm": 0.41262558102607727, | |
| "learning_rate": 3.5264210225649815e-05, | |
| "loss": 0.389, | |
| "step": 5160 | |
| }, | |
| { | |
| "epoch": 0.8860325621251071, | |
| "grad_norm": 0.40964463353157043, | |
| "learning_rate": 3.5235646958011995e-05, | |
| "loss": 0.383, | |
| "step": 5170 | |
| }, | |
| { | |
| "epoch": 0.8877463581833762, | |
| "grad_norm": 0.4126282334327698, | |
| "learning_rate": 3.520708369037418e-05, | |
| "loss": 0.3431, | |
| "step": 5180 | |
| }, | |
| { | |
| "epoch": 0.8894601542416453, | |
| "grad_norm": 0.43680474162101746, | |
| "learning_rate": 3.517852042273636e-05, | |
| "loss": 0.3742, | |
| "step": 5190 | |
| }, | |
| { | |
| "epoch": 0.8911739502999143, | |
| "grad_norm": 0.42978253960609436, | |
| "learning_rate": 3.5149957155098546e-05, | |
| "loss": 0.3621, | |
| "step": 5200 | |
| }, | |
| { | |
| "epoch": 0.8928877463581834, | |
| "grad_norm": 0.4275003969669342, | |
| "learning_rate": 3.512139388746073e-05, | |
| "loss": 0.3784, | |
| "step": 5210 | |
| }, | |
| { | |
| "epoch": 0.8946015424164524, | |
| "grad_norm": 0.5404401421546936, | |
| "learning_rate": 3.509283061982291e-05, | |
| "loss": 0.3778, | |
| "step": 5220 | |
| }, | |
| { | |
| "epoch": 0.8963153384747216, | |
| "grad_norm": 0.4109743535518646, | |
| "learning_rate": 3.506426735218509e-05, | |
| "loss": 0.3597, | |
| "step": 5230 | |
| }, | |
| { | |
| "epoch": 0.8980291345329906, | |
| "grad_norm": 0.4215233623981476, | |
| "learning_rate": 3.503570408454727e-05, | |
| "loss": 0.3705, | |
| "step": 5240 | |
| }, | |
| { | |
| "epoch": 0.8997429305912596, | |
| "grad_norm": 0.5016302466392517, | |
| "learning_rate": 3.5007140816909456e-05, | |
| "loss": 0.3667, | |
| "step": 5250 | |
| }, | |
| { | |
| "epoch": 0.9014567266495287, | |
| "grad_norm": 0.4061182141304016, | |
| "learning_rate": 3.497857754927164e-05, | |
| "loss": 0.3602, | |
| "step": 5260 | |
| }, | |
| { | |
| "epoch": 0.9031705227077977, | |
| "grad_norm": 0.4132539629936218, | |
| "learning_rate": 3.495001428163382e-05, | |
| "loss": 0.3638, | |
| "step": 5270 | |
| }, | |
| { | |
| "epoch": 0.9048843187660668, | |
| "grad_norm": 0.38507506251335144, | |
| "learning_rate": 3.492145101399601e-05, | |
| "loss": 0.3557, | |
| "step": 5280 | |
| }, | |
| { | |
| "epoch": 0.9065981148243359, | |
| "grad_norm": 0.39408767223358154, | |
| "learning_rate": 3.4892887746358186e-05, | |
| "loss": 0.3544, | |
| "step": 5290 | |
| }, | |
| { | |
| "epoch": 0.908311910882605, | |
| "grad_norm": 0.44993147253990173, | |
| "learning_rate": 3.4864324478720365e-05, | |
| "loss": 0.3619, | |
| "step": 5300 | |
| }, | |
| { | |
| "epoch": 0.910025706940874, | |
| "grad_norm": 0.4015878736972809, | |
| "learning_rate": 3.483576121108255e-05, | |
| "loss": 0.3647, | |
| "step": 5310 | |
| }, | |
| { | |
| "epoch": 0.9117395029991431, | |
| "grad_norm": 0.41548463702201843, | |
| "learning_rate": 3.480719794344473e-05, | |
| "loss": 0.3649, | |
| "step": 5320 | |
| }, | |
| { | |
| "epoch": 0.9134532990574121, | |
| "grad_norm": 0.4296075105667114, | |
| "learning_rate": 3.4778634675806917e-05, | |
| "loss": 0.3688, | |
| "step": 5330 | |
| }, | |
| { | |
| "epoch": 0.9151670951156813, | |
| "grad_norm": 0.40596818923950195, | |
| "learning_rate": 3.4750071408169096e-05, | |
| "loss": 0.3553, | |
| "step": 5340 | |
| }, | |
| { | |
| "epoch": 0.9168808911739503, | |
| "grad_norm": 0.4294643998146057, | |
| "learning_rate": 3.4721508140531275e-05, | |
| "loss": 0.3624, | |
| "step": 5350 | |
| }, | |
| { | |
| "epoch": 0.9185946872322194, | |
| "grad_norm": 0.40626177191734314, | |
| "learning_rate": 3.469294487289346e-05, | |
| "loss": 0.358, | |
| "step": 5360 | |
| }, | |
| { | |
| "epoch": 0.9203084832904884, | |
| "grad_norm": 0.4296109974384308, | |
| "learning_rate": 3.466438160525564e-05, | |
| "loss": 0.3613, | |
| "step": 5370 | |
| }, | |
| { | |
| "epoch": 0.9220222793487575, | |
| "grad_norm": 0.4380834400653839, | |
| "learning_rate": 3.4635818337617826e-05, | |
| "loss": 0.3795, | |
| "step": 5380 | |
| }, | |
| { | |
| "epoch": 0.9237360754070265, | |
| "grad_norm": 0.41026878356933594, | |
| "learning_rate": 3.4607255069980005e-05, | |
| "loss": 0.3748, | |
| "step": 5390 | |
| }, | |
| { | |
| "epoch": 0.9254498714652957, | |
| "grad_norm": 0.4788786768913269, | |
| "learning_rate": 3.457869180234219e-05, | |
| "loss": 0.3606, | |
| "step": 5400 | |
| }, | |
| { | |
| "epoch": 0.9271636675235647, | |
| "grad_norm": 0.40038466453552246, | |
| "learning_rate": 3.455012853470437e-05, | |
| "loss": 0.3885, | |
| "step": 5410 | |
| }, | |
| { | |
| "epoch": 0.9288774635818338, | |
| "grad_norm": 0.4130307734012604, | |
| "learning_rate": 3.452156526706655e-05, | |
| "loss": 0.3796, | |
| "step": 5420 | |
| }, | |
| { | |
| "epoch": 0.9305912596401028, | |
| "grad_norm": 0.4447920024394989, | |
| "learning_rate": 3.4493001999428736e-05, | |
| "loss": 0.3951, | |
| "step": 5430 | |
| }, | |
| { | |
| "epoch": 0.9323050556983719, | |
| "grad_norm": 0.41205018758773804, | |
| "learning_rate": 3.446443873179092e-05, | |
| "loss": 0.3549, | |
| "step": 5440 | |
| }, | |
| { | |
| "epoch": 0.934018851756641, | |
| "grad_norm": 0.4112369418144226, | |
| "learning_rate": 3.44358754641531e-05, | |
| "loss": 0.3634, | |
| "step": 5450 | |
| }, | |
| { | |
| "epoch": 0.9357326478149101, | |
| "grad_norm": 0.3979431390762329, | |
| "learning_rate": 3.440731219651529e-05, | |
| "loss": 0.383, | |
| "step": 5460 | |
| }, | |
| { | |
| "epoch": 0.9374464438731791, | |
| "grad_norm": 0.3819255232810974, | |
| "learning_rate": 3.4378748928877466e-05, | |
| "loss": 0.3705, | |
| "step": 5470 | |
| }, | |
| { | |
| "epoch": 0.9391602399314481, | |
| "grad_norm": 0.35595545172691345, | |
| "learning_rate": 3.4350185661239646e-05, | |
| "loss": 0.3624, | |
| "step": 5480 | |
| }, | |
| { | |
| "epoch": 0.9408740359897172, | |
| "grad_norm": 0.453141450881958, | |
| "learning_rate": 3.432162239360183e-05, | |
| "loss": 0.357, | |
| "step": 5490 | |
| }, | |
| { | |
| "epoch": 0.9425878320479862, | |
| "grad_norm": 0.4668983817100525, | |
| "learning_rate": 3.429305912596401e-05, | |
| "loss": 0.3652, | |
| "step": 5500 | |
| }, | |
| { | |
| "epoch": 0.9443016281062554, | |
| "grad_norm": 0.4746719300746918, | |
| "learning_rate": 3.42644958583262e-05, | |
| "loss": 0.3726, | |
| "step": 5510 | |
| }, | |
| { | |
| "epoch": 0.9460154241645244, | |
| "grad_norm": 0.4136699140071869, | |
| "learning_rate": 3.4235932590688376e-05, | |
| "loss": 0.3664, | |
| "step": 5520 | |
| }, | |
| { | |
| "epoch": 0.9477292202227935, | |
| "grad_norm": 0.50481778383255, | |
| "learning_rate": 3.4207369323050555e-05, | |
| "loss": 0.3729, | |
| "step": 5530 | |
| }, | |
| { | |
| "epoch": 0.9494430162810625, | |
| "grad_norm": 0.43926066160202026, | |
| "learning_rate": 3.417880605541274e-05, | |
| "loss": 0.3689, | |
| "step": 5540 | |
| }, | |
| { | |
| "epoch": 0.9511568123393316, | |
| "grad_norm": 0.3832367956638336, | |
| "learning_rate": 3.415024278777492e-05, | |
| "loss": 0.3679, | |
| "step": 5550 | |
| }, | |
| { | |
| "epoch": 0.9528706083976007, | |
| "grad_norm": 0.46387988328933716, | |
| "learning_rate": 3.412167952013711e-05, | |
| "loss": 0.3538, | |
| "step": 5560 | |
| }, | |
| { | |
| "epoch": 0.9545844044558698, | |
| "grad_norm": 0.4562791585922241, | |
| "learning_rate": 3.4093116252499286e-05, | |
| "loss": 0.3631, | |
| "step": 5570 | |
| }, | |
| { | |
| "epoch": 0.9562982005141388, | |
| "grad_norm": 0.436279833316803, | |
| "learning_rate": 3.406455298486147e-05, | |
| "loss": 0.373, | |
| "step": 5580 | |
| }, | |
| { | |
| "epoch": 0.9580119965724079, | |
| "grad_norm": 0.4309545159339905, | |
| "learning_rate": 3.403598971722365e-05, | |
| "loss": 0.3688, | |
| "step": 5590 | |
| }, | |
| { | |
| "epoch": 0.9597257926306769, | |
| "grad_norm": 0.4345686435699463, | |
| "learning_rate": 3.400742644958583e-05, | |
| "loss": 0.3767, | |
| "step": 5600 | |
| }, | |
| { | |
| "epoch": 0.961439588688946, | |
| "grad_norm": 0.47498658299446106, | |
| "learning_rate": 3.3978863181948016e-05, | |
| "loss": 0.3624, | |
| "step": 5610 | |
| }, | |
| { | |
| "epoch": 0.9631533847472151, | |
| "grad_norm": 0.5105198621749878, | |
| "learning_rate": 3.3950299914310196e-05, | |
| "loss": 0.3579, | |
| "step": 5620 | |
| }, | |
| { | |
| "epoch": 0.9648671808054842, | |
| "grad_norm": 0.42196258902549744, | |
| "learning_rate": 3.392173664667238e-05, | |
| "loss": 0.3641, | |
| "step": 5630 | |
| }, | |
| { | |
| "epoch": 0.9665809768637532, | |
| "grad_norm": 0.4690169095993042, | |
| "learning_rate": 3.389317337903457e-05, | |
| "loss": 0.385, | |
| "step": 5640 | |
| }, | |
| { | |
| "epoch": 0.9682947729220223, | |
| "grad_norm": 0.3849544823169708, | |
| "learning_rate": 3.386461011139675e-05, | |
| "loss": 0.3712, | |
| "step": 5650 | |
| }, | |
| { | |
| "epoch": 0.9700085689802913, | |
| "grad_norm": 0.459349125623703, | |
| "learning_rate": 3.3836046843758926e-05, | |
| "loss": 0.3841, | |
| "step": 5660 | |
| }, | |
| { | |
| "epoch": 0.9717223650385605, | |
| "grad_norm": 0.4056195020675659, | |
| "learning_rate": 3.3807483576121105e-05, | |
| "loss": 0.3581, | |
| "step": 5670 | |
| }, | |
| { | |
| "epoch": 0.9734361610968295, | |
| "grad_norm": 0.46903523802757263, | |
| "learning_rate": 3.377892030848329e-05, | |
| "loss": 0.3661, | |
| "step": 5680 | |
| }, | |
| { | |
| "epoch": 0.9751499571550986, | |
| "grad_norm": 0.41805553436279297, | |
| "learning_rate": 3.375035704084548e-05, | |
| "loss": 0.3711, | |
| "step": 5690 | |
| }, | |
| { | |
| "epoch": 0.9768637532133676, | |
| "grad_norm": 0.4520112872123718, | |
| "learning_rate": 3.372179377320766e-05, | |
| "loss": 0.3554, | |
| "step": 5700 | |
| }, | |
| { | |
| "epoch": 0.9785775492716366, | |
| "grad_norm": 0.4656761884689331, | |
| "learning_rate": 3.369323050556984e-05, | |
| "loss": 0.3681, | |
| "step": 5710 | |
| }, | |
| { | |
| "epoch": 0.9802913453299057, | |
| "grad_norm": 0.4471518099308014, | |
| "learning_rate": 3.3664667237932015e-05, | |
| "loss": 0.3576, | |
| "step": 5720 | |
| }, | |
| { | |
| "epoch": 0.9820051413881749, | |
| "grad_norm": 0.4089897572994232, | |
| "learning_rate": 3.36361039702942e-05, | |
| "loss": 0.3647, | |
| "step": 5730 | |
| }, | |
| { | |
| "epoch": 0.9837189374464439, | |
| "grad_norm": 0.40042605996131897, | |
| "learning_rate": 3.360754070265639e-05, | |
| "loss": 0.3538, | |
| "step": 5740 | |
| }, | |
| { | |
| "epoch": 0.9854327335047129, | |
| "grad_norm": 0.4252929985523224, | |
| "learning_rate": 3.3578977435018566e-05, | |
| "loss": 0.3705, | |
| "step": 5750 | |
| }, | |
| { | |
| "epoch": 0.987146529562982, | |
| "grad_norm": 0.4239642024040222, | |
| "learning_rate": 3.355041416738075e-05, | |
| "loss": 0.3459, | |
| "step": 5760 | |
| }, | |
| { | |
| "epoch": 0.988860325621251, | |
| "grad_norm": 0.42976200580596924, | |
| "learning_rate": 3.352185089974293e-05, | |
| "loss": 0.3481, | |
| "step": 5770 | |
| }, | |
| { | |
| "epoch": 0.9905741216795202, | |
| "grad_norm": 0.4923042058944702, | |
| "learning_rate": 3.349328763210511e-05, | |
| "loss": 0.3704, | |
| "step": 5780 | |
| }, | |
| { | |
| "epoch": 0.9922879177377892, | |
| "grad_norm": 0.44239214062690735, | |
| "learning_rate": 3.34647243644673e-05, | |
| "loss": 0.3615, | |
| "step": 5790 | |
| }, | |
| { | |
| "epoch": 0.9940017137960583, | |
| "grad_norm": 0.4351910948753357, | |
| "learning_rate": 3.3436161096829476e-05, | |
| "loss": 0.3767, | |
| "step": 5800 | |
| }, | |
| { | |
| "epoch": 0.9957155098543273, | |
| "grad_norm": 0.4020942151546478, | |
| "learning_rate": 3.340759782919166e-05, | |
| "loss": 0.3824, | |
| "step": 5810 | |
| }, | |
| { | |
| "epoch": 0.9974293059125964, | |
| "grad_norm": 0.4038684070110321, | |
| "learning_rate": 3.337903456155385e-05, | |
| "loss": 0.3643, | |
| "step": 5820 | |
| }, | |
| { | |
| "epoch": 0.9991431019708654, | |
| "grad_norm": 0.3742600679397583, | |
| "learning_rate": 3.335047129391603e-05, | |
| "loss": 0.3374, | |
| "step": 5830 | |
| }, | |
| { | |
| "epoch": 1.0008568980291346, | |
| "grad_norm": 0.45896250009536743, | |
| "learning_rate": 3.332190802627821e-05, | |
| "loss": 0.3483, | |
| "step": 5840 | |
| }, | |
| { | |
| "epoch": 1.0025706940874035, | |
| "grad_norm": 0.43760430812835693, | |
| "learning_rate": 3.3293344758640386e-05, | |
| "loss": 0.3359, | |
| "step": 5850 | |
| }, | |
| { | |
| "epoch": 1.0042844901456727, | |
| "grad_norm": 0.43930524587631226, | |
| "learning_rate": 3.326478149100257e-05, | |
| "loss": 0.3547, | |
| "step": 5860 | |
| }, | |
| { | |
| "epoch": 1.0059982862039418, | |
| "grad_norm": 0.47293394804000854, | |
| "learning_rate": 3.323621822336476e-05, | |
| "loss": 0.3548, | |
| "step": 5870 | |
| }, | |
| { | |
| "epoch": 1.0077120822622108, | |
| "grad_norm": 0.422246515750885, | |
| "learning_rate": 3.320765495572694e-05, | |
| "loss": 0.3536, | |
| "step": 5880 | |
| }, | |
| { | |
| "epoch": 1.00942587832048, | |
| "grad_norm": 0.3768950402736664, | |
| "learning_rate": 3.317909168808912e-05, | |
| "loss": 0.3375, | |
| "step": 5890 | |
| }, | |
| { | |
| "epoch": 1.0111396743787489, | |
| "grad_norm": 0.40679293870925903, | |
| "learning_rate": 3.3150528420451296e-05, | |
| "loss": 0.3557, | |
| "step": 5900 | |
| }, | |
| { | |
| "epoch": 1.012853470437018, | |
| "grad_norm": 0.42642706632614136, | |
| "learning_rate": 3.312196515281348e-05, | |
| "loss": 0.372, | |
| "step": 5910 | |
| }, | |
| { | |
| "epoch": 1.0145672664952872, | |
| "grad_norm": 0.46030041575431824, | |
| "learning_rate": 3.309340188517567e-05, | |
| "loss": 0.3575, | |
| "step": 5920 | |
| }, | |
| { | |
| "epoch": 1.016281062553556, | |
| "grad_norm": 0.40845292806625366, | |
| "learning_rate": 3.306483861753785e-05, | |
| "loss": 0.3424, | |
| "step": 5930 | |
| }, | |
| { | |
| "epoch": 1.0179948586118253, | |
| "grad_norm": 0.4577491879463196, | |
| "learning_rate": 3.303627534990003e-05, | |
| "loss": 0.3494, | |
| "step": 5940 | |
| }, | |
| { | |
| "epoch": 1.0197086546700942, | |
| "grad_norm": 0.42184576392173767, | |
| "learning_rate": 3.300771208226221e-05, | |
| "loss": 0.3597, | |
| "step": 5950 | |
| }, | |
| { | |
| "epoch": 1.0214224507283634, | |
| "grad_norm": 0.44896042346954346, | |
| "learning_rate": 3.297914881462439e-05, | |
| "loss": 0.3453, | |
| "step": 5960 | |
| }, | |
| { | |
| "epoch": 1.0231362467866323, | |
| "grad_norm": 0.41005054116249084, | |
| "learning_rate": 3.295058554698658e-05, | |
| "loss": 0.353, | |
| "step": 5970 | |
| }, | |
| { | |
| "epoch": 1.0248500428449014, | |
| "grad_norm": 0.38638490438461304, | |
| "learning_rate": 3.2922022279348757e-05, | |
| "loss": 0.339, | |
| "step": 5980 | |
| }, | |
| { | |
| "epoch": 1.0265638389031706, | |
| "grad_norm": 0.4330446422100067, | |
| "learning_rate": 3.289345901171094e-05, | |
| "loss": 0.3291, | |
| "step": 5990 | |
| }, | |
| { | |
| "epoch": 1.0282776349614395, | |
| "grad_norm": 0.38331279158592224, | |
| "learning_rate": 3.286489574407312e-05, | |
| "loss": 0.3667, | |
| "step": 6000 | |
| }, | |
| { | |
| "epoch": 1.0299914310197087, | |
| "grad_norm": 0.42192813754081726, | |
| "learning_rate": 3.283633247643531e-05, | |
| "loss": 0.3419, | |
| "step": 6010 | |
| }, | |
| { | |
| "epoch": 1.0317052270779776, | |
| "grad_norm": 0.467171847820282, | |
| "learning_rate": 3.280776920879749e-05, | |
| "loss": 0.3398, | |
| "step": 6020 | |
| }, | |
| { | |
| "epoch": 1.0334190231362468, | |
| "grad_norm": 0.4578188359737396, | |
| "learning_rate": 3.2779205941159666e-05, | |
| "loss": 0.3555, | |
| "step": 6030 | |
| }, | |
| { | |
| "epoch": 1.035132819194516, | |
| "grad_norm": 0.3848859369754791, | |
| "learning_rate": 3.275064267352185e-05, | |
| "loss": 0.3585, | |
| "step": 6040 | |
| }, | |
| { | |
| "epoch": 1.0368466152527849, | |
| "grad_norm": 0.41844481229782104, | |
| "learning_rate": 3.272207940588404e-05, | |
| "loss": 0.3536, | |
| "step": 6050 | |
| }, | |
| { | |
| "epoch": 1.038560411311054, | |
| "grad_norm": 0.4219590425491333, | |
| "learning_rate": 3.269351613824622e-05, | |
| "loss": 0.3346, | |
| "step": 6060 | |
| }, | |
| { | |
| "epoch": 1.040274207369323, | |
| "grad_norm": 0.397193044424057, | |
| "learning_rate": 3.2664952870608404e-05, | |
| "loss": 0.3524, | |
| "step": 6070 | |
| }, | |
| { | |
| "epoch": 1.0419880034275921, | |
| "grad_norm": 0.3962811231613159, | |
| "learning_rate": 3.263638960297058e-05, | |
| "loss": 0.3528, | |
| "step": 6080 | |
| }, | |
| { | |
| "epoch": 1.0437017994858613, | |
| "grad_norm": 0.42967748641967773, | |
| "learning_rate": 3.260782633533276e-05, | |
| "loss": 0.3452, | |
| "step": 6090 | |
| }, | |
| { | |
| "epoch": 1.0454155955441302, | |
| "grad_norm": 0.46409425139427185, | |
| "learning_rate": 3.257926306769495e-05, | |
| "loss": 0.3503, | |
| "step": 6100 | |
| }, | |
| { | |
| "epoch": 1.0471293916023994, | |
| "grad_norm": 0.49521201848983765, | |
| "learning_rate": 3.255069980005713e-05, | |
| "loss": 0.3545, | |
| "step": 6110 | |
| }, | |
| { | |
| "epoch": 1.0488431876606683, | |
| "grad_norm": 0.4241666793823242, | |
| "learning_rate": 3.252213653241931e-05, | |
| "loss": 0.351, | |
| "step": 6120 | |
| }, | |
| { | |
| "epoch": 1.0505569837189375, | |
| "grad_norm": 0.47557422518730164, | |
| "learning_rate": 3.249357326478149e-05, | |
| "loss": 0.3474, | |
| "step": 6130 | |
| }, | |
| { | |
| "epoch": 1.0522707797772064, | |
| "grad_norm": 0.4359321594238281, | |
| "learning_rate": 3.246500999714367e-05, | |
| "loss": 0.3382, | |
| "step": 6140 | |
| }, | |
| { | |
| "epoch": 1.0539845758354756, | |
| "grad_norm": 0.4463767409324646, | |
| "learning_rate": 3.243644672950586e-05, | |
| "loss": 0.3409, | |
| "step": 6150 | |
| }, | |
| { | |
| "epoch": 1.0556983718937447, | |
| "grad_norm": 0.3884042501449585, | |
| "learning_rate": 3.240788346186804e-05, | |
| "loss": 0.3382, | |
| "step": 6160 | |
| }, | |
| { | |
| "epoch": 1.0574121679520136, | |
| "grad_norm": 0.47423774003982544, | |
| "learning_rate": 3.237932019423022e-05, | |
| "loss": 0.3439, | |
| "step": 6170 | |
| }, | |
| { | |
| "epoch": 1.0591259640102828, | |
| "grad_norm": 0.4362853765487671, | |
| "learning_rate": 3.23507569265924e-05, | |
| "loss": 0.345, | |
| "step": 6180 | |
| }, | |
| { | |
| "epoch": 1.0608397600685517, | |
| "grad_norm": 0.43055716156959534, | |
| "learning_rate": 3.232219365895459e-05, | |
| "loss": 0.3622, | |
| "step": 6190 | |
| }, | |
| { | |
| "epoch": 1.062553556126821, | |
| "grad_norm": 0.4222048819065094, | |
| "learning_rate": 3.229363039131677e-05, | |
| "loss": 0.353, | |
| "step": 6200 | |
| }, | |
| { | |
| "epoch": 1.06426735218509, | |
| "grad_norm": 0.42239975929260254, | |
| "learning_rate": 3.226506712367895e-05, | |
| "loss": 0.3646, | |
| "step": 6210 | |
| }, | |
| { | |
| "epoch": 1.065981148243359, | |
| "grad_norm": 0.43706634640693665, | |
| "learning_rate": 3.223650385604113e-05, | |
| "loss": 0.3551, | |
| "step": 6220 | |
| }, | |
| { | |
| "epoch": 1.0676949443016281, | |
| "grad_norm": 0.4561137557029724, | |
| "learning_rate": 3.220794058840331e-05, | |
| "loss": 0.3494, | |
| "step": 6230 | |
| }, | |
| { | |
| "epoch": 1.069408740359897, | |
| "grad_norm": 0.4455920159816742, | |
| "learning_rate": 3.21793773207655e-05, | |
| "loss": 0.3396, | |
| "step": 6240 | |
| }, | |
| { | |
| "epoch": 1.0711225364181662, | |
| "grad_norm": 0.4367483854293823, | |
| "learning_rate": 3.2150814053127684e-05, | |
| "loss": 0.3551, | |
| "step": 6250 | |
| }, | |
| { | |
| "epoch": 1.0728363324764354, | |
| "grad_norm": 0.40412530303001404, | |
| "learning_rate": 3.212225078548986e-05, | |
| "loss": 0.3404, | |
| "step": 6260 | |
| }, | |
| { | |
| "epoch": 1.0745501285347043, | |
| "grad_norm": 0.4885301887989044, | |
| "learning_rate": 3.209368751785204e-05, | |
| "loss": 0.3493, | |
| "step": 6270 | |
| }, | |
| { | |
| "epoch": 1.0762639245929735, | |
| "grad_norm": 0.4218777120113373, | |
| "learning_rate": 3.206512425021422e-05, | |
| "loss": 0.371, | |
| "step": 6280 | |
| }, | |
| { | |
| "epoch": 1.0779777206512424, | |
| "grad_norm": 0.423092782497406, | |
| "learning_rate": 3.203656098257641e-05, | |
| "loss": 0.3632, | |
| "step": 6290 | |
| }, | |
| { | |
| "epoch": 1.0796915167095116, | |
| "grad_norm": 0.4192599654197693, | |
| "learning_rate": 3.2007997714938594e-05, | |
| "loss": 0.3495, | |
| "step": 6300 | |
| }, | |
| { | |
| "epoch": 1.0814053127677807, | |
| "grad_norm": 0.4132305085659027, | |
| "learning_rate": 3.197943444730077e-05, | |
| "loss": 0.3623, | |
| "step": 6310 | |
| }, | |
| { | |
| "epoch": 1.0831191088260497, | |
| "grad_norm": 0.4898054897785187, | |
| "learning_rate": 3.195087117966295e-05, | |
| "loss": 0.3417, | |
| "step": 6320 | |
| }, | |
| { | |
| "epoch": 1.0848329048843188, | |
| "grad_norm": 0.4401434063911438, | |
| "learning_rate": 3.192230791202513e-05, | |
| "loss": 0.361, | |
| "step": 6330 | |
| }, | |
| { | |
| "epoch": 1.0865467009425878, | |
| "grad_norm": 0.41742950677871704, | |
| "learning_rate": 3.189374464438732e-05, | |
| "loss": 0.3634, | |
| "step": 6340 | |
| }, | |
| { | |
| "epoch": 1.088260497000857, | |
| "grad_norm": 0.3645254075527191, | |
| "learning_rate": 3.1865181376749504e-05, | |
| "loss": 0.3335, | |
| "step": 6350 | |
| }, | |
| { | |
| "epoch": 1.089974293059126, | |
| "grad_norm": 0.3742375671863556, | |
| "learning_rate": 3.183661810911168e-05, | |
| "loss": 0.3526, | |
| "step": 6360 | |
| }, | |
| { | |
| "epoch": 1.091688089117395, | |
| "grad_norm": 0.42631155252456665, | |
| "learning_rate": 3.180805484147387e-05, | |
| "loss": 0.3387, | |
| "step": 6370 | |
| }, | |
| { | |
| "epoch": 1.0934018851756642, | |
| "grad_norm": 0.41597044467926025, | |
| "learning_rate": 3.177949157383605e-05, | |
| "loss": 0.3566, | |
| "step": 6380 | |
| }, | |
| { | |
| "epoch": 1.095115681233933, | |
| "grad_norm": 0.41658124327659607, | |
| "learning_rate": 3.175092830619823e-05, | |
| "loss": 0.3534, | |
| "step": 6390 | |
| }, | |
| { | |
| "epoch": 1.0968294772922023, | |
| "grad_norm": 0.4091750681400299, | |
| "learning_rate": 3.172236503856041e-05, | |
| "loss": 0.3526, | |
| "step": 6400 | |
| }, | |
| { | |
| "epoch": 1.0985432733504712, | |
| "grad_norm": 0.4087865948677063, | |
| "learning_rate": 3.169380177092259e-05, | |
| "loss": 0.3258, | |
| "step": 6410 | |
| }, | |
| { | |
| "epoch": 1.1002570694087404, | |
| "grad_norm": 0.4153346121311188, | |
| "learning_rate": 3.166523850328478e-05, | |
| "loss": 0.3547, | |
| "step": 6420 | |
| }, | |
| { | |
| "epoch": 1.1019708654670095, | |
| "grad_norm": 0.43321630358695984, | |
| "learning_rate": 3.1636675235646965e-05, | |
| "loss": 0.3489, | |
| "step": 6430 | |
| }, | |
| { | |
| "epoch": 1.1036846615252784, | |
| "grad_norm": 0.42528247833251953, | |
| "learning_rate": 3.1608111968009144e-05, | |
| "loss": 0.3463, | |
| "step": 6440 | |
| }, | |
| { | |
| "epoch": 1.1053984575835476, | |
| "grad_norm": 0.3978523015975952, | |
| "learning_rate": 3.157954870037132e-05, | |
| "loss": 0.3506, | |
| "step": 6450 | |
| }, | |
| { | |
| "epoch": 1.1071122536418165, | |
| "grad_norm": 0.48159298300743103, | |
| "learning_rate": 3.15509854327335e-05, | |
| "loss": 0.3726, | |
| "step": 6460 | |
| }, | |
| { | |
| "epoch": 1.1088260497000857, | |
| "grad_norm": 0.4103160798549652, | |
| "learning_rate": 3.152242216509569e-05, | |
| "loss": 0.3409, | |
| "step": 6470 | |
| }, | |
| { | |
| "epoch": 1.1105398457583548, | |
| "grad_norm": 0.496094673871994, | |
| "learning_rate": 3.1493858897457874e-05, | |
| "loss": 0.3455, | |
| "step": 6480 | |
| }, | |
| { | |
| "epoch": 1.1122536418166238, | |
| "grad_norm": 0.4564761519432068, | |
| "learning_rate": 3.1465295629820053e-05, | |
| "loss": 0.3513, | |
| "step": 6490 | |
| }, | |
| { | |
| "epoch": 1.113967437874893, | |
| "grad_norm": 0.4227219223976135, | |
| "learning_rate": 3.143673236218224e-05, | |
| "loss": 0.3581, | |
| "step": 6500 | |
| }, | |
| { | |
| "epoch": 1.1156812339331619, | |
| "grad_norm": 0.42185524106025696, | |
| "learning_rate": 3.140816909454441e-05, | |
| "loss": 0.3514, | |
| "step": 6510 | |
| }, | |
| { | |
| "epoch": 1.117395029991431, | |
| "grad_norm": 0.4504411816596985, | |
| "learning_rate": 3.13796058269066e-05, | |
| "loss": 0.3564, | |
| "step": 6520 | |
| }, | |
| { | |
| "epoch": 1.1191088260497002, | |
| "grad_norm": 0.4239251911640167, | |
| "learning_rate": 3.1351042559268784e-05, | |
| "loss": 0.3483, | |
| "step": 6530 | |
| }, | |
| { | |
| "epoch": 1.1208226221079691, | |
| "grad_norm": 0.42364707589149475, | |
| "learning_rate": 3.132247929163096e-05, | |
| "loss": 0.3585, | |
| "step": 6540 | |
| }, | |
| { | |
| "epoch": 1.1225364181662383, | |
| "grad_norm": 0.40176278352737427, | |
| "learning_rate": 3.129391602399315e-05, | |
| "loss": 0.3432, | |
| "step": 6550 | |
| }, | |
| { | |
| "epoch": 1.1242502142245072, | |
| "grad_norm": 0.42972254753112793, | |
| "learning_rate": 3.126535275635533e-05, | |
| "loss": 0.3632, | |
| "step": 6560 | |
| }, | |
| { | |
| "epoch": 1.1259640102827764, | |
| "grad_norm": 0.4208223223686218, | |
| "learning_rate": 3.123678948871751e-05, | |
| "loss": 0.3487, | |
| "step": 6570 | |
| }, | |
| { | |
| "epoch": 1.1276778063410453, | |
| "grad_norm": 0.41982948780059814, | |
| "learning_rate": 3.1208226221079694e-05, | |
| "loss": 0.3448, | |
| "step": 6580 | |
| }, | |
| { | |
| "epoch": 1.1293916023993145, | |
| "grad_norm": 0.3786119222640991, | |
| "learning_rate": 3.117966295344187e-05, | |
| "loss": 0.3532, | |
| "step": 6590 | |
| }, | |
| { | |
| "epoch": 1.1311053984575836, | |
| "grad_norm": 0.39533334970474243, | |
| "learning_rate": 3.115109968580406e-05, | |
| "loss": 0.3499, | |
| "step": 6600 | |
| }, | |
| { | |
| "epoch": 1.1328191945158526, | |
| "grad_norm": 0.4255177676677704, | |
| "learning_rate": 3.112253641816624e-05, | |
| "loss": 0.3401, | |
| "step": 6610 | |
| }, | |
| { | |
| "epoch": 1.1345329905741217, | |
| "grad_norm": 0.4227048456668854, | |
| "learning_rate": 3.1093973150528424e-05, | |
| "loss": 0.3589, | |
| "step": 6620 | |
| }, | |
| { | |
| "epoch": 1.1362467866323906, | |
| "grad_norm": 0.4483916461467743, | |
| "learning_rate": 3.1065409882890603e-05, | |
| "loss": 0.3547, | |
| "step": 6630 | |
| }, | |
| { | |
| "epoch": 1.1379605826906598, | |
| "grad_norm": 0.4231320917606354, | |
| "learning_rate": 3.103684661525278e-05, | |
| "loss": 0.3588, | |
| "step": 6640 | |
| }, | |
| { | |
| "epoch": 1.139674378748929, | |
| "grad_norm": 0.4227351248264313, | |
| "learning_rate": 3.100828334761497e-05, | |
| "loss": 0.3455, | |
| "step": 6650 | |
| }, | |
| { | |
| "epoch": 1.141388174807198, | |
| "grad_norm": 0.4091714322566986, | |
| "learning_rate": 3.0979720079977155e-05, | |
| "loss": 0.3476, | |
| "step": 6660 | |
| }, | |
| { | |
| "epoch": 1.143101970865467, | |
| "grad_norm": 0.4448917508125305, | |
| "learning_rate": 3.0951156812339334e-05, | |
| "loss": 0.3498, | |
| "step": 6670 | |
| }, | |
| { | |
| "epoch": 1.144815766923736, | |
| "grad_norm": 0.39556825160980225, | |
| "learning_rate": 3.092259354470152e-05, | |
| "loss": 0.3586, | |
| "step": 6680 | |
| }, | |
| { | |
| "epoch": 1.1465295629820051, | |
| "grad_norm": 0.44014015793800354, | |
| "learning_rate": 3.089403027706369e-05, | |
| "loss": 0.3523, | |
| "step": 6690 | |
| }, | |
| { | |
| "epoch": 1.1482433590402743, | |
| "grad_norm": 0.4422236382961273, | |
| "learning_rate": 3.086546700942588e-05, | |
| "loss": 0.3433, | |
| "step": 6700 | |
| }, | |
| { | |
| "epoch": 1.1499571550985432, | |
| "grad_norm": 0.4930676519870758, | |
| "learning_rate": 3.0836903741788064e-05, | |
| "loss": 0.3565, | |
| "step": 6710 | |
| }, | |
| { | |
| "epoch": 1.1516709511568124, | |
| "grad_norm": 0.4265972077846527, | |
| "learning_rate": 3.0808340474150244e-05, | |
| "loss": 0.3353, | |
| "step": 6720 | |
| }, | |
| { | |
| "epoch": 1.1533847472150813, | |
| "grad_norm": 0.4512198865413666, | |
| "learning_rate": 3.077977720651243e-05, | |
| "loss": 0.3382, | |
| "step": 6730 | |
| }, | |
| { | |
| "epoch": 1.1550985432733505, | |
| "grad_norm": 0.435465931892395, | |
| "learning_rate": 3.075121393887461e-05, | |
| "loss": 0.3454, | |
| "step": 6740 | |
| }, | |
| { | |
| "epoch": 1.1568123393316196, | |
| "grad_norm": 0.42844897508621216, | |
| "learning_rate": 3.072265067123679e-05, | |
| "loss": 0.3518, | |
| "step": 6750 | |
| }, | |
| { | |
| "epoch": 1.1585261353898886, | |
| "grad_norm": 0.3897634446620941, | |
| "learning_rate": 3.0694087403598974e-05, | |
| "loss": 0.3495, | |
| "step": 6760 | |
| }, | |
| { | |
| "epoch": 1.1602399314481577, | |
| "grad_norm": 0.46333223581314087, | |
| "learning_rate": 3.0665524135961153e-05, | |
| "loss": 0.3489, | |
| "step": 6770 | |
| }, | |
| { | |
| "epoch": 1.1619537275064267, | |
| "grad_norm": 0.4132658541202545, | |
| "learning_rate": 3.063696086832334e-05, | |
| "loss": 0.3506, | |
| "step": 6780 | |
| }, | |
| { | |
| "epoch": 1.1636675235646958, | |
| "grad_norm": 0.46667787432670593, | |
| "learning_rate": 3.060839760068552e-05, | |
| "loss": 0.3551, | |
| "step": 6790 | |
| }, | |
| { | |
| "epoch": 1.165381319622965, | |
| "grad_norm": 0.3969384729862213, | |
| "learning_rate": 3.0579834333047705e-05, | |
| "loss": 0.3578, | |
| "step": 6800 | |
| }, | |
| { | |
| "epoch": 1.167095115681234, | |
| "grad_norm": 0.39458054304122925, | |
| "learning_rate": 3.0551271065409884e-05, | |
| "loss": 0.3459, | |
| "step": 6810 | |
| }, | |
| { | |
| "epoch": 1.168808911739503, | |
| "grad_norm": 0.40578460693359375, | |
| "learning_rate": 3.052270779777206e-05, | |
| "loss": 0.3339, | |
| "step": 6820 | |
| }, | |
| { | |
| "epoch": 1.170522707797772, | |
| "grad_norm": 0.3973940908908844, | |
| "learning_rate": 3.049414453013425e-05, | |
| "loss": 0.3537, | |
| "step": 6830 | |
| }, | |
| { | |
| "epoch": 1.1722365038560412, | |
| "grad_norm": 0.4285309314727783, | |
| "learning_rate": 3.046558126249643e-05, | |
| "loss": 0.3503, | |
| "step": 6840 | |
| }, | |
| { | |
| "epoch": 1.17395029991431, | |
| "grad_norm": 0.40150052309036255, | |
| "learning_rate": 3.0437017994858614e-05, | |
| "loss": 0.3427, | |
| "step": 6850 | |
| }, | |
| { | |
| "epoch": 1.1756640959725793, | |
| "grad_norm": 0.4046010971069336, | |
| "learning_rate": 3.0408454727220797e-05, | |
| "loss": 0.3417, | |
| "step": 6860 | |
| }, | |
| { | |
| "epoch": 1.1773778920308484, | |
| "grad_norm": 0.42314770817756653, | |
| "learning_rate": 3.0379891459582976e-05, | |
| "loss": 0.3351, | |
| "step": 6870 | |
| }, | |
| { | |
| "epoch": 1.1790916880891174, | |
| "grad_norm": 0.42427992820739746, | |
| "learning_rate": 3.0351328191945162e-05, | |
| "loss": 0.3499, | |
| "step": 6880 | |
| }, | |
| { | |
| "epoch": 1.1808054841473865, | |
| "grad_norm": 0.42444947361946106, | |
| "learning_rate": 3.0322764924307338e-05, | |
| "loss": 0.348, | |
| "step": 6890 | |
| }, | |
| { | |
| "epoch": 1.1825192802056554, | |
| "grad_norm": 0.41960152983665466, | |
| "learning_rate": 3.0294201656669524e-05, | |
| "loss": 0.3464, | |
| "step": 6900 | |
| }, | |
| { | |
| "epoch": 1.1842330762639246, | |
| "grad_norm": 0.42055803537368774, | |
| "learning_rate": 3.026563838903171e-05, | |
| "loss": 0.3442, | |
| "step": 6910 | |
| }, | |
| { | |
| "epoch": 1.1859468723221935, | |
| "grad_norm": 0.3928168714046478, | |
| "learning_rate": 3.0237075121393886e-05, | |
| "loss": 0.3548, | |
| "step": 6920 | |
| }, | |
| { | |
| "epoch": 1.1876606683804627, | |
| "grad_norm": 0.5035582184791565, | |
| "learning_rate": 3.0208511853756072e-05, | |
| "loss": 0.3495, | |
| "step": 6930 | |
| }, | |
| { | |
| "epoch": 1.1893744644387318, | |
| "grad_norm": 0.4152749180793762, | |
| "learning_rate": 3.017994858611825e-05, | |
| "loss": 0.3502, | |
| "step": 6940 | |
| }, | |
| { | |
| "epoch": 1.1910882604970008, | |
| "grad_norm": 0.45122581720352173, | |
| "learning_rate": 3.0151385318480434e-05, | |
| "loss": 0.3553, | |
| "step": 6950 | |
| }, | |
| { | |
| "epoch": 1.19280205655527, | |
| "grad_norm": 0.401976078748703, | |
| "learning_rate": 3.012282205084262e-05, | |
| "loss": 0.3457, | |
| "step": 6960 | |
| }, | |
| { | |
| "epoch": 1.1945158526135389, | |
| "grad_norm": 0.44023826718330383, | |
| "learning_rate": 3.00942587832048e-05, | |
| "loss": 0.344, | |
| "step": 6970 | |
| }, | |
| { | |
| "epoch": 1.196229648671808, | |
| "grad_norm": 0.4113614559173584, | |
| "learning_rate": 3.0065695515566982e-05, | |
| "loss": 0.3554, | |
| "step": 6980 | |
| }, | |
| { | |
| "epoch": 1.1979434447300772, | |
| "grad_norm": 0.43389397859573364, | |
| "learning_rate": 3.0037132247929168e-05, | |
| "loss": 0.337, | |
| "step": 6990 | |
| }, | |
| { | |
| "epoch": 1.1996572407883461, | |
| "grad_norm": 0.45042628049850464, | |
| "learning_rate": 3.0008568980291347e-05, | |
| "loss": 0.3447, | |
| "step": 7000 | |
| }, | |
| { | |
| "epoch": 1.2013710368466153, | |
| "grad_norm": 0.4491696059703827, | |
| "learning_rate": 2.998000571265353e-05, | |
| "loss": 0.3471, | |
| "step": 7010 | |
| }, | |
| { | |
| "epoch": 1.2030848329048842, | |
| "grad_norm": 0.41832613945007324, | |
| "learning_rate": 2.995144244501571e-05, | |
| "loss": 0.3537, | |
| "step": 7020 | |
| }, | |
| { | |
| "epoch": 1.2047986289631534, | |
| "grad_norm": 0.4287663996219635, | |
| "learning_rate": 2.9922879177377895e-05, | |
| "loss": 0.3501, | |
| "step": 7030 | |
| }, | |
| { | |
| "epoch": 1.2065124250214225, | |
| "grad_norm": 0.41054508090019226, | |
| "learning_rate": 2.9894315909740077e-05, | |
| "loss": 0.3476, | |
| "step": 7040 | |
| }, | |
| { | |
| "epoch": 1.2082262210796915, | |
| "grad_norm": 0.4107612371444702, | |
| "learning_rate": 2.9865752642102257e-05, | |
| "loss": 0.3445, | |
| "step": 7050 | |
| }, | |
| { | |
| "epoch": 1.2099400171379606, | |
| "grad_norm": 0.4279773235321045, | |
| "learning_rate": 2.9837189374464443e-05, | |
| "loss": 0.3731, | |
| "step": 7060 | |
| }, | |
| { | |
| "epoch": 1.2116538131962296, | |
| "grad_norm": 0.5119055509567261, | |
| "learning_rate": 2.9808626106826622e-05, | |
| "loss": 0.3352, | |
| "step": 7070 | |
| }, | |
| { | |
| "epoch": 1.2133676092544987, | |
| "grad_norm": 0.41452860832214355, | |
| "learning_rate": 2.9780062839188805e-05, | |
| "loss": 0.3418, | |
| "step": 7080 | |
| }, | |
| { | |
| "epoch": 1.2150814053127679, | |
| "grad_norm": 0.42358824610710144, | |
| "learning_rate": 2.975149957155099e-05, | |
| "loss": 0.349, | |
| "step": 7090 | |
| }, | |
| { | |
| "epoch": 1.2167952013710368, | |
| "grad_norm": 0.3853763937950134, | |
| "learning_rate": 2.9722936303913166e-05, | |
| "loss": 0.3491, | |
| "step": 7100 | |
| }, | |
| { | |
| "epoch": 1.218508997429306, | |
| "grad_norm": 0.4241725206375122, | |
| "learning_rate": 2.9694373036275352e-05, | |
| "loss": 0.3396, | |
| "step": 7110 | |
| }, | |
| { | |
| "epoch": 1.220222793487575, | |
| "grad_norm": 0.4051187336444855, | |
| "learning_rate": 2.9665809768637532e-05, | |
| "loss": 0.3477, | |
| "step": 7120 | |
| }, | |
| { | |
| "epoch": 1.221936589545844, | |
| "grad_norm": 0.41006556153297424, | |
| "learning_rate": 2.9637246500999714e-05, | |
| "loss": 0.3374, | |
| "step": 7130 | |
| }, | |
| { | |
| "epoch": 1.2236503856041132, | |
| "grad_norm": 0.4366990327835083, | |
| "learning_rate": 2.96086832333619e-05, | |
| "loss": 0.3537, | |
| "step": 7140 | |
| }, | |
| { | |
| "epoch": 1.2253641816623821, | |
| "grad_norm": 0.3898869454860687, | |
| "learning_rate": 2.958011996572408e-05, | |
| "loss": 0.3332, | |
| "step": 7150 | |
| }, | |
| { | |
| "epoch": 1.2270779777206513, | |
| "grad_norm": 0.4259560704231262, | |
| "learning_rate": 2.9551556698086262e-05, | |
| "loss": 0.3458, | |
| "step": 7160 | |
| }, | |
| { | |
| "epoch": 1.2287917737789202, | |
| "grad_norm": 0.40676066279411316, | |
| "learning_rate": 2.952299343044844e-05, | |
| "loss": 0.3503, | |
| "step": 7170 | |
| }, | |
| { | |
| "epoch": 1.2305055698371894, | |
| "grad_norm": 0.4602939486503601, | |
| "learning_rate": 2.9494430162810627e-05, | |
| "loss": 0.3307, | |
| "step": 7180 | |
| }, | |
| { | |
| "epoch": 1.2322193658954586, | |
| "grad_norm": 0.4000133275985718, | |
| "learning_rate": 2.946586689517281e-05, | |
| "loss": 0.3699, | |
| "step": 7190 | |
| }, | |
| { | |
| "epoch": 1.2339331619537275, | |
| "grad_norm": 0.4689088463783264, | |
| "learning_rate": 2.943730362753499e-05, | |
| "loss": 0.342, | |
| "step": 7200 | |
| }, | |
| { | |
| "epoch": 1.2356469580119966, | |
| "grad_norm": 0.39080819487571716, | |
| "learning_rate": 2.9408740359897175e-05, | |
| "loss": 0.3325, | |
| "step": 7210 | |
| }, | |
| { | |
| "epoch": 1.2373607540702656, | |
| "grad_norm": 0.42110803723335266, | |
| "learning_rate": 2.9380177092259355e-05, | |
| "loss": 0.3399, | |
| "step": 7220 | |
| }, | |
| { | |
| "epoch": 1.2390745501285347, | |
| "grad_norm": 0.3990210294723511, | |
| "learning_rate": 2.9351613824621537e-05, | |
| "loss": 0.3479, | |
| "step": 7230 | |
| }, | |
| { | |
| "epoch": 1.240788346186804, | |
| "grad_norm": 0.4055481553077698, | |
| "learning_rate": 2.9323050556983723e-05, | |
| "loss": 0.3596, | |
| "step": 7240 | |
| }, | |
| { | |
| "epoch": 1.2425021422450728, | |
| "grad_norm": 0.46355926990509033, | |
| "learning_rate": 2.9294487289345902e-05, | |
| "loss": 0.3534, | |
| "step": 7250 | |
| }, | |
| { | |
| "epoch": 1.244215938303342, | |
| "grad_norm": 0.3957843780517578, | |
| "learning_rate": 2.9265924021708085e-05, | |
| "loss": 0.3328, | |
| "step": 7260 | |
| }, | |
| { | |
| "epoch": 1.245929734361611, | |
| "grad_norm": 0.38406893610954285, | |
| "learning_rate": 2.9237360754070264e-05, | |
| "loss": 0.3633, | |
| "step": 7270 | |
| }, | |
| { | |
| "epoch": 1.24764353041988, | |
| "grad_norm": 0.4850548505783081, | |
| "learning_rate": 2.920879748643245e-05, | |
| "loss": 0.3695, | |
| "step": 7280 | |
| }, | |
| { | |
| "epoch": 1.249357326478149, | |
| "grad_norm": 0.43643587827682495, | |
| "learning_rate": 2.9180234218794633e-05, | |
| "loss": 0.3476, | |
| "step": 7290 | |
| }, | |
| { | |
| "epoch": 1.2510711225364182, | |
| "grad_norm": 0.429887056350708, | |
| "learning_rate": 2.9151670951156812e-05, | |
| "loss": 0.3645, | |
| "step": 7300 | |
| }, | |
| { | |
| "epoch": 1.252784918594687, | |
| "grad_norm": 0.4277980625629425, | |
| "learning_rate": 2.9123107683518995e-05, | |
| "loss": 0.333, | |
| "step": 7310 | |
| }, | |
| { | |
| "epoch": 1.2544987146529563, | |
| "grad_norm": 0.442796915769577, | |
| "learning_rate": 2.909454441588118e-05, | |
| "loss": 0.3382, | |
| "step": 7320 | |
| }, | |
| { | |
| "epoch": 1.2562125107112254, | |
| "grad_norm": 0.4223655164241791, | |
| "learning_rate": 2.906598114824336e-05, | |
| "loss": 0.3503, | |
| "step": 7330 | |
| }, | |
| { | |
| "epoch": 1.2579263067694944, | |
| "grad_norm": 0.38975775241851807, | |
| "learning_rate": 2.9037417880605543e-05, | |
| "loss": 0.3382, | |
| "step": 7340 | |
| }, | |
| { | |
| "epoch": 1.2596401028277635, | |
| "grad_norm": 0.3538907468318939, | |
| "learning_rate": 2.9008854612967722e-05, | |
| "loss": 0.3502, | |
| "step": 7350 | |
| }, | |
| { | |
| "epoch": 1.2613538988860324, | |
| "grad_norm": 0.4272812008857727, | |
| "learning_rate": 2.8980291345329908e-05, | |
| "loss": 0.3492, | |
| "step": 7360 | |
| }, | |
| { | |
| "epoch": 1.2630676949443016, | |
| "grad_norm": 0.42171192169189453, | |
| "learning_rate": 2.895172807769209e-05, | |
| "loss": 0.3447, | |
| "step": 7370 | |
| }, | |
| { | |
| "epoch": 1.2647814910025708, | |
| "grad_norm": 0.4208640456199646, | |
| "learning_rate": 2.892316481005427e-05, | |
| "loss": 0.3395, | |
| "step": 7380 | |
| }, | |
| { | |
| "epoch": 1.2664952870608397, | |
| "grad_norm": 0.3956572711467743, | |
| "learning_rate": 2.8894601542416456e-05, | |
| "loss": 0.3273, | |
| "step": 7390 | |
| }, | |
| { | |
| "epoch": 1.2682090831191088, | |
| "grad_norm": 0.3806025981903076, | |
| "learning_rate": 2.8866038274778635e-05, | |
| "loss": 0.3485, | |
| "step": 7400 | |
| }, | |
| { | |
| "epoch": 1.2699228791773778, | |
| "grad_norm": 0.3972262442111969, | |
| "learning_rate": 2.8837475007140818e-05, | |
| "loss": 0.3521, | |
| "step": 7410 | |
| }, | |
| { | |
| "epoch": 1.271636675235647, | |
| "grad_norm": 0.43498408794403076, | |
| "learning_rate": 2.8808911739503004e-05, | |
| "loss": 0.344, | |
| "step": 7420 | |
| }, | |
| { | |
| "epoch": 1.273350471293916, | |
| "grad_norm": 0.46216341853141785, | |
| "learning_rate": 2.8780348471865183e-05, | |
| "loss": 0.3489, | |
| "step": 7430 | |
| }, | |
| { | |
| "epoch": 1.275064267352185, | |
| "grad_norm": 0.5822446942329407, | |
| "learning_rate": 2.8751785204227366e-05, | |
| "loss": 0.3432, | |
| "step": 7440 | |
| }, | |
| { | |
| "epoch": 1.2767780634104542, | |
| "grad_norm": 0.43710097670555115, | |
| "learning_rate": 2.8723221936589545e-05, | |
| "loss": 0.3449, | |
| "step": 7450 | |
| }, | |
| { | |
| "epoch": 1.2784918594687231, | |
| "grad_norm": 0.5048580765724182, | |
| "learning_rate": 2.869465866895173e-05, | |
| "loss": 0.3395, | |
| "step": 7460 | |
| }, | |
| { | |
| "epoch": 1.2802056555269923, | |
| "grad_norm": 0.3946685492992401, | |
| "learning_rate": 2.8666095401313913e-05, | |
| "loss": 0.3549, | |
| "step": 7470 | |
| }, | |
| { | |
| "epoch": 1.2819194515852614, | |
| "grad_norm": 0.4065665006637573, | |
| "learning_rate": 2.8637532133676093e-05, | |
| "loss": 0.3169, | |
| "step": 7480 | |
| }, | |
| { | |
| "epoch": 1.2836332476435304, | |
| "grad_norm": 0.40248727798461914, | |
| "learning_rate": 2.860896886603828e-05, | |
| "loss": 0.3255, | |
| "step": 7490 | |
| }, | |
| { | |
| "epoch": 1.2853470437017995, | |
| "grad_norm": 0.370870977640152, | |
| "learning_rate": 2.8580405598400454e-05, | |
| "loss": 0.3589, | |
| "step": 7500 | |
| }, | |
| { | |
| "epoch": 1.2870608397600685, | |
| "grad_norm": 0.4085274338722229, | |
| "learning_rate": 2.855184233076264e-05, | |
| "loss": 0.343, | |
| "step": 7510 | |
| }, | |
| { | |
| "epoch": 1.2887746358183376, | |
| "grad_norm": 0.47734397649765015, | |
| "learning_rate": 2.8523279063124823e-05, | |
| "loss": 0.3481, | |
| "step": 7520 | |
| }, | |
| { | |
| "epoch": 1.2904884318766068, | |
| "grad_norm": 0.4094698429107666, | |
| "learning_rate": 2.8494715795487002e-05, | |
| "loss": 0.3519, | |
| "step": 7530 | |
| }, | |
| { | |
| "epoch": 1.2922022279348757, | |
| "grad_norm": 0.3843911290168762, | |
| "learning_rate": 2.846615252784919e-05, | |
| "loss": 0.3539, | |
| "step": 7540 | |
| }, | |
| { | |
| "epoch": 1.2939160239931449, | |
| "grad_norm": 0.44203248620033264, | |
| "learning_rate": 2.8437589260211368e-05, | |
| "loss": 0.3478, | |
| "step": 7550 | |
| }, | |
| { | |
| "epoch": 1.2956298200514138, | |
| "grad_norm": 0.37155306339263916, | |
| "learning_rate": 2.840902599257355e-05, | |
| "loss": 0.331, | |
| "step": 7560 | |
| }, | |
| { | |
| "epoch": 1.297343616109683, | |
| "grad_norm": 0.3852187395095825, | |
| "learning_rate": 2.8380462724935736e-05, | |
| "loss": 0.3325, | |
| "step": 7570 | |
| }, | |
| { | |
| "epoch": 1.2990574121679521, | |
| "grad_norm": 0.391021192073822, | |
| "learning_rate": 2.8351899457297915e-05, | |
| "loss": 0.3501, | |
| "step": 7580 | |
| }, | |
| { | |
| "epoch": 1.300771208226221, | |
| "grad_norm": 0.43556347489356995, | |
| "learning_rate": 2.8323336189660098e-05, | |
| "loss": 0.3458, | |
| "step": 7590 | |
| }, | |
| { | |
| "epoch": 1.3024850042844902, | |
| "grad_norm": 0.4070069491863251, | |
| "learning_rate": 2.8294772922022284e-05, | |
| "loss": 0.3507, | |
| "step": 7600 | |
| }, | |
| { | |
| "epoch": 1.3041988003427591, | |
| "grad_norm": 0.4203561842441559, | |
| "learning_rate": 2.8266209654384463e-05, | |
| "loss": 0.3516, | |
| "step": 7610 | |
| }, | |
| { | |
| "epoch": 1.3059125964010283, | |
| "grad_norm": 0.4148425757884979, | |
| "learning_rate": 2.8237646386746646e-05, | |
| "loss": 0.3302, | |
| "step": 7620 | |
| }, | |
| { | |
| "epoch": 1.3076263924592975, | |
| "grad_norm": 0.4449089467525482, | |
| "learning_rate": 2.8209083119108825e-05, | |
| "loss": 0.3508, | |
| "step": 7630 | |
| }, | |
| { | |
| "epoch": 1.3093401885175664, | |
| "grad_norm": 0.4570872187614441, | |
| "learning_rate": 2.818051985147101e-05, | |
| "loss": 0.35, | |
| "step": 7640 | |
| }, | |
| { | |
| "epoch": 1.3110539845758356, | |
| "grad_norm": 0.38971251249313354, | |
| "learning_rate": 2.8151956583833194e-05, | |
| "loss": 0.3423, | |
| "step": 7650 | |
| }, | |
| { | |
| "epoch": 1.3127677806341045, | |
| "grad_norm": 0.3867109715938568, | |
| "learning_rate": 2.8123393316195373e-05, | |
| "loss": 0.3389, | |
| "step": 7660 | |
| }, | |
| { | |
| "epoch": 1.3144815766923736, | |
| "grad_norm": 0.44314730167388916, | |
| "learning_rate": 2.809483004855756e-05, | |
| "loss": 0.3447, | |
| "step": 7670 | |
| }, | |
| { | |
| "epoch": 1.3161953727506428, | |
| "grad_norm": 0.3955530822277069, | |
| "learning_rate": 2.8066266780919735e-05, | |
| "loss": 0.3385, | |
| "step": 7680 | |
| }, | |
| { | |
| "epoch": 1.3179091688089117, | |
| "grad_norm": 0.44573432207107544, | |
| "learning_rate": 2.803770351328192e-05, | |
| "loss": 0.3407, | |
| "step": 7690 | |
| }, | |
| { | |
| "epoch": 1.3196229648671807, | |
| "grad_norm": 0.38774076104164124, | |
| "learning_rate": 2.8009140245644107e-05, | |
| "loss": 0.3463, | |
| "step": 7700 | |
| }, | |
| { | |
| "epoch": 1.3213367609254498, | |
| "grad_norm": 0.44221237301826477, | |
| "learning_rate": 2.7980576978006283e-05, | |
| "loss": 0.3454, | |
| "step": 7710 | |
| }, | |
| { | |
| "epoch": 1.323050556983719, | |
| "grad_norm": 0.38349688053131104, | |
| "learning_rate": 2.7954870037132248e-05, | |
| "loss": 0.349, | |
| "step": 7720 | |
| }, | |
| { | |
| "epoch": 1.3247643530419881, | |
| "grad_norm": 0.6474334597587585, | |
| "learning_rate": 2.792630676949443e-05, | |
| "loss": 0.3639, | |
| "step": 7730 | |
| }, | |
| { | |
| "epoch": 1.326478149100257, | |
| "grad_norm": 0.39967530965805054, | |
| "learning_rate": 2.7897743501856616e-05, | |
| "loss": 0.3481, | |
| "step": 7740 | |
| }, | |
| { | |
| "epoch": 1.328191945158526, | |
| "grad_norm": 0.463186115026474, | |
| "learning_rate": 2.7869180234218795e-05, | |
| "loss": 0.351, | |
| "step": 7750 | |
| }, | |
| { | |
| "epoch": 1.3299057412167952, | |
| "grad_norm": 0.4431716799736023, | |
| "learning_rate": 2.7840616966580978e-05, | |
| "loss": 0.3315, | |
| "step": 7760 | |
| }, | |
| { | |
| "epoch": 1.3316195372750643, | |
| "grad_norm": 0.42048847675323486, | |
| "learning_rate": 2.7812053698943157e-05, | |
| "loss": 0.3426, | |
| "step": 7770 | |
| }, | |
| { | |
| "epoch": 1.3333333333333333, | |
| "grad_norm": 0.41159260272979736, | |
| "learning_rate": 2.7783490431305343e-05, | |
| "loss": 0.3411, | |
| "step": 7780 | |
| }, | |
| { | |
| "epoch": 1.3350471293916024, | |
| "grad_norm": 0.42812713980674744, | |
| "learning_rate": 2.7754927163667526e-05, | |
| "loss": 0.3561, | |
| "step": 7790 | |
| }, | |
| { | |
| "epoch": 1.3367609254498714, | |
| "grad_norm": 0.40041008591651917, | |
| "learning_rate": 2.7726363896029705e-05, | |
| "loss": 0.3348, | |
| "step": 7800 | |
| }, | |
| { | |
| "epoch": 1.3384747215081405, | |
| "grad_norm": 0.4039687514305115, | |
| "learning_rate": 2.769780062839189e-05, | |
| "loss": 0.3519, | |
| "step": 7810 | |
| }, | |
| { | |
| "epoch": 1.3401885175664097, | |
| "grad_norm": 0.38299936056137085, | |
| "learning_rate": 2.766923736075407e-05, | |
| "loss": 0.3485, | |
| "step": 7820 | |
| }, | |
| { | |
| "epoch": 1.3419023136246786, | |
| "grad_norm": 0.47175827622413635, | |
| "learning_rate": 2.7640674093116253e-05, | |
| "loss": 0.3628, | |
| "step": 7830 | |
| }, | |
| { | |
| "epoch": 1.3436161096829478, | |
| "grad_norm": 0.3974986970424652, | |
| "learning_rate": 2.761211082547844e-05, | |
| "loss": 0.3462, | |
| "step": 7840 | |
| }, | |
| { | |
| "epoch": 1.3453299057412167, | |
| "grad_norm": 0.4005205035209656, | |
| "learning_rate": 2.7583547557840618e-05, | |
| "loss": 0.3528, | |
| "step": 7850 | |
| }, | |
| { | |
| "epoch": 1.3470437017994858, | |
| "grad_norm": 0.46165525913238525, | |
| "learning_rate": 2.75549842902028e-05, | |
| "loss": 0.3423, | |
| "step": 7860 | |
| }, | |
| { | |
| "epoch": 1.348757497857755, | |
| "grad_norm": 0.4195481240749359, | |
| "learning_rate": 2.7526421022564987e-05, | |
| "loss": 0.346, | |
| "step": 7870 | |
| }, | |
| { | |
| "epoch": 1.350471293916024, | |
| "grad_norm": 0.41186022758483887, | |
| "learning_rate": 2.7497857754927163e-05, | |
| "loss": 0.3587, | |
| "step": 7880 | |
| }, | |
| { | |
| "epoch": 1.352185089974293, | |
| "grad_norm": 0.39691275358200073, | |
| "learning_rate": 2.746929448728935e-05, | |
| "loss": 0.3292, | |
| "step": 7890 | |
| }, | |
| { | |
| "epoch": 1.353898886032562, | |
| "grad_norm": 0.43522560596466064, | |
| "learning_rate": 2.7440731219651528e-05, | |
| "loss": 0.3455, | |
| "step": 7900 | |
| }, | |
| { | |
| "epoch": 1.3556126820908312, | |
| "grad_norm": 0.41686180233955383, | |
| "learning_rate": 2.741216795201371e-05, | |
| "loss": 0.3319, | |
| "step": 7910 | |
| }, | |
| { | |
| "epoch": 1.3573264781491003, | |
| "grad_norm": 0.4145779311656952, | |
| "learning_rate": 2.7383604684375897e-05, | |
| "loss": 0.3413, | |
| "step": 7920 | |
| }, | |
| { | |
| "epoch": 1.3590402742073693, | |
| "grad_norm": 0.4023870825767517, | |
| "learning_rate": 2.7355041416738076e-05, | |
| "loss": 0.3451, | |
| "step": 7930 | |
| }, | |
| { | |
| "epoch": 1.3607540702656384, | |
| "grad_norm": 0.3804696798324585, | |
| "learning_rate": 2.732647814910026e-05, | |
| "loss": 0.3454, | |
| "step": 7940 | |
| }, | |
| { | |
| "epoch": 1.3624678663239074, | |
| "grad_norm": 0.41007333993911743, | |
| "learning_rate": 2.7297914881462438e-05, | |
| "loss": 0.327, | |
| "step": 7950 | |
| }, | |
| { | |
| "epoch": 1.3641816623821765, | |
| "grad_norm": 0.37983033061027527, | |
| "learning_rate": 2.7269351613824624e-05, | |
| "loss": 0.3481, | |
| "step": 7960 | |
| }, | |
| { | |
| "epoch": 1.3658954584404457, | |
| "grad_norm": 0.4238635301589966, | |
| "learning_rate": 2.7240788346186806e-05, | |
| "loss": 0.3325, | |
| "step": 7970 | |
| }, | |
| { | |
| "epoch": 1.3676092544987146, | |
| "grad_norm": 0.40278929471969604, | |
| "learning_rate": 2.7212225078548986e-05, | |
| "loss": 0.3391, | |
| "step": 7980 | |
| }, | |
| { | |
| "epoch": 1.3693230505569838, | |
| "grad_norm": 0.42544665932655334, | |
| "learning_rate": 2.718366181091117e-05, | |
| "loss": 0.3436, | |
| "step": 7990 | |
| }, | |
| { | |
| "epoch": 1.3710368466152527, | |
| "grad_norm": 0.3899822235107422, | |
| "learning_rate": 2.715509854327335e-05, | |
| "loss": 0.3608, | |
| "step": 8000 | |
| }, | |
| { | |
| "epoch": 1.3727506426735219, | |
| "grad_norm": 0.40118059515953064, | |
| "learning_rate": 2.7126535275635533e-05, | |
| "loss": 0.3421, | |
| "step": 8010 | |
| }, | |
| { | |
| "epoch": 1.374464438731791, | |
| "grad_norm": 0.3830666244029999, | |
| "learning_rate": 2.709797200799772e-05, | |
| "loss": 0.3455, | |
| "step": 8020 | |
| }, | |
| { | |
| "epoch": 1.37617823479006, | |
| "grad_norm": 0.4546830356121063, | |
| "learning_rate": 2.70694087403599e-05, | |
| "loss": 0.3413, | |
| "step": 8030 | |
| }, | |
| { | |
| "epoch": 1.3778920308483291, | |
| "grad_norm": 0.4319319725036621, | |
| "learning_rate": 2.704084547272208e-05, | |
| "loss": 0.3434, | |
| "step": 8040 | |
| }, | |
| { | |
| "epoch": 1.379605826906598, | |
| "grad_norm": 0.4801386296749115, | |
| "learning_rate": 2.701228220508426e-05, | |
| "loss": 0.3475, | |
| "step": 8050 | |
| }, | |
| { | |
| "epoch": 1.3813196229648672, | |
| "grad_norm": 0.4308033287525177, | |
| "learning_rate": 2.6983718937446447e-05, | |
| "loss": 0.3524, | |
| "step": 8060 | |
| }, | |
| { | |
| "epoch": 1.3830334190231364, | |
| "grad_norm": 0.37144121527671814, | |
| "learning_rate": 2.695515566980863e-05, | |
| "loss": 0.3225, | |
| "step": 8070 | |
| }, | |
| { | |
| "epoch": 1.3847472150814053, | |
| "grad_norm": 0.43441927433013916, | |
| "learning_rate": 2.692659240217081e-05, | |
| "loss": 0.3448, | |
| "step": 8080 | |
| }, | |
| { | |
| "epoch": 1.3864610111396745, | |
| "grad_norm": 0.4089992046356201, | |
| "learning_rate": 2.689802913453299e-05, | |
| "loss": 0.3345, | |
| "step": 8090 | |
| }, | |
| { | |
| "epoch": 1.3881748071979434, | |
| "grad_norm": 0.387023001909256, | |
| "learning_rate": 2.686946586689517e-05, | |
| "loss": 0.3544, | |
| "step": 8100 | |
| }, | |
| { | |
| "epoch": 1.3898886032562126, | |
| "grad_norm": 0.4173690974712372, | |
| "learning_rate": 2.6840902599257356e-05, | |
| "loss": 0.3324, | |
| "step": 8110 | |
| }, | |
| { | |
| "epoch": 1.3916023993144817, | |
| "grad_norm": 0.3853393793106079, | |
| "learning_rate": 2.681233933161954e-05, | |
| "loss": 0.3487, | |
| "step": 8120 | |
| }, | |
| { | |
| "epoch": 1.3933161953727506, | |
| "grad_norm": 0.48058122396469116, | |
| "learning_rate": 2.6783776063981718e-05, | |
| "loss": 0.3375, | |
| "step": 8130 | |
| }, | |
| { | |
| "epoch": 1.3950299914310196, | |
| "grad_norm": 0.43440964818000793, | |
| "learning_rate": 2.6755212796343904e-05, | |
| "loss": 0.3497, | |
| "step": 8140 | |
| }, | |
| { | |
| "epoch": 1.3967437874892887, | |
| "grad_norm": 0.4103083312511444, | |
| "learning_rate": 2.6726649528706083e-05, | |
| "loss": 0.3348, | |
| "step": 8150 | |
| }, | |
| { | |
| "epoch": 1.398457583547558, | |
| "grad_norm": 0.5796851515769958, | |
| "learning_rate": 2.6698086261068266e-05, | |
| "loss": 0.3518, | |
| "step": 8160 | |
| }, | |
| { | |
| "epoch": 1.4001713796058268, | |
| "grad_norm": 0.4238094389438629, | |
| "learning_rate": 2.6669522993430452e-05, | |
| "loss": 0.3414, | |
| "step": 8170 | |
| }, | |
| { | |
| "epoch": 1.401885175664096, | |
| "grad_norm": 0.4287961721420288, | |
| "learning_rate": 2.664095972579263e-05, | |
| "loss": 0.3369, | |
| "step": 8180 | |
| }, | |
| { | |
| "epoch": 1.403598971722365, | |
| "grad_norm": 0.41233572363853455, | |
| "learning_rate": 2.6612396458154814e-05, | |
| "loss": 0.363, | |
| "step": 8190 | |
| }, | |
| { | |
| "epoch": 1.405312767780634, | |
| "grad_norm": 0.4112406373023987, | |
| "learning_rate": 2.6583833190517e-05, | |
| "loss": 0.3507, | |
| "step": 8200 | |
| }, | |
| { | |
| "epoch": 1.4070265638389032, | |
| "grad_norm": 0.3922879695892334, | |
| "learning_rate": 2.655526992287918e-05, | |
| "loss": 0.3444, | |
| "step": 8210 | |
| }, | |
| { | |
| "epoch": 1.4087403598971722, | |
| "grad_norm": 0.41223299503326416, | |
| "learning_rate": 2.6526706655241362e-05, | |
| "loss": 0.3561, | |
| "step": 8220 | |
| }, | |
| { | |
| "epoch": 1.4104541559554413, | |
| "grad_norm": 0.39642488956451416, | |
| "learning_rate": 2.649814338760354e-05, | |
| "loss": 0.3528, | |
| "step": 8230 | |
| }, | |
| { | |
| "epoch": 1.4121679520137103, | |
| "grad_norm": 0.43318673968315125, | |
| "learning_rate": 2.6469580119965727e-05, | |
| "loss": 0.3368, | |
| "step": 8240 | |
| }, | |
| { | |
| "epoch": 1.4138817480719794, | |
| "grad_norm": 0.4054166376590729, | |
| "learning_rate": 2.644101685232791e-05, | |
| "loss": 0.3388, | |
| "step": 8250 | |
| }, | |
| { | |
| "epoch": 1.4155955441302486, | |
| "grad_norm": 0.42911553382873535, | |
| "learning_rate": 2.641245358469009e-05, | |
| "loss": 0.3404, | |
| "step": 8260 | |
| }, | |
| { | |
| "epoch": 1.4173093401885175, | |
| "grad_norm": 0.4328351318836212, | |
| "learning_rate": 2.6383890317052275e-05, | |
| "loss": 0.3388, | |
| "step": 8270 | |
| }, | |
| { | |
| "epoch": 1.4190231362467867, | |
| "grad_norm": 0.37425297498703003, | |
| "learning_rate": 2.635532704941445e-05, | |
| "loss": 0.3482, | |
| "step": 8280 | |
| }, | |
| { | |
| "epoch": 1.4207369323050556, | |
| "grad_norm": 0.4178659915924072, | |
| "learning_rate": 2.6326763781776637e-05, | |
| "loss": 0.3375, | |
| "step": 8290 | |
| }, | |
| { | |
| "epoch": 1.4224507283633248, | |
| "grad_norm": 0.4292641580104828, | |
| "learning_rate": 2.629820051413882e-05, | |
| "loss": 0.3342, | |
| "step": 8300 | |
| }, | |
| { | |
| "epoch": 1.424164524421594, | |
| "grad_norm": 0.39173051714897156, | |
| "learning_rate": 2.6269637246501e-05, | |
| "loss": 0.3351, | |
| "step": 8310 | |
| }, | |
| { | |
| "epoch": 1.4258783204798628, | |
| "grad_norm": 0.39436325430870056, | |
| "learning_rate": 2.6241073978863185e-05, | |
| "loss": 0.3413, | |
| "step": 8320 | |
| }, | |
| { | |
| "epoch": 1.427592116538132, | |
| "grad_norm": 0.412300169467926, | |
| "learning_rate": 2.6212510711225364e-05, | |
| "loss": 0.3363, | |
| "step": 8330 | |
| }, | |
| { | |
| "epoch": 1.429305912596401, | |
| "grad_norm": 0.45384445786476135, | |
| "learning_rate": 2.6183947443587547e-05, | |
| "loss": 0.3558, | |
| "step": 8340 | |
| }, | |
| { | |
| "epoch": 1.43101970865467, | |
| "grad_norm": 0.3842832148075104, | |
| "learning_rate": 2.6155384175949733e-05, | |
| "loss": 0.339, | |
| "step": 8350 | |
| }, | |
| { | |
| "epoch": 1.4327335047129393, | |
| "grad_norm": 0.4295053482055664, | |
| "learning_rate": 2.6126820908311912e-05, | |
| "loss": 0.3447, | |
| "step": 8360 | |
| }, | |
| { | |
| "epoch": 1.4344473007712082, | |
| "grad_norm": 0.4409741163253784, | |
| "learning_rate": 2.6098257640674094e-05, | |
| "loss": 0.3509, | |
| "step": 8370 | |
| }, | |
| { | |
| "epoch": 1.4361610968294773, | |
| "grad_norm": 0.41879725456237793, | |
| "learning_rate": 2.6069694373036274e-05, | |
| "loss": 0.3268, | |
| "step": 8380 | |
| }, | |
| { | |
| "epoch": 1.4378748928877463, | |
| "grad_norm": 0.4443173110485077, | |
| "learning_rate": 2.604113110539846e-05, | |
| "loss": 0.333, | |
| "step": 8390 | |
| }, | |
| { | |
| "epoch": 1.4395886889460154, | |
| "grad_norm": 0.5012508630752563, | |
| "learning_rate": 2.6012567837760642e-05, | |
| "loss": 0.3554, | |
| "step": 8400 | |
| }, | |
| { | |
| "epoch": 1.4413024850042846, | |
| "grad_norm": 0.400483101606369, | |
| "learning_rate": 2.598400457012282e-05, | |
| "loss": 0.3405, | |
| "step": 8410 | |
| }, | |
| { | |
| "epoch": 1.4430162810625535, | |
| "grad_norm": 0.4024072587490082, | |
| "learning_rate": 2.5955441302485007e-05, | |
| "loss": 0.343, | |
| "step": 8420 | |
| }, | |
| { | |
| "epoch": 1.4447300771208227, | |
| "grad_norm": 0.3965204358100891, | |
| "learning_rate": 2.5926878034847187e-05, | |
| "loss": 0.3459, | |
| "step": 8430 | |
| }, | |
| { | |
| "epoch": 1.4464438731790916, | |
| "grad_norm": 0.3904626965522766, | |
| "learning_rate": 2.589831476720937e-05, | |
| "loss": 0.3362, | |
| "step": 8440 | |
| }, | |
| { | |
| "epoch": 1.4481576692373608, | |
| "grad_norm": 0.45727992057800293, | |
| "learning_rate": 2.5869751499571555e-05, | |
| "loss": 0.3298, | |
| "step": 8450 | |
| }, | |
| { | |
| "epoch": 1.44987146529563, | |
| "grad_norm": 0.40625882148742676, | |
| "learning_rate": 2.584118823193373e-05, | |
| "loss": 0.3304, | |
| "step": 8460 | |
| }, | |
| { | |
| "epoch": 1.4515852613538989, | |
| "grad_norm": 0.404329776763916, | |
| "learning_rate": 2.5812624964295917e-05, | |
| "loss": 0.3203, | |
| "step": 8470 | |
| }, | |
| { | |
| "epoch": 1.453299057412168, | |
| "grad_norm": 0.377429723739624, | |
| "learning_rate": 2.5784061696658103e-05, | |
| "loss": 0.3377, | |
| "step": 8480 | |
| }, | |
| { | |
| "epoch": 1.455012853470437, | |
| "grad_norm": 0.41216394305229187, | |
| "learning_rate": 2.575549842902028e-05, | |
| "loss": 0.3363, | |
| "step": 8490 | |
| }, | |
| { | |
| "epoch": 1.4567266495287061, | |
| "grad_norm": 0.4194747805595398, | |
| "learning_rate": 2.5726935161382465e-05, | |
| "loss": 0.3286, | |
| "step": 8500 | |
| }, | |
| { | |
| "epoch": 1.4584404455869753, | |
| "grad_norm": 0.4310661554336548, | |
| "learning_rate": 2.5698371893744644e-05, | |
| "loss": 0.3413, | |
| "step": 8510 | |
| }, | |
| { | |
| "epoch": 1.4601542416452442, | |
| "grad_norm": 0.40952980518341064, | |
| "learning_rate": 2.5669808626106827e-05, | |
| "loss": 0.3529, | |
| "step": 8520 | |
| }, | |
| { | |
| "epoch": 1.4618680377035131, | |
| "grad_norm": 0.3957708477973938, | |
| "learning_rate": 2.5641245358469013e-05, | |
| "loss": 0.3491, | |
| "step": 8530 | |
| }, | |
| { | |
| "epoch": 1.4635818337617823, | |
| "grad_norm": 0.3769155740737915, | |
| "learning_rate": 2.5612682090831192e-05, | |
| "loss": 0.35, | |
| "step": 8540 | |
| }, | |
| { | |
| "epoch": 1.4652956298200515, | |
| "grad_norm": 0.3695950210094452, | |
| "learning_rate": 2.5584118823193375e-05, | |
| "loss": 0.3188, | |
| "step": 8550 | |
| }, | |
| { | |
| "epoch": 1.4670094258783206, | |
| "grad_norm": 0.4204133152961731, | |
| "learning_rate": 2.5555555555555554e-05, | |
| "loss": 0.3462, | |
| "step": 8560 | |
| }, | |
| { | |
| "epoch": 1.4687232219365896, | |
| "grad_norm": 0.4069593548774719, | |
| "learning_rate": 2.552699228791774e-05, | |
| "loss": 0.3431, | |
| "step": 8570 | |
| }, | |
| { | |
| "epoch": 1.4704370179948585, | |
| "grad_norm": 0.44790759682655334, | |
| "learning_rate": 2.5498429020279923e-05, | |
| "loss": 0.3304, | |
| "step": 8580 | |
| }, | |
| { | |
| "epoch": 1.4721508140531276, | |
| "grad_norm": 0.42790570855140686, | |
| "learning_rate": 2.5469865752642102e-05, | |
| "loss": 0.3479, | |
| "step": 8590 | |
| }, | |
| { | |
| "epoch": 1.4738646101113968, | |
| "grad_norm": 0.40536096692085266, | |
| "learning_rate": 2.5441302485004288e-05, | |
| "loss": 0.3492, | |
| "step": 8600 | |
| }, | |
| { | |
| "epoch": 1.4755784061696657, | |
| "grad_norm": 0.41925767064094543, | |
| "learning_rate": 2.5412739217366467e-05, | |
| "loss": 0.3494, | |
| "step": 8610 | |
| }, | |
| { | |
| "epoch": 1.477292202227935, | |
| "grad_norm": 0.44192054867744446, | |
| "learning_rate": 2.538417594972865e-05, | |
| "loss": 0.342, | |
| "step": 8620 | |
| }, | |
| { | |
| "epoch": 1.4790059982862038, | |
| "grad_norm": 0.38461121916770935, | |
| "learning_rate": 2.5355612682090836e-05, | |
| "loss": 0.3346, | |
| "step": 8630 | |
| }, | |
| { | |
| "epoch": 1.480719794344473, | |
| "grad_norm": 0.38452473282814026, | |
| "learning_rate": 2.5327049414453015e-05, | |
| "loss": 0.3392, | |
| "step": 8640 | |
| }, | |
| { | |
| "epoch": 1.4824335904027421, | |
| "grad_norm": 0.42153528332710266, | |
| "learning_rate": 2.5298486146815198e-05, | |
| "loss": 0.3325, | |
| "step": 8650 | |
| }, | |
| { | |
| "epoch": 1.484147386461011, | |
| "grad_norm": 0.4100632965564728, | |
| "learning_rate": 2.5269922879177377e-05, | |
| "loss": 0.3456, | |
| "step": 8660 | |
| }, | |
| { | |
| "epoch": 1.4858611825192802, | |
| "grad_norm": 0.44015470147132874, | |
| "learning_rate": 2.524135961153956e-05, | |
| "loss": 0.3405, | |
| "step": 8670 | |
| }, | |
| { | |
| "epoch": 1.4875749785775492, | |
| "grad_norm": 0.3868488669395447, | |
| "learning_rate": 2.5212796343901746e-05, | |
| "loss": 0.3526, | |
| "step": 8680 | |
| }, | |
| { | |
| "epoch": 1.4892887746358183, | |
| "grad_norm": 0.4163185656070709, | |
| "learning_rate": 2.5184233076263925e-05, | |
| "loss": 0.3439, | |
| "step": 8690 | |
| }, | |
| { | |
| "epoch": 1.4910025706940875, | |
| "grad_norm": 0.3832201361656189, | |
| "learning_rate": 2.5155669808626107e-05, | |
| "loss": 0.3302, | |
| "step": 8700 | |
| }, | |
| { | |
| "epoch": 1.4927163667523564, | |
| "grad_norm": 0.38806959986686707, | |
| "learning_rate": 2.5127106540988287e-05, | |
| "loss": 0.3383, | |
| "step": 8710 | |
| }, | |
| { | |
| "epoch": 1.4944301628106256, | |
| "grad_norm": 0.3635774552822113, | |
| "learning_rate": 2.5098543273350473e-05, | |
| "loss": 0.3396, | |
| "step": 8720 | |
| }, | |
| { | |
| "epoch": 1.4961439588688945, | |
| "grad_norm": 0.40076860785484314, | |
| "learning_rate": 2.5069980005712655e-05, | |
| "loss": 0.3407, | |
| "step": 8730 | |
| }, | |
| { | |
| "epoch": 1.4978577549271637, | |
| "grad_norm": 0.419278621673584, | |
| "learning_rate": 2.5041416738074835e-05, | |
| "loss": 0.3395, | |
| "step": 8740 | |
| }, | |
| { | |
| "epoch": 1.4995715509854328, | |
| "grad_norm": 0.4140276610851288, | |
| "learning_rate": 2.501285347043702e-05, | |
| "loss": 0.3302, | |
| "step": 8750 | |
| }, | |
| { | |
| "epoch": 1.5012853470437018, | |
| "grad_norm": 0.47987890243530273, | |
| "learning_rate": 2.4984290202799203e-05, | |
| "loss": 0.342, | |
| "step": 8760 | |
| }, | |
| { | |
| "epoch": 1.502999143101971, | |
| "grad_norm": 0.4513384997844696, | |
| "learning_rate": 2.4955726935161382e-05, | |
| "loss": 0.3374, | |
| "step": 8770 | |
| }, | |
| { | |
| "epoch": 1.5047129391602398, | |
| "grad_norm": 0.44189688563346863, | |
| "learning_rate": 2.4927163667523565e-05, | |
| "loss": 0.3433, | |
| "step": 8780 | |
| }, | |
| { | |
| "epoch": 1.506426735218509, | |
| "grad_norm": 0.3951323330402374, | |
| "learning_rate": 2.4898600399885748e-05, | |
| "loss": 0.3466, | |
| "step": 8790 | |
| }, | |
| { | |
| "epoch": 1.5081405312767782, | |
| "grad_norm": 0.4419739842414856, | |
| "learning_rate": 2.487003713224793e-05, | |
| "loss": 0.3393, | |
| "step": 8800 | |
| }, | |
| { | |
| "epoch": 1.509854327335047, | |
| "grad_norm": 0.3944937586784363, | |
| "learning_rate": 2.4841473864610113e-05, | |
| "loss": 0.3378, | |
| "step": 8810 | |
| }, | |
| { | |
| "epoch": 1.511568123393316, | |
| "grad_norm": 0.4794803857803345, | |
| "learning_rate": 2.4812910596972296e-05, | |
| "loss": 0.3355, | |
| "step": 8820 | |
| }, | |
| { | |
| "epoch": 1.5132819194515852, | |
| "grad_norm": 0.44282740354537964, | |
| "learning_rate": 2.4784347329334475e-05, | |
| "loss": 0.3376, | |
| "step": 8830 | |
| }, | |
| { | |
| "epoch": 1.5149957155098543, | |
| "grad_norm": 0.4469907581806183, | |
| "learning_rate": 2.475578406169666e-05, | |
| "loss": 0.353, | |
| "step": 8840 | |
| }, | |
| { | |
| "epoch": 1.5167095115681235, | |
| "grad_norm": 0.43816834688186646, | |
| "learning_rate": 2.4727220794058843e-05, | |
| "loss": 0.3479, | |
| "step": 8850 | |
| }, | |
| { | |
| "epoch": 1.5184233076263924, | |
| "grad_norm": 0.36124739050865173, | |
| "learning_rate": 2.4698657526421023e-05, | |
| "loss": 0.3472, | |
| "step": 8860 | |
| }, | |
| { | |
| "epoch": 1.5201371036846614, | |
| "grad_norm": 0.4093656539916992, | |
| "learning_rate": 2.4670094258783205e-05, | |
| "loss": 0.3358, | |
| "step": 8870 | |
| }, | |
| { | |
| "epoch": 1.5218508997429305, | |
| "grad_norm": 0.4121876358985901, | |
| "learning_rate": 2.4641530991145388e-05, | |
| "loss": 0.3348, | |
| "step": 8880 | |
| }, | |
| { | |
| "epoch": 1.5235646958011997, | |
| "grad_norm": 0.38930338621139526, | |
| "learning_rate": 2.461296772350757e-05, | |
| "loss": 0.358, | |
| "step": 8890 | |
| }, | |
| { | |
| "epoch": 1.5252784918594688, | |
| "grad_norm": 0.3835488259792328, | |
| "learning_rate": 2.4584404455869753e-05, | |
| "loss": 0.3419, | |
| "step": 8900 | |
| }, | |
| { | |
| "epoch": 1.5269922879177378, | |
| "grad_norm": 0.387956827878952, | |
| "learning_rate": 2.4555841188231936e-05, | |
| "loss": 0.3288, | |
| "step": 8910 | |
| }, | |
| { | |
| "epoch": 1.5287060839760067, | |
| "grad_norm": 0.39550501108169556, | |
| "learning_rate": 2.4527277920594115e-05, | |
| "loss": 0.348, | |
| "step": 8920 | |
| }, | |
| { | |
| "epoch": 1.5304198800342759, | |
| "grad_norm": 0.44232356548309326, | |
| "learning_rate": 2.44987146529563e-05, | |
| "loss": 0.3407, | |
| "step": 8930 | |
| }, | |
| { | |
| "epoch": 1.532133676092545, | |
| "grad_norm": 0.41980692744255066, | |
| "learning_rate": 2.4470151385318484e-05, | |
| "loss": 0.3244, | |
| "step": 8940 | |
| }, | |
| { | |
| "epoch": 1.5338474721508142, | |
| "grad_norm": 0.40649715065956116, | |
| "learning_rate": 2.4441588117680663e-05, | |
| "loss": 0.3367, | |
| "step": 8950 | |
| }, | |
| { | |
| "epoch": 1.5355612682090831, | |
| "grad_norm": 0.41725024580955505, | |
| "learning_rate": 2.4413024850042845e-05, | |
| "loss": 0.3521, | |
| "step": 8960 | |
| }, | |
| { | |
| "epoch": 1.537275064267352, | |
| "grad_norm": 0.45514604449272156, | |
| "learning_rate": 2.4384461582405028e-05, | |
| "loss": 0.3363, | |
| "step": 8970 | |
| }, | |
| { | |
| "epoch": 1.5389888603256212, | |
| "grad_norm": 0.40341225266456604, | |
| "learning_rate": 2.435589831476721e-05, | |
| "loss": 0.3551, | |
| "step": 8980 | |
| }, | |
| { | |
| "epoch": 1.5407026563838904, | |
| "grad_norm": 0.45389923453330994, | |
| "learning_rate": 2.4327335047129393e-05, | |
| "loss": 0.3458, | |
| "step": 8990 | |
| }, | |
| { | |
| "epoch": 1.5424164524421595, | |
| "grad_norm": 0.4399547576904297, | |
| "learning_rate": 2.4298771779491576e-05, | |
| "loss": 0.3328, | |
| "step": 9000 | |
| }, | |
| { | |
| "epoch": 1.5441302485004285, | |
| "grad_norm": 0.39244237542152405, | |
| "learning_rate": 2.4270208511853755e-05, | |
| "loss": 0.3318, | |
| "step": 9010 | |
| }, | |
| { | |
| "epoch": 1.5458440445586974, | |
| "grad_norm": 0.46719881892204285, | |
| "learning_rate": 2.4241645244215938e-05, | |
| "loss": 0.325, | |
| "step": 9020 | |
| }, | |
| { | |
| "epoch": 1.5475578406169666, | |
| "grad_norm": 0.4076116681098938, | |
| "learning_rate": 2.4213081976578124e-05, | |
| "loss": 0.3152, | |
| "step": 9030 | |
| }, | |
| { | |
| "epoch": 1.5492716366752357, | |
| "grad_norm": 0.4262393116950989, | |
| "learning_rate": 2.4184518708940303e-05, | |
| "loss": 0.3273, | |
| "step": 9040 | |
| }, | |
| { | |
| "epoch": 1.5509854327335049, | |
| "grad_norm": 0.415228933095932, | |
| "learning_rate": 2.4155955441302486e-05, | |
| "loss": 0.3377, | |
| "step": 9050 | |
| }, | |
| { | |
| "epoch": 1.5526992287917738, | |
| "grad_norm": 0.43530064821243286, | |
| "learning_rate": 2.412739217366467e-05, | |
| "loss": 0.3534, | |
| "step": 9060 | |
| }, | |
| { | |
| "epoch": 1.5544130248500427, | |
| "grad_norm": 0.4395502507686615, | |
| "learning_rate": 2.409882890602685e-05, | |
| "loss": 0.3339, | |
| "step": 9070 | |
| }, | |
| { | |
| "epoch": 1.556126820908312, | |
| "grad_norm": 0.41547203063964844, | |
| "learning_rate": 2.4070265638389034e-05, | |
| "loss": 0.3324, | |
| "step": 9080 | |
| }, | |
| { | |
| "epoch": 1.557840616966581, | |
| "grad_norm": 0.39110681414604187, | |
| "learning_rate": 2.4041702370751216e-05, | |
| "loss": 0.3448, | |
| "step": 9090 | |
| }, | |
| { | |
| "epoch": 1.5595544130248502, | |
| "grad_norm": 0.46008628606796265, | |
| "learning_rate": 2.4013139103113395e-05, | |
| "loss": 0.3481, | |
| "step": 9100 | |
| }, | |
| { | |
| "epoch": 1.5612682090831191, | |
| "grad_norm": 0.41481733322143555, | |
| "learning_rate": 2.3984575835475578e-05, | |
| "loss": 0.3323, | |
| "step": 9110 | |
| }, | |
| { | |
| "epoch": 1.562982005141388, | |
| "grad_norm": 0.40251603722572327, | |
| "learning_rate": 2.3956012567837764e-05, | |
| "loss": 0.3313, | |
| "step": 9120 | |
| }, | |
| { | |
| "epoch": 1.5646958011996572, | |
| "grad_norm": 0.37586021423339844, | |
| "learning_rate": 2.3927449300199943e-05, | |
| "loss": 0.3328, | |
| "step": 9130 | |
| }, | |
| { | |
| "epoch": 1.5664095972579264, | |
| "grad_norm": 0.39882418513298035, | |
| "learning_rate": 2.3898886032562126e-05, | |
| "loss": 0.3431, | |
| "step": 9140 | |
| }, | |
| { | |
| "epoch": 1.5681233933161953, | |
| "grad_norm": 0.393945574760437, | |
| "learning_rate": 2.387032276492431e-05, | |
| "loss": 0.3442, | |
| "step": 9150 | |
| }, | |
| { | |
| "epoch": 1.5698371893744645, | |
| "grad_norm": 0.3542807698249817, | |
| "learning_rate": 2.3841759497286488e-05, | |
| "loss": 0.3249, | |
| "step": 9160 | |
| }, | |
| { | |
| "epoch": 1.5715509854327334, | |
| "grad_norm": 0.4241867959499359, | |
| "learning_rate": 2.3813196229648674e-05, | |
| "loss": 0.3391, | |
| "step": 9170 | |
| }, | |
| { | |
| "epoch": 1.5732647814910026, | |
| "grad_norm": 0.44771257042884827, | |
| "learning_rate": 2.3784632962010856e-05, | |
| "loss": 0.3346, | |
| "step": 9180 | |
| }, | |
| { | |
| "epoch": 1.5749785775492717, | |
| "grad_norm": 0.39804545044898987, | |
| "learning_rate": 2.3756069694373036e-05, | |
| "loss": 0.3251, | |
| "step": 9190 | |
| }, | |
| { | |
| "epoch": 1.5766923736075407, | |
| "grad_norm": 0.47698989510536194, | |
| "learning_rate": 2.3727506426735218e-05, | |
| "loss": 0.3327, | |
| "step": 9200 | |
| }, | |
| { | |
| "epoch": 1.5784061696658098, | |
| "grad_norm": 0.43843311071395874, | |
| "learning_rate": 2.3698943159097404e-05, | |
| "loss": 0.3272, | |
| "step": 9210 | |
| }, | |
| { | |
| "epoch": 1.5801199657240788, | |
| "grad_norm": 0.4098581373691559, | |
| "learning_rate": 2.3670379891459584e-05, | |
| "loss": 0.3346, | |
| "step": 9220 | |
| }, | |
| { | |
| "epoch": 1.581833761782348, | |
| "grad_norm": 0.43909481167793274, | |
| "learning_rate": 2.3641816623821766e-05, | |
| "loss": 0.3249, | |
| "step": 9230 | |
| }, | |
| { | |
| "epoch": 1.583547557840617, | |
| "grad_norm": 0.39052918553352356, | |
| "learning_rate": 2.361325335618395e-05, | |
| "loss": 0.3287, | |
| "step": 9240 | |
| }, | |
| { | |
| "epoch": 1.585261353898886, | |
| "grad_norm": 0.4083861708641052, | |
| "learning_rate": 2.3584690088546128e-05, | |
| "loss": 0.3329, | |
| "step": 9250 | |
| }, | |
| { | |
| "epoch": 1.586975149957155, | |
| "grad_norm": 0.41227346658706665, | |
| "learning_rate": 2.3556126820908314e-05, | |
| "loss": 0.3269, | |
| "step": 9260 | |
| }, | |
| { | |
| "epoch": 1.588688946015424, | |
| "grad_norm": 0.4148152768611908, | |
| "learning_rate": 2.3527563553270497e-05, | |
| "loss": 0.322, | |
| "step": 9270 | |
| }, | |
| { | |
| "epoch": 1.5904027420736933, | |
| "grad_norm": 0.41056588292121887, | |
| "learning_rate": 2.3499000285632676e-05, | |
| "loss": 0.3441, | |
| "step": 9280 | |
| }, | |
| { | |
| "epoch": 1.5921165381319624, | |
| "grad_norm": 0.3809565603733063, | |
| "learning_rate": 2.347043701799486e-05, | |
| "loss": 0.3447, | |
| "step": 9290 | |
| }, | |
| { | |
| "epoch": 1.5938303341902313, | |
| "grad_norm": 0.3820127844810486, | |
| "learning_rate": 2.344187375035704e-05, | |
| "loss": 0.3497, | |
| "step": 9300 | |
| }, | |
| { | |
| "epoch": 1.5955441302485003, | |
| "grad_norm": 0.40932685136795044, | |
| "learning_rate": 2.3413310482719224e-05, | |
| "loss": 0.3725, | |
| "step": 9310 | |
| }, | |
| { | |
| "epoch": 1.5972579263067694, | |
| "grad_norm": 0.40701672434806824, | |
| "learning_rate": 2.3384747215081406e-05, | |
| "loss": 0.3167, | |
| "step": 9320 | |
| }, | |
| { | |
| "epoch": 1.5989717223650386, | |
| "grad_norm": 0.43671759963035583, | |
| "learning_rate": 2.335618394744359e-05, | |
| "loss": 0.3232, | |
| "step": 9330 | |
| }, | |
| { | |
| "epoch": 1.6006855184233078, | |
| "grad_norm": 0.4282389283180237, | |
| "learning_rate": 2.332762067980577e-05, | |
| "loss": 0.3399, | |
| "step": 9340 | |
| }, | |
| { | |
| "epoch": 1.6023993144815767, | |
| "grad_norm": 0.4553786814212799, | |
| "learning_rate": 2.3299057412167954e-05, | |
| "loss": 0.3387, | |
| "step": 9350 | |
| }, | |
| { | |
| "epoch": 1.6041131105398456, | |
| "grad_norm": 0.40568017959594727, | |
| "learning_rate": 2.3270494144530137e-05, | |
| "loss": 0.345, | |
| "step": 9360 | |
| }, | |
| { | |
| "epoch": 1.6058269065981148, | |
| "grad_norm": 0.4190521538257599, | |
| "learning_rate": 2.3241930876892316e-05, | |
| "loss": 0.3347, | |
| "step": 9370 | |
| }, | |
| { | |
| "epoch": 1.607540702656384, | |
| "grad_norm": 0.4039881229400635, | |
| "learning_rate": 2.32133676092545e-05, | |
| "loss": 0.3252, | |
| "step": 9380 | |
| }, | |
| { | |
| "epoch": 1.609254498714653, | |
| "grad_norm": 0.4178449511528015, | |
| "learning_rate": 2.318480434161668e-05, | |
| "loss": 0.333, | |
| "step": 9390 | |
| }, | |
| { | |
| "epoch": 1.610968294772922, | |
| "grad_norm": 0.4503561854362488, | |
| "learning_rate": 2.3156241073978864e-05, | |
| "loss": 0.3282, | |
| "step": 9400 | |
| }, | |
| { | |
| "epoch": 1.612682090831191, | |
| "grad_norm": 0.43098393082618713, | |
| "learning_rate": 2.3127677806341047e-05, | |
| "loss": 0.337, | |
| "step": 9410 | |
| }, | |
| { | |
| "epoch": 1.6143958868894601, | |
| "grad_norm": 0.3916527330875397, | |
| "learning_rate": 2.309911453870323e-05, | |
| "loss": 0.331, | |
| "step": 9420 | |
| }, | |
| { | |
| "epoch": 1.6161096829477293, | |
| "grad_norm": 0.43365949392318726, | |
| "learning_rate": 2.3070551271065412e-05, | |
| "loss": 0.3325, | |
| "step": 9430 | |
| }, | |
| { | |
| "epoch": 1.6178234790059984, | |
| "grad_norm": 0.3826988935470581, | |
| "learning_rate": 2.304198800342759e-05, | |
| "loss": 0.3378, | |
| "step": 9440 | |
| }, | |
| { | |
| "epoch": 1.6195372750642674, | |
| "grad_norm": 0.37719157338142395, | |
| "learning_rate": 2.3013424735789777e-05, | |
| "loss": 0.3308, | |
| "step": 9450 | |
| }, | |
| { | |
| "epoch": 1.6212510711225363, | |
| "grad_norm": 0.39002153277397156, | |
| "learning_rate": 2.298486146815196e-05, | |
| "loss": 0.3458, | |
| "step": 9460 | |
| }, | |
| { | |
| "epoch": 1.6229648671808055, | |
| "grad_norm": 0.4165000021457672, | |
| "learning_rate": 2.295629820051414e-05, | |
| "loss": 0.3293, | |
| "step": 9470 | |
| }, | |
| { | |
| "epoch": 1.6246786632390746, | |
| "grad_norm": 0.41711005568504333, | |
| "learning_rate": 2.292773493287632e-05, | |
| "loss": 0.3422, | |
| "step": 9480 | |
| }, | |
| { | |
| "epoch": 1.6263924592973438, | |
| "grad_norm": 0.3922407925128937, | |
| "learning_rate": 2.2899171665238504e-05, | |
| "loss": 0.3314, | |
| "step": 9490 | |
| }, | |
| { | |
| "epoch": 1.6281062553556127, | |
| "grad_norm": 0.40335458517074585, | |
| "learning_rate": 2.2870608397600687e-05, | |
| "loss": 0.3454, | |
| "step": 9500 | |
| }, | |
| { | |
| "epoch": 1.6298200514138816, | |
| "grad_norm": 0.40134212374687195, | |
| "learning_rate": 2.284204512996287e-05, | |
| "loss": 0.3302, | |
| "step": 9510 | |
| }, | |
| { | |
| "epoch": 1.6315338474721508, | |
| "grad_norm": 0.4334363639354706, | |
| "learning_rate": 2.2813481862325052e-05, | |
| "loss": 0.3463, | |
| "step": 9520 | |
| }, | |
| { | |
| "epoch": 1.63324764353042, | |
| "grad_norm": 0.4288492798805237, | |
| "learning_rate": 2.278491859468723e-05, | |
| "loss": 0.3492, | |
| "step": 9530 | |
| }, | |
| { | |
| "epoch": 1.634961439588689, | |
| "grad_norm": 0.41050291061401367, | |
| "learning_rate": 2.2756355327049417e-05, | |
| "loss": 0.3277, | |
| "step": 9540 | |
| }, | |
| { | |
| "epoch": 1.636675235646958, | |
| "grad_norm": 0.4153129458427429, | |
| "learning_rate": 2.27277920594116e-05, | |
| "loss": 0.3383, | |
| "step": 9550 | |
| }, | |
| { | |
| "epoch": 1.638389031705227, | |
| "grad_norm": 0.4749390482902527, | |
| "learning_rate": 2.269922879177378e-05, | |
| "loss": 0.3357, | |
| "step": 9560 | |
| }, | |
| { | |
| "epoch": 1.6401028277634961, | |
| "grad_norm": 0.3886820077896118, | |
| "learning_rate": 2.2670665524135962e-05, | |
| "loss": 0.3501, | |
| "step": 9570 | |
| }, | |
| { | |
| "epoch": 1.6418166238217653, | |
| "grad_norm": 0.42018669843673706, | |
| "learning_rate": 2.2642102256498144e-05, | |
| "loss": 0.3492, | |
| "step": 9580 | |
| }, | |
| { | |
| "epoch": 1.6435304198800342, | |
| "grad_norm": 0.3968152403831482, | |
| "learning_rate": 2.2613538988860327e-05, | |
| "loss": 0.3411, | |
| "step": 9590 | |
| }, | |
| { | |
| "epoch": 1.6452442159383034, | |
| "grad_norm": 0.4629608392715454, | |
| "learning_rate": 2.258497572122251e-05, | |
| "loss": 0.3495, | |
| "step": 9600 | |
| }, | |
| { | |
| "epoch": 1.6469580119965723, | |
| "grad_norm": 0.43643638491630554, | |
| "learning_rate": 2.2556412453584692e-05, | |
| "loss": 0.3406, | |
| "step": 9610 | |
| }, | |
| { | |
| "epoch": 1.6486718080548415, | |
| "grad_norm": 0.40299829840660095, | |
| "learning_rate": 2.252784918594687e-05, | |
| "loss": 0.3194, | |
| "step": 9620 | |
| }, | |
| { | |
| "epoch": 1.6503856041131106, | |
| "grad_norm": 0.39634135365486145, | |
| "learning_rate": 2.2499285918309054e-05, | |
| "loss": 0.3648, | |
| "step": 9630 | |
| }, | |
| { | |
| "epoch": 1.6520994001713796, | |
| "grad_norm": 0.37745097279548645, | |
| "learning_rate": 2.247072265067124e-05, | |
| "loss": 0.3309, | |
| "step": 9640 | |
| }, | |
| { | |
| "epoch": 1.6538131962296485, | |
| "grad_norm": 0.3941361606121063, | |
| "learning_rate": 2.244215938303342e-05, | |
| "loss": 0.3565, | |
| "step": 9650 | |
| }, | |
| { | |
| "epoch": 1.6555269922879177, | |
| "grad_norm": 0.3889491856098175, | |
| "learning_rate": 2.2413596115395602e-05, | |
| "loss": 0.3213, | |
| "step": 9660 | |
| }, | |
| { | |
| "epoch": 1.6572407883461868, | |
| "grad_norm": 0.40471869707107544, | |
| "learning_rate": 2.2385032847757785e-05, | |
| "loss": 0.3422, | |
| "step": 9670 | |
| }, | |
| { | |
| "epoch": 1.658954584404456, | |
| "grad_norm": 0.4090235233306885, | |
| "learning_rate": 2.2356469580119967e-05, | |
| "loss": 0.3612, | |
| "step": 9680 | |
| }, | |
| { | |
| "epoch": 1.660668380462725, | |
| "grad_norm": 0.42937588691711426, | |
| "learning_rate": 2.232790631248215e-05, | |
| "loss": 0.3259, | |
| "step": 9690 | |
| }, | |
| { | |
| "epoch": 1.6623821765209938, | |
| "grad_norm": 0.4077993631362915, | |
| "learning_rate": 2.2299343044844333e-05, | |
| "loss": 0.3343, | |
| "step": 9700 | |
| }, | |
| { | |
| "epoch": 1.664095972579263, | |
| "grad_norm": 0.4152628183364868, | |
| "learning_rate": 2.2270779777206512e-05, | |
| "loss": 0.3338, | |
| "step": 9710 | |
| }, | |
| { | |
| "epoch": 1.6658097686375322, | |
| "grad_norm": 0.4114777147769928, | |
| "learning_rate": 2.2242216509568694e-05, | |
| "loss": 0.3418, | |
| "step": 9720 | |
| }, | |
| { | |
| "epoch": 1.6675235646958013, | |
| "grad_norm": 0.46183767914772034, | |
| "learning_rate": 2.221365324193088e-05, | |
| "loss": 0.3493, | |
| "step": 9730 | |
| }, | |
| { | |
| "epoch": 1.6692373607540703, | |
| "grad_norm": 0.4328729808330536, | |
| "learning_rate": 2.218508997429306e-05, | |
| "loss": 0.3473, | |
| "step": 9740 | |
| }, | |
| { | |
| "epoch": 1.6709511568123392, | |
| "grad_norm": 0.42931821942329407, | |
| "learning_rate": 2.2156526706655242e-05, | |
| "loss": 0.3313, | |
| "step": 9750 | |
| }, | |
| { | |
| "epoch": 1.6726649528706083, | |
| "grad_norm": 0.4109592139720917, | |
| "learning_rate": 2.2127963439017425e-05, | |
| "loss": 0.3453, | |
| "step": 9760 | |
| }, | |
| { | |
| "epoch": 1.6743787489288775, | |
| "grad_norm": 0.37919366359710693, | |
| "learning_rate": 2.2099400171379604e-05, | |
| "loss": 0.3242, | |
| "step": 9770 | |
| }, | |
| { | |
| "epoch": 1.6760925449871467, | |
| "grad_norm": 0.4012930989265442, | |
| "learning_rate": 2.207083690374179e-05, | |
| "loss": 0.3469, | |
| "step": 9780 | |
| }, | |
| { | |
| "epoch": 1.6778063410454156, | |
| "grad_norm": 0.3762998878955841, | |
| "learning_rate": 2.2042273636103973e-05, | |
| "loss": 0.3517, | |
| "step": 9790 | |
| }, | |
| { | |
| "epoch": 1.6795201371036845, | |
| "grad_norm": 0.44242531061172485, | |
| "learning_rate": 2.2013710368466152e-05, | |
| "loss": 0.3469, | |
| "step": 9800 | |
| }, | |
| { | |
| "epoch": 1.6812339331619537, | |
| "grad_norm": 0.38373252749443054, | |
| "learning_rate": 2.1985147100828335e-05, | |
| "loss": 0.3306, | |
| "step": 9810 | |
| }, | |
| { | |
| "epoch": 1.6829477292202228, | |
| "grad_norm": 0.399817556142807, | |
| "learning_rate": 2.195658383319052e-05, | |
| "loss": 0.3196, | |
| "step": 9820 | |
| }, | |
| { | |
| "epoch": 1.684661525278492, | |
| "grad_norm": 0.4670737385749817, | |
| "learning_rate": 2.19280205655527e-05, | |
| "loss": 0.339, | |
| "step": 9830 | |
| }, | |
| { | |
| "epoch": 1.686375321336761, | |
| "grad_norm": 0.4311138391494751, | |
| "learning_rate": 2.1899457297914883e-05, | |
| "loss": 0.3213, | |
| "step": 9840 | |
| }, | |
| { | |
| "epoch": 1.6880891173950299, | |
| "grad_norm": 0.42144256830215454, | |
| "learning_rate": 2.1870894030277065e-05, | |
| "loss": 0.3275, | |
| "step": 9850 | |
| }, | |
| { | |
| "epoch": 1.689802913453299, | |
| "grad_norm": 0.4103786051273346, | |
| "learning_rate": 2.1842330762639244e-05, | |
| "loss": 0.3373, | |
| "step": 9860 | |
| }, | |
| { | |
| "epoch": 1.6915167095115682, | |
| "grad_norm": 0.41490599513053894, | |
| "learning_rate": 2.181376749500143e-05, | |
| "loss": 0.3235, | |
| "step": 9870 | |
| }, | |
| { | |
| "epoch": 1.6932305055698373, | |
| "grad_norm": 0.43073931336402893, | |
| "learning_rate": 2.1785204227363613e-05, | |
| "loss": 0.325, | |
| "step": 9880 | |
| }, | |
| { | |
| "epoch": 1.6949443016281063, | |
| "grad_norm": 0.4372043013572693, | |
| "learning_rate": 2.1756640959725792e-05, | |
| "loss": 0.3336, | |
| "step": 9890 | |
| }, | |
| { | |
| "epoch": 1.6966580976863752, | |
| "grad_norm": 0.3718228340148926, | |
| "learning_rate": 2.1728077692087975e-05, | |
| "loss": 0.3287, | |
| "step": 9900 | |
| }, | |
| { | |
| "epoch": 1.6983718937446444, | |
| "grad_norm": 0.41269469261169434, | |
| "learning_rate": 2.1699514424450158e-05, | |
| "loss": 0.3324, | |
| "step": 9910 | |
| }, | |
| { | |
| "epoch": 1.7000856898029135, | |
| "grad_norm": 0.39504536986351013, | |
| "learning_rate": 2.167095115681234e-05, | |
| "loss": 0.352, | |
| "step": 9920 | |
| }, | |
| { | |
| "epoch": 1.7017994858611827, | |
| "grad_norm": 0.520569920539856, | |
| "learning_rate": 2.1642387889174523e-05, | |
| "loss": 0.3444, | |
| "step": 9930 | |
| }, | |
| { | |
| "epoch": 1.7035132819194516, | |
| "grad_norm": 0.44295620918273926, | |
| "learning_rate": 2.1613824621536705e-05, | |
| "loss": 0.3373, | |
| "step": 9940 | |
| }, | |
| { | |
| "epoch": 1.7052270779777206, | |
| "grad_norm": 0.38542020320892334, | |
| "learning_rate": 2.1585261353898885e-05, | |
| "loss": 0.3251, | |
| "step": 9950 | |
| }, | |
| { | |
| "epoch": 1.7069408740359897, | |
| "grad_norm": 0.4100322425365448, | |
| "learning_rate": 2.155669808626107e-05, | |
| "loss": 0.3445, | |
| "step": 9960 | |
| }, | |
| { | |
| "epoch": 1.7086546700942589, | |
| "grad_norm": 0.37752169370651245, | |
| "learning_rate": 2.1528134818623253e-05, | |
| "loss": 0.3429, | |
| "step": 9970 | |
| }, | |
| { | |
| "epoch": 1.7103684661525278, | |
| "grad_norm": 0.4206605553627014, | |
| "learning_rate": 2.1499571550985432e-05, | |
| "loss": 0.3295, | |
| "step": 9980 | |
| }, | |
| { | |
| "epoch": 1.712082262210797, | |
| "grad_norm": 0.3830428421497345, | |
| "learning_rate": 2.1471008283347615e-05, | |
| "loss": 0.3298, | |
| "step": 9990 | |
| }, | |
| { | |
| "epoch": 1.713796058269066, | |
| "grad_norm": 0.40393152832984924, | |
| "learning_rate": 2.1442445015709798e-05, | |
| "loss": 0.3349, | |
| "step": 10000 | |
| }, | |
| { | |
| "epoch": 1.715509854327335, | |
| "grad_norm": 0.37100762128829956, | |
| "learning_rate": 2.141388174807198e-05, | |
| "loss": 0.3354, | |
| "step": 10010 | |
| }, | |
| { | |
| "epoch": 1.7172236503856042, | |
| "grad_norm": 0.3826449513435364, | |
| "learning_rate": 2.1385318480434163e-05, | |
| "loss": 0.3452, | |
| "step": 10020 | |
| }, | |
| { | |
| "epoch": 1.7189374464438731, | |
| "grad_norm": 0.39858880639076233, | |
| "learning_rate": 2.1356755212796346e-05, | |
| "loss": 0.3416, | |
| "step": 10030 | |
| }, | |
| { | |
| "epoch": 1.7206512425021423, | |
| "grad_norm": 0.38912972807884216, | |
| "learning_rate": 2.1328191945158528e-05, | |
| "loss": 0.3357, | |
| "step": 10040 | |
| }, | |
| { | |
| "epoch": 1.7223650385604112, | |
| "grad_norm": 0.39427903294563293, | |
| "learning_rate": 2.1299628677520707e-05, | |
| "loss": 0.3309, | |
| "step": 10050 | |
| }, | |
| { | |
| "epoch": 1.7240788346186804, | |
| "grad_norm": 0.4788413941860199, | |
| "learning_rate": 2.1271065409882893e-05, | |
| "loss": 0.3395, | |
| "step": 10060 | |
| }, | |
| { | |
| "epoch": 1.7257926306769495, | |
| "grad_norm": 0.36351656913757324, | |
| "learning_rate": 2.1242502142245073e-05, | |
| "loss": 0.3499, | |
| "step": 10070 | |
| }, | |
| { | |
| "epoch": 1.7275064267352185, | |
| "grad_norm": 0.42046666145324707, | |
| "learning_rate": 2.1213938874607255e-05, | |
| "loss": 0.3257, | |
| "step": 10080 | |
| }, | |
| { | |
| "epoch": 1.7292202227934874, | |
| "grad_norm": 0.41890949010849, | |
| "learning_rate": 2.1185375606969438e-05, | |
| "loss": 0.3186, | |
| "step": 10090 | |
| }, | |
| { | |
| "epoch": 1.7309340188517566, | |
| "grad_norm": 0.3858489990234375, | |
| "learning_rate": 2.115681233933162e-05, | |
| "loss": 0.3455, | |
| "step": 10100 | |
| }, | |
| { | |
| "epoch": 1.7326478149100257, | |
| "grad_norm": 0.41216158866882324, | |
| "learning_rate": 2.1128249071693803e-05, | |
| "loss": 0.3313, | |
| "step": 10110 | |
| }, | |
| { | |
| "epoch": 1.7343616109682949, | |
| "grad_norm": 0.4424970746040344, | |
| "learning_rate": 2.1099685804055986e-05, | |
| "loss": 0.335, | |
| "step": 10120 | |
| }, | |
| { | |
| "epoch": 1.7360754070265638, | |
| "grad_norm": 0.4221234619617462, | |
| "learning_rate": 2.107112253641817e-05, | |
| "loss": 0.3321, | |
| "step": 10130 | |
| }, | |
| { | |
| "epoch": 1.7377892030848328, | |
| "grad_norm": 0.4379345178604126, | |
| "learning_rate": 2.1042559268780348e-05, | |
| "loss": 0.3264, | |
| "step": 10140 | |
| }, | |
| { | |
| "epoch": 1.739502999143102, | |
| "grad_norm": 0.38541027903556824, | |
| "learning_rate": 2.1013996001142534e-05, | |
| "loss": 0.333, | |
| "step": 10150 | |
| }, | |
| { | |
| "epoch": 1.741216795201371, | |
| "grad_norm": 0.43533679842948914, | |
| "learning_rate": 2.0985432733504713e-05, | |
| "loss": 0.349, | |
| "step": 10160 | |
| }, | |
| { | |
| "epoch": 1.7429305912596402, | |
| "grad_norm": 0.4305630326271057, | |
| "learning_rate": 2.0956869465866896e-05, | |
| "loss": 0.331, | |
| "step": 10170 | |
| }, | |
| { | |
| "epoch": 1.7446443873179092, | |
| "grad_norm": 0.4137646555900574, | |
| "learning_rate": 2.0928306198229078e-05, | |
| "loss": 0.3435, | |
| "step": 10180 | |
| }, | |
| { | |
| "epoch": 1.746358183376178, | |
| "grad_norm": 0.391099214553833, | |
| "learning_rate": 2.089974293059126e-05, | |
| "loss": 0.325, | |
| "step": 10190 | |
| }, | |
| { | |
| "epoch": 1.7480719794344473, | |
| "grad_norm": 0.39120209217071533, | |
| "learning_rate": 2.0871179662953443e-05, | |
| "loss": 0.3359, | |
| "step": 10200 | |
| }, | |
| { | |
| "epoch": 1.7497857754927164, | |
| "grad_norm": 0.37801820039749146, | |
| "learning_rate": 2.0842616395315626e-05, | |
| "loss": 0.3211, | |
| "step": 10210 | |
| }, | |
| { | |
| "epoch": 1.7514995715509856, | |
| "grad_norm": 0.39814886450767517, | |
| "learning_rate": 2.081405312767781e-05, | |
| "loss": 0.3234, | |
| "step": 10220 | |
| }, | |
| { | |
| "epoch": 1.7532133676092545, | |
| "grad_norm": 0.39131447672843933, | |
| "learning_rate": 2.0785489860039988e-05, | |
| "loss": 0.3352, | |
| "step": 10230 | |
| }, | |
| { | |
| "epoch": 1.7549271636675234, | |
| "grad_norm": 0.4148966372013092, | |
| "learning_rate": 2.075692659240217e-05, | |
| "loss": 0.3339, | |
| "step": 10240 | |
| }, | |
| { | |
| "epoch": 1.7566409597257926, | |
| "grad_norm": 0.40360793471336365, | |
| "learning_rate": 2.0728363324764357e-05, | |
| "loss": 0.3372, | |
| "step": 10250 | |
| }, | |
| { | |
| "epoch": 1.7583547557840618, | |
| "grad_norm": 0.4748077690601349, | |
| "learning_rate": 2.0699800057126536e-05, | |
| "loss": 0.3256, | |
| "step": 10260 | |
| }, | |
| { | |
| "epoch": 1.760068551842331, | |
| "grad_norm": 0.3388690650463104, | |
| "learning_rate": 2.067123678948872e-05, | |
| "loss": 0.3532, | |
| "step": 10270 | |
| }, | |
| { | |
| "epoch": 1.7617823479005998, | |
| "grad_norm": 0.4345872402191162, | |
| "learning_rate": 2.06426735218509e-05, | |
| "loss": 0.3431, | |
| "step": 10280 | |
| }, | |
| { | |
| "epoch": 1.7634961439588688, | |
| "grad_norm": 0.4032730460166931, | |
| "learning_rate": 2.0614110254213084e-05, | |
| "loss": 0.34, | |
| "step": 10290 | |
| }, | |
| { | |
| "epoch": 1.765209940017138, | |
| "grad_norm": 0.3942527770996094, | |
| "learning_rate": 2.0585546986575266e-05, | |
| "loss": 0.3226, | |
| "step": 10300 | |
| }, | |
| { | |
| "epoch": 1.766923736075407, | |
| "grad_norm": 0.38292983174324036, | |
| "learning_rate": 2.055698371893745e-05, | |
| "loss": 0.3176, | |
| "step": 10310 | |
| }, | |
| { | |
| "epoch": 1.7686375321336762, | |
| "grad_norm": 0.407644122838974, | |
| "learning_rate": 2.0528420451299628e-05, | |
| "loss": 0.3395, | |
| "step": 10320 | |
| }, | |
| { | |
| "epoch": 1.7703513281919452, | |
| "grad_norm": 0.42464375495910645, | |
| "learning_rate": 2.049985718366181e-05, | |
| "loss": 0.3213, | |
| "step": 10330 | |
| }, | |
| { | |
| "epoch": 1.7720651242502141, | |
| "grad_norm": 0.3565274178981781, | |
| "learning_rate": 2.0471293916023997e-05, | |
| "loss": 0.3362, | |
| "step": 10340 | |
| }, | |
| { | |
| "epoch": 1.7737789203084833, | |
| "grad_norm": 0.37619078159332275, | |
| "learning_rate": 2.0442730648386176e-05, | |
| "loss": 0.3526, | |
| "step": 10350 | |
| }, | |
| { | |
| "epoch": 1.7754927163667524, | |
| "grad_norm": 0.43911996483802795, | |
| "learning_rate": 2.041416738074836e-05, | |
| "loss": 0.3256, | |
| "step": 10360 | |
| }, | |
| { | |
| "epoch": 1.7772065124250214, | |
| "grad_norm": 0.3840199410915375, | |
| "learning_rate": 2.038560411311054e-05, | |
| "loss": 0.3259, | |
| "step": 10370 | |
| }, | |
| { | |
| "epoch": 1.7789203084832905, | |
| "grad_norm": 0.4230378568172455, | |
| "learning_rate": 2.035704084547272e-05, | |
| "loss": 0.3282, | |
| "step": 10380 | |
| }, | |
| { | |
| "epoch": 1.7806341045415595, | |
| "grad_norm": 0.4155026376247406, | |
| "learning_rate": 2.0328477577834907e-05, | |
| "loss": 0.335, | |
| "step": 10390 | |
| }, | |
| { | |
| "epoch": 1.7823479005998286, | |
| "grad_norm": 0.46618354320526123, | |
| "learning_rate": 2.029991431019709e-05, | |
| "loss": 0.3474, | |
| "step": 10400 | |
| }, | |
| { | |
| "epoch": 1.7840616966580978, | |
| "grad_norm": 0.3844912052154541, | |
| "learning_rate": 2.027135104255927e-05, | |
| "loss": 0.3218, | |
| "step": 10410 | |
| }, | |
| { | |
| "epoch": 1.7857754927163667, | |
| "grad_norm": 0.37245050072669983, | |
| "learning_rate": 2.024278777492145e-05, | |
| "loss": 0.3367, | |
| "step": 10420 | |
| }, | |
| { | |
| "epoch": 1.7874892887746359, | |
| "grad_norm": 0.3999907970428467, | |
| "learning_rate": 2.0214224507283637e-05, | |
| "loss": 0.3493, | |
| "step": 10430 | |
| }, | |
| { | |
| "epoch": 1.7892030848329048, | |
| "grad_norm": 0.39571475982666016, | |
| "learning_rate": 2.0185661239645816e-05, | |
| "loss": 0.3301, | |
| "step": 10440 | |
| }, | |
| { | |
| "epoch": 1.790916880891174, | |
| "grad_norm": 0.38127267360687256, | |
| "learning_rate": 2.0157097972008e-05, | |
| "loss": 0.3419, | |
| "step": 10450 | |
| }, | |
| { | |
| "epoch": 1.7926306769494431, | |
| "grad_norm": 0.36275482177734375, | |
| "learning_rate": 2.012853470437018e-05, | |
| "loss": 0.3284, | |
| "step": 10460 | |
| }, | |
| { | |
| "epoch": 1.794344473007712, | |
| "grad_norm": 0.3948185443878174, | |
| "learning_rate": 2.009997143673236e-05, | |
| "loss": 0.3473, | |
| "step": 10470 | |
| }, | |
| { | |
| "epoch": 1.796058269065981, | |
| "grad_norm": 0.4034388065338135, | |
| "learning_rate": 2.0071408169094547e-05, | |
| "loss": 0.3299, | |
| "step": 10480 | |
| }, | |
| { | |
| "epoch": 1.7977720651242501, | |
| "grad_norm": 0.3925109803676605, | |
| "learning_rate": 2.004284490145673e-05, | |
| "loss": 0.3232, | |
| "step": 10490 | |
| }, | |
| { | |
| "epoch": 1.7994858611825193, | |
| "grad_norm": 0.39803266525268555, | |
| "learning_rate": 2.001428163381891e-05, | |
| "loss": 0.3428, | |
| "step": 10500 | |
| }, | |
| { | |
| "epoch": 1.8011996572407885, | |
| "grad_norm": 0.39179807901382446, | |
| "learning_rate": 1.998571836618109e-05, | |
| "loss": 0.3123, | |
| "step": 10510 | |
| }, | |
| { | |
| "epoch": 1.8029134532990574, | |
| "grad_norm": 0.40016594529151917, | |
| "learning_rate": 1.9957155098543274e-05, | |
| "loss": 0.3334, | |
| "step": 10520 | |
| }, | |
| { | |
| "epoch": 1.8046272493573263, | |
| "grad_norm": 0.41052624583244324, | |
| "learning_rate": 1.9928591830905457e-05, | |
| "loss": 0.342, | |
| "step": 10530 | |
| }, | |
| { | |
| "epoch": 1.8063410454155955, | |
| "grad_norm": 0.395678848028183, | |
| "learning_rate": 1.990002856326764e-05, | |
| "loss": 0.3374, | |
| "step": 10540 | |
| }, | |
| { | |
| "epoch": 1.8080548414738646, | |
| "grad_norm": 0.40321213006973267, | |
| "learning_rate": 1.9871465295629822e-05, | |
| "loss": 0.3424, | |
| "step": 10550 | |
| }, | |
| { | |
| "epoch": 1.8097686375321338, | |
| "grad_norm": 0.38921666145324707, | |
| "learning_rate": 1.9842902027992e-05, | |
| "loss": 0.3249, | |
| "step": 10560 | |
| }, | |
| { | |
| "epoch": 1.8114824335904027, | |
| "grad_norm": 0.4000155031681061, | |
| "learning_rate": 1.9814338760354184e-05, | |
| "loss": 0.3397, | |
| "step": 10570 | |
| }, | |
| { | |
| "epoch": 1.8131962296486717, | |
| "grad_norm": 0.4053511619567871, | |
| "learning_rate": 1.978577549271637e-05, | |
| "loss": 0.3315, | |
| "step": 10580 | |
| }, | |
| { | |
| "epoch": 1.8149100257069408, | |
| "grad_norm": 0.3945310115814209, | |
| "learning_rate": 1.975721222507855e-05, | |
| "loss": 0.3419, | |
| "step": 10590 | |
| }, | |
| { | |
| "epoch": 1.81662382176521, | |
| "grad_norm": 0.42978984117507935, | |
| "learning_rate": 1.972864895744073e-05, | |
| "loss": 0.3515, | |
| "step": 10600 | |
| }, | |
| { | |
| "epoch": 1.8183376178234791, | |
| "grad_norm": 0.3619568943977356, | |
| "learning_rate": 1.9700085689802914e-05, | |
| "loss": 0.3338, | |
| "step": 10610 | |
| }, | |
| { | |
| "epoch": 1.820051413881748, | |
| "grad_norm": 0.3962588310241699, | |
| "learning_rate": 1.9671522422165097e-05, | |
| "loss": 0.3329, | |
| "step": 10620 | |
| }, | |
| { | |
| "epoch": 1.821765209940017, | |
| "grad_norm": 0.3794926106929779, | |
| "learning_rate": 1.964295915452728e-05, | |
| "loss": 0.3274, | |
| "step": 10630 | |
| }, | |
| { | |
| "epoch": 1.8234790059982862, | |
| "grad_norm": 0.489555299282074, | |
| "learning_rate": 1.9614395886889462e-05, | |
| "loss": 0.3547, | |
| "step": 10640 | |
| }, | |
| { | |
| "epoch": 1.8251928020565553, | |
| "grad_norm": 0.4101254642009735, | |
| "learning_rate": 1.958583261925164e-05, | |
| "loss": 0.3262, | |
| "step": 10650 | |
| }, | |
| { | |
| "epoch": 1.8269065981148245, | |
| "grad_norm": 0.408819317817688, | |
| "learning_rate": 1.9557269351613824e-05, | |
| "loss": 0.3394, | |
| "step": 10660 | |
| }, | |
| { | |
| "epoch": 1.8286203941730934, | |
| "grad_norm": 0.38574379682540894, | |
| "learning_rate": 1.952870608397601e-05, | |
| "loss": 0.3432, | |
| "step": 10670 | |
| }, | |
| { | |
| "epoch": 1.8303341902313623, | |
| "grad_norm": 0.38276317715644836, | |
| "learning_rate": 1.950014281633819e-05, | |
| "loss": 0.3439, | |
| "step": 10680 | |
| }, | |
| { | |
| "epoch": 1.8320479862896315, | |
| "grad_norm": 0.4008176922798157, | |
| "learning_rate": 1.9471579548700372e-05, | |
| "loss": 0.348, | |
| "step": 10690 | |
| }, | |
| { | |
| "epoch": 1.8337617823479007, | |
| "grad_norm": 0.4130050837993622, | |
| "learning_rate": 1.9443016281062554e-05, | |
| "loss": 0.3327, | |
| "step": 10700 | |
| }, | |
| { | |
| "epoch": 1.8354755784061698, | |
| "grad_norm": 0.39769622683525085, | |
| "learning_rate": 1.9414453013424737e-05, | |
| "loss": 0.3314, | |
| "step": 10710 | |
| }, | |
| { | |
| "epoch": 1.8371893744644388, | |
| "grad_norm": 0.3474248945713043, | |
| "learning_rate": 1.938588974578692e-05, | |
| "loss": 0.3245, | |
| "step": 10720 | |
| }, | |
| { | |
| "epoch": 1.8389031705227077, | |
| "grad_norm": 0.3834674656391144, | |
| "learning_rate": 1.9357326478149102e-05, | |
| "loss": 0.3327, | |
| "step": 10730 | |
| }, | |
| { | |
| "epoch": 1.8406169665809768, | |
| "grad_norm": 0.4144718647003174, | |
| "learning_rate": 1.932876321051128e-05, | |
| "loss": 0.345, | |
| "step": 10740 | |
| }, | |
| { | |
| "epoch": 1.842330762639246, | |
| "grad_norm": 0.41075778007507324, | |
| "learning_rate": 1.9300199942873464e-05, | |
| "loss": 0.3497, | |
| "step": 10750 | |
| }, | |
| { | |
| "epoch": 1.8440445586975152, | |
| "grad_norm": 0.4072783291339874, | |
| "learning_rate": 1.927163667523565e-05, | |
| "loss": 0.329, | |
| "step": 10760 | |
| }, | |
| { | |
| "epoch": 1.845758354755784, | |
| "grad_norm": 0.4069250226020813, | |
| "learning_rate": 1.924307340759783e-05, | |
| "loss": 0.3254, | |
| "step": 10770 | |
| }, | |
| { | |
| "epoch": 1.847472150814053, | |
| "grad_norm": 0.40920811891555786, | |
| "learning_rate": 1.9214510139960012e-05, | |
| "loss": 0.339, | |
| "step": 10780 | |
| }, | |
| { | |
| "epoch": 1.8491859468723222, | |
| "grad_norm": 0.4345456659793854, | |
| "learning_rate": 1.9185946872322195e-05, | |
| "loss": 0.3486, | |
| "step": 10790 | |
| }, | |
| { | |
| "epoch": 1.8508997429305913, | |
| "grad_norm": 0.3994877338409424, | |
| "learning_rate": 1.9157383604684377e-05, | |
| "loss": 0.3352, | |
| "step": 10800 | |
| }, | |
| { | |
| "epoch": 1.8526135389888603, | |
| "grad_norm": 0.38893967866897583, | |
| "learning_rate": 1.912882033704656e-05, | |
| "loss": 0.3338, | |
| "step": 10810 | |
| }, | |
| { | |
| "epoch": 1.8543273350471294, | |
| "grad_norm": 0.37634167075157166, | |
| "learning_rate": 1.9100257069408742e-05, | |
| "loss": 0.3379, | |
| "step": 10820 | |
| }, | |
| { | |
| "epoch": 1.8560411311053984, | |
| "grad_norm": 0.5175420045852661, | |
| "learning_rate": 1.9071693801770925e-05, | |
| "loss": 0.3277, | |
| "step": 10830 | |
| }, | |
| { | |
| "epoch": 1.8577549271636675, | |
| "grad_norm": 0.34820953011512756, | |
| "learning_rate": 1.9043130534133104e-05, | |
| "loss": 0.3458, | |
| "step": 10840 | |
| }, | |
| { | |
| "epoch": 1.8594687232219367, | |
| "grad_norm": 0.46136412024497986, | |
| "learning_rate": 1.9014567266495287e-05, | |
| "loss": 0.3433, | |
| "step": 10850 | |
| }, | |
| { | |
| "epoch": 1.8611825192802056, | |
| "grad_norm": 0.3980143666267395, | |
| "learning_rate": 1.898600399885747e-05, | |
| "loss": 0.3362, | |
| "step": 10860 | |
| }, | |
| { | |
| "epoch": 1.8628963153384748, | |
| "grad_norm": 0.3734038472175598, | |
| "learning_rate": 1.8957440731219652e-05, | |
| "loss": 0.3279, | |
| "step": 10870 | |
| }, | |
| { | |
| "epoch": 1.8646101113967437, | |
| "grad_norm": 0.38700205087661743, | |
| "learning_rate": 1.8928877463581835e-05, | |
| "loss": 0.3336, | |
| "step": 10880 | |
| }, | |
| { | |
| "epoch": 1.8663239074550129, | |
| "grad_norm": 0.3614012897014618, | |
| "learning_rate": 1.8900314195944017e-05, | |
| "loss": 0.3314, | |
| "step": 10890 | |
| }, | |
| { | |
| "epoch": 1.868037703513282, | |
| "grad_norm": 0.40407755970954895, | |
| "learning_rate": 1.88717509283062e-05, | |
| "loss": 0.326, | |
| "step": 10900 | |
| }, | |
| { | |
| "epoch": 1.869751499571551, | |
| "grad_norm": 0.45859092473983765, | |
| "learning_rate": 1.8843187660668383e-05, | |
| "loss": 0.3307, | |
| "step": 10910 | |
| }, | |
| { | |
| "epoch": 1.87146529562982, | |
| "grad_norm": 0.3943389058113098, | |
| "learning_rate": 1.8814624393030565e-05, | |
| "loss": 0.32, | |
| "step": 10920 | |
| }, | |
| { | |
| "epoch": 1.873179091688089, | |
| "grad_norm": 0.4314815402030945, | |
| "learning_rate": 1.8786061125392745e-05, | |
| "loss": 0.321, | |
| "step": 10930 | |
| }, | |
| { | |
| "epoch": 1.8748928877463582, | |
| "grad_norm": 0.4362752437591553, | |
| "learning_rate": 1.8757497857754927e-05, | |
| "loss": 0.3245, | |
| "step": 10940 | |
| }, | |
| { | |
| "epoch": 1.8766066838046274, | |
| "grad_norm": 0.4172927439212799, | |
| "learning_rate": 1.8728934590117113e-05, | |
| "loss": 0.3297, | |
| "step": 10950 | |
| }, | |
| { | |
| "epoch": 1.8783204798628963, | |
| "grad_norm": 0.350132554769516, | |
| "learning_rate": 1.8700371322479292e-05, | |
| "loss": 0.3232, | |
| "step": 10960 | |
| }, | |
| { | |
| "epoch": 1.8800342759211652, | |
| "grad_norm": 0.38863199949264526, | |
| "learning_rate": 1.8671808054841475e-05, | |
| "loss": 0.3121, | |
| "step": 10970 | |
| }, | |
| { | |
| "epoch": 1.8817480719794344, | |
| "grad_norm": 0.3894982933998108, | |
| "learning_rate": 1.8643244787203658e-05, | |
| "loss": 0.3203, | |
| "step": 10980 | |
| }, | |
| { | |
| "epoch": 1.8834618680377035, | |
| "grad_norm": 0.405094712972641, | |
| "learning_rate": 1.8614681519565837e-05, | |
| "loss": 0.331, | |
| "step": 10990 | |
| }, | |
| { | |
| "epoch": 1.8851756640959727, | |
| "grad_norm": 0.4262722134590149, | |
| "learning_rate": 1.8586118251928023e-05, | |
| "loss": 0.3411, | |
| "step": 11000 | |
| }, | |
| { | |
| "epoch": 1.8868894601542416, | |
| "grad_norm": 0.37803834676742554, | |
| "learning_rate": 1.8557554984290206e-05, | |
| "loss": 0.327, | |
| "step": 11010 | |
| }, | |
| { | |
| "epoch": 1.8886032562125106, | |
| "grad_norm": 0.44252264499664307, | |
| "learning_rate": 1.8528991716652385e-05, | |
| "loss": 0.322, | |
| "step": 11020 | |
| }, | |
| { | |
| "epoch": 1.8903170522707797, | |
| "grad_norm": 0.41628533601760864, | |
| "learning_rate": 1.8500428449014567e-05, | |
| "loss": 0.3338, | |
| "step": 11030 | |
| }, | |
| { | |
| "epoch": 1.8920308483290489, | |
| "grad_norm": 0.4093359112739563, | |
| "learning_rate": 1.847186518137675e-05, | |
| "loss": 0.3329, | |
| "step": 11040 | |
| }, | |
| { | |
| "epoch": 1.893744644387318, | |
| "grad_norm": 0.41291117668151855, | |
| "learning_rate": 1.8443301913738933e-05, | |
| "loss": 0.3206, | |
| "step": 11050 | |
| }, | |
| { | |
| "epoch": 1.895458440445587, | |
| "grad_norm": 0.4110288918018341, | |
| "learning_rate": 1.8414738646101115e-05, | |
| "loss": 0.3396, | |
| "step": 11060 | |
| }, | |
| { | |
| "epoch": 1.897172236503856, | |
| "grad_norm": 0.41824471950531006, | |
| "learning_rate": 1.8386175378463298e-05, | |
| "loss": 0.3272, | |
| "step": 11070 | |
| }, | |
| { | |
| "epoch": 1.898886032562125, | |
| "grad_norm": 0.3943333029747009, | |
| "learning_rate": 1.8357612110825477e-05, | |
| "loss": 0.3448, | |
| "step": 11080 | |
| }, | |
| { | |
| "epoch": 1.9005998286203942, | |
| "grad_norm": 0.4381183683872223, | |
| "learning_rate": 1.8329048843187663e-05, | |
| "loss": 0.3315, | |
| "step": 11090 | |
| }, | |
| { | |
| "epoch": 1.9023136246786634, | |
| "grad_norm": 0.439394474029541, | |
| "learning_rate": 1.8300485575549846e-05, | |
| "loss": 0.3265, | |
| "step": 11100 | |
| }, | |
| { | |
| "epoch": 1.9040274207369323, | |
| "grad_norm": 0.41325217485427856, | |
| "learning_rate": 1.8271922307912025e-05, | |
| "loss": 0.3234, | |
| "step": 11110 | |
| }, | |
| { | |
| "epoch": 1.9057412167952013, | |
| "grad_norm": 0.44458985328674316, | |
| "learning_rate": 1.8243359040274208e-05, | |
| "loss": 0.3254, | |
| "step": 11120 | |
| }, | |
| { | |
| "epoch": 1.9074550128534704, | |
| "grad_norm": 0.3895314335823059, | |
| "learning_rate": 1.821479577263639e-05, | |
| "loss": 0.3406, | |
| "step": 11130 | |
| }, | |
| { | |
| "epoch": 1.9091688089117396, | |
| "grad_norm": 0.407094806432724, | |
| "learning_rate": 1.8186232504998573e-05, | |
| "loss": 0.3244, | |
| "step": 11140 | |
| }, | |
| { | |
| "epoch": 1.9108826049700087, | |
| "grad_norm": 0.4123266935348511, | |
| "learning_rate": 1.8157669237360755e-05, | |
| "loss": 0.3346, | |
| "step": 11150 | |
| }, | |
| { | |
| "epoch": 1.9125964010282777, | |
| "grad_norm": 0.3695683479309082, | |
| "learning_rate": 1.8129105969722938e-05, | |
| "loss": 0.3134, | |
| "step": 11160 | |
| }, | |
| { | |
| "epoch": 1.9143101970865466, | |
| "grad_norm": 0.3902941942214966, | |
| "learning_rate": 1.8100542702085117e-05, | |
| "loss": 0.3358, | |
| "step": 11170 | |
| }, | |
| { | |
| "epoch": 1.9160239931448158, | |
| "grad_norm": 0.3855278789997101, | |
| "learning_rate": 1.80719794344473e-05, | |
| "loss": 0.3261, | |
| "step": 11180 | |
| }, | |
| { | |
| "epoch": 1.917737789203085, | |
| "grad_norm": 0.4039939045906067, | |
| "learning_rate": 1.8043416166809486e-05, | |
| "loss": 0.3453, | |
| "step": 11190 | |
| }, | |
| { | |
| "epoch": 1.919451585261354, | |
| "grad_norm": 0.3597125709056854, | |
| "learning_rate": 1.8014852899171665e-05, | |
| "loss": 0.3347, | |
| "step": 11200 | |
| }, | |
| { | |
| "epoch": 1.921165381319623, | |
| "grad_norm": 0.38155752420425415, | |
| "learning_rate": 1.7986289631533848e-05, | |
| "loss": 0.3383, | |
| "step": 11210 | |
| }, | |
| { | |
| "epoch": 1.922879177377892, | |
| "grad_norm": 0.4478619396686554, | |
| "learning_rate": 1.795772636389603e-05, | |
| "loss": 0.3521, | |
| "step": 11220 | |
| }, | |
| { | |
| "epoch": 1.924592973436161, | |
| "grad_norm": 0.36768054962158203, | |
| "learning_rate": 1.7929163096258213e-05, | |
| "loss": 0.3237, | |
| "step": 11230 | |
| }, | |
| { | |
| "epoch": 1.9263067694944302, | |
| "grad_norm": 0.442877858877182, | |
| "learning_rate": 1.7900599828620396e-05, | |
| "loss": 0.3355, | |
| "step": 11240 | |
| }, | |
| { | |
| "epoch": 1.9280205655526992, | |
| "grad_norm": 0.369087815284729, | |
| "learning_rate": 1.787203656098258e-05, | |
| "loss": 0.3292, | |
| "step": 11250 | |
| }, | |
| { | |
| "epoch": 1.9297343616109683, | |
| "grad_norm": 0.4272878170013428, | |
| "learning_rate": 1.7843473293344758e-05, | |
| "loss": 0.3247, | |
| "step": 11260 | |
| }, | |
| { | |
| "epoch": 1.9314481576692373, | |
| "grad_norm": 0.40085938572883606, | |
| "learning_rate": 1.781491002570694e-05, | |
| "loss": 0.3115, | |
| "step": 11270 | |
| }, | |
| { | |
| "epoch": 1.9331619537275064, | |
| "grad_norm": 0.4040519595146179, | |
| "learning_rate": 1.7786346758069126e-05, | |
| "loss": 0.3345, | |
| "step": 11280 | |
| }, | |
| { | |
| "epoch": 1.9348757497857756, | |
| "grad_norm": 0.38495534658432007, | |
| "learning_rate": 1.7757783490431305e-05, | |
| "loss": 0.3325, | |
| "step": 11290 | |
| }, | |
| { | |
| "epoch": 1.9365895458440445, | |
| "grad_norm": 0.40545254945755005, | |
| "learning_rate": 1.7729220222793488e-05, | |
| "loss": 0.3401, | |
| "step": 11300 | |
| }, | |
| { | |
| "epoch": 1.9383033419023135, | |
| "grad_norm": 0.40779033303260803, | |
| "learning_rate": 1.770065695515567e-05, | |
| "loss": 0.3293, | |
| "step": 11310 | |
| }, | |
| { | |
| "epoch": 1.9400171379605826, | |
| "grad_norm": 0.4259167015552521, | |
| "learning_rate": 1.7672093687517853e-05, | |
| "loss": 0.3174, | |
| "step": 11320 | |
| }, | |
| { | |
| "epoch": 1.9417309340188518, | |
| "grad_norm": 0.3741375207901001, | |
| "learning_rate": 1.7643530419880036e-05, | |
| "loss": 0.3279, | |
| "step": 11330 | |
| }, | |
| { | |
| "epoch": 1.943444730077121, | |
| "grad_norm": 0.4534846842288971, | |
| "learning_rate": 1.761496715224222e-05, | |
| "loss": 0.348, | |
| "step": 11340 | |
| }, | |
| { | |
| "epoch": 1.9451585261353899, | |
| "grad_norm": 0.41401243209838867, | |
| "learning_rate": 1.7586403884604398e-05, | |
| "loss": 0.3238, | |
| "step": 11350 | |
| }, | |
| { | |
| "epoch": 1.9468723221936588, | |
| "grad_norm": 0.39744701981544495, | |
| "learning_rate": 1.755784061696658e-05, | |
| "loss": 0.3293, | |
| "step": 11360 | |
| }, | |
| { | |
| "epoch": 1.948586118251928, | |
| "grad_norm": 0.46253296732902527, | |
| "learning_rate": 1.7529277349328766e-05, | |
| "loss": 0.3308, | |
| "step": 11370 | |
| }, | |
| { | |
| "epoch": 1.9502999143101971, | |
| "grad_norm": 0.39185985922813416, | |
| "learning_rate": 1.7500714081690946e-05, | |
| "loss": 0.345, | |
| "step": 11380 | |
| }, | |
| { | |
| "epoch": 1.9520137103684663, | |
| "grad_norm": 0.37337854504585266, | |
| "learning_rate": 1.7472150814053128e-05, | |
| "loss": 0.3404, | |
| "step": 11390 | |
| }, | |
| { | |
| "epoch": 1.9537275064267352, | |
| "grad_norm": 0.47062331438064575, | |
| "learning_rate": 1.744358754641531e-05, | |
| "loss": 0.3361, | |
| "step": 11400 | |
| }, | |
| { | |
| "epoch": 1.9554413024850041, | |
| "grad_norm": 0.3791089355945587, | |
| "learning_rate": 1.7415024278777494e-05, | |
| "loss": 0.3312, | |
| "step": 11410 | |
| }, | |
| { | |
| "epoch": 1.9571550985432733, | |
| "grad_norm": 0.3983840048313141, | |
| "learning_rate": 1.7386461011139676e-05, | |
| "loss": 0.3433, | |
| "step": 11420 | |
| }, | |
| { | |
| "epoch": 1.9588688946015425, | |
| "grad_norm": 0.4170704185962677, | |
| "learning_rate": 1.735789774350186e-05, | |
| "loss": 0.3179, | |
| "step": 11430 | |
| }, | |
| { | |
| "epoch": 1.9605826906598116, | |
| "grad_norm": 0.38848522305488586, | |
| "learning_rate": 1.7329334475864038e-05, | |
| "loss": 0.3247, | |
| "step": 11440 | |
| }, | |
| { | |
| "epoch": 1.9622964867180805, | |
| "grad_norm": 0.38156813383102417, | |
| "learning_rate": 1.730077120822622e-05, | |
| "loss": 0.3323, | |
| "step": 11450 | |
| }, | |
| { | |
| "epoch": 1.9640102827763495, | |
| "grad_norm": 0.37670812010765076, | |
| "learning_rate": 1.7272207940588403e-05, | |
| "loss": 0.3362, | |
| "step": 11460 | |
| }, | |
| { | |
| "epoch": 1.9657240788346186, | |
| "grad_norm": 0.38012853264808655, | |
| "learning_rate": 1.7243644672950586e-05, | |
| "loss": 0.3289, | |
| "step": 11470 | |
| }, | |
| { | |
| "epoch": 1.9674378748928878, | |
| "grad_norm": 0.38730666041374207, | |
| "learning_rate": 1.721508140531277e-05, | |
| "loss": 0.3247, | |
| "step": 11480 | |
| }, | |
| { | |
| "epoch": 1.969151670951157, | |
| "grad_norm": 0.3926190733909607, | |
| "learning_rate": 1.718651813767495e-05, | |
| "loss": 0.3211, | |
| "step": 11490 | |
| }, | |
| { | |
| "epoch": 1.9708654670094259, | |
| "grad_norm": 0.39375394582748413, | |
| "learning_rate": 1.7157954870037134e-05, | |
| "loss": 0.3248, | |
| "step": 11500 | |
| }, | |
| { | |
| "epoch": 1.9725792630676948, | |
| "grad_norm": 0.3916712701320648, | |
| "learning_rate": 1.7129391602399313e-05, | |
| "loss": 0.3269, | |
| "step": 11510 | |
| }, | |
| { | |
| "epoch": 1.974293059125964, | |
| "grad_norm": 0.41252121329307556, | |
| "learning_rate": 1.71008283347615e-05, | |
| "loss": 0.3331, | |
| "step": 11520 | |
| }, | |
| { | |
| "epoch": 1.9760068551842331, | |
| "grad_norm": 0.4166218042373657, | |
| "learning_rate": 1.707226506712368e-05, | |
| "loss": 0.3457, | |
| "step": 11530 | |
| }, | |
| { | |
| "epoch": 1.9777206512425023, | |
| "grad_norm": 0.4022178649902344, | |
| "learning_rate": 1.704370179948586e-05, | |
| "loss": 0.3446, | |
| "step": 11540 | |
| }, | |
| { | |
| "epoch": 1.9794344473007712, | |
| "grad_norm": 0.43955641984939575, | |
| "learning_rate": 1.7015138531848044e-05, | |
| "loss": 0.3377, | |
| "step": 11550 | |
| }, | |
| { | |
| "epoch": 1.9811482433590402, | |
| "grad_norm": 0.4006490707397461, | |
| "learning_rate": 1.6986575264210226e-05, | |
| "loss": 0.3356, | |
| "step": 11560 | |
| }, | |
| { | |
| "epoch": 1.9828620394173093, | |
| "grad_norm": 0.4111030101776123, | |
| "learning_rate": 1.695801199657241e-05, | |
| "loss": 0.3344, | |
| "step": 11570 | |
| }, | |
| { | |
| "epoch": 1.9845758354755785, | |
| "grad_norm": 0.36849358677864075, | |
| "learning_rate": 1.692944872893459e-05, | |
| "loss": 0.3427, | |
| "step": 11580 | |
| }, | |
| { | |
| "epoch": 1.9862896315338476, | |
| "grad_norm": 0.40527480840682983, | |
| "learning_rate": 1.6900885461296774e-05, | |
| "loss": 0.3309, | |
| "step": 11590 | |
| }, | |
| { | |
| "epoch": 1.9880034275921166, | |
| "grad_norm": 0.3989039361476898, | |
| "learning_rate": 1.6872322193658953e-05, | |
| "loss": 0.3303, | |
| "step": 11600 | |
| }, | |
| { | |
| "epoch": 1.9897172236503855, | |
| "grad_norm": 0.39155837893486023, | |
| "learning_rate": 1.684375892602114e-05, | |
| "loss": 0.3364, | |
| "step": 11610 | |
| }, | |
| { | |
| "epoch": 1.9914310197086547, | |
| "grad_norm": 0.4368874132633209, | |
| "learning_rate": 1.6815195658383322e-05, | |
| "loss": 0.3434, | |
| "step": 11620 | |
| }, | |
| { | |
| "epoch": 1.9931448157669238, | |
| "grad_norm": 0.40511763095855713, | |
| "learning_rate": 1.67866323907455e-05, | |
| "loss": 0.3221, | |
| "step": 11630 | |
| }, | |
| { | |
| "epoch": 1.9948586118251928, | |
| "grad_norm": 0.3981911540031433, | |
| "learning_rate": 1.6758069123107684e-05, | |
| "loss": 0.3376, | |
| "step": 11640 | |
| }, | |
| { | |
| "epoch": 1.996572407883462, | |
| "grad_norm": 0.3849552571773529, | |
| "learning_rate": 1.6729505855469866e-05, | |
| "loss": 0.3236, | |
| "step": 11650 | |
| }, | |
| { | |
| "epoch": 1.9982862039417308, | |
| "grad_norm": 0.4233885109424591, | |
| "learning_rate": 1.670094258783205e-05, | |
| "loss": 0.3181, | |
| "step": 11660 | |
| }, | |
| { | |
| "epoch": 2.0, | |
| "grad_norm": 0.8736078143119812, | |
| "learning_rate": 1.667237932019423e-05, | |
| "loss": 0.3204, | |
| "step": 11670 | |
| } | |
| ], | |
| "logging_steps": 10, | |
| "max_steps": 17505, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 3, | |
| "save_steps": 500, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": false | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 9.7565399580672e+16, | |
| "train_batch_size": 16, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |
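
The block above has the shape of a Hugging Face `transformers` Trainer state file (`trainer_state.json`): a list of per-step log entries followed by run-level settings such as `max_steps`, `save_steps`, and `train_batch_size`. As a usage note, the sketch below shows one way such a file could be loaded and its loss curve summarized. It is illustrative only: the file path is a placeholder, and `log_history` is assumed to be the standard field holding the per-step entries logged above.

```python
import json

# Minimal sketch (not part of the log itself): load a trainer_state.json-style
# file and summarize its loss curve. The path is a placeholder; per-entry field
# names ("step", "loss", "learning_rate", "grad_norm") match the entries above.
with open("trainer_state.json") as f:
    state = json.load(f)

# Keep only entries that carry a training loss.
entries = [e for e in state["log_history"] if "loss" in e]
steps = [e["step"] for e in entries]
losses = [e["loss"] for e in entries]

print(f"{len(entries)} logged points, last step {steps[-1]} "
      f"of {state['max_steps']} ({state['num_train_epochs']} epochs planned)")

window = 50  # simple trailing-average smoothing over the most recent points
recent = losses[-window:]
print(f"mean loss over the last {len(recent)} points: {sum(recent) / len(recent):.4f}")
```

Run against a log like this one, the script reports how far training progressed (here, step 11670 of 17505, i.e. 2 of 3 planned epochs) and a smoothed recent loss, which is often enough to decide whether to resume from the saved checkpoint.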