| { |
| "best_metric": null, |
| "best_model_checkpoint": null, |
| "epoch": 2.9999893429956836, |
| "eval_steps": 2000, |
| "global_step": 211128, |
| "is_hyper_param_search": false, |
| "is_local_process_zero": true, |
| "is_world_process_zero": true, |
| "log_history": [ |
| { |
| "epoch": 0.007104669544057832, |
| "grad_norm": 9.946511268615723, |
| "learning_rate": 1.1604224885141856e-06, |
| "loss": 5.8176, |
| "step": 500 |
| }, |
| { |
| "epoch": 0.014209339088115664, |
| "grad_norm": 14.786874771118164, |
| "learning_rate": 2.3421588594704685e-06, |
| "loss": 5.7587, |
| "step": 1000 |
| }, |
| { |
| "epoch": 0.021314008632173496, |
| "grad_norm": 16.701108932495117, |
| "learning_rate": 3.5262634395869846e-06, |
| "loss": 5.532, |
| "step": 1500 |
| }, |
| { |
| "epoch": 0.028418678176231327, |
| "grad_norm": 23.213130950927734, |
| "learning_rate": 4.710368019703501e-06, |
| "loss": 5.1119, |
| "step": 2000 |
| }, |
| { |
| "epoch": 0.028418678176231327, |
| "eval_runtime": 930.4679, |
| "eval_samples_per_second": 151.27, |
| "eval_steps_per_second": 37.818, |
| "step": 2000 |
| }, |
| { |
| "epoch": 0.03552334772028916, |
| "grad_norm": 23.64911651611328, |
| "learning_rate": 5.894472599820016e-06, |
| "loss": 4.4046, |
| "step": 2500 |
| }, |
| { |
| "epoch": 0.04262801726434699, |
| "grad_norm": 27.94416046142578, |
| "learning_rate": 7.078577179936533e-06, |
| "loss": 4.1391, |
| "step": 3000 |
| }, |
| { |
| "epoch": 0.04973268680840483, |
| "grad_norm": 25.3529109954834, |
| "learning_rate": 8.260313550892814e-06, |
| "loss": 4.0939, |
| "step": 3500 |
| }, |
| { |
| "epoch": 0.056837356352462655, |
| "grad_norm": 36.91496658325195, |
| "learning_rate": 9.444418131009332e-06, |
| "loss": 3.9517, |
| "step": 4000 |
| }, |
| { |
| "epoch": 0.056837356352462655, |
| "eval_runtime": 934.9504, |
| "eval_samples_per_second": 150.545, |
| "eval_steps_per_second": 37.636, |
| "step": 4000 |
| }, |
| { |
| "epoch": 0.06394202589652048, |
| "grad_norm": 19.763195037841797, |
| "learning_rate": 1.0628522711125847e-05, |
| "loss": 3.9683, |
| "step": 4500 |
| }, |
| { |
| "epoch": 0.07104669544057832, |
| "grad_norm": 22.828413009643555, |
| "learning_rate": 1.1812627291242363e-05, |
| "loss": 3.9331, |
| "step": 5000 |
| }, |
| { |
| "epoch": 0.07815136498463615, |
| "grad_norm": 30.304622650146484, |
| "learning_rate": 1.299673187135888e-05, |
| "loss": 3.9017, |
| "step": 5500 |
| }, |
| { |
| "epoch": 0.08525603452869399, |
| "grad_norm": 34.04104995727539, |
| "learning_rate": 1.4180836451475396e-05, |
| "loss": 3.8953, |
| "step": 6000 |
| }, |
| { |
| "epoch": 0.08525603452869399, |
| "eval_runtime": 932.5246, |
| "eval_samples_per_second": 150.937, |
| "eval_steps_per_second": 37.734, |
| "step": 6000 |
| }, |
| { |
| "epoch": 0.09236070407275182, |
| "grad_norm": 34.60411071777344, |
| "learning_rate": 1.536494103159191e-05, |
| "loss": 3.8456, |
| "step": 6500 |
| }, |
| { |
| "epoch": 0.09946537361680965, |
| "grad_norm": 28.492341995239258, |
| "learning_rate": 1.6549045611708428e-05, |
| "loss": 3.8317, |
| "step": 7000 |
| }, |
| { |
| "epoch": 0.10657004316086748, |
| "grad_norm": 25.529743194580078, |
| "learning_rate": 1.7733150191824944e-05, |
| "loss": 3.7857, |
| "step": 7500 |
| }, |
| { |
| "epoch": 0.11367471270492531, |
| "grad_norm": 25.264846801757812, |
| "learning_rate": 1.8914886562781225e-05, |
| "loss": 3.7997, |
| "step": 8000 |
| }, |
| { |
| "epoch": 0.11367471270492531, |
| "eval_runtime": 900.9582, |
| "eval_samples_per_second": 156.225, |
| "eval_steps_per_second": 39.056, |
| "step": 8000 |
| }, |
| { |
| "epoch": 0.12077938224898314, |
| "grad_norm": 21.667879104614258, |
| "learning_rate": 2.0098991142897742e-05, |
| "loss": 3.8116, |
| "step": 8500 |
| }, |
| { |
| "epoch": 0.12788405179304096, |
| "grad_norm": 24.545040130615234, |
| "learning_rate": 2.128309572301426e-05, |
| "loss": 3.8319, |
| "step": 9000 |
| }, |
| { |
| "epoch": 0.1349887213370988, |
| "grad_norm": 23.32590675354004, |
| "learning_rate": 2.246720030313077e-05, |
| "loss": 3.785, |
| "step": 9500 |
| }, |
| { |
| "epoch": 0.14209339088115663, |
| "grad_norm": 25.292354583740234, |
| "learning_rate": 2.3651304883247288e-05, |
| "loss": 3.7558, |
| "step": 10000 |
| }, |
| { |
| "epoch": 0.14209339088115663, |
| "eval_runtime": 948.3831, |
| "eval_samples_per_second": 148.413, |
| "eval_steps_per_second": 37.103, |
| "step": 10000 |
| }, |
| { |
| "epoch": 0.14919806042521447, |
| "grad_norm": 32.933631896972656, |
| "learning_rate": 2.4833041254203573e-05, |
| "loss": 3.6906, |
| "step": 10500 |
| }, |
| { |
| "epoch": 0.1563027299692723, |
| "grad_norm": 32.69184494018555, |
| "learning_rate": 2.601714583432009e-05, |
| "loss": 3.8022, |
| "step": 11000 |
| }, |
| { |
| "epoch": 0.16340739951333014, |
| "grad_norm": 22.242897033691406, |
| "learning_rate": 2.7201250414436602e-05, |
| "loss": 3.7729, |
| "step": 11500 |
| }, |
| { |
| "epoch": 0.17051206905738797, |
| "grad_norm": 22.738082885742188, |
| "learning_rate": 2.8385354994553122e-05, |
| "loss": 3.7329, |
| "step": 12000 |
| }, |
| { |
| "epoch": 0.17051206905738797, |
| "eval_runtime": 921.4048, |
| "eval_samples_per_second": 152.758, |
| "eval_steps_per_second": 38.19, |
| "step": 12000 |
| }, |
| { |
| "epoch": 0.1776167386014458, |
| "grad_norm": 20.65458869934082, |
| "learning_rate": 2.956945957466964e-05, |
| "loss": 3.7221, |
| "step": 12500 |
| }, |
| { |
| "epoch": 0.18472140814550364, |
| "grad_norm": 23.19024658203125, |
| "learning_rate": 3.075119594562592e-05, |
| "loss": 3.7592, |
| "step": 13000 |
| }, |
| { |
| "epoch": 0.19182607768956147, |
| "grad_norm": 23.87779426574707, |
| "learning_rate": 3.193530052574243e-05, |
| "loss": 3.7483, |
| "step": 13500 |
| }, |
| { |
| "epoch": 0.1989307472336193, |
| "grad_norm": 17.06186866760254, |
| "learning_rate": 3.311703689669872e-05, |
| "loss": 3.7152, |
| "step": 14000 |
| }, |
| { |
| "epoch": 0.1989307472336193, |
| "eval_runtime": 932.5402, |
| "eval_samples_per_second": 150.934, |
| "eval_steps_per_second": 37.733, |
| "step": 14000 |
| }, |
| { |
| "epoch": 0.20603541677767712, |
| "grad_norm": 28.80001449584961, |
| "learning_rate": 3.430114147681524e-05, |
| "loss": 3.6807, |
| "step": 14500 |
| }, |
| { |
| "epoch": 0.21314008632173495, |
| "grad_norm": 23.454565048217773, |
| "learning_rate": 3.548524605693175e-05, |
| "loss": 3.7147, |
| "step": 15000 |
| }, |
| { |
| "epoch": 0.22024475586579279, |
| "grad_norm": 23.109294891357422, |
| "learning_rate": 3.6669350637048263e-05, |
| "loss": 3.7469, |
| "step": 15500 |
| }, |
| { |
| "epoch": 0.22734942540985062, |
| "grad_norm": 25.332721710205078, |
| "learning_rate": 3.7853455217164777e-05, |
| "loss": 3.701, |
| "step": 16000 |
| }, |
| { |
| "epoch": 0.22734942540985062, |
| "eval_runtime": 926.1025, |
| "eval_samples_per_second": 151.983, |
| "eval_steps_per_second": 37.996, |
| "step": 16000 |
| }, |
| { |
| "epoch": 0.23445409495390845, |
| "grad_norm": 18.929309844970703, |
| "learning_rate": 3.9037559797281296e-05, |
| "loss": 3.6502, |
| "step": 16500 |
| }, |
| { |
| "epoch": 0.2415587644979663, |
| "grad_norm": 23.181020736694336, |
| "learning_rate": 4.0221664377397816e-05, |
| "loss": 3.7512, |
| "step": 17000 |
| }, |
| { |
| "epoch": 0.24866343404202412, |
| "grad_norm": 23.910934448242188, |
| "learning_rate": 4.140576895751433e-05, |
| "loss": 3.6642, |
| "step": 17500 |
| }, |
| { |
| "epoch": 0.25576810358608193, |
| "grad_norm": 21.21150779724121, |
| "learning_rate": 4.258987353763085e-05, |
| "loss": 3.6602, |
| "step": 18000 |
| }, |
| { |
| "epoch": 0.25576810358608193, |
| "eval_runtime": 918.3944, |
| "eval_samples_per_second": 153.259, |
| "eval_steps_per_second": 38.315, |
| "step": 18000 |
| }, |
| { |
| "epoch": 0.2628727731301398, |
| "grad_norm": 37.84225845336914, |
| "learning_rate": 4.377397811774736e-05, |
| "loss": 3.7027, |
| "step": 18500 |
| }, |
| { |
| "epoch": 0.2699774426741976, |
| "grad_norm": 25.410938262939453, |
| "learning_rate": 4.495808269786388e-05, |
| "loss": 3.6725, |
| "step": 19000 |
| }, |
| { |
| "epoch": 0.27708211221825546, |
| "grad_norm": 28.328292846679688, |
| "learning_rate": 4.6142187277980395e-05, |
| "loss": 3.6763, |
| "step": 19500 |
| }, |
| { |
| "epoch": 0.28418678176231327, |
| "grad_norm": 23.013893127441406, |
| "learning_rate": 4.732629185809691e-05, |
| "loss": 3.6533, |
| "step": 20000 |
| }, |
| { |
| "epoch": 0.28418678176231327, |
| "eval_runtime": 937.5319, |
| "eval_samples_per_second": 150.13, |
| "eval_steps_per_second": 37.533, |
| "step": 20000 |
| }, |
| { |
| "epoch": 0.29129145130637113, |
| "grad_norm": 24.00814437866211, |
| "learning_rate": 4.850802822905319e-05, |
| "loss": 3.6695, |
| "step": 20500 |
| }, |
| { |
| "epoch": 0.29839612085042894, |
| "grad_norm": 19.818140029907227, |
| "learning_rate": 4.9692132809169706e-05, |
| "loss": 3.6463, |
| "step": 21000 |
| }, |
| { |
| "epoch": 0.3055007903944868, |
| "grad_norm": 16.67455291748047, |
| "learning_rate": 4.9902639265321164e-05, |
| "loss": 3.685, |
| "step": 21500 |
| }, |
| { |
| "epoch": 0.3126054599385446, |
| "grad_norm": 53.42991638183594, |
| "learning_rate": 4.977107070494435e-05, |
| "loss": 3.695, |
| "step": 22000 |
| }, |
| { |
| "epoch": 0.3126054599385446, |
| "eval_runtime": 943.5405, |
| "eval_samples_per_second": 149.174, |
| "eval_steps_per_second": 37.294, |
| "step": 22000 |
| }, |
| { |
| "epoch": 0.31971012948260247, |
| "grad_norm": 23.91082000732422, |
| "learning_rate": 4.963976528168829e-05, |
| "loss": 3.6918, |
| "step": 22500 |
| }, |
| { |
| "epoch": 0.3268147990266603, |
| "grad_norm": 24.224802017211914, |
| "learning_rate": 4.9508196721311476e-05, |
| "loss": 3.6241, |
| "step": 23000 |
| }, |
| { |
| "epoch": 0.3339194685707181, |
| "grad_norm": 20.906675338745117, |
| "learning_rate": 4.937662816093467e-05, |
| "loss": 3.6723, |
| "step": 23500 |
| }, |
| { |
| "epoch": 0.34102413811477594, |
| "grad_norm": 21.082576751708984, |
| "learning_rate": 4.9245059600557854e-05, |
| "loss": 3.623, |
| "step": 24000 |
| }, |
| { |
| "epoch": 0.34102413811477594, |
| "eval_runtime": 932.7328, |
| "eval_samples_per_second": 150.903, |
| "eval_steps_per_second": 37.726, |
| "step": 24000 |
| }, |
| { |
| "epoch": 0.34812880765883375, |
| "grad_norm": 21.17667579650879, |
| "learning_rate": 4.911349104018104e-05, |
| "loss": 3.6673, |
| "step": 24500 |
| }, |
| { |
| "epoch": 0.3552334772028916, |
| "grad_norm": 16.182199478149414, |
| "learning_rate": 4.898218561692498e-05, |
| "loss": 3.6377, |
| "step": 25000 |
| }, |
| { |
| "epoch": 0.3623381467469494, |
| "grad_norm": 18.022315979003906, |
| "learning_rate": 4.885061705654817e-05, |
| "loss": 3.6189, |
| "step": 25500 |
| }, |
| { |
| "epoch": 0.3694428162910073, |
| "grad_norm": 21.209049224853516, |
| "learning_rate": 4.871904849617135e-05, |
| "loss": 3.6586, |
| "step": 26000 |
| }, |
| { |
| "epoch": 0.3694428162910073, |
| "eval_runtime": 937.8314, |
| "eval_samples_per_second": 150.082, |
| "eval_steps_per_second": 37.521, |
| "step": 26000 |
| }, |
| { |
| "epoch": 0.3765474858350651, |
| "grad_norm": 15.896419525146484, |
| "learning_rate": 4.8587479935794544e-05, |
| "loss": 3.6251, |
| "step": 26500 |
| }, |
| { |
| "epoch": 0.38365215537912295, |
| "grad_norm": 18.70542335510254, |
| "learning_rate": 4.8456174512538485e-05, |
| "loss": 3.6287, |
| "step": 27000 |
| }, |
| { |
| "epoch": 0.39075682492318076, |
| "grad_norm": 16.86925506591797, |
| "learning_rate": 4.832460595216167e-05, |
| "loss": 3.6281, |
| "step": 27500 |
| }, |
| { |
| "epoch": 0.3978614944672386, |
| "grad_norm": 22.52922821044922, |
| "learning_rate": 4.819303739178486e-05, |
| "loss": 3.6099, |
| "step": 28000 |
| }, |
| { |
| "epoch": 0.3978614944672386, |
| "eval_runtime": 917.9919, |
| "eval_samples_per_second": 153.326, |
| "eval_steps_per_second": 38.331, |
| "step": 28000 |
| }, |
| { |
| "epoch": 0.4049661640112964, |
| "grad_norm": 27.086261749267578, |
| "learning_rate": 4.806146883140805e-05, |
| "loss": 3.6677, |
| "step": 28500 |
| }, |
| { |
| "epoch": 0.41207083355535423, |
| "grad_norm": 27.24114990234375, |
| "learning_rate": 4.793016340815199e-05, |
| "loss": 3.6596, |
| "step": 29000 |
| }, |
| { |
| "epoch": 0.4191755030994121, |
| "grad_norm": 21.950387954711914, |
| "learning_rate": 4.779859484777518e-05, |
| "loss": 3.6288, |
| "step": 29500 |
| }, |
| { |
| "epoch": 0.4262801726434699, |
| "grad_norm": 19.85308074951172, |
| "learning_rate": 4.766702628739836e-05, |
| "loss": 3.6266, |
| "step": 30000 |
| }, |
| { |
| "epoch": 0.4262801726434699, |
| "eval_runtime": 933.7583, |
| "eval_samples_per_second": 150.737, |
| "eval_steps_per_second": 37.684, |
| "step": 30000 |
| }, |
| { |
| "epoch": 0.43338484218752776, |
| "grad_norm": 16.531118392944336, |
| "learning_rate": 4.753545772702155e-05, |
| "loss": 3.65, |
| "step": 30500 |
| }, |
| { |
| "epoch": 0.44048951173158557, |
| "grad_norm": 20.83889389038086, |
| "learning_rate": 4.740388916664474e-05, |
| "loss": 3.6066, |
| "step": 31000 |
| }, |
| { |
| "epoch": 0.44759418127564343, |
| "grad_norm": 17.424823760986328, |
| "learning_rate": 4.727258374338868e-05, |
| "loss": 3.6349, |
| "step": 31500 |
| }, |
| { |
| "epoch": 0.45469885081970124, |
| "grad_norm": 46.09840393066406, |
| "learning_rate": 4.714101518301187e-05, |
| "loss": 3.59, |
| "step": 32000 |
| }, |
| { |
| "epoch": 0.45469885081970124, |
| "eval_runtime": 939.8916, |
| "eval_samples_per_second": 149.753, |
| "eval_steps_per_second": 37.438, |
| "step": 32000 |
| }, |
| { |
| "epoch": 0.4618035203637591, |
| "grad_norm": 18.70945930480957, |
| "learning_rate": 4.700944662263506e-05, |
| "loss": 3.592, |
| "step": 32500 |
| }, |
| { |
| "epoch": 0.4689081899078169, |
| "grad_norm": 20.763996124267578, |
| "learning_rate": 4.687787806225824e-05, |
| "loss": 3.6066, |
| "step": 33000 |
| }, |
| { |
| "epoch": 0.47601285945187477, |
| "grad_norm": 23.204410552978516, |
| "learning_rate": 4.6746572639002185e-05, |
| "loss": 3.6536, |
| "step": 33500 |
| }, |
| { |
| "epoch": 0.4831175289959326, |
| "grad_norm": 18.78313636779785, |
| "learning_rate": 4.661500407862538e-05, |
| "loss": 3.6343, |
| "step": 34000 |
| }, |
| { |
| "epoch": 0.4831175289959326, |
| "eval_runtime": 887.497, |
| "eval_samples_per_second": 158.594, |
| "eval_steps_per_second": 39.649, |
| "step": 34000 |
| }, |
| { |
| "epoch": 0.4902221985399904, |
| "grad_norm": 19.029979705810547, |
| "learning_rate": 4.648343551824856e-05, |
| "loss": 3.6156, |
| "step": 34500 |
| }, |
| { |
| "epoch": 0.49732686808404825, |
| "grad_norm": 22.418338775634766, |
| "learning_rate": 4.635186695787175e-05, |
| "loss": 3.6354, |
| "step": 35000 |
| }, |
| { |
| "epoch": 0.5044315376281061, |
| "grad_norm": 21.143224716186523, |
| "learning_rate": 4.622029839749494e-05, |
| "loss": 3.5547, |
| "step": 35500 |
| }, |
| { |
| "epoch": 0.5115362071721639, |
| "grad_norm": 17.15317153930664, |
| "learning_rate": 4.608925611135963e-05, |
| "loss": 3.5856, |
| "step": 36000 |
| }, |
| { |
| "epoch": 0.5115362071721639, |
| "eval_runtime": 932.1634, |
| "eval_samples_per_second": 150.995, |
| "eval_steps_per_second": 37.749, |
| "step": 36000 |
| }, |
| { |
| "epoch": 0.5186408767162217, |
| "grad_norm": 19.316564559936523, |
| "learning_rate": 4.595768755098282e-05, |
| "loss": 3.5822, |
| "step": 36500 |
| }, |
| { |
| "epoch": 0.5257455462602796, |
| "grad_norm": 44.842105865478516, |
| "learning_rate": 4.582611899060601e-05, |
| "loss": 3.6059, |
| "step": 37000 |
| }, |
| { |
| "epoch": 0.5328502158043374, |
| "grad_norm": 19.391557693481445, |
| "learning_rate": 4.5694550430229194e-05, |
| "loss": 3.6424, |
| "step": 37500 |
| }, |
| { |
| "epoch": 0.5399548853483952, |
| "grad_norm": 17.909786224365234, |
| "learning_rate": 4.5562981869852386e-05, |
| "loss": 3.5643, |
| "step": 38000 |
| }, |
| { |
| "epoch": 0.5399548853483952, |
| "eval_runtime": 906.5953, |
| "eval_samples_per_second": 155.253, |
| "eval_steps_per_second": 38.813, |
| "step": 38000 |
| }, |
| { |
| "epoch": 0.5470595548924531, |
| "grad_norm": 18.2441349029541, |
| "learning_rate": 4.5431413309475565e-05, |
| "loss": 3.5851, |
| "step": 38500 |
| }, |
| { |
| "epoch": 0.5541642244365109, |
| "grad_norm": 28.68638801574707, |
| "learning_rate": 4.529984474909876e-05, |
| "loss": 3.5955, |
| "step": 39000 |
| }, |
| { |
| "epoch": 0.5612688939805688, |
| "grad_norm": 19.42378807067871, |
| "learning_rate": 4.516827618872195e-05, |
| "loss": 3.5832, |
| "step": 39500 |
| }, |
| { |
| "epoch": 0.5683735635246265, |
| "grad_norm": 20.94334602355957, |
| "learning_rate": 4.5036707628345135e-05, |
| "loss": 3.621, |
| "step": 40000 |
| }, |
| { |
| "epoch": 0.5683735635246265, |
| "eval_runtime": 932.967, |
| "eval_samples_per_second": 150.865, |
| "eval_steps_per_second": 37.716, |
| "step": 40000 |
| }, |
| { |
| "epoch": 0.5754782330686844, |
| "grad_norm": 15.059203147888184, |
| "learning_rate": 4.490513906796832e-05, |
| "loss": 3.5537, |
| "step": 40500 |
| }, |
| { |
| "epoch": 0.5825829026127423, |
| "grad_norm": 16.318017959594727, |
| "learning_rate": 4.477383364471226e-05, |
| "loss": 3.5972, |
| "step": 41000 |
| }, |
| { |
| "epoch": 0.5896875721568, |
| "grad_norm": 26.739654541015625, |
| "learning_rate": 4.464226508433545e-05, |
| "loss": 3.5889, |
| "step": 41500 |
| }, |
| { |
| "epoch": 0.5967922417008579, |
| "grad_norm": 17.74175453186035, |
| "learning_rate": 4.451069652395864e-05, |
| "loss": 3.6115, |
| "step": 42000 |
| }, |
| { |
| "epoch": 0.5967922417008579, |
| "eval_runtime": 928.247, |
| "eval_samples_per_second": 151.632, |
| "eval_steps_per_second": 37.908, |
| "step": 42000 |
| }, |
| { |
| "epoch": 0.6038969112449157, |
| "grad_norm": 15.483787536621094, |
| "learning_rate": 4.4379127963581825e-05, |
| "loss": 3.5656, |
| "step": 42500 |
| }, |
| { |
| "epoch": 0.6110015807889736, |
| "grad_norm": 13.45321273803711, |
| "learning_rate": 4.424755940320501e-05, |
| "loss": 3.6075, |
| "step": 43000 |
| }, |
| { |
| "epoch": 0.6181062503330313, |
| "grad_norm": 18.495105743408203, |
| "learning_rate": 4.4115990842828196e-05, |
| "loss": 3.5783, |
| "step": 43500 |
| }, |
| { |
| "epoch": 0.6252109198770892, |
| "grad_norm": 16.545825958251953, |
| "learning_rate": 4.398442228245139e-05, |
| "loss": 3.5468, |
| "step": 44000 |
| }, |
| { |
| "epoch": 0.6252109198770892, |
| "eval_runtime": 924.4432, |
| "eval_samples_per_second": 152.256, |
| "eval_steps_per_second": 38.064, |
| "step": 44000 |
| }, |
| { |
| "epoch": 0.6323155894211471, |
| "grad_norm": 24.224279403686523, |
| "learning_rate": 4.385311685919533e-05, |
| "loss": 3.5977, |
| "step": 44500 |
| }, |
| { |
| "epoch": 0.6394202589652049, |
| "grad_norm": 21.859100341796875, |
| "learning_rate": 4.3721548298818515e-05, |
| "loss": 3.5507, |
| "step": 45000 |
| }, |
| { |
| "epoch": 0.6465249285092627, |
| "grad_norm": 18.18729019165039, |
| "learning_rate": 4.358997973844171e-05, |
| "loss": 3.5176, |
| "step": 45500 |
| }, |
| { |
| "epoch": 0.6536295980533205, |
| "grad_norm": 19.58243179321289, |
| "learning_rate": 4.3458411178064886e-05, |
| "loss": 3.6093, |
| "step": 46000 |
| }, |
| { |
| "epoch": 0.6536295980533205, |
| "eval_runtime": 915.6486, |
| "eval_samples_per_second": 153.718, |
| "eval_steps_per_second": 38.43, |
| "step": 46000 |
| }, |
| { |
| "epoch": 0.6607342675973784, |
| "grad_norm": 18.41427993774414, |
| "learning_rate": 4.332684261768808e-05, |
| "loss": 3.524, |
| "step": 46500 |
| }, |
| { |
| "epoch": 0.6678389371414362, |
| "grad_norm": 17.978282928466797, |
| "learning_rate": 4.319553719443202e-05, |
| "loss": 3.5495, |
| "step": 47000 |
| }, |
| { |
| "epoch": 0.674943606685494, |
| "grad_norm": 20.26556968688965, |
| "learning_rate": 4.3063968634055205e-05, |
| "loss": 3.5603, |
| "step": 47500 |
| }, |
| { |
| "epoch": 0.6820482762295519, |
| "grad_norm": 21.91985511779785, |
| "learning_rate": 4.29324000736784e-05, |
| "loss": 3.5372, |
| "step": 48000 |
| }, |
| { |
| "epoch": 0.6820482762295519, |
| "eval_runtime": 939.9628, |
| "eval_samples_per_second": 149.742, |
| "eval_steps_per_second": 37.436, |
| "step": 48000 |
| }, |
| { |
| "epoch": 0.6891529457736097, |
| "grad_norm": 18.618022918701172, |
| "learning_rate": 4.280083151330158e-05, |
| "loss": 3.5419, |
| "step": 48500 |
| }, |
| { |
| "epoch": 0.6962576153176675, |
| "grad_norm": 20.38291358947754, |
| "learning_rate": 4.266926295292477e-05, |
| "loss": 3.5881, |
| "step": 49000 |
| }, |
| { |
| "epoch": 0.7033622848617254, |
| "grad_norm": 19.36083221435547, |
| "learning_rate": 4.253769439254796e-05, |
| "loss": 3.5256, |
| "step": 49500 |
| }, |
| { |
| "epoch": 0.7104669544057832, |
| "grad_norm": 18.683948516845703, |
| "learning_rate": 4.2406125832171147e-05, |
| "loss": 3.5507, |
| "step": 50000 |
| }, |
| { |
| "epoch": 0.7104669544057832, |
| "eval_runtime": 920.3025, |
| "eval_samples_per_second": 152.941, |
| "eval_steps_per_second": 38.235, |
| "step": 50000 |
| }, |
| { |
| "epoch": 0.7175716239498411, |
| "grad_norm": 15.374938011169434, |
| "learning_rate": 4.227455727179433e-05, |
| "loss": 3.5632, |
| "step": 50500 |
| }, |
| { |
| "epoch": 0.7246762934938988, |
| "grad_norm": 16.976755142211914, |
| "learning_rate": 4.214298871141752e-05, |
| "loss": 3.546, |
| "step": 51000 |
| }, |
| { |
| "epoch": 0.7317809630379567, |
| "grad_norm": 12.849639892578125, |
| "learning_rate": 4.2011683288161466e-05, |
| "loss": 3.5461, |
| "step": 51500 |
| }, |
| { |
| "epoch": 0.7388856325820146, |
| "grad_norm": 19.558488845825195, |
| "learning_rate": 4.188037786490541e-05, |
| "loss": 3.5127, |
| "step": 52000 |
| }, |
| { |
| "epoch": 0.7388856325820146, |
| "eval_runtime": 952.8423, |
| "eval_samples_per_second": 147.718, |
| "eval_steps_per_second": 36.93, |
| "step": 52000 |
| }, |
| { |
| "epoch": 0.7459903021260723, |
| "grad_norm": 19.32866668701172, |
| "learning_rate": 4.174880930452859e-05, |
| "loss": 3.5713, |
| "step": 52500 |
| }, |
| { |
| "epoch": 0.7530949716701302, |
| "grad_norm": 25.51015853881836, |
| "learning_rate": 4.161724074415178e-05, |
| "loss": 3.544, |
| "step": 53000 |
| }, |
| { |
| "epoch": 0.760199641214188, |
| "grad_norm": 23.327672958374023, |
| "learning_rate": 4.1485672183774963e-05, |
| "loss": 3.5789, |
| "step": 53500 |
| }, |
| { |
| "epoch": 0.7673043107582459, |
| "grad_norm": 14.6256685256958, |
| "learning_rate": 4.1354103623398156e-05, |
| "loss": 3.5612, |
| "step": 54000 |
| }, |
| { |
| "epoch": 0.7673043107582459, |
| "eval_runtime": 908.4425, |
| "eval_samples_per_second": 154.938, |
| "eval_steps_per_second": 38.734, |
| "step": 54000 |
| }, |
| { |
| "epoch": 0.7744089803023037, |
| "grad_norm": 13.763489723205566, |
| "learning_rate": 4.12227982001421e-05, |
| "loss": 3.5238, |
| "step": 54500 |
| }, |
| { |
| "epoch": 0.7815136498463615, |
| "grad_norm": 16.628599166870117, |
| "learning_rate": 4.109122963976528e-05, |
| "loss": 3.5555, |
| "step": 55000 |
| }, |
| { |
| "epoch": 0.7886183193904194, |
| "grad_norm": 16.67022132873535, |
| "learning_rate": 4.0959661079388475e-05, |
| "loss": 3.5683, |
| "step": 55500 |
| }, |
| { |
| "epoch": 0.7957229889344772, |
| "grad_norm": 19.672990798950195, |
| "learning_rate": 4.0828092519011654e-05, |
| "loss": 3.4749, |
| "step": 56000 |
| }, |
| { |
| "epoch": 0.7957229889344772, |
| "eval_runtime": 908.7971, |
| "eval_samples_per_second": 154.877, |
| "eval_steps_per_second": 38.719, |
| "step": 56000 |
| }, |
| { |
| "epoch": 0.802827658478535, |
| "grad_norm": 18.64630699157715, |
| "learning_rate": 4.06967870957556e-05, |
| "loss": 3.5202, |
| "step": 56500 |
| }, |
| { |
| "epoch": 0.8099323280225929, |
| "grad_norm": 19.014997482299805, |
| "learning_rate": 4.056521853537879e-05, |
| "loss": 3.514, |
| "step": 57000 |
| }, |
| { |
| "epoch": 0.8170369975666507, |
| "grad_norm": 20.02798843383789, |
| "learning_rate": 4.043364997500197e-05, |
| "loss": 3.5717, |
| "step": 57500 |
| }, |
| { |
| "epoch": 0.8241416671107085, |
| "grad_norm": 13.31798267364502, |
| "learning_rate": 4.0302081414625165e-05, |
| "loss": 3.5485, |
| "step": 58000 |
| }, |
| { |
| "epoch": 0.8241416671107085, |
| "eval_runtime": 920.7476, |
| "eval_samples_per_second": 152.867, |
| "eval_steps_per_second": 38.217, |
| "step": 58000 |
| }, |
| { |
| "epoch": 0.8312463366547663, |
| "grad_norm": 18.563255310058594, |
| "learning_rate": 4.017051285424835e-05, |
| "loss": 3.5037, |
| "step": 58500 |
| }, |
| { |
| "epoch": 0.8383510061988242, |
| "grad_norm": 18.993120193481445, |
| "learning_rate": 4.0038944293871536e-05, |
| "loss": 3.5404, |
| "step": 59000 |
| }, |
| { |
| "epoch": 0.845455675742882, |
| "grad_norm": 23.613004684448242, |
| "learning_rate": 3.990763887061548e-05, |
| "loss": 3.5513, |
| "step": 59500 |
| }, |
| { |
| "epoch": 0.8525603452869398, |
| "grad_norm": 17.62459373474121, |
| "learning_rate": 3.977607031023867e-05, |
| "loss": 3.5172, |
| "step": 60000 |
| }, |
| { |
| "epoch": 0.8525603452869398, |
| "eval_runtime": 947.8413, |
| "eval_samples_per_second": 148.497, |
| "eval_steps_per_second": 37.124, |
| "step": 60000 |
| }, |
| { |
| "epoch": 0.8596650148309977, |
| "grad_norm": 16.220739364624023, |
| "learning_rate": 3.9644501749861855e-05, |
| "loss": 3.5113, |
| "step": 60500 |
| }, |
| { |
| "epoch": 0.8667696843750555, |
| "grad_norm": 21.781057357788086, |
| "learning_rate": 3.951293318948504e-05, |
| "loss": 3.5046, |
| "step": 61000 |
| }, |
| { |
| "epoch": 0.8738743539191134, |
| "grad_norm": 18.97637367248535, |
| "learning_rate": 3.938136462910823e-05, |
| "loss": 3.5144, |
| "step": 61500 |
| }, |
| { |
| "epoch": 0.8809790234631711, |
| "grad_norm": 14.381050109863281, |
| "learning_rate": 3.925005920585217e-05, |
| "loss": 3.4889, |
| "step": 62000 |
| }, |
| { |
| "epoch": 0.8809790234631711, |
| "eval_runtime": 948.6666, |
| "eval_samples_per_second": 148.368, |
| "eval_steps_per_second": 37.092, |
| "step": 62000 |
| }, |
| { |
| "epoch": 0.888083693007229, |
| "grad_norm": 19.86937713623047, |
| "learning_rate": 3.911849064547536e-05, |
| "loss": 3.512, |
| "step": 62500 |
| }, |
| { |
| "epoch": 0.8951883625512869, |
| "grad_norm": 18.57746696472168, |
| "learning_rate": 3.8986922085098545e-05, |
| "loss": 3.5255, |
| "step": 63000 |
| }, |
| { |
| "epoch": 0.9022930320953446, |
| "grad_norm": 23.89400291442871, |
| "learning_rate": 3.885535352472173e-05, |
| "loss": 3.5221, |
| "step": 63500 |
| }, |
| { |
| "epoch": 0.9093977016394025, |
| "grad_norm": 20.12260627746582, |
| "learning_rate": 3.872378496434492e-05, |
| "loss": 3.5573, |
| "step": 64000 |
| }, |
| { |
| "epoch": 0.9093977016394025, |
| "eval_runtime": 957.6922, |
| "eval_samples_per_second": 146.97, |
| "eval_steps_per_second": 36.742, |
| "step": 64000 |
| }, |
| { |
| "epoch": 0.9165023711834603, |
| "grad_norm": 20.893495559692383, |
| "learning_rate": 3.859247954108886e-05, |
| "loss": 3.5487, |
| "step": 64500 |
| }, |
| { |
| "epoch": 0.9236070407275182, |
| "grad_norm": 29.79423713684082, |
| "learning_rate": 3.846091098071205e-05, |
| "loss": 3.5046, |
| "step": 65000 |
| }, |
| { |
| "epoch": 0.930711710271576, |
| "grad_norm": 17.226837158203125, |
| "learning_rate": 3.832934242033524e-05, |
| "loss": 3.5536, |
| "step": 65500 |
| }, |
| { |
| "epoch": 0.9378163798156338, |
| "grad_norm": 33.1728630065918, |
| "learning_rate": 3.819777385995843e-05, |
| "loss": 3.5286, |
| "step": 66000 |
| }, |
| { |
| "epoch": 0.9378163798156338, |
| "eval_runtime": 949.3545, |
| "eval_samples_per_second": 148.261, |
| "eval_steps_per_second": 37.065, |
| "step": 66000 |
| }, |
| { |
| "epoch": 0.9449210493596917, |
| "grad_norm": 19.82408332824707, |
| "learning_rate": 3.806620529958161e-05, |
| "loss": 3.5631, |
| "step": 66500 |
| }, |
| { |
| "epoch": 0.9520257189037495, |
| "grad_norm": 13.408245086669922, |
| "learning_rate": 3.7934899876325554e-05, |
| "loss": 3.4657, |
| "step": 67000 |
| }, |
| { |
| "epoch": 0.9591303884478073, |
| "grad_norm": 16.91946792602539, |
| "learning_rate": 3.780333131594874e-05, |
| "loss": 3.5009, |
| "step": 67500 |
| }, |
| { |
| "epoch": 0.9662350579918652, |
| "grad_norm": 16.867115020751953, |
| "learning_rate": 3.767176275557193e-05, |
| "loss": 3.562, |
| "step": 68000 |
| }, |
| { |
| "epoch": 0.9662350579918652, |
| "eval_runtime": 929.9681, |
| "eval_samples_per_second": 151.351, |
| "eval_steps_per_second": 37.838, |
| "step": 68000 |
| }, |
| { |
| "epoch": 0.973339727535923, |
| "grad_norm": 34.606056213378906, |
| "learning_rate": 3.754019419519512e-05, |
| "loss": 3.5117, |
| "step": 68500 |
| }, |
| { |
| "epoch": 0.9804443970799808, |
| "grad_norm": 16.481327056884766, |
| "learning_rate": 3.74086256348183e-05, |
| "loss": 3.5577, |
| "step": 69000 |
| }, |
| { |
| "epoch": 0.9875490666240386, |
| "grad_norm": 26.235326766967773, |
| "learning_rate": 3.7277057074441496e-05, |
| "loss": 3.5808, |
| "step": 69500 |
| }, |
| { |
| "epoch": 0.9946537361680965, |
| "grad_norm": 25.267833709716797, |
| "learning_rate": 3.714548851406468e-05, |
| "loss": 3.5231, |
| "step": 70000 |
| }, |
| { |
| "epoch": 0.9946537361680965, |
| "eval_runtime": 928.1554, |
| "eval_samples_per_second": 151.647, |
| "eval_steps_per_second": 37.912, |
| "step": 70000 |
| }, |
| { |
| "epoch": 1.0017584057121542, |
| "grad_norm": 17.89148712158203, |
| "learning_rate": 3.7013919953687873e-05, |
| "loss": 3.5025, |
| "step": 70500 |
| }, |
| { |
| "epoch": 1.0088630752562122, |
| "grad_norm": 21.38014793395996, |
| "learning_rate": 3.6882877667552564e-05, |
| "loss": 3.5377, |
| "step": 71000 |
| }, |
| { |
| "epoch": 1.01596774480027, |
| "grad_norm": 17.273181915283203, |
| "learning_rate": 3.675130910717575e-05, |
| "loss": 3.5147, |
| "step": 71500 |
| }, |
| { |
| "epoch": 1.0230724143443277, |
| "grad_norm": 19.168262481689453, |
| "learning_rate": 3.662000368391969e-05, |
| "loss": 3.4739, |
| "step": 72000 |
| }, |
| { |
| "epoch": 1.0230724143443277, |
| "eval_runtime": 921.4794, |
| "eval_samples_per_second": 152.746, |
| "eval_steps_per_second": 38.186, |
| "step": 72000 |
| }, |
| { |
| "epoch": 1.0301770838883857, |
| "grad_norm": 19.674152374267578, |
| "learning_rate": 3.648843512354288e-05, |
| "loss": 3.5197, |
| "step": 72500 |
| }, |
| { |
| "epoch": 1.0372817534324434, |
| "grad_norm": 24.5643367767334, |
| "learning_rate": 3.635686656316607e-05, |
| "loss": 3.4676, |
| "step": 73000 |
| }, |
| { |
| "epoch": 1.0443864229765012, |
| "grad_norm": 16.75780487060547, |
| "learning_rate": 3.6225298002789254e-05, |
| "loss": 3.5216, |
| "step": 73500 |
| }, |
| { |
| "epoch": 1.0514910925205592, |
| "grad_norm": 16.524784088134766, |
| "learning_rate": 3.6093729442412446e-05, |
| "loss": 3.5219, |
| "step": 74000 |
| }, |
| { |
| "epoch": 1.0514910925205592, |
| "eval_runtime": 930.5954, |
| "eval_samples_per_second": 151.249, |
| "eval_steps_per_second": 37.812, |
| "step": 74000 |
| }, |
| { |
| "epoch": 1.058595762064617, |
| "grad_norm": 16.586347579956055, |
| "learning_rate": 3.5962160882035625e-05, |
| "loss": 3.5428, |
| "step": 74500 |
| }, |
| { |
| "epoch": 1.065700431608675, |
| "grad_norm": 17.29988670349121, |
| "learning_rate": 3.583059232165882e-05, |
| "loss": 3.5186, |
| "step": 75000 |
| }, |
| { |
| "epoch": 1.0728051011527326, |
| "grad_norm": 19.46440315246582, |
| "learning_rate": 3.569902376128201e-05, |
| "loss": 3.5, |
| "step": 75500 |
| }, |
| { |
| "epoch": 1.0799097706967904, |
| "grad_norm": 17.439786911010742, |
| "learning_rate": 3.5567455200905195e-05, |
| "loss": 3.5213, |
| "step": 76000 |
| }, |
| { |
| "epoch": 1.0799097706967904, |
| "eval_runtime": 932.7299, |
| "eval_samples_per_second": 150.903, |
| "eval_steps_per_second": 37.726, |
| "step": 76000 |
| }, |
| { |
| "epoch": 1.0870144402408484, |
| "grad_norm": 15.520922660827637, |
| "learning_rate": 3.5436149777649136e-05, |
| "loss": 3.5296, |
| "step": 76500 |
| }, |
| { |
| "epoch": 1.0941191097849061, |
| "grad_norm": 24.197912216186523, |
| "learning_rate": 3.530458121727232e-05, |
| "loss": 3.5079, |
| "step": 77000 |
| }, |
| { |
| "epoch": 1.1012237793289639, |
| "grad_norm": 16.002283096313477, |
| "learning_rate": 3.517301265689551e-05, |
| "loss": 3.5577, |
| "step": 77500 |
| }, |
| { |
| "epoch": 1.1083284488730218, |
| "grad_norm": 23.288475036621094, |
| "learning_rate": 3.50414440965187e-05, |
| "loss": 3.512, |
| "step": 78000 |
| }, |
| { |
| "epoch": 1.1083284488730218, |
| "eval_runtime": 925.8903, |
| "eval_samples_per_second": 152.018, |
| "eval_steps_per_second": 38.005, |
| "step": 78000 |
| }, |
| { |
| "epoch": 1.1154331184170796, |
| "grad_norm": 19.086015701293945, |
| "learning_rate": 3.4909875536141885e-05, |
| "loss": 3.5082, |
| "step": 78500 |
| }, |
| { |
| "epoch": 1.1225377879611376, |
| "grad_norm": 14.41193675994873, |
| "learning_rate": 3.4778570112885826e-05, |
| "loss": 3.479, |
| "step": 79000 |
| }, |
| { |
| "epoch": 1.1296424575051953, |
| "grad_norm": 17.028974533081055, |
| "learning_rate": 3.464700155250901e-05, |
| "loss": 3.4934, |
| "step": 79500 |
| }, |
| { |
| "epoch": 1.136747127049253, |
| "grad_norm": 16.115354537963867, |
| "learning_rate": 3.4515432992132204e-05, |
| "loss": 3.4888, |
| "step": 80000 |
| }, |
| { |
| "epoch": 1.136747127049253, |
| "eval_runtime": 941.5235, |
| "eval_samples_per_second": 149.494, |
| "eval_steps_per_second": 37.373, |
| "step": 80000 |
| }, |
| { |
| "epoch": 1.143851796593311, |
| "grad_norm": 17.744951248168945, |
| "learning_rate": 3.438386443175539e-05, |
| "loss": 3.5219, |
| "step": 80500 |
| }, |
| { |
| "epoch": 1.1509564661373688, |
| "grad_norm": 14.755407333374023, |
| "learning_rate": 3.4252295871378575e-05, |
| "loss": 3.4917, |
| "step": 81000 |
| }, |
| { |
| "epoch": 1.1580611356814265, |
| "grad_norm": 13.909296989440918, |
| "learning_rate": 3.412072731100177e-05, |
| "loss": 3.4889, |
| "step": 81500 |
| }, |
| { |
| "epoch": 1.1651658052254845, |
| "grad_norm": 16.36424446105957, |
| "learning_rate": 3.398915875062495e-05, |
| "loss": 3.5214, |
| "step": 82000 |
| }, |
| { |
| "epoch": 1.1651658052254845, |
| "eval_runtime": 937.9616, |
| "eval_samples_per_second": 150.062, |
| "eval_steps_per_second": 37.515, |
| "step": 82000 |
| }, |
| { |
| "epoch": 1.1722704747695423, |
| "grad_norm": 21.457130432128906, |
| "learning_rate": 3.385759019024814e-05, |
| "loss": 3.499, |
| "step": 82500 |
| }, |
| { |
| "epoch": 1.1793751443136, |
| "grad_norm": 16.613374710083008, |
| "learning_rate": 3.372602162987133e-05, |
| "loss": 3.47, |
| "step": 83000 |
| }, |
| { |
| "epoch": 1.186479813857658, |
| "grad_norm": 20.228662490844727, |
| "learning_rate": 3.3594716206615265e-05, |
| "loss": 3.4882, |
| "step": 83500 |
| }, |
| { |
| "epoch": 1.1935844834017157, |
| "grad_norm": 16.61556625366211, |
| "learning_rate": 3.346314764623846e-05, |
| "loss": 3.4996, |
| "step": 84000 |
| }, |
| { |
| "epoch": 1.1935844834017157, |
| "eval_runtime": 921.749, |
| "eval_samples_per_second": 152.701, |
| "eval_steps_per_second": 38.175, |
| "step": 84000 |
| }, |
| { |
| "epoch": 1.2006891529457735, |
| "grad_norm": 18.255168914794922, |
| "learning_rate": 3.333157908586164e-05, |
| "loss": 3.5136, |
| "step": 84500 |
| }, |
| { |
| "epoch": 1.2077938224898315, |
| "grad_norm": 16.397192001342773, |
| "learning_rate": 3.320001052548483e-05, |
| "loss": 3.4573, |
| "step": 85000 |
| }, |
| { |
| "epoch": 1.2148984920338892, |
| "grad_norm": 23.602087020874023, |
| "learning_rate": 3.306870510222878e-05, |
| "loss": 3.497, |
| "step": 85500 |
| }, |
| { |
| "epoch": 1.2220031615779472, |
| "grad_norm": 23.570209503173828, |
| "learning_rate": 3.293713654185196e-05, |
| "loss": 3.4954, |
| "step": 86000 |
| }, |
| { |
| "epoch": 1.2220031615779472, |
| "eval_runtime": 928.5008, |
| "eval_samples_per_second": 151.591, |
| "eval_steps_per_second": 37.898, |
| "step": 86000 |
| }, |
| { |
| "epoch": 1.229107831122005, |
| "grad_norm": 16.394493103027344, |
| "learning_rate": 3.2805831118595904e-05, |
| "loss": 3.5192, |
| "step": 86500 |
| }, |
| { |
| "epoch": 1.2362125006660627, |
| "grad_norm": 15.098355293273926, |
| "learning_rate": 3.267426255821909e-05, |
| "loss": 3.4441, |
| "step": 87000 |
| }, |
| { |
| "epoch": 1.2433171702101207, |
| "grad_norm": 20.900165557861328, |
| "learning_rate": 3.2542693997842275e-05, |
| "loss": 3.5025, |
| "step": 87500 |
| }, |
| { |
| "epoch": 1.2504218397541784, |
| "grad_norm": 29.24736785888672, |
| "learning_rate": 3.241112543746547e-05, |
| "loss": 3.4749, |
| "step": 88000 |
| }, |
| { |
| "epoch": 1.2504218397541784, |
| "eval_runtime": 925.3923, |
| "eval_samples_per_second": 152.1, |
| "eval_steps_per_second": 38.025, |
| "step": 88000 |
| }, |
| { |
| "epoch": 1.2575265092982364, |
| "grad_norm": 22.28853988647461, |
| "learning_rate": 3.227955687708865e-05, |
| "loss": 3.5224, |
| "step": 88500 |
| }, |
| { |
| "epoch": 1.2646311788422941, |
| "grad_norm": 18.560422897338867, |
| "learning_rate": 3.2148251453832594e-05, |
| "loss": 3.4426, |
| "step": 89000 |
| }, |
| { |
| "epoch": 1.271735848386352, |
| "grad_norm": 22.707122802734375, |
| "learning_rate": 3.201668289345578e-05, |
| "loss": 3.5074, |
| "step": 89500 |
| }, |
| { |
| "epoch": 1.2788405179304099, |
| "grad_norm": 19.690576553344727, |
| "learning_rate": 3.188511433307897e-05, |
| "loss": 3.5421, |
| "step": 90000 |
| }, |
| { |
| "epoch": 1.2788405179304099, |
| "eval_runtime": 923.2428, |
| "eval_samples_per_second": 152.454, |
| "eval_steps_per_second": 38.113, |
| "step": 90000 |
| }, |
| { |
| "epoch": 1.2859451874744676, |
| "grad_norm": 15.056541442871094, |
| "learning_rate": 3.175354577270216e-05, |
| "loss": 3.463, |
| "step": 90500 |
| }, |
| { |
| "epoch": 1.2930498570185254, |
| "grad_norm": 17.073137283325195, |
| "learning_rate": 3.162197721232534e-05, |
| "loss": 3.5104, |
| "step": 91000 |
| }, |
| { |
| "epoch": 1.3001545265625833, |
| "grad_norm": 28.59168243408203, |
| "learning_rate": 3.1490408651948535e-05, |
| "loss": 3.5045, |
| "step": 91500 |
| }, |
| { |
| "epoch": 1.307259196106641, |
| "grad_norm": 20.911029815673828, |
| "learning_rate": 3.135884009157172e-05, |
| "loss": 3.555, |
| "step": 92000 |
| }, |
| { |
| "epoch": 1.307259196106641, |
| "eval_runtime": 929.4581, |
| "eval_samples_per_second": 151.434, |
| "eval_steps_per_second": 37.859, |
| "step": 92000 |
| }, |
| { |
| "epoch": 1.3143638656506988, |
| "grad_norm": 13.504472732543945, |
| "learning_rate": 3.1227271531194906e-05, |
| "loss": 3.4935, |
| "step": 92500 |
| }, |
| { |
| "epoch": 1.3214685351947568, |
| "grad_norm": 16.877857208251953, |
| "learning_rate": 3.1095966107938854e-05, |
| "loss": 3.509, |
| "step": 93000 |
| }, |
| { |
| "epoch": 1.3285732047388146, |
| "grad_norm": 16.726940155029297, |
| "learning_rate": 3.096439754756203e-05, |
| "loss": 3.4867, |
| "step": 93500 |
| }, |
| { |
| "epoch": 1.3356778742828723, |
| "grad_norm": 16.982975006103516, |
| "learning_rate": 3.0832828987185225e-05, |
| "loss": 3.5165, |
| "step": 94000 |
| }, |
| { |
| "epoch": 1.3356778742828723, |
| "eval_runtime": 926.8189, |
| "eval_samples_per_second": 151.866, |
| "eval_steps_per_second": 37.966, |
| "step": 94000 |
| }, |
| { |
| "epoch": 1.3427825438269303, |
| "grad_norm": 23.338428497314453, |
| "learning_rate": 3.070126042680841e-05, |
| "loss": 3.4786, |
| "step": 94500 |
| }, |
| { |
| "epoch": 1.349887213370988, |
| "grad_norm": 17.941577911376953, |
| "learning_rate": 3.056995500355235e-05, |
| "loss": 3.4714, |
| "step": 95000 |
| }, |
| { |
| "epoch": 1.3569918829150458, |
| "grad_norm": 28.672653198242188, |
| "learning_rate": 3.0438386443175544e-05, |
| "loss": 3.5097, |
| "step": 95500 |
| }, |
| { |
| "epoch": 1.3640965524591038, |
| "grad_norm": 18.826953887939453, |
| "learning_rate": 3.0306817882798726e-05, |
| "loss": 3.5081, |
| "step": 96000 |
| }, |
| { |
| "epoch": 1.3640965524591038, |
| "eval_runtime": 938.6101, |
| "eval_samples_per_second": 149.958, |
| "eval_steps_per_second": 37.489, |
| "step": 96000 |
| }, |
| { |
| "epoch": 1.3712012220031615, |
| "grad_norm": 15.427292823791504, |
| "learning_rate": 3.017524932242192e-05, |
| "loss": 3.4842, |
| "step": 96500 |
| }, |
| { |
| "epoch": 1.3783058915472193, |
| "grad_norm": 15.034753799438477, |
| "learning_rate": 3.00436807620451e-05, |
| "loss": 3.5055, |
| "step": 97000 |
| }, |
| { |
| "epoch": 1.3854105610912772, |
| "grad_norm": 16.047800064086914, |
| "learning_rate": 2.9912375338789045e-05, |
| "loss": 3.4855, |
| "step": 97500 |
| }, |
| { |
| "epoch": 1.392515230635335, |
| "grad_norm": 16.472871780395508, |
| "learning_rate": 2.9780806778412234e-05, |
| "loss": 3.4864, |
| "step": 98000 |
| }, |
| { |
| "epoch": 1.392515230635335, |
| "eval_runtime": 923.7428, |
| "eval_samples_per_second": 152.371, |
| "eval_steps_per_second": 38.093, |
| "step": 98000 |
| }, |
| { |
| "epoch": 1.399619900179393, |
| "grad_norm": 15.83158016204834, |
| "learning_rate": 2.964923821803542e-05, |
| "loss": 3.4872, |
| "step": 98500 |
| }, |
| { |
| "epoch": 1.4067245697234507, |
| "grad_norm": 22.84076499938965, |
| "learning_rate": 2.951766965765861e-05, |
| "loss": 3.4432, |
| "step": 99000 |
| }, |
| { |
| "epoch": 1.4138292392675087, |
| "grad_norm": 15.516064643859863, |
| "learning_rate": 2.9386364234402547e-05, |
| "loss": 3.4367, |
| "step": 99500 |
| }, |
| { |
| "epoch": 1.4209339088115664, |
| "grad_norm": 18.551212310791016, |
| "learning_rate": 2.9254795674025735e-05, |
| "loss": 3.4401, |
| "step": 100000 |
| }, |
| { |
| "epoch": 1.4209339088115664, |
| "eval_runtime": 939.3652, |
| "eval_samples_per_second": 149.837, |
| "eval_steps_per_second": 37.459, |
| "step": 100000 |
| }, |
| { |
| "epoch": 1.4280385783556242, |
| "grad_norm": 15.68122673034668, |
| "learning_rate": 2.9123227113648928e-05, |
| "loss": 3.4569, |
| "step": 100500 |
| }, |
| { |
| "epoch": 1.4351432478996822, |
| "grad_norm": 18.92440414428711, |
| "learning_rate": 2.899165855327211e-05, |
| "loss": 3.445, |
| "step": 101000 |
| }, |
| { |
| "epoch": 1.44224791744374, |
| "grad_norm": 21.443042755126953, |
| "learning_rate": 2.8860353130016055e-05, |
| "loss": 3.4473, |
| "step": 101500 |
| }, |
| { |
| "epoch": 1.4493525869877977, |
| "grad_norm": 15.25529670715332, |
| "learning_rate": 2.872878456963924e-05, |
| "loss": 3.4833, |
| "step": 102000 |
| }, |
| { |
| "epoch": 1.4493525869877977, |
| "eval_runtime": 958.8353, |
| "eval_samples_per_second": 146.795, |
| "eval_steps_per_second": 36.699, |
| "step": 102000 |
| }, |
| { |
| "epoch": 1.4564572565318556, |
| "grad_norm": 20.19648551940918, |
| "learning_rate": 2.859721600926243e-05, |
| "loss": 3.4869, |
| "step": 102500 |
| }, |
| { |
| "epoch": 1.4635619260759134, |
| "grad_norm": 20.799802780151367, |
| "learning_rate": 2.8465647448885618e-05, |
| "loss": 3.5262, |
| "step": 103000 |
| }, |
| { |
| "epoch": 1.4706665956199712, |
| "grad_norm": 14.214009284973145, |
| "learning_rate": 2.8334078888508803e-05, |
| "loss": 3.465, |
| "step": 103500 |
| }, |
| { |
| "epoch": 1.4777712651640291, |
| "grad_norm": 14.79015827178955, |
| "learning_rate": 2.8202773465252748e-05, |
| "loss": 3.4532, |
| "step": 104000 |
| }, |
| { |
| "epoch": 1.4777712651640291, |
| "eval_runtime": 938.9838, |
| "eval_samples_per_second": 149.898, |
| "eval_steps_per_second": 37.475, |
| "step": 104000 |
| }, |
| { |
| "epoch": 1.4848759347080869, |
| "grad_norm": 19.22818946838379, |
| "learning_rate": 2.807120490487593e-05, |
| "loss": 3.4953, |
| "step": 104500 |
| }, |
| { |
| "epoch": 1.4919806042521446, |
| "grad_norm": 24.664451599121094, |
| "learning_rate": 2.7939899481619875e-05, |
| "loss": 3.4737, |
| "step": 105000 |
| }, |
| { |
| "epoch": 1.4990852737962026, |
| "grad_norm": 13.840934753417969, |
| "learning_rate": 2.780833092124306e-05, |
| "loss": 3.4858, |
| "step": 105500 |
| }, |
| { |
| "epoch": 1.5061899433402604, |
| "grad_norm": 18.443531036376953, |
| "learning_rate": 2.767676236086625e-05, |
| "loss": 3.4521, |
| "step": 106000 |
| }, |
| { |
| "epoch": 1.5061899433402604, |
| "eval_runtime": 938.3751, |
| "eval_samples_per_second": 149.995, |
| "eval_steps_per_second": 37.499, |
| "step": 106000 |
| }, |
| { |
| "epoch": 1.513294612884318, |
| "grad_norm": 27.919483184814453, |
| "learning_rate": 2.7545193800489438e-05, |
| "loss": 3.4606, |
| "step": 106500 |
| }, |
| { |
| "epoch": 1.520399282428376, |
| "grad_norm": 15.594362258911133, |
| "learning_rate": 2.7413625240112624e-05, |
| "loss": 3.4488, |
| "step": 107000 |
| }, |
| { |
| "epoch": 1.527503951972434, |
| "grad_norm": 15.159219741821289, |
| "learning_rate": 2.7282319816856565e-05, |
| "loss": 3.3977, |
| "step": 107500 |
| }, |
| { |
| "epoch": 1.5346086215164916, |
| "grad_norm": 18.7772159576416, |
| "learning_rate": 2.715075125647975e-05, |
| "loss": 3.4667, |
| "step": 108000 |
| }, |
| { |
| "epoch": 1.5346086215164916, |
| "eval_runtime": 935.6439, |
| "eval_samples_per_second": 150.433, |
| "eval_steps_per_second": 37.608, |
| "step": 108000 |
| }, |
| { |
| "epoch": 1.5417132910605496, |
| "grad_norm": 18.113378524780273, |
| "learning_rate": 2.701918269610294e-05, |
| "loss": 3.4402, |
| "step": 108500 |
| }, |
| { |
| "epoch": 1.5488179606046075, |
| "grad_norm": 23.691232681274414, |
| "learning_rate": 2.6887614135726132e-05, |
| "loss": 3.5135, |
| "step": 109000 |
| }, |
| { |
| "epoch": 1.555922630148665, |
| "grad_norm": 16.19075584411621, |
| "learning_rate": 2.675630871247007e-05, |
| "loss": 3.4753, |
| "step": 109500 |
| }, |
| { |
| "epoch": 1.563027299692723, |
| "grad_norm": 18.575515747070312, |
| "learning_rate": 2.662474015209326e-05, |
| "loss": 3.4945, |
| "step": 110000 |
| }, |
| { |
| "epoch": 1.563027299692723, |
| "eval_runtime": 912.3292, |
| "eval_samples_per_second": 154.278, |
| "eval_steps_per_second": 38.569, |
| "step": 110000 |
| }, |
| { |
| "epoch": 1.570131969236781, |
| "grad_norm": 22.801149368286133, |
| "learning_rate": 2.6493171591716444e-05, |
| "loss": 3.4782, |
| "step": 110500 |
| }, |
| { |
| "epoch": 1.5772366387808388, |
| "grad_norm": 15.964780807495117, |
| "learning_rate": 2.6361603031339633e-05, |
| "loss": 3.4975, |
| "step": 111000 |
| }, |
| { |
| "epoch": 1.5843413083248965, |
| "grad_norm": 15.882137298583984, |
| "learning_rate": 2.6230034470962822e-05, |
| "loss": 3.4472, |
| "step": 111500 |
| }, |
| { |
| "epoch": 1.5914459778689545, |
| "grad_norm": 14.962567329406738, |
| "learning_rate": 2.6098465910586007e-05, |
| "loss": 3.4388, |
| "step": 112000 |
| }, |
| { |
| "epoch": 1.5914459778689545, |
| "eval_runtime": 913.3848, |
| "eval_samples_per_second": 154.099, |
| "eval_steps_per_second": 38.525, |
| "step": 112000 |
| }, |
| { |
| "epoch": 1.5985506474130122, |
| "grad_norm": 21.8176326751709, |
| "learning_rate": 2.596716048732995e-05, |
| "loss": 3.495, |
| "step": 112500 |
| }, |
| { |
| "epoch": 1.60565531695707, |
| "grad_norm": 16.743026733398438, |
| "learning_rate": 2.5835591926953134e-05, |
| "loss": 3.4125, |
| "step": 113000 |
| }, |
| { |
| "epoch": 1.612759986501128, |
| "grad_norm": 18.006900787353516, |
| "learning_rate": 2.5704023366576323e-05, |
| "loss": 3.4638, |
| "step": 113500 |
| }, |
| { |
| "epoch": 1.6198646560451857, |
| "grad_norm": 19.7940731048584, |
| "learning_rate": 2.5572454806199515e-05, |
| "loss": 3.4856, |
| "step": 114000 |
| }, |
| { |
| "epoch": 1.6198646560451857, |
| "eval_runtime": 974.062, |
| "eval_samples_per_second": 144.5, |
| "eval_steps_per_second": 36.125, |
| "step": 114000 |
| }, |
| { |
| "epoch": 1.6269693255892435, |
| "grad_norm": 19.187955856323242, |
| "learning_rate": 2.5440886245822697e-05, |
| "loss": 3.4502, |
| "step": 114500 |
| }, |
| { |
| "epoch": 1.6340739951333014, |
| "grad_norm": 20.042858123779297, |
| "learning_rate": 2.5309317685445886e-05, |
| "loss": 3.517, |
| "step": 115000 |
| }, |
| { |
| "epoch": 1.6411786646773592, |
| "grad_norm": 18.358047485351562, |
| "learning_rate": 2.5178012262189828e-05, |
| "loss": 3.4642, |
| "step": 115500 |
| }, |
| { |
| "epoch": 1.648283334221417, |
| "grad_norm": 15.388800621032715, |
| "learning_rate": 2.5046443701813017e-05, |
| "loss": 3.4721, |
| "step": 116000 |
| }, |
| { |
| "epoch": 1.648283334221417, |
| "eval_runtime": 918.8597, |
| "eval_samples_per_second": 153.181, |
| "eval_steps_per_second": 38.295, |
| "step": 116000 |
| }, |
| { |
| "epoch": 1.655388003765475, |
| "grad_norm": 14.33470630645752, |
| "learning_rate": 2.4914875141436202e-05, |
| "loss": 3.4237, |
| "step": 116500 |
| }, |
| { |
| "epoch": 1.6624926733095327, |
| "grad_norm": 20.042253494262695, |
| "learning_rate": 2.478330658105939e-05, |
| "loss": 3.4401, |
| "step": 117000 |
| }, |
| { |
| "epoch": 1.6695973428535904, |
| "grad_norm": 18.210189819335938, |
| "learning_rate": 2.465173802068258e-05, |
| "loss": 3.459, |
| "step": 117500 |
| }, |
| { |
| "epoch": 1.6767020123976484, |
| "grad_norm": 15.223936080932617, |
| "learning_rate": 2.4520169460305765e-05, |
| "loss": 3.4717, |
| "step": 118000 |
| }, |
| { |
| "epoch": 1.6767020123976484, |
| "eval_runtime": 936.3608, |
| "eval_samples_per_second": 150.318, |
| "eval_steps_per_second": 37.58, |
| "step": 118000 |
| }, |
| { |
| "epoch": 1.6838066819417064, |
| "grad_norm": 17.645105361938477, |
| "learning_rate": 2.4388864037049707e-05, |
| "loss": 3.4599, |
| "step": 118500 |
| }, |
| { |
| "epoch": 1.6909113514857639, |
| "grad_norm": 20.56248664855957, |
| "learning_rate": 2.4257295476672896e-05, |
| "loss": 3.5143, |
| "step": 119000 |
| }, |
| { |
| "epoch": 1.6980160210298219, |
| "grad_norm": 18.7009334564209, |
| "learning_rate": 2.412572691629608e-05, |
| "loss": 3.4491, |
| "step": 119500 |
| }, |
| { |
| "epoch": 1.7051206905738798, |
| "grad_norm": 20.2117977142334, |
| "learning_rate": 2.3994158355919273e-05, |
| "loss": 3.4439, |
| "step": 120000 |
| }, |
| { |
| "epoch": 1.7051206905738798, |
| "eval_runtime": 905.5948, |
| "eval_samples_per_second": 155.425, |
| "eval_steps_per_second": 38.856, |
| "step": 120000 |
| }, |
| { |
| "epoch": 1.7122253601179374, |
| "grad_norm": 16.9014892578125, |
| "learning_rate": 2.386285293266321e-05, |
| "loss": 3.4693, |
| "step": 120500 |
| }, |
| { |
| "epoch": 1.7193300296619953, |
| "grad_norm": 17.105438232421875, |
| "learning_rate": 2.37312843722864e-05, |
| "loss": 3.4436, |
| "step": 121000 |
| }, |
| { |
| "epoch": 1.7264346992060533, |
| "grad_norm": 20.201711654663086, |
| "learning_rate": 2.3599715811909586e-05, |
| "loss": 3.3987, |
| "step": 121500 |
| }, |
| { |
| "epoch": 1.733539368750111, |
| "grad_norm": 16.100046157836914, |
| "learning_rate": 2.3468147251532775e-05, |
| "loss": 3.4672, |
| "step": 122000 |
| }, |
| { |
| "epoch": 1.733539368750111, |
| "eval_runtime": 930.2492, |
| "eval_samples_per_second": 151.306, |
| "eval_steps_per_second": 37.826, |
| "step": 122000 |
| }, |
| { |
| "epoch": 1.7406440382941688, |
| "grad_norm": 21.168506622314453, |
| "learning_rate": 2.3336578691155964e-05, |
| "loss": 3.4675, |
| "step": 122500 |
| }, |
| { |
| "epoch": 1.7477487078382268, |
| "grad_norm": 20.836383819580078, |
| "learning_rate": 2.320501013077915e-05, |
| "loss": 3.4217, |
| "step": 123000 |
| }, |
| { |
| "epoch": 1.7548533773822845, |
| "grad_norm": 18.60804557800293, |
| "learning_rate": 2.3073441570402338e-05, |
| "loss": 3.4592, |
| "step": 123500 |
| }, |
| { |
| "epoch": 1.7619580469263423, |
| "grad_norm": 18.239465713500977, |
| "learning_rate": 2.2941873010025523e-05, |
| "loss": 3.4535, |
| "step": 124000 |
| }, |
| { |
| "epoch": 1.7619580469263423, |
| "eval_runtime": 973.6158, |
| "eval_samples_per_second": 144.566, |
| "eval_steps_per_second": 36.142, |
| "step": 124000 |
| }, |
| { |
| "epoch": 1.7690627164704003, |
| "grad_norm": 14.261148452758789, |
| "learning_rate": 2.2810304449648712e-05, |
| "loss": 3.4712, |
| "step": 124500 |
| }, |
| { |
| "epoch": 1.776167386014458, |
| "grad_norm": 19.631301879882812, |
| "learning_rate": 2.2678999026392657e-05, |
| "loss": 3.4446, |
| "step": 125000 |
| }, |
| { |
| "epoch": 1.7832720555585158, |
| "grad_norm": 16.400991439819336, |
| "learning_rate": 2.2547430466015843e-05, |
| "loss": 3.4876, |
| "step": 125500 |
| }, |
| { |
| "epoch": 1.7903767251025737, |
| "grad_norm": 17.142208099365234, |
| "learning_rate": 2.2415861905639028e-05, |
| "loss": 3.4329, |
| "step": 126000 |
| }, |
| { |
| "epoch": 1.7903767251025737, |
| "eval_runtime": 932.1027, |
| "eval_samples_per_second": 151.005, |
| "eval_steps_per_second": 37.751, |
| "step": 126000 |
| }, |
| { |
| "epoch": 1.7974813946466315, |
| "grad_norm": 24.79787826538086, |
| "learning_rate": 2.228455648238297e-05, |
| "loss": 3.4708, |
| "step": 126500 |
| }, |
| { |
| "epoch": 1.8045860641906892, |
| "grad_norm": 16.074108123779297, |
| "learning_rate": 2.2152987922006158e-05, |
| "loss": 3.4399, |
| "step": 127000 |
| }, |
| { |
| "epoch": 1.8116907337347472, |
| "grad_norm": 13.779826164245605, |
| "learning_rate": 2.2021419361629347e-05, |
| "loss": 3.436, |
| "step": 127500 |
| }, |
| { |
| "epoch": 1.818795403278805, |
| "grad_norm": 23.16010284423828, |
| "learning_rate": 2.1889850801252536e-05, |
| "loss": 3.4112, |
| "step": 128000 |
| }, |
| { |
| "epoch": 1.818795403278805, |
| "eval_runtime": 918.3318, |
| "eval_samples_per_second": 153.269, |
| "eval_steps_per_second": 38.317, |
| "step": 128000 |
| }, |
| { |
| "epoch": 1.8259000728228627, |
| "grad_norm": 14.259251594543457, |
| "learning_rate": 2.175828224087572e-05, |
| "loss": 3.429, |
| "step": 128500 |
| }, |
| { |
| "epoch": 1.8330047423669207, |
| "grad_norm": 13.93868637084961, |
| "learning_rate": 2.1626976817619663e-05, |
| "loss": 3.4684, |
| "step": 129000 |
| }, |
| { |
| "epoch": 1.8401094119109787, |
| "grad_norm": 22.935352325439453, |
| "learning_rate": 2.149540825724285e-05, |
| "loss": 3.4391, |
| "step": 129500 |
| }, |
| { |
| "epoch": 1.8472140814550362, |
| "grad_norm": 19.57620620727539, |
| "learning_rate": 2.136383969686604e-05, |
| "loss": 3.4188, |
| "step": 130000 |
| }, |
| { |
| "epoch": 1.8472140814550362, |
| "eval_runtime": 945.012, |
| "eval_samples_per_second": 148.942, |
| "eval_steps_per_second": 37.236, |
| "step": 130000 |
| }, |
| { |
| "epoch": 1.8543187509990942, |
| "grad_norm": 16.652795791625977, |
| "learning_rate": 2.1232271136489226e-05, |
| "loss": 3.4518, |
| "step": 130500 |
| }, |
| { |
| "epoch": 1.8614234205431521, |
| "grad_norm": 21.736949920654297, |
| "learning_rate": 2.1100702576112412e-05, |
| "loss": 3.4543, |
| "step": 131000 |
| }, |
| { |
| "epoch": 1.8685280900872097, |
| "grad_norm": 17.40629005432129, |
| "learning_rate": 2.09691340157356e-05, |
| "loss": 3.4471, |
| "step": 131500 |
| }, |
| { |
| "epoch": 1.8756327596312676, |
| "grad_norm": 22.435462951660156, |
| "learning_rate": 2.0837565455358786e-05, |
| "loss": 3.4871, |
| "step": 132000 |
| }, |
| { |
| "epoch": 1.8756327596312676, |
| "eval_runtime": 943.7281, |
| "eval_samples_per_second": 149.145, |
| "eval_steps_per_second": 37.286, |
| "step": 132000 |
| }, |
| { |
| "epoch": 1.8827374291753256, |
| "grad_norm": 15.484688758850098, |
| "learning_rate": 2.070599689498198e-05, |
| "loss": 3.4365, |
| "step": 132500 |
| }, |
| { |
| "epoch": 1.8898420987193834, |
| "grad_norm": 20.110355377197266, |
| "learning_rate": 2.057469147172592e-05, |
| "loss": 3.4458, |
| "step": 133000 |
| }, |
| { |
| "epoch": 1.896946768263441, |
| "grad_norm": 27.856857299804688, |
| "learning_rate": 2.0443122911349105e-05, |
| "loss": 3.4394, |
| "step": 133500 |
| }, |
| { |
| "epoch": 1.904051437807499, |
| "grad_norm": 16.6646671295166, |
| "learning_rate": 2.031155435097229e-05, |
| "loss": 3.3948, |
| "step": 134000 |
| }, |
| { |
| "epoch": 1.904051437807499, |
| "eval_runtime": 916.2941, |
| "eval_samples_per_second": 153.61, |
| "eval_steps_per_second": 38.403, |
| "step": 134000 |
| }, |
| { |
| "epoch": 1.9111561073515568, |
| "grad_norm": 17.75814437866211, |
| "learning_rate": 2.017998579059548e-05, |
| "loss": 3.4159, |
| "step": 134500 |
| }, |
| { |
| "epoch": 1.9182607768956146, |
| "grad_norm": 29.82477569580078, |
| "learning_rate": 2.004841723021867e-05, |
| "loss": 3.4183, |
| "step": 135000 |
| }, |
| { |
| "epoch": 1.9253654464396726, |
| "grad_norm": 18.365196228027344, |
| "learning_rate": 1.991711180696261e-05, |
| "loss": 3.4201, |
| "step": 135500 |
| }, |
| { |
| "epoch": 1.9324701159837303, |
| "grad_norm": 19.756254196166992, |
| "learning_rate": 1.9785543246585795e-05, |
| "loss": 3.4536, |
| "step": 136000 |
| }, |
| { |
| "epoch": 1.9324701159837303, |
| "eval_runtime": 927.9317, |
| "eval_samples_per_second": 151.684, |
| "eval_steps_per_second": 37.921, |
| "step": 136000 |
| }, |
| { |
| "epoch": 1.939574785527788, |
| "grad_norm": 24.40082359313965, |
| "learning_rate": 1.9653974686208984e-05, |
| "loss": 3.4828, |
| "step": 136500 |
| }, |
| { |
| "epoch": 1.946679455071846, |
| "grad_norm": 16.37041473388672, |
| "learning_rate": 1.952240612583217e-05, |
| "loss": 3.4541, |
| "step": 137000 |
| }, |
| { |
| "epoch": 1.9537841246159038, |
| "grad_norm": 15.545867919921875, |
| "learning_rate": 1.939110070257611e-05, |
| "loss": 3.4716, |
| "step": 137500 |
| }, |
| { |
| "epoch": 1.9608887941599615, |
| "grad_norm": 27.384632110595703, |
| "learning_rate": 1.9259532142199303e-05, |
| "loss": 3.4382, |
| "step": 138000 |
| }, |
| { |
| "epoch": 1.9608887941599615, |
| "eval_runtime": 910.3209, |
| "eval_samples_per_second": 154.618, |
| "eval_steps_per_second": 38.655, |
| "step": 138000 |
| }, |
| { |
| "epoch": 1.9679934637040195, |
| "grad_norm": 14.11701488494873, |
| "learning_rate": 1.912796358182249e-05, |
| "loss": 3.4248, |
| "step": 138500 |
| }, |
| { |
| "epoch": 1.9750981332480773, |
| "grad_norm": 15.275626182556152, |
| "learning_rate": 1.8996395021445674e-05, |
| "loss": 3.4646, |
| "step": 139000 |
| }, |
| { |
| "epoch": 1.982202802792135, |
| "grad_norm": 15.29659366607666, |
| "learning_rate": 1.8865089598189616e-05, |
| "loss": 3.4473, |
| "step": 139500 |
| }, |
| { |
| "epoch": 1.989307472336193, |
| "grad_norm": 15.644397735595703, |
| "learning_rate": 1.8733521037812805e-05, |
| "loss": 3.4239, |
| "step": 140000 |
| }, |
| { |
| "epoch": 1.989307472336193, |
| "eval_runtime": 938.7255, |
| "eval_samples_per_second": 149.939, |
| "eval_steps_per_second": 37.485, |
| "step": 140000 |
| }, |
| { |
| "epoch": 1.996412141880251, |
| "grad_norm": 19.77324676513672, |
| "learning_rate": 1.8601952477435994e-05, |
| "loss": 3.4572, |
| "step": 140500 |
| }, |
| { |
| "epoch": 2.0035168114243085, |
| "grad_norm": 22.91030502319336, |
| "learning_rate": 1.8470383917059182e-05, |
| "loss": 3.4282, |
| "step": 141000 |
| }, |
| { |
| "epoch": 2.0106214809683665, |
| "grad_norm": 21.323062896728516, |
| "learning_rate": 1.8338815356682368e-05, |
| "loss": 3.4858, |
| "step": 141500 |
| }, |
| { |
| "epoch": 2.0177261505124244, |
| "grad_norm": 15.01187801361084, |
| "learning_rate": 1.8207246796305553e-05, |
| "loss": 3.4014, |
| "step": 142000 |
| }, |
| { |
| "epoch": 2.0177261505124244, |
| "eval_runtime": 887.7808, |
| "eval_samples_per_second": 158.544, |
| "eval_steps_per_second": 39.636, |
| "step": 142000 |
| }, |
| { |
| "epoch": 2.024830820056482, |
| "grad_norm": 17.312917709350586, |
| "learning_rate": 1.8075678235928746e-05, |
| "loss": 3.3528, |
| "step": 142500 |
| }, |
| { |
| "epoch": 2.03193548960054, |
| "grad_norm": 15.480268478393555, |
| "learning_rate": 1.794410967555193e-05, |
| "loss": 3.4759, |
| "step": 143000 |
| }, |
| { |
| "epoch": 2.039040159144598, |
| "grad_norm": 19.289600372314453, |
| "learning_rate": 1.7812804252295873e-05, |
| "loss": 3.4416, |
| "step": 143500 |
| }, |
| { |
| "epoch": 2.0461448286886554, |
| "grad_norm": 13.766185760498047, |
| "learning_rate": 1.7681498829039814e-05, |
| "loss": 3.4511, |
| "step": 144000 |
| }, |
| { |
| "epoch": 2.0461448286886554, |
| "eval_runtime": 942.9255, |
| "eval_samples_per_second": 149.272, |
| "eval_steps_per_second": 37.318, |
| "step": 144000 |
| }, |
| { |
| "epoch": 2.0532494982327134, |
| "grad_norm": 15.767132759094238, |
| "learning_rate": 1.7549930268663e-05, |
| "loss": 3.4972, |
| "step": 144500 |
| }, |
| { |
| "epoch": 2.0603541677767714, |
| "grad_norm": 16.27056121826172, |
| "learning_rate": 1.7418361708286188e-05, |
| "loss": 3.4371, |
| "step": 145000 |
| }, |
| { |
| "epoch": 2.067458837320829, |
| "grad_norm": 19.297935485839844, |
| "learning_rate": 1.7286793147909377e-05, |
| "loss": 3.4419, |
| "step": 145500 |
| }, |
| { |
| "epoch": 2.074563506864887, |
| "grad_norm": 18.82818603515625, |
| "learning_rate": 1.7155224587532566e-05, |
| "loss": 3.4467, |
| "step": 146000 |
| }, |
| { |
| "epoch": 2.074563506864887, |
| "eval_runtime": 921.9813, |
| "eval_samples_per_second": 152.663, |
| "eval_steps_per_second": 38.166, |
| "step": 146000 |
| }, |
| { |
| "epoch": 2.081668176408945, |
| "grad_norm": 16.8089656829834, |
| "learning_rate": 1.702365602715575e-05, |
| "loss": 3.4412, |
| "step": 146500 |
| }, |
| { |
| "epoch": 2.0887728459530024, |
| "grad_norm": 17.762744903564453, |
| "learning_rate": 1.6892087466778937e-05, |
| "loss": 3.4644, |
| "step": 147000 |
| }, |
| { |
| "epoch": 2.0958775154970604, |
| "grad_norm": 17.5285587310791, |
| "learning_rate": 1.6760518906402126e-05, |
| "loss": 3.437, |
| "step": 147500 |
| }, |
| { |
| "epoch": 2.1029821850411183, |
| "grad_norm": 20.605100631713867, |
| "learning_rate": 1.662921348314607e-05, |
| "loss": 3.4551, |
| "step": 148000 |
| }, |
| { |
| "epoch": 2.1029821850411183, |
| "eval_runtime": 902.7522, |
| "eval_samples_per_second": 155.914, |
| "eval_steps_per_second": 38.979, |
| "step": 148000 |
| }, |
| { |
| "epoch": 2.1100868545851763, |
| "grad_norm": 17.58387565612793, |
| "learning_rate": 1.6497644922769256e-05, |
| "loss": 3.4763, |
| "step": 148500 |
| }, |
| { |
| "epoch": 2.117191524129234, |
| "grad_norm": 20.26319122314453, |
| "learning_rate": 1.6366076362392445e-05, |
| "loss": 3.4081, |
| "step": 149000 |
| }, |
| { |
| "epoch": 2.124296193673292, |
| "grad_norm": 12.938958168029785, |
| "learning_rate": 1.623450780201563e-05, |
| "loss": 3.4738, |
| "step": 149500 |
| }, |
| { |
| "epoch": 2.13140086321735, |
| "grad_norm": 18.24934196472168, |
| "learning_rate": 1.6102939241638816e-05, |
| "loss": 3.4015, |
| "step": 150000 |
| }, |
| { |
| "epoch": 2.13140086321735, |
| "eval_runtime": 952.2281, |
| "eval_samples_per_second": 147.813, |
| "eval_steps_per_second": 36.953, |
| "step": 150000 |
| }, |
| { |
| "epoch": 2.1385055327614073, |
| "grad_norm": 17.659948348999023, |
| "learning_rate": 1.597137068126201e-05, |
| "loss": 3.4798, |
| "step": 150500 |
| }, |
| { |
| "epoch": 2.1456102023054653, |
| "grad_norm": 16.74921226501465, |
| "learning_rate": 1.5839802120885194e-05, |
| "loss": 3.4662, |
| "step": 151000 |
| }, |
| { |
| "epoch": 2.1527148718495233, |
| "grad_norm": 18.972862243652344, |
| "learning_rate": 1.5708496697629135e-05, |
| "loss": 3.4461, |
| "step": 151500 |
| }, |
| { |
| "epoch": 2.159819541393581, |
| "grad_norm": 17.463333129882812, |
| "learning_rate": 1.557692813725232e-05, |
| "loss": 3.4509, |
| "step": 152000 |
| }, |
| { |
| "epoch": 2.159819541393581, |
| "eval_runtime": 952.341, |
| "eval_samples_per_second": 147.796, |
| "eval_steps_per_second": 36.949, |
| "step": 152000 |
| }, |
| { |
| "epoch": 2.1669242109376388, |
| "grad_norm": 17.932537078857422, |
| "learning_rate": 1.544535957687551e-05, |
| "loss": 3.4188, |
| "step": 152500 |
| }, |
| { |
| "epoch": 2.1740288804816967, |
| "grad_norm": 18.25333023071289, |
| "learning_rate": 1.53137910164987e-05, |
| "loss": 3.4275, |
| "step": 153000 |
| }, |
| { |
| "epoch": 2.1811335500257543, |
| "grad_norm": 15.192218780517578, |
| "learning_rate": 1.5182222456121886e-05, |
| "loss": 3.4497, |
| "step": 153500 |
| }, |
| { |
| "epoch": 2.1882382195698122, |
| "grad_norm": 59.057674407958984, |
| "learning_rate": 1.5050653895745073e-05, |
| "loss": 3.4341, |
| "step": 154000 |
| }, |
| { |
| "epoch": 2.1882382195698122, |
| "eval_runtime": 907.5029, |
| "eval_samples_per_second": 155.098, |
| "eval_steps_per_second": 38.775, |
| "step": 154000 |
| }, |
| { |
| "epoch": 2.19534288911387, |
| "grad_norm": 17.72281265258789, |
| "learning_rate": 1.491908533536826e-05, |
| "loss": 3.4777, |
| "step": 154500 |
| }, |
| { |
| "epoch": 2.2024475586579277, |
| "grad_norm": 17.717147827148438, |
| "learning_rate": 1.478751677499145e-05, |
| "loss": 3.4275, |
| "step": 155000 |
| }, |
| { |
| "epoch": 2.2095522282019857, |
| "grad_norm": 20.065603256225586, |
| "learning_rate": 1.4655948214614636e-05, |
| "loss": 3.3751, |
| "step": 155500 |
| }, |
| { |
| "epoch": 2.2166568977460437, |
| "grad_norm": 19.137521743774414, |
| "learning_rate": 1.4524642791358578e-05, |
| "loss": 3.4704, |
| "step": 156000 |
| }, |
| { |
| "epoch": 2.2166568977460437, |
| "eval_runtime": 923.9445, |
| "eval_samples_per_second": 152.338, |
| "eval_steps_per_second": 38.085, |
| "step": 156000 |
| }, |
| { |
| "epoch": 2.223761567290101, |
| "grad_norm": 28.932388305664062, |
| "learning_rate": 1.4393074230981765e-05, |
| "loss": 3.4592, |
| "step": 156500 |
| }, |
| { |
| "epoch": 2.230866236834159, |
| "grad_norm": 18.90865135192871, |
| "learning_rate": 1.4261505670604952e-05, |
| "loss": 3.4281, |
| "step": 157000 |
| }, |
| { |
| "epoch": 2.237970906378217, |
| "grad_norm": 14.286882400512695, |
| "learning_rate": 1.4129937110228143e-05, |
| "loss": 3.4169, |
| "step": 157500 |
| }, |
| { |
| "epoch": 2.245075575922275, |
| "grad_norm": 25.19231605529785, |
| "learning_rate": 1.3998631686972082e-05, |
| "loss": 3.4514, |
| "step": 158000 |
| }, |
| { |
| "epoch": 2.245075575922275, |
| "eval_runtime": 955.1982, |
| "eval_samples_per_second": 147.354, |
| "eval_steps_per_second": 36.838, |
| "step": 158000 |
| }, |
| { |
| "epoch": 2.2521802454663327, |
| "grad_norm": 15.808695793151855, |
| "learning_rate": 1.386706312659527e-05, |
| "loss": 3.4476, |
| "step": 158500 |
| }, |
| { |
| "epoch": 2.2592849150103906, |
| "grad_norm": 20.826406478881836, |
| "learning_rate": 1.3735494566218457e-05, |
| "loss": 3.4425, |
| "step": 159000 |
| }, |
| { |
| "epoch": 2.2663895845544486, |
| "grad_norm": 16.025421142578125, |
| "learning_rate": 1.3603926005841644e-05, |
| "loss": 3.4537, |
| "step": 159500 |
| }, |
| { |
| "epoch": 2.273494254098506, |
| "grad_norm": 23.02109146118164, |
| "learning_rate": 1.3472620582585585e-05, |
| "loss": 3.4596, |
| "step": 160000 |
| }, |
| { |
| "epoch": 2.273494254098506, |
| "eval_runtime": 994.6152, |
| "eval_samples_per_second": 141.514, |
| "eval_steps_per_second": 35.379, |
| "step": 160000 |
| }, |
| { |
| "epoch": 2.280598923642564, |
| "grad_norm": 24.307044982910156, |
| "learning_rate": 1.3341052022208774e-05, |
| "loss": 3.4252, |
| "step": 160500 |
| }, |
| { |
| "epoch": 2.287703593186622, |
| "grad_norm": 18.153467178344727, |
| "learning_rate": 1.3209483461831961e-05, |
| "loss": 3.412, |
| "step": 161000 |
| }, |
| { |
| "epoch": 2.2948082627306796, |
| "grad_norm": 22.00274658203125, |
| "learning_rate": 1.3077914901455148e-05, |
| "loss": 3.4418, |
| "step": 161500 |
| }, |
| { |
| "epoch": 2.3019129322747376, |
| "grad_norm": 14.408377647399902, |
| "learning_rate": 1.294660947819909e-05, |
| "loss": 3.4745, |
| "step": 162000 |
| }, |
| { |
| "epoch": 2.3019129322747376, |
| "eval_runtime": 957.1425, |
| "eval_samples_per_second": 147.054, |
| "eval_steps_per_second": 36.764, |
| "step": 162000 |
| }, |
| { |
| "epoch": 2.3090176018187956, |
| "grad_norm": 17.56698989868164, |
| "learning_rate": 1.2815040917822277e-05, |
| "loss": 3.4048, |
| "step": 162500 |
| }, |
| { |
| "epoch": 2.316122271362853, |
| "grad_norm": 14.062944412231445, |
| "learning_rate": 1.2683472357445468e-05, |
| "loss": 3.4323, |
| "step": 163000 |
| }, |
| { |
| "epoch": 2.323226940906911, |
| "grad_norm": 16.818817138671875, |
| "learning_rate": 1.2551903797068653e-05, |
| "loss": 3.4252, |
| "step": 163500 |
| }, |
| { |
| "epoch": 2.330331610450969, |
| "grad_norm": 29.905399322509766, |
| "learning_rate": 1.242033523669184e-05, |
| "loss": 3.4526, |
| "step": 164000 |
| }, |
| { |
| "epoch": 2.330331610450969, |
| "eval_runtime": 942.1268, |
| "eval_samples_per_second": 149.398, |
| "eval_steps_per_second": 37.35, |
| "step": 164000 |
| }, |
| { |
| "epoch": 2.3374362799950266, |
| "grad_norm": 15.426836967468262, |
| "learning_rate": 1.2288766676315027e-05, |
| "loss": 3.3833, |
| "step": 164500 |
| }, |
| { |
| "epoch": 2.3445409495390845, |
| "grad_norm": 17.668203353881836, |
| "learning_rate": 1.2157198115938216e-05, |
| "loss": 3.4454, |
| "step": 165000 |
| }, |
| { |
| "epoch": 2.3516456190831425, |
| "grad_norm": 19.938480377197266, |
| "learning_rate": 1.2025629555561404e-05, |
| "loss": 3.4777, |
| "step": 165500 |
| }, |
| { |
| "epoch": 2.3587502886272, |
| "grad_norm": 25.68340492248535, |
| "learning_rate": 1.1894324132305345e-05, |
| "loss": 3.4294, |
| "step": 166000 |
| }, |
| { |
| "epoch": 2.3587502886272, |
| "eval_runtime": 922.2117, |
| "eval_samples_per_second": 152.624, |
| "eval_steps_per_second": 38.156, |
| "step": 166000 |
| }, |
| { |
| "epoch": 2.365854958171258, |
| "grad_norm": 27.432424545288086, |
| "learning_rate": 1.1762755571928532e-05, |
| "loss": 3.4314, |
| "step": 166500 |
| }, |
| { |
| "epoch": 2.372959627715316, |
| "grad_norm": 16.082857131958008, |
| "learning_rate": 1.163118701155172e-05, |
| "loss": 3.4351, |
| "step": 167000 |
| }, |
| { |
| "epoch": 2.3800642972593735, |
| "grad_norm": 22.185914993286133, |
| "learning_rate": 1.1499618451174908e-05, |
| "loss": 3.461, |
| "step": 167500 |
| }, |
| { |
| "epoch": 2.3871689668034315, |
| "grad_norm": 24.197967529296875, |
| "learning_rate": 1.1368049890798095e-05, |
| "loss": 3.4533, |
| "step": 168000 |
| }, |
| { |
| "epoch": 2.3871689668034315, |
| "eval_runtime": 895.4635, |
| "eval_samples_per_second": 157.183, |
| "eval_steps_per_second": 39.296, |
| "step": 168000 |
| }, |
| { |
| "epoch": 2.3942736363474895, |
| "grad_norm": 17.674205780029297, |
| "learning_rate": 1.1236744467542037e-05, |
| "loss": 3.4427, |
| "step": 168500 |
| }, |
| { |
| "epoch": 2.401378305891547, |
| "grad_norm": 18.028247833251953, |
| "learning_rate": 1.1105175907165224e-05, |
| "loss": 3.4074, |
| "step": 169000 |
| }, |
| { |
| "epoch": 2.408482975435605, |
| "grad_norm": 22.633684158325195, |
| "learning_rate": 1.0973607346788411e-05, |
| "loss": 3.4411, |
| "step": 169500 |
| }, |
| { |
| "epoch": 2.415587644979663, |
| "grad_norm": 20.172569274902344, |
| "learning_rate": 1.08420387864116e-05, |
| "loss": 3.4517, |
| "step": 170000 |
| }, |
| { |
| "epoch": 2.415587644979663, |
| "eval_runtime": 934.155, |
| "eval_samples_per_second": 150.673, |
| "eval_steps_per_second": 37.668, |
| "step": 170000 |
| }, |
| { |
| "epoch": 2.4226923145237205, |
| "grad_norm": 20.472396850585938, |
| "learning_rate": 1.0710733363155541e-05, |
| "loss": 3.4382, |
| "step": 170500 |
| }, |
| { |
| "epoch": 2.4297969840677784, |
| "grad_norm": 18.737754821777344, |
| "learning_rate": 1.0579164802778729e-05, |
| "loss": 3.3906, |
| "step": 171000 |
| }, |
| { |
| "epoch": 2.4369016536118364, |
| "grad_norm": 25.360733032226562, |
| "learning_rate": 1.0447596242401916e-05, |
| "loss": 3.4101, |
| "step": 171500 |
| }, |
| { |
| "epoch": 2.4440063231558944, |
| "grad_norm": 17.960433959960938, |
| "learning_rate": 1.0316027682025103e-05, |
| "loss": 3.4239, |
| "step": 172000 |
| }, |
| { |
| "epoch": 2.4440063231558944, |
| "eval_runtime": 902.2, |
| "eval_samples_per_second": 156.01, |
| "eval_steps_per_second": 39.002, |
| "step": 172000 |
| }, |
| { |
| "epoch": 2.451110992699952, |
| "grad_norm": 14.151226997375488, |
| "learning_rate": 1.0184722258769044e-05, |
| "loss": 3.4173, |
| "step": 172500 |
| }, |
| { |
| "epoch": 2.45821566224401, |
| "grad_norm": 14.142123222351074, |
| "learning_rate": 1.0053153698392233e-05, |
| "loss": 3.4156, |
| "step": 173000 |
| }, |
| { |
| "epoch": 2.465320331788068, |
| "grad_norm": 21.70526695251465, |
| "learning_rate": 9.921848275136174e-06, |
| "loss": 3.458, |
| "step": 173500 |
| }, |
| { |
| "epoch": 2.4724250013321254, |
| "grad_norm": 18.53485870361328, |
| "learning_rate": 9.790279714759362e-06, |
| "loss": 3.447, |
| "step": 174000 |
| }, |
| { |
| "epoch": 2.4724250013321254, |
| "eval_runtime": 938.1633, |
| "eval_samples_per_second": 150.029, |
| "eval_steps_per_second": 37.507, |
| "step": 174000 |
| }, |
| { |
| "epoch": 2.4795296708761834, |
| "grad_norm": 18.21489143371582, |
| "learning_rate": 9.658711154382549e-06, |
| "loss": 3.3984, |
| "step": 174500 |
| }, |
| { |
| "epoch": 2.4866343404202413, |
| "grad_norm": 16.120616912841797, |
| "learning_rate": 9.527142594005736e-06, |
| "loss": 3.4071, |
| "step": 175000 |
| }, |
| { |
| "epoch": 2.493739009964299, |
| "grad_norm": 15.231195449829102, |
| "learning_rate": 9.395574033628925e-06, |
| "loss": 3.3855, |
| "step": 175500 |
| }, |
| { |
| "epoch": 2.500843679508357, |
| "grad_norm": 26.148868560791016, |
| "learning_rate": 9.264005473252112e-06, |
| "loss": 3.3718, |
| "step": 176000 |
| }, |
| { |
| "epoch": 2.500843679508357, |
| "eval_runtime": 931.3581, |
| "eval_samples_per_second": 151.126, |
| "eval_steps_per_second": 37.781, |
| "step": 176000 |
| }, |
| { |
| "epoch": 2.507948349052415, |
| "grad_norm": 22.703458786010742, |
| "learning_rate": 9.1324369128753e-06, |
| "loss": 3.4483, |
| "step": 176500 |
| }, |
| { |
| "epoch": 2.515053018596473, |
| "grad_norm": 24.028915405273438, |
| "learning_rate": 9.000868352498487e-06, |
| "loss": 3.4646, |
| "step": 177000 |
| }, |
| { |
| "epoch": 2.5221576881405303, |
| "grad_norm": 15.914198875427246, |
| "learning_rate": 8.869562929242428e-06, |
| "loss": 3.3819, |
| "step": 177500 |
| }, |
| { |
| "epoch": 2.5292623576845883, |
| "grad_norm": 20.722768783569336, |
| "learning_rate": 8.737994368865617e-06, |
| "loss": 3.4321, |
| "step": 178000 |
| }, |
| { |
| "epoch": 2.5292623576845883, |
| "eval_runtime": 931.1256, |
| "eval_samples_per_second": 151.163, |
| "eval_steps_per_second": 37.791, |
| "step": 178000 |
| }, |
| { |
| "epoch": 2.5363670272286463, |
| "grad_norm": 16.944055557250977, |
| "learning_rate": 8.606425808488804e-06, |
| "loss": 3.4277, |
| "step": 178500 |
| }, |
| { |
| "epoch": 2.543471696772704, |
| "grad_norm": 19.507802963256836, |
| "learning_rate": 8.474857248111991e-06, |
| "loss": 3.4652, |
| "step": 179000 |
| }, |
| { |
| "epoch": 2.5505763663167618, |
| "grad_norm": 17.919048309326172, |
| "learning_rate": 8.343288687735178e-06, |
| "loss": 3.4398, |
| "step": 179500 |
| }, |
| { |
| "epoch": 2.5576810358608197, |
| "grad_norm": 14.281726837158203, |
| "learning_rate": 8.211720127358367e-06, |
| "loss": 3.3992, |
| "step": 180000 |
| }, |
| { |
| "epoch": 2.5576810358608197, |
| "eval_runtime": 944.9306, |
| "eval_samples_per_second": 148.955, |
| "eval_steps_per_second": 37.239, |
| "step": 180000 |
| }, |
| { |
| "epoch": 2.5647857054048773, |
| "grad_norm": 14.9695463180542, |
| "learning_rate": 8.080151566981554e-06, |
| "loss": 3.4413, |
| "step": 180500 |
| }, |
| { |
| "epoch": 2.5718903749489352, |
| "grad_norm": 16.833791732788086, |
| "learning_rate": 7.948583006604742e-06, |
| "loss": 3.4267, |
| "step": 181000 |
| }, |
| { |
| "epoch": 2.578995044492993, |
| "grad_norm": 15.470376014709473, |
| "learning_rate": 7.81701444622793e-06, |
| "loss": 3.4481, |
| "step": 181500 |
| }, |
| { |
| "epoch": 2.5860997140370507, |
| "grad_norm": 21.1016845703125, |
| "learning_rate": 7.68570902297187e-06, |
| "loss": 3.4727, |
| "step": 182000 |
| }, |
| { |
| "epoch": 2.5860997140370507, |
| "eval_runtime": 954.808, |
| "eval_samples_per_second": 147.414, |
| "eval_steps_per_second": 36.853, |
| "step": 182000 |
| }, |
| { |
| "epoch": 2.5932043835811087, |
| "grad_norm": 21.787841796875, |
| "learning_rate": 7.554140462595059e-06, |
| "loss": 3.4596, |
| "step": 182500 |
| }, |
| { |
| "epoch": 2.6003090531251667, |
| "grad_norm": 17.705780029296875, |
| "learning_rate": 7.422571902218246e-06, |
| "loss": 3.4334, |
| "step": 183000 |
| }, |
| { |
| "epoch": 2.607413722669224, |
| "grad_norm": 24.78368377685547, |
| "learning_rate": 7.2910033418414335e-06, |
| "loss": 3.4673, |
| "step": 183500 |
| }, |
| { |
| "epoch": 2.614518392213282, |
| "grad_norm": 17.67124366760254, |
| "learning_rate": 7.159697918585376e-06, |
| "loss": 3.398, |
| "step": 184000 |
| }, |
| { |
| "epoch": 2.614518392213282, |
| "eval_runtime": 950.0509, |
| "eval_samples_per_second": 148.152, |
| "eval_steps_per_second": 37.038, |
| "step": 184000 |
| }, |
| { |
| "epoch": 2.62162306175734, |
| "grad_norm": 15.1649808883667, |
| "learning_rate": 7.028129358208563e-06, |
| "loss": 3.4236, |
| "step": 184500 |
| }, |
| { |
| "epoch": 2.6287277313013977, |
| "grad_norm": 18.517230987548828, |
| "learning_rate": 6.89656079783175e-06, |
| "loss": 3.3925, |
| "step": 185000 |
| }, |
| { |
| "epoch": 2.6358324008454557, |
| "grad_norm": 15.220212936401367, |
| "learning_rate": 6.764992237454938e-06, |
| "loss": 3.3708, |
| "step": 185500 |
| }, |
| { |
| "epoch": 2.6429370703895136, |
| "grad_norm": 16.052600860595703, |
| "learning_rate": 6.6336868141988795e-06, |
| "loss": 3.4095, |
| "step": 186000 |
| }, |
| { |
| "epoch": 2.6429370703895136, |
| "eval_runtime": 902.4427, |
| "eval_samples_per_second": 155.968, |
| "eval_steps_per_second": 38.992, |
| "step": 186000 |
| }, |
| { |
| "epoch": 2.650041739933571, |
| "grad_norm": 17.24014663696289, |
| "learning_rate": 6.50238139094282e-06, |
| "loss": 3.3694, |
| "step": 186500 |
| }, |
| { |
| "epoch": 2.657146409477629, |
| "grad_norm": 15.43420696258545, |
| "learning_rate": 6.370812830566009e-06, |
| "loss": 3.4659, |
| "step": 187000 |
| }, |
| { |
| "epoch": 2.664251079021687, |
| "grad_norm": 19.86100959777832, |
| "learning_rate": 6.239507407309949e-06, |
| "loss": 3.4001, |
| "step": 187500 |
| }, |
| { |
| "epoch": 2.6713557485657446, |
| "grad_norm": 15.499372482299805, |
| "learning_rate": 6.107938846933137e-06, |
| "loss": 3.4005, |
| "step": 188000 |
| }, |
| { |
| "epoch": 2.6713557485657446, |
| "eval_runtime": 935.9412, |
| "eval_samples_per_second": 150.386, |
| "eval_steps_per_second": 37.596, |
| "step": 188000 |
| }, |
| { |
| "epoch": 2.6784604181098026, |
| "grad_norm": 18.945688247680664, |
| "learning_rate": 5.976370286556325e-06, |
| "loss": 3.4568, |
| "step": 188500 |
| }, |
| { |
| "epoch": 2.6855650876538606, |
| "grad_norm": 23.000934600830078, |
| "learning_rate": 5.844801726179512e-06, |
| "loss": 3.4078, |
| "step": 189000 |
| }, |
| { |
| "epoch": 2.692669757197918, |
| "grad_norm": 16.832963943481445, |
| "learning_rate": 5.7132331658027e-06, |
| "loss": 3.4144, |
| "step": 189500 |
| }, |
| { |
| "epoch": 2.699774426741976, |
| "grad_norm": 21.619384765625, |
| "learning_rate": 5.581664605425888e-06, |
| "loss": 3.4234, |
| "step": 190000 |
| }, |
| { |
| "epoch": 2.699774426741976, |
| "eval_runtime": 977.3575, |
| "eval_samples_per_second": 144.013, |
| "eval_steps_per_second": 36.003, |
| "step": 190000 |
| }, |
| { |
| "epoch": 2.706879096286034, |
| "grad_norm": 34.39583206176758, |
| "learning_rate": 5.450096045049075e-06, |
| "loss": 3.4315, |
| "step": 190500 |
| }, |
| { |
| "epoch": 2.7139837658300916, |
| "grad_norm": 18.354318618774414, |
| "learning_rate": 5.318527484672263e-06, |
| "loss": 3.4279, |
| "step": 191000 |
| }, |
| { |
| "epoch": 2.7210884353741496, |
| "grad_norm": 24.28253173828125, |
| "learning_rate": 5.18695892429545e-06, |
| "loss": 3.4595, |
| "step": 191500 |
| }, |
| { |
| "epoch": 2.7281931049182075, |
| "grad_norm": 23.663331985473633, |
| "learning_rate": 5.055390363918638e-06, |
| "loss": 3.3998, |
| "step": 192000 |
| }, |
| { |
| "epoch": 2.7281931049182075, |
| "eval_runtime": 916.9506, |
| "eval_samples_per_second": 153.5, |
| "eval_steps_per_second": 38.375, |
| "step": 192000 |
| }, |
| { |
| "epoch": 2.735297774462265, |
| "grad_norm": 22.13831329345703, |
| "learning_rate": 4.923821803541826e-06, |
| "loss": 3.4417, |
| "step": 192500 |
| }, |
| { |
| "epoch": 2.742402444006323, |
| "grad_norm": 19.08433723449707, |
| "learning_rate": 4.792253243165014e-06, |
| "loss": 3.4359, |
| "step": 193000 |
| }, |
| { |
| "epoch": 2.749507113550381, |
| "grad_norm": 14.219480514526367, |
| "learning_rate": 4.660947819908955e-06, |
| "loss": 3.3982, |
| "step": 193500 |
| }, |
| { |
| "epoch": 2.7566117830944386, |
| "grad_norm": 16.88738250732422, |
| "learning_rate": 4.529379259532143e-06, |
| "loss": 3.3999, |
| "step": 194000 |
| }, |
| { |
| "epoch": 2.7566117830944386, |
| "eval_runtime": 929.4031, |
| "eval_samples_per_second": 151.443, |
| "eval_steps_per_second": 37.861, |
| "step": 194000 |
| }, |
| { |
| "epoch": 2.7637164526384965, |
| "grad_norm": 24.45867347717285, |
| "learning_rate": 4.39781069915533e-06, |
| "loss": 3.4601, |
| "step": 194500 |
| }, |
| { |
| "epoch": 2.7708211221825545, |
| "grad_norm": 18.18239402770996, |
| "learning_rate": 4.266242138778518e-06, |
| "loss": 3.4752, |
| "step": 195000 |
| }, |
| { |
| "epoch": 2.7779257917266125, |
| "grad_norm": 25.724437713623047, |
| "learning_rate": 4.134936715522459e-06, |
| "loss": 3.4426, |
| "step": 195500 |
| }, |
| { |
| "epoch": 2.78503046127067, |
| "grad_norm": 22.24292755126953, |
| "learning_rate": 4.003368155145647e-06, |
| "loss": 3.422, |
| "step": 196000 |
| }, |
| { |
| "epoch": 2.78503046127067, |
| "eval_runtime": 953.0692, |
| "eval_samples_per_second": 147.683, |
| "eval_steps_per_second": 36.921, |
| "step": 196000 |
| }, |
| { |
| "epoch": 2.792135130814728, |
| "grad_norm": 14.148300170898438, |
| "learning_rate": 3.871799594768835e-06, |
| "loss": 3.4318, |
| "step": 196500 |
| }, |
| { |
| "epoch": 2.799239800358786, |
| "grad_norm": 20.823726654052734, |
| "learning_rate": 3.740231034392022e-06, |
| "loss": 3.4268, |
| "step": 197000 |
| }, |
| { |
| "epoch": 2.806344469902844, |
| "grad_norm": 17.67780876159668, |
| "learning_rate": 3.6089256111359633e-06, |
| "loss": 3.4177, |
| "step": 197500 |
| }, |
| { |
| "epoch": 2.8134491394469014, |
| "grad_norm": 15.137374877929688, |
| "learning_rate": 3.477357050759151e-06, |
| "loss": 3.4456, |
| "step": 198000 |
| }, |
| { |
| "epoch": 2.8134491394469014, |
| "eval_runtime": 937.5641, |
| "eval_samples_per_second": 150.125, |
| "eval_steps_per_second": 37.531, |
| "step": 198000 |
| }, |
| { |
| "epoch": 2.8205538089909594, |
| "grad_norm": 19.98059844970703, |
| "learning_rate": 3.3457884903823386e-06, |
| "loss": 3.4413, |
| "step": 198500 |
| }, |
| { |
| "epoch": 2.8276584785350174, |
| "grad_norm": 21.89630126953125, |
| "learning_rate": 3.2144830671262795e-06, |
| "loss": 3.4022, |
| "step": 199000 |
| }, |
| { |
| "epoch": 2.834763148079075, |
| "grad_norm": 14.519454956054688, |
| "learning_rate": 3.0829145067494675e-06, |
| "loss": 3.4555, |
| "step": 199500 |
| }, |
| { |
| "epoch": 2.841867817623133, |
| "grad_norm": 16.703575134277344, |
| "learning_rate": 2.951345946372655e-06, |
| "loss": 3.4092, |
| "step": 200000 |
| }, |
| { |
| "epoch": 2.841867817623133, |
| "eval_runtime": 953.5002, |
| "eval_samples_per_second": 147.616, |
| "eval_steps_per_second": 36.904, |
| "step": 200000 |
| }, |
| { |
| "epoch": 2.848972487167191, |
| "grad_norm": 27.454334259033203, |
| "learning_rate": 2.8197773859958423e-06, |
| "loss": 3.418, |
| "step": 200500 |
| }, |
| { |
| "epoch": 2.8560771567112484, |
| "grad_norm": 23.255996704101562, |
| "learning_rate": 2.68820882561903e-06, |
| "loss": 3.3987, |
| "step": 201000 |
| }, |
| { |
| "epoch": 2.8631818262553064, |
| "grad_norm": 15.948774337768555, |
| "learning_rate": 2.556640265242218e-06, |
| "loss": 3.4912, |
| "step": 201500 |
| }, |
| { |
| "epoch": 2.8702864957993643, |
| "grad_norm": 20.497947692871094, |
| "learning_rate": 2.4250717048654057e-06, |
| "loss": 3.448, |
| "step": 202000 |
| }, |
| { |
| "epoch": 2.8702864957993643, |
| "eval_runtime": 911.0819, |
| "eval_samples_per_second": 154.489, |
| "eval_steps_per_second": 38.622, |
| "step": 202000 |
| }, |
| { |
| "epoch": 2.877391165343422, |
| "grad_norm": 15.809967994689941, |
| "learning_rate": 2.2937662816093465e-06, |
| "loss": 3.4309, |
| "step": 202500 |
| }, |
| { |
| "epoch": 2.88449583488748, |
| "grad_norm": 18.75983238220215, |
| "learning_rate": 2.162197721232534e-06, |
| "loss": 3.4832, |
| "step": 203000 |
| }, |
| { |
| "epoch": 2.891600504431538, |
| "grad_norm": 18.350236892700195, |
| "learning_rate": 2.030629160855722e-06, |
| "loss": 3.4685, |
| "step": 203500 |
| }, |
| { |
| "epoch": 2.8987051739755954, |
| "grad_norm": 22.548627853393555, |
| "learning_rate": 1.8990606004789096e-06, |
| "loss": 3.4514, |
| "step": 204000 |
| }, |
| { |
| "epoch": 2.8987051739755954, |
| "eval_runtime": 907.2069, |
| "eval_samples_per_second": 155.149, |
| "eval_steps_per_second": 38.787, |
| "step": 204000 |
| }, |
| { |
| "epoch": 2.9058098435196533, |
| "grad_norm": 22.571067810058594, |
| "learning_rate": 1.7674920401020975e-06, |
| "loss": 3.383, |
| "step": 204500 |
| }, |
| { |
| "epoch": 2.9129145130637113, |
| "grad_norm": 20.21802520751953, |
| "learning_rate": 1.635923479725285e-06, |
| "loss": 3.4761, |
| "step": 205000 |
| }, |
| { |
| "epoch": 2.920019182607769, |
| "grad_norm": 22.196605682373047, |
| "learning_rate": 1.5043549193484725e-06, |
| "loss": 3.4293, |
| "step": 205500 |
| }, |
| { |
| "epoch": 2.927123852151827, |
| "grad_norm": 20.584014892578125, |
| "learning_rate": 1.3727863589716602e-06, |
| "loss": 3.4388, |
| "step": 206000 |
| }, |
| { |
| "epoch": 2.927123852151827, |
| "eval_runtime": 935.9699, |
| "eval_samples_per_second": 150.381, |
| "eval_steps_per_second": 37.595, |
| "step": 206000 |
| }, |
| { |
| "epoch": 2.9342285216958848, |
| "grad_norm": 21.364656448364258, |
| "learning_rate": 1.2414809357156015e-06, |
| "loss": 3.3946, |
| "step": 206500 |
| }, |
| { |
| "epoch": 2.9413331912399423, |
| "grad_norm": 19.831722259521484, |
| "learning_rate": 1.109912375338789e-06, |
| "loss": 3.389, |
| "step": 207000 |
| }, |
| { |
| "epoch": 2.9484378607840003, |
| "grad_norm": 61.1855583190918, |
| "learning_rate": 9.783438149619767e-07, |
| "loss": 3.4133, |
| "step": 207500 |
| }, |
| { |
| "epoch": 2.9555425303280582, |
| "grad_norm": 22.293066024780273, |
| "learning_rate": 8.467752545851645e-07, |
| "loss": 3.4065, |
| "step": 208000 |
| }, |
| { |
| "epoch": 2.9555425303280582, |
| "eval_runtime": 952.4375, |
| "eval_samples_per_second": 147.781, |
| "eval_steps_per_second": 36.945, |
| "step": 208000 |
| }, |
| { |
| "epoch": 2.9626471998721158, |
| "grad_norm": 22.37586212158203, |
| "learning_rate": 7.152066942083521e-07, |
| "loss": 3.4225, |
| "step": 208500 |
| }, |
| { |
| "epoch": 2.9697518694161738, |
| "grad_norm": 22.698583602905273, |
| "learning_rate": 5.839012709522933e-07, |
| "loss": 3.4399, |
| "step": 209000 |
| }, |
| { |
| "epoch": 2.9768565389602317, |
| "grad_norm": 19.18611717224121, |
| "learning_rate": 4.523327105754809e-07, |
| "loss": 3.3873, |
| "step": 209500 |
| }, |
| { |
| "epoch": 2.9839612085042893, |
| "grad_norm": 20.257211685180664, |
| "learning_rate": 3.2076415019866855e-07, |
| "loss": 3.4055, |
| "step": 210000 |
| }, |
| { |
| "epoch": 2.9839612085042893, |
| "eval_runtime": 935.2702, |
| "eval_samples_per_second": 150.493, |
| "eval_steps_per_second": 37.623, |
| "step": 210000 |
| }, |
| { |
| "epoch": 2.9910658780483472, |
| "grad_norm": 16.924406051635742, |
| "learning_rate": 1.894587269426098e-07, |
| "loss": 3.4763, |
| "step": 210500 |
| }, |
| { |
| "epoch": 2.998170547592405, |
| "grad_norm": 19.95027732849121, |
| "learning_rate": 5.789016656579743e-08, |
| "loss": 3.4099, |
| "step": 211000 |
| } |
| ], |
| "logging_steps": 500, |
| "max_steps": 211128, |
| "num_input_tokens_seen": 0, |
| "num_train_epochs": 3, |
| "save_steps": 2000, |
| "stateful_callbacks": { |
| "TrainerControl": { |
| "args": { |
| "should_epoch_stop": false, |
| "should_evaluate": false, |
| "should_log": false, |
| "should_save": true, |
| "should_training_stop": true |
| }, |
| "attributes": {} |
| } |
| }, |
| "total_flos": 4.608136311300816e+16, |
| "train_batch_size": 4, |
| "trial_name": null, |
| "trial_params": null |
| } |
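
The dump above is a standard Hugging Face Transformers `trainer_state.json`: `log_history` interleaves training records (carrying `loss`, `learning_rate`, `grad_norm`) with evaluation records (carrying only runtime throughput here, no `eval_loss`). A minimal sketch for inspecting it, assuming the file is saved locally as `trainer_state.json` and that `matplotlib` is installed (both assumptions, not part of the dump):

```python
import json

import matplotlib.pyplot as plt

# Load the trainer state dump (the path is an assumption; point it at your checkpoint directory).
with open("trainer_state.json") as f:
    state = json.load(f)

# Training records carry a "loss" key; the eval records in this dump only carry runtime stats.
train_logs = [entry for entry in state["log_history"] if "loss" in entry]

steps = [entry["step"] for entry in train_logs]
losses = [entry["loss"] for entry in train_logs]
lrs = [entry["learning_rate"] for entry in train_logs]

# Plot loss and learning rate against the global step on a shared x-axis.
fig, (ax_loss, ax_lr) = plt.subplots(2, 1, sharex=True)
ax_loss.plot(steps, losses)
ax_loss.set_ylabel("training loss")
ax_lr.plot(steps, lrs)
ax_lr.set_ylabel("learning rate")
ax_lr.set_xlabel("step")
fig.suptitle(f"{state['global_step']} steps over {state['num_train_epochs']} epochs")
plt.show()
```

On this log the plot would show the learning rate decaying from roughly 2.2e-5 toward 5.8e-8 across the later steps while the training loss plateaus around 3.4.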