Datasets:
Invalid JSON: Unexpected token 'I', ..."ad_norm": Infinity,
"... is not valid JSON
| { | |
| "best_metric": null, | |
| "best_model_checkpoint": null, | |
| "epoch": 2.998954547717429, | |
| "eval_steps": 500, | |
| "global_step": 7530, | |
| "is_hyper_param_search": false, | |
| "is_local_process_zero": true, | |
| "is_world_process_zero": true, | |
| "log_history": [ | |
| { | |
| "epoch": 0.0019913376810872705, | |
| "grad_norm": 4.0654296875, | |
| "learning_rate": 0.0002999996736294919, | |
| "loss": 3.4521, | |
| "step": 5 | |
| }, | |
| { | |
| "epoch": 0.003982675362174541, | |
| "grad_norm": 3.3652706146240234, | |
| "learning_rate": 0.00029999894256041275, | |
| "loss": 2.5577, | |
| "step": 10 | |
| }, | |
| { | |
| "epoch": 0.005974013043261811, | |
| "grad_norm": 6.442378997802734, | |
| "learning_rate": 0.0002999977937399738, | |
| "loss": 2.6864, | |
| "step": 15 | |
| }, | |
| { | |
| "epoch": 0.007965350724349082, | |
| "grad_norm": 3.433056592941284, | |
| "learning_rate": 0.0002999957702565602, | |
| "loss": 2.6988, | |
| "step": 20 | |
| }, | |
| { | |
| "epoch": 0.009956688405436351, | |
| "grad_norm": 3.1151390075683594, | |
| "learning_rate": 0.0002999930940505367, | |
| "loss": 2.6097, | |
| "step": 25 | |
| }, | |
| { | |
| "epoch": 0.011948026086523622, | |
| "grad_norm": 3.0310587882995605, | |
| "learning_rate": 0.00029998976513354893, | |
| "loss": 2.5627, | |
| "step": 30 | |
| }, | |
| { | |
| "epoch": 0.013939363767610893, | |
| "grad_norm": 2.9382193088531494, | |
| "learning_rate": 0.00029998578352008324, | |
| "loss": 2.6011, | |
| "step": 35 | |
| }, | |
| { | |
| "epoch": 0.015930701448698164, | |
| "grad_norm": 3.8187520503997803, | |
| "learning_rate": 0.00029998114922746583, | |
| "loss": 2.5422, | |
| "step": 40 | |
| }, | |
| { | |
| "epoch": 0.017922039129785433, | |
| "grad_norm": 3.3968746662139893, | |
| "learning_rate": 0.00029997586227586346, | |
| "loss": 2.5569, | |
| "step": 45 | |
| }, | |
| { | |
| "epoch": 0.019913376810872702, | |
| "grad_norm": 5.447213649749756, | |
| "learning_rate": 0.00029996992268828285, | |
| "loss": 2.5303, | |
| "step": 50 | |
| }, | |
| { | |
| "epoch": 0.021904714491959975, | |
| "grad_norm": 3.1602864265441895, | |
| "learning_rate": 0.0002999633304905707, | |
| "loss": 2.5741, | |
| "step": 55 | |
| }, | |
| { | |
| "epoch": 0.023896052173047244, | |
| "grad_norm": 3.8679628372192383, | |
| "learning_rate": 0.0002999560857114137, | |
| "loss": 2.5897, | |
| "step": 60 | |
| }, | |
| { | |
| "epoch": 0.025887389854134513, | |
| "grad_norm": 3.5443027019500732, | |
| "learning_rate": 0.00029994818838233823, | |
| "loss": 2.6875, | |
| "step": 65 | |
| }, | |
| { | |
| "epoch": 0.027878727535221786, | |
| "grad_norm": 3.584503412246704, | |
| "learning_rate": 0.00029993963853771043, | |
| "loss": 2.6568, | |
| "step": 70 | |
| }, | |
| { | |
| "epoch": 0.029870065216309055, | |
| "grad_norm": 3.158475399017334, | |
| "learning_rate": 0.00029993043621473584, | |
| "loss": 2.6262, | |
| "step": 75 | |
| }, | |
| { | |
| "epoch": 0.03186140289739633, | |
| "grad_norm": 4.361789703369141, | |
| "learning_rate": 0.0002999205814534593, | |
| "loss": 2.5888, | |
| "step": 80 | |
| }, | |
| { | |
| "epoch": 0.033852740578483594, | |
| "grad_norm": 3.4383697509765625, | |
| "learning_rate": 0.00029991007429676494, | |
| "loss": 2.5537, | |
| "step": 85 | |
| }, | |
| { | |
| "epoch": 0.035844078259570866, | |
| "grad_norm": 3.1702070236206055, | |
| "learning_rate": 0.0002998989147903757, | |
| "loss": 2.4183, | |
| "step": 90 | |
| }, | |
| { | |
| "epoch": 0.03783541594065814, | |
| "grad_norm": 4.857996940612793, | |
| "learning_rate": 0.00029988710298285335, | |
| "loss": 2.572, | |
| "step": 95 | |
| }, | |
| { | |
| "epoch": 0.039826753621745405, | |
| "grad_norm": 3.1501991748809814, | |
| "learning_rate": 0.0002998746389255983, | |
| "loss": 2.4835, | |
| "step": 100 | |
| }, | |
| { | |
| "epoch": 0.04181809130283268, | |
| "grad_norm": 3.975405693054199, | |
| "learning_rate": 0.0002998615226728493, | |
| "loss": 2.579, | |
| "step": 105 | |
| }, | |
| { | |
| "epoch": 0.04380942898391995, | |
| "grad_norm": 3.191920518875122, | |
| "learning_rate": 0.0002998477542816829, | |
| "loss": 2.5179, | |
| "step": 110 | |
| }, | |
| { | |
| "epoch": 0.045800766665007216, | |
| "grad_norm": 3.55888295173645, | |
| "learning_rate": 0.0002998333338120139, | |
| "loss": 2.6215, | |
| "step": 115 | |
| }, | |
| { | |
| "epoch": 0.04779210434609449, | |
| "grad_norm": 3.593543767929077, | |
| "learning_rate": 0.00029981826132659446, | |
| "loss": 2.4763, | |
| "step": 120 | |
| }, | |
| { | |
| "epoch": 0.04978344202718176, | |
| "grad_norm": 4.385670185089111, | |
| "learning_rate": 0.00029980253689101407, | |
| "loss": 2.538, | |
| "step": 125 | |
| }, | |
| { | |
| "epoch": 0.05177477970826903, | |
| "grad_norm": 3.4999585151672363, | |
| "learning_rate": 0.0002997861605736994, | |
| "loss": 2.5162, | |
| "step": 130 | |
| }, | |
| { | |
| "epoch": 0.0537661173893563, | |
| "grad_norm": 3.657758951187134, | |
| "learning_rate": 0.00029976913244591363, | |
| "loss": 2.5734, | |
| "step": 135 | |
| }, | |
| { | |
| "epoch": 0.05575745507044357, | |
| "grad_norm": 4.323859214782715, | |
| "learning_rate": 0.00029975145258175653, | |
| "loss": 2.6802, | |
| "step": 140 | |
| }, | |
| { | |
| "epoch": 0.05774879275153084, | |
| "grad_norm": 3.3262791633605957, | |
| "learning_rate": 0.0002997331210581639, | |
| "loss": 2.5822, | |
| "step": 145 | |
| }, | |
| { | |
| "epoch": 0.05974013043261811, | |
| "grad_norm": 3.809830665588379, | |
| "learning_rate": 0.00029971413795490734, | |
| "loss": 2.5107, | |
| "step": 150 | |
| }, | |
| { | |
| "epoch": 0.06173146811370538, | |
| "grad_norm": 3.283738613128662, | |
| "learning_rate": 0.0002996945033545939, | |
| "loss": 2.6337, | |
| "step": 155 | |
| }, | |
| { | |
| "epoch": 0.06372280579479266, | |
| "grad_norm": 3.442885398864746, | |
| "learning_rate": 0.0002996742173426655, | |
| "loss": 2.4263, | |
| "step": 160 | |
| }, | |
| { | |
| "epoch": 0.06571414347587992, | |
| "grad_norm": 3.4212663173675537, | |
| "learning_rate": 0.000299653280007399, | |
| "loss": 2.5735, | |
| "step": 165 | |
| }, | |
| { | |
| "epoch": 0.06770548115696719, | |
| "grad_norm": 3.8556458950042725, | |
| "learning_rate": 0.0002996316914399054, | |
| "loss": 2.5495, | |
| "step": 170 | |
| }, | |
| { | |
| "epoch": 0.06969681883805447, | |
| "grad_norm": 4.062867164611816, | |
| "learning_rate": 0.0002996094517341297, | |
| "loss": 2.4665, | |
| "step": 175 | |
| }, | |
| { | |
| "epoch": 0.07168815651914173, | |
| "grad_norm": 4.151358127593994, | |
| "learning_rate": 0.00029958656098685036, | |
| "loss": 2.5463, | |
| "step": 180 | |
| }, | |
| { | |
| "epoch": 0.073679494200229, | |
| "grad_norm": 2.917109966278076, | |
| "learning_rate": 0.00029956301929767883, | |
| "loss": 2.5684, | |
| "step": 185 | |
| }, | |
| { | |
| "epoch": 0.07567083188131628, | |
| "grad_norm": 4.13242769241333, | |
| "learning_rate": 0.0002995388267690593, | |
| "loss": 2.5404, | |
| "step": 190 | |
| }, | |
| { | |
| "epoch": 0.07766216956240354, | |
| "grad_norm": 3.765427350997925, | |
| "learning_rate": 0.00029951398350626824, | |
| "loss": 2.4203, | |
| "step": 195 | |
| }, | |
| { | |
| "epoch": 0.07965350724349081, | |
| "grad_norm": 4.968576908111572, | |
| "learning_rate": 0.0002994884896174137, | |
| "loss": 2.5068, | |
| "step": 200 | |
| }, | |
| { | |
| "epoch": 0.08164484492457809, | |
| "grad_norm": 3.5136470794677734, | |
| "learning_rate": 0.000299462345213435, | |
| "loss": 2.5755, | |
| "step": 205 | |
| }, | |
| { | |
| "epoch": 0.08363618260566535, | |
| "grad_norm": 3.77406907081604, | |
| "learning_rate": 0.0002994355504081024, | |
| "loss": 2.6048, | |
| "step": 210 | |
| }, | |
| { | |
| "epoch": 0.08562752028675262, | |
| "grad_norm": 7.271650314331055, | |
| "learning_rate": 0.00029940810531801633, | |
| "loss": 2.6364, | |
| "step": 215 | |
| }, | |
| { | |
| "epoch": 0.0876188579678399, | |
| "grad_norm": 4.087569713592529, | |
| "learning_rate": 0.00029938001006260697, | |
| "loss": 2.6253, | |
| "step": 220 | |
| }, | |
| { | |
| "epoch": 0.08961019564892717, | |
| "grad_norm": 4.92678689956665, | |
| "learning_rate": 0.00029935126476413396, | |
| "loss": 2.4876, | |
| "step": 225 | |
| }, | |
| { | |
| "epoch": 0.09160153333001443, | |
| "grad_norm": 4.242873668670654, | |
| "learning_rate": 0.00029932186954768534, | |
| "loss": 2.5986, | |
| "step": 230 | |
| }, | |
| { | |
| "epoch": 0.09359287101110171, | |
| "grad_norm": 3.656292676925659, | |
| "learning_rate": 0.0002992918245411777, | |
| "loss": 2.47, | |
| "step": 235 | |
| }, | |
| { | |
| "epoch": 0.09558420869218898, | |
| "grad_norm": 5.079676151275635, | |
| "learning_rate": 0.000299261129875355, | |
| "loss": 2.5446, | |
| "step": 240 | |
| }, | |
| { | |
| "epoch": 0.09757554637327624, | |
| "grad_norm": 4.043215751647949, | |
| "learning_rate": 0.0002992297856837884, | |
| "loss": 2.4366, | |
| "step": 245 | |
| }, | |
| { | |
| "epoch": 0.09956688405436352, | |
| "grad_norm": 3.710261106491089, | |
| "learning_rate": 0.00029919779210287555, | |
| "loss": 2.627, | |
| "step": 250 | |
| }, | |
| { | |
| "epoch": 0.10155822173545079, | |
| "grad_norm": 6.257110595703125, | |
| "learning_rate": 0.0002991651492718398, | |
| "loss": 2.5499, | |
| "step": 255 | |
| }, | |
| { | |
| "epoch": 0.10354955941653805, | |
| "grad_norm": 3.418558120727539, | |
| "learning_rate": 0.00029913185733273, | |
| "loss": 2.5487, | |
| "step": 260 | |
| }, | |
| { | |
| "epoch": 0.10554089709762533, | |
| "grad_norm": 4.4878740310668945, | |
| "learning_rate": 0.0002990979164304195, | |
| "loss": 2.6916, | |
| "step": 265 | |
| }, | |
| { | |
| "epoch": 0.1075322347787126, | |
| "grad_norm": 4.143045425415039, | |
| "learning_rate": 0.00029906332671260594, | |
| "loss": 2.5951, | |
| "step": 270 | |
| }, | |
| { | |
| "epoch": 0.10952357245979986, | |
| "grad_norm": 4.116326332092285, | |
| "learning_rate": 0.00029902808832981, | |
| "loss": 2.5271, | |
| "step": 275 | |
| }, | |
| { | |
| "epoch": 0.11151491014088714, | |
| "grad_norm": 3.435853958129883, | |
| "learning_rate": 0.00029899220143537526, | |
| "loss": 2.4868, | |
| "step": 280 | |
| }, | |
| { | |
| "epoch": 0.11350624782197441, | |
| "grad_norm": 4.011627674102783, | |
| "learning_rate": 0.0002989556661854675, | |
| "loss": 2.5136, | |
| "step": 285 | |
| }, | |
| { | |
| "epoch": 0.11549758550306168, | |
| "grad_norm": 4.947673320770264, | |
| "learning_rate": 0.0002989184827390737, | |
| "loss": 2.4441, | |
| "step": 290 | |
| }, | |
| { | |
| "epoch": 0.11748892318414896, | |
| "grad_norm": 3.991987466812134, | |
| "learning_rate": 0.00029888065125800153, | |
| "loss": 2.4815, | |
| "step": 295 | |
| }, | |
| { | |
| "epoch": 0.11948026086523622, | |
| "grad_norm": 5.1937360763549805, | |
| "learning_rate": 0.0002988421719068788, | |
| "loss": 2.6206, | |
| "step": 300 | |
| }, | |
| { | |
| "epoch": 0.12147159854632349, | |
| "grad_norm": 4.464507102966309, | |
| "learning_rate": 0.00029880304485315254, | |
| "loss": 2.5436, | |
| "step": 305 | |
| }, | |
| { | |
| "epoch": 0.12346293622741077, | |
| "grad_norm": 22.474430084228516, | |
| "learning_rate": 0.00029876327026708824, | |
| "loss": 2.6138, | |
| "step": 310 | |
| }, | |
| { | |
| "epoch": 0.12545427390849803, | |
| "grad_norm": 4.142147064208984, | |
| "learning_rate": 0.00029872284832176924, | |
| "loss": 2.5605, | |
| "step": 315 | |
| }, | |
| { | |
| "epoch": 0.1274456115895853, | |
| "grad_norm": 4.879560947418213, | |
| "learning_rate": 0.00029868177919309603, | |
| "loss": 2.609, | |
| "step": 320 | |
| }, | |
| { | |
| "epoch": 0.12943694927067256, | |
| "grad_norm": 3.78598952293396, | |
| "learning_rate": 0.00029864006305978523, | |
| "loss": 2.5863, | |
| "step": 325 | |
| }, | |
| { | |
| "epoch": 0.13142828695175984, | |
| "grad_norm": 5.507261753082275, | |
| "learning_rate": 0.00029859770010336905, | |
| "loss": 2.6525, | |
| "step": 330 | |
| }, | |
| { | |
| "epoch": 0.13341962463284712, | |
| "grad_norm": 3.913325786590576, | |
| "learning_rate": 0.00029855469050819454, | |
| "loss": 2.5584, | |
| "step": 335 | |
| }, | |
| { | |
| "epoch": 0.13541096231393437, | |
| "grad_norm": 4.263314723968506, | |
| "learning_rate": 0.00029851981737782636, | |
| "loss": 2.5515, | |
| "step": 340 | |
| }, | |
| { | |
| "epoch": 0.13740229999502165, | |
| "grad_norm": 5.047379016876221, | |
| "learning_rate": 0.0002984756443064224, | |
| "loss": 2.5811, | |
| "step": 345 | |
| }, | |
| { | |
| "epoch": 0.13939363767610893, | |
| "grad_norm": 3.8118085861206055, | |
| "learning_rate": 0.00029843082512739883, | |
| "loss": 2.647, | |
| "step": 350 | |
| }, | |
| { | |
| "epoch": 0.14138497535719619, | |
| "grad_norm": 4.154444217681885, | |
| "learning_rate": 0.0002983853600357912, | |
| "loss": 2.4557, | |
| "step": 355 | |
| }, | |
| { | |
| "epoch": 0.14337631303828346, | |
| "grad_norm": 3.792567253112793, | |
| "learning_rate": 0.0002983392492294455, | |
| "loss": 2.4481, | |
| "step": 360 | |
| }, | |
| { | |
| "epoch": 0.14536765071937074, | |
| "grad_norm": 4.538722038269043, | |
| "learning_rate": 0.00029829249290901804, | |
| "loss": 2.4257, | |
| "step": 365 | |
| }, | |
| { | |
| "epoch": 0.147358988400458, | |
| "grad_norm": 3.793282985687256, | |
| "learning_rate": 0.00029824509127797383, | |
| "loss": 2.5358, | |
| "step": 370 | |
| }, | |
| { | |
| "epoch": 0.14935032608154528, | |
| "grad_norm": 4.034732818603516, | |
| "learning_rate": 0.0002981970445425862, | |
| "loss": 2.6415, | |
| "step": 375 | |
| }, | |
| { | |
| "epoch": 0.15134166376263256, | |
| "grad_norm": 4.2402520179748535, | |
| "learning_rate": 0.0002981483529119355, | |
| "loss": 2.5689, | |
| "step": 380 | |
| }, | |
| { | |
| "epoch": 0.1533330014437198, | |
| "grad_norm": 3.5456957817077637, | |
| "learning_rate": 0.0002980990165979088, | |
| "loss": 2.4177, | |
| "step": 385 | |
| }, | |
| { | |
| "epoch": 0.1553243391248071, | |
| "grad_norm": 4.852187156677246, | |
| "learning_rate": 0.0002980490358151981, | |
| "loss": 2.5833, | |
| "step": 390 | |
| }, | |
| { | |
| "epoch": 0.15731567680589437, | |
| "grad_norm": 3.8212156295776367, | |
| "learning_rate": 0.0002979984107813002, | |
| "loss": 2.439, | |
| "step": 395 | |
| }, | |
| { | |
| "epoch": 0.15930701448698162, | |
| "grad_norm": 5.308228015899658, | |
| "learning_rate": 0.0002979471417165154, | |
| "loss": 2.6805, | |
| "step": 400 | |
| }, | |
| { | |
| "epoch": 0.1612983521680689, | |
| "grad_norm": 3.9180448055267334, | |
| "learning_rate": 0.00029789522884394646, | |
| "loss": 2.8233, | |
| "step": 405 | |
| }, | |
| { | |
| "epoch": 0.16328968984915618, | |
| "grad_norm": 3.9016313552856445, | |
| "learning_rate": 0.0002978426723894978, | |
| "loss": 2.5363, | |
| "step": 410 | |
| }, | |
| { | |
| "epoch": 0.16528102753024343, | |
| "grad_norm": 5.2438225746154785, | |
| "learning_rate": 0.00029778947258187437, | |
| "loss": 2.5845, | |
| "step": 415 | |
| }, | |
| { | |
| "epoch": 0.1672723652113307, | |
| "grad_norm": 4.8523664474487305, | |
| "learning_rate": 0.0002977356296525809, | |
| "loss": 2.5257, | |
| "step": 420 | |
| }, | |
| { | |
| "epoch": 0.169263702892418, | |
| "grad_norm": 5.205035209655762, | |
| "learning_rate": 0.00029768114383592066, | |
| "loss": 2.5175, | |
| "step": 425 | |
| }, | |
| { | |
| "epoch": 0.17125504057350524, | |
| "grad_norm": 4.0942606925964355, | |
| "learning_rate": 0.00029762601536899443, | |
| "loss": 2.5718, | |
| "step": 430 | |
| }, | |
| { | |
| "epoch": 0.17324637825459252, | |
| "grad_norm": 3.762770652770996, | |
| "learning_rate": 0.0002975702444916997, | |
| "loss": 2.5675, | |
| "step": 435 | |
| }, | |
| { | |
| "epoch": 0.1752377159356798, | |
| "grad_norm": 4.622000694274902, | |
| "learning_rate": 0.0002975138314467293, | |
| "loss": 2.5359, | |
| "step": 440 | |
| }, | |
| { | |
| "epoch": 0.17722905361676705, | |
| "grad_norm": 3.6756174564361572, | |
| "learning_rate": 0.0002974567764795707, | |
| "loss": 2.5943, | |
| "step": 445 | |
| }, | |
| { | |
| "epoch": 0.17922039129785433, | |
| "grad_norm": 3.5715696811676025, | |
| "learning_rate": 0.0002973990798385046, | |
| "loss": 2.6092, | |
| "step": 450 | |
| }, | |
| { | |
| "epoch": 0.1812117289789416, | |
| "grad_norm": 3.91926646232605, | |
| "learning_rate": 0.0002973407417746042, | |
| "loss": 2.5497, | |
| "step": 455 | |
| }, | |
| { | |
| "epoch": 0.18320306666002886, | |
| "grad_norm": 3.9486913681030273, | |
| "learning_rate": 0.0002972817625417338, | |
| "loss": 2.5328, | |
| "step": 460 | |
| }, | |
| { | |
| "epoch": 0.18519440434111614, | |
| "grad_norm": 5.621224880218506, | |
| "learning_rate": 0.00029722214239654766, | |
| "loss": 2.6173, | |
| "step": 465 | |
| }, | |
| { | |
| "epoch": 0.18718574202220342, | |
| "grad_norm": 3.9391846656799316, | |
| "learning_rate": 0.0002971618815984894, | |
| "loss": 2.6796, | |
| "step": 470 | |
| }, | |
| { | |
| "epoch": 0.18917707970329067, | |
| "grad_norm": 4.538651943206787, | |
| "learning_rate": 0.00029710098040979026, | |
| "loss": 2.4812, | |
| "step": 475 | |
| }, | |
| { | |
| "epoch": 0.19116841738437795, | |
| "grad_norm": 4.420340538024902, | |
| "learning_rate": 0.0002970394390954683, | |
| "loss": 2.6853, | |
| "step": 480 | |
| }, | |
| { | |
| "epoch": 0.19315975506546523, | |
| "grad_norm": 3.4120190143585205, | |
| "learning_rate": 0.000296977257923327, | |
| "loss": 2.5475, | |
| "step": 485 | |
| }, | |
| { | |
| "epoch": 0.19515109274655248, | |
| "grad_norm": 3.7508790493011475, | |
| "learning_rate": 0.00029691443716395446, | |
| "loss": 2.5562, | |
| "step": 490 | |
| }, | |
| { | |
| "epoch": 0.19714243042763976, | |
| "grad_norm": 6.769453525543213, | |
| "learning_rate": 0.0002968509770907219, | |
| "loss": 2.5752, | |
| "step": 495 | |
| }, | |
| { | |
| "epoch": 0.19913376810872704, | |
| "grad_norm": 5.990815162658691, | |
| "learning_rate": 0.0002967868779797827, | |
| "loss": 2.5315, | |
| "step": 500 | |
| }, | |
| { | |
| "epoch": 0.2011251057898143, | |
| "grad_norm": 4.616420745849609, | |
| "learning_rate": 0.0002967221401100708, | |
| "loss": 2.4976, | |
| "step": 505 | |
| }, | |
| { | |
| "epoch": 0.20311644347090158, | |
| "grad_norm": 4.068687915802002, | |
| "learning_rate": 0.0002966567637633001, | |
| "loss": 2.4871, | |
| "step": 510 | |
| }, | |
| { | |
| "epoch": 0.20510778115198885, | |
| "grad_norm": 4.577060699462891, | |
| "learning_rate": 0.00029659074922396266, | |
| "loss": 2.6066, | |
| "step": 515 | |
| }, | |
| { | |
| "epoch": 0.2070991188330761, | |
| "grad_norm": 6.227926254272461, | |
| "learning_rate": 0.00029652409677932793, | |
| "loss": 2.6226, | |
| "step": 520 | |
| }, | |
| { | |
| "epoch": 0.2090904565141634, | |
| "grad_norm": 4.359228134155273, | |
| "learning_rate": 0.000296456806719441, | |
| "loss": 2.5863, | |
| "step": 525 | |
| }, | |
| { | |
| "epoch": 0.21108179419525067, | |
| "grad_norm": 4.112326145172119, | |
| "learning_rate": 0.0002963888793371218, | |
| "loss": 2.5719, | |
| "step": 530 | |
| }, | |
| { | |
| "epoch": 0.21307313187633792, | |
| "grad_norm": 4.560180187225342, | |
| "learning_rate": 0.0002963203149279636, | |
| "loss": 2.5393, | |
| "step": 535 | |
| }, | |
| { | |
| "epoch": 0.2150644695574252, | |
| "grad_norm": 6.014193058013916, | |
| "learning_rate": 0.00029625111379033174, | |
| "loss": 2.5966, | |
| "step": 540 | |
| }, | |
| { | |
| "epoch": 0.21705580723851248, | |
| "grad_norm": 3.90463924407959, | |
| "learning_rate": 0.0002961812762253623, | |
| "loss": 2.671, | |
| "step": 545 | |
| }, | |
| { | |
| "epoch": 0.21904714491959973, | |
| "grad_norm": 5.432832717895508, | |
| "learning_rate": 0.000296110802536961, | |
| "loss": 2.587, | |
| "step": 550 | |
| }, | |
| { | |
| "epoch": 0.221038482600687, | |
| "grad_norm": 4.703036785125732, | |
| "learning_rate": 0.0002960396930318016, | |
| "loss": 2.6888, | |
| "step": 555 | |
| }, | |
| { | |
| "epoch": 0.2230298202817743, | |
| "grad_norm": 3.983517646789551, | |
| "learning_rate": 0.00029596794801932467, | |
| "loss": 2.6974, | |
| "step": 560 | |
| }, | |
| { | |
| "epoch": 0.22502115796286154, | |
| "grad_norm": 5.251948356628418, | |
| "learning_rate": 0.0002958955678117363, | |
| "loss": 2.581, | |
| "step": 565 | |
| }, | |
| { | |
| "epoch": 0.22701249564394882, | |
| "grad_norm": 3.736345052719116, | |
| "learning_rate": 0.0002958225527240067, | |
| "loss": 2.6273, | |
| "step": 570 | |
| }, | |
| { | |
| "epoch": 0.2290038333250361, | |
| "grad_norm": 4.199942588806152, | |
| "learning_rate": 0.0002957489030738688, | |
| "loss": 2.5318, | |
| "step": 575 | |
| }, | |
| { | |
| "epoch": 0.23099517100612335, | |
| "grad_norm": 4.074978828430176, | |
| "learning_rate": 0.00029567461918181693, | |
| "loss": 2.6444, | |
| "step": 580 | |
| }, | |
| { | |
| "epoch": 0.23298650868721063, | |
| "grad_norm": 4.244055271148682, | |
| "learning_rate": 0.00029559970137110536, | |
| "loss": 2.5639, | |
| "step": 585 | |
| }, | |
| { | |
| "epoch": 0.2349778463682979, | |
| "grad_norm": 4.564445972442627, | |
| "learning_rate": 0.000295524149967747, | |
| "loss": 2.6057, | |
| "step": 590 | |
| }, | |
| { | |
| "epoch": 0.23696918404938516, | |
| "grad_norm": 12.886066436767578, | |
| "learning_rate": 0.00029546325287919543, | |
| "loss": 2.5556, | |
| "step": 595 | |
| }, | |
| { | |
| "epoch": 0.23896052173047244, | |
| "grad_norm": 4.276486396789551, | |
| "learning_rate": 0.00029538656183942505, | |
| "loss": 2.5598, | |
| "step": 600 | |
| }, | |
| { | |
| "epoch": 0.24095185941155972, | |
| "grad_norm": 3.5438523292541504, | |
| "learning_rate": 0.0002953092381345073, | |
| "loss": 2.6364, | |
| "step": 605 | |
| }, | |
| { | |
| "epoch": 0.24294319709264697, | |
| "grad_norm": 5.840348243713379, | |
| "learning_rate": 0.00029523128210092445, | |
| "loss": 2.5398, | |
| "step": 610 | |
| }, | |
| { | |
| "epoch": 0.24493453477373425, | |
| "grad_norm": 4.8936285972595215, | |
| "learning_rate": 0.0002951526940779106, | |
| "loss": 2.5041, | |
| "step": 615 | |
| }, | |
| { | |
| "epoch": 0.24692587245482153, | |
| "grad_norm": 5.3416337966918945, | |
| "learning_rate": 0.00029507347440744993, | |
| "loss": 2.5853, | |
| "step": 620 | |
| }, | |
| { | |
| "epoch": 0.24891721013590878, | |
| "grad_norm": 6.661780834197998, | |
| "learning_rate": 0.0002949936234342752, | |
| "loss": 2.574, | |
| "step": 625 | |
| }, | |
| { | |
| "epoch": 0.25090854781699606, | |
| "grad_norm": 5.31721305847168, | |
| "learning_rate": 0.0002949131415058665, | |
| "loss": 2.6033, | |
| "step": 630 | |
| }, | |
| { | |
| "epoch": 0.25289988549808334, | |
| "grad_norm": 4.867547988891602, | |
| "learning_rate": 0.0002948320289724496, | |
| "loss": 2.6626, | |
| "step": 635 | |
| }, | |
| { | |
| "epoch": 0.2548912231791706, | |
| "grad_norm": 5.965942859649658, | |
| "learning_rate": 0.00029475028618699417, | |
| "loss": 2.5723, | |
| "step": 640 | |
| }, | |
| { | |
| "epoch": 0.2568825608602579, | |
| "grad_norm": 4.902228355407715, | |
| "learning_rate": 0.00029466791350521286, | |
| "loss": 2.5944, | |
| "step": 645 | |
| }, | |
| { | |
| "epoch": 0.2588738985413451, | |
| "grad_norm": 5.2122650146484375, | |
| "learning_rate": 0.0002945849112855591, | |
| "loss": 2.5094, | |
| "step": 650 | |
| }, | |
| { | |
| "epoch": 0.2608652362224324, | |
| "grad_norm": 4.673396110534668, | |
| "learning_rate": 0.0002945012798892259, | |
| "loss": 2.5138, | |
| "step": 655 | |
| }, | |
| { | |
| "epoch": 0.2628565739035197, | |
| "grad_norm": 8.117375373840332, | |
| "learning_rate": 0.00029441701968014423, | |
| "loss": 2.7075, | |
| "step": 660 | |
| }, | |
| { | |
| "epoch": 0.26484791158460697, | |
| "grad_norm": 6.844805717468262, | |
| "learning_rate": 0.0002943321310249814, | |
| "loss": 2.6431, | |
| "step": 665 | |
| }, | |
| { | |
| "epoch": 0.26683924926569424, | |
| "grad_norm": 5.7974982261657715, | |
| "learning_rate": 0.0002942466142931395, | |
| "loss": 2.5232, | |
| "step": 670 | |
| }, | |
| { | |
| "epoch": 0.2688305869467815, | |
| "grad_norm": 4.563413619995117, | |
| "learning_rate": 0.00029416046985675356, | |
| "loss": 2.492, | |
| "step": 675 | |
| }, | |
| { | |
| "epoch": 0.27082192462786875, | |
| "grad_norm": 4.943353176116943, | |
| "learning_rate": 0.0002940736980906905, | |
| "loss": 2.4998, | |
| "step": 680 | |
| }, | |
| { | |
| "epoch": 0.27281326230895603, | |
| "grad_norm": 5.243081569671631, | |
| "learning_rate": 0.00029398629937254676, | |
| "loss": 2.5161, | |
| "step": 685 | |
| }, | |
| { | |
| "epoch": 0.2748045999900433, | |
| "grad_norm": 3.9631588459014893, | |
| "learning_rate": 0.00029389827408264727, | |
| "loss": 2.5674, | |
| "step": 690 | |
| }, | |
| { | |
| "epoch": 0.2767959376711306, | |
| "grad_norm": 4.865413665771484, | |
| "learning_rate": 0.0002938096226040435, | |
| "loss": 2.5864, | |
| "step": 695 | |
| }, | |
| { | |
| "epoch": 0.27878727535221787, | |
| "grad_norm": 4.747464179992676, | |
| "learning_rate": 0.00029372034532251177, | |
| "loss": 2.6115, | |
| "step": 700 | |
| }, | |
| { | |
| "epoch": 0.28077861303330515, | |
| "grad_norm": 4.920633792877197, | |
| "learning_rate": 0.00029363044262655175, | |
| "loss": 2.596, | |
| "step": 705 | |
| }, | |
| { | |
| "epoch": 0.28276995071439237, | |
| "grad_norm": 4.5279340744018555, | |
| "learning_rate": 0.00029353991490738455, | |
| "loss": 2.6313, | |
| "step": 710 | |
| }, | |
| { | |
| "epoch": 0.28476128839547965, | |
| "grad_norm": 4.8143486976623535, | |
| "learning_rate": 0.0002934487625589513, | |
| "loss": 2.6689, | |
| "step": 715 | |
| }, | |
| { | |
| "epoch": 0.28675262607656693, | |
| "grad_norm": 12.043296813964844, | |
| "learning_rate": 0.0002933569859779111, | |
| "loss": 2.6326, | |
| "step": 720 | |
| }, | |
| { | |
| "epoch": 0.2887439637576542, | |
| "grad_norm": 4.9652557373046875, | |
| "learning_rate": 0.00029326458556363957, | |
| "loss": 2.5531, | |
| "step": 725 | |
| }, | |
| { | |
| "epoch": 0.2907353014387415, | |
| "grad_norm": 5.17543888092041, | |
| "learning_rate": 0.000293171561718227, | |
| "loss": 2.5285, | |
| "step": 730 | |
| }, | |
| { | |
| "epoch": 0.29272663911982877, | |
| "grad_norm": 5.931879997253418, | |
| "learning_rate": 0.0002930779148464765, | |
| "loss": 2.5604, | |
| "step": 735 | |
| }, | |
| { | |
| "epoch": 0.294717976800916, | |
| "grad_norm": 5.292037487030029, | |
| "learning_rate": 0.0002929836453559026, | |
| "loss": 2.5188, | |
| "step": 740 | |
| }, | |
| { | |
| "epoch": 0.2967093144820033, | |
| "grad_norm": 5.012665748596191, | |
| "learning_rate": 0.00029288875365672887, | |
| "loss": 2.5135, | |
| "step": 745 | |
| }, | |
| { | |
| "epoch": 0.29870065216309055, | |
| "grad_norm": 5.738946914672852, | |
| "learning_rate": 0.00029279324016188676, | |
| "loss": 2.5426, | |
| "step": 750 | |
| }, | |
| { | |
| "epoch": 0.30069198984417783, | |
| "grad_norm": 4.243402481079102, | |
| "learning_rate": 0.00029269710528701345, | |
| "loss": 2.4408, | |
| "step": 755 | |
| }, | |
| { | |
| "epoch": 0.3026833275252651, | |
| "grad_norm": 5.939416408538818, | |
| "learning_rate": 0.0002926003494504501, | |
| "loss": 2.8477, | |
| "step": 760 | |
| }, | |
| { | |
| "epoch": 0.3046746652063524, | |
| "grad_norm": 4.015962600708008, | |
| "learning_rate": 0.0002925029730732401, | |
| "loss": 2.4087, | |
| "step": 765 | |
| }, | |
| { | |
| "epoch": 0.3066660028874396, | |
| "grad_norm": 6.449498653411865, | |
| "learning_rate": 0.000292404976579127, | |
| "loss": 2.5785, | |
| "step": 770 | |
| }, | |
| { | |
| "epoch": 0.3086573405685269, | |
| "grad_norm": 7.409605503082275, | |
| "learning_rate": 0.0002923063603945532, | |
| "loss": 2.5958, | |
| "step": 775 | |
| }, | |
| { | |
| "epoch": 0.3106486782496142, | |
| "grad_norm": 4.776349067687988, | |
| "learning_rate": 0.0002922071249486575, | |
| "loss": 2.6296, | |
| "step": 780 | |
| }, | |
| { | |
| "epoch": 0.31264001593070145, | |
| "grad_norm": 4.3293352127075195, | |
| "learning_rate": 0.0002921072706732734, | |
| "loss": 2.5353, | |
| "step": 785 | |
| }, | |
| { | |
| "epoch": 0.31463135361178873, | |
| "grad_norm": 6.083279609680176, | |
| "learning_rate": 0.00029200679800292766, | |
| "loss": 2.6478, | |
| "step": 790 | |
| }, | |
| { | |
| "epoch": 0.316622691292876, | |
| "grad_norm": 5.940248012542725, | |
| "learning_rate": 0.0002919057073748377, | |
| "loss": 2.7188, | |
| "step": 795 | |
| }, | |
| { | |
| "epoch": 0.31861402897396324, | |
| "grad_norm": 4.022637367248535, | |
| "learning_rate": 0.00029180399922891026, | |
| "loss": 2.4939, | |
| "step": 800 | |
| }, | |
| { | |
| "epoch": 0.3206053666550505, | |
| "grad_norm": 5.022520542144775, | |
| "learning_rate": 0.00029170167400773906, | |
| "loss": 2.5447, | |
| "step": 805 | |
| }, | |
| { | |
| "epoch": 0.3225967043361378, | |
| "grad_norm": 7.304029941558838, | |
| "learning_rate": 0.00029159873215660337, | |
| "loss": 2.5208, | |
| "step": 810 | |
| }, | |
| { | |
| "epoch": 0.3245880420172251, | |
| "grad_norm": 4.472403526306152, | |
| "learning_rate": 0.00029149517412346555, | |
| "loss": 2.6699, | |
| "step": 815 | |
| }, | |
| { | |
| "epoch": 0.32657937969831236, | |
| "grad_norm": 5.33425235748291, | |
| "learning_rate": 0.00029139100035896953, | |
| "loss": 2.5921, | |
| "step": 820 | |
| }, | |
| { | |
| "epoch": 0.32857071737939963, | |
| "grad_norm": 5.940118312835693, | |
| "learning_rate": 0.00029128621131643844, | |
| "loss": 2.6926, | |
| "step": 825 | |
| }, | |
| { | |
| "epoch": 0.33056205506048686, | |
| "grad_norm": 5.923150539398193, | |
| "learning_rate": 0.0002911808074518731, | |
| "loss": 2.6289, | |
| "step": 830 | |
| }, | |
| { | |
| "epoch": 0.33255339274157414, | |
| "grad_norm": 7.126611709594727, | |
| "learning_rate": 0.0002910747892239497, | |
| "loss": 2.5597, | |
| "step": 835 | |
| }, | |
| { | |
| "epoch": 0.3345447304226614, | |
| "grad_norm": 6.361411094665527, | |
| "learning_rate": 0.00029096815709401776, | |
| "loss": 2.6819, | |
| "step": 840 | |
| }, | |
| { | |
| "epoch": 0.3365360681037487, | |
| "grad_norm": 4.598730564117432, | |
| "learning_rate": 0.00029086091152609846, | |
| "loss": 2.5476, | |
| "step": 845 | |
| }, | |
| { | |
| "epoch": 0.338527405784836, | |
| "grad_norm": 4.714099884033203, | |
| "learning_rate": 0.00029075305298688234, | |
| "loss": 2.5689, | |
| "step": 850 | |
| }, | |
| { | |
| "epoch": 0.34051874346592326, | |
| "grad_norm": 6.404987335205078, | |
| "learning_rate": 0.00029064458194572734, | |
| "loss": 2.6044, | |
| "step": 855 | |
| }, | |
| { | |
| "epoch": 0.3425100811470105, | |
| "grad_norm": 5.2892022132873535, | |
| "learning_rate": 0.0002905354988746568, | |
| "loss": 2.5326, | |
| "step": 860 | |
| }, | |
| { | |
| "epoch": 0.34450141882809776, | |
| "grad_norm": 5.470654487609863, | |
| "learning_rate": 0.00029042580424835723, | |
| "loss": 2.4232, | |
| "step": 865 | |
| }, | |
| { | |
| "epoch": 0.34649275650918504, | |
| "grad_norm": 4.468808174133301, | |
| "learning_rate": 0.00029031549854417667, | |
| "loss": 2.6581, | |
| "step": 870 | |
| }, | |
| { | |
| "epoch": 0.3484840941902723, | |
| "grad_norm": 5.86844539642334, | |
| "learning_rate": 0.00029020458224212204, | |
| "loss": 2.5621, | |
| "step": 875 | |
| }, | |
| { | |
| "epoch": 0.3504754318713596, | |
| "grad_norm": 4.8259711265563965, | |
| "learning_rate": 0.0002900930558248576, | |
| "loss": 2.6615, | |
| "step": 880 | |
| }, | |
| { | |
| "epoch": 0.3524667695524469, | |
| "grad_norm": 5.640230655670166, | |
| "learning_rate": 0.00028998091977770236, | |
| "loss": 2.4399, | |
| "step": 885 | |
| }, | |
| { | |
| "epoch": 0.3544581072335341, | |
| "grad_norm": 5.823399543762207, | |
| "learning_rate": 0.00028986817458862837, | |
| "loss": 2.6114, | |
| "step": 890 | |
| }, | |
| { | |
| "epoch": 0.3564494449146214, | |
| "grad_norm": 5.678874492645264, | |
| "learning_rate": 0.0002897548207482583, | |
| "loss": 2.6281, | |
| "step": 895 | |
| }, | |
| { | |
| "epoch": 0.35844078259570866, | |
| "grad_norm": 13.930092811584473, | |
| "learning_rate": 0.00028964085874986356, | |
| "loss": 2.6551, | |
| "step": 900 | |
| }, | |
| { | |
| "epoch": 0.36043212027679594, | |
| "grad_norm": 4.730804443359375, | |
| "learning_rate": 0.00028952628908936184, | |
| "loss": 2.7113, | |
| "step": 905 | |
| }, | |
| { | |
| "epoch": 0.3624234579578832, | |
| "grad_norm": 4.784435749053955, | |
| "learning_rate": 0.0002894111122653153, | |
| "loss": 2.7406, | |
| "step": 910 | |
| }, | |
| { | |
| "epoch": 0.3644147956389705, | |
| "grad_norm": 5.345913887023926, | |
| "learning_rate": 0.0002892953287789283, | |
| "loss": 2.5814, | |
| "step": 915 | |
| }, | |
| { | |
| "epoch": 0.3664061333200577, | |
| "grad_norm": 5.723335266113281, | |
| "learning_rate": 0.0002891789391340449, | |
| "loss": 2.7098, | |
| "step": 920 | |
| }, | |
| { | |
| "epoch": 0.368397471001145, | |
| "grad_norm": 4.860372066497803, | |
| "learning_rate": 0.0002890619438371472, | |
| "loss": 2.5403, | |
| "step": 925 | |
| }, | |
| { | |
| "epoch": 0.3703888086822323, | |
| "grad_norm": 4.663769245147705, | |
| "learning_rate": 0.0002889443433973525, | |
| "loss": 2.5292, | |
| "step": 930 | |
| }, | |
| { | |
| "epoch": 0.37238014636331956, | |
| "grad_norm": 4.552435874938965, | |
| "learning_rate": 0.0002888261383264119, | |
| "loss": 2.5331, | |
| "step": 935 | |
| }, | |
| { | |
| "epoch": 0.37437148404440684, | |
| "grad_norm": 6.1328253746032715, | |
| "learning_rate": 0.0002887073291387073, | |
| "loss": 2.6319, | |
| "step": 940 | |
| }, | |
| { | |
| "epoch": 0.3763628217254941, | |
| "grad_norm": 4.32132625579834, | |
| "learning_rate": 0.0002885879163512496, | |
| "loss": 2.4976, | |
| "step": 945 | |
| }, | |
| { | |
| "epoch": 0.37835415940658135, | |
| "grad_norm": 5.826934814453125, | |
| "learning_rate": 0.0002884679004836762, | |
| "loss": 2.5219, | |
| "step": 950 | |
| }, | |
| { | |
| "epoch": 0.3803454970876686, | |
| "grad_norm": 5.766158103942871, | |
| "learning_rate": 0.00028834728205824904, | |
| "loss": 2.5199, | |
| "step": 955 | |
| }, | |
| { | |
| "epoch": 0.3823368347687559, | |
| "grad_norm": 4.263400077819824, | |
| "learning_rate": 0.0002882260615998521, | |
| "loss": 2.5299, | |
| "step": 960 | |
| }, | |
| { | |
| "epoch": 0.3843281724498432, | |
| "grad_norm": 5.239638328552246, | |
| "learning_rate": 0.0002881042396359891, | |
| "loss": 2.5803, | |
| "step": 965 | |
| }, | |
| { | |
| "epoch": 0.38631951013093047, | |
| "grad_norm": 5.97824239730835, | |
| "learning_rate": 0.00028798181669678136, | |
| "loss": 2.5917, | |
| "step": 970 | |
| }, | |
| { | |
| "epoch": 0.38831084781201775, | |
| "grad_norm": 5.745943069458008, | |
| "learning_rate": 0.0002878587933149653, | |
| "loss": 2.6519, | |
| "step": 975 | |
| }, | |
| { | |
| "epoch": 0.39030218549310497, | |
| "grad_norm": 4.912780284881592, | |
| "learning_rate": 0.0002877351700258904, | |
| "loss": 2.7083, | |
| "step": 980 | |
| }, | |
| { | |
| "epoch": 0.39229352317419225, | |
| "grad_norm": 5.343855857849121, | |
| "learning_rate": 0.0002876109473675165, | |
| "loss": 2.4946, | |
| "step": 985 | |
| }, | |
| { | |
| "epoch": 0.39428486085527953, | |
| "grad_norm": 5.487871170043945, | |
| "learning_rate": 0.0002874861258804118, | |
| "loss": 2.6007, | |
| "step": 990 | |
| }, | |
| { | |
| "epoch": 0.3962761985363668, | |
| "grad_norm": 5.249770164489746, | |
| "learning_rate": 0.00028736070610775046, | |
| "loss": 2.5687, | |
| "step": 995 | |
| }, | |
| { | |
| "epoch": 0.3982675362174541, | |
| "grad_norm": 4.565980434417725, | |
| "learning_rate": 0.00028723468859530977, | |
| "loss": 2.5454, | |
| "step": 1000 | |
| }, | |
| { | |
| "epoch": 0.40025887389854137, | |
| "grad_norm": 6.998654365539551, | |
| "learning_rate": 0.0002871080738914685, | |
| "loss": 2.5901, | |
| "step": 1005 | |
| }, | |
| { | |
| "epoch": 0.4022502115796286, | |
| "grad_norm": 5.072119235992432, | |
| "learning_rate": 0.0002869808625472042, | |
| "loss": 2.6213, | |
| "step": 1010 | |
| }, | |
| { | |
| "epoch": 0.40424154926071587, | |
| "grad_norm": 6.3118720054626465, | |
| "learning_rate": 0.0002868530551160904, | |
| "loss": 2.5593, | |
| "step": 1015 | |
| }, | |
| { | |
| "epoch": 0.40623288694180315, | |
| "grad_norm": 4.6301984786987305, | |
| "learning_rate": 0.00028672465215429484, | |
| "loss": 2.613, | |
| "step": 1020 | |
| }, | |
| { | |
| "epoch": 0.40822422462289043, | |
| "grad_norm": 5.474564075469971, | |
| "learning_rate": 0.0002865956542205768, | |
| "loss": 2.559, | |
| "step": 1025 | |
| }, | |
| { | |
| "epoch": 0.4102155623039777, | |
| "grad_norm": 4.7905168533325195, | |
| "learning_rate": 0.00028646606187628446, | |
| "loss": 2.6535, | |
| "step": 1030 | |
| }, | |
| { | |
| "epoch": 0.412206899985065, | |
| "grad_norm": 5.30242395401001, | |
| "learning_rate": 0.0002863358756853528, | |
| "loss": 2.4873, | |
| "step": 1035 | |
| }, | |
| { | |
| "epoch": 0.4141982376661522, | |
| "grad_norm": 4.635322093963623, | |
| "learning_rate": 0.000286205096214301, | |
| "loss": 2.6035, | |
| "step": 1040 | |
| }, | |
| { | |
| "epoch": 0.4161895753472395, | |
| "grad_norm": 5.1605000495910645, | |
| "learning_rate": 0.00028607372403222976, | |
| "loss": 2.6536, | |
| "step": 1045 | |
| }, | |
| { | |
| "epoch": 0.4181809130283268, | |
| "grad_norm": 5.128244400024414, | |
| "learning_rate": 0.0002859417597108192, | |
| "loss": 2.5994, | |
| "step": 1050 | |
| }, | |
| { | |
| "epoch": 0.42017225070941405, | |
| "grad_norm": 5.02494478225708, | |
| "learning_rate": 0.0002858092038243262, | |
| "loss": 2.6867, | |
| "step": 1055 | |
| }, | |
| { | |
| "epoch": 0.42216358839050133, | |
| "grad_norm": 5.030214309692383, | |
| "learning_rate": 0.0002856760569495818, | |
| "loss": 2.5302, | |
| "step": 1060 | |
| }, | |
| { | |
| "epoch": 0.4241549260715886, | |
| "grad_norm": 5.012157917022705, | |
| "learning_rate": 0.00028554231966598884, | |
| "loss": 2.5605, | |
| "step": 1065 | |
| }, | |
| { | |
| "epoch": 0.42614626375267584, | |
| "grad_norm": 5.142708778381348, | |
| "learning_rate": 0.0002854079925555195, | |
| "loss": 2.6312, | |
| "step": 1070 | |
| }, | |
| { | |
| "epoch": 0.4281376014337631, | |
| "grad_norm": 6.910045623779297, | |
| "learning_rate": 0.0002852730762027124, | |
| "loss": 2.5357, | |
| "step": 1075 | |
| }, | |
| { | |
| "epoch": 0.4301289391148504, | |
| "grad_norm": 5.449594497680664, | |
| "learning_rate": 0.0002851375711946705, | |
| "loss": 2.5713, | |
| "step": 1080 | |
| }, | |
| { | |
| "epoch": 0.4321202767959377, | |
| "grad_norm": 6.184391975402832, | |
| "learning_rate": 0.0002850014781210583, | |
| "loss": 2.5214, | |
| "step": 1085 | |
| }, | |
| { | |
| "epoch": 0.43411161447702495, | |
| "grad_norm": 6.970107078552246, | |
| "learning_rate": 0.0002848647975740994, | |
| "loss": 2.63, | |
| "step": 1090 | |
| }, | |
| { | |
| "epoch": 0.43610295215811223, | |
| "grad_norm": 7.1215972900390625, | |
| "learning_rate": 0.0002847275301485737, | |
| "loss": 2.5809, | |
| "step": 1095 | |
| }, | |
| { | |
| "epoch": 0.43809428983919946, | |
| "grad_norm": 5.520656108856201, | |
| "learning_rate": 0.0002845896764418152, | |
| "loss": 2.5623, | |
| "step": 1100 | |
| }, | |
| { | |
| "epoch": 0.44008562752028674, | |
| "grad_norm": 5.367312908172607, | |
| "learning_rate": 0.0002844512370537089, | |
| "loss": 2.6073, | |
| "step": 1105 | |
| }, | |
| { | |
| "epoch": 0.442076965201374, | |
| "grad_norm": 5.053874492645264, | |
| "learning_rate": 0.00028431221258668865, | |
| "loss": 2.6203, | |
| "step": 1110 | |
| }, | |
| { | |
| "epoch": 0.4440683028824613, | |
| "grad_norm": 5.396303176879883, | |
| "learning_rate": 0.0002841726036457342, | |
| "loss": 2.3232, | |
| "step": 1115 | |
| }, | |
| { | |
| "epoch": 0.4460596405635486, | |
| "grad_norm": 4.401814937591553, | |
| "learning_rate": 0.0002840324108383688, | |
| "loss": 2.559, | |
| "step": 1120 | |
| }, | |
| { | |
| "epoch": 0.44805097824463586, | |
| "grad_norm": 7.336491584777832, | |
| "learning_rate": 0.0002838916347746564, | |
| "loss": 2.6238, | |
| "step": 1125 | |
| }, | |
| { | |
| "epoch": 0.4500423159257231, | |
| "grad_norm": 4.719950199127197, | |
| "learning_rate": 0.0002837502760671991, | |
| "loss": 2.5617, | |
| "step": 1130 | |
| }, | |
| { | |
| "epoch": 0.45203365360681036, | |
| "grad_norm": 7.233749866485596, | |
| "learning_rate": 0.0002836083353311344, | |
| "loss": 2.6933, | |
| "step": 1135 | |
| }, | |
| { | |
| "epoch": 0.45402499128789764, | |
| "grad_norm": 5.0576491355896, | |
| "learning_rate": 0.00028346581318413255, | |
| "loss": 2.4117, | |
| "step": 1140 | |
| }, | |
| { | |
| "epoch": 0.4560163289689849, | |
| "grad_norm": 4.500210762023926, | |
| "learning_rate": 0.00028332271024639395, | |
| "loss": 2.6071, | |
| "step": 1145 | |
| }, | |
| { | |
| "epoch": 0.4580076666500722, | |
| "grad_norm": 5.0376105308532715, | |
| "learning_rate": 0.0002831790271406462, | |
| "loss": 2.6693, | |
| "step": 1150 | |
| }, | |
| { | |
| "epoch": 0.4599990043311595, | |
| "grad_norm": 5.0912981033325195, | |
| "learning_rate": 0.0002830347644921418, | |
| "loss": 2.5838, | |
| "step": 1155 | |
| }, | |
| { | |
| "epoch": 0.4619903420122467, | |
| "grad_norm": 4.176620006561279, | |
| "learning_rate": 0.00028288992292865503, | |
| "loss": 2.5671, | |
| "step": 1160 | |
| }, | |
| { | |
| "epoch": 0.463981679693334, | |
| "grad_norm": 5.332305431365967, | |
| "learning_rate": 0.0002827445030804794, | |
| "loss": 2.6354, | |
| "step": 1165 | |
| }, | |
| { | |
| "epoch": 0.46597301737442126, | |
| "grad_norm": 5.466835975646973, | |
| "learning_rate": 0.00028259850558042486, | |
| "loss": 2.524, | |
| "step": 1170 | |
| }, | |
| { | |
| "epoch": 0.46796435505550854, | |
| "grad_norm": 5.8639020919799805, | |
| "learning_rate": 0.00028245193106381515, | |
| "loss": 2.5369, | |
| "step": 1175 | |
| }, | |
| { | |
| "epoch": 0.4699556927365958, | |
| "grad_norm": 5.681973457336426, | |
| "learning_rate": 0.00028230478016848503, | |
| "loss": 2.503, | |
| "step": 1180 | |
| }, | |
| { | |
| "epoch": 0.4719470304176831, | |
| "grad_norm": 6.664337158203125, | |
| "learning_rate": 0.0002821570535347772, | |
| "loss": 2.6666, | |
| "step": 1185 | |
| }, | |
| { | |
| "epoch": 0.4739383680987703, | |
| "grad_norm": 13.131908416748047, | |
| "learning_rate": 0.00028200875180553996, | |
| "loss": 2.5864, | |
| "step": 1190 | |
| }, | |
| { | |
| "epoch": 0.4759297057798576, | |
| "grad_norm": 4.748403549194336, | |
| "learning_rate": 0.00028185987562612404, | |
| "loss": 2.5652, | |
| "step": 1195 | |
| }, | |
| { | |
| "epoch": 0.4779210434609449, | |
| "grad_norm": 5.171413421630859, | |
| "learning_rate": 0.00028171042564438016, | |
| "loss": 2.4675, | |
| "step": 1200 | |
| }, | |
| { | |
| "epoch": 0.47991238114203216, | |
| "grad_norm": 5.2752227783203125, | |
| "learning_rate": 0.00028156040251065575, | |
| "loss": 2.738, | |
| "step": 1205 | |
| }, | |
| { | |
| "epoch": 0.48190371882311944, | |
| "grad_norm": 5.439636707305908, | |
| "learning_rate": 0.00028140980687779254, | |
| "loss": 2.5925, | |
| "step": 1210 | |
| }, | |
| { | |
| "epoch": 0.4838950565042067, | |
| "grad_norm": 5.162364482879639, | |
| "learning_rate": 0.00028125863940112346, | |
| "loss": 2.5849, | |
| "step": 1215 | |
| }, | |
| { | |
| "epoch": 0.48588639418529395, | |
| "grad_norm": 4.560568809509277, | |
| "learning_rate": 0.00028110690073847, | |
| "loss": 2.5966, | |
| "step": 1220 | |
| }, | |
| { | |
| "epoch": 0.4878777318663812, | |
| "grad_norm": 5.049319744110107, | |
| "learning_rate": 0.0002809545915501392, | |
| "loss": 2.5334, | |
| "step": 1225 | |
| }, | |
| { | |
| "epoch": 0.4898690695474685, | |
| "grad_norm": 6.197220802307129, | |
| "learning_rate": 0.0002808017124989206, | |
| "loss": 2.4928, | |
| "step": 1230 | |
| }, | |
| { | |
| "epoch": 0.4918604072285558, | |
| "grad_norm": 4.927899360656738, | |
| "learning_rate": 0.0002806482642500838, | |
| "loss": 2.6438, | |
| "step": 1235 | |
| }, | |
| { | |
| "epoch": 0.49385174490964306, | |
| "grad_norm": 4.190851211547852, | |
| "learning_rate": 0.00028049424747137537, | |
| "loss": 2.6136, | |
| "step": 1240 | |
| }, | |
| { | |
| "epoch": 0.49584308259073034, | |
| "grad_norm": 4.721156597137451, | |
| "learning_rate": 0.00028033966283301547, | |
| "loss": 2.7049, | |
| "step": 1245 | |
| }, | |
| { | |
| "epoch": 0.49783442027181757, | |
| "grad_norm": 5.104554653167725, | |
| "learning_rate": 0.0002801845110076959, | |
| "loss": 2.5448, | |
| "step": 1250 | |
| }, | |
| { | |
| "epoch": 0.49982575795290485, | |
| "grad_norm": 5.173543453216553, | |
| "learning_rate": 0.00028002879267057624, | |
| "loss": 2.4522, | |
| "step": 1255 | |
| }, | |
| { | |
| "epoch": 0.5018170956339921, | |
| "grad_norm": 6.031247138977051, | |
| "learning_rate": 0.0002798725084992816, | |
| "loss": 2.7182, | |
| "step": 1260 | |
| }, | |
| { | |
| "epoch": 0.5038084333150794, | |
| "grad_norm": 7.677878379821777, | |
| "learning_rate": 0.00027971565917389915, | |
| "loss": 2.5468, | |
| "step": 1265 | |
| }, | |
| { | |
| "epoch": 0.5057997709961667, | |
| "grad_norm": 5.632172584533691, | |
| "learning_rate": 0.0002795582453769755, | |
| "loss": 2.618, | |
| "step": 1270 | |
| }, | |
| { | |
| "epoch": 0.507791108677254, | |
| "grad_norm": 5.612128257751465, | |
| "learning_rate": 0.0002794002677935136, | |
| "loss": 2.6364, | |
| "step": 1275 | |
| }, | |
| { | |
| "epoch": 0.5097824463583412, | |
| "grad_norm": 6.009293556213379, | |
| "learning_rate": 0.00027924172711096975, | |
| "loss": 2.6681, | |
| "step": 1280 | |
| }, | |
| { | |
| "epoch": 0.5117737840394285, | |
| "grad_norm": 4.658798694610596, | |
| "learning_rate": 0.0002790826240192507, | |
| "loss": 2.6049, | |
| "step": 1285 | |
| }, | |
| { | |
| "epoch": 0.5137651217205158, | |
| "grad_norm": 5.140453815460205, | |
| "learning_rate": 0.00027892295921071056, | |
| "loss": 2.5811, | |
| "step": 1290 | |
| }, | |
| { | |
| "epoch": 0.515756459401603, | |
| "grad_norm": 6.587934494018555, | |
| "learning_rate": 0.00027876273338014773, | |
| "loss": 2.5577, | |
| "step": 1295 | |
| }, | |
| { | |
| "epoch": 0.5177477970826903, | |
| "grad_norm": 5.756930828094482, | |
| "learning_rate": 0.000278601947224802, | |
| "loss": 2.6275, | |
| "step": 1300 | |
| }, | |
| { | |
| "epoch": 0.5197391347637775, | |
| "grad_norm": 4.118381500244141, | |
| "learning_rate": 0.00027844060144435154, | |
| "loss": 2.575, | |
| "step": 1305 | |
| }, | |
| { | |
| "epoch": 0.5217304724448648, | |
| "grad_norm": 6.136612892150879, | |
| "learning_rate": 0.0002782786967409098, | |
| "loss": 2.5923, | |
| "step": 1310 | |
| }, | |
| { | |
| "epoch": 0.5237218101259521, | |
| "grad_norm": 5.936057090759277, | |
| "learning_rate": 0.0002781162338190222, | |
| "loss": 2.6065, | |
| "step": 1315 | |
| }, | |
| { | |
| "epoch": 0.5257131478070394, | |
| "grad_norm": 5.510227203369141, | |
| "learning_rate": 0.0002779532133856637, | |
| "loss": 2.5999, | |
| "step": 1320 | |
| }, | |
| { | |
| "epoch": 0.5277044854881267, | |
| "grad_norm": 5.084118843078613, | |
| "learning_rate": 0.00027778963615023505, | |
| "loss": 2.6796, | |
| "step": 1325 | |
| }, | |
| { | |
| "epoch": 0.5296958231692139, | |
| "grad_norm": 8.913198471069336, | |
| "learning_rate": 0.00027762550282456, | |
| "loss": 2.6884, | |
| "step": 1330 | |
| }, | |
| { | |
| "epoch": 0.5316871608503012, | |
| "grad_norm": 4.569077014923096, | |
| "learning_rate": 0.00027746081412288227, | |
| "loss": 2.5403, | |
| "step": 1335 | |
| }, | |
| { | |
| "epoch": 0.5336784985313885, | |
| "grad_norm": 6.321774005889893, | |
| "learning_rate": 0.00027729557076186235, | |
| "loss": 2.6237, | |
| "step": 1340 | |
| }, | |
| { | |
| "epoch": 0.5356698362124758, | |
| "grad_norm": 6.090647220611572, | |
| "learning_rate": 0.0002771297734605744, | |
| "loss": 2.6553, | |
| "step": 1345 | |
| }, | |
| { | |
| "epoch": 0.537661173893563, | |
| "grad_norm": 4.082389831542969, | |
| "learning_rate": 0.00027696342294050296, | |
| "loss": 2.6116, | |
| "step": 1350 | |
| }, | |
| { | |
| "epoch": 0.5396525115746502, | |
| "grad_norm": 5.483598709106445, | |
| "learning_rate": 0.0002767965199255402, | |
| "loss": 2.5374, | |
| "step": 1355 | |
| }, | |
| { | |
| "epoch": 0.5416438492557375, | |
| "grad_norm": 6.810405254364014, | |
| "learning_rate": 0.0002766290651419824, | |
| "loss": 2.5081, | |
| "step": 1360 | |
| }, | |
| { | |
| "epoch": 0.5436351869368248, | |
| "grad_norm": 5.5257039070129395, | |
| "learning_rate": 0.00027646105931852685, | |
| "loss": 2.578, | |
| "step": 1365 | |
| }, | |
| { | |
| "epoch": 0.5456265246179121, | |
| "grad_norm": 6.28507137298584, | |
| "learning_rate": 0.00027629250318626894, | |
| "loss": 2.7601, | |
| "step": 1370 | |
| }, | |
| { | |
| "epoch": 0.5476178622989993, | |
| "grad_norm": 4.6391119956970215, | |
| "learning_rate": 0.0002761233974786985, | |
| "loss": 2.6488, | |
| "step": 1375 | |
| }, | |
| { | |
| "epoch": 0.5496091999800866, | |
| "grad_norm": 4.203032493591309, | |
| "learning_rate": 0.00027595374293169734, | |
| "loss": 2.5824, | |
| "step": 1380 | |
| }, | |
| { | |
| "epoch": 0.5516005376611739, | |
| "grad_norm": 5.504507541656494, | |
| "learning_rate": 0.0002757835402835351, | |
| "loss": 2.5392, | |
| "step": 1385 | |
| }, | |
| { | |
| "epoch": 0.5535918753422612, | |
| "grad_norm": 4.769162178039551, | |
| "learning_rate": 0.00027561279027486687, | |
| "loss": 2.6826, | |
| "step": 1390 | |
| }, | |
| { | |
| "epoch": 0.5555832130233485, | |
| "grad_norm": 5.556728363037109, | |
| "learning_rate": 0.0002754414936487296, | |
| "loss": 2.6405, | |
| "step": 1395 | |
| }, | |
| { | |
| "epoch": 0.5575745507044357, | |
| "grad_norm": 4.737353324890137, | |
| "learning_rate": 0.0002752696511505387, | |
| "loss": 2.6097, | |
| "step": 1400 | |
| }, | |
| { | |
| "epoch": 0.559565888385523, | |
| "grad_norm": 4.895107746124268, | |
| "learning_rate": 0.00027509726352808523, | |
| "loss": 2.7201, | |
| "step": 1405 | |
| }, | |
| { | |
| "epoch": 0.5615572260666103, | |
| "grad_norm": 5.524222373962402, | |
| "learning_rate": 0.0002749243315315324, | |
| "loss": 2.6121, | |
| "step": 1410 | |
| }, | |
| { | |
| "epoch": 0.5635485637476975, | |
| "grad_norm": 7.01528263092041, | |
| "learning_rate": 0.0002747508559134122, | |
| "loss": 2.4831, | |
| "step": 1415 | |
| }, | |
| { | |
| "epoch": 0.5655399014287847, | |
| "grad_norm": 5.396809101104736, | |
| "learning_rate": 0.0002745768374286223, | |
| "loss": 2.4708, | |
| "step": 1420 | |
| }, | |
| { | |
| "epoch": 0.567531239109872, | |
| "grad_norm": 5.1723833084106445, | |
| "learning_rate": 0.00027440227683442266, | |
| "loss": 2.4762, | |
| "step": 1425 | |
| }, | |
| { | |
| "epoch": 0.5695225767909593, | |
| "grad_norm": 7.093824863433838, | |
| "learning_rate": 0.0002742271748904324, | |
| "loss": 2.6241, | |
| "step": 1430 | |
| }, | |
| { | |
| "epoch": 0.5715139144720466, | |
| "grad_norm": 5.00604772567749, | |
| "learning_rate": 0.0002740515323586264, | |
| "loss": 2.6665, | |
| "step": 1435 | |
| }, | |
| { | |
| "epoch": 0.5735052521531339, | |
| "grad_norm": 4.891804218292236, | |
| "learning_rate": 0.0002738753500033318, | |
| "loss": 2.5111, | |
| "step": 1440 | |
| }, | |
| { | |
| "epoch": 0.5754965898342211, | |
| "grad_norm": 6.145954132080078, | |
| "learning_rate": 0.0002736986285912249, | |
| "loss": 2.5384, | |
| "step": 1445 | |
| }, | |
| { | |
| "epoch": 0.5774879275153084, | |
| "grad_norm": 6.900878429412842, | |
| "learning_rate": 0.0002735213688913279, | |
| "loss": 2.6777, | |
| "step": 1450 | |
| }, | |
| { | |
| "epoch": 0.5794792651963957, | |
| "grad_norm": 4.369724750518799, | |
| "learning_rate": 0.00027334357167500524, | |
| "loss": 2.5877, | |
| "step": 1455 | |
| }, | |
| { | |
| "epoch": 0.581470602877483, | |
| "grad_norm": 5.170623302459717, | |
| "learning_rate": 0.00027316523771596054, | |
| "loss": 2.599, | |
| "step": 1460 | |
| }, | |
| { | |
| "epoch": 0.5834619405585703, | |
| "grad_norm": 5.459385395050049, | |
| "learning_rate": 0.000272986367790233, | |
| "loss": 2.6303, | |
| "step": 1465 | |
| }, | |
| { | |
| "epoch": 0.5854532782396575, | |
| "grad_norm": 5.773386001586914, | |
| "learning_rate": 0.0002728069626761943, | |
| "loss": 2.6477, | |
| "step": 1470 | |
| }, | |
| { | |
| "epoch": 0.5874446159207447, | |
| "grad_norm": 5.394538402557373, | |
| "learning_rate": 0.0002726270231545448, | |
| "loss": 2.5973, | |
| "step": 1475 | |
| }, | |
| { | |
| "epoch": 0.589435953601832, | |
| "grad_norm": 4.555385589599609, | |
| "learning_rate": 0.0002724465500083106, | |
| "loss": 2.5756, | |
| "step": 1480 | |
| }, | |
| { | |
| "epoch": 0.5914272912829193, | |
| "grad_norm": 6.8372650146484375, | |
| "learning_rate": 0.00027226554402283996, | |
| "loss": 2.4608, | |
| "step": 1485 | |
| }, | |
| { | |
| "epoch": 0.5934186289640065, | |
| "grad_norm": 8.024332046508789, | |
| "learning_rate": 0.00027208400598579966, | |
| "loss": 2.534, | |
| "step": 1490 | |
| }, | |
| { | |
| "epoch": 0.5954099666450938, | |
| "grad_norm": 8.167983055114746, | |
| "learning_rate": 0.0002719019366871719, | |
| "loss": 2.6079, | |
| "step": 1495 | |
| }, | |
| { | |
| "epoch": 0.5974013043261811, | |
| "grad_norm": 7.087464809417725, | |
| "learning_rate": 0.00027171933691925066, | |
| "loss": 2.5866, | |
| "step": 1500 | |
| }, | |
| { | |
| "epoch": 0.5993926420072684, | |
| "grad_norm": 7.60006856918335, | |
| "learning_rate": 0.00027153620747663826, | |
| "loss": 2.5826, | |
| "step": 1505 | |
| }, | |
| { | |
| "epoch": 0.6013839796883557, | |
| "grad_norm": 5.609342575073242, | |
| "learning_rate": 0.0002713525491562421, | |
| "loss": 2.4498, | |
| "step": 1510 | |
| }, | |
| { | |
| "epoch": 0.6033753173694429, | |
| "grad_norm": 6.407932758331299, | |
| "learning_rate": 0.00027116836275727096, | |
| "loss": 2.7349, | |
| "step": 1515 | |
| }, | |
| { | |
| "epoch": 0.6053666550505302, | |
| "grad_norm": 6.405856609344482, | |
| "learning_rate": 0.00027098364908123156, | |
| "loss": 2.4881, | |
| "step": 1520 | |
| }, | |
| { | |
| "epoch": 0.6073579927316175, | |
| "grad_norm": 5.0175676345825195, | |
| "learning_rate": 0.0002707984089319253, | |
| "loss": 2.6345, | |
| "step": 1525 | |
| }, | |
| { | |
| "epoch": 0.6093493304127048, | |
| "grad_norm": 4.912682056427002, | |
| "learning_rate": 0.00027061264311544435, | |
| "loss": 2.532, | |
| "step": 1530 | |
| }, | |
| { | |
| "epoch": 0.611340668093792, | |
| "grad_norm": 5.729355812072754, | |
| "learning_rate": 0.0002704263524401686, | |
| "loss": 2.6031, | |
| "step": 1535 | |
| }, | |
| { | |
| "epoch": 0.6133320057748792, | |
| "grad_norm": 6.606774806976318, | |
| "learning_rate": 0.0002702395377167617, | |
| "loss": 2.6403, | |
| "step": 1540 | |
| }, | |
| { | |
| "epoch": 0.6153233434559665, | |
| "grad_norm": 7.419695854187012, | |
| "learning_rate": 0.00027005219975816797, | |
| "loss": 2.5945, | |
| "step": 1545 | |
| }, | |
| { | |
| "epoch": 0.6173146811370538, | |
| "grad_norm": 9.066940307617188, | |
| "learning_rate": 0.0002698643393796085, | |
| "loss": 2.5915, | |
| "step": 1550 | |
| }, | |
| { | |
| "epoch": 0.6193060188181411, | |
| "grad_norm": 5.113959312438965, | |
| "learning_rate": 0.00026967595739857777, | |
| "loss": 2.6204, | |
| "step": 1555 | |
| }, | |
| { | |
| "epoch": 0.6212973564992283, | |
| "grad_norm": 5.126779556274414, | |
| "learning_rate": 0.0002694870546348401, | |
| "loss": 2.6282, | |
| "step": 1560 | |
| }, | |
| { | |
| "epoch": 0.6232886941803156, | |
| "grad_norm": 4.795478820800781, | |
| "learning_rate": 0.00026929763191042605, | |
| "loss": 2.5167, | |
| "step": 1565 | |
| }, | |
| { | |
| "epoch": 0.6252800318614029, | |
| "grad_norm": 5.510051727294922, | |
| "learning_rate": 0.0002691076900496288, | |
| "loss": 2.4141, | |
| "step": 1570 | |
| }, | |
| { | |
| "epoch": 0.6272713695424902, | |
| "grad_norm": 5.027546405792236, | |
| "learning_rate": 0.0002689172298790007, | |
| "loss": 2.6739, | |
| "step": 1575 | |
| }, | |
| { | |
| "epoch": 0.6292627072235775, | |
| "grad_norm": 6.95448637008667, | |
| "learning_rate": 0.0002687262522273494, | |
| "loss": 2.6941, | |
| "step": 1580 | |
| }, | |
| { | |
| "epoch": 0.6312540449046647, | |
| "grad_norm": 4.615077972412109, | |
| "learning_rate": 0.00026853475792573467, | |
| "loss": 2.5628, | |
| "step": 1585 | |
| }, | |
| { | |
| "epoch": 0.633245382585752, | |
| "grad_norm": 7.835186958312988, | |
| "learning_rate": 0.00026834274780746436, | |
| "loss": 2.6974, | |
| "step": 1590 | |
| }, | |
| { | |
| "epoch": 0.6352367202668392, | |
| "grad_norm": 4.616403579711914, | |
| "learning_rate": 0.00026815022270809094, | |
| "loss": 2.7351, | |
| "step": 1595 | |
| }, | |
| { | |
| "epoch": 0.6372280579479265, | |
| "grad_norm": 5.727982521057129, | |
| "learning_rate": 0.000267957183465408, | |
| "loss": 2.6939, | |
| "step": 1600 | |
| }, | |
| { | |
| "epoch": 0.6392193956290138, | |
| "grad_norm": 4.361421585083008, | |
| "learning_rate": 0.0002677636309194465, | |
| "loss": 2.4927, | |
| "step": 1605 | |
| }, | |
| { | |
| "epoch": 0.641210733310101, | |
| "grad_norm": 4.711188793182373, | |
| "learning_rate": 0.00026756956591247086, | |
| "loss": 2.4791, | |
| "step": 1610 | |
| }, | |
| { | |
| "epoch": 0.6432020709911883, | |
| "grad_norm": 6.163144588470459, | |
| "learning_rate": 0.00026737498928897574, | |
| "loss": 2.663, | |
| "step": 1615 | |
| }, | |
| { | |
| "epoch": 0.6451934086722756, | |
| "grad_norm": 7.343597888946533, | |
| "learning_rate": 0.0002671799018956821, | |
| "loss": 2.6147, | |
| "step": 1620 | |
| }, | |
| { | |
| "epoch": 0.6471847463533629, | |
| "grad_norm": 8.960589408874512, | |
| "learning_rate": 0.00026698430458153356, | |
| "loss": 2.5842, | |
| "step": 1625 | |
| }, | |
| { | |
| "epoch": 0.6491760840344502, | |
| "grad_norm": 7.366634845733643, | |
| "learning_rate": 0.0002667881981976927, | |
| "loss": 2.6003, | |
| "step": 1630 | |
| }, | |
| { | |
| "epoch": 0.6511674217155374, | |
| "grad_norm": 6.907988548278809, | |
| "learning_rate": 0.00026659158359753737, | |
| "loss": 2.6356, | |
| "step": 1635 | |
| }, | |
| { | |
| "epoch": 0.6531587593966247, | |
| "grad_norm": 6.1287641525268555, | |
| "learning_rate": 0.00026639446163665705, | |
| "loss": 2.5649, | |
| "step": 1640 | |
| }, | |
| { | |
| "epoch": 0.655150097077712, | |
| "grad_norm": 5.350227355957031, | |
| "learning_rate": 0.000266196833172849, | |
| "loss": 2.6785, | |
| "step": 1645 | |
| }, | |
| { | |
| "epoch": 0.6571414347587993, | |
| "grad_norm": 4.664305686950684, | |
| "learning_rate": 0.00026599869906611455, | |
| "loss": 2.5379, | |
| "step": 1650 | |
| }, | |
| { | |
| "epoch": 0.6591327724398864, | |
| "grad_norm": 8.54836654663086, | |
| "learning_rate": 0.0002658000601786554, | |
| "loss": 2.6151, | |
| "step": 1655 | |
| }, | |
| { | |
| "epoch": 0.6611241101209737, | |
| "grad_norm": 5.56497859954834, | |
| "learning_rate": 0.00026560091737487, | |
| "loss": 2.6609, | |
| "step": 1660 | |
| }, | |
| { | |
| "epoch": 0.663115447802061, | |
| "grad_norm": 4.609283447265625, | |
| "learning_rate": 0.0002654012715213493, | |
| "loss": 2.5232, | |
| "step": 1665 | |
| }, | |
| { | |
| "epoch": 0.6651067854831483, | |
| "grad_norm": 6.875670433044434, | |
| "learning_rate": 0.00026520112348687373, | |
| "loss": 2.5927, | |
| "step": 1670 | |
| }, | |
| { | |
| "epoch": 0.6670981231642356, | |
| "grad_norm": 6.808399677276611, | |
| "learning_rate": 0.0002650004741424088, | |
| "loss": 2.6449, | |
| "step": 1675 | |
| }, | |
| { | |
| "epoch": 0.6690894608453228, | |
| "grad_norm": 4.634307384490967, | |
| "learning_rate": 0.0002647993243611016, | |
| "loss": 2.572, | |
| "step": 1680 | |
| }, | |
| { | |
| "epoch": 0.6710807985264101, | |
| "grad_norm": 9.085533142089844, | |
| "learning_rate": 0.0002645976750182767, | |
| "loss": 2.6565, | |
| "step": 1685 | |
| }, | |
| { | |
| "epoch": 0.6730721362074974, | |
| "grad_norm": 6.228301048278809, | |
| "learning_rate": 0.000264395526991433, | |
| "loss": 2.5158, | |
| "step": 1690 | |
| }, | |
| { | |
| "epoch": 0.6750634738885847, | |
| "grad_norm": 4.803519248962402, | |
| "learning_rate": 0.00026419288116023897, | |
| "loss": 2.4979, | |
| "step": 1695 | |
| }, | |
| { | |
| "epoch": 0.677054811569672, | |
| "grad_norm": 6.3344621658325195, | |
| "learning_rate": 0.0002639897384065298, | |
| "loss": 2.6457, | |
| "step": 1700 | |
| }, | |
| { | |
| "epoch": 0.6790461492507592, | |
| "grad_norm": 5.770046234130859, | |
| "learning_rate": 0.00026378609961430266, | |
| "loss": 2.5031, | |
| "step": 1705 | |
| }, | |
| { | |
| "epoch": 0.6810374869318465, | |
| "grad_norm": 5.835142135620117, | |
| "learning_rate": 0.0002635819656697137, | |
| "loss": 2.5488, | |
| "step": 1710 | |
| }, | |
| { | |
| "epoch": 0.6830288246129338, | |
| "grad_norm": 5.059028625488281, | |
| "learning_rate": 0.0002633773374610735, | |
| "loss": 2.6686, | |
| "step": 1715 | |
| }, | |
| { | |
| "epoch": 0.685020162294021, | |
| "grad_norm": 6.267749309539795, | |
| "learning_rate": 0.0002631722158788435, | |
| "loss": 2.6308, | |
| "step": 1720 | |
| }, | |
| { | |
| "epoch": 0.6870114999751082, | |
| "grad_norm": 4.403576374053955, | |
| "learning_rate": 0.0002629666018156322, | |
| "loss": 2.6768, | |
| "step": 1725 | |
| }, | |
| { | |
| "epoch": 0.6890028376561955, | |
| "grad_norm": 6.0533905029296875, | |
| "learning_rate": 0.0002627604961661912, | |
| "loss": 2.5665, | |
| "step": 1730 | |
| }, | |
| { | |
| "epoch": 0.6909941753372828, | |
| "grad_norm": 6.283039569854736, | |
| "learning_rate": 0.0002625538998274112, | |
| "loss": 2.6614, | |
| "step": 1735 | |
| }, | |
| { | |
| "epoch": 0.6929855130183701, | |
| "grad_norm": 5.365102291107178, | |
| "learning_rate": 0.00026234681369831825, | |
| "loss": 2.6992, | |
| "step": 1740 | |
| }, | |
| { | |
| "epoch": 0.6949768506994574, | |
| "grad_norm": 4.661876201629639, | |
| "learning_rate": 0.00026213923868006973, | |
| "loss": 2.6264, | |
| "step": 1745 | |
| }, | |
| { | |
| "epoch": 0.6969681883805446, | |
| "grad_norm": 5.980187892913818, | |
| "learning_rate": 0.0002619311756759505, | |
| "loss": 2.5872, | |
| "step": 1750 | |
| }, | |
| { | |
| "epoch": 0.6989595260616319, | |
| "grad_norm": 6.6604204177856445, | |
| "learning_rate": 0.0002617226255913689, | |
| "loss": 2.5463, | |
| "step": 1755 | |
| }, | |
| { | |
| "epoch": 0.7009508637427192, | |
| "grad_norm": 5.904111385345459, | |
| "learning_rate": 0.00026151358933385293, | |
| "loss": 2.6829, | |
| "step": 1760 | |
| }, | |
| { | |
| "epoch": 0.7029422014238065, | |
| "grad_norm": 4.411726951599121, | |
| "learning_rate": 0.00026130406781304627, | |
| "loss": 2.5868, | |
| "step": 1765 | |
| }, | |
| { | |
| "epoch": 0.7049335391048938, | |
| "grad_norm": 5.401057720184326, | |
| "learning_rate": 0.00026109406194070405, | |
| "loss": 2.4201, | |
| "step": 1770 | |
| }, | |
| { | |
| "epoch": 0.706924876785981, | |
| "grad_norm": 10.872917175292969, | |
| "learning_rate": 0.0002608835726306893, | |
| "loss": 2.6233, | |
| "step": 1775 | |
| }, | |
| { | |
| "epoch": 0.7089162144670682, | |
| "grad_norm": 5.127570629119873, | |
| "learning_rate": 0.0002606726007989687, | |
| "loss": 2.4135, | |
| "step": 1780 | |
| }, | |
| { | |
| "epoch": 0.7109075521481555, | |
| "grad_norm": 4.496290683746338, | |
| "learning_rate": 0.0002604611473636088, | |
| "loss": 2.6121, | |
| "step": 1785 | |
| }, | |
| { | |
| "epoch": 0.7128988898292428, | |
| "grad_norm": 5.2560715675354, | |
| "learning_rate": 0.00026024921324477166, | |
| "loss": 2.5794, | |
| "step": 1790 | |
| }, | |
| { | |
| "epoch": 0.71489022751033, | |
| "grad_norm": 4.1616411209106445, | |
| "learning_rate": 0.00026003679936471136, | |
| "loss": 2.4883, | |
| "step": 1795 | |
| }, | |
| { | |
| "epoch": 0.7168815651914173, | |
| "grad_norm": 4.719475269317627, | |
| "learning_rate": 0.0002598239066477695, | |
| "loss": 2.5903, | |
| "step": 1800 | |
| }, | |
| { | |
| "epoch": 0.7188729028725046, | |
| "grad_norm": 7.156427383422852, | |
| "learning_rate": 0.00025961053602037144, | |
| "loss": 2.5673, | |
| "step": 1805 | |
| }, | |
| { | |
| "epoch": 0.7208642405535919, | |
| "grad_norm": 6.929321765899658, | |
| "learning_rate": 0.00025939668841102237, | |
| "loss": 2.521, | |
| "step": 1810 | |
| }, | |
| { | |
| "epoch": 0.7228555782346792, | |
| "grad_norm": 6.155335903167725, | |
| "learning_rate": 0.0002591823647503029, | |
| "loss": 2.5703, | |
| "step": 1815 | |
| }, | |
| { | |
| "epoch": 0.7248469159157664, | |
| "grad_norm": 5.760573863983154, | |
| "learning_rate": 0.00025896756597086526, | |
| "loss": 2.6104, | |
| "step": 1820 | |
| }, | |
| { | |
| "epoch": 0.7268382535968537, | |
| "grad_norm": 6.244937419891357, | |
| "learning_rate": 0.00025875229300742947, | |
| "loss": 2.6097, | |
| "step": 1825 | |
| }, | |
| { | |
| "epoch": 0.728829591277941, | |
| "grad_norm": 5.067662239074707, | |
| "learning_rate": 0.00025853654679677867, | |
| "loss": 2.556, | |
| "step": 1830 | |
| }, | |
| { | |
| "epoch": 0.7308209289590283, | |
| "grad_norm": 5.625128269195557, | |
| "learning_rate": 0.0002583203282777555, | |
| "loss": 2.7103, | |
| "step": 1835 | |
| }, | |
| { | |
| "epoch": 0.7328122666401155, | |
| "grad_norm": 5.45819616317749, | |
| "learning_rate": 0.0002581036383912581, | |
| "loss": 2.6172, | |
| "step": 1840 | |
| }, | |
| { | |
| "epoch": 0.7348036043212027, | |
| "grad_norm": 4.656463146209717, | |
| "learning_rate": 0.0002578864780802355, | |
| "loss": 2.6565, | |
| "step": 1845 | |
| }, | |
| { | |
| "epoch": 0.73679494200229, | |
| "grad_norm": 5.9828715324401855, | |
| "learning_rate": 0.000257668848289684, | |
| "loss": 2.5157, | |
| "step": 1850 | |
| }, | |
| { | |
| "epoch": 0.7387862796833773, | |
| "grad_norm": 5.619359970092773, | |
| "learning_rate": 0.00025745074996664293, | |
| "loss": 2.4723, | |
| "step": 1855 | |
| }, | |
| { | |
| "epoch": 0.7407776173644646, | |
| "grad_norm": 5.359917163848877, | |
| "learning_rate": 0.0002572321840601904, | |
| "loss": 2.5944, | |
| "step": 1860 | |
| }, | |
| { | |
| "epoch": 0.7427689550455518, | |
| "grad_norm": 4.9695353507995605, | |
| "learning_rate": 0.0002570131515214393, | |
| "loss": 2.5379, | |
| "step": 1865 | |
| }, | |
| { | |
| "epoch": 0.7447602927266391, | |
| "grad_norm": 6.0350341796875, | |
| "learning_rate": 0.000256793653303533, | |
| "loss": 2.5925, | |
| "step": 1870 | |
| }, | |
| { | |
| "epoch": 0.7467516304077264, | |
| "grad_norm": 5.810332298278809, | |
| "learning_rate": 0.0002565736903616416, | |
| "loss": 2.681, | |
| "step": 1875 | |
| }, | |
| { | |
| "epoch": 0.7487429680888137, | |
| "grad_norm": 4.983578205108643, | |
| "learning_rate": 0.0002563532636529573, | |
| "loss": 2.6438, | |
| "step": 1880 | |
| }, | |
| { | |
| "epoch": 0.750734305769901, | |
| "grad_norm": 4.9191131591796875, | |
| "learning_rate": 0.0002561323741366904, | |
| "loss": 2.6089, | |
| "step": 1885 | |
| }, | |
| { | |
| "epoch": 0.7527256434509882, | |
| "grad_norm": 6.884909152984619, | |
| "learning_rate": 0.0002559110227740652, | |
| "loss": 2.6537, | |
| "step": 1890 | |
| }, | |
| { | |
| "epoch": 0.7547169811320755, | |
| "grad_norm": 5.462011814117432, | |
| "learning_rate": 0.0002556892105283158, | |
| "loss": 2.4471, | |
| "step": 1895 | |
| }, | |
| { | |
| "epoch": 0.7567083188131627, | |
| "grad_norm": 5.4949822425842285, | |
| "learning_rate": 0.0002554669383646819, | |
| "loss": 2.7252, | |
| "step": 1900 | |
| }, | |
| { | |
| "epoch": 0.75869965649425, | |
| "grad_norm": 4.802751064300537, | |
| "learning_rate": 0.00025524420725040457, | |
| "loss": 2.4402, | |
| "step": 1905 | |
| }, | |
| { | |
| "epoch": 0.7606909941753373, | |
| "grad_norm": 7.37558126449585, | |
| "learning_rate": 0.000255021018154722, | |
| "loss": 2.7359, | |
| "step": 1910 | |
| }, | |
| { | |
| "epoch": 0.7626823318564245, | |
| "grad_norm": 6.215965747833252, | |
| "learning_rate": 0.00025479737204886537, | |
| "loss": 2.6348, | |
| "step": 1915 | |
| }, | |
| { | |
| "epoch": 0.7646736695375118, | |
| "grad_norm": 5.106846809387207, | |
| "learning_rate": 0.00025457326990605466, | |
| "loss": 2.6013, | |
| "step": 1920 | |
| }, | |
| { | |
| "epoch": 0.7666650072185991, | |
| "grad_norm": 6.881631374359131, | |
| "learning_rate": 0.0002543487127014942, | |
| "loss": 2.6093, | |
| "step": 1925 | |
| }, | |
| { | |
| "epoch": 0.7686563448996864, | |
| "grad_norm": 6.443165302276611, | |
| "learning_rate": 0.0002541237014123686, | |
| "loss": 2.6147, | |
| "step": 1930 | |
| }, | |
| { | |
| "epoch": 0.7706476825807737, | |
| "grad_norm": 6.010848522186279, | |
| "learning_rate": 0.00025389823701783853, | |
| "loss": 2.597, | |
| "step": 1935 | |
| }, | |
| { | |
| "epoch": 0.7726390202618609, | |
| "grad_norm": 5.879451274871826, | |
| "learning_rate": 0.00025367232049903647, | |
| "loss": 2.7176, | |
| "step": 1940 | |
| }, | |
| { | |
| "epoch": 0.7746303579429482, | |
| "grad_norm": 3.990070343017578, | |
| "learning_rate": 0.0002534459528390622, | |
| "loss": 2.5272, | |
| "step": 1945 | |
| }, | |
| { | |
| "epoch": 0.7766216956240355, | |
| "grad_norm": 4.5337629318237305, | |
| "learning_rate": 0.0002532191350229788, | |
| "loss": 2.5446, | |
| "step": 1950 | |
| }, | |
| { | |
| "epoch": 0.7786130333051228, | |
| "grad_norm": 6.173877716064453, | |
| "learning_rate": 0.0002529918680378082, | |
| "loss": 2.5218, | |
| "step": 1955 | |
| }, | |
| { | |
| "epoch": 0.7806043709862099, | |
| "grad_norm": 6.064763069152832, | |
| "learning_rate": 0.0002527641528725269, | |
| "loss": 2.6244, | |
| "step": 1960 | |
| }, | |
| { | |
| "epoch": 0.7825957086672972, | |
| "grad_norm": 5.518800258636475, | |
| "learning_rate": 0.0002525359905180618, | |
| "loss": 2.6561, | |
| "step": 1965 | |
| }, | |
| { | |
| "epoch": 0.7845870463483845, | |
| "grad_norm": 5.687634468078613, | |
| "learning_rate": 0.0002523073819672858, | |
| "loss": 2.6324, | |
| "step": 1970 | |
| }, | |
| { | |
| "epoch": 0.7865783840294718, | |
| "grad_norm": 6.286370754241943, | |
| "learning_rate": 0.00025207832821501336, | |
| "loss": 2.6164, | |
| "step": 1975 | |
| }, | |
| { | |
| "epoch": 0.7885697217105591, | |
| "grad_norm": 7.250809192657471, | |
| "learning_rate": 0.0002518488302579963, | |
| "loss": 2.6098, | |
| "step": 1980 | |
| }, | |
| { | |
| "epoch": 0.7905610593916463, | |
| "grad_norm": 5.801024436950684, | |
| "learning_rate": 0.0002516188890949196, | |
| "loss": 2.505, | |
| "step": 1985 | |
| }, | |
| { | |
| "epoch": 0.7925523970727336, | |
| "grad_norm": 7.584057807922363, | |
| "learning_rate": 0.00025138850572639665, | |
| "loss": 2.5602, | |
| "step": 1990 | |
| }, | |
| { | |
| "epoch": 0.7945437347538209, | |
| "grad_norm": 11.833077430725098, | |
| "learning_rate": 0.0002511576811549654, | |
| "loss": 2.4686, | |
| "step": 1995 | |
| }, | |
| { | |
| "epoch": 0.7965350724349082, | |
| "grad_norm": 6.000141620635986, | |
| "learning_rate": 0.00025092641638508353, | |
| "loss": 2.4901, | |
| "step": 2000 | |
| }, | |
| { | |
| "epoch": 0.7985264101159955, | |
| "grad_norm": 6.463593006134033, | |
| "learning_rate": 0.0002506947124231244, | |
| "loss": 2.5631, | |
| "step": 2005 | |
| }, | |
| { | |
| "epoch": 0.8005177477970827, | |
| "grad_norm": 6.079593658447266, | |
| "learning_rate": 0.0002504625702773725, | |
| "loss": 2.6703, | |
| "step": 2010 | |
| }, | |
| { | |
| "epoch": 0.80250908547817, | |
| "grad_norm": 6.521668910980225, | |
| "learning_rate": 0.0002502299909580193, | |
| "loss": 2.4696, | |
| "step": 2015 | |
| }, | |
| { | |
| "epoch": 0.8045004231592572, | |
| "grad_norm": 5.687950611114502, | |
| "learning_rate": 0.0002499969754771584, | |
| "loss": 2.7125, | |
| "step": 2020 | |
| }, | |
| { | |
| "epoch": 0.8064917608403445, | |
| "grad_norm": 4.566808223724365, | |
| "learning_rate": 0.00024976352484878164, | |
| "loss": 2.7074, | |
| "step": 2025 | |
| }, | |
| { | |
| "epoch": 0.8084830985214317, | |
| "grad_norm": 5.849015712738037, | |
| "learning_rate": 0.0002495296400887743, | |
| "loss": 2.7419, | |
| "step": 2030 | |
| }, | |
| { | |
| "epoch": 0.810474436202519, | |
| "grad_norm": 5.121682167053223, | |
| "learning_rate": 0.0002492953222149109, | |
| "loss": 2.5039, | |
| "step": 2035 | |
| }, | |
| { | |
| "epoch": 0.8124657738836063, | |
| "grad_norm": 5.572865009307861, | |
| "learning_rate": 0.0002490605722468507, | |
| "loss": 2.6298, | |
| "step": 2040 | |
| }, | |
| { | |
| "epoch": 0.8144571115646936, | |
| "grad_norm": 5.498860836029053, | |
| "learning_rate": 0.00024882539120613323, | |
| "loss": 2.5468, | |
| "step": 2045 | |
| }, | |
| { | |
| "epoch": 0.8164484492457809, | |
| "grad_norm": 5.025686264038086, | |
| "learning_rate": 0.0002485897801161739, | |
| "loss": 2.5345, | |
| "step": 2050 | |
| }, | |
| { | |
| "epoch": 0.8184397869268681, | |
| "grad_norm": 4.758937835693359, | |
| "learning_rate": 0.00024835374000225954, | |
| "loss": 2.5416, | |
| "step": 2055 | |
| }, | |
| { | |
| "epoch": 0.8204311246079554, | |
| "grad_norm": 4.406644821166992, | |
| "learning_rate": 0.00024811727189154396, | |
| "loss": 2.5523, | |
| "step": 2060 | |
| }, | |
| { | |
| "epoch": 0.8224224622890427, | |
| "grad_norm": 7.341763019561768, | |
| "learning_rate": 0.0002478803768130433, | |
| "loss": 2.4763, | |
| "step": 2065 | |
| }, | |
| { | |
| "epoch": 0.82441379997013, | |
| "grad_norm": 5.85587739944458, | |
| "learning_rate": 0.00024764305579763184, | |
| "loss": 2.5462, | |
| "step": 2070 | |
| }, | |
| { | |
| "epoch": 0.8264051376512173, | |
| "grad_norm": 4.7058796882629395, | |
| "learning_rate": 0.00024740530987803726, | |
| "loss": 2.6977, | |
| "step": 2075 | |
| }, | |
| { | |
| "epoch": 0.8283964753323044, | |
| "grad_norm": 6.2724995613098145, | |
| "learning_rate": 0.0002471671400888364, | |
| "loss": 2.5808, | |
| "step": 2080 | |
| }, | |
| { | |
| "epoch": 0.8303878130133917, | |
| "grad_norm": 6.3840179443359375, | |
| "learning_rate": 0.0002469285474664505, | |
| "loss": 2.4861, | |
| "step": 2085 | |
| }, | |
| { | |
| "epoch": 0.832379150694479, | |
| "grad_norm": 6.859890937805176, | |
| "learning_rate": 0.00024668953304914077, | |
| "loss": 2.6549, | |
| "step": 2090 | |
| }, | |
| { | |
| "epoch": 0.8343704883755663, | |
| "grad_norm": 5.382211685180664, | |
| "learning_rate": 0.000246450097877004, | |
| "loss": 2.6052, | |
| "step": 2095 | |
| }, | |
| { | |
| "epoch": 0.8363618260566535, | |
| "grad_norm": 6.212277412414551, | |
| "learning_rate": 0.000246210242991968, | |
| "loss": 2.5116, | |
| "step": 2100 | |
| }, | |
| { | |
| "epoch": 0.8383531637377408, | |
| "grad_norm": 5.71286678314209, | |
| "learning_rate": 0.0002459699694377868, | |
| "loss": 2.4863, | |
| "step": 2105 | |
| }, | |
| { | |
| "epoch": 0.8403445014188281, | |
| "grad_norm": 4.8305439949035645, | |
| "learning_rate": 0.0002457292782600364, | |
| "loss": 2.7366, | |
| "step": 2110 | |
| }, | |
| { | |
| "epoch": 0.8423358390999154, | |
| "grad_norm": 5.540607452392578, | |
| "learning_rate": 0.0002454881705061103, | |
| "loss": 2.4626, | |
| "step": 2115 | |
| }, | |
| { | |
| "epoch": 0.8443271767810027, | |
| "grad_norm": 4.778063774108887, | |
| "learning_rate": 0.0002452466472252145, | |
| "loss": 2.5505, | |
| "step": 2120 | |
| }, | |
| { | |
| "epoch": 0.8463185144620899, | |
| "grad_norm": 6.163254261016846, | |
| "learning_rate": 0.00024500470946836345, | |
| "loss": 2.5981, | |
| "step": 2125 | |
| }, | |
| { | |
| "epoch": 0.8483098521431772, | |
| "grad_norm": 5.865226745605469, | |
| "learning_rate": 0.00024476235828837503, | |
| "loss": 2.6405, | |
| "step": 2130 | |
| }, | |
| { | |
| "epoch": 0.8503011898242645, | |
| "grad_norm": 5.853811740875244, | |
| "learning_rate": 0.0002445195947398664, | |
| "loss": 2.4575, | |
| "step": 2135 | |
| }, | |
| { | |
| "epoch": 0.8522925275053517, | |
| "grad_norm": 4.554398059844971, | |
| "learning_rate": 0.000244276419879249, | |
| "loss": 2.6075, | |
| "step": 2140 | |
| }, | |
| { | |
| "epoch": 0.854283865186439, | |
| "grad_norm": 5.9426493644714355, | |
| "learning_rate": 0.00024403283476472415, | |
| "loss": 2.6052, | |
| "step": 2145 | |
| }, | |
| { | |
| "epoch": 0.8562752028675262, | |
| "grad_norm": 4.86089563369751, | |
| "learning_rate": 0.00024378884045627854, | |
| "loss": 2.6822, | |
| "step": 2150 | |
| }, | |
| { | |
| "epoch": 0.8582665405486135, | |
| "grad_norm": 5.2081780433654785, | |
| "learning_rate": 0.00024354443801567942, | |
| "loss": 2.459, | |
| "step": 2155 | |
| }, | |
| { | |
| "epoch": 0.8602578782297008, | |
| "grad_norm": 4.458292484283447, | |
| "learning_rate": 0.00024329962850647016, | |
| "loss": 2.476, | |
| "step": 2160 | |
| }, | |
| { | |
| "epoch": 0.8622492159107881, | |
| "grad_norm": 5.3445539474487305, | |
| "learning_rate": 0.0002430544129939654, | |
| "loss": 2.5372, | |
| "step": 2165 | |
| }, | |
| { | |
| "epoch": 0.8642405535918753, | |
| "grad_norm": 5.6220855712890625, | |
| "learning_rate": 0.0002428087925452467, | |
| "loss": 2.688, | |
| "step": 2170 | |
| }, | |
| { | |
| "epoch": 0.8662318912729626, | |
| "grad_norm": 5.187811851501465, | |
| "learning_rate": 0.00024256276822915768, | |
| "loss": 2.4423, | |
| "step": 2175 | |
| }, | |
| { | |
| "epoch": 0.8682232289540499, | |
| "grad_norm": 8.015525817871094, | |
| "learning_rate": 0.00024231634111629934, | |
| "loss": 2.5889, | |
| "step": 2180 | |
| }, | |
| { | |
| "epoch": 0.8702145666351372, | |
| "grad_norm": 6.496338844299316, | |
| "learning_rate": 0.00024206951227902563, | |
| "loss": 2.5738, | |
| "step": 2185 | |
| }, | |
| { | |
| "epoch": 0.8722059043162245, | |
| "grad_norm": 4.662912368774414, | |
| "learning_rate": 0.0002418222827914386, | |
| "loss": 2.7525, | |
| "step": 2190 | |
| }, | |
| { | |
| "epoch": 0.8741972419973117, | |
| "grad_norm": 6.552118301391602, | |
| "learning_rate": 0.0002415746537293837, | |
| "loss": 2.5813, | |
| "step": 2195 | |
| }, | |
| { | |
| "epoch": 0.8761885796783989, | |
| "grad_norm": 5.152313709259033, | |
| "learning_rate": 0.00024132662617044532, | |
| "loss": 2.5728, | |
| "step": 2200 | |
| }, | |
| { | |
| "epoch": 0.8781799173594862, | |
| "grad_norm": 4.024521827697754, | |
| "learning_rate": 0.00024107820119394175, | |
| "loss": 2.5748, | |
| "step": 2205 | |
| }, | |
| { | |
| "epoch": 0.8801712550405735, | |
| "grad_norm": 3.983680248260498, | |
| "learning_rate": 0.00024082937988092096, | |
| "loss": 2.5401, | |
| "step": 2210 | |
| }, | |
| { | |
| "epoch": 0.8821625927216608, | |
| "grad_norm": 5.457140922546387, | |
| "learning_rate": 0.0002405801633141553, | |
| "loss": 2.5143, | |
| "step": 2215 | |
| }, | |
| { | |
| "epoch": 0.884153930402748, | |
| "grad_norm": 4.946249008178711, | |
| "learning_rate": 0.00024033055257813732, | |
| "loss": 2.6762, | |
| "step": 2220 | |
| }, | |
| { | |
| "epoch": 0.8861452680838353, | |
| "grad_norm": 4.660250663757324, | |
| "learning_rate": 0.0002400805487590749, | |
| "loss": 2.5675, | |
| "step": 2225 | |
| }, | |
| { | |
| "epoch": 0.8881366057649226, | |
| "grad_norm": 7.786635398864746, | |
| "learning_rate": 0.00023983015294488623, | |
| "loss": 2.5946, | |
| "step": 2230 | |
| }, | |
| { | |
| "epoch": 0.8901279434460099, | |
| "grad_norm": 5.1657395362854, | |
| "learning_rate": 0.00023957936622519542, | |
| "loss": 2.6189, | |
| "step": 2235 | |
| }, | |
| { | |
| "epoch": 0.8921192811270972, | |
| "grad_norm": 5.05390739440918, | |
| "learning_rate": 0.00023932818969132777, | |
| "loss": 2.5522, | |
| "step": 2240 | |
| }, | |
| { | |
| "epoch": 0.8941106188081844, | |
| "grad_norm": 7.363577365875244, | |
| "learning_rate": 0.00023907662443630465, | |
| "loss": 2.589, | |
| "step": 2245 | |
| }, | |
| { | |
| "epoch": 0.8961019564892717, | |
| "grad_norm": 6.585803031921387, | |
| "learning_rate": 0.00023882467155483927, | |
| "loss": 2.5239, | |
| "step": 2250 | |
| }, | |
| { | |
| "epoch": 0.898093294170359, | |
| "grad_norm": 6.26939582824707, | |
| "learning_rate": 0.0002385723321433314, | |
| "loss": 2.5722, | |
| "step": 2255 | |
| }, | |
| { | |
| "epoch": 0.9000846318514462, | |
| "grad_norm": 5.3172526359558105, | |
| "learning_rate": 0.00023831960729986292, | |
| "loss": 2.5552, | |
| "step": 2260 | |
| }, | |
| { | |
| "epoch": 0.9020759695325334, | |
| "grad_norm": 4.980822563171387, | |
| "learning_rate": 0.00023806649812419306, | |
| "loss": 2.6506, | |
| "step": 2265 | |
| }, | |
| { | |
| "epoch": 0.9040673072136207, | |
| "grad_norm": 5.251991271972656, | |
| "learning_rate": 0.00023781300571775333, | |
| "loss": 2.613, | |
| "step": 2270 | |
| }, | |
| { | |
| "epoch": 0.906058644894708, | |
| "grad_norm": 4.616904258728027, | |
| "learning_rate": 0.00023755913118364306, | |
| "loss": 2.6393, | |
| "step": 2275 | |
| }, | |
| { | |
| "epoch": 0.9080499825757953, | |
| "grad_norm": 5.337674617767334, | |
| "learning_rate": 0.00023730487562662438, | |
| "loss": 2.4195, | |
| "step": 2280 | |
| }, | |
| { | |
| "epoch": 0.9100413202568826, | |
| "grad_norm": 6.516839504241943, | |
| "learning_rate": 0.00023705024015311751, | |
| "loss": 2.5637, | |
| "step": 2285 | |
| }, | |
| { | |
| "epoch": 0.9120326579379698, | |
| "grad_norm": 5.19506311416626, | |
| "learning_rate": 0.00023679522587119586, | |
| "loss": 2.4586, | |
| "step": 2290 | |
| }, | |
| { | |
| "epoch": 0.9140239956190571, | |
| "grad_norm": 4.948032855987549, | |
| "learning_rate": 0.00023653983389058128, | |
| "loss": 2.5845, | |
| "step": 2295 | |
| }, | |
| { | |
| "epoch": 0.9160153333001444, | |
| "grad_norm": 6.443713665008545, | |
| "learning_rate": 0.0002362840653226394, | |
| "loss": 2.664, | |
| "step": 2300 | |
| }, | |
| { | |
| "epoch": 0.9180066709812317, | |
| "grad_norm": 4.868452548980713, | |
| "learning_rate": 0.00023602792128037426, | |
| "loss": 2.4598, | |
| "step": 2305 | |
| }, | |
| { | |
| "epoch": 0.919998008662319, | |
| "grad_norm": 5.261866092681885, | |
| "learning_rate": 0.00023577140287842408, | |
| "loss": 2.5356, | |
| "step": 2310 | |
| }, | |
| { | |
| "epoch": 0.9219893463434062, | |
| "grad_norm": 6.211141586303711, | |
| "learning_rate": 0.00023551451123305614, | |
| "loss": 2.5352, | |
| "step": 2315 | |
| }, | |
| { | |
| "epoch": 0.9239806840244934, | |
| "grad_norm": 4.445993423461914, | |
| "learning_rate": 0.0002352572474621618, | |
| "loss": 2.5963, | |
| "step": 2320 | |
| }, | |
| { | |
| "epoch": 0.9259720217055807, | |
| "grad_norm": 4.683290481567383, | |
| "learning_rate": 0.00023499961268525188, | |
| "loss": 2.5556, | |
| "step": 2325 | |
| }, | |
| { | |
| "epoch": 0.927963359386668, | |
| "grad_norm": 5.033156871795654, | |
| "learning_rate": 0.0002347416080234515, | |
| "loss": 2.6199, | |
| "step": 2330 | |
| }, | |
| { | |
| "epoch": 0.9299546970677552, | |
| "grad_norm": 5.339117527008057, | |
| "learning_rate": 0.00023448323459949566, | |
| "loss": 2.4189, | |
| "step": 2335 | |
| }, | |
| { | |
| "epoch": 0.9319460347488425, | |
| "grad_norm": 7.870457649230957, | |
| "learning_rate": 0.0002342244935377238, | |
| "loss": 2.6203, | |
| "step": 2340 | |
| }, | |
| { | |
| "epoch": 0.9339373724299298, | |
| "grad_norm": 9.246062278747559, | |
| "learning_rate": 0.00023396538596407525, | |
| "loss": 2.5481, | |
| "step": 2345 | |
| }, | |
| { | |
| "epoch": 0.9359287101110171, | |
| "grad_norm": 5.908375263214111, | |
| "learning_rate": 0.00023370591300608438, | |
| "loss": 2.5498, | |
| "step": 2350 | |
| }, | |
| { | |
| "epoch": 0.9379200477921044, | |
| "grad_norm": 6.05574893951416, | |
| "learning_rate": 0.0002334460757928754, | |
| "loss": 2.5629, | |
| "step": 2355 | |
| }, | |
| { | |
| "epoch": 0.9399113854731916, | |
| "grad_norm": 5.820539951324463, | |
| "learning_rate": 0.00023318587545515768, | |
| "loss": 2.525, | |
| "step": 2360 | |
| }, | |
| { | |
| "epoch": 0.9419027231542789, | |
| "grad_norm": 7.062961101531982, | |
| "learning_rate": 0.00023292531312522083, | |
| "loss": 2.7078, | |
| "step": 2365 | |
| }, | |
| { | |
| "epoch": 0.9438940608353662, | |
| "grad_norm": 6.001385688781738, | |
| "learning_rate": 0.00023266438993692964, | |
| "loss": 2.6176, | |
| "step": 2370 | |
| }, | |
| { | |
| "epoch": 0.9458853985164535, | |
| "grad_norm": 5.372692584991455, | |
| "learning_rate": 0.0002324031070257192, | |
| "loss": 2.6501, | |
| "step": 2375 | |
| }, | |
| { | |
| "epoch": 0.9478767361975406, | |
| "grad_norm": 5.991538047790527, | |
| "learning_rate": 0.00023214146552859, | |
| "loss": 2.436, | |
| "step": 2380 | |
| }, | |
| { | |
| "epoch": 0.9498680738786279, | |
| "grad_norm": 4.772813320159912, | |
| "learning_rate": 0.0002318794665841029, | |
| "loss": 2.5451, | |
| "step": 2385 | |
| }, | |
| { | |
| "epoch": 0.9518594115597152, | |
| "grad_norm": 5.644576072692871, | |
| "learning_rate": 0.00023161711133237438, | |
| "loss": 2.5421, | |
| "step": 2390 | |
| }, | |
| { | |
| "epoch": 0.9538507492408025, | |
| "grad_norm": 6.208878517150879, | |
| "learning_rate": 0.00023135440091507124, | |
| "loss": 2.6161, | |
| "step": 2395 | |
| }, | |
| { | |
| "epoch": 0.9558420869218898, | |
| "grad_norm": 4.382922649383545, | |
| "learning_rate": 0.00023109133647540602, | |
| "loss": 2.5102, | |
| "step": 2400 | |
| }, | |
| { | |
| "epoch": 0.957833424602977, | |
| "grad_norm": 5.279038906097412, | |
| "learning_rate": 0.00023082791915813165, | |
| "loss": 2.7259, | |
| "step": 2405 | |
| }, | |
| { | |
| "epoch": 0.9598247622840643, | |
| "grad_norm": 4.535640716552734, | |
| "learning_rate": 0.00023056415010953667, | |
| "loss": 2.6417, | |
| "step": 2410 | |
| }, | |
| { | |
| "epoch": 0.9618160999651516, | |
| "grad_norm": 8.062291145324707, | |
| "learning_rate": 0.0002303000304774403, | |
| "loss": 2.439, | |
| "step": 2415 | |
| }, | |
| { | |
| "epoch": 0.9638074376462389, | |
| "grad_norm": 5.5467305183410645, | |
| "learning_rate": 0.00023003556141118738, | |
| "loss": 2.6322, | |
| "step": 2420 | |
| }, | |
| { | |
| "epoch": 0.9657987753273262, | |
| "grad_norm": 4.954715251922607, | |
| "learning_rate": 0.00022977074406164318, | |
| "loss": 2.5879, | |
| "step": 2425 | |
| }, | |
| { | |
| "epoch": 0.9677901130084134, | |
| "grad_norm": 4.517293930053711, | |
| "learning_rate": 0.00022950557958118878, | |
| "loss": 2.4739, | |
| "step": 2430 | |
| }, | |
| { | |
| "epoch": 0.9697814506895007, | |
| "grad_norm": 7.081440448760986, | |
| "learning_rate": 0.00022924006912371567, | |
| "loss": 2.6461, | |
| "step": 2435 | |
| }, | |
| { | |
| "epoch": 0.9717727883705879, | |
| "grad_norm": 6.215746879577637, | |
| "learning_rate": 0.00022897421384462092, | |
| "loss": 2.4895, | |
| "step": 2440 | |
| }, | |
| { | |
| "epoch": 0.9737641260516752, | |
| "grad_norm": 6.380356788635254, | |
| "learning_rate": 0.0002287080149008022, | |
| "loss": 2.6011, | |
| "step": 2445 | |
| }, | |
| { | |
| "epoch": 0.9757554637327625, | |
| "grad_norm": 5.506686687469482, | |
| "learning_rate": 0.0002284414734506527, | |
| "loss": 2.5588, | |
| "step": 2450 | |
| }, | |
| { | |
| "epoch": 0.9777468014138497, | |
| "grad_norm": 5.08485746383667, | |
| "learning_rate": 0.00022817459065405593, | |
| "loss": 2.5556, | |
| "step": 2455 | |
| }, | |
| { | |
| "epoch": 0.979738139094937, | |
| "grad_norm": 4.853709697723389, | |
| "learning_rate": 0.00022790736767238083, | |
| "loss": 2.5654, | |
| "step": 2460 | |
| }, | |
| { | |
| "epoch": 0.9817294767760243, | |
| "grad_norm": 4.686211109161377, | |
| "learning_rate": 0.0002276398056684769, | |
| "loss": 2.4522, | |
| "step": 2465 | |
| }, | |
| { | |
| "epoch": 0.9837208144571116, | |
| "grad_norm": 4.961901664733887, | |
| "learning_rate": 0.0002273719058066686, | |
| "loss": 2.5592, | |
| "step": 2470 | |
| }, | |
| { | |
| "epoch": 0.9857121521381988, | |
| "grad_norm": 5.117202281951904, | |
| "learning_rate": 0.00022710366925275086, | |
| "loss": 2.4735, | |
| "step": 2475 | |
| }, | |
| { | |
| "epoch": 0.9877034898192861, | |
| "grad_norm": 5.865626811981201, | |
| "learning_rate": 0.00022683509717398367, | |
| "loss": 2.5008, | |
| "step": 2480 | |
| }, | |
| { | |
| "epoch": 0.9896948275003734, | |
| "grad_norm": 6.076361179351807, | |
| "learning_rate": 0.0002265661907390871, | |
| "loss": 2.5271, | |
| "step": 2485 | |
| }, | |
| { | |
| "epoch": 0.9916861651814607, | |
| "grad_norm": 6.400584697723389, | |
| "learning_rate": 0.00022629695111823624, | |
| "loss": 2.574, | |
| "step": 2490 | |
| }, | |
| { | |
| "epoch": 0.993677502862548, | |
| "grad_norm": 5.469146728515625, | |
| "learning_rate": 0.000226027379483056, | |
| "loss": 2.5452, | |
| "step": 2495 | |
| }, | |
| { | |
| "epoch": 0.9956688405436351, | |
| "grad_norm": 5.671192646026611, | |
| "learning_rate": 0.0002257574770066162, | |
| "loss": 2.5663, | |
| "step": 2500 | |
| }, | |
| { | |
| "epoch": 0.9976601782247224, | |
| "grad_norm": 5.9548726081848145, | |
| "learning_rate": 0.00022548724486342624, | |
| "loss": 2.4705, | |
| "step": 2505 | |
| }, | |
| { | |
| "epoch": 0.9996515159058097, | |
| "grad_norm": 5.226431846618652, | |
| "learning_rate": 0.00022521668422943017, | |
| "loss": 2.4196, | |
| "step": 2510 | |
| }, | |
| { | |
| "epoch": 1.001642853586897, | |
| "grad_norm": 4.7078166007995605, | |
| "learning_rate": 0.00022494579628200146, | |
| "loss": 2.3862, | |
| "step": 2515 | |
| }, | |
| { | |
| "epoch": 1.0036341912679843, | |
| "grad_norm": 8.271881103515625, | |
| "learning_rate": 0.00022467458219993793, | |
| "loss": 2.2516, | |
| "step": 2520 | |
| }, | |
| { | |
| "epoch": 1.0056255289490716, | |
| "grad_norm": 6.609498023986816, | |
| "learning_rate": 0.00022440304316345664, | |
| "loss": 2.0925, | |
| "step": 2525 | |
| }, | |
| { | |
| "epoch": 1.0076168666301588, | |
| "grad_norm": 7.699496269226074, | |
| "learning_rate": 0.00022413118035418867, | |
| "loss": 2.2226, | |
| "step": 2530 | |
| }, | |
| { | |
| "epoch": 1.009608204311246, | |
| "grad_norm": 6.391448974609375, | |
| "learning_rate": 0.00022385899495517411, | |
| "loss": 2.1925, | |
| "step": 2535 | |
| }, | |
| { | |
| "epoch": 1.0115995419923334, | |
| "grad_norm": 5.20413064956665, | |
| "learning_rate": 0.00022358648815085674, | |
| "loss": 2.0784, | |
| "step": 2540 | |
| }, | |
| { | |
| "epoch": 1.0135908796734205, | |
| "grad_norm": 5.514545917510986, | |
| "learning_rate": 0.00022331366112707905, | |
| "loss": 2.1903, | |
| "step": 2545 | |
| }, | |
| { | |
| "epoch": 1.015582217354508, | |
| "grad_norm": 6.206656455993652, | |
| "learning_rate": 0.00022304051507107691, | |
| "loss": 2.0112, | |
| "step": 2550 | |
| }, | |
| { | |
| "epoch": 1.017573555035595, | |
| "grad_norm": 5.609284400939941, | |
| "learning_rate": 0.00022276705117147463, | |
| "loss": 2.1828, | |
| "step": 2555 | |
| }, | |
| { | |
| "epoch": 1.0195648927166825, | |
| "grad_norm": 6.571842193603516, | |
| "learning_rate": 0.0002224932706182795, | |
| "loss": 2.2427, | |
| "step": 2560 | |
| }, | |
| { | |
| "epoch": 1.0215562303977697, | |
| "grad_norm": 6.576747894287109, | |
| "learning_rate": 0.00022221917460287684, | |
| "loss": 2.0657, | |
| "step": 2565 | |
| }, | |
| { | |
| "epoch": 1.023547568078857, | |
| "grad_norm": 6.2041754722595215, | |
| "learning_rate": 0.0002219447643180248, | |
| "loss": 2.1984, | |
| "step": 2570 | |
| }, | |
| { | |
| "epoch": 1.0255389057599442, | |
| "grad_norm": 6.46384334564209, | |
| "learning_rate": 0.00022167004095784889, | |
| "loss": 2.2014, | |
| "step": 2575 | |
| }, | |
| { | |
| "epoch": 1.0275302434410316, | |
| "grad_norm": 5.901404857635498, | |
| "learning_rate": 0.00022139500571783727, | |
| "loss": 2.1411, | |
| "step": 2580 | |
| }, | |
| { | |
| "epoch": 1.0295215811221188, | |
| "grad_norm": 6.320353984832764, | |
| "learning_rate": 0.00022111965979483512, | |
| "loss": 2.1498, | |
| "step": 2585 | |
| }, | |
| { | |
| "epoch": 1.031512918803206, | |
| "grad_norm": 5.76351261138916, | |
| "learning_rate": 0.00022084400438703966, | |
| "loss": 2.1994, | |
| "step": 2590 | |
| }, | |
| { | |
| "epoch": 1.0335042564842933, | |
| "grad_norm": 6.784847736358643, | |
| "learning_rate": 0.00022056804069399473, | |
| "loss": 2.1888, | |
| "step": 2595 | |
| }, | |
| { | |
| "epoch": 1.0354955941653805, | |
| "grad_norm": 5.914264678955078, | |
| "learning_rate": 0.00022029176991658587, | |
| "loss": 2.1845, | |
| "step": 2600 | |
| }, | |
| { | |
| "epoch": 1.037486931846468, | |
| "grad_norm": 5.347561359405518, | |
| "learning_rate": 0.00022001519325703486, | |
| "loss": 2.1797, | |
| "step": 2605 | |
| }, | |
| { | |
| "epoch": 1.039478269527555, | |
| "grad_norm": 7.8450212478637695, | |
| "learning_rate": 0.0002197383119188946, | |
| "loss": 2.2326, | |
| "step": 2610 | |
| }, | |
| { | |
| "epoch": 1.0414696072086425, | |
| "grad_norm": 6.4014105796813965, | |
| "learning_rate": 0.00021946112710704375, | |
| "loss": 2.1578, | |
| "step": 2615 | |
| }, | |
| { | |
| "epoch": 1.0434609448897296, | |
| "grad_norm": 6.188974380493164, | |
| "learning_rate": 0.00021918364002768162, | |
| "loss": 2.163, | |
| "step": 2620 | |
| }, | |
| { | |
| "epoch": 1.045452282570817, | |
| "grad_norm": 7.119470119476318, | |
| "learning_rate": 0.00021890585188832288, | |
| "loss": 2.131, | |
| "step": 2625 | |
| }, | |
| { | |
| "epoch": 1.0474436202519042, | |
| "grad_norm": 6.069831848144531, | |
| "learning_rate": 0.00021862776389779226, | |
| "loss": 2.1385, | |
| "step": 2630 | |
| }, | |
| { | |
| "epoch": 1.0494349579329916, | |
| "grad_norm": 7.496238708496094, | |
| "learning_rate": 0.00021834937726621933, | |
| "loss": 2.1297, | |
| "step": 2635 | |
| }, | |
| { | |
| "epoch": 1.0514262956140787, | |
| "grad_norm": 5.977787017822266, | |
| "learning_rate": 0.0002180706932050333, | |
| "loss": 2.16, | |
| "step": 2640 | |
| }, | |
| { | |
| "epoch": 1.0534176332951661, | |
| "grad_norm": 7.515745162963867, | |
| "learning_rate": 0.0002177917129269576, | |
| "loss": 2.1703, | |
| "step": 2645 | |
| }, | |
| { | |
| "epoch": 1.0554089709762533, | |
| "grad_norm": 6.940252780914307, | |
| "learning_rate": 0.00021751243764600467, | |
| "loss": 2.2871, | |
| "step": 2650 | |
| }, | |
| { | |
| "epoch": 1.0574003086573405, | |
| "grad_norm": 6.161179065704346, | |
| "learning_rate": 0.0002172328685774707, | |
| "loss": 2.1182, | |
| "step": 2655 | |
| }, | |
| { | |
| "epoch": 1.0593916463384279, | |
| "grad_norm": 6.096395969390869, | |
| "learning_rate": 0.0002169530069379304, | |
| "loss": 2.1868, | |
| "step": 2660 | |
| }, | |
| { | |
| "epoch": 1.061382984019515, | |
| "grad_norm": 6.321666717529297, | |
| "learning_rate": 0.00021667285394523153, | |
| "loss": 2.3034, | |
| "step": 2665 | |
| }, | |
| { | |
| "epoch": 1.0633743217006024, | |
| "grad_norm": 5.823657512664795, | |
| "learning_rate": 0.00021639241081848978, | |
| "loss": 2.1462, | |
| "step": 2670 | |
| }, | |
| { | |
| "epoch": 1.0653656593816896, | |
| "grad_norm": 6.120224475860596, | |
| "learning_rate": 0.0002161116787780834, | |
| "loss": 2.2264, | |
| "step": 2675 | |
| }, | |
| { | |
| "epoch": 1.067356997062777, | |
| "grad_norm": 6.145210266113281, | |
| "learning_rate": 0.00021583065904564767, | |
| "loss": 2.1483, | |
| "step": 2680 | |
| }, | |
| { | |
| "epoch": 1.0693483347438641, | |
| "grad_norm": 5.247588157653809, | |
| "learning_rate": 0.00021554935284407017, | |
| "loss": 2.2671, | |
| "step": 2685 | |
| }, | |
| { | |
| "epoch": 1.0713396724249515, | |
| "grad_norm": 5.9891252517700195, | |
| "learning_rate": 0.00021526776139748472, | |
| "loss": 2.0288, | |
| "step": 2690 | |
| }, | |
| { | |
| "epoch": 1.0733310101060387, | |
| "grad_norm": 5.2879204750061035, | |
| "learning_rate": 0.00021498588593126666, | |
| "loss": 2.2455, | |
| "step": 2695 | |
| }, | |
| { | |
| "epoch": 1.075322347787126, | |
| "grad_norm": 5.61940860748291, | |
| "learning_rate": 0.00021470372767202713, | |
| "loss": 2.0936, | |
| "step": 2700 | |
| }, | |
| { | |
| "epoch": 1.0773136854682133, | |
| "grad_norm": 7.891155242919922, | |
| "learning_rate": 0.00021442128784760796, | |
| "loss": 2.2633, | |
| "step": 2705 | |
| }, | |
| { | |
| "epoch": 1.0793050231493004, | |
| "grad_norm": 4.800739765167236, | |
| "learning_rate": 0.00021413856768707617, | |
| "loss": 2.0494, | |
| "step": 2710 | |
| }, | |
| { | |
| "epoch": 1.0812963608303878, | |
| "grad_norm": 6.422128200531006, | |
| "learning_rate": 0.00021385556842071874, | |
| "loss": 2.2881, | |
| "step": 2715 | |
| }, | |
| { | |
| "epoch": 1.083287698511475, | |
| "grad_norm": 7.9622297286987305, | |
| "learning_rate": 0.0002135722912800372, | |
| "loss": 2.2561, | |
| "step": 2720 | |
| }, | |
| { | |
| "epoch": 1.0852790361925624, | |
| "grad_norm": 5.626192092895508, | |
| "learning_rate": 0.00021328873749774225, | |
| "loss": 2.1538, | |
| "step": 2725 | |
| }, | |
| { | |
| "epoch": 1.0872703738736496, | |
| "grad_norm": 6.245817184448242, | |
| "learning_rate": 0.00021300490830774847, | |
| "loss": 2.2026, | |
| "step": 2730 | |
| }, | |
| { | |
| "epoch": 1.089261711554737, | |
| "grad_norm": 7.044617652893066, | |
| "learning_rate": 0.00021272080494516888, | |
| "loss": 2.191, | |
| "step": 2735 | |
| }, | |
| { | |
| "epoch": 1.0912530492358241, | |
| "grad_norm": 5.305728912353516, | |
| "learning_rate": 0.00021243642864630962, | |
| "loss": 2.2054, | |
| "step": 2740 | |
| }, | |
| { | |
| "epoch": 1.0932443869169115, | |
| "grad_norm": 5.111948490142822, | |
| "learning_rate": 0.00021215178064866444, | |
| "loss": 2.08, | |
| "step": 2745 | |
| }, | |
| { | |
| "epoch": 1.0952357245979987, | |
| "grad_norm": 5.789201736450195, | |
| "learning_rate": 0.0002118668621909096, | |
| "loss": 2.1121, | |
| "step": 2750 | |
| }, | |
| { | |
| "epoch": 1.097227062279086, | |
| "grad_norm": 6.3249969482421875, | |
| "learning_rate": 0.00021158167451289814, | |
| "loss": 2.1042, | |
| "step": 2755 | |
| }, | |
| { | |
| "epoch": 1.0992183999601732, | |
| "grad_norm": 5.103941917419434, | |
| "learning_rate": 0.00021129621885565469, | |
| "loss": 2.1663, | |
| "step": 2760 | |
| }, | |
| { | |
| "epoch": 1.1012097376412604, | |
| "grad_norm": 7.262547016143799, | |
| "learning_rate": 0.00021101049646137003, | |
| "loss": 2.1616, | |
| "step": 2765 | |
| }, | |
| { | |
| "epoch": 1.1032010753223478, | |
| "grad_norm": 6.365930080413818, | |
| "learning_rate": 0.00021072450857339563, | |
| "loss": 2.2593, | |
| "step": 2770 | |
| }, | |
| { | |
| "epoch": 1.105192413003435, | |
| "grad_norm": 5.771877765655518, | |
| "learning_rate": 0.00021043825643623842, | |
| "loss": 1.9991, | |
| "step": 2775 | |
| }, | |
| { | |
| "epoch": 1.1071837506845223, | |
| "grad_norm": 5.775332927703857, | |
| "learning_rate": 0.00021015174129555502, | |
| "loss": 2.1432, | |
| "step": 2780 | |
| }, | |
| { | |
| "epoch": 1.1091750883656095, | |
| "grad_norm": 5.588124752044678, | |
| "learning_rate": 0.00020986496439814672, | |
| "loss": 2.1829, | |
| "step": 2785 | |
| }, | |
| { | |
| "epoch": 1.111166426046697, | |
| "grad_norm": 8.466453552246094, | |
| "learning_rate": 0.00020957792699195378, | |
| "loss": 2.2542, | |
| "step": 2790 | |
| }, | |
| { | |
| "epoch": 1.113157763727784, | |
| "grad_norm": 5.901578426361084, | |
| "learning_rate": 0.0002092906303260502, | |
| "loss": 2.0991, | |
| "step": 2795 | |
| }, | |
| { | |
| "epoch": 1.1151491014088715, | |
| "grad_norm": 5.687893867492676, | |
| "learning_rate": 0.000209003075650638, | |
| "loss": 2.1161, | |
| "step": 2800 | |
| }, | |
| { | |
| "epoch": 1.1171404390899586, | |
| "grad_norm": 7.566319465637207, | |
| "learning_rate": 0.00020871526421704207, | |
| "loss": 2.1824, | |
| "step": 2805 | |
| }, | |
| { | |
| "epoch": 1.119131776771046, | |
| "grad_norm": 6.398413181304932, | |
| "learning_rate": 0.00020842719727770466, | |
| "loss": 2.0616, | |
| "step": 2810 | |
| }, | |
| { | |
| "epoch": 1.1211231144521332, | |
| "grad_norm": 5.798287868499756, | |
| "learning_rate": 0.00020813887608617972, | |
| "loss": 2.3593, | |
| "step": 2815 | |
| }, | |
| { | |
| "epoch": 1.1231144521332206, | |
| "grad_norm": 5.282783031463623, | |
| "learning_rate": 0.00020785030189712783, | |
| "loss": 2.1251, | |
| "step": 2820 | |
| }, | |
| { | |
| "epoch": 1.1251057898143078, | |
| "grad_norm": 6.402846813201904, | |
| "learning_rate": 0.00020756147596631024, | |
| "loss": 2.1565, | |
| "step": 2825 | |
| }, | |
| { | |
| "epoch": 1.1270971274953951, | |
| "grad_norm": 6.533949851989746, | |
| "learning_rate": 0.00020727239955058393, | |
| "loss": 2.2081, | |
| "step": 2830 | |
| }, | |
| { | |
| "epoch": 1.1290884651764823, | |
| "grad_norm": 7.3982954025268555, | |
| "learning_rate": 0.00020698307390789572, | |
| "loss": 2.1653, | |
| "step": 2835 | |
| }, | |
| { | |
| "epoch": 1.1310798028575695, | |
| "grad_norm": 5.551629066467285, | |
| "learning_rate": 0.00020669350029727708, | |
| "loss": 2.3108, | |
| "step": 2840 | |
| }, | |
| { | |
| "epoch": 1.1330711405386569, | |
| "grad_norm": 6.518723964691162, | |
| "learning_rate": 0.00020640367997883853, | |
| "loss": 2.1915, | |
| "step": 2845 | |
| }, | |
| { | |
| "epoch": 1.135062478219744, | |
| "grad_norm": 5.792189121246338, | |
| "learning_rate": 0.00020611361421376403, | |
| "loss": 2.1557, | |
| "step": 2850 | |
| }, | |
| { | |
| "epoch": 1.1370538159008314, | |
| "grad_norm": 5.768317699432373, | |
| "learning_rate": 0.00020582330426430583, | |
| "loss": 2.1389, | |
| "step": 2855 | |
| }, | |
| { | |
| "epoch": 1.1390451535819186, | |
| "grad_norm": 7.050993919372559, | |
| "learning_rate": 0.0002055327513937786, | |
| "loss": 2.1938, | |
| "step": 2860 | |
| }, | |
| { | |
| "epoch": 1.141036491263006, | |
| "grad_norm": 6.800126552581787, | |
| "learning_rate": 0.00020524195686655422, | |
| "loss": 2.1704, | |
| "step": 2865 | |
| }, | |
| { | |
| "epoch": 1.1430278289440932, | |
| "grad_norm": 6.082620143890381, | |
| "learning_rate": 0.0002049509219480561, | |
| "loss": 2.1409, | |
| "step": 2870 | |
| }, | |
| { | |
| "epoch": 1.1450191666251806, | |
| "grad_norm": 5.8574018478393555, | |
| "learning_rate": 0.00020465964790475374, | |
| "loss": 2.2212, | |
| "step": 2875 | |
| }, | |
| { | |
| "epoch": 1.1470105043062677, | |
| "grad_norm": 5.6736931800842285, | |
| "learning_rate": 0.00020436813600415734, | |
| "loss": 2.2452, | |
| "step": 2880 | |
| }, | |
| { | |
| "epoch": 1.149001841987355, | |
| "grad_norm": 6.067693710327148, | |
| "learning_rate": 0.00020407638751481196, | |
| "loss": 2.1116, | |
| "step": 2885 | |
| }, | |
| { | |
| "epoch": 1.1509931796684423, | |
| "grad_norm": 6.335114002227783, | |
| "learning_rate": 0.00020378440370629237, | |
| "loss": 2.2473, | |
| "step": 2890 | |
| }, | |
| { | |
| "epoch": 1.1529845173495294, | |
| "grad_norm": 7.521064281463623, | |
| "learning_rate": 0.00020349218584919722, | |
| "loss": 2.0444, | |
| "step": 2895 | |
| }, | |
| { | |
| "epoch": 1.1549758550306168, | |
| "grad_norm": 6.531569004058838, | |
| "learning_rate": 0.0002031997352151438, | |
| "loss": 2.2091, | |
| "step": 2900 | |
| }, | |
| { | |
| "epoch": 1.156967192711704, | |
| "grad_norm": 6.6172027587890625, | |
| "learning_rate": 0.00020290705307676217, | |
| "loss": 2.1326, | |
| "step": 2905 | |
| }, | |
| { | |
| "epoch": 1.1589585303927914, | |
| "grad_norm": 6.893074989318848, | |
| "learning_rate": 0.00020261414070769, | |
| "loss": 2.1845, | |
| "step": 2910 | |
| }, | |
| { | |
| "epoch": 1.1609498680738786, | |
| "grad_norm": 5.909544944763184, | |
| "learning_rate": 0.0002023209993825667, | |
| "loss": 2.2273, | |
| "step": 2915 | |
| }, | |
| { | |
| "epoch": 1.162941205754966, | |
| "grad_norm": 6.50417423248291, | |
| "learning_rate": 0.000202027630377028, | |
| "loss": 2.1177, | |
| "step": 2920 | |
| }, | |
| { | |
| "epoch": 1.1649325434360531, | |
| "grad_norm": 6.459300518035889, | |
| "learning_rate": 0.00020173403496770057, | |
| "loss": 2.2723, | |
| "step": 2925 | |
| }, | |
| { | |
| "epoch": 1.1669238811171405, | |
| "grad_norm": 5.6512451171875, | |
| "learning_rate": 0.00020144021443219604, | |
| "loss": 2.2022, | |
| "step": 2930 | |
| }, | |
| { | |
| "epoch": 1.1689152187982277, | |
| "grad_norm": 6.4242634773254395, | |
| "learning_rate": 0.00020114617004910598, | |
| "loss": 2.1843, | |
| "step": 2935 | |
| }, | |
| { | |
| "epoch": 1.170906556479315, | |
| "grad_norm": 6.003353118896484, | |
| "learning_rate": 0.0002008519030979958, | |
| "loss": 2.2598, | |
| "step": 2940 | |
| }, | |
| { | |
| "epoch": 1.1728978941604022, | |
| "grad_norm": 4.6970133781433105, | |
| "learning_rate": 0.00020055741485939963, | |
| "loss": 2.3102, | |
| "step": 2945 | |
| }, | |
| { | |
| "epoch": 1.1748892318414894, | |
| "grad_norm": 6.059596061706543, | |
| "learning_rate": 0.00020026270661481447, | |
| "loss": 2.2779, | |
| "step": 2950 | |
| }, | |
| { | |
| "epoch": 1.1768805695225768, | |
| "grad_norm": 5.73154878616333, | |
| "learning_rate": 0.0001999677796466947, | |
| "loss": 2.1848, | |
| "step": 2955 | |
| }, | |
| { | |
| "epoch": 1.178871907203664, | |
| "grad_norm": 6.712233543395996, | |
| "learning_rate": 0.00019967263523844656, | |
| "loss": 2.1027, | |
| "step": 2960 | |
| }, | |
| { | |
| "epoch": 1.1808632448847514, | |
| "grad_norm": 7.3542399406433105, | |
| "learning_rate": 0.00019937727467442235, | |
| "loss": 2.254, | |
| "step": 2965 | |
| }, | |
| { | |
| "epoch": 1.1828545825658385, | |
| "grad_norm": 5.018367767333984, | |
| "learning_rate": 0.00019908169923991522, | |
| "loss": 2.1361, | |
| "step": 2970 | |
| }, | |
| { | |
| "epoch": 1.184845920246926, | |
| "grad_norm": 5.981570720672607, | |
| "learning_rate": 0.00019878591022115316, | |
| "loss": 2.1516, | |
| "step": 2975 | |
| }, | |
| { | |
| "epoch": 1.186837257928013, | |
| "grad_norm": 5.398682117462158, | |
| "learning_rate": 0.00019848990890529368, | |
| "loss": 2.2784, | |
| "step": 2980 | |
| }, | |
| { | |
| "epoch": 1.1888285956091005, | |
| "grad_norm": 4.907093048095703, | |
| "learning_rate": 0.00019819369658041818, | |
| "loss": 2.1853, | |
| "step": 2985 | |
| }, | |
| { | |
| "epoch": 1.1908199332901876, | |
| "grad_norm": 5.047986030578613, | |
| "learning_rate": 0.00019789727453552606, | |
| "loss": 2.3024, | |
| "step": 2990 | |
| }, | |
| { | |
| "epoch": 1.192811270971275, | |
| "grad_norm": 5.1892852783203125, | |
| "learning_rate": 0.00019760064406052969, | |
| "loss": 2.1318, | |
| "step": 2995 | |
| }, | |
| { | |
| "epoch": 1.1948026086523622, | |
| "grad_norm": 5.791012763977051, | |
| "learning_rate": 0.0001973038064462481, | |
| "loss": 2.1913, | |
| "step": 3000 | |
| }, | |
| { | |
| "epoch": 1.1967939463334494, | |
| "grad_norm": 7.220500946044922, | |
| "learning_rate": 0.00019700676298440195, | |
| "loss": 2.2053, | |
| "step": 3005 | |
| }, | |
| { | |
| "epoch": 1.1987852840145368, | |
| "grad_norm": 4.852428913116455, | |
| "learning_rate": 0.00019670951496760758, | |
| "loss": 2.1844, | |
| "step": 3010 | |
| }, | |
| { | |
| "epoch": 1.2007766216956242, | |
| "grad_norm": 6.431066036224365, | |
| "learning_rate": 0.00019641206368937146, | |
| "loss": 2.171, | |
| "step": 3015 | |
| }, | |
| { | |
| "epoch": 1.2027679593767113, | |
| "grad_norm": 5.527892589569092, | |
| "learning_rate": 0.0001961144104440845, | |
| "loss": 2.284, | |
| "step": 3020 | |
| }, | |
| { | |
| "epoch": 1.2047592970577985, | |
| "grad_norm": 5.936304092407227, | |
| "learning_rate": 0.00019581655652701671, | |
| "loss": 2.1361, | |
| "step": 3025 | |
| }, | |
| { | |
| "epoch": 1.2067506347388859, | |
| "grad_norm": 5.172138214111328, | |
| "learning_rate": 0.0001955185032343112, | |
| "loss": 2.158, | |
| "step": 3030 | |
| }, | |
| { | |
| "epoch": 1.208741972419973, | |
| "grad_norm": 5.692948818206787, | |
| "learning_rate": 0.0001952202518629786, | |
| "loss": 2.1897, | |
| "step": 3035 | |
| }, | |
| { | |
| "epoch": 1.2107333101010604, | |
| "grad_norm": 5.522965908050537, | |
| "learning_rate": 0.00019492180371089168, | |
| "loss": 2.287, | |
| "step": 3040 | |
| }, | |
| { | |
| "epoch": 1.2127246477821476, | |
| "grad_norm": 6.14274787902832, | |
| "learning_rate": 0.0001946231600767794, | |
| "loss": 2.1044, | |
| "step": 3045 | |
| }, | |
| { | |
| "epoch": 1.214715985463235, | |
| "grad_norm": 6.3660101890563965, | |
| "learning_rate": 0.00019432432226022147, | |
| "loss": 2.1893, | |
| "step": 3050 | |
| }, | |
| { | |
| "epoch": 1.2167073231443222, | |
| "grad_norm": 5.123039722442627, | |
| "learning_rate": 0.0001940252915616425, | |
| "loss": 2.2271, | |
| "step": 3055 | |
| }, | |
| { | |
| "epoch": 1.2186986608254096, | |
| "grad_norm": 5.454919815063477, | |
| "learning_rate": 0.0001937260692823065, | |
| "loss": 2.1444, | |
| "step": 3060 | |
| }, | |
| { | |
| "epoch": 1.2206899985064967, | |
| "grad_norm": 5.850592136383057, | |
| "learning_rate": 0.00019342665672431123, | |
| "loss": 2.2168, | |
| "step": 3065 | |
| }, | |
| { | |
| "epoch": 1.2226813361875841, | |
| "grad_norm": 5.659205913543701, | |
| "learning_rate": 0.00019312705519058238, | |
| "loss": 2.2518, | |
| "step": 3070 | |
| }, | |
| { | |
| "epoch": 1.2246726738686713, | |
| "grad_norm": 6.5372314453125, | |
| "learning_rate": 0.00019282726598486802, | |
| "loss": 2.2739, | |
| "step": 3075 | |
| }, | |
| { | |
| "epoch": 1.2266640115497585, | |
| "grad_norm": 5.422939777374268, | |
| "learning_rate": 0.0001925272904117328, | |
| "loss": 2.0801, | |
| "step": 3080 | |
| }, | |
| { | |
| "epoch": 1.2286553492308459, | |
| "grad_norm": 6.354897975921631, | |
| "learning_rate": 0.00019222712977655262, | |
| "loss": 2.1793, | |
| "step": 3085 | |
| }, | |
| { | |
| "epoch": 1.230646686911933, | |
| "grad_norm": 6.33408260345459, | |
| "learning_rate": 0.00019192678538550837, | |
| "loss": 2.247, | |
| "step": 3090 | |
| }, | |
| { | |
| "epoch": 1.2326380245930204, | |
| "grad_norm": 5.345519542694092, | |
| "learning_rate": 0.00019162625854558086, | |
| "loss": 2.1663, | |
| "step": 3095 | |
| }, | |
| { | |
| "epoch": 1.2346293622741076, | |
| "grad_norm": 5.576323509216309, | |
| "learning_rate": 0.00019132555056454464, | |
| "loss": 2.0648, | |
| "step": 3100 | |
| }, | |
| { | |
| "epoch": 1.236620699955195, | |
| "grad_norm": 5.9087910652160645, | |
| "learning_rate": 0.00019102466275096262, | |
| "loss": 2.1999, | |
| "step": 3105 | |
| }, | |
| { | |
| "epoch": 1.2386120376362821, | |
| "grad_norm": 6.147064685821533, | |
| "learning_rate": 0.00019072359641418031, | |
| "loss": 2.4797, | |
| "step": 3110 | |
| }, | |
| { | |
| "epoch": 1.2406033753173695, | |
| "grad_norm": 5.6060662269592285, | |
| "learning_rate": 0.00019042235286431994, | |
| "loss": 2.3287, | |
| "step": 3115 | |
| }, | |
| { | |
| "epoch": 1.2425947129984567, | |
| "grad_norm": 5.959619522094727, | |
| "learning_rate": 0.00019012093341227505, | |
| "loss": 2.1932, | |
| "step": 3120 | |
| }, | |
| { | |
| "epoch": 1.244586050679544, | |
| "grad_norm": 7.9409379959106445, | |
| "learning_rate": 0.00018981933936970446, | |
| "loss": 2.2579, | |
| "step": 3125 | |
| }, | |
| { | |
| "epoch": 1.2465773883606313, | |
| "grad_norm": 5.92664909362793, | |
| "learning_rate": 0.00018951757204902696, | |
| "loss": 2.3502, | |
| "step": 3130 | |
| }, | |
| { | |
| "epoch": 1.2485687260417184, | |
| "grad_norm": 5.162164211273193, | |
| "learning_rate": 0.00018921563276341522, | |
| "loss": 2.2192, | |
| "step": 3135 | |
| }, | |
| { | |
| "epoch": 1.2505600637228058, | |
| "grad_norm": 6.151308059692383, | |
| "learning_rate": 0.00018891352282679027, | |
| "loss": 2.2133, | |
| "step": 3140 | |
| }, | |
| { | |
| "epoch": 1.252551401403893, | |
| "grad_norm": 5.899823188781738, | |
| "learning_rate": 0.00018861124355381583, | |
| "loss": 2.3228, | |
| "step": 3145 | |
| }, | |
| { | |
| "epoch": 1.2545427390849804, | |
| "grad_norm": 5.929165840148926, | |
| "learning_rate": 0.0001883087962598923, | |
| "loss": 2.1752, | |
| "step": 3150 | |
| }, | |
| { | |
| "epoch": 1.2565340767660675, | |
| "grad_norm": 5.03986930847168, | |
| "learning_rate": 0.0001880061822611515, | |
| "loss": 2.196, | |
| "step": 3155 | |
| }, | |
| { | |
| "epoch": 1.258525414447155, | |
| "grad_norm": 7.291461944580078, | |
| "learning_rate": 0.0001877034028744505, | |
| "loss": 2.1835, | |
| "step": 3160 | |
| }, | |
| { | |
| "epoch": 1.260516752128242, | |
| "grad_norm": 5.5854620933532715, | |
| "learning_rate": 0.00018740045941736608, | |
| "loss": 2.0839, | |
| "step": 3165 | |
| }, | |
| { | |
| "epoch": 1.2625080898093295, | |
| "grad_norm": 4.644222259521484, | |
| "learning_rate": 0.00018709735320818918, | |
| "loss": 2.235, | |
| "step": 3170 | |
| }, | |
| { | |
| "epoch": 1.2644994274904167, | |
| "grad_norm": 4.4264817237854, | |
| "learning_rate": 0.00018679408556591866, | |
| "loss": 2.1371, | |
| "step": 3175 | |
| }, | |
| { | |
| "epoch": 1.266490765171504, | |
| "grad_norm": 6.052789688110352, | |
| "learning_rate": 0.00018649065781025623, | |
| "loss": 2.286, | |
| "step": 3180 | |
| }, | |
| { | |
| "epoch": 1.2684821028525912, | |
| "grad_norm": 5.982134819030762, | |
| "learning_rate": 0.00018618707126160004, | |
| "loss": 2.1594, | |
| "step": 3185 | |
| }, | |
| { | |
| "epoch": 1.2704734405336784, | |
| "grad_norm": 5.037675380706787, | |
| "learning_rate": 0.00018588332724103938, | |
| "loss": 2.2272, | |
| "step": 3190 | |
| }, | |
| { | |
| "epoch": 1.2724647782147658, | |
| "grad_norm": 4.905061721801758, | |
| "learning_rate": 0.00018557942707034882, | |
| "loss": 2.1714, | |
| "step": 3195 | |
| }, | |
| { | |
| "epoch": 1.2744561158958532, | |
| "grad_norm": 5.602728843688965, | |
| "learning_rate": 0.0001852753720719824, | |
| "loss": 2.1461, | |
| "step": 3200 | |
| }, | |
| { | |
| "epoch": 1.2764474535769403, | |
| "grad_norm": 6.72523832321167, | |
| "learning_rate": 0.0001849711635690679, | |
| "loss": 2.3851, | |
| "step": 3205 | |
| }, | |
| { | |
| "epoch": 1.2784387912580275, | |
| "grad_norm": 7.611420631408691, | |
| "learning_rate": 0.0001846668028854011, | |
| "loss": 2.1879, | |
| "step": 3210 | |
| }, | |
| { | |
| "epoch": 1.280430128939115, | |
| "grad_norm": 5.812536239624023, | |
| "learning_rate": 0.00018436229134544005, | |
| "loss": 2.2753, | |
| "step": 3215 | |
| }, | |
| { | |
| "epoch": 1.282421466620202, | |
| "grad_norm": 5.685847282409668, | |
| "learning_rate": 0.00018405763027429917, | |
| "loss": 2.2598, | |
| "step": 3220 | |
| }, | |
| { | |
| "epoch": 1.2844128043012895, | |
| "grad_norm": 5.823709487915039, | |
| "learning_rate": 0.0001837528209977437, | |
| "loss": 2.1045, | |
| "step": 3225 | |
| }, | |
| { | |
| "epoch": 1.2864041419823766, | |
| "grad_norm": 5.243285655975342, | |
| "learning_rate": 0.00018344786484218367, | |
| "loss": 2.2614, | |
| "step": 3230 | |
| }, | |
| { | |
| "epoch": 1.288395479663464, | |
| "grad_norm": 5.421647548675537, | |
| "learning_rate": 0.00018314276313466846, | |
| "loss": 2.1703, | |
| "step": 3235 | |
| }, | |
| { | |
| "epoch": 1.2903868173445512, | |
| "grad_norm": 6.741246700286865, | |
| "learning_rate": 0.00018283751720288066, | |
| "loss": 2.1891, | |
| "step": 3240 | |
| }, | |
| { | |
| "epoch": 1.2923781550256384, | |
| "grad_norm": 5.4550676345825195, | |
| "learning_rate": 0.00018253212837513055, | |
| "loss": 2.227, | |
| "step": 3245 | |
| }, | |
| { | |
| "epoch": 1.2943694927067257, | |
| "grad_norm": 5.21625280380249, | |
| "learning_rate": 0.0001822265979803502, | |
| "loss": 2.246, | |
| "step": 3250 | |
| }, | |
| { | |
| "epoch": 1.2963608303878131, | |
| "grad_norm": 7.004878044128418, | |
| "learning_rate": 0.00018192092734808772, | |
| "loss": 2.3162, | |
| "step": 3255 | |
| }, | |
| { | |
| "epoch": 1.2983521680689003, | |
| "grad_norm": 5.326993942260742, | |
| "learning_rate": 0.00018161511780850163, | |
| "loss": 2.1449, | |
| "step": 3260 | |
| }, | |
| { | |
| "epoch": 1.3003435057499875, | |
| "grad_norm": 6.218927383422852, | |
| "learning_rate": 0.00018130917069235467, | |
| "loss": 2.1936, | |
| "step": 3265 | |
| }, | |
| { | |
| "epoch": 1.3023348434310749, | |
| "grad_norm": 6.091947555541992, | |
| "learning_rate": 0.00018100308733100847, | |
| "loss": 2.2076, | |
| "step": 3270 | |
| }, | |
| { | |
| "epoch": 1.304326181112162, | |
| "grad_norm": 7.184474468231201, | |
| "learning_rate": 0.00018069686905641737, | |
| "loss": 2.3472, | |
| "step": 3275 | |
| }, | |
| { | |
| "epoch": 1.3063175187932494, | |
| "grad_norm": 5.758594036102295, | |
| "learning_rate": 0.00018039051720112297, | |
| "loss": 2.2042, | |
| "step": 3280 | |
| }, | |
| { | |
| "epoch": 1.3083088564743366, | |
| "grad_norm": 4.715916633605957, | |
| "learning_rate": 0.000180084033098248, | |
| "loss": 2.2247, | |
| "step": 3285 | |
| }, | |
| { | |
| "epoch": 1.310300194155424, | |
| "grad_norm": 5.105158805847168, | |
| "learning_rate": 0.0001797774180814908, | |
| "loss": 2.1685, | |
| "step": 3290 | |
| }, | |
| { | |
| "epoch": 1.3122915318365111, | |
| "grad_norm": 7.182875156402588, | |
| "learning_rate": 0.0001794706734851194, | |
| "loss": 2.2263, | |
| "step": 3295 | |
| }, | |
| { | |
| "epoch": 1.3142828695175983, | |
| "grad_norm": 6.164737224578857, | |
| "learning_rate": 0.00017916380064396557, | |
| "loss": 2.1982, | |
| "step": 3300 | |
| }, | |
| { | |
| "epoch": 1.3162742071986857, | |
| "grad_norm": 5.850378036499023, | |
| "learning_rate": 0.00017885680089341937, | |
| "loss": 2.1192, | |
| "step": 3305 | |
| }, | |
| { | |
| "epoch": 1.318265544879773, | |
| "grad_norm": 6.571321964263916, | |
| "learning_rate": 0.00017854967556942285, | |
| "loss": 2.3361, | |
| "step": 3310 | |
| }, | |
| { | |
| "epoch": 1.3202568825608603, | |
| "grad_norm": 5.506916522979736, | |
| "learning_rate": 0.00017824242600846476, | |
| "loss": 2.194, | |
| "step": 3315 | |
| }, | |
| { | |
| "epoch": 1.3222482202419474, | |
| "grad_norm": 9.358162879943848, | |
| "learning_rate": 0.00017793505354757428, | |
| "loss": 2.1641, | |
| "step": 3320 | |
| }, | |
| { | |
| "epoch": 1.3242395579230348, | |
| "grad_norm": 5.8232340812683105, | |
| "learning_rate": 0.00017762755952431558, | |
| "loss": 2.2781, | |
| "step": 3325 | |
| }, | |
| { | |
| "epoch": 1.326230895604122, | |
| "grad_norm": 6.010040760040283, | |
| "learning_rate": 0.00017731994527678166, | |
| "loss": 2.2773, | |
| "step": 3330 | |
| }, | |
| { | |
| "epoch": 1.3282222332852094, | |
| "grad_norm": 4.972679138183594, | |
| "learning_rate": 0.0001770122121435888, | |
| "loss": 2.1069, | |
| "step": 3335 | |
| }, | |
| { | |
| "epoch": 1.3302135709662966, | |
| "grad_norm": 12.89777660369873, | |
| "learning_rate": 0.00017670436146387063, | |
| "loss": 2.044, | |
| "step": 3340 | |
| }, | |
| { | |
| "epoch": 1.332204908647384, | |
| "grad_norm": 5.31705379486084, | |
| "learning_rate": 0.00017639639457727217, | |
| "loss": 2.153, | |
| "step": 3345 | |
| }, | |
| { | |
| "epoch": 1.3341962463284711, | |
| "grad_norm": 6.822572708129883, | |
| "learning_rate": 0.00017608831282394426, | |
| "loss": 2.2099, | |
| "step": 3350 | |
| }, | |
| { | |
| "epoch": 1.3361875840095583, | |
| "grad_norm": 7.124082565307617, | |
| "learning_rate": 0.00017578011754453753, | |
| "loss": 2.2104, | |
| "step": 3355 | |
| }, | |
| { | |
| "epoch": 1.3381789216906457, | |
| "grad_norm": 5.639463901519775, | |
| "learning_rate": 0.00017547181008019668, | |
| "loss": 2.2408, | |
| "step": 3360 | |
| }, | |
| { | |
| "epoch": 1.340170259371733, | |
| "grad_norm": 5.687006950378418, | |
| "learning_rate": 0.0001751633917725545, | |
| "loss": 2.1546, | |
| "step": 3365 | |
| }, | |
| { | |
| "epoch": 1.3421615970528202, | |
| "grad_norm": 5.070754051208496, | |
| "learning_rate": 0.0001748548639637262, | |
| "loss": 2.2619, | |
| "step": 3370 | |
| }, | |
| { | |
| "epoch": 1.3441529347339074, | |
| "grad_norm": 5.338910102844238, | |
| "learning_rate": 0.0001745462279963035, | |
| "loss": 2.3145, | |
| "step": 3375 | |
| }, | |
| { | |
| "epoch": 1.3461442724149948, | |
| "grad_norm": 6.1445794105529785, | |
| "learning_rate": 0.0001742374852133488, | |
| "loss": 2.0273, | |
| "step": 3380 | |
| }, | |
| { | |
| "epoch": 1.348135610096082, | |
| "grad_norm": 6.033137798309326, | |
| "learning_rate": 0.00017392863695838928, | |
| "loss": 2.2441, | |
| "step": 3385 | |
| }, | |
| { | |
| "epoch": 1.3501269477771694, | |
| "grad_norm": 8.369287490844727, | |
| "learning_rate": 0.000173619684575411, | |
| "loss": 2.2212, | |
| "step": 3390 | |
| }, | |
| { | |
| "epoch": 1.3521182854582565, | |
| "grad_norm": 7.496065616607666, | |
| "learning_rate": 0.00017331062940885338, | |
| "loss": 2.2526, | |
| "step": 3395 | |
| }, | |
| { | |
| "epoch": 1.354109623139344, | |
| "grad_norm": 5.870899677276611, | |
| "learning_rate": 0.00017300147280360285, | |
| "loss": 2.2123, | |
| "step": 3400 | |
| }, | |
| { | |
| "epoch": 1.356100960820431, | |
| "grad_norm": 6.694897651672363, | |
| "learning_rate": 0.00017269221610498748, | |
| "loss": 2.2284, | |
| "step": 3405 | |
| }, | |
| { | |
| "epoch": 1.3580922985015185, | |
| "grad_norm": 5.430352210998535, | |
| "learning_rate": 0.0001723828606587708, | |
| "loss": 2.1369, | |
| "step": 3410 | |
| }, | |
| { | |
| "epoch": 1.3600836361826056, | |
| "grad_norm": 7.428508758544922, | |
| "learning_rate": 0.00017207340781114598, | |
| "loss": 2.2149, | |
| "step": 3415 | |
| }, | |
| { | |
| "epoch": 1.362074973863693, | |
| "grad_norm": 6.232314586639404, | |
| "learning_rate": 0.0001717638589087303, | |
| "loss": 2.1926, | |
| "step": 3420 | |
| }, | |
| { | |
| "epoch": 1.3640663115447802, | |
| "grad_norm": 5.414062023162842, | |
| "learning_rate": 0.00017145421529855866, | |
| "loss": 2.1174, | |
| "step": 3425 | |
| }, | |
| { | |
| "epoch": 1.3660576492258674, | |
| "grad_norm": 5.721498012542725, | |
| "learning_rate": 0.00017114447832807845, | |
| "loss": 2.2366, | |
| "step": 3430 | |
| }, | |
| { | |
| "epoch": 1.3680489869069548, | |
| "grad_norm": 8.127852439880371, | |
| "learning_rate": 0.0001708346493451431, | |
| "loss": 2.3191, | |
| "step": 3435 | |
| }, | |
| { | |
| "epoch": 1.3700403245880421, | |
| "grad_norm": 5.363738059997559, | |
| "learning_rate": 0.00017052472969800647, | |
| "loss": 2.2159, | |
| "step": 3440 | |
| }, | |
| { | |
| "epoch": 1.3720316622691293, | |
| "grad_norm": 4.83927059173584, | |
| "learning_rate": 0.00017021472073531712, | |
| "loss": 2.1906, | |
| "step": 3445 | |
| }, | |
| { | |
| "epoch": 1.3740229999502165, | |
| "grad_norm": 7.324374675750732, | |
| "learning_rate": 0.000169904623806112, | |
| "loss": 2.1427, | |
| "step": 3450 | |
| }, | |
| { | |
| "epoch": 1.3760143376313039, | |
| "grad_norm": 5.2989373207092285, | |
| "learning_rate": 0.00016959444025981115, | |
| "loss": 2.1721, | |
| "step": 3455 | |
| }, | |
| { | |
| "epoch": 1.378005675312391, | |
| "grad_norm": 5.679375648498535, | |
| "learning_rate": 0.00016928417144621127, | |
| "loss": 2.1693, | |
| "step": 3460 | |
| }, | |
| { | |
| "epoch": 1.3799970129934784, | |
| "grad_norm": 5.638728141784668, | |
| "learning_rate": 0.00016897381871548036, | |
| "loss": 2.2256, | |
| "step": 3465 | |
| }, | |
| { | |
| "epoch": 1.3819883506745656, | |
| "grad_norm": 6.92189884185791, | |
| "learning_rate": 0.00016866338341815134, | |
| "loss": 2.2384, | |
| "step": 3470 | |
| }, | |
| { | |
| "epoch": 1.383979688355653, | |
| "grad_norm": 6.763597011566162, | |
| "learning_rate": 0.0001683528669051166, | |
| "loss": 2.0807, | |
| "step": 3475 | |
| }, | |
| { | |
| "epoch": 1.3859710260367402, | |
| "grad_norm": 13.712860107421875, | |
| "learning_rate": 0.00016804227052762195, | |
| "loss": 2.2717, | |
| "step": 3480 | |
| }, | |
| { | |
| "epoch": 1.3879623637178273, | |
| "grad_norm": 8.10069465637207, | |
| "learning_rate": 0.00016773159563726058, | |
| "loss": 2.0908, | |
| "step": 3485 | |
| }, | |
| { | |
| "epoch": 1.3899537013989147, | |
| "grad_norm": 5.849442481994629, | |
| "learning_rate": 0.00016742084358596757, | |
| "loss": 2.2445, | |
| "step": 3490 | |
| }, | |
| { | |
| "epoch": 1.3919450390800021, | |
| "grad_norm": 5.508017063140869, | |
| "learning_rate": 0.00016711001572601358, | |
| "loss": 2.2985, | |
| "step": 3495 | |
| }, | |
| { | |
| "epoch": 1.3939363767610893, | |
| "grad_norm": 4.959733486175537, | |
| "learning_rate": 0.00016679911340999925, | |
| "loss": 2.1415, | |
| "step": 3500 | |
| }, | |
| { | |
| "epoch": 1.3959277144421764, | |
| "grad_norm": 4.843476295471191, | |
| "learning_rate": 0.0001664881379908492, | |
| "loss": 2.3455, | |
| "step": 3505 | |
| }, | |
| { | |
| "epoch": 1.3979190521232638, | |
| "grad_norm": 5.710421085357666, | |
| "learning_rate": 0.00016617709082180615, | |
| "loss": 2.0707, | |
| "step": 3510 | |
| }, | |
| { | |
| "epoch": 1.399910389804351, | |
| "grad_norm": 5.431413173675537, | |
| "learning_rate": 0.00016586597325642514, | |
| "loss": 2.0146, | |
| "step": 3515 | |
| }, | |
| { | |
| "epoch": 1.4019017274854384, | |
| "grad_norm": 5.725831031799316, | |
| "learning_rate": 0.00016555478664856738, | |
| "loss": 2.1838, | |
| "step": 3520 | |
| }, | |
| { | |
| "epoch": 1.4038930651665256, | |
| "grad_norm": 6.110559940338135, | |
| "learning_rate": 0.00016524353235239473, | |
| "loss": 2.2518, | |
| "step": 3525 | |
| }, | |
| { | |
| "epoch": 1.405884402847613, | |
| "grad_norm": 5.45647668838501, | |
| "learning_rate": 0.00016493221172236337, | |
| "loss": 2.1785, | |
| "step": 3530 | |
| }, | |
| { | |
| "epoch": 1.4078757405287001, | |
| "grad_norm": 7.214836120605469, | |
| "learning_rate": 0.00016462082611321834, | |
| "loss": 2.2149, | |
| "step": 3535 | |
| }, | |
| { | |
| "epoch": 1.4098670782097873, | |
| "grad_norm": 5.839008331298828, | |
| "learning_rate": 0.00016430937687998736, | |
| "loss": 2.1552, | |
| "step": 3540 | |
| }, | |
| { | |
| "epoch": 1.4118584158908747, | |
| "grad_norm": 6.743014812469482, | |
| "learning_rate": 0.000163997865377975, | |
| "loss": 2.195, | |
| "step": 3545 | |
| }, | |
| { | |
| "epoch": 1.413849753571962, | |
| "grad_norm": 6.176872730255127, | |
| "learning_rate": 0.00016368629296275683, | |
| "loss": 2.1918, | |
| "step": 3550 | |
| }, | |
| { | |
| "epoch": 1.4158410912530492, | |
| "grad_norm": 5.614704132080078, | |
| "learning_rate": 0.0001633746609901735, | |
| "loss": 2.1733, | |
| "step": 3555 | |
| }, | |
| { | |
| "epoch": 1.4178324289341364, | |
| "grad_norm": 6.306468963623047, | |
| "learning_rate": 0.00016306297081632482, | |
| "loss": 2.1643, | |
| "step": 3560 | |
| }, | |
| { | |
| "epoch": 1.4198237666152238, | |
| "grad_norm": 5.466887950897217, | |
| "learning_rate": 0.0001627512237975638, | |
| "loss": 2.1423, | |
| "step": 3565 | |
| }, | |
| { | |
| "epoch": 1.421815104296311, | |
| "grad_norm": 6.443585395812988, | |
| "learning_rate": 0.00016243942129049097, | |
| "loss": 2.3586, | |
| "step": 3570 | |
| }, | |
| { | |
| "epoch": 1.4238064419773984, | |
| "grad_norm": 6.56254243850708, | |
| "learning_rate": 0.00016212756465194814, | |
| "loss": 2.1143, | |
| "step": 3575 | |
| }, | |
| { | |
| "epoch": 1.4257977796584855, | |
| "grad_norm": 6.025684833526611, | |
| "learning_rate": 0.00016181565523901284, | |
| "loss": 2.2403, | |
| "step": 3580 | |
| }, | |
| { | |
| "epoch": 1.427789117339573, | |
| "grad_norm": 5.992644786834717, | |
| "learning_rate": 0.0001615036944089921, | |
| "loss": 2.1688, | |
| "step": 3585 | |
| }, | |
| { | |
| "epoch": 1.42978045502066, | |
| "grad_norm": 5.697604179382324, | |
| "learning_rate": 0.00016119168351941688, | |
| "loss": 2.1304, | |
| "step": 3590 | |
| }, | |
| { | |
| "epoch": 1.4317717927017475, | |
| "grad_norm": 7.877066612243652, | |
| "learning_rate": 0.0001608796239280359, | |
| "loss": 2.2145, | |
| "step": 3595 | |
| }, | |
| { | |
| "epoch": 1.4337631303828346, | |
| "grad_norm": 7.044979095458984, | |
| "learning_rate": 0.00016056751699280964, | |
| "loss": 2.2152, | |
| "step": 3600 | |
| }, | |
| { | |
| "epoch": 1.435754468063922, | |
| "grad_norm": 6.05941915512085, | |
| "learning_rate": 0.0001602553640719049, | |
| "loss": 2.2349, | |
| "step": 3605 | |
| }, | |
| { | |
| "epoch": 1.4377458057450092, | |
| "grad_norm": 6.004716396331787, | |
| "learning_rate": 0.00015994316652368838, | |
| "loss": 2.1943, | |
| "step": 3610 | |
| }, | |
| { | |
| "epoch": 1.4397371434260964, | |
| "grad_norm": 7.441656112670898, | |
| "learning_rate": 0.00015963092570672106, | |
| "loss": 2.1507, | |
| "step": 3615 | |
| }, | |
| { | |
| "epoch": 1.4417284811071838, | |
| "grad_norm": 5.681270122528076, | |
| "learning_rate": 0.00015931864297975219, | |
| "loss": 2.2447, | |
| "step": 3620 | |
| }, | |
| { | |
| "epoch": 1.4437198187882712, | |
| "grad_norm": 4.99393367767334, | |
| "learning_rate": 0.00015900631970171337, | |
| "loss": 2.1024, | |
| "step": 3625 | |
| }, | |
| { | |
| "epoch": 1.4457111564693583, | |
| "grad_norm": 5.866675853729248, | |
| "learning_rate": 0.00015869395723171277, | |
| "loss": 2.2081, | |
| "step": 3630 | |
| }, | |
| { | |
| "epoch": 1.4477024941504455, | |
| "grad_norm": 5.251063823699951, | |
| "learning_rate": 0.00015838155692902896, | |
| "loss": 2.1623, | |
| "step": 3635 | |
| }, | |
| { | |
| "epoch": 1.4496938318315329, | |
| "grad_norm": 5.525659084320068, | |
| "learning_rate": 0.00015806912015310526, | |
| "loss": 2.3001, | |
| "step": 3640 | |
| }, | |
| { | |
| "epoch": 1.45168516951262, | |
| "grad_norm": 5.020875930786133, | |
| "learning_rate": 0.00015775664826354362, | |
| "loss": 2.2304, | |
| "step": 3645 | |
| }, | |
| { | |
| "epoch": 1.4536765071937074, | |
| "grad_norm": 5.0340704917907715, | |
| "learning_rate": 0.00015744414262009886, | |
| "loss": 2.1608, | |
| "step": 3650 | |
| }, | |
| { | |
| "epoch": 1.4556678448747946, | |
| "grad_norm": 4.942144393920898, | |
| "learning_rate": 0.00015713160458267263, | |
| "loss": 2.101, | |
| "step": 3655 | |
| }, | |
| { | |
| "epoch": 1.457659182555882, | |
| "grad_norm": 6.115739822387695, | |
| "learning_rate": 0.0001568190355113076, | |
| "loss": 2.1676, | |
| "step": 3660 | |
| }, | |
| { | |
| "epoch": 1.4596505202369692, | |
| "grad_norm": 8.139789581298828, | |
| "learning_rate": 0.00015650643676618146, | |
| "loss": 2.2546, | |
| "step": 3665 | |
| }, | |
| { | |
| "epoch": 1.4616418579180563, | |
| "grad_norm": 6.888481616973877, | |
| "learning_rate": 0.00015619380970760096, | |
| "loss": 2.2418, | |
| "step": 3670 | |
| }, | |
| { | |
| "epoch": 1.4636331955991437, | |
| "grad_norm": 6.429262161254883, | |
| "learning_rate": 0.00015588115569599624, | |
| "loss": 2.1296, | |
| "step": 3675 | |
| }, | |
| { | |
| "epoch": 1.4656245332802311, | |
| "grad_norm": 5.466495037078857, | |
| "learning_rate": 0.0001555684760919145, | |
| "loss": 2.2744, | |
| "step": 3680 | |
| }, | |
| { | |
| "epoch": 1.4676158709613183, | |
| "grad_norm": 6.42126989364624, | |
| "learning_rate": 0.0001552557722560145, | |
| "loss": 2.1773, | |
| "step": 3685 | |
| }, | |
| { | |
| "epoch": 1.4696072086424055, | |
| "grad_norm": 5.6322174072265625, | |
| "learning_rate": 0.00015494304554906034, | |
| "loss": 2.2818, | |
| "step": 3690 | |
| }, | |
| { | |
| "epoch": 1.4715985463234929, | |
| "grad_norm": 6.184334754943848, | |
| "learning_rate": 0.0001546302973319157, | |
| "loss": 2.1761, | |
| "step": 3695 | |
| }, | |
| { | |
| "epoch": 1.47358988400458, | |
| "grad_norm": 6.0626540184021, | |
| "learning_rate": 0.0001543175289655378, | |
| "loss": 2.2124, | |
| "step": 3700 | |
| }, | |
| { | |
| "epoch": 1.4755812216856674, | |
| "grad_norm": 5.791230201721191, | |
| "learning_rate": 0.00015400474181097163, | |
| "loss": 2.113, | |
| "step": 3705 | |
| }, | |
| { | |
| "epoch": 1.4775725593667546, | |
| "grad_norm": 5.994204998016357, | |
| "learning_rate": 0.00015369193722934385, | |
| "loss": 2.1965, | |
| "step": 3710 | |
| }, | |
| { | |
| "epoch": 1.479563897047842, | |
| "grad_norm": 5.7597856521606445, | |
| "learning_rate": 0.000153379116581857, | |
| "loss": 2.2088, | |
| "step": 3715 | |
| }, | |
| { | |
| "epoch": 1.4815552347289291, | |
| "grad_norm": 5.297857284545898, | |
| "learning_rate": 0.0001530662812297836, | |
| "loss": 2.1675, | |
| "step": 3720 | |
| }, | |
| { | |
| "epoch": 1.4835465724100163, | |
| "grad_norm": 6.562660217285156, | |
| "learning_rate": 0.00015275343253445993, | |
| "loss": 2.1832, | |
| "step": 3725 | |
| }, | |
| { | |
| "epoch": 1.4855379100911037, | |
| "grad_norm": 6.066761493682861, | |
| "learning_rate": 0.00015244057185728066, | |
| "loss": 2.2723, | |
| "step": 3730 | |
| }, | |
| { | |
| "epoch": 1.487529247772191, | |
| "grad_norm": 5.877458095550537, | |
| "learning_rate": 0.00015212770055969232, | |
| "loss": 2.1688, | |
| "step": 3735 | |
| }, | |
| { | |
| "epoch": 1.4895205854532783, | |
| "grad_norm": 6.463152885437012, | |
| "learning_rate": 0.00015181482000318786, | |
| "loss": 2.1291, | |
| "step": 3740 | |
| }, | |
| { | |
| "epoch": 1.4915119231343654, | |
| "grad_norm": 5.8509931564331055, | |
| "learning_rate": 0.00015150193154930033, | |
| "loss": 2.1488, | |
| "step": 3745 | |
| }, | |
| { | |
| "epoch": 1.4935032608154528, | |
| "grad_norm": 6.462055206298828, | |
| "learning_rate": 0.0001511890365595973, | |
| "loss": 2.1914, | |
| "step": 3750 | |
| }, | |
| { | |
| "epoch": 1.49549459849654, | |
| "grad_norm": 7.797671318054199, | |
| "learning_rate": 0.0001508761363956748, | |
| "loss": 2.1354, | |
| "step": 3755 | |
| }, | |
| { | |
| "epoch": 1.4974859361776274, | |
| "grad_norm": 5.754122734069824, | |
| "learning_rate": 0.00015056323241915113, | |
| "loss": 2.1788, | |
| "step": 3760 | |
| }, | |
| { | |
| "epoch": 1.4994772738587145, | |
| "grad_norm": 6.21970796585083, | |
| "learning_rate": 0.00015025032599166153, | |
| "loss": 2.2506, | |
| "step": 3765 | |
| }, | |
| { | |
| "epoch": 1.501468611539802, | |
| "grad_norm": 5.987273216247559, | |
| "learning_rate": 0.00014993741847485158, | |
| "loss": 2.3639, | |
| "step": 3770 | |
| }, | |
| { | |
| "epoch": 1.503459949220889, | |
| "grad_norm": 5.687912940979004, | |
| "learning_rate": 0.00014962451123037187, | |
| "loss": 2.1029, | |
| "step": 3775 | |
| }, | |
| { | |
| "epoch": 1.5054512869019763, | |
| "grad_norm": 5.01307487487793, | |
| "learning_rate": 0.0001493116056198716, | |
| "loss": 2.2263, | |
| "step": 3780 | |
| }, | |
| { | |
| "epoch": 1.5074426245830637, | |
| "grad_norm": 5.397319793701172, | |
| "learning_rate": 0.00014899870300499296, | |
| "loss": 2.3244, | |
| "step": 3785 | |
| }, | |
| { | |
| "epoch": 1.509433962264151, | |
| "grad_norm": 6.498507499694824, | |
| "learning_rate": 0.00014868580474736517, | |
| "loss": 2.2593, | |
| "step": 3790 | |
| }, | |
| { | |
| "epoch": 1.5114252999452382, | |
| "grad_norm": 5.791152477264404, | |
| "learning_rate": 0.00014837291220859829, | |
| "loss": 2.3214, | |
| "step": 3795 | |
| }, | |
| { | |
| "epoch": 1.5134166376263254, | |
| "grad_norm": 6.324441432952881, | |
| "learning_rate": 0.00014806002675027768, | |
| "loss": 2.1823, | |
| "step": 3800 | |
| }, | |
| { | |
| "epoch": 1.5154079753074128, | |
| "grad_norm": 5.276822566986084, | |
| "learning_rate": 0.0001477471497339578, | |
| "loss": 2.0003, | |
| "step": 3805 | |
| }, | |
| { | |
| "epoch": 1.5173993129885002, | |
| "grad_norm": 6.361807346343994, | |
| "learning_rate": 0.00014743428252115642, | |
| "loss": 2.1975, | |
| "step": 3810 | |
| }, | |
| { | |
| "epoch": 1.5193906506695873, | |
| "grad_norm": 5.382236003875732, | |
| "learning_rate": 0.0001471214264733486, | |
| "loss": 2.1137, | |
| "step": 3815 | |
| }, | |
| { | |
| "epoch": 1.5213819883506745, | |
| "grad_norm": 6.161167621612549, | |
| "learning_rate": 0.0001468085829519608, | |
| "loss": 2.206, | |
| "step": 3820 | |
| }, | |
| { | |
| "epoch": 1.523373326031762, | |
| "grad_norm": 5.9316086769104, | |
| "learning_rate": 0.0001464957533183651, | |
| "loss": 2.2331, | |
| "step": 3825 | |
| }, | |
| { | |
| "epoch": 1.525364663712849, | |
| "grad_norm": 5.059473514556885, | |
| "learning_rate": 0.00014618293893387297, | |
| "loss": 2.1523, | |
| "step": 3830 | |
| }, | |
| { | |
| "epoch": 1.5273560013939362, | |
| "grad_norm": 5.21863317489624, | |
| "learning_rate": 0.00014587014115972967, | |
| "loss": 2.2796, | |
| "step": 3835 | |
| }, | |
| { | |
| "epoch": 1.5293473390750236, | |
| "grad_norm": 4.951760768890381, | |
| "learning_rate": 0.00014555736135710802, | |
| "loss": 2.2487, | |
| "step": 3840 | |
| }, | |
| { | |
| "epoch": 1.531338676756111, | |
| "grad_norm": 6.017904758453369, | |
| "learning_rate": 0.00014524460088710282, | |
| "loss": 2.1701, | |
| "step": 3845 | |
| }, | |
| { | |
| "epoch": 1.5333300144371982, | |
| "grad_norm": 6.514730930328369, | |
| "learning_rate": 0.0001449318611107246, | |
| "loss": 2.3521, | |
| "step": 3850 | |
| }, | |
| { | |
| "epoch": 1.5353213521182854, | |
| "grad_norm": 5.786331653594971, | |
| "learning_rate": 0.0001446191433888939, | |
| "loss": 2.0861, | |
| "step": 3855 | |
| }, | |
| { | |
| "epoch": 1.5373126897993727, | |
| "grad_norm": 5.961428642272949, | |
| "learning_rate": 0.0001443064490824353, | |
| "loss": 2.2932, | |
| "step": 3860 | |
| }, | |
| { | |
| "epoch": 1.5393040274804601, | |
| "grad_norm": 5.961484432220459, | |
| "learning_rate": 0.00014399377955207142, | |
| "loss": 2.1267, | |
| "step": 3865 | |
| }, | |
| { | |
| "epoch": 1.5412953651615473, | |
| "grad_norm": 5.781057834625244, | |
| "learning_rate": 0.00014368113615841714, | |
| "loss": 2.161, | |
| "step": 3870 | |
| }, | |
| { | |
| "epoch": 1.5432867028426345, | |
| "grad_norm": 6.685031414031982, | |
| "learning_rate": 0.00014336852026197356, | |
| "loss": 2.1416, | |
| "step": 3875 | |
| }, | |
| { | |
| "epoch": 1.5452780405237219, | |
| "grad_norm": 6.4983649253845215, | |
| "learning_rate": 0.00014305593322312216, | |
| "loss": 2.1847, | |
| "step": 3880 | |
| }, | |
| { | |
| "epoch": 1.547269378204809, | |
| "grad_norm": 6.511983394622803, | |
| "learning_rate": 0.00014274337640211873, | |
| "loss": 2.0351, | |
| "step": 3885 | |
| }, | |
| { | |
| "epoch": 1.5492607158858962, | |
| "grad_norm": 5.873143196105957, | |
| "learning_rate": 0.00014243085115908777, | |
| "loss": 2.2255, | |
| "step": 3890 | |
| }, | |
| { | |
| "epoch": 1.5512520535669836, | |
| "grad_norm": 5.393064498901367, | |
| "learning_rate": 0.00014211835885401613, | |
| "loss": 2.1551, | |
| "step": 3895 | |
| }, | |
| { | |
| "epoch": 1.553243391248071, | |
| "grad_norm": 5.8017802238464355, | |
| "learning_rate": 0.0001418059008467475, | |
| "loss": 2.1379, | |
| "step": 3900 | |
| }, | |
| { | |
| "epoch": 1.5552347289291582, | |
| "grad_norm": 5.897889614105225, | |
| "learning_rate": 0.00014149347849697628, | |
| "loss": 2.2812, | |
| "step": 3905 | |
| }, | |
| { | |
| "epoch": 1.5572260666102453, | |
| "grad_norm": 6.137565612792969, | |
| "learning_rate": 0.0001411810931642416, | |
| "loss": 2.1536, | |
| "step": 3910 | |
| }, | |
| { | |
| "epoch": 1.5592174042913327, | |
| "grad_norm": 6.100076198577881, | |
| "learning_rate": 0.00014086874620792173, | |
| "loss": 2.065, | |
| "step": 3915 | |
| }, | |
| { | |
| "epoch": 1.56120874197242, | |
| "grad_norm": 5.261016368865967, | |
| "learning_rate": 0.0001405564389872277, | |
| "loss": 2.2897, | |
| "step": 3920 | |
| }, | |
| { | |
| "epoch": 1.5632000796535073, | |
| "grad_norm": 5.7907819747924805, | |
| "learning_rate": 0.00014024417286119772, | |
| "loss": 2.1539, | |
| "step": 3925 | |
| }, | |
| { | |
| "epoch": 1.5651914173345944, | |
| "grad_norm": 4.588942050933838, | |
| "learning_rate": 0.00013993194918869123, | |
| "loss": 2.1315, | |
| "step": 3930 | |
| }, | |
| { | |
| "epoch": 1.5671827550156818, | |
| "grad_norm": 5.841504096984863, | |
| "learning_rate": 0.0001396197693283828, | |
| "loss": 2.084, | |
| "step": 3935 | |
| }, | |
| { | |
| "epoch": 1.5691740926967692, | |
| "grad_norm": 5.055356979370117, | |
| "learning_rate": 0.00013930763463875656, | |
| "loss": 2.0406, | |
| "step": 3940 | |
| }, | |
| { | |
| "epoch": 1.5711654303778562, | |
| "grad_norm": 5.552372455596924, | |
| "learning_rate": 0.0001389955464780998, | |
| "loss": 2.1606, | |
| "step": 3945 | |
| }, | |
| { | |
| "epoch": 1.5731567680589436, | |
| "grad_norm": 5.9232378005981445, | |
| "learning_rate": 0.00013868350620449758, | |
| "loss": 2.1084, | |
| "step": 3950 | |
| }, | |
| { | |
| "epoch": 1.575148105740031, | |
| "grad_norm": 6.918461799621582, | |
| "learning_rate": 0.00013837151517582638, | |
| "loss": 2.2465, | |
| "step": 3955 | |
| }, | |
| { | |
| "epoch": 1.5771394434211181, | |
| "grad_norm": 5.042429447174072, | |
| "learning_rate": 0.00013805957474974855, | |
| "loss": 2.1984, | |
| "step": 3960 | |
| }, | |
| { | |
| "epoch": 1.5791307811022053, | |
| "grad_norm": 5.3624444007873535, | |
| "learning_rate": 0.0001377476862837061, | |
| "loss": 2.1909, | |
| "step": 3965 | |
| }, | |
| { | |
| "epoch": 1.5811221187832927, | |
| "grad_norm": 5.48763370513916, | |
| "learning_rate": 0.00013743585113491503, | |
| "loss": 2.2525, | |
| "step": 3970 | |
| }, | |
| { | |
| "epoch": 1.58311345646438, | |
| "grad_norm": 5.208502292633057, | |
| "learning_rate": 0.0001371240706603593, | |
| "loss": 2.2275, | |
| "step": 3975 | |
| }, | |
| { | |
| "epoch": 1.5851047941454672, | |
| "grad_norm": 7.1540069580078125, | |
| "learning_rate": 0.00013681234621678486, | |
| "loss": 2.2409, | |
| "step": 3980 | |
| }, | |
| { | |
| "epoch": 1.5870961318265544, | |
| "grad_norm": 5.815776824951172, | |
| "learning_rate": 0.000136500679160694, | |
| "loss": 2.1772, | |
| "step": 3985 | |
| }, | |
| { | |
| "epoch": 1.5890874695076418, | |
| "grad_norm": 5.809691429138184, | |
| "learning_rate": 0.00013618907084833912, | |
| "loss": 2.2952, | |
| "step": 3990 | |
| }, | |
| { | |
| "epoch": 1.5910788071887292, | |
| "grad_norm": 5.1112847328186035, | |
| "learning_rate": 0.00013587752263571713, | |
| "loss": 2.0964, | |
| "step": 3995 | |
| }, | |
| { | |
| "epoch": 1.5930701448698164, | |
| "grad_norm": 5.776605606079102, | |
| "learning_rate": 0.0001355660358785633, | |
| "loss": 2.0697, | |
| "step": 4000 | |
| }, | |
| { | |
| "epoch": 1.5950614825509035, | |
| "grad_norm": 5.093209743499756, | |
| "learning_rate": 0.0001352546119323455, | |
| "loss": 2.136, | |
| "step": 4005 | |
| }, | |
| { | |
| "epoch": 1.597052820231991, | |
| "grad_norm": 6.1989850997924805, | |
| "learning_rate": 0.00013494325215225833, | |
| "loss": 2.283, | |
| "step": 4010 | |
| }, | |
| { | |
| "epoch": 1.599044157913078, | |
| "grad_norm": 5.202113151550293, | |
| "learning_rate": 0.00013463195789321708, | |
| "loss": 2.2786, | |
| "step": 4015 | |
| }, | |
| { | |
| "epoch": 1.6010354955941652, | |
| "grad_norm": 5.918675422668457, | |
| "learning_rate": 0.000134320730509852, | |
| "loss": 2.2188, | |
| "step": 4020 | |
| }, | |
| { | |
| "epoch": 1.6030268332752526, | |
| "grad_norm": 6.093796730041504, | |
| "learning_rate": 0.00013400957135650221, | |
| "loss": 2.0943, | |
| "step": 4025 | |
| }, | |
| { | |
| "epoch": 1.60501817095634, | |
| "grad_norm": 5.631002902984619, | |
| "learning_rate": 0.00013369848178721005, | |
| "loss": 2.1842, | |
| "step": 4030 | |
| }, | |
| { | |
| "epoch": 1.6070095086374272, | |
| "grad_norm": 5.93800163269043, | |
| "learning_rate": 0.00013338746315571495, | |
| "loss": 2.1589, | |
| "step": 4035 | |
| }, | |
| { | |
| "epoch": 1.6090008463185144, | |
| "grad_norm": 5.358720779418945, | |
| "learning_rate": 0.00013307651681544774, | |
| "loss": 2.1383, | |
| "step": 4040 | |
| }, | |
| { | |
| "epoch": 1.6109921839996018, | |
| "grad_norm": 6.724377155303955, | |
| "learning_rate": 0.0001327656441195246, | |
| "loss": 2.1412, | |
| "step": 4045 | |
| }, | |
| { | |
| "epoch": 1.6129835216806891, | |
| "grad_norm": 5.9936747550964355, | |
| "learning_rate": 0.00013245484642074128, | |
| "loss": 2.226, | |
| "step": 4050 | |
| }, | |
| { | |
| "epoch": 1.6149748593617763, | |
| "grad_norm": 5.944963455200195, | |
| "learning_rate": 0.00013214412507156712, | |
| "loss": 2.1877, | |
| "step": 4055 | |
| }, | |
| { | |
| "epoch": 1.6169661970428635, | |
| "grad_norm": 4.6952972412109375, | |
| "learning_rate": 0.00013183348142413925, | |
| "loss": 2.1925, | |
| "step": 4060 | |
| }, | |
| { | |
| "epoch": 1.6189575347239509, | |
| "grad_norm": 4.9247331619262695, | |
| "learning_rate": 0.0001315229168302567, | |
| "loss": 2.0597, | |
| "step": 4065 | |
| }, | |
| { | |
| "epoch": 1.620948872405038, | |
| "grad_norm": 5.743644714355469, | |
| "learning_rate": 0.0001312124326413744, | |
| "loss": 2.2252, | |
| "step": 4070 | |
| }, | |
| { | |
| "epoch": 1.6229402100861252, | |
| "grad_norm": 7.026399612426758, | |
| "learning_rate": 0.00013090203020859757, | |
| "loss": 2.2459, | |
| "step": 4075 | |
| }, | |
| { | |
| "epoch": 1.6249315477672126, | |
| "grad_norm": 5.469521999359131, | |
| "learning_rate": 0.0001305917108826754, | |
| "loss": 2.237, | |
| "step": 4080 | |
| }, | |
| { | |
| "epoch": 1.6269228854483, | |
| "grad_norm": 5.118337631225586, | |
| "learning_rate": 0.0001302814760139956, | |
| "loss": 2.1671, | |
| "step": 4085 | |
| }, | |
| { | |
| "epoch": 1.6289142231293872, | |
| "grad_norm": 5.142106056213379, | |
| "learning_rate": 0.00012997132695257842, | |
| "loss": 2.1174, | |
| "step": 4090 | |
| }, | |
| { | |
| "epoch": 1.6309055608104743, | |
| "grad_norm": 5.856767177581787, | |
| "learning_rate": 0.00012966126504807053, | |
| "loss": 2.1862, | |
| "step": 4095 | |
| }, | |
| { | |
| "epoch": 1.6328968984915617, | |
| "grad_norm": 5.7608842849731445, | |
| "learning_rate": 0.00012935129164973948, | |
| "loss": 2.1811, | |
| "step": 4100 | |
| }, | |
| { | |
| "epoch": 1.6348882361726491, | |
| "grad_norm": 6.484520435333252, | |
| "learning_rate": 0.0001290414081064675, | |
| "loss": 2.1627, | |
| "step": 4105 | |
| }, | |
| { | |
| "epoch": 1.6368795738537363, | |
| "grad_norm": 7.269222259521484, | |
| "learning_rate": 0.00012873161576674604, | |
| "loss": 2.2155, | |
| "step": 4110 | |
| }, | |
| { | |
| "epoch": 1.6388709115348234, | |
| "grad_norm": 5.344924449920654, | |
| "learning_rate": 0.00012842191597866944, | |
| "loss": 2.2358, | |
| "step": 4115 | |
| }, | |
| { | |
| "epoch": 1.6408622492159108, | |
| "grad_norm": 5.197498798370361, | |
| "learning_rate": 0.00012811231008992945, | |
| "loss": 2.1274, | |
| "step": 4120 | |
| }, | |
| { | |
| "epoch": 1.642853586896998, | |
| "grad_norm": 5.114546775817871, | |
| "learning_rate": 0.0001278027994478092, | |
| "loss": 2.3124, | |
| "step": 4125 | |
| }, | |
| { | |
| "epoch": 1.6448449245780852, | |
| "grad_norm": 5.009857177734375, | |
| "learning_rate": 0.00012749338539917718, | |
| "loss": 2.2095, | |
| "step": 4130 | |
| }, | |
| { | |
| "epoch": 1.6468362622591726, | |
| "grad_norm": 5.543980121612549, | |
| "learning_rate": 0.00012718406929048177, | |
| "loss": 2.1183, | |
| "step": 4135 | |
| }, | |
| { | |
| "epoch": 1.64882759994026, | |
| "grad_norm": 6.6033196449279785, | |
| "learning_rate": 0.00012687485246774496, | |
| "loss": 2.0655, | |
| "step": 4140 | |
| }, | |
| { | |
| "epoch": 1.6508189376213471, | |
| "grad_norm": 4.530311584472656, | |
| "learning_rate": 0.00012656573627655685, | |
| "loss": 2.2523, | |
| "step": 4145 | |
| }, | |
| { | |
| "epoch": 1.6528102753024343, | |
| "grad_norm": 7.305230617523193, | |
| "learning_rate": 0.00012625672206206947, | |
| "loss": 2.3545, | |
| "step": 4150 | |
| }, | |
| { | |
| "epoch": 1.6548016129835217, | |
| "grad_norm": 6.4055495262146, | |
| "learning_rate": 0.00012594781116899122, | |
| "loss": 2.2055, | |
| "step": 4155 | |
| }, | |
| { | |
| "epoch": 1.656792950664609, | |
| "grad_norm": 4.996709823608398, | |
| "learning_rate": 0.00012563900494158084, | |
| "loss": 2.13, | |
| "step": 4160 | |
| }, | |
| { | |
| "epoch": 1.6587842883456962, | |
| "grad_norm": 5.305093765258789, | |
| "learning_rate": 0.00012533030472364158, | |
| "loss": 1.9513, | |
| "step": 4165 | |
| }, | |
| { | |
| "epoch": 1.6607756260267834, | |
| "grad_norm": 5.111289024353027, | |
| "learning_rate": 0.00012502171185851542, | |
| "loss": 2.2238, | |
| "step": 4170 | |
| }, | |
| { | |
| "epoch": 1.6627669637078708, | |
| "grad_norm": 5.95400333404541, | |
| "learning_rate": 0.00012471322768907713, | |
| "loss": 2.2567, | |
| "step": 4175 | |
| }, | |
| { | |
| "epoch": 1.6647583013889582, | |
| "grad_norm": 5.4170918464660645, | |
| "learning_rate": 0.00012440485355772854, | |
| "loss": 2.1921, | |
| "step": 4180 | |
| }, | |
| { | |
| "epoch": 1.6667496390700451, | |
| "grad_norm": 5.547909736633301, | |
| "learning_rate": 0.00012409659080639258, | |
| "loss": 2.1026, | |
| "step": 4185 | |
| }, | |
| { | |
| "epoch": 1.6687409767511325, | |
| "grad_norm": 6.24788761138916, | |
| "learning_rate": 0.00012378844077650755, | |
| "loss": 2.2438, | |
| "step": 4190 | |
| }, | |
| { | |
| "epoch": 1.67073231443222, | |
| "grad_norm": 5.332764148712158, | |
| "learning_rate": 0.00012348040480902121, | |
| "loss": 2.1973, | |
| "step": 4195 | |
| }, | |
| { | |
| "epoch": 1.672723652113307, | |
| "grad_norm": 5.244167804718018, | |
| "learning_rate": 0.00012317248424438494, | |
| "loss": 2.1514, | |
| "step": 4200 | |
| }, | |
| { | |
| "epoch": 1.6747149897943943, | |
| "grad_norm": 4.9936676025390625, | |
| "learning_rate": 0.000122864680422548, | |
| "loss": 2.0853, | |
| "step": 4205 | |
| }, | |
| { | |
| "epoch": 1.6767063274754817, | |
| "grad_norm": 6.823670387268066, | |
| "learning_rate": 0.00012255699468295155, | |
| "loss": 2.1558, | |
| "step": 4210 | |
| }, | |
| { | |
| "epoch": 1.678697665156569, | |
| "grad_norm": 5.074921607971191, | |
| "learning_rate": 0.00012224942836452295, | |
| "loss": 2.0616, | |
| "step": 4215 | |
| }, | |
| { | |
| "epoch": 1.6806890028376562, | |
| "grad_norm": 5.293245792388916, | |
| "learning_rate": 0.00012194198280566983, | |
| "loss": 2.1184, | |
| "step": 4220 | |
| }, | |
| { | |
| "epoch": 1.6826803405187434, | |
| "grad_norm": 5.603009223937988, | |
| "learning_rate": 0.00012163465934427443, | |
| "loss": 2.219, | |
| "step": 4225 | |
| }, | |
| { | |
| "epoch": 1.6846716781998308, | |
| "grad_norm": 4.916954517364502, | |
| "learning_rate": 0.00012132745931768762, | |
| "loss": 2.0718, | |
| "step": 4230 | |
| }, | |
| { | |
| "epoch": 1.6866630158809182, | |
| "grad_norm": 8.043903350830078, | |
| "learning_rate": 0.00012102038406272302, | |
| "loss": 2.1528, | |
| "step": 4235 | |
| }, | |
| { | |
| "epoch": 1.6886543535620053, | |
| "grad_norm": 4.8785505294799805, | |
| "learning_rate": 0.00012071343491565151, | |
| "loss": 2.1236, | |
| "step": 4240 | |
| }, | |
| { | |
| "epoch": 1.6906456912430925, | |
| "grad_norm": 5.881628513336182, | |
| "learning_rate": 0.00012040661321219498, | |
| "loss": 2.2627, | |
| "step": 4245 | |
| }, | |
| { | |
| "epoch": 1.6926370289241799, | |
| "grad_norm": 5.179254531860352, | |
| "learning_rate": 0.0001200999202875209, | |
| "loss": 2.0575, | |
| "step": 4250 | |
| }, | |
| { | |
| "epoch": 1.694628366605267, | |
| "grad_norm": 4.581838607788086, | |
| "learning_rate": 0.0001197933574762362, | |
| "loss": 2.0092, | |
| "step": 4255 | |
| }, | |
| { | |
| "epoch": 1.6966197042863542, | |
| "grad_norm": 5.721724510192871, | |
| "learning_rate": 0.0001194869261123818, | |
| "loss": 2.0574, | |
| "step": 4260 | |
| }, | |
| { | |
| "epoch": 1.6986110419674416, | |
| "grad_norm": 6.110555648803711, | |
| "learning_rate": 0.00011918062752942638, | |
| "loss": 2.1512, | |
| "step": 4265 | |
| }, | |
| { | |
| "epoch": 1.700602379648529, | |
| "grad_norm": 6.568111896514893, | |
| "learning_rate": 0.00011887446306026098, | |
| "loss": 2.1205, | |
| "step": 4270 | |
| }, | |
| { | |
| "epoch": 1.7025937173296162, | |
| "grad_norm": 6.348562240600586, | |
| "learning_rate": 0.00011856843403719302, | |
| "loss": 2.1388, | |
| "step": 4275 | |
| }, | |
| { | |
| "epoch": 1.7045850550107033, | |
| "grad_norm": 4.70415735244751, | |
| "learning_rate": 0.00011826254179194041, | |
| "loss": 2.0661, | |
| "step": 4280 | |
| }, | |
| { | |
| "epoch": 1.7065763926917907, | |
| "grad_norm": 5.290071487426758, | |
| "learning_rate": 0.00011795678765562598, | |
| "loss": 2.0906, | |
| "step": 4285 | |
| }, | |
| { | |
| "epoch": 1.7085677303728781, | |
| "grad_norm": 4.59420919418335, | |
| "learning_rate": 0.0001176511729587714, | |
| "loss": 2.0528, | |
| "step": 4290 | |
| }, | |
| { | |
| "epoch": 1.7105590680539653, | |
| "grad_norm": 5.748522758483887, | |
| "learning_rate": 0.00011734569903129178, | |
| "loss": 2.2097, | |
| "step": 4295 | |
| }, | |
| { | |
| "epoch": 1.7125504057350525, | |
| "grad_norm": 5.9820876121521, | |
| "learning_rate": 0.00011704036720248941, | |
| "loss": 2.1171, | |
| "step": 4300 | |
| }, | |
| { | |
| "epoch": 1.7145417434161399, | |
| "grad_norm": 5.448683738708496, | |
| "learning_rate": 0.00011673517880104839, | |
| "loss": 2.1346, | |
| "step": 4305 | |
| }, | |
| { | |
| "epoch": 1.716533081097227, | |
| "grad_norm": 6.049896240234375, | |
| "learning_rate": 0.0001164301351550287, | |
| "loss": 2.1895, | |
| "step": 4310 | |
| }, | |
| { | |
| "epoch": 1.7185244187783142, | |
| "grad_norm": 5.706995487213135, | |
| "learning_rate": 0.00011612523759186024, | |
| "loss": 2.1633, | |
| "step": 4315 | |
| }, | |
| { | |
| "epoch": 1.7205157564594016, | |
| "grad_norm": 5.151224136352539, | |
| "learning_rate": 0.00011582048743833739, | |
| "loss": 2.1786, | |
| "step": 4320 | |
| }, | |
| { | |
| "epoch": 1.722507094140489, | |
| "grad_norm": 7.260252952575684, | |
| "learning_rate": 0.00011551588602061292, | |
| "loss": 2.1119, | |
| "step": 4325 | |
| }, | |
| { | |
| "epoch": 1.7244984318215761, | |
| "grad_norm": 6.449362754821777, | |
| "learning_rate": 0.00011521143466419248, | |
| "loss": 2.2628, | |
| "step": 4330 | |
| }, | |
| { | |
| "epoch": 1.7264897695026633, | |
| "grad_norm": 6.293276309967041, | |
| "learning_rate": 0.00011490713469392858, | |
| "loss": 2.1769, | |
| "step": 4335 | |
| }, | |
| { | |
| "epoch": 1.7284811071837507, | |
| "grad_norm": 5.702457904815674, | |
| "learning_rate": 0.00011460298743401509, | |
| "loss": 2.2575, | |
| "step": 4340 | |
| }, | |
| { | |
| "epoch": 1.730472444864838, | |
| "grad_norm": 5.36246919631958, | |
| "learning_rate": 0.0001142989942079813, | |
| "loss": 2.2241, | |
| "step": 4345 | |
| }, | |
| { | |
| "epoch": 1.7324637825459253, | |
| "grad_norm": 6.228604316711426, | |
| "learning_rate": 0.00011399515633868613, | |
| "loss": 2.2833, | |
| "step": 4350 | |
| }, | |
| { | |
| "epoch": 1.7344551202270124, | |
| "grad_norm": 6.484302997589111, | |
| "learning_rate": 0.00011369147514831259, | |
| "loss": 2.3199, | |
| "step": 4355 | |
| }, | |
| { | |
| "epoch": 1.7364464579080998, | |
| "grad_norm": 5.6527886390686035, | |
| "learning_rate": 0.00011338795195836171, | |
| "loss": 2.0978, | |
| "step": 4360 | |
| }, | |
| { | |
| "epoch": 1.738437795589187, | |
| "grad_norm": 5.345698833465576, | |
| "learning_rate": 0.00011308458808964716, | |
| "loss": 2.0553, | |
| "step": 4365 | |
| }, | |
| { | |
| "epoch": 1.7404291332702742, | |
| "grad_norm": 5.294981479644775, | |
| "learning_rate": 0.00011278138486228912, | |
| "loss": 2.1411, | |
| "step": 4370 | |
| }, | |
| { | |
| "epoch": 1.7424204709513615, | |
| "grad_norm": 6.168553829193115, | |
| "learning_rate": 0.00011247834359570883, | |
| "loss": 2.251, | |
| "step": 4375 | |
| }, | |
| { | |
| "epoch": 1.744411808632449, | |
| "grad_norm": 5.573464393615723, | |
| "learning_rate": 0.00011217546560862279, | |
| "loss": 2.14, | |
| "step": 4380 | |
| }, | |
| { | |
| "epoch": 1.746403146313536, | |
| "grad_norm": 5.3229851722717285, | |
| "learning_rate": 0.0001118727522190368, | |
| "loss": 2.1176, | |
| "step": 4385 | |
| }, | |
| { | |
| "epoch": 1.7483944839946233, | |
| "grad_norm": 5.849644184112549, | |
| "learning_rate": 0.00011157020474424055, | |
| "loss": 2.1246, | |
| "step": 4390 | |
| }, | |
| { | |
| "epoch": 1.7503858216757107, | |
| "grad_norm": 6.106393814086914, | |
| "learning_rate": 0.00011126782450080168, | |
| "loss": 2.2544, | |
| "step": 4395 | |
| }, | |
| { | |
| "epoch": 1.752377159356798, | |
| "grad_norm": 4.8731255531311035, | |
| "learning_rate": 0.0001109656128045601, | |
| "loss": 2.0802, | |
| "step": 4400 | |
| }, | |
| { | |
| "epoch": 1.7543684970378852, | |
| "grad_norm": 6.5706257820129395, | |
| "learning_rate": 0.00011066357097062226, | |
| "loss": 2.0044, | |
| "step": 4405 | |
| }, | |
| { | |
| "epoch": 1.7563598347189724, | |
| "grad_norm": 5.224118709564209, | |
| "learning_rate": 0.00011036170031335544, | |
| "loss": 2.2617, | |
| "step": 4410 | |
| }, | |
| { | |
| "epoch": 1.7583511724000598, | |
| "grad_norm": 6.793256759643555, | |
| "learning_rate": 0.0001100600021463821, | |
| "loss": 2.2929, | |
| "step": 4415 | |
| }, | |
| { | |
| "epoch": 1.7603425100811472, | |
| "grad_norm": 5.432855606079102, | |
| "learning_rate": 0.00010975847778257396, | |
| "loss": 2.2589, | |
| "step": 4420 | |
| }, | |
| { | |
| "epoch": 1.7623338477622341, | |
| "grad_norm": 5.692535400390625, | |
| "learning_rate": 0.00010945712853404654, | |
| "loss": 2.2153, | |
| "step": 4425 | |
| }, | |
| { | |
| "epoch": 1.7643251854433215, | |
| "grad_norm": 5.197839260101318, | |
| "learning_rate": 0.00010915595571215317, | |
| "loss": 2.2352, | |
| "step": 4430 | |
| }, | |
| { | |
| "epoch": 1.766316523124409, | |
| "grad_norm": 5.767719268798828, | |
| "learning_rate": 0.00010885496062747969, | |
| "loss": 2.2606, | |
| "step": 4435 | |
| }, | |
| { | |
| "epoch": 1.768307860805496, | |
| "grad_norm": 6.0898356437683105, | |
| "learning_rate": 0.00010855414458983822, | |
| "loss": 2.0726, | |
| "step": 4440 | |
| }, | |
| { | |
| "epoch": 1.7702991984865832, | |
| "grad_norm": 5.859544277191162, | |
| "learning_rate": 0.00010825350890826198, | |
| "loss": 2.1762, | |
| "step": 4445 | |
| }, | |
| { | |
| "epoch": 1.7722905361676706, | |
| "grad_norm": 5.864034175872803, | |
| "learning_rate": 0.00010795305489099912, | |
| "loss": 2.1124, | |
| "step": 4450 | |
| }, | |
| { | |
| "epoch": 1.774281873848758, | |
| "grad_norm": 6.083292007446289, | |
| "learning_rate": 0.00010765278384550751, | |
| "loss": 1.9115, | |
| "step": 4455 | |
| }, | |
| { | |
| "epoch": 1.7762732115298452, | |
| "grad_norm": 6.627115249633789, | |
| "learning_rate": 0.00010735269707844854, | |
| "loss": 2.111, | |
| "step": 4460 | |
| }, | |
| { | |
| "epoch": 1.7782645492109324, | |
| "grad_norm": 6.6406168937683105, | |
| "learning_rate": 0.00010705279589568188, | |
| "loss": 2.225, | |
| "step": 4465 | |
| }, | |
| { | |
| "epoch": 1.7802558868920197, | |
| "grad_norm": 5.369992256164551, | |
| "learning_rate": 0.00010675308160225952, | |
| "loss": 2.2207, | |
| "step": 4470 | |
| }, | |
| { | |
| "epoch": 1.7822472245731071, | |
| "grad_norm": 5.870012283325195, | |
| "learning_rate": 0.00010645355550242027, | |
| "loss": 2.2278, | |
| "step": 4475 | |
| }, | |
| { | |
| "epoch": 1.7842385622541943, | |
| "grad_norm": 5.092059135437012, | |
| "learning_rate": 0.00010615421889958387, | |
| "loss": 2.2037, | |
| "step": 4480 | |
| }, | |
| { | |
| "epoch": 1.7862298999352815, | |
| "grad_norm": 5.079998970031738, | |
| "learning_rate": 0.00010585507309634553, | |
| "loss": 2.1696, | |
| "step": 4485 | |
| }, | |
| { | |
| "epoch": 1.7882212376163689, | |
| "grad_norm": 5.64705753326416, | |
| "learning_rate": 0.00010555611939447017, | |
| "loss": 2.1788, | |
| "step": 4490 | |
| }, | |
| { | |
| "epoch": 1.790212575297456, | |
| "grad_norm": 5.797013282775879, | |
| "learning_rate": 0.00010525735909488672, | |
| "loss": 2.102, | |
| "step": 4495 | |
| }, | |
| { | |
| "epoch": 1.7922039129785432, | |
| "grad_norm": 6.012421131134033, | |
| "learning_rate": 0.00010495879349768255, | |
| "loss": 2.1638, | |
| "step": 4500 | |
| }, | |
| { | |
| "epoch": 1.7941952506596306, | |
| "grad_norm": 6.438976287841797, | |
| "learning_rate": 0.0001046604239020977, | |
| "loss": 2.1953, | |
| "step": 4505 | |
| }, | |
| { | |
| "epoch": 1.796186588340718, | |
| "grad_norm": 4.844203948974609, | |
| "learning_rate": 0.00010436225160651935, | |
| "loss": 2.0658, | |
| "step": 4510 | |
| }, | |
| { | |
| "epoch": 1.7981779260218052, | |
| "grad_norm": 6.363651752471924, | |
| "learning_rate": 0.00010406427790847608, | |
| "loss": 2.1306, | |
| "step": 4515 | |
| }, | |
| { | |
| "epoch": 1.8001692637028923, | |
| "grad_norm": 4.920433044433594, | |
| "learning_rate": 0.00010376650410463223, | |
| "loss": 2.1241, | |
| "step": 4520 | |
| }, | |
| { | |
| "epoch": 1.8021606013839797, | |
| "grad_norm": 5.457321643829346, | |
| "learning_rate": 0.00010346893149078234, | |
| "loss": 2.1246, | |
| "step": 4525 | |
| }, | |
| { | |
| "epoch": 1.804151939065067, | |
| "grad_norm": 4.993035793304443, | |
| "learning_rate": 0.00010317156136184537, | |
| "loss": 2.1185, | |
| "step": 4530 | |
| }, | |
| { | |
| "epoch": 1.8061432767461543, | |
| "grad_norm": 5.458240032196045, | |
| "learning_rate": 0.00010287439501185923, | |
| "loss": 2.2005, | |
| "step": 4535 | |
| }, | |
| { | |
| "epoch": 1.8081346144272414, | |
| "grad_norm": 5.164868354797363, | |
| "learning_rate": 0.000102577433733975, | |
| "loss": 2.2367, | |
| "step": 4540 | |
| }, | |
| { | |
| "epoch": 1.8101259521083288, | |
| "grad_norm": 5.254199028015137, | |
| "learning_rate": 0.0001022806788204514, | |
| "loss": 2.1091, | |
| "step": 4545 | |
| }, | |
| { | |
| "epoch": 1.812117289789416, | |
| "grad_norm": 5.778502941131592, | |
| "learning_rate": 0.00010198413156264909, | |
| "loss": 2.2249, | |
| "step": 4550 | |
| }, | |
| { | |
| "epoch": 1.8141086274705032, | |
| "grad_norm": 5.061821460723877, | |
| "learning_rate": 0.00010168779325102513, | |
| "loss": 2.1306, | |
| "step": 4555 | |
| }, | |
| { | |
| "epoch": 1.8160999651515906, | |
| "grad_norm": 5.960233688354492, | |
| "learning_rate": 0.00010139166517512739, | |
| "loss": 2.2575, | |
| "step": 4560 | |
| }, | |
| { | |
| "epoch": 1.818091302832678, | |
| "grad_norm": 6.0032429695129395, | |
| "learning_rate": 0.00010109574862358872, | |
| "loss": 2.2546, | |
| "step": 4565 | |
| }, | |
| { | |
| "epoch": 1.8200826405137651, | |
| "grad_norm": 6.485595226287842, | |
| "learning_rate": 0.00010080004488412167, | |
| "loss": 2.1074, | |
| "step": 4570 | |
| }, | |
| { | |
| "epoch": 1.8220739781948523, | |
| "grad_norm": 5.097439765930176, | |
| "learning_rate": 0.00010050455524351256, | |
| "loss": 2.2229, | |
| "step": 4575 | |
| }, | |
| { | |
| "epoch": 1.8240653158759397, | |
| "grad_norm": 5.461282253265381, | |
| "learning_rate": 0.00010020928098761618, | |
| "loss": 2.2268, | |
| "step": 4580 | |
| }, | |
| { | |
| "epoch": 1.826056653557027, | |
| "grad_norm": 4.748180866241455, | |
| "learning_rate": 9.991422340134994e-05, | |
| "loss": 2.0874, | |
| "step": 4585 | |
| }, | |
| { | |
| "epoch": 1.8280479912381142, | |
| "grad_norm": 7.213197231292725, | |
| "learning_rate": 9.961938376868843e-05, | |
| "loss": 2.2942, | |
| "step": 4590 | |
| }, | |
| { | |
| "epoch": 1.8300393289192014, | |
| "grad_norm": 5.269963264465332, | |
| "learning_rate": 9.932476337265785e-05, | |
| "loss": 2.3143, | |
| "step": 4595 | |
| }, | |
| { | |
| "epoch": 1.8320306666002888, | |
| "grad_norm": 5.104089260101318, | |
| "learning_rate": 9.903036349533025e-05, | |
| "loss": 2.161, | |
| "step": 4600 | |
| }, | |
| { | |
| "epoch": 1.834022004281376, | |
| "grad_norm": 5.0273661613464355, | |
| "learning_rate": 9.873618541781814e-05, | |
| "loss": 2.121, | |
| "step": 4605 | |
| }, | |
| { | |
| "epoch": 1.8360133419624631, | |
| "grad_norm": 5.1278557777404785, | |
| "learning_rate": 9.844223042026885e-05, | |
| "loss": 2.1467, | |
| "step": 4610 | |
| }, | |
| { | |
| "epoch": 1.8380046796435505, | |
| "grad_norm": 6.490189552307129, | |
| "learning_rate": 9.814849978185895e-05, | |
| "loss": 2.1369, | |
| "step": 4615 | |
| }, | |
| { | |
| "epoch": 1.839996017324638, | |
| "grad_norm": 5.803715705871582, | |
| "learning_rate": 9.785499478078862e-05, | |
| "loss": 2.2422, | |
| "step": 4620 | |
| }, | |
| { | |
| "epoch": 1.841987355005725, | |
| "grad_norm": 5.247805118560791, | |
| "learning_rate": 9.756171669427626e-05, | |
| "loss": 2.2922, | |
| "step": 4625 | |
| }, | |
| { | |
| "epoch": 1.8439786926868122, | |
| "grad_norm": 4.887587547302246, | |
| "learning_rate": 9.726866679855279e-05, | |
| "loss": 2.1578, | |
| "step": 4630 | |
| }, | |
| { | |
| "epoch": 1.8459700303678996, | |
| "grad_norm": 5.077564239501953, | |
| "learning_rate": 9.697584636885608e-05, | |
| "loss": 2.1355, | |
| "step": 4635 | |
| }, | |
| { | |
| "epoch": 1.847961368048987, | |
| "grad_norm": 4.351661205291748, | |
| "learning_rate": 9.668325667942554e-05, | |
| "loss": 2.1895, | |
| "step": 4640 | |
| }, | |
| { | |
| "epoch": 1.8499527057300742, | |
| "grad_norm": 5.322985649108887, | |
| "learning_rate": 9.639089900349638e-05, | |
| "loss": 2.22, | |
| "step": 4645 | |
| }, | |
| { | |
| "epoch": 1.8519440434111614, | |
| "grad_norm": 5.055166721343994, | |
| "learning_rate": 9.609877461329433e-05, | |
| "loss": 2.1724, | |
| "step": 4650 | |
| }, | |
| { | |
| "epoch": 1.8539353810922488, | |
| "grad_norm": 5.176064968109131, | |
| "learning_rate": 9.580688478002977e-05, | |
| "loss": 2.1442, | |
| "step": 4655 | |
| }, | |
| { | |
| "epoch": 1.8559267187733361, | |
| "grad_norm": 5.6811418533325195, | |
| "learning_rate": 9.551523077389253e-05, | |
| "loss": 2.1636, | |
| "step": 4660 | |
| }, | |
| { | |
| "epoch": 1.857918056454423, | |
| "grad_norm": 6.5232038497924805, | |
| "learning_rate": 9.522381386404618e-05, | |
| "loss": 2.2238, | |
| "step": 4665 | |
| }, | |
| { | |
| "epoch": 1.8599093941355105, | |
| "grad_norm": 7.1458964347839355, | |
| "learning_rate": 9.493263531862245e-05, | |
| "loss": 2.191, | |
| "step": 4670 | |
| }, | |
| { | |
| "epoch": 1.8619007318165979, | |
| "grad_norm": 6.995946884155273, | |
| "learning_rate": 9.464169640471596e-05, | |
| "loss": 2.2831, | |
| "step": 4675 | |
| }, | |
| { | |
| "epoch": 1.863892069497685, | |
| "grad_norm": 4.7528486251831055, | |
| "learning_rate": 9.43509983883784e-05, | |
| "loss": 2.1966, | |
| "step": 4680 | |
| }, | |
| { | |
| "epoch": 1.8658834071787722, | |
| "grad_norm": 4.584712505340576, | |
| "learning_rate": 9.406054253461323e-05, | |
| "loss": 2.1003, | |
| "step": 4685 | |
| }, | |
| { | |
| "epoch": 1.8678747448598596, | |
| "grad_norm": 5.8391547203063965, | |
| "learning_rate": 9.377033010737011e-05, | |
| "loss": 2.129, | |
| "step": 4690 | |
| }, | |
| { | |
| "epoch": 1.869866082540947, | |
| "grad_norm": 5.241213798522949, | |
| "learning_rate": 9.348036236953942e-05, | |
| "loss": 2.1238, | |
| "step": 4695 | |
| }, | |
| { | |
| "epoch": 1.8718574202220342, | |
| "grad_norm": 5.671873092651367, | |
| "learning_rate": 9.319064058294673e-05, | |
| "loss": 2.0219, | |
| "step": 4700 | |
| }, | |
| { | |
| "epoch": 1.8738487579031213, | |
| "grad_norm": 5.927294731140137, | |
| "learning_rate": 9.290116600834731e-05, | |
| "loss": 2.2963, | |
| "step": 4705 | |
| }, | |
| { | |
| "epoch": 1.8758400955842087, | |
| "grad_norm": 5.177212715148926, | |
| "learning_rate": 9.261193990542072e-05, | |
| "loss": 2.0044, | |
| "step": 4710 | |
| }, | |
| { | |
| "epoch": 1.8778314332652961, | |
| "grad_norm": 5.216682434082031, | |
| "learning_rate": 9.232296353276517e-05, | |
| "loss": 2.072, | |
| "step": 4715 | |
| }, | |
| { | |
| "epoch": 1.8798227709463833, | |
| "grad_norm": 5.458096981048584, | |
| "learning_rate": 9.203423814789228e-05, | |
| "loss": 2.112, | |
| "step": 4720 | |
| }, | |
| { | |
| "epoch": 1.8818141086274704, | |
| "grad_norm": 6.221094131469727, | |
| "learning_rate": 9.174576500722132e-05, | |
| "loss": 2.1151, | |
| "step": 4725 | |
| }, | |
| { | |
| "epoch": 1.8838054463085578, | |
| "grad_norm": 5.665980339050293, | |
| "learning_rate": 9.1457545366074e-05, | |
| "loss": 2.0835, | |
| "step": 4730 | |
| }, | |
| { | |
| "epoch": 1.885796783989645, | |
| "grad_norm": 6.715048789978027, | |
| "learning_rate": 9.116958047866882e-05, | |
| "loss": 2.1682, | |
| "step": 4735 | |
| }, | |
| { | |
| "epoch": 1.8877881216707322, | |
| "grad_norm": 5.3913679122924805, | |
| "learning_rate": 9.088187159811576e-05, | |
| "loss": 2.1956, | |
| "step": 4740 | |
| }, | |
| { | |
| "epoch": 1.8897794593518196, | |
| "grad_norm": 4.760240077972412, | |
| "learning_rate": 9.059441997641079e-05, | |
| "loss": 2.1155, | |
| "step": 4745 | |
| }, | |
| { | |
| "epoch": 1.891770797032907, | |
| "grad_norm": 4.783170700073242, | |
| "learning_rate": 9.030722686443026e-05, | |
| "loss": 2.0731, | |
| "step": 4750 | |
| }, | |
| { | |
| "epoch": 1.8937621347139941, | |
| "grad_norm": 6.2002482414245605, | |
| "learning_rate": 9.00202935119257e-05, | |
| "loss": 2.15, | |
| "step": 4755 | |
| }, | |
| { | |
| "epoch": 1.8957534723950813, | |
| "grad_norm": 5.836517810821533, | |
| "learning_rate": 8.973362116751818e-05, | |
| "loss": 2.0689, | |
| "step": 4760 | |
| }, | |
| { | |
| "epoch": 1.8977448100761687, | |
| "grad_norm": 6.286279201507568, | |
| "learning_rate": 8.944721107869308e-05, | |
| "loss": 2.1428, | |
| "step": 4765 | |
| }, | |
| { | |
| "epoch": 1.899736147757256, | |
| "grad_norm": 5.639240264892578, | |
| "learning_rate": 8.916106449179444e-05, | |
| "loss": 2.1202, | |
| "step": 4770 | |
| }, | |
| { | |
| "epoch": 1.9017274854383432, | |
| "grad_norm": 4.700593948364258, | |
| "learning_rate": 8.887518265201971e-05, | |
| "loss": 2.0323, | |
| "step": 4775 | |
| }, | |
| { | |
| "epoch": 1.9037188231194304, | |
| "grad_norm": 5.773509979248047, | |
| "learning_rate": 8.858956680341422e-05, | |
| "loss": 2.121, | |
| "step": 4780 | |
| }, | |
| { | |
| "epoch": 1.9057101608005178, | |
| "grad_norm": 5.129843235015869, | |
| "learning_rate": 8.830421818886586e-05, | |
| "loss": 2.0498, | |
| "step": 4785 | |
| }, | |
| { | |
| "epoch": 1.907701498481605, | |
| "grad_norm": 5.609828948974609, | |
| "learning_rate": 8.801913805009959e-05, | |
| "loss": 2.1391, | |
| "step": 4790 | |
| }, | |
| { | |
| "epoch": 1.9096928361626921, | |
| "grad_norm": 4.961773872375488, | |
| "learning_rate": 8.773432762767202e-05, | |
| "loss": 2.1965, | |
| "step": 4795 | |
| }, | |
| { | |
| "epoch": 1.9116841738437795, | |
| "grad_norm": 5.141656875610352, | |
| "learning_rate": 8.744978816096618e-05, | |
| "loss": 2.0892, | |
| "step": 4800 | |
| }, | |
| { | |
| "epoch": 1.913675511524867, | |
| "grad_norm": 5.264577388763428, | |
| "learning_rate": 8.71655208881859e-05, | |
| "loss": 2.2029, | |
| "step": 4805 | |
| }, | |
| { | |
| "epoch": 1.915666849205954, | |
| "grad_norm": 5.03646183013916, | |
| "learning_rate": 8.688152704635062e-05, | |
| "loss": 2.1529, | |
| "step": 4810 | |
| }, | |
| { | |
| "epoch": 1.9176581868870413, | |
| "grad_norm": 5.15019416809082, | |
| "learning_rate": 8.659780787128976e-05, | |
| "loss": 2.1064, | |
| "step": 4815 | |
| }, | |
| { | |
| "epoch": 1.9196495245681287, | |
| "grad_norm": 4.951735496520996, | |
| "learning_rate": 8.631436459763774e-05, | |
| "loss": 2.0913, | |
| "step": 4820 | |
| }, | |
| { | |
| "epoch": 1.921640862249216, | |
| "grad_norm": 7.545402526855469, | |
| "learning_rate": 8.603119845882815e-05, | |
| "loss": 2.1739, | |
| "step": 4825 | |
| }, | |
| { | |
| "epoch": 1.9236321999303032, | |
| "grad_norm": 7.116678237915039, | |
| "learning_rate": 8.574831068708872e-05, | |
| "loss": 2.1532, | |
| "step": 4830 | |
| }, | |
| { | |
| "epoch": 1.9256235376113904, | |
| "grad_norm": 6.284858226776123, | |
| "learning_rate": 8.546570251343568e-05, | |
| "loss": 2.0795, | |
| "step": 4835 | |
| }, | |
| { | |
| "epoch": 1.9276148752924778, | |
| "grad_norm": 5.071526050567627, | |
| "learning_rate": 8.518337516766882e-05, | |
| "loss": 2.1881, | |
| "step": 4840 | |
| }, | |
| { | |
| "epoch": 1.929606212973565, | |
| "grad_norm": 5.03200626373291, | |
| "learning_rate": 8.49013298783657e-05, | |
| "loss": 2.024, | |
| "step": 4845 | |
| }, | |
| { | |
| "epoch": 1.931597550654652, | |
| "grad_norm": 5.654317855834961, | |
| "learning_rate": 8.461956787287639e-05, | |
| "loss": 2.1415, | |
| "step": 4850 | |
| }, | |
| { | |
| "epoch": 1.9335888883357395, | |
| "grad_norm": 5.167952060699463, | |
| "learning_rate": 8.433809037731851e-05, | |
| "loss": 2.0673, | |
| "step": 4855 | |
| }, | |
| { | |
| "epoch": 1.9355802260168269, | |
| "grad_norm": 5.573550701141357, | |
| "learning_rate": 8.405689861657135e-05, | |
| "loss": 2.2054, | |
| "step": 4860 | |
| }, | |
| { | |
| "epoch": 1.937571563697914, | |
| "grad_norm": 5.166540622711182, | |
| "learning_rate": 8.37759938142709e-05, | |
| "loss": 2.0969, | |
| "step": 4865 | |
| }, | |
| { | |
| "epoch": 1.9395629013790012, | |
| "grad_norm": 4.89374303817749, | |
| "learning_rate": 8.349537719280432e-05, | |
| "loss": 2.1023, | |
| "step": 4870 | |
| }, | |
| { | |
| "epoch": 1.9415542390600886, | |
| "grad_norm": 5.91700553894043, | |
| "learning_rate": 8.321504997330495e-05, | |
| "loss": 2.2145, | |
| "step": 4875 | |
| }, | |
| { | |
| "epoch": 1.943545576741176, | |
| "grad_norm": 5.197847366333008, | |
| "learning_rate": 8.293501337564653e-05, | |
| "loss": 2.1721, | |
| "step": 4880 | |
| }, | |
| { | |
| "epoch": 1.9455369144222632, | |
| "grad_norm": 4.873611927032471, | |
| "learning_rate": 8.265526861843816e-05, | |
| "loss": 2.1454, | |
| "step": 4885 | |
| }, | |
| { | |
| "epoch": 1.9475282521033503, | |
| "grad_norm": 5.418996334075928, | |
| "learning_rate": 8.237581691901915e-05, | |
| "loss": 2.1281, | |
| "step": 4890 | |
| }, | |
| { | |
| "epoch": 1.9495195897844377, | |
| "grad_norm": 5.226861476898193, | |
| "learning_rate": 8.209665949345333e-05, | |
| "loss": 2.2334, | |
| "step": 4895 | |
| }, | |
| { | |
| "epoch": 1.9515109274655251, | |
| "grad_norm": 6.427994251251221, | |
| "learning_rate": 8.181779755652409e-05, | |
| "loss": 2.1161, | |
| "step": 4900 | |
| }, | |
| { | |
| "epoch": 1.953502265146612, | |
| "grad_norm": 5.055788993835449, | |
| "learning_rate": 8.153923232172882e-05, | |
| "loss": 2.0538, | |
| "step": 4905 | |
| }, | |
| { | |
| "epoch": 1.9554936028276995, | |
| "grad_norm": 5.816956996917725, | |
| "learning_rate": 8.126096500127399e-05, | |
| "loss": 2.1474, | |
| "step": 4910 | |
| }, | |
| { | |
| "epoch": 1.9574849405087869, | |
| "grad_norm": 4.791458606719971, | |
| "learning_rate": 8.098299680606955e-05, | |
| "loss": 2.0556, | |
| "step": 4915 | |
| }, | |
| { | |
| "epoch": 1.959476278189874, | |
| "grad_norm": 6.023045063018799, | |
| "learning_rate": 8.07053289457237e-05, | |
| "loss": 2.185, | |
| "step": 4920 | |
| }, | |
| { | |
| "epoch": 1.9614676158709612, | |
| "grad_norm": 5.472064971923828, | |
| "learning_rate": 8.042796262853786e-05, | |
| "loss": 2.0859, | |
| "step": 4925 | |
| }, | |
| { | |
| "epoch": 1.9634589535520486, | |
| "grad_norm": 4.683407783508301, | |
| "learning_rate": 8.015089906150117e-05, | |
| "loss": 2.1597, | |
| "step": 4930 | |
| }, | |
| { | |
| "epoch": 1.965450291233136, | |
| "grad_norm": 5.039796352386475, | |
| "learning_rate": 7.98741394502853e-05, | |
| "loss": 2.0879, | |
| "step": 4935 | |
| }, | |
| { | |
| "epoch": 1.9674416289142231, | |
| "grad_norm": null, | |
| "learning_rate": 7.965295141887069e-05, | |
| "loss": 2.1786, | |
| "step": 4940 | |
| }, | |
| { | |
| "epoch": 1.9694329665953103, | |
| "grad_norm": 4.787463665008545, | |
| "learning_rate": 7.937674196219953e-05, | |
| "loss": 2.2384, | |
| "step": 4945 | |
| }, | |
| { | |
| "epoch": 1.9714243042763977, | |
| "grad_norm": 5.068600654602051, | |
| "learning_rate": 7.910083983017654e-05, | |
| "loss": 2.162, | |
| "step": 4950 | |
| }, | |
| { | |
| "epoch": 1.973415641957485, | |
| "grad_norm": 4.847311496734619, | |
| "learning_rate": 7.882524622341922e-05, | |
| "loss": 2.0265, | |
| "step": 4955 | |
| }, | |
| { | |
| "epoch": 1.9754069796385723, | |
| "grad_norm": 5.144827365875244, | |
| "learning_rate": 7.854996234120263e-05, | |
| "loss": 2.2106, | |
| "step": 4960 | |
| }, | |
| { | |
| "epoch": 1.9773983173196594, | |
| "grad_norm": 5.553465366363525, | |
| "learning_rate": 7.827498938145386e-05, | |
| "loss": 2.0393, | |
| "step": 4965 | |
| }, | |
| { | |
| "epoch": 1.9793896550007468, | |
| "grad_norm": 4.800995826721191, | |
| "learning_rate": 7.800032854074723e-05, | |
| "loss": 2.0138, | |
| "step": 4970 | |
| }, | |
| { | |
| "epoch": 1.981380992681834, | |
| "grad_norm": 5.342923164367676, | |
| "learning_rate": 7.772598101429869e-05, | |
| "loss": 2.0343, | |
| "step": 4975 | |
| }, | |
| { | |
| "epoch": 1.9833723303629212, | |
| "grad_norm": 6.1510443687438965, | |
| "learning_rate": 7.745194799596073e-05, | |
| "loss": 2.2479, | |
| "step": 4980 | |
| }, | |
| { | |
| "epoch": 1.9853636680440085, | |
| "grad_norm": 5.93689489364624, | |
| "learning_rate": 7.717823067821737e-05, | |
| "loss": 2.2015, | |
| "step": 4985 | |
| }, | |
| { | |
| "epoch": 1.987355005725096, | |
| "grad_norm": 4.906221389770508, | |
| "learning_rate": 7.690483025217871e-05, | |
| "loss": 2.0462, | |
| "step": 4990 | |
| }, | |
| { | |
| "epoch": 1.989346343406183, | |
| "grad_norm": 5.339593410491943, | |
| "learning_rate": 7.663174790757592e-05, | |
| "loss": 2.1735, | |
| "step": 4995 | |
| }, | |
| { | |
| "epoch": 1.9913376810872703, | |
| "grad_norm": 5.099614143371582, | |
| "learning_rate": 7.635898483275588e-05, | |
| "loss": 2.1925, | |
| "step": 5000 | |
| }, | |
| { | |
| "epoch": 1.9933290187683577, | |
| "grad_norm": 4.778752326965332, | |
| "learning_rate": 7.608654221467637e-05, | |
| "loss": 2.1225, | |
| "step": 5005 | |
| }, | |
| { | |
| "epoch": 1.995320356449445, | |
| "grad_norm": 4.891331672668457, | |
| "learning_rate": 7.58144212389005e-05, | |
| "loss": 2.1722, | |
| "step": 5010 | |
| }, | |
| { | |
| "epoch": 1.9973116941305322, | |
| "grad_norm": 6.8483381271362305, | |
| "learning_rate": 7.554262308959163e-05, | |
| "loss": 2.0786, | |
| "step": 5015 | |
| }, | |
| { | |
| "epoch": 1.9993030318116194, | |
| "grad_norm": 6.551092147827148, | |
| "learning_rate": 7.52711489495086e-05, | |
| "loss": 1.9824, | |
| "step": 5020 | |
| }, | |
| { | |
| "epoch": 2.001294369492707, | |
| "grad_norm": 3.9980087280273438, | |
| "learning_rate": 7.500000000000002e-05, | |
| "loss": 1.6428, | |
| "step": 5025 | |
| }, | |
| { | |
| "epoch": 2.003285707173794, | |
| "grad_norm": 4.957551956176758, | |
| "learning_rate": 7.472917742099953e-05, | |
| "loss": 1.6547, | |
| "step": 5030 | |
| }, | |
| { | |
| "epoch": 2.005277044854881, | |
| "grad_norm": 4.297199249267578, | |
| "learning_rate": 7.445868239102042e-05, | |
| "loss": 1.5545, | |
| "step": 5035 | |
| }, | |
| { | |
| "epoch": 2.0072683825359685, | |
| "grad_norm": 5.14373779296875, | |
| "learning_rate": 7.418851608715084e-05, | |
| "loss": 1.4708, | |
| "step": 5040 | |
| }, | |
| { | |
| "epoch": 2.009259720217056, | |
| "grad_norm": 6.700247764587402, | |
| "learning_rate": 7.39186796850482e-05, | |
| "loss": 1.4207, | |
| "step": 5045 | |
| }, | |
| { | |
| "epoch": 2.0112510578981433, | |
| "grad_norm": 8.440861701965332, | |
| "learning_rate": 7.364917435893446e-05, | |
| "loss": 1.5883, | |
| "step": 5050 | |
| }, | |
| { | |
| "epoch": 2.0132423955792302, | |
| "grad_norm": 6.795040130615234, | |
| "learning_rate": 7.338000128159074e-05, | |
| "loss": 1.3543, | |
| "step": 5055 | |
| }, | |
| { | |
| "epoch": 2.0152337332603176, | |
| "grad_norm": 9.68851375579834, | |
| "learning_rate": 7.311116162435254e-05, | |
| "loss": 1.5251, | |
| "step": 5060 | |
| }, | |
| { | |
| "epoch": 2.017225070941405, | |
| "grad_norm": 6.082462787628174, | |
| "learning_rate": 7.284265655710429e-05, | |
| "loss": 1.4305, | |
| "step": 5065 | |
| }, | |
| { | |
| "epoch": 2.019216408622492, | |
| "grad_norm": 4.950602054595947, | |
| "learning_rate": 7.257448724827442e-05, | |
| "loss": 1.4375, | |
| "step": 5070 | |
| }, | |
| { | |
| "epoch": 2.0212077463035794, | |
| "grad_norm": 5.117416858673096, | |
| "learning_rate": 7.23066548648304e-05, | |
| "loss": 1.5044, | |
| "step": 5075 | |
| }, | |
| { | |
| "epoch": 2.0231990839846667, | |
| "grad_norm": 6.734591960906982, | |
| "learning_rate": 7.203916057227338e-05, | |
| "loss": 1.4885, | |
| "step": 5080 | |
| }, | |
| { | |
| "epoch": 2.025190421665754, | |
| "grad_norm": 5.435779571533203, | |
| "learning_rate": 7.177200553463333e-05, | |
| "loss": 1.4702, | |
| "step": 5085 | |
| }, | |
| { | |
| "epoch": 2.027181759346841, | |
| "grad_norm": 5.962007999420166, | |
| "learning_rate": 7.15051909144639e-05, | |
| "loss": 1.4807, | |
| "step": 5090 | |
| }, | |
| { | |
| "epoch": 2.0291730970279285, | |
| "grad_norm": 7.344203948974609, | |
| "learning_rate": 7.123871787283748e-05, | |
| "loss": 1.3754, | |
| "step": 5095 | |
| }, | |
| { | |
| "epoch": 2.031164434709016, | |
| "grad_norm": 5.459081649780273, | |
| "learning_rate": 7.097258756933995e-05, | |
| "loss": 1.353, | |
| "step": 5100 | |
| }, | |
| { | |
| "epoch": 2.0331557723901033, | |
| "grad_norm": 8.167816162109375, | |
| "learning_rate": 7.070680116206563e-05, | |
| "loss": 1.4694, | |
| "step": 5105 | |
| }, | |
| { | |
| "epoch": 2.03514711007119, | |
| "grad_norm": 5.838662147521973, | |
| "learning_rate": 7.044135980761259e-05, | |
| "loss": 1.3246, | |
| "step": 5110 | |
| }, | |
| { | |
| "epoch": 2.0371384477522776, | |
| "grad_norm": 6.302445888519287, | |
| "learning_rate": 7.017626466107718e-05, | |
| "loss": 1.5211, | |
| "step": 5115 | |
| }, | |
| { | |
| "epoch": 2.039129785433365, | |
| "grad_norm": 5.905932426452637, | |
| "learning_rate": 6.991151687604922e-05, | |
| "loss": 1.4369, | |
| "step": 5120 | |
| }, | |
| { | |
| "epoch": 2.041121123114452, | |
| "grad_norm": 5.84182596206665, | |
| "learning_rate": 6.96471176046069e-05, | |
| "loss": 1.3727, | |
| "step": 5125 | |
| }, | |
| { | |
| "epoch": 2.0431124607955393, | |
| "grad_norm": 5.710243225097656, | |
| "learning_rate": 6.938306799731201e-05, | |
| "loss": 1.5538, | |
| "step": 5130 | |
| }, | |
| { | |
| "epoch": 2.0451037984766267, | |
| "grad_norm": 5.345917701721191, | |
| "learning_rate": 6.911936920320456e-05, | |
| "loss": 1.4582, | |
| "step": 5135 | |
| }, | |
| { | |
| "epoch": 2.047095136157714, | |
| "grad_norm": 5.621410369873047, | |
| "learning_rate": 6.885602236979794e-05, | |
| "loss": 1.4463, | |
| "step": 5140 | |
| }, | |
| { | |
| "epoch": 2.049086473838801, | |
| "grad_norm": 6.868717670440674, | |
| "learning_rate": 6.859302864307412e-05, | |
| "loss": 1.4782, | |
| "step": 5145 | |
| }, | |
| { | |
| "epoch": 2.0510778115198884, | |
| "grad_norm": 5.948458671569824, | |
| "learning_rate": 6.833038916747834e-05, | |
| "loss": 1.4206, | |
| "step": 5150 | |
| }, | |
| { | |
| "epoch": 2.053069149200976, | |
| "grad_norm": 6.426418304443359, | |
| "learning_rate": 6.806810508591432e-05, | |
| "loss": 1.4499, | |
| "step": 5155 | |
| }, | |
| { | |
| "epoch": 2.055060486882063, | |
| "grad_norm": 6.86967658996582, | |
| "learning_rate": 6.780617753973917e-05, | |
| "loss": 1.5276, | |
| "step": 5160 | |
| }, | |
| { | |
| "epoch": 2.05705182456315, | |
| "grad_norm": 6.395193576812744, | |
| "learning_rate": 6.75446076687587e-05, | |
| "loss": 1.4228, | |
| "step": 5165 | |
| }, | |
| { | |
| "epoch": 2.0590431622442376, | |
| "grad_norm": 6.809487342834473, | |
| "learning_rate": 6.728339661122211e-05, | |
| "loss": 1.4222, | |
| "step": 5170 | |
| }, | |
| { | |
| "epoch": 2.061034499925325, | |
| "grad_norm": 5.599830627441406, | |
| "learning_rate": 6.702254550381711e-05, | |
| "loss": 1.4701, | |
| "step": 5175 | |
| }, | |
| { | |
| "epoch": 2.063025837606412, | |
| "grad_norm": 5.606078147888184, | |
| "learning_rate": 6.676205548166525e-05, | |
| "loss": 1.5687, | |
| "step": 5180 | |
| }, | |
| { | |
| "epoch": 2.0650171752874993, | |
| "grad_norm": 6.464372634887695, | |
| "learning_rate": 6.650192767831659e-05, | |
| "loss": 1.4237, | |
| "step": 5185 | |
| }, | |
| { | |
| "epoch": 2.0670085129685867, | |
| "grad_norm": 5.501936435699463, | |
| "learning_rate": 6.624216322574514e-05, | |
| "loss": 1.451, | |
| "step": 5190 | |
| }, | |
| { | |
| "epoch": 2.068999850649674, | |
| "grad_norm": 6.523349285125732, | |
| "learning_rate": 6.598276325434356e-05, | |
| "loss": 1.3937, | |
| "step": 5195 | |
| }, | |
| { | |
| "epoch": 2.070991188330761, | |
| "grad_norm": 5.1206278800964355, | |
| "learning_rate": 6.57237288929186e-05, | |
| "loss": 1.4379, | |
| "step": 5200 | |
| }, | |
| { | |
| "epoch": 2.0729825260118484, | |
| "grad_norm": 8.085687637329102, | |
| "learning_rate": 6.546506126868592e-05, | |
| "loss": 1.6361, | |
| "step": 5205 | |
| }, | |
| { | |
| "epoch": 2.074973863692936, | |
| "grad_norm": 6.567965507507324, | |
| "learning_rate": 6.52067615072653e-05, | |
| "loss": 1.5065, | |
| "step": 5210 | |
| }, | |
| { | |
| "epoch": 2.076965201374023, | |
| "grad_norm": 5.279557228088379, | |
| "learning_rate": 6.494883073267566e-05, | |
| "loss": 1.5345, | |
| "step": 5215 | |
| }, | |
| { | |
| "epoch": 2.07895653905511, | |
| "grad_norm": 5.705742835998535, | |
| "learning_rate": 6.46912700673304e-05, | |
| "loss": 1.3541, | |
| "step": 5220 | |
| }, | |
| { | |
| "epoch": 2.0809478767361975, | |
| "grad_norm": 5.617422103881836, | |
| "learning_rate": 6.443408063203225e-05, | |
| "loss": 1.4141, | |
| "step": 5225 | |
| }, | |
| { | |
| "epoch": 2.082939214417285, | |
| "grad_norm": 7.766902923583984, | |
| "learning_rate": 6.417726354596842e-05, | |
| "loss": 1.3367, | |
| "step": 5230 | |
| }, | |
| { | |
| "epoch": 2.0849305520983723, | |
| "grad_norm": 7.059458255767822, | |
| "learning_rate": 6.392081992670598e-05, | |
| "loss": 1.473, | |
| "step": 5235 | |
| }, | |
| { | |
| "epoch": 2.0869218897794592, | |
| "grad_norm": 6.633787631988525, | |
| "learning_rate": 6.366475089018668e-05, | |
| "loss": 1.4581, | |
| "step": 5240 | |
| }, | |
| { | |
| "epoch": 2.0889132274605466, | |
| "grad_norm": 5.493014335632324, | |
| "learning_rate": 6.340905755072227e-05, | |
| "loss": 1.4396, | |
| "step": 5245 | |
| }, | |
| { | |
| "epoch": 2.090904565141634, | |
| "grad_norm": 6.356673717498779, | |
| "learning_rate": 6.315374102098956e-05, | |
| "loss": 1.4986, | |
| "step": 5250 | |
| }, | |
| { | |
| "epoch": 2.092895902822721, | |
| "grad_norm": 6.202879905700684, | |
| "learning_rate": 6.28988024120258e-05, | |
| "loss": 1.3253, | |
| "step": 5255 | |
| }, | |
| { | |
| "epoch": 2.0948872405038084, | |
| "grad_norm": 6.061187744140625, | |
| "learning_rate": 6.264424283322353e-05, | |
| "loss": 1.4252, | |
| "step": 5260 | |
| }, | |
| { | |
| "epoch": 2.0968785781848958, | |
| "grad_norm": 6.786153316497803, | |
| "learning_rate": 6.239006339232592e-05, | |
| "loss": 1.3983, | |
| "step": 5265 | |
| }, | |
| { | |
| "epoch": 2.098869915865983, | |
| "grad_norm": 6.876367092132568, | |
| "learning_rate": 6.213626519542189e-05, | |
| "loss": 1.4465, | |
| "step": 5270 | |
| }, | |
| { | |
| "epoch": 2.10086125354707, | |
| "grad_norm": 5.9195170402526855, | |
| "learning_rate": 6.188284934694152e-05, | |
| "loss": 1.4788, | |
| "step": 5275 | |
| }, | |
| { | |
| "epoch": 2.1028525912281575, | |
| "grad_norm": 6.0256242752075195, | |
| "learning_rate": 6.162981694965086e-05, | |
| "loss": 1.5226, | |
| "step": 5280 | |
| }, | |
| { | |
| "epoch": 2.104843928909245, | |
| "grad_norm": 8.436933517456055, | |
| "learning_rate": 6.137716910464735e-05, | |
| "loss": 1.4933, | |
| "step": 5285 | |
| }, | |
| { | |
| "epoch": 2.1068352665903323, | |
| "grad_norm": 6.069702625274658, | |
| "learning_rate": 6.112490691135518e-05, | |
| "loss": 1.4294, | |
| "step": 5290 | |
| }, | |
| { | |
| "epoch": 2.108826604271419, | |
| "grad_norm": 6.045875549316406, | |
| "learning_rate": 6.087303146752018e-05, | |
| "loss": 1.4371, | |
| "step": 5295 | |
| }, | |
| { | |
| "epoch": 2.1108179419525066, | |
| "grad_norm": 7.140870571136475, | |
| "learning_rate": 6.062154386920521e-05, | |
| "loss": 1.4965, | |
| "step": 5300 | |
| }, | |
| { | |
| "epoch": 2.112809279633594, | |
| "grad_norm": 7.140031814575195, | |
| "learning_rate": 6.037044521078536e-05, | |
| "loss": 1.5039, | |
| "step": 5305 | |
| }, | |
| { | |
| "epoch": 2.114800617314681, | |
| "grad_norm": 6.5458879470825195, | |
| "learning_rate": 6.011973658494337e-05, | |
| "loss": 1.478, | |
| "step": 5310 | |
| }, | |
| { | |
| "epoch": 2.1167919549957683, | |
| "grad_norm": 6.794205665588379, | |
| "learning_rate": 5.9869419082664525e-05, | |
| "loss": 1.4578, | |
| "step": 5315 | |
| }, | |
| { | |
| "epoch": 2.1187832926768557, | |
| "grad_norm": 6.228168487548828, | |
| "learning_rate": 5.961949379323214e-05, | |
| "loss": 1.3406, | |
| "step": 5320 | |
| }, | |
| { | |
| "epoch": 2.120774630357943, | |
| "grad_norm": 6.367410182952881, | |
| "learning_rate": 5.9369961804222875e-05, | |
| "loss": 1.3709, | |
| "step": 5325 | |
| }, | |
| { | |
| "epoch": 2.12276596803903, | |
| "grad_norm": 6.626134872436523, | |
| "learning_rate": 5.912082420150179e-05, | |
| "loss": 1.3714, | |
| "step": 5330 | |
| }, | |
| { | |
| "epoch": 2.1247573057201175, | |
| "grad_norm": 5.566625595092773, | |
| "learning_rate": 5.887208206921774e-05, | |
| "loss": 1.3924, | |
| "step": 5335 | |
| }, | |
| { | |
| "epoch": 2.126748643401205, | |
| "grad_norm": 5.629339218139648, | |
| "learning_rate": 5.862373648979865e-05, | |
| "loss": 1.5259, | |
| "step": 5340 | |
| }, | |
| { | |
| "epoch": 2.1287399810822922, | |
| "grad_norm": 6.338861465454102, | |
| "learning_rate": 5.837578854394692e-05, | |
| "loss": 1.4742, | |
| "step": 5345 | |
| }, | |
| { | |
| "epoch": 2.130731318763379, | |
| "grad_norm": 6.18589448928833, | |
| "learning_rate": 5.812823931063445e-05, | |
| "loss": 1.387, | |
| "step": 5350 | |
| }, | |
| { | |
| "epoch": 2.1327226564444666, | |
| "grad_norm": 5.2369704246521, | |
| "learning_rate": 5.78810898670981e-05, | |
| "loss": 1.4379, | |
| "step": 5355 | |
| }, | |
| { | |
| "epoch": 2.134713994125554, | |
| "grad_norm": 6.575031280517578, | |
| "learning_rate": 5.7634341288835196e-05, | |
| "loss": 1.4462, | |
| "step": 5360 | |
| }, | |
| { | |
| "epoch": 2.136705331806641, | |
| "grad_norm": 5.535890102386475, | |
| "learning_rate": 5.738799464959845e-05, | |
| "loss": 1.4794, | |
| "step": 5365 | |
| }, | |
| { | |
| "epoch": 2.1386966694877283, | |
| "grad_norm": 6.6877827644348145, | |
| "learning_rate": 5.714205102139157e-05, | |
| "loss": 1.4962, | |
| "step": 5370 | |
| }, | |
| { | |
| "epoch": 2.1406880071688157, | |
| "grad_norm": 6.816041469573975, | |
| "learning_rate": 5.689651147446448e-05, | |
| "loss": 1.3646, | |
| "step": 5375 | |
| }, | |
| { | |
| "epoch": 2.142679344849903, | |
| "grad_norm": 5.999199390411377, | |
| "learning_rate": 5.66513770773088e-05, | |
| "loss": 1.4707, | |
| "step": 5380 | |
| }, | |
| { | |
| "epoch": 2.14467068253099, | |
| "grad_norm": 6.749256610870361, | |
| "learning_rate": 5.6406648896653026e-05, | |
| "loss": 1.4804, | |
| "step": 5385 | |
| }, | |
| { | |
| "epoch": 2.1466620202120774, | |
| "grad_norm": 5.8225603103637695, | |
| "learning_rate": 5.616232799745788e-05, | |
| "loss": 1.4235, | |
| "step": 5390 | |
| }, | |
| { | |
| "epoch": 2.148653357893165, | |
| "grad_norm": 6.044665336608887, | |
| "learning_rate": 5.5918415442911974e-05, | |
| "loss": 1.4927, | |
| "step": 5395 | |
| }, | |
| { | |
| "epoch": 2.150644695574252, | |
| "grad_norm": 7.252010822296143, | |
| "learning_rate": 5.567491229442675e-05, | |
| "loss": 1.4286, | |
| "step": 5400 | |
| }, | |
| { | |
| "epoch": 2.152636033255339, | |
| "grad_norm": 5.604063987731934, | |
| "learning_rate": 5.543181961163219e-05, | |
| "loss": 1.376, | |
| "step": 5405 | |
| }, | |
| { | |
| "epoch": 2.1546273709364265, | |
| "grad_norm": 6.4235029220581055, | |
| "learning_rate": 5.518913845237197e-05, | |
| "loss": 1.4612, | |
| "step": 5410 | |
| }, | |
| { | |
| "epoch": 2.156618708617514, | |
| "grad_norm": 7.064362525939941, | |
| "learning_rate": 5.494686987269922e-05, | |
| "loss": 1.4151, | |
| "step": 5415 | |
| }, | |
| { | |
| "epoch": 2.158610046298601, | |
| "grad_norm": 5.43442964553833, | |
| "learning_rate": 5.470501492687144e-05, | |
| "loss": 1.3365, | |
| "step": 5420 | |
| }, | |
| { | |
| "epoch": 2.1606013839796883, | |
| "grad_norm": 6.117883205413818, | |
| "learning_rate": 5.4463574667346224e-05, | |
| "loss": 1.3991, | |
| "step": 5425 | |
| }, | |
| { | |
| "epoch": 2.1625927216607757, | |
| "grad_norm": 7.441193580627441, | |
| "learning_rate": 5.4222550144776716e-05, | |
| "loss": 1.5015, | |
| "step": 5430 | |
| }, | |
| { | |
| "epoch": 2.164584059341863, | |
| "grad_norm": 5.909252643585205, | |
| "learning_rate": 5.398194240800683e-05, | |
| "loss": 1.507, | |
| "step": 5435 | |
| }, | |
| { | |
| "epoch": 2.16657539702295, | |
| "grad_norm": 5.986944198608398, | |
| "learning_rate": 5.374175250406682e-05, | |
| "loss": 1.4358, | |
| "step": 5440 | |
| }, | |
| { | |
| "epoch": 2.1685667347040374, | |
| "grad_norm": 6.824896335601807, | |
| "learning_rate": 5.350198147816866e-05, | |
| "loss": 1.4419, | |
| "step": 5445 | |
| }, | |
| { | |
| "epoch": 2.1705580723851248, | |
| "grad_norm": 6.392977237701416, | |
| "learning_rate": 5.326263037370164e-05, | |
| "loss": 1.3984, | |
| "step": 5450 | |
| }, | |
| { | |
| "epoch": 2.172549410066212, | |
| "grad_norm": 5.7649126052856445, | |
| "learning_rate": 5.302370023222764e-05, | |
| "loss": 1.4324, | |
| "step": 5455 | |
| }, | |
| { | |
| "epoch": 2.174540747747299, | |
| "grad_norm": 6.149197101593018, | |
| "learning_rate": 5.27851920934766e-05, | |
| "loss": 1.3704, | |
| "step": 5460 | |
| }, | |
| { | |
| "epoch": 2.1765320854283865, | |
| "grad_norm": 6.189772605895996, | |
| "learning_rate": 5.254710699534225e-05, | |
| "loss": 1.4444, | |
| "step": 5465 | |
| }, | |
| { | |
| "epoch": 2.178523423109474, | |
| "grad_norm": 6.396068572998047, | |
| "learning_rate": 5.2309445973877304e-05, | |
| "loss": 1.4017, | |
| "step": 5470 | |
| }, | |
| { | |
| "epoch": 2.180514760790561, | |
| "grad_norm": 7.503396987915039, | |
| "learning_rate": 5.2072210063289014e-05, | |
| "loss": 1.4759, | |
| "step": 5475 | |
| }, | |
| { | |
| "epoch": 2.1825060984716482, | |
| "grad_norm": 5.856228828430176, | |
| "learning_rate": 5.183540029593478e-05, | |
| "loss": 1.4067, | |
| "step": 5480 | |
| }, | |
| { | |
| "epoch": 2.1844974361527356, | |
| "grad_norm": 6.6336140632629395, | |
| "learning_rate": 5.1599017702317665e-05, | |
| "loss": 1.3564, | |
| "step": 5485 | |
| }, | |
| { | |
| "epoch": 2.186488773833823, | |
| "grad_norm": 5.758519172668457, | |
| "learning_rate": 5.1363063311081715e-05, | |
| "loss": 1.4527, | |
| "step": 5490 | |
| }, | |
| { | |
| "epoch": 2.18848011151491, | |
| "grad_norm": 6.567979335784912, | |
| "learning_rate": 5.1127538149007616e-05, | |
| "loss": 1.4816, | |
| "step": 5495 | |
| }, | |
| { | |
| "epoch": 2.1904714491959973, | |
| "grad_norm": 5.97678804397583, | |
| "learning_rate": 5.089244324100834e-05, | |
| "loss": 1.5056, | |
| "step": 5500 | |
| }, | |
| { | |
| "epoch": 2.1924627868770847, | |
| "grad_norm": 6.623924732208252, | |
| "learning_rate": 5.065777961012445e-05, | |
| "loss": 1.5091, | |
| "step": 5505 | |
| }, | |
| { | |
| "epoch": 2.194454124558172, | |
| "grad_norm": 6.162147521972656, | |
| "learning_rate": 5.0423548277519795e-05, | |
| "loss": 1.4491, | |
| "step": 5510 | |
| }, | |
| { | |
| "epoch": 2.196445462239259, | |
| "grad_norm": 6.854715824127197, | |
| "learning_rate": 5.0189750262476965e-05, | |
| "loss": 1.4854, | |
| "step": 5515 | |
| }, | |
| { | |
| "epoch": 2.1984367999203465, | |
| "grad_norm": 6.7204270362854, | |
| "learning_rate": 4.995638658239309e-05, | |
| "loss": 1.3953, | |
| "step": 5520 | |
| }, | |
| { | |
| "epoch": 2.200428137601434, | |
| "grad_norm": 5.354440212249756, | |
| "learning_rate": 4.9723458252775093e-05, | |
| "loss": 1.3548, | |
| "step": 5525 | |
| }, | |
| { | |
| "epoch": 2.202419475282521, | |
| "grad_norm": 6.147752285003662, | |
| "learning_rate": 4.9490966287235416e-05, | |
| "loss": 1.4881, | |
| "step": 5530 | |
| }, | |
| { | |
| "epoch": 2.204410812963608, | |
| "grad_norm": 6.762166500091553, | |
| "learning_rate": 4.9258911697487776e-05, | |
| "loss": 1.3972, | |
| "step": 5535 | |
| }, | |
| { | |
| "epoch": 2.2064021506446956, | |
| "grad_norm": 7.021646976470947, | |
| "learning_rate": 4.902729549334245e-05, | |
| "loss": 1.4337, | |
| "step": 5540 | |
| }, | |
| { | |
| "epoch": 2.208393488325783, | |
| "grad_norm": 6.543638706207275, | |
| "learning_rate": 4.879611868270207e-05, | |
| "loss": 1.3513, | |
| "step": 5545 | |
| }, | |
| { | |
| "epoch": 2.21038482600687, | |
| "grad_norm": 6.03616189956665, | |
| "learning_rate": 4.856538227155718e-05, | |
| "loss": 1.5526, | |
| "step": 5550 | |
| }, | |
| { | |
| "epoch": 2.2123761636879573, | |
| "grad_norm": 6.789417743682861, | |
| "learning_rate": 4.8335087263981994e-05, | |
| "loss": 1.3557, | |
| "step": 5555 | |
| }, | |
| { | |
| "epoch": 2.2143675013690447, | |
| "grad_norm": 6.264193058013916, | |
| "learning_rate": 4.810523466212979e-05, | |
| "loss": 1.4205, | |
| "step": 5560 | |
| }, | |
| { | |
| "epoch": 2.216358839050132, | |
| "grad_norm": 6.371268272399902, | |
| "learning_rate": 4.787582546622864e-05, | |
| "loss": 1.4186, | |
| "step": 5565 | |
| }, | |
| { | |
| "epoch": 2.218350176731219, | |
| "grad_norm": 6.438905239105225, | |
| "learning_rate": 4.764686067457726e-05, | |
| "loss": 1.4787, | |
| "step": 5570 | |
| }, | |
| { | |
| "epoch": 2.2203415144123064, | |
| "grad_norm": 6.756014347076416, | |
| "learning_rate": 4.7418341283540354e-05, | |
| "loss": 1.4171, | |
| "step": 5575 | |
| }, | |
| { | |
| "epoch": 2.222332852093394, | |
| "grad_norm": 6.023416042327881, | |
| "learning_rate": 4.719026828754442e-05, | |
| "loss": 1.4746, | |
| "step": 5580 | |
| }, | |
| { | |
| "epoch": 2.224324189774481, | |
| "grad_norm": 6.320237636566162, | |
| "learning_rate": 4.696264267907346e-05, | |
| "loss": 1.4176, | |
| "step": 5585 | |
| }, | |
| { | |
| "epoch": 2.226315527455568, | |
| "grad_norm": 6.24311637878418, | |
| "learning_rate": 4.6735465448664656e-05, | |
| "loss": 1.3984, | |
| "step": 5590 | |
| }, | |
| { | |
| "epoch": 2.2283068651366555, | |
| "grad_norm": 6.375988006591797, | |
| "learning_rate": 4.650873758490394e-05, | |
| "loss": 1.4745, | |
| "step": 5595 | |
| }, | |
| { | |
| "epoch": 2.230298202817743, | |
| "grad_norm": 5.514700889587402, | |
| "learning_rate": 4.628246007442182e-05, | |
| "loss": 1.3082, | |
| "step": 5600 | |
| }, | |
| { | |
| "epoch": 2.23228954049883, | |
| "grad_norm": 6.441384315490723, | |
| "learning_rate": 4.605663390188907e-05, | |
| "loss": 1.3744, | |
| "step": 5605 | |
| }, | |
| { | |
| "epoch": 2.2342808781799173, | |
| "grad_norm": 5.932528495788574, | |
| "learning_rate": 4.583126005001241e-05, | |
| "loss": 1.4119, | |
| "step": 5610 | |
| }, | |
| { | |
| "epoch": 2.2362722158610047, | |
| "grad_norm": 6.599247455596924, | |
| "learning_rate": 4.560633949953016e-05, | |
| "loss": 1.4283, | |
| "step": 5615 | |
| }, | |
| { | |
| "epoch": 2.238263553542092, | |
| "grad_norm": 5.809045314788818, | |
| "learning_rate": 4.5381873229208125e-05, | |
| "loss": 1.4476, | |
| "step": 5620 | |
| }, | |
| { | |
| "epoch": 2.240254891223179, | |
| "grad_norm": 5.800905227661133, | |
| "learning_rate": 4.51578622158353e-05, | |
| "loss": 1.4383, | |
| "step": 5625 | |
| }, | |
| { | |
| "epoch": 2.2422462289042664, | |
| "grad_norm": 6.697018146514893, | |
| "learning_rate": 4.493430743421949e-05, | |
| "loss": 1.4488, | |
| "step": 5630 | |
| }, | |
| { | |
| "epoch": 2.244237566585354, | |
| "grad_norm": 6.055057048797607, | |
| "learning_rate": 4.471120985718314e-05, | |
| "loss": 1.4049, | |
| "step": 5635 | |
| }, | |
| { | |
| "epoch": 2.246228904266441, | |
| "grad_norm": 6.137016296386719, | |
| "learning_rate": 4.44885704555593e-05, | |
| "loss": 1.3719, | |
| "step": 5640 | |
| }, | |
| { | |
| "epoch": 2.248220241947528, | |
| "grad_norm": 6.365423679351807, | |
| "learning_rate": 4.4266390198187034e-05, | |
| "loss": 1.4431, | |
| "step": 5645 | |
| }, | |
| { | |
| "epoch": 2.2502115796286155, | |
| "grad_norm": 6.158652305603027, | |
| "learning_rate": 4.404467005190746e-05, | |
| "loss": 1.4635, | |
| "step": 5650 | |
| }, | |
| { | |
| "epoch": 2.252202917309703, | |
| "grad_norm": 5.414857387542725, | |
| "learning_rate": 4.38234109815594e-05, | |
| "loss": 1.3857, | |
| "step": 5655 | |
| }, | |
| { | |
| "epoch": 2.2541942549907903, | |
| "grad_norm": 6.830102920532227, | |
| "learning_rate": 4.360261394997545e-05, | |
| "loss": 1.4411, | |
| "step": 5660 | |
| }, | |
| { | |
| "epoch": 2.2561855926718772, | |
| "grad_norm": 5.979335308074951, | |
| "learning_rate": 4.338227991797741e-05, | |
| "loss": 1.4889, | |
| "step": 5665 | |
| }, | |
| { | |
| "epoch": 2.2581769303529646, | |
| "grad_norm": 5.826920032501221, | |
| "learning_rate": 4.3162409844372325e-05, | |
| "loss": 1.3643, | |
| "step": 5670 | |
| }, | |
| { | |
| "epoch": 2.260168268034052, | |
| "grad_norm": 7.233572959899902, | |
| "learning_rate": 4.2943004685948277e-05, | |
| "loss": 1.566, | |
| "step": 5675 | |
| }, | |
| { | |
| "epoch": 2.262159605715139, | |
| "grad_norm": 5.2802042961120605, | |
| "learning_rate": 4.2724065397470337e-05, | |
| "loss": 1.2933, | |
| "step": 5680 | |
| }, | |
| { | |
| "epoch": 2.2641509433962264, | |
| "grad_norm": 5.731015205383301, | |
| "learning_rate": 4.2505592931676116e-05, | |
| "loss": 1.4204, | |
| "step": 5685 | |
| }, | |
| { | |
| "epoch": 2.2661422810773137, | |
| "grad_norm": 5.166196346282959, | |
| "learning_rate": 4.228758823927187e-05, | |
| "loss": 1.3884, | |
| "step": 5690 | |
| }, | |
| { | |
| "epoch": 2.268133618758401, | |
| "grad_norm": 7.443281173706055, | |
| "learning_rate": 4.2070052268928364e-05, | |
| "loss": 1.412, | |
| "step": 5695 | |
| }, | |
| { | |
| "epoch": 2.270124956439488, | |
| "grad_norm": 5.445531845092773, | |
| "learning_rate": 4.185298596727655e-05, | |
| "loss": 1.506, | |
| "step": 5700 | |
| }, | |
| { | |
| "epoch": 2.2721162941205755, | |
| "grad_norm": 6.483303546905518, | |
| "learning_rate": 4.163639027890362e-05, | |
| "loss": 1.429, | |
| "step": 5705 | |
| }, | |
| { | |
| "epoch": 2.274107631801663, | |
| "grad_norm": 5.9170403480529785, | |
| "learning_rate": 4.142026614634881e-05, | |
| "loss": 1.4705, | |
| "step": 5710 | |
| }, | |
| { | |
| "epoch": 2.2760989694827503, | |
| "grad_norm": 6.848852634429932, | |
| "learning_rate": 4.1204614510099436e-05, | |
| "loss": 1.4002, | |
| "step": 5715 | |
| }, | |
| { | |
| "epoch": 2.278090307163837, | |
| "grad_norm": 6.7346577644348145, | |
| "learning_rate": 4.098943630858658e-05, | |
| "loss": 1.3614, | |
| "step": 5720 | |
| }, | |
| { | |
| "epoch": 2.2800816448449246, | |
| "grad_norm": 6.159039497375488, | |
| "learning_rate": 4.07747324781811e-05, | |
| "loss": 1.4712, | |
| "step": 5725 | |
| }, | |
| { | |
| "epoch": 2.282072982526012, | |
| "grad_norm": 6.049747943878174, | |
| "learning_rate": 4.056050395318976e-05, | |
| "loss": 1.398, | |
| "step": 5730 | |
| }, | |
| { | |
| "epoch": 2.284064320207099, | |
| "grad_norm": 6.871042728424072, | |
| "learning_rate": 4.034675166585079e-05, | |
| "loss": 1.4639, | |
| "step": 5735 | |
| }, | |
| { | |
| "epoch": 2.2860556578881863, | |
| "grad_norm": 6.381641387939453, | |
| "learning_rate": 4.013347654633009e-05, | |
| "loss": 1.4031, | |
| "step": 5740 | |
| }, | |
| { | |
| "epoch": 2.2880469955692737, | |
| "grad_norm": 8.244357109069824, | |
| "learning_rate": 3.992067952271708e-05, | |
| "loss": 1.4173, | |
| "step": 5745 | |
| }, | |
| { | |
| "epoch": 2.290038333250361, | |
| "grad_norm": 5.877044200897217, | |
| "learning_rate": 3.970836152102083e-05, | |
| "loss": 1.3303, | |
| "step": 5750 | |
| }, | |
| { | |
| "epoch": 2.292029670931448, | |
| "grad_norm": 7.401268005371094, | |
| "learning_rate": 3.9496523465165714e-05, | |
| "loss": 1.4079, | |
| "step": 5755 | |
| }, | |
| { | |
| "epoch": 2.2940210086125354, | |
| "grad_norm": 6.720442771911621, | |
| "learning_rate": 3.928516627698764e-05, | |
| "loss": 1.2561, | |
| "step": 5760 | |
| }, | |
| { | |
| "epoch": 2.296012346293623, | |
| "grad_norm": 5.907570838928223, | |
| "learning_rate": 3.907429087623007e-05, | |
| "loss": 1.3237, | |
| "step": 5765 | |
| }, | |
| { | |
| "epoch": 2.29800368397471, | |
| "grad_norm": 6.501235485076904, | |
| "learning_rate": 3.8863898180539774e-05, | |
| "loss": 1.3766, | |
| "step": 5770 | |
| }, | |
| { | |
| "epoch": 2.299995021655797, | |
| "grad_norm": 5.766967296600342, | |
| "learning_rate": 3.8653989105463004e-05, | |
| "loss": 1.4697, | |
| "step": 5775 | |
| }, | |
| { | |
| "epoch": 2.3019863593368846, | |
| "grad_norm": 6.114048480987549, | |
| "learning_rate": 3.8444564564441515e-05, | |
| "loss": 1.4737, | |
| "step": 5780 | |
| }, | |
| { | |
| "epoch": 2.303977697017972, | |
| "grad_norm": 7.505747318267822, | |
| "learning_rate": 3.8235625468808625e-05, | |
| "loss": 1.4341, | |
| "step": 5785 | |
| }, | |
| { | |
| "epoch": 2.305969034699059, | |
| "grad_norm": 5.183752059936523, | |
| "learning_rate": 3.802717272778506e-05, | |
| "loss": 1.3941, | |
| "step": 5790 | |
| }, | |
| { | |
| "epoch": 2.3079603723801463, | |
| "grad_norm": 5.889784812927246, | |
| "learning_rate": 3.781920724847517e-05, | |
| "loss": 1.4962, | |
| "step": 5795 | |
| }, | |
| { | |
| "epoch": 2.3099517100612337, | |
| "grad_norm": 6.595159530639648, | |
| "learning_rate": 3.7611729935863e-05, | |
| "loss": 1.3548, | |
| "step": 5800 | |
| }, | |
| { | |
| "epoch": 2.311943047742321, | |
| "grad_norm": 5.552062034606934, | |
| "learning_rate": 3.740474169280819e-05, | |
| "loss": 1.3996, | |
| "step": 5805 | |
| }, | |
| { | |
| "epoch": 2.313934385423408, | |
| "grad_norm": 6.149967193603516, | |
| "learning_rate": 3.7198243420042205e-05, | |
| "loss": 1.2514, | |
| "step": 5810 | |
| }, | |
| { | |
| "epoch": 2.3159257231044954, | |
| "grad_norm": 7.16715145111084, | |
| "learning_rate": 3.699223601616425e-05, | |
| "loss": 1.5508, | |
| "step": 5815 | |
| }, | |
| { | |
| "epoch": 2.317917060785583, | |
| "grad_norm": 6.43556547164917, | |
| "learning_rate": 3.678672037763766e-05, | |
| "loss": 1.4132, | |
| "step": 5820 | |
| }, | |
| { | |
| "epoch": 2.31990839846667, | |
| "grad_norm": 6.692020893096924, | |
| "learning_rate": 3.6581697398785606e-05, | |
| "loss": 1.4769, | |
| "step": 5825 | |
| }, | |
| { | |
| "epoch": 2.321899736147757, | |
| "grad_norm": 6.448044300079346, | |
| "learning_rate": 3.637716797178745e-05, | |
| "loss": 1.443, | |
| "step": 5830 | |
| }, | |
| { | |
| "epoch": 2.3238910738288445, | |
| "grad_norm": 5.569827079772949, | |
| "learning_rate": 3.61731329866749e-05, | |
| "loss": 1.3258, | |
| "step": 5835 | |
| }, | |
| { | |
| "epoch": 2.325882411509932, | |
| "grad_norm": 6.826900482177734, | |
| "learning_rate": 3.5969593331327945e-05, | |
| "loss": 1.4307, | |
| "step": 5840 | |
| }, | |
| { | |
| "epoch": 2.327873749191019, | |
| "grad_norm": 6.331065654754639, | |
| "learning_rate": 3.576654989147113e-05, | |
| "loss": 1.3226, | |
| "step": 5845 | |
| }, | |
| { | |
| "epoch": 2.3298650868721063, | |
| "grad_norm": 6.6188530921936035, | |
| "learning_rate": 3.5564003550669626e-05, | |
| "loss": 1.5756, | |
| "step": 5850 | |
| }, | |
| { | |
| "epoch": 2.3318564245531936, | |
| "grad_norm": 5.677494049072266, | |
| "learning_rate": 3.536195519032556e-05, | |
| "loss": 1.4443, | |
| "step": 5855 | |
| }, | |
| { | |
| "epoch": 2.333847762234281, | |
| "grad_norm": 6.42593240737915, | |
| "learning_rate": 3.516040568967386e-05, | |
| "loss": 1.3909, | |
| "step": 5860 | |
| }, | |
| { | |
| "epoch": 2.335839099915368, | |
| "grad_norm": 7.254830360412598, | |
| "learning_rate": 3.4959355925778686e-05, | |
| "loss": 1.4784, | |
| "step": 5865 | |
| }, | |
| { | |
| "epoch": 2.3378304375964554, | |
| "grad_norm": 5.911623954772949, | |
| "learning_rate": 3.475880677352964e-05, | |
| "loss": 1.4363, | |
| "step": 5870 | |
| }, | |
| { | |
| "epoch": 2.3398217752775428, | |
| "grad_norm": 5.863831043243408, | |
| "learning_rate": 3.455875910563771e-05, | |
| "loss": 1.4029, | |
| "step": 5875 | |
| }, | |
| { | |
| "epoch": 2.34181311295863, | |
| "grad_norm": 6.270824909210205, | |
| "learning_rate": 3.4359213792631684e-05, | |
| "loss": 1.3896, | |
| "step": 5880 | |
| }, | |
| { | |
| "epoch": 2.343804450639717, | |
| "grad_norm": 6.321516513824463, | |
| "learning_rate": 3.4160171702854285e-05, | |
| "loss": 1.4431, | |
| "step": 5885 | |
| }, | |
| { | |
| "epoch": 2.3457957883208045, | |
| "grad_norm": 6.566595554351807, | |
| "learning_rate": 3.396163370245847e-05, | |
| "loss": 1.4417, | |
| "step": 5890 | |
| }, | |
| { | |
| "epoch": 2.347787126001892, | |
| "grad_norm": 7.686715602874756, | |
| "learning_rate": 3.376360065540353e-05, | |
| "loss": 1.3301, | |
| "step": 5895 | |
| }, | |
| { | |
| "epoch": 2.349778463682979, | |
| "grad_norm": 7.394875526428223, | |
| "learning_rate": 3.356607342345136e-05, | |
| "loss": 1.3286, | |
| "step": 5900 | |
| }, | |
| { | |
| "epoch": 2.351769801364066, | |
| "grad_norm": 6.7270426750183105, | |
| "learning_rate": 3.3369052866162875e-05, | |
| "loss": 1.3933, | |
| "step": 5905 | |
| }, | |
| { | |
| "epoch": 2.3537611390451536, | |
| "grad_norm": 6.101992607116699, | |
| "learning_rate": 3.3172539840894026e-05, | |
| "loss": 1.4248, | |
| "step": 5910 | |
| }, | |
| { | |
| "epoch": 2.355752476726241, | |
| "grad_norm": 7.11561918258667, | |
| "learning_rate": 3.297653520279225e-05, | |
| "loss": 1.3682, | |
| "step": 5915 | |
| }, | |
| { | |
| "epoch": 2.357743814407328, | |
| "grad_norm": 6.916004657745361, | |
| "learning_rate": 3.2781039804792603e-05, | |
| "loss": 1.405, | |
| "step": 5920 | |
| }, | |
| { | |
| "epoch": 2.3597351520884153, | |
| "grad_norm": 6.024013996124268, | |
| "learning_rate": 3.258605449761425e-05, | |
| "loss": 1.4808, | |
| "step": 5925 | |
| }, | |
| { | |
| "epoch": 2.3617264897695027, | |
| "grad_norm": 7.276432991027832, | |
| "learning_rate": 3.239158012975655e-05, | |
| "loss": 1.4218, | |
| "step": 5930 | |
| }, | |
| { | |
| "epoch": 2.36371782745059, | |
| "grad_norm": 6.543519020080566, | |
| "learning_rate": 3.219761754749545e-05, | |
| "loss": 1.5413, | |
| "step": 5935 | |
| }, | |
| { | |
| "epoch": 2.365709165131677, | |
| "grad_norm": 6.037833213806152, | |
| "learning_rate": 3.200416759487989e-05, | |
| "loss": 1.4296, | |
| "step": 5940 | |
| }, | |
| { | |
| "epoch": 2.3677005028127645, | |
| "grad_norm": 6.344012260437012, | |
| "learning_rate": 3.181123111372797e-05, | |
| "loss": 1.3827, | |
| "step": 5945 | |
| }, | |
| { | |
| "epoch": 2.369691840493852, | |
| "grad_norm": 6.848827838897705, | |
| "learning_rate": 3.1618808943623385e-05, | |
| "loss": 1.5233, | |
| "step": 5950 | |
| }, | |
| { | |
| "epoch": 2.371683178174939, | |
| "grad_norm": 5.740164756774902, | |
| "learning_rate": 3.1426901921911715e-05, | |
| "loss": 1.3816, | |
| "step": 5955 | |
| }, | |
| { | |
| "epoch": 2.373674515856026, | |
| "grad_norm": 5.448791027069092, | |
| "learning_rate": 3.1235510883696934e-05, | |
| "loss": 1.3521, | |
| "step": 5960 | |
| }, | |
| { | |
| "epoch": 2.3756658535371136, | |
| "grad_norm": 7.108462333679199, | |
| "learning_rate": 3.104463666183755e-05, | |
| "loss": 1.4244, | |
| "step": 5965 | |
| }, | |
| { | |
| "epoch": 2.377657191218201, | |
| "grad_norm": 5.922284126281738, | |
| "learning_rate": 3.085428008694306e-05, | |
| "loss": 1.3821, | |
| "step": 5970 | |
| }, | |
| { | |
| "epoch": 2.379648528899288, | |
| "grad_norm": 7.061075210571289, | |
| "learning_rate": 3.066444198737053e-05, | |
| "loss": 1.3369, | |
| "step": 5975 | |
| }, | |
| { | |
| "epoch": 2.3816398665803753, | |
| "grad_norm": 7.461008548736572, | |
| "learning_rate": 3.0475123189220664e-05, | |
| "loss": 1.4149, | |
| "step": 5980 | |
| }, | |
| { | |
| "epoch": 2.3836312042614627, | |
| "grad_norm": 6.245413303375244, | |
| "learning_rate": 3.028632451633442e-05, | |
| "loss": 1.3382, | |
| "step": 5985 | |
| }, | |
| { | |
| "epoch": 2.38562254194255, | |
| "grad_norm": 7.291752338409424, | |
| "learning_rate": 3.009804679028936e-05, | |
| "loss": 1.4529, | |
| "step": 5990 | |
| }, | |
| { | |
| "epoch": 2.387613879623637, | |
| "grad_norm": 6.265554904937744, | |
| "learning_rate": 2.9910290830396166e-05, | |
| "loss": 1.3111, | |
| "step": 5995 | |
| }, | |
| { | |
| "epoch": 2.3896052173047244, | |
| "grad_norm": 5.732409477233887, | |
| "learning_rate": 2.9723057453694942e-05, | |
| "loss": 1.4022, | |
| "step": 6000 | |
| }, | |
| { | |
| "epoch": 2.391596554985812, | |
| "grad_norm": 6.244343280792236, | |
| "learning_rate": 2.9536347474951644e-05, | |
| "loss": 1.4136, | |
| "step": 6005 | |
| }, | |
| { | |
| "epoch": 2.3935878926668988, | |
| "grad_norm": 6.473920822143555, | |
| "learning_rate": 2.9350161706654796e-05, | |
| "loss": 1.4527, | |
| "step": 6010 | |
| }, | |
| { | |
| "epoch": 2.395579230347986, | |
| "grad_norm": 6.109169960021973, | |
| "learning_rate": 2.9164500959011565e-05, | |
| "loss": 1.3211, | |
| "step": 6015 | |
| }, | |
| { | |
| "epoch": 2.3975705680290735, | |
| "grad_norm": 6.401641368865967, | |
| "learning_rate": 2.8979366039944552e-05, | |
| "loss": 1.4453, | |
| "step": 6020 | |
| }, | |
| { | |
| "epoch": 2.399561905710161, | |
| "grad_norm": 5.588070392608643, | |
| "learning_rate": 2.8794757755088076e-05, | |
| "loss": 1.307, | |
| "step": 6025 | |
| }, | |
| { | |
| "epoch": 2.4015532433912483, | |
| "grad_norm": 5.913764953613281, | |
| "learning_rate": 2.861067690778487e-05, | |
| "loss": 1.3796, | |
| "step": 6030 | |
| }, | |
| { | |
| "epoch": 2.4035445810723353, | |
| "grad_norm": 6.586962699890137, | |
| "learning_rate": 2.842712429908237e-05, | |
| "loss": 1.3723, | |
| "step": 6035 | |
| }, | |
| { | |
| "epoch": 2.4055359187534227, | |
| "grad_norm": 5.830726146697998, | |
| "learning_rate": 2.82441007277293e-05, | |
| "loss": 1.6131, | |
| "step": 6040 | |
| }, | |
| { | |
| "epoch": 2.40752725643451, | |
| "grad_norm": 6.545270919799805, | |
| "learning_rate": 2.806160699017237e-05, | |
| "loss": 1.4068, | |
| "step": 6045 | |
| }, | |
| { | |
| "epoch": 2.409518594115597, | |
| "grad_norm": 6.324460506439209, | |
| "learning_rate": 2.7879643880552512e-05, | |
| "loss": 1.5127, | |
| "step": 6050 | |
| }, | |
| { | |
| "epoch": 2.4115099317966844, | |
| "grad_norm": 7.322314262390137, | |
| "learning_rate": 2.7698212190701623e-05, | |
| "loss": 1.3749, | |
| "step": 6055 | |
| }, | |
| { | |
| "epoch": 2.4135012694777718, | |
| "grad_norm": 5.913839817047119, | |
| "learning_rate": 2.7517312710139027e-05, | |
| "loss": 1.4688, | |
| "step": 6060 | |
| }, | |
| { | |
| "epoch": 2.4154926071588587, | |
| "grad_norm": 6.860445976257324, | |
| "learning_rate": 2.733694622606824e-05, | |
| "loss": 1.4385, | |
| "step": 6065 | |
| }, | |
| { | |
| "epoch": 2.417483944839946, | |
| "grad_norm": 6.26818323135376, | |
| "learning_rate": 2.71571135233732e-05, | |
| "loss": 1.3846, | |
| "step": 6070 | |
| }, | |
| { | |
| "epoch": 2.4194752825210335, | |
| "grad_norm": 5.973329544067383, | |
| "learning_rate": 2.6977815384615132e-05, | |
| "loss": 1.4896, | |
| "step": 6075 | |
| }, | |
| { | |
| "epoch": 2.421466620202121, | |
| "grad_norm": 6.393100738525391, | |
| "learning_rate": 2.6799052590029003e-05, | |
| "loss": 1.3961, | |
| "step": 6080 | |
| }, | |
| { | |
| "epoch": 2.4234579578832083, | |
| "grad_norm": 6.824984073638916, | |
| "learning_rate": 2.6620825917520257e-05, | |
| "loss": 1.3814, | |
| "step": 6085 | |
| }, | |
| { | |
| "epoch": 2.4254492955642952, | |
| "grad_norm": 6.342784404754639, | |
| "learning_rate": 2.6443136142661263e-05, | |
| "loss": 1.431, | |
| "step": 6090 | |
| }, | |
| { | |
| "epoch": 2.4274406332453826, | |
| "grad_norm": 6.881380558013916, | |
| "learning_rate": 2.6265984038688005e-05, | |
| "loss": 1.3264, | |
| "step": 6095 | |
| }, | |
| { | |
| "epoch": 2.42943197092647, | |
| "grad_norm": 6.984031677246094, | |
| "learning_rate": 2.6089370376496854e-05, | |
| "loss": 1.553, | |
| "step": 6100 | |
| }, | |
| { | |
| "epoch": 2.431423308607557, | |
| "grad_norm": 6.409067630767822, | |
| "learning_rate": 2.5913295924641004e-05, | |
| "loss": 1.4004, | |
| "step": 6105 | |
| }, | |
| { | |
| "epoch": 2.4334146462886443, | |
| "grad_norm": 7.744305610656738, | |
| "learning_rate": 2.57377614493272e-05, | |
| "loss": 1.431, | |
| "step": 6110 | |
| }, | |
| { | |
| "epoch": 2.4354059839697317, | |
| "grad_norm": 6.909455299377441, | |
| "learning_rate": 2.5562767714412418e-05, | |
| "loss": 1.3365, | |
| "step": 6115 | |
| }, | |
| { | |
| "epoch": 2.437397321650819, | |
| "grad_norm": 6.19822359085083, | |
| "learning_rate": 2.538831548140069e-05, | |
| "loss": 1.3417, | |
| "step": 6120 | |
| }, | |
| { | |
| "epoch": 2.439388659331906, | |
| "grad_norm": 6.540812015533447, | |
| "learning_rate": 2.5214405509439422e-05, | |
| "loss": 1.3847, | |
| "step": 6125 | |
| }, | |
| { | |
| "epoch": 2.4413799970129935, | |
| "grad_norm": 6.4344162940979, | |
| "learning_rate": 2.5041038555316518e-05, | |
| "loss": 1.4237, | |
| "step": 6130 | |
| }, | |
| { | |
| "epoch": 2.443371334694081, | |
| "grad_norm": 7.177534103393555, | |
| "learning_rate": 2.4868215373456712e-05, | |
| "loss": 1.4874, | |
| "step": 6135 | |
| }, | |
| { | |
| "epoch": 2.4453626723751682, | |
| "grad_norm": 5.777395725250244, | |
| "learning_rate": 2.4695936715918607e-05, | |
| "loss": 1.3981, | |
| "step": 6140 | |
| }, | |
| { | |
| "epoch": 2.447354010056255, | |
| "grad_norm": 6.4199347496032715, | |
| "learning_rate": 2.4524203332391144e-05, | |
| "loss": 1.6551, | |
| "step": 6145 | |
| }, | |
| { | |
| "epoch": 2.4493453477373426, | |
| "grad_norm": 7.651169300079346, | |
| "learning_rate": 2.4353015970190453e-05, | |
| "loss": 1.394, | |
| "step": 6150 | |
| }, | |
| { | |
| "epoch": 2.45133668541843, | |
| "grad_norm": 6.039809703826904, | |
| "learning_rate": 2.418237537425666e-05, | |
| "loss": 1.3926, | |
| "step": 6155 | |
| }, | |
| { | |
| "epoch": 2.453328023099517, | |
| "grad_norm": 6.839121341705322, | |
| "learning_rate": 2.401228228715052e-05, | |
| "loss": 1.3861, | |
| "step": 6160 | |
| }, | |
| { | |
| "epoch": 2.4553193607806043, | |
| "grad_norm": 5.479107856750488, | |
| "learning_rate": 2.3842737449050257e-05, | |
| "loss": 1.2794, | |
| "step": 6165 | |
| }, | |
| { | |
| "epoch": 2.4573106984616917, | |
| "grad_norm": 6.096414089202881, | |
| "learning_rate": 2.367374159774832e-05, | |
| "loss": 1.3764, | |
| "step": 6170 | |
| }, | |
| { | |
| "epoch": 2.459302036142779, | |
| "grad_norm": 6.262311935424805, | |
| "learning_rate": 2.3505295468648256e-05, | |
| "loss": 1.3863, | |
| "step": 6175 | |
| }, | |
| { | |
| "epoch": 2.461293373823866, | |
| "grad_norm": 7.217311382293701, | |
| "learning_rate": 2.3337399794761336e-05, | |
| "loss": 1.4574, | |
| "step": 6180 | |
| }, | |
| { | |
| "epoch": 2.4632847115049534, | |
| "grad_norm": 5.660340309143066, | |
| "learning_rate": 2.3170055306703512e-05, | |
| "loss": 1.4165, | |
| "step": 6185 | |
| }, | |
| { | |
| "epoch": 2.465276049186041, | |
| "grad_norm": 6.607390880584717, | |
| "learning_rate": 2.3003262732692217e-05, | |
| "loss": 1.4666, | |
| "step": 6190 | |
| }, | |
| { | |
| "epoch": 2.467267386867128, | |
| "grad_norm": 5.438241481781006, | |
| "learning_rate": 2.2837022798543132e-05, | |
| "loss": 1.3601, | |
| "step": 6195 | |
| }, | |
| { | |
| "epoch": 2.469258724548215, | |
| "grad_norm": 6.2114338874816895, | |
| "learning_rate": 2.2671336227667097e-05, | |
| "loss": 1.3728, | |
| "step": 6200 | |
| }, | |
| { | |
| "epoch": 2.4712500622293025, | |
| "grad_norm": 6.168636798858643, | |
| "learning_rate": 2.250620374106686e-05, | |
| "loss": 1.3483, | |
| "step": 6205 | |
| }, | |
| { | |
| "epoch": 2.47324139991039, | |
| "grad_norm": 6.182287216186523, | |
| "learning_rate": 2.2341626057334133e-05, | |
| "loss": 1.3469, | |
| "step": 6210 | |
| }, | |
| { | |
| "epoch": 2.475232737591477, | |
| "grad_norm": 8.469073295593262, | |
| "learning_rate": 2.2177603892646272e-05, | |
| "loss": 1.4293, | |
| "step": 6215 | |
| }, | |
| { | |
| "epoch": 2.4772240752725643, | |
| "grad_norm": 6.551890850067139, | |
| "learning_rate": 2.2014137960763178e-05, | |
| "loss": 1.4135, | |
| "step": 6220 | |
| }, | |
| { | |
| "epoch": 2.4792154129536517, | |
| "grad_norm": 5.324213981628418, | |
| "learning_rate": 2.1851228973024387e-05, | |
| "loss": 1.3905, | |
| "step": 6225 | |
| }, | |
| { | |
| "epoch": 2.481206750634739, | |
| "grad_norm": 7.160348892211914, | |
| "learning_rate": 2.1688877638345725e-05, | |
| "loss": 1.4851, | |
| "step": 6230 | |
| }, | |
| { | |
| "epoch": 2.483198088315826, | |
| "grad_norm": 5.865573406219482, | |
| "learning_rate": 2.152708466321636e-05, | |
| "loss": 1.4202, | |
| "step": 6235 | |
| }, | |
| { | |
| "epoch": 2.4851894259969134, | |
| "grad_norm": 6.2235918045043945, | |
| "learning_rate": 2.136585075169565e-05, | |
| "loss": 1.3384, | |
| "step": 6240 | |
| }, | |
| { | |
| "epoch": 2.487180763678001, | |
| "grad_norm": 6.540336608886719, | |
| "learning_rate": 2.1205176605410256e-05, | |
| "loss": 1.3893, | |
| "step": 6245 | |
| }, | |
| { | |
| "epoch": 2.489172101359088, | |
| "grad_norm": 6.606202602386475, | |
| "learning_rate": 2.104506292355085e-05, | |
| "loss": 1.4976, | |
| "step": 6250 | |
| }, | |
| { | |
| "epoch": 2.491163439040175, | |
| "grad_norm": 6.5868659019470215, | |
| "learning_rate": 2.0885510402869167e-05, | |
| "loss": 1.3458, | |
| "step": 6255 | |
| }, | |
| { | |
| "epoch": 2.4931547767212625, | |
| "grad_norm": 7.185669898986816, | |
| "learning_rate": 2.072651973767512e-05, | |
| "loss": 1.4209, | |
| "step": 6260 | |
| }, | |
| { | |
| "epoch": 2.49514611440235, | |
| "grad_norm": 7.7647576332092285, | |
| "learning_rate": 2.0568091619833523e-05, | |
| "loss": 1.483, | |
| "step": 6265 | |
| }, | |
| { | |
| "epoch": 2.497137452083437, | |
| "grad_norm": 6.41839075088501, | |
| "learning_rate": 2.041022673876125e-05, | |
| "loss": 1.3673, | |
| "step": 6270 | |
| }, | |
| { | |
| "epoch": 2.4991287897645242, | |
| "grad_norm": 6.278010368347168, | |
| "learning_rate": 2.025292578142415e-05, | |
| "loss": 1.3931, | |
| "step": 6275 | |
| }, | |
| { | |
| "epoch": 2.5011201274456116, | |
| "grad_norm": 6.852733135223389, | |
| "learning_rate": 2.009618943233419e-05, | |
| "loss": 1.4295, | |
| "step": 6280 | |
| }, | |
| { | |
| "epoch": 2.503111465126699, | |
| "grad_norm": 5.997931957244873, | |
| "learning_rate": 1.9940018373546336e-05, | |
| "loss": 1.2439, | |
| "step": 6285 | |
| }, | |
| { | |
| "epoch": 2.505102802807786, | |
| "grad_norm": 6.848467826843262, | |
| "learning_rate": 1.9784413284655575e-05, | |
| "loss": 1.348, | |
| "step": 6290 | |
| }, | |
| { | |
| "epoch": 2.5070941404888734, | |
| "grad_norm": 6.354589462280273, | |
| "learning_rate": 1.9629374842794067e-05, | |
| "loss": 1.4389, | |
| "step": 6295 | |
| }, | |
| { | |
| "epoch": 2.5090854781699607, | |
| "grad_norm": 5.17344856262207, | |
| "learning_rate": 1.9474903722628192e-05, | |
| "loss": 1.3658, | |
| "step": 6300 | |
| }, | |
| { | |
| "epoch": 2.511076815851048, | |
| "grad_norm": 6.303539752960205, | |
| "learning_rate": 1.932100059635551e-05, | |
| "loss": 1.4752, | |
| "step": 6305 | |
| }, | |
| { | |
| "epoch": 2.513068153532135, | |
| "grad_norm": 6.674501895904541, | |
| "learning_rate": 1.9167666133701847e-05, | |
| "loss": 1.3493, | |
| "step": 6310 | |
| }, | |
| { | |
| "epoch": 2.5150594912132225, | |
| "grad_norm": 4.996592044830322, | |
| "learning_rate": 1.901490100191857e-05, | |
| "loss": 1.4374, | |
| "step": 6315 | |
| }, | |
| { | |
| "epoch": 2.51705082889431, | |
| "grad_norm": 5.650495529174805, | |
| "learning_rate": 1.886270586577944e-05, | |
| "loss": 1.3024, | |
| "step": 6320 | |
| }, | |
| { | |
| "epoch": 2.519042166575397, | |
| "grad_norm": 6.74934720993042, | |
| "learning_rate": 1.8711081387577837e-05, | |
| "loss": 1.4318, | |
| "step": 6325 | |
| }, | |
| { | |
| "epoch": 2.521033504256484, | |
| "grad_norm": 6.7489423751831055, | |
| "learning_rate": 1.8560028227123823e-05, | |
| "loss": 1.462, | |
| "step": 6330 | |
| }, | |
| { | |
| "epoch": 2.5230248419375716, | |
| "grad_norm": 5.957546710968018, | |
| "learning_rate": 1.8409547041741434e-05, | |
| "loss": 1.3598, | |
| "step": 6335 | |
| }, | |
| { | |
| "epoch": 2.525016179618659, | |
| "grad_norm": 6.806410789489746, | |
| "learning_rate": 1.825963848626557e-05, | |
| "loss": 1.3982, | |
| "step": 6340 | |
| }, | |
| { | |
| "epoch": 2.5270075172997464, | |
| "grad_norm": 7.132102966308594, | |
| "learning_rate": 1.8110303213039296e-05, | |
| "loss": 1.348, | |
| "step": 6345 | |
| }, | |
| { | |
| "epoch": 2.5289988549808333, | |
| "grad_norm": 6.3546061515808105, | |
| "learning_rate": 1.796154187191105e-05, | |
| "loss": 1.4032, | |
| "step": 6350 | |
| }, | |
| { | |
| "epoch": 2.5309901926619207, | |
| "grad_norm": 5.63569450378418, | |
| "learning_rate": 1.7813355110231677e-05, | |
| "loss": 1.3413, | |
| "step": 6355 | |
| }, | |
| { | |
| "epoch": 2.532981530343008, | |
| "grad_norm": 6.957376480102539, | |
| "learning_rate": 1.766574357285166e-05, | |
| "loss": 1.3587, | |
| "step": 6360 | |
| }, | |
| { | |
| "epoch": 2.534972868024095, | |
| "grad_norm": 7.172070026397705, | |
| "learning_rate": 1.7518707902118378e-05, | |
| "loss": 1.3324, | |
| "step": 6365 | |
| }, | |
| { | |
| "epoch": 2.5369642057051824, | |
| "grad_norm": 6.005961894989014, | |
| "learning_rate": 1.7372248737873278e-05, | |
| "loss": 1.3518, | |
| "step": 6370 | |
| }, | |
| { | |
| "epoch": 2.53895554338627, | |
| "grad_norm": 6.351656913757324, | |
| "learning_rate": 1.7226366717449048e-05, | |
| "loss": 1.3107, | |
| "step": 6375 | |
| }, | |
| { | |
| "epoch": 2.5409468810673568, | |
| "grad_norm": 5.603379726409912, | |
| "learning_rate": 1.708106247566683e-05, | |
| "loss": 1.3946, | |
| "step": 6380 | |
| }, | |
| { | |
| "epoch": 2.542938218748444, | |
| "grad_norm": 5.805838108062744, | |
| "learning_rate": 1.6936336644833614e-05, | |
| "loss": 1.3023, | |
| "step": 6385 | |
| }, | |
| { | |
| "epoch": 2.5449295564295316, | |
| "grad_norm": 6.263773441314697, | |
| "learning_rate": 1.6792189854739258e-05, | |
| "loss": 1.3593, | |
| "step": 6390 | |
| }, | |
| { | |
| "epoch": 2.546920894110619, | |
| "grad_norm": 6.895465850830078, | |
| "learning_rate": 1.6648622732653943e-05, | |
| "loss": 1.3542, | |
| "step": 6395 | |
| }, | |
| { | |
| "epoch": 2.5489122317917063, | |
| "grad_norm": 6.908726692199707, | |
| "learning_rate": 1.6505635903325275e-05, | |
| "loss": 1.419, | |
| "step": 6400 | |
| }, | |
| { | |
| "epoch": 2.5509035694727933, | |
| "grad_norm": 5.818434715270996, | |
| "learning_rate": 1.6363229988975763e-05, | |
| "loss": 1.4539, | |
| "step": 6405 | |
| }, | |
| { | |
| "epoch": 2.5528949071538807, | |
| "grad_norm": 6.764200210571289, | |
| "learning_rate": 1.6221405609299948e-05, | |
| "loss": 1.4295, | |
| "step": 6410 | |
| }, | |
| { | |
| "epoch": 2.554886244834968, | |
| "grad_norm": 6.165177345275879, | |
| "learning_rate": 1.6080163381461702e-05, | |
| "loss": 1.3533, | |
| "step": 6415 | |
| }, | |
| { | |
| "epoch": 2.556877582516055, | |
| "grad_norm": 8.976422309875488, | |
| "learning_rate": 1.5939503920091738e-05, | |
| "loss": 1.3518, | |
| "step": 6420 | |
| }, | |
| { | |
| "epoch": 2.5588689201971424, | |
| "grad_norm": 7.087680816650391, | |
| "learning_rate": 1.5799427837284683e-05, | |
| "loss": 1.4809, | |
| "step": 6425 | |
| }, | |
| { | |
| "epoch": 2.56086025787823, | |
| "grad_norm": 5.952059268951416, | |
| "learning_rate": 1.565993574259659e-05, | |
| "loss": 1.3105, | |
| "step": 6430 | |
| }, | |
| { | |
| "epoch": 2.5628515955593167, | |
| "grad_norm": 5.57841682434082, | |
| "learning_rate": 1.5521028243042134e-05, | |
| "loss": 1.3695, | |
| "step": 6435 | |
| }, | |
| { | |
| "epoch": 2.564842933240404, | |
| "grad_norm": 5.929076671600342, | |
| "learning_rate": 1.5382705943092218e-05, | |
| "loss": 1.4548, | |
| "step": 6440 | |
| }, | |
| { | |
| "epoch": 2.5668342709214915, | |
| "grad_norm": 6.7737202644348145, | |
| "learning_rate": 1.5244969444671051e-05, | |
| "loss": 1.3541, | |
| "step": 6445 | |
| }, | |
| { | |
| "epoch": 2.568825608602579, | |
| "grad_norm": 6.4556074142456055, | |
| "learning_rate": 1.5107819347153704e-05, | |
| "loss": 1.5401, | |
| "step": 6450 | |
| }, | |
| { | |
| "epoch": 2.5708169462836663, | |
| "grad_norm": 5.711570739746094, | |
| "learning_rate": 1.4971256247363534e-05, | |
| "loss": 1.4449, | |
| "step": 6455 | |
| }, | |
| { | |
| "epoch": 2.5728082839647533, | |
| "grad_norm": 5.679041862487793, | |
| "learning_rate": 1.4835280739569388e-05, | |
| "loss": 1.2817, | |
| "step": 6460 | |
| }, | |
| { | |
| "epoch": 2.5747996216458406, | |
| "grad_norm": 6.76185417175293, | |
| "learning_rate": 1.4699893415483232e-05, | |
| "loss": 1.4374, | |
| "step": 6465 | |
| }, | |
| { | |
| "epoch": 2.576790959326928, | |
| "grad_norm": 6.594644546508789, | |
| "learning_rate": 1.4565094864257426e-05, | |
| "loss": 1.4975, | |
| "step": 6470 | |
| }, | |
| { | |
| "epoch": 2.578782297008015, | |
| "grad_norm": 5.892383575439453, | |
| "learning_rate": 1.4430885672482328e-05, | |
| "loss": 1.3731, | |
| "step": 6475 | |
| }, | |
| { | |
| "epoch": 2.5807736346891024, | |
| "grad_norm": 6.247217655181885, | |
| "learning_rate": 1.4297266424183517e-05, | |
| "loss": 1.3796, | |
| "step": 6480 | |
| }, | |
| { | |
| "epoch": 2.5827649723701898, | |
| "grad_norm": 6.876238822937012, | |
| "learning_rate": 1.416423770081941e-05, | |
| "loss": 1.4786, | |
| "step": 6485 | |
| }, | |
| { | |
| "epoch": 2.5847563100512767, | |
| "grad_norm": 5.32161283493042, | |
| "learning_rate": 1.4031800081278754e-05, | |
| "loss": 1.3543, | |
| "step": 6490 | |
| }, | |
| { | |
| "epoch": 2.586747647732364, | |
| "grad_norm": 6.2356648445129395, | |
| "learning_rate": 1.389995414187795e-05, | |
| "loss": 1.3401, | |
| "step": 6495 | |
| }, | |
| { | |
| "epoch": 2.5887389854134515, | |
| "grad_norm": 6.550376892089844, | |
| "learning_rate": 1.3768700456358683e-05, | |
| "loss": 1.4265, | |
| "step": 6500 | |
| }, | |
| { | |
| "epoch": 2.590730323094539, | |
| "grad_norm": 6.80794095993042, | |
| "learning_rate": 1.363803959588537e-05, | |
| "loss": 1.439, | |
| "step": 6505 | |
| }, | |
| { | |
| "epoch": 2.5927216607756263, | |
| "grad_norm": 7.110243797302246, | |
| "learning_rate": 1.3507972129042737e-05, | |
| "loss": 1.4863, | |
| "step": 6510 | |
| }, | |
| { | |
| "epoch": 2.594712998456713, | |
| "grad_norm": 6.215062141418457, | |
| "learning_rate": 1.3378498621833233e-05, | |
| "loss": 1.4606, | |
| "step": 6515 | |
| }, | |
| { | |
| "epoch": 2.5967043361378006, | |
| "grad_norm": 6.820135116577148, | |
| "learning_rate": 1.3249619637674619e-05, | |
| "loss": 1.4164, | |
| "step": 6520 | |
| }, | |
| { | |
| "epoch": 2.598695673818888, | |
| "grad_norm": 5.777781009674072, | |
| "learning_rate": 1.3121335737397609e-05, | |
| "loss": 1.4159, | |
| "step": 6525 | |
| }, | |
| { | |
| "epoch": 2.600687011499975, | |
| "grad_norm": 5.947566032409668, | |
| "learning_rate": 1.2993647479243258e-05, | |
| "loss": 1.3885, | |
| "step": 6530 | |
| }, | |
| { | |
| "epoch": 2.6026783491810623, | |
| "grad_norm": 7.960350036621094, | |
| "learning_rate": 1.2866555418860674e-05, | |
| "loss": 1.3622, | |
| "step": 6535 | |
| }, | |
| { | |
| "epoch": 2.6046696868621497, | |
| "grad_norm": 7.677003383636475, | |
| "learning_rate": 1.2740060109304478e-05, | |
| "loss": 1.3941, | |
| "step": 6540 | |
| }, | |
| { | |
| "epoch": 2.6066610245432367, | |
| "grad_norm": 6.016810417175293, | |
| "learning_rate": 1.2614162101032599e-05, | |
| "loss": 1.4176, | |
| "step": 6545 | |
| }, | |
| { | |
| "epoch": 2.608652362224324, | |
| "grad_norm": 5.751842498779297, | |
| "learning_rate": 1.2488861941903593e-05, | |
| "loss": 1.4261, | |
| "step": 6550 | |
| }, | |
| { | |
| "epoch": 2.6106436999054115, | |
| "grad_norm": 5.662951469421387, | |
| "learning_rate": 1.2364160177174503e-05, | |
| "loss": 1.4162, | |
| "step": 6555 | |
| }, | |
| { | |
| "epoch": 2.612635037586499, | |
| "grad_norm": 6.150815010070801, | |
| "learning_rate": 1.22400573494984e-05, | |
| "loss": 1.4112, | |
| "step": 6560 | |
| }, | |
| { | |
| "epoch": 2.6146263752675862, | |
| "grad_norm": 6.814656734466553, | |
| "learning_rate": 1.2116553998921957e-05, | |
| "loss": 1.3148, | |
| "step": 6565 | |
| }, | |
| { | |
| "epoch": 2.616617712948673, | |
| "grad_norm": 6.5078558921813965, | |
| "learning_rate": 1.1993650662883219e-05, | |
| "loss": 1.4669, | |
| "step": 6570 | |
| }, | |
| { | |
| "epoch": 2.6186090506297606, | |
| "grad_norm": 7.931562900543213, | |
| "learning_rate": 1.187134787620912e-05, | |
| "loss": 1.4846, | |
| "step": 6575 | |
| }, | |
| { | |
| "epoch": 2.620600388310848, | |
| "grad_norm": 6.5228590965271, | |
| "learning_rate": 1.1749646171113364e-05, | |
| "loss": 1.3225, | |
| "step": 6580 | |
| }, | |
| { | |
| "epoch": 2.622591725991935, | |
| "grad_norm": 6.067448616027832, | |
| "learning_rate": 1.1628546077193868e-05, | |
| "loss": 1.3188, | |
| "step": 6585 | |
| }, | |
| { | |
| "epoch": 2.6245830636730223, | |
| "grad_norm": 6.385129451751709, | |
| "learning_rate": 1.1508048121430625e-05, | |
| "loss": 1.4801, | |
| "step": 6590 | |
| }, | |
| { | |
| "epoch": 2.6265744013541097, | |
| "grad_norm": 5.796778202056885, | |
| "learning_rate": 1.1388152828183389e-05, | |
| "loss": 1.3978, | |
| "step": 6595 | |
| }, | |
| { | |
| "epoch": 2.6285657390351966, | |
| "grad_norm": 6.417087554931641, | |
| "learning_rate": 1.1268860719189316e-05, | |
| "loss": 1.3319, | |
| "step": 6600 | |
| }, | |
| { | |
| "epoch": 2.630557076716284, | |
| "grad_norm": 6.347975254058838, | |
| "learning_rate": 1.115017231356074e-05, | |
| "loss": 1.3908, | |
| "step": 6605 | |
| }, | |
| { | |
| "epoch": 2.6325484143973714, | |
| "grad_norm": 5.882072448730469, | |
| "learning_rate": 1.1032088127782917e-05, | |
| "loss": 1.3999, | |
| "step": 6610 | |
| }, | |
| { | |
| "epoch": 2.634539752078459, | |
| "grad_norm": 5.941750526428223, | |
| "learning_rate": 1.0914608675711839e-05, | |
| "loss": 1.3493, | |
| "step": 6615 | |
| }, | |
| { | |
| "epoch": 2.636531089759546, | |
| "grad_norm": 6.197680950164795, | |
| "learning_rate": 1.0797734468571867e-05, | |
| "loss": 1.3622, | |
| "step": 6620 | |
| }, | |
| { | |
| "epoch": 2.638522427440633, | |
| "grad_norm": 7.5896992683410645, | |
| "learning_rate": 1.0681466014953538e-05, | |
| "loss": 1.4264, | |
| "step": 6625 | |
| }, | |
| { | |
| "epoch": 2.6405137651217205, | |
| "grad_norm": 5.610791206359863, | |
| "learning_rate": 1.056580382081153e-05, | |
| "loss": 1.3576, | |
| "step": 6630 | |
| }, | |
| { | |
| "epoch": 2.642505102802808, | |
| "grad_norm": 5.284849166870117, | |
| "learning_rate": 1.0450748389462183e-05, | |
| "loss": 1.4206, | |
| "step": 6635 | |
| }, | |
| { | |
| "epoch": 2.644496440483895, | |
| "grad_norm": 6.58115816116333, | |
| "learning_rate": 1.0336300221581511e-05, | |
| "loss": 1.4038, | |
| "step": 6640 | |
| }, | |
| { | |
| "epoch": 2.6464877781649823, | |
| "grad_norm": 6.350376129150391, | |
| "learning_rate": 1.0222459815202883e-05, | |
| "loss": 1.3237, | |
| "step": 6645 | |
| }, | |
| { | |
| "epoch": 2.6484791158460697, | |
| "grad_norm": 5.541227340698242, | |
| "learning_rate": 1.0109227665715059e-05, | |
| "loss": 1.349, | |
| "step": 6650 | |
| }, | |
| { | |
| "epoch": 2.6504704535271566, | |
| "grad_norm": 6.974477291107178, | |
| "learning_rate": 9.99660426585977e-06, | |
| "loss": 1.3641, | |
| "step": 6655 | |
| }, | |
| { | |
| "epoch": 2.652461791208244, | |
| "grad_norm": 6.048290729522705, | |
| "learning_rate": 9.884590105729784e-06, | |
| "loss": 1.2903, | |
| "step": 6660 | |
| }, | |
| { | |
| "epoch": 2.6544531288893314, | |
| "grad_norm": 5.928702354431152, | |
| "learning_rate": 9.773185672766703e-06, | |
| "loss": 1.4015, | |
| "step": 6665 | |
| }, | |
| { | |
| "epoch": 2.6564444665704188, | |
| "grad_norm": 6.905900478363037, | |
| "learning_rate": 9.662391451758788e-06, | |
| "loss": 1.3503, | |
| "step": 6670 | |
| }, | |
| { | |
| "epoch": 2.658435804251506, | |
| "grad_norm": 8.249216079711914, | |
| "learning_rate": 9.552207924838923e-06, | |
| "loss": 1.3357, | |
| "step": 6675 | |
| }, | |
| { | |
| "epoch": 2.660427141932593, | |
| "grad_norm": 6.073180675506592, | |
| "learning_rate": 9.442635571482481e-06, | |
| "loss": 1.4308, | |
| "step": 6680 | |
| }, | |
| { | |
| "epoch": 2.6624184796136805, | |
| "grad_norm": 7.015928745269775, | |
| "learning_rate": 9.333674868505286e-06, | |
| "loss": 1.2818, | |
| "step": 6685 | |
| }, | |
| { | |
| "epoch": 2.664409817294768, | |
| "grad_norm": 7.025008678436279, | |
| "learning_rate": 9.225326290061453e-06, | |
| "loss": 1.387, | |
| "step": 6690 | |
| }, | |
| { | |
| "epoch": 2.666401154975855, | |
| "grad_norm": 7.363583564758301, | |
| "learning_rate": 9.11759030764136e-06, | |
| "loss": 1.3665, | |
| "step": 6695 | |
| }, | |
| { | |
| "epoch": 2.6683924926569422, | |
| "grad_norm": 7.165689945220947, | |
| "learning_rate": 9.010467390069703e-06, | |
| "loss": 1.5225, | |
| "step": 6700 | |
| }, | |
| { | |
| "epoch": 2.6703838303380296, | |
| "grad_norm": 6.025979518890381, | |
| "learning_rate": 8.903958003503264e-06, | |
| "loss": 1.3151, | |
| "step": 6705 | |
| }, | |
| { | |
| "epoch": 2.6723751680191166, | |
| "grad_norm": 6.4776411056518555, | |
| "learning_rate": 8.798062611429007e-06, | |
| "loss": 1.3935, | |
| "step": 6710 | |
| }, | |
| { | |
| "epoch": 2.674366505700204, | |
| "grad_norm": 6.628039360046387, | |
| "learning_rate": 8.692781674662002e-06, | |
| "loss": 1.3811, | |
| "step": 6715 | |
| }, | |
| { | |
| "epoch": 2.6763578433812913, | |
| "grad_norm": 6.728973388671875, | |
| "learning_rate": 8.588115651343558e-06, | |
| "loss": 1.4715, | |
| "step": 6720 | |
| }, | |
| { | |
| "epoch": 2.6783491810623787, | |
| "grad_norm": 7.614314556121826, | |
| "learning_rate": 8.484064996939021e-06, | |
| "loss": 1.2541, | |
| "step": 6725 | |
| }, | |
| { | |
| "epoch": 2.680340518743466, | |
| "grad_norm": 6.7132391929626465, | |
| "learning_rate": 8.380630164235919e-06, | |
| "loss": 1.3637, | |
| "step": 6730 | |
| }, | |
| { | |
| "epoch": 2.682331856424553, | |
| "grad_norm": 6.705021381378174, | |
| "learning_rate": 8.277811603341933e-06, | |
| "loss": 1.422, | |
| "step": 6735 | |
| }, | |
| { | |
| "epoch": 2.6843231941056405, | |
| "grad_norm": 6.249575138092041, | |
| "learning_rate": 8.175609761683093e-06, | |
| "loss": 1.4463, | |
| "step": 6740 | |
| }, | |
| { | |
| "epoch": 2.686314531786728, | |
| "grad_norm": 6.032909870147705, | |
| "learning_rate": 8.074025084001561e-06, | |
| "loss": 1.4117, | |
| "step": 6745 | |
| }, | |
| { | |
| "epoch": 2.688305869467815, | |
| "grad_norm": 6.830637454986572, | |
| "learning_rate": 7.973058012353911e-06, | |
| "loss": 1.394, | |
| "step": 6750 | |
| }, | |
| { | |
| "epoch": 2.690297207148902, | |
| "grad_norm": 6.5258097648620605, | |
| "learning_rate": 7.872708986109183e-06, | |
| "loss": 1.4076, | |
| "step": 6755 | |
| }, | |
| { | |
| "epoch": 2.6922885448299896, | |
| "grad_norm": 6.043037414550781, | |
| "learning_rate": 7.772978441946836e-06, | |
| "loss": 1.368, | |
| "step": 6760 | |
| }, | |
| { | |
| "epoch": 2.694279882511077, | |
| "grad_norm": 6.692487716674805, | |
| "learning_rate": 7.673866813855e-06, | |
| "loss": 1.4427, | |
| "step": 6765 | |
| }, | |
| { | |
| "epoch": 2.696271220192164, | |
| "grad_norm": 6.502307415008545, | |
| "learning_rate": 7.575374533128492e-06, | |
| "loss": 1.3994, | |
| "step": 6770 | |
| }, | |
| { | |
| "epoch": 2.6982625578732513, | |
| "grad_norm": 5.603662014007568, | |
| "learning_rate": 7.477502028366999e-06, | |
| "loss": 1.4891, | |
| "step": 6775 | |
| }, | |
| { | |
| "epoch": 2.7002538955543387, | |
| "grad_norm": 6.476789474487305, | |
| "learning_rate": 7.380249725473186e-06, | |
| "loss": 1.4082, | |
| "step": 6780 | |
| }, | |
| { | |
| "epoch": 2.702245233235426, | |
| "grad_norm": 7.55186653137207, | |
| "learning_rate": 7.2836180476508076e-06, | |
| "loss": 1.3687, | |
| "step": 6785 | |
| }, | |
| { | |
| "epoch": 2.704236570916513, | |
| "grad_norm": 6.1320977210998535, | |
| "learning_rate": 7.187607415402946e-06, | |
| "loss": 1.3346, | |
| "step": 6790 | |
| }, | |
| { | |
| "epoch": 2.7062279085976004, | |
| "grad_norm": 5.8300395011901855, | |
| "learning_rate": 7.092218246530129e-06, | |
| "loss": 1.4623, | |
| "step": 6795 | |
| }, | |
| { | |
| "epoch": 2.708219246278688, | |
| "grad_norm": 7.90705680847168, | |
| "learning_rate": 6.997450956128481e-06, | |
| "loss": 1.3667, | |
| "step": 6800 | |
| }, | |
| { | |
| "epoch": 2.7102105839597748, | |
| "grad_norm": 5.899392604827881, | |
| "learning_rate": 6.90330595658799e-06, | |
| "loss": 1.3206, | |
| "step": 6805 | |
| }, | |
| { | |
| "epoch": 2.712201921640862, | |
| "grad_norm": 6.035600662231445, | |
| "learning_rate": 6.809783657590695e-06, | |
| "loss": 1.4675, | |
| "step": 6810 | |
| }, | |
| { | |
| "epoch": 2.7141932593219495, | |
| "grad_norm": 6.182140827178955, | |
| "learning_rate": 6.7168844661088685e-06, | |
| "loss": 1.2444, | |
| "step": 6815 | |
| }, | |
| { | |
| "epoch": 2.716184597003037, | |
| "grad_norm": 6.4296698570251465, | |
| "learning_rate": 6.6246087864032186e-06, | |
| "loss": 1.3859, | |
| "step": 6820 | |
| }, | |
| { | |
| "epoch": 2.7181759346841243, | |
| "grad_norm": 5.676371097564697, | |
| "learning_rate": 6.532957020021257e-06, | |
| "loss": 1.3183, | |
| "step": 6825 | |
| }, | |
| { | |
| "epoch": 2.7201672723652113, | |
| "grad_norm": 5.844963550567627, | |
| "learning_rate": 6.441929565795434e-06, | |
| "loss": 1.3464, | |
| "step": 6830 | |
| }, | |
| { | |
| "epoch": 2.7221586100462987, | |
| "grad_norm": 6.410009860992432, | |
| "learning_rate": 6.351526819841407e-06, | |
| "loss": 1.4351, | |
| "step": 6835 | |
| }, | |
| { | |
| "epoch": 2.724149947727386, | |
| "grad_norm": 5.1571855545043945, | |
| "learning_rate": 6.261749175556391e-06, | |
| "loss": 1.3552, | |
| "step": 6840 | |
| }, | |
| { | |
| "epoch": 2.726141285408473, | |
| "grad_norm": 7.127869129180908, | |
| "learning_rate": 6.172597023617409e-06, | |
| "loss": 1.4057, | |
| "step": 6845 | |
| }, | |
| { | |
| "epoch": 2.7281326230895604, | |
| "grad_norm": 5.79174280166626, | |
| "learning_rate": 6.084070751979547e-06, | |
| "loss": 1.3631, | |
| "step": 6850 | |
| }, | |
| { | |
| "epoch": 2.730123960770648, | |
| "grad_norm": 6.417024612426758, | |
| "learning_rate": 5.996170745874335e-06, | |
| "loss": 1.3564, | |
| "step": 6855 | |
| }, | |
| { | |
| "epoch": 2.7321152984517347, | |
| "grad_norm": 5.934596061706543, | |
| "learning_rate": 5.9088973878080316e-06, | |
| "loss": 1.3883, | |
| "step": 6860 | |
| }, | |
| { | |
| "epoch": 2.734106636132822, | |
| "grad_norm": 7.157084941864014, | |
| "learning_rate": 5.8222510575599955e-06, | |
| "loss": 1.4344, | |
| "step": 6865 | |
| }, | |
| { | |
| "epoch": 2.7360979738139095, | |
| "grad_norm": 6.882761001586914, | |
| "learning_rate": 5.736232132180951e-06, | |
| "loss": 1.4617, | |
| "step": 6870 | |
| }, | |
| { | |
| "epoch": 2.738089311494997, | |
| "grad_norm": 5.87106990814209, | |
| "learning_rate": 5.6508409859914394e-06, | |
| "loss": 1.3243, | |
| "step": 6875 | |
| }, | |
| { | |
| "epoch": 2.7400806491760843, | |
| "grad_norm": 6.3300065994262695, | |
| "learning_rate": 5.566077990580187e-06, | |
| "loss": 1.3606, | |
| "step": 6880 | |
| }, | |
| { | |
| "epoch": 2.7420719868571712, | |
| "grad_norm": 5.391266345977783, | |
| "learning_rate": 5.481943514802372e-06, | |
| "loss": 1.4057, | |
| "step": 6885 | |
| }, | |
| { | |
| "epoch": 2.7440633245382586, | |
| "grad_norm": 6.474628448486328, | |
| "learning_rate": 5.398437924778182e-06, | |
| "loss": 1.2948, | |
| "step": 6890 | |
| }, | |
| { | |
| "epoch": 2.746054662219346, | |
| "grad_norm": 5.8539276123046875, | |
| "learning_rate": 5.315561583891104e-06, | |
| "loss": 1.4363, | |
| "step": 6895 | |
| }, | |
| { | |
| "epoch": 2.748045999900433, | |
| "grad_norm": 5.793940544128418, | |
| "learning_rate": 5.233314852786369e-06, | |
| "loss": 1.4127, | |
| "step": 6900 | |
| }, | |
| { | |
| "epoch": 2.7500373375815204, | |
| "grad_norm": 5.599001407623291, | |
| "learning_rate": 5.151698089369433e-06, | |
| "loss": 1.3238, | |
| "step": 6905 | |
| }, | |
| { | |
| "epoch": 2.7520286752626077, | |
| "grad_norm": 6.264495372772217, | |
| "learning_rate": 5.07071164880431e-06, | |
| "loss": 1.4469, | |
| "step": 6910 | |
| }, | |
| { | |
| "epoch": 2.7540200129436947, | |
| "grad_norm": 6.348090648651123, | |
| "learning_rate": 4.990355883512209e-06, | |
| "loss": 1.4263, | |
| "step": 6915 | |
| }, | |
| { | |
| "epoch": 2.756011350624782, | |
| "grad_norm": 6.050279140472412, | |
| "learning_rate": 4.910631143169785e-06, | |
| "loss": 1.3478, | |
| "step": 6920 | |
| }, | |
| { | |
| "epoch": 2.7580026883058695, | |
| "grad_norm": 5.605953216552734, | |
| "learning_rate": 4.831537774707739e-06, | |
| "loss": 1.4268, | |
| "step": 6925 | |
| }, | |
| { | |
| "epoch": 2.759994025986957, | |
| "grad_norm": 5.3409199714660645, | |
| "learning_rate": 4.7530761223093515e-06, | |
| "loss": 1.3866, | |
| "step": 6930 | |
| }, | |
| { | |
| "epoch": 2.7619853636680443, | |
| "grad_norm": 6.341844081878662, | |
| "learning_rate": 4.675246527408871e-06, | |
| "loss": 1.4328, | |
| "step": 6935 | |
| }, | |
| { | |
| "epoch": 2.763976701349131, | |
| "grad_norm": 5.9319586753845215, | |
| "learning_rate": 4.5980493286900626e-06, | |
| "loss": 1.4405, | |
| "step": 6940 | |
| }, | |
| { | |
| "epoch": 2.7659680390302186, | |
| "grad_norm": 6.037888050079346, | |
| "learning_rate": 4.52148486208479e-06, | |
| "loss": 1.4027, | |
| "step": 6945 | |
| }, | |
| { | |
| "epoch": 2.767959376711306, | |
| "grad_norm": 6.9271440505981445, | |
| "learning_rate": 4.445553460771523e-06, | |
| "loss": 1.4402, | |
| "step": 6950 | |
| }, | |
| { | |
| "epoch": 2.769950714392393, | |
| "grad_norm": 6.065907001495361, | |
| "learning_rate": 4.3702554551738656e-06, | |
| "loss": 1.3285, | |
| "step": 6955 | |
| }, | |
| { | |
| "epoch": 2.7719420520734803, | |
| "grad_norm": 6.596900463104248, | |
| "learning_rate": 4.295591172959078e-06, | |
| "loss": 1.3919, | |
| "step": 6960 | |
| }, | |
| { | |
| "epoch": 2.7739333897545677, | |
| "grad_norm": 6.205164432525635, | |
| "learning_rate": 4.2215609390368275e-06, | |
| "loss": 1.3757, | |
| "step": 6965 | |
| }, | |
| { | |
| "epoch": 2.7759247274356547, | |
| "grad_norm": 5.704510688781738, | |
| "learning_rate": 4.148165075557536e-06, | |
| "loss": 1.2786, | |
| "step": 6970 | |
| }, | |
| { | |
| "epoch": 2.777916065116742, | |
| "grad_norm": 6.643089294433594, | |
| "learning_rate": 4.0754039019111504e-06, | |
| "loss": 1.3229, | |
| "step": 6975 | |
| }, | |
| { | |
| "epoch": 2.7799074027978294, | |
| "grad_norm": 5.736795425415039, | |
| "learning_rate": 4.0032777347256796e-06, | |
| "loss": 1.3014, | |
| "step": 6980 | |
| }, | |
| { | |
| "epoch": 2.781898740478917, | |
| "grad_norm": 5.650008201599121, | |
| "learning_rate": 3.931786887865856e-06, | |
| "loss": 1.3824, | |
| "step": 6985 | |
| }, | |
| { | |
| "epoch": 2.7838900781600042, | |
| "grad_norm": 6.722746849060059, | |
| "learning_rate": 3.860931672431727e-06, | |
| "loss": 1.3002, | |
| "step": 6990 | |
| }, | |
| { | |
| "epoch": 2.785881415841091, | |
| "grad_norm": 7.014549255371094, | |
| "learning_rate": 3.7907123967573174e-06, | |
| "loss": 1.3736, | |
| "step": 6995 | |
| }, | |
| { | |
| "epoch": 2.7878727535221786, | |
| "grad_norm": 5.417212009429932, | |
| "learning_rate": 3.721129366409331e-06, | |
| "loss": 1.2088, | |
| "step": 7000 | |
| }, | |
| { | |
| "epoch": 2.789864091203266, | |
| "grad_norm": 6.103097915649414, | |
| "learning_rate": 3.6521828841857227e-06, | |
| "loss": 1.4813, | |
| "step": 7005 | |
| }, | |
| { | |
| "epoch": 2.791855428884353, | |
| "grad_norm": 6.893677234649658, | |
| "learning_rate": 3.583873250114494e-06, | |
| "loss": 1.4579, | |
| "step": 7010 | |
| }, | |
| { | |
| "epoch": 2.7938467665654403, | |
| "grad_norm": 5.980708122253418, | |
| "learning_rate": 3.5162007614522813e-06, | |
| "loss": 1.3265, | |
| "step": 7015 | |
| }, | |
| { | |
| "epoch": 2.7958381042465277, | |
| "grad_norm": 6.312836647033691, | |
| "learning_rate": 3.4491657126831885e-06, | |
| "loss": 1.2436, | |
| "step": 7020 | |
| }, | |
| { | |
| "epoch": 2.7978294419276146, | |
| "grad_norm": 7.480289936065674, | |
| "learning_rate": 3.3827683955173544e-06, | |
| "loss": 1.4201, | |
| "step": 7025 | |
| }, | |
| { | |
| "epoch": 2.799820779608702, | |
| "grad_norm": 6.134524345397949, | |
| "learning_rate": 3.317009098889789e-06, | |
| "loss": 1.2915, | |
| "step": 7030 | |
| }, | |
| { | |
| "epoch": 2.8018121172897894, | |
| "grad_norm": 5.769630432128906, | |
| "learning_rate": 3.251888108959139e-06, | |
| "loss": 1.3436, | |
| "step": 7035 | |
| }, | |
| { | |
| "epoch": 2.803803454970877, | |
| "grad_norm": 6.060738563537598, | |
| "learning_rate": 3.1874057091063056e-06, | |
| "loss": 1.3628, | |
| "step": 7040 | |
| }, | |
| { | |
| "epoch": 2.805794792651964, | |
| "grad_norm": 7.591846942901611, | |
| "learning_rate": 3.1235621799333475e-06, | |
| "loss": 1.3498, | |
| "step": 7045 | |
| }, | |
| { | |
| "epoch": 2.807786130333051, | |
| "grad_norm": 7.156486988067627, | |
| "learning_rate": 3.0603577992621807e-06, | |
| "loss": 1.4384, | |
| "step": 7050 | |
| }, | |
| { | |
| "epoch": 2.8097774680141385, | |
| "grad_norm": 6.646254062652588, | |
| "learning_rate": 2.9977928421334454e-06, | |
| "loss": 1.38, | |
| "step": 7055 | |
| }, | |
| { | |
| "epoch": 2.811768805695226, | |
| "grad_norm": 5.995254993438721, | |
| "learning_rate": 2.9358675808052232e-06, | |
| "loss": 1.307, | |
| "step": 7060 | |
| }, | |
| { | |
| "epoch": 2.813760143376313, | |
| "grad_norm": 7.127299785614014, | |
| "learning_rate": 2.8745822847518752e-06, | |
| "loss": 1.4385, | |
| "step": 7065 | |
| }, | |
| { | |
| "epoch": 2.8157514810574003, | |
| "grad_norm": 6.606611251831055, | |
| "learning_rate": 2.813937220662954e-06, | |
| "loss": 1.4435, | |
| "step": 7070 | |
| }, | |
| { | |
| "epoch": 2.8177428187384876, | |
| "grad_norm": 5.3539605140686035, | |
| "learning_rate": 2.7539326524419092e-06, | |
| "loss": 1.3494, | |
| "step": 7075 | |
| }, | |
| { | |
| "epoch": 2.8197341564195746, | |
| "grad_norm": 6.307882785797119, | |
| "learning_rate": 2.6945688412050357e-06, | |
| "loss": 1.5222, | |
| "step": 7080 | |
| }, | |
| { | |
| "epoch": 2.821725494100662, | |
| "grad_norm": 6.9230637550354, | |
| "learning_rate": 2.6358460452802767e-06, | |
| "loss": 1.4859, | |
| "step": 7085 | |
| }, | |
| { | |
| "epoch": 2.8237168317817494, | |
| "grad_norm": 6.22226619720459, | |
| "learning_rate": 2.577764520206155e-06, | |
| "loss": 1.4203, | |
| "step": 7090 | |
| }, | |
| { | |
| "epoch": 2.8257081694628368, | |
| "grad_norm": 6.648333549499512, | |
| "learning_rate": 2.52032451873066e-06, | |
| "loss": 1.3568, | |
| "step": 7095 | |
| }, | |
| { | |
| "epoch": 2.827699507143924, | |
| "grad_norm": 6.351929664611816, | |
| "learning_rate": 2.4635262908100473e-06, | |
| "loss": 1.4753, | |
| "step": 7100 | |
| }, | |
| { | |
| "epoch": 2.829690844825011, | |
| "grad_norm": 6.392971515655518, | |
| "learning_rate": 2.40737008360789e-06, | |
| "loss": 1.378, | |
| "step": 7105 | |
| }, | |
| { | |
| "epoch": 2.8316821825060985, | |
| "grad_norm": 6.635252952575684, | |
| "learning_rate": 2.35185614149393e-06, | |
| "loss": 1.4273, | |
| "step": 7110 | |
| }, | |
| { | |
| "epoch": 2.833673520187186, | |
| "grad_norm": 6.16091251373291, | |
| "learning_rate": 2.2969847060429924e-06, | |
| "loss": 1.4064, | |
| "step": 7115 | |
| }, | |
| { | |
| "epoch": 2.835664857868273, | |
| "grad_norm": 6.811882019042969, | |
| "learning_rate": 2.2427560160340084e-06, | |
| "loss": 1.4521, | |
| "step": 7120 | |
| }, | |
| { | |
| "epoch": 2.83765619554936, | |
| "grad_norm": 6.058680534362793, | |
| "learning_rate": 2.189170307448879e-06, | |
| "loss": 1.3218, | |
| "step": 7125 | |
| }, | |
| { | |
| "epoch": 2.8396475332304476, | |
| "grad_norm": 6.335659980773926, | |
| "learning_rate": 2.136227813471575e-06, | |
| "loss": 1.3948, | |
| "step": 7130 | |
| }, | |
| { | |
| "epoch": 2.8416388709115346, | |
| "grad_norm": 5.820364952087402, | |
| "learning_rate": 2.0839287644870086e-06, | |
| "loss": 1.3663, | |
| "step": 7135 | |
| }, | |
| { | |
| "epoch": 2.843630208592622, | |
| "grad_norm": 6.760519981384277, | |
| "learning_rate": 2.0322733880800468e-06, | |
| "loss": 1.442, | |
| "step": 7140 | |
| }, | |
| { | |
| "epoch": 2.8456215462737093, | |
| "grad_norm": 6.848934650421143, | |
| "learning_rate": 1.981261909034598e-06, | |
| "loss": 1.5015, | |
| "step": 7145 | |
| }, | |
| { | |
| "epoch": 2.8476128839547967, | |
| "grad_norm": 7.0265021324157715, | |
| "learning_rate": 1.930894549332562e-06, | |
| "loss": 1.3389, | |
| "step": 7150 | |
| }, | |
| { | |
| "epoch": 2.849604221635884, | |
| "grad_norm": 4.9822235107421875, | |
| "learning_rate": 1.881171528152864e-06, | |
| "loss": 1.3325, | |
| "step": 7155 | |
| }, | |
| { | |
| "epoch": 2.851595559316971, | |
| "grad_norm": 6.151075839996338, | |
| "learning_rate": 1.8320930618705376e-06, | |
| "loss": 1.4162, | |
| "step": 7160 | |
| }, | |
| { | |
| "epoch": 2.8535868969980585, | |
| "grad_norm": 6.126955986022949, | |
| "learning_rate": 1.7836593640558116e-06, | |
| "loss": 1.4418, | |
| "step": 7165 | |
| }, | |
| { | |
| "epoch": 2.855578234679146, | |
| "grad_norm": 6.3819122314453125, | |
| "learning_rate": 1.735870645473042e-06, | |
| "loss": 1.4855, | |
| "step": 7170 | |
| }, | |
| { | |
| "epoch": 2.857569572360233, | |
| "grad_norm": 5.6472859382629395, | |
| "learning_rate": 1.688727114079963e-06, | |
| "loss": 1.4014, | |
| "step": 7175 | |
| }, | |
| { | |
| "epoch": 2.85956091004132, | |
| "grad_norm": 6.101297378540039, | |
| "learning_rate": 1.6422289750267047e-06, | |
| "loss": 1.4108, | |
| "step": 7180 | |
| }, | |
| { | |
| "epoch": 2.8615522477224076, | |
| "grad_norm": 6.445528984069824, | |
| "learning_rate": 1.5963764306548443e-06, | |
| "loss": 1.3386, | |
| "step": 7185 | |
| }, | |
| { | |
| "epoch": 2.863543585403495, | |
| "grad_norm": 6.503068447113037, | |
| "learning_rate": 1.5511696804966556e-06, | |
| "loss": 1.4485, | |
| "step": 7190 | |
| }, | |
| { | |
| "epoch": 2.865534923084582, | |
| "grad_norm": 6.986062526702881, | |
| "learning_rate": 1.5066089212741271e-06, | |
| "loss": 1.4852, | |
| "step": 7195 | |
| }, | |
| { | |
| "epoch": 2.8675262607656693, | |
| "grad_norm": 6.3332366943359375, | |
| "learning_rate": 1.4626943468981467e-06, | |
| "loss": 1.3835, | |
| "step": 7200 | |
| }, | |
| { | |
| "epoch": 2.8695175984467567, | |
| "grad_norm": 6.295677661895752, | |
| "learning_rate": 1.4194261484677006e-06, | |
| "loss": 1.4499, | |
| "step": 7205 | |
| }, | |
| { | |
| "epoch": 2.871508936127844, | |
| "grad_norm": 6.975468158721924, | |
| "learning_rate": 1.3768045142689588e-06, | |
| "loss": 1.2806, | |
| "step": 7210 | |
| }, | |
| { | |
| "epoch": 2.873500273808931, | |
| "grad_norm": 6.046121597290039, | |
| "learning_rate": 1.3348296297745254e-06, | |
| "loss": 1.4481, | |
| "step": 7215 | |
| }, | |
| { | |
| "epoch": 2.8754916114900184, | |
| "grad_norm": 6.595587253570557, | |
| "learning_rate": 1.2935016776425721e-06, | |
| "loss": 1.4859, | |
| "step": 7220 | |
| }, | |
| { | |
| "epoch": 2.877482949171106, | |
| "grad_norm": 6.250722885131836, | |
| "learning_rate": 1.2528208377161064e-06, | |
| "loss": 1.3335, | |
| "step": 7225 | |
| }, | |
| { | |
| "epoch": 2.8794742868521928, | |
| "grad_norm": 5.738440990447998, | |
| "learning_rate": 1.2127872870221378e-06, | |
| "loss": 1.39, | |
| "step": 7230 | |
| }, | |
| { | |
| "epoch": 2.88146562453328, | |
| "grad_norm": 5.639859676361084, | |
| "learning_rate": 1.1734011997709625e-06, | |
| "loss": 1.379, | |
| "step": 7235 | |
| }, | |
| { | |
| "epoch": 2.8834569622143675, | |
| "grad_norm": 6.074785232543945, | |
| "learning_rate": 1.1346627473553138e-06, | |
| "loss": 1.3816, | |
| "step": 7240 | |
| }, | |
| { | |
| "epoch": 2.885448299895455, | |
| "grad_norm": 5.629566669464111, | |
| "learning_rate": 1.0965720983497295e-06, | |
| "loss": 1.3705, | |
| "step": 7245 | |
| }, | |
| { | |
| "epoch": 2.8874396375765423, | |
| "grad_norm": 6.7186970710754395, | |
| "learning_rate": 1.0591294185097187e-06, | |
| "loss": 1.4106, | |
| "step": 7250 | |
| }, | |
| { | |
| "epoch": 2.8894309752576293, | |
| "grad_norm": 6.334540843963623, | |
| "learning_rate": 1.0223348707710965e-06, | |
| "loss": 1.3795, | |
| "step": 7255 | |
| }, | |
| { | |
| "epoch": 2.8914223129387167, | |
| "grad_norm": 6.2255988121032715, | |
| "learning_rate": 9.861886152492838e-07, | |
| "loss": 1.4139, | |
| "step": 7260 | |
| }, | |
| { | |
| "epoch": 2.893413650619804, | |
| "grad_norm": 6.067513942718506, | |
| "learning_rate": 9.506908092385579e-07, | |
| "loss": 1.3067, | |
| "step": 7265 | |
| }, | |
| { | |
| "epoch": 2.895404988300891, | |
| "grad_norm": 5.387002468109131, | |
| "learning_rate": 9.158416072114205e-07, | |
| "loss": 1.3011, | |
| "step": 7270 | |
| }, | |
| { | |
| "epoch": 2.8973963259819784, | |
| "grad_norm": 6.608880996704102, | |
| "learning_rate": 8.816411608178975e-07, | |
| "loss": 1.4344, | |
| "step": 7275 | |
| }, | |
| { | |
| "epoch": 2.8993876636630658, | |
| "grad_norm": 6.290225028991699, | |
| "learning_rate": 8.480896188848729e-07, | |
| "loss": 1.3629, | |
| "step": 7280 | |
| }, | |
| { | |
| "epoch": 2.9013790013441527, | |
| "grad_norm": 6.4408183097839355, | |
| "learning_rate": 8.151871274154565e-07, | |
| "loss": 1.2844, | |
| "step": 7285 | |
| }, | |
| { | |
| "epoch": 2.90337033902524, | |
| "grad_norm": 5.775238990783691, | |
| "learning_rate": 7.829338295883836e-07, | |
| "loss": 1.4011, | |
| "step": 7290 | |
| }, | |
| { | |
| "epoch": 2.9053616767063275, | |
| "grad_norm": 7.453217506408691, | |
| "learning_rate": 7.513298657572831e-07, | |
| "loss": 1.3853, | |
| "step": 7295 | |
| }, | |
| { | |
| "epoch": 2.907353014387415, | |
| "grad_norm": 7.316783428192139, | |
| "learning_rate": 7.203753734502104e-07, | |
| "loss": 1.316, | |
| "step": 7300 | |
| }, | |
| { | |
| "epoch": 2.9093443520685023, | |
| "grad_norm": 6.529441833496094, | |
| "learning_rate": 6.900704873689322e-07, | |
| "loss": 1.3602, | |
| "step": 7305 | |
| }, | |
| { | |
| "epoch": 2.9113356897495892, | |
| "grad_norm": 6.993330001831055, | |
| "learning_rate": 6.604153393884093e-07, | |
| "loss": 1.4213, | |
| "step": 7310 | |
| }, | |
| { | |
| "epoch": 2.9133270274306766, | |
| "grad_norm": 5.9941182136535645, | |
| "learning_rate": 6.314100585561477e-07, | |
| "loss": 1.5167, | |
| "step": 7315 | |
| }, | |
| { | |
| "epoch": 2.915318365111764, | |
| "grad_norm": 6.265793800354004, | |
| "learning_rate": 6.030547710917655e-07, | |
| "loss": 1.403, | |
| "step": 7320 | |
| }, | |
| { | |
| "epoch": 2.917309702792851, | |
| "grad_norm": 5.922816753387451, | |
| "learning_rate": 5.753496003862935e-07, | |
| "loss": 1.4157, | |
| "step": 7325 | |
| }, | |
| { | |
| "epoch": 2.9193010404739383, | |
| "grad_norm": 6.88986873626709, | |
| "learning_rate": 5.482946670017585e-07, | |
| "loss": 1.3336, | |
| "step": 7330 | |
| }, | |
| { | |
| "epoch": 2.9212923781550257, | |
| "grad_norm": 7.298521995544434, | |
| "learning_rate": 5.218900886705845e-07, | |
| "loss": 1.3912, | |
| "step": 7335 | |
| }, | |
| { | |
| "epoch": 2.9232837158361127, | |
| "grad_norm": 6.366304397583008, | |
| "learning_rate": 4.961359802951259e-07, | |
| "loss": 1.4181, | |
| "step": 7340 | |
| }, | |
| { | |
| "epoch": 2.9252750535172, | |
| "grad_norm": 6.151203155517578, | |
| "learning_rate": 4.7103245394711773e-07, | |
| "loss": 1.4416, | |
| "step": 7345 | |
| }, | |
| { | |
| "epoch": 2.9272663911982875, | |
| "grad_norm": 7.088301181793213, | |
| "learning_rate": 4.4657961886724327e-07, | |
| "loss": 1.4493, | |
| "step": 7350 | |
| }, | |
| { | |
| "epoch": 2.929257728879375, | |
| "grad_norm": 7.647716522216797, | |
| "learning_rate": 4.2277758146461726e-07, | |
| "loss": 1.4544, | |
| "step": 7355 | |
| }, | |
| { | |
| "epoch": 2.9312490665604622, | |
| "grad_norm": 6.20832633972168, | |
| "learning_rate": 3.9962644531637e-07, | |
| "loss": 1.3583, | |
| "step": 7360 | |
| }, | |
| { | |
| "epoch": 2.933240404241549, | |
| "grad_norm": 5.964649200439453, | |
| "learning_rate": 3.771263111671141e-07, | |
| "loss": 1.3814, | |
| "step": 7365 | |
| }, | |
| { | |
| "epoch": 2.9352317419226366, | |
| "grad_norm": 6.578097343444824, | |
| "learning_rate": 3.55277276928595e-07, | |
| "loss": 1.4387, | |
| "step": 7370 | |
| }, | |
| { | |
| "epoch": 2.937223079603724, | |
| "grad_norm": 5.8518452644348145, | |
| "learning_rate": 3.340794376792244e-07, | |
| "loss": 1.3866, | |
| "step": 7375 | |
| }, | |
| { | |
| "epoch": 2.939214417284811, | |
| "grad_norm": 6.087634086608887, | |
| "learning_rate": 3.135328856636643e-07, | |
| "loss": 1.3422, | |
| "step": 7380 | |
| }, | |
| { | |
| "epoch": 2.9412057549658983, | |
| "grad_norm": 7.072717189788818, | |
| "learning_rate": 2.93637710292427e-07, | |
| "loss": 1.3686, | |
| "step": 7385 | |
| }, | |
| { | |
| "epoch": 2.9431970926469857, | |
| "grad_norm": 5.9594502449035645, | |
| "learning_rate": 2.743939981414922e-07, | |
| "loss": 1.4133, | |
| "step": 7390 | |
| }, | |
| { | |
| "epoch": 2.9451884303280726, | |
| "grad_norm": 8.152170181274414, | |
| "learning_rate": 2.5580183295190735e-07, | |
| "loss": 1.4139, | |
| "step": 7395 | |
| }, | |
| { | |
| "epoch": 2.94717976800916, | |
| "grad_norm": 6.129400253295898, | |
| "learning_rate": 2.378612956295045e-07, | |
| "loss": 1.4123, | |
| "step": 7400 | |
| }, | |
| { | |
| "epoch": 2.9491711056902474, | |
| "grad_norm": 6.710536956787109, | |
| "learning_rate": 2.2057246424441733e-07, | |
| "loss": 1.5799, | |
| "step": 7405 | |
| }, | |
| { | |
| "epoch": 2.951162443371335, | |
| "grad_norm": 6.588728427886963, | |
| "learning_rate": 2.03935414030848e-07, | |
| "loss": 1.4038, | |
| "step": 7410 | |
| }, | |
| { | |
| "epoch": 2.953153781052422, | |
| "grad_norm": 6.022589683532715, | |
| "learning_rate": 1.879502173867009e-07, | |
| "loss": 1.3085, | |
| "step": 7415 | |
| }, | |
| { | |
| "epoch": 2.955145118733509, | |
| "grad_norm": 5.406111240386963, | |
| "learning_rate": 1.7261694387328273e-07, | |
| "loss": 1.4544, | |
| "step": 7420 | |
| }, | |
| { | |
| "epoch": 2.9571364564145965, | |
| "grad_norm": 6.308793067932129, | |
| "learning_rate": 1.579356602149362e-07, | |
| "loss": 1.3811, | |
| "step": 7425 | |
| }, | |
| { | |
| "epoch": 2.959127794095684, | |
| "grad_norm": 5.521365165710449, | |
| "learning_rate": 1.439064302988402e-07, | |
| "loss": 1.3174, | |
| "step": 7430 | |
| }, | |
| { | |
| "epoch": 2.961119131776771, | |
| "grad_norm": 6.406843185424805, | |
| "learning_rate": 1.3052931517471e-07, | |
| "loss": 1.4519, | |
| "step": 7435 | |
| }, | |
| { | |
| "epoch": 2.9631104694578583, | |
| "grad_norm": 7.547394752502441, | |
| "learning_rate": 1.1780437305448087e-07, | |
| "loss": 1.4098, | |
| "step": 7440 | |
| }, | |
| { | |
| "epoch": 2.9651018071389457, | |
| "grad_norm": 6.150856018066406, | |
| "learning_rate": 1.057316593120916e-07, | |
| "loss": 1.3563, | |
| "step": 7445 | |
| }, | |
| { | |
| "epoch": 2.9670931448200326, | |
| "grad_norm": 6.139143466949463, | |
| "learning_rate": 9.431122648325129e-08, | |
| "loss": 1.396, | |
| "step": 7450 | |
| }, | |
| { | |
| "epoch": 2.96908448250112, | |
| "grad_norm": 6.330423355102539, | |
| "learning_rate": 8.354312426518961e-08, | |
| "loss": 1.3253, | |
| "step": 7455 | |
| }, | |
| { | |
| "epoch": 2.9710758201822074, | |
| "grad_norm": 6.3016228675842285, | |
| "learning_rate": 7.342739951644028e-08, | |
| "loss": 1.4452, | |
| "step": 7460 | |
| }, | |
| { | |
| "epoch": 2.973067157863295, | |
| "grad_norm": 5.905858516693115, | |
| "learning_rate": 6.396409625667453e-08, | |
| "loss": 1.3387, | |
| "step": 7465 | |
| }, | |
| { | |
| "epoch": 2.975058495544382, | |
| "grad_norm": 6.0580549240112305, | |
| "learning_rate": 5.5153255666468e-08, | |
| "loss": 1.4156, | |
| "step": 7470 | |
| }, | |
| { | |
| "epoch": 2.977049833225469, | |
| "grad_norm": 7.04741907119751, | |
| "learning_rate": 4.6994916087134125e-08, | |
| "loss": 1.3523, | |
| "step": 7475 | |
| }, | |
| { | |
| "epoch": 2.9790411709065565, | |
| "grad_norm": 5.739146709442139, | |
| "learning_rate": 3.9489113020557685e-08, | |
| "loss": 1.3493, | |
| "step": 7480 | |
| }, | |
| { | |
| "epoch": 2.981032508587644, | |
| "grad_norm": 5.794787883758545, | |
| "learning_rate": 3.263587912902821e-08, | |
| "loss": 1.3492, | |
| "step": 7485 | |
| }, | |
| { | |
| "epoch": 2.983023846268731, | |
| "grad_norm": 6.884363174438477, | |
| "learning_rate": 2.643524423515675e-08, | |
| "loss": 1.4875, | |
| "step": 7490 | |
| }, | |
| { | |
| "epoch": 2.9850151839498182, | |
| "grad_norm": 5.869548797607422, | |
| "learning_rate": 2.0887235321626062e-08, | |
| "loss": 1.3215, | |
| "step": 7495 | |
| }, | |
| { | |
| "epoch": 2.9870065216309056, | |
| "grad_norm": 6.334482192993164, | |
| "learning_rate": 1.599187653122391e-08, | |
| "loss": 1.3576, | |
| "step": 7500 | |
| }, | |
| { | |
| "epoch": 2.9889978593119926, | |
| "grad_norm": 7.025340557098389, | |
| "learning_rate": 1.174918916660994e-08, | |
| "loss": 1.4502, | |
| "step": 7505 | |
| }, | |
| { | |
| "epoch": 2.99098919699308, | |
| "grad_norm": 6.232082366943359, | |
| "learning_rate": 8.15919169028234e-09, | |
| "loss": 1.3403, | |
| "step": 7510 | |
| }, | |
| { | |
| "epoch": 2.9929805346741674, | |
| "grad_norm": 7.146350860595703, | |
| "learning_rate": 5.221899724527912e-09, | |
| "loss": 1.4512, | |
| "step": 7515 | |
| }, | |
| { | |
| "epoch": 2.9949718723552547, | |
| "grad_norm": 6.049505233764648, | |
| "learning_rate": 2.937326051272171e-09, | |
| "loss": 1.503, | |
| "step": 7520 | |
| }, | |
| { | |
| "epoch": 2.996963210036342, | |
| "grad_norm": 5.630764007568359, | |
| "learning_rate": 1.3054806120627036e-09, | |
| "loss": 1.2154, | |
| "step": 7525 | |
| }, | |
| { | |
| "epoch": 2.998954547717429, | |
| "grad_norm": 6.927404880523682, | |
| "learning_rate": 3.263705080691625e-10, | |
| "loss": 1.5118, | |
| "step": 7530 | |
| }, | |
| { | |
| "epoch": 2.998954547717429, | |
| "step": 7530, | |
| "total_flos": 3.7457880685726925e+17, | |
| "train_loss": 2.0558883373164245, | |
| "train_runtime": 9070.5505, | |
| "train_samples_per_second": 13.287, | |
| "train_steps_per_second": 0.83 | |
| } | |
| ], | |
| "logging_steps": 5, | |
| "max_steps": 7530, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 3, | |
| "save_steps": 100, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": false | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 3.7457880685726925e+17, | |
| "train_batch_size": 2, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |