| { |
| "best_metric": 1.262895941734314, |
| "best_model_checkpoint": "runs/deepseek_lora_20240422-165831/checkpoint-27500", |
| "epoch": 0.75, |
| "eval_steps": 500, |
| "global_step": 30000, |
| "is_hyper_param_search": false, |
| "is_local_process_zero": true, |
| "is_world_process_zero": true, |
| "log_history": [ |
| { |
| "epoch": 0.0, |
| "grad_norm": 2.7229208946228027, |
| "learning_rate": 4.0000000000000003e-07, |
| "loss": 2.2066, |
| "step": 10 |
| }, |
| { |
| "epoch": 0.0, |
| "grad_norm": 5.007352828979492, |
| "learning_rate": 8.000000000000001e-07, |
| "loss": 2.1122, |
| "step": 20 |
| }, |
| { |
| "epoch": 0.0, |
| "grad_norm": 3.8536276817321777, |
| "learning_rate": 1.2000000000000002e-06, |
| "loss": 2.2073, |
| "step": 30 |
| }, |
| { |
| "epoch": 0.0, |
| "grad_norm": 6.411635875701904, |
| "learning_rate": 1.6000000000000001e-06, |
| "loss": 2.3019, |
| "step": 40 |
| }, |
| { |
| "epoch": 0.0, |
| "grad_norm": 8.24316692352295, |
| "learning_rate": 2.0000000000000003e-06, |
| "loss": 2.1321, |
| "step": 50 |
| }, |
| { |
| "epoch": 0.0, |
| "grad_norm": 4.362759113311768, |
| "learning_rate": 2.4000000000000003e-06, |
| "loss": 2.1916, |
| "step": 60 |
| }, |
| { |
| "epoch": 0.0, |
| "grad_norm": 4.222783088684082, |
| "learning_rate": 2.8000000000000003e-06, |
| "loss": 2.2857, |
| "step": 70 |
| }, |
| { |
| "epoch": 0.0, |
| "grad_norm": 3.4690635204315186, |
| "learning_rate": 3.2000000000000003e-06, |
| "loss": 2.2253, |
| "step": 80 |
| }, |
| { |
| "epoch": 0.0, |
| "grad_norm": 4.878602981567383, |
| "learning_rate": 3.6000000000000003e-06, |
| "loss": 2.1578, |
| "step": 90 |
| }, |
| { |
| "epoch": 0.0, |
| "grad_norm": 4.750248908996582, |
| "learning_rate": 4.000000000000001e-06, |
| "loss": 2.0044, |
| "step": 100 |
| }, |
| { |
| "epoch": 0.0, |
| "grad_norm": 7.198167324066162, |
| "learning_rate": 4.4e-06, |
| "loss": 1.8984, |
| "step": 110 |
| }, |
| { |
| "epoch": 0.0, |
| "grad_norm": 3.500101327896118, |
| "learning_rate": 4.800000000000001e-06, |
| "loss": 1.9501, |
| "step": 120 |
| }, |
| { |
| "epoch": 0.0, |
| "grad_norm": 7.465259552001953, |
| "learning_rate": 5.2e-06, |
| "loss": 1.8413, |
| "step": 130 |
| }, |
| { |
| "epoch": 0.0, |
| "grad_norm": 3.6927084922790527, |
| "learning_rate": 5.600000000000001e-06, |
| "loss": 2.0101, |
| "step": 140 |
| }, |
| { |
| "epoch": 0.0, |
| "grad_norm": 8.518953323364258, |
| "learning_rate": 6e-06, |
| "loss": 1.8778, |
| "step": 150 |
| }, |
| { |
| "epoch": 0.0, |
| "grad_norm": 3.7254889011383057, |
| "learning_rate": 6.4000000000000006e-06, |
| "loss": 1.8361, |
| "step": 160 |
| }, |
| { |
| "epoch": 0.0, |
| "grad_norm": 4.634269714355469, |
| "learning_rate": 6.800000000000001e-06, |
| "loss": 1.6214, |
| "step": 170 |
| }, |
| { |
| "epoch": 0.0, |
| "grad_norm": 8.053196907043457, |
| "learning_rate": 7.2000000000000005e-06, |
| "loss": 1.7552, |
| "step": 180 |
| }, |
| { |
| "epoch": 0.0, |
| "grad_norm": 3.407431125640869, |
| "learning_rate": 7.600000000000001e-06, |
| "loss": 1.8447, |
| "step": 190 |
| }, |
| { |
| "epoch": 0.01, |
| "grad_norm": 8.015592575073242, |
| "learning_rate": 8.000000000000001e-06, |
| "loss": 1.736, |
| "step": 200 |
| }, |
| { |
| "epoch": 0.01, |
| "grad_norm": 10.115213394165039, |
| "learning_rate": 8.400000000000001e-06, |
| "loss": 1.5497, |
| "step": 210 |
| }, |
| { |
| "epoch": 0.01, |
| "grad_norm": 6.565807819366455, |
| "learning_rate": 8.8e-06, |
| "loss": 1.6854, |
| "step": 220 |
| }, |
| { |
| "epoch": 0.01, |
| "grad_norm": 4.260842800140381, |
| "learning_rate": 9.200000000000002e-06, |
| "loss": 1.986, |
| "step": 230 |
| }, |
| { |
| "epoch": 0.01, |
| "grad_norm": 4.239616394042969, |
| "learning_rate": 9.600000000000001e-06, |
| "loss": 1.5284, |
| "step": 240 |
| }, |
| { |
| "epoch": 0.01, |
| "grad_norm": 3.048999786376953, |
| "learning_rate": 1e-05, |
| "loss": 1.7342, |
| "step": 250 |
| }, |
| { |
| "epoch": 0.01, |
| "grad_norm": 3.2346177101135254, |
| "learning_rate": 1.04e-05, |
| "loss": 1.8604, |
| "step": 260 |
| }, |
| { |
| "epoch": 0.01, |
| "grad_norm": 4.236307144165039, |
| "learning_rate": 1.0800000000000002e-05, |
| "loss": 1.6303, |
| "step": 270 |
| }, |
| { |
| "epoch": 0.01, |
| "grad_norm": 3.784558057785034, |
| "learning_rate": 1.1200000000000001e-05, |
| "loss": 1.6309, |
| "step": 280 |
| }, |
| { |
| "epoch": 0.01, |
| "grad_norm": 3.2512731552124023, |
| "learning_rate": 1.16e-05, |
| "loss": 1.8278, |
| "step": 290 |
| }, |
| { |
| "epoch": 0.01, |
| "grad_norm": 4.695291996002197, |
| "learning_rate": 1.2e-05, |
| "loss": 1.6175, |
| "step": 300 |
| }, |
| { |
| "epoch": 0.01, |
| "grad_norm": 5.554133892059326, |
| "learning_rate": 1.2400000000000002e-05, |
| "loss": 1.6657, |
| "step": 310 |
| }, |
| { |
| "epoch": 0.01, |
| "grad_norm": 24.56378936767578, |
| "learning_rate": 1.2800000000000001e-05, |
| "loss": 1.663, |
| "step": 320 |
| }, |
| { |
| "epoch": 0.01, |
| "grad_norm": 2.6168453693389893, |
| "learning_rate": 1.3200000000000002e-05, |
| "loss": 1.811, |
| "step": 330 |
| }, |
| { |
| "epoch": 0.01, |
| "grad_norm": 3.714534282684326, |
| "learning_rate": 1.3600000000000002e-05, |
| "loss": 1.7414, |
| "step": 340 |
| }, |
| { |
| "epoch": 0.01, |
| "grad_norm": 4.220114231109619, |
| "learning_rate": 1.4e-05, |
| "loss": 1.69, |
| "step": 350 |
| }, |
| { |
| "epoch": 0.01, |
| "grad_norm": 2.1215527057647705, |
| "learning_rate": 1.4400000000000001e-05, |
| "loss": 1.6113, |
| "step": 360 |
| }, |
| { |
| "epoch": 0.01, |
| "grad_norm": 2.8771586418151855, |
| "learning_rate": 1.48e-05, |
| "loss": 1.6141, |
| "step": 370 |
| }, |
| { |
| "epoch": 0.01, |
| "grad_norm": 5.036810398101807, |
| "learning_rate": 1.5200000000000002e-05, |
| "loss": 1.7124, |
| "step": 380 |
| }, |
| { |
| "epoch": 0.01, |
| "grad_norm": 1.9079569578170776, |
| "learning_rate": 1.5600000000000003e-05, |
| "loss": 1.629, |
| "step": 390 |
| }, |
| { |
| "epoch": 0.01, |
| "grad_norm": 9.102452278137207, |
| "learning_rate": 1.6000000000000003e-05, |
| "loss": 1.659, |
| "step": 400 |
| }, |
| { |
| "epoch": 0.01, |
| "grad_norm": 6.5280866622924805, |
| "learning_rate": 1.64e-05, |
| "loss": 1.6147, |
| "step": 410 |
| }, |
| { |
| "epoch": 0.01, |
| "grad_norm": 6.27349853515625, |
| "learning_rate": 1.6800000000000002e-05, |
| "loss": 1.5316, |
| "step": 420 |
| }, |
| { |
| "epoch": 0.01, |
| "grad_norm": 21.586790084838867, |
| "learning_rate": 1.72e-05, |
| "loss": 1.7693, |
| "step": 430 |
| }, |
| { |
| "epoch": 0.01, |
| "grad_norm": 3.198234796524048, |
| "learning_rate": 1.76e-05, |
| "loss": 1.465, |
| "step": 440 |
| }, |
| { |
| "epoch": 0.01, |
| "grad_norm": 5.132129669189453, |
| "learning_rate": 1.8e-05, |
| "loss": 1.6176, |
| "step": 450 |
| }, |
| { |
| "epoch": 0.01, |
| "grad_norm": 5.825719833374023, |
| "learning_rate": 1.8400000000000003e-05, |
| "loss": 1.6984, |
| "step": 460 |
| }, |
| { |
| "epoch": 0.01, |
| "grad_norm": 6.724545478820801, |
| "learning_rate": 1.88e-05, |
| "loss": 1.7011, |
| "step": 470 |
| }, |
| { |
| "epoch": 0.01, |
| "grad_norm": 3.723080635070801, |
| "learning_rate": 1.9200000000000003e-05, |
| "loss": 1.6338, |
| "step": 480 |
| }, |
| { |
| "epoch": 0.01, |
| "grad_norm": 2.8718888759613037, |
| "learning_rate": 1.9600000000000002e-05, |
| "loss": 1.5002, |
| "step": 490 |
| }, |
| { |
| "epoch": 0.01, |
| "grad_norm": 11.207817077636719, |
| "learning_rate": 2e-05, |
| "loss": 1.6829, |
| "step": 500 |
| }, |
| { |
| "epoch": 0.01, |
| "eval_loss": 1.6066228151321411, |
| "eval_runtime": 66.1997, |
| "eval_samples_per_second": 15.106, |
| "eval_steps_per_second": 15.106, |
| "step": 500 |
| }, |
| { |
| "epoch": 0.01, |
| "grad_norm": 5.459104537963867, |
| "learning_rate": 1.999322033898305e-05, |
| "loss": 1.6216, |
| "step": 510 |
| }, |
| { |
| "epoch": 0.01, |
| "grad_norm": 2.6112444400787354, |
| "learning_rate": 1.9986440677966104e-05, |
| "loss": 1.5761, |
| "step": 520 |
| }, |
| { |
| "epoch": 0.01, |
| "grad_norm": 4.285440921783447, |
| "learning_rate": 1.9979661016949154e-05, |
| "loss": 1.6097, |
| "step": 530 |
| }, |
| { |
| "epoch": 0.01, |
| "grad_norm": 2.781306505203247, |
| "learning_rate": 1.9972881355932204e-05, |
| "loss": 1.6967, |
| "step": 540 |
| }, |
| { |
| "epoch": 0.01, |
| "grad_norm": 7.55482292175293, |
| "learning_rate": 1.9966101694915257e-05, |
| "loss": 1.5974, |
| "step": 550 |
| }, |
| { |
| "epoch": 0.01, |
| "grad_norm": 5.083601474761963, |
| "learning_rate": 1.9959322033898307e-05, |
| "loss": 1.6287, |
| "step": 560 |
| }, |
| { |
| "epoch": 0.01, |
| "grad_norm": 3.7552945613861084, |
| "learning_rate": 1.995254237288136e-05, |
| "loss": 1.4225, |
| "step": 570 |
| }, |
| { |
| "epoch": 0.01, |
| "grad_norm": 3.149674892425537, |
| "learning_rate": 1.994576271186441e-05, |
| "loss": 1.5439, |
| "step": 580 |
| }, |
| { |
| "epoch": 0.01, |
| "grad_norm": 5.270935535430908, |
| "learning_rate": 1.993898305084746e-05, |
| "loss": 1.8439, |
| "step": 590 |
| }, |
| { |
| "epoch": 0.01, |
| "grad_norm": 2.552823543548584, |
| "learning_rate": 1.9932203389830512e-05, |
| "loss": 1.5678, |
| "step": 600 |
| }, |
| { |
| "epoch": 0.02, |
| "grad_norm": 2.6956074237823486, |
| "learning_rate": 1.992542372881356e-05, |
| "loss": 1.6002, |
| "step": 610 |
| }, |
| { |
| "epoch": 0.02, |
| "grad_norm": 3.0761303901672363, |
| "learning_rate": 1.991864406779661e-05, |
| "loss": 1.7211, |
| "step": 620 |
| }, |
| { |
| "epoch": 0.02, |
| "grad_norm": 3.1744227409362793, |
| "learning_rate": 1.991186440677966e-05, |
| "loss": 1.6327, |
| "step": 630 |
| }, |
| { |
| "epoch": 0.02, |
| "grad_norm": 4.774084091186523, |
| "learning_rate": 1.990508474576271e-05, |
| "loss": 1.6871, |
| "step": 640 |
| }, |
| { |
| "epoch": 0.02, |
| "grad_norm": 3.4767563343048096, |
| "learning_rate": 1.9898305084745764e-05, |
| "loss": 1.6038, |
| "step": 650 |
| }, |
| { |
| "epoch": 0.02, |
| "grad_norm": 2.3587417602539062, |
| "learning_rate": 1.9891525423728814e-05, |
| "loss": 1.8443, |
| "step": 660 |
| }, |
| { |
| "epoch": 0.02, |
| "grad_norm": 3.177602529525757, |
| "learning_rate": 1.9884745762711867e-05, |
| "loss": 1.5371, |
| "step": 670 |
| }, |
| { |
| "epoch": 0.02, |
| "grad_norm": 2.579742193222046, |
| "learning_rate": 1.9877966101694917e-05, |
| "loss": 1.4555, |
| "step": 680 |
| }, |
| { |
| "epoch": 0.02, |
| "grad_norm": 3.631253242492676, |
| "learning_rate": 1.9871186440677966e-05, |
| "loss": 1.6794, |
| "step": 690 |
| }, |
| { |
| "epoch": 0.02, |
| "grad_norm": 2.278071641921997, |
| "learning_rate": 1.986440677966102e-05, |
| "loss": 1.477, |
| "step": 700 |
| }, |
| { |
| "epoch": 0.02, |
| "grad_norm": 3.924114227294922, |
| "learning_rate": 1.985762711864407e-05, |
| "loss": 1.6933, |
| "step": 710 |
| }, |
| { |
| "epoch": 0.02, |
| "grad_norm": 3.5087924003601074, |
| "learning_rate": 1.985084745762712e-05, |
| "loss": 1.5713, |
| "step": 720 |
| }, |
| { |
| "epoch": 0.02, |
| "grad_norm": 2.0184319019317627, |
| "learning_rate": 1.9844067796610172e-05, |
| "loss": 1.6689, |
| "step": 730 |
| }, |
| { |
| "epoch": 0.02, |
| "grad_norm": 2.6397581100463867, |
| "learning_rate": 1.9837288135593222e-05, |
| "loss": 1.5636, |
| "step": 740 |
| }, |
| { |
| "epoch": 0.02, |
| "grad_norm": 3.12296986579895, |
| "learning_rate": 1.9830508474576275e-05, |
| "loss": 1.6867, |
| "step": 750 |
| }, |
| { |
| "epoch": 0.02, |
| "grad_norm": 2.04766583442688, |
| "learning_rate": 1.9823728813559324e-05, |
| "loss": 1.6027, |
| "step": 760 |
| }, |
| { |
| "epoch": 0.02, |
| "grad_norm": 1.8128042221069336, |
| "learning_rate": 1.9816949152542374e-05, |
| "loss": 1.6354, |
| "step": 770 |
| }, |
| { |
| "epoch": 0.02, |
| "grad_norm": 2.708834171295166, |
| "learning_rate": 1.9810169491525427e-05, |
| "loss": 1.5484, |
| "step": 780 |
| }, |
| { |
| "epoch": 0.02, |
| "grad_norm": 1.9796117544174194, |
| "learning_rate": 1.9803389830508477e-05, |
| "loss": 1.4511, |
| "step": 790 |
| }, |
| { |
| "epoch": 0.02, |
| "grad_norm": 4.195634365081787, |
| "learning_rate": 1.9796610169491527e-05, |
| "loss": 1.6435, |
| "step": 800 |
| }, |
| { |
| "epoch": 0.02, |
| "grad_norm": 3.132399320602417, |
| "learning_rate": 1.978983050847458e-05, |
| "loss": 1.5196, |
| "step": 810 |
| }, |
| { |
| "epoch": 0.02, |
| "grad_norm": 2.9280669689178467, |
| "learning_rate": 1.9783050847457626e-05, |
| "loss": 1.522, |
| "step": 820 |
| }, |
| { |
| "epoch": 0.02, |
| "grad_norm": 2.2013494968414307, |
| "learning_rate": 1.977627118644068e-05, |
| "loss": 1.6589, |
| "step": 830 |
| }, |
| { |
| "epoch": 0.02, |
| "grad_norm": 9.277434349060059, |
| "learning_rate": 1.976949152542373e-05, |
| "loss": 1.5828, |
| "step": 840 |
| }, |
| { |
| "epoch": 0.02, |
| "grad_norm": 6.09869384765625, |
| "learning_rate": 1.9762711864406782e-05, |
| "loss": 1.6683, |
| "step": 850 |
| }, |
| { |
| "epoch": 0.02, |
| "grad_norm": 1.7093573808670044, |
| "learning_rate": 1.9755932203389832e-05, |
| "loss": 1.6881, |
| "step": 860 |
| }, |
| { |
| "epoch": 0.02, |
| "grad_norm": 1.834920048713684, |
| "learning_rate": 1.974915254237288e-05, |
| "loss": 1.6169, |
| "step": 870 |
| }, |
| { |
| "epoch": 0.02, |
| "grad_norm": 3.802130699157715, |
| "learning_rate": 1.9742372881355935e-05, |
| "loss": 1.5526, |
| "step": 880 |
| }, |
| { |
| "epoch": 0.02, |
| "grad_norm": 2.086676836013794, |
| "learning_rate": 1.9735593220338984e-05, |
| "loss": 1.661, |
| "step": 890 |
| }, |
| { |
| "epoch": 0.02, |
| "grad_norm": 1.9825890064239502, |
| "learning_rate": 1.9728813559322034e-05, |
| "loss": 1.6302, |
| "step": 900 |
| }, |
| { |
| "epoch": 0.02, |
| "grad_norm": 1.9660803079605103, |
| "learning_rate": 1.9722033898305087e-05, |
| "loss": 1.6146, |
| "step": 910 |
| }, |
| { |
| "epoch": 0.02, |
| "grad_norm": 3.5572495460510254, |
| "learning_rate": 1.9715254237288137e-05, |
| "loss": 1.6143, |
| "step": 920 |
| }, |
| { |
| "epoch": 0.02, |
| "grad_norm": 5.440743923187256, |
| "learning_rate": 1.970847457627119e-05, |
| "loss": 1.4907, |
| "step": 930 |
| }, |
| { |
| "epoch": 0.02, |
| "grad_norm": 2.432176351547241, |
| "learning_rate": 1.970169491525424e-05, |
| "loss": 1.6272, |
| "step": 940 |
| }, |
| { |
| "epoch": 0.02, |
| "grad_norm": 1.2422198057174683, |
| "learning_rate": 1.969491525423729e-05, |
| "loss": 1.6404, |
| "step": 950 |
| }, |
| { |
| "epoch": 0.02, |
| "grad_norm": 3.0598487854003906, |
| "learning_rate": 1.9688135593220342e-05, |
| "loss": 1.5811, |
| "step": 960 |
| }, |
| { |
| "epoch": 0.02, |
| "grad_norm": 3.399864673614502, |
| "learning_rate": 1.9681355932203392e-05, |
| "loss": 1.6221, |
| "step": 970 |
| }, |
| { |
| "epoch": 0.02, |
| "grad_norm": 1.3376494646072388, |
| "learning_rate": 1.9674576271186442e-05, |
| "loss": 1.5384, |
| "step": 980 |
| }, |
| { |
| "epoch": 0.02, |
| "grad_norm": 4.062708854675293, |
| "learning_rate": 1.9667796610169495e-05, |
| "loss": 1.5448, |
| "step": 990 |
| }, |
| { |
| "epoch": 0.03, |
| "grad_norm": 4.200625419616699, |
| "learning_rate": 1.9661016949152545e-05, |
| "loss": 1.6656, |
| "step": 1000 |
| }, |
| { |
| "epoch": 0.03, |
| "eval_loss": 1.5288029909133911, |
| "eval_runtime": 66.2062, |
| "eval_samples_per_second": 15.104, |
| "eval_steps_per_second": 15.104, |
| "step": 1000 |
| }, |
| { |
| "epoch": 0.03, |
| "grad_norm": 4.217132091522217, |
| "learning_rate": 1.9654237288135594e-05, |
| "loss": 1.6834, |
| "step": 1010 |
| }, |
| { |
| "epoch": 0.03, |
| "grad_norm": 1.8246206045150757, |
| "learning_rate": 1.9647457627118644e-05, |
| "loss": 1.7242, |
| "step": 1020 |
| }, |
| { |
| "epoch": 0.03, |
| "grad_norm": 3.0907254219055176, |
| "learning_rate": 1.9640677966101697e-05, |
| "loss": 1.6493, |
| "step": 1030 |
| }, |
| { |
| "epoch": 0.03, |
| "grad_norm": 2.1235597133636475, |
| "learning_rate": 1.9633898305084747e-05, |
| "loss": 1.6628, |
| "step": 1040 |
| }, |
| { |
| "epoch": 0.03, |
| "grad_norm": 4.815682411193848, |
| "learning_rate": 1.9627118644067796e-05, |
| "loss": 1.5552, |
| "step": 1050 |
| }, |
| { |
| "epoch": 0.03, |
| "grad_norm": 2.7466652393341064, |
| "learning_rate": 1.962033898305085e-05, |
| "loss": 1.7091, |
| "step": 1060 |
| }, |
| { |
| "epoch": 0.03, |
| "grad_norm": 1.9031518697738647, |
| "learning_rate": 1.96135593220339e-05, |
| "loss": 1.4593, |
| "step": 1070 |
| }, |
| { |
| "epoch": 0.03, |
| "grad_norm": 7.788964748382568, |
| "learning_rate": 1.960677966101695e-05, |
| "loss": 1.7782, |
| "step": 1080 |
| }, |
| { |
| "epoch": 0.03, |
| "grad_norm": 3.2768681049346924, |
| "learning_rate": 1.9600000000000002e-05, |
| "loss": 1.6339, |
| "step": 1090 |
| }, |
| { |
| "epoch": 0.03, |
| "grad_norm": 2.7264978885650635, |
| "learning_rate": 1.9593220338983052e-05, |
| "loss": 1.7666, |
| "step": 1100 |
| }, |
| { |
| "epoch": 0.03, |
| "grad_norm": 4.865339279174805, |
| "learning_rate": 1.95864406779661e-05, |
| "loss": 1.7127, |
| "step": 1110 |
| }, |
| { |
| "epoch": 0.03, |
| "grad_norm": 2.9894134998321533, |
| "learning_rate": 1.9579661016949155e-05, |
| "loss": 1.5671, |
| "step": 1120 |
| }, |
| { |
| "epoch": 0.03, |
| "grad_norm": 3.2608160972595215, |
| "learning_rate": 1.9572881355932204e-05, |
| "loss": 1.5396, |
| "step": 1130 |
| }, |
| { |
| "epoch": 0.03, |
| "grad_norm": 2.3654749393463135, |
| "learning_rate": 1.9566101694915257e-05, |
| "loss": 1.514, |
| "step": 1140 |
| }, |
| { |
| "epoch": 0.03, |
| "grad_norm": 1.9910569190979004, |
| "learning_rate": 1.9559322033898307e-05, |
| "loss": 1.6519, |
| "step": 1150 |
| }, |
| { |
| "epoch": 0.03, |
| "grad_norm": 3.1228861808776855, |
| "learning_rate": 1.9552542372881357e-05, |
| "loss": 1.5683, |
| "step": 1160 |
| }, |
| { |
| "epoch": 0.03, |
| "grad_norm": 2.8690907955169678, |
| "learning_rate": 1.954576271186441e-05, |
| "loss": 1.5795, |
| "step": 1170 |
| }, |
| { |
| "epoch": 0.03, |
| "grad_norm": 7.847595691680908, |
| "learning_rate": 1.953898305084746e-05, |
| "loss": 1.4718, |
| "step": 1180 |
| }, |
| { |
| "epoch": 0.03, |
| "grad_norm": 3.4904606342315674, |
| "learning_rate": 1.953220338983051e-05, |
| "loss": 1.6105, |
| "step": 1190 |
| }, |
| { |
| "epoch": 0.03, |
| "grad_norm": 2.352919340133667, |
| "learning_rate": 1.9525423728813562e-05, |
| "loss": 1.6323, |
| "step": 1200 |
| }, |
| { |
| "epoch": 0.03, |
| "grad_norm": 1.9066511392593384, |
| "learning_rate": 1.9518644067796612e-05, |
| "loss": 1.5846, |
| "step": 1210 |
| }, |
| { |
| "epoch": 0.03, |
| "grad_norm": 2.2066802978515625, |
| "learning_rate": 1.9511864406779665e-05, |
| "loss": 1.4947, |
| "step": 1220 |
| }, |
| { |
| "epoch": 0.03, |
| "grad_norm": 2.038661003112793, |
| "learning_rate": 1.950508474576271e-05, |
| "loss": 1.5167, |
| "step": 1230 |
| }, |
| { |
| "epoch": 0.03, |
| "grad_norm": 4.544153690338135, |
| "learning_rate": 1.9498305084745765e-05, |
| "loss": 1.7429, |
| "step": 1240 |
| }, |
| { |
| "epoch": 0.03, |
| "grad_norm": 4.264445781707764, |
| "learning_rate": 1.9491525423728814e-05, |
| "loss": 1.7164, |
| "step": 1250 |
| }, |
| { |
| "epoch": 0.03, |
| "grad_norm": 7.955377101898193, |
| "learning_rate": 1.9484745762711864e-05, |
| "loss": 1.4268, |
| "step": 1260 |
| }, |
| { |
| "epoch": 0.03, |
| "grad_norm": 3.6983351707458496, |
| "learning_rate": 1.9477966101694917e-05, |
| "loss": 1.5497, |
| "step": 1270 |
| }, |
| { |
| "epoch": 0.03, |
| "grad_norm": 2.3593084812164307, |
| "learning_rate": 1.9471186440677967e-05, |
| "loss": 1.4209, |
| "step": 1280 |
| }, |
| { |
| "epoch": 0.03, |
| "grad_norm": 5.2454938888549805, |
| "learning_rate": 1.9464406779661017e-05, |
| "loss": 1.7169, |
| "step": 1290 |
| }, |
| { |
| "epoch": 0.03, |
| "grad_norm": 4.33056116104126, |
| "learning_rate": 1.945762711864407e-05, |
| "loss": 1.491, |
| "step": 1300 |
| }, |
| { |
| "epoch": 0.03, |
| "grad_norm": 3.095102071762085, |
| "learning_rate": 1.945084745762712e-05, |
| "loss": 1.5366, |
| "step": 1310 |
| }, |
| { |
| "epoch": 0.03, |
| "grad_norm": 1.7006287574768066, |
| "learning_rate": 1.9444067796610172e-05, |
| "loss": 1.5085, |
| "step": 1320 |
| }, |
| { |
| "epoch": 0.03, |
| "grad_norm": 2.643651008605957, |
| "learning_rate": 1.9437288135593222e-05, |
| "loss": 1.4597, |
| "step": 1330 |
| }, |
| { |
| "epoch": 0.03, |
| "grad_norm": 1.9585516452789307, |
| "learning_rate": 1.9430508474576272e-05, |
| "loss": 1.6975, |
| "step": 1340 |
| }, |
| { |
| "epoch": 0.03, |
| "grad_norm": 2.669740676879883, |
| "learning_rate": 1.9423728813559325e-05, |
| "loss": 1.4074, |
| "step": 1350 |
| }, |
| { |
| "epoch": 0.03, |
| "grad_norm": 3.0379438400268555, |
| "learning_rate": 1.9416949152542375e-05, |
| "loss": 1.5256, |
| "step": 1360 |
| }, |
| { |
| "epoch": 0.03, |
| "grad_norm": 2.81781005859375, |
| "learning_rate": 1.9410169491525424e-05, |
| "loss": 1.5329, |
| "step": 1370 |
| }, |
| { |
| "epoch": 0.03, |
| "grad_norm": 1.2218108177185059, |
| "learning_rate": 1.9403389830508477e-05, |
| "loss": 1.5283, |
| "step": 1380 |
| }, |
| { |
| "epoch": 0.03, |
| "grad_norm": 2.4585816860198975, |
| "learning_rate": 1.9396610169491527e-05, |
| "loss": 1.4265, |
| "step": 1390 |
| }, |
| { |
| "epoch": 0.04, |
| "grad_norm": 6.117985725402832, |
| "learning_rate": 1.938983050847458e-05, |
| "loss": 1.6968, |
| "step": 1400 |
| }, |
| { |
| "epoch": 0.04, |
| "grad_norm": 1.7194918394088745, |
| "learning_rate": 1.938305084745763e-05, |
| "loss": 1.5565, |
| "step": 1410 |
| }, |
| { |
| "epoch": 0.04, |
| "grad_norm": 3.65537691116333, |
| "learning_rate": 1.937627118644068e-05, |
| "loss": 1.7526, |
| "step": 1420 |
| }, |
| { |
| "epoch": 0.04, |
| "grad_norm": 3.0874905586242676, |
| "learning_rate": 1.9369491525423733e-05, |
| "loss": 1.4787, |
| "step": 1430 |
| }, |
| { |
| "epoch": 0.04, |
| "grad_norm": 3.945019006729126, |
| "learning_rate": 1.936271186440678e-05, |
| "loss": 1.5238, |
| "step": 1440 |
| }, |
| { |
| "epoch": 0.04, |
| "grad_norm": 2.7200796604156494, |
| "learning_rate": 1.9355932203389832e-05, |
| "loss": 1.4571, |
| "step": 1450 |
| }, |
| { |
| "epoch": 0.04, |
| "grad_norm": 8.368704795837402, |
| "learning_rate": 1.9349152542372882e-05, |
| "loss": 1.5916, |
| "step": 1460 |
| }, |
| { |
| "epoch": 0.04, |
| "grad_norm": 3.0204453468322754, |
| "learning_rate": 1.934237288135593e-05, |
| "loss": 1.6997, |
| "step": 1470 |
| }, |
| { |
| "epoch": 0.04, |
| "grad_norm": 2.3051364421844482, |
| "learning_rate": 1.9335593220338985e-05, |
| "loss": 1.5315, |
| "step": 1480 |
| }, |
| { |
| "epoch": 0.04, |
| "grad_norm": 1.246631145477295, |
| "learning_rate": 1.9328813559322034e-05, |
| "loss": 1.478, |
| "step": 1490 |
| }, |
| { |
| "epoch": 0.04, |
| "grad_norm": 1.8742200136184692, |
| "learning_rate": 1.9322033898305087e-05, |
| "loss": 1.5419, |
| "step": 1500 |
| }, |
| { |
| "epoch": 0.04, |
| "eval_loss": 1.5387661457061768, |
| "eval_runtime": 66.1849, |
| "eval_samples_per_second": 15.109, |
| "eval_steps_per_second": 15.109, |
| "step": 1500 |
| }, |
| { |
| "epoch": 0.04, |
| "grad_norm": 1.624708652496338, |
| "learning_rate": 1.9315254237288137e-05, |
| "loss": 1.5002, |
| "step": 1510 |
| }, |
| { |
| "epoch": 0.04, |
| "grad_norm": 5.5269856452941895, |
| "learning_rate": 1.9308474576271187e-05, |
| "loss": 1.6267, |
| "step": 1520 |
| }, |
| { |
| "epoch": 0.04, |
| "grad_norm": 1.879159688949585, |
| "learning_rate": 1.930169491525424e-05, |
| "loss": 1.4951, |
| "step": 1530 |
| }, |
| { |
| "epoch": 0.04, |
| "grad_norm": 4.727252006530762, |
| "learning_rate": 1.929491525423729e-05, |
| "loss": 1.5438, |
| "step": 1540 |
| }, |
| { |
| "epoch": 0.04, |
| "grad_norm": 1.9965201616287231, |
| "learning_rate": 1.928813559322034e-05, |
| "loss": 1.6525, |
| "step": 1550 |
| }, |
| { |
| "epoch": 0.04, |
| "grad_norm": 2.61816143989563, |
| "learning_rate": 1.9281355932203392e-05, |
| "loss": 1.4746, |
| "step": 1560 |
| }, |
| { |
| "epoch": 0.04, |
| "grad_norm": 2.8157131671905518, |
| "learning_rate": 1.9274576271186442e-05, |
| "loss": 1.5454, |
| "step": 1570 |
| }, |
| { |
| "epoch": 0.04, |
| "grad_norm": 2.738640546798706, |
| "learning_rate": 1.9267796610169492e-05, |
| "loss": 1.4876, |
| "step": 1580 |
| }, |
| { |
| "epoch": 0.04, |
| "grad_norm": 3.118277072906494, |
| "learning_rate": 1.9261016949152545e-05, |
| "loss": 1.5587, |
| "step": 1590 |
| }, |
| { |
| "epoch": 0.04, |
| "grad_norm": 6.730972766876221, |
| "learning_rate": 1.9254237288135595e-05, |
| "loss": 1.6752, |
| "step": 1600 |
| }, |
| { |
| "epoch": 0.04, |
| "grad_norm": 9.461782455444336, |
| "learning_rate": 1.9247457627118648e-05, |
| "loss": 1.5409, |
| "step": 1610 |
| }, |
| { |
| "epoch": 0.04, |
| "grad_norm": 4.566100120544434, |
| "learning_rate": 1.9240677966101698e-05, |
| "loss": 1.4724, |
| "step": 1620 |
| }, |
| { |
| "epoch": 0.04, |
| "grad_norm": 3.8972136974334717, |
| "learning_rate": 1.9233898305084747e-05, |
| "loss": 1.5706, |
| "step": 1630 |
| }, |
| { |
| "epoch": 0.04, |
| "grad_norm": 2.0403244495391846, |
| "learning_rate": 1.92271186440678e-05, |
| "loss": 1.6694, |
| "step": 1640 |
| }, |
| { |
| "epoch": 0.04, |
| "grad_norm": 3.0590033531188965, |
| "learning_rate": 1.9220338983050847e-05, |
| "loss": 1.5711, |
| "step": 1650 |
| }, |
| { |
| "epoch": 0.04, |
| "grad_norm": 0.9862588047981262, |
| "learning_rate": 1.92135593220339e-05, |
| "loss": 1.486, |
| "step": 1660 |
| }, |
| { |
| "epoch": 0.04, |
| "grad_norm": 2.9184350967407227, |
| "learning_rate": 1.920677966101695e-05, |
| "loss": 1.5526, |
| "step": 1670 |
| }, |
| { |
| "epoch": 0.04, |
| "grad_norm": 3.7024717330932617, |
| "learning_rate": 1.9200000000000003e-05, |
| "loss": 1.6754, |
| "step": 1680 |
| }, |
| { |
| "epoch": 0.04, |
| "grad_norm": 4.887876510620117, |
| "learning_rate": 1.9193220338983052e-05, |
| "loss": 1.6843, |
| "step": 1690 |
| }, |
| { |
| "epoch": 0.04, |
| "grad_norm": 2.6079001426696777, |
| "learning_rate": 1.9186440677966102e-05, |
| "loss": 1.5116, |
| "step": 1700 |
| }, |
| { |
| "epoch": 0.04, |
| "grad_norm": 3.407667875289917, |
| "learning_rate": 1.9179661016949155e-05, |
| "loss": 1.581, |
| "step": 1710 |
| }, |
| { |
| "epoch": 0.04, |
| "grad_norm": 2.201183795928955, |
| "learning_rate": 1.9172881355932205e-05, |
| "loss": 1.6449, |
| "step": 1720 |
| }, |
| { |
| "epoch": 0.04, |
| "grad_norm": 1.4738363027572632, |
| "learning_rate": 1.9166101694915254e-05, |
| "loss": 1.3959, |
| "step": 1730 |
| }, |
| { |
| "epoch": 0.04, |
| "grad_norm": 1.745976209640503, |
| "learning_rate": 1.9159322033898308e-05, |
| "loss": 1.394, |
| "step": 1740 |
| }, |
| { |
| "epoch": 0.04, |
| "grad_norm": 3.694169759750366, |
| "learning_rate": 1.9152542372881357e-05, |
| "loss": 1.6542, |
| "step": 1750 |
| }, |
| { |
| "epoch": 0.04, |
| "grad_norm": 2.3387959003448486, |
| "learning_rate": 1.9145762711864407e-05, |
| "loss": 1.6928, |
| "step": 1760 |
| }, |
| { |
| "epoch": 0.04, |
| "grad_norm": 2.8310043811798096, |
| "learning_rate": 1.913898305084746e-05, |
| "loss": 1.5955, |
| "step": 1770 |
| }, |
| { |
| "epoch": 0.04, |
| "grad_norm": 2.731762647628784, |
| "learning_rate": 1.913220338983051e-05, |
| "loss": 1.7997, |
| "step": 1780 |
| }, |
| { |
| "epoch": 0.04, |
| "grad_norm": 2.896686553955078, |
| "learning_rate": 1.9125423728813563e-05, |
| "loss": 1.5137, |
| "step": 1790 |
| }, |
| { |
| "epoch": 0.04, |
| "grad_norm": 1.8352593183517456, |
| "learning_rate": 1.9118644067796613e-05, |
| "loss": 1.5224, |
| "step": 1800 |
| }, |
| { |
| "epoch": 0.05, |
| "grad_norm": 4.474410057067871, |
| "learning_rate": 1.9111864406779662e-05, |
| "loss": 1.6659, |
| "step": 1810 |
| }, |
| { |
| "epoch": 0.05, |
| "grad_norm": 3.063828468322754, |
| "learning_rate": 1.9105084745762715e-05, |
| "loss": 1.4191, |
| "step": 1820 |
| }, |
| { |
| "epoch": 0.05, |
| "grad_norm": 1.9058163166046143, |
| "learning_rate": 1.9098305084745765e-05, |
| "loss": 1.5386, |
| "step": 1830 |
| }, |
| { |
| "epoch": 0.05, |
| "grad_norm": 2.915477991104126, |
| "learning_rate": 1.9091525423728815e-05, |
| "loss": 1.5756, |
| "step": 1840 |
| }, |
| { |
| "epoch": 0.05, |
| "grad_norm": 1.423071265220642, |
| "learning_rate": 1.9084745762711868e-05, |
| "loss": 1.5377, |
| "step": 1850 |
| }, |
| { |
| "epoch": 0.05, |
| "grad_norm": 2.5523242950439453, |
| "learning_rate": 1.9077966101694914e-05, |
| "loss": 1.5199, |
| "step": 1860 |
| }, |
| { |
| "epoch": 0.05, |
| "grad_norm": 2.609219789505005, |
| "learning_rate": 1.9071186440677967e-05, |
| "loss": 1.5402, |
| "step": 1870 |
| }, |
| { |
| "epoch": 0.05, |
| "grad_norm": 7.032884120941162, |
| "learning_rate": 1.9064406779661017e-05, |
| "loss": 1.4348, |
| "step": 1880 |
| }, |
| { |
| "epoch": 0.05, |
| "grad_norm": 3.192718029022217, |
| "learning_rate": 1.905762711864407e-05, |
| "loss": 1.5971, |
| "step": 1890 |
| }, |
| { |
| "epoch": 0.05, |
| "grad_norm": 2.0084311962127686, |
| "learning_rate": 1.905084745762712e-05, |
| "loss": 1.459, |
| "step": 1900 |
| }, |
| { |
| "epoch": 0.05, |
| "grad_norm": 1.646857738494873, |
| "learning_rate": 1.904406779661017e-05, |
| "loss": 1.507, |
| "step": 1910 |
| }, |
| { |
| "epoch": 0.05, |
| "grad_norm": 3.855058431625366, |
| "learning_rate": 1.9037288135593223e-05, |
| "loss": 1.5667, |
| "step": 1920 |
| }, |
| { |
| "epoch": 0.05, |
| "grad_norm": 1.520700216293335, |
| "learning_rate": 1.9030508474576272e-05, |
| "loss": 1.573, |
| "step": 1930 |
| }, |
| { |
| "epoch": 0.05, |
| "grad_norm": 2.6929845809936523, |
| "learning_rate": 1.9023728813559322e-05, |
| "loss": 1.4966, |
| "step": 1940 |
| }, |
| { |
| "epoch": 0.05, |
| "grad_norm": 2.283622980117798, |
| "learning_rate": 1.9016949152542375e-05, |
| "loss": 1.5018, |
| "step": 1950 |
| }, |
| { |
| "epoch": 0.05, |
| "grad_norm": 1.3686760663986206, |
| "learning_rate": 1.9010169491525425e-05, |
| "loss": 1.3841, |
| "step": 1960 |
| }, |
| { |
| "epoch": 0.05, |
| "grad_norm": 2.02280592918396, |
| "learning_rate": 1.9003389830508478e-05, |
| "loss": 1.3902, |
| "step": 1970 |
| }, |
| { |
| "epoch": 0.05, |
| "grad_norm": 2.312986135482788, |
| "learning_rate": 1.8996610169491528e-05, |
| "loss": 1.6016, |
| "step": 1980 |
| }, |
| { |
| "epoch": 0.05, |
| "grad_norm": 2.497074604034424, |
| "learning_rate": 1.8989830508474577e-05, |
| "loss": 1.6256, |
| "step": 1990 |
| }, |
| { |
| "epoch": 0.05, |
| "grad_norm": 8.351258277893066, |
| "learning_rate": 1.898305084745763e-05, |
| "loss": 1.5034, |
| "step": 2000 |
| }, |
| { |
| "epoch": 0.05, |
| "eval_loss": 1.555788516998291, |
| "eval_runtime": 66.1337, |
| "eval_samples_per_second": 15.121, |
| "eval_steps_per_second": 15.121, |
| "step": 2000 |
| }, |
| { |
| "epoch": 0.05, |
| "grad_norm": 10.036495208740234, |
| "learning_rate": 1.897627118644068e-05, |
| "loss": 1.6968, |
| "step": 2010 |
| }, |
| { |
| "epoch": 0.05, |
| "grad_norm": 5.859747886657715, |
| "learning_rate": 1.896949152542373e-05, |
| "loss": 1.655, |
| "step": 2020 |
| }, |
| { |
| "epoch": 0.05, |
| "grad_norm": 2.809723138809204, |
| "learning_rate": 1.8962711864406783e-05, |
| "loss": 1.5661, |
| "step": 2030 |
| }, |
| { |
| "epoch": 0.05, |
| "grad_norm": 3.2033305168151855, |
| "learning_rate": 1.8955932203389833e-05, |
| "loss": 1.5528, |
| "step": 2040 |
| }, |
| { |
| "epoch": 0.05, |
| "grad_norm": 2.821791887283325, |
| "learning_rate": 1.8949152542372882e-05, |
| "loss": 1.5215, |
| "step": 2050 |
| }, |
| { |
| "epoch": 0.05, |
| "grad_norm": 3.9140360355377197, |
| "learning_rate": 1.8942372881355932e-05, |
| "loss": 1.6541, |
| "step": 2060 |
| }, |
| { |
| "epoch": 0.05, |
| "grad_norm": 1.9218617677688599, |
| "learning_rate": 1.8935593220338985e-05, |
| "loss": 1.6119, |
| "step": 2070 |
| }, |
| { |
| "epoch": 0.05, |
| "grad_norm": 1.7994463443756104, |
| "learning_rate": 1.8928813559322035e-05, |
| "loss": 1.5222, |
| "step": 2080 |
| }, |
| { |
| "epoch": 0.05, |
| "grad_norm": 2.291020631790161, |
| "learning_rate": 1.8922033898305085e-05, |
| "loss": 1.5315, |
| "step": 2090 |
| }, |
| { |
| "epoch": 0.05, |
| "grad_norm": 2.737337589263916, |
| "learning_rate": 1.8915254237288138e-05, |
| "loss": 1.4785, |
| "step": 2100 |
| }, |
| { |
| "epoch": 0.05, |
| "grad_norm": 3.1229870319366455, |
| "learning_rate": 1.8908474576271187e-05, |
| "loss": 1.6181, |
| "step": 2110 |
| }, |
| { |
| "epoch": 0.05, |
| "grad_norm": 2.7481417655944824, |
| "learning_rate": 1.8901694915254237e-05, |
| "loss": 1.4123, |
| "step": 2120 |
| }, |
| { |
| "epoch": 0.05, |
| "grad_norm": 3.3576602935791016, |
| "learning_rate": 1.889491525423729e-05, |
| "loss": 1.5839, |
| "step": 2130 |
| }, |
| { |
| "epoch": 0.05, |
| "grad_norm": 3.292433261871338, |
| "learning_rate": 1.888813559322034e-05, |
| "loss": 1.5668, |
| "step": 2140 |
| }, |
| { |
| "epoch": 0.05, |
| "grad_norm": 5.039631366729736, |
| "learning_rate": 1.8881355932203393e-05, |
| "loss": 1.3932, |
| "step": 2150 |
| }, |
| { |
| "epoch": 0.05, |
| "grad_norm": 2.142812490463257, |
| "learning_rate": 1.8874576271186443e-05, |
| "loss": 1.6613, |
| "step": 2160 |
| }, |
| { |
| "epoch": 0.05, |
| "grad_norm": 1.2970080375671387, |
| "learning_rate": 1.8867796610169492e-05, |
| "loss": 1.4982, |
| "step": 2170 |
| }, |
| { |
| "epoch": 0.05, |
| "grad_norm": 7.887810230255127, |
| "learning_rate": 1.8861016949152545e-05, |
| "loss": 1.6085, |
| "step": 2180 |
| }, |
| { |
| "epoch": 0.05, |
| "grad_norm": 3.8233675956726074, |
| "learning_rate": 1.8854237288135595e-05, |
| "loss": 1.668, |
| "step": 2190 |
| }, |
| { |
| "epoch": 0.06, |
| "grad_norm": 2.357754707336426, |
| "learning_rate": 1.8847457627118645e-05, |
| "loss": 1.6456, |
| "step": 2200 |
| }, |
| { |
| "epoch": 0.06, |
| "grad_norm": 5.471128463745117, |
| "learning_rate": 1.8840677966101698e-05, |
| "loss": 1.5198, |
| "step": 2210 |
| }, |
| { |
| "epoch": 0.06, |
| "grad_norm": 1.850195050239563, |
| "learning_rate": 1.8833898305084748e-05, |
| "loss": 1.5557, |
| "step": 2220 |
| }, |
| { |
| "epoch": 0.06, |
| "grad_norm": 2.1556105613708496, |
| "learning_rate": 1.8827118644067797e-05, |
| "loss": 1.6411, |
| "step": 2230 |
| }, |
| { |
| "epoch": 0.06, |
| "grad_norm": 3.6570167541503906, |
| "learning_rate": 1.882033898305085e-05, |
| "loss": 1.474, |
| "step": 2240 |
| }, |
| { |
| "epoch": 0.06, |
| "grad_norm": 2.989947557449341, |
| "learning_rate": 1.88135593220339e-05, |
| "loss": 1.5413, |
| "step": 2250 |
| }, |
| { |
| "epoch": 0.06, |
| "grad_norm": 1.8367825746536255, |
| "learning_rate": 1.8806779661016953e-05, |
| "loss": 1.5744, |
| "step": 2260 |
| }, |
| { |
| "epoch": 0.06, |
| "grad_norm": 2.2120065689086914, |
| "learning_rate": 1.88e-05, |
| "loss": 1.5589, |
| "step": 2270 |
| }, |
| { |
| "epoch": 0.06, |
| "grad_norm": 3.29278302192688, |
| "learning_rate": 1.8793220338983053e-05, |
| "loss": 1.4468, |
| "step": 2280 |
| }, |
| { |
| "epoch": 0.06, |
| "grad_norm": 2.6164047718048096, |
| "learning_rate": 1.8786440677966102e-05, |
| "loss": 1.5143, |
| "step": 2290 |
| }, |
| { |
| "epoch": 0.06, |
| "grad_norm": 3.008776903152466, |
| "learning_rate": 1.8779661016949152e-05, |
| "loss": 1.5692, |
| "step": 2300 |
| }, |
| { |
| "epoch": 0.06, |
| "grad_norm": 2.0924556255340576, |
| "learning_rate": 1.8772881355932205e-05, |
| "loss": 1.4559, |
| "step": 2310 |
| }, |
| { |
| "epoch": 0.06, |
| "grad_norm": 2.1632914543151855, |
| "learning_rate": 1.8766101694915255e-05, |
| "loss": 1.5876, |
| "step": 2320 |
| }, |
| { |
| "epoch": 0.06, |
| "grad_norm": 2.367845058441162, |
| "learning_rate": 1.8759322033898305e-05, |
| "loss": 1.5093, |
| "step": 2330 |
| }, |
| { |
| "epoch": 0.06, |
| "grad_norm": 2.618950843811035, |
| "learning_rate": 1.8752542372881358e-05, |
| "loss": 1.4839, |
| "step": 2340 |
| }, |
| { |
| "epoch": 0.06, |
| "grad_norm": 2.7340502738952637, |
| "learning_rate": 1.8745762711864407e-05, |
| "loss": 1.7354, |
| "step": 2350 |
| }, |
| { |
| "epoch": 0.06, |
| "grad_norm": 3.4295051097869873, |
| "learning_rate": 1.873898305084746e-05, |
| "loss": 1.4937, |
| "step": 2360 |
| }, |
| { |
| "epoch": 0.06, |
| "grad_norm": 1.078732967376709, |
| "learning_rate": 1.873220338983051e-05, |
| "loss": 1.7125, |
| "step": 2370 |
| }, |
| { |
| "epoch": 0.06, |
| "grad_norm": 3.103182554244995, |
| "learning_rate": 1.872542372881356e-05, |
| "loss": 1.4019, |
| "step": 2380 |
| }, |
| { |
| "epoch": 0.06, |
| "grad_norm": 1.760866641998291, |
| "learning_rate": 1.8718644067796613e-05, |
| "loss": 1.4383, |
| "step": 2390 |
| }, |
| { |
| "epoch": 0.06, |
| "grad_norm": 1.4431618452072144, |
| "learning_rate": 1.8711864406779663e-05, |
| "loss": 1.5302, |
| "step": 2400 |
| }, |
| { |
| "epoch": 0.06, |
| "grad_norm": 3.5087249279022217, |
| "learning_rate": 1.8705084745762712e-05, |
| "loss": 1.5693, |
| "step": 2410 |
| }, |
| { |
| "epoch": 0.06, |
| "grad_norm": 3.648737668991089, |
| "learning_rate": 1.8698305084745765e-05, |
| "loss": 1.2027, |
| "step": 2420 |
| }, |
| { |
| "epoch": 0.06, |
| "grad_norm": 4.0298357009887695, |
| "learning_rate": 1.8691525423728815e-05, |
| "loss": 1.4772, |
| "step": 2430 |
| }, |
| { |
| "epoch": 0.06, |
| "grad_norm": 4.11863899230957, |
| "learning_rate": 1.8684745762711868e-05, |
| "loss": 1.402, |
| "step": 2440 |
| }, |
| { |
| "epoch": 0.06, |
| "grad_norm": 1.1432223320007324, |
| "learning_rate": 1.8677966101694918e-05, |
| "loss": 1.5907, |
| "step": 2450 |
| }, |
| { |
| "epoch": 0.06, |
| "grad_norm": 2.0923776626586914, |
| "learning_rate": 1.8671186440677968e-05, |
| "loss": 1.569, |
| "step": 2460 |
| }, |
| { |
| "epoch": 0.06, |
| "grad_norm": 1.987396001815796, |
| "learning_rate": 1.866440677966102e-05, |
| "loss": 1.6504, |
| "step": 2470 |
| }, |
| { |
| "epoch": 0.06, |
| "grad_norm": 3.959685802459717, |
| "learning_rate": 1.8657627118644067e-05, |
| "loss": 1.4029, |
| "step": 2480 |
| }, |
| { |
| "epoch": 0.06, |
| "grad_norm": 3.1348440647125244, |
| "learning_rate": 1.865084745762712e-05, |
| "loss": 1.5063, |
| "step": 2490 |
| }, |
| { |
| "epoch": 0.06, |
| "grad_norm": 2.788222074508667, |
| "learning_rate": 1.864406779661017e-05, |
| "loss": 1.5971, |
| "step": 2500 |
| }, |
| { |
| "epoch": 0.06, |
| "eval_loss": 1.5344786643981934, |
| "eval_runtime": 66.1387, |
| "eval_samples_per_second": 15.12, |
| "eval_steps_per_second": 15.12, |
| "step": 2500 |
| }, |
| { |
| "epoch": 0.06, |
| "grad_norm": 1.9007914066314697, |
| "learning_rate": 1.863728813559322e-05, |
| "loss": 1.5292, |
| "step": 2510 |
| }, |
| { |
| "epoch": 0.06, |
| "grad_norm": 2.1382060050964355, |
| "learning_rate": 1.8630508474576273e-05, |
| "loss": 1.516, |
| "step": 2520 |
| }, |
| { |
| "epoch": 0.06, |
| "grad_norm": 0.914665162563324, |
| "learning_rate": 1.8623728813559322e-05, |
| "loss": 1.3994, |
| "step": 2530 |
| }, |
| { |
| "epoch": 0.06, |
| "grad_norm": 4.413509368896484, |
| "learning_rate": 1.8616949152542376e-05, |
| "loss": 1.5696, |
| "step": 2540 |
| }, |
| { |
| "epoch": 0.06, |
| "grad_norm": 1.9687520265579224, |
| "learning_rate": 1.8610169491525425e-05, |
| "loss": 1.6693, |
| "step": 2550 |
| }, |
| { |
| "epoch": 0.06, |
| "grad_norm": 3.4700605869293213, |
| "learning_rate": 1.8603389830508475e-05, |
| "loss": 1.6217, |
| "step": 2560 |
| }, |
| { |
| "epoch": 0.06, |
| "grad_norm": 2.2257847785949707, |
| "learning_rate": 1.8596610169491528e-05, |
| "loss": 1.65, |
| "step": 2570 |
| }, |
| { |
| "epoch": 0.06, |
| "grad_norm": 15.683956146240234, |
| "learning_rate": 1.8589830508474578e-05, |
| "loss": 1.5663, |
| "step": 2580 |
| }, |
| { |
| "epoch": 0.06, |
| "grad_norm": 2.2584922313690186, |
| "learning_rate": 1.8583050847457627e-05, |
| "loss": 1.5146, |
| "step": 2590 |
| }, |
| { |
| "epoch": 0.07, |
| "grad_norm": 1.900083065032959, |
| "learning_rate": 1.857627118644068e-05, |
| "loss": 1.5062, |
| "step": 2600 |
| }, |
| { |
| "epoch": 0.07, |
| "grad_norm": 2.6166887283325195, |
| "learning_rate": 1.856949152542373e-05, |
| "loss": 1.3145, |
| "step": 2610 |
| }, |
| { |
| "epoch": 0.07, |
| "grad_norm": 0.9894976019859314, |
| "learning_rate": 1.856271186440678e-05, |
| "loss": 1.6037, |
| "step": 2620 |
| }, |
| { |
| "epoch": 0.07, |
| "grad_norm": 5.897536277770996, |
| "learning_rate": 1.8555932203389833e-05, |
| "loss": 1.5846, |
| "step": 2630 |
| }, |
| { |
| "epoch": 0.07, |
| "grad_norm": 3.8714261054992676, |
| "learning_rate": 1.8549152542372883e-05, |
| "loss": 1.5008, |
| "step": 2640 |
| }, |
| { |
| "epoch": 0.07, |
| "grad_norm": 3.062786817550659, |
| "learning_rate": 1.8542372881355936e-05, |
| "loss": 1.5885, |
| "step": 2650 |
| }, |
| { |
| "epoch": 0.07, |
| "grad_norm": 4.548590660095215, |
| "learning_rate": 1.8535593220338986e-05, |
| "loss": 1.4718, |
| "step": 2660 |
| }, |
| { |
| "epoch": 0.07, |
| "grad_norm": 1.9573743343353271, |
| "learning_rate": 1.8528813559322035e-05, |
| "loss": 1.5926, |
| "step": 2670 |
| }, |
| { |
| "epoch": 0.07, |
| "grad_norm": 1.7771761417388916, |
| "learning_rate": 1.852203389830509e-05, |
| "loss": 1.5136, |
| "step": 2680 |
| }, |
| { |
| "epoch": 0.07, |
| "grad_norm": 3.312812328338623, |
| "learning_rate": 1.8515254237288135e-05, |
| "loss": 1.4674, |
| "step": 2690 |
| }, |
| { |
| "epoch": 0.07, |
| "grad_norm": 2.288461685180664, |
| "learning_rate": 1.8508474576271188e-05, |
| "loss": 1.576, |
| "step": 2700 |
| }, |
| { |
| "epoch": 0.07, |
| "grad_norm": 3.608473062515259, |
| "learning_rate": 1.8501694915254237e-05, |
| "loss": 1.5574, |
| "step": 2710 |
| }, |
| { |
| "epoch": 0.07, |
| "grad_norm": 2.6018731594085693, |
| "learning_rate": 1.849491525423729e-05, |
| "loss": 1.473, |
| "step": 2720 |
| }, |
| { |
| "epoch": 0.07, |
| "grad_norm": 2.2333261966705322, |
| "learning_rate": 1.848813559322034e-05, |
| "loss": 1.4562, |
| "step": 2730 |
| }, |
| { |
| "epoch": 0.07, |
| "grad_norm": 6.693967342376709, |
| "learning_rate": 1.848135593220339e-05, |
| "loss": 1.433, |
| "step": 2740 |
| }, |
| { |
| "epoch": 0.07, |
| "grad_norm": 2.6075336933135986, |
| "learning_rate": 1.8474576271186443e-05, |
| "loss": 1.2995, |
| "step": 2750 |
| }, |
| { |
| "epoch": 0.07, |
| "grad_norm": 2.2900924682617188, |
| "learning_rate": 1.8467796610169493e-05, |
| "loss": 1.4313, |
| "step": 2760 |
| }, |
| { |
| "epoch": 0.07, |
| "grad_norm": 5.270084381103516, |
| "learning_rate": 1.8461016949152542e-05, |
| "loss": 1.4531, |
| "step": 2770 |
| }, |
| { |
| "epoch": 0.07, |
| "grad_norm": 2.3377845287323, |
| "learning_rate": 1.8454237288135596e-05, |
| "loss": 1.4393, |
| "step": 2780 |
| }, |
| { |
| "epoch": 0.07, |
| "grad_norm": 1.9369862079620361, |
| "learning_rate": 1.8447457627118645e-05, |
| "loss": 1.5269, |
| "step": 2790 |
| }, |
| { |
| "epoch": 0.07, |
| "grad_norm": 3.107175350189209, |
| "learning_rate": 1.8440677966101695e-05, |
| "loss": 1.4581, |
| "step": 2800 |
| }, |
| { |
| "epoch": 0.07, |
| "grad_norm": 2.7586755752563477, |
| "learning_rate": 1.8433898305084748e-05, |
| "loss": 1.4577, |
| "step": 2810 |
| }, |
| { |
| "epoch": 0.07, |
| "grad_norm": 1.3124182224273682, |
| "learning_rate": 1.8427118644067798e-05, |
| "loss": 1.6085, |
| "step": 2820 |
| }, |
| { |
| "epoch": 0.07, |
| "grad_norm": 1.8919298648834229, |
| "learning_rate": 1.842033898305085e-05, |
| "loss": 1.3827, |
| "step": 2830 |
| }, |
| { |
| "epoch": 0.07, |
| "grad_norm": 2.49289870262146, |
| "learning_rate": 1.84135593220339e-05, |
| "loss": 1.4628, |
| "step": 2840 |
| }, |
| { |
| "epoch": 0.07, |
| "grad_norm": 2.832827568054199, |
| "learning_rate": 1.840677966101695e-05, |
| "loss": 1.4701, |
| "step": 2850 |
| }, |
| { |
| "epoch": 0.07, |
| "grad_norm": 1.2280476093292236, |
| "learning_rate": 1.8400000000000003e-05, |
| "loss": 1.5576, |
| "step": 2860 |
| }, |
| { |
| "epoch": 0.07, |
| "grad_norm": 2.703963279724121, |
| "learning_rate": 1.8393220338983053e-05, |
| "loss": 1.3686, |
| "step": 2870 |
| }, |
| { |
| "epoch": 0.07, |
| "grad_norm": 1.554801106452942, |
| "learning_rate": 1.8386440677966103e-05, |
| "loss": 1.5867, |
| "step": 2880 |
| }, |
| { |
| "epoch": 0.07, |
| "grad_norm": 1.4219539165496826, |
| "learning_rate": 1.8379661016949153e-05, |
| "loss": 1.696, |
| "step": 2890 |
| }, |
| { |
| "epoch": 0.07, |
| "grad_norm": 7.199713706970215, |
| "learning_rate": 1.8372881355932202e-05, |
| "loss": 1.5041, |
| "step": 2900 |
| }, |
| { |
| "epoch": 0.07, |
| "grad_norm": 2.7661526203155518, |
| "learning_rate": 1.8366101694915255e-05, |
| "loss": 1.4472, |
| "step": 2910 |
| }, |
| { |
| "epoch": 0.07, |
| "grad_norm": 6.59897518157959, |
| "learning_rate": 1.8359322033898305e-05, |
| "loss": 1.4166, |
| "step": 2920 |
| }, |
| { |
| "epoch": 0.07, |
| "grad_norm": 1.927669882774353, |
| "learning_rate": 1.8352542372881358e-05, |
| "loss": 1.4674, |
| "step": 2930 |
| }, |
| { |
| "epoch": 0.07, |
| "grad_norm": 1.0190672874450684, |
| "learning_rate": 1.8345762711864408e-05, |
| "loss": 1.4591, |
| "step": 2940 |
| }, |
| { |
| "epoch": 0.07, |
| "grad_norm": 3.63718843460083, |
| "learning_rate": 1.8338983050847458e-05, |
| "loss": 1.7469, |
| "step": 2950 |
| }, |
| { |
| "epoch": 0.07, |
| "grad_norm": 1.5168763399124146, |
| "learning_rate": 1.833220338983051e-05, |
| "loss": 1.5691, |
| "step": 2960 |
| }, |
| { |
| "epoch": 0.07, |
| "grad_norm": 1.323123574256897, |
| "learning_rate": 1.832542372881356e-05, |
| "loss": 1.385, |
| "step": 2970 |
| }, |
| { |
| "epoch": 0.07, |
| "grad_norm": 3.590506076812744, |
| "learning_rate": 1.831864406779661e-05, |
| "loss": 1.429, |
| "step": 2980 |
| }, |
| { |
| "epoch": 0.07, |
| "grad_norm": 4.562034606933594, |
| "learning_rate": 1.8311864406779663e-05, |
| "loss": 1.4169, |
| "step": 2990 |
| }, |
| { |
| "epoch": 0.07, |
| "grad_norm": 3.3229966163635254, |
| "learning_rate": 1.8305084745762713e-05, |
| "loss": 1.5123, |
| "step": 3000 |
| }, |
| { |
| "epoch": 0.07, |
| "eval_loss": 1.5320124626159668, |
| "eval_runtime": 66.1653, |
| "eval_samples_per_second": 15.114, |
| "eval_steps_per_second": 15.114, |
| "step": 3000 |
| }, |
| { |
| "epoch": 0.08, |
| "grad_norm": 3.8690502643585205, |
| "learning_rate": 1.8298305084745766e-05, |
| "loss": 1.2614, |
| "step": 3010 |
| }, |
| { |
| "epoch": 0.08, |
| "grad_norm": 2.8421173095703125, |
| "learning_rate": 1.8291525423728816e-05, |
| "loss": 1.3572, |
| "step": 3020 |
| }, |
| { |
| "epoch": 0.08, |
| "grad_norm": 3.3784098625183105, |
| "learning_rate": 1.8284745762711865e-05, |
| "loss": 1.4465, |
| "step": 3030 |
| }, |
| { |
| "epoch": 0.08, |
| "grad_norm": 3.611422061920166, |
| "learning_rate": 1.827796610169492e-05, |
| "loss": 1.4531, |
| "step": 3040 |
| }, |
| { |
| "epoch": 0.08, |
| "grad_norm": 2.3969950675964355, |
| "learning_rate": 1.8271186440677968e-05, |
| "loss": 1.3137, |
| "step": 3050 |
| }, |
| { |
| "epoch": 0.08, |
| "grad_norm": 4.92153263092041, |
| "learning_rate": 1.8264406779661018e-05, |
| "loss": 1.6209, |
| "step": 3060 |
| }, |
| { |
| "epoch": 0.08, |
| "grad_norm": 3.952179431915283, |
| "learning_rate": 1.825762711864407e-05, |
| "loss": 1.4923, |
| "step": 3070 |
| }, |
| { |
| "epoch": 0.08, |
| "grad_norm": 4.00267219543457, |
| "learning_rate": 1.825084745762712e-05, |
| "loss": 1.5688, |
| "step": 3080 |
| }, |
| { |
| "epoch": 0.08, |
| "grad_norm": 2.1664321422576904, |
| "learning_rate": 1.824406779661017e-05, |
| "loss": 1.4545, |
| "step": 3090 |
| }, |
| { |
| "epoch": 0.08, |
| "grad_norm": 3.1541237831115723, |
| "learning_rate": 1.823728813559322e-05, |
| "loss": 1.6623, |
| "step": 3100 |
| }, |
| { |
| "epoch": 0.08, |
| "grad_norm": 2.577681064605713, |
| "learning_rate": 1.8230508474576273e-05, |
| "loss": 1.6301, |
| "step": 3110 |
| }, |
| { |
| "epoch": 0.08, |
| "grad_norm": 2.2123563289642334, |
| "learning_rate": 1.8223728813559323e-05, |
| "loss": 1.5614, |
| "step": 3120 |
| }, |
| { |
| "epoch": 0.08, |
| "grad_norm": 1.7630640268325806, |
| "learning_rate": 1.8216949152542373e-05, |
| "loss": 1.5482, |
| "step": 3130 |
| }, |
| { |
| "epoch": 0.08, |
| "grad_norm": 1.5110881328582764, |
| "learning_rate": 1.8210169491525426e-05, |
| "loss": 1.4324, |
| "step": 3140 |
| }, |
| { |
| "epoch": 0.08, |
| "grad_norm": 2.9992825984954834, |
| "learning_rate": 1.8203389830508475e-05, |
| "loss": 1.5376, |
| "step": 3150 |
| }, |
| { |
| "epoch": 0.08, |
| "grad_norm": 2.825230836868286, |
| "learning_rate": 1.8196610169491525e-05, |
| "loss": 1.3925, |
| "step": 3160 |
| }, |
| { |
| "epoch": 0.08, |
| "grad_norm": 5.502832412719727, |
| "learning_rate": 1.8189830508474578e-05, |
| "loss": 1.5082, |
| "step": 3170 |
| }, |
| { |
| "epoch": 0.08, |
| "grad_norm": 1.258467197418213, |
| "learning_rate": 1.8183050847457628e-05, |
| "loss": 1.512, |
| "step": 3180 |
| }, |
| { |
| "epoch": 0.08, |
| "grad_norm": 6.260064601898193, |
| "learning_rate": 1.817627118644068e-05, |
| "loss": 1.5949, |
| "step": 3190 |
| }, |
| { |
| "epoch": 0.08, |
| "grad_norm": 1.3906792402267456, |
| "learning_rate": 1.816949152542373e-05, |
| "loss": 1.4887, |
| "step": 3200 |
| }, |
| { |
| "epoch": 0.08, |
| "grad_norm": 6.833672523498535, |
| "learning_rate": 1.816271186440678e-05, |
| "loss": 1.6286, |
| "step": 3210 |
| }, |
| { |
| "epoch": 0.08, |
| "grad_norm": 7.004729747772217, |
| "learning_rate": 1.8155932203389833e-05, |
| "loss": 1.6974, |
| "step": 3220 |
| }, |
| { |
| "epoch": 0.08, |
| "grad_norm": 3.688082695007324, |
| "learning_rate": 1.8149152542372883e-05, |
| "loss": 1.577, |
| "step": 3230 |
| }, |
| { |
| "epoch": 0.08, |
| "grad_norm": 1.2561973333358765, |
| "learning_rate": 1.8142372881355933e-05, |
| "loss": 1.5154, |
| "step": 3240 |
| }, |
| { |
| "epoch": 0.08, |
| "grad_norm": 2.1860358715057373, |
| "learning_rate": 1.8135593220338986e-05, |
| "loss": 1.5281, |
| "step": 3250 |
| }, |
| { |
| "epoch": 0.08, |
| "grad_norm": 1.4622399806976318, |
| "learning_rate": 1.8128813559322036e-05, |
| "loss": 1.4103, |
| "step": 3260 |
| }, |
| { |
| "epoch": 0.08, |
| "grad_norm": 3.036226511001587, |
| "learning_rate": 1.8122033898305085e-05, |
| "loss": 1.4003, |
| "step": 3270 |
| }, |
| { |
| "epoch": 0.08, |
| "grad_norm": 4.228930473327637, |
| "learning_rate": 1.811525423728814e-05, |
| "loss": 1.6889, |
| "step": 3280 |
| }, |
| { |
| "epoch": 0.08, |
| "grad_norm": 4.7660322189331055, |
| "learning_rate": 1.8108474576271188e-05, |
| "loss": 1.5185, |
| "step": 3290 |
| }, |
| { |
| "epoch": 0.08, |
| "grad_norm": 2.857886552810669, |
| "learning_rate": 1.810169491525424e-05, |
| "loss": 1.4166, |
| "step": 3300 |
| }, |
| { |
| "epoch": 0.08, |
| "grad_norm": 2.357570171356201, |
| "learning_rate": 1.8094915254237288e-05, |
| "loss": 1.6341, |
| "step": 3310 |
| }, |
| { |
| "epoch": 0.08, |
| "grad_norm": 2.1648855209350586, |
| "learning_rate": 1.808813559322034e-05, |
| "loss": 1.6001, |
| "step": 3320 |
| }, |
| { |
| "epoch": 0.08, |
| "grad_norm": 2.5360536575317383, |
| "learning_rate": 1.808135593220339e-05, |
| "loss": 1.5059, |
| "step": 3330 |
| }, |
| { |
| "epoch": 0.08, |
| "grad_norm": 3.741981267929077, |
| "learning_rate": 1.807457627118644e-05, |
| "loss": 1.5082, |
| "step": 3340 |
| }, |
| { |
| "epoch": 0.08, |
| "grad_norm": 3.4550766944885254, |
| "learning_rate": 1.8067796610169493e-05, |
| "loss": 1.6378, |
| "step": 3350 |
| }, |
| { |
| "epoch": 0.08, |
| "grad_norm": 1.9454740285873413, |
| "learning_rate": 1.8061016949152543e-05, |
| "loss": 1.6602, |
| "step": 3360 |
| }, |
| { |
| "epoch": 0.08, |
| "grad_norm": 3.708360195159912, |
| "learning_rate": 1.8054237288135593e-05, |
| "loss": 1.4584, |
| "step": 3370 |
| }, |
| { |
| "epoch": 0.08, |
| "grad_norm": 2.4115514755249023, |
| "learning_rate": 1.8047457627118646e-05, |
| "loss": 1.4053, |
| "step": 3380 |
| }, |
| { |
| "epoch": 0.08, |
| "grad_norm": 2.075867176055908, |
| "learning_rate": 1.8040677966101695e-05, |
| "loss": 1.46, |
| "step": 3390 |
| }, |
| { |
| "epoch": 0.09, |
| "grad_norm": 3.1672651767730713, |
| "learning_rate": 1.803389830508475e-05, |
| "loss": 1.4964, |
| "step": 3400 |
| }, |
| { |
| "epoch": 0.09, |
| "grad_norm": 4.5812578201293945, |
| "learning_rate": 1.8027118644067798e-05, |
| "loss": 1.4005, |
| "step": 3410 |
| }, |
| { |
| "epoch": 0.09, |
| "grad_norm": 2.583771228790283, |
| "learning_rate": 1.8020338983050848e-05, |
| "loss": 1.6024, |
| "step": 3420 |
| }, |
| { |
| "epoch": 0.09, |
| "grad_norm": 4.464291095733643, |
| "learning_rate": 1.80135593220339e-05, |
| "loss": 1.5075, |
| "step": 3430 |
| }, |
| { |
| "epoch": 0.09, |
| "grad_norm": 2.609657049179077, |
| "learning_rate": 1.800677966101695e-05, |
| "loss": 1.4527, |
| "step": 3440 |
| }, |
| { |
| "epoch": 0.09, |
| "grad_norm": 1.844227910041809, |
| "learning_rate": 1.8e-05, |
| "loss": 1.471, |
| "step": 3450 |
| }, |
| { |
| "epoch": 0.09, |
| "grad_norm": 3.5781702995300293, |
| "learning_rate": 1.7993220338983054e-05, |
| "loss": 1.5221, |
| "step": 3460 |
| }, |
| { |
| "epoch": 0.09, |
| "grad_norm": 1.8293837308883667, |
| "learning_rate": 1.7986440677966103e-05, |
| "loss": 1.4831, |
| "step": 3470 |
| }, |
| { |
| "epoch": 0.09, |
| "grad_norm": 5.273003101348877, |
| "learning_rate": 1.7979661016949156e-05, |
| "loss": 1.5131, |
| "step": 3480 |
| }, |
| { |
| "epoch": 0.09, |
| "grad_norm": 3.1019296646118164, |
| "learning_rate": 1.7972881355932206e-05, |
| "loss": 1.4346, |
| "step": 3490 |
| }, |
| { |
| "epoch": 0.09, |
| "grad_norm": 3.419081687927246, |
| "learning_rate": 1.7966101694915256e-05, |
| "loss": 1.496, |
| "step": 3500 |
| }, |
| { |
| "epoch": 0.09, |
| "eval_loss": 1.5301283597946167, |
| "eval_runtime": 66.1385, |
| "eval_samples_per_second": 15.12, |
| "eval_steps_per_second": 15.12, |
| "step": 3500 |
| }, |
| { |
| "epoch": 0.09, |
| "grad_norm": 4.49799919128418, |
| "learning_rate": 1.795932203389831e-05, |
| "loss": 1.5304, |
| "step": 3510 |
| }, |
| { |
| "epoch": 0.09, |
| "grad_norm": 3.267191171646118, |
| "learning_rate": 1.7952542372881355e-05, |
| "loss": 1.5103, |
| "step": 3520 |
| }, |
| { |
| "epoch": 0.09, |
| "grad_norm": 3.4655425548553467, |
| "learning_rate": 1.7945762711864408e-05, |
| "loss": 1.3131, |
| "step": 3530 |
| }, |
| { |
| "epoch": 0.09, |
| "grad_norm": 2.289358615875244, |
| "learning_rate": 1.7938983050847458e-05, |
| "loss": 1.2731, |
| "step": 3540 |
| }, |
| { |
| "epoch": 0.09, |
| "grad_norm": 1.528286337852478, |
| "learning_rate": 1.7932203389830508e-05, |
| "loss": 1.6249, |
| "step": 3550 |
| }, |
| { |
| "epoch": 0.09, |
| "grad_norm": 1.5927760601043701, |
| "learning_rate": 1.792542372881356e-05, |
| "loss": 1.5177, |
| "step": 3560 |
| }, |
| { |
| "epoch": 0.09, |
| "grad_norm": 1.5520817041397095, |
| "learning_rate": 1.791864406779661e-05, |
| "loss": 1.4523, |
| "step": 3570 |
| }, |
| { |
| "epoch": 0.09, |
| "grad_norm": 3.531754970550537, |
| "learning_rate": 1.7911864406779664e-05, |
| "loss": 1.5342, |
| "step": 3580 |
| }, |
| { |
| "epoch": 0.09, |
| "grad_norm": 2.6139304637908936, |
| "learning_rate": 1.7905084745762713e-05, |
| "loss": 1.3377, |
| "step": 3590 |
| }, |
| { |
| "epoch": 0.09, |
| "grad_norm": 5.242190837860107, |
| "learning_rate": 1.7898305084745763e-05, |
| "loss": 1.592, |
| "step": 3600 |
| }, |
| { |
| "epoch": 0.09, |
| "grad_norm": 3.10337495803833, |
| "learning_rate": 1.7891525423728816e-05, |
| "loss": 1.4516, |
| "step": 3610 |
| }, |
| { |
| "epoch": 0.09, |
| "grad_norm": 2.615224599838257, |
| "learning_rate": 1.7884745762711866e-05, |
| "loss": 1.4325, |
| "step": 3620 |
| }, |
| { |
| "epoch": 0.09, |
| "grad_norm": 5.353131294250488, |
| "learning_rate": 1.7877966101694916e-05, |
| "loss": 1.5772, |
| "step": 3630 |
| }, |
| { |
| "epoch": 0.09, |
| "grad_norm": 1.1678059101104736, |
| "learning_rate": 1.787118644067797e-05, |
| "loss": 1.572, |
| "step": 3640 |
| }, |
| { |
| "epoch": 0.09, |
| "grad_norm": 3.2298007011413574, |
| "learning_rate": 1.7864406779661018e-05, |
| "loss": 1.5214, |
| "step": 3650 |
| }, |
| { |
| "epoch": 0.09, |
| "grad_norm": 3.405092477798462, |
| "learning_rate": 1.785762711864407e-05, |
| "loss": 1.4677, |
| "step": 3660 |
| }, |
| { |
| "epoch": 0.09, |
| "grad_norm": 3.6669983863830566, |
| "learning_rate": 1.785084745762712e-05, |
| "loss": 1.2853, |
| "step": 3670 |
| }, |
| { |
| "epoch": 0.09, |
| "grad_norm": 3.2381701469421387, |
| "learning_rate": 1.784406779661017e-05, |
| "loss": 1.3501, |
| "step": 3680 |
| }, |
| { |
| "epoch": 0.09, |
| "grad_norm": 2.3621437549591064, |
| "learning_rate": 1.7837288135593224e-05, |
| "loss": 1.5105, |
| "step": 3690 |
| }, |
| { |
| "epoch": 0.09, |
| "grad_norm": 1.1474658250808716, |
| "learning_rate": 1.7830508474576274e-05, |
| "loss": 1.5107, |
| "step": 3700 |
| }, |
| { |
| "epoch": 0.09, |
| "grad_norm": 9.769930839538574, |
| "learning_rate": 1.7823728813559323e-05, |
| "loss": 1.8114, |
| "step": 3710 |
| }, |
| { |
| "epoch": 0.09, |
| "grad_norm": 1.6212892532348633, |
| "learning_rate": 1.7816949152542376e-05, |
| "loss": 1.4948, |
| "step": 3720 |
| }, |
| { |
| "epoch": 0.09, |
| "grad_norm": 3.909820795059204, |
| "learning_rate": 1.7810169491525423e-05, |
| "loss": 1.4145, |
| "step": 3730 |
| }, |
| { |
| "epoch": 0.09, |
| "grad_norm": 1.870615005493164, |
| "learning_rate": 1.7803389830508476e-05, |
| "loss": 1.4856, |
| "step": 3740 |
| }, |
| { |
| "epoch": 0.09, |
| "grad_norm": 2.5912163257598877, |
| "learning_rate": 1.7796610169491526e-05, |
| "loss": 1.5021, |
| "step": 3750 |
| }, |
| { |
| "epoch": 0.09, |
| "grad_norm": 2.303807020187378, |
| "learning_rate": 1.778983050847458e-05, |
| "loss": 1.4348, |
| "step": 3760 |
| }, |
| { |
| "epoch": 0.09, |
| "grad_norm": 2.5953946113586426, |
| "learning_rate": 1.778305084745763e-05, |
| "loss": 1.4073, |
| "step": 3770 |
| }, |
| { |
| "epoch": 0.09, |
| "grad_norm": 2.129793643951416, |
| "learning_rate": 1.7776271186440678e-05, |
| "loss": 1.4827, |
| "step": 3780 |
| }, |
| { |
| "epoch": 0.09, |
| "grad_norm": 8.186966896057129, |
| "learning_rate": 1.776949152542373e-05, |
| "loss": 1.5887, |
| "step": 3790 |
| }, |
| { |
| "epoch": 0.1, |
| "grad_norm": 2.2956864833831787, |
| "learning_rate": 1.776271186440678e-05, |
| "loss": 1.4925, |
| "step": 3800 |
| }, |
| { |
| "epoch": 0.1, |
| "grad_norm": 1.3646732568740845, |
| "learning_rate": 1.775593220338983e-05, |
| "loss": 1.5814, |
| "step": 3810 |
| }, |
| { |
| "epoch": 0.1, |
| "grad_norm": 5.3318071365356445, |
| "learning_rate": 1.7749152542372884e-05, |
| "loss": 1.6139, |
| "step": 3820 |
| }, |
| { |
| "epoch": 0.1, |
| "grad_norm": 2.53743314743042, |
| "learning_rate": 1.7742372881355933e-05, |
| "loss": 1.5198, |
| "step": 3830 |
| }, |
| { |
| "epoch": 0.1, |
| "grad_norm": 7.048913478851318, |
| "learning_rate": 1.7735593220338983e-05, |
| "loss": 1.4417, |
| "step": 3840 |
| }, |
| { |
| "epoch": 0.1, |
| "grad_norm": 2.367314100265503, |
| "learning_rate": 1.7728813559322036e-05, |
| "loss": 1.4633, |
| "step": 3850 |
| }, |
| { |
| "epoch": 0.1, |
| "grad_norm": 1.619931697845459, |
| "learning_rate": 1.7722033898305086e-05, |
| "loss": 1.3719, |
| "step": 3860 |
| }, |
| { |
| "epoch": 0.1, |
| "grad_norm": 3.264190435409546, |
| "learning_rate": 1.771525423728814e-05, |
| "loss": 1.3079, |
| "step": 3870 |
| }, |
| { |
| "epoch": 0.1, |
| "grad_norm": 4.68599796295166, |
| "learning_rate": 1.770847457627119e-05, |
| "loss": 1.6278, |
| "step": 3880 |
| }, |
| { |
| "epoch": 0.1, |
| "grad_norm": 4.21970272064209, |
| "learning_rate": 1.770169491525424e-05, |
| "loss": 1.4814, |
| "step": 3890 |
| }, |
| { |
| "epoch": 0.1, |
| "grad_norm": 2.3647706508636475, |
| "learning_rate": 1.769491525423729e-05, |
| "loss": 1.5973, |
| "step": 3900 |
| }, |
| { |
| "epoch": 0.1, |
| "grad_norm": 3.2886738777160645, |
| "learning_rate": 1.768813559322034e-05, |
| "loss": 1.5224, |
| "step": 3910 |
| }, |
| { |
| "epoch": 0.1, |
| "grad_norm": 2.6817705631256104, |
| "learning_rate": 1.768135593220339e-05, |
| "loss": 1.566, |
| "step": 3920 |
| }, |
| { |
| "epoch": 0.1, |
| "grad_norm": 1.6243913173675537, |
| "learning_rate": 1.767457627118644e-05, |
| "loss": 1.6011, |
| "step": 3930 |
| }, |
| { |
| "epoch": 0.1, |
| "grad_norm": 1.8161165714263916, |
| "learning_rate": 1.7667796610169494e-05, |
| "loss": 1.5303, |
| "step": 3940 |
| }, |
| { |
| "epoch": 0.1, |
| "grad_norm": 2.486697196960449, |
| "learning_rate": 1.7661016949152543e-05, |
| "loss": 1.4032, |
| "step": 3950 |
| }, |
| { |
| "epoch": 0.1, |
| "grad_norm": 1.8153432607650757, |
| "learning_rate": 1.7654237288135593e-05, |
| "loss": 1.5167, |
| "step": 3960 |
| }, |
| { |
| "epoch": 0.1, |
| "grad_norm": 2.186868190765381, |
| "learning_rate": 1.7647457627118646e-05, |
| "loss": 1.6066, |
| "step": 3970 |
| }, |
| { |
| "epoch": 0.1, |
| "grad_norm": 2.1040332317352295, |
| "learning_rate": 1.7640677966101696e-05, |
| "loss": 1.4142, |
| "step": 3980 |
| }, |
| { |
| "epoch": 0.1, |
| "grad_norm": 3.989042282104492, |
| "learning_rate": 1.7633898305084746e-05, |
| "loss": 1.4877, |
| "step": 3990 |
| }, |
| { |
| "epoch": 0.1, |
| "grad_norm": 3.7532989978790283, |
| "learning_rate": 1.76271186440678e-05, |
| "loss": 1.4016, |
| "step": 4000 |
| }, |
| { |
| "epoch": 0.1, |
| "eval_loss": 1.5089428424835205, |
| "eval_runtime": 66.2329, |
| "eval_samples_per_second": 15.098, |
| "eval_steps_per_second": 15.098, |
| "step": 4000 |
| }, |
| { |
| "epoch": 0.1, |
| "grad_norm": 6.605846405029297, |
| "learning_rate": 1.762033898305085e-05, |
| "loss": 1.6163, |
| "step": 4010 |
| }, |
| { |
| "epoch": 0.1, |
| "grad_norm": 3.1976850032806396, |
| "learning_rate": 1.7613559322033898e-05, |
| "loss": 1.4298, |
| "step": 4020 |
| }, |
| { |
| "epoch": 0.1, |
| "grad_norm": 5.412558555603027, |
| "learning_rate": 1.760677966101695e-05, |
| "loss": 1.3316, |
| "step": 4030 |
| }, |
| { |
| "epoch": 0.1, |
| "grad_norm": 5.241945266723633, |
| "learning_rate": 1.76e-05, |
| "loss": 1.6258, |
| "step": 4040 |
| }, |
| { |
| "epoch": 0.1, |
| "grad_norm": 11.647382736206055, |
| "learning_rate": 1.7593220338983054e-05, |
| "loss": 1.5479, |
| "step": 4050 |
| }, |
| { |
| "epoch": 0.1, |
| "grad_norm": 2.091214656829834, |
| "learning_rate": 1.7586440677966104e-05, |
| "loss": 1.4385, |
| "step": 4060 |
| }, |
| { |
| "epoch": 0.1, |
| "grad_norm": 2.492511034011841, |
| "learning_rate": 1.7579661016949153e-05, |
| "loss": 1.6785, |
| "step": 4070 |
| }, |
| { |
| "epoch": 0.1, |
| "grad_norm": 1.9180244207382202, |
| "learning_rate": 1.7572881355932206e-05, |
| "loss": 1.6234, |
| "step": 4080 |
| }, |
| { |
| "epoch": 0.1, |
| "grad_norm": 2.256627082824707, |
| "learning_rate": 1.7566101694915256e-05, |
| "loss": 1.3143, |
| "step": 4090 |
| }, |
| { |
| "epoch": 0.1, |
| "grad_norm": 2.2864432334899902, |
| "learning_rate": 1.7559322033898306e-05, |
| "loss": 1.4961, |
| "step": 4100 |
| }, |
| { |
| "epoch": 0.1, |
| "grad_norm": 6.091422080993652, |
| "learning_rate": 1.755254237288136e-05, |
| "loss": 1.27, |
| "step": 4110 |
| }, |
| { |
| "epoch": 0.1, |
| "grad_norm": 1.6744223833084106, |
| "learning_rate": 1.754576271186441e-05, |
| "loss": 1.6859, |
| "step": 4120 |
| }, |
| { |
| "epoch": 0.1, |
| "grad_norm": 3.741482973098755, |
| "learning_rate": 1.753898305084746e-05, |
| "loss": 1.5382, |
| "step": 4130 |
| }, |
| { |
| "epoch": 0.1, |
| "grad_norm": 3.2129416465759277, |
| "learning_rate": 1.7532203389830508e-05, |
| "loss": 1.548, |
| "step": 4140 |
| }, |
| { |
| "epoch": 0.1, |
| "grad_norm": 1.9851702451705933, |
| "learning_rate": 1.752542372881356e-05, |
| "loss": 1.5777, |
| "step": 4150 |
| }, |
| { |
| "epoch": 0.1, |
| "grad_norm": 2.7135250568389893, |
| "learning_rate": 1.751864406779661e-05, |
| "loss": 1.6374, |
| "step": 4160 |
| }, |
| { |
| "epoch": 0.1, |
| "grad_norm": 6.881992340087891, |
| "learning_rate": 1.751186440677966e-05, |
| "loss": 1.5171, |
| "step": 4170 |
| }, |
| { |
| "epoch": 0.1, |
| "grad_norm": 3.8673460483551025, |
| "learning_rate": 1.7505084745762714e-05, |
| "loss": 1.3488, |
| "step": 4180 |
| }, |
| { |
| "epoch": 0.1, |
| "grad_norm": 3.7248244285583496, |
| "learning_rate": 1.7498305084745763e-05, |
| "loss": 1.4269, |
| "step": 4190 |
| }, |
| { |
| "epoch": 0.1, |
| "grad_norm": 4.191452503204346, |
| "learning_rate": 1.7491525423728813e-05, |
| "loss": 1.591, |
| "step": 4200 |
| }, |
| { |
| "epoch": 0.11, |
| "grad_norm": 1.9477959871292114, |
| "learning_rate": 1.7484745762711866e-05, |
| "loss": 1.5194, |
| "step": 4210 |
| }, |
| { |
| "epoch": 0.11, |
| "grad_norm": 4.151371002197266, |
| "learning_rate": 1.7477966101694916e-05, |
| "loss": 1.4644, |
| "step": 4220 |
| }, |
| { |
| "epoch": 0.11, |
| "grad_norm": 3.285062551498413, |
| "learning_rate": 1.747118644067797e-05, |
| "loss": 1.3697, |
| "step": 4230 |
| }, |
| { |
| "epoch": 0.11, |
| "grad_norm": 5.2172136306762695, |
| "learning_rate": 1.746440677966102e-05, |
| "loss": 1.3542, |
| "step": 4240 |
| }, |
| { |
| "epoch": 0.11, |
| "grad_norm": 2.321295738220215, |
| "learning_rate": 1.745762711864407e-05, |
| "loss": 1.6265, |
| "step": 4250 |
| }, |
| { |
| "epoch": 0.11, |
| "grad_norm": 3.2060482501983643, |
| "learning_rate": 1.745084745762712e-05, |
| "loss": 1.5214, |
| "step": 4260 |
| }, |
| { |
| "epoch": 0.11, |
| "grad_norm": 2.697052240371704, |
| "learning_rate": 1.744406779661017e-05, |
| "loss": 1.5446, |
| "step": 4270 |
| }, |
| { |
| "epoch": 0.11, |
| "grad_norm": 2.639937162399292, |
| "learning_rate": 1.743728813559322e-05, |
| "loss": 1.5329, |
| "step": 4280 |
| }, |
| { |
| "epoch": 0.11, |
| "grad_norm": 3.5502707958221436, |
| "learning_rate": 1.7430508474576274e-05, |
| "loss": 1.4997, |
| "step": 4290 |
| }, |
| { |
| "epoch": 0.11, |
| "grad_norm": 5.514580726623535, |
| "learning_rate": 1.7423728813559324e-05, |
| "loss": 1.4641, |
| "step": 4300 |
| }, |
| { |
| "epoch": 0.11, |
| "grad_norm": 2.264512062072754, |
| "learning_rate": 1.7416949152542373e-05, |
| "loss": 1.5588, |
| "step": 4310 |
| }, |
| { |
| "epoch": 0.11, |
| "grad_norm": 4.865510940551758, |
| "learning_rate": 1.7410169491525427e-05, |
| "loss": 1.5777, |
| "step": 4320 |
| }, |
| { |
| "epoch": 0.11, |
| "grad_norm": 2.872544050216675, |
| "learning_rate": 1.7403389830508476e-05, |
| "loss": 1.4683, |
| "step": 4330 |
| }, |
| { |
| "epoch": 0.11, |
| "grad_norm": 2.1891725063323975, |
| "learning_rate": 1.739661016949153e-05, |
| "loss": 1.4572, |
| "step": 4340 |
| }, |
| { |
| "epoch": 0.11, |
| "grad_norm": 5.604435443878174, |
| "learning_rate": 1.7389830508474576e-05, |
| "loss": 1.3685, |
| "step": 4350 |
| }, |
| { |
| "epoch": 0.11, |
| "grad_norm": 1.9309135675430298, |
| "learning_rate": 1.738305084745763e-05, |
| "loss": 1.6086, |
| "step": 4360 |
| }, |
| { |
| "epoch": 0.11, |
| "grad_norm": 1.2651278972625732, |
| "learning_rate": 1.737627118644068e-05, |
| "loss": 1.5768, |
| "step": 4370 |
| }, |
| { |
| "epoch": 0.11, |
| "grad_norm": 3.4745676517486572, |
| "learning_rate": 1.7369491525423728e-05, |
| "loss": 1.4553, |
| "step": 4380 |
| }, |
| { |
| "epoch": 0.11, |
| "grad_norm": 5.904428958892822, |
| "learning_rate": 1.736271186440678e-05, |
| "loss": 1.654, |
| "step": 4390 |
| }, |
| { |
| "epoch": 0.11, |
| "grad_norm": 1.6250745058059692, |
| "learning_rate": 1.735593220338983e-05, |
| "loss": 1.5443, |
| "step": 4400 |
| }, |
| { |
| "epoch": 0.11, |
| "grad_norm": 3.289626121520996, |
| "learning_rate": 1.734915254237288e-05, |
| "loss": 1.4691, |
| "step": 4410 |
| }, |
| { |
| "epoch": 0.11, |
| "grad_norm": 5.800081729888916, |
| "learning_rate": 1.7342372881355934e-05, |
| "loss": 1.543, |
| "step": 4420 |
| }, |
| { |
| "epoch": 0.11, |
| "grad_norm": 9.145909309387207, |
| "learning_rate": 1.7335593220338983e-05, |
| "loss": 1.4814, |
| "step": 4430 |
| }, |
| { |
| "epoch": 0.11, |
| "grad_norm": 2.22583270072937, |
| "learning_rate": 1.7328813559322037e-05, |
| "loss": 1.548, |
| "step": 4440 |
| }, |
| { |
| "epoch": 0.11, |
| "grad_norm": 4.693650722503662, |
| "learning_rate": 1.7322033898305086e-05, |
| "loss": 1.5602, |
| "step": 4450 |
| }, |
| { |
| "epoch": 0.11, |
| "grad_norm": 2.1424403190612793, |
| "learning_rate": 1.7315254237288136e-05, |
| "loss": 1.5298, |
| "step": 4460 |
| }, |
| { |
| "epoch": 0.11, |
| "grad_norm": 2.8103983402252197, |
| "learning_rate": 1.730847457627119e-05, |
| "loss": 1.435, |
| "step": 4470 |
| }, |
| { |
| "epoch": 0.11, |
| "grad_norm": 2.2972493171691895, |
| "learning_rate": 1.730169491525424e-05, |
| "loss": 1.4906, |
| "step": 4480 |
| }, |
| { |
| "epoch": 0.11, |
| "grad_norm": 2.437459707260132, |
| "learning_rate": 1.729491525423729e-05, |
| "loss": 1.5637, |
| "step": 4490 |
| }, |
| { |
| "epoch": 0.11, |
| "grad_norm": 3.561988115310669, |
| "learning_rate": 1.728813559322034e-05, |
| "loss": 1.572, |
| "step": 4500 |
| }, |
| { |
| "epoch": 0.11, |
| "eval_loss": 1.4887409210205078, |
| "eval_runtime": 66.1778, |
| "eval_samples_per_second": 15.111, |
| "eval_steps_per_second": 15.111, |
| "step": 4500 |
| }, |
| { |
| "epoch": 0.11, |
| "grad_norm": 0.4782548248767853, |
| "learning_rate": 1.728135593220339e-05, |
| "loss": 1.606, |
| "step": 4510 |
| }, |
| { |
| "epoch": 0.11, |
| "grad_norm": 4.1626386642456055, |
| "learning_rate": 1.7274576271186444e-05, |
| "loss": 1.6311, |
| "step": 4520 |
| }, |
| { |
| "epoch": 0.11, |
| "grad_norm": 3.057555675506592, |
| "learning_rate": 1.7267796610169494e-05, |
| "loss": 1.495, |
| "step": 4530 |
| }, |
| { |
| "epoch": 0.11, |
| "grad_norm": 4.714250087738037, |
| "learning_rate": 1.7261016949152544e-05, |
| "loss": 1.5219, |
| "step": 4540 |
| }, |
| { |
| "epoch": 0.11, |
| "grad_norm": 1.1815061569213867, |
| "learning_rate": 1.7254237288135597e-05, |
| "loss": 1.5227, |
| "step": 4550 |
| }, |
| { |
| "epoch": 0.11, |
| "grad_norm": 3.602353811264038, |
| "learning_rate": 1.7247457627118643e-05, |
| "loss": 1.6756, |
| "step": 4560 |
| }, |
| { |
| "epoch": 0.11, |
| "grad_norm": 1.7794777154922485, |
| "learning_rate": 1.7240677966101696e-05, |
| "loss": 1.3716, |
| "step": 4570 |
| }, |
| { |
| "epoch": 0.11, |
| "grad_norm": 2.1229944229125977, |
| "learning_rate": 1.7233898305084746e-05, |
| "loss": 1.4612, |
| "step": 4580 |
| }, |
| { |
| "epoch": 0.11, |
| "grad_norm": 4.656471252441406, |
| "learning_rate": 1.7227118644067796e-05, |
| "loss": 1.6358, |
| "step": 4590 |
| }, |
| { |
| "epoch": 0.12, |
| "grad_norm": 2.0882911682128906, |
| "learning_rate": 1.722033898305085e-05, |
| "loss": 1.5147, |
| "step": 4600 |
| }, |
| { |
| "epoch": 0.12, |
| "grad_norm": 3.070713996887207, |
| "learning_rate": 1.72135593220339e-05, |
| "loss": 1.5253, |
| "step": 4610 |
| }, |
| { |
| "epoch": 0.12, |
| "grad_norm": 1.7038753032684326, |
| "learning_rate": 1.720677966101695e-05, |
| "loss": 1.4375, |
| "step": 4620 |
| }, |
| { |
| "epoch": 0.12, |
| "grad_norm": 2.5298070907592773, |
| "learning_rate": 1.72e-05, |
| "loss": 1.5301, |
| "step": 4630 |
| }, |
| { |
| "epoch": 0.12, |
| "grad_norm": 2.2928740978240967, |
| "learning_rate": 1.719322033898305e-05, |
| "loss": 1.2571, |
| "step": 4640 |
| }, |
| { |
| "epoch": 0.12, |
| "grad_norm": 5.660508155822754, |
| "learning_rate": 1.7186440677966104e-05, |
| "loss": 1.5168, |
| "step": 4650 |
| }, |
| { |
| "epoch": 0.12, |
| "grad_norm": 5.104097366333008, |
| "learning_rate": 1.7179661016949154e-05, |
| "loss": 1.4134, |
| "step": 4660 |
| }, |
| { |
| "epoch": 0.12, |
| "grad_norm": 4.954836368560791, |
| "learning_rate": 1.7172881355932204e-05, |
| "loss": 1.4909, |
| "step": 4670 |
| }, |
| { |
| "epoch": 0.12, |
| "grad_norm": 4.392303466796875, |
| "learning_rate": 1.7166101694915257e-05, |
| "loss": 1.5639, |
| "step": 4680 |
| }, |
| { |
| "epoch": 0.12, |
| "grad_norm": 3.9031317234039307, |
| "learning_rate": 1.7159322033898306e-05, |
| "loss": 1.4521, |
| "step": 4690 |
| }, |
| { |
| "epoch": 0.12, |
| "grad_norm": 4.280984401702881, |
| "learning_rate": 1.715254237288136e-05, |
| "loss": 1.3877, |
| "step": 4700 |
| }, |
| { |
| "epoch": 0.12, |
| "grad_norm": 1.2300760746002197, |
| "learning_rate": 1.714576271186441e-05, |
| "loss": 1.5971, |
| "step": 4710 |
| }, |
| { |
| "epoch": 0.12, |
| "grad_norm": 3.845804452896118, |
| "learning_rate": 1.713898305084746e-05, |
| "loss": 1.5987, |
| "step": 4720 |
| }, |
| { |
| "epoch": 0.12, |
| "grad_norm": 5.861780643463135, |
| "learning_rate": 1.7132203389830512e-05, |
| "loss": 1.7391, |
| "step": 4730 |
| }, |
| { |
| "epoch": 0.12, |
| "grad_norm": 4.704226970672607, |
| "learning_rate": 1.712542372881356e-05, |
| "loss": 1.3629, |
| "step": 4740 |
| }, |
| { |
| "epoch": 0.12, |
| "grad_norm": 1.362378716468811, |
| "learning_rate": 1.711864406779661e-05, |
| "loss": 1.5607, |
| "step": 4750 |
| }, |
| { |
| "epoch": 0.12, |
| "grad_norm": 3.89095139503479, |
| "learning_rate": 1.711186440677966e-05, |
| "loss": 1.6382, |
| "step": 4760 |
| }, |
| { |
| "epoch": 0.12, |
| "grad_norm": 2.8033549785614014, |
| "learning_rate": 1.710508474576271e-05, |
| "loss": 1.6113, |
| "step": 4770 |
| }, |
| { |
| "epoch": 0.12, |
| "grad_norm": 3.045806884765625, |
| "learning_rate": 1.7098305084745764e-05, |
| "loss": 1.4223, |
| "step": 4780 |
| }, |
| { |
| "epoch": 0.12, |
| "grad_norm": 7.401500701904297, |
| "learning_rate": 1.7091525423728814e-05, |
| "loss": 1.4659, |
| "step": 4790 |
| }, |
| { |
| "epoch": 0.12, |
| "grad_norm": 1.490501880645752, |
| "learning_rate": 1.7084745762711867e-05, |
| "loss": 1.4543, |
| "step": 4800 |
| }, |
| { |
| "epoch": 0.12, |
| "grad_norm": 2.137923240661621, |
| "learning_rate": 1.7077966101694916e-05, |
| "loss": 1.4117, |
| "step": 4810 |
| }, |
| { |
| "epoch": 0.12, |
| "grad_norm": 3.254201889038086, |
| "learning_rate": 1.7071186440677966e-05, |
| "loss": 1.4404, |
| "step": 4820 |
| }, |
| { |
| "epoch": 0.12, |
| "grad_norm": 2.1423888206481934, |
| "learning_rate": 1.706440677966102e-05, |
| "loss": 1.5539, |
| "step": 4830 |
| }, |
| { |
| "epoch": 0.12, |
| "grad_norm": 1.927777647972107, |
| "learning_rate": 1.705762711864407e-05, |
| "loss": 1.5285, |
| "step": 4840 |
| }, |
| { |
| "epoch": 0.12, |
| "grad_norm": 3.6136608123779297, |
| "learning_rate": 1.705084745762712e-05, |
| "loss": 1.3503, |
| "step": 4850 |
| }, |
| { |
| "epoch": 0.12, |
| "grad_norm": 4.160021781921387, |
| "learning_rate": 1.704406779661017e-05, |
| "loss": 1.5243, |
| "step": 4860 |
| }, |
| { |
| "epoch": 0.12, |
| "grad_norm": 3.0722756385803223, |
| "learning_rate": 1.703728813559322e-05, |
| "loss": 1.5306, |
| "step": 4870 |
| }, |
| { |
| "epoch": 0.12, |
| "grad_norm": 6.304449558258057, |
| "learning_rate": 1.703050847457627e-05, |
| "loss": 1.5878, |
| "step": 4880 |
| }, |
| { |
| "epoch": 0.12, |
| "grad_norm": 1.935084581375122, |
| "learning_rate": 1.7023728813559324e-05, |
| "loss": 1.5446, |
| "step": 4890 |
| }, |
| { |
| "epoch": 0.12, |
| "grad_norm": 2.6202478408813477, |
| "learning_rate": 1.7016949152542374e-05, |
| "loss": 1.6461, |
| "step": 4900 |
| }, |
| { |
| "epoch": 0.12, |
| "grad_norm": 3.2252795696258545, |
| "learning_rate": 1.7010169491525427e-05, |
| "loss": 1.445, |
| "step": 4910 |
| }, |
| { |
| "epoch": 0.12, |
| "grad_norm": 4.115199565887451, |
| "learning_rate": 1.7003389830508477e-05, |
| "loss": 1.5513, |
| "step": 4920 |
| }, |
| { |
| "epoch": 0.12, |
| "grad_norm": 2.852402448654175, |
| "learning_rate": 1.6996610169491526e-05, |
| "loss": 1.496, |
| "step": 4930 |
| }, |
| { |
| "epoch": 0.12, |
| "grad_norm": 2.5148980617523193, |
| "learning_rate": 1.698983050847458e-05, |
| "loss": 1.4281, |
| "step": 4940 |
| }, |
| { |
| "epoch": 0.12, |
| "grad_norm": 1.3501824140548706, |
| "learning_rate": 1.698305084745763e-05, |
| "loss": 1.4301, |
| "step": 4950 |
| }, |
| { |
| "epoch": 0.12, |
| "grad_norm": 3.4159467220306396, |
| "learning_rate": 1.697627118644068e-05, |
| "loss": 1.5491, |
| "step": 4960 |
| }, |
| { |
| "epoch": 0.12, |
| "grad_norm": 4.547379970550537, |
| "learning_rate": 1.696949152542373e-05, |
| "loss": 1.5137, |
| "step": 4970 |
| }, |
| { |
| "epoch": 0.12, |
| "grad_norm": 3.4466910362243652, |
| "learning_rate": 1.6962711864406782e-05, |
| "loss": 1.4767, |
| "step": 4980 |
| }, |
| { |
| "epoch": 0.12, |
| "grad_norm": 6.4336395263671875, |
| "learning_rate": 1.695593220338983e-05, |
| "loss": 1.5213, |
| "step": 4990 |
| }, |
| { |
| "epoch": 0.12, |
| "grad_norm": 1.0396947860717773, |
| "learning_rate": 1.694915254237288e-05, |
| "loss": 1.4402, |
| "step": 5000 |
| }, |
| { |
| "epoch": 0.12, |
| "eval_loss": 1.4466354846954346, |
| "eval_runtime": 66.2264, |
| "eval_samples_per_second": 15.1, |
| "eval_steps_per_second": 15.1, |
| "step": 5000 |
| }, |
| { |
| "epoch": 0.13, |
| "grad_norm": 3.8836324214935303, |
| "learning_rate": 1.6942372881355934e-05, |
| "loss": 1.6772, |
| "step": 5010 |
| }, |
| { |
| "epoch": 0.13, |
| "grad_norm": 4.293881893157959, |
| "learning_rate": 1.6935593220338984e-05, |
| "loss": 1.4618, |
| "step": 5020 |
| }, |
| { |
| "epoch": 0.13, |
| "grad_norm": 3.111931800842285, |
| "learning_rate": 1.6928813559322034e-05, |
| "loss": 1.5564, |
| "step": 5030 |
| }, |
| { |
| "epoch": 0.13, |
| "grad_norm": 3.7768237590789795, |
| "learning_rate": 1.6922033898305087e-05, |
| "loss": 1.5239, |
| "step": 5040 |
| }, |
| { |
| "epoch": 0.13, |
| "grad_norm": 1.8081879615783691, |
| "learning_rate": 1.6915254237288136e-05, |
| "loss": 1.5257, |
| "step": 5050 |
| }, |
| { |
| "epoch": 0.13, |
| "grad_norm": 3.2274467945098877, |
| "learning_rate": 1.6908474576271186e-05, |
| "loss": 1.3785, |
| "step": 5060 |
| }, |
| { |
| "epoch": 0.13, |
| "grad_norm": 1.4177571535110474, |
| "learning_rate": 1.690169491525424e-05, |
| "loss": 1.4504, |
| "step": 5070 |
| }, |
| { |
| "epoch": 0.13, |
| "grad_norm": 4.527300834655762, |
| "learning_rate": 1.689491525423729e-05, |
| "loss": 1.4865, |
| "step": 5080 |
| }, |
| { |
| "epoch": 0.13, |
| "grad_norm": 2.1885902881622314, |
| "learning_rate": 1.6888135593220342e-05, |
| "loss": 1.5584, |
| "step": 5090 |
| }, |
| { |
| "epoch": 0.13, |
| "grad_norm": 2.6522934436798096, |
| "learning_rate": 1.6881355932203392e-05, |
| "loss": 1.4736, |
| "step": 5100 |
| }, |
| { |
| "epoch": 0.13, |
| "grad_norm": 2.8651719093322754, |
| "learning_rate": 1.687457627118644e-05, |
| "loss": 1.6247, |
| "step": 5110 |
| }, |
| { |
| "epoch": 0.13, |
| "grad_norm": 3.211879253387451, |
| "learning_rate": 1.6867796610169495e-05, |
| "loss": 1.4822, |
| "step": 5120 |
| }, |
| { |
| "epoch": 0.13, |
| "grad_norm": 4.725778579711914, |
| "learning_rate": 1.6861016949152544e-05, |
| "loss": 1.6364, |
| "step": 5130 |
| }, |
| { |
| "epoch": 0.13, |
| "grad_norm": 2.7316319942474365, |
| "learning_rate": 1.6854237288135594e-05, |
| "loss": 1.4218, |
| "step": 5140 |
| }, |
| { |
| "epoch": 0.13, |
| "grad_norm": 3.2881827354431152, |
| "learning_rate": 1.6847457627118647e-05, |
| "loss": 1.4729, |
| "step": 5150 |
| }, |
| { |
| "epoch": 0.13, |
| "grad_norm": 4.106601238250732, |
| "learning_rate": 1.6840677966101697e-05, |
| "loss": 1.5303, |
| "step": 5160 |
| }, |
| { |
| "epoch": 0.13, |
| "grad_norm": 1.9981019496917725, |
| "learning_rate": 1.683389830508475e-05, |
| "loss": 1.4583, |
| "step": 5170 |
| }, |
| { |
| "epoch": 0.13, |
| "grad_norm": 2.5599071979522705, |
| "learning_rate": 1.6827118644067796e-05, |
| "loss": 1.5817, |
| "step": 5180 |
| }, |
| { |
| "epoch": 0.13, |
| "grad_norm": 4.673508644104004, |
| "learning_rate": 1.682033898305085e-05, |
| "loss": 1.4967, |
| "step": 5190 |
| }, |
| { |
| "epoch": 0.13, |
| "grad_norm": 7.4875922203063965, |
| "learning_rate": 1.68135593220339e-05, |
| "loss": 1.4482, |
| "step": 5200 |
| }, |
| { |
| "epoch": 0.13, |
| "grad_norm": 5.208539962768555, |
| "learning_rate": 1.680677966101695e-05, |
| "loss": 1.5031, |
| "step": 5210 |
| }, |
| { |
| "epoch": 0.13, |
| "grad_norm": 5.5398664474487305, |
| "learning_rate": 1.6800000000000002e-05, |
| "loss": 1.3661, |
| "step": 5220 |
| }, |
| { |
| "epoch": 0.13, |
| "grad_norm": 4.9544267654418945, |
| "learning_rate": 1.679322033898305e-05, |
| "loss": 1.599, |
| "step": 5230 |
| }, |
| { |
| "epoch": 0.13, |
| "grad_norm": 3.121668815612793, |
| "learning_rate": 1.67864406779661e-05, |
| "loss": 1.4801, |
| "step": 5240 |
| }, |
| { |
| "epoch": 0.13, |
| "grad_norm": 4.368370532989502, |
| "learning_rate": 1.6779661016949154e-05, |
| "loss": 1.5737, |
| "step": 5250 |
| }, |
| { |
| "epoch": 0.13, |
| "grad_norm": 4.03584098815918, |
| "learning_rate": 1.6772881355932204e-05, |
| "loss": 1.4995, |
| "step": 5260 |
| }, |
| { |
| "epoch": 0.13, |
| "grad_norm": 5.6201090812683105, |
| "learning_rate": 1.6766101694915257e-05, |
| "loss": 1.3567, |
| "step": 5270 |
| }, |
| { |
| "epoch": 0.13, |
| "grad_norm": 4.009582996368408, |
| "learning_rate": 1.6759322033898307e-05, |
| "loss": 1.4241, |
| "step": 5280 |
| }, |
| { |
| "epoch": 0.13, |
| "grad_norm": 3.4552884101867676, |
| "learning_rate": 1.6752542372881357e-05, |
| "loss": 1.3472, |
| "step": 5290 |
| }, |
| { |
| "epoch": 0.13, |
| "grad_norm": 4.6100029945373535, |
| "learning_rate": 1.674576271186441e-05, |
| "loss": 1.3696, |
| "step": 5300 |
| }, |
| { |
| "epoch": 0.13, |
| "grad_norm": 6.752505779266357, |
| "learning_rate": 1.673898305084746e-05, |
| "loss": 1.5192, |
| "step": 5310 |
| }, |
| { |
| "epoch": 0.13, |
| "grad_norm": 1.4629709720611572, |
| "learning_rate": 1.673220338983051e-05, |
| "loss": 1.5774, |
| "step": 5320 |
| }, |
| { |
| "epoch": 0.13, |
| "grad_norm": 2.904249906539917, |
| "learning_rate": 1.6725423728813562e-05, |
| "loss": 1.4843, |
| "step": 5330 |
| }, |
| { |
| "epoch": 0.13, |
| "grad_norm": 3.3308334350585938, |
| "learning_rate": 1.6718644067796612e-05, |
| "loss": 1.6006, |
| "step": 5340 |
| }, |
| { |
| "epoch": 0.13, |
| "grad_norm": 2.405097484588623, |
| "learning_rate": 1.671186440677966e-05, |
| "loss": 1.5736, |
| "step": 5350 |
| }, |
| { |
| "epoch": 0.13, |
| "grad_norm": 3.4328196048736572, |
| "learning_rate": 1.6705084745762715e-05, |
| "loss": 1.4613, |
| "step": 5360 |
| }, |
| { |
| "epoch": 0.13, |
| "grad_norm": 1.5162967443466187, |
| "learning_rate": 1.6698305084745764e-05, |
| "loss": 1.4562, |
| "step": 5370 |
| }, |
| { |
| "epoch": 0.13, |
| "grad_norm": 2.473905563354492, |
| "learning_rate": 1.6691525423728817e-05, |
| "loss": 1.4899, |
| "step": 5380 |
| }, |
| { |
| "epoch": 0.13, |
| "grad_norm": 1.3600870370864868, |
| "learning_rate": 1.6684745762711864e-05, |
| "loss": 1.4197, |
| "step": 5390 |
| }, |
| { |
| "epoch": 0.14, |
| "grad_norm": 3.9598913192749023, |
| "learning_rate": 1.6677966101694917e-05, |
| "loss": 1.4268, |
| "step": 5400 |
| }, |
| { |
| "epoch": 0.14, |
| "grad_norm": 2.6608197689056396, |
| "learning_rate": 1.6671186440677967e-05, |
| "loss": 1.2926, |
| "step": 5410 |
| }, |
| { |
| "epoch": 0.14, |
| "grad_norm": 2.557682991027832, |
| "learning_rate": 1.6664406779661016e-05, |
| "loss": 1.6283, |
| "step": 5420 |
| }, |
| { |
| "epoch": 0.14, |
| "grad_norm": 2.969449281692505, |
| "learning_rate": 1.665762711864407e-05, |
| "loss": 1.4047, |
| "step": 5430 |
| }, |
| { |
| "epoch": 0.14, |
| "grad_norm": 2.7862861156463623, |
| "learning_rate": 1.665084745762712e-05, |
| "loss": 1.4233, |
| "step": 5440 |
| }, |
| { |
| "epoch": 0.14, |
| "grad_norm": 1.7756729125976562, |
| "learning_rate": 1.6644067796610172e-05, |
| "loss": 1.4485, |
| "step": 5450 |
| }, |
| { |
| "epoch": 0.14, |
| "grad_norm": 2.082773447036743, |
| "learning_rate": 1.6637288135593222e-05, |
| "loss": 1.3878, |
| "step": 5460 |
| }, |
| { |
| "epoch": 0.14, |
| "grad_norm": 5.100788116455078, |
| "learning_rate": 1.663050847457627e-05, |
| "loss": 1.5481, |
| "step": 5470 |
| }, |
| { |
| "epoch": 0.14, |
| "grad_norm": 3.218986749649048, |
| "learning_rate": 1.6623728813559325e-05, |
| "loss": 1.3906, |
| "step": 5480 |
| }, |
| { |
| "epoch": 0.14, |
| "grad_norm": 1.727528691291809, |
| "learning_rate": 1.6616949152542374e-05, |
| "loss": 1.4824, |
| "step": 5490 |
| }, |
| { |
| "epoch": 0.14, |
| "grad_norm": 4.321578502655029, |
| "learning_rate": 1.6610169491525424e-05, |
| "loss": 1.5586, |
| "step": 5500 |
| }, |
| { |
| "epoch": 0.14, |
| "eval_loss": 1.469241976737976, |
| "eval_runtime": 66.1281, |
| "eval_samples_per_second": 15.122, |
| "eval_steps_per_second": 15.122, |
| "step": 5500 |
| }, |
| { |
| "epoch": 0.14, |
| "grad_norm": 1.7050514221191406, |
| "learning_rate": 1.6603389830508477e-05, |
| "loss": 1.2704, |
| "step": 5510 |
| }, |
| { |
| "epoch": 0.14, |
| "grad_norm": 1.4852951765060425, |
| "learning_rate": 1.6596610169491527e-05, |
| "loss": 1.6239, |
| "step": 5520 |
| }, |
| { |
| "epoch": 0.14, |
| "grad_norm": 3.524787664413452, |
| "learning_rate": 1.6589830508474577e-05, |
| "loss": 1.45, |
| "step": 5530 |
| }, |
| { |
| "epoch": 0.14, |
| "grad_norm": 1.804272174835205, |
| "learning_rate": 1.658305084745763e-05, |
| "loss": 1.4039, |
| "step": 5540 |
| }, |
| { |
| "epoch": 0.14, |
| "grad_norm": 2.2891910076141357, |
| "learning_rate": 1.657627118644068e-05, |
| "loss": 1.3969, |
| "step": 5550 |
| }, |
| { |
| "epoch": 0.14, |
| "grad_norm": 2.775587797164917, |
| "learning_rate": 1.6569491525423732e-05, |
| "loss": 1.5571, |
| "step": 5560 |
| }, |
| { |
| "epoch": 0.14, |
| "grad_norm": 4.006053924560547, |
| "learning_rate": 1.6562711864406782e-05, |
| "loss": 1.4743, |
| "step": 5570 |
| }, |
| { |
| "epoch": 0.14, |
| "grad_norm": 2.258488416671753, |
| "learning_rate": 1.6555932203389832e-05, |
| "loss": 1.3094, |
| "step": 5580 |
| }, |
| { |
| "epoch": 0.14, |
| "grad_norm": 1.469382405281067, |
| "learning_rate": 1.654915254237288e-05, |
| "loss": 1.4247, |
| "step": 5590 |
| }, |
| { |
| "epoch": 0.14, |
| "grad_norm": 2.234476089477539, |
| "learning_rate": 1.654237288135593e-05, |
| "loss": 1.4024, |
| "step": 5600 |
| }, |
| { |
| "epoch": 0.14, |
| "grad_norm": 4.297300338745117, |
| "learning_rate": 1.6535593220338984e-05, |
| "loss": 1.3422, |
| "step": 5610 |
| }, |
| { |
| "epoch": 0.14, |
| "grad_norm": 2.316805362701416, |
| "learning_rate": 1.6528813559322034e-05, |
| "loss": 1.4723, |
| "step": 5620 |
| }, |
| { |
| "epoch": 0.14, |
| "grad_norm": 2.859321117401123, |
| "learning_rate": 1.6522033898305084e-05, |
| "loss": 1.4444, |
| "step": 5630 |
| }, |
| { |
| "epoch": 0.14, |
| "grad_norm": 5.158230304718018, |
| "learning_rate": 1.6515254237288137e-05, |
| "loss": 1.6279, |
| "step": 5640 |
| }, |
| { |
| "epoch": 0.14, |
| "grad_norm": 2.160407781600952, |
| "learning_rate": 1.6508474576271187e-05, |
| "loss": 1.4487, |
| "step": 5650 |
| }, |
| { |
| "epoch": 0.14, |
| "grad_norm": 2.0725409984588623, |
| "learning_rate": 1.650169491525424e-05, |
| "loss": 1.4906, |
| "step": 5660 |
| }, |
| { |
| "epoch": 0.14, |
| "grad_norm": 1.5659565925598145, |
| "learning_rate": 1.649491525423729e-05, |
| "loss": 1.4545, |
| "step": 5670 |
| }, |
| { |
| "epoch": 0.14, |
| "grad_norm": 8.198447227478027, |
| "learning_rate": 1.648813559322034e-05, |
| "loss": 1.5087, |
| "step": 5680 |
| }, |
| { |
| "epoch": 0.14, |
| "grad_norm": 3.8482344150543213, |
| "learning_rate": 1.6481355932203392e-05, |
| "loss": 1.5596, |
| "step": 5690 |
| }, |
| { |
| "epoch": 0.14, |
| "grad_norm": 3.828826665878296, |
| "learning_rate": 1.6474576271186442e-05, |
| "loss": 1.5452, |
| "step": 5700 |
| }, |
| { |
| "epoch": 0.14, |
| "grad_norm": 9.930662155151367, |
| "learning_rate": 1.646779661016949e-05, |
| "loss": 1.4667, |
| "step": 5710 |
| }, |
| { |
| "epoch": 0.14, |
| "grad_norm": 2.9717953205108643, |
| "learning_rate": 1.6461016949152545e-05, |
| "loss": 1.5651, |
| "step": 5720 |
| }, |
| { |
| "epoch": 0.14, |
| "grad_norm": 1.6129778623580933, |
| "learning_rate": 1.6454237288135594e-05, |
| "loss": 1.3477, |
| "step": 5730 |
| }, |
| { |
| "epoch": 0.14, |
| "grad_norm": 2.341064214706421, |
| "learning_rate": 1.6447457627118648e-05, |
| "loss": 1.5159, |
| "step": 5740 |
| }, |
| { |
| "epoch": 0.14, |
| "grad_norm": 4.6363205909729, |
| "learning_rate": 1.6440677966101697e-05, |
| "loss": 1.5524, |
| "step": 5750 |
| }, |
| { |
| "epoch": 0.14, |
| "grad_norm": 4.433897495269775, |
| "learning_rate": 1.6433898305084747e-05, |
| "loss": 1.46, |
| "step": 5760 |
| }, |
| { |
| "epoch": 0.14, |
| "grad_norm": 3.2978014945983887, |
| "learning_rate": 1.64271186440678e-05, |
| "loss": 1.5847, |
| "step": 5770 |
| }, |
| { |
| "epoch": 0.14, |
| "grad_norm": 3.325284004211426, |
| "learning_rate": 1.642033898305085e-05, |
| "loss": 1.3553, |
| "step": 5780 |
| }, |
| { |
| "epoch": 0.14, |
| "grad_norm": 1.9704495668411255, |
| "learning_rate": 1.64135593220339e-05, |
| "loss": 1.3661, |
| "step": 5790 |
| }, |
| { |
| "epoch": 0.14, |
| "grad_norm": 1.6186383962631226, |
| "learning_rate": 1.640677966101695e-05, |
| "loss": 1.5538, |
| "step": 5800 |
| }, |
| { |
| "epoch": 0.15, |
| "grad_norm": 2.1665966510772705, |
| "learning_rate": 1.64e-05, |
| "loss": 1.5188, |
| "step": 5810 |
| }, |
| { |
| "epoch": 0.15, |
| "grad_norm": 7.165323734283447, |
| "learning_rate": 1.6393220338983052e-05, |
| "loss": 1.627, |
| "step": 5820 |
| }, |
| { |
| "epoch": 0.15, |
| "grad_norm": 7.434396743774414, |
| "learning_rate": 1.63864406779661e-05, |
| "loss": 1.4751, |
| "step": 5830 |
| }, |
| { |
| "epoch": 0.15, |
| "grad_norm": 2.549182176589966, |
| "learning_rate": 1.6379661016949155e-05, |
| "loss": 1.5013, |
| "step": 5840 |
| }, |
| { |
| "epoch": 0.15, |
| "grad_norm": 2.3907253742218018, |
| "learning_rate": 1.6372881355932204e-05, |
| "loss": 1.5532, |
| "step": 5850 |
| }, |
| { |
| "epoch": 0.15, |
| "grad_norm": 2.3262014389038086, |
| "learning_rate": 1.6366101694915254e-05, |
| "loss": 1.4404, |
| "step": 5860 |
| }, |
| { |
| "epoch": 0.15, |
| "grad_norm": 2.387146234512329, |
| "learning_rate": 1.6359322033898307e-05, |
| "loss": 1.4057, |
| "step": 5870 |
| }, |
| { |
| "epoch": 0.15, |
| "grad_norm": 4.461369514465332, |
| "learning_rate": 1.6352542372881357e-05, |
| "loss": 1.4003, |
| "step": 5880 |
| }, |
| { |
| "epoch": 0.15, |
| "grad_norm": 5.3152384757995605, |
| "learning_rate": 1.6345762711864407e-05, |
| "loss": 1.6658, |
| "step": 5890 |
| }, |
| { |
| "epoch": 0.15, |
| "grad_norm": 3.839587450027466, |
| "learning_rate": 1.633898305084746e-05, |
| "loss": 1.6396, |
| "step": 5900 |
| }, |
| { |
| "epoch": 0.15, |
| "grad_norm": 1.5456769466400146, |
| "learning_rate": 1.633220338983051e-05, |
| "loss": 1.4543, |
| "step": 5910 |
| }, |
| { |
| "epoch": 0.15, |
| "grad_norm": 1.218180775642395, |
| "learning_rate": 1.6325423728813563e-05, |
| "loss": 1.6549, |
| "step": 5920 |
| }, |
| { |
| "epoch": 0.15, |
| "grad_norm": 3.9048516750335693, |
| "learning_rate": 1.6318644067796612e-05, |
| "loss": 1.4558, |
| "step": 5930 |
| }, |
| { |
| "epoch": 0.15, |
| "grad_norm": 2.0440735816955566, |
| "learning_rate": 1.6311864406779662e-05, |
| "loss": 1.392, |
| "step": 5940 |
| }, |
| { |
| "epoch": 0.15, |
| "grad_norm": 5.26229190826416, |
| "learning_rate": 1.6305084745762715e-05, |
| "loss": 1.5081, |
| "step": 5950 |
| }, |
| { |
| "epoch": 0.15, |
| "grad_norm": 2.3289761543273926, |
| "learning_rate": 1.6298305084745765e-05, |
| "loss": 1.3642, |
| "step": 5960 |
| }, |
| { |
| "epoch": 0.15, |
| "grad_norm": 2.584726333618164, |
| "learning_rate": 1.6291525423728814e-05, |
| "loss": 1.3108, |
| "step": 5970 |
| }, |
| { |
| "epoch": 0.15, |
| "grad_norm": 3.9245219230651855, |
| "learning_rate": 1.6284745762711868e-05, |
| "loss": 1.5209, |
| "step": 5980 |
| }, |
| { |
| "epoch": 0.15, |
| "grad_norm": 1.5690237283706665, |
| "learning_rate": 1.6277966101694917e-05, |
| "loss": 1.4554, |
| "step": 5990 |
| }, |
| { |
| "epoch": 0.15, |
| "grad_norm": 2.7308597564697266, |
| "learning_rate": 1.6271186440677967e-05, |
| "loss": 1.3758, |
| "step": 6000 |
| }, |
| { |
| "epoch": 0.15, |
| "eval_loss": 1.4649540185928345, |
| "eval_runtime": 66.131, |
| "eval_samples_per_second": 15.122, |
| "eval_steps_per_second": 15.122, |
| "step": 6000 |
| }, |
| { |
| "epoch": 0.15, |
| "grad_norm": 3.45599627494812, |
| "learning_rate": 1.6264406779661017e-05, |
| "loss": 1.4816, |
| "step": 6010 |
| }, |
| { |
| "epoch": 0.15, |
| "grad_norm": 2.358900785446167, |
| "learning_rate": 1.625762711864407e-05, |
| "loss": 1.5672, |
| "step": 6020 |
| }, |
| { |
| "epoch": 0.15, |
| "grad_norm": 1.577291488647461, |
| "learning_rate": 1.625084745762712e-05, |
| "loss": 1.536, |
| "step": 6030 |
| }, |
| { |
| "epoch": 0.15, |
| "grad_norm": 1.5293314456939697, |
| "learning_rate": 1.624406779661017e-05, |
| "loss": 1.3852, |
| "step": 6040 |
| }, |
| { |
| "epoch": 0.15, |
| "grad_norm": 3.6409552097320557, |
| "learning_rate": 1.6237288135593222e-05, |
| "loss": 1.3703, |
| "step": 6050 |
| }, |
| { |
| "epoch": 0.15, |
| "grad_norm": 1.245363712310791, |
| "learning_rate": 1.6230508474576272e-05, |
| "loss": 1.4489, |
| "step": 6060 |
| }, |
| { |
| "epoch": 0.15, |
| "grad_norm": 3.5935614109039307, |
| "learning_rate": 1.6223728813559322e-05, |
| "loss": 1.3177, |
| "step": 6070 |
| }, |
| { |
| "epoch": 0.15, |
| "grad_norm": 3.89214825630188, |
| "learning_rate": 1.6216949152542375e-05, |
| "loss": 1.6123, |
| "step": 6080 |
| }, |
| { |
| "epoch": 0.15, |
| "grad_norm": 2.6698529720306396, |
| "learning_rate": 1.6210169491525424e-05, |
| "loss": 1.4598, |
| "step": 6090 |
| }, |
| { |
| "epoch": 0.15, |
| "grad_norm": 4.818114757537842, |
| "learning_rate": 1.6203389830508474e-05, |
| "loss": 1.3712, |
| "step": 6100 |
| }, |
| { |
| "epoch": 0.15, |
| "grad_norm": 5.041522979736328, |
| "learning_rate": 1.6196610169491527e-05, |
| "loss": 1.5556, |
| "step": 6110 |
| }, |
| { |
| "epoch": 0.15, |
| "grad_norm": 2.3514511585235596, |
| "learning_rate": 1.6189830508474577e-05, |
| "loss": 1.5359, |
| "step": 6120 |
| }, |
| { |
| "epoch": 0.15, |
| "grad_norm": 1.9724153280258179, |
| "learning_rate": 1.618305084745763e-05, |
| "loss": 1.5191, |
| "step": 6130 |
| }, |
| { |
| "epoch": 0.15, |
| "grad_norm": 3.0398383140563965, |
| "learning_rate": 1.617627118644068e-05, |
| "loss": 1.4751, |
| "step": 6140 |
| }, |
| { |
| "epoch": 0.15, |
| "grad_norm": 1.3548771142959595, |
| "learning_rate": 1.616949152542373e-05, |
| "loss": 1.4159, |
| "step": 6150 |
| }, |
| { |
| "epoch": 0.15, |
| "grad_norm": 3.616636276245117, |
| "learning_rate": 1.6162711864406783e-05, |
| "loss": 1.4667, |
| "step": 6160 |
| }, |
| { |
| "epoch": 0.15, |
| "grad_norm": 3.348968744277954, |
| "learning_rate": 1.6155932203389832e-05, |
| "loss": 1.4056, |
| "step": 6170 |
| }, |
| { |
| "epoch": 0.15, |
| "grad_norm": 2.5771777629852295, |
| "learning_rate": 1.6149152542372882e-05, |
| "loss": 1.4256, |
| "step": 6180 |
| }, |
| { |
| "epoch": 0.15, |
| "grad_norm": 3.0773298740386963, |
| "learning_rate": 1.6142372881355935e-05, |
| "loss": 1.2936, |
| "step": 6190 |
| }, |
| { |
| "epoch": 0.15, |
| "grad_norm": 2.114321708679199, |
| "learning_rate": 1.6135593220338985e-05, |
| "loss": 1.5362, |
| "step": 6200 |
| }, |
| { |
| "epoch": 0.16, |
| "grad_norm": 3.142282485961914, |
| "learning_rate": 1.6128813559322038e-05, |
| "loss": 1.506, |
| "step": 6210 |
| }, |
| { |
| "epoch": 0.16, |
| "grad_norm": 5.1931562423706055, |
| "learning_rate": 1.6122033898305084e-05, |
| "loss": 1.581, |
| "step": 6220 |
| }, |
| { |
| "epoch": 0.16, |
| "grad_norm": 3.0442159175872803, |
| "learning_rate": 1.6115254237288137e-05, |
| "loss": 1.4553, |
| "step": 6230 |
| }, |
| { |
| "epoch": 0.16, |
| "grad_norm": 4.841310501098633, |
| "learning_rate": 1.6108474576271187e-05, |
| "loss": 1.5559, |
| "step": 6240 |
| }, |
| { |
| "epoch": 0.16, |
| "grad_norm": 4.651193141937256, |
| "learning_rate": 1.6101694915254237e-05, |
| "loss": 1.3535, |
| "step": 6250 |
| }, |
| { |
| "epoch": 0.16, |
| "grad_norm": 3.571608543395996, |
| "learning_rate": 1.609491525423729e-05, |
| "loss": 1.5062, |
| "step": 6260 |
| }, |
| { |
| "epoch": 0.16, |
| "grad_norm": 6.008980751037598, |
| "learning_rate": 1.608813559322034e-05, |
| "loss": 1.4696, |
| "step": 6270 |
| }, |
| { |
| "epoch": 0.16, |
| "grad_norm": 11.914450645446777, |
| "learning_rate": 1.608135593220339e-05, |
| "loss": 1.4643, |
| "step": 6280 |
| }, |
| { |
| "epoch": 0.16, |
| "grad_norm": 1.8239995241165161, |
| "learning_rate": 1.6074576271186442e-05, |
| "loss": 1.6175, |
| "step": 6290 |
| }, |
| { |
| "epoch": 0.16, |
| "grad_norm": 2.543314218521118, |
| "learning_rate": 1.6067796610169492e-05, |
| "loss": 1.4362, |
| "step": 6300 |
| }, |
| { |
| "epoch": 0.16, |
| "grad_norm": 3.3625097274780273, |
| "learning_rate": 1.6061016949152545e-05, |
| "loss": 1.5615, |
| "step": 6310 |
| }, |
| { |
| "epoch": 0.16, |
| "grad_norm": 4.757710933685303, |
| "learning_rate": 1.6054237288135595e-05, |
| "loss": 1.4382, |
| "step": 6320 |
| }, |
| { |
| "epoch": 0.16, |
| "grad_norm": 2.5102877616882324, |
| "learning_rate": 1.6047457627118645e-05, |
| "loss": 1.5367, |
| "step": 6330 |
| }, |
| { |
| "epoch": 0.16, |
| "grad_norm": 3.226902484893799, |
| "learning_rate": 1.6040677966101698e-05, |
| "loss": 1.7053, |
| "step": 6340 |
| }, |
| { |
| "epoch": 0.16, |
| "grad_norm": 2.9312872886657715, |
| "learning_rate": 1.6033898305084747e-05, |
| "loss": 1.5362, |
| "step": 6350 |
| }, |
| { |
| "epoch": 0.16, |
| "grad_norm": 1.0346354246139526, |
| "learning_rate": 1.6027118644067797e-05, |
| "loss": 1.6161, |
| "step": 6360 |
| }, |
| { |
| "epoch": 0.16, |
| "grad_norm": 1.9462825059890747, |
| "learning_rate": 1.602033898305085e-05, |
| "loss": 1.4455, |
| "step": 6370 |
| }, |
| { |
| "epoch": 0.16, |
| "grad_norm": 1.9478490352630615, |
| "learning_rate": 1.60135593220339e-05, |
| "loss": 1.5183, |
| "step": 6380 |
| }, |
| { |
| "epoch": 0.16, |
| "grad_norm": 2.781726598739624, |
| "learning_rate": 1.600677966101695e-05, |
| "loss": 1.3517, |
| "step": 6390 |
| }, |
| { |
| "epoch": 0.16, |
| "grad_norm": 2.242448091506958, |
| "learning_rate": 1.6000000000000003e-05, |
| "loss": 1.6257, |
| "step": 6400 |
| }, |
| { |
| "epoch": 0.16, |
| "grad_norm": 1.3541291952133179, |
| "learning_rate": 1.5993220338983052e-05, |
| "loss": 1.4108, |
| "step": 6410 |
| }, |
| { |
| "epoch": 0.16, |
| "grad_norm": 2.1203219890594482, |
| "learning_rate": 1.5986440677966105e-05, |
| "loss": 1.5363, |
| "step": 6420 |
| }, |
| { |
| "epoch": 0.16, |
| "grad_norm": 2.9840617179870605, |
| "learning_rate": 1.5979661016949152e-05, |
| "loss": 1.4282, |
| "step": 6430 |
| }, |
| { |
| "epoch": 0.16, |
| "grad_norm": 3.6846158504486084, |
| "learning_rate": 1.5972881355932205e-05, |
| "loss": 1.4535, |
| "step": 6440 |
| }, |
| { |
| "epoch": 0.16, |
| "grad_norm": 1.272042155265808, |
| "learning_rate": 1.5966101694915255e-05, |
| "loss": 1.5842, |
| "step": 6450 |
| }, |
| { |
| "epoch": 0.16, |
| "grad_norm": 2.9574079513549805, |
| "learning_rate": 1.5959322033898304e-05, |
| "loss": 1.3752, |
| "step": 6460 |
| }, |
| { |
| "epoch": 0.16, |
| "grad_norm": 2.928889751434326, |
| "learning_rate": 1.5952542372881357e-05, |
| "loss": 1.4675, |
| "step": 6470 |
| }, |
| { |
| "epoch": 0.16, |
| "grad_norm": 5.292194843292236, |
| "learning_rate": 1.5945762711864407e-05, |
| "loss": 1.5172, |
| "step": 6480 |
| }, |
| { |
| "epoch": 0.16, |
| "grad_norm": 3.3911092281341553, |
| "learning_rate": 1.593898305084746e-05, |
| "loss": 1.467, |
| "step": 6490 |
| }, |
| { |
| "epoch": 0.16, |
| "grad_norm": 4.30944299697876, |
| "learning_rate": 1.593220338983051e-05, |
| "loss": 1.3515, |
| "step": 6500 |
| }, |
| { |
| "epoch": 0.16, |
| "eval_loss": 1.465691089630127, |
| "eval_runtime": 66.1688, |
| "eval_samples_per_second": 15.113, |
| "eval_steps_per_second": 15.113, |
| "step": 6500 |
| }, |
| { |
| "epoch": 0.16, |
| "grad_norm": 2.5098283290863037, |
| "learning_rate": 1.592542372881356e-05, |
| "loss": 1.5554, |
| "step": 6510 |
| }, |
| { |
| "epoch": 0.16, |
| "grad_norm": 7.33607292175293, |
| "learning_rate": 1.5918644067796613e-05, |
| "loss": 1.6483, |
| "step": 6520 |
| }, |
| { |
| "epoch": 0.16, |
| "grad_norm": 3.0138282775878906, |
| "learning_rate": 1.5911864406779662e-05, |
| "loss": 1.5784, |
| "step": 6530 |
| }, |
| { |
| "epoch": 0.16, |
| "grad_norm": 3.527956008911133, |
| "learning_rate": 1.5905084745762712e-05, |
| "loss": 1.265, |
| "step": 6540 |
| }, |
| { |
| "epoch": 0.16, |
| "grad_norm": 3.4589829444885254, |
| "learning_rate": 1.5898305084745765e-05, |
| "loss": 1.6605, |
| "step": 6550 |
| }, |
| { |
| "epoch": 0.16, |
| "grad_norm": 5.758669853210449, |
| "learning_rate": 1.5891525423728815e-05, |
| "loss": 1.2722, |
| "step": 6560 |
| }, |
| { |
| "epoch": 0.16, |
| "grad_norm": 2.0331597328186035, |
| "learning_rate": 1.5884745762711865e-05, |
| "loss": 1.5016, |
| "step": 6570 |
| }, |
| { |
| "epoch": 0.16, |
| "grad_norm": 7.730234622955322, |
| "learning_rate": 1.5877966101694918e-05, |
| "loss": 1.6401, |
| "step": 6580 |
| }, |
| { |
| "epoch": 0.16, |
| "grad_norm": 1.977071762084961, |
| "learning_rate": 1.5871186440677967e-05, |
| "loss": 1.3554, |
| "step": 6590 |
| }, |
| { |
| "epoch": 0.17, |
| "grad_norm": 3.8177216053009033, |
| "learning_rate": 1.586440677966102e-05, |
| "loss": 1.6109, |
| "step": 6600 |
| }, |
| { |
| "epoch": 0.17, |
| "grad_norm": 4.232168197631836, |
| "learning_rate": 1.585762711864407e-05, |
| "loss": 1.3901, |
| "step": 6610 |
| }, |
| { |
| "epoch": 0.17, |
| "grad_norm": 6.004239082336426, |
| "learning_rate": 1.585084745762712e-05, |
| "loss": 1.648, |
| "step": 6620 |
| }, |
| { |
| "epoch": 0.17, |
| "grad_norm": 4.166013240814209, |
| "learning_rate": 1.584406779661017e-05, |
| "loss": 1.4462, |
| "step": 6630 |
| }, |
| { |
| "epoch": 0.17, |
| "grad_norm": 2.389451503753662, |
| "learning_rate": 1.583728813559322e-05, |
| "loss": 1.5163, |
| "step": 6640 |
| }, |
| { |
| "epoch": 0.17, |
| "grad_norm": 2.042405843734741, |
| "learning_rate": 1.5830508474576272e-05, |
| "loss": 1.4931, |
| "step": 6650 |
| }, |
| { |
| "epoch": 0.17, |
| "grad_norm": 2.3263092041015625, |
| "learning_rate": 1.5823728813559322e-05, |
| "loss": 1.4758, |
| "step": 6660 |
| }, |
| { |
| "epoch": 0.17, |
| "grad_norm": 5.392455577850342, |
| "learning_rate": 1.5816949152542372e-05, |
| "loss": 1.6324, |
| "step": 6670 |
| }, |
| { |
| "epoch": 0.17, |
| "grad_norm": 4.789013385772705, |
| "learning_rate": 1.5810169491525425e-05, |
| "loss": 1.5744, |
| "step": 6680 |
| }, |
| { |
| "epoch": 0.17, |
| "grad_norm": 2.284268617630005, |
| "learning_rate": 1.5803389830508475e-05, |
| "loss": 1.6875, |
| "step": 6690 |
| }, |
| { |
| "epoch": 0.17, |
| "grad_norm": 2.3472728729248047, |
| "learning_rate": 1.5796610169491528e-05, |
| "loss": 1.5855, |
| "step": 6700 |
| }, |
| { |
| "epoch": 0.17, |
| "grad_norm": 4.676074981689453, |
| "learning_rate": 1.5789830508474577e-05, |
| "loss": 1.4166, |
| "step": 6710 |
| }, |
| { |
| "epoch": 0.17, |
| "grad_norm": 3.129818916320801, |
| "learning_rate": 1.5783050847457627e-05, |
| "loss": 1.3874, |
| "step": 6720 |
| }, |
| { |
| "epoch": 0.17, |
| "grad_norm": 4.993739604949951, |
| "learning_rate": 1.577627118644068e-05, |
| "loss": 1.4709, |
| "step": 6730 |
| }, |
| { |
| "epoch": 0.17, |
| "grad_norm": 2.868453025817871, |
| "learning_rate": 1.576949152542373e-05, |
| "loss": 1.5804, |
| "step": 6740 |
| }, |
| { |
| "epoch": 0.17, |
| "grad_norm": 1.086580753326416, |
| "learning_rate": 1.576271186440678e-05, |
| "loss": 1.3879, |
| "step": 6750 |
| }, |
| { |
| "epoch": 0.17, |
| "grad_norm": 5.881350994110107, |
| "learning_rate": 1.5755932203389833e-05, |
| "loss": 1.4923, |
| "step": 6760 |
| }, |
| { |
| "epoch": 0.17, |
| "grad_norm": 3.4320054054260254, |
| "learning_rate": 1.5749152542372882e-05, |
| "loss": 1.401, |
| "step": 6770 |
| }, |
| { |
| "epoch": 0.17, |
| "grad_norm": 2.2193899154663086, |
| "learning_rate": 1.5742372881355936e-05, |
| "loss": 1.5497, |
| "step": 6780 |
| }, |
| { |
| "epoch": 0.17, |
| "grad_norm": 3.470163583755493, |
| "learning_rate": 1.5735593220338985e-05, |
| "loss": 1.4824, |
| "step": 6790 |
| }, |
| { |
| "epoch": 0.17, |
| "grad_norm": 3.5179107189178467, |
| "learning_rate": 1.5728813559322035e-05, |
| "loss": 1.5401, |
| "step": 6800 |
| }, |
| { |
| "epoch": 0.17, |
| "grad_norm": 4.4173359870910645, |
| "learning_rate": 1.5722033898305088e-05, |
| "loss": 1.3075, |
| "step": 6810 |
| }, |
| { |
| "epoch": 0.17, |
| "grad_norm": 4.17536735534668, |
| "learning_rate": 1.5715254237288138e-05, |
| "loss": 1.5135, |
| "step": 6820 |
| }, |
| { |
| "epoch": 0.17, |
| "grad_norm": 3.278881311416626, |
| "learning_rate": 1.5708474576271187e-05, |
| "loss": 1.5745, |
| "step": 6830 |
| }, |
| { |
| "epoch": 0.17, |
| "grad_norm": 2.718890905380249, |
| "learning_rate": 1.5701694915254237e-05, |
| "loss": 1.5397, |
| "step": 6840 |
| }, |
| { |
| "epoch": 0.17, |
| "grad_norm": 1.8117082118988037, |
| "learning_rate": 1.5694915254237287e-05, |
| "loss": 1.5904, |
| "step": 6850 |
| }, |
| { |
| "epoch": 0.17, |
| "grad_norm": 1.4601502418518066, |
| "learning_rate": 1.568813559322034e-05, |
| "loss": 1.5485, |
| "step": 6860 |
| }, |
| { |
| "epoch": 0.17, |
| "grad_norm": 2.0269432067871094, |
| "learning_rate": 1.568135593220339e-05, |
| "loss": 1.4487, |
| "step": 6870 |
| }, |
| { |
| "epoch": 0.17, |
| "grad_norm": 3.047973394393921, |
| "learning_rate": 1.5674576271186443e-05, |
| "loss": 1.2999, |
| "step": 6880 |
| }, |
| { |
| "epoch": 0.17, |
| "grad_norm": 2.7541468143463135, |
| "learning_rate": 1.5667796610169492e-05, |
| "loss": 1.4651, |
| "step": 6890 |
| }, |
| { |
| "epoch": 0.17, |
| "grad_norm": 5.5456767082214355, |
| "learning_rate": 1.5661016949152542e-05, |
| "loss": 1.4433, |
| "step": 6900 |
| }, |
| { |
| "epoch": 0.17, |
| "grad_norm": 3.4916276931762695, |
| "learning_rate": 1.5654237288135595e-05, |
| "loss": 1.5764, |
| "step": 6910 |
| }, |
| { |
| "epoch": 0.17, |
| "grad_norm": 2.6278750896453857, |
| "learning_rate": 1.5647457627118645e-05, |
| "loss": 1.3208, |
| "step": 6920 |
| }, |
| { |
| "epoch": 0.17, |
| "grad_norm": 2.9469902515411377, |
| "learning_rate": 1.5640677966101695e-05, |
| "loss": 1.6433, |
| "step": 6930 |
| }, |
| { |
| "epoch": 0.17, |
| "grad_norm": 5.235908031463623, |
| "learning_rate": 1.5633898305084748e-05, |
| "loss": 1.2608, |
| "step": 6940 |
| }, |
| { |
| "epoch": 0.17, |
| "grad_norm": 1.5849263668060303, |
| "learning_rate": 1.5627118644067798e-05, |
| "loss": 1.5088, |
| "step": 6950 |
| }, |
| { |
| "epoch": 0.17, |
| "grad_norm": 3.6014277935028076, |
| "learning_rate": 1.562033898305085e-05, |
| "loss": 1.3976, |
| "step": 6960 |
| }, |
| { |
| "epoch": 0.17, |
| "grad_norm": 3.220320701599121, |
| "learning_rate": 1.56135593220339e-05, |
| "loss": 1.5011, |
| "step": 6970 |
| }, |
| { |
| "epoch": 0.17, |
| "grad_norm": 3.434382915496826, |
| "learning_rate": 1.560677966101695e-05, |
| "loss": 1.4733, |
| "step": 6980 |
| }, |
| { |
| "epoch": 0.17, |
| "grad_norm": 4.612408638000488, |
| "learning_rate": 1.5600000000000003e-05, |
| "loss": 1.4618, |
| "step": 6990 |
| }, |
| { |
| "epoch": 0.17, |
| "grad_norm": 2.9301650524139404, |
| "learning_rate": 1.5593220338983053e-05, |
| "loss": 1.4165, |
| "step": 7000 |
| }, |
| { |
| "epoch": 0.17, |
| "eval_loss": 1.469406247138977, |
| "eval_runtime": 66.2443, |
| "eval_samples_per_second": 15.096, |
| "eval_steps_per_second": 15.096, |
| "step": 7000 |
| }, |
| { |
| "epoch": 0.18, |
| "grad_norm": 5.865874767303467, |
| "learning_rate": 1.5586440677966103e-05, |
| "loss": 1.3156, |
| "step": 7010 |
| }, |
| { |
| "epoch": 0.18, |
| "grad_norm": 5.411038875579834, |
| "learning_rate": 1.5579661016949156e-05, |
| "loss": 1.4303, |
| "step": 7020 |
| }, |
| { |
| "epoch": 0.18, |
| "grad_norm": 2.869673728942871, |
| "learning_rate": 1.5572881355932205e-05, |
| "loss": 1.5055, |
| "step": 7030 |
| }, |
| { |
| "epoch": 0.18, |
| "grad_norm": 1.0629090070724487, |
| "learning_rate": 1.5566101694915255e-05, |
| "loss": 1.6094, |
| "step": 7040 |
| }, |
| { |
| "epoch": 0.18, |
| "grad_norm": 2.271601676940918, |
| "learning_rate": 1.5559322033898305e-05, |
| "loss": 1.4648, |
| "step": 7050 |
| }, |
| { |
| "epoch": 0.18, |
| "grad_norm": 1.5404294729232788, |
| "learning_rate": 1.5552542372881358e-05, |
| "loss": 1.3793, |
| "step": 7060 |
| }, |
| { |
| "epoch": 0.18, |
| "grad_norm": 3.9599192142486572, |
| "learning_rate": 1.5545762711864408e-05, |
| "loss": 1.3563, |
| "step": 7070 |
| }, |
| { |
| "epoch": 0.18, |
| "grad_norm": 6.35668420791626, |
| "learning_rate": 1.5538983050847457e-05, |
| "loss": 1.4637, |
| "step": 7080 |
| }, |
| { |
| "epoch": 0.18, |
| "grad_norm": 7.315426349639893, |
| "learning_rate": 1.553220338983051e-05, |
| "loss": 1.3284, |
| "step": 7090 |
| }, |
| { |
| "epoch": 0.18, |
| "grad_norm": 1.8330402374267578, |
| "learning_rate": 1.552542372881356e-05, |
| "loss": 1.3762, |
| "step": 7100 |
| }, |
| { |
| "epoch": 0.18, |
| "grad_norm": 3.072326183319092, |
| "learning_rate": 1.551864406779661e-05, |
| "loss": 1.4194, |
| "step": 7110 |
| }, |
| { |
| "epoch": 0.18, |
| "grad_norm": 1.952779769897461, |
| "learning_rate": 1.5511864406779663e-05, |
| "loss": 1.4886, |
| "step": 7120 |
| }, |
| { |
| "epoch": 0.18, |
| "grad_norm": 4.332074165344238, |
| "learning_rate": 1.5505084745762713e-05, |
| "loss": 1.4912, |
| "step": 7130 |
| }, |
| { |
| "epoch": 0.18, |
| "grad_norm": 3.962344169616699, |
| "learning_rate": 1.5498305084745762e-05, |
| "loss": 1.5502, |
| "step": 7140 |
| }, |
| { |
| "epoch": 0.18, |
| "grad_norm": 7.930359363555908, |
| "learning_rate": 1.5491525423728815e-05, |
| "loss": 1.471, |
| "step": 7150 |
| }, |
| { |
| "epoch": 0.18, |
| "grad_norm": 4.925573825836182, |
| "learning_rate": 1.5484745762711865e-05, |
| "loss": 1.3155, |
| "step": 7160 |
| }, |
| { |
| "epoch": 0.18, |
| "grad_norm": 3.2943711280822754, |
| "learning_rate": 1.5477966101694918e-05, |
| "loss": 1.3094, |
| "step": 7170 |
| }, |
| { |
| "epoch": 0.18, |
| "grad_norm": 1.9433196783065796, |
| "learning_rate": 1.5471186440677968e-05, |
| "loss": 1.3453, |
| "step": 7180 |
| }, |
| { |
| "epoch": 0.18, |
| "grad_norm": 1.8384184837341309, |
| "learning_rate": 1.5464406779661018e-05, |
| "loss": 1.3788, |
| "step": 7190 |
| }, |
| { |
| "epoch": 0.18, |
| "grad_norm": 2.92781138420105, |
| "learning_rate": 1.545762711864407e-05, |
| "loss": 1.5704, |
| "step": 7200 |
| }, |
| { |
| "epoch": 0.18, |
| "grad_norm": 1.0172346830368042, |
| "learning_rate": 1.545084745762712e-05, |
| "loss": 1.5442, |
| "step": 7210 |
| }, |
| { |
| "epoch": 0.18, |
| "grad_norm": 1.0915580987930298, |
| "learning_rate": 1.544406779661017e-05, |
| "loss": 1.5226, |
| "step": 7220 |
| }, |
| { |
| "epoch": 0.18, |
| "grad_norm": 9.170607566833496, |
| "learning_rate": 1.5437288135593223e-05, |
| "loss": 1.5066, |
| "step": 7230 |
| }, |
| { |
| "epoch": 0.18, |
| "grad_norm": 4.84716796875, |
| "learning_rate": 1.5430508474576273e-05, |
| "loss": 1.4508, |
| "step": 7240 |
| }, |
| { |
| "epoch": 0.18, |
| "grad_norm": 5.949992656707764, |
| "learning_rate": 1.5423728813559326e-05, |
| "loss": 1.3393, |
| "step": 7250 |
| }, |
| { |
| "epoch": 0.18, |
| "grad_norm": 5.912881851196289, |
| "learning_rate": 1.5416949152542372e-05, |
| "loss": 1.5918, |
| "step": 7260 |
| }, |
| { |
| "epoch": 0.18, |
| "grad_norm": 1.8128445148468018, |
| "learning_rate": 1.5410169491525425e-05, |
| "loss": 1.5794, |
| "step": 7270 |
| }, |
| { |
| "epoch": 0.18, |
| "grad_norm": 3.9023091793060303, |
| "learning_rate": 1.5403389830508475e-05, |
| "loss": 1.345, |
| "step": 7280 |
| }, |
| { |
| "epoch": 0.18, |
| "grad_norm": 2.263958692550659, |
| "learning_rate": 1.5396610169491525e-05, |
| "loss": 1.3651, |
| "step": 7290 |
| }, |
| { |
| "epoch": 0.18, |
| "grad_norm": 8.103110313415527, |
| "learning_rate": 1.5389830508474578e-05, |
| "loss": 1.4784, |
| "step": 7300 |
| }, |
| { |
| "epoch": 0.18, |
| "grad_norm": 4.5859456062316895, |
| "learning_rate": 1.5383050847457628e-05, |
| "loss": 1.4087, |
| "step": 7310 |
| }, |
| { |
| "epoch": 0.18, |
| "grad_norm": 5.6322503089904785, |
| "learning_rate": 1.5376271186440677e-05, |
| "loss": 1.518, |
| "step": 7320 |
| }, |
| { |
| "epoch": 0.18, |
| "grad_norm": 2.4485771656036377, |
| "learning_rate": 1.536949152542373e-05, |
| "loss": 1.3471, |
| "step": 7330 |
| }, |
| { |
| "epoch": 0.18, |
| "grad_norm": 2.0619120597839355, |
| "learning_rate": 1.536271186440678e-05, |
| "loss": 1.2563, |
| "step": 7340 |
| }, |
| { |
| "epoch": 0.18, |
| "grad_norm": 4.037415027618408, |
| "learning_rate": 1.5355932203389833e-05, |
| "loss": 1.4414, |
| "step": 7350 |
| }, |
| { |
| "epoch": 0.18, |
| "grad_norm": 2.4679503440856934, |
| "learning_rate": 1.5349152542372883e-05, |
| "loss": 1.4021, |
| "step": 7360 |
| }, |
| { |
| "epoch": 0.18, |
| "grad_norm": 1.4774163961410522, |
| "learning_rate": 1.5342372881355933e-05, |
| "loss": 1.4331, |
| "step": 7370 |
| }, |
| { |
| "epoch": 0.18, |
| "grad_norm": 4.21893310546875, |
| "learning_rate": 1.5335593220338986e-05, |
| "loss": 1.4847, |
| "step": 7380 |
| }, |
| { |
| "epoch": 0.18, |
| "grad_norm": 4.261248588562012, |
| "learning_rate": 1.5328813559322035e-05, |
| "loss": 1.4098, |
| "step": 7390 |
| }, |
| { |
| "epoch": 0.18, |
| "grad_norm": 2.419386625289917, |
| "learning_rate": 1.5322033898305085e-05, |
| "loss": 1.4635, |
| "step": 7400 |
| }, |
| { |
| "epoch": 0.19, |
| "grad_norm": 3.3119351863861084, |
| "learning_rate": 1.5315254237288138e-05, |
| "loss": 1.3862, |
| "step": 7410 |
| }, |
| { |
| "epoch": 0.19, |
| "grad_norm": 4.350612163543701, |
| "learning_rate": 1.5308474576271188e-05, |
| "loss": 1.5094, |
| "step": 7420 |
| }, |
| { |
| "epoch": 0.19, |
| "grad_norm": 3.089937925338745, |
| "learning_rate": 1.530169491525424e-05, |
| "loss": 1.6249, |
| "step": 7430 |
| }, |
| { |
| "epoch": 0.19, |
| "grad_norm": 6.980156898498535, |
| "learning_rate": 1.529491525423729e-05, |
| "loss": 1.6102, |
| "step": 7440 |
| }, |
| { |
| "epoch": 0.19, |
| "grad_norm": 3.881622552871704, |
| "learning_rate": 1.528813559322034e-05, |
| "loss": 1.5149, |
| "step": 7450 |
| }, |
| { |
| "epoch": 0.19, |
| "grad_norm": 2.8510727882385254, |
| "learning_rate": 1.528135593220339e-05, |
| "loss": 1.4542, |
| "step": 7460 |
| }, |
| { |
| "epoch": 0.19, |
| "grad_norm": 4.4826130867004395, |
| "learning_rate": 1.527457627118644e-05, |
| "loss": 1.5085, |
| "step": 7470 |
| }, |
| { |
| "epoch": 0.19, |
| "grad_norm": 4.991846084594727, |
| "learning_rate": 1.5267796610169493e-05, |
| "loss": 1.4761, |
| "step": 7480 |
| }, |
| { |
| "epoch": 0.19, |
| "grad_norm": 4.136972904205322, |
| "learning_rate": 1.5261016949152543e-05, |
| "loss": 1.4851, |
| "step": 7490 |
| }, |
| { |
| "epoch": 0.19, |
| "grad_norm": 3.58878231048584, |
| "learning_rate": 1.5254237288135594e-05, |
| "loss": 1.4117, |
| "step": 7500 |
| }, |
| { |
| "epoch": 0.19, |
| "eval_loss": 1.437897801399231, |
| "eval_runtime": 66.1821, |
| "eval_samples_per_second": 15.11, |
| "eval_steps_per_second": 15.11, |
| "step": 7500 |
| }, |
| { |
| "epoch": 0.19, |
| "grad_norm": 2.952547311782837, |
| "learning_rate": 1.5247457627118645e-05, |
| "loss": 1.4895, |
| "step": 7510 |
| }, |
| { |
| "epoch": 0.19, |
| "grad_norm": 1.617139458656311, |
| "learning_rate": 1.5240677966101695e-05, |
| "loss": 1.3878, |
| "step": 7520 |
| }, |
| { |
| "epoch": 0.19, |
| "grad_norm": 1.6708911657333374, |
| "learning_rate": 1.5233898305084747e-05, |
| "loss": 1.5612, |
| "step": 7530 |
| }, |
| { |
| "epoch": 0.19, |
| "grad_norm": 1.824523687362671, |
| "learning_rate": 1.5227118644067798e-05, |
| "loss": 1.407, |
| "step": 7540 |
| }, |
| { |
| "epoch": 0.19, |
| "grad_norm": 3.4340052604675293, |
| "learning_rate": 1.522033898305085e-05, |
| "loss": 1.2622, |
| "step": 7550 |
| }, |
| { |
| "epoch": 0.19, |
| "grad_norm": 3.8234424591064453, |
| "learning_rate": 1.5213559322033899e-05, |
| "loss": 1.5187, |
| "step": 7560 |
| }, |
| { |
| "epoch": 0.19, |
| "grad_norm": 2.398066520690918, |
| "learning_rate": 1.520677966101695e-05, |
| "loss": 1.4119, |
| "step": 7570 |
| }, |
| { |
| "epoch": 0.19, |
| "grad_norm": 4.461039066314697, |
| "learning_rate": 1.5200000000000002e-05, |
| "loss": 1.6286, |
| "step": 7580 |
| }, |
| { |
| "epoch": 0.19, |
| "grad_norm": 8.626133918762207, |
| "learning_rate": 1.5193220338983052e-05, |
| "loss": 1.4452, |
| "step": 7590 |
| }, |
| { |
| "epoch": 0.19, |
| "grad_norm": 4.230071067810059, |
| "learning_rate": 1.5186440677966103e-05, |
| "loss": 1.3512, |
| "step": 7600 |
| }, |
| { |
| "epoch": 0.19, |
| "grad_norm": 4.9356279373168945, |
| "learning_rate": 1.5179661016949154e-05, |
| "loss": 1.5673, |
| "step": 7610 |
| }, |
| { |
| "epoch": 0.19, |
| "grad_norm": 2.832441806793213, |
| "learning_rate": 1.5172881355932206e-05, |
| "loss": 1.3301, |
| "step": 7620 |
| }, |
| { |
| "epoch": 0.19, |
| "grad_norm": 3.501631259918213, |
| "learning_rate": 1.5166101694915255e-05, |
| "loss": 1.4446, |
| "step": 7630 |
| }, |
| { |
| "epoch": 0.19, |
| "grad_norm": 1.15791916847229, |
| "learning_rate": 1.5159322033898307e-05, |
| "loss": 1.4241, |
| "step": 7640 |
| }, |
| { |
| "epoch": 0.19, |
| "grad_norm": 2.7517549991607666, |
| "learning_rate": 1.5152542372881358e-05, |
| "loss": 1.5962, |
| "step": 7650 |
| }, |
| { |
| "epoch": 0.19, |
| "grad_norm": 2.9269521236419678, |
| "learning_rate": 1.514576271186441e-05, |
| "loss": 1.3783, |
| "step": 7660 |
| }, |
| { |
| "epoch": 0.19, |
| "grad_norm": 3.538501501083374, |
| "learning_rate": 1.5138983050847458e-05, |
| "loss": 1.6107, |
| "step": 7670 |
| }, |
| { |
| "epoch": 0.19, |
| "grad_norm": 9.009359359741211, |
| "learning_rate": 1.5132203389830509e-05, |
| "loss": 1.5968, |
| "step": 7680 |
| }, |
| { |
| "epoch": 0.19, |
| "grad_norm": 2.63154673576355, |
| "learning_rate": 1.512542372881356e-05, |
| "loss": 1.5102, |
| "step": 7690 |
| }, |
| { |
| "epoch": 0.19, |
| "grad_norm": 3.5580034255981445, |
| "learning_rate": 1.511864406779661e-05, |
| "loss": 1.2809, |
| "step": 7700 |
| }, |
| { |
| "epoch": 0.19, |
| "grad_norm": 5.356471061706543, |
| "learning_rate": 1.5111864406779662e-05, |
| "loss": 1.214, |
| "step": 7710 |
| }, |
| { |
| "epoch": 0.19, |
| "grad_norm": 4.988667964935303, |
| "learning_rate": 1.5105084745762713e-05, |
| "loss": 1.4744, |
| "step": 7720 |
| }, |
| { |
| "epoch": 0.19, |
| "grad_norm": 3.1577963829040527, |
| "learning_rate": 1.5098305084745763e-05, |
| "loss": 1.5062, |
| "step": 7730 |
| }, |
| { |
| "epoch": 0.19, |
| "grad_norm": 4.623036861419678, |
| "learning_rate": 1.5091525423728814e-05, |
| "loss": 1.5103, |
| "step": 7740 |
| }, |
| { |
| "epoch": 0.19, |
| "grad_norm": 2.3117690086364746, |
| "learning_rate": 1.5084745762711865e-05, |
| "loss": 1.4344, |
| "step": 7750 |
| }, |
| { |
| "epoch": 0.19, |
| "grad_norm": 2.289649248123169, |
| "learning_rate": 1.5077966101694917e-05, |
| "loss": 1.5636, |
| "step": 7760 |
| }, |
| { |
| "epoch": 0.19, |
| "grad_norm": 1.5120543241500854, |
| "learning_rate": 1.5071186440677967e-05, |
| "loss": 1.4689, |
| "step": 7770 |
| }, |
| { |
| "epoch": 0.19, |
| "grad_norm": 2.479252576828003, |
| "learning_rate": 1.5064406779661018e-05, |
| "loss": 1.3047, |
| "step": 7780 |
| }, |
| { |
| "epoch": 0.19, |
| "grad_norm": 4.039693832397461, |
| "learning_rate": 1.505762711864407e-05, |
| "loss": 1.3197, |
| "step": 7790 |
| }, |
| { |
| "epoch": 0.2, |
| "grad_norm": 3.129855155944824, |
| "learning_rate": 1.505084745762712e-05, |
| "loss": 1.4407, |
| "step": 7800 |
| }, |
| { |
| "epoch": 0.2, |
| "grad_norm": 4.973848819732666, |
| "learning_rate": 1.504406779661017e-05, |
| "loss": 1.3066, |
| "step": 7810 |
| }, |
| { |
| "epoch": 0.2, |
| "grad_norm": 1.673112392425537, |
| "learning_rate": 1.5037288135593222e-05, |
| "loss": 1.4593, |
| "step": 7820 |
| }, |
| { |
| "epoch": 0.2, |
| "grad_norm": 2.6259243488311768, |
| "learning_rate": 1.5030508474576273e-05, |
| "loss": 1.4993, |
| "step": 7830 |
| }, |
| { |
| "epoch": 0.2, |
| "grad_norm": 1.8319427967071533, |
| "learning_rate": 1.5023728813559325e-05, |
| "loss": 1.444, |
| "step": 7840 |
| }, |
| { |
| "epoch": 0.2, |
| "grad_norm": 3.4944241046905518, |
| "learning_rate": 1.5016949152542374e-05, |
| "loss": 1.2785, |
| "step": 7850 |
| }, |
| { |
| "epoch": 0.2, |
| "grad_norm": 2.0709774494171143, |
| "learning_rate": 1.5010169491525426e-05, |
| "loss": 1.4925, |
| "step": 7860 |
| }, |
| { |
| "epoch": 0.2, |
| "grad_norm": 1.6483750343322754, |
| "learning_rate": 1.5003389830508477e-05, |
| "loss": 1.5283, |
| "step": 7870 |
| }, |
| { |
| "epoch": 0.2, |
| "grad_norm": 4.786270618438721, |
| "learning_rate": 1.4996610169491525e-05, |
| "loss": 1.4946, |
| "step": 7880 |
| }, |
| { |
| "epoch": 0.2, |
| "grad_norm": 1.536661148071289, |
| "learning_rate": 1.4989830508474577e-05, |
| "loss": 1.4044, |
| "step": 7890 |
| }, |
| { |
| "epoch": 0.2, |
| "grad_norm": 2.717747926712036, |
| "learning_rate": 1.4983050847457628e-05, |
| "loss": 1.4569, |
| "step": 7900 |
| }, |
| { |
| "epoch": 0.2, |
| "grad_norm": 2.2604639530181885, |
| "learning_rate": 1.4976271186440678e-05, |
| "loss": 1.5154, |
| "step": 7910 |
| }, |
| { |
| "epoch": 0.2, |
| "grad_norm": 3.6678876876831055, |
| "learning_rate": 1.4969491525423729e-05, |
| "loss": 1.1938, |
| "step": 7920 |
| }, |
| { |
| "epoch": 0.2, |
| "grad_norm": 3.3134288787841797, |
| "learning_rate": 1.496271186440678e-05, |
| "loss": 1.4676, |
| "step": 7930 |
| }, |
| { |
| "epoch": 0.2, |
| "grad_norm": 1.8791242837905884, |
| "learning_rate": 1.4955932203389832e-05, |
| "loss": 1.5698, |
| "step": 7940 |
| }, |
| { |
| "epoch": 0.2, |
| "grad_norm": 3.279029369354248, |
| "learning_rate": 1.4949152542372882e-05, |
| "loss": 1.4374, |
| "step": 7950 |
| }, |
| { |
| "epoch": 0.2, |
| "grad_norm": 6.518031597137451, |
| "learning_rate": 1.4942372881355933e-05, |
| "loss": 1.3021, |
| "step": 7960 |
| }, |
| { |
| "epoch": 0.2, |
| "grad_norm": 5.85239315032959, |
| "learning_rate": 1.4935593220338984e-05, |
| "loss": 1.479, |
| "step": 7970 |
| }, |
| { |
| "epoch": 0.2, |
| "grad_norm": 5.397909641265869, |
| "learning_rate": 1.4928813559322036e-05, |
| "loss": 1.4118, |
| "step": 7980 |
| }, |
| { |
| "epoch": 0.2, |
| "grad_norm": 3.4075229167938232, |
| "learning_rate": 1.4922033898305086e-05, |
| "loss": 1.4959, |
| "step": 7990 |
| }, |
| { |
| "epoch": 0.2, |
| "grad_norm": 6.57758092880249, |
| "learning_rate": 1.4915254237288137e-05, |
| "loss": 1.6252, |
| "step": 8000 |
| }, |
| { |
| "epoch": 0.2, |
| "eval_loss": 1.4186667203903198, |
| "eval_runtime": 66.1276, |
| "eval_samples_per_second": 15.122, |
| "eval_steps_per_second": 15.122, |
| "step": 8000 |
| }, |
| { |
| "epoch": 0.2, |
| "grad_norm": 1.395254373550415, |
| "learning_rate": 1.4908474576271188e-05, |
| "loss": 1.4234, |
| "step": 8010 |
| }, |
| { |
| "epoch": 0.2, |
| "grad_norm": 1.7249040603637695, |
| "learning_rate": 1.490169491525424e-05, |
| "loss": 1.3279, |
| "step": 8020 |
| }, |
| { |
| "epoch": 0.2, |
| "grad_norm": 6.21826171875, |
| "learning_rate": 1.489491525423729e-05, |
| "loss": 1.3013, |
| "step": 8030 |
| }, |
| { |
| "epoch": 0.2, |
| "grad_norm": 2.4089813232421875, |
| "learning_rate": 1.4888135593220341e-05, |
| "loss": 1.4544, |
| "step": 8040 |
| }, |
| { |
| "epoch": 0.2, |
| "grad_norm": 1.597097635269165, |
| "learning_rate": 1.4881355932203392e-05, |
| "loss": 1.4793, |
| "step": 8050 |
| }, |
| { |
| "epoch": 0.2, |
| "grad_norm": 1.9910588264465332, |
| "learning_rate": 1.4874576271186442e-05, |
| "loss": 1.4182, |
| "step": 8060 |
| }, |
| { |
| "epoch": 0.2, |
| "grad_norm": 4.10093355178833, |
| "learning_rate": 1.4867796610169493e-05, |
| "loss": 1.1596, |
| "step": 8070 |
| }, |
| { |
| "epoch": 0.2, |
| "grad_norm": 3.8215675354003906, |
| "learning_rate": 1.4861016949152545e-05, |
| "loss": 1.5739, |
| "step": 8080 |
| }, |
| { |
| "epoch": 0.2, |
| "grad_norm": 3.1352415084838867, |
| "learning_rate": 1.4854237288135593e-05, |
| "loss": 1.3123, |
| "step": 8090 |
| }, |
| { |
| "epoch": 0.2, |
| "grad_norm": 10.560978889465332, |
| "learning_rate": 1.4847457627118644e-05, |
| "loss": 1.5507, |
| "step": 8100 |
| }, |
| { |
| "epoch": 0.2, |
| "grad_norm": 1.808532953262329, |
| "learning_rate": 1.4840677966101696e-05, |
| "loss": 1.5942, |
| "step": 8110 |
| }, |
| { |
| "epoch": 0.2, |
| "grad_norm": 4.674659252166748, |
| "learning_rate": 1.4833898305084747e-05, |
| "loss": 1.5669, |
| "step": 8120 |
| }, |
| { |
| "epoch": 0.2, |
| "grad_norm": 3.7762465476989746, |
| "learning_rate": 1.4827118644067797e-05, |
| "loss": 1.4565, |
| "step": 8130 |
| }, |
| { |
| "epoch": 0.2, |
| "grad_norm": 8.079657554626465, |
| "learning_rate": 1.4820338983050848e-05, |
| "loss": 1.4097, |
| "step": 8140 |
| }, |
| { |
| "epoch": 0.2, |
| "grad_norm": 1.739395260810852, |
| "learning_rate": 1.48135593220339e-05, |
| "loss": 1.5126, |
| "step": 8150 |
| }, |
| { |
| "epoch": 0.2, |
| "grad_norm": 12.929269790649414, |
| "learning_rate": 1.4806779661016951e-05, |
| "loss": 1.4822, |
| "step": 8160 |
| }, |
| { |
| "epoch": 0.2, |
| "grad_norm": 2.2452526092529297, |
| "learning_rate": 1.48e-05, |
| "loss": 1.3672, |
| "step": 8170 |
| }, |
| { |
| "epoch": 0.2, |
| "grad_norm": 5.2609333992004395, |
| "learning_rate": 1.4793220338983052e-05, |
| "loss": 1.44, |
| "step": 8180 |
| }, |
| { |
| "epoch": 0.2, |
| "grad_norm": 3.3144960403442383, |
| "learning_rate": 1.4786440677966103e-05, |
| "loss": 1.4122, |
| "step": 8190 |
| }, |
| { |
| "epoch": 0.2, |
| "grad_norm": 3.837510824203491, |
| "learning_rate": 1.4779661016949153e-05, |
| "loss": 1.5105, |
| "step": 8200 |
| }, |
| { |
| "epoch": 0.21, |
| "grad_norm": 2.3328332901000977, |
| "learning_rate": 1.4772881355932205e-05, |
| "loss": 1.4721, |
| "step": 8210 |
| }, |
| { |
| "epoch": 0.21, |
| "grad_norm": 3.8686797618865967, |
| "learning_rate": 1.4766101694915256e-05, |
| "loss": 1.3236, |
| "step": 8220 |
| }, |
| { |
| "epoch": 0.21, |
| "grad_norm": 3.545306444168091, |
| "learning_rate": 1.4759322033898307e-05, |
| "loss": 1.3628, |
| "step": 8230 |
| }, |
| { |
| "epoch": 0.21, |
| "grad_norm": 5.3495965003967285, |
| "learning_rate": 1.4752542372881357e-05, |
| "loss": 1.4093, |
| "step": 8240 |
| }, |
| { |
| "epoch": 0.21, |
| "grad_norm": 2.040356159210205, |
| "learning_rate": 1.4745762711864408e-05, |
| "loss": 1.4116, |
| "step": 8250 |
| }, |
| { |
| "epoch": 0.21, |
| "grad_norm": 3.531248092651367, |
| "learning_rate": 1.473898305084746e-05, |
| "loss": 1.4185, |
| "step": 8260 |
| }, |
| { |
| "epoch": 0.21, |
| "grad_norm": 1.453710675239563, |
| "learning_rate": 1.4732203389830511e-05, |
| "loss": 1.3446, |
| "step": 8270 |
| }, |
| { |
| "epoch": 0.21, |
| "grad_norm": 5.032666206359863, |
| "learning_rate": 1.4725423728813561e-05, |
| "loss": 1.521, |
| "step": 8280 |
| }, |
| { |
| "epoch": 0.21, |
| "grad_norm": 4.052346229553223, |
| "learning_rate": 1.4718644067796612e-05, |
| "loss": 1.5021, |
| "step": 8290 |
| }, |
| { |
| "epoch": 0.21, |
| "grad_norm": 3.378077268600464, |
| "learning_rate": 1.4711864406779662e-05, |
| "loss": 1.3995, |
| "step": 8300 |
| }, |
| { |
| "epoch": 0.21, |
| "grad_norm": 3.423922061920166, |
| "learning_rate": 1.4705084745762712e-05, |
| "loss": 1.4133, |
| "step": 8310 |
| }, |
| { |
| "epoch": 0.21, |
| "grad_norm": 7.027323246002197, |
| "learning_rate": 1.4698305084745763e-05, |
| "loss": 1.2491, |
| "step": 8320 |
| }, |
| { |
| "epoch": 0.21, |
| "grad_norm": 3.2872166633605957, |
| "learning_rate": 1.4691525423728815e-05, |
| "loss": 1.1965, |
| "step": 8330 |
| }, |
| { |
| "epoch": 0.21, |
| "grad_norm": 3.879119396209717, |
| "learning_rate": 1.4684745762711864e-05, |
| "loss": 1.3579, |
| "step": 8340 |
| }, |
| { |
| "epoch": 0.21, |
| "grad_norm": 4.697524070739746, |
| "learning_rate": 1.4677966101694916e-05, |
| "loss": 1.4266, |
| "step": 8350 |
| }, |
| { |
| "epoch": 0.21, |
| "grad_norm": 5.742332458496094, |
| "learning_rate": 1.4671186440677967e-05, |
| "loss": 1.4115, |
| "step": 8360 |
| }, |
| { |
| "epoch": 0.21, |
| "grad_norm": 5.360010623931885, |
| "learning_rate": 1.4664406779661018e-05, |
| "loss": 1.443, |
| "step": 8370 |
| }, |
| { |
| "epoch": 0.21, |
| "grad_norm": 5.725327968597412, |
| "learning_rate": 1.4657627118644068e-05, |
| "loss": 1.3006, |
| "step": 8380 |
| }, |
| { |
| "epoch": 0.21, |
| "grad_norm": 3.76000714302063, |
| "learning_rate": 1.465084745762712e-05, |
| "loss": 1.317, |
| "step": 8390 |
| }, |
| { |
| "epoch": 0.21, |
| "grad_norm": 2.6450018882751465, |
| "learning_rate": 1.4644067796610171e-05, |
| "loss": 1.5148, |
| "step": 8400 |
| }, |
| { |
| "epoch": 0.21, |
| "grad_norm": 4.761094093322754, |
| "learning_rate": 1.4637288135593222e-05, |
| "loss": 1.3873, |
| "step": 8410 |
| }, |
| { |
| "epoch": 0.21, |
| "grad_norm": 3.0644989013671875, |
| "learning_rate": 1.4630508474576272e-05, |
| "loss": 1.4294, |
| "step": 8420 |
| }, |
| { |
| "epoch": 0.21, |
| "grad_norm": 2.3098878860473633, |
| "learning_rate": 1.4623728813559323e-05, |
| "loss": 1.4443, |
| "step": 8430 |
| }, |
| { |
| "epoch": 0.21, |
| "grad_norm": 4.367359638214111, |
| "learning_rate": 1.4616949152542375e-05, |
| "loss": 1.5441, |
| "step": 8440 |
| }, |
| { |
| "epoch": 0.21, |
| "grad_norm": 9.395272254943848, |
| "learning_rate": 1.4610169491525426e-05, |
| "loss": 1.4793, |
| "step": 8450 |
| }, |
| { |
| "epoch": 0.21, |
| "grad_norm": 2.6706058979034424, |
| "learning_rate": 1.4603389830508476e-05, |
| "loss": 1.3537, |
| "step": 8460 |
| }, |
| { |
| "epoch": 0.21, |
| "grad_norm": 2.5797505378723145, |
| "learning_rate": 1.4596610169491527e-05, |
| "loss": 1.3109, |
| "step": 8470 |
| }, |
| { |
| "epoch": 0.21, |
| "grad_norm": 2.9615049362182617, |
| "learning_rate": 1.4589830508474579e-05, |
| "loss": 1.4649, |
| "step": 8480 |
| }, |
| { |
| "epoch": 0.21, |
| "grad_norm": 2.650679349899292, |
| "learning_rate": 1.458305084745763e-05, |
| "loss": 1.4196, |
| "step": 8490 |
| }, |
| { |
| "epoch": 0.21, |
| "grad_norm": 1.7347412109375, |
| "learning_rate": 1.4576271186440678e-05, |
| "loss": 1.4922, |
| "step": 8500 |
| }, |
| { |
| "epoch": 0.21, |
| "eval_loss": 1.4525456428527832, |
| "eval_runtime": 66.1301, |
| "eval_samples_per_second": 15.122, |
| "eval_steps_per_second": 15.122, |
| "step": 8500 |
| }, |
| { |
| "epoch": 0.21, |
| "grad_norm": 4.196395397186279, |
| "learning_rate": 1.456949152542373e-05, |
| "loss": 1.4091, |
| "step": 8510 |
| }, |
| { |
| "epoch": 0.21, |
| "grad_norm": 1.4905701875686646, |
| "learning_rate": 1.456271186440678e-05, |
| "loss": 1.3764, |
| "step": 8520 |
| }, |
| { |
| "epoch": 0.21, |
| "grad_norm": 6.2871174812316895, |
| "learning_rate": 1.455593220338983e-05, |
| "loss": 1.5042, |
| "step": 8530 |
| }, |
| { |
| "epoch": 0.21, |
| "grad_norm": 1.7789188623428345, |
| "learning_rate": 1.4549152542372882e-05, |
| "loss": 1.4442, |
| "step": 8540 |
| }, |
| { |
| "epoch": 0.21, |
| "grad_norm": 2.0368621349334717, |
| "learning_rate": 1.4542372881355933e-05, |
| "loss": 1.3332, |
| "step": 8550 |
| }, |
| { |
| "epoch": 0.21, |
| "grad_norm": 2.3782999515533447, |
| "learning_rate": 1.4535593220338983e-05, |
| "loss": 1.4489, |
| "step": 8560 |
| }, |
| { |
| "epoch": 0.21, |
| "grad_norm": 1.9039316177368164, |
| "learning_rate": 1.4528813559322035e-05, |
| "loss": 1.4103, |
| "step": 8570 |
| }, |
| { |
| "epoch": 0.21, |
| "grad_norm": 8.607722282409668, |
| "learning_rate": 1.4522033898305086e-05, |
| "loss": 1.494, |
| "step": 8580 |
| }, |
| { |
| "epoch": 0.21, |
| "grad_norm": 4.126497268676758, |
| "learning_rate": 1.4515254237288137e-05, |
| "loss": 1.5005, |
| "step": 8590 |
| }, |
| { |
| "epoch": 0.21, |
| "grad_norm": 2.387799024581909, |
| "learning_rate": 1.4508474576271187e-05, |
| "loss": 1.5427, |
| "step": 8600 |
| }, |
| { |
| "epoch": 0.22, |
| "grad_norm": 2.6460745334625244, |
| "learning_rate": 1.4501694915254239e-05, |
| "loss": 1.3386, |
| "step": 8610 |
| }, |
| { |
| "epoch": 0.22, |
| "grad_norm": 13.728014945983887, |
| "learning_rate": 1.449491525423729e-05, |
| "loss": 1.3242, |
| "step": 8620 |
| }, |
| { |
| "epoch": 0.22, |
| "grad_norm": 2.1503047943115234, |
| "learning_rate": 1.4488135593220341e-05, |
| "loss": 1.5502, |
| "step": 8630 |
| }, |
| { |
| "epoch": 0.22, |
| "grad_norm": 2.9898569583892822, |
| "learning_rate": 1.4481355932203391e-05, |
| "loss": 1.5844, |
| "step": 8640 |
| }, |
| { |
| "epoch": 0.22, |
| "grad_norm": 2.0317842960357666, |
| "learning_rate": 1.4474576271186442e-05, |
| "loss": 1.4781, |
| "step": 8650 |
| }, |
| { |
| "epoch": 0.22, |
| "grad_norm": 3.0708272457122803, |
| "learning_rate": 1.4467796610169494e-05, |
| "loss": 1.4521, |
| "step": 8660 |
| }, |
| { |
| "epoch": 0.22, |
| "grad_norm": 5.950216770172119, |
| "learning_rate": 1.4461016949152544e-05, |
| "loss": 1.4619, |
| "step": 8670 |
| }, |
| { |
| "epoch": 0.22, |
| "grad_norm": 3.4892992973327637, |
| "learning_rate": 1.4454237288135595e-05, |
| "loss": 1.5312, |
| "step": 8680 |
| }, |
| { |
| "epoch": 0.22, |
| "grad_norm": 2.799809694290161, |
| "learning_rate": 1.4447457627118646e-05, |
| "loss": 1.6236, |
| "step": 8690 |
| }, |
| { |
| "epoch": 0.22, |
| "grad_norm": 2.8113033771514893, |
| "learning_rate": 1.4440677966101698e-05, |
| "loss": 1.4817, |
| "step": 8700 |
| }, |
| { |
| "epoch": 0.22, |
| "grad_norm": 4.948983192443848, |
| "learning_rate": 1.4433898305084746e-05, |
| "loss": 1.4583, |
| "step": 8710 |
| }, |
| { |
| "epoch": 0.22, |
| "grad_norm": 4.663936138153076, |
| "learning_rate": 1.4427118644067797e-05, |
| "loss": 1.3879, |
| "step": 8720 |
| }, |
| { |
| "epoch": 0.22, |
| "grad_norm": 5.0930256843566895, |
| "learning_rate": 1.4420338983050849e-05, |
| "loss": 1.4138, |
| "step": 8730 |
| }, |
| { |
| "epoch": 0.22, |
| "grad_norm": 2.463430404663086, |
| "learning_rate": 1.4413559322033898e-05, |
| "loss": 1.4856, |
| "step": 8740 |
| }, |
| { |
| "epoch": 0.22, |
| "grad_norm": 3.769231081008911, |
| "learning_rate": 1.440677966101695e-05, |
| "loss": 1.5743, |
| "step": 8750 |
| }, |
| { |
| "epoch": 0.22, |
| "grad_norm": 2.731410503387451, |
| "learning_rate": 1.4400000000000001e-05, |
| "loss": 1.6308, |
| "step": 8760 |
| }, |
| { |
| "epoch": 0.22, |
| "grad_norm": 0.9978777766227722, |
| "learning_rate": 1.4393220338983052e-05, |
| "loss": 1.5564, |
| "step": 8770 |
| }, |
| { |
| "epoch": 0.22, |
| "grad_norm": 2.8928000926971436, |
| "learning_rate": 1.4386440677966102e-05, |
| "loss": 1.4164, |
| "step": 8780 |
| }, |
| { |
| "epoch": 0.22, |
| "grad_norm": 11.18360424041748, |
| "learning_rate": 1.4379661016949154e-05, |
| "loss": 1.4095, |
| "step": 8790 |
| }, |
| { |
| "epoch": 0.22, |
| "grad_norm": 2.7960269451141357, |
| "learning_rate": 1.4372881355932205e-05, |
| "loss": 1.5618, |
| "step": 8800 |
| }, |
| { |
| "epoch": 0.22, |
| "grad_norm": 3.2135298252105713, |
| "learning_rate": 1.4366101694915255e-05, |
| "loss": 1.4324, |
| "step": 8810 |
| }, |
| { |
| "epoch": 0.22, |
| "grad_norm": 1.8829147815704346, |
| "learning_rate": 1.4359322033898306e-05, |
| "loss": 1.5242, |
| "step": 8820 |
| }, |
| { |
| "epoch": 0.22, |
| "grad_norm": 3.833895206451416, |
| "learning_rate": 1.4352542372881357e-05, |
| "loss": 1.2926, |
| "step": 8830 |
| }, |
| { |
| "epoch": 0.22, |
| "grad_norm": 5.0034356117248535, |
| "learning_rate": 1.4345762711864409e-05, |
| "loss": 1.3666, |
| "step": 8840 |
| }, |
| { |
| "epoch": 0.22, |
| "grad_norm": 2.923778533935547, |
| "learning_rate": 1.4338983050847459e-05, |
| "loss": 1.4768, |
| "step": 8850 |
| }, |
| { |
| "epoch": 0.22, |
| "grad_norm": 2.227283477783203, |
| "learning_rate": 1.433220338983051e-05, |
| "loss": 1.5738, |
| "step": 8860 |
| }, |
| { |
| "epoch": 0.22, |
| "grad_norm": 5.208776473999023, |
| "learning_rate": 1.4325423728813561e-05, |
| "loss": 1.5767, |
| "step": 8870 |
| }, |
| { |
| "epoch": 0.22, |
| "grad_norm": 1.79727041721344, |
| "learning_rate": 1.4318644067796613e-05, |
| "loss": 1.3856, |
| "step": 8880 |
| }, |
| { |
| "epoch": 0.22, |
| "grad_norm": 1.7950516939163208, |
| "learning_rate": 1.4311864406779662e-05, |
| "loss": 1.4558, |
| "step": 8890 |
| }, |
| { |
| "epoch": 0.22, |
| "grad_norm": 3.7515742778778076, |
| "learning_rate": 1.4305084745762714e-05, |
| "loss": 1.4925, |
| "step": 8900 |
| }, |
| { |
| "epoch": 0.22, |
| "grad_norm": 3.0850913524627686, |
| "learning_rate": 1.4298305084745765e-05, |
| "loss": 1.5348, |
| "step": 8910 |
| }, |
| { |
| "epoch": 0.22, |
| "grad_norm": 3.571545124053955, |
| "learning_rate": 1.4291525423728813e-05, |
| "loss": 1.3513, |
| "step": 8920 |
| }, |
| { |
| "epoch": 0.22, |
| "grad_norm": 3.581817150115967, |
| "learning_rate": 1.4284745762711865e-05, |
| "loss": 1.2864, |
| "step": 8930 |
| }, |
| { |
| "epoch": 0.22, |
| "grad_norm": 1.8698399066925049, |
| "learning_rate": 1.4277966101694916e-05, |
| "loss": 1.3102, |
| "step": 8940 |
| }, |
| { |
| "epoch": 0.22, |
| "grad_norm": 3.0762484073638916, |
| "learning_rate": 1.4271186440677966e-05, |
| "loss": 1.6684, |
| "step": 8950 |
| }, |
| { |
| "epoch": 0.22, |
| "grad_norm": 9.45728588104248, |
| "learning_rate": 1.4264406779661017e-05, |
| "loss": 1.4243, |
| "step": 8960 |
| }, |
| { |
| "epoch": 0.22, |
| "grad_norm": 2.8703598976135254, |
| "learning_rate": 1.4257627118644069e-05, |
| "loss": 1.6217, |
| "step": 8970 |
| }, |
| { |
| "epoch": 0.22, |
| "grad_norm": 1.5414369106292725, |
| "learning_rate": 1.425084745762712e-05, |
| "loss": 1.4434, |
| "step": 8980 |
| }, |
| { |
| "epoch": 0.22, |
| "grad_norm": 2.332796335220337, |
| "learning_rate": 1.424406779661017e-05, |
| "loss": 1.3841, |
| "step": 8990 |
| }, |
| { |
| "epoch": 0.23, |
| "grad_norm": 5.087715148925781, |
| "learning_rate": 1.4237288135593221e-05, |
| "loss": 1.3598, |
| "step": 9000 |
| }, |
| { |
| "epoch": 0.23, |
| "eval_loss": 1.4213091135025024, |
| "eval_runtime": 66.1473, |
| "eval_samples_per_second": 15.118, |
| "eval_steps_per_second": 15.118, |
| "step": 9000 |
| }, |
| { |
| "epoch": 0.23, |
| "grad_norm": 3.272977590560913, |
| "learning_rate": 1.4230508474576273e-05, |
| "loss": 1.4555, |
| "step": 9010 |
| }, |
| { |
| "epoch": 0.23, |
| "grad_norm": 3.6894586086273193, |
| "learning_rate": 1.4223728813559324e-05, |
| "loss": 1.3464, |
| "step": 9020 |
| }, |
| { |
| "epoch": 0.23, |
| "grad_norm": 1.8790464401245117, |
| "learning_rate": 1.4216949152542374e-05, |
| "loss": 1.6049, |
| "step": 9030 |
| }, |
| { |
| "epoch": 0.23, |
| "grad_norm": 2.313295364379883, |
| "learning_rate": 1.4210169491525425e-05, |
| "loss": 1.538, |
| "step": 9040 |
| }, |
| { |
| "epoch": 0.23, |
| "grad_norm": 3.532170295715332, |
| "learning_rate": 1.4203389830508476e-05, |
| "loss": 1.4518, |
| "step": 9050 |
| }, |
| { |
| "epoch": 0.23, |
| "grad_norm": 3.459240674972534, |
| "learning_rate": 1.4196610169491528e-05, |
| "loss": 1.4913, |
| "step": 9060 |
| }, |
| { |
| "epoch": 0.23, |
| "grad_norm": 2.919579029083252, |
| "learning_rate": 1.4189830508474578e-05, |
| "loss": 1.1391, |
| "step": 9070 |
| }, |
| { |
| "epoch": 0.23, |
| "grad_norm": 9.67332935333252, |
| "learning_rate": 1.4183050847457629e-05, |
| "loss": 1.4643, |
| "step": 9080 |
| }, |
| { |
| "epoch": 0.23, |
| "grad_norm": 1.2171639204025269, |
| "learning_rate": 1.417627118644068e-05, |
| "loss": 1.5016, |
| "step": 9090 |
| }, |
| { |
| "epoch": 0.23, |
| "grad_norm": 9.06068229675293, |
| "learning_rate": 1.416949152542373e-05, |
| "loss": 1.5658, |
| "step": 9100 |
| }, |
| { |
| "epoch": 0.23, |
| "grad_norm": 4.348093032836914, |
| "learning_rate": 1.4162711864406781e-05, |
| "loss": 1.4176, |
| "step": 9110 |
| }, |
| { |
| "epoch": 0.23, |
| "grad_norm": 4.279529094696045, |
| "learning_rate": 1.4155932203389833e-05, |
| "loss": 1.4751, |
| "step": 9120 |
| }, |
| { |
| "epoch": 0.23, |
| "grad_norm": 3.2655725479125977, |
| "learning_rate": 1.414915254237288e-05, |
| "loss": 1.4425, |
| "step": 9130 |
| }, |
| { |
| "epoch": 0.23, |
| "grad_norm": 3.8446030616760254, |
| "learning_rate": 1.4142372881355932e-05, |
| "loss": 1.4249, |
| "step": 9140 |
| }, |
| { |
| "epoch": 0.23, |
| "grad_norm": 3.5366909503936768, |
| "learning_rate": 1.4135593220338984e-05, |
| "loss": 1.3664, |
| "step": 9150 |
| }, |
| { |
| "epoch": 0.23, |
| "grad_norm": 2.4776804447174072, |
| "learning_rate": 1.4128813559322035e-05, |
| "loss": 1.4624, |
| "step": 9160 |
| }, |
| { |
| "epoch": 0.23, |
| "grad_norm": 2.9319727420806885, |
| "learning_rate": 1.4122033898305085e-05, |
| "loss": 1.2617, |
| "step": 9170 |
| }, |
| { |
| "epoch": 0.23, |
| "grad_norm": 4.934535503387451, |
| "learning_rate": 1.4115254237288136e-05, |
| "loss": 1.3883, |
| "step": 9180 |
| }, |
| { |
| "epoch": 0.23, |
| "grad_norm": 5.425727367401123, |
| "learning_rate": 1.4108474576271188e-05, |
| "loss": 1.4821, |
| "step": 9190 |
| }, |
| { |
| "epoch": 0.23, |
| "grad_norm": 2.792351007461548, |
| "learning_rate": 1.4101694915254239e-05, |
| "loss": 1.5057, |
| "step": 9200 |
| }, |
| { |
| "epoch": 0.23, |
| "grad_norm": 2.8711283206939697, |
| "learning_rate": 1.4094915254237289e-05, |
| "loss": 1.3026, |
| "step": 9210 |
| }, |
| { |
| "epoch": 0.23, |
| "grad_norm": 2.8754985332489014, |
| "learning_rate": 1.408813559322034e-05, |
| "loss": 1.5327, |
| "step": 9220 |
| }, |
| { |
| "epoch": 0.23, |
| "grad_norm": 4.520450592041016, |
| "learning_rate": 1.4081355932203391e-05, |
| "loss": 1.4834, |
| "step": 9230 |
| }, |
| { |
| "epoch": 0.23, |
| "grad_norm": 4.829522609710693, |
| "learning_rate": 1.4074576271186441e-05, |
| "loss": 1.4513, |
| "step": 9240 |
| }, |
| { |
| "epoch": 0.23, |
| "grad_norm": 3.405491352081299, |
| "learning_rate": 1.4067796610169493e-05, |
| "loss": 1.3591, |
| "step": 9250 |
| }, |
| { |
| "epoch": 0.23, |
| "grad_norm": 3.4185872077941895, |
| "learning_rate": 1.4061016949152544e-05, |
| "loss": 1.4836, |
| "step": 9260 |
| }, |
| { |
| "epoch": 0.23, |
| "grad_norm": 2.9153568744659424, |
| "learning_rate": 1.4054237288135595e-05, |
| "loss": 1.3154, |
| "step": 9270 |
| }, |
| { |
| "epoch": 0.23, |
| "grad_norm": 5.7017412185668945, |
| "learning_rate": 1.4047457627118645e-05, |
| "loss": 1.5036, |
| "step": 9280 |
| }, |
| { |
| "epoch": 0.23, |
| "grad_norm": 2.359771728515625, |
| "learning_rate": 1.4040677966101696e-05, |
| "loss": 1.3116, |
| "step": 9290 |
| }, |
| { |
| "epoch": 0.23, |
| "grad_norm": 2.206207036972046, |
| "learning_rate": 1.4033898305084748e-05, |
| "loss": 1.4501, |
| "step": 9300 |
| }, |
| { |
| "epoch": 0.23, |
| "grad_norm": 5.006134986877441, |
| "learning_rate": 1.40271186440678e-05, |
| "loss": 1.3749, |
| "step": 9310 |
| }, |
| { |
| "epoch": 0.23, |
| "grad_norm": 5.707823753356934, |
| "learning_rate": 1.4020338983050849e-05, |
| "loss": 1.5003, |
| "step": 9320 |
| }, |
| { |
| "epoch": 0.23, |
| "grad_norm": 2.2034099102020264, |
| "learning_rate": 1.4013559322033899e-05, |
| "loss": 1.398, |
| "step": 9330 |
| }, |
| { |
| "epoch": 0.23, |
| "grad_norm": 4.742650985717773, |
| "learning_rate": 1.400677966101695e-05, |
| "loss": 1.4648, |
| "step": 9340 |
| }, |
| { |
| "epoch": 0.23, |
| "grad_norm": 2.050645589828491, |
| "learning_rate": 1.4e-05, |
| "loss": 1.388, |
| "step": 9350 |
| }, |
| { |
| "epoch": 0.23, |
| "grad_norm": 5.325618267059326, |
| "learning_rate": 1.3993220338983051e-05, |
| "loss": 1.5215, |
| "step": 9360 |
| }, |
| { |
| "epoch": 0.23, |
| "grad_norm": 1.011096715927124, |
| "learning_rate": 1.3986440677966103e-05, |
| "loss": 1.3718, |
| "step": 9370 |
| }, |
| { |
| "epoch": 0.23, |
| "grad_norm": 3.435424566268921, |
| "learning_rate": 1.3979661016949152e-05, |
| "loss": 1.607, |
| "step": 9380 |
| }, |
| { |
| "epoch": 0.23, |
| "grad_norm": 2.3544552326202393, |
| "learning_rate": 1.3972881355932204e-05, |
| "loss": 1.487, |
| "step": 9390 |
| }, |
| { |
| "epoch": 0.23, |
| "grad_norm": 3.571235418319702, |
| "learning_rate": 1.3966101694915255e-05, |
| "loss": 1.3305, |
| "step": 9400 |
| }, |
| { |
| "epoch": 0.24, |
| "grad_norm": 2.248509407043457, |
| "learning_rate": 1.3959322033898306e-05, |
| "loss": 1.3421, |
| "step": 9410 |
| }, |
| { |
| "epoch": 0.24, |
| "grad_norm": 5.0531005859375, |
| "learning_rate": 1.3952542372881356e-05, |
| "loss": 1.5146, |
| "step": 9420 |
| }, |
| { |
| "epoch": 0.24, |
| "grad_norm": 6.318211078643799, |
| "learning_rate": 1.3945762711864408e-05, |
| "loss": 1.4586, |
| "step": 9430 |
| }, |
| { |
| "epoch": 0.24, |
| "grad_norm": 5.778545379638672, |
| "learning_rate": 1.3938983050847459e-05, |
| "loss": 1.3418, |
| "step": 9440 |
| }, |
| { |
| "epoch": 0.24, |
| "grad_norm": 4.6284871101379395, |
| "learning_rate": 1.393220338983051e-05, |
| "loss": 1.6445, |
| "step": 9450 |
| }, |
| { |
| "epoch": 0.24, |
| "grad_norm": 5.105873107910156, |
| "learning_rate": 1.392542372881356e-05, |
| "loss": 1.4662, |
| "step": 9460 |
| }, |
| { |
| "epoch": 0.24, |
| "grad_norm": 1.4448604583740234, |
| "learning_rate": 1.3918644067796612e-05, |
| "loss": 1.4413, |
| "step": 9470 |
| }, |
| { |
| "epoch": 0.24, |
| "grad_norm": 1.9221776723861694, |
| "learning_rate": 1.3911864406779663e-05, |
| "loss": 1.5862, |
| "step": 9480 |
| }, |
| { |
| "epoch": 0.24, |
| "grad_norm": 5.959461688995361, |
| "learning_rate": 1.3905084745762714e-05, |
| "loss": 1.3307, |
| "step": 9490 |
| }, |
| { |
| "epoch": 0.24, |
| "grad_norm": 2.975703477859497, |
| "learning_rate": 1.3898305084745764e-05, |
| "loss": 1.3601, |
| "step": 9500 |
| }, |
| { |
| "epoch": 0.24, |
| "eval_loss": 1.438934087753296, |
| "eval_runtime": 66.1288, |
| "eval_samples_per_second": 15.122, |
| "eval_steps_per_second": 15.122, |
| "step": 9500 |
| }, |
| { |
| "epoch": 0.24, |
| "grad_norm": 1.9357355833053589, |
| "learning_rate": 1.3891525423728815e-05, |
| "loss": 1.6061, |
| "step": 9510 |
| }, |
| { |
| "epoch": 0.24, |
| "grad_norm": 5.015805244445801, |
| "learning_rate": 1.3884745762711867e-05, |
| "loss": 1.4235, |
| "step": 9520 |
| }, |
| { |
| "epoch": 0.24, |
| "grad_norm": 3.4826226234436035, |
| "learning_rate": 1.3877966101694918e-05, |
| "loss": 1.474, |
| "step": 9530 |
| }, |
| { |
| "epoch": 0.24, |
| "grad_norm": 4.293178558349609, |
| "learning_rate": 1.3871186440677966e-05, |
| "loss": 1.5052, |
| "step": 9540 |
| }, |
| { |
| "epoch": 0.24, |
| "grad_norm": 3.036905527114868, |
| "learning_rate": 1.3864406779661018e-05, |
| "loss": 1.419, |
| "step": 9550 |
| }, |
| { |
| "epoch": 0.24, |
| "grad_norm": 4.020019054412842, |
| "learning_rate": 1.3857627118644067e-05, |
| "loss": 1.4257, |
| "step": 9560 |
| }, |
| { |
| "epoch": 0.24, |
| "grad_norm": 7.403942584991455, |
| "learning_rate": 1.3850847457627119e-05, |
| "loss": 1.3108, |
| "step": 9570 |
| }, |
| { |
| "epoch": 0.24, |
| "grad_norm": 2.8201069831848145, |
| "learning_rate": 1.384406779661017e-05, |
| "loss": 1.3367, |
| "step": 9580 |
| }, |
| { |
| "epoch": 0.24, |
| "grad_norm": 1.7385600805282593, |
| "learning_rate": 1.3837288135593222e-05, |
| "loss": 1.511, |
| "step": 9590 |
| }, |
| { |
| "epoch": 0.24, |
| "grad_norm": 1.3381562232971191, |
| "learning_rate": 1.3830508474576271e-05, |
| "loss": 1.304, |
| "step": 9600 |
| }, |
| { |
| "epoch": 0.24, |
| "grad_norm": 2.895104169845581, |
| "learning_rate": 1.3823728813559323e-05, |
| "loss": 1.4794, |
| "step": 9610 |
| }, |
| { |
| "epoch": 0.24, |
| "grad_norm": 5.847405910491943, |
| "learning_rate": 1.3816949152542374e-05, |
| "loss": 1.5482, |
| "step": 9620 |
| }, |
| { |
| "epoch": 0.24, |
| "grad_norm": 1.4990681409835815, |
| "learning_rate": 1.3810169491525425e-05, |
| "loss": 1.2669, |
| "step": 9630 |
| }, |
| { |
| "epoch": 0.24, |
| "grad_norm": 4.587515830993652, |
| "learning_rate": 1.3803389830508475e-05, |
| "loss": 1.3339, |
| "step": 9640 |
| }, |
| { |
| "epoch": 0.24, |
| "grad_norm": 4.533742904663086, |
| "learning_rate": 1.3796610169491527e-05, |
| "loss": 1.3709, |
| "step": 9650 |
| }, |
| { |
| "epoch": 0.24, |
| "grad_norm": 6.8022003173828125, |
| "learning_rate": 1.3789830508474578e-05, |
| "loss": 1.2791, |
| "step": 9660 |
| }, |
| { |
| "epoch": 0.24, |
| "grad_norm": 7.4975175857543945, |
| "learning_rate": 1.378305084745763e-05, |
| "loss": 1.5221, |
| "step": 9670 |
| }, |
| { |
| "epoch": 0.24, |
| "grad_norm": 3.8370859622955322, |
| "learning_rate": 1.3776271186440679e-05, |
| "loss": 1.295, |
| "step": 9680 |
| }, |
| { |
| "epoch": 0.24, |
| "grad_norm": 3.027163505554199, |
| "learning_rate": 1.376949152542373e-05, |
| "loss": 1.4657, |
| "step": 9690 |
| }, |
| { |
| "epoch": 0.24, |
| "grad_norm": 7.874743938446045, |
| "learning_rate": 1.3762711864406782e-05, |
| "loss": 1.4806, |
| "step": 9700 |
| }, |
| { |
| "epoch": 0.24, |
| "grad_norm": 3.1617636680603027, |
| "learning_rate": 1.3755932203389832e-05, |
| "loss": 1.4772, |
| "step": 9710 |
| }, |
| { |
| "epoch": 0.24, |
| "grad_norm": 7.0615458488464355, |
| "learning_rate": 1.3749152542372883e-05, |
| "loss": 1.4963, |
| "step": 9720 |
| }, |
| { |
| "epoch": 0.24, |
| "grad_norm": 4.889975070953369, |
| "learning_rate": 1.3742372881355934e-05, |
| "loss": 1.4434, |
| "step": 9730 |
| }, |
| { |
| "epoch": 0.24, |
| "grad_norm": 3.209303855895996, |
| "learning_rate": 1.3735593220338986e-05, |
| "loss": 1.5107, |
| "step": 9740 |
| }, |
| { |
| "epoch": 0.24, |
| "grad_norm": 3.6406736373901367, |
| "learning_rate": 1.3728813559322034e-05, |
| "loss": 1.3425, |
| "step": 9750 |
| }, |
| { |
| "epoch": 0.24, |
| "grad_norm": 1.520709753036499, |
| "learning_rate": 1.3722033898305085e-05, |
| "loss": 1.4677, |
| "step": 9760 |
| }, |
| { |
| "epoch": 0.24, |
| "grad_norm": 3.9511709213256836, |
| "learning_rate": 1.3715254237288137e-05, |
| "loss": 1.5378, |
| "step": 9770 |
| }, |
| { |
| "epoch": 0.24, |
| "grad_norm": 3.744352102279663, |
| "learning_rate": 1.3708474576271186e-05, |
| "loss": 1.4105, |
| "step": 9780 |
| }, |
| { |
| "epoch": 0.24, |
| "grad_norm": 3.820054292678833, |
| "learning_rate": 1.3701694915254238e-05, |
| "loss": 1.3589, |
| "step": 9790 |
| }, |
| { |
| "epoch": 0.24, |
| "grad_norm": 4.339268684387207, |
| "learning_rate": 1.3694915254237289e-05, |
| "loss": 1.5092, |
| "step": 9800 |
| }, |
| { |
| "epoch": 0.25, |
| "grad_norm": 2.449904680252075, |
| "learning_rate": 1.368813559322034e-05, |
| "loss": 1.4878, |
| "step": 9810 |
| }, |
| { |
| "epoch": 0.25, |
| "grad_norm": 9.548687934875488, |
| "learning_rate": 1.368135593220339e-05, |
| "loss": 1.4862, |
| "step": 9820 |
| }, |
| { |
| "epoch": 0.25, |
| "grad_norm": 4.738724708557129, |
| "learning_rate": 1.3674576271186442e-05, |
| "loss": 1.4042, |
| "step": 9830 |
| }, |
| { |
| "epoch": 0.25, |
| "grad_norm": 4.652593612670898, |
| "learning_rate": 1.3667796610169493e-05, |
| "loss": 1.3038, |
| "step": 9840 |
| }, |
| { |
| "epoch": 0.25, |
| "grad_norm": 4.120163917541504, |
| "learning_rate": 1.3661016949152543e-05, |
| "loss": 1.4582, |
| "step": 9850 |
| }, |
| { |
| "epoch": 0.25, |
| "grad_norm": 4.297613143920898, |
| "learning_rate": 1.3654237288135594e-05, |
| "loss": 1.4914, |
| "step": 9860 |
| }, |
| { |
| "epoch": 0.25, |
| "grad_norm": 3.7870216369628906, |
| "learning_rate": 1.3647457627118646e-05, |
| "loss": 1.3886, |
| "step": 9870 |
| }, |
| { |
| "epoch": 0.25, |
| "grad_norm": 4.052813529968262, |
| "learning_rate": 1.3640677966101697e-05, |
| "loss": 1.5646, |
| "step": 9880 |
| }, |
| { |
| "epoch": 0.25, |
| "grad_norm": 4.547369003295898, |
| "learning_rate": 1.3633898305084747e-05, |
| "loss": 1.3154, |
| "step": 9890 |
| }, |
| { |
| "epoch": 0.25, |
| "grad_norm": 1.9027762413024902, |
| "learning_rate": 1.3627118644067798e-05, |
| "loss": 1.4038, |
| "step": 9900 |
| }, |
| { |
| "epoch": 0.25, |
| "grad_norm": 3.208552360534668, |
| "learning_rate": 1.362033898305085e-05, |
| "loss": 1.3637, |
| "step": 9910 |
| }, |
| { |
| "epoch": 0.25, |
| "grad_norm": 13.44774055480957, |
| "learning_rate": 1.36135593220339e-05, |
| "loss": 1.2825, |
| "step": 9920 |
| }, |
| { |
| "epoch": 0.25, |
| "grad_norm": 8.423622131347656, |
| "learning_rate": 1.360677966101695e-05, |
| "loss": 1.4533, |
| "step": 9930 |
| }, |
| { |
| "epoch": 0.25, |
| "grad_norm": 4.321129322052002, |
| "learning_rate": 1.3600000000000002e-05, |
| "loss": 1.3952, |
| "step": 9940 |
| }, |
| { |
| "epoch": 0.25, |
| "grad_norm": 4.656356334686279, |
| "learning_rate": 1.3593220338983053e-05, |
| "loss": 1.4925, |
| "step": 9950 |
| }, |
| { |
| "epoch": 0.25, |
| "grad_norm": 3.8355157375335693, |
| "learning_rate": 1.3586440677966101e-05, |
| "loss": 1.4598, |
| "step": 9960 |
| }, |
| { |
| "epoch": 0.25, |
| "grad_norm": 3.602102041244507, |
| "learning_rate": 1.3579661016949153e-05, |
| "loss": 1.4722, |
| "step": 9970 |
| }, |
| { |
| "epoch": 0.25, |
| "grad_norm": 1.8679078817367554, |
| "learning_rate": 1.3572881355932204e-05, |
| "loss": 1.5094, |
| "step": 9980 |
| }, |
| { |
| "epoch": 0.25, |
| "grad_norm": 3.2152957916259766, |
| "learning_rate": 1.3566101694915254e-05, |
| "loss": 1.4489, |
| "step": 9990 |
| }, |
| { |
| "epoch": 0.25, |
| "grad_norm": 2.101644515991211, |
| "learning_rate": 1.3559322033898305e-05, |
| "loss": 1.5278, |
| "step": 10000 |
| }, |
| { |
| "epoch": 0.25, |
| "eval_loss": 1.419925332069397, |
| "eval_runtime": 66.1325, |
| "eval_samples_per_second": 15.121, |
| "eval_steps_per_second": 15.121, |
| "step": 10000 |
| }, |
| { |
| "epoch": 0.25, |
| "grad_norm": 6.951940536499023, |
| "learning_rate": 1.3552542372881357e-05, |
| "loss": 1.4138, |
| "step": 10010 |
| }, |
| { |
| "epoch": 0.25, |
| "grad_norm": 1.5260531902313232, |
| "learning_rate": 1.3545762711864408e-05, |
| "loss": 1.3812, |
| "step": 10020 |
| }, |
| { |
| "epoch": 0.25, |
| "grad_norm": 2.1888010501861572, |
| "learning_rate": 1.3538983050847458e-05, |
| "loss": 1.5596, |
| "step": 10030 |
| }, |
| { |
| "epoch": 0.25, |
| "grad_norm": 1.2334835529327393, |
| "learning_rate": 1.353220338983051e-05, |
| "loss": 1.5287, |
| "step": 10040 |
| }, |
| { |
| "epoch": 0.25, |
| "grad_norm": 4.4969353675842285, |
| "learning_rate": 1.352542372881356e-05, |
| "loss": 1.4268, |
| "step": 10050 |
| }, |
| { |
| "epoch": 0.25, |
| "grad_norm": 4.795516490936279, |
| "learning_rate": 1.3518644067796612e-05, |
| "loss": 1.2992, |
| "step": 10060 |
| }, |
| { |
| "epoch": 0.25, |
| "grad_norm": 2.478320837020874, |
| "learning_rate": 1.3511864406779662e-05, |
| "loss": 1.4974, |
| "step": 10070 |
| }, |
| { |
| "epoch": 0.25, |
| "grad_norm": 1.9508737325668335, |
| "learning_rate": 1.3505084745762713e-05, |
| "loss": 1.3097, |
| "step": 10080 |
| }, |
| { |
| "epoch": 0.25, |
| "grad_norm": 3.163217306137085, |
| "learning_rate": 1.3498305084745764e-05, |
| "loss": 1.4782, |
| "step": 10090 |
| }, |
| { |
| "epoch": 0.25, |
| "grad_norm": 5.9888787269592285, |
| "learning_rate": 1.3491525423728816e-05, |
| "loss": 1.3798, |
| "step": 10100 |
| }, |
| { |
| "epoch": 0.25, |
| "grad_norm": 4.777866840362549, |
| "learning_rate": 1.3484745762711866e-05, |
| "loss": 1.4042, |
| "step": 10110 |
| }, |
| { |
| "epoch": 0.25, |
| "grad_norm": 2.4334771633148193, |
| "learning_rate": 1.3477966101694917e-05, |
| "loss": 1.4185, |
| "step": 10120 |
| }, |
| { |
| "epoch": 0.25, |
| "grad_norm": 2.8577849864959717, |
| "learning_rate": 1.3471186440677968e-05, |
| "loss": 1.4438, |
| "step": 10130 |
| }, |
| { |
| "epoch": 0.25, |
| "grad_norm": 2.9416913986206055, |
| "learning_rate": 1.346440677966102e-05, |
| "loss": 1.4679, |
| "step": 10140 |
| }, |
| { |
| "epoch": 0.25, |
| "grad_norm": 5.749608516693115, |
| "learning_rate": 1.345762711864407e-05, |
| "loss": 1.3858, |
| "step": 10150 |
| }, |
| { |
| "epoch": 0.25, |
| "grad_norm": 3.1441190242767334, |
| "learning_rate": 1.3450847457627121e-05, |
| "loss": 1.3954, |
| "step": 10160 |
| }, |
| { |
| "epoch": 0.25, |
| "grad_norm": 4.9486188888549805, |
| "learning_rate": 1.3444067796610169e-05, |
| "loss": 1.4373, |
| "step": 10170 |
| }, |
| { |
| "epoch": 0.25, |
| "grad_norm": 4.6098785400390625, |
| "learning_rate": 1.343728813559322e-05, |
| "loss": 1.3843, |
| "step": 10180 |
| }, |
| { |
| "epoch": 0.25, |
| "grad_norm": 2.3991878032684326, |
| "learning_rate": 1.3430508474576272e-05, |
| "loss": 1.3708, |
| "step": 10190 |
| }, |
| { |
| "epoch": 0.26, |
| "grad_norm": 4.673692226409912, |
| "learning_rate": 1.3423728813559323e-05, |
| "loss": 1.5219, |
| "step": 10200 |
| }, |
| { |
| "epoch": 0.26, |
| "grad_norm": 1.292708158493042, |
| "learning_rate": 1.3416949152542373e-05, |
| "loss": 1.4646, |
| "step": 10210 |
| }, |
| { |
| "epoch": 0.26, |
| "grad_norm": 6.8573479652404785, |
| "learning_rate": 1.3410169491525424e-05, |
| "loss": 1.5663, |
| "step": 10220 |
| }, |
| { |
| "epoch": 0.26, |
| "grad_norm": 1.6200875043869019, |
| "learning_rate": 1.3403389830508476e-05, |
| "loss": 1.4902, |
| "step": 10230 |
| }, |
| { |
| "epoch": 0.26, |
| "grad_norm": 5.667778491973877, |
| "learning_rate": 1.3396610169491527e-05, |
| "loss": 1.4259, |
| "step": 10240 |
| }, |
| { |
| "epoch": 0.26, |
| "grad_norm": 6.129855155944824, |
| "learning_rate": 1.3389830508474577e-05, |
| "loss": 1.43, |
| "step": 10250 |
| }, |
| { |
| "epoch": 0.26, |
| "grad_norm": 2.988898992538452, |
| "learning_rate": 1.3383050847457628e-05, |
| "loss": 1.3867, |
| "step": 10260 |
| }, |
| { |
| "epoch": 0.26, |
| "grad_norm": 2.862208366394043, |
| "learning_rate": 1.337627118644068e-05, |
| "loss": 1.4641, |
| "step": 10270 |
| }, |
| { |
| "epoch": 0.26, |
| "grad_norm": 4.67335319519043, |
| "learning_rate": 1.3369491525423731e-05, |
| "loss": 1.4377, |
| "step": 10280 |
| }, |
| { |
| "epoch": 0.26, |
| "grad_norm": 6.531377792358398, |
| "learning_rate": 1.336271186440678e-05, |
| "loss": 1.3493, |
| "step": 10290 |
| }, |
| { |
| "epoch": 0.26, |
| "grad_norm": 2.349855899810791, |
| "learning_rate": 1.3355932203389832e-05, |
| "loss": 1.3054, |
| "step": 10300 |
| }, |
| { |
| "epoch": 0.26, |
| "grad_norm": 1.38920259475708, |
| "learning_rate": 1.3349152542372883e-05, |
| "loss": 1.4467, |
| "step": 10310 |
| }, |
| { |
| "epoch": 0.26, |
| "grad_norm": 4.019247055053711, |
| "learning_rate": 1.3342372881355933e-05, |
| "loss": 1.5132, |
| "step": 10320 |
| }, |
| { |
| "epoch": 0.26, |
| "grad_norm": 2.967440128326416, |
| "learning_rate": 1.3335593220338985e-05, |
| "loss": 1.391, |
| "step": 10330 |
| }, |
| { |
| "epoch": 0.26, |
| "grad_norm": 8.294310569763184, |
| "learning_rate": 1.3328813559322036e-05, |
| "loss": 1.3113, |
| "step": 10340 |
| }, |
| { |
| "epoch": 0.26, |
| "grad_norm": 5.314875602722168, |
| "learning_rate": 1.3322033898305087e-05, |
| "loss": 1.4274, |
| "step": 10350 |
| }, |
| { |
| "epoch": 0.26, |
| "grad_norm": 9.737332344055176, |
| "learning_rate": 1.3315254237288137e-05, |
| "loss": 1.4872, |
| "step": 10360 |
| }, |
| { |
| "epoch": 0.26, |
| "grad_norm": 5.628332614898682, |
| "learning_rate": 1.3308474576271187e-05, |
| "loss": 1.4917, |
| "step": 10370 |
| }, |
| { |
| "epoch": 0.26, |
| "grad_norm": 2.7575583457946777, |
| "learning_rate": 1.3301694915254238e-05, |
| "loss": 1.5113, |
| "step": 10380 |
| }, |
| { |
| "epoch": 0.26, |
| "grad_norm": 6.598272800445557, |
| "learning_rate": 1.3294915254237288e-05, |
| "loss": 1.321, |
| "step": 10390 |
| }, |
| { |
| "epoch": 0.26, |
| "grad_norm": 5.086571216583252, |
| "learning_rate": 1.328813559322034e-05, |
| "loss": 1.4841, |
| "step": 10400 |
| }, |
| { |
| "epoch": 0.26, |
| "grad_norm": 2.9577059745788574, |
| "learning_rate": 1.328135593220339e-05, |
| "loss": 1.3408, |
| "step": 10410 |
| }, |
| { |
| "epoch": 0.26, |
| "grad_norm": 7.587204933166504, |
| "learning_rate": 1.3274576271186442e-05, |
| "loss": 1.4564, |
| "step": 10420 |
| }, |
| { |
| "epoch": 0.26, |
| "grad_norm": 3.409858226776123, |
| "learning_rate": 1.3267796610169492e-05, |
| "loss": 1.5365, |
| "step": 10430 |
| }, |
| { |
| "epoch": 0.26, |
| "grad_norm": 4.1367387771606445, |
| "learning_rate": 1.3261016949152543e-05, |
| "loss": 1.3273, |
| "step": 10440 |
| }, |
| { |
| "epoch": 0.26, |
| "grad_norm": 4.932620048522949, |
| "learning_rate": 1.3254237288135595e-05, |
| "loss": 1.5361, |
| "step": 10450 |
| }, |
| { |
| "epoch": 0.26, |
| "grad_norm": 2.9673991203308105, |
| "learning_rate": 1.3247457627118644e-05, |
| "loss": 1.5074, |
| "step": 10460 |
| }, |
| { |
| "epoch": 0.26, |
| "grad_norm": 8.657196998596191, |
| "learning_rate": 1.3240677966101696e-05, |
| "loss": 1.4046, |
| "step": 10470 |
| }, |
| { |
| "epoch": 0.26, |
| "grad_norm": 0.6172802448272705, |
| "learning_rate": 1.3233898305084747e-05, |
| "loss": 1.2062, |
| "step": 10480 |
| }, |
| { |
| "epoch": 0.26, |
| "grad_norm": 2.4086947441101074, |
| "learning_rate": 1.3227118644067798e-05, |
| "loss": 1.4763, |
| "step": 10490 |
| }, |
| { |
| "epoch": 0.26, |
| "grad_norm": 2.687913656234741, |
| "learning_rate": 1.3220338983050848e-05, |
| "loss": 1.4159, |
| "step": 10500 |
| }, |
| { |
| "epoch": 0.26, |
| "eval_loss": 1.3867793083190918, |
| "eval_runtime": 66.119, |
| "eval_samples_per_second": 15.124, |
| "eval_steps_per_second": 15.124, |
| "step": 10500 |
| }, |
| { |
| "epoch": 0.26, |
| "grad_norm": 3.875162363052368, |
| "learning_rate": 1.32135593220339e-05, |
| "loss": 1.6311, |
| "step": 10510 |
| }, |
| { |
| "epoch": 0.26, |
| "grad_norm": 2.257335901260376, |
| "learning_rate": 1.3206779661016951e-05, |
| "loss": 1.467, |
| "step": 10520 |
| }, |
| { |
| "epoch": 0.26, |
| "grad_norm": 2.7035036087036133, |
| "learning_rate": 1.3200000000000002e-05, |
| "loss": 1.5601, |
| "step": 10530 |
| }, |
| { |
| "epoch": 0.26, |
| "grad_norm": 4.788657188415527, |
| "learning_rate": 1.3193220338983052e-05, |
| "loss": 1.4355, |
| "step": 10540 |
| }, |
| { |
| "epoch": 0.26, |
| "grad_norm": 3.4326958656311035, |
| "learning_rate": 1.3186440677966103e-05, |
| "loss": 1.4474, |
| "step": 10550 |
| }, |
| { |
| "epoch": 0.26, |
| "grad_norm": 5.0102009773254395, |
| "learning_rate": 1.3179661016949155e-05, |
| "loss": 1.579, |
| "step": 10560 |
| }, |
| { |
| "epoch": 0.26, |
| "grad_norm": 2.6414384841918945, |
| "learning_rate": 1.3172881355932206e-05, |
| "loss": 1.4042, |
| "step": 10570 |
| }, |
| { |
| "epoch": 0.26, |
| "grad_norm": 2.270298480987549, |
| "learning_rate": 1.3166101694915254e-05, |
| "loss": 1.4761, |
| "step": 10580 |
| }, |
| { |
| "epoch": 0.26, |
| "grad_norm": 2.361912250518799, |
| "learning_rate": 1.3159322033898306e-05, |
| "loss": 1.4999, |
| "step": 10590 |
| }, |
| { |
| "epoch": 0.27, |
| "grad_norm": 5.356451988220215, |
| "learning_rate": 1.3152542372881355e-05, |
| "loss": 1.4594, |
| "step": 10600 |
| }, |
| { |
| "epoch": 0.27, |
| "grad_norm": 5.603447914123535, |
| "learning_rate": 1.3145762711864407e-05, |
| "loss": 1.355, |
| "step": 10610 |
| }, |
| { |
| "epoch": 0.27, |
| "grad_norm": 2.6022191047668457, |
| "learning_rate": 1.3138983050847458e-05, |
| "loss": 1.3742, |
| "step": 10620 |
| }, |
| { |
| "epoch": 0.27, |
| "grad_norm": 5.697202205657959, |
| "learning_rate": 1.313220338983051e-05, |
| "loss": 1.3713, |
| "step": 10630 |
| }, |
| { |
| "epoch": 0.27, |
| "grad_norm": 2.859365224838257, |
| "learning_rate": 1.312542372881356e-05, |
| "loss": 1.5086, |
| "step": 10640 |
| }, |
| { |
| "epoch": 0.27, |
| "grad_norm": 7.860188007354736, |
| "learning_rate": 1.311864406779661e-05, |
| "loss": 1.2122, |
| "step": 10650 |
| }, |
| { |
| "epoch": 0.27, |
| "grad_norm": 2.77958607673645, |
| "learning_rate": 1.3111864406779662e-05, |
| "loss": 1.4479, |
| "step": 10660 |
| }, |
| { |
| "epoch": 0.27, |
| "grad_norm": 9.004295349121094, |
| "learning_rate": 1.3105084745762714e-05, |
| "loss": 1.5347, |
| "step": 10670 |
| }, |
| { |
| "epoch": 0.27, |
| "grad_norm": 4.261473178863525, |
| "learning_rate": 1.3098305084745763e-05, |
| "loss": 1.4104, |
| "step": 10680 |
| }, |
| { |
| "epoch": 0.27, |
| "grad_norm": 1.7853983640670776, |
| "learning_rate": 1.3091525423728815e-05, |
| "loss": 1.3679, |
| "step": 10690 |
| }, |
| { |
| "epoch": 0.27, |
| "grad_norm": 4.3893609046936035, |
| "learning_rate": 1.3084745762711866e-05, |
| "loss": 1.4985, |
| "step": 10700 |
| }, |
| { |
| "epoch": 0.27, |
| "grad_norm": 1.7915986776351929, |
| "learning_rate": 1.3077966101694917e-05, |
| "loss": 1.4847, |
| "step": 10710 |
| }, |
| { |
| "epoch": 0.27, |
| "grad_norm": 5.252231121063232, |
| "learning_rate": 1.3071186440677967e-05, |
| "loss": 1.4013, |
| "step": 10720 |
| }, |
| { |
| "epoch": 0.27, |
| "grad_norm": 2.2784807682037354, |
| "learning_rate": 1.3064406779661019e-05, |
| "loss": 1.395, |
| "step": 10730 |
| }, |
| { |
| "epoch": 0.27, |
| "grad_norm": 4.382660388946533, |
| "learning_rate": 1.305762711864407e-05, |
| "loss": 1.3082, |
| "step": 10740 |
| }, |
| { |
| "epoch": 0.27, |
| "grad_norm": 3.5009450912475586, |
| "learning_rate": 1.305084745762712e-05, |
| "loss": 1.355, |
| "step": 10750 |
| }, |
| { |
| "epoch": 0.27, |
| "grad_norm": 6.310500621795654, |
| "learning_rate": 1.3044067796610171e-05, |
| "loss": 1.4164, |
| "step": 10760 |
| }, |
| { |
| "epoch": 0.27, |
| "grad_norm": 2.8609213829040527, |
| "learning_rate": 1.3037288135593222e-05, |
| "loss": 1.4316, |
| "step": 10770 |
| }, |
| { |
| "epoch": 0.27, |
| "grad_norm": 5.6872382164001465, |
| "learning_rate": 1.3030508474576274e-05, |
| "loss": 1.4071, |
| "step": 10780 |
| }, |
| { |
| "epoch": 0.27, |
| "grad_norm": 4.75257682800293, |
| "learning_rate": 1.3023728813559322e-05, |
| "loss": 1.4906, |
| "step": 10790 |
| }, |
| { |
| "epoch": 0.27, |
| "grad_norm": 3.286771297454834, |
| "learning_rate": 1.3016949152542373e-05, |
| "loss": 1.3927, |
| "step": 10800 |
| }, |
| { |
| "epoch": 0.27, |
| "grad_norm": 5.782283782958984, |
| "learning_rate": 1.3010169491525425e-05, |
| "loss": 1.5204, |
| "step": 10810 |
| }, |
| { |
| "epoch": 0.27, |
| "grad_norm": 2.9265480041503906, |
| "learning_rate": 1.3003389830508474e-05, |
| "loss": 1.5996, |
| "step": 10820 |
| }, |
| { |
| "epoch": 0.27, |
| "grad_norm": 7.570978164672852, |
| "learning_rate": 1.2996610169491526e-05, |
| "loss": 1.3926, |
| "step": 10830 |
| }, |
| { |
| "epoch": 0.27, |
| "grad_norm": 2.7784712314605713, |
| "learning_rate": 1.2989830508474577e-05, |
| "loss": 1.4601, |
| "step": 10840 |
| }, |
| { |
| "epoch": 0.27, |
| "grad_norm": 2.448601007461548, |
| "learning_rate": 1.2983050847457629e-05, |
| "loss": 1.4095, |
| "step": 10850 |
| }, |
| { |
| "epoch": 0.27, |
| "grad_norm": 3.784883975982666, |
| "learning_rate": 1.2976271186440678e-05, |
| "loss": 1.3626, |
| "step": 10860 |
| }, |
| { |
| "epoch": 0.27, |
| "grad_norm": 5.855194091796875, |
| "learning_rate": 1.296949152542373e-05, |
| "loss": 1.3263, |
| "step": 10870 |
| }, |
| { |
| "epoch": 0.27, |
| "grad_norm": 3.4989304542541504, |
| "learning_rate": 1.2962711864406781e-05, |
| "loss": 1.4125, |
| "step": 10880 |
| }, |
| { |
| "epoch": 0.27, |
| "grad_norm": 4.406559467315674, |
| "learning_rate": 1.295593220338983e-05, |
| "loss": 1.561, |
| "step": 10890 |
| }, |
| { |
| "epoch": 0.27, |
| "grad_norm": 3.153087615966797, |
| "learning_rate": 1.2949152542372882e-05, |
| "loss": 1.4574, |
| "step": 10900 |
| }, |
| { |
| "epoch": 0.27, |
| "grad_norm": 3.5177876949310303, |
| "learning_rate": 1.2942372881355934e-05, |
| "loss": 1.3867, |
| "step": 10910 |
| }, |
| { |
| "epoch": 0.27, |
| "grad_norm": 4.136317729949951, |
| "learning_rate": 1.2935593220338985e-05, |
| "loss": 1.4718, |
| "step": 10920 |
| }, |
| { |
| "epoch": 0.27, |
| "grad_norm": 2.291229009628296, |
| "learning_rate": 1.2928813559322035e-05, |
| "loss": 1.3122, |
| "step": 10930 |
| }, |
| { |
| "epoch": 0.27, |
| "grad_norm": 3.3874454498291016, |
| "learning_rate": 1.2922033898305086e-05, |
| "loss": 1.4073, |
| "step": 10940 |
| }, |
| { |
| "epoch": 0.27, |
| "grad_norm": 4.799066066741943, |
| "learning_rate": 1.2915254237288137e-05, |
| "loss": 1.4412, |
| "step": 10950 |
| }, |
| { |
| "epoch": 0.27, |
| "grad_norm": 6.790704250335693, |
| "learning_rate": 1.2908474576271189e-05, |
| "loss": 1.5664, |
| "step": 10960 |
| }, |
| { |
| "epoch": 0.27, |
| "grad_norm": 3.8187286853790283, |
| "learning_rate": 1.2901694915254239e-05, |
| "loss": 1.2302, |
| "step": 10970 |
| }, |
| { |
| "epoch": 0.27, |
| "grad_norm": 2.162944793701172, |
| "learning_rate": 1.289491525423729e-05, |
| "loss": 1.4602, |
| "step": 10980 |
| }, |
| { |
| "epoch": 0.27, |
| "grad_norm": 7.523599147796631, |
| "learning_rate": 1.2888135593220341e-05, |
| "loss": 1.4643, |
| "step": 10990 |
| }, |
| { |
| "epoch": 0.28, |
| "grad_norm": 3.7828078269958496, |
| "learning_rate": 1.288135593220339e-05, |
| "loss": 1.6134, |
| "step": 11000 |
| }, |
| { |
| "epoch": 0.28, |
| "eval_loss": 1.4437503814697266, |
| "eval_runtime": 66.1284, |
| "eval_samples_per_second": 15.122, |
| "eval_steps_per_second": 15.122, |
| "step": 11000 |
| }, |
| { |
| "epoch": 0.28, |
| "grad_norm": 10.092121124267578, |
| "learning_rate": 1.287457627118644e-05, |
| "loss": 1.5293, |
| "step": 11010 |
| }, |
| { |
| "epoch": 0.28, |
| "grad_norm": 7.2347235679626465, |
| "learning_rate": 1.2867796610169492e-05, |
| "loss": 1.4884, |
| "step": 11020 |
| }, |
| { |
| "epoch": 0.28, |
| "grad_norm": 2.231647491455078, |
| "learning_rate": 1.2861016949152542e-05, |
| "loss": 1.5299, |
| "step": 11030 |
| }, |
| { |
| "epoch": 0.28, |
| "grad_norm": 6.09018611907959, |
| "learning_rate": 1.2854237288135593e-05, |
| "loss": 1.588, |
| "step": 11040 |
| }, |
| { |
| "epoch": 0.28, |
| "grad_norm": 5.75846529006958, |
| "learning_rate": 1.2847457627118645e-05, |
| "loss": 1.4481, |
| "step": 11050 |
| }, |
| { |
| "epoch": 0.28, |
| "grad_norm": 2.08042311668396, |
| "learning_rate": 1.2840677966101696e-05, |
| "loss": 1.4066, |
| "step": 11060 |
| }, |
| { |
| "epoch": 0.28, |
| "grad_norm": 4.944487571716309, |
| "learning_rate": 1.2833898305084746e-05, |
| "loss": 1.3442, |
| "step": 11070 |
| }, |
| { |
| "epoch": 0.28, |
| "grad_norm": 2.4912710189819336, |
| "learning_rate": 1.2827118644067797e-05, |
| "loss": 1.6345, |
| "step": 11080 |
| }, |
| { |
| "epoch": 0.28, |
| "grad_norm": 2.6630759239196777, |
| "learning_rate": 1.2820338983050849e-05, |
| "loss": 1.4073, |
| "step": 11090 |
| }, |
| { |
| "epoch": 0.28, |
| "grad_norm": 1.5061525106430054, |
| "learning_rate": 1.28135593220339e-05, |
| "loss": 1.5894, |
| "step": 11100 |
| }, |
| { |
| "epoch": 0.28, |
| "grad_norm": 2.7690367698669434, |
| "learning_rate": 1.280677966101695e-05, |
| "loss": 1.3617, |
| "step": 11110 |
| }, |
| { |
| "epoch": 0.28, |
| "grad_norm": 6.3855462074279785, |
| "learning_rate": 1.2800000000000001e-05, |
| "loss": 1.3119, |
| "step": 11120 |
| }, |
| { |
| "epoch": 0.28, |
| "grad_norm": 7.427648544311523, |
| "learning_rate": 1.2793220338983053e-05, |
| "loss": 1.5297, |
| "step": 11130 |
| }, |
| { |
| "epoch": 0.28, |
| "grad_norm": 8.82369613647461, |
| "learning_rate": 1.2786440677966104e-05, |
| "loss": 1.3032, |
| "step": 11140 |
| }, |
| { |
| "epoch": 0.28, |
| "grad_norm": 3.6501011848449707, |
| "learning_rate": 1.2779661016949154e-05, |
| "loss": 1.3527, |
| "step": 11150 |
| }, |
| { |
| "epoch": 0.28, |
| "grad_norm": 2.652341604232788, |
| "learning_rate": 1.2772881355932205e-05, |
| "loss": 1.2817, |
| "step": 11160 |
| }, |
| { |
| "epoch": 0.28, |
| "grad_norm": 5.405377388000488, |
| "learning_rate": 1.2766101694915256e-05, |
| "loss": 1.2666, |
| "step": 11170 |
| }, |
| { |
| "epoch": 0.28, |
| "grad_norm": 7.799898147583008, |
| "learning_rate": 1.2759322033898308e-05, |
| "loss": 1.5301, |
| "step": 11180 |
| }, |
| { |
| "epoch": 0.28, |
| "grad_norm": 3.052325487136841, |
| "learning_rate": 1.2752542372881358e-05, |
| "loss": 1.2761, |
| "step": 11190 |
| }, |
| { |
| "epoch": 0.28, |
| "grad_norm": 5.282183647155762, |
| "learning_rate": 1.2745762711864407e-05, |
| "loss": 1.4701, |
| "step": 11200 |
| }, |
| { |
| "epoch": 0.28, |
| "grad_norm": 3.8688693046569824, |
| "learning_rate": 1.2738983050847457e-05, |
| "loss": 1.4549, |
| "step": 11210 |
| }, |
| { |
| "epoch": 0.28, |
| "grad_norm": 5.308023929595947, |
| "learning_rate": 1.2732203389830508e-05, |
| "loss": 1.3581, |
| "step": 11220 |
| }, |
| { |
| "epoch": 0.28, |
| "grad_norm": 5.7319841384887695, |
| "learning_rate": 1.272542372881356e-05, |
| "loss": 1.5457, |
| "step": 11230 |
| }, |
| { |
| "epoch": 0.28, |
| "grad_norm": 9.156388282775879, |
| "learning_rate": 1.2718644067796611e-05, |
| "loss": 1.5201, |
| "step": 11240 |
| }, |
| { |
| "epoch": 0.28, |
| "grad_norm": 5.315608024597168, |
| "learning_rate": 1.2711864406779661e-05, |
| "loss": 1.3505, |
| "step": 11250 |
| }, |
| { |
| "epoch": 0.28, |
| "grad_norm": 1.7720659971237183, |
| "learning_rate": 1.2705084745762712e-05, |
| "loss": 1.3938, |
| "step": 11260 |
| }, |
| { |
| "epoch": 0.28, |
| "grad_norm": 4.514403343200684, |
| "learning_rate": 1.2698305084745764e-05, |
| "loss": 1.6121, |
| "step": 11270 |
| }, |
| { |
| "epoch": 0.28, |
| "grad_norm": 6.911466121673584, |
| "learning_rate": 1.2691525423728815e-05, |
| "loss": 1.3458, |
| "step": 11280 |
| }, |
| { |
| "epoch": 0.28, |
| "grad_norm": 4.472634792327881, |
| "learning_rate": 1.2684745762711865e-05, |
| "loss": 1.2857, |
| "step": 11290 |
| }, |
| { |
| "epoch": 0.28, |
| "grad_norm": 2.4667749404907227, |
| "learning_rate": 1.2677966101694916e-05, |
| "loss": 1.3835, |
| "step": 11300 |
| }, |
| { |
| "epoch": 0.28, |
| "grad_norm": 4.501352787017822, |
| "learning_rate": 1.2671186440677968e-05, |
| "loss": 1.3502, |
| "step": 11310 |
| }, |
| { |
| "epoch": 0.28, |
| "grad_norm": 4.203758239746094, |
| "learning_rate": 1.2664406779661019e-05, |
| "loss": 1.412, |
| "step": 11320 |
| }, |
| { |
| "epoch": 0.28, |
| "grad_norm": 4.501011848449707, |
| "learning_rate": 1.2657627118644069e-05, |
| "loss": 1.4239, |
| "step": 11330 |
| }, |
| { |
| "epoch": 0.28, |
| "grad_norm": 6.272605895996094, |
| "learning_rate": 1.265084745762712e-05, |
| "loss": 1.4452, |
| "step": 11340 |
| }, |
| { |
| "epoch": 0.28, |
| "grad_norm": 1.9776923656463623, |
| "learning_rate": 1.2644067796610171e-05, |
| "loss": 1.3449, |
| "step": 11350 |
| }, |
| { |
| "epoch": 0.28, |
| "grad_norm": 2.2955100536346436, |
| "learning_rate": 1.2637288135593221e-05, |
| "loss": 1.4088, |
| "step": 11360 |
| }, |
| { |
| "epoch": 0.28, |
| "grad_norm": 5.005813121795654, |
| "learning_rate": 1.2630508474576273e-05, |
| "loss": 1.4663, |
| "step": 11370 |
| }, |
| { |
| "epoch": 0.28, |
| "grad_norm": 3.1890101432800293, |
| "learning_rate": 1.2623728813559324e-05, |
| "loss": 1.4736, |
| "step": 11380 |
| }, |
| { |
| "epoch": 0.28, |
| "grad_norm": 5.545890808105469, |
| "learning_rate": 1.2616949152542375e-05, |
| "loss": 1.3913, |
| "step": 11390 |
| }, |
| { |
| "epoch": 0.28, |
| "grad_norm": 3.0610640048980713, |
| "learning_rate": 1.2610169491525425e-05, |
| "loss": 1.1965, |
| "step": 11400 |
| }, |
| { |
| "epoch": 0.29, |
| "grad_norm": 5.919715881347656, |
| "learning_rate": 1.2603389830508475e-05, |
| "loss": 1.6285, |
| "step": 11410 |
| }, |
| { |
| "epoch": 0.29, |
| "grad_norm": 5.279333591461182, |
| "learning_rate": 1.2596610169491526e-05, |
| "loss": 1.4144, |
| "step": 11420 |
| }, |
| { |
| "epoch": 0.29, |
| "grad_norm": 6.2425737380981445, |
| "learning_rate": 1.2589830508474576e-05, |
| "loss": 1.4376, |
| "step": 11430 |
| }, |
| { |
| "epoch": 0.29, |
| "grad_norm": 1.8463287353515625, |
| "learning_rate": 1.2583050847457627e-05, |
| "loss": 1.4008, |
| "step": 11440 |
| }, |
| { |
| "epoch": 0.29, |
| "grad_norm": 6.911423206329346, |
| "learning_rate": 1.2576271186440679e-05, |
| "loss": 1.4218, |
| "step": 11450 |
| }, |
| { |
| "epoch": 0.29, |
| "grad_norm": 1.8190709352493286, |
| "learning_rate": 1.256949152542373e-05, |
| "loss": 1.3105, |
| "step": 11460 |
| }, |
| { |
| "epoch": 0.29, |
| "grad_norm": 2.9692275524139404, |
| "learning_rate": 1.256271186440678e-05, |
| "loss": 1.4071, |
| "step": 11470 |
| }, |
| { |
| "epoch": 0.29, |
| "grad_norm": 2.9249064922332764, |
| "learning_rate": 1.2555932203389831e-05, |
| "loss": 1.4017, |
| "step": 11480 |
| }, |
| { |
| "epoch": 0.29, |
| "grad_norm": 4.724937915802002, |
| "learning_rate": 1.2549152542372883e-05, |
| "loss": 1.3826, |
| "step": 11490 |
| }, |
| { |
| "epoch": 0.29, |
| "grad_norm": 2.75592303276062, |
| "learning_rate": 1.2542372881355932e-05, |
| "loss": 1.4793, |
| "step": 11500 |
| }, |
| { |
| "epoch": 0.29, |
| "eval_loss": 1.4384660720825195, |
| "eval_runtime": 66.1691, |
| "eval_samples_per_second": 15.113, |
| "eval_steps_per_second": 15.113, |
| "step": 11500 |
| }, |
| { |
| "epoch": 0.29, |
| "grad_norm": 4.604795455932617, |
| "learning_rate": 1.2535593220338984e-05, |
| "loss": 1.5059, |
| "step": 11510 |
| }, |
| { |
| "epoch": 0.29, |
| "grad_norm": 2.278486490249634, |
| "learning_rate": 1.2528813559322035e-05, |
| "loss": 1.307, |
| "step": 11520 |
| }, |
| { |
| "epoch": 0.29, |
| "grad_norm": 3.228501796722412, |
| "learning_rate": 1.2522033898305087e-05, |
| "loss": 1.4515, |
| "step": 11530 |
| }, |
| { |
| "epoch": 0.29, |
| "grad_norm": 5.253673076629639, |
| "learning_rate": 1.2515254237288136e-05, |
| "loss": 1.3283, |
| "step": 11540 |
| }, |
| { |
| "epoch": 0.29, |
| "grad_norm": 5.198097229003906, |
| "learning_rate": 1.2508474576271188e-05, |
| "loss": 1.25, |
| "step": 11550 |
| }, |
| { |
| "epoch": 0.29, |
| "grad_norm": 5.645752429962158, |
| "learning_rate": 1.2501694915254239e-05, |
| "loss": 1.555, |
| "step": 11560 |
| }, |
| { |
| "epoch": 0.29, |
| "grad_norm": 5.3141913414001465, |
| "learning_rate": 1.249491525423729e-05, |
| "loss": 1.5135, |
| "step": 11570 |
| }, |
| { |
| "epoch": 0.29, |
| "grad_norm": 1.6161258220672607, |
| "learning_rate": 1.248813559322034e-05, |
| "loss": 1.3809, |
| "step": 11580 |
| }, |
| { |
| "epoch": 0.29, |
| "grad_norm": 3.477424383163452, |
| "learning_rate": 1.2481355932203392e-05, |
| "loss": 1.3237, |
| "step": 11590 |
| }, |
| { |
| "epoch": 0.29, |
| "grad_norm": 3.588980197906494, |
| "learning_rate": 1.2474576271186443e-05, |
| "loss": 1.3171, |
| "step": 11600 |
| }, |
| { |
| "epoch": 0.29, |
| "grad_norm": 2.2803046703338623, |
| "learning_rate": 1.2467796610169494e-05, |
| "loss": 1.3816, |
| "step": 11610 |
| }, |
| { |
| "epoch": 0.29, |
| "grad_norm": 1.7084169387817383, |
| "learning_rate": 1.2461016949152542e-05, |
| "loss": 1.3663, |
| "step": 11620 |
| }, |
| { |
| "epoch": 0.29, |
| "grad_norm": 2.5784099102020264, |
| "learning_rate": 1.2454237288135594e-05, |
| "loss": 1.4399, |
| "step": 11630 |
| }, |
| { |
| "epoch": 0.29, |
| "grad_norm": 4.369024276733398, |
| "learning_rate": 1.2447457627118643e-05, |
| "loss": 1.3491, |
| "step": 11640 |
| }, |
| { |
| "epoch": 0.29, |
| "grad_norm": 6.239041328430176, |
| "learning_rate": 1.2440677966101695e-05, |
| "loss": 1.2562, |
| "step": 11650 |
| }, |
| { |
| "epoch": 0.29, |
| "grad_norm": 4.257971286773682, |
| "learning_rate": 1.2433898305084746e-05, |
| "loss": 1.4829, |
| "step": 11660 |
| }, |
| { |
| "epoch": 0.29, |
| "grad_norm": 4.7997660636901855, |
| "learning_rate": 1.2427118644067798e-05, |
| "loss": 1.3841, |
| "step": 11670 |
| }, |
| { |
| "epoch": 0.29, |
| "grad_norm": 3.7475717067718506, |
| "learning_rate": 1.2420338983050847e-05, |
| "loss": 1.3625, |
| "step": 11680 |
| }, |
| { |
| "epoch": 0.29, |
| "grad_norm": 2.6195523738861084, |
| "learning_rate": 1.2413559322033899e-05, |
| "loss": 1.4386, |
| "step": 11690 |
| }, |
| { |
| "epoch": 0.29, |
| "grad_norm": 1.9052702188491821, |
| "learning_rate": 1.240677966101695e-05, |
| "loss": 1.3974, |
| "step": 11700 |
| }, |
| { |
| "epoch": 0.29, |
| "grad_norm": 3.2266929149627686, |
| "learning_rate": 1.2400000000000002e-05, |
| "loss": 1.4356, |
| "step": 11710 |
| }, |
| { |
| "epoch": 0.29, |
| "grad_norm": 4.4783806800842285, |
| "learning_rate": 1.2393220338983051e-05, |
| "loss": 1.5246, |
| "step": 11720 |
| }, |
| { |
| "epoch": 0.29, |
| "grad_norm": 5.959903717041016, |
| "learning_rate": 1.2386440677966103e-05, |
| "loss": 1.2639, |
| "step": 11730 |
| }, |
| { |
| "epoch": 0.29, |
| "grad_norm": 3.8165271282196045, |
| "learning_rate": 1.2379661016949154e-05, |
| "loss": 1.2957, |
| "step": 11740 |
| }, |
| { |
| "epoch": 0.29, |
| "grad_norm": 6.7595367431640625, |
| "learning_rate": 1.2372881355932205e-05, |
| "loss": 1.4522, |
| "step": 11750 |
| }, |
| { |
| "epoch": 0.29, |
| "grad_norm": 2.844536066055298, |
| "learning_rate": 1.2366101694915255e-05, |
| "loss": 1.5718, |
| "step": 11760 |
| }, |
| { |
| "epoch": 0.29, |
| "grad_norm": 7.719458103179932, |
| "learning_rate": 1.2359322033898307e-05, |
| "loss": 1.579, |
| "step": 11770 |
| }, |
| { |
| "epoch": 0.29, |
| "grad_norm": 1.1778481006622314, |
| "learning_rate": 1.2352542372881358e-05, |
| "loss": 1.5497, |
| "step": 11780 |
| }, |
| { |
| "epoch": 0.29, |
| "grad_norm": 4.286680221557617, |
| "learning_rate": 1.234576271186441e-05, |
| "loss": 1.4507, |
| "step": 11790 |
| }, |
| { |
| "epoch": 0.29, |
| "grad_norm": 1.9755306243896484, |
| "learning_rate": 1.2338983050847459e-05, |
| "loss": 1.2822, |
| "step": 11800 |
| }, |
| { |
| "epoch": 0.3, |
| "grad_norm": 3.383787155151367, |
| "learning_rate": 1.233220338983051e-05, |
| "loss": 1.3459, |
| "step": 11810 |
| }, |
| { |
| "epoch": 0.3, |
| "grad_norm": 2.444471836090088, |
| "learning_rate": 1.2325423728813562e-05, |
| "loss": 1.3843, |
| "step": 11820 |
| }, |
| { |
| "epoch": 0.3, |
| "grad_norm": 6.240593433380127, |
| "learning_rate": 1.231864406779661e-05, |
| "loss": 1.696, |
| "step": 11830 |
| }, |
| { |
| "epoch": 0.3, |
| "grad_norm": 2.954618215560913, |
| "learning_rate": 1.2311864406779661e-05, |
| "loss": 1.3513, |
| "step": 11840 |
| }, |
| { |
| "epoch": 0.3, |
| "grad_norm": 4.6317243576049805, |
| "learning_rate": 1.2305084745762713e-05, |
| "loss": 1.4252, |
| "step": 11850 |
| }, |
| { |
| "epoch": 0.3, |
| "grad_norm": 7.941683769226074, |
| "learning_rate": 1.2298305084745762e-05, |
| "loss": 1.3129, |
| "step": 11860 |
| }, |
| { |
| "epoch": 0.3, |
| "grad_norm": 4.002432823181152, |
| "learning_rate": 1.2291525423728814e-05, |
| "loss": 1.5798, |
| "step": 11870 |
| }, |
| { |
| "epoch": 0.3, |
| "grad_norm": 2.3357415199279785, |
| "learning_rate": 1.2284745762711865e-05, |
| "loss": 1.2337, |
| "step": 11880 |
| }, |
| { |
| "epoch": 0.3, |
| "grad_norm": 2.709670305252075, |
| "learning_rate": 1.2277966101694917e-05, |
| "loss": 1.3422, |
| "step": 11890 |
| }, |
| { |
| "epoch": 0.3, |
| "grad_norm": 4.940807342529297, |
| "learning_rate": 1.2271186440677966e-05, |
| "loss": 1.4152, |
| "step": 11900 |
| }, |
| { |
| "epoch": 0.3, |
| "grad_norm": 6.757214546203613, |
| "learning_rate": 1.2264406779661018e-05, |
| "loss": 1.6552, |
| "step": 11910 |
| }, |
| { |
| "epoch": 0.3, |
| "grad_norm": 15.510869026184082, |
| "learning_rate": 1.2257627118644069e-05, |
| "loss": 1.3236, |
| "step": 11920 |
| }, |
| { |
| "epoch": 0.3, |
| "grad_norm": 1.592451810836792, |
| "learning_rate": 1.225084745762712e-05, |
| "loss": 1.4875, |
| "step": 11930 |
| }, |
| { |
| "epoch": 0.3, |
| "grad_norm": 5.799335479736328, |
| "learning_rate": 1.224406779661017e-05, |
| "loss": 1.5547, |
| "step": 11940 |
| }, |
| { |
| "epoch": 0.3, |
| "grad_norm": 2.2848143577575684, |
| "learning_rate": 1.2237288135593222e-05, |
| "loss": 1.5282, |
| "step": 11950 |
| }, |
| { |
| "epoch": 0.3, |
| "grad_norm": 1.5773121118545532, |
| "learning_rate": 1.2230508474576273e-05, |
| "loss": 1.3649, |
| "step": 11960 |
| }, |
| { |
| "epoch": 0.3, |
| "grad_norm": 9.566110610961914, |
| "learning_rate": 1.2223728813559323e-05, |
| "loss": 1.4326, |
| "step": 11970 |
| }, |
| { |
| "epoch": 0.3, |
| "grad_norm": 2.2648942470550537, |
| "learning_rate": 1.2216949152542374e-05, |
| "loss": 1.2719, |
| "step": 11980 |
| }, |
| { |
| "epoch": 0.3, |
| "grad_norm": 8.525247573852539, |
| "learning_rate": 1.2210169491525426e-05, |
| "loss": 1.3143, |
| "step": 11990 |
| }, |
| { |
| "epoch": 0.3, |
| "grad_norm": 2.6508049964904785, |
| "learning_rate": 1.2203389830508477e-05, |
| "loss": 1.5295, |
| "step": 12000 |
| }, |
| { |
| "epoch": 0.3, |
| "eval_loss": 1.422916293144226, |
| "eval_runtime": 66.1433, |
| "eval_samples_per_second": 15.119, |
| "eval_steps_per_second": 15.119, |
| "step": 12000 |
| }, |
| { |
| "epoch": 0.3, |
| "grad_norm": 6.6923394203186035, |
| "learning_rate": 1.2196610169491527e-05, |
| "loss": 1.4182, |
| "step": 12010 |
| }, |
| { |
| "epoch": 0.3, |
| "grad_norm": 4.860045433044434, |
| "learning_rate": 1.2189830508474578e-05, |
| "loss": 1.3847, |
| "step": 12020 |
| }, |
| { |
| "epoch": 0.3, |
| "grad_norm": 5.437830924987793, |
| "learning_rate": 1.2183050847457628e-05, |
| "loss": 1.3516, |
| "step": 12030 |
| }, |
| { |
| "epoch": 0.3, |
| "grad_norm": 2.578477382659912, |
| "learning_rate": 1.2176271186440677e-05, |
| "loss": 1.4079, |
| "step": 12040 |
| }, |
| { |
| "epoch": 0.3, |
| "grad_norm": 6.06549072265625, |
| "learning_rate": 1.2169491525423729e-05, |
| "loss": 1.3579, |
| "step": 12050 |
| }, |
| { |
| "epoch": 0.3, |
| "grad_norm": 7.3074750900268555, |
| "learning_rate": 1.216271186440678e-05, |
| "loss": 1.3398, |
| "step": 12060 |
| }, |
| { |
| "epoch": 0.3, |
| "grad_norm": 6.550406455993652, |
| "learning_rate": 1.2155932203389832e-05, |
| "loss": 1.4268, |
| "step": 12070 |
| }, |
| { |
| "epoch": 0.3, |
| "grad_norm": 8.258136749267578, |
| "learning_rate": 1.2149152542372881e-05, |
| "loss": 1.4757, |
| "step": 12080 |
| }, |
| { |
| "epoch": 0.3, |
| "grad_norm": 3.204159736633301, |
| "learning_rate": 1.2142372881355933e-05, |
| "loss": 1.4967, |
| "step": 12090 |
| }, |
| { |
| "epoch": 0.3, |
| "grad_norm": 6.911435127258301, |
| "learning_rate": 1.2135593220338984e-05, |
| "loss": 1.3255, |
| "step": 12100 |
| }, |
| { |
| "epoch": 0.3, |
| "grad_norm": 7.780226230621338, |
| "learning_rate": 1.2128813559322034e-05, |
| "loss": 1.3092, |
| "step": 12110 |
| }, |
| { |
| "epoch": 0.3, |
| "grad_norm": 3.6478915214538574, |
| "learning_rate": 1.2122033898305085e-05, |
| "loss": 1.5233, |
| "step": 12120 |
| }, |
| { |
| "epoch": 0.3, |
| "grad_norm": 2.810391664505005, |
| "learning_rate": 1.2115254237288137e-05, |
| "loss": 1.3415, |
| "step": 12130 |
| }, |
| { |
| "epoch": 0.3, |
| "grad_norm": 2.897092580795288, |
| "learning_rate": 1.2108474576271188e-05, |
| "loss": 1.3495, |
| "step": 12140 |
| }, |
| { |
| "epoch": 0.3, |
| "grad_norm": 7.986344814300537, |
| "learning_rate": 1.2101694915254238e-05, |
| "loss": 1.2973, |
| "step": 12150 |
| }, |
| { |
| "epoch": 0.3, |
| "grad_norm": 5.259188652038574, |
| "learning_rate": 1.209491525423729e-05, |
| "loss": 1.5173, |
| "step": 12160 |
| }, |
| { |
| "epoch": 0.3, |
| "grad_norm": 3.554582357406616, |
| "learning_rate": 1.208813559322034e-05, |
| "loss": 1.4427, |
| "step": 12170 |
| }, |
| { |
| "epoch": 0.3, |
| "grad_norm": 1.8010913133621216, |
| "learning_rate": 1.2081355932203392e-05, |
| "loss": 1.2691, |
| "step": 12180 |
| }, |
| { |
| "epoch": 0.3, |
| "grad_norm": 7.936720371246338, |
| "learning_rate": 1.2074576271186442e-05, |
| "loss": 1.2707, |
| "step": 12190 |
| }, |
| { |
| "epoch": 0.3, |
| "grad_norm": 3.37536883354187, |
| "learning_rate": 1.2067796610169493e-05, |
| "loss": 1.3896, |
| "step": 12200 |
| }, |
| { |
| "epoch": 0.31, |
| "grad_norm": 3.7712466716766357, |
| "learning_rate": 1.2061016949152544e-05, |
| "loss": 1.6235, |
| "step": 12210 |
| }, |
| { |
| "epoch": 0.31, |
| "grad_norm": 6.127082824707031, |
| "learning_rate": 1.2054237288135596e-05, |
| "loss": 1.465, |
| "step": 12220 |
| }, |
| { |
| "epoch": 0.31, |
| "grad_norm": 2.607111930847168, |
| "learning_rate": 1.2047457627118646e-05, |
| "loss": 1.2618, |
| "step": 12230 |
| }, |
| { |
| "epoch": 0.31, |
| "grad_norm": 5.540430545806885, |
| "learning_rate": 1.2040677966101695e-05, |
| "loss": 1.3243, |
| "step": 12240 |
| }, |
| { |
| "epoch": 0.31, |
| "grad_norm": 2.288236141204834, |
| "learning_rate": 1.2033898305084745e-05, |
| "loss": 1.4512, |
| "step": 12250 |
| }, |
| { |
| "epoch": 0.31, |
| "grad_norm": 4.430274963378906, |
| "learning_rate": 1.2027118644067796e-05, |
| "loss": 1.3736, |
| "step": 12260 |
| }, |
| { |
| "epoch": 0.31, |
| "grad_norm": 5.795346260070801, |
| "learning_rate": 1.2020338983050848e-05, |
| "loss": 1.4164, |
| "step": 12270 |
| }, |
| { |
| "epoch": 0.31, |
| "grad_norm": 1.6566057205200195, |
| "learning_rate": 1.20135593220339e-05, |
| "loss": 1.4127, |
| "step": 12280 |
| }, |
| { |
| "epoch": 0.31, |
| "grad_norm": 2.7761037349700928, |
| "learning_rate": 1.2006779661016949e-05, |
| "loss": 1.4318, |
| "step": 12290 |
| }, |
| { |
| "epoch": 0.31, |
| "grad_norm": 3.363727569580078, |
| "learning_rate": 1.2e-05, |
| "loss": 1.4556, |
| "step": 12300 |
| }, |
| { |
| "epoch": 0.31, |
| "grad_norm": 5.918773174285889, |
| "learning_rate": 1.1993220338983052e-05, |
| "loss": 1.4288, |
| "step": 12310 |
| }, |
| { |
| "epoch": 0.31, |
| "grad_norm": 3.683154821395874, |
| "learning_rate": 1.1986440677966103e-05, |
| "loss": 1.3806, |
| "step": 12320 |
| }, |
| { |
| "epoch": 0.31, |
| "grad_norm": 10.262763977050781, |
| "learning_rate": 1.1979661016949153e-05, |
| "loss": 1.4731, |
| "step": 12330 |
| }, |
| { |
| "epoch": 0.31, |
| "grad_norm": 2.709179162979126, |
| "learning_rate": 1.1972881355932204e-05, |
| "loss": 1.43, |
| "step": 12340 |
| }, |
| { |
| "epoch": 0.31, |
| "grad_norm": 6.632396221160889, |
| "learning_rate": 1.1966101694915256e-05, |
| "loss": 1.5524, |
| "step": 12350 |
| }, |
| { |
| "epoch": 0.31, |
| "grad_norm": 2.5457146167755127, |
| "learning_rate": 1.1959322033898307e-05, |
| "loss": 1.4103, |
| "step": 12360 |
| }, |
| { |
| "epoch": 0.31, |
| "grad_norm": 3.7321617603302, |
| "learning_rate": 1.1952542372881357e-05, |
| "loss": 1.7357, |
| "step": 12370 |
| }, |
| { |
| "epoch": 0.31, |
| "grad_norm": 13.385096549987793, |
| "learning_rate": 1.1945762711864408e-05, |
| "loss": 1.5424, |
| "step": 12380 |
| }, |
| { |
| "epoch": 0.31, |
| "grad_norm": 4.128885746002197, |
| "learning_rate": 1.193898305084746e-05, |
| "loss": 1.4083, |
| "step": 12390 |
| }, |
| { |
| "epoch": 0.31, |
| "grad_norm": 3.696180820465088, |
| "learning_rate": 1.1932203389830511e-05, |
| "loss": 1.4562, |
| "step": 12400 |
| }, |
| { |
| "epoch": 0.31, |
| "grad_norm": 2.035158157348633, |
| "learning_rate": 1.192542372881356e-05, |
| "loss": 1.4134, |
| "step": 12410 |
| }, |
| { |
| "epoch": 0.31, |
| "grad_norm": 5.3175787925720215, |
| "learning_rate": 1.1918644067796612e-05, |
| "loss": 1.4388, |
| "step": 12420 |
| }, |
| { |
| "epoch": 0.31, |
| "grad_norm": 4.401218891143799, |
| "learning_rate": 1.1911864406779663e-05, |
| "loss": 1.3129, |
| "step": 12430 |
| }, |
| { |
| "epoch": 0.31, |
| "grad_norm": 2.708665132522583, |
| "learning_rate": 1.1905084745762713e-05, |
| "loss": 1.3967, |
| "step": 12440 |
| }, |
| { |
| "epoch": 0.31, |
| "grad_norm": 4.167813301086426, |
| "learning_rate": 1.1898305084745763e-05, |
| "loss": 1.5785, |
| "step": 12450 |
| }, |
| { |
| "epoch": 0.31, |
| "grad_norm": 6.84459114074707, |
| "learning_rate": 1.1891525423728814e-05, |
| "loss": 1.4729, |
| "step": 12460 |
| }, |
| { |
| "epoch": 0.31, |
| "grad_norm": 7.7822651863098145, |
| "learning_rate": 1.1884745762711864e-05, |
| "loss": 1.3682, |
| "step": 12470 |
| }, |
| { |
| "epoch": 0.31, |
| "grad_norm": 14.827698707580566, |
| "learning_rate": 1.1877966101694915e-05, |
| "loss": 1.316, |
| "step": 12480 |
| }, |
| { |
| "epoch": 0.31, |
| "grad_norm": 7.276782512664795, |
| "learning_rate": 1.1871186440677967e-05, |
| "loss": 1.4088, |
| "step": 12490 |
| }, |
| { |
| "epoch": 0.31, |
| "grad_norm": 2.0169296264648438, |
| "learning_rate": 1.1864406779661018e-05, |
| "loss": 1.287, |
| "step": 12500 |
| }, |
| { |
| "epoch": 0.31, |
| "eval_loss": 1.3866758346557617, |
| "eval_runtime": 66.1437, |
| "eval_samples_per_second": 15.119, |
| "eval_steps_per_second": 15.119, |
| "step": 12500 |
| }, |
| { |
| "epoch": 0.31, |
| "grad_norm": 3.162349224090576, |
| "learning_rate": 1.1857627118644068e-05, |
| "loss": 1.6078, |
| "step": 12510 |
| }, |
| { |
| "epoch": 0.31, |
| "grad_norm": 2.835212469100952, |
| "learning_rate": 1.185084745762712e-05, |
| "loss": 1.3934, |
| "step": 12520 |
| }, |
| { |
| "epoch": 0.31, |
| "grad_norm": 3.5523407459259033, |
| "learning_rate": 1.184406779661017e-05, |
| "loss": 1.4201, |
| "step": 12530 |
| }, |
| { |
| "epoch": 0.31, |
| "grad_norm": 2.8521957397460938, |
| "learning_rate": 1.183728813559322e-05, |
| "loss": 1.1765, |
| "step": 12540 |
| }, |
| { |
| "epoch": 0.31, |
| "grad_norm": 6.991311073303223, |
| "learning_rate": 1.1830508474576272e-05, |
| "loss": 1.2703, |
| "step": 12550 |
| }, |
| { |
| "epoch": 0.31, |
| "grad_norm": 9.624125480651855, |
| "learning_rate": 1.1823728813559323e-05, |
| "loss": 1.3824, |
| "step": 12560 |
| }, |
| { |
| "epoch": 0.31, |
| "grad_norm": 2.1948940753936768, |
| "learning_rate": 1.1816949152542375e-05, |
| "loss": 1.4304, |
| "step": 12570 |
| }, |
| { |
| "epoch": 0.31, |
| "grad_norm": 2.5481557846069336, |
| "learning_rate": 1.1810169491525424e-05, |
| "loss": 1.3297, |
| "step": 12580 |
| }, |
| { |
| "epoch": 0.31, |
| "grad_norm": 2.6598098278045654, |
| "learning_rate": 1.1803389830508476e-05, |
| "loss": 1.1585, |
| "step": 12590 |
| }, |
| { |
| "epoch": 0.32, |
| "grad_norm": 6.169371128082275, |
| "learning_rate": 1.1796610169491527e-05, |
| "loss": 1.4042, |
| "step": 12600 |
| }, |
| { |
| "epoch": 0.32, |
| "grad_norm": 2.9693713188171387, |
| "learning_rate": 1.1789830508474578e-05, |
| "loss": 1.4801, |
| "step": 12610 |
| }, |
| { |
| "epoch": 0.32, |
| "grad_norm": 5.916517734527588, |
| "learning_rate": 1.1783050847457628e-05, |
| "loss": 1.3524, |
| "step": 12620 |
| }, |
| { |
| "epoch": 0.32, |
| "grad_norm": 12.339306831359863, |
| "learning_rate": 1.177627118644068e-05, |
| "loss": 1.4822, |
| "step": 12630 |
| }, |
| { |
| "epoch": 0.32, |
| "grad_norm": 5.854163646697998, |
| "learning_rate": 1.1769491525423731e-05, |
| "loss": 1.6638, |
| "step": 12640 |
| }, |
| { |
| "epoch": 0.32, |
| "grad_norm": 4.270496368408203, |
| "learning_rate": 1.1762711864406782e-05, |
| "loss": 1.3407, |
| "step": 12650 |
| }, |
| { |
| "epoch": 0.32, |
| "grad_norm": 6.106138706207275, |
| "learning_rate": 1.175593220338983e-05, |
| "loss": 1.4408, |
| "step": 12660 |
| }, |
| { |
| "epoch": 0.32, |
| "grad_norm": 1.9153695106506348, |
| "learning_rate": 1.1749152542372882e-05, |
| "loss": 1.3821, |
| "step": 12670 |
| }, |
| { |
| "epoch": 0.32, |
| "grad_norm": 4.479553699493408, |
| "learning_rate": 1.1742372881355931e-05, |
| "loss": 1.3252, |
| "step": 12680 |
| }, |
| { |
| "epoch": 0.32, |
| "grad_norm": 4.480614185333252, |
| "learning_rate": 1.1735593220338983e-05, |
| "loss": 1.5234, |
| "step": 12690 |
| }, |
| { |
| "epoch": 0.32, |
| "grad_norm": 5.578995704650879, |
| "learning_rate": 1.1728813559322034e-05, |
| "loss": 1.4655, |
| "step": 12700 |
| }, |
| { |
| "epoch": 0.32, |
| "grad_norm": 4.981246471405029, |
| "learning_rate": 1.1722033898305086e-05, |
| "loss": 1.3632, |
| "step": 12710 |
| }, |
| { |
| "epoch": 0.32, |
| "grad_norm": 12.358545303344727, |
| "learning_rate": 1.1715254237288135e-05, |
| "loss": 1.3998, |
| "step": 12720 |
| }, |
| { |
| "epoch": 0.32, |
| "grad_norm": 14.668237686157227, |
| "learning_rate": 1.1708474576271187e-05, |
| "loss": 1.3784, |
| "step": 12730 |
| }, |
| { |
| "epoch": 0.32, |
| "grad_norm": 3.8441002368927, |
| "learning_rate": 1.1701694915254238e-05, |
| "loss": 1.482, |
| "step": 12740 |
| }, |
| { |
| "epoch": 0.32, |
| "grad_norm": 4.92882776260376, |
| "learning_rate": 1.169491525423729e-05, |
| "loss": 1.4855, |
| "step": 12750 |
| }, |
| { |
| "epoch": 0.32, |
| "grad_norm": 5.74147891998291, |
| "learning_rate": 1.168813559322034e-05, |
| "loss": 1.4695, |
| "step": 12760 |
| }, |
| { |
| "epoch": 0.32, |
| "grad_norm": 4.936017990112305, |
| "learning_rate": 1.168135593220339e-05, |
| "loss": 1.3325, |
| "step": 12770 |
| }, |
| { |
| "epoch": 0.32, |
| "grad_norm": 4.67604923248291, |
| "learning_rate": 1.1674576271186442e-05, |
| "loss": 1.5104, |
| "step": 12780 |
| }, |
| { |
| "epoch": 0.32, |
| "grad_norm": 2.439807891845703, |
| "learning_rate": 1.1667796610169494e-05, |
| "loss": 1.3626, |
| "step": 12790 |
| }, |
| { |
| "epoch": 0.32, |
| "grad_norm": 1.6871551275253296, |
| "learning_rate": 1.1661016949152543e-05, |
| "loss": 1.5824, |
| "step": 12800 |
| }, |
| { |
| "epoch": 0.32, |
| "grad_norm": 6.6204094886779785, |
| "learning_rate": 1.1654237288135595e-05, |
| "loss": 1.2672, |
| "step": 12810 |
| }, |
| { |
| "epoch": 0.32, |
| "grad_norm": 3.2927229404449463, |
| "learning_rate": 1.1647457627118646e-05, |
| "loss": 1.3032, |
| "step": 12820 |
| }, |
| { |
| "epoch": 0.32, |
| "grad_norm": 8.059797286987305, |
| "learning_rate": 1.1640677966101697e-05, |
| "loss": 1.3087, |
| "step": 12830 |
| }, |
| { |
| "epoch": 0.32, |
| "grad_norm": 3.8133039474487305, |
| "learning_rate": 1.1633898305084747e-05, |
| "loss": 1.4655, |
| "step": 12840 |
| }, |
| { |
| "epoch": 0.32, |
| "grad_norm": 4.857828140258789, |
| "learning_rate": 1.1627118644067799e-05, |
| "loss": 1.3882, |
| "step": 12850 |
| }, |
| { |
| "epoch": 0.32, |
| "grad_norm": 4.760891914367676, |
| "learning_rate": 1.162033898305085e-05, |
| "loss": 1.438, |
| "step": 12860 |
| }, |
| { |
| "epoch": 0.32, |
| "grad_norm": 5.746943473815918, |
| "learning_rate": 1.1613559322033898e-05, |
| "loss": 1.29, |
| "step": 12870 |
| }, |
| { |
| "epoch": 0.32, |
| "grad_norm": 10.054022789001465, |
| "learning_rate": 1.160677966101695e-05, |
| "loss": 1.4788, |
| "step": 12880 |
| }, |
| { |
| "epoch": 0.32, |
| "grad_norm": 11.264670372009277, |
| "learning_rate": 1.16e-05, |
| "loss": 1.4834, |
| "step": 12890 |
| }, |
| { |
| "epoch": 0.32, |
| "grad_norm": 2.883831739425659, |
| "learning_rate": 1.159322033898305e-05, |
| "loss": 1.3276, |
| "step": 12900 |
| }, |
| { |
| "epoch": 0.32, |
| "grad_norm": 3.7215306758880615, |
| "learning_rate": 1.1586440677966102e-05, |
| "loss": 1.3578, |
| "step": 12910 |
| }, |
| { |
| "epoch": 0.32, |
| "grad_norm": 3.2970972061157227, |
| "learning_rate": 1.1579661016949153e-05, |
| "loss": 1.3238, |
| "step": 12920 |
| }, |
| { |
| "epoch": 0.32, |
| "grad_norm": 5.821772575378418, |
| "learning_rate": 1.1572881355932205e-05, |
| "loss": 1.3903, |
| "step": 12930 |
| }, |
| { |
| "epoch": 0.32, |
| "grad_norm": 2.4745774269104004, |
| "learning_rate": 1.1566101694915254e-05, |
| "loss": 1.166, |
| "step": 12940 |
| }, |
| { |
| "epoch": 0.32, |
| "grad_norm": 2.578946590423584, |
| "learning_rate": 1.1559322033898306e-05, |
| "loss": 1.483, |
| "step": 12950 |
| }, |
| { |
| "epoch": 0.32, |
| "grad_norm": 3.525611400604248, |
| "learning_rate": 1.1552542372881357e-05, |
| "loss": 1.3907, |
| "step": 12960 |
| }, |
| { |
| "epoch": 0.32, |
| "grad_norm": 4.851688385009766, |
| "learning_rate": 1.1545762711864409e-05, |
| "loss": 1.2686, |
| "step": 12970 |
| }, |
| { |
| "epoch": 0.32, |
| "grad_norm": 2.2654104232788086, |
| "learning_rate": 1.1538983050847458e-05, |
| "loss": 1.3566, |
| "step": 12980 |
| }, |
| { |
| "epoch": 0.32, |
| "grad_norm": 4.508155345916748, |
| "learning_rate": 1.153220338983051e-05, |
| "loss": 1.434, |
| "step": 12990 |
| }, |
| { |
| "epoch": 0.33, |
| "grad_norm": 4.140927791595459, |
| "learning_rate": 1.1525423728813561e-05, |
| "loss": 1.4034, |
| "step": 13000 |
| }, |
| { |
| "epoch": 0.33, |
| "eval_loss": 1.4143681526184082, |
| "eval_runtime": 66.1661, |
| "eval_samples_per_second": 15.113, |
| "eval_steps_per_second": 15.113, |
| "step": 13000 |
| }, |
| { |
| "epoch": 0.33, |
| "grad_norm": 4.795019149780273, |
| "learning_rate": 1.151864406779661e-05, |
| "loss": 1.617, |
| "step": 13010 |
| }, |
| { |
| "epoch": 0.33, |
| "grad_norm": 4.142047882080078, |
| "learning_rate": 1.1511864406779662e-05, |
| "loss": 1.4168, |
| "step": 13020 |
| }, |
| { |
| "epoch": 0.33, |
| "grad_norm": 3.405993700027466, |
| "learning_rate": 1.1505084745762714e-05, |
| "loss": 1.4029, |
| "step": 13030 |
| }, |
| { |
| "epoch": 0.33, |
| "grad_norm": 4.590165615081787, |
| "learning_rate": 1.1498305084745765e-05, |
| "loss": 1.2981, |
| "step": 13040 |
| }, |
| { |
| "epoch": 0.33, |
| "grad_norm": 1.7291467189788818, |
| "learning_rate": 1.1491525423728815e-05, |
| "loss": 1.3752, |
| "step": 13050 |
| }, |
| { |
| "epoch": 0.33, |
| "grad_norm": 2.5406222343444824, |
| "learning_rate": 1.1484745762711866e-05, |
| "loss": 1.4635, |
| "step": 13060 |
| }, |
| { |
| "epoch": 0.33, |
| "grad_norm": 5.161932468414307, |
| "learning_rate": 1.1477966101694916e-05, |
| "loss": 1.3763, |
| "step": 13070 |
| }, |
| { |
| "epoch": 0.33, |
| "grad_norm": 5.527559280395508, |
| "learning_rate": 1.1471186440677965e-05, |
| "loss": 1.3618, |
| "step": 13080 |
| }, |
| { |
| "epoch": 0.33, |
| "grad_norm": 3.843920946121216, |
| "learning_rate": 1.1464406779661017e-05, |
| "loss": 1.312, |
| "step": 13090 |
| }, |
| { |
| "epoch": 0.33, |
| "grad_norm": 4.652730464935303, |
| "learning_rate": 1.1457627118644068e-05, |
| "loss": 1.3818, |
| "step": 13100 |
| }, |
| { |
| "epoch": 0.33, |
| "grad_norm": 3.841367244720459, |
| "learning_rate": 1.145084745762712e-05, |
| "loss": 1.4252, |
| "step": 13110 |
| }, |
| { |
| "epoch": 0.33, |
| "grad_norm": 4.024381637573242, |
| "learning_rate": 1.144406779661017e-05, |
| "loss": 1.4014, |
| "step": 13120 |
| }, |
| { |
| "epoch": 0.33, |
| "grad_norm": 3.9260082244873047, |
| "learning_rate": 1.143728813559322e-05, |
| "loss": 1.4733, |
| "step": 13130 |
| }, |
| { |
| "epoch": 0.33, |
| "grad_norm": 7.934506893157959, |
| "learning_rate": 1.1430508474576272e-05, |
| "loss": 1.6533, |
| "step": 13140 |
| }, |
| { |
| "epoch": 0.33, |
| "grad_norm": 4.169222354888916, |
| "learning_rate": 1.1423728813559322e-05, |
| "loss": 1.4004, |
| "step": 13150 |
| }, |
| { |
| "epoch": 0.33, |
| "grad_norm": 3.165480852127075, |
| "learning_rate": 1.1416949152542373e-05, |
| "loss": 1.2961, |
| "step": 13160 |
| }, |
| { |
| "epoch": 0.33, |
| "grad_norm": 1.703479528427124, |
| "learning_rate": 1.1410169491525425e-05, |
| "loss": 1.5203, |
| "step": 13170 |
| }, |
| { |
| "epoch": 0.33, |
| "grad_norm": 15.553948402404785, |
| "learning_rate": 1.1403389830508476e-05, |
| "loss": 1.4719, |
| "step": 13180 |
| }, |
| { |
| "epoch": 0.33, |
| "grad_norm": 3.422497034072876, |
| "learning_rate": 1.1396610169491526e-05, |
| "loss": 1.4232, |
| "step": 13190 |
| }, |
| { |
| "epoch": 0.33, |
| "grad_norm": 3.0440897941589355, |
| "learning_rate": 1.1389830508474577e-05, |
| "loss": 1.3813, |
| "step": 13200 |
| }, |
| { |
| "epoch": 0.33, |
| "grad_norm": 3.650101661682129, |
| "learning_rate": 1.1383050847457629e-05, |
| "loss": 1.4398, |
| "step": 13210 |
| }, |
| { |
| "epoch": 0.33, |
| "grad_norm": 9.34521484375, |
| "learning_rate": 1.137627118644068e-05, |
| "loss": 1.4457, |
| "step": 13220 |
| }, |
| { |
| "epoch": 0.33, |
| "grad_norm": 2.5624570846557617, |
| "learning_rate": 1.136949152542373e-05, |
| "loss": 1.3008, |
| "step": 13230 |
| }, |
| { |
| "epoch": 0.33, |
| "grad_norm": 2.584078311920166, |
| "learning_rate": 1.1362711864406781e-05, |
| "loss": 1.3545, |
| "step": 13240 |
| }, |
| { |
| "epoch": 0.33, |
| "grad_norm": 4.867392539978027, |
| "learning_rate": 1.1355932203389833e-05, |
| "loss": 1.4067, |
| "step": 13250 |
| }, |
| { |
| "epoch": 0.33, |
| "grad_norm": 2.185886859893799, |
| "learning_rate": 1.1349152542372884e-05, |
| "loss": 1.3976, |
| "step": 13260 |
| }, |
| { |
| "epoch": 0.33, |
| "grad_norm": 4.314910888671875, |
| "learning_rate": 1.1342372881355934e-05, |
| "loss": 1.3568, |
| "step": 13270 |
| }, |
| { |
| "epoch": 0.33, |
| "grad_norm": 5.425469875335693, |
| "learning_rate": 1.1335593220338983e-05, |
| "loss": 1.5861, |
| "step": 13280 |
| }, |
| { |
| "epoch": 0.33, |
| "grad_norm": 8.086491584777832, |
| "learning_rate": 1.1328813559322033e-05, |
| "loss": 1.567, |
| "step": 13290 |
| }, |
| { |
| "epoch": 0.33, |
| "grad_norm": 4.411368370056152, |
| "learning_rate": 1.1322033898305084e-05, |
| "loss": 1.2149, |
| "step": 13300 |
| }, |
| { |
| "epoch": 0.33, |
| "grad_norm": 5.792349815368652, |
| "learning_rate": 1.1315254237288136e-05, |
| "loss": 1.4701, |
| "step": 13310 |
| }, |
| { |
| "epoch": 0.33, |
| "grad_norm": 2.967857837677002, |
| "learning_rate": 1.1308474576271187e-05, |
| "loss": 1.3723, |
| "step": 13320 |
| }, |
| { |
| "epoch": 0.33, |
| "grad_norm": 4.052553176879883, |
| "learning_rate": 1.1301694915254237e-05, |
| "loss": 1.3078, |
| "step": 13330 |
| }, |
| { |
| "epoch": 0.33, |
| "grad_norm": 1.5086994171142578, |
| "learning_rate": 1.1294915254237288e-05, |
| "loss": 1.4948, |
| "step": 13340 |
| }, |
| { |
| "epoch": 0.33, |
| "grad_norm": 8.732611656188965, |
| "learning_rate": 1.128813559322034e-05, |
| "loss": 1.5011, |
| "step": 13350 |
| }, |
| { |
| "epoch": 0.33, |
| "grad_norm": 6.4710235595703125, |
| "learning_rate": 1.1281355932203391e-05, |
| "loss": 1.3214, |
| "step": 13360 |
| }, |
| { |
| "epoch": 0.33, |
| "grad_norm": 5.698025226593018, |
| "learning_rate": 1.1274576271186441e-05, |
| "loss": 1.4719, |
| "step": 13370 |
| }, |
| { |
| "epoch": 0.33, |
| "grad_norm": 1.9803974628448486, |
| "learning_rate": 1.1267796610169492e-05, |
| "loss": 1.4078, |
| "step": 13380 |
| }, |
| { |
| "epoch": 0.33, |
| "grad_norm": 6.019112586975098, |
| "learning_rate": 1.1261016949152544e-05, |
| "loss": 1.4646, |
| "step": 13390 |
| }, |
| { |
| "epoch": 0.34, |
| "grad_norm": 5.664279937744141, |
| "learning_rate": 1.1254237288135595e-05, |
| "loss": 1.5236, |
| "step": 13400 |
| }, |
| { |
| "epoch": 0.34, |
| "grad_norm": 3.8619132041931152, |
| "learning_rate": 1.1247457627118645e-05, |
| "loss": 1.3184, |
| "step": 13410 |
| }, |
| { |
| "epoch": 0.34, |
| "grad_norm": 2.45538330078125, |
| "learning_rate": 1.1240677966101696e-05, |
| "loss": 1.3868, |
| "step": 13420 |
| }, |
| { |
| "epoch": 0.34, |
| "grad_norm": 3.7637529373168945, |
| "learning_rate": 1.1233898305084748e-05, |
| "loss": 1.4405, |
| "step": 13430 |
| }, |
| { |
| "epoch": 0.34, |
| "grad_norm": 4.774598121643066, |
| "learning_rate": 1.1227118644067799e-05, |
| "loss": 1.287, |
| "step": 13440 |
| }, |
| { |
| "epoch": 0.34, |
| "grad_norm": 3.2298827171325684, |
| "learning_rate": 1.1220338983050849e-05, |
| "loss": 1.169, |
| "step": 13450 |
| }, |
| { |
| "epoch": 0.34, |
| "grad_norm": 1.9264500141143799, |
| "learning_rate": 1.12135593220339e-05, |
| "loss": 1.4071, |
| "step": 13460 |
| }, |
| { |
| "epoch": 0.34, |
| "grad_norm": 3.1234660148620605, |
| "learning_rate": 1.1206779661016951e-05, |
| "loss": 1.3734, |
| "step": 13470 |
| }, |
| { |
| "epoch": 0.34, |
| "grad_norm": 1.4275059700012207, |
| "learning_rate": 1.1200000000000001e-05, |
| "loss": 1.4451, |
| "step": 13480 |
| }, |
| { |
| "epoch": 0.34, |
| "grad_norm": 4.883017063140869, |
| "learning_rate": 1.1193220338983051e-05, |
| "loss": 1.4083, |
| "step": 13490 |
| }, |
| { |
| "epoch": 0.34, |
| "grad_norm": 7.2406325340271, |
| "learning_rate": 1.1186440677966102e-05, |
| "loss": 1.209, |
| "step": 13500 |
| }, |
| { |
| "epoch": 0.34, |
| "eval_loss": 1.4201830625534058, |
| "eval_runtime": 66.1321, |
| "eval_samples_per_second": 15.121, |
| "eval_steps_per_second": 15.121, |
| "step": 13500 |
| }, |
| { |
| "epoch": 0.34, |
| "grad_norm": 5.777285099029541, |
| "learning_rate": 1.1179661016949152e-05, |
| "loss": 1.3445, |
| "step": 13510 |
| }, |
| { |
| "epoch": 0.34, |
| "grad_norm": 3.1429507732391357, |
| "learning_rate": 1.1172881355932203e-05, |
| "loss": 1.4894, |
| "step": 13520 |
| }, |
| { |
| "epoch": 0.34, |
| "grad_norm": 10.670025825500488, |
| "learning_rate": 1.1166101694915255e-05, |
| "loss": 1.4, |
| "step": 13530 |
| }, |
| { |
| "epoch": 0.34, |
| "grad_norm": 3.0807442665100098, |
| "learning_rate": 1.1159322033898306e-05, |
| "loss": 1.3409, |
| "step": 13540 |
| }, |
| { |
| "epoch": 0.34, |
| "grad_norm": 7.570891857147217, |
| "learning_rate": 1.1152542372881356e-05, |
| "loss": 1.3676, |
| "step": 13550 |
| }, |
| { |
| "epoch": 0.34, |
| "grad_norm": 11.09575080871582, |
| "learning_rate": 1.1145762711864407e-05, |
| "loss": 1.402, |
| "step": 13560 |
| }, |
| { |
| "epoch": 0.34, |
| "grad_norm": 6.450993537902832, |
| "learning_rate": 1.1138983050847459e-05, |
| "loss": 1.4072, |
| "step": 13570 |
| }, |
| { |
| "epoch": 0.34, |
| "grad_norm": 6.089435577392578, |
| "learning_rate": 1.113220338983051e-05, |
| "loss": 1.3705, |
| "step": 13580 |
| }, |
| { |
| "epoch": 0.34, |
| "grad_norm": 4.839201927185059, |
| "learning_rate": 1.112542372881356e-05, |
| "loss": 1.5674, |
| "step": 13590 |
| }, |
| { |
| "epoch": 0.34, |
| "grad_norm": 12.40379810333252, |
| "learning_rate": 1.1118644067796611e-05, |
| "loss": 1.2697, |
| "step": 13600 |
| }, |
| { |
| "epoch": 0.34, |
| "grad_norm": 5.095344066619873, |
| "learning_rate": 1.1111864406779663e-05, |
| "loss": 1.1917, |
| "step": 13610 |
| }, |
| { |
| "epoch": 0.34, |
| "grad_norm": 3.252272367477417, |
| "learning_rate": 1.1105084745762712e-05, |
| "loss": 1.3542, |
| "step": 13620 |
| }, |
| { |
| "epoch": 0.34, |
| "grad_norm": 8.330405235290527, |
| "learning_rate": 1.1098305084745764e-05, |
| "loss": 1.4464, |
| "step": 13630 |
| }, |
| { |
| "epoch": 0.34, |
| "grad_norm": 1.9642021656036377, |
| "learning_rate": 1.1091525423728815e-05, |
| "loss": 1.2904, |
| "step": 13640 |
| }, |
| { |
| "epoch": 0.34, |
| "grad_norm": 3.2120866775512695, |
| "learning_rate": 1.1084745762711867e-05, |
| "loss": 1.3887, |
| "step": 13650 |
| }, |
| { |
| "epoch": 0.34, |
| "grad_norm": 3.582340717315674, |
| "learning_rate": 1.1077966101694916e-05, |
| "loss": 1.5277, |
| "step": 13660 |
| }, |
| { |
| "epoch": 0.34, |
| "grad_norm": 3.2663116455078125, |
| "learning_rate": 1.1071186440677968e-05, |
| "loss": 1.2393, |
| "step": 13670 |
| }, |
| { |
| "epoch": 0.34, |
| "grad_norm": 7.755127906799316, |
| "learning_rate": 1.1064406779661019e-05, |
| "loss": 1.3327, |
| "step": 13680 |
| }, |
| { |
| "epoch": 0.34, |
| "grad_norm": 4.777354717254639, |
| "learning_rate": 1.105762711864407e-05, |
| "loss": 1.4229, |
| "step": 13690 |
| }, |
| { |
| "epoch": 0.34, |
| "grad_norm": 4.864866733551025, |
| "learning_rate": 1.1050847457627118e-05, |
| "loss": 1.5831, |
| "step": 13700 |
| }, |
| { |
| "epoch": 0.34, |
| "grad_norm": 3.381706953048706, |
| "learning_rate": 1.104406779661017e-05, |
| "loss": 1.5776, |
| "step": 13710 |
| }, |
| { |
| "epoch": 0.34, |
| "grad_norm": 2.4377899169921875, |
| "learning_rate": 1.1037288135593221e-05, |
| "loss": 1.4538, |
| "step": 13720 |
| }, |
| { |
| "epoch": 0.34, |
| "grad_norm": 3.592681646347046, |
| "learning_rate": 1.1030508474576271e-05, |
| "loss": 1.5015, |
| "step": 13730 |
| }, |
| { |
| "epoch": 0.34, |
| "grad_norm": 4.690391540527344, |
| "learning_rate": 1.1023728813559322e-05, |
| "loss": 1.3766, |
| "step": 13740 |
| }, |
| { |
| "epoch": 0.34, |
| "grad_norm": 6.648055553436279, |
| "learning_rate": 1.1016949152542374e-05, |
| "loss": 1.319, |
| "step": 13750 |
| }, |
| { |
| "epoch": 0.34, |
| "grad_norm": 4.713019847869873, |
| "learning_rate": 1.1010169491525423e-05, |
| "loss": 1.4213, |
| "step": 13760 |
| }, |
| { |
| "epoch": 0.34, |
| "grad_norm": 4.177305698394775, |
| "learning_rate": 1.1003389830508475e-05, |
| "loss": 1.5414, |
| "step": 13770 |
| }, |
| { |
| "epoch": 0.34, |
| "grad_norm": 4.832136154174805, |
| "learning_rate": 1.0996610169491526e-05, |
| "loss": 1.457, |
| "step": 13780 |
| }, |
| { |
| "epoch": 0.34, |
| "grad_norm": 5.470792770385742, |
| "learning_rate": 1.0989830508474578e-05, |
| "loss": 1.3565, |
| "step": 13790 |
| }, |
| { |
| "epoch": 0.34, |
| "grad_norm": 2.7103376388549805, |
| "learning_rate": 1.0983050847457627e-05, |
| "loss": 1.0967, |
| "step": 13800 |
| }, |
| { |
| "epoch": 0.35, |
| "grad_norm": 5.0832390785217285, |
| "learning_rate": 1.0976271186440679e-05, |
| "loss": 1.5281, |
| "step": 13810 |
| }, |
| { |
| "epoch": 0.35, |
| "grad_norm": 9.628556251525879, |
| "learning_rate": 1.096949152542373e-05, |
| "loss": 1.3223, |
| "step": 13820 |
| }, |
| { |
| "epoch": 0.35, |
| "grad_norm": 5.501499176025391, |
| "learning_rate": 1.0962711864406782e-05, |
| "loss": 1.452, |
| "step": 13830 |
| }, |
| { |
| "epoch": 0.35, |
| "grad_norm": 1.4253193140029907, |
| "learning_rate": 1.0955932203389831e-05, |
| "loss": 1.5329, |
| "step": 13840 |
| }, |
| { |
| "epoch": 0.35, |
| "grad_norm": 3.109076976776123, |
| "learning_rate": 1.0949152542372883e-05, |
| "loss": 1.2804, |
| "step": 13850 |
| }, |
| { |
| "epoch": 0.35, |
| "grad_norm": 7.186499118804932, |
| "learning_rate": 1.0942372881355934e-05, |
| "loss": 1.3599, |
| "step": 13860 |
| }, |
| { |
| "epoch": 0.35, |
| "grad_norm": 3.0720372200012207, |
| "learning_rate": 1.0935593220338985e-05, |
| "loss": 1.4686, |
| "step": 13870 |
| }, |
| { |
| "epoch": 0.35, |
| "grad_norm": 3.5827410221099854, |
| "learning_rate": 1.0928813559322035e-05, |
| "loss": 1.3139, |
| "step": 13880 |
| }, |
| { |
| "epoch": 0.35, |
| "grad_norm": 12.06527328491211, |
| "learning_rate": 1.0922033898305087e-05, |
| "loss": 1.4268, |
| "step": 13890 |
| }, |
| { |
| "epoch": 0.35, |
| "grad_norm": 4.608367919921875, |
| "learning_rate": 1.0915254237288135e-05, |
| "loss": 1.2591, |
| "step": 13900 |
| }, |
| { |
| "epoch": 0.35, |
| "grad_norm": 13.63802433013916, |
| "learning_rate": 1.0908474576271186e-05, |
| "loss": 1.2512, |
| "step": 13910 |
| }, |
| { |
| "epoch": 0.35, |
| "grad_norm": 2.2884724140167236, |
| "learning_rate": 1.0901694915254237e-05, |
| "loss": 1.5108, |
| "step": 13920 |
| }, |
| { |
| "epoch": 0.35, |
| "grad_norm": 4.693191051483154, |
| "learning_rate": 1.0894915254237289e-05, |
| "loss": 1.1383, |
| "step": 13930 |
| }, |
| { |
| "epoch": 0.35, |
| "grad_norm": 4.830557823181152, |
| "learning_rate": 1.0888135593220339e-05, |
| "loss": 1.3608, |
| "step": 13940 |
| }, |
| { |
| "epoch": 0.35, |
| "grad_norm": 5.044919490814209, |
| "learning_rate": 1.088135593220339e-05, |
| "loss": 1.2956, |
| "step": 13950 |
| }, |
| { |
| "epoch": 0.35, |
| "grad_norm": 2.327402114868164, |
| "learning_rate": 1.0874576271186441e-05, |
| "loss": 1.4006, |
| "step": 13960 |
| }, |
| { |
| "epoch": 0.35, |
| "grad_norm": 8.76153564453125, |
| "learning_rate": 1.0867796610169493e-05, |
| "loss": 1.4544, |
| "step": 13970 |
| }, |
| { |
| "epoch": 0.35, |
| "grad_norm": 3.762763023376465, |
| "learning_rate": 1.0861016949152542e-05, |
| "loss": 1.4545, |
| "step": 13980 |
| }, |
| { |
| "epoch": 0.35, |
| "grad_norm": 4.206634521484375, |
| "learning_rate": 1.0854237288135594e-05, |
| "loss": 1.3256, |
| "step": 13990 |
| }, |
| { |
| "epoch": 0.35, |
| "grad_norm": 3.6637351512908936, |
| "learning_rate": 1.0847457627118645e-05, |
| "loss": 1.5003, |
| "step": 14000 |
| }, |
| { |
| "epoch": 0.35, |
| "eval_loss": 1.414922833442688, |
| "eval_runtime": 66.1224, |
| "eval_samples_per_second": 15.123, |
| "eval_steps_per_second": 15.123, |
| "step": 14000 |
| }, |
| { |
| "epoch": 0.35, |
| "grad_norm": 3.5580198764801025, |
| "learning_rate": 1.0840677966101697e-05, |
| "loss": 1.2528, |
| "step": 14010 |
| }, |
| { |
| "epoch": 0.35, |
| "grad_norm": 4.473653316497803, |
| "learning_rate": 1.0833898305084746e-05, |
| "loss": 1.4678, |
| "step": 14020 |
| }, |
| { |
| "epoch": 0.35, |
| "grad_norm": 2.7036588191986084, |
| "learning_rate": 1.0827118644067798e-05, |
| "loss": 1.5327, |
| "step": 14030 |
| }, |
| { |
| "epoch": 0.35, |
| "grad_norm": 2.9595816135406494, |
| "learning_rate": 1.0820338983050849e-05, |
| "loss": 1.3218, |
| "step": 14040 |
| }, |
| { |
| "epoch": 0.35, |
| "grad_norm": 2.984138011932373, |
| "learning_rate": 1.08135593220339e-05, |
| "loss": 1.3408, |
| "step": 14050 |
| }, |
| { |
| "epoch": 0.35, |
| "grad_norm": 3.022242307662964, |
| "learning_rate": 1.080677966101695e-05, |
| "loss": 1.5133, |
| "step": 14060 |
| }, |
| { |
| "epoch": 0.35, |
| "grad_norm": 4.826704978942871, |
| "learning_rate": 1.0800000000000002e-05, |
| "loss": 1.3917, |
| "step": 14070 |
| }, |
| { |
| "epoch": 0.35, |
| "grad_norm": 7.675862789154053, |
| "learning_rate": 1.0793220338983053e-05, |
| "loss": 1.2337, |
| "step": 14080 |
| }, |
| { |
| "epoch": 0.35, |
| "grad_norm": 4.611223220825195, |
| "learning_rate": 1.0786440677966103e-05, |
| "loss": 1.3582, |
| "step": 14090 |
| }, |
| { |
| "epoch": 0.35, |
| "grad_norm": 2.353292942047119, |
| "learning_rate": 1.0779661016949154e-05, |
| "loss": 1.4237, |
| "step": 14100 |
| }, |
| { |
| "epoch": 0.35, |
| "grad_norm": 4.331914901733398, |
| "learning_rate": 1.0772881355932204e-05, |
| "loss": 1.4808, |
| "step": 14110 |
| }, |
| { |
| "epoch": 0.35, |
| "grad_norm": 2.759838819503784, |
| "learning_rate": 1.0766101694915254e-05, |
| "loss": 1.4499, |
| "step": 14120 |
| }, |
| { |
| "epoch": 0.35, |
| "grad_norm": 4.08477783203125, |
| "learning_rate": 1.0759322033898305e-05, |
| "loss": 1.3125, |
| "step": 14130 |
| }, |
| { |
| "epoch": 0.35, |
| "grad_norm": 3.8619682788848877, |
| "learning_rate": 1.0752542372881356e-05, |
| "loss": 1.2508, |
| "step": 14140 |
| }, |
| { |
| "epoch": 0.35, |
| "grad_norm": 5.26367712020874, |
| "learning_rate": 1.0745762711864408e-05, |
| "loss": 1.4067, |
| "step": 14150 |
| }, |
| { |
| "epoch": 0.35, |
| "grad_norm": 8.280834197998047, |
| "learning_rate": 1.0738983050847457e-05, |
| "loss": 1.381, |
| "step": 14160 |
| }, |
| { |
| "epoch": 0.35, |
| "grad_norm": 1.3759233951568604, |
| "learning_rate": 1.0732203389830509e-05, |
| "loss": 1.4211, |
| "step": 14170 |
| }, |
| { |
| "epoch": 0.35, |
| "grad_norm": 1.8453333377838135, |
| "learning_rate": 1.072542372881356e-05, |
| "loss": 1.4187, |
| "step": 14180 |
| }, |
| { |
| "epoch": 0.35, |
| "grad_norm": 3.5439627170562744, |
| "learning_rate": 1.0718644067796612e-05, |
| "loss": 1.2863, |
| "step": 14190 |
| }, |
| { |
| "epoch": 0.35, |
| "grad_norm": 5.693528652191162, |
| "learning_rate": 1.0711864406779661e-05, |
| "loss": 1.3819, |
| "step": 14200 |
| }, |
| { |
| "epoch": 0.36, |
| "grad_norm": 6.000442028045654, |
| "learning_rate": 1.0705084745762713e-05, |
| "loss": 1.4, |
| "step": 14210 |
| }, |
| { |
| "epoch": 0.36, |
| "grad_norm": 4.452291011810303, |
| "learning_rate": 1.0698305084745764e-05, |
| "loss": 1.3242, |
| "step": 14220 |
| }, |
| { |
| "epoch": 0.36, |
| "grad_norm": 8.150368690490723, |
| "learning_rate": 1.0691525423728814e-05, |
| "loss": 1.4536, |
| "step": 14230 |
| }, |
| { |
| "epoch": 0.36, |
| "grad_norm": 6.411572456359863, |
| "learning_rate": 1.0684745762711865e-05, |
| "loss": 1.2176, |
| "step": 14240 |
| }, |
| { |
| "epoch": 0.36, |
| "grad_norm": 6.748562335968018, |
| "learning_rate": 1.0677966101694917e-05, |
| "loss": 1.3727, |
| "step": 14250 |
| }, |
| { |
| "epoch": 0.36, |
| "grad_norm": 5.915468215942383, |
| "learning_rate": 1.0671186440677968e-05, |
| "loss": 1.2339, |
| "step": 14260 |
| }, |
| { |
| "epoch": 0.36, |
| "grad_norm": 2.7658674716949463, |
| "learning_rate": 1.0664406779661018e-05, |
| "loss": 1.3729, |
| "step": 14270 |
| }, |
| { |
| "epoch": 0.36, |
| "grad_norm": 2.117175579071045, |
| "learning_rate": 1.065762711864407e-05, |
| "loss": 1.4233, |
| "step": 14280 |
| }, |
| { |
| "epoch": 0.36, |
| "grad_norm": 3.734428882598877, |
| "learning_rate": 1.065084745762712e-05, |
| "loss": 1.4449, |
| "step": 14290 |
| }, |
| { |
| "epoch": 0.36, |
| "grad_norm": 3.934438467025757, |
| "learning_rate": 1.0644067796610172e-05, |
| "loss": 1.3337, |
| "step": 14300 |
| }, |
| { |
| "epoch": 0.36, |
| "grad_norm": 10.599371910095215, |
| "learning_rate": 1.0637288135593222e-05, |
| "loss": 1.3862, |
| "step": 14310 |
| }, |
| { |
| "epoch": 0.36, |
| "grad_norm": 3.4198312759399414, |
| "learning_rate": 1.0630508474576271e-05, |
| "loss": 1.5868, |
| "step": 14320 |
| }, |
| { |
| "epoch": 0.36, |
| "grad_norm": 1.7814559936523438, |
| "learning_rate": 1.0623728813559323e-05, |
| "loss": 1.3958, |
| "step": 14330 |
| }, |
| { |
| "epoch": 0.36, |
| "grad_norm": 10.756269454956055, |
| "learning_rate": 1.0616949152542373e-05, |
| "loss": 1.516, |
| "step": 14340 |
| }, |
| { |
| "epoch": 0.36, |
| "grad_norm": 1.8235437870025635, |
| "learning_rate": 1.0610169491525424e-05, |
| "loss": 1.54, |
| "step": 14350 |
| }, |
| { |
| "epoch": 0.36, |
| "grad_norm": 7.628040790557861, |
| "learning_rate": 1.0603389830508475e-05, |
| "loss": 1.2579, |
| "step": 14360 |
| }, |
| { |
| "epoch": 0.36, |
| "grad_norm": 3.0332067012786865, |
| "learning_rate": 1.0596610169491525e-05, |
| "loss": 1.3637, |
| "step": 14370 |
| }, |
| { |
| "epoch": 0.36, |
| "grad_norm": 3.866598606109619, |
| "learning_rate": 1.0589830508474576e-05, |
| "loss": 1.5127, |
| "step": 14380 |
| }, |
| { |
| "epoch": 0.36, |
| "grad_norm": 3.0996134281158447, |
| "learning_rate": 1.0583050847457628e-05, |
| "loss": 1.538, |
| "step": 14390 |
| }, |
| { |
| "epoch": 0.36, |
| "grad_norm": 3.3183581829071045, |
| "learning_rate": 1.057627118644068e-05, |
| "loss": 1.4018, |
| "step": 14400 |
| }, |
| { |
| "epoch": 0.36, |
| "grad_norm": 5.61666202545166, |
| "learning_rate": 1.0569491525423729e-05, |
| "loss": 1.374, |
| "step": 14410 |
| }, |
| { |
| "epoch": 0.36, |
| "grad_norm": 4.340878963470459, |
| "learning_rate": 1.056271186440678e-05, |
| "loss": 1.4605, |
| "step": 14420 |
| }, |
| { |
| "epoch": 0.36, |
| "grad_norm": 3.6541993618011475, |
| "learning_rate": 1.0555932203389832e-05, |
| "loss": 1.4579, |
| "step": 14430 |
| }, |
| { |
| "epoch": 0.36, |
| "grad_norm": 6.3943772315979, |
| "learning_rate": 1.0549152542372883e-05, |
| "loss": 1.6246, |
| "step": 14440 |
| }, |
| { |
| "epoch": 0.36, |
| "grad_norm": 3.543633460998535, |
| "learning_rate": 1.0542372881355933e-05, |
| "loss": 1.5073, |
| "step": 14450 |
| }, |
| { |
| "epoch": 0.36, |
| "grad_norm": 2.0119078159332275, |
| "learning_rate": 1.0535593220338984e-05, |
| "loss": 1.367, |
| "step": 14460 |
| }, |
| { |
| "epoch": 0.36, |
| "grad_norm": 14.83475112915039, |
| "learning_rate": 1.0528813559322036e-05, |
| "loss": 1.3893, |
| "step": 14470 |
| }, |
| { |
| "epoch": 0.36, |
| "grad_norm": 2.9194772243499756, |
| "learning_rate": 1.0522033898305087e-05, |
| "loss": 1.3436, |
| "step": 14480 |
| }, |
| { |
| "epoch": 0.36, |
| "grad_norm": 1.9060604572296143, |
| "learning_rate": 1.0515254237288137e-05, |
| "loss": 1.2047, |
| "step": 14490 |
| }, |
| { |
| "epoch": 0.36, |
| "grad_norm": 2.6039791107177734, |
| "learning_rate": 1.0508474576271188e-05, |
| "loss": 1.5192, |
| "step": 14500 |
| }, |
| { |
| "epoch": 0.36, |
| "eval_loss": 1.3242599964141846, |
| "eval_runtime": 66.116, |
| "eval_samples_per_second": 15.125, |
| "eval_steps_per_second": 15.125, |
| "step": 14500 |
| }, |
| { |
| "epoch": 0.36, |
| "grad_norm": 6.422837257385254, |
| "learning_rate": 1.050169491525424e-05, |
| "loss": 1.4687, |
| "step": 14510 |
| }, |
| { |
| "epoch": 0.36, |
| "grad_norm": 4.683182716369629, |
| "learning_rate": 1.049491525423729e-05, |
| "loss": 1.4816, |
| "step": 14520 |
| }, |
| { |
| "epoch": 0.36, |
| "grad_norm": 3.1711997985839844, |
| "learning_rate": 1.0488135593220339e-05, |
| "loss": 1.354, |
| "step": 14530 |
| }, |
| { |
| "epoch": 0.36, |
| "grad_norm": 2.8648619651794434, |
| "learning_rate": 1.048135593220339e-05, |
| "loss": 1.3518, |
| "step": 14540 |
| }, |
| { |
| "epoch": 0.36, |
| "grad_norm": 10.453191757202148, |
| "learning_rate": 1.047457627118644e-05, |
| "loss": 1.3256, |
| "step": 14550 |
| }, |
| { |
| "epoch": 0.36, |
| "grad_norm": 4.701107978820801, |
| "learning_rate": 1.0467796610169491e-05, |
| "loss": 1.3005, |
| "step": 14560 |
| }, |
| { |
| "epoch": 0.36, |
| "grad_norm": 3.5893495082855225, |
| "learning_rate": 1.0461016949152543e-05, |
| "loss": 1.3151, |
| "step": 14570 |
| }, |
| { |
| "epoch": 0.36, |
| "grad_norm": 12.855472564697266, |
| "learning_rate": 1.0454237288135594e-05, |
| "loss": 1.5216, |
| "step": 14580 |
| }, |
| { |
| "epoch": 0.36, |
| "grad_norm": 3.115908145904541, |
| "learning_rate": 1.0447457627118644e-05, |
| "loss": 1.6356, |
| "step": 14590 |
| }, |
| { |
| "epoch": 0.36, |
| "grad_norm": 5.046034812927246, |
| "learning_rate": 1.0440677966101695e-05, |
| "loss": 1.4374, |
| "step": 14600 |
| }, |
| { |
| "epoch": 0.37, |
| "grad_norm": 8.317269325256348, |
| "learning_rate": 1.0433898305084747e-05, |
| "loss": 1.3663, |
| "step": 14610 |
| }, |
| { |
| "epoch": 0.37, |
| "grad_norm": 9.950586318969727, |
| "learning_rate": 1.0427118644067798e-05, |
| "loss": 1.5752, |
| "step": 14620 |
| }, |
| { |
| "epoch": 0.37, |
| "grad_norm": 8.241649627685547, |
| "learning_rate": 1.0420338983050848e-05, |
| "loss": 1.3704, |
| "step": 14630 |
| }, |
| { |
| "epoch": 0.37, |
| "grad_norm": 5.070446491241455, |
| "learning_rate": 1.04135593220339e-05, |
| "loss": 1.4483, |
| "step": 14640 |
| }, |
| { |
| "epoch": 0.37, |
| "grad_norm": 1.575085997581482, |
| "learning_rate": 1.040677966101695e-05, |
| "loss": 1.5115, |
| "step": 14650 |
| }, |
| { |
| "epoch": 0.37, |
| "grad_norm": 8.428694725036621, |
| "learning_rate": 1.04e-05, |
| "loss": 1.3522, |
| "step": 14660 |
| }, |
| { |
| "epoch": 0.37, |
| "grad_norm": 7.704261302947998, |
| "learning_rate": 1.0393220338983052e-05, |
| "loss": 1.2121, |
| "step": 14670 |
| }, |
| { |
| "epoch": 0.37, |
| "grad_norm": 3.4666318893432617, |
| "learning_rate": 1.0386440677966103e-05, |
| "loss": 1.4718, |
| "step": 14680 |
| }, |
| { |
| "epoch": 0.37, |
| "grad_norm": 8.076580047607422, |
| "learning_rate": 1.0379661016949155e-05, |
| "loss": 1.4, |
| "step": 14690 |
| }, |
| { |
| "epoch": 0.37, |
| "grad_norm": 3.1513173580169678, |
| "learning_rate": 1.0372881355932204e-05, |
| "loss": 1.3593, |
| "step": 14700 |
| }, |
| { |
| "epoch": 0.37, |
| "grad_norm": 4.961329460144043, |
| "learning_rate": 1.0366101694915256e-05, |
| "loss": 1.4394, |
| "step": 14710 |
| }, |
| { |
| "epoch": 0.37, |
| "grad_norm": 7.442839622497559, |
| "learning_rate": 1.0359322033898307e-05, |
| "loss": 1.3876, |
| "step": 14720 |
| }, |
| { |
| "epoch": 0.37, |
| "grad_norm": 6.982285499572754, |
| "learning_rate": 1.0352542372881358e-05, |
| "loss": 1.455, |
| "step": 14730 |
| }, |
| { |
| "epoch": 0.37, |
| "grad_norm": 10.58169174194336, |
| "learning_rate": 1.0345762711864406e-05, |
| "loss": 1.362, |
| "step": 14740 |
| }, |
| { |
| "epoch": 0.37, |
| "grad_norm": 3.1506564617156982, |
| "learning_rate": 1.0338983050847458e-05, |
| "loss": 1.3879, |
| "step": 14750 |
| }, |
| { |
| "epoch": 0.37, |
| "grad_norm": 4.143970012664795, |
| "learning_rate": 1.033220338983051e-05, |
| "loss": 1.4343, |
| "step": 14760 |
| }, |
| { |
| "epoch": 0.37, |
| "grad_norm": 2.493852376937866, |
| "learning_rate": 1.0325423728813559e-05, |
| "loss": 1.4638, |
| "step": 14770 |
| }, |
| { |
| "epoch": 0.37, |
| "grad_norm": 4.69954252243042, |
| "learning_rate": 1.031864406779661e-05, |
| "loss": 1.3695, |
| "step": 14780 |
| }, |
| { |
| "epoch": 0.37, |
| "grad_norm": 2.7337687015533447, |
| "learning_rate": 1.0311864406779662e-05, |
| "loss": 1.4077, |
| "step": 14790 |
| }, |
| { |
| "epoch": 0.37, |
| "grad_norm": 4.879547119140625, |
| "learning_rate": 1.0305084745762712e-05, |
| "loss": 1.3673, |
| "step": 14800 |
| }, |
| { |
| "epoch": 0.37, |
| "grad_norm": 3.318307638168335, |
| "learning_rate": 1.0298305084745763e-05, |
| "loss": 1.4526, |
| "step": 14810 |
| }, |
| { |
| "epoch": 0.37, |
| "grad_norm": 1.659396767616272, |
| "learning_rate": 1.0291525423728814e-05, |
| "loss": 1.4729, |
| "step": 14820 |
| }, |
| { |
| "epoch": 0.37, |
| "grad_norm": 2.797215223312378, |
| "learning_rate": 1.0284745762711866e-05, |
| "loss": 1.5173, |
| "step": 14830 |
| }, |
| { |
| "epoch": 0.37, |
| "grad_norm": 10.495044708251953, |
| "learning_rate": 1.0277966101694915e-05, |
| "loss": 1.4145, |
| "step": 14840 |
| }, |
| { |
| "epoch": 0.37, |
| "grad_norm": 8.500311851501465, |
| "learning_rate": 1.0271186440677967e-05, |
| "loss": 1.3707, |
| "step": 14850 |
| }, |
| { |
| "epoch": 0.37, |
| "grad_norm": 3.057346820831299, |
| "learning_rate": 1.0264406779661018e-05, |
| "loss": 1.2915, |
| "step": 14860 |
| }, |
| { |
| "epoch": 0.37, |
| "grad_norm": 3.8174734115600586, |
| "learning_rate": 1.025762711864407e-05, |
| "loss": 1.2654, |
| "step": 14870 |
| }, |
| { |
| "epoch": 0.37, |
| "grad_norm": 2.6704373359680176, |
| "learning_rate": 1.025084745762712e-05, |
| "loss": 1.178, |
| "step": 14880 |
| }, |
| { |
| "epoch": 0.37, |
| "grad_norm": 1.770277500152588, |
| "learning_rate": 1.024406779661017e-05, |
| "loss": 1.338, |
| "step": 14890 |
| }, |
| { |
| "epoch": 0.37, |
| "grad_norm": 2.0929298400878906, |
| "learning_rate": 1.0237288135593222e-05, |
| "loss": 1.3959, |
| "step": 14900 |
| }, |
| { |
| "epoch": 0.37, |
| "grad_norm": 13.965825080871582, |
| "learning_rate": 1.0230508474576274e-05, |
| "loss": 1.3289, |
| "step": 14910 |
| }, |
| { |
| "epoch": 0.37, |
| "grad_norm": 5.192085266113281, |
| "learning_rate": 1.0223728813559323e-05, |
| "loss": 1.3114, |
| "step": 14920 |
| }, |
| { |
| "epoch": 0.37, |
| "grad_norm": 9.4222412109375, |
| "learning_rate": 1.0216949152542375e-05, |
| "loss": 1.4214, |
| "step": 14930 |
| }, |
| { |
| "epoch": 0.37, |
| "grad_norm": 3.792501926422119, |
| "learning_rate": 1.0210169491525423e-05, |
| "loss": 1.4965, |
| "step": 14940 |
| }, |
| { |
| "epoch": 0.37, |
| "grad_norm": 2.663039207458496, |
| "learning_rate": 1.0203389830508474e-05, |
| "loss": 1.2647, |
| "step": 14950 |
| }, |
| { |
| "epoch": 0.37, |
| "grad_norm": 5.2181806564331055, |
| "learning_rate": 1.0196610169491525e-05, |
| "loss": 1.4376, |
| "step": 14960 |
| }, |
| { |
| "epoch": 0.37, |
| "grad_norm": 2.7282190322875977, |
| "learning_rate": 1.0189830508474577e-05, |
| "loss": 1.3529, |
| "step": 14970 |
| }, |
| { |
| "epoch": 0.37, |
| "grad_norm": 3.326599597930908, |
| "learning_rate": 1.0183050847457627e-05, |
| "loss": 1.4733, |
| "step": 14980 |
| }, |
| { |
| "epoch": 0.37, |
| "grad_norm": 1.9269278049468994, |
| "learning_rate": 1.0176271186440678e-05, |
| "loss": 1.4141, |
| "step": 14990 |
| }, |
| { |
| "epoch": 0.38, |
| "grad_norm": 10.291763305664062, |
| "learning_rate": 1.016949152542373e-05, |
| "loss": 1.3266, |
| "step": 15000 |
| }, |
| { |
| "epoch": 0.38, |
| "eval_loss": 1.3898714780807495, |
| "eval_runtime": 66.1314, |
| "eval_samples_per_second": 15.121, |
| "eval_steps_per_second": 15.121, |
| "step": 15000 |
| }, |
| { |
| "epoch": 0.38, |
| "grad_norm": 3.327223777770996, |
| "learning_rate": 1.016271186440678e-05, |
| "loss": 1.4933, |
| "step": 15010 |
| }, |
| { |
| "epoch": 0.38, |
| "grad_norm": 5.010586738586426, |
| "learning_rate": 1.015593220338983e-05, |
| "loss": 1.2914, |
| "step": 15020 |
| }, |
| { |
| "epoch": 0.38, |
| "grad_norm": 4.52363395690918, |
| "learning_rate": 1.0149152542372882e-05, |
| "loss": 1.3007, |
| "step": 15030 |
| }, |
| { |
| "epoch": 0.38, |
| "grad_norm": 4.301648139953613, |
| "learning_rate": 1.0142372881355933e-05, |
| "loss": 1.3652, |
| "step": 15040 |
| }, |
| { |
| "epoch": 0.38, |
| "grad_norm": 8.124075889587402, |
| "learning_rate": 1.0135593220338985e-05, |
| "loss": 1.3957, |
| "step": 15050 |
| }, |
| { |
| "epoch": 0.38, |
| "grad_norm": 4.588447093963623, |
| "learning_rate": 1.0128813559322034e-05, |
| "loss": 1.1597, |
| "step": 15060 |
| }, |
| { |
| "epoch": 0.38, |
| "grad_norm": 3.6226577758789062, |
| "learning_rate": 1.0122033898305086e-05, |
| "loss": 1.5604, |
| "step": 15070 |
| }, |
| { |
| "epoch": 0.38, |
| "grad_norm": 7.039438724517822, |
| "learning_rate": 1.0115254237288137e-05, |
| "loss": 1.2913, |
| "step": 15080 |
| }, |
| { |
| "epoch": 0.38, |
| "grad_norm": 1.9853217601776123, |
| "learning_rate": 1.0108474576271189e-05, |
| "loss": 1.2087, |
| "step": 15090 |
| }, |
| { |
| "epoch": 0.38, |
| "grad_norm": 6.037110805511475, |
| "learning_rate": 1.0101694915254238e-05, |
| "loss": 1.3358, |
| "step": 15100 |
| }, |
| { |
| "epoch": 0.38, |
| "grad_norm": 1.2350672483444214, |
| "learning_rate": 1.009491525423729e-05, |
| "loss": 1.4253, |
| "step": 15110 |
| }, |
| { |
| "epoch": 0.38, |
| "grad_norm": 4.601665019989014, |
| "learning_rate": 1.0088135593220341e-05, |
| "loss": 1.3471, |
| "step": 15120 |
| }, |
| { |
| "epoch": 0.38, |
| "grad_norm": 7.822099208831787, |
| "learning_rate": 1.008135593220339e-05, |
| "loss": 1.4255, |
| "step": 15130 |
| }, |
| { |
| "epoch": 0.38, |
| "grad_norm": 2.1710550785064697, |
| "learning_rate": 1.0074576271186442e-05, |
| "loss": 1.4419, |
| "step": 15140 |
| }, |
| { |
| "epoch": 0.38, |
| "grad_norm": 6.896235942840576, |
| "learning_rate": 1.0067796610169492e-05, |
| "loss": 1.3166, |
| "step": 15150 |
| }, |
| { |
| "epoch": 0.38, |
| "grad_norm": 2.7912566661834717, |
| "learning_rate": 1.0061016949152542e-05, |
| "loss": 1.3577, |
| "step": 15160 |
| }, |
| { |
| "epoch": 0.38, |
| "grad_norm": 2.8911972045898438, |
| "learning_rate": 1.0054237288135593e-05, |
| "loss": 1.3415, |
| "step": 15170 |
| }, |
| { |
| "epoch": 0.38, |
| "grad_norm": 1.8997740745544434, |
| "learning_rate": 1.0047457627118644e-05, |
| "loss": 1.3933, |
| "step": 15180 |
| }, |
| { |
| "epoch": 0.38, |
| "grad_norm": 5.039108753204346, |
| "learning_rate": 1.0040677966101696e-05, |
| "loss": 1.3395, |
| "step": 15190 |
| }, |
| { |
| "epoch": 0.38, |
| "grad_norm": 5.744147300720215, |
| "learning_rate": 1.0033898305084746e-05, |
| "loss": 1.4215, |
| "step": 15200 |
| }, |
| { |
| "epoch": 0.38, |
| "grad_norm": 18.610450744628906, |
| "learning_rate": 1.0027118644067797e-05, |
| "loss": 1.4639, |
| "step": 15210 |
| }, |
| { |
| "epoch": 0.38, |
| "grad_norm": 4.890042304992676, |
| "learning_rate": 1.0020338983050848e-05, |
| "loss": 1.34, |
| "step": 15220 |
| }, |
| { |
| "epoch": 0.38, |
| "grad_norm": 2.5182347297668457, |
| "learning_rate": 1.00135593220339e-05, |
| "loss": 1.5222, |
| "step": 15230 |
| }, |
| { |
| "epoch": 0.38, |
| "grad_norm": 2.647080183029175, |
| "learning_rate": 1.000677966101695e-05, |
| "loss": 1.3716, |
| "step": 15240 |
| }, |
| { |
| "epoch": 0.38, |
| "grad_norm": 7.10032320022583, |
| "learning_rate": 1e-05, |
| "loss": 1.4433, |
| "step": 15250 |
| }, |
| { |
| "epoch": 0.38, |
| "grad_norm": 10.124835968017578, |
| "learning_rate": 9.993220338983052e-06, |
| "loss": 1.402, |
| "step": 15260 |
| }, |
| { |
| "epoch": 0.38, |
| "grad_norm": 13.186834335327148, |
| "learning_rate": 9.986440677966102e-06, |
| "loss": 1.4211, |
| "step": 15270 |
| }, |
| { |
| "epoch": 0.38, |
| "grad_norm": 3.1523687839508057, |
| "learning_rate": 9.979661016949153e-06, |
| "loss": 1.3585, |
| "step": 15280 |
| }, |
| { |
| "epoch": 0.38, |
| "grad_norm": 5.048830032348633, |
| "learning_rate": 9.972881355932205e-06, |
| "loss": 1.4079, |
| "step": 15290 |
| }, |
| { |
| "epoch": 0.38, |
| "grad_norm": 9.165743827819824, |
| "learning_rate": 9.966101694915256e-06, |
| "loss": 1.4847, |
| "step": 15300 |
| }, |
| { |
| "epoch": 0.38, |
| "grad_norm": 10.89134693145752, |
| "learning_rate": 9.959322033898306e-06, |
| "loss": 1.4922, |
| "step": 15310 |
| }, |
| { |
| "epoch": 0.38, |
| "grad_norm": 2.4734840393066406, |
| "learning_rate": 9.952542372881356e-06, |
| "loss": 1.496, |
| "step": 15320 |
| }, |
| { |
| "epoch": 0.38, |
| "grad_norm": 2.6290500164031982, |
| "learning_rate": 9.945762711864407e-06, |
| "loss": 1.4658, |
| "step": 15330 |
| }, |
| { |
| "epoch": 0.38, |
| "grad_norm": 2.3270649909973145, |
| "learning_rate": 9.938983050847458e-06, |
| "loss": 1.3203, |
| "step": 15340 |
| }, |
| { |
| "epoch": 0.38, |
| "grad_norm": 3.256084442138672, |
| "learning_rate": 9.93220338983051e-06, |
| "loss": 1.3317, |
| "step": 15350 |
| }, |
| { |
| "epoch": 0.38, |
| "grad_norm": 7.33984899520874, |
| "learning_rate": 9.92542372881356e-06, |
| "loss": 1.2692, |
| "step": 15360 |
| }, |
| { |
| "epoch": 0.38, |
| "grad_norm": 5.8543877601623535, |
| "learning_rate": 9.918644067796611e-06, |
| "loss": 1.4074, |
| "step": 15370 |
| }, |
| { |
| "epoch": 0.38, |
| "grad_norm": 3.914440870285034, |
| "learning_rate": 9.911864406779662e-06, |
| "loss": 1.4998, |
| "step": 15380 |
| }, |
| { |
| "epoch": 0.38, |
| "grad_norm": 4.911335468292236, |
| "learning_rate": 9.905084745762714e-06, |
| "loss": 1.2676, |
| "step": 15390 |
| }, |
| { |
| "epoch": 0.39, |
| "grad_norm": 2.9057512283325195, |
| "learning_rate": 9.898305084745763e-06, |
| "loss": 1.4411, |
| "step": 15400 |
| }, |
| { |
| "epoch": 0.39, |
| "grad_norm": 6.507194519042969, |
| "learning_rate": 9.891525423728813e-06, |
| "loss": 1.3075, |
| "step": 15410 |
| }, |
| { |
| "epoch": 0.39, |
| "grad_norm": 2.6351258754730225, |
| "learning_rate": 9.884745762711864e-06, |
| "loss": 1.368, |
| "step": 15420 |
| }, |
| { |
| "epoch": 0.39, |
| "grad_norm": 10.542738914489746, |
| "learning_rate": 9.877966101694916e-06, |
| "loss": 1.3388, |
| "step": 15430 |
| }, |
| { |
| "epoch": 0.39, |
| "grad_norm": 5.773244857788086, |
| "learning_rate": 9.871186440677967e-06, |
| "loss": 1.3478, |
| "step": 15440 |
| }, |
| { |
| "epoch": 0.39, |
| "grad_norm": 3.678858757019043, |
| "learning_rate": 9.864406779661017e-06, |
| "loss": 1.3866, |
| "step": 15450 |
| }, |
| { |
| "epoch": 0.39, |
| "grad_norm": 6.312896251678467, |
| "learning_rate": 9.857627118644068e-06, |
| "loss": 1.418, |
| "step": 15460 |
| }, |
| { |
| "epoch": 0.39, |
| "grad_norm": 6.026124954223633, |
| "learning_rate": 9.85084745762712e-06, |
| "loss": 1.584, |
| "step": 15470 |
| }, |
| { |
| "epoch": 0.39, |
| "grad_norm": 3.8132076263427734, |
| "learning_rate": 9.844067796610171e-06, |
| "loss": 1.4433, |
| "step": 15480 |
| }, |
| { |
| "epoch": 0.39, |
| "grad_norm": 3.9010612964630127, |
| "learning_rate": 9.837288135593221e-06, |
| "loss": 1.3962, |
| "step": 15490 |
| }, |
| { |
| "epoch": 0.39, |
| "grad_norm": 2.6610724925994873, |
| "learning_rate": 9.830508474576272e-06, |
| "loss": 1.3506, |
| "step": 15500 |
| }, |
| { |
| "epoch": 0.39, |
| "eval_loss": 1.4206969738006592, |
| "eval_runtime": 66.1225, |
| "eval_samples_per_second": 15.123, |
| "eval_steps_per_second": 15.123, |
| "step": 15500 |
| }, |
| { |
| "epoch": 0.39, |
| "grad_norm": 10.500436782836914, |
| "learning_rate": 9.823728813559322e-06, |
| "loss": 1.4373, |
| "step": 15510 |
| }, |
| { |
| "epoch": 0.39, |
| "grad_norm": 4.49819278717041, |
| "learning_rate": 9.816949152542373e-06, |
| "loss": 1.3353, |
| "step": 15520 |
| }, |
| { |
| "epoch": 0.39, |
| "grad_norm": 4.517664909362793, |
| "learning_rate": 9.810169491525425e-06, |
| "loss": 1.4062, |
| "step": 15530 |
| }, |
| { |
| "epoch": 0.39, |
| "grad_norm": 11.253808975219727, |
| "learning_rate": 9.803389830508474e-06, |
| "loss": 1.4507, |
| "step": 15540 |
| }, |
| { |
| "epoch": 0.39, |
| "grad_norm": 2.5719611644744873, |
| "learning_rate": 9.796610169491526e-06, |
| "loss": 1.2784, |
| "step": 15550 |
| }, |
| { |
| "epoch": 0.39, |
| "grad_norm": 2.9650001525878906, |
| "learning_rate": 9.789830508474577e-06, |
| "loss": 1.3021, |
| "step": 15560 |
| }, |
| { |
| "epoch": 0.39, |
| "grad_norm": 4.1970319747924805, |
| "learning_rate": 9.783050847457629e-06, |
| "loss": 1.4252, |
| "step": 15570 |
| }, |
| { |
| "epoch": 0.39, |
| "grad_norm": 1.926850438117981, |
| "learning_rate": 9.776271186440678e-06, |
| "loss": 1.2455, |
| "step": 15580 |
| }, |
| { |
| "epoch": 0.39, |
| "grad_norm": 8.753373146057129, |
| "learning_rate": 9.76949152542373e-06, |
| "loss": 1.1841, |
| "step": 15590 |
| }, |
| { |
| "epoch": 0.39, |
| "grad_norm": 11.411351203918457, |
| "learning_rate": 9.762711864406781e-06, |
| "loss": 1.391, |
| "step": 15600 |
| }, |
| { |
| "epoch": 0.39, |
| "grad_norm": 9.503700256347656, |
| "learning_rate": 9.755932203389833e-06, |
| "loss": 1.4071, |
| "step": 15610 |
| }, |
| { |
| "epoch": 0.39, |
| "grad_norm": 3.8297741413116455, |
| "learning_rate": 9.749152542372882e-06, |
| "loss": 1.4042, |
| "step": 15620 |
| }, |
| { |
| "epoch": 0.39, |
| "grad_norm": 7.153141498565674, |
| "learning_rate": 9.742372881355932e-06, |
| "loss": 1.3474, |
| "step": 15630 |
| }, |
| { |
| "epoch": 0.39, |
| "grad_norm": 2.806121826171875, |
| "learning_rate": 9.735593220338983e-06, |
| "loss": 1.4126, |
| "step": 15640 |
| }, |
| { |
| "epoch": 0.39, |
| "grad_norm": 7.874058246612549, |
| "learning_rate": 9.728813559322035e-06, |
| "loss": 1.5157, |
| "step": 15650 |
| }, |
| { |
| "epoch": 0.39, |
| "grad_norm": 3.6346802711486816, |
| "learning_rate": 9.722033898305086e-06, |
| "loss": 1.3617, |
| "step": 15660 |
| }, |
| { |
| "epoch": 0.39, |
| "grad_norm": 19.185550689697266, |
| "learning_rate": 9.715254237288136e-06, |
| "loss": 1.4801, |
| "step": 15670 |
| }, |
| { |
| "epoch": 0.39, |
| "grad_norm": 15.9682035446167, |
| "learning_rate": 9.708474576271187e-06, |
| "loss": 1.1923, |
| "step": 15680 |
| }, |
| { |
| "epoch": 0.39, |
| "grad_norm": 2.7673888206481934, |
| "learning_rate": 9.701694915254239e-06, |
| "loss": 1.5568, |
| "step": 15690 |
| }, |
| { |
| "epoch": 0.39, |
| "grad_norm": 7.633556842803955, |
| "learning_rate": 9.69491525423729e-06, |
| "loss": 1.4391, |
| "step": 15700 |
| }, |
| { |
| "epoch": 0.39, |
| "grad_norm": 11.961231231689453, |
| "learning_rate": 9.68813559322034e-06, |
| "loss": 1.4212, |
| "step": 15710 |
| }, |
| { |
| "epoch": 0.39, |
| "grad_norm": 2.426793098449707, |
| "learning_rate": 9.68135593220339e-06, |
| "loss": 1.1991, |
| "step": 15720 |
| }, |
| { |
| "epoch": 0.39, |
| "grad_norm": 7.8566107749938965, |
| "learning_rate": 9.674576271186441e-06, |
| "loss": 1.2991, |
| "step": 15730 |
| }, |
| { |
| "epoch": 0.39, |
| "grad_norm": 10.158808708190918, |
| "learning_rate": 9.667796610169492e-06, |
| "loss": 1.4826, |
| "step": 15740 |
| }, |
| { |
| "epoch": 0.39, |
| "grad_norm": 6.697884559631348, |
| "learning_rate": 9.661016949152544e-06, |
| "loss": 1.4448, |
| "step": 15750 |
| }, |
| { |
| "epoch": 0.39, |
| "grad_norm": 3.4867446422576904, |
| "learning_rate": 9.654237288135593e-06, |
| "loss": 1.4839, |
| "step": 15760 |
| }, |
| { |
| "epoch": 0.39, |
| "grad_norm": 1.6158361434936523, |
| "learning_rate": 9.647457627118645e-06, |
| "loss": 1.2674, |
| "step": 15770 |
| }, |
| { |
| "epoch": 0.39, |
| "grad_norm": 3.913695812225342, |
| "learning_rate": 9.640677966101696e-06, |
| "loss": 1.3351, |
| "step": 15780 |
| }, |
| { |
| "epoch": 0.39, |
| "grad_norm": 1.9007806777954102, |
| "learning_rate": 9.633898305084746e-06, |
| "loss": 1.3478, |
| "step": 15790 |
| }, |
| { |
| "epoch": 0.4, |
| "grad_norm": 2.1645798683166504, |
| "learning_rate": 9.627118644067797e-06, |
| "loss": 1.3018, |
| "step": 15800 |
| }, |
| { |
| "epoch": 0.4, |
| "grad_norm": 3.865454912185669, |
| "learning_rate": 9.620338983050849e-06, |
| "loss": 1.5398, |
| "step": 15810 |
| }, |
| { |
| "epoch": 0.4, |
| "grad_norm": 4.120809555053711, |
| "learning_rate": 9.6135593220339e-06, |
| "loss": 1.4546, |
| "step": 15820 |
| }, |
| { |
| "epoch": 0.4, |
| "grad_norm": 5.410675525665283, |
| "learning_rate": 9.60677966101695e-06, |
| "loss": 1.4648, |
| "step": 15830 |
| }, |
| { |
| "epoch": 0.4, |
| "grad_norm": 8.03393268585205, |
| "learning_rate": 9.600000000000001e-06, |
| "loss": 1.2891, |
| "step": 15840 |
| }, |
| { |
| "epoch": 0.4, |
| "grad_norm": 7.1069655418396, |
| "learning_rate": 9.593220338983051e-06, |
| "loss": 1.314, |
| "step": 15850 |
| }, |
| { |
| "epoch": 0.4, |
| "grad_norm": 6.312065601348877, |
| "learning_rate": 9.586440677966102e-06, |
| "loss": 1.2608, |
| "step": 15860 |
| }, |
| { |
| "epoch": 0.4, |
| "grad_norm": 3.499418258666992, |
| "learning_rate": 9.579661016949154e-06, |
| "loss": 1.3816, |
| "step": 15870 |
| }, |
| { |
| "epoch": 0.4, |
| "grad_norm": 5.211281776428223, |
| "learning_rate": 9.572881355932203e-06, |
| "loss": 1.1021, |
| "step": 15880 |
| }, |
| { |
| "epoch": 0.4, |
| "grad_norm": 4.372194290161133, |
| "learning_rate": 9.566101694915255e-06, |
| "loss": 1.4179, |
| "step": 15890 |
| }, |
| { |
| "epoch": 0.4, |
| "grad_norm": 5.44555139541626, |
| "learning_rate": 9.559322033898306e-06, |
| "loss": 1.4848, |
| "step": 15900 |
| }, |
| { |
| "epoch": 0.4, |
| "grad_norm": 3.483454942703247, |
| "learning_rate": 9.552542372881358e-06, |
| "loss": 1.2985, |
| "step": 15910 |
| }, |
| { |
| "epoch": 0.4, |
| "grad_norm": 3.8721625804901123, |
| "learning_rate": 9.545762711864407e-06, |
| "loss": 1.4731, |
| "step": 15920 |
| }, |
| { |
| "epoch": 0.4, |
| "grad_norm": 4.317465305328369, |
| "learning_rate": 9.538983050847457e-06, |
| "loss": 1.501, |
| "step": 15930 |
| }, |
| { |
| "epoch": 0.4, |
| "grad_norm": 7.075793743133545, |
| "learning_rate": 9.532203389830508e-06, |
| "loss": 1.5138, |
| "step": 15940 |
| }, |
| { |
| "epoch": 0.4, |
| "grad_norm": 5.606925964355469, |
| "learning_rate": 9.52542372881356e-06, |
| "loss": 1.3771, |
| "step": 15950 |
| }, |
| { |
| "epoch": 0.4, |
| "grad_norm": 2.937833547592163, |
| "learning_rate": 9.518644067796611e-06, |
| "loss": 1.4764, |
| "step": 15960 |
| }, |
| { |
| "epoch": 0.4, |
| "grad_norm": 4.39876651763916, |
| "learning_rate": 9.511864406779661e-06, |
| "loss": 1.4022, |
| "step": 15970 |
| }, |
| { |
| "epoch": 0.4, |
| "grad_norm": 6.455230712890625, |
| "learning_rate": 9.505084745762712e-06, |
| "loss": 1.3569, |
| "step": 15980 |
| }, |
| { |
| "epoch": 0.4, |
| "grad_norm": 4.89456033706665, |
| "learning_rate": 9.498305084745764e-06, |
| "loss": 1.3426, |
| "step": 15990 |
| }, |
| { |
| "epoch": 0.4, |
| "grad_norm": 5.696287631988525, |
| "learning_rate": 9.491525423728815e-06, |
| "loss": 1.5381, |
| "step": 16000 |
| }, |
| { |
| "epoch": 0.4, |
| "eval_loss": 1.3579893112182617, |
| "eval_runtime": 66.2433, |
| "eval_samples_per_second": 15.096, |
| "eval_steps_per_second": 15.096, |
| "step": 16000 |
| }, |
| { |
| "epoch": 0.4, |
| "grad_norm": 6.06011962890625, |
| "learning_rate": 9.484745762711865e-06, |
| "loss": 1.4422, |
| "step": 16010 |
| }, |
| { |
| "epoch": 0.4, |
| "grad_norm": 0.9439699053764343, |
| "learning_rate": 9.477966101694916e-06, |
| "loss": 1.3661, |
| "step": 16020 |
| }, |
| { |
| "epoch": 0.4, |
| "grad_norm": 2.9158008098602295, |
| "learning_rate": 9.471186440677966e-06, |
| "loss": 1.1349, |
| "step": 16030 |
| }, |
| { |
| "epoch": 0.4, |
| "grad_norm": 3.3317673206329346, |
| "learning_rate": 9.464406779661017e-06, |
| "loss": 1.4161, |
| "step": 16040 |
| }, |
| { |
| "epoch": 0.4, |
| "grad_norm": 2.866821050643921, |
| "learning_rate": 9.457627118644069e-06, |
| "loss": 1.2952, |
| "step": 16050 |
| }, |
| { |
| "epoch": 0.4, |
| "grad_norm": 8.00590991973877, |
| "learning_rate": 9.450847457627119e-06, |
| "loss": 1.3493, |
| "step": 16060 |
| }, |
| { |
| "epoch": 0.4, |
| "grad_norm": 2.1362667083740234, |
| "learning_rate": 9.44406779661017e-06, |
| "loss": 1.3385, |
| "step": 16070 |
| }, |
| { |
| "epoch": 0.4, |
| "grad_norm": 4.567724227905273, |
| "learning_rate": 9.437288135593221e-06, |
| "loss": 1.2988, |
| "step": 16080 |
| }, |
| { |
| "epoch": 0.4, |
| "grad_norm": 3.382605791091919, |
| "learning_rate": 9.430508474576273e-06, |
| "loss": 1.2131, |
| "step": 16090 |
| }, |
| { |
| "epoch": 0.4, |
| "grad_norm": 13.581380844116211, |
| "learning_rate": 9.423728813559322e-06, |
| "loss": 1.3962, |
| "step": 16100 |
| }, |
| { |
| "epoch": 0.4, |
| "grad_norm": 6.365314483642578, |
| "learning_rate": 9.416949152542374e-06, |
| "loss": 1.4928, |
| "step": 16110 |
| }, |
| { |
| "epoch": 0.4, |
| "grad_norm": 5.413196563720703, |
| "learning_rate": 9.410169491525425e-06, |
| "loss": 1.2728, |
| "step": 16120 |
| }, |
| { |
| "epoch": 0.4, |
| "grad_norm": 3.6787874698638916, |
| "learning_rate": 9.403389830508477e-06, |
| "loss": 1.1831, |
| "step": 16130 |
| }, |
| { |
| "epoch": 0.4, |
| "grad_norm": 2.8850882053375244, |
| "learning_rate": 9.396610169491526e-06, |
| "loss": 1.3025, |
| "step": 16140 |
| }, |
| { |
| "epoch": 0.4, |
| "grad_norm": 3.6025898456573486, |
| "learning_rate": 9.389830508474576e-06, |
| "loss": 1.3553, |
| "step": 16150 |
| }, |
| { |
| "epoch": 0.4, |
| "grad_norm": 3.00011944770813, |
| "learning_rate": 9.383050847457627e-06, |
| "loss": 1.5127, |
| "step": 16160 |
| }, |
| { |
| "epoch": 0.4, |
| "grad_norm": 4.33295202255249, |
| "learning_rate": 9.376271186440679e-06, |
| "loss": 1.5001, |
| "step": 16170 |
| }, |
| { |
| "epoch": 0.4, |
| "grad_norm": 3.6982882022857666, |
| "learning_rate": 9.36949152542373e-06, |
| "loss": 1.4729, |
| "step": 16180 |
| }, |
| { |
| "epoch": 0.4, |
| "grad_norm": 7.049408435821533, |
| "learning_rate": 9.36271186440678e-06, |
| "loss": 1.276, |
| "step": 16190 |
| }, |
| { |
| "epoch": 0.41, |
| "grad_norm": 1.6569970846176147, |
| "learning_rate": 9.355932203389831e-06, |
| "loss": 1.3321, |
| "step": 16200 |
| }, |
| { |
| "epoch": 0.41, |
| "grad_norm": 2.4531731605529785, |
| "learning_rate": 9.349152542372883e-06, |
| "loss": 1.435, |
| "step": 16210 |
| }, |
| { |
| "epoch": 0.41, |
| "grad_norm": 2.776325225830078, |
| "learning_rate": 9.342372881355934e-06, |
| "loss": 1.2586, |
| "step": 16220 |
| }, |
| { |
| "epoch": 0.41, |
| "grad_norm": 2.805823564529419, |
| "learning_rate": 9.335593220338984e-06, |
| "loss": 1.3393, |
| "step": 16230 |
| }, |
| { |
| "epoch": 0.41, |
| "grad_norm": 2.3886725902557373, |
| "learning_rate": 9.328813559322034e-06, |
| "loss": 1.4241, |
| "step": 16240 |
| }, |
| { |
| "epoch": 0.41, |
| "grad_norm": 10.983514785766602, |
| "learning_rate": 9.322033898305085e-06, |
| "loss": 1.5032, |
| "step": 16250 |
| }, |
| { |
| "epoch": 0.41, |
| "grad_norm": 2.922609329223633, |
| "learning_rate": 9.315254237288136e-06, |
| "loss": 1.2289, |
| "step": 16260 |
| }, |
| { |
| "epoch": 0.41, |
| "grad_norm": 3.623088836669922, |
| "learning_rate": 9.308474576271188e-06, |
| "loss": 1.2854, |
| "step": 16270 |
| }, |
| { |
| "epoch": 0.41, |
| "grad_norm": 3.6580920219421387, |
| "learning_rate": 9.301694915254237e-06, |
| "loss": 1.3371, |
| "step": 16280 |
| }, |
| { |
| "epoch": 0.41, |
| "grad_norm": 3.010573625564575, |
| "learning_rate": 9.294915254237289e-06, |
| "loss": 1.465, |
| "step": 16290 |
| }, |
| { |
| "epoch": 0.41, |
| "grad_norm": 3.1502161026000977, |
| "learning_rate": 9.28813559322034e-06, |
| "loss": 1.2245, |
| "step": 16300 |
| }, |
| { |
| "epoch": 0.41, |
| "grad_norm": 2.7607476711273193, |
| "learning_rate": 9.28135593220339e-06, |
| "loss": 1.4668, |
| "step": 16310 |
| }, |
| { |
| "epoch": 0.41, |
| "grad_norm": 6.022114276885986, |
| "learning_rate": 9.274576271186441e-06, |
| "loss": 1.1924, |
| "step": 16320 |
| }, |
| { |
| "epoch": 0.41, |
| "grad_norm": 5.824821949005127, |
| "learning_rate": 9.267796610169493e-06, |
| "loss": 1.3773, |
| "step": 16330 |
| }, |
| { |
| "epoch": 0.41, |
| "grad_norm": 3.7949423789978027, |
| "learning_rate": 9.261016949152544e-06, |
| "loss": 1.3334, |
| "step": 16340 |
| }, |
| { |
| "epoch": 0.41, |
| "grad_norm": 1.7965031862258911, |
| "learning_rate": 9.254237288135594e-06, |
| "loss": 1.5022, |
| "step": 16350 |
| }, |
| { |
| "epoch": 0.41, |
| "grad_norm": 4.245846271514893, |
| "learning_rate": 9.247457627118645e-06, |
| "loss": 1.2312, |
| "step": 16360 |
| }, |
| { |
| "epoch": 0.41, |
| "grad_norm": 7.283750057220459, |
| "learning_rate": 9.240677966101695e-06, |
| "loss": 1.2448, |
| "step": 16370 |
| }, |
| { |
| "epoch": 0.41, |
| "grad_norm": 3.51558780670166, |
| "learning_rate": 9.233898305084746e-06, |
| "loss": 1.4823, |
| "step": 16380 |
| }, |
| { |
| "epoch": 0.41, |
| "grad_norm": 7.842286586761475, |
| "learning_rate": 9.227118644067798e-06, |
| "loss": 1.3931, |
| "step": 16390 |
| }, |
| { |
| "epoch": 0.41, |
| "grad_norm": 2.6128389835357666, |
| "learning_rate": 9.220338983050847e-06, |
| "loss": 1.4359, |
| "step": 16400 |
| }, |
| { |
| "epoch": 0.41, |
| "grad_norm": 2.5427112579345703, |
| "learning_rate": 9.213559322033899e-06, |
| "loss": 1.4364, |
| "step": 16410 |
| }, |
| { |
| "epoch": 0.41, |
| "grad_norm": 3.0635411739349365, |
| "learning_rate": 9.20677966101695e-06, |
| "loss": 1.4804, |
| "step": 16420 |
| }, |
| { |
| "epoch": 0.41, |
| "grad_norm": 4.00955057144165, |
| "learning_rate": 9.200000000000002e-06, |
| "loss": 1.1819, |
| "step": 16430 |
| }, |
| { |
| "epoch": 0.41, |
| "grad_norm": 2.0021607875823975, |
| "learning_rate": 9.193220338983051e-06, |
| "loss": 1.312, |
| "step": 16440 |
| }, |
| { |
| "epoch": 0.41, |
| "grad_norm": 6.373915672302246, |
| "learning_rate": 9.186440677966101e-06, |
| "loss": 1.5638, |
| "step": 16450 |
| }, |
| { |
| "epoch": 0.41, |
| "grad_norm": 7.486032962799072, |
| "learning_rate": 9.179661016949153e-06, |
| "loss": 1.5156, |
| "step": 16460 |
| }, |
| { |
| "epoch": 0.41, |
| "grad_norm": 7.894711971282959, |
| "learning_rate": 9.172881355932204e-06, |
| "loss": 1.3042, |
| "step": 16470 |
| }, |
| { |
| "epoch": 0.41, |
| "grad_norm": 4.769121170043945, |
| "learning_rate": 9.166101694915255e-06, |
| "loss": 1.5031, |
| "step": 16480 |
| }, |
| { |
| "epoch": 0.41, |
| "grad_norm": 5.979970455169678, |
| "learning_rate": 9.159322033898305e-06, |
| "loss": 1.2542, |
| "step": 16490 |
| }, |
| { |
| "epoch": 0.41, |
| "grad_norm": 6.4072651863098145, |
| "learning_rate": 9.152542372881356e-06, |
| "loss": 1.319, |
| "step": 16500 |
| }, |
| { |
| "epoch": 0.41, |
| "eval_loss": 1.3969231843948364, |
| "eval_runtime": 66.1684, |
| "eval_samples_per_second": 15.113, |
| "eval_steps_per_second": 15.113, |
| "step": 16500 |
| }, |
| { |
| "epoch": 0.41, |
| "grad_norm": 6.439510822296143, |
| "learning_rate": 9.145762711864408e-06, |
| "loss": 1.6106, |
| "step": 16510 |
| }, |
| { |
| "epoch": 0.41, |
| "grad_norm": 3.3735623359680176, |
| "learning_rate": 9.13898305084746e-06, |
| "loss": 1.3179, |
| "step": 16520 |
| }, |
| { |
| "epoch": 0.41, |
| "grad_norm": 4.679867267608643, |
| "learning_rate": 9.132203389830509e-06, |
| "loss": 1.465, |
| "step": 16530 |
| }, |
| { |
| "epoch": 0.41, |
| "grad_norm": 3.878396987915039, |
| "learning_rate": 9.12542372881356e-06, |
| "loss": 1.2999, |
| "step": 16540 |
| }, |
| { |
| "epoch": 0.41, |
| "grad_norm": 5.397853374481201, |
| "learning_rate": 9.11864406779661e-06, |
| "loss": 1.3575, |
| "step": 16550 |
| }, |
| { |
| "epoch": 0.41, |
| "grad_norm": 3.2240700721740723, |
| "learning_rate": 9.111864406779661e-06, |
| "loss": 1.5086, |
| "step": 16560 |
| }, |
| { |
| "epoch": 0.41, |
| "grad_norm": 4.239155292510986, |
| "learning_rate": 9.105084745762713e-06, |
| "loss": 1.3695, |
| "step": 16570 |
| }, |
| { |
| "epoch": 0.41, |
| "grad_norm": 9.791718482971191, |
| "learning_rate": 9.098305084745763e-06, |
| "loss": 1.4928, |
| "step": 16580 |
| }, |
| { |
| "epoch": 0.41, |
| "grad_norm": 4.839406967163086, |
| "learning_rate": 9.091525423728814e-06, |
| "loss": 1.4183, |
| "step": 16590 |
| }, |
| { |
| "epoch": 0.41, |
| "grad_norm": 3.728440046310425, |
| "learning_rate": 9.084745762711865e-06, |
| "loss": 1.2819, |
| "step": 16600 |
| }, |
| { |
| "epoch": 0.42, |
| "grad_norm": 2.5298233032226562, |
| "learning_rate": 9.077966101694917e-06, |
| "loss": 1.3922, |
| "step": 16610 |
| }, |
| { |
| "epoch": 0.42, |
| "grad_norm": 4.599859714508057, |
| "learning_rate": 9.071186440677966e-06, |
| "loss": 1.4627, |
| "step": 16620 |
| }, |
| { |
| "epoch": 0.42, |
| "grad_norm": 2.5592544078826904, |
| "learning_rate": 9.064406779661018e-06, |
| "loss": 1.544, |
| "step": 16630 |
| }, |
| { |
| "epoch": 0.42, |
| "grad_norm": 8.281981468200684, |
| "learning_rate": 9.05762711864407e-06, |
| "loss": 1.4084, |
| "step": 16640 |
| }, |
| { |
| "epoch": 0.42, |
| "grad_norm": 5.964704990386963, |
| "learning_rate": 9.05084745762712e-06, |
| "loss": 1.4186, |
| "step": 16650 |
| }, |
| { |
| "epoch": 0.42, |
| "grad_norm": 4.577818393707275, |
| "learning_rate": 9.04406779661017e-06, |
| "loss": 1.2691, |
| "step": 16660 |
| }, |
| { |
| "epoch": 0.42, |
| "grad_norm": 3.4889256954193115, |
| "learning_rate": 9.03728813559322e-06, |
| "loss": 1.3494, |
| "step": 16670 |
| }, |
| { |
| "epoch": 0.42, |
| "grad_norm": 4.458433628082275, |
| "learning_rate": 9.030508474576271e-06, |
| "loss": 1.4639, |
| "step": 16680 |
| }, |
| { |
| "epoch": 0.42, |
| "grad_norm": 5.813331127166748, |
| "learning_rate": 9.023728813559323e-06, |
| "loss": 1.2206, |
| "step": 16690 |
| }, |
| { |
| "epoch": 0.42, |
| "grad_norm": 2.583115339279175, |
| "learning_rate": 9.016949152542374e-06, |
| "loss": 1.4299, |
| "step": 16700 |
| }, |
| { |
| "epoch": 0.42, |
| "grad_norm": 7.3467206954956055, |
| "learning_rate": 9.010169491525424e-06, |
| "loss": 1.3001, |
| "step": 16710 |
| }, |
| { |
| "epoch": 0.42, |
| "grad_norm": 3.896238088607788, |
| "learning_rate": 9.003389830508475e-06, |
| "loss": 1.3541, |
| "step": 16720 |
| }, |
| { |
| "epoch": 0.42, |
| "grad_norm": 3.967496156692505, |
| "learning_rate": 8.996610169491527e-06, |
| "loss": 1.3315, |
| "step": 16730 |
| }, |
| { |
| "epoch": 0.42, |
| "grad_norm": 2.9316656589508057, |
| "learning_rate": 8.989830508474578e-06, |
| "loss": 1.3935, |
| "step": 16740 |
| }, |
| { |
| "epoch": 0.42, |
| "grad_norm": 1.619555950164795, |
| "learning_rate": 8.983050847457628e-06, |
| "loss": 1.2879, |
| "step": 16750 |
| }, |
| { |
| "epoch": 0.42, |
| "grad_norm": 3.6497581005096436, |
| "learning_rate": 8.976271186440678e-06, |
| "loss": 1.5195, |
| "step": 16760 |
| }, |
| { |
| "epoch": 0.42, |
| "grad_norm": 5.33650016784668, |
| "learning_rate": 8.969491525423729e-06, |
| "loss": 1.4568, |
| "step": 16770 |
| }, |
| { |
| "epoch": 0.42, |
| "grad_norm": 7.370872974395752, |
| "learning_rate": 8.96271186440678e-06, |
| "loss": 1.5122, |
| "step": 16780 |
| }, |
| { |
| "epoch": 0.42, |
| "grad_norm": 6.295271873474121, |
| "learning_rate": 8.955932203389832e-06, |
| "loss": 1.177, |
| "step": 16790 |
| }, |
| { |
| "epoch": 0.42, |
| "grad_norm": 7.165903091430664, |
| "learning_rate": 8.949152542372881e-06, |
| "loss": 1.3597, |
| "step": 16800 |
| }, |
| { |
| "epoch": 0.42, |
| "grad_norm": 4.152050018310547, |
| "learning_rate": 8.942372881355933e-06, |
| "loss": 1.3837, |
| "step": 16810 |
| }, |
| { |
| "epoch": 0.42, |
| "grad_norm": 2.2240993976593018, |
| "learning_rate": 8.935593220338984e-06, |
| "loss": 1.3974, |
| "step": 16820 |
| }, |
| { |
| "epoch": 0.42, |
| "grad_norm": 2.854851245880127, |
| "learning_rate": 8.928813559322036e-06, |
| "loss": 1.4478, |
| "step": 16830 |
| }, |
| { |
| "epoch": 0.42, |
| "grad_norm": 4.584380149841309, |
| "learning_rate": 8.922033898305085e-06, |
| "loss": 1.4808, |
| "step": 16840 |
| }, |
| { |
| "epoch": 0.42, |
| "grad_norm": 5.496336460113525, |
| "learning_rate": 8.915254237288137e-06, |
| "loss": 1.4587, |
| "step": 16850 |
| }, |
| { |
| "epoch": 0.42, |
| "grad_norm": 9.56225872039795, |
| "learning_rate": 8.908474576271188e-06, |
| "loss": 1.2726, |
| "step": 16860 |
| }, |
| { |
| "epoch": 0.42, |
| "grad_norm": 3.6302268505096436, |
| "learning_rate": 8.901694915254238e-06, |
| "loss": 1.4753, |
| "step": 16870 |
| }, |
| { |
| "epoch": 0.42, |
| "grad_norm": 9.815473556518555, |
| "learning_rate": 8.89491525423729e-06, |
| "loss": 1.3163, |
| "step": 16880 |
| }, |
| { |
| "epoch": 0.42, |
| "grad_norm": 6.138949871063232, |
| "learning_rate": 8.888135593220339e-06, |
| "loss": 1.3637, |
| "step": 16890 |
| }, |
| { |
| "epoch": 0.42, |
| "grad_norm": 5.0360212326049805, |
| "learning_rate": 8.88135593220339e-06, |
| "loss": 1.5961, |
| "step": 16900 |
| }, |
| { |
| "epoch": 0.42, |
| "grad_norm": 11.983102798461914, |
| "learning_rate": 8.874576271186442e-06, |
| "loss": 1.3359, |
| "step": 16910 |
| }, |
| { |
| "epoch": 0.42, |
| "grad_norm": 3.8704030513763428, |
| "learning_rate": 8.867796610169492e-06, |
| "loss": 1.2624, |
| "step": 16920 |
| }, |
| { |
| "epoch": 0.42, |
| "grad_norm": 8.61357593536377, |
| "learning_rate": 8.861016949152543e-06, |
| "loss": 1.4325, |
| "step": 16930 |
| }, |
| { |
| "epoch": 0.42, |
| "grad_norm": 1.4079731702804565, |
| "learning_rate": 8.854237288135594e-06, |
| "loss": 1.3984, |
| "step": 16940 |
| }, |
| { |
| "epoch": 0.42, |
| "grad_norm": 4.425031661987305, |
| "learning_rate": 8.847457627118646e-06, |
| "loss": 1.3202, |
| "step": 16950 |
| }, |
| { |
| "epoch": 0.42, |
| "grad_norm": 7.565074920654297, |
| "learning_rate": 8.840677966101695e-06, |
| "loss": 1.4561, |
| "step": 16960 |
| }, |
| { |
| "epoch": 0.42, |
| "grad_norm": 4.513836860656738, |
| "learning_rate": 8.833898305084747e-06, |
| "loss": 1.52, |
| "step": 16970 |
| }, |
| { |
| "epoch": 0.42, |
| "grad_norm": 7.830351829528809, |
| "learning_rate": 8.827118644067797e-06, |
| "loss": 1.4321, |
| "step": 16980 |
| }, |
| { |
| "epoch": 0.42, |
| "grad_norm": 4.269097328186035, |
| "learning_rate": 8.820338983050848e-06, |
| "loss": 1.4247, |
| "step": 16990 |
| }, |
| { |
| "epoch": 0.42, |
| "grad_norm": 3.2125535011291504, |
| "learning_rate": 8.8135593220339e-06, |
| "loss": 1.1334, |
| "step": 17000 |
| }, |
| { |
| "epoch": 0.42, |
| "eval_loss": 1.3314954042434692, |
| "eval_runtime": 66.1036, |
| "eval_samples_per_second": 15.128, |
| "eval_steps_per_second": 15.128, |
| "step": 17000 |
| }, |
| { |
| "epoch": 0.43, |
| "grad_norm": 3.5944836139678955, |
| "learning_rate": 8.806779661016949e-06, |
| "loss": 1.2363, |
| "step": 17010 |
| }, |
| { |
| "epoch": 0.43, |
| "grad_norm": 3.680687189102173, |
| "learning_rate": 8.8e-06, |
| "loss": 1.2968, |
| "step": 17020 |
| }, |
| { |
| "epoch": 0.43, |
| "grad_norm": 9.756001472473145, |
| "learning_rate": 8.793220338983052e-06, |
| "loss": 1.378, |
| "step": 17030 |
| }, |
| { |
| "epoch": 0.43, |
| "grad_norm": 3.8217949867248535, |
| "learning_rate": 8.786440677966103e-06, |
| "loss": 1.1933, |
| "step": 17040 |
| }, |
| { |
| "epoch": 0.43, |
| "grad_norm": 4.635326862335205, |
| "learning_rate": 8.779661016949153e-06, |
| "loss": 1.4635, |
| "step": 17050 |
| }, |
| { |
| "epoch": 0.43, |
| "grad_norm": 12.349688529968262, |
| "learning_rate": 8.772881355932204e-06, |
| "loss": 1.1233, |
| "step": 17060 |
| }, |
| { |
| "epoch": 0.43, |
| "grad_norm": 2.0350751876831055, |
| "learning_rate": 8.766101694915254e-06, |
| "loss": 1.5517, |
| "step": 17070 |
| }, |
| { |
| "epoch": 0.43, |
| "grad_norm": 2.533076524734497, |
| "learning_rate": 8.759322033898305e-06, |
| "loss": 1.2468, |
| "step": 17080 |
| }, |
| { |
| "epoch": 0.43, |
| "grad_norm": 5.4851813316345215, |
| "learning_rate": 8.752542372881357e-06, |
| "loss": 1.3057, |
| "step": 17090 |
| }, |
| { |
| "epoch": 0.43, |
| "grad_norm": 5.9718499183654785, |
| "learning_rate": 8.745762711864407e-06, |
| "loss": 1.3685, |
| "step": 17100 |
| }, |
| { |
| "epoch": 0.43, |
| "grad_norm": 6.759610176086426, |
| "learning_rate": 8.738983050847458e-06, |
| "loss": 1.2429, |
| "step": 17110 |
| }, |
| { |
| "epoch": 0.43, |
| "grad_norm": 10.515771865844727, |
| "learning_rate": 8.73220338983051e-06, |
| "loss": 1.1885, |
| "step": 17120 |
| }, |
| { |
| "epoch": 0.43, |
| "grad_norm": 5.439294815063477, |
| "learning_rate": 8.72542372881356e-06, |
| "loss": 1.4804, |
| "step": 17130 |
| }, |
| { |
| "epoch": 0.43, |
| "grad_norm": 3.2483952045440674, |
| "learning_rate": 8.71864406779661e-06, |
| "loss": 1.2879, |
| "step": 17140 |
| }, |
| { |
| "epoch": 0.43, |
| "grad_norm": 12.16537857055664, |
| "learning_rate": 8.711864406779662e-06, |
| "loss": 1.365, |
| "step": 17150 |
| }, |
| { |
| "epoch": 0.43, |
| "grad_norm": 7.272519588470459, |
| "learning_rate": 8.705084745762713e-06, |
| "loss": 1.2356, |
| "step": 17160 |
| }, |
| { |
| "epoch": 0.43, |
| "grad_norm": 5.10579252243042, |
| "learning_rate": 8.698305084745765e-06, |
| "loss": 1.2475, |
| "step": 17170 |
| }, |
| { |
| "epoch": 0.43, |
| "grad_norm": 2.060873031616211, |
| "learning_rate": 8.691525423728814e-06, |
| "loss": 1.2728, |
| "step": 17180 |
| }, |
| { |
| "epoch": 0.43, |
| "grad_norm": 4.275463581085205, |
| "learning_rate": 8.684745762711864e-06, |
| "loss": 1.3372, |
| "step": 17190 |
| }, |
| { |
| "epoch": 0.43, |
| "grad_norm": 7.369024753570557, |
| "learning_rate": 8.677966101694915e-06, |
| "loss": 1.3319, |
| "step": 17200 |
| }, |
| { |
| "epoch": 0.43, |
| "grad_norm": 4.748632431030273, |
| "learning_rate": 8.671186440677967e-06, |
| "loss": 1.3954, |
| "step": 17210 |
| }, |
| { |
| "epoch": 0.43, |
| "grad_norm": 6.593578815460205, |
| "learning_rate": 8.664406779661018e-06, |
| "loss": 1.218, |
| "step": 17220 |
| }, |
| { |
| "epoch": 0.43, |
| "grad_norm": 9.88740348815918, |
| "learning_rate": 8.657627118644068e-06, |
| "loss": 1.3813, |
| "step": 17230 |
| }, |
| { |
| "epoch": 0.43, |
| "grad_norm": 5.244362831115723, |
| "learning_rate": 8.65084745762712e-06, |
| "loss": 1.3307, |
| "step": 17240 |
| }, |
| { |
| "epoch": 0.43, |
| "grad_norm": 6.388607025146484, |
| "learning_rate": 8.64406779661017e-06, |
| "loss": 1.3975, |
| "step": 17250 |
| }, |
| { |
| "epoch": 0.43, |
| "grad_norm": 5.940032005310059, |
| "learning_rate": 8.637288135593222e-06, |
| "loss": 1.452, |
| "step": 17260 |
| }, |
| { |
| "epoch": 0.43, |
| "grad_norm": 1.8261200189590454, |
| "learning_rate": 8.630508474576272e-06, |
| "loss": 1.3114, |
| "step": 17270 |
| }, |
| { |
| "epoch": 0.43, |
| "grad_norm": 4.878362655639648, |
| "learning_rate": 8.623728813559322e-06, |
| "loss": 1.3152, |
| "step": 17280 |
| }, |
| { |
| "epoch": 0.43, |
| "grad_norm": 5.558933258056641, |
| "learning_rate": 8.616949152542373e-06, |
| "loss": 1.5567, |
| "step": 17290 |
| }, |
| { |
| "epoch": 0.43, |
| "grad_norm": 2.346237897872925, |
| "learning_rate": 8.610169491525424e-06, |
| "loss": 1.4462, |
| "step": 17300 |
| }, |
| { |
| "epoch": 0.43, |
| "grad_norm": 5.712830066680908, |
| "learning_rate": 8.603389830508476e-06, |
| "loss": 1.2658, |
| "step": 17310 |
| }, |
| { |
| "epoch": 0.43, |
| "grad_norm": 6.277383804321289, |
| "learning_rate": 8.596610169491526e-06, |
| "loss": 1.3562, |
| "step": 17320 |
| }, |
| { |
| "epoch": 0.43, |
| "grad_norm": 2.8792171478271484, |
| "learning_rate": 8.589830508474577e-06, |
| "loss": 1.2409, |
| "step": 17330 |
| }, |
| { |
| "epoch": 0.43, |
| "grad_norm": 4.000193119049072, |
| "learning_rate": 8.583050847457628e-06, |
| "loss": 1.3503, |
| "step": 17340 |
| }, |
| { |
| "epoch": 0.43, |
| "grad_norm": 6.255504131317139, |
| "learning_rate": 8.57627118644068e-06, |
| "loss": 1.3411, |
| "step": 17350 |
| }, |
| { |
| "epoch": 0.43, |
| "grad_norm": 5.366970539093018, |
| "learning_rate": 8.56949152542373e-06, |
| "loss": 1.2918, |
| "step": 17360 |
| }, |
| { |
| "epoch": 0.43, |
| "grad_norm": 1.9505482912063599, |
| "learning_rate": 8.56271186440678e-06, |
| "loss": 1.3139, |
| "step": 17370 |
| }, |
| { |
| "epoch": 0.43, |
| "grad_norm": 8.022522926330566, |
| "learning_rate": 8.55593220338983e-06, |
| "loss": 1.515, |
| "step": 17380 |
| }, |
| { |
| "epoch": 0.43, |
| "grad_norm": 5.483621120452881, |
| "learning_rate": 8.549152542372882e-06, |
| "loss": 1.4464, |
| "step": 17390 |
| }, |
| { |
| "epoch": 0.43, |
| "grad_norm": 5.349776268005371, |
| "learning_rate": 8.542372881355933e-06, |
| "loss": 1.5864, |
| "step": 17400 |
| }, |
| { |
| "epoch": 0.44, |
| "grad_norm": 3.4136993885040283, |
| "learning_rate": 8.535593220338983e-06, |
| "loss": 1.2964, |
| "step": 17410 |
| }, |
| { |
| "epoch": 0.44, |
| "grad_norm": 4.270326614379883, |
| "learning_rate": 8.528813559322034e-06, |
| "loss": 1.3736, |
| "step": 17420 |
| }, |
| { |
| "epoch": 0.44, |
| "grad_norm": 2.886993169784546, |
| "learning_rate": 8.522033898305086e-06, |
| "loss": 1.3513, |
| "step": 17430 |
| }, |
| { |
| "epoch": 0.44, |
| "grad_norm": 4.42819881439209, |
| "learning_rate": 8.515254237288136e-06, |
| "loss": 1.4226, |
| "step": 17440 |
| }, |
| { |
| "epoch": 0.44, |
| "grad_norm": 5.844710350036621, |
| "learning_rate": 8.508474576271187e-06, |
| "loss": 1.3135, |
| "step": 17450 |
| }, |
| { |
| "epoch": 0.44, |
| "grad_norm": 2.4220869541168213, |
| "learning_rate": 8.501694915254238e-06, |
| "loss": 1.165, |
| "step": 17460 |
| }, |
| { |
| "epoch": 0.44, |
| "grad_norm": 4.976444244384766, |
| "learning_rate": 8.49491525423729e-06, |
| "loss": 1.1964, |
| "step": 17470 |
| }, |
| { |
| "epoch": 0.44, |
| "grad_norm": 12.55490779876709, |
| "learning_rate": 8.48813559322034e-06, |
| "loss": 1.5891, |
| "step": 17480 |
| }, |
| { |
| "epoch": 0.44, |
| "grad_norm": 4.102578163146973, |
| "learning_rate": 8.481355932203391e-06, |
| "loss": 1.445, |
| "step": 17490 |
| }, |
| { |
| "epoch": 0.44, |
| "grad_norm": 2.6767678260803223, |
| "learning_rate": 8.47457627118644e-06, |
| "loss": 1.536, |
| "step": 17500 |
| }, |
| { |
| "epoch": 0.44, |
| "eval_loss": 1.388473391532898, |
| "eval_runtime": 66.17, |
| "eval_samples_per_second": 15.113, |
| "eval_steps_per_second": 15.113, |
| "step": 17500 |
| }, |
| { |
| "epoch": 0.44, |
| "grad_norm": 5.189449310302734, |
| "learning_rate": 8.467796610169492e-06, |
| "loss": 1.352, |
| "step": 17510 |
| }, |
| { |
| "epoch": 0.44, |
| "grad_norm": 2.765326499938965, |
| "learning_rate": 8.461016949152543e-06, |
| "loss": 1.5275, |
| "step": 17520 |
| }, |
| { |
| "epoch": 0.44, |
| "grad_norm": 3.60880446434021, |
| "learning_rate": 8.454237288135593e-06, |
| "loss": 1.2793, |
| "step": 17530 |
| }, |
| { |
| "epoch": 0.44, |
| "grad_norm": 3.072174310684204, |
| "learning_rate": 8.447457627118644e-06, |
| "loss": 1.2746, |
| "step": 17540 |
| }, |
| { |
| "epoch": 0.44, |
| "grad_norm": 3.3222594261169434, |
| "learning_rate": 8.440677966101696e-06, |
| "loss": 1.3359, |
| "step": 17550 |
| }, |
| { |
| "epoch": 0.44, |
| "grad_norm": 1.3298100233078003, |
| "learning_rate": 8.433898305084747e-06, |
| "loss": 1.3826, |
| "step": 17560 |
| }, |
| { |
| "epoch": 0.44, |
| "grad_norm": 4.06503438949585, |
| "learning_rate": 8.427118644067797e-06, |
| "loss": 1.3339, |
| "step": 17570 |
| }, |
| { |
| "epoch": 0.44, |
| "grad_norm": 6.150786399841309, |
| "learning_rate": 8.420338983050848e-06, |
| "loss": 1.3414, |
| "step": 17580 |
| }, |
| { |
| "epoch": 0.44, |
| "grad_norm": 9.045748710632324, |
| "learning_rate": 8.413559322033898e-06, |
| "loss": 1.3183, |
| "step": 17590 |
| }, |
| { |
| "epoch": 0.44, |
| "grad_norm": 4.415981769561768, |
| "learning_rate": 8.40677966101695e-06, |
| "loss": 1.3323, |
| "step": 17600 |
| }, |
| { |
| "epoch": 0.44, |
| "grad_norm": 8.40713882446289, |
| "learning_rate": 8.400000000000001e-06, |
| "loss": 1.2519, |
| "step": 17610 |
| }, |
| { |
| "epoch": 0.44, |
| "grad_norm": 6.841925144195557, |
| "learning_rate": 8.39322033898305e-06, |
| "loss": 1.4799, |
| "step": 17620 |
| }, |
| { |
| "epoch": 0.44, |
| "grad_norm": 10.993582725524902, |
| "learning_rate": 8.386440677966102e-06, |
| "loss": 1.3495, |
| "step": 17630 |
| }, |
| { |
| "epoch": 0.44, |
| "grad_norm": 2.115006685256958, |
| "learning_rate": 8.379661016949153e-06, |
| "loss": 1.3902, |
| "step": 17640 |
| }, |
| { |
| "epoch": 0.44, |
| "grad_norm": 3.485889434814453, |
| "learning_rate": 8.372881355932205e-06, |
| "loss": 1.4462, |
| "step": 17650 |
| }, |
| { |
| "epoch": 0.44, |
| "grad_norm": 5.725930690765381, |
| "learning_rate": 8.366101694915255e-06, |
| "loss": 1.4603, |
| "step": 17660 |
| }, |
| { |
| "epoch": 0.44, |
| "grad_norm": 6.5530548095703125, |
| "learning_rate": 8.359322033898306e-06, |
| "loss": 1.4954, |
| "step": 17670 |
| }, |
| { |
| "epoch": 0.44, |
| "grad_norm": 6.006740570068359, |
| "learning_rate": 8.352542372881357e-06, |
| "loss": 1.3556, |
| "step": 17680 |
| }, |
| { |
| "epoch": 0.44, |
| "grad_norm": 5.5967936515808105, |
| "learning_rate": 8.345762711864409e-06, |
| "loss": 1.0719, |
| "step": 17690 |
| }, |
| { |
| "epoch": 0.44, |
| "grad_norm": 5.70643949508667, |
| "learning_rate": 8.338983050847458e-06, |
| "loss": 1.2968, |
| "step": 17700 |
| }, |
| { |
| "epoch": 0.44, |
| "grad_norm": 3.280465841293335, |
| "learning_rate": 8.332203389830508e-06, |
| "loss": 1.2669, |
| "step": 17710 |
| }, |
| { |
| "epoch": 0.44, |
| "grad_norm": 3.098782777786255, |
| "learning_rate": 8.32542372881356e-06, |
| "loss": 1.4704, |
| "step": 17720 |
| }, |
| { |
| "epoch": 0.44, |
| "grad_norm": 2.825495958328247, |
| "learning_rate": 8.318644067796611e-06, |
| "loss": 1.2165, |
| "step": 17730 |
| }, |
| { |
| "epoch": 0.44, |
| "grad_norm": 3.8040249347686768, |
| "learning_rate": 8.311864406779662e-06, |
| "loss": 1.3623, |
| "step": 17740 |
| }, |
| { |
| "epoch": 0.44, |
| "grad_norm": 3.4438130855560303, |
| "learning_rate": 8.305084745762712e-06, |
| "loss": 1.4255, |
| "step": 17750 |
| }, |
| { |
| "epoch": 0.44, |
| "grad_norm": 2.187945604324341, |
| "learning_rate": 8.298305084745763e-06, |
| "loss": 1.2181, |
| "step": 17760 |
| }, |
| { |
| "epoch": 0.44, |
| "grad_norm": 2.0644514560699463, |
| "learning_rate": 8.291525423728815e-06, |
| "loss": 1.4669, |
| "step": 17770 |
| }, |
| { |
| "epoch": 0.44, |
| "grad_norm": 4.314215660095215, |
| "learning_rate": 8.284745762711866e-06, |
| "loss": 1.3776, |
| "step": 17780 |
| }, |
| { |
| "epoch": 0.44, |
| "grad_norm": 2.948415517807007, |
| "learning_rate": 8.277966101694916e-06, |
| "loss": 1.3198, |
| "step": 17790 |
| }, |
| { |
| "epoch": 0.45, |
| "grad_norm": 7.77257776260376, |
| "learning_rate": 8.271186440677966e-06, |
| "loss": 1.3826, |
| "step": 17800 |
| }, |
| { |
| "epoch": 0.45, |
| "grad_norm": 2.2085230350494385, |
| "learning_rate": 8.264406779661017e-06, |
| "loss": 1.3443, |
| "step": 17810 |
| }, |
| { |
| "epoch": 0.45, |
| "grad_norm": 2.8318214416503906, |
| "learning_rate": 8.257627118644068e-06, |
| "loss": 1.3126, |
| "step": 17820 |
| }, |
| { |
| "epoch": 0.45, |
| "grad_norm": 4.544606685638428, |
| "learning_rate": 8.25084745762712e-06, |
| "loss": 1.2955, |
| "step": 17830 |
| }, |
| { |
| "epoch": 0.45, |
| "grad_norm": 5.595729351043701, |
| "learning_rate": 8.24406779661017e-06, |
| "loss": 1.3814, |
| "step": 17840 |
| }, |
| { |
| "epoch": 0.45, |
| "grad_norm": 6.201912879943848, |
| "learning_rate": 8.237288135593221e-06, |
| "loss": 1.4243, |
| "step": 17850 |
| }, |
| { |
| "epoch": 0.45, |
| "grad_norm": 7.697713851928711, |
| "learning_rate": 8.230508474576272e-06, |
| "loss": 1.326, |
| "step": 17860 |
| }, |
| { |
| "epoch": 0.45, |
| "grad_norm": 3.7588555812835693, |
| "learning_rate": 8.223728813559324e-06, |
| "loss": 1.1265, |
| "step": 17870 |
| }, |
| { |
| "epoch": 0.45, |
| "grad_norm": 8.472000122070312, |
| "learning_rate": 8.216949152542373e-06, |
| "loss": 1.2678, |
| "step": 17880 |
| }, |
| { |
| "epoch": 0.45, |
| "grad_norm": 4.802706241607666, |
| "learning_rate": 8.210169491525425e-06, |
| "loss": 1.5116, |
| "step": 17890 |
| }, |
| { |
| "epoch": 0.45, |
| "grad_norm": 5.357900142669678, |
| "learning_rate": 8.203389830508475e-06, |
| "loss": 1.4435, |
| "step": 17900 |
| }, |
| { |
| "epoch": 0.45, |
| "grad_norm": 3.5331151485443115, |
| "learning_rate": 8.196610169491526e-06, |
| "loss": 1.3838, |
| "step": 17910 |
| }, |
| { |
| "epoch": 0.45, |
| "grad_norm": 2.591485023498535, |
| "learning_rate": 8.189830508474577e-06, |
| "loss": 1.3595, |
| "step": 17920 |
| }, |
| { |
| "epoch": 0.45, |
| "grad_norm": 9.1065034866333, |
| "learning_rate": 8.183050847457627e-06, |
| "loss": 1.4109, |
| "step": 17930 |
| }, |
| { |
| "epoch": 0.45, |
| "grad_norm": 3.2992680072784424, |
| "learning_rate": 8.176271186440678e-06, |
| "loss": 1.3554, |
| "step": 17940 |
| }, |
| { |
| "epoch": 0.45, |
| "grad_norm": 10.191650390625, |
| "learning_rate": 8.16949152542373e-06, |
| "loss": 1.3561, |
| "step": 17950 |
| }, |
| { |
| "epoch": 0.45, |
| "grad_norm": 5.769218444824219, |
| "learning_rate": 8.162711864406781e-06, |
| "loss": 1.4044, |
| "step": 17960 |
| }, |
| { |
| "epoch": 0.45, |
| "grad_norm": 47.62531661987305, |
| "learning_rate": 8.155932203389831e-06, |
| "loss": 1.6211, |
| "step": 17970 |
| }, |
| { |
| "epoch": 0.45, |
| "grad_norm": 3.514390230178833, |
| "learning_rate": 8.149152542372882e-06, |
| "loss": 1.288, |
| "step": 17980 |
| }, |
| { |
| "epoch": 0.45, |
| "grad_norm": 3.9963278770446777, |
| "learning_rate": 8.142372881355934e-06, |
| "loss": 1.3625, |
| "step": 17990 |
| }, |
| { |
| "epoch": 0.45, |
| "grad_norm": 4.2144856452941895, |
| "learning_rate": 8.135593220338983e-06, |
| "loss": 1.3154, |
| "step": 18000 |
| }, |
| { |
| "epoch": 0.45, |
| "eval_loss": 1.3663108348846436, |
| "eval_runtime": 66.1644, |
| "eval_samples_per_second": 15.114, |
| "eval_steps_per_second": 15.114, |
| "step": 18000 |
| }, |
| { |
| "epoch": 0.45, |
| "grad_norm": 3.047025680541992, |
| "learning_rate": 8.128813559322035e-06, |
| "loss": 1.3131, |
| "step": 18010 |
| }, |
| { |
| "epoch": 0.45, |
| "grad_norm": 5.798911094665527, |
| "learning_rate": 8.122033898305085e-06, |
| "loss": 1.4371, |
| "step": 18020 |
| }, |
| { |
| "epoch": 0.45, |
| "grad_norm": 3.8710880279541016, |
| "learning_rate": 8.115254237288136e-06, |
| "loss": 1.5147, |
| "step": 18030 |
| }, |
| { |
| "epoch": 0.45, |
| "grad_norm": 6.445448398590088, |
| "learning_rate": 8.108474576271187e-06, |
| "loss": 1.3332, |
| "step": 18040 |
| }, |
| { |
| "epoch": 0.45, |
| "grad_norm": 5.0877275466918945, |
| "learning_rate": 8.101694915254237e-06, |
| "loss": 1.5076, |
| "step": 18050 |
| }, |
| { |
| "epoch": 0.45, |
| "grad_norm": 6.0062737464904785, |
| "learning_rate": 8.094915254237289e-06, |
| "loss": 1.3257, |
| "step": 18060 |
| }, |
| { |
| "epoch": 0.45, |
| "grad_norm": 3.2079477310180664, |
| "learning_rate": 8.08813559322034e-06, |
| "loss": 1.2162, |
| "step": 18070 |
| }, |
| { |
| "epoch": 0.45, |
| "grad_norm": 2.974025249481201, |
| "learning_rate": 8.081355932203391e-06, |
| "loss": 1.3433, |
| "step": 18080 |
| }, |
| { |
| "epoch": 0.45, |
| "grad_norm": 11.321368217468262, |
| "learning_rate": 8.074576271186441e-06, |
| "loss": 1.4845, |
| "step": 18090 |
| }, |
| { |
| "epoch": 0.45, |
| "grad_norm": 3.768314838409424, |
| "learning_rate": 8.067796610169492e-06, |
| "loss": 1.3288, |
| "step": 18100 |
| }, |
| { |
| "epoch": 0.45, |
| "grad_norm": 3.9936201572418213, |
| "learning_rate": 8.061016949152542e-06, |
| "loss": 1.2507, |
| "step": 18110 |
| }, |
| { |
| "epoch": 0.45, |
| "grad_norm": 3.69484543800354, |
| "learning_rate": 8.054237288135594e-06, |
| "loss": 1.2615, |
| "step": 18120 |
| }, |
| { |
| "epoch": 0.45, |
| "grad_norm": 6.352110862731934, |
| "learning_rate": 8.047457627118645e-06, |
| "loss": 1.2888, |
| "step": 18130 |
| }, |
| { |
| "epoch": 0.45, |
| "grad_norm": 8.259808540344238, |
| "learning_rate": 8.040677966101695e-06, |
| "loss": 1.3588, |
| "step": 18140 |
| }, |
| { |
| "epoch": 0.45, |
| "grad_norm": 8.203502655029297, |
| "learning_rate": 8.033898305084746e-06, |
| "loss": 1.2716, |
| "step": 18150 |
| }, |
| { |
| "epoch": 0.45, |
| "grad_norm": 3.8740904331207275, |
| "learning_rate": 8.027118644067797e-06, |
| "loss": 1.4191, |
| "step": 18160 |
| }, |
| { |
| "epoch": 0.45, |
| "grad_norm": 3.9104349613189697, |
| "learning_rate": 8.020338983050849e-06, |
| "loss": 1.1126, |
| "step": 18170 |
| }, |
| { |
| "epoch": 0.45, |
| "grad_norm": 4.973033428192139, |
| "learning_rate": 8.013559322033899e-06, |
| "loss": 1.4034, |
| "step": 18180 |
| }, |
| { |
| "epoch": 0.45, |
| "grad_norm": 5.2367329597473145, |
| "learning_rate": 8.00677966101695e-06, |
| "loss": 1.4094, |
| "step": 18190 |
| }, |
| { |
| "epoch": 0.46, |
| "grad_norm": 4.842703819274902, |
| "learning_rate": 8.000000000000001e-06, |
| "loss": 1.4423, |
| "step": 18200 |
| }, |
| { |
| "epoch": 0.46, |
| "grad_norm": 6.39046573638916, |
| "learning_rate": 7.993220338983053e-06, |
| "loss": 1.3491, |
| "step": 18210 |
| }, |
| { |
| "epoch": 0.46, |
| "grad_norm": 3.424907922744751, |
| "learning_rate": 7.986440677966102e-06, |
| "loss": 1.268, |
| "step": 18220 |
| }, |
| { |
| "epoch": 0.46, |
| "grad_norm": 4.363860607147217, |
| "learning_rate": 7.979661016949152e-06, |
| "loss": 1.2921, |
| "step": 18230 |
| }, |
| { |
| "epoch": 0.46, |
| "grad_norm": 10.35586929321289, |
| "learning_rate": 7.972881355932204e-06, |
| "loss": 1.4456, |
| "step": 18240 |
| }, |
| { |
| "epoch": 0.46, |
| "grad_norm": 11.094548225402832, |
| "learning_rate": 7.966101694915255e-06, |
| "loss": 1.5682, |
| "step": 18250 |
| }, |
| { |
| "epoch": 0.46, |
| "grad_norm": 10.801241874694824, |
| "learning_rate": 7.959322033898306e-06, |
| "loss": 1.3475, |
| "step": 18260 |
| }, |
| { |
| "epoch": 0.46, |
| "grad_norm": 3.893005132675171, |
| "learning_rate": 7.952542372881356e-06, |
| "loss": 1.3858, |
| "step": 18270 |
| }, |
| { |
| "epoch": 0.46, |
| "grad_norm": 8.01429557800293, |
| "learning_rate": 7.945762711864407e-06, |
| "loss": 1.3695, |
| "step": 18280 |
| }, |
| { |
| "epoch": 0.46, |
| "grad_norm": 9.507207870483398, |
| "learning_rate": 7.938983050847459e-06, |
| "loss": 1.2442, |
| "step": 18290 |
| }, |
| { |
| "epoch": 0.46, |
| "grad_norm": 4.206308364868164, |
| "learning_rate": 7.93220338983051e-06, |
| "loss": 1.241, |
| "step": 18300 |
| }, |
| { |
| "epoch": 0.46, |
| "grad_norm": 7.3320794105529785, |
| "learning_rate": 7.92542372881356e-06, |
| "loss": 1.3338, |
| "step": 18310 |
| }, |
| { |
| "epoch": 0.46, |
| "grad_norm": 4.259428977966309, |
| "learning_rate": 7.91864406779661e-06, |
| "loss": 1.3902, |
| "step": 18320 |
| }, |
| { |
| "epoch": 0.46, |
| "grad_norm": 15.748085975646973, |
| "learning_rate": 7.911864406779661e-06, |
| "loss": 1.282, |
| "step": 18330 |
| }, |
| { |
| "epoch": 0.46, |
| "grad_norm": 7.123409748077393, |
| "learning_rate": 7.905084745762712e-06, |
| "loss": 1.3311, |
| "step": 18340 |
| }, |
| { |
| "epoch": 0.46, |
| "grad_norm": 7.022697925567627, |
| "learning_rate": 7.898305084745764e-06, |
| "loss": 1.292, |
| "step": 18350 |
| }, |
| { |
| "epoch": 0.46, |
| "grad_norm": 3.3042855262756348, |
| "learning_rate": 7.891525423728814e-06, |
| "loss": 1.3587, |
| "step": 18360 |
| }, |
| { |
| "epoch": 0.46, |
| "grad_norm": 3.5053913593292236, |
| "learning_rate": 7.884745762711865e-06, |
| "loss": 1.5188, |
| "step": 18370 |
| }, |
| { |
| "epoch": 0.46, |
| "grad_norm": 2.6854681968688965, |
| "learning_rate": 7.877966101694916e-06, |
| "loss": 1.3633, |
| "step": 18380 |
| }, |
| { |
| "epoch": 0.46, |
| "grad_norm": 3.8879082202911377, |
| "learning_rate": 7.871186440677968e-06, |
| "loss": 1.5491, |
| "step": 18390 |
| }, |
| { |
| "epoch": 0.46, |
| "grad_norm": 9.43989086151123, |
| "learning_rate": 7.864406779661017e-06, |
| "loss": 1.2686, |
| "step": 18400 |
| }, |
| { |
| "epoch": 0.46, |
| "grad_norm": 4.684770107269287, |
| "learning_rate": 7.857627118644069e-06, |
| "loss": 1.2463, |
| "step": 18410 |
| }, |
| { |
| "epoch": 0.46, |
| "grad_norm": 2.699023485183716, |
| "learning_rate": 7.850847457627119e-06, |
| "loss": 1.5216, |
| "step": 18420 |
| }, |
| { |
| "epoch": 0.46, |
| "grad_norm": 3.1254053115844727, |
| "learning_rate": 7.84406779661017e-06, |
| "loss": 1.4231, |
| "step": 18430 |
| }, |
| { |
| "epoch": 0.46, |
| "grad_norm": 3.4634742736816406, |
| "learning_rate": 7.837288135593221e-06, |
| "loss": 1.4174, |
| "step": 18440 |
| }, |
| { |
| "epoch": 0.46, |
| "grad_norm": 2.7554078102111816, |
| "learning_rate": 7.830508474576271e-06, |
| "loss": 1.2646, |
| "step": 18450 |
| }, |
| { |
| "epoch": 0.46, |
| "grad_norm": 7.782464504241943, |
| "learning_rate": 7.823728813559322e-06, |
| "loss": 1.273, |
| "step": 18460 |
| }, |
| { |
| "epoch": 0.46, |
| "grad_norm": 12.717724800109863, |
| "learning_rate": 7.816949152542374e-06, |
| "loss": 1.3536, |
| "step": 18470 |
| }, |
| { |
| "epoch": 0.46, |
| "grad_norm": 4.644845008850098, |
| "learning_rate": 7.810169491525425e-06, |
| "loss": 1.567, |
| "step": 18480 |
| }, |
| { |
| "epoch": 0.46, |
| "grad_norm": 2.032013177871704, |
| "learning_rate": 7.803389830508475e-06, |
| "loss": 1.4428, |
| "step": 18490 |
| }, |
| { |
| "epoch": 0.46, |
| "grad_norm": 8.91115951538086, |
| "learning_rate": 7.796610169491526e-06, |
| "loss": 1.4071, |
| "step": 18500 |
| }, |
| { |
| "epoch": 0.46, |
| "eval_loss": 1.3208402395248413, |
| "eval_runtime": 66.1773, |
| "eval_samples_per_second": 15.111, |
| "eval_steps_per_second": 15.111, |
| "step": 18500 |
| }, |
| { |
| "epoch": 0.46, |
| "grad_norm": 4.1798248291015625, |
| "learning_rate": 7.789830508474578e-06, |
| "loss": 1.3752, |
| "step": 18510 |
| }, |
| { |
| "epoch": 0.46, |
| "grad_norm": 3.6975016593933105, |
| "learning_rate": 7.783050847457628e-06, |
| "loss": 1.3685, |
| "step": 18520 |
| }, |
| { |
| "epoch": 0.46, |
| "grad_norm": 4.421247959136963, |
| "learning_rate": 7.776271186440679e-06, |
| "loss": 1.5264, |
| "step": 18530 |
| }, |
| { |
| "epoch": 0.46, |
| "grad_norm": 8.739845275878906, |
| "learning_rate": 7.769491525423729e-06, |
| "loss": 1.4609, |
| "step": 18540 |
| }, |
| { |
| "epoch": 0.46, |
| "grad_norm": 3.287424325942993, |
| "learning_rate": 7.76271186440678e-06, |
| "loss": 1.1154, |
| "step": 18550 |
| }, |
| { |
| "epoch": 0.46, |
| "grad_norm": 3.1192266941070557, |
| "learning_rate": 7.755932203389831e-06, |
| "loss": 1.2787, |
| "step": 18560 |
| }, |
| { |
| "epoch": 0.46, |
| "grad_norm": 8.368815422058105, |
| "learning_rate": 7.749152542372881e-06, |
| "loss": 1.3339, |
| "step": 18570 |
| }, |
| { |
| "epoch": 0.46, |
| "grad_norm": 6.241825580596924, |
| "learning_rate": 7.742372881355933e-06, |
| "loss": 1.2293, |
| "step": 18580 |
| }, |
| { |
| "epoch": 0.46, |
| "grad_norm": 9.267044067382812, |
| "learning_rate": 7.735593220338984e-06, |
| "loss": 1.4354, |
| "step": 18590 |
| }, |
| { |
| "epoch": 0.47, |
| "grad_norm": 5.569955348968506, |
| "learning_rate": 7.728813559322035e-06, |
| "loss": 1.3941, |
| "step": 18600 |
| }, |
| { |
| "epoch": 0.47, |
| "grad_norm": 11.273942947387695, |
| "learning_rate": 7.722033898305085e-06, |
| "loss": 1.1127, |
| "step": 18610 |
| }, |
| { |
| "epoch": 0.47, |
| "grad_norm": 3.7952206134796143, |
| "learning_rate": 7.715254237288136e-06, |
| "loss": 1.3437, |
| "step": 18620 |
| }, |
| { |
| "epoch": 0.47, |
| "grad_norm": 2.9084484577178955, |
| "learning_rate": 7.708474576271186e-06, |
| "loss": 1.3646, |
| "step": 18630 |
| }, |
| { |
| "epoch": 0.47, |
| "grad_norm": 4.15964937210083, |
| "learning_rate": 7.701694915254238e-06, |
| "loss": 1.4666, |
| "step": 18640 |
| }, |
| { |
| "epoch": 0.47, |
| "grad_norm": 4.938930988311768, |
| "learning_rate": 7.694915254237289e-06, |
| "loss": 1.4065, |
| "step": 18650 |
| }, |
| { |
| "epoch": 0.47, |
| "grad_norm": 8.298666954040527, |
| "learning_rate": 7.688135593220339e-06, |
| "loss": 1.354, |
| "step": 18660 |
| }, |
| { |
| "epoch": 0.47, |
| "grad_norm": 3.1837921142578125, |
| "learning_rate": 7.68135593220339e-06, |
| "loss": 1.3879, |
| "step": 18670 |
| }, |
| { |
| "epoch": 0.47, |
| "grad_norm": 2.978053331375122, |
| "learning_rate": 7.674576271186441e-06, |
| "loss": 1.3765, |
| "step": 18680 |
| }, |
| { |
| "epoch": 0.47, |
| "grad_norm": 5.5602827072143555, |
| "learning_rate": 7.667796610169493e-06, |
| "loss": 1.2135, |
| "step": 18690 |
| }, |
| { |
| "epoch": 0.47, |
| "grad_norm": 5.481113910675049, |
| "learning_rate": 7.661016949152543e-06, |
| "loss": 1.3841, |
| "step": 18700 |
| }, |
| { |
| "epoch": 0.47, |
| "grad_norm": 9.419681549072266, |
| "learning_rate": 7.654237288135594e-06, |
| "loss": 1.2349, |
| "step": 18710 |
| }, |
| { |
| "epoch": 0.47, |
| "grad_norm": 4.955466270446777, |
| "learning_rate": 7.647457627118645e-06, |
| "loss": 1.4208, |
| "step": 18720 |
| }, |
| { |
| "epoch": 0.47, |
| "grad_norm": 5.841220855712891, |
| "learning_rate": 7.640677966101695e-06, |
| "loss": 1.4526, |
| "step": 18730 |
| }, |
| { |
| "epoch": 0.47, |
| "grad_norm": 3.1412012577056885, |
| "learning_rate": 7.633898305084746e-06, |
| "loss": 1.4606, |
| "step": 18740 |
| }, |
| { |
| "epoch": 0.47, |
| "grad_norm": 4.624940395355225, |
| "learning_rate": 7.627118644067797e-06, |
| "loss": 1.3875, |
| "step": 18750 |
| }, |
| { |
| "epoch": 0.47, |
| "grad_norm": 9.828381538391113, |
| "learning_rate": 7.6203389830508476e-06, |
| "loss": 1.4079, |
| "step": 18760 |
| }, |
| { |
| "epoch": 0.47, |
| "grad_norm": 5.299017429351807, |
| "learning_rate": 7.613559322033899e-06, |
| "loss": 1.278, |
| "step": 18770 |
| }, |
| { |
| "epoch": 0.47, |
| "grad_norm": 6.449117183685303, |
| "learning_rate": 7.6067796610169495e-06, |
| "loss": 1.4141, |
| "step": 18780 |
| }, |
| { |
| "epoch": 0.47, |
| "grad_norm": 6.961145401000977, |
| "learning_rate": 7.600000000000001e-06, |
| "loss": 1.2828, |
| "step": 18790 |
| }, |
| { |
| "epoch": 0.47, |
| "grad_norm": 8.588834762573242, |
| "learning_rate": 7.5932203389830515e-06, |
| "loss": 1.3544, |
| "step": 18800 |
| }, |
| { |
| "epoch": 0.47, |
| "grad_norm": 2.5683794021606445, |
| "learning_rate": 7.586440677966103e-06, |
| "loss": 1.4127, |
| "step": 18810 |
| }, |
| { |
| "epoch": 0.47, |
| "grad_norm": 2.101924180984497, |
| "learning_rate": 7.5796610169491534e-06, |
| "loss": 1.229, |
| "step": 18820 |
| }, |
| { |
| "epoch": 0.47, |
| "grad_norm": 5.389444351196289, |
| "learning_rate": 7.572881355932205e-06, |
| "loss": 1.3112, |
| "step": 18830 |
| }, |
| { |
| "epoch": 0.47, |
| "grad_norm": 3.154507875442505, |
| "learning_rate": 7.5661016949152545e-06, |
| "loss": 1.2603, |
| "step": 18840 |
| }, |
| { |
| "epoch": 0.47, |
| "grad_norm": 4.938506126403809, |
| "learning_rate": 7.559322033898305e-06, |
| "loss": 1.5423, |
| "step": 18850 |
| }, |
| { |
| "epoch": 0.47, |
| "grad_norm": 5.838810443878174, |
| "learning_rate": 7.5525423728813565e-06, |
| "loss": 1.0651, |
| "step": 18860 |
| }, |
| { |
| "epoch": 0.47, |
| "grad_norm": 7.946393013000488, |
| "learning_rate": 7.545762711864407e-06, |
| "loss": 1.4706, |
| "step": 18870 |
| }, |
| { |
| "epoch": 0.47, |
| "grad_norm": 12.6395902633667, |
| "learning_rate": 7.5389830508474584e-06, |
| "loss": 1.4036, |
| "step": 18880 |
| }, |
| { |
| "epoch": 0.47, |
| "grad_norm": 4.769916534423828, |
| "learning_rate": 7.532203389830509e-06, |
| "loss": 1.33, |
| "step": 18890 |
| }, |
| { |
| "epoch": 0.47, |
| "grad_norm": 5.928445816040039, |
| "learning_rate": 7.52542372881356e-06, |
| "loss": 1.4844, |
| "step": 18900 |
| }, |
| { |
| "epoch": 0.47, |
| "grad_norm": 4.121998310089111, |
| "learning_rate": 7.518644067796611e-06, |
| "loss": 1.2672, |
| "step": 18910 |
| }, |
| { |
| "epoch": 0.47, |
| "grad_norm": 1.256914496421814, |
| "learning_rate": 7.511864406779662e-06, |
| "loss": 1.337, |
| "step": 18920 |
| }, |
| { |
| "epoch": 0.47, |
| "grad_norm": 9.951703071594238, |
| "learning_rate": 7.505084745762713e-06, |
| "loss": 1.3359, |
| "step": 18930 |
| }, |
| { |
| "epoch": 0.47, |
| "grad_norm": 5.373715400695801, |
| "learning_rate": 7.498305084745763e-06, |
| "loss": 1.2437, |
| "step": 18940 |
| }, |
| { |
| "epoch": 0.47, |
| "grad_norm": 7.120504379272461, |
| "learning_rate": 7.491525423728814e-06, |
| "loss": 1.1883, |
| "step": 18950 |
| }, |
| { |
| "epoch": 0.47, |
| "grad_norm": 7.661159992218018, |
| "learning_rate": 7.4847457627118646e-06, |
| "loss": 1.2648, |
| "step": 18960 |
| }, |
| { |
| "epoch": 0.47, |
| "grad_norm": 5.859286308288574, |
| "learning_rate": 7.477966101694916e-06, |
| "loss": 1.3106, |
| "step": 18970 |
| }, |
| { |
| "epoch": 0.47, |
| "grad_norm": 7.276169300079346, |
| "learning_rate": 7.4711864406779665e-06, |
| "loss": 1.393, |
| "step": 18980 |
| }, |
| { |
| "epoch": 0.47, |
| "grad_norm": 6.31447172164917, |
| "learning_rate": 7.464406779661018e-06, |
| "loss": 1.4051, |
| "step": 18990 |
| }, |
| { |
| "epoch": 0.47, |
| "grad_norm": 8.36728572845459, |
| "learning_rate": 7.4576271186440685e-06, |
| "loss": 1.4003, |
| "step": 19000 |
| }, |
| { |
| "epoch": 0.47, |
| "eval_loss": 1.347457766532898, |
| "eval_runtime": 66.1461, |
| "eval_samples_per_second": 15.118, |
| "eval_steps_per_second": 15.118, |
| "step": 19000 |
| }, |
| { |
| "epoch": 0.48, |
| "grad_norm": 3.911505937576294, |
| "learning_rate": 7.45084745762712e-06, |
| "loss": 1.2646, |
| "step": 19010 |
| }, |
| { |
| "epoch": 0.48, |
| "grad_norm": 3.6765291690826416, |
| "learning_rate": 7.4440677966101704e-06, |
| "loss": 1.3827, |
| "step": 19020 |
| }, |
| { |
| "epoch": 0.48, |
| "grad_norm": 3.899599313735962, |
| "learning_rate": 7.437288135593221e-06, |
| "loss": 1.4144, |
| "step": 19030 |
| }, |
| { |
| "epoch": 0.48, |
| "grad_norm": 6.436791896820068, |
| "learning_rate": 7.430508474576272e-06, |
| "loss": 1.2087, |
| "step": 19040 |
| }, |
| { |
| "epoch": 0.48, |
| "grad_norm": 4.624211311340332, |
| "learning_rate": 7.423728813559322e-06, |
| "loss": 1.3824, |
| "step": 19050 |
| }, |
| { |
| "epoch": 0.48, |
| "grad_norm": 6.657593727111816, |
| "learning_rate": 7.4169491525423735e-06, |
| "loss": 1.3856, |
| "step": 19060 |
| }, |
| { |
| "epoch": 0.48, |
| "grad_norm": 7.132912635803223, |
| "learning_rate": 7.410169491525424e-06, |
| "loss": 1.4319, |
| "step": 19070 |
| }, |
| { |
| "epoch": 0.48, |
| "grad_norm": 2.8681843280792236, |
| "learning_rate": 7.4033898305084754e-06, |
| "loss": 1.419, |
| "step": 19080 |
| }, |
| { |
| "epoch": 0.48, |
| "grad_norm": 5.819919109344482, |
| "learning_rate": 7.396610169491526e-06, |
| "loss": 1.3988, |
| "step": 19090 |
| }, |
| { |
| "epoch": 0.48, |
| "grad_norm": 6.482515811920166, |
| "learning_rate": 7.3898305084745766e-06, |
| "loss": 1.3366, |
| "step": 19100 |
| }, |
| { |
| "epoch": 0.48, |
| "grad_norm": 10.341208457946777, |
| "learning_rate": 7.383050847457628e-06, |
| "loss": 1.3502, |
| "step": 19110 |
| }, |
| { |
| "epoch": 0.48, |
| "grad_norm": 2.820133686065674, |
| "learning_rate": 7.3762711864406785e-06, |
| "loss": 1.415, |
| "step": 19120 |
| }, |
| { |
| "epoch": 0.48, |
| "grad_norm": 7.112204551696777, |
| "learning_rate": 7.36949152542373e-06, |
| "loss": 1.2609, |
| "step": 19130 |
| }, |
| { |
| "epoch": 0.48, |
| "grad_norm": 6.172835826873779, |
| "learning_rate": 7.3627118644067805e-06, |
| "loss": 1.2456, |
| "step": 19140 |
| }, |
| { |
| "epoch": 0.48, |
| "grad_norm": 2.9154605865478516, |
| "learning_rate": 7.355932203389831e-06, |
| "loss": 1.3332, |
| "step": 19150 |
| }, |
| { |
| "epoch": 0.48, |
| "grad_norm": 6.8542256355285645, |
| "learning_rate": 7.3491525423728816e-06, |
| "loss": 1.2728, |
| "step": 19160 |
| }, |
| { |
| "epoch": 0.48, |
| "grad_norm": 7.636801719665527, |
| "learning_rate": 7.342372881355932e-06, |
| "loss": 1.3744, |
| "step": 19170 |
| }, |
| { |
| "epoch": 0.48, |
| "grad_norm": 7.556679725646973, |
| "learning_rate": 7.3355932203389835e-06, |
| "loss": 1.446, |
| "step": 19180 |
| }, |
| { |
| "epoch": 0.48, |
| "grad_norm": 22.831199645996094, |
| "learning_rate": 7.328813559322034e-06, |
| "loss": 1.358, |
| "step": 19190 |
| }, |
| { |
| "epoch": 0.48, |
| "grad_norm": 10.184800148010254, |
| "learning_rate": 7.3220338983050855e-06, |
| "loss": 1.2379, |
| "step": 19200 |
| }, |
| { |
| "epoch": 0.48, |
| "grad_norm": 1.9062511920928955, |
| "learning_rate": 7.315254237288136e-06, |
| "loss": 1.3724, |
| "step": 19210 |
| }, |
| { |
| "epoch": 0.48, |
| "grad_norm": 9.429797172546387, |
| "learning_rate": 7.3084745762711874e-06, |
| "loss": 1.33, |
| "step": 19220 |
| }, |
| { |
| "epoch": 0.48, |
| "grad_norm": 3.129505157470703, |
| "learning_rate": 7.301694915254238e-06, |
| "loss": 1.1795, |
| "step": 19230 |
| }, |
| { |
| "epoch": 0.48, |
| "grad_norm": 6.839028835296631, |
| "learning_rate": 7.294915254237289e-06, |
| "loss": 1.2983, |
| "step": 19240 |
| }, |
| { |
| "epoch": 0.48, |
| "grad_norm": 15.552626609802246, |
| "learning_rate": 7.288135593220339e-06, |
| "loss": 1.4867, |
| "step": 19250 |
| }, |
| { |
| "epoch": 0.48, |
| "grad_norm": 3.4032084941864014, |
| "learning_rate": 7.28135593220339e-06, |
| "loss": 1.4256, |
| "step": 19260 |
| }, |
| { |
| "epoch": 0.48, |
| "grad_norm": 6.437047481536865, |
| "learning_rate": 7.274576271186441e-06, |
| "loss": 1.3215, |
| "step": 19270 |
| }, |
| { |
| "epoch": 0.48, |
| "grad_norm": 5.592834949493408, |
| "learning_rate": 7.267796610169492e-06, |
| "loss": 1.3466, |
| "step": 19280 |
| }, |
| { |
| "epoch": 0.48, |
| "grad_norm": 10.394824028015137, |
| "learning_rate": 7.261016949152543e-06, |
| "loss": 1.3806, |
| "step": 19290 |
| }, |
| { |
| "epoch": 0.48, |
| "grad_norm": 6.921483039855957, |
| "learning_rate": 7.2542372881355936e-06, |
| "loss": 1.5624, |
| "step": 19300 |
| }, |
| { |
| "epoch": 0.48, |
| "grad_norm": 3.078634262084961, |
| "learning_rate": 7.247457627118645e-06, |
| "loss": 1.1216, |
| "step": 19310 |
| }, |
| { |
| "epoch": 0.48, |
| "grad_norm": 6.003988742828369, |
| "learning_rate": 7.2406779661016955e-06, |
| "loss": 1.2759, |
| "step": 19320 |
| }, |
| { |
| "epoch": 0.48, |
| "grad_norm": 2.419410467147827, |
| "learning_rate": 7.233898305084747e-06, |
| "loss": 1.2067, |
| "step": 19330 |
| }, |
| { |
| "epoch": 0.48, |
| "grad_norm": 3.014220952987671, |
| "learning_rate": 7.2271186440677975e-06, |
| "loss": 1.1819, |
| "step": 19340 |
| }, |
| { |
| "epoch": 0.48, |
| "grad_norm": 5.271297454833984, |
| "learning_rate": 7.220338983050849e-06, |
| "loss": 1.4742, |
| "step": 19350 |
| }, |
| { |
| "epoch": 0.48, |
| "grad_norm": 2.456838607788086, |
| "learning_rate": 7.2135593220338986e-06, |
| "loss": 1.4656, |
| "step": 19360 |
| }, |
| { |
| "epoch": 0.48, |
| "grad_norm": 5.350526332855225, |
| "learning_rate": 7.206779661016949e-06, |
| "loss": 1.1238, |
| "step": 19370 |
| }, |
| { |
| "epoch": 0.48, |
| "grad_norm": 5.42751932144165, |
| "learning_rate": 7.2000000000000005e-06, |
| "loss": 1.4704, |
| "step": 19380 |
| }, |
| { |
| "epoch": 0.48, |
| "grad_norm": 3.97116756439209, |
| "learning_rate": 7.193220338983051e-06, |
| "loss": 1.4455, |
| "step": 19390 |
| }, |
| { |
| "epoch": 0.48, |
| "grad_norm": 5.157166481018066, |
| "learning_rate": 7.1864406779661025e-06, |
| "loss": 1.3725, |
| "step": 19400 |
| }, |
| { |
| "epoch": 0.49, |
| "grad_norm": 4.2230072021484375, |
| "learning_rate": 7.179661016949153e-06, |
| "loss": 1.3768, |
| "step": 19410 |
| }, |
| { |
| "epoch": 0.49, |
| "grad_norm": 5.513181686401367, |
| "learning_rate": 7.1728813559322044e-06, |
| "loss": 1.3018, |
| "step": 19420 |
| }, |
| { |
| "epoch": 0.49, |
| "grad_norm": 7.353570461273193, |
| "learning_rate": 7.166101694915255e-06, |
| "loss": 1.513, |
| "step": 19430 |
| }, |
| { |
| "epoch": 0.49, |
| "grad_norm": 6.263894081115723, |
| "learning_rate": 7.159322033898306e-06, |
| "loss": 1.4521, |
| "step": 19440 |
| }, |
| { |
| "epoch": 0.49, |
| "grad_norm": 13.368189811706543, |
| "learning_rate": 7.152542372881357e-06, |
| "loss": 1.3158, |
| "step": 19450 |
| }, |
| { |
| "epoch": 0.49, |
| "grad_norm": 4.644199371337891, |
| "learning_rate": 7.145762711864407e-06, |
| "loss": 1.4184, |
| "step": 19460 |
| }, |
| { |
| "epoch": 0.49, |
| "grad_norm": 5.206189155578613, |
| "learning_rate": 7.138983050847458e-06, |
| "loss": 1.3848, |
| "step": 19470 |
| }, |
| { |
| "epoch": 0.49, |
| "grad_norm": 7.3628153800964355, |
| "learning_rate": 7.132203389830509e-06, |
| "loss": 1.2512, |
| "step": 19480 |
| }, |
| { |
| "epoch": 0.49, |
| "grad_norm": 7.519322395324707, |
| "learning_rate": 7.12542372881356e-06, |
| "loss": 1.261, |
| "step": 19490 |
| }, |
| { |
| "epoch": 0.49, |
| "grad_norm": 1.5350415706634521, |
| "learning_rate": 7.1186440677966106e-06, |
| "loss": 1.2517, |
| "step": 19500 |
| }, |
| { |
| "epoch": 0.49, |
| "eval_loss": 1.3194345235824585, |
| "eval_runtime": 66.1305, |
| "eval_samples_per_second": 15.122, |
| "eval_steps_per_second": 15.122, |
| "step": 19500 |
| }, |
| { |
| "epoch": 0.49, |
| "grad_norm": 3.0648765563964844, |
| "learning_rate": 7.111864406779662e-06, |
| "loss": 1.398, |
| "step": 19510 |
| }, |
| { |
| "epoch": 0.49, |
| "grad_norm": 5.124095439910889, |
| "learning_rate": 7.1050847457627125e-06, |
| "loss": 1.404, |
| "step": 19520 |
| }, |
| { |
| "epoch": 0.49, |
| "grad_norm": 2.8961544036865234, |
| "learning_rate": 7.098305084745764e-06, |
| "loss": 1.2651, |
| "step": 19530 |
| }, |
| { |
| "epoch": 0.49, |
| "grad_norm": 3.0641605854034424, |
| "learning_rate": 7.0915254237288145e-06, |
| "loss": 1.3587, |
| "step": 19540 |
| }, |
| { |
| "epoch": 0.49, |
| "grad_norm": 3.618454933166504, |
| "learning_rate": 7.084745762711865e-06, |
| "loss": 1.3533, |
| "step": 19550 |
| }, |
| { |
| "epoch": 0.49, |
| "grad_norm": 3.3083536624908447, |
| "learning_rate": 7.077966101694916e-06, |
| "loss": 1.2859, |
| "step": 19560 |
| }, |
| { |
| "epoch": 0.49, |
| "grad_norm": 1.260048508644104, |
| "learning_rate": 7.071186440677966e-06, |
| "loss": 1.2224, |
| "step": 19570 |
| }, |
| { |
| "epoch": 0.49, |
| "grad_norm": 5.410586833953857, |
| "learning_rate": 7.0644067796610175e-06, |
| "loss": 1.2916, |
| "step": 19580 |
| }, |
| { |
| "epoch": 0.49, |
| "grad_norm": 1.9295905828475952, |
| "learning_rate": 7.057627118644068e-06, |
| "loss": 1.3004, |
| "step": 19590 |
| }, |
| { |
| "epoch": 0.49, |
| "grad_norm": 10.48406982421875, |
| "learning_rate": 7.0508474576271195e-06, |
| "loss": 1.3105, |
| "step": 19600 |
| }, |
| { |
| "epoch": 0.49, |
| "grad_norm": 4.99778413772583, |
| "learning_rate": 7.04406779661017e-06, |
| "loss": 1.2481, |
| "step": 19610 |
| }, |
| { |
| "epoch": 0.49, |
| "grad_norm": 5.476469993591309, |
| "learning_rate": 7.037288135593221e-06, |
| "loss": 1.345, |
| "step": 19620 |
| }, |
| { |
| "epoch": 0.49, |
| "grad_norm": 7.250665664672852, |
| "learning_rate": 7.030508474576272e-06, |
| "loss": 1.2993, |
| "step": 19630 |
| }, |
| { |
| "epoch": 0.49, |
| "grad_norm": 7.802820682525635, |
| "learning_rate": 7.0237288135593225e-06, |
| "loss": 1.3106, |
| "step": 19640 |
| }, |
| { |
| "epoch": 0.49, |
| "grad_norm": 2.5077409744262695, |
| "learning_rate": 7.016949152542374e-06, |
| "loss": 1.1298, |
| "step": 19650 |
| }, |
| { |
| "epoch": 0.49, |
| "grad_norm": 2.3012547492980957, |
| "learning_rate": 7.0101694915254245e-06, |
| "loss": 1.2944, |
| "step": 19660 |
| }, |
| { |
| "epoch": 0.49, |
| "grad_norm": 10.731738090515137, |
| "learning_rate": 7.003389830508475e-06, |
| "loss": 1.2545, |
| "step": 19670 |
| }, |
| { |
| "epoch": 0.49, |
| "grad_norm": 11.851224899291992, |
| "learning_rate": 6.996610169491526e-06, |
| "loss": 1.3364, |
| "step": 19680 |
| }, |
| { |
| "epoch": 0.49, |
| "grad_norm": 2.060750722885132, |
| "learning_rate": 6.989830508474576e-06, |
| "loss": 1.2909, |
| "step": 19690 |
| }, |
| { |
| "epoch": 0.49, |
| "grad_norm": 8.549054145812988, |
| "learning_rate": 6.9830508474576275e-06, |
| "loss": 1.405, |
| "step": 19700 |
| }, |
| { |
| "epoch": 0.49, |
| "grad_norm": 5.37472677230835, |
| "learning_rate": 6.976271186440678e-06, |
| "loss": 1.5615, |
| "step": 19710 |
| }, |
| { |
| "epoch": 0.49, |
| "grad_norm": 4.753462314605713, |
| "learning_rate": 6.9694915254237295e-06, |
| "loss": 1.362, |
| "step": 19720 |
| }, |
| { |
| "epoch": 0.49, |
| "grad_norm": 6.330374717712402, |
| "learning_rate": 6.96271186440678e-06, |
| "loss": 1.3563, |
| "step": 19730 |
| }, |
| { |
| "epoch": 0.49, |
| "grad_norm": 6.962733268737793, |
| "learning_rate": 6.9559322033898315e-06, |
| "loss": 1.3272, |
| "step": 19740 |
| }, |
| { |
| "epoch": 0.49, |
| "grad_norm": 13.372142791748047, |
| "learning_rate": 6.949152542372882e-06, |
| "loss": 1.4422, |
| "step": 19750 |
| }, |
| { |
| "epoch": 0.49, |
| "grad_norm": 4.561896800994873, |
| "learning_rate": 6.942372881355933e-06, |
| "loss": 1.3691, |
| "step": 19760 |
| }, |
| { |
| "epoch": 0.49, |
| "grad_norm": 2.2547922134399414, |
| "learning_rate": 6.935593220338983e-06, |
| "loss": 1.4905, |
| "step": 19770 |
| }, |
| { |
| "epoch": 0.49, |
| "grad_norm": 4.15011739730835, |
| "learning_rate": 6.928813559322034e-06, |
| "loss": 1.3899, |
| "step": 19780 |
| }, |
| { |
| "epoch": 0.49, |
| "grad_norm": 11.278037071228027, |
| "learning_rate": 6.922033898305085e-06, |
| "loss": 1.0803, |
| "step": 19790 |
| }, |
| { |
| "epoch": 0.49, |
| "grad_norm": 9.100043296813965, |
| "learning_rate": 6.915254237288136e-06, |
| "loss": 1.4593, |
| "step": 19800 |
| }, |
| { |
| "epoch": 0.5, |
| "grad_norm": 10.643383026123047, |
| "learning_rate": 6.908474576271187e-06, |
| "loss": 1.3571, |
| "step": 19810 |
| }, |
| { |
| "epoch": 0.5, |
| "grad_norm": 11.314017295837402, |
| "learning_rate": 6.901694915254238e-06, |
| "loss": 1.2376, |
| "step": 19820 |
| }, |
| { |
| "epoch": 0.5, |
| "grad_norm": 3.855220317840576, |
| "learning_rate": 6.894915254237289e-06, |
| "loss": 1.1095, |
| "step": 19830 |
| }, |
| { |
| "epoch": 0.5, |
| "grad_norm": 5.896849155426025, |
| "learning_rate": 6.8881355932203395e-06, |
| "loss": 1.2078, |
| "step": 19840 |
| }, |
| { |
| "epoch": 0.5, |
| "grad_norm": 2.1142632961273193, |
| "learning_rate": 6.881355932203391e-06, |
| "loss": 1.3017, |
| "step": 19850 |
| }, |
| { |
| "epoch": 0.5, |
| "grad_norm": 7.116094589233398, |
| "learning_rate": 6.8745762711864415e-06, |
| "loss": 1.217, |
| "step": 19860 |
| }, |
| { |
| "epoch": 0.5, |
| "grad_norm": 3.392282724380493, |
| "learning_rate": 6.867796610169493e-06, |
| "loss": 1.4027, |
| "step": 19870 |
| }, |
| { |
| "epoch": 0.5, |
| "grad_norm": 5.760110855102539, |
| "learning_rate": 6.861016949152543e-06, |
| "loss": 1.2888, |
| "step": 19880 |
| }, |
| { |
| "epoch": 0.5, |
| "grad_norm": 2.312904119491577, |
| "learning_rate": 6.854237288135593e-06, |
| "loss": 1.39, |
| "step": 19890 |
| }, |
| { |
| "epoch": 0.5, |
| "grad_norm": 9.748230934143066, |
| "learning_rate": 6.8474576271186445e-06, |
| "loss": 1.408, |
| "step": 19900 |
| }, |
| { |
| "epoch": 0.5, |
| "grad_norm": 8.029982566833496, |
| "learning_rate": 6.840677966101695e-06, |
| "loss": 1.2631, |
| "step": 19910 |
| }, |
| { |
| "epoch": 0.5, |
| "grad_norm": 5.408463954925537, |
| "learning_rate": 6.8338983050847465e-06, |
| "loss": 1.3545, |
| "step": 19920 |
| }, |
| { |
| "epoch": 0.5, |
| "grad_norm": 4.480403900146484, |
| "learning_rate": 6.827118644067797e-06, |
| "loss": 1.4549, |
| "step": 19930 |
| }, |
| { |
| "epoch": 0.5, |
| "grad_norm": 8.981225967407227, |
| "learning_rate": 6.8203389830508485e-06, |
| "loss": 1.3352, |
| "step": 19940 |
| }, |
| { |
| "epoch": 0.5, |
| "grad_norm": 1.9335066080093384, |
| "learning_rate": 6.813559322033899e-06, |
| "loss": 1.2825, |
| "step": 19950 |
| }, |
| { |
| "epoch": 0.5, |
| "grad_norm": 7.6206464767456055, |
| "learning_rate": 6.80677966101695e-06, |
| "loss": 1.4976, |
| "step": 19960 |
| }, |
| { |
| "epoch": 0.5, |
| "grad_norm": 14.687816619873047, |
| "learning_rate": 6.800000000000001e-06, |
| "loss": 1.4687, |
| "step": 19970 |
| }, |
| { |
| "epoch": 0.5, |
| "grad_norm": 9.034219741821289, |
| "learning_rate": 6.793220338983051e-06, |
| "loss": 1.4715, |
| "step": 19980 |
| }, |
| { |
| "epoch": 0.5, |
| "grad_norm": 8.120539665222168, |
| "learning_rate": 6.786440677966102e-06, |
| "loss": 1.4331, |
| "step": 19990 |
| }, |
| { |
| "epoch": 0.5, |
| "grad_norm": 6.594362258911133, |
| "learning_rate": 6.779661016949153e-06, |
| "loss": 1.4516, |
| "step": 20000 |
| }, |
| { |
| "epoch": 0.5, |
| "eval_loss": 1.348677158355713, |
| "eval_runtime": 66.1188, |
| "eval_samples_per_second": 15.124, |
| "eval_steps_per_second": 15.124, |
| "step": 20000 |
| }, |
| { |
| "epoch": 0.5, |
| "grad_norm": 2.159421920776367, |
| "learning_rate": 6.772881355932204e-06, |
| "loss": 1.4193, |
| "step": 20010 |
| }, |
| { |
| "epoch": 0.5, |
| "grad_norm": 7.60541296005249, |
| "learning_rate": 6.766101694915255e-06, |
| "loss": 1.4747, |
| "step": 20020 |
| }, |
| { |
| "epoch": 0.5, |
| "grad_norm": 2.0991897583007812, |
| "learning_rate": 6.759322033898306e-06, |
| "loss": 1.2775, |
| "step": 20030 |
| }, |
| { |
| "epoch": 0.5, |
| "grad_norm": 7.756378173828125, |
| "learning_rate": 6.7525423728813565e-06, |
| "loss": 1.343, |
| "step": 20040 |
| }, |
| { |
| "epoch": 0.5, |
| "grad_norm": 7.062932014465332, |
| "learning_rate": 6.745762711864408e-06, |
| "loss": 1.5168, |
| "step": 20050 |
| }, |
| { |
| "epoch": 0.5, |
| "grad_norm": 6.689007759094238, |
| "learning_rate": 6.7389830508474585e-06, |
| "loss": 1.2894, |
| "step": 20060 |
| }, |
| { |
| "epoch": 0.5, |
| "grad_norm": 5.31237268447876, |
| "learning_rate": 6.73220338983051e-06, |
| "loss": 1.3382, |
| "step": 20070 |
| }, |
| { |
| "epoch": 0.5, |
| "grad_norm": 3.6598782539367676, |
| "learning_rate": 6.7254237288135604e-06, |
| "loss": 1.3839, |
| "step": 20080 |
| }, |
| { |
| "epoch": 0.5, |
| "grad_norm": 3.3091630935668945, |
| "learning_rate": 6.71864406779661e-06, |
| "loss": 1.3654, |
| "step": 20090 |
| }, |
| { |
| "epoch": 0.5, |
| "grad_norm": 2.1399312019348145, |
| "learning_rate": 6.7118644067796615e-06, |
| "loss": 1.5429, |
| "step": 20100 |
| }, |
| { |
| "epoch": 0.5, |
| "grad_norm": 4.380695819854736, |
| "learning_rate": 6.705084745762712e-06, |
| "loss": 1.4629, |
| "step": 20110 |
| }, |
| { |
| "epoch": 0.5, |
| "grad_norm": 9.330241203308105, |
| "learning_rate": 6.6983050847457635e-06, |
| "loss": 1.3547, |
| "step": 20120 |
| }, |
| { |
| "epoch": 0.5, |
| "grad_norm": 5.803046226501465, |
| "learning_rate": 6.691525423728814e-06, |
| "loss": 1.3534, |
| "step": 20130 |
| }, |
| { |
| "epoch": 0.5, |
| "grad_norm": 4.967430591583252, |
| "learning_rate": 6.6847457627118655e-06, |
| "loss": 1.4965, |
| "step": 20140 |
| }, |
| { |
| "epoch": 0.5, |
| "grad_norm": 8.131784439086914, |
| "learning_rate": 6.677966101694916e-06, |
| "loss": 1.4012, |
| "step": 20150 |
| }, |
| { |
| "epoch": 0.5, |
| "grad_norm": 9.220160484313965, |
| "learning_rate": 6.6711864406779666e-06, |
| "loss": 1.3637, |
| "step": 20160 |
| }, |
| { |
| "epoch": 0.5, |
| "grad_norm": 6.833899974822998, |
| "learning_rate": 6.664406779661018e-06, |
| "loss": 1.3748, |
| "step": 20170 |
| }, |
| { |
| "epoch": 0.5, |
| "grad_norm": 13.869956970214844, |
| "learning_rate": 6.6576271186440685e-06, |
| "loss": 1.4474, |
| "step": 20180 |
| }, |
| { |
| "epoch": 0.5, |
| "grad_norm": 9.462739944458008, |
| "learning_rate": 6.650847457627119e-06, |
| "loss": 1.3893, |
| "step": 20190 |
| }, |
| { |
| "epoch": 0.51, |
| "grad_norm": 2.321714162826538, |
| "learning_rate": 6.64406779661017e-06, |
| "loss": 1.3836, |
| "step": 20200 |
| }, |
| { |
| "epoch": 0.51, |
| "grad_norm": 11.65925407409668, |
| "learning_rate": 6.637288135593221e-06, |
| "loss": 1.493, |
| "step": 20210 |
| }, |
| { |
| "epoch": 0.51, |
| "grad_norm": 6.808838367462158, |
| "learning_rate": 6.6305084745762716e-06, |
| "loss": 1.3632, |
| "step": 20220 |
| }, |
| { |
| "epoch": 0.51, |
| "grad_norm": 4.534874439239502, |
| "learning_rate": 6.623728813559322e-06, |
| "loss": 1.4303, |
| "step": 20230 |
| }, |
| { |
| "epoch": 0.51, |
| "grad_norm": 1.9837802648544312, |
| "learning_rate": 6.6169491525423735e-06, |
| "loss": 1.446, |
| "step": 20240 |
| }, |
| { |
| "epoch": 0.51, |
| "grad_norm": 7.779874324798584, |
| "learning_rate": 6.610169491525424e-06, |
| "loss": 1.4566, |
| "step": 20250 |
| }, |
| { |
| "epoch": 0.51, |
| "grad_norm": 5.729377746582031, |
| "learning_rate": 6.6033898305084755e-06, |
| "loss": 1.401, |
| "step": 20260 |
| }, |
| { |
| "epoch": 0.51, |
| "grad_norm": 12.633749008178711, |
| "learning_rate": 6.596610169491526e-06, |
| "loss": 1.2969, |
| "step": 20270 |
| }, |
| { |
| "epoch": 0.51, |
| "grad_norm": 2.7961552143096924, |
| "learning_rate": 6.5898305084745774e-06, |
| "loss": 1.4151, |
| "step": 20280 |
| }, |
| { |
| "epoch": 0.51, |
| "grad_norm": 8.461606979370117, |
| "learning_rate": 6.583050847457627e-06, |
| "loss": 1.2665, |
| "step": 20290 |
| }, |
| { |
| "epoch": 0.51, |
| "grad_norm": 10.325592041015625, |
| "learning_rate": 6.576271186440678e-06, |
| "loss": 1.3666, |
| "step": 20300 |
| }, |
| { |
| "epoch": 0.51, |
| "grad_norm": 3.0818538665771484, |
| "learning_rate": 6.569491525423729e-06, |
| "loss": 1.4307, |
| "step": 20310 |
| }, |
| { |
| "epoch": 0.51, |
| "grad_norm": 7.449318885803223, |
| "learning_rate": 6.56271186440678e-06, |
| "loss": 1.4585, |
| "step": 20320 |
| }, |
| { |
| "epoch": 0.51, |
| "grad_norm": 4.6587042808532715, |
| "learning_rate": 6.555932203389831e-06, |
| "loss": 1.4053, |
| "step": 20330 |
| }, |
| { |
| "epoch": 0.51, |
| "grad_norm": 5.837299346923828, |
| "learning_rate": 6.549152542372882e-06, |
| "loss": 1.3456, |
| "step": 20340 |
| }, |
| { |
| "epoch": 0.51, |
| "grad_norm": 7.345305442810059, |
| "learning_rate": 6.542372881355933e-06, |
| "loss": 1.3618, |
| "step": 20350 |
| }, |
| { |
| "epoch": 0.51, |
| "grad_norm": 5.679592609405518, |
| "learning_rate": 6.5355932203389836e-06, |
| "loss": 1.3875, |
| "step": 20360 |
| }, |
| { |
| "epoch": 0.51, |
| "grad_norm": 1.062429428100586, |
| "learning_rate": 6.528813559322035e-06, |
| "loss": 1.3083, |
| "step": 20370 |
| }, |
| { |
| "epoch": 0.51, |
| "grad_norm": 5.2157769203186035, |
| "learning_rate": 6.5220338983050855e-06, |
| "loss": 1.3582, |
| "step": 20380 |
| }, |
| { |
| "epoch": 0.51, |
| "grad_norm": 4.890625476837158, |
| "learning_rate": 6.515254237288137e-06, |
| "loss": 1.3184, |
| "step": 20390 |
| }, |
| { |
| "epoch": 0.51, |
| "grad_norm": 1.587312936782837, |
| "learning_rate": 6.508474576271187e-06, |
| "loss": 1.2191, |
| "step": 20400 |
| }, |
| { |
| "epoch": 0.51, |
| "grad_norm": 4.013046741485596, |
| "learning_rate": 6.501694915254237e-06, |
| "loss": 1.3331, |
| "step": 20410 |
| }, |
| { |
| "epoch": 0.51, |
| "grad_norm": 4.076744556427002, |
| "learning_rate": 6.4949152542372886e-06, |
| "loss": 1.4351, |
| "step": 20420 |
| }, |
| { |
| "epoch": 0.51, |
| "grad_norm": 6.493119239807129, |
| "learning_rate": 6.488135593220339e-06, |
| "loss": 1.3519, |
| "step": 20430 |
| }, |
| { |
| "epoch": 0.51, |
| "grad_norm": 6.127922058105469, |
| "learning_rate": 6.4813559322033905e-06, |
| "loss": 1.4255, |
| "step": 20440 |
| }, |
| { |
| "epoch": 0.51, |
| "grad_norm": 4.424916744232178, |
| "learning_rate": 6.474576271186441e-06, |
| "loss": 1.44, |
| "step": 20450 |
| }, |
| { |
| "epoch": 0.51, |
| "grad_norm": 9.894845008850098, |
| "learning_rate": 6.4677966101694925e-06, |
| "loss": 1.2922, |
| "step": 20460 |
| }, |
| { |
| "epoch": 0.51, |
| "grad_norm": 5.174190521240234, |
| "learning_rate": 6.461016949152543e-06, |
| "loss": 1.439, |
| "step": 20470 |
| }, |
| { |
| "epoch": 0.51, |
| "grad_norm": 6.313706398010254, |
| "learning_rate": 6.4542372881355944e-06, |
| "loss": 1.3229, |
| "step": 20480 |
| }, |
| { |
| "epoch": 0.51, |
| "grad_norm": 3.240434169769287, |
| "learning_rate": 6.447457627118645e-06, |
| "loss": 1.3911, |
| "step": 20490 |
| }, |
| { |
| "epoch": 0.51, |
| "grad_norm": 7.466948986053467, |
| "learning_rate": 6.440677966101695e-06, |
| "loss": 1.4467, |
| "step": 20500 |
| }, |
| { |
| "epoch": 0.51, |
| "eval_loss": 1.3339221477508545, |
| "eval_runtime": 66.152, |
| "eval_samples_per_second": 15.117, |
| "eval_steps_per_second": 15.117, |
| "step": 20500 |
| }, |
| { |
| "epoch": 0.51, |
| "grad_norm": 32.95411682128906, |
| "learning_rate": 6.433898305084746e-06, |
| "loss": 1.2821, |
| "step": 20510 |
| }, |
| { |
| "epoch": 0.51, |
| "grad_norm": 11.001514434814453, |
| "learning_rate": 6.427118644067797e-06, |
| "loss": 1.3523, |
| "step": 20520 |
| }, |
| { |
| "epoch": 0.51, |
| "grad_norm": 1.6923043727874756, |
| "learning_rate": 6.420338983050848e-06, |
| "loss": 1.3424, |
| "step": 20530 |
| }, |
| { |
| "epoch": 0.51, |
| "grad_norm": 6.267253398895264, |
| "learning_rate": 6.413559322033899e-06, |
| "loss": 1.2163, |
| "step": 20540 |
| }, |
| { |
| "epoch": 0.51, |
| "grad_norm": 15.797646522521973, |
| "learning_rate": 6.40677966101695e-06, |
| "loss": 1.2104, |
| "step": 20550 |
| }, |
| { |
| "epoch": 0.51, |
| "grad_norm": 3.1398627758026123, |
| "learning_rate": 6.4000000000000006e-06, |
| "loss": 1.2264, |
| "step": 20560 |
| }, |
| { |
| "epoch": 0.51, |
| "grad_norm": 1.7001848220825195, |
| "learning_rate": 6.393220338983052e-06, |
| "loss": 1.345, |
| "step": 20570 |
| }, |
| { |
| "epoch": 0.51, |
| "grad_norm": 6.551231861114502, |
| "learning_rate": 6.3864406779661025e-06, |
| "loss": 1.295, |
| "step": 20580 |
| }, |
| { |
| "epoch": 0.51, |
| "grad_norm": 2.501774787902832, |
| "learning_rate": 6.379661016949154e-06, |
| "loss": 1.2727, |
| "step": 20590 |
| }, |
| { |
| "epoch": 0.52, |
| "grad_norm": 5.2971906661987305, |
| "learning_rate": 6.372881355932204e-06, |
| "loss": 1.1617, |
| "step": 20600 |
| }, |
| { |
| "epoch": 0.52, |
| "grad_norm": 8.737128257751465, |
| "learning_rate": 6.366101694915254e-06, |
| "loss": 1.3792, |
| "step": 20610 |
| }, |
| { |
| "epoch": 0.52, |
| "grad_norm": 1.8445500135421753, |
| "learning_rate": 6.3593220338983056e-06, |
| "loss": 1.2818, |
| "step": 20620 |
| }, |
| { |
| "epoch": 0.52, |
| "grad_norm": 6.586302280426025, |
| "learning_rate": 6.352542372881356e-06, |
| "loss": 1.471, |
| "step": 20630 |
| }, |
| { |
| "epoch": 0.52, |
| "grad_norm": 2.3581125736236572, |
| "learning_rate": 6.3457627118644075e-06, |
| "loss": 1.26, |
| "step": 20640 |
| }, |
| { |
| "epoch": 0.52, |
| "grad_norm": 3.006883382797241, |
| "learning_rate": 6.338983050847458e-06, |
| "loss": 1.4312, |
| "step": 20650 |
| }, |
| { |
| "epoch": 0.52, |
| "grad_norm": 1.8936930894851685, |
| "learning_rate": 6.3322033898305095e-06, |
| "loss": 1.3705, |
| "step": 20660 |
| }, |
| { |
| "epoch": 0.52, |
| "grad_norm": 5.22953462600708, |
| "learning_rate": 6.32542372881356e-06, |
| "loss": 1.3252, |
| "step": 20670 |
| }, |
| { |
| "epoch": 0.52, |
| "grad_norm": 7.669064044952393, |
| "learning_rate": 6.318644067796611e-06, |
| "loss": 1.4673, |
| "step": 20680 |
| }, |
| { |
| "epoch": 0.52, |
| "grad_norm": 3.7508702278137207, |
| "learning_rate": 6.311864406779662e-06, |
| "loss": 1.3465, |
| "step": 20690 |
| }, |
| { |
| "epoch": 0.52, |
| "grad_norm": 7.959824085235596, |
| "learning_rate": 6.3050847457627125e-06, |
| "loss": 1.4148, |
| "step": 20700 |
| }, |
| { |
| "epoch": 0.52, |
| "grad_norm": 8.888174057006836, |
| "learning_rate": 6.298305084745763e-06, |
| "loss": 1.4874, |
| "step": 20710 |
| }, |
| { |
| "epoch": 0.52, |
| "grad_norm": 11.385110855102539, |
| "learning_rate": 6.291525423728814e-06, |
| "loss": 1.3519, |
| "step": 20720 |
| }, |
| { |
| "epoch": 0.52, |
| "grad_norm": 11.636566162109375, |
| "learning_rate": 6.284745762711865e-06, |
| "loss": 1.3237, |
| "step": 20730 |
| }, |
| { |
| "epoch": 0.52, |
| "grad_norm": 2.6630842685699463, |
| "learning_rate": 6.277966101694916e-06, |
| "loss": 1.5615, |
| "step": 20740 |
| }, |
| { |
| "epoch": 0.52, |
| "grad_norm": 15.339405059814453, |
| "learning_rate": 6.271186440677966e-06, |
| "loss": 1.0971, |
| "step": 20750 |
| }, |
| { |
| "epoch": 0.52, |
| "grad_norm": 5.97685432434082, |
| "learning_rate": 6.2644067796610176e-06, |
| "loss": 1.2582, |
| "step": 20760 |
| }, |
| { |
| "epoch": 0.52, |
| "grad_norm": 5.218130111694336, |
| "learning_rate": 6.257627118644068e-06, |
| "loss": 1.3272, |
| "step": 20770 |
| }, |
| { |
| "epoch": 0.52, |
| "grad_norm": 8.943811416625977, |
| "learning_rate": 6.2508474576271195e-06, |
| "loss": 1.4001, |
| "step": 20780 |
| }, |
| { |
| "epoch": 0.52, |
| "grad_norm": 3.288783550262451, |
| "learning_rate": 6.24406779661017e-06, |
| "loss": 1.3729, |
| "step": 20790 |
| }, |
| { |
| "epoch": 0.52, |
| "grad_norm": 2.7321832180023193, |
| "learning_rate": 6.2372881355932215e-06, |
| "loss": 1.4365, |
| "step": 20800 |
| }, |
| { |
| "epoch": 0.52, |
| "grad_norm": 7.656303405761719, |
| "learning_rate": 6.230508474576271e-06, |
| "loss": 1.3646, |
| "step": 20810 |
| }, |
| { |
| "epoch": 0.52, |
| "grad_norm": 3.2849278450012207, |
| "learning_rate": 6.223728813559322e-06, |
| "loss": 1.0755, |
| "step": 20820 |
| }, |
| { |
| "epoch": 0.52, |
| "grad_norm": 3.6145408153533936, |
| "learning_rate": 6.216949152542373e-06, |
| "loss": 1.2836, |
| "step": 20830 |
| }, |
| { |
| "epoch": 0.52, |
| "grad_norm": 6.668073654174805, |
| "learning_rate": 6.210169491525424e-06, |
| "loss": 1.3706, |
| "step": 20840 |
| }, |
| { |
| "epoch": 0.52, |
| "grad_norm": 10.408007621765137, |
| "learning_rate": 6.203389830508475e-06, |
| "loss": 1.5154, |
| "step": 20850 |
| }, |
| { |
| "epoch": 0.52, |
| "grad_norm": 2.708711862564087, |
| "learning_rate": 6.196610169491526e-06, |
| "loss": 1.3841, |
| "step": 20860 |
| }, |
| { |
| "epoch": 0.52, |
| "grad_norm": 1.7991631031036377, |
| "learning_rate": 6.189830508474577e-06, |
| "loss": 1.261, |
| "step": 20870 |
| }, |
| { |
| "epoch": 0.52, |
| "grad_norm": 6.492557048797607, |
| "learning_rate": 6.183050847457628e-06, |
| "loss": 1.469, |
| "step": 20880 |
| }, |
| { |
| "epoch": 0.52, |
| "grad_norm": 3.5670902729034424, |
| "learning_rate": 6.176271186440679e-06, |
| "loss": 1.4484, |
| "step": 20890 |
| }, |
| { |
| "epoch": 0.52, |
| "grad_norm": 3.4511330127716064, |
| "learning_rate": 6.1694915254237295e-06, |
| "loss": 1.3566, |
| "step": 20900 |
| }, |
| { |
| "epoch": 0.52, |
| "grad_norm": 7.958609580993652, |
| "learning_rate": 6.162711864406781e-06, |
| "loss": 1.3272, |
| "step": 20910 |
| }, |
| { |
| "epoch": 0.52, |
| "grad_norm": 6.594333648681641, |
| "learning_rate": 6.155932203389831e-06, |
| "loss": 1.3926, |
| "step": 20920 |
| }, |
| { |
| "epoch": 0.52, |
| "grad_norm": 7.329288005828857, |
| "learning_rate": 6.149152542372881e-06, |
| "loss": 1.4948, |
| "step": 20930 |
| }, |
| { |
| "epoch": 0.52, |
| "grad_norm": 10.120712280273438, |
| "learning_rate": 6.142372881355933e-06, |
| "loss": 1.5394, |
| "step": 20940 |
| }, |
| { |
| "epoch": 0.52, |
| "grad_norm": 4.3365983963012695, |
| "learning_rate": 6.135593220338983e-06, |
| "loss": 1.2156, |
| "step": 20950 |
| }, |
| { |
| "epoch": 0.52, |
| "grad_norm": 2.0522942543029785, |
| "learning_rate": 6.1288135593220346e-06, |
| "loss": 1.3206, |
| "step": 20960 |
| }, |
| { |
| "epoch": 0.52, |
| "grad_norm": 5.730597496032715, |
| "learning_rate": 6.122033898305085e-06, |
| "loss": 1.4396, |
| "step": 20970 |
| }, |
| { |
| "epoch": 0.52, |
| "grad_norm": 13.125938415527344, |
| "learning_rate": 6.1152542372881365e-06, |
| "loss": 1.1969, |
| "step": 20980 |
| }, |
| { |
| "epoch": 0.52, |
| "grad_norm": 3.8410260677337646, |
| "learning_rate": 6.108474576271187e-06, |
| "loss": 1.3842, |
| "step": 20990 |
| }, |
| { |
| "epoch": 0.53, |
| "grad_norm": 3.415696144104004, |
| "learning_rate": 6.1016949152542385e-06, |
| "loss": 1.3296, |
| "step": 21000 |
| }, |
| { |
| "epoch": 0.53, |
| "eval_loss": 1.344160556793213, |
| "eval_runtime": 66.1512, |
| "eval_samples_per_second": 15.117, |
| "eval_steps_per_second": 15.117, |
| "step": 21000 |
| }, |
| { |
| "epoch": 0.53, |
| "grad_norm": 9.95438003540039, |
| "learning_rate": 6.094915254237289e-06, |
| "loss": 1.3206, |
| "step": 21010 |
| }, |
| { |
| "epoch": 0.53, |
| "grad_norm": 10.410721778869629, |
| "learning_rate": 6.088135593220339e-06, |
| "loss": 1.4014, |
| "step": 21020 |
| }, |
| { |
| "epoch": 0.53, |
| "grad_norm": 9.159972190856934, |
| "learning_rate": 6.08135593220339e-06, |
| "loss": 1.3827, |
| "step": 21030 |
| }, |
| { |
| "epoch": 0.53, |
| "grad_norm": 4.856491565704346, |
| "learning_rate": 6.074576271186441e-06, |
| "loss": 1.4561, |
| "step": 21040 |
| }, |
| { |
| "epoch": 0.53, |
| "grad_norm": 5.863302707672119, |
| "learning_rate": 6.067796610169492e-06, |
| "loss": 1.3225, |
| "step": 21050 |
| }, |
| { |
| "epoch": 0.53, |
| "grad_norm": 2.968809127807617, |
| "learning_rate": 6.061016949152543e-06, |
| "loss": 1.3713, |
| "step": 21060 |
| }, |
| { |
| "epoch": 0.53, |
| "grad_norm": 4.19352388381958, |
| "learning_rate": 6.054237288135594e-06, |
| "loss": 1.3894, |
| "step": 21070 |
| }, |
| { |
| "epoch": 0.53, |
| "grad_norm": 4.841989040374756, |
| "learning_rate": 6.047457627118645e-06, |
| "loss": 1.3609, |
| "step": 21080 |
| }, |
| { |
| "epoch": 0.53, |
| "grad_norm": 8.693398475646973, |
| "learning_rate": 6.040677966101696e-06, |
| "loss": 1.4118, |
| "step": 21090 |
| }, |
| { |
| "epoch": 0.53, |
| "grad_norm": 10.902780532836914, |
| "learning_rate": 6.0338983050847465e-06, |
| "loss": 1.4529, |
| "step": 21100 |
| }, |
| { |
| "epoch": 0.53, |
| "grad_norm": 5.1729607582092285, |
| "learning_rate": 6.027118644067798e-06, |
| "loss": 1.3302, |
| "step": 21110 |
| }, |
| { |
| "epoch": 0.53, |
| "grad_norm": 1.9906094074249268, |
| "learning_rate": 6.020338983050848e-06, |
| "loss": 1.2294, |
| "step": 21120 |
| }, |
| { |
| "epoch": 0.53, |
| "grad_norm": 3.285928964614868, |
| "learning_rate": 6.013559322033898e-06, |
| "loss": 1.5006, |
| "step": 21130 |
| }, |
| { |
| "epoch": 0.53, |
| "grad_norm": 13.098603248596191, |
| "learning_rate": 6.00677966101695e-06, |
| "loss": 1.425, |
| "step": 21140 |
| }, |
| { |
| "epoch": 0.53, |
| "grad_norm": 4.391754150390625, |
| "learning_rate": 6e-06, |
| "loss": 1.2668, |
| "step": 21150 |
| }, |
| { |
| "epoch": 0.53, |
| "grad_norm": 8.409893035888672, |
| "learning_rate": 5.9932203389830516e-06, |
| "loss": 1.2061, |
| "step": 21160 |
| }, |
| { |
| "epoch": 0.53, |
| "grad_norm": 10.568397521972656, |
| "learning_rate": 5.986440677966102e-06, |
| "loss": 1.4191, |
| "step": 21170 |
| }, |
| { |
| "epoch": 0.53, |
| "grad_norm": 7.371358394622803, |
| "learning_rate": 5.9796610169491535e-06, |
| "loss": 1.2942, |
| "step": 21180 |
| }, |
| { |
| "epoch": 0.53, |
| "grad_norm": 2.6429450511932373, |
| "learning_rate": 5.972881355932204e-06, |
| "loss": 1.4216, |
| "step": 21190 |
| }, |
| { |
| "epoch": 0.53, |
| "grad_norm": 5.85234260559082, |
| "learning_rate": 5.9661016949152555e-06, |
| "loss": 1.3541, |
| "step": 21200 |
| }, |
| { |
| "epoch": 0.53, |
| "grad_norm": 3.2903425693511963, |
| "learning_rate": 5.959322033898306e-06, |
| "loss": 1.2969, |
| "step": 21210 |
| }, |
| { |
| "epoch": 0.53, |
| "grad_norm": 3.9652106761932373, |
| "learning_rate": 5.9525423728813566e-06, |
| "loss": 1.2853, |
| "step": 21220 |
| }, |
| { |
| "epoch": 0.53, |
| "grad_norm": 3.8703153133392334, |
| "learning_rate": 5.945762711864407e-06, |
| "loss": 1.3094, |
| "step": 21230 |
| }, |
| { |
| "epoch": 0.53, |
| "grad_norm": 8.018983840942383, |
| "learning_rate": 5.938983050847458e-06, |
| "loss": 1.4798, |
| "step": 21240 |
| }, |
| { |
| "epoch": 0.53, |
| "grad_norm": 2.752399206161499, |
| "learning_rate": 5.932203389830509e-06, |
| "loss": 1.4141, |
| "step": 21250 |
| }, |
| { |
| "epoch": 0.53, |
| "grad_norm": 5.574487686157227, |
| "learning_rate": 5.92542372881356e-06, |
| "loss": 1.2121, |
| "step": 21260 |
| }, |
| { |
| "epoch": 0.53, |
| "grad_norm": 10.352456092834473, |
| "learning_rate": 5.91864406779661e-06, |
| "loss": 1.3264, |
| "step": 21270 |
| }, |
| { |
| "epoch": 0.53, |
| "grad_norm": 4.182732582092285, |
| "learning_rate": 5.911864406779662e-06, |
| "loss": 1.4334, |
| "step": 21280 |
| }, |
| { |
| "epoch": 0.53, |
| "grad_norm": 5.310421943664551, |
| "learning_rate": 5.905084745762712e-06, |
| "loss": 1.5641, |
| "step": 21290 |
| }, |
| { |
| "epoch": 0.53, |
| "grad_norm": 5.489622592926025, |
| "learning_rate": 5.8983050847457635e-06, |
| "loss": 1.4699, |
| "step": 21300 |
| }, |
| { |
| "epoch": 0.53, |
| "grad_norm": 6.644534111022949, |
| "learning_rate": 5.891525423728814e-06, |
| "loss": 1.3273, |
| "step": 21310 |
| }, |
| { |
| "epoch": 0.53, |
| "grad_norm": 13.480459213256836, |
| "learning_rate": 5.8847457627118655e-06, |
| "loss": 1.2957, |
| "step": 21320 |
| }, |
| { |
| "epoch": 0.53, |
| "grad_norm": 9.34183120727539, |
| "learning_rate": 5.877966101694915e-06, |
| "loss": 1.4145, |
| "step": 21330 |
| }, |
| { |
| "epoch": 0.53, |
| "grad_norm": 4.792973518371582, |
| "learning_rate": 5.871186440677966e-06, |
| "loss": 1.4912, |
| "step": 21340 |
| }, |
| { |
| "epoch": 0.53, |
| "grad_norm": 4.098564147949219, |
| "learning_rate": 5.864406779661017e-06, |
| "loss": 1.1895, |
| "step": 21350 |
| }, |
| { |
| "epoch": 0.53, |
| "grad_norm": 7.23917293548584, |
| "learning_rate": 5.857627118644068e-06, |
| "loss": 1.3875, |
| "step": 21360 |
| }, |
| { |
| "epoch": 0.53, |
| "grad_norm": 4.86613130569458, |
| "learning_rate": 5.850847457627119e-06, |
| "loss": 1.4827, |
| "step": 21370 |
| }, |
| { |
| "epoch": 0.53, |
| "grad_norm": 7.629755973815918, |
| "learning_rate": 5.84406779661017e-06, |
| "loss": 1.2189, |
| "step": 21380 |
| }, |
| { |
| "epoch": 0.53, |
| "grad_norm": 3.80531907081604, |
| "learning_rate": 5.837288135593221e-06, |
| "loss": 1.4064, |
| "step": 21390 |
| }, |
| { |
| "epoch": 0.54, |
| "grad_norm": 3.432089328765869, |
| "learning_rate": 5.830508474576272e-06, |
| "loss": 1.1929, |
| "step": 21400 |
| }, |
| { |
| "epoch": 0.54, |
| "grad_norm": 9.766077995300293, |
| "learning_rate": 5.823728813559323e-06, |
| "loss": 1.4525, |
| "step": 21410 |
| }, |
| { |
| "epoch": 0.54, |
| "grad_norm": 4.745760440826416, |
| "learning_rate": 5.8169491525423736e-06, |
| "loss": 1.4924, |
| "step": 21420 |
| }, |
| { |
| "epoch": 0.54, |
| "grad_norm": 5.188168525695801, |
| "learning_rate": 5.810169491525425e-06, |
| "loss": 1.27, |
| "step": 21430 |
| }, |
| { |
| "epoch": 0.54, |
| "grad_norm": 4.576213359832764, |
| "learning_rate": 5.803389830508475e-06, |
| "loss": 1.3165, |
| "step": 21440 |
| }, |
| { |
| "epoch": 0.54, |
| "grad_norm": 10.540860176086426, |
| "learning_rate": 5.796610169491525e-06, |
| "loss": 1.2652, |
| "step": 21450 |
| }, |
| { |
| "epoch": 0.54, |
| "grad_norm": 4.210390567779541, |
| "learning_rate": 5.789830508474577e-06, |
| "loss": 1.4372, |
| "step": 21460 |
| }, |
| { |
| "epoch": 0.54, |
| "grad_norm": 8.733638763427734, |
| "learning_rate": 5.783050847457627e-06, |
| "loss": 1.2691, |
| "step": 21470 |
| }, |
| { |
| "epoch": 0.54, |
| "grad_norm": 4.997326374053955, |
| "learning_rate": 5.776271186440679e-06, |
| "loss": 1.1454, |
| "step": 21480 |
| }, |
| { |
| "epoch": 0.54, |
| "grad_norm": 10.108692169189453, |
| "learning_rate": 5.769491525423729e-06, |
| "loss": 1.2982, |
| "step": 21490 |
| }, |
| { |
| "epoch": 0.54, |
| "grad_norm": 13.393025398254395, |
| "learning_rate": 5.7627118644067805e-06, |
| "loss": 1.4995, |
| "step": 21500 |
| }, |
| { |
| "epoch": 0.54, |
| "eval_loss": 1.3565526008605957, |
| "eval_runtime": 66.1537, |
| "eval_samples_per_second": 15.116, |
| "eval_steps_per_second": 15.116, |
| "step": 21500 |
| }, |
| { |
| "epoch": 0.54, |
| "grad_norm": 5.675364017486572, |
| "learning_rate": 5.755932203389831e-06, |
| "loss": 1.2256, |
| "step": 21510 |
| }, |
| { |
| "epoch": 0.54, |
| "grad_norm": 2.069751262664795, |
| "learning_rate": 5.7491525423728825e-06, |
| "loss": 1.2355, |
| "step": 21520 |
| }, |
| { |
| "epoch": 0.54, |
| "grad_norm": 4.536093711853027, |
| "learning_rate": 5.742372881355933e-06, |
| "loss": 1.1957, |
| "step": 21530 |
| }, |
| { |
| "epoch": 0.54, |
| "grad_norm": 2.5274765491485596, |
| "learning_rate": 5.735593220338983e-06, |
| "loss": 1.1843, |
| "step": 21540 |
| }, |
| { |
| "epoch": 0.54, |
| "grad_norm": 4.418458461761475, |
| "learning_rate": 5.728813559322034e-06, |
| "loss": 1.1994, |
| "step": 21550 |
| }, |
| { |
| "epoch": 0.54, |
| "grad_norm": 13.488496780395508, |
| "learning_rate": 5.722033898305085e-06, |
| "loss": 1.4173, |
| "step": 21560 |
| }, |
| { |
| "epoch": 0.54, |
| "grad_norm": 5.223592758178711, |
| "learning_rate": 5.715254237288136e-06, |
| "loss": 1.3032, |
| "step": 21570 |
| }, |
| { |
| "epoch": 0.54, |
| "grad_norm": 5.894464492797852, |
| "learning_rate": 5.708474576271187e-06, |
| "loss": 1.3027, |
| "step": 21580 |
| }, |
| { |
| "epoch": 0.54, |
| "grad_norm": 6.945793151855469, |
| "learning_rate": 5.701694915254238e-06, |
| "loss": 1.3102, |
| "step": 21590 |
| }, |
| { |
| "epoch": 0.54, |
| "grad_norm": 7.386875629425049, |
| "learning_rate": 5.694915254237289e-06, |
| "loss": 1.3643, |
| "step": 21600 |
| }, |
| { |
| "epoch": 0.54, |
| "grad_norm": 3.5999162197113037, |
| "learning_rate": 5.68813559322034e-06, |
| "loss": 1.2456, |
| "step": 21610 |
| }, |
| { |
| "epoch": 0.54, |
| "grad_norm": 5.8258490562438965, |
| "learning_rate": 5.6813559322033906e-06, |
| "loss": 1.1922, |
| "step": 21620 |
| }, |
| { |
| "epoch": 0.54, |
| "grad_norm": 10.920169830322266, |
| "learning_rate": 5.674576271186442e-06, |
| "loss": 1.5312, |
| "step": 21630 |
| }, |
| { |
| "epoch": 0.54, |
| "grad_norm": 3.898834705352783, |
| "learning_rate": 5.667796610169492e-06, |
| "loss": 1.5012, |
| "step": 21640 |
| }, |
| { |
| "epoch": 0.54, |
| "grad_norm": 6.2130866050720215, |
| "learning_rate": 5.661016949152542e-06, |
| "loss": 1.543, |
| "step": 21650 |
| }, |
| { |
| "epoch": 0.54, |
| "grad_norm": 3.604144811630249, |
| "learning_rate": 5.654237288135594e-06, |
| "loss": 1.4586, |
| "step": 21660 |
| }, |
| { |
| "epoch": 0.54, |
| "grad_norm": 4.859696388244629, |
| "learning_rate": 5.647457627118644e-06, |
| "loss": 1.3056, |
| "step": 21670 |
| }, |
| { |
| "epoch": 0.54, |
| "grad_norm": 2.9596614837646484, |
| "learning_rate": 5.640677966101696e-06, |
| "loss": 1.3888, |
| "step": 21680 |
| }, |
| { |
| "epoch": 0.54, |
| "grad_norm": 5.489665985107422, |
| "learning_rate": 5.633898305084746e-06, |
| "loss": 1.2491, |
| "step": 21690 |
| }, |
| { |
| "epoch": 0.54, |
| "grad_norm": 5.618114948272705, |
| "learning_rate": 5.6271186440677975e-06, |
| "loss": 1.2746, |
| "step": 21700 |
| }, |
| { |
| "epoch": 0.54, |
| "grad_norm": 2.145024061203003, |
| "learning_rate": 5.620338983050848e-06, |
| "loss": 1.4036, |
| "step": 21710 |
| }, |
| { |
| "epoch": 0.54, |
| "grad_norm": 2.4870400428771973, |
| "learning_rate": 5.6135593220338995e-06, |
| "loss": 1.4355, |
| "step": 21720 |
| }, |
| { |
| "epoch": 0.54, |
| "grad_norm": 2.576144218444824, |
| "learning_rate": 5.60677966101695e-06, |
| "loss": 1.2988, |
| "step": 21730 |
| }, |
| { |
| "epoch": 0.54, |
| "grad_norm": 5.971595764160156, |
| "learning_rate": 5.600000000000001e-06, |
| "loss": 1.3201, |
| "step": 21740 |
| }, |
| { |
| "epoch": 0.54, |
| "grad_norm": 7.581085205078125, |
| "learning_rate": 5.593220338983051e-06, |
| "loss": 1.3358, |
| "step": 21750 |
| }, |
| { |
| "epoch": 0.54, |
| "grad_norm": 4.148537635803223, |
| "learning_rate": 5.586440677966102e-06, |
| "loss": 1.2464, |
| "step": 21760 |
| }, |
| { |
| "epoch": 0.54, |
| "grad_norm": 6.613537788391113, |
| "learning_rate": 5.579661016949153e-06, |
| "loss": 1.3156, |
| "step": 21770 |
| }, |
| { |
| "epoch": 0.54, |
| "grad_norm": 10.526129722595215, |
| "learning_rate": 5.572881355932204e-06, |
| "loss": 1.2483, |
| "step": 21780 |
| }, |
| { |
| "epoch": 0.54, |
| "grad_norm": 7.221047401428223, |
| "learning_rate": 5.566101694915255e-06, |
| "loss": 1.5878, |
| "step": 21790 |
| }, |
| { |
| "epoch": 0.55, |
| "grad_norm": 6.365529537200928, |
| "learning_rate": 5.559322033898306e-06, |
| "loss": 1.4117, |
| "step": 21800 |
| }, |
| { |
| "epoch": 0.55, |
| "grad_norm": 3.8305916786193848, |
| "learning_rate": 5.552542372881356e-06, |
| "loss": 1.1894, |
| "step": 21810 |
| }, |
| { |
| "epoch": 0.55, |
| "grad_norm": 3.672477960586548, |
| "learning_rate": 5.5457627118644076e-06, |
| "loss": 1.2199, |
| "step": 21820 |
| }, |
| { |
| "epoch": 0.55, |
| "grad_norm": 2.586512565612793, |
| "learning_rate": 5.538983050847458e-06, |
| "loss": 1.392, |
| "step": 21830 |
| }, |
| { |
| "epoch": 0.55, |
| "grad_norm": 4.2184624671936035, |
| "learning_rate": 5.5322033898305095e-06, |
| "loss": 1.3008, |
| "step": 21840 |
| }, |
| { |
| "epoch": 0.55, |
| "grad_norm": 7.834671974182129, |
| "learning_rate": 5.525423728813559e-06, |
| "loss": 1.1794, |
| "step": 21850 |
| }, |
| { |
| "epoch": 0.55, |
| "grad_norm": 3.5877692699432373, |
| "learning_rate": 5.518644067796611e-06, |
| "loss": 1.4339, |
| "step": 21860 |
| }, |
| { |
| "epoch": 0.55, |
| "grad_norm": 3.0174179077148438, |
| "learning_rate": 5.511864406779661e-06, |
| "loss": 1.2704, |
| "step": 21870 |
| }, |
| { |
| "epoch": 0.55, |
| "grad_norm": 2.9889588356018066, |
| "learning_rate": 5.505084745762712e-06, |
| "loss": 1.3588, |
| "step": 21880 |
| }, |
| { |
| "epoch": 0.55, |
| "grad_norm": 10.810959815979004, |
| "learning_rate": 5.498305084745763e-06, |
| "loss": 1.3539, |
| "step": 21890 |
| }, |
| { |
| "epoch": 0.55, |
| "grad_norm": 5.771850109100342, |
| "learning_rate": 5.491525423728814e-06, |
| "loss": 1.4278, |
| "step": 21900 |
| }, |
| { |
| "epoch": 0.55, |
| "grad_norm": 4.13969612121582, |
| "learning_rate": 5.484745762711865e-06, |
| "loss": 1.2798, |
| "step": 21910 |
| }, |
| { |
| "epoch": 0.55, |
| "grad_norm": 15.295929908752441, |
| "learning_rate": 5.477966101694916e-06, |
| "loss": 1.5213, |
| "step": 21920 |
| }, |
| { |
| "epoch": 0.55, |
| "grad_norm": 6.445948600769043, |
| "learning_rate": 5.471186440677967e-06, |
| "loss": 1.3498, |
| "step": 21930 |
| }, |
| { |
| "epoch": 0.55, |
| "grad_norm": 9.28097152709961, |
| "learning_rate": 5.464406779661018e-06, |
| "loss": 1.3046, |
| "step": 21940 |
| }, |
| { |
| "epoch": 0.55, |
| "grad_norm": 6.094447135925293, |
| "learning_rate": 5.457627118644067e-06, |
| "loss": 1.4336, |
| "step": 21950 |
| }, |
| { |
| "epoch": 0.55, |
| "grad_norm": 9.818504333496094, |
| "learning_rate": 5.450847457627119e-06, |
| "loss": 1.516, |
| "step": 21960 |
| }, |
| { |
| "epoch": 0.55, |
| "grad_norm": 11.956009864807129, |
| "learning_rate": 5.444067796610169e-06, |
| "loss": 1.3435, |
| "step": 21970 |
| }, |
| { |
| "epoch": 0.55, |
| "grad_norm": 7.544681072235107, |
| "learning_rate": 5.437288135593221e-06, |
| "loss": 1.4625, |
| "step": 21980 |
| }, |
| { |
| "epoch": 0.55, |
| "grad_norm": 3.394897222518921, |
| "learning_rate": 5.430508474576271e-06, |
| "loss": 1.4565, |
| "step": 21990 |
| }, |
| { |
| "epoch": 0.55, |
| "grad_norm": 5.722468852996826, |
| "learning_rate": 5.423728813559323e-06, |
| "loss": 1.2133, |
| "step": 22000 |
| }, |
| { |
| "epoch": 0.55, |
| "eval_loss": 1.3403723239898682, |
| "eval_runtime": 66.1471, |
| "eval_samples_per_second": 15.118, |
| "eval_steps_per_second": 15.118, |
| "step": 22000 |
| }, |
| { |
| "epoch": 0.55, |
| "grad_norm": 7.046730995178223, |
| "learning_rate": 5.416949152542373e-06, |
| "loss": 1.1353, |
| "step": 22010 |
| }, |
| { |
| "epoch": 0.55, |
| "grad_norm": 7.013365268707275, |
| "learning_rate": 5.4101694915254246e-06, |
| "loss": 1.4333, |
| "step": 22020 |
| }, |
| { |
| "epoch": 0.55, |
| "grad_norm": 3.9469892978668213, |
| "learning_rate": 5.403389830508475e-06, |
| "loss": 1.3601, |
| "step": 22030 |
| }, |
| { |
| "epoch": 0.55, |
| "grad_norm": 3.740983486175537, |
| "learning_rate": 5.3966101694915265e-06, |
| "loss": 1.0232, |
| "step": 22040 |
| }, |
| { |
| "epoch": 0.55, |
| "grad_norm": 4.604060649871826, |
| "learning_rate": 5.389830508474577e-06, |
| "loss": 1.4011, |
| "step": 22050 |
| }, |
| { |
| "epoch": 0.55, |
| "grad_norm": 0.5601249933242798, |
| "learning_rate": 5.383050847457627e-06, |
| "loss": 1.2068, |
| "step": 22060 |
| }, |
| { |
| "epoch": 0.55, |
| "grad_norm": 2.7781484127044678, |
| "learning_rate": 5.376271186440678e-06, |
| "loss": 1.3976, |
| "step": 22070 |
| }, |
| { |
| "epoch": 0.55, |
| "grad_norm": 10.417901992797852, |
| "learning_rate": 5.369491525423729e-06, |
| "loss": 1.2444, |
| "step": 22080 |
| }, |
| { |
| "epoch": 0.55, |
| "grad_norm": 8.263280868530273, |
| "learning_rate": 5.36271186440678e-06, |
| "loss": 1.0937, |
| "step": 22090 |
| }, |
| { |
| "epoch": 0.55, |
| "grad_norm": 6.128343105316162, |
| "learning_rate": 5.355932203389831e-06, |
| "loss": 1.2891, |
| "step": 22100 |
| }, |
| { |
| "epoch": 0.55, |
| "grad_norm": 4.099800109863281, |
| "learning_rate": 5.349152542372882e-06, |
| "loss": 1.117, |
| "step": 22110 |
| }, |
| { |
| "epoch": 0.55, |
| "grad_norm": 7.756937026977539, |
| "learning_rate": 5.342372881355933e-06, |
| "loss": 1.3112, |
| "step": 22120 |
| }, |
| { |
| "epoch": 0.55, |
| "grad_norm": 5.372160911560059, |
| "learning_rate": 5.335593220338984e-06, |
| "loss": 1.5276, |
| "step": 22130 |
| }, |
| { |
| "epoch": 0.55, |
| "grad_norm": 5.017634391784668, |
| "learning_rate": 5.328813559322035e-06, |
| "loss": 1.2825, |
| "step": 22140 |
| }, |
| { |
| "epoch": 0.55, |
| "grad_norm": 5.629271030426025, |
| "learning_rate": 5.322033898305086e-06, |
| "loss": 1.2428, |
| "step": 22150 |
| }, |
| { |
| "epoch": 0.55, |
| "grad_norm": 6.938544273376465, |
| "learning_rate": 5.315254237288136e-06, |
| "loss": 1.4563, |
| "step": 22160 |
| }, |
| { |
| "epoch": 0.55, |
| "grad_norm": 13.804441452026367, |
| "learning_rate": 5.308474576271186e-06, |
| "loss": 1.332, |
| "step": 22170 |
| }, |
| { |
| "epoch": 0.55, |
| "grad_norm": 10.347596168518066, |
| "learning_rate": 5.301694915254238e-06, |
| "loss": 1.3879, |
| "step": 22180 |
| }, |
| { |
| "epoch": 0.55, |
| "grad_norm": 2.613632917404175, |
| "learning_rate": 5.294915254237288e-06, |
| "loss": 1.3298, |
| "step": 22190 |
| }, |
| { |
| "epoch": 0.56, |
| "grad_norm": 14.637787818908691, |
| "learning_rate": 5.28813559322034e-06, |
| "loss": 1.3301, |
| "step": 22200 |
| }, |
| { |
| "epoch": 0.56, |
| "grad_norm": 2.5796003341674805, |
| "learning_rate": 5.28135593220339e-06, |
| "loss": 1.41, |
| "step": 22210 |
| }, |
| { |
| "epoch": 0.56, |
| "grad_norm": 5.326439380645752, |
| "learning_rate": 5.2745762711864416e-06, |
| "loss": 1.2483, |
| "step": 22220 |
| }, |
| { |
| "epoch": 0.56, |
| "grad_norm": 8.928110122680664, |
| "learning_rate": 5.267796610169492e-06, |
| "loss": 1.2566, |
| "step": 22230 |
| }, |
| { |
| "epoch": 0.56, |
| "grad_norm": 9.285192489624023, |
| "learning_rate": 5.2610169491525435e-06, |
| "loss": 1.2982, |
| "step": 22240 |
| }, |
| { |
| "epoch": 0.56, |
| "grad_norm": 2.101649045944214, |
| "learning_rate": 5.254237288135594e-06, |
| "loss": 1.3181, |
| "step": 22250 |
| }, |
| { |
| "epoch": 0.56, |
| "grad_norm": 5.57994270324707, |
| "learning_rate": 5.247457627118645e-06, |
| "loss": 1.3745, |
| "step": 22260 |
| }, |
| { |
| "epoch": 0.56, |
| "grad_norm": 12.0460844039917, |
| "learning_rate": 5.240677966101695e-06, |
| "loss": 1.202, |
| "step": 22270 |
| }, |
| { |
| "epoch": 0.56, |
| "grad_norm": 5.367649078369141, |
| "learning_rate": 5.233898305084746e-06, |
| "loss": 1.2969, |
| "step": 22280 |
| }, |
| { |
| "epoch": 0.56, |
| "grad_norm": 4.784932613372803, |
| "learning_rate": 5.227118644067797e-06, |
| "loss": 1.363, |
| "step": 22290 |
| }, |
| { |
| "epoch": 0.56, |
| "grad_norm": 8.103744506835938, |
| "learning_rate": 5.220338983050848e-06, |
| "loss": 1.4144, |
| "step": 22300 |
| }, |
| { |
| "epoch": 0.56, |
| "grad_norm": 5.561450958251953, |
| "learning_rate": 5.213559322033899e-06, |
| "loss": 1.3018, |
| "step": 22310 |
| }, |
| { |
| "epoch": 0.56, |
| "grad_norm": 8.294450759887695, |
| "learning_rate": 5.20677966101695e-06, |
| "loss": 1.3509, |
| "step": 22320 |
| }, |
| { |
| "epoch": 0.56, |
| "grad_norm": 8.498016357421875, |
| "learning_rate": 5.2e-06, |
| "loss": 1.4932, |
| "step": 22330 |
| }, |
| { |
| "epoch": 0.56, |
| "grad_norm": 3.3640925884246826, |
| "learning_rate": 5.193220338983052e-06, |
| "loss": 1.4413, |
| "step": 22340 |
| }, |
| { |
| "epoch": 0.56, |
| "grad_norm": 3.236924886703491, |
| "learning_rate": 5.186440677966102e-06, |
| "loss": 1.2535, |
| "step": 22350 |
| }, |
| { |
| "epoch": 0.56, |
| "grad_norm": 6.234226703643799, |
| "learning_rate": 5.1796610169491535e-06, |
| "loss": 1.1816, |
| "step": 22360 |
| }, |
| { |
| "epoch": 0.56, |
| "grad_norm": 6.9503703117370605, |
| "learning_rate": 5.172881355932203e-06, |
| "loss": 1.2022, |
| "step": 22370 |
| }, |
| { |
| "epoch": 0.56, |
| "grad_norm": 11.745012283325195, |
| "learning_rate": 5.166101694915255e-06, |
| "loss": 1.543, |
| "step": 22380 |
| }, |
| { |
| "epoch": 0.56, |
| "grad_norm": 8.977341651916504, |
| "learning_rate": 5.159322033898305e-06, |
| "loss": 1.4185, |
| "step": 22390 |
| }, |
| { |
| "epoch": 0.56, |
| "grad_norm": 2.515448570251465, |
| "learning_rate": 5.152542372881356e-06, |
| "loss": 1.2665, |
| "step": 22400 |
| }, |
| { |
| "epoch": 0.56, |
| "grad_norm": 9.47354507446289, |
| "learning_rate": 5.145762711864407e-06, |
| "loss": 1.3086, |
| "step": 22410 |
| }, |
| { |
| "epoch": 0.56, |
| "grad_norm": 6.550108432769775, |
| "learning_rate": 5.138983050847458e-06, |
| "loss": 1.3129, |
| "step": 22420 |
| }, |
| { |
| "epoch": 0.56, |
| "grad_norm": 3.860886335372925, |
| "learning_rate": 5.132203389830509e-06, |
| "loss": 1.4046, |
| "step": 22430 |
| }, |
| { |
| "epoch": 0.56, |
| "grad_norm": 8.062068939208984, |
| "learning_rate": 5.12542372881356e-06, |
| "loss": 1.207, |
| "step": 22440 |
| }, |
| { |
| "epoch": 0.56, |
| "grad_norm": 5.823556423187256, |
| "learning_rate": 5.118644067796611e-06, |
| "loss": 1.2547, |
| "step": 22450 |
| }, |
| { |
| "epoch": 0.56, |
| "grad_norm": 7.168029308319092, |
| "learning_rate": 5.111864406779662e-06, |
| "loss": 1.2772, |
| "step": 22460 |
| }, |
| { |
| "epoch": 0.56, |
| "grad_norm": 9.224081039428711, |
| "learning_rate": 5.105084745762711e-06, |
| "loss": 1.3066, |
| "step": 22470 |
| }, |
| { |
| "epoch": 0.56, |
| "grad_norm": 3.878537178039551, |
| "learning_rate": 5.098305084745763e-06, |
| "loss": 1.2413, |
| "step": 22480 |
| }, |
| { |
| "epoch": 0.56, |
| "grad_norm": 5.821982383728027, |
| "learning_rate": 5.091525423728813e-06, |
| "loss": 1.3609, |
| "step": 22490 |
| }, |
| { |
| "epoch": 0.56, |
| "grad_norm": 9.057456970214844, |
| "learning_rate": 5.084745762711865e-06, |
| "loss": 1.2255, |
| "step": 22500 |
| }, |
| { |
| "epoch": 0.56, |
| "eval_loss": 1.3439137935638428, |
| "eval_runtime": 66.2088, |
| "eval_samples_per_second": 15.104, |
| "eval_steps_per_second": 15.104, |
| "step": 22500 |
| }, |
| { |
| "epoch": 0.56, |
| "grad_norm": 3.232191562652588, |
| "learning_rate": 5.077966101694915e-06, |
| "loss": 1.3644, |
| "step": 22510 |
| }, |
| { |
| "epoch": 0.56, |
| "grad_norm": 5.3384528160095215, |
| "learning_rate": 5.071186440677967e-06, |
| "loss": 1.3093, |
| "step": 22520 |
| }, |
| { |
| "epoch": 0.56, |
| "grad_norm": 6.911621570587158, |
| "learning_rate": 5.064406779661017e-06, |
| "loss": 1.2571, |
| "step": 22530 |
| }, |
| { |
| "epoch": 0.56, |
| "grad_norm": 6.84785270690918, |
| "learning_rate": 5.057627118644069e-06, |
| "loss": 1.324, |
| "step": 22540 |
| }, |
| { |
| "epoch": 0.56, |
| "grad_norm": 4.634193420410156, |
| "learning_rate": 5.050847457627119e-06, |
| "loss": 1.4672, |
| "step": 22550 |
| }, |
| { |
| "epoch": 0.56, |
| "grad_norm": 3.4189460277557373, |
| "learning_rate": 5.0440677966101705e-06, |
| "loss": 1.325, |
| "step": 22560 |
| }, |
| { |
| "epoch": 0.56, |
| "grad_norm": 7.2140793800354, |
| "learning_rate": 5.037288135593221e-06, |
| "loss": 1.3923, |
| "step": 22570 |
| }, |
| { |
| "epoch": 0.56, |
| "grad_norm": 3.916853189468384, |
| "learning_rate": 5.030508474576271e-06, |
| "loss": 1.3431, |
| "step": 22580 |
| }, |
| { |
| "epoch": 0.56, |
| "grad_norm": 6.434349536895752, |
| "learning_rate": 5.023728813559322e-06, |
| "loss": 1.4615, |
| "step": 22590 |
| }, |
| { |
| "epoch": 0.56, |
| "grad_norm": 6.851098537445068, |
| "learning_rate": 5.016949152542373e-06, |
| "loss": 1.3013, |
| "step": 22600 |
| }, |
| { |
| "epoch": 0.57, |
| "grad_norm": 7.541562080383301, |
| "learning_rate": 5.010169491525424e-06, |
| "loss": 1.3696, |
| "step": 22610 |
| }, |
| { |
| "epoch": 0.57, |
| "grad_norm": 4.100308895111084, |
| "learning_rate": 5.003389830508475e-06, |
| "loss": 1.3802, |
| "step": 22620 |
| }, |
| { |
| "epoch": 0.57, |
| "grad_norm": 12.694953918457031, |
| "learning_rate": 4.996610169491526e-06, |
| "loss": 1.2328, |
| "step": 22630 |
| }, |
| { |
| "epoch": 0.57, |
| "grad_norm": 12.468879699707031, |
| "learning_rate": 4.989830508474577e-06, |
| "loss": 1.3995, |
| "step": 22640 |
| }, |
| { |
| "epoch": 0.57, |
| "grad_norm": 3.142810821533203, |
| "learning_rate": 4.983050847457628e-06, |
| "loss": 1.2403, |
| "step": 22650 |
| }, |
| { |
| "epoch": 0.57, |
| "grad_norm": 7.71440315246582, |
| "learning_rate": 4.976271186440678e-06, |
| "loss": 1.2902, |
| "step": 22660 |
| }, |
| { |
| "epoch": 0.57, |
| "grad_norm": 4.5355634689331055, |
| "learning_rate": 4.969491525423729e-06, |
| "loss": 1.3243, |
| "step": 22670 |
| }, |
| { |
| "epoch": 0.57, |
| "grad_norm": 1.5578103065490723, |
| "learning_rate": 4.96271186440678e-06, |
| "loss": 1.2796, |
| "step": 22680 |
| }, |
| { |
| "epoch": 0.57, |
| "grad_norm": 4.997007846832275, |
| "learning_rate": 4.955932203389831e-06, |
| "loss": 1.1288, |
| "step": 22690 |
| }, |
| { |
| "epoch": 0.57, |
| "grad_norm": 4.240728855133057, |
| "learning_rate": 4.949152542372882e-06, |
| "loss": 1.408, |
| "step": 22700 |
| }, |
| { |
| "epoch": 0.57, |
| "grad_norm": 7.517406940460205, |
| "learning_rate": 4.942372881355932e-06, |
| "loss": 1.3879, |
| "step": 22710 |
| }, |
| { |
| "epoch": 0.57, |
| "grad_norm": 1.1581978797912598, |
| "learning_rate": 4.935593220338984e-06, |
| "loss": 1.3547, |
| "step": 22720 |
| }, |
| { |
| "epoch": 0.57, |
| "grad_norm": 5.073269367218018, |
| "learning_rate": 4.928813559322034e-06, |
| "loss": 1.3664, |
| "step": 22730 |
| }, |
| { |
| "epoch": 0.57, |
| "grad_norm": 3.7317769527435303, |
| "learning_rate": 4.922033898305086e-06, |
| "loss": 1.3942, |
| "step": 22740 |
| }, |
| { |
| "epoch": 0.57, |
| "grad_norm": 4.843672275543213, |
| "learning_rate": 4.915254237288136e-06, |
| "loss": 1.3723, |
| "step": 22750 |
| }, |
| { |
| "epoch": 0.57, |
| "grad_norm": 7.751224040985107, |
| "learning_rate": 4.908474576271187e-06, |
| "loss": 1.2057, |
| "step": 22760 |
| }, |
| { |
| "epoch": 0.57, |
| "grad_norm": 2.4519495964050293, |
| "learning_rate": 4.901694915254237e-06, |
| "loss": 1.3836, |
| "step": 22770 |
| }, |
| { |
| "epoch": 0.57, |
| "grad_norm": 8.7233304977417, |
| "learning_rate": 4.894915254237289e-06, |
| "loss": 1.3842, |
| "step": 22780 |
| }, |
| { |
| "epoch": 0.57, |
| "grad_norm": 2.717367172241211, |
| "learning_rate": 4.888135593220339e-06, |
| "loss": 1.4519, |
| "step": 22790 |
| }, |
| { |
| "epoch": 0.57, |
| "grad_norm": 4.797736167907715, |
| "learning_rate": 4.881355932203391e-06, |
| "loss": 1.1785, |
| "step": 22800 |
| }, |
| { |
| "epoch": 0.57, |
| "grad_norm": 11.28987979888916, |
| "learning_rate": 4.874576271186441e-06, |
| "loss": 1.3058, |
| "step": 22810 |
| }, |
| { |
| "epoch": 0.57, |
| "grad_norm": 5.097863674163818, |
| "learning_rate": 4.867796610169492e-06, |
| "loss": 1.232, |
| "step": 22820 |
| }, |
| { |
| "epoch": 0.57, |
| "grad_norm": 5.479716777801514, |
| "learning_rate": 4.861016949152543e-06, |
| "loss": 1.4677, |
| "step": 22830 |
| }, |
| { |
| "epoch": 0.57, |
| "grad_norm": 11.921456336975098, |
| "learning_rate": 4.854237288135594e-06, |
| "loss": 1.5442, |
| "step": 22840 |
| }, |
| { |
| "epoch": 0.57, |
| "grad_norm": 9.934676170349121, |
| "learning_rate": 4.847457627118645e-06, |
| "loss": 1.4147, |
| "step": 22850 |
| }, |
| { |
| "epoch": 0.57, |
| "grad_norm": 3.3140487670898438, |
| "learning_rate": 4.840677966101695e-06, |
| "loss": 1.2146, |
| "step": 22860 |
| }, |
| { |
| "epoch": 0.57, |
| "grad_norm": 2.1303317546844482, |
| "learning_rate": 4.833898305084746e-06, |
| "loss": 1.3021, |
| "step": 22870 |
| }, |
| { |
| "epoch": 0.57, |
| "grad_norm": 3.943474769592285, |
| "learning_rate": 4.827118644067797e-06, |
| "loss": 1.2028, |
| "step": 22880 |
| }, |
| { |
| "epoch": 0.57, |
| "grad_norm": 5.444009780883789, |
| "learning_rate": 4.820338983050848e-06, |
| "loss": 1.3873, |
| "step": 22890 |
| }, |
| { |
| "epoch": 0.57, |
| "grad_norm": 5.230558395385742, |
| "learning_rate": 4.813559322033899e-06, |
| "loss": 1.2907, |
| "step": 22900 |
| }, |
| { |
| "epoch": 0.57, |
| "grad_norm": 2.652158498764038, |
| "learning_rate": 4.80677966101695e-06, |
| "loss": 1.27, |
| "step": 22910 |
| }, |
| { |
| "epoch": 0.57, |
| "grad_norm": 5.423113822937012, |
| "learning_rate": 4.800000000000001e-06, |
| "loss": 1.3488, |
| "step": 22920 |
| }, |
| { |
| "epoch": 0.57, |
| "grad_norm": 6.731577396392822, |
| "learning_rate": 4.793220338983051e-06, |
| "loss": 1.3411, |
| "step": 22930 |
| }, |
| { |
| "epoch": 0.57, |
| "grad_norm": 6.142993927001953, |
| "learning_rate": 4.786440677966102e-06, |
| "loss": 1.4032, |
| "step": 22940 |
| }, |
| { |
| "epoch": 0.57, |
| "grad_norm": 7.845600605010986, |
| "learning_rate": 4.779661016949153e-06, |
| "loss": 1.3567, |
| "step": 22950 |
| }, |
| { |
| "epoch": 0.57, |
| "grad_norm": 3.123938798904419, |
| "learning_rate": 4.772881355932204e-06, |
| "loss": 1.2106, |
| "step": 22960 |
| }, |
| { |
| "epoch": 0.57, |
| "grad_norm": 4.684544086456299, |
| "learning_rate": 4.766101694915254e-06, |
| "loss": 1.3407, |
| "step": 22970 |
| }, |
| { |
| "epoch": 0.57, |
| "grad_norm": 11.232462882995605, |
| "learning_rate": 4.759322033898306e-06, |
| "loss": 1.136, |
| "step": 22980 |
| }, |
| { |
| "epoch": 0.57, |
| "grad_norm": 3.3728113174438477, |
| "learning_rate": 4.752542372881356e-06, |
| "loss": 1.199, |
| "step": 22990 |
| }, |
| { |
| "epoch": 0.57, |
| "grad_norm": 3.5851376056671143, |
| "learning_rate": 4.745762711864408e-06, |
| "loss": 1.463, |
| "step": 23000 |
| }, |
| { |
| "epoch": 0.57, |
| "eval_loss": 1.3161565065383911, |
| "eval_runtime": 66.103, |
| "eval_samples_per_second": 15.128, |
| "eval_steps_per_second": 15.128, |
| "step": 23000 |
| }, |
| { |
| "epoch": 0.58, |
| "grad_norm": 5.720829010009766, |
| "learning_rate": 4.738983050847458e-06, |
| "loss": 1.4393, |
| "step": 23010 |
| }, |
| { |
| "epoch": 0.58, |
| "grad_norm": 11.663517951965332, |
| "learning_rate": 4.732203389830509e-06, |
| "loss": 1.3158, |
| "step": 23020 |
| }, |
| { |
| "epoch": 0.58, |
| "grad_norm": 10.158949851989746, |
| "learning_rate": 4.725423728813559e-06, |
| "loss": 1.4086, |
| "step": 23030 |
| }, |
| { |
| "epoch": 0.58, |
| "grad_norm": 6.925542831420898, |
| "learning_rate": 4.718644067796611e-06, |
| "loss": 1.3655, |
| "step": 23040 |
| }, |
| { |
| "epoch": 0.58, |
| "grad_norm": 1.4193698167800903, |
| "learning_rate": 4.711864406779661e-06, |
| "loss": 1.2348, |
| "step": 23050 |
| }, |
| { |
| "epoch": 0.58, |
| "grad_norm": 6.884500980377197, |
| "learning_rate": 4.705084745762713e-06, |
| "loss": 1.4053, |
| "step": 23060 |
| }, |
| { |
| "epoch": 0.58, |
| "grad_norm": 4.412232875823975, |
| "learning_rate": 4.698305084745763e-06, |
| "loss": 1.2476, |
| "step": 23070 |
| }, |
| { |
| "epoch": 0.58, |
| "grad_norm": 2.403428077697754, |
| "learning_rate": 4.691525423728814e-06, |
| "loss": 1.4168, |
| "step": 23080 |
| }, |
| { |
| "epoch": 0.58, |
| "grad_norm": 7.022388458251953, |
| "learning_rate": 4.684745762711865e-06, |
| "loss": 1.359, |
| "step": 23090 |
| }, |
| { |
| "epoch": 0.58, |
| "grad_norm": 5.728121280670166, |
| "learning_rate": 4.677966101694916e-06, |
| "loss": 1.3916, |
| "step": 23100 |
| }, |
| { |
| "epoch": 0.58, |
| "grad_norm": 14.995932579040527, |
| "learning_rate": 4.671186440677967e-06, |
| "loss": 1.4541, |
| "step": 23110 |
| }, |
| { |
| "epoch": 0.58, |
| "grad_norm": 12.448729515075684, |
| "learning_rate": 4.664406779661017e-06, |
| "loss": 1.2989, |
| "step": 23120 |
| }, |
| { |
| "epoch": 0.58, |
| "grad_norm": 2.5807507038116455, |
| "learning_rate": 4.657627118644068e-06, |
| "loss": 1.4881, |
| "step": 23130 |
| }, |
| { |
| "epoch": 0.58, |
| "grad_norm": 4.670041561126709, |
| "learning_rate": 4.650847457627119e-06, |
| "loss": 1.4595, |
| "step": 23140 |
| }, |
| { |
| "epoch": 0.58, |
| "grad_norm": 3.8450145721435547, |
| "learning_rate": 4.64406779661017e-06, |
| "loss": 1.245, |
| "step": 23150 |
| }, |
| { |
| "epoch": 0.58, |
| "grad_norm": 15.546969413757324, |
| "learning_rate": 4.637288135593221e-06, |
| "loss": 1.2199, |
| "step": 23160 |
| }, |
| { |
| "epoch": 0.58, |
| "grad_norm": 8.563859939575195, |
| "learning_rate": 4.630508474576272e-06, |
| "loss": 1.3752, |
| "step": 23170 |
| }, |
| { |
| "epoch": 0.58, |
| "grad_norm": 8.742653846740723, |
| "learning_rate": 4.623728813559323e-06, |
| "loss": 1.2802, |
| "step": 23180 |
| }, |
| { |
| "epoch": 0.58, |
| "grad_norm": 6.253279685974121, |
| "learning_rate": 4.616949152542373e-06, |
| "loss": 1.4559, |
| "step": 23190 |
| }, |
| { |
| "epoch": 0.58, |
| "grad_norm": 5.3615827560424805, |
| "learning_rate": 4.610169491525424e-06, |
| "loss": 1.1649, |
| "step": 23200 |
| }, |
| { |
| "epoch": 0.58, |
| "grad_norm": 9.165109634399414, |
| "learning_rate": 4.603389830508475e-06, |
| "loss": 1.4435, |
| "step": 23210 |
| }, |
| { |
| "epoch": 0.58, |
| "grad_norm": 6.625391483306885, |
| "learning_rate": 4.596610169491526e-06, |
| "loss": 1.2911, |
| "step": 23220 |
| }, |
| { |
| "epoch": 0.58, |
| "grad_norm": 3.898466110229492, |
| "learning_rate": 4.589830508474576e-06, |
| "loss": 1.2653, |
| "step": 23230 |
| }, |
| { |
| "epoch": 0.58, |
| "grad_norm": 1.5483791828155518, |
| "learning_rate": 4.583050847457628e-06, |
| "loss": 1.1665, |
| "step": 23240 |
| }, |
| { |
| "epoch": 0.58, |
| "grad_norm": 4.248947620391846, |
| "learning_rate": 4.576271186440678e-06, |
| "loss": 1.2275, |
| "step": 23250 |
| }, |
| { |
| "epoch": 0.58, |
| "grad_norm": 3.522451639175415, |
| "learning_rate": 4.56949152542373e-06, |
| "loss": 1.3427, |
| "step": 23260 |
| }, |
| { |
| "epoch": 0.58, |
| "grad_norm": 5.79518461227417, |
| "learning_rate": 4.56271186440678e-06, |
| "loss": 1.2545, |
| "step": 23270 |
| }, |
| { |
| "epoch": 0.58, |
| "grad_norm": 7.211407661437988, |
| "learning_rate": 4.555932203389831e-06, |
| "loss": 1.3256, |
| "step": 23280 |
| }, |
| { |
| "epoch": 0.58, |
| "grad_norm": 2.218186855316162, |
| "learning_rate": 4.549152542372881e-06, |
| "loss": 1.327, |
| "step": 23290 |
| }, |
| { |
| "epoch": 0.58, |
| "grad_norm": 3.0725247859954834, |
| "learning_rate": 4.542372881355933e-06, |
| "loss": 1.1152, |
| "step": 23300 |
| }, |
| { |
| "epoch": 0.58, |
| "grad_norm": 10.184927940368652, |
| "learning_rate": 4.535593220338983e-06, |
| "loss": 1.3153, |
| "step": 23310 |
| }, |
| { |
| "epoch": 0.58, |
| "grad_norm": 6.575405597686768, |
| "learning_rate": 4.528813559322035e-06, |
| "loss": 1.2879, |
| "step": 23320 |
| }, |
| { |
| "epoch": 0.58, |
| "grad_norm": 5.2145094871521, |
| "learning_rate": 4.522033898305085e-06, |
| "loss": 1.2836, |
| "step": 23330 |
| }, |
| { |
| "epoch": 0.58, |
| "grad_norm": 16.586687088012695, |
| "learning_rate": 4.515254237288136e-06, |
| "loss": 1.245, |
| "step": 23340 |
| }, |
| { |
| "epoch": 0.58, |
| "grad_norm": 5.264496803283691, |
| "learning_rate": 4.508474576271187e-06, |
| "loss": 1.1731, |
| "step": 23350 |
| }, |
| { |
| "epoch": 0.58, |
| "grad_norm": 5.267670631408691, |
| "learning_rate": 4.501694915254238e-06, |
| "loss": 1.3662, |
| "step": 23360 |
| }, |
| { |
| "epoch": 0.58, |
| "grad_norm": 7.335075855255127, |
| "learning_rate": 4.494915254237289e-06, |
| "loss": 1.3261, |
| "step": 23370 |
| }, |
| { |
| "epoch": 0.58, |
| "grad_norm": 7.068376064300537, |
| "learning_rate": 4.488135593220339e-06, |
| "loss": 1.2805, |
| "step": 23380 |
| }, |
| { |
| "epoch": 0.58, |
| "grad_norm": 15.14303207397461, |
| "learning_rate": 4.48135593220339e-06, |
| "loss": 1.3605, |
| "step": 23390 |
| }, |
| { |
| "epoch": 0.58, |
| "grad_norm": 12.552229881286621, |
| "learning_rate": 4.474576271186441e-06, |
| "loss": 1.4804, |
| "step": 23400 |
| }, |
| { |
| "epoch": 0.59, |
| "grad_norm": 6.760104656219482, |
| "learning_rate": 4.467796610169492e-06, |
| "loss": 1.2604, |
| "step": 23410 |
| }, |
| { |
| "epoch": 0.59, |
| "grad_norm": 6.444414138793945, |
| "learning_rate": 4.461016949152543e-06, |
| "loss": 1.3677, |
| "step": 23420 |
| }, |
| { |
| "epoch": 0.59, |
| "grad_norm": 2.223396062850952, |
| "learning_rate": 4.454237288135594e-06, |
| "loss": 1.4585, |
| "step": 23430 |
| }, |
| { |
| "epoch": 0.59, |
| "grad_norm": 3.0469980239868164, |
| "learning_rate": 4.447457627118645e-06, |
| "loss": 1.2125, |
| "step": 23440 |
| }, |
| { |
| "epoch": 0.59, |
| "grad_norm": 9.140281677246094, |
| "learning_rate": 4.440677966101695e-06, |
| "loss": 1.2847, |
| "step": 23450 |
| }, |
| { |
| "epoch": 0.59, |
| "grad_norm": 10.596829414367676, |
| "learning_rate": 4.433898305084746e-06, |
| "loss": 1.1919, |
| "step": 23460 |
| }, |
| { |
| "epoch": 0.59, |
| "grad_norm": 6.769688129425049, |
| "learning_rate": 4.427118644067797e-06, |
| "loss": 1.3911, |
| "step": 23470 |
| }, |
| { |
| "epoch": 0.59, |
| "grad_norm": 8.3526029586792, |
| "learning_rate": 4.420338983050848e-06, |
| "loss": 1.3243, |
| "step": 23480 |
| }, |
| { |
| "epoch": 0.59, |
| "grad_norm": 1.516774296760559, |
| "learning_rate": 4.413559322033898e-06, |
| "loss": 1.4328, |
| "step": 23490 |
| }, |
| { |
| "epoch": 0.59, |
| "grad_norm": 5.758790493011475, |
| "learning_rate": 4.40677966101695e-06, |
| "loss": 1.1158, |
| "step": 23500 |
| }, |
| { |
| "epoch": 0.59, |
| "eval_loss": 1.3372619152069092, |
| "eval_runtime": 66.1468, |
| "eval_samples_per_second": 15.118, |
| "eval_steps_per_second": 15.118, |
| "step": 23500 |
| }, |
| { |
| "epoch": 0.59, |
| "grad_norm": 10.471700668334961, |
| "learning_rate": 4.4e-06, |
| "loss": 1.1029, |
| "step": 23510 |
| }, |
| { |
| "epoch": 0.59, |
| "grad_norm": 6.78934383392334, |
| "learning_rate": 4.393220338983052e-06, |
| "loss": 1.2838, |
| "step": 23520 |
| }, |
| { |
| "epoch": 0.59, |
| "grad_norm": 4.890566825866699, |
| "learning_rate": 4.386440677966102e-06, |
| "loss": 1.2861, |
| "step": 23530 |
| }, |
| { |
| "epoch": 0.59, |
| "grad_norm": 9.901065826416016, |
| "learning_rate": 4.379661016949153e-06, |
| "loss": 1.3043, |
| "step": 23540 |
| }, |
| { |
| "epoch": 0.59, |
| "grad_norm": 3.332019805908203, |
| "learning_rate": 4.372881355932203e-06, |
| "loss": 1.5098, |
| "step": 23550 |
| }, |
| { |
| "epoch": 0.59, |
| "grad_norm": 9.1102876663208, |
| "learning_rate": 4.366101694915255e-06, |
| "loss": 1.2222, |
| "step": 23560 |
| }, |
| { |
| "epoch": 0.59, |
| "grad_norm": 2.800964832305908, |
| "learning_rate": 4.359322033898305e-06, |
| "loss": 1.1916, |
| "step": 23570 |
| }, |
| { |
| "epoch": 0.59, |
| "grad_norm": 4.45274019241333, |
| "learning_rate": 4.352542372881357e-06, |
| "loss": 1.496, |
| "step": 23580 |
| }, |
| { |
| "epoch": 0.59, |
| "grad_norm": 7.7979350090026855, |
| "learning_rate": 4.345762711864407e-06, |
| "loss": 1.3483, |
| "step": 23590 |
| }, |
| { |
| "epoch": 0.59, |
| "grad_norm": 5.517279148101807, |
| "learning_rate": 4.338983050847458e-06, |
| "loss": 1.1602, |
| "step": 23600 |
| }, |
| { |
| "epoch": 0.59, |
| "grad_norm": 8.224603652954102, |
| "learning_rate": 4.332203389830509e-06, |
| "loss": 1.2216, |
| "step": 23610 |
| }, |
| { |
| "epoch": 0.59, |
| "grad_norm": 3.9079153537750244, |
| "learning_rate": 4.32542372881356e-06, |
| "loss": 1.4094, |
| "step": 23620 |
| }, |
| { |
| "epoch": 0.59, |
| "grad_norm": 7.209962844848633, |
| "learning_rate": 4.318644067796611e-06, |
| "loss": 1.4033, |
| "step": 23630 |
| }, |
| { |
| "epoch": 0.59, |
| "grad_norm": 6.915498733520508, |
| "learning_rate": 4.311864406779661e-06, |
| "loss": 1.3988, |
| "step": 23640 |
| }, |
| { |
| "epoch": 0.59, |
| "grad_norm": 6.8702778816223145, |
| "learning_rate": 4.305084745762712e-06, |
| "loss": 1.3308, |
| "step": 23650 |
| }, |
| { |
| "epoch": 0.59, |
| "grad_norm": 6.673946380615234, |
| "learning_rate": 4.298305084745763e-06, |
| "loss": 1.2756, |
| "step": 23660 |
| }, |
| { |
| "epoch": 0.59, |
| "grad_norm": 2.729367971420288, |
| "learning_rate": 4.291525423728814e-06, |
| "loss": 1.2702, |
| "step": 23670 |
| }, |
| { |
| "epoch": 0.59, |
| "grad_norm": 4.333055019378662, |
| "learning_rate": 4.284745762711865e-06, |
| "loss": 1.3365, |
| "step": 23680 |
| }, |
| { |
| "epoch": 0.59, |
| "grad_norm": 8.36184024810791, |
| "learning_rate": 4.277966101694915e-06, |
| "loss": 1.2783, |
| "step": 23690 |
| }, |
| { |
| "epoch": 0.59, |
| "grad_norm": 4.62699031829834, |
| "learning_rate": 4.271186440677967e-06, |
| "loss": 1.4385, |
| "step": 23700 |
| }, |
| { |
| "epoch": 0.59, |
| "grad_norm": 3.193026304244995, |
| "learning_rate": 4.264406779661017e-06, |
| "loss": 1.4843, |
| "step": 23710 |
| }, |
| { |
| "epoch": 0.59, |
| "grad_norm": 8.289533615112305, |
| "learning_rate": 4.257627118644068e-06, |
| "loss": 1.5263, |
| "step": 23720 |
| }, |
| { |
| "epoch": 0.59, |
| "grad_norm": 3.887775182723999, |
| "learning_rate": 4.250847457627119e-06, |
| "loss": 1.228, |
| "step": 23730 |
| }, |
| { |
| "epoch": 0.59, |
| "grad_norm": 10.728804588317871, |
| "learning_rate": 4.24406779661017e-06, |
| "loss": 1.4097, |
| "step": 23740 |
| }, |
| { |
| "epoch": 0.59, |
| "grad_norm": 5.405580997467041, |
| "learning_rate": 4.23728813559322e-06, |
| "loss": 1.227, |
| "step": 23750 |
| }, |
| { |
| "epoch": 0.59, |
| "grad_norm": 2.104985237121582, |
| "learning_rate": 4.230508474576272e-06, |
| "loss": 1.3258, |
| "step": 23760 |
| }, |
| { |
| "epoch": 0.59, |
| "grad_norm": 11.678805351257324, |
| "learning_rate": 4.223728813559322e-06, |
| "loss": 1.1797, |
| "step": 23770 |
| }, |
| { |
| "epoch": 0.59, |
| "grad_norm": 12.024051666259766, |
| "learning_rate": 4.216949152542374e-06, |
| "loss": 1.3278, |
| "step": 23780 |
| }, |
| { |
| "epoch": 0.59, |
| "grad_norm": 12.879485130310059, |
| "learning_rate": 4.210169491525424e-06, |
| "loss": 1.2552, |
| "step": 23790 |
| }, |
| { |
| "epoch": 0.59, |
| "grad_norm": 6.001992702484131, |
| "learning_rate": 4.203389830508475e-06, |
| "loss": 1.4639, |
| "step": 23800 |
| }, |
| { |
| "epoch": 0.6, |
| "grad_norm": 7.713657855987549, |
| "learning_rate": 4.196610169491525e-06, |
| "loss": 1.3542, |
| "step": 23810 |
| }, |
| { |
| "epoch": 0.6, |
| "grad_norm": 25.137435913085938, |
| "learning_rate": 4.189830508474577e-06, |
| "loss": 1.2694, |
| "step": 23820 |
| }, |
| { |
| "epoch": 0.6, |
| "grad_norm": 13.080780029296875, |
| "learning_rate": 4.183050847457627e-06, |
| "loss": 1.5512, |
| "step": 23830 |
| }, |
| { |
| "epoch": 0.6, |
| "grad_norm": 3.648967981338501, |
| "learning_rate": 4.176271186440679e-06, |
| "loss": 1.4919, |
| "step": 23840 |
| }, |
| { |
| "epoch": 0.6, |
| "grad_norm": 2.8366498947143555, |
| "learning_rate": 4.169491525423729e-06, |
| "loss": 1.3528, |
| "step": 23850 |
| }, |
| { |
| "epoch": 0.6, |
| "grad_norm": 2.3198916912078857, |
| "learning_rate": 4.16271186440678e-06, |
| "loss": 1.4445, |
| "step": 23860 |
| }, |
| { |
| "epoch": 0.6, |
| "grad_norm": 9.170830726623535, |
| "learning_rate": 4.155932203389831e-06, |
| "loss": 1.1801, |
| "step": 23870 |
| }, |
| { |
| "epoch": 0.6, |
| "grad_norm": 7.985089302062988, |
| "learning_rate": 4.149152542372882e-06, |
| "loss": 1.3195, |
| "step": 23880 |
| }, |
| { |
| "epoch": 0.6, |
| "grad_norm": 10.688753128051758, |
| "learning_rate": 4.142372881355933e-06, |
| "loss": 1.4527, |
| "step": 23890 |
| }, |
| { |
| "epoch": 0.6, |
| "grad_norm": 12.181285858154297, |
| "learning_rate": 4.135593220338983e-06, |
| "loss": 1.3875, |
| "step": 23900 |
| }, |
| { |
| "epoch": 0.6, |
| "grad_norm": 10.353550910949707, |
| "learning_rate": 4.128813559322034e-06, |
| "loss": 1.3937, |
| "step": 23910 |
| }, |
| { |
| "epoch": 0.6, |
| "grad_norm": 3.3962326049804688, |
| "learning_rate": 4.122033898305085e-06, |
| "loss": 1.2212, |
| "step": 23920 |
| }, |
| { |
| "epoch": 0.6, |
| "grad_norm": 9.191743850708008, |
| "learning_rate": 4.115254237288136e-06, |
| "loss": 1.3884, |
| "step": 23930 |
| }, |
| { |
| "epoch": 0.6, |
| "grad_norm": 8.74504566192627, |
| "learning_rate": 4.108474576271187e-06, |
| "loss": 1.369, |
| "step": 23940 |
| }, |
| { |
| "epoch": 0.6, |
| "grad_norm": 15.484914779663086, |
| "learning_rate": 4.101694915254237e-06, |
| "loss": 1.3607, |
| "step": 23950 |
| }, |
| { |
| "epoch": 0.6, |
| "grad_norm": 8.069631576538086, |
| "learning_rate": 4.094915254237289e-06, |
| "loss": 1.1674, |
| "step": 23960 |
| }, |
| { |
| "epoch": 0.6, |
| "grad_norm": 5.688279151916504, |
| "learning_rate": 4.088135593220339e-06, |
| "loss": 1.2652, |
| "step": 23970 |
| }, |
| { |
| "epoch": 0.6, |
| "grad_norm": 2.326960325241089, |
| "learning_rate": 4.081355932203391e-06, |
| "loss": 1.2149, |
| "step": 23980 |
| }, |
| { |
| "epoch": 0.6, |
| "grad_norm": 9.749725341796875, |
| "learning_rate": 4.074576271186441e-06, |
| "loss": 1.2378, |
| "step": 23990 |
| }, |
| { |
| "epoch": 0.6, |
| "grad_norm": 5.552289962768555, |
| "learning_rate": 4.067796610169492e-06, |
| "loss": 1.2699, |
| "step": 24000 |
| }, |
| { |
| "epoch": 0.6, |
| "eval_loss": 1.3411681652069092, |
| "eval_runtime": 66.1396, |
| "eval_samples_per_second": 15.12, |
| "eval_steps_per_second": 15.12, |
| "step": 24000 |
| }, |
| { |
| "epoch": 0.6, |
| "grad_norm": 8.768118858337402, |
| "learning_rate": 4.061016949152542e-06, |
| "loss": 1.1869, |
| "step": 24010 |
| }, |
| { |
| "epoch": 0.6, |
| "grad_norm": 1.0193852186203003, |
| "learning_rate": 4.054237288135594e-06, |
| "loss": 1.1477, |
| "step": 24020 |
| }, |
| { |
| "epoch": 0.6, |
| "grad_norm": 11.04339599609375, |
| "learning_rate": 4.047457627118644e-06, |
| "loss": 1.2492, |
| "step": 24030 |
| }, |
| { |
| "epoch": 0.6, |
| "grad_norm": 2.5347607135772705, |
| "learning_rate": 4.040677966101696e-06, |
| "loss": 1.2424, |
| "step": 24040 |
| }, |
| { |
| "epoch": 0.6, |
| "grad_norm": 5.121871471405029, |
| "learning_rate": 4.033898305084746e-06, |
| "loss": 1.1773, |
| "step": 24050 |
| }, |
| { |
| "epoch": 0.6, |
| "grad_norm": 8.53433609008789, |
| "learning_rate": 4.027118644067797e-06, |
| "loss": 1.4407, |
| "step": 24060 |
| }, |
| { |
| "epoch": 0.6, |
| "grad_norm": 11.311376571655273, |
| "learning_rate": 4.020338983050847e-06, |
| "loss": 1.4095, |
| "step": 24070 |
| }, |
| { |
| "epoch": 0.6, |
| "grad_norm": 2.8956375122070312, |
| "learning_rate": 4.013559322033899e-06, |
| "loss": 1.3076, |
| "step": 24080 |
| }, |
| { |
| "epoch": 0.6, |
| "grad_norm": 3.6406021118164062, |
| "learning_rate": 4.006779661016949e-06, |
| "loss": 1.1801, |
| "step": 24090 |
| }, |
| { |
| "epoch": 0.6, |
| "grad_norm": 4.67333459854126, |
| "learning_rate": 4.000000000000001e-06, |
| "loss": 1.3627, |
| "step": 24100 |
| }, |
| { |
| "epoch": 0.6, |
| "grad_norm": 4.243159294128418, |
| "learning_rate": 3.993220338983051e-06, |
| "loss": 1.2065, |
| "step": 24110 |
| }, |
| { |
| "epoch": 0.6, |
| "grad_norm": 4.570652484893799, |
| "learning_rate": 3.986440677966102e-06, |
| "loss": 1.4576, |
| "step": 24120 |
| }, |
| { |
| "epoch": 0.6, |
| "grad_norm": 10.30574893951416, |
| "learning_rate": 3.979661016949153e-06, |
| "loss": 1.3798, |
| "step": 24130 |
| }, |
| { |
| "epoch": 0.6, |
| "grad_norm": 1.7883845567703247, |
| "learning_rate": 3.972881355932204e-06, |
| "loss": 1.1474, |
| "step": 24140 |
| }, |
| { |
| "epoch": 0.6, |
| "grad_norm": 2.429614305496216, |
| "learning_rate": 3.966101694915255e-06, |
| "loss": 1.3992, |
| "step": 24150 |
| }, |
| { |
| "epoch": 0.6, |
| "grad_norm": 5.791226863861084, |
| "learning_rate": 3.959322033898305e-06, |
| "loss": 1.4164, |
| "step": 24160 |
| }, |
| { |
| "epoch": 0.6, |
| "grad_norm": 6.212001800537109, |
| "learning_rate": 3.952542372881356e-06, |
| "loss": 1.4647, |
| "step": 24170 |
| }, |
| { |
| "epoch": 0.6, |
| "grad_norm": 4.9569292068481445, |
| "learning_rate": 3.945762711864407e-06, |
| "loss": 1.2948, |
| "step": 24180 |
| }, |
| { |
| "epoch": 0.6, |
| "grad_norm": 2.2119970321655273, |
| "learning_rate": 3.938983050847458e-06, |
| "loss": 1.3955, |
| "step": 24190 |
| }, |
| { |
| "epoch": 0.6, |
| "grad_norm": 10.280770301818848, |
| "learning_rate": 3.932203389830509e-06, |
| "loss": 1.4461, |
| "step": 24200 |
| }, |
| { |
| "epoch": 0.61, |
| "grad_norm": 3.701272487640381, |
| "learning_rate": 3.925423728813559e-06, |
| "loss": 1.348, |
| "step": 24210 |
| }, |
| { |
| "epoch": 0.61, |
| "grad_norm": 8.827926635742188, |
| "learning_rate": 3.918644067796611e-06, |
| "loss": 1.3398, |
| "step": 24220 |
| }, |
| { |
| "epoch": 0.61, |
| "grad_norm": 6.997286319732666, |
| "learning_rate": 3.911864406779661e-06, |
| "loss": 1.4724, |
| "step": 24230 |
| }, |
| { |
| "epoch": 0.61, |
| "grad_norm": 5.5268449783325195, |
| "learning_rate": 3.905084745762713e-06, |
| "loss": 1.388, |
| "step": 24240 |
| }, |
| { |
| "epoch": 0.61, |
| "grad_norm": 8.842992782592773, |
| "learning_rate": 3.898305084745763e-06, |
| "loss": 1.2382, |
| "step": 24250 |
| }, |
| { |
| "epoch": 0.61, |
| "grad_norm": 11.24975872039795, |
| "learning_rate": 3.891525423728814e-06, |
| "loss": 1.2194, |
| "step": 24260 |
| }, |
| { |
| "epoch": 0.61, |
| "grad_norm": 2.875722646713257, |
| "learning_rate": 3.884745762711864e-06, |
| "loss": 1.3792, |
| "step": 24270 |
| }, |
| { |
| "epoch": 0.61, |
| "grad_norm": 8.459474563598633, |
| "learning_rate": 3.877966101694916e-06, |
| "loss": 1.3021, |
| "step": 24280 |
| }, |
| { |
| "epoch": 0.61, |
| "grad_norm": 3.315873861312866, |
| "learning_rate": 3.871186440677966e-06, |
| "loss": 1.2976, |
| "step": 24290 |
| }, |
| { |
| "epoch": 0.61, |
| "grad_norm": 6.280729293823242, |
| "learning_rate": 3.864406779661018e-06, |
| "loss": 1.3294, |
| "step": 24300 |
| }, |
| { |
| "epoch": 0.61, |
| "grad_norm": 6.004711627960205, |
| "learning_rate": 3.857627118644068e-06, |
| "loss": 1.3178, |
| "step": 24310 |
| }, |
| { |
| "epoch": 0.61, |
| "grad_norm": 8.207845687866211, |
| "learning_rate": 3.850847457627119e-06, |
| "loss": 1.424, |
| "step": 24320 |
| }, |
| { |
| "epoch": 0.61, |
| "grad_norm": 8.01065444946289, |
| "learning_rate": 3.844067796610169e-06, |
| "loss": 1.2842, |
| "step": 24330 |
| }, |
| { |
| "epoch": 0.61, |
| "grad_norm": 9.126721382141113, |
| "learning_rate": 3.837288135593221e-06, |
| "loss": 1.368, |
| "step": 24340 |
| }, |
| { |
| "epoch": 0.61, |
| "grad_norm": 11.590188026428223, |
| "learning_rate": 3.830508474576271e-06, |
| "loss": 1.375, |
| "step": 24350 |
| }, |
| { |
| "epoch": 0.61, |
| "grad_norm": 7.325139045715332, |
| "learning_rate": 3.823728813559323e-06, |
| "loss": 1.2593, |
| "step": 24360 |
| }, |
| { |
| "epoch": 0.61, |
| "grad_norm": 6.3924760818481445, |
| "learning_rate": 3.816949152542373e-06, |
| "loss": 1.3238, |
| "step": 24370 |
| }, |
| { |
| "epoch": 0.61, |
| "grad_norm": 5.093543529510498, |
| "learning_rate": 3.8101694915254238e-06, |
| "loss": 1.3398, |
| "step": 24380 |
| }, |
| { |
| "epoch": 0.61, |
| "grad_norm": 4.488302707672119, |
| "learning_rate": 3.8033898305084748e-06, |
| "loss": 1.2532, |
| "step": 24390 |
| }, |
| { |
| "epoch": 0.61, |
| "grad_norm": 3.5369062423706055, |
| "learning_rate": 3.7966101694915257e-06, |
| "loss": 1.1723, |
| "step": 24400 |
| }, |
| { |
| "epoch": 0.61, |
| "grad_norm": 3.2012510299682617, |
| "learning_rate": 3.7898305084745767e-06, |
| "loss": 1.3348, |
| "step": 24410 |
| }, |
| { |
| "epoch": 0.61, |
| "grad_norm": 1.5665017366409302, |
| "learning_rate": 3.7830508474576273e-06, |
| "loss": 1.4159, |
| "step": 24420 |
| }, |
| { |
| "epoch": 0.61, |
| "grad_norm": 12.912787437438965, |
| "learning_rate": 3.7762711864406782e-06, |
| "loss": 1.2406, |
| "step": 24430 |
| }, |
| { |
| "epoch": 0.61, |
| "grad_norm": 6.572142124176025, |
| "learning_rate": 3.7694915254237292e-06, |
| "loss": 1.1544, |
| "step": 24440 |
| }, |
| { |
| "epoch": 0.61, |
| "grad_norm": 4.999161720275879, |
| "learning_rate": 3.76271186440678e-06, |
| "loss": 1.1744, |
| "step": 24450 |
| }, |
| { |
| "epoch": 0.61, |
| "grad_norm": 3.322866439819336, |
| "learning_rate": 3.755932203389831e-06, |
| "loss": 1.3713, |
| "step": 24460 |
| }, |
| { |
| "epoch": 0.61, |
| "grad_norm": 5.197652816772461, |
| "learning_rate": 3.7491525423728813e-06, |
| "loss": 1.1858, |
| "step": 24470 |
| }, |
| { |
| "epoch": 0.61, |
| "grad_norm": 6.7361369132995605, |
| "learning_rate": 3.7423728813559323e-06, |
| "loss": 1.2778, |
| "step": 24480 |
| }, |
| { |
| "epoch": 0.61, |
| "grad_norm": 1.1276848316192627, |
| "learning_rate": 3.7355932203389833e-06, |
| "loss": 1.3971, |
| "step": 24490 |
| }, |
| { |
| "epoch": 0.61, |
| "grad_norm": 4.62593412399292, |
| "learning_rate": 3.7288135593220342e-06, |
| "loss": 1.479, |
| "step": 24500 |
| }, |
| { |
| "epoch": 0.61, |
| "eval_loss": 1.3212531805038452, |
| "eval_runtime": 66.1318, |
| "eval_samples_per_second": 15.121, |
| "eval_steps_per_second": 15.121, |
| "step": 24500 |
| }, |
| { |
| "epoch": 0.61, |
| "grad_norm": 7.978855133056641, |
| "learning_rate": 3.7220338983050852e-06, |
| "loss": 1.1919, |
| "step": 24510 |
| }, |
| { |
| "epoch": 0.61, |
| "grad_norm": 6.347212314605713, |
| "learning_rate": 3.715254237288136e-06, |
| "loss": 1.2493, |
| "step": 24520 |
| }, |
| { |
| "epoch": 0.61, |
| "grad_norm": 6.2206573486328125, |
| "learning_rate": 3.7084745762711867e-06, |
| "loss": 1.2258, |
| "step": 24530 |
| }, |
| { |
| "epoch": 0.61, |
| "grad_norm": 2.548797607421875, |
| "learning_rate": 3.7016949152542377e-06, |
| "loss": 1.262, |
| "step": 24540 |
| }, |
| { |
| "epoch": 0.61, |
| "grad_norm": 9.5992431640625, |
| "learning_rate": 3.6949152542372883e-06, |
| "loss": 1.4298, |
| "step": 24550 |
| }, |
| { |
| "epoch": 0.61, |
| "grad_norm": 2.236175298690796, |
| "learning_rate": 3.6881355932203393e-06, |
| "loss": 1.3466, |
| "step": 24560 |
| }, |
| { |
| "epoch": 0.61, |
| "grad_norm": 7.134004592895508, |
| "learning_rate": 3.6813559322033902e-06, |
| "loss": 1.2642, |
| "step": 24570 |
| }, |
| { |
| "epoch": 0.61, |
| "grad_norm": 12.453125, |
| "learning_rate": 3.6745762711864408e-06, |
| "loss": 1.369, |
| "step": 24580 |
| }, |
| { |
| "epoch": 0.61, |
| "grad_norm": 16.243106842041016, |
| "learning_rate": 3.6677966101694918e-06, |
| "loss": 1.3269, |
| "step": 24590 |
| }, |
| { |
| "epoch": 0.61, |
| "grad_norm": 11.406882286071777, |
| "learning_rate": 3.6610169491525427e-06, |
| "loss": 1.4339, |
| "step": 24600 |
| }, |
| { |
| "epoch": 0.62, |
| "grad_norm": 6.334946632385254, |
| "learning_rate": 3.6542372881355937e-06, |
| "loss": 1.2445, |
| "step": 24610 |
| }, |
| { |
| "epoch": 0.62, |
| "grad_norm": 9.451517105102539, |
| "learning_rate": 3.6474576271186447e-06, |
| "loss": 1.1902, |
| "step": 24620 |
| }, |
| { |
| "epoch": 0.62, |
| "grad_norm": 1.9071747064590454, |
| "learning_rate": 3.640677966101695e-06, |
| "loss": 1.3273, |
| "step": 24630 |
| }, |
| { |
| "epoch": 0.62, |
| "grad_norm": 4.054659843444824, |
| "learning_rate": 3.633898305084746e-06, |
| "loss": 1.2808, |
| "step": 24640 |
| }, |
| { |
| "epoch": 0.62, |
| "grad_norm": 6.314877986907959, |
| "learning_rate": 3.6271186440677968e-06, |
| "loss": 1.4351, |
| "step": 24650 |
| }, |
| { |
| "epoch": 0.62, |
| "grad_norm": 6.971933841705322, |
| "learning_rate": 3.6203389830508478e-06, |
| "loss": 1.2973, |
| "step": 24660 |
| }, |
| { |
| "epoch": 0.62, |
| "grad_norm": 2.7064402103424072, |
| "learning_rate": 3.6135593220338987e-06, |
| "loss": 1.4335, |
| "step": 24670 |
| }, |
| { |
| "epoch": 0.62, |
| "grad_norm": 5.698015213012695, |
| "learning_rate": 3.6067796610169493e-06, |
| "loss": 1.3042, |
| "step": 24680 |
| }, |
| { |
| "epoch": 0.62, |
| "grad_norm": 5.647088050842285, |
| "learning_rate": 3.6000000000000003e-06, |
| "loss": 1.3832, |
| "step": 24690 |
| }, |
| { |
| "epoch": 0.62, |
| "grad_norm": 10.945414543151855, |
| "learning_rate": 3.5932203389830512e-06, |
| "loss": 1.2363, |
| "step": 24700 |
| }, |
| { |
| "epoch": 0.62, |
| "grad_norm": 7.516660213470459, |
| "learning_rate": 3.5864406779661022e-06, |
| "loss": 1.2615, |
| "step": 24710 |
| }, |
| { |
| "epoch": 0.62, |
| "grad_norm": 3.6117703914642334, |
| "learning_rate": 3.579661016949153e-06, |
| "loss": 1.3533, |
| "step": 24720 |
| }, |
| { |
| "epoch": 0.62, |
| "grad_norm": 10.817008972167969, |
| "learning_rate": 3.5728813559322033e-06, |
| "loss": 1.3425, |
| "step": 24730 |
| }, |
| { |
| "epoch": 0.62, |
| "grad_norm": 7.7072858810424805, |
| "learning_rate": 3.5661016949152543e-06, |
| "loss": 1.2762, |
| "step": 24740 |
| }, |
| { |
| "epoch": 0.62, |
| "grad_norm": 5.9887495040893555, |
| "learning_rate": 3.5593220338983053e-06, |
| "loss": 1.3516, |
| "step": 24750 |
| }, |
| { |
| "epoch": 0.62, |
| "grad_norm": 3.4481067657470703, |
| "learning_rate": 3.5525423728813563e-06, |
| "loss": 1.0325, |
| "step": 24760 |
| }, |
| { |
| "epoch": 0.62, |
| "grad_norm": 5.2485551834106445, |
| "learning_rate": 3.5457627118644072e-06, |
| "loss": 1.4088, |
| "step": 24770 |
| }, |
| { |
| "epoch": 0.62, |
| "grad_norm": 6.970777988433838, |
| "learning_rate": 3.538983050847458e-06, |
| "loss": 1.4962, |
| "step": 24780 |
| }, |
| { |
| "epoch": 0.62, |
| "grad_norm": 6.037806034088135, |
| "learning_rate": 3.5322033898305088e-06, |
| "loss": 1.3806, |
| "step": 24790 |
| }, |
| { |
| "epoch": 0.62, |
| "grad_norm": 4.5726518630981445, |
| "learning_rate": 3.5254237288135597e-06, |
| "loss": 1.3086, |
| "step": 24800 |
| }, |
| { |
| "epoch": 0.62, |
| "grad_norm": 4.184850692749023, |
| "learning_rate": 3.5186440677966103e-06, |
| "loss": 1.4004, |
| "step": 24810 |
| }, |
| { |
| "epoch": 0.62, |
| "grad_norm": 9.181009292602539, |
| "learning_rate": 3.5118644067796613e-06, |
| "loss": 1.4348, |
| "step": 24820 |
| }, |
| { |
| "epoch": 0.62, |
| "grad_norm": 5.124319076538086, |
| "learning_rate": 3.5050847457627122e-06, |
| "loss": 1.2272, |
| "step": 24830 |
| }, |
| { |
| "epoch": 0.62, |
| "grad_norm": 7.30942440032959, |
| "learning_rate": 3.498305084745763e-06, |
| "loss": 1.3819, |
| "step": 24840 |
| }, |
| { |
| "epoch": 0.62, |
| "grad_norm": 7.727287769317627, |
| "learning_rate": 3.4915254237288138e-06, |
| "loss": 1.1956, |
| "step": 24850 |
| }, |
| { |
| "epoch": 0.62, |
| "grad_norm": 5.2934112548828125, |
| "learning_rate": 3.4847457627118648e-06, |
| "loss": 1.3141, |
| "step": 24860 |
| }, |
| { |
| "epoch": 0.62, |
| "grad_norm": 7.7370500564575195, |
| "learning_rate": 3.4779661016949157e-06, |
| "loss": 1.2418, |
| "step": 24870 |
| }, |
| { |
| "epoch": 0.62, |
| "grad_norm": 5.379338264465332, |
| "learning_rate": 3.4711864406779667e-06, |
| "loss": 1.435, |
| "step": 24880 |
| }, |
| { |
| "epoch": 0.62, |
| "grad_norm": 5.592279434204102, |
| "learning_rate": 3.464406779661017e-06, |
| "loss": 1.324, |
| "step": 24890 |
| }, |
| { |
| "epoch": 0.62, |
| "grad_norm": 4.178751468658447, |
| "learning_rate": 3.457627118644068e-06, |
| "loss": 1.4095, |
| "step": 24900 |
| }, |
| { |
| "epoch": 0.62, |
| "grad_norm": 2.7852957248687744, |
| "learning_rate": 3.450847457627119e-06, |
| "loss": 1.1834, |
| "step": 24910 |
| }, |
| { |
| "epoch": 0.62, |
| "grad_norm": 12.001542091369629, |
| "learning_rate": 3.4440677966101698e-06, |
| "loss": 1.3294, |
| "step": 24920 |
| }, |
| { |
| "epoch": 0.62, |
| "grad_norm": 4.010140895843506, |
| "learning_rate": 3.4372881355932207e-06, |
| "loss": 1.4615, |
| "step": 24930 |
| }, |
| { |
| "epoch": 0.62, |
| "grad_norm": 2.7130627632141113, |
| "learning_rate": 3.4305084745762713e-06, |
| "loss": 1.4789, |
| "step": 24940 |
| }, |
| { |
| "epoch": 0.62, |
| "grad_norm": 5.209987640380859, |
| "learning_rate": 3.4237288135593223e-06, |
| "loss": 1.2389, |
| "step": 24950 |
| }, |
| { |
| "epoch": 0.62, |
| "grad_norm": 4.1047515869140625, |
| "learning_rate": 3.4169491525423733e-06, |
| "loss": 1.3449, |
| "step": 24960 |
| }, |
| { |
| "epoch": 0.62, |
| "grad_norm": 19.238649368286133, |
| "learning_rate": 3.4101694915254242e-06, |
| "loss": 1.3224, |
| "step": 24970 |
| }, |
| { |
| "epoch": 0.62, |
| "grad_norm": 11.792010307312012, |
| "learning_rate": 3.403389830508475e-06, |
| "loss": 1.1061, |
| "step": 24980 |
| }, |
| { |
| "epoch": 0.62, |
| "grad_norm": 10.934020042419434, |
| "learning_rate": 3.3966101694915253e-06, |
| "loss": 1.4179, |
| "step": 24990 |
| }, |
| { |
| "epoch": 0.62, |
| "grad_norm": 1.5776662826538086, |
| "learning_rate": 3.3898305084745763e-06, |
| "loss": 1.3254, |
| "step": 25000 |
| }, |
| { |
| "epoch": 0.62, |
| "eval_loss": 1.3409814834594727, |
| "eval_runtime": 66.1173, |
| "eval_samples_per_second": 15.125, |
| "eval_steps_per_second": 15.125, |
| "step": 25000 |
| }, |
| { |
| "epoch": 0.63, |
| "grad_norm": 7.775777339935303, |
| "learning_rate": 3.3830508474576273e-06, |
| "loss": 1.5704, |
| "step": 25010 |
| }, |
| { |
| "epoch": 0.63, |
| "grad_norm": 6.8910698890686035, |
| "learning_rate": 3.3762711864406783e-06, |
| "loss": 1.2296, |
| "step": 25020 |
| }, |
| { |
| "epoch": 0.63, |
| "grad_norm": 3.11647367477417, |
| "learning_rate": 3.3694915254237292e-06, |
| "loss": 1.1465, |
| "step": 25030 |
| }, |
| { |
| "epoch": 0.63, |
| "grad_norm": 4.026238441467285, |
| "learning_rate": 3.3627118644067802e-06, |
| "loss": 1.4266, |
| "step": 25040 |
| }, |
| { |
| "epoch": 0.63, |
| "grad_norm": 3.3097872734069824, |
| "learning_rate": 3.3559322033898308e-06, |
| "loss": 1.4, |
| "step": 25050 |
| }, |
| { |
| "epoch": 0.63, |
| "grad_norm": 3.2518560886383057, |
| "learning_rate": 3.3491525423728817e-06, |
| "loss": 1.3535, |
| "step": 25060 |
| }, |
| { |
| "epoch": 0.63, |
| "grad_norm": 4.915974140167236, |
| "learning_rate": 3.3423728813559327e-06, |
| "loss": 1.3441, |
| "step": 25070 |
| }, |
| { |
| "epoch": 0.63, |
| "grad_norm": 4.099907398223877, |
| "learning_rate": 3.3355932203389833e-06, |
| "loss": 1.4649, |
| "step": 25080 |
| }, |
| { |
| "epoch": 0.63, |
| "grad_norm": 7.00651741027832, |
| "learning_rate": 3.3288135593220343e-06, |
| "loss": 1.4445, |
| "step": 25090 |
| }, |
| { |
| "epoch": 0.63, |
| "grad_norm": 2.7813143730163574, |
| "learning_rate": 3.322033898305085e-06, |
| "loss": 1.2997, |
| "step": 25100 |
| }, |
| { |
| "epoch": 0.63, |
| "grad_norm": 4.583659648895264, |
| "learning_rate": 3.3152542372881358e-06, |
| "loss": 1.3112, |
| "step": 25110 |
| }, |
| { |
| "epoch": 0.63, |
| "grad_norm": 9.5046968460083, |
| "learning_rate": 3.3084745762711868e-06, |
| "loss": 1.3488, |
| "step": 25120 |
| }, |
| { |
| "epoch": 0.63, |
| "grad_norm": 4.829354286193848, |
| "learning_rate": 3.3016949152542377e-06, |
| "loss": 1.2801, |
| "step": 25130 |
| }, |
| { |
| "epoch": 0.63, |
| "grad_norm": 8.664487838745117, |
| "learning_rate": 3.2949152542372887e-06, |
| "loss": 1.3001, |
| "step": 25140 |
| }, |
| { |
| "epoch": 0.63, |
| "grad_norm": 1.5184565782546997, |
| "learning_rate": 3.288135593220339e-06, |
| "loss": 1.3178, |
| "step": 25150 |
| }, |
| { |
| "epoch": 0.63, |
| "grad_norm": 2.527219295501709, |
| "learning_rate": 3.28135593220339e-06, |
| "loss": 1.1742, |
| "step": 25160 |
| }, |
| { |
| "epoch": 0.63, |
| "grad_norm": 1.9469351768493652, |
| "learning_rate": 3.274576271186441e-06, |
| "loss": 1.3338, |
| "step": 25170 |
| }, |
| { |
| "epoch": 0.63, |
| "grad_norm": 9.183582305908203, |
| "learning_rate": 3.2677966101694918e-06, |
| "loss": 1.2689, |
| "step": 25180 |
| }, |
| { |
| "epoch": 0.63, |
| "grad_norm": 8.95080280303955, |
| "learning_rate": 3.2610169491525428e-06, |
| "loss": 1.4399, |
| "step": 25190 |
| }, |
| { |
| "epoch": 0.63, |
| "grad_norm": 3.5885729789733887, |
| "learning_rate": 3.2542372881355933e-06, |
| "loss": 1.2411, |
| "step": 25200 |
| }, |
| { |
| "epoch": 0.63, |
| "grad_norm": 6.78897762298584, |
| "learning_rate": 3.2474576271186443e-06, |
| "loss": 1.1516, |
| "step": 25210 |
| }, |
| { |
| "epoch": 0.63, |
| "grad_norm": 16.358980178833008, |
| "learning_rate": 3.2406779661016953e-06, |
| "loss": 1.3401, |
| "step": 25220 |
| }, |
| { |
| "epoch": 0.63, |
| "grad_norm": 13.469403266906738, |
| "learning_rate": 3.2338983050847462e-06, |
| "loss": 1.1778, |
| "step": 25230 |
| }, |
| { |
| "epoch": 0.63, |
| "grad_norm": 9.235873222351074, |
| "learning_rate": 3.2271186440677972e-06, |
| "loss": 1.463, |
| "step": 25240 |
| }, |
| { |
| "epoch": 0.63, |
| "grad_norm": 5.634754657745361, |
| "learning_rate": 3.2203389830508473e-06, |
| "loss": 1.4592, |
| "step": 25250 |
| }, |
| { |
| "epoch": 0.63, |
| "grad_norm": 7.916177749633789, |
| "learning_rate": 3.2135593220338983e-06, |
| "loss": 1.2284, |
| "step": 25260 |
| }, |
| { |
| "epoch": 0.63, |
| "grad_norm": 3.810818672180176, |
| "learning_rate": 3.2067796610169493e-06, |
| "loss": 1.3948, |
| "step": 25270 |
| }, |
| { |
| "epoch": 0.63, |
| "grad_norm": 3.151276111602783, |
| "learning_rate": 3.2000000000000003e-06, |
| "loss": 1.3252, |
| "step": 25280 |
| }, |
| { |
| "epoch": 0.63, |
| "grad_norm": 4.654315948486328, |
| "learning_rate": 3.1932203389830513e-06, |
| "loss": 1.3577, |
| "step": 25290 |
| }, |
| { |
| "epoch": 0.63, |
| "grad_norm": 7.808940410614014, |
| "learning_rate": 3.186440677966102e-06, |
| "loss": 1.419, |
| "step": 25300 |
| }, |
| { |
| "epoch": 0.63, |
| "grad_norm": 6.512801647186279, |
| "learning_rate": 3.1796610169491528e-06, |
| "loss": 1.2751, |
| "step": 25310 |
| }, |
| { |
| "epoch": 0.63, |
| "grad_norm": 8.710541725158691, |
| "learning_rate": 3.1728813559322038e-06, |
| "loss": 1.2923, |
| "step": 25320 |
| }, |
| { |
| "epoch": 0.63, |
| "grad_norm": 3.160752296447754, |
| "learning_rate": 3.1661016949152547e-06, |
| "loss": 1.3357, |
| "step": 25330 |
| }, |
| { |
| "epoch": 0.63, |
| "grad_norm": 2.4420571327209473, |
| "learning_rate": 3.1593220338983053e-06, |
| "loss": 1.4122, |
| "step": 25340 |
| }, |
| { |
| "epoch": 0.63, |
| "grad_norm": 12.090134620666504, |
| "learning_rate": 3.1525423728813563e-06, |
| "loss": 1.1703, |
| "step": 25350 |
| }, |
| { |
| "epoch": 0.63, |
| "grad_norm": 5.109529972076416, |
| "learning_rate": 3.145762711864407e-06, |
| "loss": 1.4391, |
| "step": 25360 |
| }, |
| { |
| "epoch": 0.63, |
| "grad_norm": 3.0033655166625977, |
| "learning_rate": 3.138983050847458e-06, |
| "loss": 1.2308, |
| "step": 25370 |
| }, |
| { |
| "epoch": 0.63, |
| "grad_norm": 6.253593921661377, |
| "learning_rate": 3.1322033898305088e-06, |
| "loss": 1.1566, |
| "step": 25380 |
| }, |
| { |
| "epoch": 0.63, |
| "grad_norm": 5.59571647644043, |
| "learning_rate": 3.1254237288135598e-06, |
| "loss": 1.3234, |
| "step": 25390 |
| }, |
| { |
| "epoch": 0.64, |
| "grad_norm": 15.45341968536377, |
| "learning_rate": 3.1186440677966107e-06, |
| "loss": 1.3298, |
| "step": 25400 |
| }, |
| { |
| "epoch": 0.64, |
| "grad_norm": 4.012663841247559, |
| "learning_rate": 3.111864406779661e-06, |
| "loss": 1.3515, |
| "step": 25410 |
| }, |
| { |
| "epoch": 0.64, |
| "grad_norm": 11.210050582885742, |
| "learning_rate": 3.105084745762712e-06, |
| "loss": 1.5219, |
| "step": 25420 |
| }, |
| { |
| "epoch": 0.64, |
| "grad_norm": 12.784728050231934, |
| "learning_rate": 3.098305084745763e-06, |
| "loss": 1.1988, |
| "step": 25430 |
| }, |
| { |
| "epoch": 0.64, |
| "grad_norm": 4.20169734954834, |
| "learning_rate": 3.091525423728814e-06, |
| "loss": 1.421, |
| "step": 25440 |
| }, |
| { |
| "epoch": 0.64, |
| "grad_norm": 6.456515312194824, |
| "learning_rate": 3.0847457627118648e-06, |
| "loss": 1.1633, |
| "step": 25450 |
| }, |
| { |
| "epoch": 0.64, |
| "grad_norm": 6.611431121826172, |
| "learning_rate": 3.0779661016949153e-06, |
| "loss": 1.2853, |
| "step": 25460 |
| }, |
| { |
| "epoch": 0.64, |
| "grad_norm": 4.3997015953063965, |
| "learning_rate": 3.0711864406779663e-06, |
| "loss": 1.2609, |
| "step": 25470 |
| }, |
| { |
| "epoch": 0.64, |
| "grad_norm": 12.016313552856445, |
| "learning_rate": 3.0644067796610173e-06, |
| "loss": 1.1744, |
| "step": 25480 |
| }, |
| { |
| "epoch": 0.64, |
| "grad_norm": 5.508640289306641, |
| "learning_rate": 3.0576271186440683e-06, |
| "loss": 1.2545, |
| "step": 25490 |
| }, |
| { |
| "epoch": 0.64, |
| "grad_norm": 7.7658305168151855, |
| "learning_rate": 3.0508474576271192e-06, |
| "loss": 1.132, |
| "step": 25500 |
| }, |
| { |
| "epoch": 0.64, |
| "eval_loss": 1.3264555931091309, |
| "eval_runtime": 66.1247, |
| "eval_samples_per_second": 15.123, |
| "eval_steps_per_second": 15.123, |
| "step": 25500 |
| }, |
| { |
| "epoch": 0.64, |
| "grad_norm": 4.297203063964844, |
| "learning_rate": 3.0440677966101694e-06, |
| "loss": 1.0663, |
| "step": 25510 |
| }, |
| { |
| "epoch": 0.64, |
| "grad_norm": 12.412454605102539, |
| "learning_rate": 3.0372881355932203e-06, |
| "loss": 1.1782, |
| "step": 25520 |
| }, |
| { |
| "epoch": 0.64, |
| "grad_norm": 10.640459060668945, |
| "learning_rate": 3.0305084745762713e-06, |
| "loss": 1.4982, |
| "step": 25530 |
| }, |
| { |
| "epoch": 0.64, |
| "grad_norm": 12.518424987792969, |
| "learning_rate": 3.0237288135593223e-06, |
| "loss": 1.4771, |
| "step": 25540 |
| }, |
| { |
| "epoch": 0.64, |
| "grad_norm": 7.445399284362793, |
| "learning_rate": 3.0169491525423733e-06, |
| "loss": 1.2959, |
| "step": 25550 |
| }, |
| { |
| "epoch": 0.64, |
| "grad_norm": 3.960470676422119, |
| "learning_rate": 3.010169491525424e-06, |
| "loss": 1.4464, |
| "step": 25560 |
| }, |
| { |
| "epoch": 0.64, |
| "grad_norm": 11.259561538696289, |
| "learning_rate": 3.003389830508475e-06, |
| "loss": 1.2178, |
| "step": 25570 |
| }, |
| { |
| "epoch": 0.64, |
| "grad_norm": 7.605580806732178, |
| "learning_rate": 2.9966101694915258e-06, |
| "loss": 1.3286, |
| "step": 25580 |
| }, |
| { |
| "epoch": 0.64, |
| "grad_norm": 4.82462215423584, |
| "learning_rate": 2.9898305084745768e-06, |
| "loss": 1.4727, |
| "step": 25590 |
| }, |
| { |
| "epoch": 0.64, |
| "grad_norm": 4.511000633239746, |
| "learning_rate": 2.9830508474576277e-06, |
| "loss": 1.3572, |
| "step": 25600 |
| }, |
| { |
| "epoch": 0.64, |
| "grad_norm": 6.090604305267334, |
| "learning_rate": 2.9762711864406783e-06, |
| "loss": 1.4802, |
| "step": 25610 |
| }, |
| { |
| "epoch": 0.64, |
| "grad_norm": 9.687544822692871, |
| "learning_rate": 2.969491525423729e-06, |
| "loss": 1.3652, |
| "step": 25620 |
| }, |
| { |
| "epoch": 0.64, |
| "grad_norm": 12.760902404785156, |
| "learning_rate": 2.96271186440678e-06, |
| "loss": 1.3634, |
| "step": 25630 |
| }, |
| { |
| "epoch": 0.64, |
| "grad_norm": 6.688235759735107, |
| "learning_rate": 2.955932203389831e-06, |
| "loss": 1.209, |
| "step": 25640 |
| }, |
| { |
| "epoch": 0.64, |
| "grad_norm": 5.238013744354248, |
| "learning_rate": 2.9491525423728818e-06, |
| "loss": 1.5462, |
| "step": 25650 |
| }, |
| { |
| "epoch": 0.64, |
| "grad_norm": 18.444650650024414, |
| "learning_rate": 2.9423728813559327e-06, |
| "loss": 1.2963, |
| "step": 25660 |
| }, |
| { |
| "epoch": 0.64, |
| "grad_norm": 5.625192642211914, |
| "learning_rate": 2.935593220338983e-06, |
| "loss": 1.3514, |
| "step": 25670 |
| }, |
| { |
| "epoch": 0.64, |
| "grad_norm": 5.223139762878418, |
| "learning_rate": 2.928813559322034e-06, |
| "loss": 1.2339, |
| "step": 25680 |
| }, |
| { |
| "epoch": 0.64, |
| "grad_norm": 4.5006422996521, |
| "learning_rate": 2.922033898305085e-06, |
| "loss": 1.4424, |
| "step": 25690 |
| }, |
| { |
| "epoch": 0.64, |
| "grad_norm": 9.127293586730957, |
| "learning_rate": 2.915254237288136e-06, |
| "loss": 1.2334, |
| "step": 25700 |
| }, |
| { |
| "epoch": 0.64, |
| "grad_norm": 5.78049373626709, |
| "learning_rate": 2.9084745762711868e-06, |
| "loss": 1.301, |
| "step": 25710 |
| }, |
| { |
| "epoch": 0.64, |
| "grad_norm": 5.395204544067383, |
| "learning_rate": 2.9016949152542373e-06, |
| "loss": 1.1889, |
| "step": 25720 |
| }, |
| { |
| "epoch": 0.64, |
| "grad_norm": 2.3502519130706787, |
| "learning_rate": 2.8949152542372883e-06, |
| "loss": 1.41, |
| "step": 25730 |
| }, |
| { |
| "epoch": 0.64, |
| "grad_norm": 3.3766727447509766, |
| "learning_rate": 2.8881355932203393e-06, |
| "loss": 1.2768, |
| "step": 25740 |
| }, |
| { |
| "epoch": 0.64, |
| "grad_norm": 6.30269718170166, |
| "learning_rate": 2.8813559322033903e-06, |
| "loss": 1.3432, |
| "step": 25750 |
| }, |
| { |
| "epoch": 0.64, |
| "grad_norm": 7.727334022521973, |
| "learning_rate": 2.8745762711864412e-06, |
| "loss": 1.3465, |
| "step": 25760 |
| }, |
| { |
| "epoch": 0.64, |
| "grad_norm": 8.983928680419922, |
| "learning_rate": 2.8677966101694914e-06, |
| "loss": 1.1202, |
| "step": 25770 |
| }, |
| { |
| "epoch": 0.64, |
| "grad_norm": 4.910791397094727, |
| "learning_rate": 2.8610169491525424e-06, |
| "loss": 1.2038, |
| "step": 25780 |
| }, |
| { |
| "epoch": 0.64, |
| "grad_norm": 12.079176902770996, |
| "learning_rate": 2.8542372881355933e-06, |
| "loss": 1.1599, |
| "step": 25790 |
| }, |
| { |
| "epoch": 0.65, |
| "grad_norm": 3.6942214965820312, |
| "learning_rate": 2.8474576271186443e-06, |
| "loss": 1.2242, |
| "step": 25800 |
| }, |
| { |
| "epoch": 0.65, |
| "grad_norm": 2.4002459049224854, |
| "learning_rate": 2.8406779661016953e-06, |
| "loss": 1.2288, |
| "step": 25810 |
| }, |
| { |
| "epoch": 0.65, |
| "grad_norm": 4.8168840408325195, |
| "learning_rate": 2.833898305084746e-06, |
| "loss": 1.4489, |
| "step": 25820 |
| }, |
| { |
| "epoch": 0.65, |
| "grad_norm": 10.90778636932373, |
| "learning_rate": 2.827118644067797e-06, |
| "loss": 1.4108, |
| "step": 25830 |
| }, |
| { |
| "epoch": 0.65, |
| "grad_norm": 9.852614402770996, |
| "learning_rate": 2.820338983050848e-06, |
| "loss": 1.3685, |
| "step": 25840 |
| }, |
| { |
| "epoch": 0.65, |
| "grad_norm": 8.391303062438965, |
| "learning_rate": 2.8135593220338988e-06, |
| "loss": 1.2921, |
| "step": 25850 |
| }, |
| { |
| "epoch": 0.65, |
| "grad_norm": 5.348249912261963, |
| "learning_rate": 2.8067796610169497e-06, |
| "loss": 1.3463, |
| "step": 25860 |
| }, |
| { |
| "epoch": 0.65, |
| "grad_norm": 3.3569209575653076, |
| "learning_rate": 2.8000000000000003e-06, |
| "loss": 1.2877, |
| "step": 25870 |
| }, |
| { |
| "epoch": 0.65, |
| "grad_norm": 9.246468544006348, |
| "learning_rate": 2.793220338983051e-06, |
| "loss": 1.3478, |
| "step": 25880 |
| }, |
| { |
| "epoch": 0.65, |
| "grad_norm": 7.3121771812438965, |
| "learning_rate": 2.786440677966102e-06, |
| "loss": 1.3116, |
| "step": 25890 |
| }, |
| { |
| "epoch": 0.65, |
| "grad_norm": 7.324717044830322, |
| "learning_rate": 2.779661016949153e-06, |
| "loss": 1.4986, |
| "step": 25900 |
| }, |
| { |
| "epoch": 0.65, |
| "grad_norm": 6.7373738288879395, |
| "learning_rate": 2.7728813559322038e-06, |
| "loss": 1.5417, |
| "step": 25910 |
| }, |
| { |
| "epoch": 0.65, |
| "grad_norm": 10.05711555480957, |
| "learning_rate": 2.7661016949152548e-06, |
| "loss": 1.3439, |
| "step": 25920 |
| }, |
| { |
| "epoch": 0.65, |
| "grad_norm": 3.843493700027466, |
| "learning_rate": 2.7593220338983053e-06, |
| "loss": 1.1626, |
| "step": 25930 |
| }, |
| { |
| "epoch": 0.65, |
| "grad_norm": 9.513094902038574, |
| "learning_rate": 2.752542372881356e-06, |
| "loss": 1.3412, |
| "step": 25940 |
| }, |
| { |
| "epoch": 0.65, |
| "grad_norm": 8.064391136169434, |
| "learning_rate": 2.745762711864407e-06, |
| "loss": 1.3483, |
| "step": 25950 |
| }, |
| { |
| "epoch": 0.65, |
| "grad_norm": 10.491897583007812, |
| "learning_rate": 2.738983050847458e-06, |
| "loss": 1.3214, |
| "step": 25960 |
| }, |
| { |
| "epoch": 0.65, |
| "grad_norm": 3.9402971267700195, |
| "learning_rate": 2.732203389830509e-06, |
| "loss": 1.3803, |
| "step": 25970 |
| }, |
| { |
| "epoch": 0.65, |
| "grad_norm": 9.979386329650879, |
| "learning_rate": 2.7254237288135593e-06, |
| "loss": 1.1408, |
| "step": 25980 |
| }, |
| { |
| "epoch": 0.65, |
| "grad_norm": 7.461246490478516, |
| "learning_rate": 2.7186440677966103e-06, |
| "loss": 1.3098, |
| "step": 25990 |
| }, |
| { |
| "epoch": 0.65, |
| "grad_norm": 4.412223815917969, |
| "learning_rate": 2.7118644067796613e-06, |
| "loss": 1.457, |
| "step": 26000 |
| }, |
| { |
| "epoch": 0.65, |
| "eval_loss": 1.3418877124786377, |
| "eval_runtime": 66.1458, |
| "eval_samples_per_second": 15.118, |
| "eval_steps_per_second": 15.118, |
| "step": 26000 |
| }, |
| { |
| "epoch": 0.65, |
| "grad_norm": 4.992754936218262, |
| "learning_rate": 2.7050847457627123e-06, |
| "loss": 1.1937, |
| "step": 26010 |
| }, |
| { |
| "epoch": 0.65, |
| "grad_norm": 5.497439861297607, |
| "learning_rate": 2.6983050847457633e-06, |
| "loss": 1.3435, |
| "step": 26020 |
| }, |
| { |
| "epoch": 0.65, |
| "grad_norm": 4.283881664276123, |
| "learning_rate": 2.6915254237288134e-06, |
| "loss": 1.3997, |
| "step": 26030 |
| }, |
| { |
| "epoch": 0.65, |
| "grad_norm": 6.579436779022217, |
| "learning_rate": 2.6847457627118644e-06, |
| "loss": 1.4047, |
| "step": 26040 |
| }, |
| { |
| "epoch": 0.65, |
| "grad_norm": 4.805014133453369, |
| "learning_rate": 2.6779661016949153e-06, |
| "loss": 1.4021, |
| "step": 26050 |
| }, |
| { |
| "epoch": 0.65, |
| "grad_norm": 4.568453788757324, |
| "learning_rate": 2.6711864406779663e-06, |
| "loss": 1.4761, |
| "step": 26060 |
| }, |
| { |
| "epoch": 0.65, |
| "grad_norm": 5.559253692626953, |
| "learning_rate": 2.6644067796610173e-06, |
| "loss": 1.2958, |
| "step": 26070 |
| }, |
| { |
| "epoch": 0.65, |
| "grad_norm": 14.279343605041504, |
| "learning_rate": 2.657627118644068e-06, |
| "loss": 1.194, |
| "step": 26080 |
| }, |
| { |
| "epoch": 0.65, |
| "grad_norm": 15.45864486694336, |
| "learning_rate": 2.650847457627119e-06, |
| "loss": 1.2134, |
| "step": 26090 |
| }, |
| { |
| "epoch": 0.65, |
| "grad_norm": 17.310068130493164, |
| "learning_rate": 2.64406779661017e-06, |
| "loss": 1.2946, |
| "step": 26100 |
| }, |
| { |
| "epoch": 0.65, |
| "grad_norm": 4.31659460067749, |
| "learning_rate": 2.6372881355932208e-06, |
| "loss": 1.3783, |
| "step": 26110 |
| }, |
| { |
| "epoch": 0.65, |
| "grad_norm": 14.684484481811523, |
| "learning_rate": 2.6305084745762718e-06, |
| "loss": 1.3414, |
| "step": 26120 |
| }, |
| { |
| "epoch": 0.65, |
| "grad_norm": 5.934599876403809, |
| "learning_rate": 2.6237288135593223e-06, |
| "loss": 1.3266, |
| "step": 26130 |
| }, |
| { |
| "epoch": 0.65, |
| "grad_norm": 7.535737037658691, |
| "learning_rate": 2.616949152542373e-06, |
| "loss": 1.2579, |
| "step": 26140 |
| }, |
| { |
| "epoch": 0.65, |
| "grad_norm": 5.8215556144714355, |
| "learning_rate": 2.610169491525424e-06, |
| "loss": 1.304, |
| "step": 26150 |
| }, |
| { |
| "epoch": 0.65, |
| "grad_norm": 5.646538734436035, |
| "learning_rate": 2.603389830508475e-06, |
| "loss": 1.2792, |
| "step": 26160 |
| }, |
| { |
| "epoch": 0.65, |
| "grad_norm": 3.834282398223877, |
| "learning_rate": 2.596610169491526e-06, |
| "loss": 1.4717, |
| "step": 26170 |
| }, |
| { |
| "epoch": 0.65, |
| "grad_norm": 3.313835620880127, |
| "learning_rate": 2.5898305084745768e-06, |
| "loss": 1.3312, |
| "step": 26180 |
| }, |
| { |
| "epoch": 0.65, |
| "grad_norm": 20.453479766845703, |
| "learning_rate": 2.5830508474576273e-06, |
| "loss": 1.3784, |
| "step": 26190 |
| }, |
| { |
| "epoch": 0.66, |
| "grad_norm": 1.0731091499328613, |
| "learning_rate": 2.576271186440678e-06, |
| "loss": 1.4162, |
| "step": 26200 |
| }, |
| { |
| "epoch": 0.66, |
| "grad_norm": 7.5899577140808105, |
| "learning_rate": 2.569491525423729e-06, |
| "loss": 1.2536, |
| "step": 26210 |
| }, |
| { |
| "epoch": 0.66, |
| "grad_norm": 4.527303218841553, |
| "learning_rate": 2.56271186440678e-06, |
| "loss": 1.5048, |
| "step": 26220 |
| }, |
| { |
| "epoch": 0.66, |
| "grad_norm": 3.702897787094116, |
| "learning_rate": 2.555932203389831e-06, |
| "loss": 1.1666, |
| "step": 26230 |
| }, |
| { |
| "epoch": 0.66, |
| "grad_norm": 3.448979139328003, |
| "learning_rate": 2.5491525423728814e-06, |
| "loss": 1.2812, |
| "step": 26240 |
| }, |
| { |
| "epoch": 0.66, |
| "grad_norm": 15.188081741333008, |
| "learning_rate": 2.5423728813559323e-06, |
| "loss": 1.3521, |
| "step": 26250 |
| }, |
| { |
| "epoch": 0.66, |
| "grad_norm": 8.675294876098633, |
| "learning_rate": 2.5355932203389833e-06, |
| "loss": 1.4845, |
| "step": 26260 |
| }, |
| { |
| "epoch": 0.66, |
| "grad_norm": 5.673226356506348, |
| "learning_rate": 2.5288135593220343e-06, |
| "loss": 1.2264, |
| "step": 26270 |
| }, |
| { |
| "epoch": 0.66, |
| "grad_norm": 6.015854358673096, |
| "learning_rate": 2.5220338983050853e-06, |
| "loss": 1.3036, |
| "step": 26280 |
| }, |
| { |
| "epoch": 0.66, |
| "grad_norm": 12.492931365966797, |
| "learning_rate": 2.5152542372881354e-06, |
| "loss": 1.4434, |
| "step": 26290 |
| }, |
| { |
| "epoch": 0.66, |
| "grad_norm": 5.928922176361084, |
| "learning_rate": 2.5084745762711864e-06, |
| "loss": 1.3118, |
| "step": 26300 |
| }, |
| { |
| "epoch": 0.66, |
| "grad_norm": 5.230995178222656, |
| "learning_rate": 2.5016949152542374e-06, |
| "loss": 1.3524, |
| "step": 26310 |
| }, |
| { |
| "epoch": 0.66, |
| "grad_norm": 6.7296671867370605, |
| "learning_rate": 2.4949152542372883e-06, |
| "loss": 1.3651, |
| "step": 26320 |
| }, |
| { |
| "epoch": 0.66, |
| "grad_norm": 2.378596544265747, |
| "learning_rate": 2.488135593220339e-06, |
| "loss": 1.4236, |
| "step": 26330 |
| }, |
| { |
| "epoch": 0.66, |
| "grad_norm": 5.858238697052002, |
| "learning_rate": 2.48135593220339e-06, |
| "loss": 1.1553, |
| "step": 26340 |
| }, |
| { |
| "epoch": 0.66, |
| "grad_norm": 3.7590560913085938, |
| "learning_rate": 2.474576271186441e-06, |
| "loss": 1.4302, |
| "step": 26350 |
| }, |
| { |
| "epoch": 0.66, |
| "grad_norm": 10.852778434753418, |
| "learning_rate": 2.467796610169492e-06, |
| "loss": 1.1943, |
| "step": 26360 |
| }, |
| { |
| "epoch": 0.66, |
| "grad_norm": 11.347558975219727, |
| "learning_rate": 2.461016949152543e-06, |
| "loss": 1.2559, |
| "step": 26370 |
| }, |
| { |
| "epoch": 0.66, |
| "grad_norm": 6.081493854522705, |
| "learning_rate": 2.4542372881355933e-06, |
| "loss": 1.3532, |
| "step": 26380 |
| }, |
| { |
| "epoch": 0.66, |
| "grad_norm": 8.263628959655762, |
| "learning_rate": 2.4474576271186443e-06, |
| "loss": 1.2041, |
| "step": 26390 |
| }, |
| { |
| "epoch": 0.66, |
| "grad_norm": 7.144092082977295, |
| "learning_rate": 2.4406779661016953e-06, |
| "loss": 1.2193, |
| "step": 26400 |
| }, |
| { |
| "epoch": 0.66, |
| "grad_norm": 6.578696250915527, |
| "learning_rate": 2.433898305084746e-06, |
| "loss": 1.3139, |
| "step": 26410 |
| }, |
| { |
| "epoch": 0.66, |
| "grad_norm": 3.019033193588257, |
| "learning_rate": 2.427118644067797e-06, |
| "loss": 1.4642, |
| "step": 26420 |
| }, |
| { |
| "epoch": 0.66, |
| "grad_norm": 5.443304538726807, |
| "learning_rate": 2.4203389830508474e-06, |
| "loss": 1.3738, |
| "step": 26430 |
| }, |
| { |
| "epoch": 0.66, |
| "grad_norm": 10.051207542419434, |
| "learning_rate": 2.4135593220338984e-06, |
| "loss": 1.4396, |
| "step": 26440 |
| }, |
| { |
| "epoch": 0.66, |
| "grad_norm": 10.604415893554688, |
| "learning_rate": 2.4067796610169493e-06, |
| "loss": 1.2971, |
| "step": 26450 |
| }, |
| { |
| "epoch": 0.66, |
| "grad_norm": 9.754619598388672, |
| "learning_rate": 2.4000000000000003e-06, |
| "loss": 1.3421, |
| "step": 26460 |
| }, |
| { |
| "epoch": 0.66, |
| "grad_norm": 6.159849166870117, |
| "learning_rate": 2.393220338983051e-06, |
| "loss": 1.3461, |
| "step": 26470 |
| }, |
| { |
| "epoch": 0.66, |
| "grad_norm": 1.3210265636444092, |
| "learning_rate": 2.386440677966102e-06, |
| "loss": 1.2615, |
| "step": 26480 |
| }, |
| { |
| "epoch": 0.66, |
| "grad_norm": 5.67736291885376, |
| "learning_rate": 2.379661016949153e-06, |
| "loss": 1.3296, |
| "step": 26490 |
| }, |
| { |
| "epoch": 0.66, |
| "grad_norm": 4.055379867553711, |
| "learning_rate": 2.372881355932204e-06, |
| "loss": 1.4705, |
| "step": 26500 |
| }, |
| { |
| "epoch": 0.66, |
| "eval_loss": 1.2714667320251465, |
| "eval_runtime": 66.1328, |
| "eval_samples_per_second": 15.121, |
| "eval_steps_per_second": 15.121, |
| "step": 26500 |
| }, |
| { |
| "epoch": 0.66, |
| "grad_norm": 7.19535493850708, |
| "learning_rate": 2.3661016949152544e-06, |
| "loss": 1.2956, |
| "step": 26510 |
| }, |
| { |
| "epoch": 0.66, |
| "grad_norm": 7.952252388000488, |
| "learning_rate": 2.3593220338983053e-06, |
| "loss": 1.2952, |
| "step": 26520 |
| }, |
| { |
| "epoch": 0.66, |
| "grad_norm": 1.3878239393234253, |
| "learning_rate": 2.3525423728813563e-06, |
| "loss": 1.3957, |
| "step": 26530 |
| }, |
| { |
| "epoch": 0.66, |
| "grad_norm": 9.453435897827148, |
| "learning_rate": 2.345762711864407e-06, |
| "loss": 1.3314, |
| "step": 26540 |
| }, |
| { |
| "epoch": 0.66, |
| "grad_norm": 2.3859434127807617, |
| "learning_rate": 2.338983050847458e-06, |
| "loss": 1.3959, |
| "step": 26550 |
| }, |
| { |
| "epoch": 0.66, |
| "grad_norm": 4.052861213684082, |
| "learning_rate": 2.3322033898305084e-06, |
| "loss": 1.3097, |
| "step": 26560 |
| }, |
| { |
| "epoch": 0.66, |
| "grad_norm": 14.907791137695312, |
| "learning_rate": 2.3254237288135594e-06, |
| "loss": 1.3706, |
| "step": 26570 |
| }, |
| { |
| "epoch": 0.66, |
| "grad_norm": 7.020768642425537, |
| "learning_rate": 2.3186440677966103e-06, |
| "loss": 1.3536, |
| "step": 26580 |
| }, |
| { |
| "epoch": 0.66, |
| "grad_norm": 15.225879669189453, |
| "learning_rate": 2.3118644067796613e-06, |
| "loss": 1.354, |
| "step": 26590 |
| }, |
| { |
| "epoch": 0.67, |
| "grad_norm": 6.249582767486572, |
| "learning_rate": 2.305084745762712e-06, |
| "loss": 1.2721, |
| "step": 26600 |
| }, |
| { |
| "epoch": 0.67, |
| "grad_norm": 4.756400108337402, |
| "learning_rate": 2.298305084745763e-06, |
| "loss": 1.3044, |
| "step": 26610 |
| }, |
| { |
| "epoch": 0.67, |
| "grad_norm": 9.848575592041016, |
| "learning_rate": 2.291525423728814e-06, |
| "loss": 1.2471, |
| "step": 26620 |
| }, |
| { |
| "epoch": 0.67, |
| "grad_norm": 9.997851371765137, |
| "learning_rate": 2.284745762711865e-06, |
| "loss": 1.1673, |
| "step": 26630 |
| }, |
| { |
| "epoch": 0.67, |
| "grad_norm": 11.57490062713623, |
| "learning_rate": 2.2779661016949154e-06, |
| "loss": 1.3241, |
| "step": 26640 |
| }, |
| { |
| "epoch": 0.67, |
| "grad_norm": 8.282011032104492, |
| "learning_rate": 2.2711864406779663e-06, |
| "loss": 1.2821, |
| "step": 26650 |
| }, |
| { |
| "epoch": 0.67, |
| "grad_norm": 6.8680644035339355, |
| "learning_rate": 2.2644067796610173e-06, |
| "loss": 1.2355, |
| "step": 26660 |
| }, |
| { |
| "epoch": 0.67, |
| "grad_norm": 15.327707290649414, |
| "learning_rate": 2.257627118644068e-06, |
| "loss": 1.2088, |
| "step": 26670 |
| }, |
| { |
| "epoch": 0.67, |
| "grad_norm": 6.978293418884277, |
| "learning_rate": 2.250847457627119e-06, |
| "loss": 1.3372, |
| "step": 26680 |
| }, |
| { |
| "epoch": 0.67, |
| "grad_norm": 10.799981117248535, |
| "learning_rate": 2.2440677966101694e-06, |
| "loss": 1.4839, |
| "step": 26690 |
| }, |
| { |
| "epoch": 0.67, |
| "grad_norm": 4.861376762390137, |
| "learning_rate": 2.2372881355932204e-06, |
| "loss": 1.3361, |
| "step": 26700 |
| }, |
| { |
| "epoch": 0.67, |
| "grad_norm": 5.07356595993042, |
| "learning_rate": 2.2305084745762714e-06, |
| "loss": 1.1581, |
| "step": 26710 |
| }, |
| { |
| "epoch": 0.67, |
| "grad_norm": 16.509151458740234, |
| "learning_rate": 2.2237288135593223e-06, |
| "loss": 1.3014, |
| "step": 26720 |
| }, |
| { |
| "epoch": 0.67, |
| "grad_norm": 8.232512474060059, |
| "learning_rate": 2.216949152542373e-06, |
| "loss": 1.4513, |
| "step": 26730 |
| }, |
| { |
| "epoch": 0.67, |
| "grad_norm": 4.673596382141113, |
| "learning_rate": 2.210169491525424e-06, |
| "loss": 1.3162, |
| "step": 26740 |
| }, |
| { |
| "epoch": 0.67, |
| "grad_norm": 3.565079927444458, |
| "learning_rate": 2.203389830508475e-06, |
| "loss": 1.3423, |
| "step": 26750 |
| }, |
| { |
| "epoch": 0.67, |
| "grad_norm": 8.415205001831055, |
| "learning_rate": 2.196610169491526e-06, |
| "loss": 1.3341, |
| "step": 26760 |
| }, |
| { |
| "epoch": 0.67, |
| "grad_norm": 13.892396926879883, |
| "learning_rate": 2.1898305084745764e-06, |
| "loss": 1.3268, |
| "step": 26770 |
| }, |
| { |
| "epoch": 0.67, |
| "grad_norm": 12.167963981628418, |
| "learning_rate": 2.1830508474576273e-06, |
| "loss": 1.3763, |
| "step": 26780 |
| }, |
| { |
| "epoch": 0.67, |
| "grad_norm": 9.574051856994629, |
| "learning_rate": 2.1762711864406783e-06, |
| "loss": 1.5376, |
| "step": 26790 |
| }, |
| { |
| "epoch": 0.67, |
| "grad_norm": 12.77506160736084, |
| "learning_rate": 2.169491525423729e-06, |
| "loss": 1.2457, |
| "step": 26800 |
| }, |
| { |
| "epoch": 0.67, |
| "grad_norm": 7.312320232391357, |
| "learning_rate": 2.16271186440678e-06, |
| "loss": 1.3077, |
| "step": 26810 |
| }, |
| { |
| "epoch": 0.67, |
| "grad_norm": 10.53618335723877, |
| "learning_rate": 2.1559322033898304e-06, |
| "loss": 1.3706, |
| "step": 26820 |
| }, |
| { |
| "epoch": 0.67, |
| "grad_norm": 4.399540424346924, |
| "learning_rate": 2.1491525423728814e-06, |
| "loss": 1.3237, |
| "step": 26830 |
| }, |
| { |
| "epoch": 0.67, |
| "grad_norm": 8.757078170776367, |
| "learning_rate": 2.1423728813559324e-06, |
| "loss": 1.2385, |
| "step": 26840 |
| }, |
| { |
| "epoch": 0.67, |
| "grad_norm": 3.866237163543701, |
| "learning_rate": 2.1355932203389833e-06, |
| "loss": 1.4038, |
| "step": 26850 |
| }, |
| { |
| "epoch": 0.67, |
| "grad_norm": 4.188089370727539, |
| "learning_rate": 2.128813559322034e-06, |
| "loss": 1.5465, |
| "step": 26860 |
| }, |
| { |
| "epoch": 0.67, |
| "grad_norm": 10.617339134216309, |
| "learning_rate": 2.122033898305085e-06, |
| "loss": 1.1873, |
| "step": 26870 |
| }, |
| { |
| "epoch": 0.67, |
| "grad_norm": 3.4754912853240967, |
| "learning_rate": 2.115254237288136e-06, |
| "loss": 1.2539, |
| "step": 26880 |
| }, |
| { |
| "epoch": 0.67, |
| "grad_norm": 6.057491302490234, |
| "learning_rate": 2.108474576271187e-06, |
| "loss": 1.2034, |
| "step": 26890 |
| }, |
| { |
| "epoch": 0.67, |
| "grad_norm": 9.13399887084961, |
| "learning_rate": 2.1016949152542374e-06, |
| "loss": 1.4333, |
| "step": 26900 |
| }, |
| { |
| "epoch": 0.67, |
| "grad_norm": 14.797123908996582, |
| "learning_rate": 2.0949152542372883e-06, |
| "loss": 1.1961, |
| "step": 26910 |
| }, |
| { |
| "epoch": 0.67, |
| "grad_norm": 6.3039398193359375, |
| "learning_rate": 2.0881355932203393e-06, |
| "loss": 1.0776, |
| "step": 26920 |
| }, |
| { |
| "epoch": 0.67, |
| "grad_norm": 5.700479507446289, |
| "learning_rate": 2.08135593220339e-06, |
| "loss": 1.5808, |
| "step": 26930 |
| }, |
| { |
| "epoch": 0.67, |
| "grad_norm": 17.28398895263672, |
| "learning_rate": 2.074576271186441e-06, |
| "loss": 1.4131, |
| "step": 26940 |
| }, |
| { |
| "epoch": 0.67, |
| "grad_norm": 5.036632061004639, |
| "learning_rate": 2.0677966101694914e-06, |
| "loss": 1.1761, |
| "step": 26950 |
| }, |
| { |
| "epoch": 0.67, |
| "grad_norm": 4.72507381439209, |
| "learning_rate": 2.0610169491525424e-06, |
| "loss": 1.3633, |
| "step": 26960 |
| }, |
| { |
| "epoch": 0.67, |
| "grad_norm": 18.702543258666992, |
| "learning_rate": 2.0542372881355934e-06, |
| "loss": 1.3922, |
| "step": 26970 |
| }, |
| { |
| "epoch": 0.67, |
| "grad_norm": 11.218758583068848, |
| "learning_rate": 2.0474576271186443e-06, |
| "loss": 1.3427, |
| "step": 26980 |
| }, |
| { |
| "epoch": 0.67, |
| "grad_norm": 5.716217517852783, |
| "learning_rate": 2.0406779661016953e-06, |
| "loss": 1.2603, |
| "step": 26990 |
| }, |
| { |
| "epoch": 0.68, |
| "grad_norm": 8.085409164428711, |
| "learning_rate": 2.033898305084746e-06, |
| "loss": 1.4003, |
| "step": 27000 |
| }, |
| { |
| "epoch": 0.68, |
| "eval_loss": 1.3196759223937988, |
| "eval_runtime": 66.1008, |
| "eval_samples_per_second": 15.128, |
| "eval_steps_per_second": 15.128, |
| "step": 27000 |
| }, |
| { |
| "epoch": 0.68, |
| "grad_norm": 2.983642578125, |
| "learning_rate": 2.027118644067797e-06, |
| "loss": 1.2987, |
| "step": 27010 |
| }, |
| { |
| "epoch": 0.68, |
| "grad_norm": 3.0962276458740234, |
| "learning_rate": 2.020338983050848e-06, |
| "loss": 1.3189, |
| "step": 27020 |
| }, |
| { |
| "epoch": 0.68, |
| "grad_norm": 4.857142925262451, |
| "learning_rate": 2.0135593220338984e-06, |
| "loss": 1.3651, |
| "step": 27030 |
| }, |
| { |
| "epoch": 0.68, |
| "grad_norm": 4.3937482833862305, |
| "learning_rate": 2.0067796610169494e-06, |
| "loss": 1.4407, |
| "step": 27040 |
| }, |
| { |
| "epoch": 0.68, |
| "grad_norm": 4.091692924499512, |
| "learning_rate": 2.0000000000000003e-06, |
| "loss": 1.2065, |
| "step": 27050 |
| }, |
| { |
| "epoch": 0.68, |
| "grad_norm": 7.725543975830078, |
| "learning_rate": 1.993220338983051e-06, |
| "loss": 1.1836, |
| "step": 27060 |
| }, |
| { |
| "epoch": 0.68, |
| "grad_norm": 5.124769687652588, |
| "learning_rate": 1.986440677966102e-06, |
| "loss": 1.3262, |
| "step": 27070 |
| }, |
| { |
| "epoch": 0.68, |
| "grad_norm": 5.097384452819824, |
| "learning_rate": 1.9796610169491524e-06, |
| "loss": 1.3057, |
| "step": 27080 |
| }, |
| { |
| "epoch": 0.68, |
| "grad_norm": 3.207469940185547, |
| "learning_rate": 1.9728813559322034e-06, |
| "loss": 1.241, |
| "step": 27090 |
| }, |
| { |
| "epoch": 0.68, |
| "grad_norm": 8.80706787109375, |
| "learning_rate": 1.9661016949152544e-06, |
| "loss": 1.4066, |
| "step": 27100 |
| }, |
| { |
| "epoch": 0.68, |
| "grad_norm": 2.1298069953918457, |
| "learning_rate": 1.9593220338983053e-06, |
| "loss": 1.3394, |
| "step": 27110 |
| }, |
| { |
| "epoch": 0.68, |
| "grad_norm": 9.768424034118652, |
| "learning_rate": 1.9525423728813563e-06, |
| "loss": 1.4714, |
| "step": 27120 |
| }, |
| { |
| "epoch": 0.68, |
| "grad_norm": 16.757766723632812, |
| "learning_rate": 1.945762711864407e-06, |
| "loss": 1.4322, |
| "step": 27130 |
| }, |
| { |
| "epoch": 0.68, |
| "grad_norm": 4.268533229827881, |
| "learning_rate": 1.938983050847458e-06, |
| "loss": 1.3988, |
| "step": 27140 |
| }, |
| { |
| "epoch": 0.68, |
| "grad_norm": 7.853205680847168, |
| "learning_rate": 1.932203389830509e-06, |
| "loss": 1.2837, |
| "step": 27150 |
| }, |
| { |
| "epoch": 0.68, |
| "grad_norm": 3.1779301166534424, |
| "learning_rate": 1.9254237288135594e-06, |
| "loss": 1.3075, |
| "step": 27160 |
| }, |
| { |
| "epoch": 0.68, |
| "grad_norm": 7.716470718383789, |
| "learning_rate": 1.9186440677966104e-06, |
| "loss": 1.2207, |
| "step": 27170 |
| }, |
| { |
| "epoch": 0.68, |
| "grad_norm": 4.0963029861450195, |
| "learning_rate": 1.9118644067796613e-06, |
| "loss": 1.3241, |
| "step": 27180 |
| }, |
| { |
| "epoch": 0.68, |
| "grad_norm": 10.319551467895508, |
| "learning_rate": 1.9050847457627119e-06, |
| "loss": 1.4133, |
| "step": 27190 |
| }, |
| { |
| "epoch": 0.68, |
| "grad_norm": 4.663591384887695, |
| "learning_rate": 1.8983050847457629e-06, |
| "loss": 1.2505, |
| "step": 27200 |
| }, |
| { |
| "epoch": 0.68, |
| "grad_norm": 8.912657737731934, |
| "learning_rate": 1.8915254237288136e-06, |
| "loss": 1.2057, |
| "step": 27210 |
| }, |
| { |
| "epoch": 0.68, |
| "grad_norm": 11.967384338378906, |
| "learning_rate": 1.8847457627118646e-06, |
| "loss": 1.4043, |
| "step": 27220 |
| }, |
| { |
| "epoch": 0.68, |
| "grad_norm": 19.304821014404297, |
| "learning_rate": 1.8779661016949156e-06, |
| "loss": 1.3083, |
| "step": 27230 |
| }, |
| { |
| "epoch": 0.68, |
| "grad_norm": 4.1671600341796875, |
| "learning_rate": 1.8711864406779661e-06, |
| "loss": 1.415, |
| "step": 27240 |
| }, |
| { |
| "epoch": 0.68, |
| "grad_norm": 3.430443525314331, |
| "learning_rate": 1.8644067796610171e-06, |
| "loss": 1.3154, |
| "step": 27250 |
| }, |
| { |
| "epoch": 0.68, |
| "grad_norm": 8.876851081848145, |
| "learning_rate": 1.857627118644068e-06, |
| "loss": 1.2701, |
| "step": 27260 |
| }, |
| { |
| "epoch": 0.68, |
| "grad_norm": 12.077353477478027, |
| "learning_rate": 1.8508474576271189e-06, |
| "loss": 1.3744, |
| "step": 27270 |
| }, |
| { |
| "epoch": 0.68, |
| "grad_norm": 1.86147940158844, |
| "learning_rate": 1.8440677966101696e-06, |
| "loss": 1.4091, |
| "step": 27280 |
| }, |
| { |
| "epoch": 0.68, |
| "grad_norm": 6.873291969299316, |
| "learning_rate": 1.8372881355932204e-06, |
| "loss": 1.3488, |
| "step": 27290 |
| }, |
| { |
| "epoch": 0.68, |
| "grad_norm": 5.088196754455566, |
| "learning_rate": 1.8305084745762714e-06, |
| "loss": 1.437, |
| "step": 27300 |
| }, |
| { |
| "epoch": 0.68, |
| "grad_norm": 3.1845881938934326, |
| "learning_rate": 1.8237288135593223e-06, |
| "loss": 1.3961, |
| "step": 27310 |
| }, |
| { |
| "epoch": 0.68, |
| "grad_norm": 8.228582382202148, |
| "learning_rate": 1.816949152542373e-06, |
| "loss": 1.1022, |
| "step": 27320 |
| }, |
| { |
| "epoch": 0.68, |
| "grad_norm": 15.932997703552246, |
| "learning_rate": 1.8101694915254239e-06, |
| "loss": 1.3529, |
| "step": 27330 |
| }, |
| { |
| "epoch": 0.68, |
| "grad_norm": 1.3720171451568604, |
| "learning_rate": 1.8033898305084746e-06, |
| "loss": 1.2631, |
| "step": 27340 |
| }, |
| { |
| "epoch": 0.68, |
| "grad_norm": 3.995771646499634, |
| "learning_rate": 1.7966101694915256e-06, |
| "loss": 1.2678, |
| "step": 27350 |
| }, |
| { |
| "epoch": 0.68, |
| "grad_norm": 5.7671895027160645, |
| "learning_rate": 1.7898305084745766e-06, |
| "loss": 1.329, |
| "step": 27360 |
| }, |
| { |
| "epoch": 0.68, |
| "grad_norm": 3.57239031791687, |
| "learning_rate": 1.7830508474576271e-06, |
| "loss": 1.2557, |
| "step": 27370 |
| }, |
| { |
| "epoch": 0.68, |
| "grad_norm": 8.127071380615234, |
| "learning_rate": 1.7762711864406781e-06, |
| "loss": 1.3917, |
| "step": 27380 |
| }, |
| { |
| "epoch": 0.68, |
| "grad_norm": 11.008809089660645, |
| "learning_rate": 1.769491525423729e-06, |
| "loss": 1.4125, |
| "step": 27390 |
| }, |
| { |
| "epoch": 0.69, |
| "grad_norm": 3.305449962615967, |
| "learning_rate": 1.7627118644067799e-06, |
| "loss": 1.4367, |
| "step": 27400 |
| }, |
| { |
| "epoch": 0.69, |
| "grad_norm": 4.471165180206299, |
| "learning_rate": 1.7559322033898306e-06, |
| "loss": 1.1336, |
| "step": 27410 |
| }, |
| { |
| "epoch": 0.69, |
| "grad_norm": 4.836517810821533, |
| "learning_rate": 1.7491525423728814e-06, |
| "loss": 1.37, |
| "step": 27420 |
| }, |
| { |
| "epoch": 0.69, |
| "grad_norm": 6.102312088012695, |
| "learning_rate": 1.7423728813559324e-06, |
| "loss": 1.3035, |
| "step": 27430 |
| }, |
| { |
| "epoch": 0.69, |
| "grad_norm": 4.232480049133301, |
| "learning_rate": 1.7355932203389834e-06, |
| "loss": 1.2197, |
| "step": 27440 |
| }, |
| { |
| "epoch": 0.69, |
| "grad_norm": 9.582504272460938, |
| "learning_rate": 1.728813559322034e-06, |
| "loss": 1.2699, |
| "step": 27450 |
| }, |
| { |
| "epoch": 0.69, |
| "grad_norm": 9.52001667022705, |
| "learning_rate": 1.7220338983050849e-06, |
| "loss": 1.356, |
| "step": 27460 |
| }, |
| { |
| "epoch": 0.69, |
| "grad_norm": 13.017762184143066, |
| "learning_rate": 1.7152542372881356e-06, |
| "loss": 1.2246, |
| "step": 27470 |
| }, |
| { |
| "epoch": 0.69, |
| "grad_norm": 7.059675693511963, |
| "learning_rate": 1.7084745762711866e-06, |
| "loss": 1.3978, |
| "step": 27480 |
| }, |
| { |
| "epoch": 0.69, |
| "grad_norm": 4.05330753326416, |
| "learning_rate": 1.7016949152542376e-06, |
| "loss": 1.3678, |
| "step": 27490 |
| }, |
| { |
| "epoch": 0.69, |
| "grad_norm": 8.122435569763184, |
| "learning_rate": 1.6949152542372882e-06, |
| "loss": 1.3062, |
| "step": 27500 |
| }, |
| { |
| "epoch": 0.69, |
| "eval_loss": 1.262895941734314, |
| "eval_runtime": 66.2138, |
| "eval_samples_per_second": 15.103, |
| "eval_steps_per_second": 15.103, |
| "step": 27500 |
| }, |
| { |
| "epoch": 0.69, |
| "grad_norm": 5.283616542816162, |
| "learning_rate": 1.6881355932203391e-06, |
| "loss": 1.3504, |
| "step": 27510 |
| }, |
| { |
| "epoch": 0.69, |
| "grad_norm": 3.6427159309387207, |
| "learning_rate": 1.6813559322033901e-06, |
| "loss": 1.3028, |
| "step": 27520 |
| }, |
| { |
| "epoch": 0.69, |
| "grad_norm": 5.701483249664307, |
| "learning_rate": 1.6745762711864409e-06, |
| "loss": 1.4436, |
| "step": 27530 |
| }, |
| { |
| "epoch": 0.69, |
| "grad_norm": 9.295339584350586, |
| "learning_rate": 1.6677966101694916e-06, |
| "loss": 1.4106, |
| "step": 27540 |
| }, |
| { |
| "epoch": 0.69, |
| "grad_norm": 3.1163859367370605, |
| "learning_rate": 1.6610169491525424e-06, |
| "loss": 1.2698, |
| "step": 27550 |
| }, |
| { |
| "epoch": 0.69, |
| "grad_norm": 6.673079967498779, |
| "learning_rate": 1.6542372881355934e-06, |
| "loss": 1.2202, |
| "step": 27560 |
| }, |
| { |
| "epoch": 0.69, |
| "grad_norm": 4.866211414337158, |
| "learning_rate": 1.6474576271186444e-06, |
| "loss": 1.2538, |
| "step": 27570 |
| }, |
| { |
| "epoch": 0.69, |
| "grad_norm": 3.319688081741333, |
| "learning_rate": 1.640677966101695e-06, |
| "loss": 1.4981, |
| "step": 27580 |
| }, |
| { |
| "epoch": 0.69, |
| "grad_norm": 5.170316219329834, |
| "learning_rate": 1.6338983050847459e-06, |
| "loss": 1.3682, |
| "step": 27590 |
| }, |
| { |
| "epoch": 0.69, |
| "grad_norm": 9.018121719360352, |
| "learning_rate": 1.6271186440677967e-06, |
| "loss": 1.3149, |
| "step": 27600 |
| }, |
| { |
| "epoch": 0.69, |
| "grad_norm": 6.896349906921387, |
| "learning_rate": 1.6203389830508476e-06, |
| "loss": 1.317, |
| "step": 27610 |
| }, |
| { |
| "epoch": 0.69, |
| "grad_norm": 7.63593053817749, |
| "learning_rate": 1.6135593220338986e-06, |
| "loss": 1.3238, |
| "step": 27620 |
| }, |
| { |
| "epoch": 0.69, |
| "grad_norm": 8.823158264160156, |
| "learning_rate": 1.6067796610169492e-06, |
| "loss": 1.4479, |
| "step": 27630 |
| }, |
| { |
| "epoch": 0.69, |
| "grad_norm": 8.540557861328125, |
| "learning_rate": 1.6000000000000001e-06, |
| "loss": 1.3663, |
| "step": 27640 |
| }, |
| { |
| "epoch": 0.69, |
| "grad_norm": 8.03847885131836, |
| "learning_rate": 1.593220338983051e-06, |
| "loss": 1.2248, |
| "step": 27650 |
| }, |
| { |
| "epoch": 0.69, |
| "grad_norm": 5.210377216339111, |
| "learning_rate": 1.5864406779661019e-06, |
| "loss": 1.3373, |
| "step": 27660 |
| }, |
| { |
| "epoch": 0.69, |
| "grad_norm": 6.994758129119873, |
| "learning_rate": 1.5796610169491526e-06, |
| "loss": 1.3166, |
| "step": 27670 |
| }, |
| { |
| "epoch": 0.69, |
| "grad_norm": 7.343669414520264, |
| "learning_rate": 1.5728813559322034e-06, |
| "loss": 1.3406, |
| "step": 27680 |
| }, |
| { |
| "epoch": 0.69, |
| "grad_norm": 12.689948081970215, |
| "learning_rate": 1.5661016949152544e-06, |
| "loss": 1.2073, |
| "step": 27690 |
| }, |
| { |
| "epoch": 0.69, |
| "grad_norm": 7.018815040588379, |
| "learning_rate": 1.5593220338983054e-06, |
| "loss": 1.2738, |
| "step": 27700 |
| }, |
| { |
| "epoch": 0.69, |
| "grad_norm": 5.334643840789795, |
| "learning_rate": 1.552542372881356e-06, |
| "loss": 1.2211, |
| "step": 27710 |
| }, |
| { |
| "epoch": 0.69, |
| "grad_norm": 1.1950844526290894, |
| "learning_rate": 1.545762711864407e-06, |
| "loss": 1.268, |
| "step": 27720 |
| }, |
| { |
| "epoch": 0.69, |
| "grad_norm": 11.679058074951172, |
| "learning_rate": 1.5389830508474577e-06, |
| "loss": 1.1873, |
| "step": 27730 |
| }, |
| { |
| "epoch": 0.69, |
| "grad_norm": 7.686078071594238, |
| "learning_rate": 1.5322033898305086e-06, |
| "loss": 1.277, |
| "step": 27740 |
| }, |
| { |
| "epoch": 0.69, |
| "grad_norm": 2.579845905303955, |
| "learning_rate": 1.5254237288135596e-06, |
| "loss": 1.3752, |
| "step": 27750 |
| }, |
| { |
| "epoch": 0.69, |
| "grad_norm": 11.227088928222656, |
| "learning_rate": 1.5186440677966102e-06, |
| "loss": 1.2593, |
| "step": 27760 |
| }, |
| { |
| "epoch": 0.69, |
| "grad_norm": 6.903045654296875, |
| "learning_rate": 1.5118644067796611e-06, |
| "loss": 1.2463, |
| "step": 27770 |
| }, |
| { |
| "epoch": 0.69, |
| "grad_norm": 12.534771919250488, |
| "learning_rate": 1.505084745762712e-06, |
| "loss": 1.2949, |
| "step": 27780 |
| }, |
| { |
| "epoch": 0.69, |
| "grad_norm": 4.901615619659424, |
| "learning_rate": 1.4983050847457629e-06, |
| "loss": 1.2975, |
| "step": 27790 |
| }, |
| { |
| "epoch": 0.69, |
| "grad_norm": 9.669910430908203, |
| "learning_rate": 1.4915254237288139e-06, |
| "loss": 1.3476, |
| "step": 27800 |
| }, |
| { |
| "epoch": 0.7, |
| "grad_norm": 6.982202529907227, |
| "learning_rate": 1.4847457627118644e-06, |
| "loss": 1.3839, |
| "step": 27810 |
| }, |
| { |
| "epoch": 0.7, |
| "grad_norm": 9.172724723815918, |
| "learning_rate": 1.4779661016949154e-06, |
| "loss": 1.3519, |
| "step": 27820 |
| }, |
| { |
| "epoch": 0.7, |
| "grad_norm": 8.561583518981934, |
| "learning_rate": 1.4711864406779664e-06, |
| "loss": 1.3402, |
| "step": 27830 |
| }, |
| { |
| "epoch": 0.7, |
| "grad_norm": 11.40105152130127, |
| "learning_rate": 1.464406779661017e-06, |
| "loss": 1.2805, |
| "step": 27840 |
| }, |
| { |
| "epoch": 0.7, |
| "grad_norm": 11.502050399780273, |
| "learning_rate": 1.457627118644068e-06, |
| "loss": 1.4351, |
| "step": 27850 |
| }, |
| { |
| "epoch": 0.7, |
| "grad_norm": 3.5157549381256104, |
| "learning_rate": 1.4508474576271187e-06, |
| "loss": 1.2948, |
| "step": 27860 |
| }, |
| { |
| "epoch": 0.7, |
| "grad_norm": 8.663883209228516, |
| "learning_rate": 1.4440677966101696e-06, |
| "loss": 1.3285, |
| "step": 27870 |
| }, |
| { |
| "epoch": 0.7, |
| "grad_norm": 4.502950191497803, |
| "learning_rate": 1.4372881355932206e-06, |
| "loss": 1.4507, |
| "step": 27880 |
| }, |
| { |
| "epoch": 0.7, |
| "grad_norm": 2.098318338394165, |
| "learning_rate": 1.4305084745762712e-06, |
| "loss": 1.1753, |
| "step": 27890 |
| }, |
| { |
| "epoch": 0.7, |
| "grad_norm": 2.721998453140259, |
| "learning_rate": 1.4237288135593222e-06, |
| "loss": 1.3162, |
| "step": 27900 |
| }, |
| { |
| "epoch": 0.7, |
| "grad_norm": 6.501703262329102, |
| "learning_rate": 1.416949152542373e-06, |
| "loss": 1.2442, |
| "step": 27910 |
| }, |
| { |
| "epoch": 0.7, |
| "grad_norm": 4.597460746765137, |
| "learning_rate": 1.410169491525424e-06, |
| "loss": 1.2659, |
| "step": 27920 |
| }, |
| { |
| "epoch": 0.7, |
| "grad_norm": 1.399740219116211, |
| "learning_rate": 1.4033898305084749e-06, |
| "loss": 1.2234, |
| "step": 27930 |
| }, |
| { |
| "epoch": 0.7, |
| "grad_norm": 1.2826783657073975, |
| "learning_rate": 1.3966101694915254e-06, |
| "loss": 1.3775, |
| "step": 27940 |
| }, |
| { |
| "epoch": 0.7, |
| "grad_norm": 9.264411926269531, |
| "learning_rate": 1.3898305084745764e-06, |
| "loss": 1.299, |
| "step": 27950 |
| }, |
| { |
| "epoch": 0.7, |
| "grad_norm": 11.41451644897461, |
| "learning_rate": 1.3830508474576274e-06, |
| "loss": 1.2851, |
| "step": 27960 |
| }, |
| { |
| "epoch": 0.7, |
| "grad_norm": 4.351644515991211, |
| "learning_rate": 1.376271186440678e-06, |
| "loss": 1.1979, |
| "step": 27970 |
| }, |
| { |
| "epoch": 0.7, |
| "grad_norm": 9.026026725769043, |
| "learning_rate": 1.369491525423729e-06, |
| "loss": 1.3119, |
| "step": 27980 |
| }, |
| { |
| "epoch": 0.7, |
| "grad_norm": 6.000504970550537, |
| "learning_rate": 1.3627118644067797e-06, |
| "loss": 1.2543, |
| "step": 27990 |
| }, |
| { |
| "epoch": 0.7, |
| "grad_norm": 22.265581130981445, |
| "learning_rate": 1.3559322033898307e-06, |
| "loss": 1.34, |
| "step": 28000 |
| }, |
| { |
| "epoch": 0.7, |
| "eval_loss": 1.3058114051818848, |
| "eval_runtime": 66.2214, |
| "eval_samples_per_second": 15.101, |
| "eval_steps_per_second": 15.101, |
| "step": 28000 |
| }, |
| { |
| "epoch": 0.7, |
| "grad_norm": 4.122670650482178, |
| "learning_rate": 1.3491525423728816e-06, |
| "loss": 1.2058, |
| "step": 28010 |
| }, |
| { |
| "epoch": 0.7, |
| "grad_norm": 3.766960859298706, |
| "learning_rate": 1.3423728813559322e-06, |
| "loss": 1.3474, |
| "step": 28020 |
| }, |
| { |
| "epoch": 0.7, |
| "grad_norm": 2.3636386394500732, |
| "learning_rate": 1.3355932203389832e-06, |
| "loss": 1.1637, |
| "step": 28030 |
| }, |
| { |
| "epoch": 0.7, |
| "grad_norm": 9.471797943115234, |
| "learning_rate": 1.328813559322034e-06, |
| "loss": 1.3953, |
| "step": 28040 |
| }, |
| { |
| "epoch": 0.7, |
| "grad_norm": 8.232218742370605, |
| "learning_rate": 1.322033898305085e-06, |
| "loss": 1.4269, |
| "step": 28050 |
| }, |
| { |
| "epoch": 0.7, |
| "grad_norm": 1.1584389209747314, |
| "learning_rate": 1.3152542372881359e-06, |
| "loss": 1.3231, |
| "step": 28060 |
| }, |
| { |
| "epoch": 0.7, |
| "grad_norm": 4.911566257476807, |
| "learning_rate": 1.3084745762711864e-06, |
| "loss": 1.241, |
| "step": 28070 |
| }, |
| { |
| "epoch": 0.7, |
| "grad_norm": 18.741342544555664, |
| "learning_rate": 1.3016949152542374e-06, |
| "loss": 1.3838, |
| "step": 28080 |
| }, |
| { |
| "epoch": 0.7, |
| "grad_norm": 8.622126579284668, |
| "learning_rate": 1.2949152542372884e-06, |
| "loss": 1.2792, |
| "step": 28090 |
| }, |
| { |
| "epoch": 0.7, |
| "grad_norm": 4.402095794677734, |
| "learning_rate": 1.288135593220339e-06, |
| "loss": 1.2308, |
| "step": 28100 |
| }, |
| { |
| "epoch": 0.7, |
| "grad_norm": 5.238518714904785, |
| "learning_rate": 1.28135593220339e-06, |
| "loss": 1.3537, |
| "step": 28110 |
| }, |
| { |
| "epoch": 0.7, |
| "grad_norm": 5.757815837860107, |
| "learning_rate": 1.2745762711864407e-06, |
| "loss": 1.3987, |
| "step": 28120 |
| }, |
| { |
| "epoch": 0.7, |
| "grad_norm": 9.999316215515137, |
| "learning_rate": 1.2677966101694917e-06, |
| "loss": 1.1397, |
| "step": 28130 |
| }, |
| { |
| "epoch": 0.7, |
| "grad_norm": 6.276950359344482, |
| "learning_rate": 1.2610169491525426e-06, |
| "loss": 1.5024, |
| "step": 28140 |
| }, |
| { |
| "epoch": 0.7, |
| "grad_norm": 5.9979634284973145, |
| "learning_rate": 1.2542372881355932e-06, |
| "loss": 1.4296, |
| "step": 28150 |
| }, |
| { |
| "epoch": 0.7, |
| "grad_norm": 7.124503135681152, |
| "learning_rate": 1.2474576271186442e-06, |
| "loss": 1.279, |
| "step": 28160 |
| }, |
| { |
| "epoch": 0.7, |
| "grad_norm": 10.153301239013672, |
| "learning_rate": 1.240677966101695e-06, |
| "loss": 1.3013, |
| "step": 28170 |
| }, |
| { |
| "epoch": 0.7, |
| "grad_norm": 6.391571521759033, |
| "learning_rate": 1.233898305084746e-06, |
| "loss": 1.2782, |
| "step": 28180 |
| }, |
| { |
| "epoch": 0.7, |
| "grad_norm": 10.13975715637207, |
| "learning_rate": 1.2271186440677967e-06, |
| "loss": 1.4136, |
| "step": 28190 |
| }, |
| { |
| "epoch": 0.7, |
| "grad_norm": 5.052265167236328, |
| "learning_rate": 1.2203389830508477e-06, |
| "loss": 1.3246, |
| "step": 28200 |
| }, |
| { |
| "epoch": 0.71, |
| "grad_norm": 6.8834638595581055, |
| "learning_rate": 1.2135593220338984e-06, |
| "loss": 1.3436, |
| "step": 28210 |
| }, |
| { |
| "epoch": 0.71, |
| "grad_norm": 10.564448356628418, |
| "learning_rate": 1.2067796610169492e-06, |
| "loss": 1.1619, |
| "step": 28220 |
| }, |
| { |
| "epoch": 0.71, |
| "grad_norm": 3.8192801475524902, |
| "learning_rate": 1.2000000000000002e-06, |
| "loss": 1.2328, |
| "step": 28230 |
| }, |
| { |
| "epoch": 0.71, |
| "grad_norm": 5.635645866394043, |
| "learning_rate": 1.193220338983051e-06, |
| "loss": 1.3258, |
| "step": 28240 |
| }, |
| { |
| "epoch": 0.71, |
| "grad_norm": 6.429792881011963, |
| "learning_rate": 1.186440677966102e-06, |
| "loss": 1.3368, |
| "step": 28250 |
| }, |
| { |
| "epoch": 0.71, |
| "grad_norm": 11.116402626037598, |
| "learning_rate": 1.1796610169491527e-06, |
| "loss": 1.2527, |
| "step": 28260 |
| }, |
| { |
| "epoch": 0.71, |
| "grad_norm": 2.7472074031829834, |
| "learning_rate": 1.1728813559322034e-06, |
| "loss": 1.4633, |
| "step": 28270 |
| }, |
| { |
| "epoch": 0.71, |
| "grad_norm": 4.118687629699707, |
| "learning_rate": 1.1661016949152542e-06, |
| "loss": 1.4852, |
| "step": 28280 |
| }, |
| { |
| "epoch": 0.71, |
| "grad_norm": 14.502837181091309, |
| "learning_rate": 1.1593220338983052e-06, |
| "loss": 1.1964, |
| "step": 28290 |
| }, |
| { |
| "epoch": 0.71, |
| "grad_norm": 3.8546032905578613, |
| "learning_rate": 1.152542372881356e-06, |
| "loss": 1.1832, |
| "step": 28300 |
| }, |
| { |
| "epoch": 0.71, |
| "grad_norm": 2.377305507659912, |
| "learning_rate": 1.145762711864407e-06, |
| "loss": 1.3164, |
| "step": 28310 |
| }, |
| { |
| "epoch": 0.71, |
| "grad_norm": 2.172879219055176, |
| "learning_rate": 1.1389830508474577e-06, |
| "loss": 1.3163, |
| "step": 28320 |
| }, |
| { |
| "epoch": 0.71, |
| "grad_norm": 4.290719032287598, |
| "learning_rate": 1.1322033898305087e-06, |
| "loss": 1.2505, |
| "step": 28330 |
| }, |
| { |
| "epoch": 0.71, |
| "grad_norm": 15.109819412231445, |
| "learning_rate": 1.1254237288135594e-06, |
| "loss": 1.2868, |
| "step": 28340 |
| }, |
| { |
| "epoch": 0.71, |
| "grad_norm": 11.07304859161377, |
| "learning_rate": 1.1186440677966102e-06, |
| "loss": 1.2118, |
| "step": 28350 |
| }, |
| { |
| "epoch": 0.71, |
| "grad_norm": 14.64116096496582, |
| "learning_rate": 1.1118644067796612e-06, |
| "loss": 1.2904, |
| "step": 28360 |
| }, |
| { |
| "epoch": 0.71, |
| "grad_norm": 12.49071216583252, |
| "learning_rate": 1.105084745762712e-06, |
| "loss": 1.286, |
| "step": 28370 |
| }, |
| { |
| "epoch": 0.71, |
| "grad_norm": 3.750757932662964, |
| "learning_rate": 1.098305084745763e-06, |
| "loss": 1.2723, |
| "step": 28380 |
| }, |
| { |
| "epoch": 0.71, |
| "grad_norm": 11.984790802001953, |
| "learning_rate": 1.0915254237288137e-06, |
| "loss": 1.1732, |
| "step": 28390 |
| }, |
| { |
| "epoch": 0.71, |
| "grad_norm": 13.987404823303223, |
| "learning_rate": 1.0847457627118644e-06, |
| "loss": 1.2168, |
| "step": 28400 |
| }, |
| { |
| "epoch": 0.71, |
| "grad_norm": 10.837672233581543, |
| "learning_rate": 1.0779661016949152e-06, |
| "loss": 1.4036, |
| "step": 28410 |
| }, |
| { |
| "epoch": 0.71, |
| "grad_norm": 12.291699409484863, |
| "learning_rate": 1.0711864406779662e-06, |
| "loss": 1.1172, |
| "step": 28420 |
| }, |
| { |
| "epoch": 0.71, |
| "grad_norm": 2.2296297550201416, |
| "learning_rate": 1.064406779661017e-06, |
| "loss": 1.3241, |
| "step": 28430 |
| }, |
| { |
| "epoch": 0.71, |
| "grad_norm": 6.434119701385498, |
| "learning_rate": 1.057627118644068e-06, |
| "loss": 1.3096, |
| "step": 28440 |
| }, |
| { |
| "epoch": 0.71, |
| "grad_norm": 3.2857985496520996, |
| "learning_rate": 1.0508474576271187e-06, |
| "loss": 1.3744, |
| "step": 28450 |
| }, |
| { |
| "epoch": 0.71, |
| "grad_norm": 6.822338104248047, |
| "learning_rate": 1.0440677966101697e-06, |
| "loss": 1.3724, |
| "step": 28460 |
| }, |
| { |
| "epoch": 0.71, |
| "grad_norm": 6.319363117218018, |
| "learning_rate": 1.0372881355932204e-06, |
| "loss": 1.2181, |
| "step": 28470 |
| }, |
| { |
| "epoch": 0.71, |
| "grad_norm": 3.777921676635742, |
| "learning_rate": 1.0305084745762712e-06, |
| "loss": 1.3495, |
| "step": 28480 |
| }, |
| { |
| "epoch": 0.71, |
| "grad_norm": 6.540971755981445, |
| "learning_rate": 1.0237288135593222e-06, |
| "loss": 1.4589, |
| "step": 28490 |
| }, |
| { |
| "epoch": 0.71, |
| "grad_norm": 7.9154744148254395, |
| "learning_rate": 1.016949152542373e-06, |
| "loss": 1.3015, |
| "step": 28500 |
| }, |
| { |
| "epoch": 0.71, |
| "eval_loss": 1.3162422180175781, |
| "eval_runtime": 66.2512, |
| "eval_samples_per_second": 15.094, |
| "eval_steps_per_second": 15.094, |
| "step": 28500 |
| }, |
| { |
| "epoch": 0.71, |
| "grad_norm": 15.148056983947754, |
| "learning_rate": 1.010169491525424e-06, |
| "loss": 1.2363, |
| "step": 28510 |
| }, |
| { |
| "epoch": 0.71, |
| "grad_norm": 10.743448257446289, |
| "learning_rate": 1.0033898305084747e-06, |
| "loss": 1.1965, |
| "step": 28520 |
| }, |
| { |
| "epoch": 0.71, |
| "grad_norm": 12.191396713256836, |
| "learning_rate": 9.966101694915254e-07, |
| "loss": 1.4387, |
| "step": 28530 |
| }, |
| { |
| "epoch": 0.71, |
| "grad_norm": 4.78171968460083, |
| "learning_rate": 9.898305084745762e-07, |
| "loss": 1.4977, |
| "step": 28540 |
| }, |
| { |
| "epoch": 0.71, |
| "grad_norm": 5.24019718170166, |
| "learning_rate": 9.830508474576272e-07, |
| "loss": 1.5177, |
| "step": 28550 |
| }, |
| { |
| "epoch": 0.71, |
| "grad_norm": 12.000500679016113, |
| "learning_rate": 9.762711864406782e-07, |
| "loss": 1.1637, |
| "step": 28560 |
| }, |
| { |
| "epoch": 0.71, |
| "grad_norm": 10.612434387207031, |
| "learning_rate": 9.69491525423729e-07, |
| "loss": 1.334, |
| "step": 28570 |
| }, |
| { |
| "epoch": 0.71, |
| "grad_norm": 6.343203544616699, |
| "learning_rate": 9.627118644067797e-07, |
| "loss": 1.3152, |
| "step": 28580 |
| }, |
| { |
| "epoch": 0.71, |
| "grad_norm": 5.986273288726807, |
| "learning_rate": 9.559322033898307e-07, |
| "loss": 1.3724, |
| "step": 28590 |
| }, |
| { |
| "epoch": 0.71, |
| "grad_norm": 12.447896003723145, |
| "learning_rate": 9.491525423728814e-07, |
| "loss": 1.2404, |
| "step": 28600 |
| }, |
| { |
| "epoch": 0.72, |
| "grad_norm": 18.211698532104492, |
| "learning_rate": 9.423728813559323e-07, |
| "loss": 1.3091, |
| "step": 28610 |
| }, |
| { |
| "epoch": 0.72, |
| "grad_norm": 5.227106094360352, |
| "learning_rate": 9.355932203389831e-07, |
| "loss": 1.1275, |
| "step": 28620 |
| }, |
| { |
| "epoch": 0.72, |
| "grad_norm": 4.874502182006836, |
| "learning_rate": 9.28813559322034e-07, |
| "loss": 1.2959, |
| "step": 28630 |
| }, |
| { |
| "epoch": 0.72, |
| "grad_norm": 7.699239253997803, |
| "learning_rate": 9.220338983050848e-07, |
| "loss": 1.3687, |
| "step": 28640 |
| }, |
| { |
| "epoch": 0.72, |
| "grad_norm": 7.712405681610107, |
| "learning_rate": 9.152542372881357e-07, |
| "loss": 1.3746, |
| "step": 28650 |
| }, |
| { |
| "epoch": 0.72, |
| "grad_norm": 8.854902267456055, |
| "learning_rate": 9.084745762711864e-07, |
| "loss": 1.3399, |
| "step": 28660 |
| }, |
| { |
| "epoch": 0.72, |
| "grad_norm": 5.26395320892334, |
| "learning_rate": 9.016949152542373e-07, |
| "loss": 1.2296, |
| "step": 28670 |
| }, |
| { |
| "epoch": 0.72, |
| "grad_norm": 5.966766357421875, |
| "learning_rate": 8.949152542372883e-07, |
| "loss": 1.2806, |
| "step": 28680 |
| }, |
| { |
| "epoch": 0.72, |
| "grad_norm": 5.988807201385498, |
| "learning_rate": 8.881355932203391e-07, |
| "loss": 1.4652, |
| "step": 28690 |
| }, |
| { |
| "epoch": 0.72, |
| "grad_norm": 1.523200273513794, |
| "learning_rate": 8.813559322033899e-07, |
| "loss": 1.4848, |
| "step": 28700 |
| }, |
| { |
| "epoch": 0.72, |
| "grad_norm": 3.4972944259643555, |
| "learning_rate": 8.745762711864407e-07, |
| "loss": 1.296, |
| "step": 28710 |
| }, |
| { |
| "epoch": 0.72, |
| "grad_norm": 4.062095642089844, |
| "learning_rate": 8.677966101694917e-07, |
| "loss": 1.3716, |
| "step": 28720 |
| }, |
| { |
| "epoch": 0.72, |
| "grad_norm": 1.065338134765625, |
| "learning_rate": 8.610169491525424e-07, |
| "loss": 1.2375, |
| "step": 28730 |
| }, |
| { |
| "epoch": 0.72, |
| "grad_norm": 6.1202006340026855, |
| "learning_rate": 8.542372881355933e-07, |
| "loss": 1.3413, |
| "step": 28740 |
| }, |
| { |
| "epoch": 0.72, |
| "grad_norm": 17.116790771484375, |
| "learning_rate": 8.474576271186441e-07, |
| "loss": 1.2014, |
| "step": 28750 |
| }, |
| { |
| "epoch": 0.72, |
| "grad_norm": 6.310455799102783, |
| "learning_rate": 8.406779661016951e-07, |
| "loss": 1.2509, |
| "step": 28760 |
| }, |
| { |
| "epoch": 0.72, |
| "grad_norm": 19.508827209472656, |
| "learning_rate": 8.338983050847458e-07, |
| "loss": 1.4035, |
| "step": 28770 |
| }, |
| { |
| "epoch": 0.72, |
| "grad_norm": 10.587761878967285, |
| "learning_rate": 8.271186440677967e-07, |
| "loss": 1.3374, |
| "step": 28780 |
| }, |
| { |
| "epoch": 0.72, |
| "grad_norm": 15.715071678161621, |
| "learning_rate": 8.203389830508475e-07, |
| "loss": 1.3229, |
| "step": 28790 |
| }, |
| { |
| "epoch": 0.72, |
| "grad_norm": 3.0983753204345703, |
| "learning_rate": 8.135593220338983e-07, |
| "loss": 1.4297, |
| "step": 28800 |
| }, |
| { |
| "epoch": 0.72, |
| "grad_norm": 4.541349411010742, |
| "learning_rate": 8.067796610169493e-07, |
| "loss": 1.3327, |
| "step": 28810 |
| }, |
| { |
| "epoch": 0.72, |
| "grad_norm": 4.102433204650879, |
| "learning_rate": 8.000000000000001e-07, |
| "loss": 1.3362, |
| "step": 28820 |
| }, |
| { |
| "epoch": 0.72, |
| "grad_norm": 6.362680435180664, |
| "learning_rate": 7.932203389830509e-07, |
| "loss": 1.2275, |
| "step": 28830 |
| }, |
| { |
| "epoch": 0.72, |
| "grad_norm": 9.411408424377441, |
| "learning_rate": 7.864406779661017e-07, |
| "loss": 1.4023, |
| "step": 28840 |
| }, |
| { |
| "epoch": 0.72, |
| "grad_norm": 6.113147258758545, |
| "learning_rate": 7.796610169491527e-07, |
| "loss": 1.2319, |
| "step": 28850 |
| }, |
| { |
| "epoch": 0.72, |
| "grad_norm": 6.580341815948486, |
| "learning_rate": 7.728813559322034e-07, |
| "loss": 1.3559, |
| "step": 28860 |
| }, |
| { |
| "epoch": 0.72, |
| "grad_norm": 10.778473854064941, |
| "learning_rate": 7.661016949152543e-07, |
| "loss": 1.2247, |
| "step": 28870 |
| }, |
| { |
| "epoch": 0.72, |
| "grad_norm": 4.692105770111084, |
| "learning_rate": 7.593220338983051e-07, |
| "loss": 1.2254, |
| "step": 28880 |
| }, |
| { |
| "epoch": 0.72, |
| "grad_norm": 8.076458930969238, |
| "learning_rate": 7.52542372881356e-07, |
| "loss": 1.3264, |
| "step": 28890 |
| }, |
| { |
| "epoch": 0.72, |
| "grad_norm": 11.618599891662598, |
| "learning_rate": 7.457627118644069e-07, |
| "loss": 1.174, |
| "step": 28900 |
| }, |
| { |
| "epoch": 0.72, |
| "grad_norm": 10.995185852050781, |
| "learning_rate": 7.389830508474577e-07, |
| "loss": 1.21, |
| "step": 28910 |
| }, |
| { |
| "epoch": 0.72, |
| "grad_norm": 12.011213302612305, |
| "learning_rate": 7.322033898305085e-07, |
| "loss": 1.3272, |
| "step": 28920 |
| }, |
| { |
| "epoch": 0.72, |
| "grad_norm": 4.367415428161621, |
| "learning_rate": 7.254237288135593e-07, |
| "loss": 1.3997, |
| "step": 28930 |
| }, |
| { |
| "epoch": 0.72, |
| "grad_norm": 11.401775360107422, |
| "learning_rate": 7.186440677966103e-07, |
| "loss": 1.3507, |
| "step": 28940 |
| }, |
| { |
| "epoch": 0.72, |
| "grad_norm": 8.856404304504395, |
| "learning_rate": 7.118644067796611e-07, |
| "loss": 1.3274, |
| "step": 28950 |
| }, |
| { |
| "epoch": 0.72, |
| "grad_norm": 5.873079776763916, |
| "learning_rate": 7.05084745762712e-07, |
| "loss": 1.2987, |
| "step": 28960 |
| }, |
| { |
| "epoch": 0.72, |
| "grad_norm": 7.142796993255615, |
| "learning_rate": 6.983050847457627e-07, |
| "loss": 1.2049, |
| "step": 28970 |
| }, |
| { |
| "epoch": 0.72, |
| "grad_norm": 5.9704179763793945, |
| "learning_rate": 6.915254237288137e-07, |
| "loss": 1.3481, |
| "step": 28980 |
| }, |
| { |
| "epoch": 0.72, |
| "grad_norm": 10.48417854309082, |
| "learning_rate": 6.847457627118645e-07, |
| "loss": 1.3498, |
| "step": 28990 |
| }, |
| { |
| "epoch": 0.72, |
| "grad_norm": 12.131574630737305, |
| "learning_rate": 6.779661016949153e-07, |
| "loss": 1.3529, |
| "step": 29000 |
| }, |
| { |
| "epoch": 0.72, |
| "eval_loss": 1.3368879556655884, |
| "eval_runtime": 66.2441, |
| "eval_samples_per_second": 15.096, |
| "eval_steps_per_second": 15.096, |
| "step": 29000 |
| }, |
| { |
| "epoch": 0.73, |
| "grad_norm": 7.354034423828125, |
| "learning_rate": 6.711864406779661e-07, |
| "loss": 1.2698, |
| "step": 29010 |
| }, |
| { |
| "epoch": 0.73, |
| "grad_norm": 3.9224977493286133, |
| "learning_rate": 6.64406779661017e-07, |
| "loss": 1.3377, |
| "step": 29020 |
| }, |
| { |
| "epoch": 0.73, |
| "grad_norm": 8.08880615234375, |
| "learning_rate": 6.576271186440679e-07, |
| "loss": 1.4448, |
| "step": 29030 |
| }, |
| { |
| "epoch": 0.73, |
| "grad_norm": 3.625216245651245, |
| "learning_rate": 6.508474576271187e-07, |
| "loss": 1.3561, |
| "step": 29040 |
| }, |
| { |
| "epoch": 0.73, |
| "grad_norm": 13.905680656433105, |
| "learning_rate": 6.440677966101695e-07, |
| "loss": 1.2956, |
| "step": 29050 |
| }, |
| { |
| "epoch": 0.73, |
| "grad_norm": 3.773040294647217, |
| "learning_rate": 6.372881355932203e-07, |
| "loss": 1.2934, |
| "step": 29060 |
| }, |
| { |
| "epoch": 0.73, |
| "grad_norm": 5.38674259185791, |
| "learning_rate": 6.305084745762713e-07, |
| "loss": 1.3364, |
| "step": 29070 |
| }, |
| { |
| "epoch": 0.73, |
| "grad_norm": 4.701632022857666, |
| "learning_rate": 6.237288135593221e-07, |
| "loss": 1.1754, |
| "step": 29080 |
| }, |
| { |
| "epoch": 0.73, |
| "grad_norm": 8.243915557861328, |
| "learning_rate": 6.16949152542373e-07, |
| "loss": 1.4113, |
| "step": 29090 |
| }, |
| { |
| "epoch": 0.73, |
| "grad_norm": 4.2352213859558105, |
| "learning_rate": 6.101694915254238e-07, |
| "loss": 1.4673, |
| "step": 29100 |
| }, |
| { |
| "epoch": 0.73, |
| "grad_norm": 19.758262634277344, |
| "learning_rate": 6.033898305084746e-07, |
| "loss": 1.0756, |
| "step": 29110 |
| }, |
| { |
| "epoch": 0.73, |
| "grad_norm": 9.024798393249512, |
| "learning_rate": 5.966101694915255e-07, |
| "loss": 1.1835, |
| "step": 29120 |
| }, |
| { |
| "epoch": 0.73, |
| "grad_norm": 2.7573533058166504, |
| "learning_rate": 5.898305084745763e-07, |
| "loss": 1.3342, |
| "step": 29130 |
| }, |
| { |
| "epoch": 0.73, |
| "grad_norm": 2.6472461223602295, |
| "learning_rate": 5.830508474576271e-07, |
| "loss": 1.478, |
| "step": 29140 |
| }, |
| { |
| "epoch": 0.73, |
| "grad_norm": 8.194090843200684, |
| "learning_rate": 5.76271186440678e-07, |
| "loss": 1.2781, |
| "step": 29150 |
| }, |
| { |
| "epoch": 0.73, |
| "grad_norm": 7.336125373840332, |
| "learning_rate": 5.694915254237288e-07, |
| "loss": 1.3871, |
| "step": 29160 |
| }, |
| { |
| "epoch": 0.73, |
| "grad_norm": 9.800869941711426, |
| "learning_rate": 5.627118644067797e-07, |
| "loss": 1.237, |
| "step": 29170 |
| }, |
| { |
| "epoch": 0.73, |
| "grad_norm": 3.7578229904174805, |
| "learning_rate": 5.559322033898306e-07, |
| "loss": 1.3408, |
| "step": 29180 |
| }, |
| { |
| "epoch": 0.73, |
| "grad_norm": 11.04594612121582, |
| "learning_rate": 5.491525423728815e-07, |
| "loss": 1.4338, |
| "step": 29190 |
| }, |
| { |
| "epoch": 0.73, |
| "grad_norm": 9.137775421142578, |
| "learning_rate": 5.423728813559322e-07, |
| "loss": 1.2292, |
| "step": 29200 |
| }, |
| { |
| "epoch": 0.73, |
| "grad_norm": 6.266888618469238, |
| "learning_rate": 5.355932203389831e-07, |
| "loss": 1.3781, |
| "step": 29210 |
| }, |
| { |
| "epoch": 0.73, |
| "grad_norm": 10.63198184967041, |
| "learning_rate": 5.28813559322034e-07, |
| "loss": 1.2394, |
| "step": 29220 |
| }, |
| { |
| "epoch": 0.73, |
| "grad_norm": 16.352039337158203, |
| "learning_rate": 5.220338983050848e-07, |
| "loss": 1.3784, |
| "step": 29230 |
| }, |
| { |
| "epoch": 0.73, |
| "grad_norm": 9.548080444335938, |
| "learning_rate": 5.152542372881356e-07, |
| "loss": 1.3201, |
| "step": 29240 |
| }, |
| { |
| "epoch": 0.73, |
| "grad_norm": 2.56595516204834, |
| "learning_rate": 5.084745762711865e-07, |
| "loss": 1.2335, |
| "step": 29250 |
| }, |
| { |
| "epoch": 0.73, |
| "grad_norm": 3.092132568359375, |
| "learning_rate": 5.016949152542373e-07, |
| "loss": 1.2999, |
| "step": 29260 |
| }, |
| { |
| "epoch": 0.73, |
| "grad_norm": 12.341950416564941, |
| "learning_rate": 4.949152542372881e-07, |
| "loss": 1.2719, |
| "step": 29270 |
| }, |
| { |
| "epoch": 0.73, |
| "grad_norm": 16.05048179626465, |
| "learning_rate": 4.881355932203391e-07, |
| "loss": 1.1151, |
| "step": 29280 |
| }, |
| { |
| "epoch": 0.73, |
| "grad_norm": 8.888206481933594, |
| "learning_rate": 4.813559322033898e-07, |
| "loss": 1.0549, |
| "step": 29290 |
| }, |
| { |
| "epoch": 0.73, |
| "grad_norm": 16.083812713623047, |
| "learning_rate": 4.745762711864407e-07, |
| "loss": 1.2443, |
| "step": 29300 |
| }, |
| { |
| "epoch": 0.73, |
| "grad_norm": 5.3232741355896, |
| "learning_rate": 4.6779661016949154e-07, |
| "loss": 1.2953, |
| "step": 29310 |
| }, |
| { |
| "epoch": 0.73, |
| "grad_norm": 15.959102630615234, |
| "learning_rate": 4.610169491525424e-07, |
| "loss": 1.282, |
| "step": 29320 |
| }, |
| { |
| "epoch": 0.73, |
| "grad_norm": 8.539166450500488, |
| "learning_rate": 4.542372881355932e-07, |
| "loss": 1.2538, |
| "step": 29330 |
| }, |
| { |
| "epoch": 0.73, |
| "grad_norm": 4.907639980316162, |
| "learning_rate": 4.4745762711864415e-07, |
| "loss": 1.3666, |
| "step": 29340 |
| }, |
| { |
| "epoch": 0.73, |
| "grad_norm": 2.455517530441284, |
| "learning_rate": 4.4067796610169497e-07, |
| "loss": 1.2269, |
| "step": 29350 |
| }, |
| { |
| "epoch": 0.73, |
| "grad_norm": 9.032910346984863, |
| "learning_rate": 4.3389830508474584e-07, |
| "loss": 1.2813, |
| "step": 29360 |
| }, |
| { |
| "epoch": 0.73, |
| "grad_norm": 2.8847286701202393, |
| "learning_rate": 4.2711864406779666e-07, |
| "loss": 1.3336, |
| "step": 29370 |
| }, |
| { |
| "epoch": 0.73, |
| "grad_norm": 11.284420013427734, |
| "learning_rate": 4.2033898305084753e-07, |
| "loss": 1.286, |
| "step": 29380 |
| }, |
| { |
| "epoch": 0.73, |
| "grad_norm": 11.412368774414062, |
| "learning_rate": 4.1355932203389835e-07, |
| "loss": 1.3317, |
| "step": 29390 |
| }, |
| { |
| "epoch": 0.73, |
| "grad_norm": 4.6027302742004395, |
| "learning_rate": 4.0677966101694916e-07, |
| "loss": 1.3263, |
| "step": 29400 |
| }, |
| { |
| "epoch": 0.74, |
| "grad_norm": 2.39072322845459, |
| "learning_rate": 4.0000000000000003e-07, |
| "loss": 1.3174, |
| "step": 29410 |
| }, |
| { |
| "epoch": 0.74, |
| "grad_norm": 2.7101926803588867, |
| "learning_rate": 3.9322033898305085e-07, |
| "loss": 1.2673, |
| "step": 29420 |
| }, |
| { |
| "epoch": 0.74, |
| "grad_norm": 10.152178764343262, |
| "learning_rate": 3.864406779661017e-07, |
| "loss": 1.3241, |
| "step": 29430 |
| }, |
| { |
| "epoch": 0.74, |
| "grad_norm": 4.905571937561035, |
| "learning_rate": 3.7966101694915254e-07, |
| "loss": 1.2261, |
| "step": 29440 |
| }, |
| { |
| "epoch": 0.74, |
| "grad_norm": 10.35471248626709, |
| "learning_rate": 3.7288135593220347e-07, |
| "loss": 1.2083, |
| "step": 29450 |
| }, |
| { |
| "epoch": 0.74, |
| "grad_norm": 11.7713623046875, |
| "learning_rate": 3.6610169491525423e-07, |
| "loss": 1.2647, |
| "step": 29460 |
| }, |
| { |
| "epoch": 0.74, |
| "grad_norm": 3.181910276412964, |
| "learning_rate": 3.5932203389830516e-07, |
| "loss": 1.4678, |
| "step": 29470 |
| }, |
| { |
| "epoch": 0.74, |
| "grad_norm": 6.756598472595215, |
| "learning_rate": 3.52542372881356e-07, |
| "loss": 1.273, |
| "step": 29480 |
| }, |
| { |
| "epoch": 0.74, |
| "grad_norm": 12.660133361816406, |
| "learning_rate": 3.4576271186440684e-07, |
| "loss": 1.3497, |
| "step": 29490 |
| }, |
| { |
| "epoch": 0.74, |
| "grad_norm": 11.418837547302246, |
| "learning_rate": 3.3898305084745766e-07, |
| "loss": 1.3025, |
| "step": 29500 |
| }, |
| { |
| "epoch": 0.74, |
| "eval_loss": 1.2983591556549072, |
| "eval_runtime": 66.2327, |
| "eval_samples_per_second": 15.098, |
| "eval_steps_per_second": 15.098, |
| "step": 29500 |
| }, |
| { |
| "epoch": 0.74, |
| "grad_norm": 5.526192665100098, |
| "learning_rate": 3.322033898305085e-07, |
| "loss": 1.1364, |
| "step": 29510 |
| }, |
| { |
| "epoch": 0.74, |
| "grad_norm": 4.229588031768799, |
| "learning_rate": 3.2542372881355935e-07, |
| "loss": 1.3351, |
| "step": 29520 |
| }, |
| { |
| "epoch": 0.74, |
| "grad_norm": 3.8703114986419678, |
| "learning_rate": 3.1864406779661017e-07, |
| "loss": 1.547, |
| "step": 29530 |
| }, |
| { |
| "epoch": 0.74, |
| "grad_norm": 4.0574445724487305, |
| "learning_rate": 3.1186440677966104e-07, |
| "loss": 1.5269, |
| "step": 29540 |
| }, |
| { |
| "epoch": 0.74, |
| "grad_norm": 2.609175443649292, |
| "learning_rate": 3.050847457627119e-07, |
| "loss": 1.3574, |
| "step": 29550 |
| }, |
| { |
| "epoch": 0.74, |
| "grad_norm": 5.445046901702881, |
| "learning_rate": 2.9830508474576273e-07, |
| "loss": 1.3081, |
| "step": 29560 |
| }, |
| { |
| "epoch": 0.74, |
| "grad_norm": 8.432710647583008, |
| "learning_rate": 2.9152542372881355e-07, |
| "loss": 1.2343, |
| "step": 29570 |
| }, |
| { |
| "epoch": 0.74, |
| "grad_norm": 10.050646781921387, |
| "learning_rate": 2.847457627118644e-07, |
| "loss": 1.2665, |
| "step": 29580 |
| }, |
| { |
| "epoch": 0.74, |
| "grad_norm": 6.895621299743652, |
| "learning_rate": 2.779661016949153e-07, |
| "loss": 1.1077, |
| "step": 29590 |
| }, |
| { |
| "epoch": 0.74, |
| "grad_norm": 6.045702934265137, |
| "learning_rate": 2.711864406779661e-07, |
| "loss": 1.0306, |
| "step": 29600 |
| }, |
| { |
| "epoch": 0.74, |
| "grad_norm": 2.18096661567688, |
| "learning_rate": 2.64406779661017e-07, |
| "loss": 1.167, |
| "step": 29610 |
| }, |
| { |
| "epoch": 0.74, |
| "grad_norm": 8.721917152404785, |
| "learning_rate": 2.576271186440678e-07, |
| "loss": 1.2476, |
| "step": 29620 |
| }, |
| { |
| "epoch": 0.74, |
| "grad_norm": 5.546863079071045, |
| "learning_rate": 2.5084745762711867e-07, |
| "loss": 1.3273, |
| "step": 29630 |
| }, |
| { |
| "epoch": 0.74, |
| "grad_norm": 7.264005661010742, |
| "learning_rate": 2.4406779661016954e-07, |
| "loss": 1.4333, |
| "step": 29640 |
| }, |
| { |
| "epoch": 0.74, |
| "grad_norm": 4.217822551727295, |
| "learning_rate": 2.3728813559322036e-07, |
| "loss": 1.5513, |
| "step": 29650 |
| }, |
| { |
| "epoch": 0.74, |
| "grad_norm": 21.425151824951172, |
| "learning_rate": 2.305084745762712e-07, |
| "loss": 1.15, |
| "step": 29660 |
| }, |
| { |
| "epoch": 0.74, |
| "grad_norm": 17.36193084716797, |
| "learning_rate": 2.2372881355932207e-07, |
| "loss": 1.4419, |
| "step": 29670 |
| }, |
| { |
| "epoch": 0.74, |
| "grad_norm": 9.74802017211914, |
| "learning_rate": 2.1694915254237292e-07, |
| "loss": 1.2543, |
| "step": 29680 |
| }, |
| { |
| "epoch": 0.74, |
| "grad_norm": 12.033125877380371, |
| "learning_rate": 2.1016949152542376e-07, |
| "loss": 1.378, |
| "step": 29690 |
| }, |
| { |
| "epoch": 0.74, |
| "grad_norm": 14.735278129577637, |
| "learning_rate": 2.0338983050847458e-07, |
| "loss": 1.2712, |
| "step": 29700 |
| }, |
| { |
| "epoch": 0.74, |
| "grad_norm": 11.541913986206055, |
| "learning_rate": 1.9661016949152543e-07, |
| "loss": 1.0699, |
| "step": 29710 |
| }, |
| { |
| "epoch": 0.74, |
| "grad_norm": 3.0062718391418457, |
| "learning_rate": 1.8983050847457627e-07, |
| "loss": 1.3865, |
| "step": 29720 |
| }, |
| { |
| "epoch": 0.74, |
| "grad_norm": 7.821793079376221, |
| "learning_rate": 1.8305084745762712e-07, |
| "loss": 1.1679, |
| "step": 29730 |
| }, |
| { |
| "epoch": 0.74, |
| "grad_norm": 12.209012031555176, |
| "learning_rate": 1.76271186440678e-07, |
| "loss": 1.1567, |
| "step": 29740 |
| }, |
| { |
| "epoch": 0.74, |
| "grad_norm": 3.285269021987915, |
| "learning_rate": 1.6949152542372883e-07, |
| "loss": 1.3667, |
| "step": 29750 |
| }, |
| { |
| "epoch": 0.74, |
| "grad_norm": 5.812708377838135, |
| "learning_rate": 1.6271186440677968e-07, |
| "loss": 1.2521, |
| "step": 29760 |
| }, |
| { |
| "epoch": 0.74, |
| "grad_norm": 3.4106059074401855, |
| "learning_rate": 1.5593220338983052e-07, |
| "loss": 1.3847, |
| "step": 29770 |
| }, |
| { |
| "epoch": 0.74, |
| "grad_norm": 9.550897598266602, |
| "learning_rate": 1.4915254237288137e-07, |
| "loss": 1.2853, |
| "step": 29780 |
| }, |
| { |
| "epoch": 0.74, |
| "grad_norm": 13.644095420837402, |
| "learning_rate": 1.423728813559322e-07, |
| "loss": 1.3305, |
| "step": 29790 |
| }, |
| { |
| "epoch": 0.74, |
| "grad_norm": 6.250187397003174, |
| "learning_rate": 1.3559322033898305e-07, |
| "loss": 1.2896, |
| "step": 29800 |
| }, |
| { |
| "epoch": 0.75, |
| "grad_norm": 11.07441234588623, |
| "learning_rate": 1.288135593220339e-07, |
| "loss": 1.2263, |
| "step": 29810 |
| }, |
| { |
| "epoch": 0.75, |
| "grad_norm": 13.597912788391113, |
| "learning_rate": 1.2203389830508477e-07, |
| "loss": 1.4195, |
| "step": 29820 |
| }, |
| { |
| "epoch": 0.75, |
| "grad_norm": 4.825064659118652, |
| "learning_rate": 1.152542372881356e-07, |
| "loss": 1.3549, |
| "step": 29830 |
| }, |
| { |
| "epoch": 0.75, |
| "grad_norm": 3.6931655406951904, |
| "learning_rate": 1.0847457627118646e-07, |
| "loss": 1.3675, |
| "step": 29840 |
| }, |
| { |
| "epoch": 0.75, |
| "grad_norm": 5.718669414520264, |
| "learning_rate": 1.0169491525423729e-07, |
| "loss": 1.4715, |
| "step": 29850 |
| }, |
| { |
| "epoch": 0.75, |
| "grad_norm": 6.913152694702148, |
| "learning_rate": 9.491525423728814e-08, |
| "loss": 1.4853, |
| "step": 29860 |
| }, |
| { |
| "epoch": 0.75, |
| "grad_norm": 5.6454644203186035, |
| "learning_rate": 8.8135593220339e-08, |
| "loss": 1.3638, |
| "step": 29870 |
| }, |
| { |
| "epoch": 0.75, |
| "grad_norm": 7.006107330322266, |
| "learning_rate": 8.135593220338984e-08, |
| "loss": 1.387, |
| "step": 29880 |
| }, |
| { |
| "epoch": 0.75, |
| "grad_norm": 7.425577163696289, |
| "learning_rate": 7.457627118644068e-08, |
| "loss": 1.314, |
| "step": 29890 |
| }, |
| { |
| "epoch": 0.75, |
| "grad_norm": 12.527627944946289, |
| "learning_rate": 6.779661016949153e-08, |
| "loss": 1.3822, |
| "step": 29900 |
| }, |
| { |
| "epoch": 0.75, |
| "grad_norm": 7.276275634765625, |
| "learning_rate": 6.101694915254239e-08, |
| "loss": 1.1887, |
| "step": 29910 |
| }, |
| { |
| "epoch": 0.75, |
| "grad_norm": 1.8828225135803223, |
| "learning_rate": 5.423728813559323e-08, |
| "loss": 1.3262, |
| "step": 29920 |
| }, |
| { |
| "epoch": 0.75, |
| "grad_norm": 3.2337329387664795, |
| "learning_rate": 4.745762711864407e-08, |
| "loss": 1.3487, |
| "step": 29930 |
| }, |
| { |
| "epoch": 0.75, |
| "grad_norm": 6.47509765625, |
| "learning_rate": 4.067796610169492e-08, |
| "loss": 1.3835, |
| "step": 29940 |
| }, |
| { |
| "epoch": 0.75, |
| "grad_norm": 8.018141746520996, |
| "learning_rate": 3.3898305084745764e-08, |
| "loss": 1.1818, |
| "step": 29950 |
| }, |
| { |
| "epoch": 0.75, |
| "grad_norm": 6.676591396331787, |
| "learning_rate": 2.7118644067796615e-08, |
| "loss": 1.2067, |
| "step": 29960 |
| }, |
| { |
| "epoch": 0.75, |
| "grad_norm": 3.872598648071289, |
| "learning_rate": 2.033898305084746e-08, |
| "loss": 1.2989, |
| "step": 29970 |
| }, |
| { |
| "epoch": 0.75, |
| "grad_norm": 2.7663283348083496, |
| "learning_rate": 1.3559322033898307e-08, |
| "loss": 1.321, |
| "step": 29980 |
| }, |
| { |
| "epoch": 0.75, |
| "grad_norm": 6.447133541107178, |
| "learning_rate": 6.779661016949154e-09, |
| "loss": 1.255, |
| "step": 29990 |
| }, |
| { |
| "epoch": 0.75, |
| "grad_norm": 5.224142074584961, |
| "learning_rate": 0.0, |
| "loss": 1.2453, |
| "step": 30000 |
| }, |
| { |
| "epoch": 0.75, |
| "eval_loss": 1.307248592376709, |
| "eval_runtime": 66.2202, |
| "eval_samples_per_second": 15.101, |
| "eval_steps_per_second": 15.101, |
| "step": 30000 |
| } |
| ], |
| "logging_steps": 10, |
| "max_steps": 30000, |
| "num_input_tokens_seen": 0, |
| "num_train_epochs": 1, |
| "save_steps": 2500, |
| "total_flos": 4.8306377981952e+17, |
| "train_batch_size": 1, |
| "trial_name": null, |
| "trial_params": null |
| } |