{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.0,
  "eval_steps": 500,
  "global_step": 5000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.002,
      "grad_norm": 0.3647916913032532,
      "learning_rate": 2e-08,
      "loss": 0.5569,
      "step": 10
    },
    {
      "epoch": 0.004,
      "grad_norm": 0.7772038578987122,
      "learning_rate": 4e-08,
      "loss": 0.5903,
      "step": 20
    },
    {
      "epoch": 0.006,
      "grad_norm": 0.4401947259902954,
      "learning_rate": 6e-08,
      "loss": 0.7141,
      "step": 30
    },
    {
      "epoch": 0.008,
      "grad_norm": 0.5190258622169495,
      "learning_rate": 8e-08,
      "loss": 0.5588,
      "step": 40
    },
    {
      "epoch": 0.01,
      "grad_norm": 0.29183804988861084,
      "learning_rate": 1e-07,
      "loss": 0.6131,
      "step": 50
    },
    {
      "epoch": 0.012,
      "grad_norm": 0.0,
      "learning_rate": 1.2e-07,
      "loss": 0.5862,
      "step": 60
    },
    {
      "epoch": 0.014,
      "grad_norm": 0.7411805987358093,
      "learning_rate": 1.4e-07,
      "loss": 0.6266,
      "step": 70
    },
    {
      "epoch": 0.016,
      "grad_norm": 0.7374635338783264,
      "learning_rate": 1.6e-07,
      "loss": 0.7009,
      "step": 80
    },
    {
      "epoch": 0.018,
      "grad_norm": 0.4058414697647095,
      "learning_rate": 1.8e-07,
      "loss": 0.452,
      "step": 90
    },
    {
      "epoch": 0.02,
      "grad_norm": 0.22336336970329285,
      "learning_rate": 2e-07,
      "loss": 0.5137,
      "step": 100
    },
    {
      "epoch": 0.022,
      "grad_norm": 1.8679215908050537,
      "learning_rate": 2.1999999999999998e-07,
      "loss": 0.8075,
      "step": 110
    },
    {
      "epoch": 0.024,
      "grad_norm": 0.37687167525291443,
      "learning_rate": 2.4e-07,
      "loss": 0.5353,
      "step": 120
    },
    {
      "epoch": 0.026,
      "grad_norm": 0.5401660799980164,
      "learning_rate": 2.6e-07,
      "loss": 0.514,
      "step": 130
    },
    {
      "epoch": 0.028,
      "grad_norm": 0.24486061930656433,
      "learning_rate": 2.8e-07,
      "loss": 0.4853,
      "step": 140
    },
    {
      "epoch": 0.03,
      "grad_norm": 0.628027617931366,
      "learning_rate": 3e-07,
      "loss": 0.6838,
      "step": 150
    },
    {
      "epoch": 0.032,
      "grad_norm": 0.5709905624389648,
      "learning_rate": 3.2e-07,
      "loss": 0.4199,
      "step": 160
    },
    {
      "epoch": 0.034,
      "grad_norm": 0.3342764377593994,
      "learning_rate": 3.4000000000000003e-07,
      "loss": 0.6163,
      "step": 170
    },
    {
      "epoch": 0.036,
      "grad_norm": 0.3317417800426483,
      "learning_rate": 3.6e-07,
      "loss": 0.6549,
      "step": 180
    },
    {
      "epoch": 0.038,
      "grad_norm": 0.0,
      "learning_rate": 3.7999999999999996e-07,
      "loss": 0.6489,
      "step": 190
    },
    {
      "epoch": 0.04,
      "grad_norm": 0.4099644422531128,
      "learning_rate": 4e-07,
      "loss": 0.7401,
      "step": 200
    },
    {
      "epoch": 0.042,
      "grad_norm": 0.5073985457420349,
      "learning_rate": 4.1999999999999995e-07,
      "loss": 0.5801,
      "step": 210
    },
    {
      "epoch": 0.044,
      "grad_norm": 0.0,
      "learning_rate": 4.3999999999999997e-07,
      "loss": 0.6649,
      "step": 220
    },
    {
      "epoch": 0.046,
      "grad_norm": 0.4103664457798004,
      "learning_rate": 4.6e-07,
      "loss": 0.5962,
      "step": 230
    },
    {
      "epoch": 0.048,
      "grad_norm": 0.0,
      "learning_rate": 4.8e-07,
      "loss": 0.2218,
      "step": 240
    },
    {
      "epoch": 0.05,
      "grad_norm": 0.8555938005447388,
      "learning_rate": 5e-07,
      "loss": 0.6688,
      "step": 250
    },
    {
      "epoch": 0.052,
      "grad_norm": 0.18369270861148834,
      "learning_rate": 5.2e-07,
      "loss": 0.5419,
      "step": 260
    },
    {
      "epoch": 0.054,
      "grad_norm": 1.0677138566970825,
      "learning_rate": 5.4e-07,
      "loss": 0.7335,
      "step": 270
    },
    {
      "epoch": 0.056,
      "grad_norm": 0.6109152436256409,
      "learning_rate": 5.6e-07,
      "loss": 0.548,
      "step": 280
    },
    {
      "epoch": 0.058,
      "grad_norm": 0.4339801073074341,
      "learning_rate": 5.8e-07,
      "loss": 0.5819,
      "step": 290
    },
    {
      "epoch": 0.06,
      "grad_norm": 0.5122050642967224,
      "learning_rate": 6e-07,
      "loss": 0.5329,
      "step": 300
    },
    {
      "epoch": 0.062,
      "grad_norm": 0.5236920714378357,
      "learning_rate": 6.2e-07,
      "loss": 0.6093,
      "step": 310
    },
    {
      "epoch": 0.064,
      "grad_norm": 0.7817385196685791,
      "learning_rate": 6.4e-07,
      "loss": 0.46,
      "step": 320
    },
    {
      "epoch": 0.066,
      "grad_norm": 0.7094607353210449,
      "learning_rate": 6.6e-07,
      "loss": 0.6628,
      "step": 330
    },
    {
      "epoch": 0.068,
      "grad_norm": 0.6325480341911316,
      "learning_rate": 6.800000000000001e-07,
      "loss": 0.7074,
      "step": 340
    },
    {
      "epoch": 0.07,
      "grad_norm": 0.7745736241340637,
      "learning_rate": 7e-07,
      "loss": 0.4944,
      "step": 350
    },
    {
      "epoch": 0.072,
      "grad_norm": 0.6897551417350769,
      "learning_rate": 7.2e-07,
      "loss": 0.5193,
      "step": 360
    },
    {
      "epoch": 0.074,
      "grad_norm": 0.0,
      "learning_rate": 7.4e-07,
      "loss": 0.5934,
      "step": 370
    },
    {
      "epoch": 0.076,
      "grad_norm": 0.5348847508430481,
      "learning_rate": 7.599999999999999e-07,
      "loss": 0.6671,
      "step": 380
    },
    {
      "epoch": 0.078,
      "grad_norm": 1.388077974319458,
      "learning_rate": 7.799999999999999e-07,
      "loss": 0.6666,
      "step": 390
    },
    {
      "epoch": 0.08,
      "grad_norm": 0.3098819851875305,
      "learning_rate": 8e-07,
      "loss": 0.5733,
      "step": 400
    },
    {
      "epoch": 0.082,
      "grad_norm": 0.5731427073478699,
      "learning_rate": 8.199999999999999e-07,
      "loss": 0.8238,
      "step": 410
    },
    {
      "epoch": 0.084,
      "grad_norm": 0.34736743569374084,
      "learning_rate": 8.399999999999999e-07,
      "loss": 0.7012,
      "step": 420
    },
    {
      "epoch": 0.086,
      "grad_norm": 0.7676399350166321,
      "learning_rate": 8.599999999999999e-07,
      "loss": 0.5746,
      "step": 430
    },
    {
      "epoch": 0.088,
      "grad_norm": 0.3169877231121063,
      "learning_rate": 8.799999999999999e-07,
      "loss": 0.4205,
      "step": 440
    },
    {
      "epoch": 0.09,
      "grad_norm": 0.9648386240005493,
      "learning_rate": 9e-07,
      "loss": 0.5958,
      "step": 450
    },
    {
      "epoch": 0.092,
      "grad_norm": 0.0,
      "learning_rate": 9.2e-07,
      "loss": 0.4632,
      "step": 460
    },
    {
      "epoch": 0.094,
      "grad_norm": 0.3318028748035431,
      "learning_rate": 9.399999999999999e-07,
      "loss": 0.694,
      "step": 470
    },
    {
      "epoch": 0.096,
      "grad_norm": 0.6652544140815735,
      "learning_rate": 9.6e-07,
      "loss": 0.5028,
      "step": 480
    },
    {
      "epoch": 0.098,
      "grad_norm": 1.2257837057113647,
      "learning_rate": 9.8e-07,
      "loss": 0.683,
      "step": 490
    },
    {
      "epoch": 0.1,
      "grad_norm": 1.051938533782959,
      "learning_rate": 1e-06,
      "loss": 0.6468,
      "step": 500
    },
    {
      "epoch": 0.102,
      "grad_norm": 0.36456993222236633,
      "learning_rate": 9.999878153526972e-07,
      "loss": 0.3102,
      "step": 510
    },
    {
      "epoch": 0.104,
      "grad_norm": 0.6668845415115356,
      "learning_rate": 9.99951262004652e-07,
      "loss": 1.0302,
      "step": 520
    },
    {
      "epoch": 0.106,
      "grad_norm": 0.8017757534980774,
      "learning_rate": 9.998903417374226e-07,
      "loss": 0.4671,
      "step": 530
    },
    {
      "epoch": 0.108,
      "grad_norm": 0.7417317032814026,
      "learning_rate": 9.99805057520177e-07,
      "loss": 0.4329,
      "step": 540
    },
    {
      "epoch": 0.11,
      "grad_norm": 0.0,
      "learning_rate": 9.996954135095478e-07,
      "loss": 0.5071,
      "step": 550
    },
    {
      "epoch": 0.112,
      "grad_norm": 0.7268297076225281,
      "learning_rate": 9.99561415049429e-07,
      "loss": 0.6039,
      "step": 560
    },
    {
      "epoch": 0.114,
      "grad_norm": 0.3723108470439911,
      "learning_rate": 9.99403068670717e-07,
      "loss": 0.3555,
      "step": 570
    },
    {
      "epoch": 0.116,
      "grad_norm": 0.61618971824646,
      "learning_rate": 9.992203820909905e-07,
      "loss": 0.4968,
      "step": 580
    },
    {
      "epoch": 0.118,
      "grad_norm": 0.5067716836929321,
      "learning_rate": 9.990133642141357e-07,
      "loss": 0.6656,
      "step": 590
    },
    {
      "epoch": 0.12,
      "grad_norm": 0.5604730248451233,
      "learning_rate": 9.98782025129912e-07,
      "loss": 0.5863,
      "step": 600
    },
    {
      "epoch": 0.122,
      "grad_norm": 0.839195728302002,
      "learning_rate": 9.9852637611346e-07,
      "loss": 0.6651,
      "step": 610
    },
    {
      "epoch": 0.124,
      "grad_norm": 0.0,
      "learning_rate": 9.982464296247522e-07,
      "loss": 0.468,
      "step": 620
    },
    {
      "epoch": 0.126,
      "grad_norm": 0.35427430272102356,
      "learning_rate": 9.97942199307985e-07,
      "loss": 0.6606,
      "step": 630
    },
    {
      "epoch": 0.128,
      "grad_norm": 1.1575822830200195,
      "learning_rate": 9.976136999909155e-07,
      "loss": 0.6023,
      "step": 640
    },
    {
      "epoch": 0.13,
      "grad_norm": 0.45604798197746277,
      "learning_rate": 9.972609476841365e-07,
      "loss": 0.5876,
      "step": 650
    },
    {
      "epoch": 0.132,
      "grad_norm": 0.0,
      "learning_rate": 9.968839595802981e-07,
      "loss": 0.3609,
      "step": 660
    },
    {
      "epoch": 0.134,
      "grad_norm": 0.4722711443901062,
      "learning_rate": 9.964827540532684e-07,
      "loss": 0.3629,
      "step": 670
    },
    {
      "epoch": 0.136,
      "grad_norm": 0.34166085720062256,
      "learning_rate": 9.960573506572389e-07,
      "loss": 0.3346,
      "step": 680
    },
    {
      "epoch": 0.138,
      "grad_norm": 0.6107274293899536,
      "learning_rate": 9.956077701257707e-07,
      "loss": 0.501,
      "step": 690
    },
    {
      "epoch": 0.14,
      "grad_norm": 0.40759605169296265,
      "learning_rate": 9.95134034370785e-07,
      "loss": 0.4942,
      "step": 700
    },
    {
      "epoch": 0.142,
      "grad_norm": 0.8941773176193237,
      "learning_rate": 9.946361664814943e-07,
      "loss": 0.5462,
      "step": 710
    },
    {
      "epoch": 0.144,
      "grad_norm": 0.416051983833313,
      "learning_rate": 9.941141907232763e-07,
      "loss": 0.4108,
      "step": 720
    },
    {
      "epoch": 0.146,
      "grad_norm": 0.7638356685638428,
      "learning_rate": 9.93568132536494e-07,
      "loss": 0.417,
      "step": 730
    },
    {
      "epoch": 0.148,
      "grad_norm": 0.25665077567100525,
      "learning_rate": 9.929980185352525e-07,
      "loss": 0.5837,
      "step": 740
    },
    {
      "epoch": 0.15,
      "grad_norm": 2.3630926609039307,
      "learning_rate": 9.92403876506104e-07,
      "loss": 0.5312,
      "step": 750
    },
    {
      "epoch": 0.152,
      "grad_norm": 0.37974926829338074,
      "learning_rate": 9.91785735406693e-07,
      "loss": 0.5809,
      "step": 760
    },
    {
      "epoch": 0.154,
      "grad_norm": 0.46899351477622986,
      "learning_rate": 9.911436253643443e-07,
      "loss": 0.4575,
      "step": 770
    },
    {
      "epoch": 0.156,
      "grad_norm": 0.788362443447113,
      "learning_rate": 9.904775776745956e-07,
      "loss": 0.4922,
      "step": 780
    },
    {
      "epoch": 0.158,
      "grad_norm": 0.9416216611862183,
      "learning_rate": 9.89787624799672e-07,
      "loss": 0.4482,
      "step": 790
    },
    {
      "epoch": 0.16,
      "grad_norm": 0.40510323643684387,
      "learning_rate": 9.890738003669027e-07,
      "loss": 0.3883,
      "step": 800
    },
    {
      "epoch": 0.162,
      "grad_norm": 0.22145682573318481,
      "learning_rate": 9.883361391670839e-07,
      "loss": 0.3739,
      "step": 810
    },
    {
      "epoch": 0.164,
      "grad_norm": 0.6583501696586609,
      "learning_rate": 9.875746771527815e-07,
      "loss": 0.4527,
      "step": 820
    },
    {
      "epoch": 0.166,
      "grad_norm": 0.4808110296726227,
      "learning_rate": 9.8678945143658e-07,
      "loss": 0.5489,
      "step": 830
    },
    {
      "epoch": 0.168,
      "grad_norm": 0.44500961899757385,
      "learning_rate": 9.859805002892731e-07,
      "loss": 0.4352,
      "step": 840
    },
    {
      "epoch": 0.17,
      "grad_norm": 0.31563296914100647,
      "learning_rate": 9.851478631379982e-07,
      "loss": 0.7334,
      "step": 850
    },
    {
      "epoch": 0.172,
      "grad_norm": 0.0,
      "learning_rate": 9.842915805643156e-07,
      "loss": 0.2776,
      "step": 860
    },
    {
      "epoch": 0.174,
      "grad_norm": 0.546096920967102,
      "learning_rate": 9.834116943022297e-07,
      "loss": 0.4341,
      "step": 870
    },
    {
      "epoch": 0.176,
      "grad_norm": 0.0,
      "learning_rate": 9.825082472361556e-07,
      "loss": 0.3616,
      "step": 880
    },
    {
      "epoch": 0.178,
      "grad_norm": 0.48355451226234436,
      "learning_rate": 9.81581283398829e-07,
      "loss": 0.375,
      "step": 890
    },
    {
      "epoch": 0.18,
      "grad_norm": 0.3133198618888855,
      "learning_rate": 9.806308479691594e-07,
      "loss": 0.3654,
      "step": 900
    },
    {
      "epoch": 0.182,
      "grad_norm": 0.40422284603118896,
      "learning_rate": 9.796569872700287e-07,
      "loss": 0.4661,
      "step": 910
    },
    {
      "epoch": 0.184,
      "grad_norm": 0.0,
      "learning_rate": 9.786597487660335e-07,
      "loss": 0.2929,
      "step": 920
    },
    {
      "epoch": 0.186,
      "grad_norm": 0.6017171740531921,
      "learning_rate": 9.776391810611718e-07,
      "loss": 0.5152,
      "step": 930
    },
    {
      "epoch": 0.188,
      "grad_norm": 0.7013118863105774,
      "learning_rate": 9.765953338964734e-07,
      "loss": 0.3121,
      "step": 940
    },
    {
      "epoch": 0.19,
      "grad_norm": 0.35765185952186584,
      "learning_rate": 9.755282581475767e-07,
      "loss": 0.3817,
      "step": 950
    },
    {
      "epoch": 0.192,
      "grad_norm": 0.4983702301979065,
      "learning_rate": 9.744380058222482e-07,
      "loss": 0.5493,
      "step": 960
    },
    {
      "epoch": 0.194,
      "grad_norm": 0.5736071467399597,
      "learning_rate": 9.733246300578482e-07,
      "loss": 0.4638,
      "step": 970
    },
    {
      "epoch": 0.196,
      "grad_norm": 0.3114262521266937,
      "learning_rate": 9.721881851187405e-07,
      "loss": 0.3779,
      "step": 980
    },
    {
      "epoch": 0.198,
      "grad_norm": 0.7032444477081299,
      "learning_rate": 9.710287263936483e-07,
      "loss": 0.5224,
      "step": 990
    },
    {
      "epoch": 0.2,
      "grad_norm": 0.5082822442054749,
      "learning_rate": 9.698463103929541e-07,
      "loss": 0.3522,
      "step": 1000
    },
    {
      "epoch": 0.202,
      "grad_norm": 0.29254528880119324,
      "learning_rate": 9.686409947459457e-07,
      "loss": 0.5438,
      "step": 1010
    },
    {
      "epoch": 0.204,
      "grad_norm": 0.5429019927978516,
      "learning_rate": 9.674128381980071e-07,
      "loss": 0.3603,
      "step": 1020
    },
    {
      "epoch": 0.206,
      "grad_norm": 0.6649332046508789,
      "learning_rate": 9.661619006077561e-07,
      "loss": 0.4164,
      "step": 1030
    },
    {
      "epoch": 0.208,
      "grad_norm": 0.4190509021282196,
      "learning_rate": 9.648882429441256e-07,
      "loss": 0.3494,
      "step": 1040
    },
    {
      "epoch": 0.21,
      "grad_norm": 0.0,
      "learning_rate": 9.635919272833937e-07,
      "loss": 0.2614,
      "step": 1050
    },
    {
      "epoch": 0.212,
      "grad_norm": 0.5640019774436951,
      "learning_rate": 9.622730168061567e-07,
      "loss": 0.4488,
      "step": 1060
    },
    {
      "epoch": 0.214,
      "grad_norm": 0.4980197250843048,
      "learning_rate": 9.609315757942502e-07,
      "loss": 0.4833,
      "step": 1070
    },
    {
      "epoch": 0.216,
      "grad_norm": 0.5959740281105042,
      "learning_rate": 9.595676696276171e-07,
      "loss": 0.5463,
      "step": 1080
    },
    {
      "epoch": 0.218,
      "grad_norm": 0.5091766715049744,
      "learning_rate": 9.581813647811197e-07,
      "loss": 0.4226,
      "step": 1090
    },
    {
      "epoch": 0.22,
      "grad_norm": 1.7578771114349365,
      "learning_rate": 9.567727288213004e-07,
      "loss": 0.4465,
      "step": 1100
    },
    {
      "epoch": 0.222,
      "grad_norm": 0.849757969379425,
      "learning_rate": 9.553418304030885e-07,
      "loss": 0.4341,
      "step": 1110
    },
    {
      "epoch": 0.224,
      "grad_norm": 0.2365778684616089,
      "learning_rate": 9.538887392664543e-07,
      "loss": 0.252,
      "step": 1120
    },
    {
      "epoch": 0.226,
      "grad_norm": 0.28902509808540344,
      "learning_rate": 9.524135262330098e-07,
      "loss": 0.4412,
      "step": 1130
    },
    {
      "epoch": 0.228,
      "grad_norm": 0.29451924562454224,
      "learning_rate": 9.509162632025569e-07,
      "loss": 0.383,
      "step": 1140
    },
    {
      "epoch": 0.23,
      "grad_norm": 0.6582534313201904,
      "learning_rate": 9.493970231495834e-07,
      "loss": 0.498,
      "step": 1150
    },
    {
      "epoch": 0.232,
      "grad_norm": 0.43566933274269104,
      "learning_rate": 9.478558801197064e-07,
      "loss": 0.4814,
      "step": 1160
    },
    {
      "epoch": 0.234,
      "grad_norm": 0.5926059484481812,
      "learning_rate": 9.462929092260628e-07,
      "loss": 0.3324,
      "step": 1170
    },
    {
      "epoch": 0.236,
      "grad_norm": 0.2790277302265167,
      "learning_rate": 9.447081866456487e-07,
      "loss": 0.2978,
      "step": 1180
    },
    {
      "epoch": 0.238,
      "grad_norm": 0.26161783933639526,
      "learning_rate": 9.431017896156073e-07,
      "loss": 0.3284,
      "step": 1190
    },
    {
      "epoch": 0.24,
      "grad_norm": 0.7597343921661377,
      "learning_rate": 9.414737964294634e-07,
      "loss": 0.3644,
      "step": 1200
    },
    {
      "epoch": 0.242,
      "grad_norm": 0.5080224275588989,
      "learning_rate": 9.398242864333083e-07,
      "loss": 0.3615,
      "step": 1210
    },
    {
      "epoch": 0.244,
      "grad_norm": 0.9933916330337524,
      "learning_rate": 9.381533400219317e-07,
      "loss": 0.5431,
      "step": 1220
    },
    {
      "epoch": 0.246,
      "grad_norm": 0.6644570231437683,
      "learning_rate": 9.364610386349047e-07,
      "loss": 0.4106,
      "step": 1230
    },
    {
      "epoch": 0.248,
      "grad_norm": 0.6827860474586487,
      "learning_rate": 9.347474647526095e-07,
      "loss": 0.4181,
      "step": 1240
    },
    {
      "epoch": 0.25,
      "grad_norm": 0.580407440662384,
      "learning_rate": 9.330127018922193e-07,
      "loss": 0.2969,
      "step": 1250
    },
    {
      "epoch": 0.252,
      "grad_norm": 0.7604784369468689,
      "learning_rate": 9.312568346036287e-07,
      "loss": 0.4105,
      "step": 1260
    },
    {
      "epoch": 0.254,
      "grad_norm": 0.42066580057144165,
      "learning_rate": 9.294799484653322e-07,
      "loss": 0.3617,
      "step": 1270
    },
    {
      "epoch": 0.256,
      "grad_norm": 0.8827233910560608,
      "learning_rate": 9.276821300802533e-07,
      "loss": 0.488,
      "step": 1280
    },
    {
      "epoch": 0.258,
      "grad_norm": 0.586871325969696,
      "learning_rate": 9.258634670715237e-07,
      "loss": 0.2852,
      "step": 1290
    },
    {
      "epoch": 0.26,
      "grad_norm": 0.2628524601459503,
      "learning_rate": 9.240240480782129e-07,
      "loss": 0.3642,
      "step": 1300
    },
    {
      "epoch": 0.262,
      "grad_norm": 0.4368833601474762,
      "learning_rate": 9.221639627510075e-07,
      "loss": 0.3637,
      "step": 1310
    },
    {
      "epoch": 0.264,
      "grad_norm": 0.5321241617202759,
      "learning_rate": 9.202833017478421e-07,
      "loss": 0.3316,
      "step": 1320
    },
    {
      "epoch": 0.266,
      "grad_norm": 0.2961817681789398,
      "learning_rate": 9.183821567294808e-07,
      "loss": 0.3285,
      "step": 1330
    },
    {
      "epoch": 0.268,
      "grad_norm": 0.16444119811058044,
      "learning_rate": 9.164606203550497e-07,
      "loss": 0.3077,
      "step": 1340
    },
    {
      "epoch": 0.27,
      "grad_norm": 0.28540509939193726,
      "learning_rate": 9.145187862775208e-07,
      "loss": 0.347,
      "step": 1350
    },
    {
      "epoch": 0.272,
      "grad_norm": 2.098088264465332,
      "learning_rate": 9.125567491391475e-07,
      "loss": 0.4526,
      "step": 1360
    },
    {
      "epoch": 0.274,
      "grad_norm": 0.4260193109512329,
      "learning_rate": 9.10574604566852e-07,
      "loss": 0.3004,
      "step": 1370
    },
    {
      "epoch": 0.276,
      "grad_norm": 3.244032621383667,
      "learning_rate": 9.085724491675642e-07,
      "loss": 0.4287,
      "step": 1380
    },
    {
      "epoch": 0.278,
      "grad_norm": 0.31434857845306396,
      "learning_rate": 9.065503805235137e-07,
      "loss": 0.2818,
      "step": 1390
    },
    {
      "epoch": 0.28,
      "grad_norm": 0.3869690001010895,
      "learning_rate": 9.045084971874737e-07,
      "loss": 0.247,
      "step": 1400
    },
    {
      "epoch": 0.282,
      "grad_norm": 2.252443790435791,
      "learning_rate": 9.02446898677957e-07,
      "loss": 0.4637,
      "step": 1410
    },
    {
      "epoch": 0.284,
      "grad_norm": 0.4407874047756195,
      "learning_rate": 9.003656854743666e-07,
      "loss": 0.321,
      "step": 1420
    },
    {
      "epoch": 0.286,
      "grad_norm": 0.2926657795906067,
      "learning_rate": 8.982649590120981e-07,
      "loss": 0.3643,
      "step": 1430
    },
    {
      "epoch": 0.288,
      "grad_norm": 0.8922311663627625,
      "learning_rate": 8.961448216775953e-07,
      "loss": 0.4458,
      "step": 1440
    },
    {
      "epoch": 0.29,
      "grad_norm": 0.5968508124351501,
      "learning_rate": 8.940053768033608e-07,
      "loss": 0.3387,
      "step": 1450
    },
    {
      "epoch": 0.292,
      "grad_norm": 0.502495288848877,
      "learning_rate": 8.918467286629198e-07,
      "loss": 0.2984,
      "step": 1460
    },
    {
      "epoch": 0.294,
      "grad_norm": 0.2826671898365021,
      "learning_rate": 8.896689824657371e-07,
      "loss": 0.2773,
      "step": 1470
    },
    {
      "epoch": 0.296,
      "grad_norm": 0.6212120056152344,
      "learning_rate": 8.874722443520898e-07,
      "loss": 0.3047,
      "step": 1480
    },
    {
      "epoch": 0.298,
      "grad_norm": 0.35743391513824463,
      "learning_rate": 8.852566213878946e-07,
      "loss": 0.249,
      "step": 1490
    },
    {
      "epoch": 0.3,
      "grad_norm": 0.21640463173389435,
      "learning_rate": 8.83022221559489e-07,
      "loss": 0.5102,
      "step": 1500
    },
    {
      "epoch": 0.302,
      "grad_norm": 0.5322903990745544,
      "learning_rate": 8.807691537683684e-07,
      "loss": 0.4785,
      "step": 1510
    },
    {
      "epoch": 0.304,
      "grad_norm": 0.5649182200431824,
      "learning_rate": 8.784975278258782e-07,
      "loss": 0.4196,
      "step": 1520
    },
    {
      "epoch": 0.306,
      "grad_norm": 0.8210440278053284,
      "learning_rate": 8.762074544478621e-07,
      "loss": 0.4145,
      "step": 1530
    },
    {
      "epoch": 0.308,
      "grad_norm": 0.5836989283561707,
      "learning_rate": 8.73899045249266e-07,
      "loss": 0.3771,
      "step": 1540
    },
    {
      "epoch": 0.31,
      "grad_norm": 0.3912012279033661,
      "learning_rate": 8.71572412738697e-07,
      "loss": 0.275,
      "step": 1550
    },
    {
      "epoch": 0.312,
      "grad_norm": 0.9098830819129944,
      "learning_rate": 8.69227670312942e-07,
      "loss": 0.3984,
      "step": 1560
    },
    {
      "epoch": 0.314,
      "grad_norm": 0.34057459235191345,
      "learning_rate": 8.668649322514381e-07,
      "loss": 0.198,
      "step": 1570
    },
    {
      "epoch": 0.316,
      "grad_norm": 0.5625966787338257,
      "learning_rate": 8.644843137107057e-07,
      "loss": 0.3518,
      "step": 1580
    },
    {
      "epoch": 0.318,
      "grad_norm": 0.42884501814842224,
      "learning_rate": 8.620859307187338e-07,
      "loss": 0.2761,
      "step": 1590
    },
    {
      "epoch": 0.32,
      "grad_norm": 0.45647886395454407,
      "learning_rate": 8.596699001693255e-07,
      "loss": 0.2784,
      "step": 1600
    },
    {
      "epoch": 0.322,
      "grad_norm": 0.6005297899246216,
      "learning_rate": 8.572363398164016e-07,
      "loss": 0.314,
      "step": 1610
    },
    {
      "epoch": 0.324,
      "grad_norm": 0.0,
      "learning_rate": 8.547853682682604e-07,
      "loss": 0.3661,
      "step": 1620
    },
    {
      "epoch": 0.326,
      "grad_norm": 0.1766682118177414,
      "learning_rate": 8.523171049817973e-07,
      "loss": 0.3609,
      "step": 1630
    },
    {
      "epoch": 0.328,
      "grad_norm": 0.3773224353790283,
      "learning_rate": 8.498316702566826e-07,
      "loss": 0.3768,
      "step": 1640
    },
    {
      "epoch": 0.33,
      "grad_norm": 0.8536304235458374,
      "learning_rate": 8.473291852294986e-07,
      "loss": 0.3376,
      "step": 1650
    },
    {
      "epoch": 0.332,
      "grad_norm": 0.40247610211372375,
      "learning_rate": 8.448097718678348e-07,
      "loss": 0.3568,
      "step": 1660
    },
    {
      "epoch": 0.334,
      "grad_norm": 0.711200475692749,
      "learning_rate": 8.422735529643443e-07,
      "loss": 0.245,
      "step": 1670
    },
    {
      "epoch": 0.336,
      "grad_norm": 0.271049439907074,
      "learning_rate": 8.397206521307583e-07,
      "loss": 0.2278,
      "step": 1680
    },
    {
      "epoch": 0.338,
      "grad_norm": 0.7005532383918762,
      "learning_rate": 8.371511937918617e-07,
      "loss": 0.3255,
      "step": 1690
    },
    {
      "epoch": 0.34,
      "grad_norm": 0.2691313922405243,
      "learning_rate": 8.34565303179429e-07,
      "loss": 0.2114,
      "step": 1700
    },
    {
      "epoch": 0.342,
      "grad_norm": 0.2832666337490082,
      "learning_rate": 8.319631063261207e-07,
      "loss": 0.3853,
      "step": 1710
    },
    {
      "epoch": 0.344,
      "grad_norm": 0.5301315188407898,
      "learning_rate": 8.293447300593402e-07,
      "loss": 0.3129,
      "step": 1720
    },
    {
      "epoch": 0.346,
      "grad_norm": 1.369352102279663,
      "learning_rate": 8.267103019950528e-07,
      "loss": 0.2986,
      "step": 1730
    },
    {
      "epoch": 0.348,
      "grad_norm": 1.4075043201446533,
      "learning_rate": 8.240599505315654e-07,
      "loss": 0.3362,
      "step": 1740
    },
    {
      "epoch": 0.35,
      "grad_norm": 0.5943218469619751,
      "learning_rate": 8.213938048432696e-07,
      "loss": 0.2817,
      "step": 1750
    },
    {
      "epoch": 0.352,
      "grad_norm": 1.011788249015808,
      "learning_rate": 8.187119948743449e-07,
      "loss": 0.3021,
      "step": 1760
    },
    {
      "epoch": 0.354,
      "grad_norm": 0.20643028616905212,
      "learning_rate": 8.160146513324254e-07,
      "loss": 0.38,
      "step": 1770
    },
    {
      "epoch": 0.356,
      "grad_norm": 0.623815655708313,
      "learning_rate": 8.133019056822302e-07,
      "loss": 0.3966,
      "step": 1780
    },
    {
      "epoch": 0.358,
      "grad_norm": 0.9031881093978882,
      "learning_rate": 8.105738901391551e-07,
      "loss": 0.25,
      "step": 1790
    },
    {
      "epoch": 0.36,
      "grad_norm": 0.29175126552581787,
      "learning_rate": 8.07830737662829e-07,
      "loss": 0.262,
      "step": 1800
    },
    {
      "epoch": 0.362,
      "grad_norm": 0.6108918190002441,
      "learning_rate": 8.050725819506339e-07,
      "loss": 0.2751,
      "step": 1810
    },
    {
      "epoch": 0.364,
      "grad_norm": 0.32167190313339233,
      "learning_rate": 8.022995574311875e-07,
      "loss": 0.2439,
      "step": 1820
    },
    {
      "epoch": 0.366,
      "grad_norm": 1.1157898902893066,
      "learning_rate": 7.995117992577928e-07,
      "loss": 0.3475,
      "step": 1830
    },
    {
      "epoch": 0.368,
      "grad_norm": 0.0,
      "learning_rate": 7.967094433018508e-07,
      "loss": 0.3295,
      "step": 1840
    },
    {
      "epoch": 0.37,
      "grad_norm": 0.3029947280883789,
      "learning_rate": 7.938926261462365e-07,
      "loss": 0.257,
      "step": 1850
    },
    {
      "epoch": 0.372,
      "grad_norm": 0.2989325225353241,
      "learning_rate": 7.910614850786447e-07,
      "loss": 0.3749,
      "step": 1860
    },
    {
      "epoch": 0.374,
      "grad_norm": 0.3622772693634033,
      "learning_rate": 7.882161580848966e-07,
      "loss": 0.373,
      "step": 1870
    },
    {
      "epoch": 0.376,
      "grad_norm": 0.49973657727241516,
      "learning_rate": 7.853567838422159e-07,
      "loss": 0.3807,
      "step": 1880
    },
    {
      "epoch": 0.378,
      "grad_norm": 0.33801835775375366,
      "learning_rate": 7.82483501712469e-07,
      "loss": 0.1554,
      "step": 1890
    },
    {
      "epoch": 0.38,
      "grad_norm": 0.8740425705909729,
      "learning_rate": 7.795964517353733e-07,
      "loss": 0.2553,
      "step": 1900
    },
    {
      "epoch": 0.382,
      "grad_norm": 0.6293899416923523,
      "learning_rate": 7.76695774621672e-07,
      "loss": 0.2488,
      "step": 1910
    },
    {
      "epoch": 0.384,
      "grad_norm": 0.6553785800933838,
      "learning_rate": 7.737816117462751e-07,
      "loss": 0.2794,
      "step": 1920
    },
    {
      "epoch": 0.386,
      "grad_norm": 0.6106793880462646,
      "learning_rate": 7.7085410514137e-07,
      "loss": 0.2829,
      "step": 1930
    },
    {
      "epoch": 0.388,
      "grad_norm": 0.6467621326446533,
      "learning_rate": 7.679133974894982e-07,
      "loss": 0.3499,
      "step": 1940
    },
    {
      "epoch": 0.39,
      "grad_norm": 0.3079082667827606,
      "learning_rate": 7.649596321166024e-07,
      "loss": 0.1809,
      "step": 1950
    },
    {
      "epoch": 0.392,
      "grad_norm": 0.0,
      "learning_rate": 7.619929529850396e-07,
      "loss": 0.2201,
      "step": 1960
    },
    {
      "epoch": 0.394,
      "grad_norm": 0.7928106784820557,
      "learning_rate": 7.590135046865651e-07,
      "loss": 0.2696,
      "step": 1970
    },
    {
      "epoch": 0.396,
      "grad_norm": 0.4864124357700348,
      "learning_rate": 7.560214324352858e-07,
      "loss": 0.295,
      "step": 1980
    },
    {
      "epoch": 0.398,
      "grad_norm": 0.41892245411872864,
      "learning_rate": 7.530168820605818e-07,
      "loss": 0.1679,
      "step": 1990
    },
    {
      "epoch": 0.4,
      "grad_norm": 1.443067193031311,
      "learning_rate": 7.5e-07,
      "loss": 0.3207,
      "step": 2000
    },
    {
      "epoch": 0.402,
      "grad_norm": 0.29227420687675476,
      "learning_rate": 7.469709332921154e-07,
      "loss": 0.2237,
      "step": 2010
    },
    {
      "epoch": 0.404,
      "grad_norm": 0.9847503304481506,
      "learning_rate": 7.439298295693663e-07,
      "loss": 0.2219,
      "step": 2020
    },
    {
      "epoch": 0.406,
      "grad_norm": 0.5221825838088989,
      "learning_rate": 7.408768370508576e-07,
      "loss": 0.2063,
      "step": 2030
    },
    {
      "epoch": 0.408,
      "grad_norm": 0.781994640827179,
      "learning_rate": 7.378121045351377e-07,
      "loss": 0.2827,
      "step": 2040
    },
    {
      "epoch": 0.41,
      "grad_norm": 0.4918607771396637,
      "learning_rate": 7.347357813929454e-07,
      "loss": 0.2603,
      "step": 2050
    },
    {
      "epoch": 0.412,
      "grad_norm": 0.2811141610145569,
      "learning_rate": 7.316480175599308e-07,
      "loss": 0.1887,
      "step": 2060
    },
    {
      "epoch": 0.414,
      "grad_norm": 0.6080925464630127,
      "learning_rate": 7.285489635293471e-07,
      "loss": 0.2102,
      "step": 2070
    },
    {
      "epoch": 0.416,
      "grad_norm": 0.4952521026134491,
      "learning_rate": 7.254387703447153e-07,
      "loss": 0.2386,
      "step": 2080
    },
    {
      "epoch": 0.418,
      "grad_norm": 0.44920068979263306,
      "learning_rate": 7.223175895924637e-07,
      "loss": 0.203,
      "step": 2090
    },
    {
      "epoch": 0.42,
      "grad_norm": 0.4240502417087555,
      "learning_rate": 7.191855733945386e-07,
      "loss": 0.268,
      "step": 2100
    },
    {
      "epoch": 0.422,
      "grad_norm": 1.42499577999115,
      "learning_rate": 7.160428744009912e-07,
      "loss": 0.3531,
      "step": 2110
    },
    {
      "epoch": 0.424,
      "grad_norm": 0.6819471716880798,
      "learning_rate": 7.128896457825363e-07,
      "loss": 0.3363,
      "step": 2120
    },
    {
      "epoch": 0.426,
      "grad_norm": 0.4226500391960144,
      "learning_rate": 7.097260412230885e-07,
      "loss": 0.3331,
      "step": 2130
    },
    {
      "epoch": 0.428,
      "grad_norm": 0.32036811113357544,
      "learning_rate": 7.065522149122709e-07,
      "loss": 0.2405,
      "step": 2140
    },
    {
      "epoch": 0.43,
      "grad_norm": 0.4470788538455963,
      "learning_rate": 7.033683215379002e-07,
      "loss": 0.1376,
      "step": 2150
    },
    {
      "epoch": 0.432,
      "grad_norm": 1.326379418373108,
      "learning_rate": 7.001745162784475e-07,
      "loss": 0.4589,
      "step": 2160
    },
    {
      "epoch": 0.434,
      "grad_norm": 0.26921626925468445,
      "learning_rate": 6.969709547954755e-07,
      "loss": 0.2441,
      "step": 2170
    },
    {
      "epoch": 0.436,
      "grad_norm": 0.6607092618942261,
      "learning_rate": 6.937577932260514e-07,
      "loss": 0.2402,
      "step": 2180
    },
    {
      "epoch": 0.438,
      "grad_norm": 0.40199458599090576,
      "learning_rate": 6.905351881751371e-07,
      "loss": 0.2452,
      "step": 2190
    },
    {
      "epoch": 0.44,
      "grad_norm": 0.0,
      "learning_rate": 6.87303296707956e-07,
      "loss": 0.1521,
      "step": 2200
    },
    {
      "epoch": 0.442,
      "grad_norm": 0.5950799584388733,
      "learning_rate": 6.840622763423391e-07,
      "loss": 0.2665,
      "step": 2210
    },
    {
      "epoch": 0.444,
      "grad_norm": 0.33319640159606934,
      "learning_rate": 6.80812285041046e-07,
      "loss": 0.2881,
      "step": 2220
    },
    {
      "epoch": 0.446,
      "grad_norm": 0.2309122532606125,
      "learning_rate": 6.775534812040686e-07,
      "loss": 0.2833,
      "step": 2230
    },
    {
      "epoch": 0.448,
      "grad_norm": 0.5239594578742981,
      "learning_rate": 6.742860236609076e-07,
      "loss": 0.2009,
      "step": 2240
    },
    {
      "epoch": 0.45,
      "grad_norm": 0.32858043909072876,
      "learning_rate": 6.710100716628344e-07,
      "loss": 0.2794,
      "step": 2250
    },
    {
      "epoch": 0.452,
      "grad_norm": 1.1937505006790161,
      "learning_rate": 6.677257848751276e-07,
      "loss": 0.2411,
      "step": 2260
    },
    {
      "epoch": 0.454,
      "grad_norm": 0.4281042218208313,
      "learning_rate": 6.644333233692916e-07,
      "loss": 0.1991,
      "step": 2270
    },
    {
      "epoch": 0.456,
      "grad_norm": 0.3388921916484833,
      "learning_rate": 6.611328476152556e-07,
      "loss": 0.3334,
      "step": 2280
    },
    {
      "epoch": 0.458,
      "grad_norm": 0.3999691903591156,
      "learning_rate": 6.578245184735512e-07,
      "loss": 0.3575,
      "step": 2290
    },
    {
      "epoch": 0.46,
      "grad_norm": 0.5984915494918823,
      "learning_rate": 6.545084971874736e-07,
      "loss": 0.3567,
      "step": 2300
    },
    {
      "epoch": 0.462,
      "grad_norm": 0.5047705769538879,
      "learning_rate": 6.511849453752223e-07,
      "loss": 0.2235,
      "step": 2310
    },
    {
      "epoch": 0.464,
      "grad_norm": 0.19550199806690216,
      "learning_rate": 6.478540250220233e-07,
      "loss": 0.3879,
      "step": 2320
    },
    {
      "epoch": 0.466,
      "grad_norm": 0.7670475244522095,
      "learning_rate": 6.445158984722358e-07,
      "loss": 0.2557,
      "step": 2330
    },
    {
      "epoch": 0.468,
      "grad_norm": 0.20435401797294617,
      "learning_rate": 6.411707284214383e-07,
      "loss": 0.2397,
      "step": 2340
    },
    {
      "epoch": 0.47,
      "grad_norm": 0.2008686661720276,
      "learning_rate": 6.378186779084995e-07,
      "loss": 0.3854,
      "step": 2350
    },
    {
      "epoch": 0.472,
      "grad_norm": 1.6063778400421143,
      "learning_rate": 6.344599103076328e-07,
      "loss": 0.2838,
      "step": 2360
    },
    {
      "epoch": 0.474,
      "grad_norm": 0.9098937511444092,
      "learning_rate": 6.310945893204324e-07,
      "loss": 0.3221,
      "step": 2370
    },
    {
      "epoch": 0.476,
      "grad_norm": 0.4095039963722229,
      "learning_rate": 6.277228789678953e-07,
      "loss": 0.2223,
      "step": 2380
    },
    {
      "epoch": 0.478,
      "grad_norm": 0.31623950600624084,
      "learning_rate": 6.243449435824276e-07,
      "loss": 0.2595,
      "step": 2390
    },
    {
      "epoch": 0.48,
      "grad_norm": 0.6135280728340149,
      "learning_rate": 6.209609477998338e-07,
      "loss": 0.19,
      "step": 2400
    },
    {
      "epoch": 0.482,
      "grad_norm": 0.8254637122154236,
      "learning_rate": 6.17571056551295e-07,
      "loss": 0.2618,
      "step": 2410
    },
    {
      "epoch": 0.484,
      "grad_norm": 0.33638447523117065,
      "learning_rate": 6.141754350553279e-07,
      "loss": 0.3483,
      "step": 2420
    },
    {
      "epoch": 0.486,
      "grad_norm": 0.2547425925731659,
      "learning_rate": 6.107742488097338e-07,
      "loss": 0.184,
      "step": 2430
    },
    {
      "epoch": 0.488,
      "grad_norm": 0.23442941904067993,
      "learning_rate": 6.073676635835316e-07,
      "loss": 0.3503,
      "step": 2440
    },
    {
      "epoch": 0.49,
      "grad_norm": 0.33613914251327515,
      "learning_rate": 6.039558454088795e-07,
      "loss": 0.3457,
      "step": 2450
    },
    {
      "epoch": 0.492,
      "grad_norm": 0.5778683423995972,
      "learning_rate": 6.005389605729824e-07,
      "loss": 0.3099,
      "step": 2460
    },
    {
      "epoch": 0.494,
      "grad_norm": 0.0,
      "learning_rate": 5.97117175609986e-07,
      "loss": 0.2064,
      "step": 2470
    },
    {
      "epoch": 0.496,
      "grad_norm": 0.3998168110847473,
      "learning_rate": 5.936906572928624e-07,
      "loss": 0.2502,
      "step": 2480
    },
    {
      "epoch": 0.498,
      "grad_norm": 0.35442838072776794,
      "learning_rate": 5.9025957262528e-07,
      "loss": 0.2189,
      "step": 2490
    },
    {
      "epoch": 0.5,
      "grad_norm": 0.31255003809928894,
      "learning_rate": 5.868240888334652e-07,
      "loss": 0.2579,
      "step": 2500
    },
    {
      "epoch": 0.502,
      "grad_norm": 0.3876568675041199,
      "learning_rate": 5.833843733580512e-07,
      "loss": 0.291,
      "step": 2510
    },
    {
      "epoch": 0.504,
      "grad_norm": 0.7023966908454895,
      "learning_rate": 5.799405938459174e-07,
      "loss": 0.1659,
      "step": 2520
    },
    {
      "epoch": 0.506,
      "grad_norm": 0.42696765065193176,
      "learning_rate": 5.764929181420191e-07,
      "loss": 0.2178,
      "step": 2530
    },
    {
      "epoch": 0.508,
      "grad_norm": 0.24111975729465485,
      "learning_rate": 5.730415142812058e-07,
      "loss": 0.2218,
      "step": 2540
    },
    {
      "epoch": 0.51,
      "grad_norm": 1.253818154335022,
      "learning_rate": 5.695865504800327e-07,
      "loss": 0.2646,
      "step": 2550
    },
    {
      "epoch": 0.512,
      "grad_norm": 0.4682632088661194,
      "learning_rate": 5.661281951285612e-07,
      "loss": 0.3102,
      "step": 2560
    },
    {
      "epoch": 0.514,
      "grad_norm": 0.7523025870323181,
      "learning_rate": 5.626666167821521e-07,
      "loss": 0.3413,
      "step": 2570
    },
    {
      "epoch": 0.516,
      "grad_norm": 0.3234543204307556,
      "learning_rate": 5.592019841532506e-07,
      "loss": 0.1998,
      "step": 2580
    },
    {
      "epoch": 0.518,
      "grad_norm": 0.9460989832878113,
      "learning_rate": 5.557344661031627e-07,
      "loss": 0.2994,
      "step": 2590
    },
    {
      "epoch": 0.52,
      "grad_norm": 0.8611673712730408,
      "learning_rate": 5.522642316338268e-07,
      "loss": 0.2624,
      "step": 2600
    },
    {
      "epoch": 0.522,
      "grad_norm": 0.3594551086425781,
      "learning_rate": 5.487914498795747e-07,
      "loss": 0.2005,
      "step": 2610
    },
    {
      "epoch": 0.524,
      "grad_norm": 0.5597927570343018,
      "learning_rate": 5.453162900988901e-07,
      "loss": 0.3244,
      "step": 2620
    },
    {
      "epoch": 0.526,
      "grad_norm": 0.3440828025341034,
      "learning_rate": 5.418389216661578e-07,
      "loss": 0.2086,
      "step": 2630
    },
    {
      "epoch": 0.528,
      "grad_norm": 0.4287198781967163,
      "learning_rate": 5.383595140634093e-07,
      "loss": 0.2158,
      "step": 2640
    },
    {
      "epoch": 0.53,
      "grad_norm": 0.7096256017684937,
      "learning_rate": 5.348782368720625e-07,
      "loss": 0.2943,
      "step": 2650
    },
    {
      "epoch": 0.532,
      "grad_norm": 0.32011571526527405,
      "learning_rate": 5.313952597646567e-07,
      "loss": 0.3096,
      "step": 2660
    },
    {
      "epoch": 0.534,
      "grad_norm": 0.47610387206077576,
      "learning_rate": 5.27910752496582e-07,
      "loss": 0.3096,
      "step": 2670
    },
    {
      "epoch": 0.536,
      "grad_norm": 0.0,
      "learning_rate": 5.244248848978067e-07,
      "loss": 0.2443,
      "step": 2680
    },
    {
      "epoch": 0.538,
      "grad_norm": 0.4076542854309082,
      "learning_rate": 5.209378268645997e-07,
      "loss": 0.2085,
      "step": 2690
    },
    {
      "epoch": 0.54,
      "grad_norm": 0.8643516898155212,
      "learning_rate": 5.174497483512505e-07,
      "loss": 0.3603,
      "step": 2700
    },
    {
      "epoch": 0.542,
      "grad_norm": 0.3362959027290344,
      "learning_rate": 5.139608193617844e-07,
      "loss": 0.2014,
      "step": 2710
    },
    {
      "epoch": 0.544,
      "grad_norm": 0.35498881340026855,
      "learning_rate": 5.104712099416785e-07,
      "loss": 0.2257,
      "step": 2720
    },
    {
      "epoch": 0.546,
      "grad_norm": 0.48586198687553406,
      "learning_rate": 5.069810901695727e-07,
      "loss": 0.2808,
      "step": 2730
    },
    {
      "epoch": 0.548,
      "grad_norm": 0.35722529888153076,
      "learning_rate": 5.034906301489807e-07,
      "loss": 0.13,
      "step": 2740
    },
    {
      "epoch": 0.55,
      "grad_norm": 0.3690369725227356,
      "learning_rate": 5e-07,
      "loss": 0.3317,
      "step": 2750
    },
    {
      "epoch": 0.552,
      "grad_norm": 1.0494160652160645,
      "learning_rate": 4.965093698510192e-07,
      "loss": 0.2685,
      "step": 2760
    },
    {
      "epoch": 0.554,
      "grad_norm": 0.4894683063030243,
      "learning_rate": 4.930189098304274e-07,
      "loss": 0.2293,
      "step": 2770
    },
    {
      "epoch": 0.556,
      "grad_norm": 0.6006208062171936,
      "learning_rate": 4.895287900583216e-07,
      "loss": 0.247,
      "step": 2780
    },
    {
      "epoch": 0.558,
      "grad_norm": 0.27367955446243286,
      "learning_rate": 4.860391806382156e-07,
      "loss": 0.2167,
      "step": 2790
    },
    {
      "epoch": 0.56,
      "grad_norm": 1.0968672037124634,
      "learning_rate": 4.825502516487496e-07,
      "loss": 0.2455,
      "step": 2800
    },
    {
      "epoch": 0.562,
      "grad_norm": 0.0,
      "learning_rate": 4.790621731354002e-07,
      "loss": 0.2411,
      "step": 2810
    },
    {
      "epoch": 0.564,
      "grad_norm": 0.3560165762901306,
      "learning_rate": 4.7557511510219335e-07,
      "loss": 0.1861,
      "step": 2820
    },
    {
      "epoch": 0.566,
      "grad_norm": 0.3030807673931122,
      "learning_rate": 4.7208924750341805e-07,
      "loss": 0.1857,
      "step": 2830
    },
    {
      "epoch": 0.568,
      "grad_norm": 0.5121121406555176,
      "learning_rate": 4.686047402353433e-07,
      "loss": 0.2738,
      "step": 2840
    },
    {
      "epoch": 0.57,
      "grad_norm": 0.2215707153081894,
      "learning_rate": 4.6512176312793735e-07,
      "loss": 0.2099,
      "step": 2850
    },
    {
      "epoch": 0.572,
      "grad_norm": 0.0,
      "learning_rate": 4.6164048593659065e-07,
      "loss": 0.1693,
      "step": 2860
    },
    {
      "epoch": 0.574,
      "grad_norm": 0.5592711567878723,
      "learning_rate": 4.5816107833384233e-07,
      "loss": 0.3155,
      "step": 2870
    },
    {
      "epoch": 0.576,
      "grad_norm": 0.49754905700683594,
      "learning_rate": 4.5468370990110997e-07,
      "loss": 0.208,
      "step": 2880
    },
    {
      "epoch": 0.578,
      "grad_norm": 0.6235384941101074,
      "learning_rate": 4.512085501204253e-07,
      "loss": 0.2906,
      "step": 2890
    },
    {
      "epoch": 0.58,
      "grad_norm": 0.35063299536705017,
      "learning_rate": 4.477357683661733e-07,
      "loss": 0.291,
      "step": 2900
    },
    {
      "epoch": 0.582,
      "grad_norm": 0.8492065668106079,
      "learning_rate": 4.442655338968373e-07,
      "loss": 0.2493,
      "step": 2910
    },
    {
      "epoch": 0.584,
      "grad_norm": 0.31068241596221924,
      "learning_rate": 4.407980158467495e-07,
      "loss": 0.2199,
      "step": 2920
    },
    {
      "epoch": 0.586,
      "grad_norm": 0.16246047616004944,
      "learning_rate": 4.3733338321784777e-07,
      "loss": 0.2611,
      "step": 2930
    },
    {
      "epoch": 0.588,
      "grad_norm": 0.5197228789329529,
      "learning_rate": 4.338718048714387e-07,
      "loss": 0.2822,
      "step": 2940
    },
    {
      "epoch": 0.59,
      "grad_norm": 0.9255732297897339,
      "learning_rate": 4.304134495199674e-07,
      "loss": 0.228,
      "step": 2950
    },
    {
      "epoch": 0.592,
      "grad_norm": 0.6209078431129456,
      "learning_rate": 4.2695848571879424e-07,
      "loss": 0.2105,
      "step": 2960
    },
    {
      "epoch": 0.594,
      "grad_norm": 0.699409544467926,
      "learning_rate": 4.23507081857981e-07,
      "loss": 0.2484,
      "step": 2970
    },
    {
      "epoch": 0.596,
      "grad_norm": 0.596609890460968,
      "learning_rate": 4.200594061540826e-07,
      "loss": 0.2553,
      "step": 2980
    },
    {
      "epoch": 0.598,
      "grad_norm": 0.5129525661468506,
      "learning_rate": 4.166156266419489e-07,
      "loss": 0.2247,
      "step": 2990
    },
    {
      "epoch": 0.6,
      "grad_norm": 0.39060530066490173,
      "learning_rate": 4.131759111665348e-07,
      "loss": 0.3284,
      "step": 3000
    },
    {
      "epoch": 0.602,
      "grad_norm": 0.41641587018966675,
      "learning_rate": 4.0974042737472005e-07,
      "loss": 0.3142,
      "step": 3010
    },
    {
      "epoch": 0.604,
      "grad_norm": 0.9066689014434814,
      "learning_rate": 4.0630934270713755e-07,
      "loss": 0.2861,
      "step": 3020
    },
    {
      "epoch": 0.606,
      "grad_norm": 0.5587870478630066,
      "learning_rate": 4.028828243900141e-07,
      "loss": 0.2254,
      "step": 3030
    },
    {
      "epoch": 0.608,
      "grad_norm": 0.47786420583724976,
      "learning_rate": 3.9946103942701775e-07,
      "loss": 0.2112,
      "step": 3040
    },
    {
      "epoch": 0.61,
      "grad_norm": 0.6182255744934082,
      "learning_rate": 3.960441545911204e-07,
      "loss": 0.3442,
      "step": 3050
    },
    {
      "epoch": 0.612,
      "grad_norm": 0.8767617344856262,
      "learning_rate": 3.9263233641646836e-07,
      "loss": 0.294,
      "step": 3060
    },
    {
      "epoch": 0.614,
      "grad_norm": 0.0,
      "learning_rate": 3.8922575119026635e-07,
      "loss": 0.1191,
      "step": 3070
    },
    {
      "epoch": 0.616,
      "grad_norm": 0.3942841589450836,
      "learning_rate": 3.8582456494467206e-07,
      "loss": 0.2486,
      "step": 3080
    },
    {
      "epoch": 0.618,
      "grad_norm": 0.457390159368515,
      "learning_rate": 3.8242894344870495e-07,
      "loss": 0.291,
      "step": 3090
    },
    {
      "epoch": 0.62,
      "grad_norm": 0.34195929765701294,
      "learning_rate": 3.790390522001662e-07,
      "loss": 0.2265,
      "step": 3100
    },
    {
      "epoch": 0.622,
      "grad_norm": 0.2878647744655609,
      "learning_rate": 3.7565505641757266e-07,
      "loss": 0.274,
      "step": 3110
    },
    {
      "epoch": 0.624,
      "grad_norm": 0.0,
      "learning_rate": 3.722771210321048e-07,
      "loss": 0.1274,
      "step": 3120
    },
    {
      "epoch": 0.626,
      "grad_norm": 0.38419225811958313,
      "learning_rate": 3.689054106795677e-07,
      "loss": 0.2817,
      "step": 3130
    },
    {
      "epoch": 0.628,
      "grad_norm": 0.4910019636154175,
      "learning_rate": 3.6554008969236715e-07,
      "loss": 0.2044,
      "step": 3140
    },
    {
      "epoch": 0.63,
      "grad_norm": 0.2987191379070282,
      "learning_rate": 3.621813220915004e-07,
      "loss": 0.2311,
      "step": 3150
    },
    {
      "epoch": 0.632,
      "grad_norm": 0.3759290874004364,
      "learning_rate": 3.5882927157856167e-07,
      "loss": 0.2061,
      "step": 3160
    },
    {
      "epoch": 0.634,
      "grad_norm": 0.37429606914520264,
      "learning_rate": 3.554841015277641e-07,
      "loss": 0.2124,
      "step": 3170
    },
    {
      "epoch": 0.636,
      "grad_norm": 0.2639157772064209,
      "learning_rate": 3.521459749779768e-07,
      "loss": 0.2445,
      "step": 3180
    },
    {
      "epoch": 0.638,
      "grad_norm": 0.2917717695236206,
      "learning_rate": 3.488150546247778e-07,
      "loss": 0.325,
      "step": 3190
    },
    {
      "epoch": 0.64,
      "grad_norm": 0.2606163024902344,
      "learning_rate": 3.454915028125263e-07,
      "loss": 0.1581,
      "step": 3200
    },
    {
      "epoch": 0.642,
      "grad_norm": 0.0,
      "learning_rate": 3.421754815264488e-07,
      "loss": 0.2244,
      "step": 3210
    },
    {
      "epoch": 0.644,
      "grad_norm": 0.6836721897125244,
      "learning_rate": 3.388671523847445e-07,
      "loss": 0.178,
      "step": 3220
    },
    {
      "epoch": 0.646,
      "grad_norm": 0.36950552463531494,
      "learning_rate": 3.3556667663070835e-07,
      "loss": 0.2215,
      "step": 3230
    },
    {
      "epoch": 0.648,
      "grad_norm": 0.7161170840263367,
      "learning_rate": 3.3227421512487255e-07,
      "loss": 0.1874,
      "step": 3240
    },
    {
      "epoch": 0.65,
      "grad_norm": 0.33053773641586304,
      "learning_rate": 3.2898992833716563e-07,
      "loss": 0.1942,
      "step": 3250
    },
    {
      "epoch": 0.652,
      "grad_norm": 0.39361345767974854,
      "learning_rate": 3.257139763390925e-07,
      "loss": 0.2147,
      "step": 3260
    },
    {
      "epoch": 0.654,
      "grad_norm": 0.34917065501213074,
      "learning_rate": 3.2244651879593156e-07,
      "loss": 0.2706,
      "step": 3270
    },
    {
      "epoch": 0.656,
      "grad_norm": 0.29661568999290466,
      "learning_rate": 3.191877149589539e-07,
      "loss": 0.2537,
      "step": 3280
    },
    {
      "epoch": 0.658,
      "grad_norm": 1.5755507946014404,
      "learning_rate": 3.15937723657661e-07,
      "loss": 0.2887,
      "step": 3290
    },
    {
      "epoch": 0.66,
      "grad_norm": 0.30394697189331055,
      "learning_rate": 3.1269670329204393e-07,
      "loss": 0.1861,
      "step": 3300
    },
    {
      "epoch": 0.662,
      "grad_norm": 0.666857123374939,
      "learning_rate": 3.0946481182486297e-07,
      "loss": 0.2361,
      "step": 3310
    },
    {
      "epoch": 0.664,
      "grad_norm": 0.4149036109447479,
      "learning_rate": 3.0624220677394854e-07,
      "loss": 0.3015,
      "step": 3320
    },
    {
      "epoch": 0.666,
      "grad_norm": 0.5183743238449097,
      "learning_rate": 3.0302904520452443e-07,
      "loss": 0.2615,
      "step": 3330
    },
    {
      "epoch": 0.668,
      "grad_norm": 0.2939796447753906,
      "learning_rate": 2.9982548372155256e-07,
      "loss": 0.2192,
      "step": 3340
    },
    {
      "epoch": 0.67,
      "grad_norm": 0.0,
      "learning_rate": 2.9663167846209996e-07,
      "loss": 0.1743,
      "step": 3350
    },
    {
      "epoch": 0.672,
      "grad_norm": 1.0357635021209717,
      "learning_rate": 2.9344778508772914e-07,
      "loss": 0.1922,
      "step": 3360
    },
    {
      "epoch": 0.674,
      "grad_norm": 0.22886469960212708,
      "learning_rate": 2.902739587769114e-07,
      "loss": 0.2038,
      "step": 3370
    },
    {
      "epoch": 0.676,
      "grad_norm": 0.5248194336891174,
      "learning_rate": 2.8711035421746363e-07,
      "loss": 0.1597,
      "step": 3380
    },
    {
      "epoch": 0.678,
      "grad_norm": 0.26883548498153687,
      "learning_rate": 2.8395712559900874e-07,
      "loss": 0.2412,
      "step": 3390
    },
    {
      "epoch": 0.68,
      "grad_norm": 0.0,
      "learning_rate": 2.808144266054612e-07,
      "loss": 0.3742,
      "step": 3400
    },
    {
      "epoch": 0.682,
      "grad_norm": 0.5440124273300171,
      "learning_rate": 2.776824104075364e-07,
      "loss": 0.1784,
      "step": 3410
    },
    {
      "epoch": 0.684,
      "grad_norm": 0.0,
      "learning_rate": 2.745612296552847e-07,
      "loss": 0.1994,
      "step": 3420
    },
    {
      "epoch": 0.686,
      "grad_norm": 0.5974320769309998,
      "learning_rate": 2.71451036470653e-07,
      "loss": 0.251,
      "step": 3430
    },
    {
      "epoch": 0.688,
      "grad_norm": 0.0,
      "learning_rate": 2.683519824400692e-07,
      "loss": 0.1304,
      "step": 3440
    },
    {
      "epoch": 0.69,
      "grad_norm": 0.6028246283531189,
      "learning_rate": 2.6526421860705473e-07,
      "loss": 0.1921,
      "step": 3450
    },
    {
      "epoch": 0.692,
      "grad_norm": 0.27002623677253723,
      "learning_rate": 2.621878954648623e-07,
      "loss": 0.2107,
      "step": 3460
    },
    {
      "epoch": 0.694,
      "grad_norm": 0.47267985343933105,
      "learning_rate": 2.591231629491423e-07,
      "loss": 0.2283,
      "step": 3470
    },
    {
      "epoch": 0.696,
      "grad_norm": 0.0,
      "learning_rate": 2.5607017043063353e-07,
      "loss": 0.1611,
      "step": 3480
    },
    {
      "epoch": 0.698,
      "grad_norm": 0.5085638165473938,
      "learning_rate": 2.530290667078846e-07,
      "loss": 0.1577,
      "step": 3490
    },
    {
      "epoch": 0.7,
      "grad_norm": 0.6995971202850342,
      "learning_rate": 2.500000000000001e-07,
      "loss": 0.2268,
      "step": 3500
    },
    {
      "epoch": 0.702,
      "grad_norm": 1.0333393812179565,
      "learning_rate": 2.469831179394182e-07,
      "loss": 0.2646,
      "step": 3510
    },
    {
      "epoch": 0.704,
      "grad_norm": 0.5018756985664368,
      "learning_rate": 2.439785675647143e-07,
      "loss": 0.2841,
      "step": 3520
    },
    {
      "epoch": 0.706,
      "grad_norm": 0.4787593483924866,
      "learning_rate": 2.4098649531343494e-07,
      "loss": 0.1832,
      "step": 3530
    },
    {
      "epoch": 0.708,
      "grad_norm": 0.20107752084732056,
      "learning_rate": 2.380070470149605e-07,
      "loss": 0.2042,
      "step": 3540
    },
    {
      "epoch": 0.71,
      "grad_norm": 0.6337828636169434,
      "learning_rate": 2.350403678833976e-07,
      "loss": 0.2196,
      "step": 3550
    },
    {
      "epoch": 0.712,
      "grad_norm": 0.5903733968734741,
      "learning_rate": 2.3208660251050156e-07,
      "loss": 0.1625,
      "step": 3560
    },
    {
      "epoch": 0.714,
      "grad_norm": 0.463150292634964,
      "learning_rate": 2.2914589485863012e-07,
      "loss": 0.1901,
      "step": 3570
    },
    {
      "epoch": 0.716,
      "grad_norm": 0.2560785710811615,
      "learning_rate": 2.262183882537249e-07,
      "loss": 0.2732,
      "step": 3580
    },
    {
      "epoch": 0.718,
      "grad_norm": 0.8427993059158325,
      "learning_rate": 2.23304225378328e-07,
      "loss": 0.2463,
      "step": 3590
    },
    {
      "epoch": 0.72,
      "grad_norm": 0.26840728521347046,
      "learning_rate": 2.2040354826462664e-07,
      "loss": 0.2391,
      "step": 3600
    },
    {
      "epoch": 0.722,
      "grad_norm": 1.2490087747573853,
      "learning_rate": 2.1751649828753106e-07,
      "loss": 0.2908,
      "step": 3610
    },
    {
      "epoch": 0.724,
      "grad_norm": 0.19666559994220734,
      "learning_rate": 2.146432161577842e-07,
      "loss": 0.1933,
      "step": 3620
    },
    {
      "epoch": 0.726,
      "grad_norm": 0.41633477807044983,
      "learning_rate": 2.117838419151034e-07,
      "loss": 0.1907,
      "step": 3630
    },
    {
      "epoch": 0.728,
      "grad_norm": 0.29759231209754944,
      "learning_rate": 2.0893851492135532e-07,
      "loss": 0.2814,
      "step": 3640
    },
    {
      "epoch": 0.73,
      "grad_norm": 0.5257274508476257,
      "learning_rate": 2.0610737385376348e-07,
      "loss": 0.2377,
      "step": 3650
    },
    {
      "epoch": 0.732,
      "grad_norm": 1.2364554405212402,
      "learning_rate": 2.0329055669814933e-07,
      "loss": 0.3568,
      "step": 3660
    },
    {
      "epoch": 0.734,
      "grad_norm": 1.1709978580474854,
      "learning_rate": 2.0048820074220711e-07,
      "loss": 0.2385,
      "step": 3670
    },
    {
      "epoch": 0.736,
      "grad_norm": 0.9572620391845703,
      "learning_rate": 1.9770044256881258e-07,
      "loss": 0.1968,
      "step": 3680
    },
    {
      "epoch": 0.738,
      "grad_norm": 0.2426954209804535,
      "learning_rate": 1.9492741804936618e-07,
      "loss": 0.175,
      "step": 3690
    },
    {
      "epoch": 0.74,
      "grad_norm": 0.5391064286231995,
      "learning_rate": 1.9216926233717084e-07,
      "loss": 0.2462,
      "step": 3700
    },
    {
      "epoch": 0.742,
      "grad_norm": 0.5289126038551331,
      "learning_rate": 1.8942610986084484e-07,
      "loss": 0.2174,
      "step": 3710
    },
    {
      "epoch": 0.744,
      "grad_norm": 0.0,
      "learning_rate": 1.8669809431776988e-07,
      "loss": 0.1438,
      "step": 3720
    },
| { | |
| "epoch": 0.746, | |
| "grad_norm": 0.551702618598938, | |
| "learning_rate": 1.8398534866757455e-07, | |
| "loss": 0.2802, | |
| "step": 3730 | |
| }, | |
| { | |
| "epoch": 0.748, | |
| "grad_norm": 0.3379834294319153, | |
| "learning_rate": 1.812880051256551e-07, | |
| "loss": 0.1889, | |
| "step": 3740 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "grad_norm": 1.0366861820220947, | |
| "learning_rate": 1.7860619515673032e-07, | |
| "loss": 0.2649, | |
| "step": 3750 | |
| }, | |
| { | |
| "epoch": 0.752, | |
| "grad_norm": 0.3923557698726654, | |
| "learning_rate": 1.7594004946843454e-07, | |
| "loss": 0.2012, | |
| "step": 3760 | |
| }, | |
| { | |
| "epoch": 0.754, | |
| "grad_norm": 1.2161628007888794, | |
| "learning_rate": 1.7328969800494726e-07, | |
| "loss": 0.3344, | |
| "step": 3770 | |
| }, | |
| { | |
| "epoch": 0.756, | |
| "grad_norm": 0.23506614565849304, | |
| "learning_rate": 1.7065526994065972e-07, | |
| "loss": 0.2097, | |
| "step": 3780 | |
| }, | |
| { | |
| "epoch": 0.758, | |
| "grad_norm": 0.2504694163799286, | |
| "learning_rate": 1.6803689367387918e-07, | |
| "loss": 0.1493, | |
| "step": 3790 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "grad_norm": 1.1120474338531494, | |
| "learning_rate": 1.6543469682057104e-07, | |
| "loss": 0.2224, | |
| "step": 3800 | |
| }, | |
| { | |
| "epoch": 0.762, | |
| "grad_norm": 0.2815347909927368, | |
| "learning_rate": 1.6284880620813846e-07, | |
| "loss": 0.209, | |
| "step": 3810 | |
| }, | |
| { | |
| "epoch": 0.764, | |
| "grad_norm": 0.31576356291770935, | |
| "learning_rate": 1.6027934786924185e-07, | |
| "loss": 0.1237, | |
| "step": 3820 | |
| }, | |
| { | |
| "epoch": 0.766, | |
| "grad_norm": 0.2061535269021988, | |
| "learning_rate": 1.5772644703565564e-07, | |
| "loss": 0.1967, | |
| "step": 3830 | |
| }, | |
| { | |
| "epoch": 0.768, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.551902281321651e-07, | |
| "loss": 0.2432, | |
| "step": 3840 | |
| }, | |
| { | |
| "epoch": 0.77, | |
| "grad_norm": 0.45145711302757263, | |
| "learning_rate": 1.5267081477050131e-07, | |
| "loss": 0.2098, | |
| "step": 3850 | |
| }, | |
| { | |
| "epoch": 0.772, | |
| "grad_norm": 0.5424890518188477, | |
| "learning_rate": 1.5016832974331723e-07, | |
| "loss": 0.3124, | |
| "step": 3860 | |
| }, | |
| { | |
| "epoch": 0.774, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.4768289501820263e-07, | |
| "loss": 0.1453, | |
| "step": 3870 | |
| }, | |
| { | |
| "epoch": 0.776, | |
| "grad_norm": 0.47473931312561035, | |
| "learning_rate": 1.4521463173173965e-07, | |
| "loss": 0.1799, | |
| "step": 3880 | |
| }, | |
| { | |
| "epoch": 0.778, | |
| "grad_norm": 0.27455076575279236, | |
| "learning_rate": 1.4276366018359842e-07, | |
| "loss": 0.273, | |
| "step": 3890 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.4033009983067452e-07, | |
| "loss": 0.2012, | |
| "step": 3900 | |
| }, | |
| { | |
| "epoch": 0.782, | |
| "grad_norm": 0.25595951080322266, | |
| "learning_rate": 1.3791406928126635e-07, | |
| "loss": 0.1351, | |
| "step": 3910 | |
| }, | |
| { | |
| "epoch": 0.784, | |
| "grad_norm": 1.085986614227295, | |
| "learning_rate": 1.3551568628929432e-07, | |
| "loss": 0.3306, | |
| "step": 3920 | |
| }, | |
| { | |
| "epoch": 0.786, | |
| "grad_norm": 0.5502387285232544, | |
| "learning_rate": 1.3313506774856175e-07, | |
| "loss": 0.1963, | |
| "step": 3930 | |
| }, | |
| { | |
| "epoch": 0.788, | |
| "grad_norm": 0.9978371262550354, | |
| "learning_rate": 1.3077232968705805e-07, | |
| "loss": 0.2349, | |
| "step": 3940 | |
| }, | |
| { | |
| "epoch": 0.79, | |
| "grad_norm": 0.419802188873291, | |
| "learning_rate": 1.284275872613028e-07, | |
| "loss": 0.1495, | |
| "step": 3950 | |
| }, | |
| { | |
| "epoch": 0.792, | |
| "grad_norm": 0.5003088116645813, | |
| "learning_rate": 1.2610095475073413e-07, | |
| "loss": 0.1413, | |
| "step": 3960 | |
| }, | |
| { | |
| "epoch": 0.794, | |
| "grad_norm": 0.5178236365318298, | |
| "learning_rate": 1.2379254555213786e-07, | |
| "loss": 0.2002, | |
| "step": 3970 | |
| }, | |
| { | |
| "epoch": 0.796, | |
| "grad_norm": 1.4785641431808472, | |
| "learning_rate": 1.2150247217412185e-07, | |
| "loss": 0.1613, | |
| "step": 3980 | |
| }, | |
| { | |
| "epoch": 0.798, | |
| "grad_norm": 0.4960174858570099, | |
| "learning_rate": 1.192308462316317e-07, | |
| "loss": 0.1687, | |
| "step": 3990 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "grad_norm": 0.4522833824157715, | |
| "learning_rate": 1.1697777844051104e-07, | |
| "loss": 0.1916, | |
| "step": 4000 | |
| }, | |
| { | |
| "epoch": 0.802, | |
| "grad_norm": 0.4012996256351471, | |
| "learning_rate": 1.1474337861210543e-07, | |
| "loss": 0.1677, | |
| "step": 4010 | |
| }, | |
| { | |
| "epoch": 0.804, | |
| "grad_norm": 1.1145271062850952, | |
| "learning_rate": 1.1252775564791023e-07, | |
| "loss": 0.2675, | |
| "step": 4020 | |
| }, | |
| { | |
| "epoch": 0.806, | |
| "grad_norm": 1.063022494316101, | |
| "learning_rate": 1.1033101753426282e-07, | |
| "loss": 0.169, | |
| "step": 4030 | |
| }, | |
| { | |
| "epoch": 0.808, | |
| "grad_norm": 0.5367292761802673, | |
| "learning_rate": 1.0815327133708013e-07, | |
| "loss": 0.2466, | |
| "step": 4040 | |
| }, | |
| { | |
| "epoch": 0.81, | |
| "grad_norm": 0.46209707856178284, | |
| "learning_rate": 1.0599462319663904e-07, | |
| "loss": 0.2542, | |
| "step": 4050 | |
| }, | |
| { | |
| "epoch": 0.812, | |
| "grad_norm": 0.48917677998542786, | |
| "learning_rate": 1.038551783224047e-07, | |
| "loss": 0.2497, | |
| "step": 4060 | |
| }, | |
| { | |
| "epoch": 0.814, | |
| "grad_norm": 0.49316373467445374, | |
| "learning_rate": 1.0173504098790186e-07, | |
| "loss": 0.1824, | |
| "step": 4070 | |
| }, | |
| { | |
| "epoch": 0.816, | |
| "grad_norm": 0.7999529838562012, | |
| "learning_rate": 9.963431452563331e-08, | |
| "loss": 0.2714, | |
| "step": 4080 | |
| }, | |
| { | |
| "epoch": 0.818, | |
| "grad_norm": 1.2927374839782715, | |
| "learning_rate": 9.755310132204297e-08, | |
| "loss": 0.2306, | |
| "step": 4090 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "grad_norm": 0.6657562851905823, | |
| "learning_rate": 9.549150281252632e-08, | |
| "loss": 0.2175, | |
| "step": 4100 | |
| }, | |
| { | |
| "epoch": 0.822, | |
| "grad_norm": 0.2347097098827362, | |
| "learning_rate": 9.344961947648622e-08, | |
| "loss": 0.2158, | |
| "step": 4110 | |
| }, | |
| { | |
| "epoch": 0.824, | |
| "grad_norm": 0.6625514626502991, | |
| "learning_rate": 9.142755083243575e-08, | |
| "loss": 0.299, | |
| "step": 4120 | |
| }, | |
| { | |
| "epoch": 0.826, | |
| "grad_norm": 0.2735314667224884, | |
| "learning_rate": 8.942539543314798e-08, | |
| "loss": 0.1775, | |
| "step": 4130 | |
| }, | |
| { | |
| "epoch": 0.828, | |
| "grad_norm": 0.48911941051483154, | |
| "learning_rate": 8.744325086085247e-08, | |
| "loss": 0.2273, | |
| "step": 4140 | |
| }, | |
| { | |
| "epoch": 0.83, | |
| "grad_norm": 0.2327689230442047, | |
| "learning_rate": 8.548121372247919e-08, | |
| "loss": 0.2096, | |
| "step": 4150 | |
| }, | |
| { | |
| "epoch": 0.832, | |
| "grad_norm": 0.7817517518997192, | |
| "learning_rate": 8.353937964495028e-08, | |
| "loss": 0.1858, | |
| "step": 4160 | |
| }, | |
| { | |
| "epoch": 0.834, | |
| "grad_norm": 0.698259711265564, | |
| "learning_rate": 8.161784327051919e-08, | |
| "loss": 0.1888, | |
| "step": 4170 | |
| }, | |
| { | |
| "epoch": 0.836, | |
| "grad_norm": 0.5820159316062927, | |
| "learning_rate": 7.971669825215787e-08, | |
| "loss": 0.186, | |
| "step": 4180 | |
| }, | |
| { | |
| "epoch": 0.838, | |
| "grad_norm": 0.5032526850700378, | |
| "learning_rate": 7.783603724899257e-08, | |
| "loss": 0.1618, | |
| "step": 4190 | |
| }, | |
| { | |
| "epoch": 0.84, | |
| "grad_norm": 1.1663309335708618, | |
| "learning_rate": 7.597595192178702e-08, | |
| "loss": 0.2173, | |
| "step": 4200 | |
| }, | |
| { | |
| "epoch": 0.842, | |
| "grad_norm": 0.275285542011261, | |
| "learning_rate": 7.413653292847616e-08, | |
| "loss": 0.3055, | |
| "step": 4210 | |
| }, | |
| { | |
| "epoch": 0.844, | |
| "grad_norm": 0.6619118452072144, | |
| "learning_rate": 7.23178699197467e-08, | |
| "loss": 0.2306, | |
| "step": 4220 | |
| }, | |
| { | |
| "epoch": 0.846, | |
| "grad_norm": 0.667110800743103, | |
| "learning_rate": 7.052005153466778e-08, | |
| "loss": 0.2826, | |
| "step": 4230 | |
| }, | |
| { | |
| "epoch": 0.848, | |
| "grad_norm": 0.374187171459198, | |
| "learning_rate": 6.874316539637126e-08, | |
| "loss": 0.2028, | |
| "step": 4240 | |
| }, | |
| { | |
| "epoch": 0.85, | |
| "grad_norm": 0.23237831890583038, | |
| "learning_rate": 6.698729810778064e-08, | |
| "loss": 0.2217, | |
| "step": 4250 | |
| }, | |
| { | |
| "epoch": 0.852, | |
| "grad_norm": 0.6198431253433228, | |
| "learning_rate": 6.52525352473905e-08, | |
| "loss": 0.1642, | |
| "step": 4260 | |
| }, | |
| { | |
| "epoch": 0.854, | |
| "grad_norm": 0.560222864151001, | |
| "learning_rate": 6.353896136509524e-08, | |
| "loss": 0.2124, | |
| "step": 4270 | |
| }, | |
| { | |
| "epoch": 0.856, | |
| "grad_norm": 0.38097235560417175, | |
| "learning_rate": 6.184665997806831e-08, | |
| "loss": 0.2093, | |
| "step": 4280 | |
| }, | |
| { | |
| "epoch": 0.858, | |
| "grad_norm": 0.42547518014907837, | |
| "learning_rate": 6.017571356669182e-08, | |
| "loss": 0.2598, | |
| "step": 4290 | |
| }, | |
| { | |
| "epoch": 0.86, | |
| "grad_norm": 0.4948174059391022, | |
| "learning_rate": 5.8526203570536504e-08, | |
| "loss": 0.1848, | |
| "step": 4300 | |
| }, | |
| { | |
| "epoch": 0.862, | |
| "grad_norm": 0.18645498156547546, | |
| "learning_rate": 5.689821038439263e-08, | |
| "loss": 0.1874, | |
| "step": 4310 | |
| }, | |
| { | |
| "epoch": 0.864, | |
| "grad_norm": 0.848637580871582, | |
| "learning_rate": 5.529181335435124e-08, | |
| "loss": 0.1541, | |
| "step": 4320 | |
| }, | |
| { | |
| "epoch": 0.866, | |
| "grad_norm": 0.7719595432281494, | |
| "learning_rate": 5.37070907739372e-08, | |
| "loss": 0.2264, | |
| "step": 4330 | |
| }, | |
| { | |
| "epoch": 0.868, | |
| "grad_norm": 0.5386465787887573, | |
| "learning_rate": 5.2144119880293544e-08, | |
| "loss": 0.1415, | |
| "step": 4340 | |
| }, | |
| { | |
| "epoch": 0.87, | |
| "grad_norm": 0.2296571284532547, | |
| "learning_rate": 5.060297685041659e-08, | |
| "loss": 0.181, | |
| "step": 4350 | |
| }, | |
| { | |
| "epoch": 0.872, | |
| "grad_norm": 0.2710702121257782, | |
| "learning_rate": 4.908373679744315e-08, | |
| "loss": 0.228, | |
| "step": 4360 | |
| }, | |
| { | |
| "epoch": 0.874, | |
| "grad_norm": 0.18128183484077454, | |
| "learning_rate": 4.758647376699032e-08, | |
| "loss": 0.1991, | |
| "step": 4370 | |
| }, | |
| { | |
| "epoch": 0.876, | |
| "grad_norm": 1.9789559841156006, | |
| "learning_rate": 4.611126073354571e-08, | |
| "loss": 0.2768, | |
| "step": 4380 | |
| }, | |
| { | |
| "epoch": 0.878, | |
| "grad_norm": 0.7291713953018188, | |
| "learning_rate": 4.465816959691149e-08, | |
| "loss": 0.2409, | |
| "step": 4390 | |
| }, | |
| { | |
| "epoch": 0.88, | |
| "grad_norm": 0.0, | |
| "learning_rate": 4.322727117869951e-08, | |
| "loss": 0.1278, | |
| "step": 4400 | |
| }, | |
| { | |
| "epoch": 0.882, | |
| "grad_norm": 0.7022453546524048, | |
| "learning_rate": 4.181863521888018e-08, | |
| "loss": 0.2995, | |
| "step": 4410 | |
| }, | |
| { | |
| "epoch": 0.884, | |
| "grad_norm": 1.2862244844436646, | |
| "learning_rate": 4.043233037238281e-08, | |
| "loss": 0.2753, | |
| "step": 4420 | |
| }, | |
| { | |
| "epoch": 0.886, | |
| "grad_norm": 0.14757445454597473, | |
| "learning_rate": 3.9068424205749794e-08, | |
| "loss": 0.2235, | |
| "step": 4430 | |
| }, | |
| { | |
| "epoch": 0.888, | |
| "grad_norm": 2.4390058517456055, | |
| "learning_rate": 3.7726983193843485e-08, | |
| "loss": 0.2061, | |
| "step": 4440 | |
| }, | |
| { | |
| "epoch": 0.89, | |
| "grad_norm": 1.5575977563858032, | |
| "learning_rate": 3.6408072716606345e-08, | |
| "loss": 0.2763, | |
| "step": 4450 | |
| }, | |
| { | |
| "epoch": 0.892, | |
| "grad_norm": 0.5324432849884033, | |
| "learning_rate": 3.5111757055874326e-08, | |
| "loss": 0.1996, | |
| "step": 4460 | |
| }, | |
| { | |
| "epoch": 0.894, | |
| "grad_norm": 0.3378039598464966, | |
| "learning_rate": 3.3838099392243915e-08, | |
| "loss": 0.2702, | |
| "step": 4470 | |
| }, | |
| { | |
| "epoch": 0.896, | |
| "grad_norm": 0.5573419332504272, | |
| "learning_rate": 3.258716180199278e-08, | |
| "loss": 0.1961, | |
| "step": 4480 | |
| }, | |
| { | |
| "epoch": 0.898, | |
| "grad_norm": 0.3722284734249115, | |
| "learning_rate": 3.135900525405427e-08, | |
| "loss": 0.1993, | |
| "step": 4490 | |
| }, | |
| { | |
| "epoch": 0.9, | |
| "grad_norm": 0.7958338856697083, | |
| "learning_rate": 3.015368960704584e-08, | |
| "loss": 0.2263, | |
| "step": 4500 | |
| }, | |
| { | |
| "epoch": 0.902, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.8971273606351655e-08, | |
| "loss": 0.206, | |
| "step": 4510 | |
| }, | |
| { | |
| "epoch": 0.904, | |
| "grad_norm": 0.4656100273132324, | |
| "learning_rate": 2.78118148812595e-08, | |
| "loss": 0.1972, | |
| "step": 4520 | |
| }, | |
| { | |
| "epoch": 0.906, | |
| "grad_norm": 0.3063383102416992, | |
| "learning_rate": 2.667536994215186e-08, | |
| "loss": 0.1975, | |
| "step": 4530 | |
| }, | |
| { | |
| "epoch": 0.908, | |
| "grad_norm": 1.4156655073165894, | |
| "learning_rate": 2.5561994177751732e-08, | |
| "loss": 0.2039, | |
| "step": 4540 | |
| }, | |
| { | |
| "epoch": 0.91, | |
| "grad_norm": 0.23086102306842804, | |
| "learning_rate": 2.4471741852423233e-08, | |
| "loss": 0.1558, | |
| "step": 4550 | |
| }, | |
| { | |
| "epoch": 0.912, | |
| "grad_norm": 0.0, | |
| "learning_rate": 2.3404666103526537e-08, | |
| "loss": 0.1516, | |
| "step": 4560 | |
| }, | |
| { | |
| "epoch": 0.914, | |
| "grad_norm": 0.9894579648971558, | |
| "learning_rate": 2.2360818938828187e-08, | |
| "loss": 0.27, | |
| "step": 4570 | |
| }, | |
| { | |
| "epoch": 0.916, | |
| "grad_norm": 0.47403454780578613, | |
| "learning_rate": 2.1340251233966377e-08, | |
| "loss": 0.1885, | |
| "step": 4580 | |
| }, | |
| { | |
| "epoch": 0.918, | |
| "grad_norm": 1.002657175064087, | |
| "learning_rate": 2.0343012729971243e-08, | |
| "loss": 0.1384, | |
| "step": 4590 | |
| }, | |
| { | |
| "epoch": 0.92, | |
| "grad_norm": 0.4796471893787384, | |
| "learning_rate": 1.936915203084055e-08, | |
| "loss": 0.1593, | |
| "step": 4600 | |
| }, | |
| { | |
| "epoch": 0.922, | |
| "grad_norm": 0.39311739802360535, | |
| "learning_rate": 1.8418716601170947e-08, | |
| "loss": 0.1925, | |
| "step": 4610 | |
| }, | |
| { | |
| "epoch": 0.924, | |
| "grad_norm": 0.5048733353614807, | |
| "learning_rate": 1.7491752763844292e-08, | |
| "loss": 0.3124, | |
| "step": 4620 | |
| }, | |
| { | |
| "epoch": 0.926, | |
| "grad_norm": 0.5101653933525085, | |
| "learning_rate": 1.658830569777031e-08, | |
| "loss": 0.1582, | |
| "step": 4630 | |
| }, | |
| { | |
| "epoch": 0.928, | |
| "grad_norm": 0.3665623664855957, | |
| "learning_rate": 1.570841943568446e-08, | |
| "loss": 0.1796, | |
| "step": 4640 | |
| }, | |
| { | |
| "epoch": 0.93, | |
| "grad_norm": 0.20403508841991425, | |
| "learning_rate": 1.4852136862001763e-08, | |
| "loss": 0.3377, | |
| "step": 4650 | |
| }, | |
| { | |
| "epoch": 0.932, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.4019499710726911e-08, | |
| "loss": 0.1866, | |
| "step": 4660 | |
| }, | |
| { | |
| "epoch": 0.934, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.3210548563419855e-08, | |
| "loss": 0.1837, | |
| "step": 4670 | |
| }, | |
| { | |
| "epoch": 0.936, | |
| "grad_norm": 0.0, | |
| "learning_rate": 1.2425322847218367e-08, | |
| "loss": 0.1362, | |
| "step": 4680 | |
| }, | |
| { | |
| "epoch": 0.938, | |
| "grad_norm": 0.3222580850124359, | |
| "learning_rate": 1.166386083291604e-08, | |
| "loss": 0.2977, | |
| "step": 4690 | |
| }, | |
| { | |
| "epoch": 0.94, | |
| "grad_norm": 0.8778265118598938, | |
| "learning_rate": 1.0926199633097154e-08, | |
| "loss": 0.2056, | |
| "step": 4700 | |
| }, | |
| { | |
| "epoch": 0.942, | |
| "grad_norm": 0.33410125970840454, | |
| "learning_rate": 1.0212375200327972e-08, | |
| "loss": 0.244, | |
| "step": 4710 | |
| }, | |
| { | |
| "epoch": 0.944, | |
| "grad_norm": 0.2924971878528595, | |
| "learning_rate": 9.522422325404233e-09, | |
| "loss": 0.1457, | |
| "step": 4720 | |
| }, | |
| { | |
| "epoch": 0.946, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.856374635655695e-09, | |
| "loss": 0.2427, | |
| "step": 4730 | |
| }, | |
| { | |
| "epoch": 0.948, | |
| "grad_norm": 0.0, | |
| "learning_rate": 8.214264593307096e-09, | |
| "loss": 0.2146, | |
| "step": 4740 | |
| }, | |
| { | |
| "epoch": 0.95, | |
| "grad_norm": 0.33636531233787537, | |
| "learning_rate": 7.59612349389599e-09, | |
| "loss": 0.1394, | |
| "step": 4750 | |
| }, | |
| { | |
| "epoch": 0.952, | |
| "grad_norm": 0.0, | |
| "learning_rate": 7.0019814647475636e-09, | |
| "loss": 0.0868, | |
| "step": 4760 | |
| }, | |
| { | |
| "epoch": 0.954, | |
| "grad_norm": 1.4758881330490112, | |
| "learning_rate": 6.431867463506046e-09, | |
| "loss": 0.2783, | |
| "step": 4770 | |
| }, | |
| { | |
| "epoch": 0.956, | |
| "grad_norm": 0.6460458040237427, | |
| "learning_rate": 5.8858092767236076e-09, | |
| "loss": 0.2321, | |
| "step": 4780 | |
| }, | |
| { | |
| "epoch": 0.958, | |
| "grad_norm": 0.5238158702850342, | |
| "learning_rate": 5.3638335185058335e-09, | |
| "loss": 0.2235, | |
| "step": 4790 | |
| }, | |
| { | |
| "epoch": 0.96, | |
| "grad_norm": 1.5409115552902222, | |
| "learning_rate": 4.865965629214819e-09, | |
| "loss": 0.2579, | |
| "step": 4800 | |
| }, | |
| { | |
| "epoch": 0.962, | |
| "grad_norm": 0.5042092800140381, | |
| "learning_rate": 4.3922298742291585e-09, | |
| "loss": 0.1759, | |
| "step": 4810 | |
| }, | |
| { | |
| "epoch": 0.964, | |
| "grad_norm": 0.4713117182254791, | |
| "learning_rate": 3.9426493427611175e-09, | |
| "loss": 0.2138, | |
| "step": 4820 | |
| }, | |
| { | |
| "epoch": 0.966, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.5172459467315286e-09, | |
| "loss": 0.1715, | |
| "step": 4830 | |
| }, | |
| { | |
| "epoch": 0.968, | |
| "grad_norm": 0.8907610177993774, | |
| "learning_rate": 3.116040419701815e-09, | |
| "loss": 0.1743, | |
| "step": 4840 | |
| }, | |
| { | |
| "epoch": 0.97, | |
| "grad_norm": 0.20034664869308472, | |
| "learning_rate": 2.739052315863355e-09, | |
| "loss": 0.2379, | |
| "step": 4850 | |
| }, | |
| { | |
| "epoch": 0.972, | |
| "grad_norm": 0.3425753712654114, | |
| "learning_rate": 2.3863000090844076e-09, | |
| "loss": 0.2835, | |
| "step": 4860 | |
| }, | |
| { | |
| "epoch": 0.974, | |
| "grad_norm": 0.7830394506454468, | |
| "learning_rate": 2.057800692014833e-09, | |
| "loss": 0.1899, | |
| "step": 4870 | |
| }, | |
| { | |
| "epoch": 0.976, | |
| "grad_norm": 0.5811526775360107, | |
| "learning_rate": 1.7535703752478147e-09, | |
| "loss": 0.3774, | |
| "step": 4880 | |
| }, | |
| { | |
| "epoch": 0.978, | |
| "grad_norm": 0.09480271488428116, | |
| "learning_rate": 1.4736238865398765e-09, | |
| "loss": 0.189, | |
| "step": 4890 | |
| }, | |
| { | |
| "epoch": 0.98, | |
| "grad_norm": 0.22772358357906342, | |
| "learning_rate": 1.217974870087901e-09, | |
| "loss": 0.196, | |
| "step": 4900 | |
| }, | |
| { | |
| "epoch": 0.982, | |
| "grad_norm": 0.4435659348964691, | |
| "learning_rate": 9.866357858642205e-10, | |
| "loss": 0.3854, | |
| "step": 4910 | |
| }, | |
| { | |
| "epoch": 0.984, | |
| "grad_norm": 0.30677875876426697, | |
| "learning_rate": 7.79617909009489e-10, | |
| "loss": 0.3105, | |
| "step": 4920 | |
| }, | |
| { | |
| "epoch": 0.986, | |
| "grad_norm": 0.5438311696052551, | |
| "learning_rate": 5.969313292830125e-10, | |
| "loss": 0.2419, | |
| "step": 4930 | |
| }, | |
| { | |
| "epoch": 0.988, | |
| "grad_norm": 0.39847153425216675, | |
| "learning_rate": 4.3858495057080836e-10, | |
| "loss": 0.2969, | |
| "step": 4940 | |
| }, | |
| { | |
| "epoch": 0.99, | |
| "grad_norm": 0.0, | |
| "learning_rate": 3.0458649045211894e-10, | |
| "loss": 0.1753, | |
| "step": 4950 | |
| }, | |
| { | |
| "epoch": 0.992, | |
| "grad_norm": 0.3326397240161896, | |
| "learning_rate": 1.9494247982282387e-10, | |
| "loss": 0.1342, | |
| "step": 4960 | |
| }, | |
| { | |
| "epoch": 0.994, | |
| "grad_norm": 1.0523000955581665, | |
| "learning_rate": 1.0965826257725019e-10, | |
| "loss": 0.2628, | |
| "step": 4970 | |
| }, | |
| { | |
| "epoch": 0.996, | |
| "grad_norm": 0.48323336243629456, | |
| "learning_rate": 4.873799534788059e-11, | |
| "loss": 0.1767, | |
| "step": 4980 | |
| }, | |
| { | |
| "epoch": 0.998, | |
| "grad_norm": 0.9073156118392944, | |
| "learning_rate": 1.2184647302626582e-11, | |
| "loss": 0.1813, | |
| "step": 4990 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "grad_norm": 0.6704990267753601, | |
| "learning_rate": 0.0, | |
| "loss": 0.2975, | |
| "step": 5000 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "step": 5000, | |
| "total_flos": 2.3466648993521664e+16, | |
| "train_loss": 0.3158116644740105, | |
| "train_runtime": 807.301, | |
| "train_samples_per_second": 6.193, | |
| "train_steps_per_second": 6.193 | |
| } | |
| ], | |
| "logging_steps": 10, | |
| "max_steps": 5000, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 1, | |
| "save_steps": 4000, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": true | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 2.3466648993521664e+16, | |
| "train_batch_size": 1, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |
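
The file above is the state log a Hugging Face Trainer writes alongside its checkpoints. As a minimal sketch of how such a file can be consumed, the Python below loads it and summarizes `log_history`; the filename `trainer_state.json` is an assumption based on the Trainer's default output name, and everything else reads only keys that appear in the log itself (`loss`, `learning_rate`, `step`, `train_loss`).

```python
# Minimal sketch: load and summarize a Trainer state log like the one above.
# Assumes the JSON is saved as "trainer_state.json" (the Trainer's default
# filename inside a checkpoint directory) -- adjust the path as needed.
import json

with open("trainer_state.json") as f:
    state = json.load(f)

# The last log_history entry is a run summary (train_loss, train_runtime,
# ...); the per-step entries are the ones carrying both "loss" and
# "learning_rate", written every `logging_steps` (here 10) optimizer steps.
logs = [e for e in state["log_history"]
        if "loss" in e and "learning_rate" in e]

steps = [e["step"] for e in logs]
losses = [e["loss"] for e in logs]
summary = state["log_history"][-1]

print(f"logged entries : {len(logs)} (every {state['logging_steps']} steps)")
print(f"final step     : {steps[-1]} / {state['max_steps']}")
print(f"first/last loss: {losses[0]:.4f} -> {losses[-1]:.4f}")
print(f"mean train loss: {summary['train_loss']:.4f}")
print(f"runtime        : {summary['train_runtime']:.1f}s "
      f"({summary['train_steps_per_second']:.2f} steps/s)")
```

Run against this log, it should report 500 per-step entries (steps 10 through 5000, logged every 10), a final step of 5000/5000, and the 0.3158 average training loss and 807.3 s runtime shown in the summary block.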