Note: this trainer_state.json is not strict JSON. The first log entry records "grad_norm": NaN, and RFC 8259 defines no NaN token, so strict parsers (for example JavaScript's JSON.parse, which fails here with "Unexpected token 'N'") reject the file.
{
  "best_metric": 0.44340217113494873,
  "best_model_checkpoint": "saved_model/lop_jun2024/checkpoint-10982",
  "epoch": 2.999931707983337,
  "eval_steps": 500,
  "global_step": 10982,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0,
      "grad_norm": NaN,
      "learning_rate": 0.0,
      "loss": 74.2157,
      "step": 1
    },
    {
      "epoch": 0.0,
      "grad_norm": 13.99621295928955,
      "learning_rate": 2.5e-06,
      "loss": 74.5237,
      "step": 10
    },
    {
      "epoch": 0.01,
      "grad_norm": 14.391114234924316,
      "learning_rate": 6.5000000000000004e-06,
      "loss": 74.2338,
      "step": 20
    },
    {
      "epoch": 0.01,
      "grad_norm": 15.539421081542969,
      "learning_rate": 1.1500000000000002e-05,
      "loss": 72.8847,
      "step": 30
    },
    {
      "epoch": 0.01,
      "grad_norm": 16.487953186035156,
      "learning_rate": 1.65e-05,
      "loss": 70.7699,
      "step": 40
    },
    {
      "epoch": 0.01,
      "grad_norm": 21.840412139892578,
      "learning_rate": 2.15e-05,
      "loss": 66.4777,
      "step": 50
    },
    {
      "epoch": 0.02,
      "grad_norm": 23.74176025390625,
      "learning_rate": 2.6000000000000002e-05,
      "loss": 59.438,
      "step": 60
    },
    {
      "epoch": 0.02,
      "grad_norm": 26.7894287109375,
      "learning_rate": 3.1e-05,
      "loss": 45.7214,
      "step": 70
    },
    {
      "epoch": 0.02,
      "grad_norm": 21.767683029174805,
      "learning_rate": 3.55e-05,
      "loss": 27.7672,
      "step": 80
    },
    {
      "epoch": 0.02,
      "grad_norm": 15.309523582458496,
      "learning_rate": 4.05e-05,
      "loss": 12.9366,
      "step": 90
    },
    {
      "epoch": 0.03,
      "grad_norm": 7.069633960723877,
      "learning_rate": 4.55e-05,
      "loss": 4.342,
      "step": 100
    },
    {
      "epoch": 0.03,
      "grad_norm": 10.602043151855469,
      "learning_rate": 5.05e-05,
      "loss": 1.7191,
      "step": 110
    },
    {
      "epoch": 0.03,
      "grad_norm": 8.00312614440918,
      "learning_rate": 5.550000000000001e-05,
      "loss": 1.3934,
      "step": 120
    },
    {
      "epoch": 0.04,
      "grad_norm": 7.026750087738037,
      "learning_rate": 6.05e-05,
      "loss": 1.3728,
      "step": 130
    },
    {
      "epoch": 0.04,
      "grad_norm": 10.686494827270508,
      "learning_rate": 6.55e-05,
      "loss": 1.2595,
      "step": 140
    },
    {
      "epoch": 0.04,
      "grad_norm": 7.127823352813721,
      "learning_rate": 7.05e-05,
      "loss": 1.2226,
      "step": 150
    },
    {
      "epoch": 0.04,
      "grad_norm": 6.253384590148926,
      "learning_rate": 7.55e-05,
      "loss": 1.2934,
      "step": 160
    },
    {
      "epoch": 0.05,
      "grad_norm": 7.56700325012207,
      "learning_rate": 8.05e-05,
      "loss": 1.2571,
      "step": 170
    },
    {
      "epoch": 0.05,
      "grad_norm": 9.671475410461426,
      "learning_rate": 8.55e-05,
      "loss": 1.2005,
      "step": 180
    },
    {
      "epoch": 0.05,
      "grad_norm": 7.772515296936035,
      "learning_rate": 9.05e-05,
      "loss": 1.253,
      "step": 190
    },
    {
      "epoch": 0.05,
      "grad_norm": 6.593009948730469,
      "learning_rate": 9.55e-05,
      "loss": 1.2269,
      "step": 200
    },
    {
      "epoch": 0.06,
      "grad_norm": 9.796707153320312,
      "learning_rate": 9.999725274725276e-05,
      "loss": 1.2425,
      "step": 210
    },
    {
      "epoch": 0.06,
      "grad_norm": 5.557741641998291,
      "learning_rate": 9.996978021978023e-05,
      "loss": 1.2383,
      "step": 220
    },
    {
      "epoch": 0.06,
      "grad_norm": 7.321559906005859,
      "learning_rate": 9.994230769230771e-05,
      "loss": 1.2026,
      "step": 230
    },
    {
      "epoch": 0.07,
      "grad_norm": 3.337256908416748,
      "learning_rate": 9.991483516483518e-05,
      "loss": 1.1289,
      "step": 240
    },
    {
      "epoch": 0.07,
      "grad_norm": 4.283127307891846,
      "learning_rate": 9.988736263736264e-05,
      "loss": 1.0357,
      "step": 250
    },
    {
      "epoch": 0.07,
      "grad_norm": 4.2405242919921875,
      "learning_rate": 9.985989010989013e-05,
      "loss": 1.0067,
      "step": 260
    },
    {
      "epoch": 0.07,
      "grad_norm": 6.622239112854004,
      "learning_rate": 9.98324175824176e-05,
      "loss": 0.9816,
      "step": 270
    },
    {
      "epoch": 0.08,
      "grad_norm": 5.521809101104736,
      "learning_rate": 9.980494505494506e-05,
      "loss": 0.9578,
      "step": 280
    },
    {
      "epoch": 0.08,
      "grad_norm": 3.7347898483276367,
      "learning_rate": 9.977747252747254e-05,
      "loss": 0.8762,
      "step": 290
    },
    {
      "epoch": 0.08,
      "grad_norm": 6.452709674835205,
      "learning_rate": 9.975000000000001e-05,
      "loss": 0.8188,
      "step": 300
    },
    {
      "epoch": 0.08,
      "grad_norm": 3.2089035511016846,
      "learning_rate": 9.972252747252748e-05,
      "loss": 0.8247,
      "step": 310
    },
    {
      "epoch": 0.09,
      "grad_norm": 4.455322742462158,
      "learning_rate": 9.969505494505496e-05,
      "loss": 0.7599,
      "step": 320
    },
    {
      "epoch": 0.09,
      "grad_norm": 4.537048816680908,
      "learning_rate": 9.966758241758242e-05,
      "loss": 0.7968,
      "step": 330
    },
    {
      "epoch": 0.09,
      "grad_norm": 3.754340648651123,
      "learning_rate": 9.964010989010988e-05,
      "loss": 0.72,
      "step": 340
    },
    {
      "epoch": 0.1,
      "grad_norm": 4.538580894470215,
      "learning_rate": 9.961263736263737e-05,
      "loss": 0.7207,
      "step": 350
    },
    {
      "epoch": 0.1,
      "grad_norm": 4.3613457679748535,
      "learning_rate": 9.958516483516483e-05,
      "loss": 0.7235,
      "step": 360
    },
    {
      "epoch": 0.1,
      "grad_norm": 2.2669098377227783,
      "learning_rate": 9.95576923076923e-05,
      "loss": 0.7018,
      "step": 370
    },
    {
      "epoch": 0.1,
      "grad_norm": 5.329736232757568,
      "learning_rate": 9.953021978021978e-05,
      "loss": 0.7193,
      "step": 380
    },
    {
      "epoch": 0.11,
      "grad_norm": 2.5049781799316406,
      "learning_rate": 9.950274725274725e-05,
      "loss": 0.6948,
      "step": 390
    },
    {
      "epoch": 0.11,
      "grad_norm": 3.5907931327819824,
      "learning_rate": 9.947527472527472e-05,
      "loss": 0.6637,
      "step": 400
    },
    {
      "epoch": 0.11,
      "grad_norm": 3.264726161956787,
      "learning_rate": 9.94478021978022e-05,
      "loss": 0.6588,
      "step": 410
    },
    {
      "epoch": 0.11,
      "grad_norm": 3.763380765914917,
      "learning_rate": 9.942032967032967e-05,
      "loss": 0.6479,
      "step": 420
    },
    {
      "epoch": 0.12,
      "grad_norm": 1.9857630729675293,
      "learning_rate": 9.939285714285714e-05,
      "loss": 0.6481,
      "step": 430
    },
    {
      "epoch": 0.12,
      "grad_norm": 2.649977922439575,
      "learning_rate": 9.936538461538462e-05,
      "loss": 0.6284,
      "step": 440
    },
    {
      "epoch": 0.12,
      "grad_norm": 1.4232739210128784,
      "learning_rate": 9.933791208791209e-05,
      "loss": 0.6214,
      "step": 450
    },
    {
      "epoch": 0.13,
      "grad_norm": 5.327674865722656,
      "learning_rate": 9.931043956043956e-05,
      "loss": 0.633,
      "step": 460
    },
    {
      "epoch": 0.13,
      "grad_norm": 3.673241376876831,
      "learning_rate": 9.928296703296704e-05,
      "loss": 0.6792,
      "step": 470
    },
    {
      "epoch": 0.13,
      "grad_norm": 3.6698083877563477,
      "learning_rate": 9.92554945054945e-05,
      "loss": 0.6297,
      "step": 480
    },
    {
      "epoch": 0.13,
      "grad_norm": 2.056175470352173,
      "learning_rate": 9.922802197802197e-05,
      "loss": 0.6172,
      "step": 490
    },
    {
      "epoch": 0.14,
      "grad_norm": 1.345395803451538,
      "learning_rate": 9.920054945054946e-05,
      "loss": 0.605,
      "step": 500
    },
    {
      "epoch": 0.14,
      "grad_norm": 1.7540231943130493,
      "learning_rate": 9.917307692307692e-05,
      "loss": 0.6288,
      "step": 510
    },
    {
      "epoch": 0.14,
      "grad_norm": 4.050562858581543,
      "learning_rate": 9.914560439560439e-05,
      "loss": 0.6281,
      "step": 520
    },
    {
      "epoch": 0.14,
      "grad_norm": 5.210700511932373,
      "learning_rate": 9.911813186813187e-05,
      "loss": 0.6138,
      "step": 530
    },
    {
      "epoch": 0.15,
      "grad_norm": 3.6927013397216797,
      "learning_rate": 9.909065934065934e-05,
      "loss": 0.6033,
      "step": 540
    },
    {
      "epoch": 0.15,
      "grad_norm": 3.4765613079071045,
      "learning_rate": 9.906318681318681e-05,
      "loss": 0.5991,
      "step": 550
    },
    {
      "epoch": 0.15,
      "grad_norm": 1.8127527236938477,
      "learning_rate": 9.903571428571429e-05,
      "loss": 0.5994,
      "step": 560
    },
    {
      "epoch": 0.16,
      "grad_norm": 1.6926054954528809,
      "learning_rate": 9.900824175824176e-05,
      "loss": 0.5988,
      "step": 570
    },
    {
      "epoch": 0.16,
      "grad_norm": 2.3790557384490967,
      "learning_rate": 9.898076923076923e-05,
      "loss": 0.6147,
      "step": 580
    },
    {
      "epoch": 0.16,
      "grad_norm": 3.558159589767456,
      "learning_rate": 9.895329670329671e-05,
      "loss": 0.5858,
      "step": 590
    },
    {
      "epoch": 0.16,
      "grad_norm": 3.5203194618225098,
      "learning_rate": 9.892582417582418e-05,
      "loss": 0.6036,
      "step": 600
    },
    {
      "epoch": 0.17,
      "grad_norm": 2.4314634799957275,
      "learning_rate": 9.889835164835165e-05,
      "loss": 0.5789,
      "step": 610
    },
    {
      "epoch": 0.17,
      "grad_norm": 2.956555128097534,
      "learning_rate": 9.887087912087913e-05,
      "loss": 0.6021,
      "step": 620
    },
    {
      "epoch": 0.17,
      "grad_norm": 2.231612205505371,
      "learning_rate": 9.88434065934066e-05,
      "loss": 0.5825,
      "step": 630
    },
    {
      "epoch": 0.17,
      "grad_norm": 2.1588950157165527,
      "learning_rate": 9.881593406593406e-05,
      "loss": 0.5924,
      "step": 640
    },
    {
      "epoch": 0.18,
      "grad_norm": 2.8479809761047363,
      "learning_rate": 9.878846153846155e-05,
      "loss": 0.584,
      "step": 650
    },
    {
      "epoch": 0.18,
      "grad_norm": 2.2029404640197754,
      "learning_rate": 9.876098901098901e-05,
      "loss": 0.5855,
      "step": 660
    },
    {
      "epoch": 0.18,
      "grad_norm": 2.243802070617676,
      "learning_rate": 9.873351648351648e-05,
      "loss": 0.584,
      "step": 670
    },
    {
      "epoch": 0.19,
      "grad_norm": 2.8837687969207764,
      "learning_rate": 9.870604395604396e-05,
      "loss": 0.5867,
      "step": 680
    },
    {
      "epoch": 0.19,
      "grad_norm": 2.0768215656280518,
      "learning_rate": 9.867857142857143e-05,
      "loss": 0.5823,
      "step": 690
    },
    {
      "epoch": 0.19,
      "grad_norm": 2.950639247894287,
      "learning_rate": 9.86510989010989e-05,
      "loss": 0.5832,
      "step": 700
    },
    {
      "epoch": 0.19,
      "grad_norm": 1.0848056077957153,
      "learning_rate": 9.862362637362638e-05,
      "loss": 0.5595,
      "step": 710
    },
    {
      "epoch": 0.2,
      "grad_norm": 1.293261170387268,
      "learning_rate": 9.859615384615385e-05,
      "loss": 0.5926,
      "step": 720
    },
    {
      "epoch": 0.2,
      "grad_norm": 2.286271095275879,
      "learning_rate": 9.856868131868132e-05,
      "loss": 0.5658,
      "step": 730
    },
    {
      "epoch": 0.2,
      "grad_norm": 2.9983866214752197,
      "learning_rate": 9.85412087912088e-05,
      "loss": 0.5749,
      "step": 740
    },
    {
      "epoch": 0.2,
      "grad_norm": 2.712332248687744,
      "learning_rate": 9.851373626373627e-05,
      "loss": 0.5757,
      "step": 750
    },
    {
      "epoch": 0.21,
      "grad_norm": 3.5591819286346436,
      "learning_rate": 9.848626373626374e-05,
      "loss": 0.5706,
      "step": 760
    },
    {
      "epoch": 0.21,
      "grad_norm": 3.00175142288208,
      "learning_rate": 9.845879120879122e-05,
      "loss": 0.5654,
      "step": 770
    },
    {
      "epoch": 0.21,
      "grad_norm": 1.5290532112121582,
      "learning_rate": 9.843131868131869e-05,
      "loss": 0.578,
      "step": 780
    },
    {
      "epoch": 0.22,
      "grad_norm": 1.8737398386001587,
      "learning_rate": 9.840384615384615e-05,
      "loss": 0.5598,
      "step": 790
    },
    {
      "epoch": 0.22,
      "grad_norm": 1.190427541732788,
      "learning_rate": 9.837637362637364e-05,
      "loss": 0.5507,
      "step": 800
    },
    {
      "epoch": 0.22,
      "grad_norm": 2.8329577445983887,
      "learning_rate": 9.83489010989011e-05,
      "loss": 0.5394,
      "step": 810
    },
    {
      "epoch": 0.22,
      "grad_norm": 2.6253738403320312,
      "learning_rate": 9.832142857142857e-05,
      "loss": 0.5404,
      "step": 820
    },
    {
      "epoch": 0.23,
      "grad_norm": 2.1394851207733154,
      "learning_rate": 9.829395604395605e-05,
      "loss": 0.5616,
      "step": 830
    },
    {
      "epoch": 0.23,
      "grad_norm": 1.5708709955215454,
      "learning_rate": 9.826648351648352e-05,
      "loss": 0.5606,
      "step": 840
    },
    {
      "epoch": 0.23,
      "grad_norm": 1.521190881729126,
      "learning_rate": 9.823901098901099e-05,
      "loss": 0.5605,
      "step": 850
    },
    {
      "epoch": 0.23,
      "grad_norm": 1.7570327520370483,
      "learning_rate": 9.821153846153847e-05,
      "loss": 0.5475,
      "step": 860
    },
    {
      "epoch": 0.24,
      "grad_norm": 2.039400577545166,
      "learning_rate": 9.818406593406594e-05,
      "loss": 0.5451,
      "step": 870
    },
    {
      "epoch": 0.24,
      "grad_norm": 1.9855557680130005,
      "learning_rate": 9.815659340659341e-05,
      "loss": 0.5378,
      "step": 880
    },
    {
      "epoch": 0.24,
      "grad_norm": 1.3518325090408325,
      "learning_rate": 9.812912087912089e-05,
      "loss": 0.5447,
      "step": 890
    },
    {
      "epoch": 0.25,
      "grad_norm": 2.36753511428833,
      "learning_rate": 9.810164835164836e-05,
      "loss": 0.5593,
      "step": 900
    },
    {
      "epoch": 0.25,
      "grad_norm": 1.611568570137024,
      "learning_rate": 9.807417582417583e-05,
      "loss": 0.536,
      "step": 910
    },
    {
      "epoch": 0.25,
      "grad_norm": 1.3011342287063599,
      "learning_rate": 9.804670329670331e-05,
      "loss": 0.5508,
      "step": 920
    },
    {
      "epoch": 0.25,
      "grad_norm": 1.6673119068145752,
      "learning_rate": 9.801923076923078e-05,
      "loss": 0.5427,
      "step": 930
    },
    {
      "epoch": 0.26,
      "grad_norm": 1.4966050386428833,
      "learning_rate": 9.799175824175824e-05,
      "loss": 0.5364,
      "step": 940
    },
    {
      "epoch": 0.26,
      "grad_norm": 1.2000831365585327,
      "learning_rate": 9.796428571428573e-05,
      "loss": 0.5452,
      "step": 950
    },
    {
      "epoch": 0.26,
      "grad_norm": 1.9247843027114868,
      "learning_rate": 9.79368131868132e-05,
      "loss": 0.529,
      "step": 960
    },
    {
      "epoch": 0.26,
      "grad_norm": 1.6240330934524536,
      "learning_rate": 9.790934065934066e-05,
      "loss": 0.5523,
      "step": 970
    },
    {
      "epoch": 0.27,
      "grad_norm": 1.4320908784866333,
      "learning_rate": 9.788186813186814e-05,
      "loss": 0.5567,
      "step": 980
    },
    {
      "epoch": 0.27,
      "grad_norm": 1.7884244918823242,
      "learning_rate": 9.785439560439561e-05,
      "loss": 0.5244,
      "step": 990
    },
    {
      "epoch": 0.27,
      "grad_norm": 1.9070675373077393,
      "learning_rate": 9.782692307692308e-05,
      "loss": 0.5354,
      "step": 1000
    },
    {
      "epoch": 0.28,
      "grad_norm": 1.0304174423217773,
      "learning_rate": 9.779945054945056e-05,
      "loss": 0.5443,
      "step": 1010
    },
    {
      "epoch": 0.28,
      "grad_norm": 2.184297800064087,
      "learning_rate": 9.777197802197803e-05,
      "loss": 0.5201,
      "step": 1020
    },
    {
      "epoch": 0.28,
      "grad_norm": 1.2110681533813477,
      "learning_rate": 9.77445054945055e-05,
      "loss": 0.5363,
      "step": 1030
    },
    {
      "epoch": 0.28,
      "grad_norm": 2.25685977935791,
      "learning_rate": 9.771703296703298e-05,
      "loss": 0.5392,
      "step": 1040
    },
    {
      "epoch": 0.29,
      "grad_norm": 2.558457851409912,
      "learning_rate": 9.768956043956045e-05,
      "loss": 0.5466,
      "step": 1050
    },
    {
      "epoch": 0.29,
      "grad_norm": 1.5898990631103516,
      "learning_rate": 9.766208791208792e-05,
      "loss": 0.5386,
      "step": 1060
    },
    {
      "epoch": 0.29,
      "grad_norm": 2.6933228969573975,
      "learning_rate": 9.76346153846154e-05,
      "loss": 0.5565,
      "step": 1070
    },
    {
      "epoch": 0.3,
      "grad_norm": 1.440042495727539,
      "learning_rate": 9.760714285714287e-05,
      "loss": 0.5507,
      "step": 1080
    },
    {
      "epoch": 0.3,
      "grad_norm": 1.9476529359817505,
      "learning_rate": 9.757967032967033e-05,
      "loss": 0.5276,
      "step": 1090
    },
    {
      "epoch": 0.3,
      "grad_norm": 3.142998218536377,
      "learning_rate": 9.755219780219782e-05,
      "loss": 0.5238,
      "step": 1100
    },
    {
      "epoch": 0.3,
      "grad_norm": 2.13360857963562,
      "learning_rate": 9.752472527472528e-05,
      "loss": 0.5326,
      "step": 1110
    },
    {
      "epoch": 0.31,
      "grad_norm": 1.2551095485687256,
      "learning_rate": 9.749725274725275e-05,
      "loss": 0.5569,
      "step": 1120
    },
    {
      "epoch": 0.31,
      "grad_norm": 1.0858782529830933,
      "learning_rate": 9.746978021978023e-05,
      "loss": 0.5425,
      "step": 1130
    },
    {
      "epoch": 0.31,
      "grad_norm": 1.3724364042282104,
      "learning_rate": 9.74423076923077e-05,
      "loss": 0.5381,
      "step": 1140
    },
    {
      "epoch": 0.31,
      "grad_norm": 1.2211335897445679,
      "learning_rate": 9.741483516483517e-05,
      "loss": 0.5147,
      "step": 1150
    },
    {
      "epoch": 0.32,
      "grad_norm": 1.806498646736145,
      "learning_rate": 9.738736263736264e-05,
      "loss": 0.5218,
      "step": 1160
    },
    {
      "epoch": 0.32,
      "grad_norm": 0.7817394733428955,
      "learning_rate": 9.735989010989012e-05,
      "loss": 0.539,
      "step": 1170
    },
    {
      "epoch": 0.32,
      "grad_norm": 2.054344415664673,
      "learning_rate": 9.733241758241759e-05,
      "loss": 0.5305,
      "step": 1180
    },
    {
      "epoch": 0.33,
      "grad_norm": 0.8021188974380493,
      "learning_rate": 9.730494505494506e-05,
      "loss": 0.519,
      "step": 1190
    },
    {
      "epoch": 0.33,
      "grad_norm": 1.833899736404419,
      "learning_rate": 9.727747252747254e-05,
      "loss": 0.5589,
      "step": 1200
    },
    {
      "epoch": 0.33,
      "grad_norm": 2.6109519004821777,
      "learning_rate": 9.725e-05,
      "loss": 0.5333,
      "step": 1210
    },
    {
      "epoch": 0.33,
      "grad_norm": 2.0866036415100098,
      "learning_rate": 9.722252747252747e-05,
      "loss": 0.5436,
      "step": 1220
    },
    {
      "epoch": 0.34,
      "grad_norm": 2.150935649871826,
      "learning_rate": 9.719505494505496e-05,
      "loss": 0.5202,
      "step": 1230
    },
    {
      "epoch": 0.34,
      "grad_norm": 1.280685544013977,
      "learning_rate": 9.716758241758242e-05,
      "loss": 0.5146,
      "step": 1240
    },
    {
      "epoch": 0.34,
      "grad_norm": 0.7753365635871887,
      "learning_rate": 9.714010989010989e-05,
      "loss": 0.5106,
      "step": 1250
    },
    {
      "epoch": 0.34,
      "grad_norm": 0.8706464767456055,
      "learning_rate": 9.711263736263737e-05,
      "loss": 0.4952,
      "step": 1260
    },
    {
      "epoch": 0.35,
      "grad_norm": 1.3467265367507935,
      "learning_rate": 9.708516483516484e-05,
      "loss": 0.5105,
      "step": 1270
    },
    {
      "epoch": 0.35,
      "grad_norm": 2.5618174076080322,
      "learning_rate": 9.705769230769231e-05,
      "loss": 0.5065,
      "step": 1280
    },
    {
      "epoch": 0.35,
      "grad_norm": 1.3450369834899902,
      "learning_rate": 9.703021978021979e-05,
      "loss": 0.5201,
      "step": 1290
    },
    {
      "epoch": 0.36,
      "grad_norm": 1.3272931575775146,
      "learning_rate": 9.700274725274726e-05,
      "loss": 0.5189,
      "step": 1300
    },
    {
      "epoch": 0.36,
      "grad_norm": 2.324739694595337,
      "learning_rate": 9.697527472527473e-05,
      "loss": 0.5298,
      "step": 1310
    },
    {
      "epoch": 0.36,
      "grad_norm": 1.375444769859314,
      "learning_rate": 9.694780219780221e-05,
      "loss": 0.527,
      "step": 1320
    },
    {
      "epoch": 0.36,
      "grad_norm": 1.1685923337936401,
      "learning_rate": 9.692032967032968e-05,
      "loss": 0.5303,
      "step": 1330
    },
    {
      "epoch": 0.37,
      "grad_norm": 1.6014115810394287,
      "learning_rate": 9.689285714285715e-05,
      "loss": 0.5399,
      "step": 1340
    },
    {
      "epoch": 0.37,
      "grad_norm": 2.3533401489257812,
      "learning_rate": 9.686538461538463e-05,
      "loss": 0.5185,
      "step": 1350
    },
    {
      "epoch": 0.37,
      "grad_norm": 1.6813533306121826,
      "learning_rate": 9.68379120879121e-05,
      "loss": 0.4986,
      "step": 1360
    },
    {
      "epoch": 0.37,
      "grad_norm": 1.1102067232131958,
      "learning_rate": 9.681043956043956e-05,
      "loss": 0.5313,
      "step": 1370
    },
    {
      "epoch": 0.38,
      "grad_norm": 1.576412320137024,
      "learning_rate": 9.678296703296705e-05,
      "loss": 0.5087,
      "step": 1380
    },
    {
      "epoch": 0.38,
      "grad_norm": 1.5507354736328125,
      "learning_rate": 9.675549450549451e-05,
      "loss": 0.532,
      "step": 1390
    },
    {
      "epoch": 0.38,
      "grad_norm": 1.4840365648269653,
      "learning_rate": 9.672802197802198e-05,
      "loss": 0.5191,
      "step": 1400
    },
    {
      "epoch": 0.39,
      "grad_norm": 1.8767573833465576,
      "learning_rate": 9.670054945054946e-05,
      "loss": 0.5252,
      "step": 1410
    },
    {
      "epoch": 0.39,
      "grad_norm": 1.0486218929290771,
      "learning_rate": 9.667307692307693e-05,
      "loss": 0.5152,
      "step": 1420
    },
    {
      "epoch": 0.39,
      "grad_norm": 2.066174030303955,
      "learning_rate": 9.66456043956044e-05,
      "loss": 0.5163,
      "step": 1430
    },
    {
      "epoch": 0.39,
      "grad_norm": 1.7480829954147339,
      "learning_rate": 9.661813186813188e-05,
      "loss": 0.5292,
      "step": 1440
    },
    {
      "epoch": 0.4,
      "grad_norm": 1.6554828882217407,
      "learning_rate": 9.659065934065935e-05,
      "loss": 0.5301,
      "step": 1450
    },
    {
      "epoch": 0.4,
      "grad_norm": 1.0723321437835693,
      "learning_rate": 9.656318681318682e-05,
      "loss": 0.5118,
      "step": 1460
    },
    {
      "epoch": 0.4,
      "grad_norm": 1.7398673295974731,
      "learning_rate": 9.653571428571429e-05,
      "loss": 0.5268,
      "step": 1470
    },
    {
      "epoch": 0.4,
      "grad_norm": 1.2732940912246704,
      "learning_rate": 9.650824175824175e-05,
      "loss": 0.5085,
      "step": 1480
    },
    {
      "epoch": 0.41,
      "grad_norm": 1.0260119438171387,
      "learning_rate": 9.648076923076924e-05,
      "loss": 0.5243,
      "step": 1490
    },
    {
      "epoch": 0.41,
      "grad_norm": 0.8816061019897461,
      "learning_rate": 9.64532967032967e-05,
      "loss": 0.5158,
      "step": 1500
    },
    {
      "epoch": 0.41,
      "grad_norm": 2.7940518856048584,
      "learning_rate": 9.642582417582417e-05,
      "loss": 0.5051,
      "step": 1510
    },
    {
      "epoch": 0.42,
      "grad_norm": 1.1889257431030273,
      "learning_rate": 9.639835164835165e-05,
      "loss": 0.5111,
      "step": 1520
    },
    {
      "epoch": 0.42,
      "grad_norm": 1.8643608093261719,
      "learning_rate": 9.637087912087912e-05,
      "loss": 0.5136,
      "step": 1530
    },
    {
      "epoch": 0.42,
      "grad_norm": 1.658339023590088,
      "learning_rate": 9.634340659340659e-05,
      "loss": 0.5132,
      "step": 1540
    },
    {
      "epoch": 0.42,
      "grad_norm": 0.6262873411178589,
      "learning_rate": 9.631593406593407e-05,
      "loss": 0.5047,
      "step": 1550
    },
    {
      "epoch": 0.43,
      "grad_norm": 1.2276610136032104,
      "learning_rate": 9.628846153846154e-05,
      "loss": 0.4997,
      "step": 1560
    },
    {
      "epoch": 0.43,
      "grad_norm": 1.3661067485809326,
      "learning_rate": 9.626098901098901e-05,
      "loss": 0.5113,
      "step": 1570
    },
    {
      "epoch": 0.43,
      "grad_norm": 1.423560380935669,
      "learning_rate": 9.623351648351649e-05,
      "loss": 0.5167,
      "step": 1580
    },
    {
      "epoch": 0.43,
      "grad_norm": 1.391201376914978,
      "learning_rate": 9.620604395604396e-05,
      "loss": 0.5057,
      "step": 1590
    },
    {
      "epoch": 0.44,
      "grad_norm": 1.5331265926361084,
      "learning_rate": 9.617857142857143e-05,
      "loss": 0.5105,
      "step": 1600
    },
    {
      "epoch": 0.44,
      "grad_norm": 2.0285804271698,
      "learning_rate": 9.615109890109891e-05,
      "loss": 0.5009,
      "step": 1610
    },
    {
      "epoch": 0.44,
      "grad_norm": 1.470106840133667,
      "learning_rate": 9.612362637362638e-05,
      "loss": 0.5082,
      "step": 1620
    },
    {
      "epoch": 0.45,
      "grad_norm": 0.8231136798858643,
      "learning_rate": 9.609615384615384e-05,
      "loss": 0.4893,
      "step": 1630
    },
    {
      "epoch": 0.45,
      "grad_norm": 1.4825439453125,
      "learning_rate": 9.606868131868133e-05,
      "loss": 0.5132,
      "step": 1640
    },
    {
      "epoch": 0.45,
      "grad_norm": 1.8054287433624268,
      "learning_rate": 9.60412087912088e-05,
      "loss": 0.4878,
      "step": 1650
    },
    {
      "epoch": 0.45,
      "grad_norm": 0.9411340355873108,
      "learning_rate": 9.601373626373626e-05,
      "loss": 0.4908,
      "step": 1660
    },
    {
      "epoch": 0.46,
      "grad_norm": 1.754782795906067,
      "learning_rate": 9.598626373626374e-05,
      "loss": 0.5206,
      "step": 1670
    },
    {
      "epoch": 0.46,
      "grad_norm": 1.1740814447402954,
      "learning_rate": 9.595879120879121e-05,
      "loss": 0.514,
      "step": 1680
    },
    {
      "epoch": 0.46,
      "grad_norm": 1.0887936353683472,
      "learning_rate": 9.593131868131868e-05,
      "loss": 0.5095,
      "step": 1690
    },
    {
      "epoch": 0.46,
      "grad_norm": 2.0167980194091797,
      "learning_rate": 9.590384615384616e-05,
      "loss": 0.5066,
      "step": 1700
    },
    {
      "epoch": 0.47,
      "grad_norm": 1.6176742315292358,
      "learning_rate": 9.587637362637363e-05,
      "loss": 0.5016,
      "step": 1710
    },
    {
      "epoch": 0.47,
      "grad_norm": 1.0261584520339966,
      "learning_rate": 9.58489010989011e-05,
      "loss": 0.5032,
      "step": 1720
    },
    {
      "epoch": 0.47,
      "grad_norm": 1.0735365152359009,
      "learning_rate": 9.582142857142858e-05,
      "loss": 0.4894,
      "step": 1730
    },
    {
      "epoch": 0.48,
      "grad_norm": 1.4293698072433472,
      "learning_rate": 9.579395604395605e-05,
      "loss": 0.5111,
      "step": 1740
    },
    {
      "epoch": 0.48,
      "grad_norm": 1.296971321105957,
      "learning_rate": 9.576648351648352e-05,
      "loss": 0.5078,
      "step": 1750
    },
    {
      "epoch": 0.48,
      "grad_norm": 0.7727321982383728,
      "learning_rate": 9.5739010989011e-05,
      "loss": 0.4923,
      "step": 1760
    },
    {
      "epoch": 0.48,
      "grad_norm": 2.6028566360473633,
      "learning_rate": 9.571153846153847e-05,
      "loss": 0.4994,
      "step": 1770
    },
    {
      "epoch": 0.49,
      "grad_norm": 1.2606744766235352,
      "learning_rate": 9.568406593406593e-05,
      "loss": 0.4885,
      "step": 1780
    },
    {
      "epoch": 0.49,
      "grad_norm": 1.812680721282959,
      "learning_rate": 9.565659340659342e-05,
      "loss": 0.5101,
      "step": 1790
    },
    {
      "epoch": 0.49,
      "grad_norm": 1.2432329654693604,
      "learning_rate": 9.562912087912088e-05,
      "loss": 0.5012,
      "step": 1800
    },
    {
      "epoch": 0.49,
      "grad_norm": 1.365795373916626,
      "learning_rate": 9.560164835164835e-05,
      "loss": 0.5093,
      "step": 1810
    },
    {
      "epoch": 0.5,
      "grad_norm": 1.4509589672088623,
      "learning_rate": 9.557417582417583e-05,
      "loss": 0.4921,
      "step": 1820
    },
    {
      "epoch": 0.5,
      "grad_norm": 1.0986443758010864,
      "learning_rate": 9.55467032967033e-05,
      "loss": 0.5003,
      "step": 1830
    },
    {
      "epoch": 0.5,
      "grad_norm": 1.46748685836792,
      "learning_rate": 9.551923076923077e-05,
      "loss": 0.4925,
      "step": 1840
    },
    {
      "epoch": 0.51,
      "grad_norm": 1.3228458166122437,
      "learning_rate": 9.549175824175825e-05,
      "loss": 0.5032,
      "step": 1850
    },
    {
      "epoch": 0.51,
      "grad_norm": 0.8542389273643494,
      "learning_rate": 9.546428571428572e-05,
      "loss": 0.4896,
      "step": 1860
    },
    {
      "epoch": 0.51,
      "grad_norm": 1.7039434909820557,
      "learning_rate": 9.543681318681319e-05,
      "loss": 0.4852,
      "step": 1870
    },
    {
      "epoch": 0.51,
      "grad_norm": 1.1597633361816406,
      "learning_rate": 9.540934065934067e-05,
      "loss": 0.5015,
      "step": 1880
    },
    {
      "epoch": 0.52,
      "grad_norm": 2.178933620452881,
      "learning_rate": 9.538186813186814e-05,
      "loss": 0.5298,
      "step": 1890
    },
    {
      "epoch": 0.52,
      "grad_norm": 1.1454273462295532,
      "learning_rate": 9.53543956043956e-05,
      "loss": 0.5046,
      "step": 1900
    },
    {
      "epoch": 0.52,
      "grad_norm": 0.7407316565513611,
      "learning_rate": 9.532692307692309e-05,
      "loss": 0.486,
      "step": 1910
    },
    {
      "epoch": 0.52,
      "grad_norm": 1.7394108772277832,
      "learning_rate": 9.529945054945056e-05,
      "loss": 0.4977,
      "step": 1920
    },
    {
      "epoch": 0.53,
      "grad_norm": 0.9852740168571472,
      "learning_rate": 9.527197802197802e-05,
      "loss": 0.4871,
      "step": 1930
    },
    {
      "epoch": 0.53,
      "grad_norm": 1.4632673263549805,
      "learning_rate": 9.52445054945055e-05,
      "loss": 0.5082,
      "step": 1940
    },
    {
      "epoch": 0.53,
      "grad_norm": 1.4848504066467285,
      "learning_rate": 9.521703296703297e-05,
      "loss": 0.5067,
      "step": 1950
    },
    {
      "epoch": 0.54,
      "grad_norm": 1.5140033960342407,
      "learning_rate": 9.518956043956044e-05,
      "loss": 0.5063,
      "step": 1960
    },
    {
      "epoch": 0.54,
      "grad_norm": 2.4344804286956787,
      "learning_rate": 9.516208791208791e-05,
      "loss": 0.5099,
      "step": 1970
    },
    {
      "epoch": 0.54,
      "grad_norm": 0.8636808395385742,
      "learning_rate": 9.513461538461539e-05,
      "loss": 0.4902,
      "step": 1980
    },
    {
      "epoch": 0.54,
      "grad_norm": 1.4363267421722412,
      "learning_rate": 9.510714285714286e-05,
      "loss": 0.498,
      "step": 1990
    },
    {
      "epoch": 0.55,
      "grad_norm": 0.7588983774185181,
      "learning_rate": 9.507967032967033e-05,
      "loss": 0.4884,
      "step": 2000
    },
    {
      "epoch": 0.55,
      "grad_norm": 1.4090018272399902,
      "learning_rate": 9.505219780219781e-05,
      "loss": 0.5082,
      "step": 2010
    },
    {
      "epoch": 0.55,
      "grad_norm": 2.2110331058502197,
      "learning_rate": 9.502472527472528e-05,
      "loss": 0.5064,
      "step": 2020
    },
    {
      "epoch": 0.55,
      "grad_norm": 1.2035874128341675,
      "learning_rate": 9.499725274725275e-05,
      "loss": 0.4952,
      "step": 2030
    },
    {
      "epoch": 0.56,
      "grad_norm": 1.597834825515747,
      "learning_rate": 9.496978021978023e-05,
      "loss": 0.4956,
      "step": 2040
    },
    {
      "epoch": 0.56,
      "grad_norm": 1.4878921508789062,
      "learning_rate": 9.49423076923077e-05,
      "loss": 0.4865,
      "step": 2050
    },
    {
      "epoch": 0.56,
      "grad_norm": 1.6747124195098877,
      "learning_rate": 9.491483516483516e-05,
      "loss": 0.4935,
      "step": 2060
    },
    {
      "epoch": 0.57,
      "grad_norm": 1.3186380863189697,
      "learning_rate": 9.488736263736264e-05,
      "loss": 0.4938,
      "step": 2070
    },
    {
      "epoch": 0.57,
      "grad_norm": 2.2865145206451416,
      "learning_rate": 9.485989010989011e-05,
      "loss": 0.4985,
      "step": 2080
    },
    {
      "epoch": 0.57,
      "grad_norm": 2.515855550765991,
      "learning_rate": 9.483241758241758e-05,
      "loss": 0.5049,
      "step": 2090
    },
    {
      "epoch": 0.57,
      "grad_norm": 1.2123080492019653,
      "learning_rate": 9.480494505494506e-05,
      "loss": 0.5004,
      "step": 2100
    },
    {
      "epoch": 0.58,
      "grad_norm": 1.0919526815414429,
      "learning_rate": 9.477747252747253e-05,
      "loss": 0.4854,
      "step": 2110
    },
    {
      "epoch": 0.58,
      "grad_norm": 1.3105583190917969,
      "learning_rate": 9.475e-05,
      "loss": 0.4866,
      "step": 2120
    },
    {
      "epoch": 0.58,
      "grad_norm": 1.745622158050537,
      "learning_rate": 9.472252747252748e-05,
      "loss": 0.4892,
      "step": 2130
    },
    {
      "epoch": 0.58,
      "grad_norm": 0.6633902192115784,
      "learning_rate": 9.469505494505495e-05,
      "loss": 0.507,
      "step": 2140
    },
    {
      "epoch": 0.59,
      "grad_norm": 1.0530184507369995,
      "learning_rate": 9.466758241758242e-05,
      "loss": 0.5035,
      "step": 2150
    },
    {
      "epoch": 0.59,
      "grad_norm": 1.2395071983337402,
      "learning_rate": 9.46401098901099e-05,
      "loss": 0.4926,
      "step": 2160
    },
    {
      "epoch": 0.59,
      "grad_norm": 1.139224886894226,
      "learning_rate": 9.461263736263737e-05,
      "loss": 0.4861,
      "step": 2170
    },
    {
      "epoch": 0.6,
      "grad_norm": 0.844968318939209,
      "learning_rate": 9.458516483516484e-05,
      "loss": 0.4855,
      "step": 2180
    },
    {
      "epoch": 0.6,
      "grad_norm": 1.2388585805892944,
      "learning_rate": 9.455769230769232e-05,
      "loss": 0.4887,
      "step": 2190
    },
    {
      "epoch": 0.6,
      "grad_norm": 1.4267388582229614,
      "learning_rate": 9.453021978021978e-05,
      "loss": 0.5031,
      "step": 2200
    },
    {
      "epoch": 0.6,
      "grad_norm": 0.7797860503196716,
      "learning_rate": 9.450274725274725e-05,
      "loss": 0.5014,
      "step": 2210
    },
    {
      "epoch": 0.61,
      "grad_norm": 1.3723804950714111,
      "learning_rate": 9.447527472527473e-05,
      "loss": 0.4887,
      "step": 2220
    },
    {
      "epoch": 0.61,
      "grad_norm": 1.396022081375122,
      "learning_rate": 9.44478021978022e-05,
      "loss": 0.4869,
      "step": 2230
    },
    {
      "epoch": 0.61,
      "grad_norm": 1.3555471897125244,
      "learning_rate": 9.442032967032967e-05,
      "loss": 0.4879,
      "step": 2240
    },
    {
      "epoch": 0.61,
      "grad_norm": 0.9702646136283875,
      "learning_rate": 9.439285714285715e-05,
      "loss": 0.4894,
      "step": 2250
    },
    {
      "epoch": 0.62,
      "grad_norm": 0.7753974199295044,
      "learning_rate": 9.436538461538462e-05,
      "loss": 0.5092,
      "step": 2260
    },
    {
      "epoch": 0.62,
      "grad_norm": 1.020336389541626,
      "learning_rate": 9.433791208791209e-05,
      "loss": 0.4961,
      "step": 2270
    },
    {
      "epoch": 0.62,
      "grad_norm": 1.2176239490509033,
      "learning_rate": 9.431043956043957e-05,
      "loss": 0.4827,
      "step": 2280
    },
    {
      "epoch": 0.63,
      "grad_norm": 0.8337396383285522,
      "learning_rate": 9.428296703296704e-05,
      "loss": 0.4931,
      "step": 2290
    },
    {
      "epoch": 0.63,
      "grad_norm": 0.8999798893928528,
      "learning_rate": 9.425549450549451e-05,
      "loss": 0.4912,
      "step": 2300
    },
    {
      "epoch": 0.63,
      "grad_norm": 1.6992242336273193,
      "learning_rate": 9.422802197802199e-05,
      "loss": 0.4835,
      "step": 2310
    },
    {
      "epoch": 0.63,
      "grad_norm": 0.7739867568016052,
      "learning_rate": 9.420054945054946e-05,
      "loss": 0.4783,
      "step": 2320
    },
    {
      "epoch": 0.64,
      "grad_norm": 1.2350486516952515,
      "learning_rate": 9.417307692307692e-05,
      "loss": 0.4904,
      "step": 2330
    },
    {
      "epoch": 0.64,
      "grad_norm": 1.1423838138580322,
      "learning_rate": 9.41456043956044e-05,
      "loss": 0.4884,
      "step": 2340
    },
    {
      "epoch": 0.64,
      "grad_norm": 1.0562171936035156,
      "learning_rate": 9.411813186813187e-05,
      "loss": 0.5082,
      "step": 2350
    },
    {
      "epoch": 0.64,
      "grad_norm": 0.7696706056594849,
      "learning_rate": 9.409065934065934e-05,
      "loss": 0.4889,
      "step": 2360
    },
    {
      "epoch": 0.65,
      "grad_norm": 0.5980774760246277,
      "learning_rate": 9.406318681318682e-05,
      "loss": 0.48,
      "step": 2370
    },
    {
      "epoch": 0.65,
      "grad_norm": 1.413926124572754,
      "learning_rate": 9.403571428571429e-05,
      "loss": 0.4971,
      "step": 2380
    },
    {
      "epoch": 0.65,
      "grad_norm": 0.8020157217979431,
      "learning_rate": 9.400824175824176e-05,
      "loss": 0.4926,
      "step": 2390
    },
    {
      "epoch": 0.66,
      "grad_norm": 0.6620104312896729,
      "learning_rate": 9.398076923076924e-05,
      "loss": 0.4941,
      "step": 2400
    },
    {
      "epoch": 0.66,
      "grad_norm": 1.9766381978988647,
      "learning_rate": 9.395329670329671e-05,
      "loss": 0.4867,
      "step": 2410
    },
    {
      "epoch": 0.66,
      "grad_norm": 1.3235608339309692,
      "learning_rate": 9.392582417582418e-05,
      "loss": 0.5061,
      "step": 2420
    },
    {
      "epoch": 0.66,
      "grad_norm": 0.8266246914863586,
      "learning_rate": 9.389835164835166e-05,
      "loss": 0.4842,
      "step": 2430
    },
    {
      "epoch": 0.67,
      "grad_norm": 1.1611332893371582,
      "learning_rate": 9.387087912087913e-05,
      "loss": 0.4795,
      "step": 2440
    },
    {
      "epoch": 0.67,
      "grad_norm": 1.750346064567566,
      "learning_rate": 9.38434065934066e-05,
      "loss": 0.4859,
      "step": 2450
    },
    {
      "epoch": 0.67,
      "grad_norm": 0.7080681324005127,
      "learning_rate": 9.381593406593408e-05,
      "loss": 0.4762,
      "step": 2460
    },
    {
      "epoch": 0.67,
      "grad_norm": 1.3531988859176636,
      "learning_rate": 9.378846153846155e-05,
      "loss": 0.4921,
      "step": 2470
    },
    {
      "epoch": 0.68,
      "grad_norm": 0.8100531697273254,
      "learning_rate": 9.376098901098901e-05,
      "loss": 0.4912,
      "step": 2480
    },
    {
      "epoch": 0.68,
      "grad_norm": 1.6417909860610962,
      "learning_rate": 9.37335164835165e-05,
      "loss": 0.4868,
      "step": 2490
    },
    {
      "epoch": 0.68,
      "grad_norm": 2.7357239723205566,
      "learning_rate": 9.370604395604396e-05,
      "loss": 0.4837,
      "step": 2500
    },
    {
      "epoch": 0.69,
      "grad_norm": 0.7493315935134888,
      "learning_rate": 9.367857142857143e-05,
      "loss": 0.494,
      "step": 2510
    },
    {
      "epoch": 0.69,
      "grad_norm": 1.4594992399215698,
      "learning_rate": 9.365109890109891e-05,
      "loss": 0.4967,
      "step": 2520
    },
    {
      "epoch": 0.69,
      "grad_norm": 1.2568315267562866,
      "learning_rate": 9.362362637362638e-05,
      "loss": 0.4865,
      "step": 2530
    },
    {
      "epoch": 0.69,
      "grad_norm": 0.8711192607879639,
      "learning_rate": 9.359615384615385e-05,
      "loss": 0.4915,
      "step": 2540
    },
    {
      "epoch": 0.7,
      "grad_norm": 1.6470551490783691,
      "learning_rate": 9.356868131868133e-05,
      "loss": 0.4736,
      "step": 2550
    },
    {
      "epoch": 0.7,
      "grad_norm": 2.410200357437134,
      "learning_rate": 9.35412087912088e-05,
      "loss": 0.5078,
      "step": 2560
    },
    {
      "epoch": 0.7,
      "grad_norm": 1.0611746311187744,
      "learning_rate": 9.351373626373627e-05,
      "loss": 0.4986,
      "step": 2570
    },
    {
      "epoch": 0.7,
      "grad_norm": 0.820321798324585,
      "learning_rate": 9.348626373626375e-05,
      "loss": 0.4788,
      "step": 2580
    },
    {
      "epoch": 0.71,
      "grad_norm": 0.7887302041053772,
      "learning_rate": 9.345879120879122e-05,
      "loss": 0.4878,
      "step": 2590
    },
    {
      "epoch": 0.71,
      "grad_norm": 1.2939894199371338,
      "learning_rate": 9.343131868131869e-05,
      "loss": 0.4896,
      "step": 2600
    },
    {
      "epoch": 0.71,
      "grad_norm": 0.7468405961990356,
      "learning_rate": 9.340384615384615e-05,
      "loss": 0.4932,
      "step": 2610
    },
    {
      "epoch": 0.72,
      "grad_norm": 1.63861882686615,
      "learning_rate": 9.337637362637362e-05,
      "loss": 0.4739,
      "step": 2620
    },
    {
      "epoch": 0.72,
      "grad_norm": 1.1842321157455444,
      "learning_rate": 9.33489010989011e-05,
      "loss": 0.4845,
      "step": 2630
    },
    {
      "epoch": 0.72,
      "grad_norm": 1.0613666772842407,
      "learning_rate": 9.332142857142857e-05,
      "loss": 0.4806,
      "step": 2640
    },
    {
      "epoch": 0.72,
      "grad_norm": 0.9317752718925476,
      "learning_rate": 9.329395604395604e-05,
      "loss": 0.4803,
      "step": 2650
    },
    {
      "epoch": 0.73,
      "grad_norm": 1.378394603729248,
      "learning_rate": 9.326648351648352e-05,
      "loss": 0.4942,
      "step": 2660
    },
    {
      "epoch": 0.73,
      "grad_norm": 0.748068630695343,
      "learning_rate": 9.323901098901099e-05,
      "loss": 0.4763,
      "step": 2670
    },
    {
      "epoch": 0.73,
      "grad_norm": 1.4142990112304688,
      "learning_rate": 9.321153846153846e-05,
      "loss": 0.4913,
      "step": 2680
    },
    {
      "epoch": 0.73,
      "grad_norm": 0.6746355891227722,
      "learning_rate": 9.318406593406594e-05,
      "loss": 0.4781,
      "step": 2690
    },
    {
      "epoch": 0.74,
      "grad_norm": 1.146001935005188,
      "learning_rate": 9.315659340659341e-05,
      "loss": 0.4789,
      "step": 2700
    },
    {
      "epoch": 0.74,
      "grad_norm": 1.0291472673416138,
      "learning_rate": 9.312912087912088e-05,
      "loss": 0.4813,
      "step": 2710
    },
    {
      "epoch": 0.74,
      "grad_norm": 1.7905545234680176,
      "learning_rate": 9.310164835164836e-05,
      "loss": 0.475,
      "step": 2720
    },
    {
      "epoch": 0.75,
      "grad_norm": 0.8916068077087402,
      "learning_rate": 9.307417582417583e-05,
      "loss": 0.4891,
      "step": 2730
    },
    {
      "epoch": 0.75,
      "grad_norm": 1.0133769512176514,
      "learning_rate": 9.30467032967033e-05,
      "loss": 0.4857,
      "step": 2740
    },
    {
      "epoch": 0.75,
      "grad_norm": 1.6718567609786987,
      "learning_rate": 9.301923076923078e-05,
      "loss": 0.4915,
      "step": 2750
    },
    {
      "epoch": 0.75,
      "grad_norm": 1.2999911308288574,
      "learning_rate": 9.299175824175824e-05,
      "loss": 0.4872,
      "step": 2760
    },
    {
      "epoch": 0.76,
      "grad_norm": 1.4812780618667603,
      "learning_rate": 9.296428571428571e-05,
      "loss": 0.4886,
      "step": 2770
    },
    {
      "epoch": 0.76,
      "grad_norm": 1.3728208541870117,
      "learning_rate": 9.293681318681318e-05,
      "loss": 0.5005,
      "step": 2780
    },
    {
      "epoch": 0.76,
      "grad_norm": 1.2773633003234863,
      "learning_rate": 9.290934065934066e-05,
      "loss": 0.4796,
      "step": 2790
    },
    {
      "epoch": 0.76,
      "grad_norm": 0.8197434544563293,
      "learning_rate": 9.288186813186813e-05,
      "loss": 0.4817,
      "step": 2800
    },
    {
      "epoch": 0.77,
      "grad_norm": 0.830235481262207,
      "learning_rate": 9.28543956043956e-05,
      "loss": 0.4956,
      "step": 2810
    },
    {
      "epoch": 0.77,
      "grad_norm": 3.1286449432373047,
      "learning_rate": 9.282692307692308e-05,
      "loss": 0.4822,
      "step": 2820
    },
    {
      "epoch": 0.77,
      "grad_norm": 0.9349818825721741,
      "learning_rate": 9.279945054945055e-05,
      "loss": 0.4924,
      "step": 2830
    },
    {
      "epoch": 0.78,
      "grad_norm": 1.8471746444702148,
      "learning_rate": 9.277197802197802e-05,
      "loss": 0.4771,
      "step": 2840
    },
    {
      "epoch": 0.78,
      "grad_norm": 0.9180948734283447,
      "learning_rate": 9.27445054945055e-05,
      "loss": 0.4855,
      "step": 2850
    },
    {
      "epoch": 0.78,
      "grad_norm": 0.7180835604667664,
      "learning_rate": 9.271703296703297e-05,
      "loss": 0.4801,
      "step": 2860
    },
    {
      "epoch": 0.78,
      "grad_norm": 1.5345978736877441,
      "learning_rate": 9.268956043956043e-05,
      "loss": 0.4791,
      "step": 2870
    },
    {
      "epoch": 0.79,
      "grad_norm": 1.521747350692749,
      "learning_rate": 9.266208791208792e-05,
      "loss": 0.4793,
      "step": 2880
    },
    {
      "epoch": 0.79,
      "grad_norm": 1.2364057302474976,
      "learning_rate": 9.263461538461538e-05,
      "loss": 0.4756,
      "step": 2890
    },
    {
      "epoch": 0.79,
      "grad_norm": 0.5741211771965027,
      "learning_rate": 9.260714285714285e-05,
      "loss": 0.4849,
      "step": 2900
    },
    {
      "epoch": 0.79,
      "grad_norm": 1.7251524925231934,
      "learning_rate": 9.257967032967033e-05,
      "loss": 0.4762,
      "step": 2910
    },
    {
      "epoch": 0.8,
      "grad_norm": 2.704491138458252,
      "learning_rate": 9.25521978021978e-05,
      "loss": 0.4777,
      "step": 2920
    },
    {
      "epoch": 0.8,
      "grad_norm": 0.9178886413574219,
      "learning_rate": 9.252472527472527e-05,
      "loss": 0.5077,
      "step": 2930
    },
    {
      "epoch": 0.8,
      "grad_norm": 1.2007173299789429,
      "learning_rate": 9.249725274725275e-05,
      "loss": 0.4916,
      "step": 2940
    },
    {
      "epoch": 0.81,
      "grad_norm": 0.8556911945343018,
      "learning_rate": 9.246978021978022e-05,
      "loss": 0.477,
      "step": 2950
    },
    {
      "epoch": 0.81,
      "grad_norm": 0.7754554152488708,
      "learning_rate": 9.244230769230769e-05,
      "loss": 0.4859,
      "step": 2960
    },
    {
      "epoch": 0.81,
      "grad_norm": 2.333521842956543,
      "learning_rate": 9.241483516483517e-05,
      "loss": 0.4811,
      "step": 2970
    },
    {
      "epoch": 0.81,
      "grad_norm": 0.9892757534980774,
      "learning_rate": 9.238736263736264e-05,
      "loss": 0.4777,
      "step": 2980
    },
    {
      "epoch": 0.82,
      "grad_norm": 1.0326083898544312,
      "learning_rate": 9.23598901098901e-05,
      "loss": 0.4891,
      "step": 2990
    },
    {
      "epoch": 0.82,
      "grad_norm": 1.4555460214614868,
      "learning_rate": 9.233241758241759e-05,
      "loss": 0.4802,
      "step": 3000
    },
    {
      "epoch": 0.82,
      "grad_norm": 0.8390610814094543,
      "learning_rate": 9.230494505494506e-05,
      "loss": 0.4835,
      "step": 3010
    },
    {
      "epoch": 0.82,
      "grad_norm": 1.0490790605545044,
      "learning_rate": 9.227747252747252e-05,
      "loss": 0.4806,
      "step": 3020
    },
    {
      "epoch": 0.83,
      "grad_norm": 0.8793150186538696,
      "learning_rate": 9.225e-05,
      "loss": 0.4846,
      "step": 3030
    },
    {
      "epoch": 0.83,
      "grad_norm": 1.2842731475830078,
      "learning_rate": 9.222252747252747e-05,
      "loss": 0.4715,
      "step": 3040
    },
    {
      "epoch": 0.83,
      "grad_norm": 0.7668898701667786,
      "learning_rate": 9.219505494505494e-05,
      "loss": 0.4749,
      "step": 3050
    },
    {
      "epoch": 0.84,
      "grad_norm": 0.8144903182983398,
      "learning_rate": 9.216758241758242e-05,
      "loss": 0.4705,
      "step": 3060
    },
    {
      "epoch": 0.84,
      "grad_norm": 0.6727710962295532,
      "learning_rate": 9.214010989010989e-05,
      "loss": 0.4826,
      "step": 3070
    },
    {
      "epoch": 0.84,
      "grad_norm": 0.8702958822250366,
      "learning_rate": 9.211263736263736e-05,
      "loss": 0.4679,
      "step": 3080
    },
    {
      "epoch": 0.84,
      "grad_norm": 0.9223732352256775,
      "learning_rate": 9.208516483516484e-05,
      "loss": 0.4875,
      "step": 3090
    },
    {
      "epoch": 0.85,
      "grad_norm": 0.7086817622184753,
      "learning_rate": 9.205769230769231e-05,
      "loss": 0.4771,
      "step": 3100
    },
    {
      "epoch": 0.85,
      "grad_norm": 1.000447154045105,
      "learning_rate": 9.203021978021978e-05,
      "loss": 0.4674,
      "step": 3110
    },
    {
      "epoch": 0.85,
      "grad_norm": 1.6982853412628174,
      "learning_rate": 9.200274725274726e-05,
      "loss": 0.4853,
      "step": 3120
    },
    {
      "epoch": 0.86,
      "grad_norm": 0.8975854516029358,
      "learning_rate": 9.197527472527473e-05,
      "loss": 0.4757,
      "step": 3130
    },
    {
      "epoch": 0.86,
      "grad_norm": 1.638816237449646,
      "learning_rate": 9.19478021978022e-05,
      "loss": 0.4862,
      "step": 3140
    },
    {
      "epoch": 0.86,
      "grad_norm": 1.0290194749832153,
      "learning_rate": 9.192032967032968e-05,
      "loss": 0.4758,
      "step": 3150
    },
    {
      "epoch": 0.86,
      "grad_norm": 1.0205775499343872,
      "learning_rate": 9.189285714285715e-05,
      "loss": 0.4723,
      "step": 3160
    },
    {
      "epoch": 0.87,
      "grad_norm": 1.2193140983581543,
      "learning_rate": 9.186538461538461e-05,
      "loss": 0.4869,
      "step": 3170
    },
    {
      "epoch": 0.87,
      "grad_norm": 1.0402013063430786,
      "learning_rate": 9.18379120879121e-05,
      "loss": 0.4911,
      "step": 3180
    },
    {
      "epoch": 0.87,
      "grad_norm": 1.1915889978408813,
      "learning_rate": 9.181043956043956e-05,
      "loss": 0.4757,
      "step": 3190
    },
    {
      "epoch": 0.87,
      "grad_norm": 1.2371594905853271,
      "learning_rate": 9.178296703296703e-05,
      "loss": 0.4811,
      "step": 3200
    },
    {
      "epoch": 0.88,
      "grad_norm": 0.9872990846633911,
      "learning_rate": 9.175549450549451e-05,
      "loss": 0.4784,
      "step": 3210
    },
    {
      "epoch": 0.88,
      "grad_norm": 0.9502705335617065,
      "learning_rate": 9.172802197802198e-05,
      "loss": 0.4716,
      "step": 3220
    },
    {
      "epoch": 0.88,
      "grad_norm": 1.9108206033706665,
      "learning_rate": 9.170054945054945e-05,
      "loss": 0.4828,
      "step": 3230
    },
    {
      "epoch": 0.89,
      "grad_norm": 0.5662224888801575,
      "learning_rate": 9.167307692307693e-05,
      "loss": 0.485,
      "step": 3240
    },
    {
      "epoch": 0.89,
      "grad_norm": 0.9142247438430786,
      "learning_rate": 9.16456043956044e-05,
      "loss": 0.4851,
      "step": 3250
    },
    {
      "epoch": 0.89,
      "grad_norm": 1.817579984664917,
      "learning_rate": 9.161813186813187e-05,
      "loss": 0.4807,
      "step": 3260
    },
    {
      "epoch": 0.89,
      "grad_norm": 1.3611973524093628,
      "learning_rate": 9.159065934065935e-05,
      "loss": 0.4815,
      "step": 3270
    },
    {
      "epoch": 0.9,
      "grad_norm": 1.4995872974395752,
      "learning_rate": 9.156318681318682e-05,
      "loss": 0.4779,
      "step": 3280
    },
    {
      "epoch": 0.9,
      "grad_norm": 0.8315975069999695,
      "learning_rate": 9.153571428571429e-05,
      "loss": 0.4732,
      "step": 3290
    },
    {
      "epoch": 0.9,
      "grad_norm": 1.285447359085083,
      "learning_rate": 9.150824175824177e-05,
      "loss": 0.4769,
      "step": 3300
    },
    {
      "epoch": 0.9,
      "grad_norm": 1.265435814857483,
      "learning_rate": 9.148076923076924e-05,
      "loss": 0.4884,
      "step": 3310
    },
    {
      "epoch": 0.91,
      "grad_norm": 0.7437517046928406,
      "learning_rate": 9.14532967032967e-05,
      "loss": 0.4807,
      "step": 3320
    },
    {
      "epoch": 0.91,
      "grad_norm": 0.8307008743286133,
      "learning_rate": 9.142582417582419e-05,
      "loss": 0.4723,
      "step": 3330
    },
    {
      "epoch": 0.91,
      "grad_norm": 1.2148619890213013,
      "learning_rate": 9.139835164835165e-05,
      "loss": 0.461,
      "step": 3340
    },
    {
      "epoch": 0.92,
      "grad_norm": 0.5682019591331482,
      "learning_rate": 9.137087912087912e-05,
      "loss": 0.4673,
      "step": 3350
    },
    {
      "epoch": 0.92,
      "grad_norm": 0.582708477973938,
      "learning_rate": 9.13434065934066e-05,
      "loss": 0.4657,
      "step": 3360
    },
    {
      "epoch": 0.92,
      "grad_norm": 1.0724083185195923,
      "learning_rate": 9.131593406593407e-05,
      "loss": 0.4634,
      "step": 3370
    },
    {
      "epoch": 0.92,
      "grad_norm": 1.511168360710144,
      "learning_rate": 9.128846153846154e-05,
      "loss": 0.4888,
      "step": 3380
    },
    {
      "epoch": 0.93,
      "grad_norm": 1.5445438623428345,
      "learning_rate": 9.126098901098902e-05,
      "loss": 0.4757,
      "step": 3390
    },
    {
      "epoch": 0.93,
      "grad_norm": 0.9609503746032715,
      "learning_rate": 9.123351648351649e-05,
      "loss": 0.4813,
      "step": 3400
    },
    {
      "epoch": 0.93,
      "grad_norm": 2.094918966293335,
      "learning_rate": 9.120604395604396e-05,
      "loss": 0.4963,
      "step": 3410
    },
    {
      "epoch": 0.93,
      "grad_norm": 1.6844788789749146,
      "learning_rate": 9.117857142857144e-05,
      "loss": 0.4902,
      "step": 3420
    },
    {
      "epoch": 0.94,
      "grad_norm": 1.2747464179992676,
      "learning_rate": 9.115109890109891e-05,
      "loss": 0.4884,
      "step": 3430
    },
    {
      "epoch": 0.94,
      "grad_norm": 1.4035594463348389,
      "learning_rate": 9.112362637362638e-05,
      "loss": 0.482,
      "step": 3440
    },
    {
      "epoch": 0.94,
      "grad_norm": 0.6047748923301697,
      "learning_rate": 9.109615384615386e-05,
      "loss": 0.4846,
      "step": 3450
    },
    {
      "epoch": 0.95,
      "grad_norm": 1.6141401529312134,
      "learning_rate": 9.106868131868133e-05,
      "loss": 0.4744,
      "step": 3460
    },
    {
      "epoch": 0.95,
      "grad_norm": 0.8329553008079529,
      "learning_rate": 9.10412087912088e-05,
      "loss": 0.4738,
      "step": 3470
    },
    {
      "epoch": 0.95,
      "grad_norm": 0.7500654458999634,
      "learning_rate": 9.101373626373628e-05,
      "loss": 0.4763,
      "step": 3480
    },
    {
      "epoch": 0.95,
      "grad_norm": 0.8617835640907288,
      "learning_rate": 9.098626373626374e-05,
      "loss": 0.4738,
      "step": 3490
    },
    {
      "epoch": 0.96,
      "grad_norm": 1.0339410305023193,
      "learning_rate": 9.095879120879121e-05,
      "loss": 0.4726,
      "step": 3500
    },
    {
      "epoch": 0.96,
      "grad_norm": 1.13742995262146,
      "learning_rate": 9.09313186813187e-05,
      "loss": 0.4827,
      "step": 3510
    },
    {
      "epoch": 0.96,
      "grad_norm": 1.611344575881958,
      "learning_rate": 9.090384615384616e-05,
      "loss": 0.4844,
      "step": 3520
    },
    {
      "epoch": 0.96,
      "grad_norm": 1.056773066520691,
      "learning_rate": 9.087637362637363e-05,
      "loss": 0.4799,
      "step": 3530
    },
    {
      "epoch": 0.97,
      "grad_norm": 1.2290446758270264,
      "learning_rate": 9.084890109890111e-05,
      "loss": 0.4764,
      "step": 3540
    },
    {
      "epoch": 0.97,
      "grad_norm": 0.7461050748825073,
      "learning_rate": 9.082142857142858e-05,
      "loss": 0.4664,
      "step": 3550
    },
    {
      "epoch": 0.97,
      "grad_norm": 0.5372506380081177,
      "learning_rate": 9.079395604395605e-05,
      "loss": 0.4795,
      "step": 3560
    },
    {
      "epoch": 0.98,
      "grad_norm": 1.658233642578125,
      "learning_rate": 9.076648351648353e-05,
      "loss": 0.4655,
      "step": 3570
    },
    {
      "epoch": 0.98,
      "grad_norm": 1.7918599843978882,
      "learning_rate": 9.0739010989011e-05,
      "loss": 0.4777,
      "step": 3580
    },
    {
      "epoch": 0.98,
      "grad_norm": 0.9504883289337158,
      "learning_rate": 9.071153846153847e-05,
      "loss": 0.4791,
      "step": 3590
    },
    {
      "epoch": 0.98,
      "grad_norm": 1.5631986856460571,
      "learning_rate": 9.068406593406595e-05,
      "loss": 0.4807,
      "step": 3600
    },
    {
      "epoch": 0.99,
      "grad_norm": 0.9991289377212524,
      "learning_rate": 9.065659340659342e-05,
      "loss": 0.4679,
      "step": 3610
    },
    {
      "epoch": 0.99,
      "grad_norm": 1.130434274673462,
      "learning_rate": 9.062912087912088e-05,
      "loss": 0.4781,
      "step": 3620
    },
    {
      "epoch": 0.99,
      "grad_norm": 0.639451265335083,
      "learning_rate": 9.060164835164837e-05,
      "loss": 0.4815,
      "step": 3630
    },
    {
      "epoch": 0.99,
      "grad_norm": 1.1776291131973267,
      "learning_rate": 9.057417582417583e-05,
      "loss": 0.4983,
      "step": 3640
    },
    {
      "epoch": 1.0,
      "grad_norm": 1.4073742628097534,
      "learning_rate": 9.05467032967033e-05,
      "loss": 0.4711,
      "step": 3650
    },
    {
      "epoch": 1.0,
      "grad_norm": 1.308344841003418,
      "learning_rate": 9.051923076923078e-05,
      "loss": 0.4847,
      "step": 3660
    },
    {
      "epoch": 1.0,
      "eval_loss": 0.44810301065444946,
      "eval_runtime": 1256.2985,
      "eval_samples_per_second": 232.516,
      "eval_steps_per_second": 3.634,
      "step": 3660
    },
    {
      "epoch": 1.0,
      "grad_norm": 0.6774665117263794,
      "learning_rate": 9.049175824175825e-05,
      "loss": 0.4791,
      "step": 3670
    },
    {
      "epoch": 1.01,
      "grad_norm": 0.6826792359352112,
      "learning_rate": 9.046428571428572e-05,
| "loss": 0.4775, | |
| "step": 3680 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "grad_norm": 0.7845907211303711, | |
| "learning_rate": 9.04368131868132e-05, | |
| "loss": 0.4765, | |
| "step": 3690 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "grad_norm": 0.9099870324134827, | |
| "learning_rate": 9.040934065934067e-05, | |
| "loss": 0.4689, | |
| "step": 3700 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "grad_norm": 1.374972939491272, | |
| "learning_rate": 9.038186813186814e-05, | |
| "loss": 0.469, | |
| "step": 3710 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "grad_norm": 0.8698770403862, | |
| "learning_rate": 9.035439560439562e-05, | |
| "loss": 0.4717, | |
| "step": 3720 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "grad_norm": 0.8832442164421082, | |
| "learning_rate": 9.032692307692309e-05, | |
| "loss": 0.4653, | |
| "step": 3730 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "grad_norm": 0.722259521484375, | |
| "learning_rate": 9.029945054945054e-05, | |
| "loss": 0.4818, | |
| "step": 3740 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "grad_norm": 1.1362375020980835, | |
| "learning_rate": 9.027197802197802e-05, | |
| "loss": 0.4674, | |
| "step": 3750 | |
| }, | |
| { | |
| "epoch": 1.03, | |
| "grad_norm": 1.0420037508010864, | |
| "learning_rate": 9.024450549450549e-05, | |
| "loss": 0.4806, | |
| "step": 3760 | |
| }, | |
| { | |
| "epoch": 1.03, | |
| "grad_norm": 0.9857313632965088, | |
| "learning_rate": 9.021703296703296e-05, | |
| "loss": 0.478, | |
| "step": 3770 | |
| }, | |
| { | |
| "epoch": 1.03, | |
| "grad_norm": 1.1428401470184326, | |
| "learning_rate": 9.018956043956044e-05, | |
| "loss": 0.4742, | |
| "step": 3780 | |
| }, | |
| { | |
| "epoch": 1.04, | |
| "grad_norm": 1.5837888717651367, | |
| "learning_rate": 9.016208791208791e-05, | |
| "loss": 0.4841, | |
| "step": 3790 | |
| }, | |
| { | |
| "epoch": 1.04, | |
| "grad_norm": 1.9123620986938477, | |
| "learning_rate": 9.013461538461538e-05, | |
| "loss": 0.4707, | |
| "step": 3800 | |
| }, | |
| { | |
| "epoch": 1.04, | |
| "grad_norm": 0.9019024968147278, | |
| "learning_rate": 9.010714285714286e-05, | |
| "loss": 0.4731, | |
| "step": 3810 | |
| }, | |
| { | |
| "epoch": 1.04, | |
| "grad_norm": 0.9601519107818604, | |
| "learning_rate": 9.007967032967033e-05, | |
| "loss": 0.4644, | |
| "step": 3820 | |
| }, | |
| { | |
| "epoch": 1.05, | |
| "grad_norm": 0.4769773483276367, | |
| "learning_rate": 9.00521978021978e-05, | |
| "loss": 0.4755, | |
| "step": 3830 | |
| }, | |
| { | |
| "epoch": 1.05, | |
| "grad_norm": 1.2578529119491577, | |
| "learning_rate": 9.002472527472528e-05, | |
| "loss": 0.4703, | |
| "step": 3840 | |
| }, | |
| { | |
| "epoch": 1.05, | |
| "grad_norm": 1.1451194286346436, | |
| "learning_rate": 8.999725274725275e-05, | |
| "loss": 0.4748, | |
| "step": 3850 | |
| }, | |
| { | |
| "epoch": 1.05, | |
| "grad_norm": 0.4718180000782013, | |
| "learning_rate": 8.996978021978021e-05, | |
| "loss": 0.4673, | |
| "step": 3860 | |
| }, | |
| { | |
| "epoch": 1.06, | |
| "grad_norm": 1.0323463678359985, | |
| "learning_rate": 8.99423076923077e-05, | |
| "loss": 0.4611, | |
| "step": 3870 | |
| }, | |
| { | |
| "epoch": 1.06, | |
| "grad_norm": 0.905623197555542, | |
| "learning_rate": 8.991483516483516e-05, | |
| "loss": 0.4651, | |
| "step": 3880 | |
| }, | |
| { | |
| "epoch": 1.06, | |
| "grad_norm": 0.6803371906280518, | |
| "learning_rate": 8.988736263736263e-05, | |
| "loss": 0.4693, | |
| "step": 3890 | |
| }, | |
| { | |
| "epoch": 1.07, | |
| "grad_norm": 0.7653062343597412, | |
| "learning_rate": 8.985989010989011e-05, | |
| "loss": 0.4676, | |
| "step": 3900 | |
| }, | |
| { | |
| "epoch": 1.07, | |
| "grad_norm": 0.6493901610374451, | |
| "learning_rate": 8.983241758241758e-05, | |
| "loss": 0.4796, | |
| "step": 3910 | |
| }, | |
| { | |
| "epoch": 1.07, | |
| "grad_norm": 1.3194290399551392, | |
| "learning_rate": 8.980494505494505e-05, | |
| "loss": 0.4709, | |
| "step": 3920 | |
| }, | |
| { | |
| "epoch": 1.07, | |
| "grad_norm": 1.3907560110092163, | |
| "learning_rate": 8.977747252747253e-05, | |
| "loss": 0.4791, | |
| "step": 3930 | |
| }, | |
| { | |
| "epoch": 1.08, | |
| "grad_norm": 0.9326291084289551, | |
| "learning_rate": 8.975e-05, | |
| "loss": 0.4733, | |
| "step": 3940 | |
| }, | |
| { | |
| "epoch": 1.08, | |
| "grad_norm": 0.6711992025375366, | |
| "learning_rate": 8.972252747252747e-05, | |
| "loss": 0.4607, | |
| "step": 3950 | |
| }, | |
| { | |
| "epoch": 1.08, | |
| "grad_norm": 1.0052268505096436, | |
| "learning_rate": 8.969505494505495e-05, | |
| "loss": 0.4565, | |
| "step": 3960 | |
| }, | |
| { | |
| "epoch": 1.08, | |
| "grad_norm": 1.893130898475647, | |
| "learning_rate": 8.966758241758242e-05, | |
| "loss": 0.4678, | |
| "step": 3970 | |
| }, | |
| { | |
| "epoch": 1.09, | |
| "grad_norm": 1.019831657409668, | |
| "learning_rate": 8.964010989010989e-05, | |
| "loss": 0.4929, | |
| "step": 3980 | |
| }, | |
| { | |
| "epoch": 1.09, | |
| "grad_norm": 1.096341848373413, | |
| "learning_rate": 8.961263736263737e-05, | |
| "loss": 0.4812, | |
| "step": 3990 | |
| }, | |
| { | |
| "epoch": 1.09, | |
| "grad_norm": 1.3617221117019653, | |
| "learning_rate": 8.958516483516484e-05, | |
| "loss": 0.479, | |
| "step": 4000 | |
| }, | |
| { | |
| "epoch": 1.1, | |
| "grad_norm": 0.6594845056533813, | |
| "learning_rate": 8.95576923076923e-05, | |
| "loss": 0.4784, | |
| "step": 4010 | |
| }, | |
| { | |
| "epoch": 1.1, | |
| "grad_norm": 0.7108995914459229, | |
| "learning_rate": 8.953021978021979e-05, | |
| "loss": 0.4624, | |
| "step": 4020 | |
| }, | |
| { | |
| "epoch": 1.1, | |
| "grad_norm": 0.4914230704307556, | |
| "learning_rate": 8.950274725274725e-05, | |
| "loss": 0.4573, | |
| "step": 4030 | |
| }, | |
| { | |
| "epoch": 1.1, | |
| "grad_norm": 0.738325834274292, | |
| "learning_rate": 8.947527472527472e-05, | |
| "loss": 0.4598, | |
| "step": 4040 | |
| }, | |
| { | |
| "epoch": 1.11, | |
| "grad_norm": 1.5622313022613525, | |
| "learning_rate": 8.94478021978022e-05, | |
| "loss": 0.4769, | |
| "step": 4050 | |
| }, | |
| { | |
| "epoch": 1.11, | |
| "grad_norm": 0.84259033203125, | |
| "learning_rate": 8.942032967032967e-05, | |
| "loss": 0.4587, | |
| "step": 4060 | |
| }, | |
| { | |
| "epoch": 1.11, | |
| "grad_norm": 1.1515220403671265, | |
| "learning_rate": 8.939285714285714e-05, | |
| "loss": 0.4698, | |
| "step": 4070 | |
| }, | |
| { | |
| "epoch": 1.11, | |
| "grad_norm": 1.2757478952407837, | |
| "learning_rate": 8.936538461538462e-05, | |
| "loss": 0.4662, | |
| "step": 4080 | |
| }, | |
| { | |
| "epoch": 1.12, | |
| "grad_norm": 1.413041591644287, | |
| "learning_rate": 8.933791208791209e-05, | |
| "loss": 0.4739, | |
| "step": 4090 | |
| }, | |
| { | |
| "epoch": 1.12, | |
| "grad_norm": 1.3950985670089722, | |
| "learning_rate": 8.931043956043956e-05, | |
| "loss": 0.4652, | |
| "step": 4100 | |
| }, | |
| { | |
| "epoch": 1.12, | |
| "grad_norm": 1.1247773170471191, | |
| "learning_rate": 8.928296703296704e-05, | |
| "loss": 0.4704, | |
| "step": 4110 | |
| }, | |
| { | |
| "epoch": 1.13, | |
| "grad_norm": 0.7197868824005127, | |
| "learning_rate": 8.925549450549451e-05, | |
| "loss": 0.466, | |
| "step": 4120 | |
| }, | |
| { | |
| "epoch": 1.13, | |
| "grad_norm": 1.606062889099121, | |
| "learning_rate": 8.922802197802198e-05, | |
| "loss": 0.4697, | |
| "step": 4130 | |
| }, | |
| { | |
| "epoch": 1.13, | |
| "grad_norm": 0.8834720849990845, | |
| "learning_rate": 8.920054945054946e-05, | |
| "loss": 0.4787, | |
| "step": 4140 | |
| }, | |
| { | |
| "epoch": 1.13, | |
| "grad_norm": 0.6989302635192871, | |
| "learning_rate": 8.917307692307693e-05, | |
| "loss": 0.4553, | |
| "step": 4150 | |
| }, | |
| { | |
| "epoch": 1.14, | |
| "grad_norm": 1.1178115606307983, | |
| "learning_rate": 8.91456043956044e-05, | |
| "loss": 0.4679, | |
| "step": 4160 | |
| }, | |
| { | |
| "epoch": 1.14, | |
| "grad_norm": 0.8493313789367676, | |
| "learning_rate": 8.911813186813188e-05, | |
| "loss": 0.4915, | |
| "step": 4170 | |
| }, | |
| { | |
| "epoch": 1.14, | |
| "grad_norm": 0.6862457990646362, | |
| "learning_rate": 8.909065934065934e-05, | |
| "loss": 0.4667, | |
| "step": 4180 | |
| }, | |
| { | |
| "epoch": 1.14, | |
| "grad_norm": 0.5265287160873413, | |
| "learning_rate": 8.906318681318681e-05, | |
| "loss": 0.4675, | |
| "step": 4190 | |
| }, | |
| { | |
| "epoch": 1.15, | |
| "grad_norm": 1.6895486116409302, | |
| "learning_rate": 8.903571428571429e-05, | |
| "loss": 0.4694, | |
| "step": 4200 | |
| }, | |
| { | |
| "epoch": 1.15, | |
| "grad_norm": 0.9672292470932007, | |
| "learning_rate": 8.900824175824176e-05, | |
| "loss": 0.4788, | |
| "step": 4210 | |
| }, | |
| { | |
| "epoch": 1.15, | |
| "grad_norm": 0.9348291158676147, | |
| "learning_rate": 8.898076923076923e-05, | |
| "loss": 0.461, | |
| "step": 4220 | |
| }, | |
| { | |
| "epoch": 1.16, | |
| "grad_norm": 0.5047799348831177, | |
| "learning_rate": 8.895329670329671e-05, | |
| "loss": 0.467, | |
| "step": 4230 | |
| }, | |
| { | |
| "epoch": 1.16, | |
| "grad_norm": 1.6985957622528076, | |
| "learning_rate": 8.892582417582418e-05, | |
| "loss": 0.4581, | |
| "step": 4240 | |
| }, | |
| { | |
| "epoch": 1.16, | |
| "grad_norm": 0.8573766946792603, | |
| "learning_rate": 8.889835164835165e-05, | |
| "loss": 0.4624, | |
| "step": 4250 | |
| }, | |
| { | |
| "epoch": 1.16, | |
| "grad_norm": 0.6403746008872986, | |
| "learning_rate": 8.887087912087913e-05, | |
| "loss": 0.4574, | |
| "step": 4260 | |
| }, | |
| { | |
| "epoch": 1.17, | |
| "grad_norm": 0.9546878933906555, | |
| "learning_rate": 8.88434065934066e-05, | |
| "loss": 0.4803, | |
| "step": 4270 | |
| }, | |
| { | |
| "epoch": 1.17, | |
| "grad_norm": 0.6992652416229248, | |
| "learning_rate": 8.881593406593407e-05, | |
| "loss": 0.4696, | |
| "step": 4280 | |
| }, | |
| { | |
| "epoch": 1.17, | |
| "grad_norm": 1.035543441772461, | |
| "learning_rate": 8.878846153846155e-05, | |
| "loss": 0.4787, | |
| "step": 4290 | |
| }, | |
| { | |
| "epoch": 1.17, | |
| "grad_norm": 1.5563474893569946, | |
| "learning_rate": 8.876098901098902e-05, | |
| "loss": 0.4708, | |
| "step": 4300 | |
| }, | |
| { | |
| "epoch": 1.18, | |
| "grad_norm": 1.7203930616378784, | |
| "learning_rate": 8.873351648351648e-05, | |
| "loss": 0.4725, | |
| "step": 4310 | |
| }, | |
| { | |
| "epoch": 1.18, | |
| "grad_norm": 1.4295490980148315, | |
| "learning_rate": 8.870604395604396e-05, | |
| "loss": 0.464, | |
| "step": 4320 | |
| }, | |
| { | |
| "epoch": 1.18, | |
| "grad_norm": 1.1832698583602905, | |
| "learning_rate": 8.867857142857143e-05, | |
| "loss": 0.475, | |
| "step": 4330 | |
| }, | |
| { | |
| "epoch": 1.19, | |
| "grad_norm": 0.9679028391838074, | |
| "learning_rate": 8.86510989010989e-05, | |
| "loss": 0.4656, | |
| "step": 4340 | |
| }, | |
| { | |
| "epoch": 1.19, | |
| "grad_norm": 0.9646287560462952, | |
| "learning_rate": 8.862362637362638e-05, | |
| "loss": 0.4707, | |
| "step": 4350 | |
| }, | |
| { | |
| "epoch": 1.19, | |
| "grad_norm": 1.2284992933273315, | |
| "learning_rate": 8.859615384615385e-05, | |
| "loss": 0.4812, | |
| "step": 4360 | |
| }, | |
| { | |
| "epoch": 1.19, | |
| "grad_norm": 0.7886459231376648, | |
| "learning_rate": 8.856868131868132e-05, | |
| "loss": 0.4641, | |
| "step": 4370 | |
| }, | |
| { | |
| "epoch": 1.2, | |
| "grad_norm": 0.8464663028717041, | |
| "learning_rate": 8.85412087912088e-05, | |
| "loss": 0.4685, | |
| "step": 4380 | |
| }, | |
| { | |
| "epoch": 1.2, | |
| "grad_norm": 1.0549769401550293, | |
| "learning_rate": 8.851373626373627e-05, | |
| "loss": 0.4573, | |
| "step": 4390 | |
| }, | |
| { | |
| "epoch": 1.2, | |
| "grad_norm": 1.3452584743499756, | |
| "learning_rate": 8.848626373626374e-05, | |
| "loss": 0.4823, | |
| "step": 4400 | |
| }, | |
| { | |
| "epoch": 1.2, | |
| "grad_norm": 1.5250259637832642, | |
| "learning_rate": 8.845879120879122e-05, | |
| "loss": 0.4762, | |
| "step": 4410 | |
| }, | |
| { | |
| "epoch": 1.21, | |
| "grad_norm": 0.7843578457832336, | |
| "learning_rate": 8.843131868131869e-05, | |
| "loss": 0.4714, | |
| "step": 4420 | |
| }, | |
| { | |
| "epoch": 1.21, | |
| "grad_norm": 1.0666967630386353, | |
| "learning_rate": 8.840384615384616e-05, | |
| "loss": 0.4743, | |
| "step": 4430 | |
| }, | |
| { | |
| "epoch": 1.21, | |
| "grad_norm": 1.4348440170288086, | |
| "learning_rate": 8.837637362637364e-05, | |
| "loss": 0.4879, | |
| "step": 4440 | |
| }, | |
| { | |
| "epoch": 1.22, | |
| "grad_norm": 1.036281704902649, | |
| "learning_rate": 8.83489010989011e-05, | |
| "loss": 0.469, | |
| "step": 4450 | |
| }, | |
| { | |
| "epoch": 1.22, | |
| "grad_norm": 0.7014042139053345, | |
| "learning_rate": 8.832142857142857e-05, | |
| "loss": 0.4716, | |
| "step": 4460 | |
| }, | |
| { | |
| "epoch": 1.22, | |
| "grad_norm": 1.593533992767334, | |
| "learning_rate": 8.829395604395605e-05, | |
| "loss": 0.4745, | |
| "step": 4470 | |
| }, | |
| { | |
| "epoch": 1.22, | |
| "grad_norm": 0.4499157965183258, | |
| "learning_rate": 8.826648351648352e-05, | |
| "loss": 0.4764, | |
| "step": 4480 | |
| }, | |
| { | |
| "epoch": 1.23, | |
| "grad_norm": 0.7330986857414246, | |
| "learning_rate": 8.823901098901099e-05, | |
| "loss": 0.4733, | |
| "step": 4490 | |
| }, | |
| { | |
| "epoch": 1.23, | |
| "grad_norm": 0.7915944457054138, | |
| "learning_rate": 8.821153846153847e-05, | |
| "loss": 0.4605, | |
| "step": 4500 | |
| }, | |
| { | |
| "epoch": 1.23, | |
| "grad_norm": 0.9147601127624512, | |
| "learning_rate": 8.818406593406594e-05, | |
| "loss": 0.4729, | |
| "step": 4510 | |
| }, | |
| { | |
| "epoch": 1.23, | |
| "grad_norm": 1.1145246028900146, | |
| "learning_rate": 8.815659340659341e-05, | |
| "loss": 0.4757, | |
| "step": 4520 | |
| }, | |
| { | |
| "epoch": 1.24, | |
| "grad_norm": 1.0525473356246948, | |
| "learning_rate": 8.812912087912089e-05, | |
| "loss": 0.4585, | |
| "step": 4530 | |
| }, | |
| { | |
| "epoch": 1.24, | |
| "grad_norm": 0.7515787482261658, | |
| "learning_rate": 8.810164835164836e-05, | |
| "loss": 0.4557, | |
| "step": 4540 | |
| }, | |
| { | |
| "epoch": 1.24, | |
| "grad_norm": 0.9737600088119507, | |
| "learning_rate": 8.807417582417583e-05, | |
| "loss": 0.4647, | |
| "step": 4550 | |
| }, | |
| { | |
| "epoch": 1.25, | |
| "grad_norm": 1.1443486213684082, | |
| "learning_rate": 8.804670329670331e-05, | |
| "loss": 0.4685, | |
| "step": 4560 | |
| }, | |
| { | |
| "epoch": 1.25, | |
| "grad_norm": 1.4745419025421143, | |
| "learning_rate": 8.801923076923078e-05, | |
| "loss": 0.4736, | |
| "step": 4570 | |
| }, | |
| { | |
| "epoch": 1.25, | |
| "grad_norm": 1.1491049528121948, | |
| "learning_rate": 8.799175824175824e-05, | |
| "loss": 0.4604, | |
| "step": 4580 | |
| }, | |
| { | |
| "epoch": 1.25, | |
| "grad_norm": 0.742324948310852, | |
| "learning_rate": 8.796428571428573e-05, | |
| "loss": 0.457, | |
| "step": 4590 | |
| }, | |
| { | |
| "epoch": 1.26, | |
| "grad_norm": 1.1914622783660889, | |
| "learning_rate": 8.79368131868132e-05, | |
| "loss": 0.4638, | |
| "step": 4600 | |
| }, | |
| { | |
| "epoch": 1.26, | |
| "grad_norm": 0.6555135846138, | |
| "learning_rate": 8.790934065934066e-05, | |
| "loss": 0.4806, | |
| "step": 4610 | |
| }, | |
| { | |
| "epoch": 1.26, | |
| "grad_norm": 0.8627590537071228, | |
| "learning_rate": 8.788186813186814e-05, | |
| "loss": 0.4803, | |
| "step": 4620 | |
| }, | |
| { | |
| "epoch": 1.26, | |
| "grad_norm": 1.0868836641311646, | |
| "learning_rate": 8.785439560439561e-05, | |
| "loss": 0.4598, | |
| "step": 4630 | |
| }, | |
| { | |
| "epoch": 1.27, | |
| "grad_norm": 1.291483998298645, | |
| "learning_rate": 8.782692307692308e-05, | |
| "loss": 0.4707, | |
| "step": 4640 | |
| }, | |
| { | |
| "epoch": 1.27, | |
| "grad_norm": 0.9766128659248352, | |
| "learning_rate": 8.779945054945056e-05, | |
| "loss": 0.4707, | |
| "step": 4650 | |
| }, | |
| { | |
| "epoch": 1.27, | |
| "grad_norm": 1.0301162004470825, | |
| "learning_rate": 8.777197802197803e-05, | |
| "loss": 0.4821, | |
| "step": 4660 | |
| }, | |
| { | |
| "epoch": 1.28, | |
| "grad_norm": 1.0474865436553955, | |
| "learning_rate": 8.77445054945055e-05, | |
| "loss": 0.4603, | |
| "step": 4670 | |
| }, | |
| { | |
| "epoch": 1.28, | |
| "grad_norm": 1.1218842267990112, | |
| "learning_rate": 8.771703296703298e-05, | |
| "loss": 0.4827, | |
| "step": 4680 | |
| }, | |
| { | |
| "epoch": 1.28, | |
| "grad_norm": 1.0587611198425293, | |
| "learning_rate": 8.768956043956045e-05, | |
| "loss": 0.4746, | |
| "step": 4690 | |
| }, | |
| { | |
| "epoch": 1.28, | |
| "grad_norm": 0.8249474763870239, | |
| "learning_rate": 8.766208791208792e-05, | |
| "loss": 0.4648, | |
| "step": 4700 | |
| }, | |
| { | |
| "epoch": 1.29, | |
| "grad_norm": 0.6225512027740479, | |
| "learning_rate": 8.76346153846154e-05, | |
| "loss": 0.4582, | |
| "step": 4710 | |
| }, | |
| { | |
| "epoch": 1.29, | |
| "grad_norm": 1.2126622200012207, | |
| "learning_rate": 8.760714285714287e-05, | |
| "loss": 0.4617, | |
| "step": 4720 | |
| }, | |
| { | |
| "epoch": 1.29, | |
| "grad_norm": 1.6850072145462036, | |
| "learning_rate": 8.757967032967033e-05, | |
| "loss": 0.4793, | |
| "step": 4730 | |
| }, | |
| { | |
| "epoch": 1.29, | |
| "grad_norm": 0.5053130984306335, | |
| "learning_rate": 8.755219780219782e-05, | |
| "loss": 0.4648, | |
| "step": 4740 | |
| }, | |
| { | |
| "epoch": 1.3, | |
| "grad_norm": 0.4924371838569641, | |
| "learning_rate": 8.752472527472528e-05, | |
| "loss": 0.4694, | |
| "step": 4750 | |
| }, | |
| { | |
| "epoch": 1.3, | |
| "grad_norm": 0.7182402014732361, | |
| "learning_rate": 8.749725274725275e-05, | |
| "loss": 0.4611, | |
| "step": 4760 | |
| }, | |
| { | |
| "epoch": 1.3, | |
| "grad_norm": 0.5569953322410583, | |
| "learning_rate": 8.746978021978023e-05, | |
| "loss": 0.4632, | |
| "step": 4770 | |
| }, | |
| { | |
| "epoch": 1.31, | |
| "grad_norm": 1.0777251720428467, | |
| "learning_rate": 8.74423076923077e-05, | |
| "loss": 0.4735, | |
| "step": 4780 | |
| }, | |
| { | |
| "epoch": 1.31, | |
| "grad_norm": 0.6310242414474487, | |
| "learning_rate": 8.741483516483517e-05, | |
| "loss": 0.4567, | |
| "step": 4790 | |
| }, | |
| { | |
| "epoch": 1.31, | |
| "grad_norm": 0.94218510389328, | |
| "learning_rate": 8.738736263736265e-05, | |
| "loss": 0.4626, | |
| "step": 4800 | |
| }, | |
| { | |
| "epoch": 1.31, | |
| "grad_norm": 0.4554530084133148, | |
| "learning_rate": 8.735989010989012e-05, | |
| "loss": 0.4681, | |
| "step": 4810 | |
| }, | |
| { | |
| "epoch": 1.32, | |
| "grad_norm": 1.2381364107131958, | |
| "learning_rate": 8.733241758241759e-05, | |
| "loss": 0.467, | |
| "step": 4820 | |
| }, | |
| { | |
| "epoch": 1.32, | |
| "grad_norm": 1.1469919681549072, | |
| "learning_rate": 8.730494505494507e-05, | |
| "loss": 0.4643, | |
| "step": 4830 | |
| }, | |
| { | |
| "epoch": 1.32, | |
| "grad_norm": 0.7159097194671631, | |
| "learning_rate": 8.727747252747254e-05, | |
| "loss": 0.4731, | |
| "step": 4840 | |
| }, | |
| { | |
| "epoch": 1.32, | |
| "grad_norm": 0.8695803880691528, | |
| "learning_rate": 8.725e-05, | |
| "loss": 0.4646, | |
| "step": 4850 | |
| }, | |
| { | |
| "epoch": 1.33, | |
| "grad_norm": 0.8535722494125366, | |
| "learning_rate": 8.722252747252749e-05, | |
| "loss": 0.4612, | |
| "step": 4860 | |
| }, | |
| { | |
| "epoch": 1.33, | |
| "grad_norm": 1.231641173362732, | |
| "learning_rate": 8.719505494505496e-05, | |
| "loss": 0.48, | |
| "step": 4870 | |
| }, | |
| { | |
| "epoch": 1.33, | |
| "grad_norm": 1.0003236532211304, | |
| "learning_rate": 8.716758241758241e-05, | |
| "loss": 0.4741, | |
| "step": 4880 | |
| }, | |
| { | |
| "epoch": 1.34, | |
| "grad_norm": 1.8520704507827759, | |
| "learning_rate": 8.714010989010989e-05, | |
| "loss": 0.4693, | |
| "step": 4890 | |
| }, | |
| { | |
| "epoch": 1.34, | |
| "grad_norm": 1.0661410093307495, | |
| "learning_rate": 8.711263736263736e-05, | |
| "loss": 0.4848, | |
| "step": 4900 | |
| }, | |
| { | |
| "epoch": 1.34, | |
| "grad_norm": 0.8280912637710571, | |
| "learning_rate": 8.708516483516483e-05, | |
| "loss": 0.4703, | |
| "step": 4910 | |
| }, | |
| { | |
| "epoch": 1.34, | |
| "grad_norm": 1.169047474861145, | |
| "learning_rate": 8.705769230769231e-05, | |
| "loss": 0.4777, | |
| "step": 4920 | |
| }, | |
| { | |
| "epoch": 1.35, | |
| "grad_norm": 0.7568297982215881, | |
| "learning_rate": 8.703021978021978e-05, | |
| "loss": 0.4752, | |
| "step": 4930 | |
| }, | |
| { | |
| "epoch": 1.35, | |
| "grad_norm": 1.5538333654403687, | |
| "learning_rate": 8.700274725274725e-05, | |
| "loss": 0.4569, | |
| "step": 4940 | |
| }, | |
| { | |
| "epoch": 1.35, | |
| "grad_norm": 0.6673538684844971, | |
| "learning_rate": 8.697527472527473e-05, | |
| "loss": 0.4659, | |
| "step": 4950 | |
| }, | |
| { | |
| "epoch": 1.35, | |
| "grad_norm": 0.9370101690292358, | |
| "learning_rate": 8.69478021978022e-05, | |
| "loss": 0.4644, | |
| "step": 4960 | |
| }, | |
| { | |
| "epoch": 1.36, | |
| "grad_norm": 1.748659610748291, | |
| "learning_rate": 8.692032967032966e-05, | |
| "loss": 0.458, | |
| "step": 4970 | |
| }, | |
| { | |
| "epoch": 1.36, | |
| "grad_norm": 0.8026989102363586, | |
| "learning_rate": 8.689285714285715e-05, | |
| "loss": 0.4696, | |
| "step": 4980 | |
| }, | |
| { | |
| "epoch": 1.36, | |
| "grad_norm": 1.7652183771133423, | |
| "learning_rate": 8.686538461538461e-05, | |
| "loss": 0.4676, | |
| "step": 4990 | |
| }, | |
| { | |
| "epoch": 1.37, | |
| "grad_norm": 0.6286242008209229, | |
| "learning_rate": 8.683791208791208e-05, | |
| "loss": 0.4638, | |
| "step": 5000 | |
| }, | |
| { | |
| "epoch": 1.37, | |
| "grad_norm": 2.5077078342437744, | |
| "learning_rate": 8.681043956043956e-05, | |
| "loss": 0.4774, | |
| "step": 5010 | |
| }, | |
| { | |
| "epoch": 1.37, | |
| "grad_norm": 0.6874992251396179, | |
| "learning_rate": 8.678296703296703e-05, | |
| "loss": 0.4621, | |
| "step": 5020 | |
| }, | |
| { | |
| "epoch": 1.37, | |
| "grad_norm": 1.2562603950500488, | |
| "learning_rate": 8.67554945054945e-05, | |
| "loss": 0.4745, | |
| "step": 5030 | |
| }, | |
| { | |
| "epoch": 1.38, | |
| "grad_norm": 1.0717673301696777, | |
| "learning_rate": 8.672802197802198e-05, | |
| "loss": 0.4726, | |
| "step": 5040 | |
| }, | |
| { | |
| "epoch": 1.38, | |
| "grad_norm": 0.744522750377655, | |
| "learning_rate": 8.670054945054945e-05, | |
| "loss": 0.4823, | |
| "step": 5050 | |
| }, | |
| { | |
| "epoch": 1.38, | |
| "grad_norm": 0.879859447479248, | |
| "learning_rate": 8.667307692307692e-05, | |
| "loss": 0.4651, | |
| "step": 5060 | |
| }, | |
| { | |
| "epoch": 1.38, | |
| "grad_norm": 0.6319553852081299, | |
| "learning_rate": 8.66456043956044e-05, | |
| "loss": 0.4714, | |
| "step": 5070 | |
| }, | |
| { | |
| "epoch": 1.39, | |
| "grad_norm": 0.8513883948326111, | |
| "learning_rate": 8.661813186813187e-05, | |
| "loss": 0.4644, | |
| "step": 5080 | |
| }, | |
| { | |
| "epoch": 1.39, | |
| "grad_norm": 1.7870076894760132, | |
| "learning_rate": 8.659065934065934e-05, | |
| "loss": 0.4714, | |
| "step": 5090 | |
| }, | |
| { | |
| "epoch": 1.39, | |
| "grad_norm": 0.8447625041007996, | |
| "learning_rate": 8.656318681318682e-05, | |
| "loss": 0.4717, | |
| "step": 5100 | |
| }, | |
| { | |
| "epoch": 1.4, | |
| "grad_norm": 0.7471101880073547, | |
| "learning_rate": 8.653571428571429e-05, | |
| "loss": 0.4594, | |
| "step": 5110 | |
| }, | |
| { | |
| "epoch": 1.4, | |
| "grad_norm": 0.7438103556632996, | |
| "learning_rate": 8.650824175824175e-05, | |
| "loss": 0.4622, | |
| "step": 5120 | |
| }, | |
| { | |
| "epoch": 1.4, | |
| "grad_norm": 0.8779565095901489, | |
| "learning_rate": 8.648076923076924e-05, | |
| "loss": 0.4639, | |
| "step": 5130 | |
| }, | |
| { | |
| "epoch": 1.4, | |
| "grad_norm": 1.4103574752807617, | |
| "learning_rate": 8.64532967032967e-05, | |
| "loss": 0.4568, | |
| "step": 5140 | |
| }, | |
| { | |
| "epoch": 1.41, | |
| "grad_norm": 1.2644096612930298, | |
| "learning_rate": 8.642582417582417e-05, | |
| "loss": 0.4696, | |
| "step": 5150 | |
| }, | |
| { | |
| "epoch": 1.41, | |
| "grad_norm": 0.7385484576225281, | |
| "learning_rate": 8.639835164835165e-05, | |
| "loss": 0.4638, | |
| "step": 5160 | |
| }, | |
| { | |
| "epoch": 1.41, | |
| "grad_norm": 0.7698835730552673, | |
| "learning_rate": 8.637087912087912e-05, | |
| "loss": 0.4762, | |
| "step": 5170 | |
| }, | |
| { | |
| "epoch": 1.42, | |
| "grad_norm": 1.049194574356079, | |
| "learning_rate": 8.634340659340659e-05, | |
| "loss": 0.4588, | |
| "step": 5180 | |
| }, | |
| { | |
| "epoch": 1.42, | |
| "grad_norm": 0.7376847267150879, | |
| "learning_rate": 8.631593406593407e-05, | |
| "loss": 0.46, | |
| "step": 5190 | |
| }, | |
| { | |
| "epoch": 1.42, | |
| "grad_norm": 1.1413201093673706, | |
| "learning_rate": 8.628846153846154e-05, | |
| "loss": 0.4662, | |
| "step": 5200 | |
| }, | |
| { | |
| "epoch": 1.42, | |
| "grad_norm": 0.9843841195106506, | |
| "learning_rate": 8.626098901098901e-05, | |
| "loss": 0.4663, | |
| "step": 5210 | |
| }, | |
| { | |
| "epoch": 1.43, | |
| "grad_norm": 0.6186099052429199, | |
| "learning_rate": 8.623351648351649e-05, | |
| "loss": 0.4683, | |
| "step": 5220 | |
| }, | |
| { | |
| "epoch": 1.43, | |
| "grad_norm": 1.4082993268966675, | |
| "learning_rate": 8.620604395604396e-05, | |
| "loss": 0.4639, | |
| "step": 5230 | |
| }, | |
| { | |
| "epoch": 1.43, | |
| "grad_norm": 0.9625537395477295, | |
| "learning_rate": 8.617857142857143e-05, | |
| "loss": 0.4646, | |
| "step": 5240 | |
| }, | |
| { | |
| "epoch": 1.43, | |
| "grad_norm": 1.0915727615356445, | |
| "learning_rate": 8.615109890109891e-05, | |
| "loss": 0.4669, | |
| "step": 5250 | |
| }, | |
| { | |
| "epoch": 1.44, | |
| "grad_norm": 0.9645349383354187, | |
| "learning_rate": 8.612362637362638e-05, | |
| "loss": 0.4572, | |
| "step": 5260 | |
| }, | |
| { | |
| "epoch": 1.44, | |
| "grad_norm": 0.7993157505989075, | |
| "learning_rate": 8.609615384615384e-05, | |
| "loss": 0.4623, | |
| "step": 5270 | |
| }, | |
| { | |
| "epoch": 1.44, | |
| "grad_norm": 1.3415577411651611, | |
| "learning_rate": 8.606868131868133e-05, | |
| "loss": 0.4569, | |
| "step": 5280 | |
| }, | |
| { | |
| "epoch": 1.45, | |
| "grad_norm": 1.1217197179794312, | |
| "learning_rate": 8.60412087912088e-05, | |
| "loss": 0.4632, | |
| "step": 5290 | |
| }, | |
| { | |
| "epoch": 1.45, | |
| "grad_norm": 0.6021980047225952, | |
| "learning_rate": 8.601373626373626e-05, | |
| "loss": 0.4688, | |
| "step": 5300 | |
| }, | |
| { | |
| "epoch": 1.45, | |
| "grad_norm": 0.9672883749008179, | |
| "learning_rate": 8.598626373626374e-05, | |
| "loss": 0.4594, | |
| "step": 5310 | |
| }, | |
| { | |
| "epoch": 1.45, | |
| "grad_norm": 0.8534507155418396, | |
| "learning_rate": 8.595879120879121e-05, | |
| "loss": 0.4496, | |
| "step": 5320 | |
| }, | |
| { | |
| "epoch": 1.46, | |
| "grad_norm": 1.6172966957092285, | |
| "learning_rate": 8.593131868131868e-05, | |
| "loss": 0.4751, | |
| "step": 5330 | |
| }, | |
| { | |
| "epoch": 1.46, | |
| "grad_norm": 1.378061056137085, | |
| "learning_rate": 8.590384615384616e-05, | |
| "loss": 0.4597, | |
| "step": 5340 | |
| }, | |
| { | |
| "epoch": 1.46, | |
| "grad_norm": 0.8239535093307495, | |
| "learning_rate": 8.587637362637363e-05, | |
| "loss": 0.4549, | |
| "step": 5350 | |
| }, | |
| { | |
| "epoch": 1.46, | |
| "grad_norm": 0.6672975420951843, | |
| "learning_rate": 8.58489010989011e-05, | |
| "loss": 0.4694, | |
| "step": 5360 | |
| }, | |
| { | |
| "epoch": 1.47, | |
| "grad_norm": 1.042051076889038, | |
| "learning_rate": 8.582142857142858e-05, | |
| "loss": 0.4667, | |
| "step": 5370 | |
| }, | |
| { | |
| "epoch": 1.47, | |
| "grad_norm": 1.6398990154266357, | |
| "learning_rate": 8.579395604395605e-05, | |
| "loss": 0.4657, | |
| "step": 5380 | |
| }, | |
| { | |
| "epoch": 1.47, | |
| "grad_norm": 0.5243840217590332, | |
| "learning_rate": 8.576648351648352e-05, | |
| "loss": 0.4733, | |
| "step": 5390 | |
| }, | |
| { | |
| "epoch": 1.48, | |
| "grad_norm": 1.3996922969818115, | |
| "learning_rate": 8.5739010989011e-05, | |
| "loss": 0.4747, | |
| "step": 5400 | |
| }, | |
| { | |
| "epoch": 1.48, | |
| "grad_norm": 0.7381965517997742, | |
| "learning_rate": 8.571153846153847e-05, | |
| "loss": 0.4694, | |
| "step": 5410 | |
| }, | |
| { | |
| "epoch": 1.48, | |
| "grad_norm": 1.1151881217956543, | |
| "learning_rate": 8.568406593406593e-05, | |
| "loss": 0.4658, | |
| "step": 5420 | |
| }, | |
| { | |
| "epoch": 1.48, | |
| "grad_norm": 1.0418914556503296, | |
| "learning_rate": 8.565659340659342e-05, | |
| "loss": 0.4568, | |
| "step": 5430 | |
| }, | |
| { | |
| "epoch": 1.49, | |
| "grad_norm": 0.7290151119232178, | |
| "learning_rate": 8.562912087912088e-05, | |
| "loss": 0.4642, | |
| "step": 5440 | |
| }, | |
| { | |
| "epoch": 1.49, | |
| "grad_norm": 0.8483603000640869, | |
| "learning_rate": 8.560164835164835e-05, | |
| "loss": 0.4912, | |
| "step": 5450 | |
| }, | |
| { | |
| "epoch": 1.49, | |
| "grad_norm": 1.1545953750610352, | |
| "learning_rate": 8.557417582417583e-05, | |
| "loss": 0.4738, | |
| "step": 5460 | |
| }, | |
| { | |
| "epoch": 1.49, | |
| "grad_norm": 1.0104776620864868, | |
| "learning_rate": 8.55467032967033e-05, | |
| "loss": 0.4751, | |
| "step": 5470 | |
| }, | |
| { | |
| "epoch": 1.5, | |
| "grad_norm": 0.9386792778968811, | |
| "learning_rate": 8.551923076923077e-05, | |
| "loss": 0.4549, | |
| "step": 5480 | |
| }, | |
| { | |
| "epoch": 1.5, | |
| "grad_norm": 0.6779844760894775, | |
| "learning_rate": 8.549175824175825e-05, | |
| "loss": 0.4682, | |
| "step": 5490 | |
| }, | |
| { | |
| "epoch": 1.5, | |
| "grad_norm": 0.7957237958908081, | |
| "learning_rate": 8.546428571428572e-05, | |
| "loss": 0.4561, | |
| "step": 5500 | |
| }, | |
| { | |
| "epoch": 1.51, | |
| "grad_norm": 0.8393499255180359, | |
| "learning_rate": 8.543681318681319e-05, | |
| "loss": 0.4659, | |
| "step": 5510 | |
| }, | |
| { | |
| "epoch": 1.51, | |
| "grad_norm": 0.9744943380355835, | |
| "learning_rate": 8.540934065934067e-05, | |
| "loss": 0.4545, | |
| "step": 5520 | |
| }, | |
| { | |
| "epoch": 1.51, | |
| "grad_norm": 1.1456732749938965, | |
| "learning_rate": 8.538186813186814e-05, | |
| "loss": 0.4614, | |
| "step": 5530 | |
| }, | |
| { | |
| "epoch": 1.51, | |
| "grad_norm": 0.8066684603691101, | |
| "learning_rate": 8.53543956043956e-05, | |
| "loss": 0.475, | |
| "step": 5540 | |
| }, | |
| { | |
| "epoch": 1.52, | |
| "grad_norm": 1.4929931163787842, | |
| "learning_rate": 8.532692307692309e-05, | |
| "loss": 0.4695, | |
| "step": 5550 | |
| }, | |
| { | |
| "epoch": 1.52, | |
| "grad_norm": 0.7223564386367798, | |
| "learning_rate": 8.529945054945056e-05, | |
| "loss": 0.4692, | |
| "step": 5560 | |
| }, | |
| { | |
| "epoch": 1.52, | |
| "grad_norm": 1.3222299814224243, | |
| "learning_rate": 8.527197802197802e-05, | |
| "loss": 0.4533, | |
| "step": 5570 | |
| }, | |
| { | |
| "epoch": 1.52, | |
| "grad_norm": 1.1262595653533936, | |
| "learning_rate": 8.52445054945055e-05, | |
| "loss": 0.4637, | |
| "step": 5580 | |
| }, | |
| { | |
| "epoch": 1.53, | |
| "grad_norm": 0.9097437858581543, | |
| "learning_rate": 8.521703296703297e-05, | |
| "loss": 0.4692, | |
| "step": 5590 | |
| }, | |
| { | |
| "epoch": 1.53, | |
| "grad_norm": 1.5121077299118042, | |
| "learning_rate": 8.518956043956044e-05, | |
| "loss": 0.4591, | |
| "step": 5600 | |
| }, | |
| { | |
| "epoch": 1.53, | |
| "grad_norm": 1.1432225704193115, | |
| "learning_rate": 8.516208791208792e-05, | |
| "loss": 0.4723, | |
| "step": 5610 | |
| }, | |
| { | |
| "epoch": 1.54, | |
| "grad_norm": 1.3745416402816772, | |
| "learning_rate": 8.513461538461539e-05, | |
| "loss": 0.4725, | |
| "step": 5620 | |
| }, | |
| { | |
| "epoch": 1.54, | |
| "grad_norm": 0.8508789539337158, | |
| "learning_rate": 8.510714285714286e-05, | |
| "loss": 0.4607, | |
| "step": 5630 | |
| }, | |
| { | |
| "epoch": 1.54, | |
| "grad_norm": 1.3097485303878784, | |
| "learning_rate": 8.507967032967034e-05, | |
| "loss": 0.4635, | |
| "step": 5640 | |
| }, | |
| { | |
| "epoch": 1.54, | |
| "grad_norm": 1.29444420337677, | |
| "learning_rate": 8.505219780219781e-05, | |
| "loss": 0.4628, | |
| "step": 5650 | |
| }, | |
| { | |
| "epoch": 1.55, | |
| "grad_norm": 0.5336117744445801, | |
| "learning_rate": 8.502472527472528e-05, | |
| "loss": 0.4568, | |
| "step": 5660 | |
| }, | |
| { | |
| "epoch": 1.55, | |
| "grad_norm": 0.7139732837677002, | |
| "learning_rate": 8.499725274725276e-05, | |
| "loss": 0.4574, | |
| "step": 5670 | |
| }, | |
| { | |
| "epoch": 1.55, | |
| "grad_norm": 0.7436287999153137, | |
| "learning_rate": 8.496978021978023e-05, | |
| "loss": 0.455, | |
| "step": 5680 | |
| }, | |
| { | |
| "epoch": 1.55, | |
| "grad_norm": 0.9436907172203064, | |
| "learning_rate": 8.49423076923077e-05, | |
| "loss": 0.4678, | |
| "step": 5690 | |
| }, | |
| { | |
| "epoch": 1.56, | |
| "grad_norm": 0.9827747344970703, | |
| "learning_rate": 8.491483516483518e-05, | |
| "loss": 0.4614, | |
| "step": 5700 | |
| }, | |
| { | |
| "epoch": 1.56, | |
| "grad_norm": 0.7332632541656494, | |
| "learning_rate": 8.488736263736265e-05, | |
| "loss": 0.4675, | |
| "step": 5710 | |
| }, | |
| { | |
| "epoch": 1.56, | |
| "grad_norm": 1.1933000087738037, | |
| "learning_rate": 8.485989010989011e-05, | |
| "loss": 0.4528, | |
| "step": 5720 | |
| }, | |
| { | |
| "epoch": 1.57, | |
| "grad_norm": 1.1396616697311401, | |
| "learning_rate": 8.48324175824176e-05, | |
| "loss": 0.4663, | |
| "step": 5730 | |
| }, | |
| { | |
| "epoch": 1.57, | |
| "grad_norm": 1.4335391521453857, | |
| "learning_rate": 8.480494505494506e-05, | |
| "loss": 0.4658, | |
| "step": 5740 | |
| }, | |
| { | |
| "epoch": 1.57, | |
| "grad_norm": 0.85484778881073, | |
| "learning_rate": 8.477747252747253e-05, | |
| "loss": 0.4536, | |
| "step": 5750 | |
| }, | |
| { | |
| "epoch": 1.57, | |
| "grad_norm": 1.206037163734436, | |
| "learning_rate": 8.475000000000001e-05, | |
| "loss": 0.4665, | |
| "step": 5760 | |
| }, | |
| { | |
| "epoch": 1.58, | |
| "grad_norm": 1.0052751302719116, | |
| "learning_rate": 8.472252747252748e-05, | |
| "loss": 0.4657, | |
| "step": 5770 | |
| }, | |
| { | |
| "epoch": 1.58, | |
| "grad_norm": 0.6977716088294983, | |
| "learning_rate": 8.469505494505495e-05, | |
| "loss": 0.4594, | |
| "step": 5780 | |
| }, | |
| { | |
| "epoch": 1.58, | |
| "grad_norm": 0.975903332233429, | |
| "learning_rate": 8.466758241758243e-05, | |
| "loss": 0.4739, | |
| "step": 5790 | |
| }, | |
| { | |
| "epoch": 1.58, | |
| "grad_norm": 1.0114597082138062, | |
| "learning_rate": 8.46401098901099e-05, | |
| "loss": 0.4732, | |
| "step": 5800 | |
| }, | |
| { | |
| "epoch": 1.59, | |
| "grad_norm": 0.5334266424179077, | |
| "learning_rate": 8.461263736263737e-05, | |
| "loss": 0.4585, | |
| "step": 5810 | |
| }, | |
| { | |
| "epoch": 1.59, | |
| "grad_norm": 1.6815012693405151, | |
| "learning_rate": 8.458516483516485e-05, | |
| "loss": 0.4737, | |
| "step": 5820 | |
| }, | |
| { | |
| "epoch": 1.59, | |
| "grad_norm": 1.1055243015289307, | |
| "learning_rate": 8.455769230769232e-05, | |
| "loss": 0.4608, | |
| "step": 5830 | |
| }, | |
| { | |
| "epoch": 1.6, | |
| "grad_norm": 1.3854832649230957, | |
| "learning_rate": 8.453021978021979e-05, | |
| "loss": 0.4659, | |
| "step": 5840 | |
| }, | |
| { | |
| "epoch": 1.6, | |
| "grad_norm": 1.0487163066864014, | |
| "learning_rate": 8.450274725274727e-05, | |
| "loss": 0.4519, | |
| "step": 5850 | |
| }, | |
| { | |
| "epoch": 1.6, | |
| "grad_norm": 1.0301164388656616, | |
| "learning_rate": 8.447527472527474e-05, | |
| "loss": 0.4598, | |
| "step": 5860 | |
| }, | |
| { | |
| "epoch": 1.6, | |
| "grad_norm": 0.7901486754417419, | |
| "learning_rate": 8.44478021978022e-05, | |
| "loss": 0.4727, | |
| "step": 5870 | |
| }, | |
| { | |
| "epoch": 1.61, | |
| "grad_norm": 0.6557927131652832, | |
| "learning_rate": 8.442032967032969e-05, | |
| "loss": 0.4523, | |
| "step": 5880 | |
| }, | |
| { | |
| "epoch": 1.61, | |
| "grad_norm": 0.8954731225967407, | |
| "learning_rate": 8.439285714285715e-05, | |
| "loss": 0.4672, | |
| "step": 5890 | |
| }, | |
| { | |
| "epoch": 1.61, | |
| "grad_norm": 0.6038153171539307, | |
| "learning_rate": 8.436538461538462e-05, | |
| "loss": 0.4604, | |
| "step": 5900 | |
| }, | |
| { | |
| "epoch": 1.61, | |
| "grad_norm": 0.9118047952651978, | |
| "learning_rate": 8.433791208791209e-05, | |
| "loss": 0.4592, | |
| "step": 5910 | |
| }, | |
| { | |
| "epoch": 1.62, | |
| "grad_norm": 1.125707745552063, | |
| "learning_rate": 8.431043956043957e-05, | |
| "loss": 0.4713, | |
| "step": 5920 | |
| }, | |
| { | |
| "epoch": 1.62, | |
| "grad_norm": 0.7669263482093811, | |
| "learning_rate": 8.428296703296704e-05, | |
| "loss": 0.465, | |
| "step": 5930 | |
| }, | |
| { | |
| "epoch": 1.62, | |
| "grad_norm": 1.0012739896774292, | |
| "learning_rate": 8.425549450549451e-05, | |
| "loss": 0.4661, | |
| "step": 5940 | |
| }, | |
| { | |
| "epoch": 1.63, | |
| "grad_norm": 0.650155782699585, | |
| "learning_rate": 8.422802197802199e-05, | |
| "loss": 0.4649, | |
| "step": 5950 | |
| }, | |
| { | |
| "epoch": 1.63, | |
| "grad_norm": 1.224027156829834, | |
| "learning_rate": 8.420054945054946e-05, | |
| "loss": 0.4486, | |
| "step": 5960 | |
| }, | |
| { | |
| "epoch": 1.63, | |
| "grad_norm": 1.350738763809204, | |
| "learning_rate": 8.417307692307693e-05, | |
| "loss": 0.4714, | |
| "step": 5970 | |
| }, | |
| { | |
| "epoch": 1.63, | |
| "grad_norm": 1.238620400428772, | |
| "learning_rate": 8.414560439560441e-05, | |
| "loss": 0.4644, | |
| "step": 5980 | |
| }, | |
| { | |
| "epoch": 1.64, | |
| "grad_norm": 1.4456909894943237, | |
| "learning_rate": 8.411813186813188e-05, | |
| "loss": 0.4674, | |
| "step": 5990 | |
| }, | |
| { | |
| "epoch": 1.64, | |
| "grad_norm": 0.9819515943527222, | |
| "learning_rate": 8.409065934065934e-05, | |
| "loss": 0.4681, | |
| "step": 6000 | |
| }, | |
| { | |
| "epoch": 1.64, | |
| "grad_norm": 0.7437455058097839, | |
| "learning_rate": 8.406318681318683e-05, | |
| "loss": 0.4701, | |
| "step": 6010 | |
| }, | |
| { | |
| "epoch": 1.64, | |
| "grad_norm": 0.7628225088119507, | |
| "learning_rate": 8.403571428571428e-05, | |
| "loss": 0.4608, | |
| "step": 6020 | |
| }, | |
| { | |
| "epoch": 1.65, | |
| "grad_norm": 1.0726768970489502, | |
| "learning_rate": 8.400824175824176e-05, | |
| "loss": 0.461, | |
| "step": 6030 | |
| }, | |
| { | |
| "epoch": 1.65, | |
| "grad_norm": 0.799857497215271, | |
| "learning_rate": 8.398076923076923e-05, | |
| "loss": 0.4587, | |
| "step": 6040 | |
| }, | |
| { | |
| "epoch": 1.65, | |
| "grad_norm": 1.242092251777649, | |
| "learning_rate": 8.39532967032967e-05, | |
| "loss": 0.4569, | |
| "step": 6050 | |
| }, | |
| { | |
| "epoch": 1.66, | |
| "grad_norm": 0.971057116985321, | |
| "learning_rate": 8.392582417582418e-05, | |
| "loss": 0.4537, | |
| "step": 6060 | |
| }, | |
| { | |
| "epoch": 1.66, | |
| "grad_norm": 1.1566013097763062, | |
| "learning_rate": 8.389835164835165e-05, | |
| "loss": 0.4638, | |
| "step": 6070 | |
| }, | |
| { | |
| "epoch": 1.66, | |
| "grad_norm": 1.2802190780639648, | |
| "learning_rate": 8.387087912087912e-05, | |
| "loss": 0.4663, | |
| "step": 6080 | |
| }, | |
| { | |
| "epoch": 1.66, | |
| "grad_norm": 1.0269036293029785, | |
| "learning_rate": 8.38434065934066e-05, | |
| "loss": 0.4628, | |
| "step": 6090 | |
| }, | |
| { | |
| "epoch": 1.67, | |
| "grad_norm": 0.7012946605682373, | |
| "learning_rate": 8.381593406593407e-05, | |
| "loss": 0.4513, | |
| "step": 6100 | |
| }, | |
| { | |
| "epoch": 1.67, | |
| "grad_norm": 0.5895891189575195, | |
| "learning_rate": 8.378846153846153e-05, | |
| "loss": 0.4584, | |
| "step": 6110 | |
| }, | |
| { | |
| "epoch": 1.67, | |
| "grad_norm": 0.9521509408950806, | |
| "learning_rate": 8.376098901098902e-05, | |
| "loss": 0.4587, | |
| "step": 6120 | |
| }, | |
| { | |
| "epoch": 1.67, | |
| "grad_norm": 1.409681797027588, | |
| "learning_rate": 8.373351648351648e-05, | |
| "loss": 0.4592, | |
| "step": 6130 | |
| }, | |
| { | |
| "epoch": 1.68, | |
| "grad_norm": 1.0833555459976196, | |
| "learning_rate": 8.370604395604395e-05, | |
| "loss": 0.4562, | |
| "step": 6140 | |
| }, | |
| { | |
| "epoch": 1.68, | |
| "grad_norm": 0.8994373679161072, | |
| "learning_rate": 8.367857142857143e-05, | |
| "loss": 0.4593, | |
| "step": 6150 | |
| }, | |
| { | |
| "epoch": 1.68, | |
| "grad_norm": 1.2591134309768677, | |
| "learning_rate": 8.36510989010989e-05, | |
| "loss": 0.4632, | |
| "step": 6160 | |
| }, | |
| { | |
| "epoch": 1.69, | |
| "grad_norm": 0.8163917064666748, | |
| "learning_rate": 8.362362637362637e-05, | |
| "loss": 0.4767, | |
| "step": 6170 | |
| }, | |
| { | |
| "epoch": 1.69, | |
| "grad_norm": 0.6897978186607361, | |
| "learning_rate": 8.359615384615385e-05, | |
| "loss": 0.4569, | |
| "step": 6180 | |
| }, | |
| { | |
| "epoch": 1.69, | |
| "grad_norm": 1.0527364015579224, | |
| "learning_rate": 8.356868131868132e-05, | |
| "loss": 0.4618, | |
| "step": 6190 | |
| }, | |
| { | |
| "epoch": 1.69, | |
| "grad_norm": 1.4118075370788574, | |
| "learning_rate": 8.354120879120879e-05, | |
| "loss": 0.4542, | |
| "step": 6200 | |
| }, | |
| { | |
| "epoch": 1.7, | |
| "grad_norm": 0.9567866325378418, | |
| "learning_rate": 8.351373626373627e-05, | |
| "loss": 0.4673, | |
| "step": 6210 | |
| }, | |
| { | |
| "epoch": 1.7, | |
| "grad_norm": 1.0207067728042603, | |
| "learning_rate": 8.348626373626374e-05, | |
| "loss": 0.4538, | |
| "step": 6220 | |
| }, | |
| { | |
| "epoch": 1.7, | |
| "grad_norm": 0.5897815227508545, | |
| "learning_rate": 8.34587912087912e-05, | |
| "loss": 0.4577, | |
| "step": 6230 | |
| }, | |
| { | |
| "epoch": 1.7, | |
| "grad_norm": 0.5948817133903503, | |
| "learning_rate": 8.343131868131869e-05, | |
| "loss": 0.4629, | |
| "step": 6240 | |
| }, | |
| { | |
| "epoch": 1.71, | |
| "grad_norm": 1.6685130596160889, | |
| "learning_rate": 8.340384615384616e-05, | |
| "loss": 0.461, | |
| "step": 6250 | |
| }, | |
| { | |
| "epoch": 1.71, | |
| "grad_norm": 1.502945899963379, | |
| "learning_rate": 8.337637362637362e-05, | |
| "loss": 0.4649, | |
| "step": 6260 | |
| }, | |
| { | |
| "epoch": 1.71, | |
| "grad_norm": 1.5422593355178833, | |
| "learning_rate": 8.33489010989011e-05, | |
| "loss": 0.4494, | |
| "step": 6270 | |
| }, | |
| { | |
| "epoch": 1.72, | |
| "grad_norm": 0.999934732913971, | |
| "learning_rate": 8.332142857142857e-05, | |
| "loss": 0.464, | |
| "step": 6280 | |
| }, | |
| { | |
| "epoch": 1.72, | |
| "grad_norm": 0.6580365896224976, | |
| "learning_rate": 8.329395604395604e-05, | |
| "loss": 0.4382, | |
| "step": 6290 | |
| }, | |
| { | |
| "epoch": 1.72, | |
| "grad_norm": 0.9826914668083191, | |
| "learning_rate": 8.326648351648352e-05, | |
| "loss": 0.4646, | |
| "step": 6300 | |
| }, | |
| { | |
| "epoch": 1.72, | |
| "grad_norm": 1.6006591320037842, | |
| "learning_rate": 8.323901098901099e-05, | |
| "loss": 0.4709, | |
| "step": 6310 | |
| }, | |
| { | |
| "epoch": 1.73, | |
| "grad_norm": 0.46454253792762756, | |
| "learning_rate": 8.321153846153846e-05, | |
| "loss": 0.4572, | |
| "step": 6320 | |
| }, | |
| { | |
| "epoch": 1.73, | |
| "grad_norm": 1.1660053730010986, | |
| "learning_rate": 8.318406593406594e-05, | |
| "loss": 0.4537, | |
| "step": 6330 | |
| }, | |
| { | |
| "epoch": 1.73, | |
| "grad_norm": 0.7951260805130005, | |
| "learning_rate": 8.315659340659341e-05, | |
| "loss": 0.4491, | |
| "step": 6340 | |
| }, | |
| { | |
| "epoch": 1.73, | |
| "grad_norm": 0.9338926672935486, | |
| "learning_rate": 8.312912087912088e-05, | |
| "loss": 0.4608, | |
| "step": 6350 | |
| }, | |
| { | |
| "epoch": 1.74, | |
| "grad_norm": 1.2365031242370605, | |
| "learning_rate": 8.310164835164836e-05, | |
| "loss": 0.4582, | |
| "step": 6360 | |
| }, | |
| { | |
| "epoch": 1.74, | |
| "grad_norm": 0.7614923119544983, | |
| "learning_rate": 8.307417582417583e-05, | |
| "loss": 0.4665, | |
| "step": 6370 | |
| }, | |
| { | |
| "epoch": 1.74, | |
| "grad_norm": 0.9391283392906189, | |
| "learning_rate": 8.30467032967033e-05, | |
| "loss": 0.4593, | |
| "step": 6380 | |
| }, | |
| { | |
| "epoch": 1.75, | |
| "grad_norm": 1.4633766412734985, | |
| "learning_rate": 8.301923076923078e-05, | |
| "loss": 0.4611, | |
| "step": 6390 | |
| }, | |
| { | |
| "epoch": 1.75, | |
| "grad_norm": 1.159623146057129, | |
| "learning_rate": 8.299175824175825e-05, | |
| "loss": 0.4622, | |
| "step": 6400 | |
| }, | |
| { | |
| "epoch": 1.75, | |
| "grad_norm": 1.0221785306930542, | |
| "learning_rate": 8.296428571428571e-05, | |
| "loss": 0.4483, | |
| "step": 6410 | |
| }, | |
| { | |
| "epoch": 1.75, | |
| "grad_norm": 1.5070372819900513, | |
| "learning_rate": 8.29368131868132e-05, | |
| "loss": 0.4677, | |
| "step": 6420 | |
| }, | |
| { | |
| "epoch": 1.76, | |
| "grad_norm": 0.9225922226905823, | |
| "learning_rate": 8.290934065934066e-05, | |
| "loss": 0.4711, | |
| "step": 6430 | |
| }, | |
| { | |
| "epoch": 1.76, | |
| "grad_norm": 0.9344289302825928, | |
| "learning_rate": 8.288186813186813e-05, | |
| "loss": 0.4591, | |
| "step": 6440 | |
| }, | |
| { | |
| "epoch": 1.76, | |
| "grad_norm": 0.5448782444000244, | |
| "learning_rate": 8.285439560439561e-05, | |
| "loss": 0.4635, | |
| "step": 6450 | |
| }, | |
| { | |
| "epoch": 1.76, | |
| "grad_norm": 0.465524822473526, | |
| "learning_rate": 8.282692307692308e-05, | |
| "loss": 0.4581, | |
| "step": 6460 | |
| }, | |
| { | |
| "epoch": 1.77, | |
| "grad_norm": 1.3583608865737915, | |
| "learning_rate": 8.279945054945055e-05, | |
| "loss": 0.4718, | |
| "step": 6470 | |
| }, | |
| { | |
| "epoch": 1.77, | |
| "grad_norm": 1.3173762559890747, | |
| "learning_rate": 8.277197802197803e-05, | |
| "loss": 0.4577, | |
| "step": 6480 | |
| }, | |
| { | |
| "epoch": 1.77, | |
| "grad_norm": 1.3845728635787964, | |
| "learning_rate": 8.27445054945055e-05, | |
| "loss": 0.4623, | |
| "step": 6490 | |
| }, | |
| { | |
| "epoch": 1.78, | |
| "grad_norm": 1.0992422103881836, | |
| "learning_rate": 8.271703296703297e-05, | |
| "loss": 0.4543, | |
| "step": 6500 | |
| }, | |
| { | |
| "epoch": 1.78, | |
| "grad_norm": 0.5556319355964661, | |
| "learning_rate": 8.268956043956045e-05, | |
| "loss": 0.4613, | |
| "step": 6510 | |
| }, | |
| { | |
| "epoch": 1.78, | |
| "grad_norm": 0.7095728516578674, | |
| "learning_rate": 8.266208791208792e-05, | |
| "loss": 0.455, | |
| "step": 6520 | |
| }, | |
| { | |
| "epoch": 1.78, | |
| "grad_norm": 1.1960381269454956, | |
| "learning_rate": 8.263461538461539e-05, | |
| "loss": 0.4545, | |
| "step": 6530 | |
| }, | |
| { | |
| "epoch": 1.79, | |
| "grad_norm": 1.0696423053741455, | |
| "learning_rate": 8.260714285714287e-05, | |
| "loss": 0.4666, | |
| "step": 6540 | |
| }, | |
| { | |
| "epoch": 1.79, | |
| "grad_norm": 0.5211924314498901, | |
| "learning_rate": 8.257967032967034e-05, | |
| "loss": 0.4535, | |
| "step": 6550 | |
| }, | |
| { | |
| "epoch": 1.79, | |
| "grad_norm": 1.1284767389297485, | |
| "learning_rate": 8.25521978021978e-05, | |
| "loss": 0.4633, | |
| "step": 6560 | |
| }, | |
| { | |
| "epoch": 1.79, | |
| "grad_norm": 1.3033208847045898, | |
| "learning_rate": 8.252472527472529e-05, | |
| "loss": 0.4812, | |
| "step": 6570 | |
| }, | |
| { | |
| "epoch": 1.8, | |
| "grad_norm": 1.2627853155136108, | |
| "learning_rate": 8.249725274725275e-05, | |
| "loss": 0.4676, | |
| "step": 6580 | |
| }, | |
| { | |
| "epoch": 1.8, | |
| "grad_norm": 0.5073243379592896, | |
| "learning_rate": 8.246978021978022e-05, | |
| "loss": 0.4638, | |
| "step": 6590 | |
| }, | |
| { | |
| "epoch": 1.8, | |
| "grad_norm": 0.6492868661880493, | |
| "learning_rate": 8.24423076923077e-05, | |
| "loss": 0.46, | |
| "step": 6600 | |
| }, | |
| { | |
| "epoch": 1.81, | |
| "grad_norm": 0.8350780010223389, | |
| "learning_rate": 8.241483516483517e-05, | |
| "loss": 0.4549, | |
| "step": 6610 | |
| }, | |
| { | |
| "epoch": 1.81, | |
| "grad_norm": 1.3048648834228516, | |
| "learning_rate": 8.238736263736264e-05, | |
| "loss": 0.47, | |
| "step": 6620 | |
| }, | |
| { | |
| "epoch": 1.81, | |
| "grad_norm": 1.2212404012680054, | |
| "learning_rate": 8.235989010989012e-05, | |
| "loss": 0.4623, | |
| "step": 6630 | |
| }, | |
| { | |
| "epoch": 1.81, | |
| "grad_norm": 0.9617910981178284, | |
| "learning_rate": 8.233241758241759e-05, | |
| "loss": 0.4566, | |
| "step": 6640 | |
| }, | |
| { | |
| "epoch": 1.82, | |
| "grad_norm": 0.5445905923843384, | |
| "learning_rate": 8.230494505494506e-05, | |
| "loss": 0.458, | |
| "step": 6650 | |
| }, | |
| { | |
| "epoch": 1.82, | |
| "grad_norm": 0.6512438654899597, | |
| "learning_rate": 8.227747252747254e-05, | |
| "loss": 0.4516, | |
| "step": 6660 | |
| }, | |
| { | |
| "epoch": 1.82, | |
| "grad_norm": 1.2203713655471802, | |
| "learning_rate": 8.225000000000001e-05, | |
| "loss": 0.4566, | |
| "step": 6670 | |
| }, | |
| { | |
| "epoch": 1.82, | |
| "grad_norm": 0.5803031325340271, | |
| "learning_rate": 8.222252747252748e-05, | |
| "loss": 0.4622, | |
| "step": 6680 | |
| }, | |
| { | |
| "epoch": 1.83, | |
| "grad_norm": 0.5457717180252075, | |
| "learning_rate": 8.219505494505496e-05, | |
| "loss": 0.4509, | |
| "step": 6690 | |
| }, | |
| { | |
| "epoch": 1.83, | |
| "grad_norm": 0.4855049252510071, | |
| "learning_rate": 8.216758241758242e-05, | |
| "loss": 0.4517, | |
| "step": 6700 | |
| }, | |
| { | |
| "epoch": 1.83, | |
| "grad_norm": 0.49980971217155457, | |
| "learning_rate": 8.214010989010989e-05, | |
| "loss": 0.4592, | |
| "step": 6710 | |
| }, | |
| { | |
| "epoch": 1.84, | |
| "grad_norm": 0.8323667049407959, | |
| "learning_rate": 8.211263736263736e-05, | |
| "loss": 0.4515, | |
| "step": 6720 | |
| }, | |
| { | |
| "epoch": 1.84, | |
| "grad_norm": 0.6058827638626099, | |
| "learning_rate": 8.208516483516484e-05, | |
| "loss": 0.4437, | |
| "step": 6730 | |
| }, | |
| { | |
| "epoch": 1.84, | |
| "grad_norm": 1.3227874040603638, | |
| "learning_rate": 8.205769230769231e-05, | |
| "loss": 0.4497, | |
| "step": 6740 | |
| }, | |
| { | |
| "epoch": 1.84, | |
| "grad_norm": 1.14075767993927, | |
| "learning_rate": 8.203021978021978e-05, | |
| "loss": 0.455, | |
| "step": 6750 | |
| }, | |
| { | |
| "epoch": 1.85, | |
| "grad_norm": 1.0633773803710938, | |
| "learning_rate": 8.200274725274726e-05, | |
| "loss": 0.4582, | |
| "step": 6760 | |
| }, | |
| { | |
| "epoch": 1.85, | |
| "grad_norm": 0.671051025390625, | |
| "learning_rate": 8.197527472527473e-05, | |
| "loss": 0.4584, | |
| "step": 6770 | |
| }, | |
| { | |
| "epoch": 1.85, | |
| "grad_norm": 0.8227337002754211, | |
| "learning_rate": 8.19478021978022e-05, | |
| "loss": 0.4564, | |
| "step": 6780 | |
| }, | |
| { | |
| "epoch": 1.85, | |
| "grad_norm": 1.3290925025939941, | |
| "learning_rate": 8.192032967032968e-05, | |
| "loss": 0.4527, | |
| "step": 6790 | |
| }, | |
| { | |
| "epoch": 1.86, | |
| "grad_norm": 1.160757303237915, | |
| "learning_rate": 8.189285714285715e-05, | |
| "loss": 0.4417, | |
| "step": 6800 | |
| }, | |
| { | |
| "epoch": 1.86, | |
| "grad_norm": 0.8233458399772644, | |
| "learning_rate": 8.186538461538462e-05, | |
| "loss": 0.4459, | |
| "step": 6810 | |
| }, | |
| { | |
| "epoch": 1.86, | |
| "grad_norm": 0.7575914859771729, | |
| "learning_rate": 8.18379120879121e-05, | |
| "loss": 0.4636, | |
| "step": 6820 | |
| }, | |
| { | |
| "epoch": 1.87, | |
| "grad_norm": 0.35609036684036255, | |
| "learning_rate": 8.181043956043956e-05, | |
| "loss": 0.4479, | |
| "step": 6830 | |
| }, | |
| { | |
| "epoch": 1.87, | |
| "grad_norm": 1.0687757730484009, | |
| "learning_rate": 8.178296703296703e-05, | |
| "loss": 0.4494, | |
| "step": 6840 | |
| }, | |
| { | |
| "epoch": 1.87, | |
| "grad_norm": 0.936469316482544, | |
| "learning_rate": 8.175549450549451e-05, | |
| "loss": 0.4605, | |
| "step": 6850 | |
| }, | |
| { | |
| "epoch": 1.87, | |
| "grad_norm": 0.6257254481315613, | |
| "learning_rate": 8.172802197802198e-05, | |
| "loss": 0.4559, | |
| "step": 6860 | |
| }, | |
| { | |
| "epoch": 1.88, | |
| "grad_norm": 1.0161064863204956, | |
| "learning_rate": 8.170054945054945e-05, | |
| "loss": 0.4677, | |
| "step": 6870 | |
| }, | |
| { | |
| "epoch": 1.88, | |
| "grad_norm": 1.0407649278640747, | |
| "learning_rate": 8.167307692307693e-05, | |
| "loss": 0.4542, | |
| "step": 6880 | |
| }, | |
| { | |
| "epoch": 1.88, | |
| "grad_norm": 0.7439064383506775, | |
| "learning_rate": 8.16456043956044e-05, | |
| "loss": 0.4747, | |
| "step": 6890 | |
| }, | |
| { | |
| "epoch": 1.88, | |
| "grad_norm": 0.37432214617729187, | |
| "learning_rate": 8.161813186813187e-05, | |
| "loss": 0.4595, | |
| "step": 6900 | |
| }, | |
| { | |
| "epoch": 1.89, | |
| "grad_norm": 0.9039118885993958, | |
| "learning_rate": 8.159065934065935e-05, | |
| "loss": 0.4701, | |
| "step": 6910 | |
| }, | |
| { | |
| "epoch": 1.89, | |
| "grad_norm": 0.5294177532196045, | |
| "learning_rate": 8.156318681318682e-05, | |
| "loss": 0.4533, | |
| "step": 6920 | |
| }, | |
| { | |
| "epoch": 1.89, | |
| "grad_norm": 0.8023412227630615, | |
| "learning_rate": 8.153571428571429e-05, | |
| "loss": 0.4632, | |
| "step": 6930 | |
| }, | |
| { | |
| "epoch": 1.9, | |
| "grad_norm": 1.1089890003204346, | |
| "learning_rate": 8.150824175824177e-05, | |
| "loss": 0.4627, | |
| "step": 6940 | |
| }, | |
| { | |
| "epoch": 1.9, | |
| "grad_norm": 0.5587044954299927, | |
| "learning_rate": 8.148076923076924e-05, | |
| "loss": 0.4613, | |
| "step": 6950 | |
| }, | |
| { | |
| "epoch": 1.9, | |
| "grad_norm": 0.7494495511054993, | |
| "learning_rate": 8.14532967032967e-05, | |
| "loss": 0.4478, | |
| "step": 6960 | |
| }, | |
| { | |
| "epoch": 1.9, | |
| "grad_norm": 0.7425660490989685, | |
| "learning_rate": 8.142582417582419e-05, | |
| "loss": 0.4585, | |
| "step": 6970 | |
| }, | |
| { | |
| "epoch": 1.91, | |
| "grad_norm": 0.9916179180145264, | |
| "learning_rate": 8.139835164835165e-05, | |
| "loss": 0.464, | |
| "step": 6980 | |
| }, | |
| { | |
| "epoch": 1.91, | |
| "grad_norm": 0.9130895137786865, | |
| "learning_rate": 8.137087912087912e-05, | |
| "loss": 0.4628, | |
| "step": 6990 | |
| }, | |
| { | |
| "epoch": 1.91, | |
| "grad_norm": 0.7386452555656433, | |
| "learning_rate": 8.13434065934066e-05, | |
| "loss": 0.457, | |
| "step": 7000 | |
| }, | |
| { | |
| "epoch": 1.91, | |
| "grad_norm": 1.5726503133773804, | |
| "learning_rate": 8.131593406593407e-05, | |
| "loss": 0.461, | |
| "step": 7010 | |
| }, | |
| { | |
| "epoch": 1.92, | |
| "grad_norm": 0.6793839335441589, | |
| "learning_rate": 8.128846153846154e-05, | |
| "loss": 0.4521, | |
| "step": 7020 | |
| }, | |
| { | |
| "epoch": 1.92, | |
| "grad_norm": 1.5302358865737915, | |
| "learning_rate": 8.126098901098902e-05, | |
| "loss": 0.4598, | |
| "step": 7030 | |
| }, | |
| { | |
| "epoch": 1.92, | |
| "grad_norm": 0.9924741387367249, | |
| "learning_rate": 8.123351648351649e-05, | |
| "loss": 0.4599, | |
| "step": 7040 | |
| }, | |
| { | |
| "epoch": 1.93, | |
| "grad_norm": 0.7319537997245789, | |
| "learning_rate": 8.120604395604396e-05, | |
| "loss": 0.4585, | |
| "step": 7050 | |
| }, | |
| { | |
| "epoch": 1.93, | |
| "grad_norm": 1.067428469657898, | |
| "learning_rate": 8.117857142857144e-05, | |
| "loss": 0.4509, | |
| "step": 7060 | |
| }, | |
| { | |
| "epoch": 1.93, | |
| "grad_norm": 1.2225853204727173, | |
| "learning_rate": 8.115109890109891e-05, | |
| "loss": 0.4591, | |
| "step": 7070 | |
| }, | |
| { | |
| "epoch": 1.93, | |
| "grad_norm": 0.7233381867408752, | |
| "learning_rate": 8.112362637362638e-05, | |
| "loss": 0.4568, | |
| "step": 7080 | |
| }, | |
| { | |
| "epoch": 1.94, | |
| "grad_norm": 1.3318099975585938, | |
| "learning_rate": 8.109615384615386e-05, | |
| "loss": 0.4715, | |
| "step": 7090 | |
| }, | |
| { | |
| "epoch": 1.94, | |
| "grad_norm": 1.4362515211105347, | |
| "learning_rate": 8.106868131868133e-05, | |
| "loss": 0.4539, | |
| "step": 7100 | |
| }, | |
| { | |
| "epoch": 1.94, | |
| "grad_norm": 0.8498636484146118, | |
| "learning_rate": 8.10412087912088e-05, | |
| "loss": 0.4531, | |
| "step": 7110 | |
| }, | |
| { | |
| "epoch": 1.94, | |
| "grad_norm": 0.5961604118347168, | |
| "learning_rate": 8.101373626373628e-05, | |
| "loss": 0.4561, | |
| "step": 7120 | |
| }, | |
| { | |
| "epoch": 1.95, | |
| "grad_norm": 1.013180136680603, | |
| "learning_rate": 8.098626373626374e-05, | |
| "loss": 0.4581, | |
| "step": 7130 | |
| }, | |
| { | |
| "epoch": 1.95, | |
| "grad_norm": 0.9159004092216492, | |
| "learning_rate": 8.095879120879121e-05, | |
| "loss": 0.4498, | |
| "step": 7140 | |
| }, | |
| { | |
| "epoch": 1.95, | |
| "grad_norm": 0.9847846031188965, | |
| "learning_rate": 8.093131868131868e-05, | |
| "loss": 0.4584, | |
| "step": 7150 | |
| }, | |
| { | |
| "epoch": 1.96, | |
| "grad_norm": 1.5007630586624146, | |
| "learning_rate": 8.090384615384615e-05, | |
| "loss": 0.4524, | |
| "step": 7160 | |
| }, | |
| { | |
| "epoch": 1.96, | |
| "grad_norm": 0.8937559723854065, | |
| "learning_rate": 8.087637362637363e-05, | |
| "loss": 0.4701, | |
| "step": 7170 | |
| }, | |
| { | |
| "epoch": 1.96, | |
| "grad_norm": 0.9551745653152466, | |
| "learning_rate": 8.08489010989011e-05, | |
| "loss": 0.4412, | |
| "step": 7180 | |
| }, | |
| { | |
| "epoch": 1.96, | |
| "grad_norm": 0.7207891345024109, | |
| "learning_rate": 8.082142857142857e-05, | |
| "loss": 0.4577, | |
| "step": 7190 | |
| }, | |
| { | |
| "epoch": 1.97, | |
| "grad_norm": 0.5330765843391418, | |
| "learning_rate": 8.079395604395605e-05, | |
| "loss": 0.4531, | |
| "step": 7200 | |
| }, | |
| { | |
| "epoch": 1.97, | |
| "grad_norm": 1.0517854690551758, | |
| "learning_rate": 8.076648351648352e-05, | |
| "loss": 0.4339, | |
| "step": 7210 | |
| }, | |
| { | |
| "epoch": 1.97, | |
| "grad_norm": 1.2942813634872437, | |
| "learning_rate": 8.073901098901098e-05, | |
| "loss": 0.4575, | |
| "step": 7220 | |
| }, | |
| { | |
| "epoch": 1.98, | |
| "grad_norm": 0.9348652362823486, | |
| "learning_rate": 8.071153846153847e-05, | |
| "loss": 0.4472, | |
| "step": 7230 | |
| }, | |
| { | |
| "epoch": 1.98, | |
| "grad_norm": 1.095439076423645, | |
| "learning_rate": 8.068406593406593e-05, | |
| "loss": 0.4503, | |
| "step": 7240 | |
| }, | |
| { | |
| "epoch": 1.98, | |
| "grad_norm": 0.5965198278427124, | |
| "learning_rate": 8.06565934065934e-05, | |
| "loss": 0.4499, | |
| "step": 7250 | |
| }, | |
| { | |
| "epoch": 1.98, | |
| "grad_norm": 0.5533403158187866, | |
| "learning_rate": 8.062912087912088e-05, | |
| "loss": 0.4676, | |
| "step": 7260 | |
| }, | |
| { | |
| "epoch": 1.99, | |
| "grad_norm": 1.1052013635635376, | |
| "learning_rate": 8.060164835164835e-05, | |
| "loss": 0.4635, | |
| "step": 7270 | |
| }, | |
| { | |
| "epoch": 1.99, | |
| "grad_norm": 0.9924907088279724, | |
| "learning_rate": 8.057417582417582e-05, | |
| "loss": 0.4606, | |
| "step": 7280 | |
| }, | |
| { | |
| "epoch": 1.99, | |
| "grad_norm": 0.5882344245910645, | |
| "learning_rate": 8.05467032967033e-05, | |
| "loss": 0.4456, | |
| "step": 7290 | |
| }, | |
| { | |
| "epoch": 1.99, | |
| "grad_norm": 1.0022166967391968, | |
| "learning_rate": 8.051923076923077e-05, | |
| "loss": 0.4505, | |
| "step": 7300 | |
| }, | |
| { | |
| "epoch": 2.0, | |
| "grad_norm": 0.8352431058883667, | |
| "learning_rate": 8.049175824175824e-05, | |
| "loss": 0.4567, | |
| "step": 7310 | |
| }, | |
| { | |
| "epoch": 2.0, | |
| "grad_norm": 1.0245552062988281, | |
| "learning_rate": 8.046428571428572e-05, | |
| "loss": 0.4509, | |
| "step": 7320 | |
| }, | |
| { | |
| "epoch": 2.0, | |
| "eval_loss": 0.44436022639274597, | |
| "eval_runtime": 1246.3805, | |
| "eval_samples_per_second": 234.366, | |
| "eval_steps_per_second": 3.663, | |
| "step": 7321 | |
| }, | |
| { | |
| "epoch": 2.0, | |
| "grad_norm": 0.6890380382537842, | |
| "learning_rate": 8.043681318681319e-05, | |
| "loss": 0.4559, | |
| "step": 7330 | |
| }, | |
| { | |
| "epoch": 2.01, | |
| "grad_norm": 0.8071466088294983, | |
| "learning_rate": 8.040934065934066e-05, | |
| "loss": 0.4494, | |
| "step": 7340 | |
| }, | |
| { | |
| "epoch": 2.01, | |
| "grad_norm": 0.5615882873535156, | |
| "learning_rate": 8.038186813186814e-05, | |
| "loss": 0.444, | |
| "step": 7350 | |
| }, | |
| { | |
| "epoch": 2.01, | |
| "grad_norm": 1.312966227531433, | |
| "learning_rate": 8.03543956043956e-05, | |
| "loss": 0.4549, | |
| "step": 7360 | |
| }, | |
| { | |
| "epoch": 2.01, | |
| "grad_norm": 0.7778192162513733, | |
| "learning_rate": 8.032692307692307e-05, | |
| "loss": 0.4561, | |
| "step": 7370 | |
| }, | |
| { | |
| "epoch": 2.02, | |
| "grad_norm": 0.9343681335449219, | |
| "learning_rate": 8.029945054945056e-05, | |
| "loss": 0.4565, | |
| "step": 7380 | |
| }, | |
| { | |
| "epoch": 2.02, | |
| "grad_norm": 0.585110068321228, | |
| "learning_rate": 8.027197802197802e-05, | |
| "loss": 0.4502, | |
| "step": 7390 | |
| }, | |
| { | |
| "epoch": 2.02, | |
| "grad_norm": 1.0197051763534546, | |
| "learning_rate": 8.024450549450549e-05, | |
| "loss": 0.4595, | |
| "step": 7400 | |
| }, | |
| { | |
| "epoch": 2.02, | |
| "grad_norm": 1.0265450477600098, | |
| "learning_rate": 8.021703296703297e-05, | |
| "loss": 0.4425, | |
| "step": 7410 | |
| }, | |
| { | |
| "epoch": 2.03, | |
| "grad_norm": 1.1836851835250854, | |
| "learning_rate": 8.018956043956044e-05, | |
| "loss": 0.4606, | |
| "step": 7420 | |
| }, | |
| { | |
| "epoch": 2.03, | |
| "grad_norm": 0.9758961796760559, | |
| "learning_rate": 8.016208791208791e-05, | |
| "loss": 0.4494, | |
| "step": 7430 | |
| }, | |
| { | |
| "epoch": 2.03, | |
| "grad_norm": 1.0412979125976562, | |
| "learning_rate": 8.013461538461539e-05, | |
| "loss": 0.4593, | |
| "step": 7440 | |
| }, | |
| { | |
| "epoch": 2.04, | |
| "grad_norm": 1.13507878780365, | |
| "learning_rate": 8.010714285714286e-05, | |
| "loss": 0.4598, | |
| "step": 7450 | |
| }, | |
| { | |
| "epoch": 2.04, | |
| "grad_norm": 1.118143081665039, | |
| "learning_rate": 8.007967032967033e-05, | |
| "loss": 0.4597, | |
| "step": 7460 | |
| }, | |
| { | |
| "epoch": 2.04, | |
| "grad_norm": 0.6100110411643982, | |
| "learning_rate": 8.005219780219781e-05, | |
| "loss": 0.46, | |
| "step": 7470 | |
| }, | |
| { | |
| "epoch": 2.04, | |
| "grad_norm": 1.4135487079620361, | |
| "learning_rate": 8.002472527472528e-05, | |
| "loss": 0.455, | |
| "step": 7480 | |
| }, | |
| { | |
| "epoch": 2.05, | |
| "grad_norm": 0.7120094299316406, | |
| "learning_rate": 7.999725274725275e-05, | |
| "loss": 0.4459, | |
| "step": 7490 | |
| }, | |
| { | |
| "epoch": 2.05, | |
| "grad_norm": 0.7432197332382202, | |
| "learning_rate": 7.996978021978023e-05, | |
| "loss": 0.4509, | |
| "step": 7500 | |
| }, | |
| { | |
| "epoch": 2.05, | |
| "grad_norm": 1.1492975950241089, | |
| "learning_rate": 7.99423076923077e-05, | |
| "loss": 0.4638, | |
| "step": 7510 | |
| }, | |
| { | |
| "epoch": 2.05, | |
| "grad_norm": 1.0090208053588867, | |
| "learning_rate": 7.991483516483516e-05, | |
| "loss": 0.4702, | |
| "step": 7520 | |
| }, | |
| { | |
| "epoch": 2.06, | |
| "grad_norm": 1.09323251247406, | |
| "learning_rate": 7.988736263736263e-05, | |
| "loss": 0.4518, | |
| "step": 7530 | |
| }, | |
| { | |
| "epoch": 2.06, | |
| "grad_norm": 1.0353306531906128, | |
| "learning_rate": 7.985989010989011e-05, | |
| "loss": 0.4522, | |
| "step": 7540 | |
| }, | |
| { | |
| "epoch": 2.06, | |
| "grad_norm": 0.8499657511711121, | |
| "learning_rate": 7.983241758241758e-05, | |
| "loss": 0.4637, | |
| "step": 7550 | |
| }, | |
| { | |
| "epoch": 2.07, | |
| "grad_norm": 1.0327955484390259, | |
| "learning_rate": 7.980494505494505e-05, | |
| "loss": 0.4444, | |
| "step": 7560 | |
| }, | |
| { | |
| "epoch": 2.07, | |
| "grad_norm": 1.239608883857727, | |
| "learning_rate": 7.977747252747253e-05, | |
| "loss": 0.4536, | |
| "step": 7570 | |
| }, | |
| { | |
| "epoch": 2.07, | |
| "grad_norm": 0.6841428279876709, | |
| "learning_rate": 7.975e-05, | |
| "loss": 0.4519, | |
| "step": 7580 | |
| }, | |
| { | |
| "epoch": 2.07, | |
| "grad_norm": 1.1058799028396606, | |
| "learning_rate": 7.972252747252747e-05, | |
| "loss": 0.4511, | |
| "step": 7590 | |
| }, | |
| { | |
| "epoch": 2.08, | |
| "grad_norm": 1.1671322584152222, | |
| "learning_rate": 7.969505494505495e-05, | |
| "loss": 0.4512, | |
| "step": 7600 | |
| }, | |
| { | |
| "epoch": 2.08, | |
| "grad_norm": 0.6706550121307373, | |
| "learning_rate": 7.966758241758242e-05, | |
| "loss": 0.4493, | |
| "step": 7610 | |
| }, | |
| { | |
| "epoch": 2.08, | |
| "grad_norm": 0.752795398235321, | |
| "learning_rate": 7.964010989010989e-05, | |
| "loss": 0.4494, | |
| "step": 7620 | |
| }, | |
| { | |
| "epoch": 2.08, | |
| "grad_norm": 0.6255919933319092, | |
| "learning_rate": 7.961263736263737e-05, | |
| "loss": 0.4525, | |
| "step": 7630 | |
| }, | |
| { | |
| "epoch": 2.09, | |
| "grad_norm": 0.4543440341949463, | |
| "learning_rate": 7.958516483516484e-05, | |
| "loss": 0.4527, | |
| "step": 7640 | |
| }, | |
| { | |
| "epoch": 2.09, | |
| "grad_norm": 0.6038147807121277, | |
| "learning_rate": 7.95576923076923e-05, | |
| "loss": 0.4431, | |
| "step": 7650 | |
| }, | |
| { | |
| "epoch": 2.09, | |
| "grad_norm": 1.010972261428833, | |
| "learning_rate": 7.953021978021979e-05, | |
| "loss": 0.451, | |
| "step": 7660 | |
| }, | |
| { | |
| "epoch": 2.1, | |
| "grad_norm": 1.0627601146697998, | |
| "learning_rate": 7.950274725274725e-05, | |
| "loss": 0.4441, | |
| "step": 7670 | |
| }, | |
| { | |
| "epoch": 2.1, | |
| "grad_norm": 0.6361072063446045, | |
| "learning_rate": 7.947527472527472e-05, | |
| "loss": 0.4592, | |
| "step": 7680 | |
| }, | |
| { | |
| "epoch": 2.1, | |
| "grad_norm": 0.897415280342102, | |
| "learning_rate": 7.94478021978022e-05, | |
| "loss": 0.4535, | |
| "step": 7690 | |
| }, | |
| { | |
| "epoch": 2.1, | |
| "grad_norm": 0.6545829772949219, | |
| "learning_rate": 7.942032967032967e-05, | |
| "loss": 0.4589, | |
| "step": 7700 | |
| }, | |
| { | |
| "epoch": 2.11, | |
| "grad_norm": 0.9311442375183105, | |
| "learning_rate": 7.939285714285714e-05, | |
| "loss": 0.4417, | |
| "step": 7710 | |
| }, | |
| { | |
| "epoch": 2.11, | |
| "grad_norm": 1.5107172727584839, | |
| "learning_rate": 7.936538461538462e-05, | |
| "loss": 0.4503, | |
| "step": 7720 | |
| }, | |
| { | |
| "epoch": 2.11, | |
| "grad_norm": 0.9508922100067139, | |
| "learning_rate": 7.933791208791209e-05, | |
| "loss": 0.4532, | |
| "step": 7730 | |
| }, | |
| { | |
| "epoch": 2.11, | |
| "grad_norm": 0.8631314039230347, | |
| "learning_rate": 7.931043956043956e-05, | |
| "loss": 0.4551, | |
| "step": 7740 | |
| }, | |
| { | |
| "epoch": 2.12, | |
| "grad_norm": 0.639757513999939, | |
| "learning_rate": 7.928296703296704e-05, | |
| "loss": 0.4531, | |
| "step": 7750 | |
| }, | |
| { | |
| "epoch": 2.12, | |
| "grad_norm": 0.8901143074035645, | |
| "learning_rate": 7.925549450549451e-05, | |
| "loss": 0.4563, | |
| "step": 7760 | |
| }, | |
| { | |
| "epoch": 2.12, | |
| "grad_norm": 1.7677491903305054, | |
| "learning_rate": 7.922802197802198e-05, | |
| "loss": 0.4467, | |
| "step": 7770 | |
| }, | |
| { | |
| "epoch": 2.13, | |
| "grad_norm": 0.9918643236160278, | |
| "learning_rate": 7.920054945054946e-05, | |
| "loss": 0.4604, | |
| "step": 7780 | |
| }, | |
| { | |
| "epoch": 2.13, | |
| "grad_norm": 0.7385686635971069, | |
| "learning_rate": 7.917307692307693e-05, | |
| "loss": 0.4484, | |
| "step": 7790 | |
| }, | |
| { | |
| "epoch": 2.13, | |
| "grad_norm": 0.4740804433822632, | |
| "learning_rate": 7.91456043956044e-05, | |
| "loss": 0.4581, | |
| "step": 7800 | |
| }, | |
| { | |
| "epoch": 2.13, | |
| "grad_norm": 0.3722800016403198, | |
| "learning_rate": 7.911813186813188e-05, | |
| "loss": 0.4437, | |
| "step": 7810 | |
| }, | |
| { | |
| "epoch": 2.14, | |
| "grad_norm": 0.5825007557868958, | |
| "learning_rate": 7.909065934065934e-05, | |
| "loss": 0.4495, | |
| "step": 7820 | |
| }, | |
| { | |
| "epoch": 2.14, | |
| "grad_norm": 0.7577399015426636, | |
| "learning_rate": 7.906318681318681e-05, | |
| "loss": 0.4507, | |
| "step": 7830 | |
| }, | |
| { | |
| "epoch": 2.14, | |
| "grad_norm": 0.5723931193351746, | |
| "learning_rate": 7.90357142857143e-05, | |
| "loss": 0.4512, | |
| "step": 7840 | |
| }, | |
| { | |
| "epoch": 2.14, | |
| "grad_norm": 1.0552576780319214, | |
| "learning_rate": 7.900824175824176e-05, | |
| "loss": 0.4493, | |
| "step": 7850 | |
| }, | |
| { | |
| "epoch": 2.15, | |
| "grad_norm": 1.1965206861495972, | |
| "learning_rate": 7.898076923076923e-05, | |
| "loss": 0.4502, | |
| "step": 7860 | |
| }, | |
| { | |
| "epoch": 2.15, | |
| "grad_norm": 0.7538771629333496, | |
| "learning_rate": 7.895329670329671e-05, | |
| "loss": 0.4576, | |
| "step": 7870 | |
| }, | |
| { | |
| "epoch": 2.15, | |
| "grad_norm": 1.2679303884506226, | |
| "learning_rate": 7.892582417582418e-05, | |
| "loss": 0.4464, | |
| "step": 7880 | |
| }, | |
| { | |
| "epoch": 2.16, | |
| "grad_norm": 0.8973550200462341, | |
| "learning_rate": 7.889835164835165e-05, | |
| "loss": 0.4596, | |
| "step": 7890 | |
| }, | |
| { | |
| "epoch": 2.16, | |
| "grad_norm": 0.7081739902496338, | |
| "learning_rate": 7.887087912087913e-05, | |
| "loss": 0.4368, | |
| "step": 7900 | |
| }, | |
| { | |
| "epoch": 2.16, | |
| "grad_norm": 0.7710354328155518, | |
| "learning_rate": 7.88434065934066e-05, | |
| "loss": 0.4463, | |
| "step": 7910 | |
| }, | |
| { | |
| "epoch": 2.16, | |
| "grad_norm": 0.6749390363693237, | |
| "learning_rate": 7.881593406593407e-05, | |
| "loss": 0.4515, | |
| "step": 7920 | |
| }, | |
| { | |
| "epoch": 2.17, | |
| "grad_norm": 0.6680644154548645, | |
| "learning_rate": 7.878846153846155e-05, | |
| "loss": 0.4433, | |
| "step": 7930 | |
| }, | |
| { | |
| "epoch": 2.17, | |
| "grad_norm": 1.5634280443191528, | |
| "learning_rate": 7.876098901098902e-05, | |
| "loss": 0.4702, | |
| "step": 7940 | |
| }, | |
| { | |
| "epoch": 2.17, | |
| "grad_norm": 0.6959145665168762, | |
| "learning_rate": 7.873351648351648e-05, | |
| "loss": 0.4504, | |
| "step": 7950 | |
| }, | |
| { | |
| "epoch": 2.17, | |
| "grad_norm": 0.7739858627319336, | |
| "learning_rate": 7.870604395604397e-05, | |
| "loss": 0.4606, | |
| "step": 7960 | |
| }, | |
| { | |
| "epoch": 2.18, | |
| "grad_norm": 0.5519704818725586, | |
| "learning_rate": 7.867857142857143e-05, | |
| "loss": 0.4658, | |
| "step": 7970 | |
| }, | |
| { | |
| "epoch": 2.18, | |
| "grad_norm": 0.5425352454185486, | |
| "learning_rate": 7.86510989010989e-05, | |
| "loss": 0.4445, | |
| "step": 7980 | |
| }, | |
| { | |
| "epoch": 2.18, | |
| "grad_norm": 0.8816283941268921, | |
| "learning_rate": 7.862362637362638e-05, | |
| "loss": 0.4621, | |
| "step": 7990 | |
| }, | |
| { | |
| "epoch": 2.19, | |
| "grad_norm": 1.0816186666488647, | |
| "learning_rate": 7.859615384615385e-05, | |
| "loss": 0.4622, | |
| "step": 8000 | |
| }, | |
| { | |
| "epoch": 2.19, | |
| "grad_norm": 0.8483924865722656, | |
| "learning_rate": 7.856868131868132e-05, | |
| "loss": 0.4533, | |
| "step": 8010 | |
| }, | |
| { | |
| "epoch": 2.19, | |
| "grad_norm": 0.45229572057724, | |
| "learning_rate": 7.85412087912088e-05, | |
| "loss": 0.4626, | |
| "step": 8020 | |
| }, | |
| { | |
| "epoch": 2.19, | |
| "grad_norm": 0.6593546867370605, | |
| "learning_rate": 7.851373626373627e-05, | |
| "loss": 0.4498, | |
| "step": 8030 | |
| }, | |
| { | |
| "epoch": 2.2, | |
| "grad_norm": 1.214553952217102, | |
| "learning_rate": 7.848626373626374e-05, | |
| "loss": 0.4497, | |
| "step": 8040 | |
| }, | |
| { | |
| "epoch": 2.2, | |
| "grad_norm": 0.7151647210121155, | |
| "learning_rate": 7.845879120879122e-05, | |
| "loss": 0.4447, | |
| "step": 8050 | |
| }, | |
| { | |
| "epoch": 2.2, | |
| "grad_norm": 0.7126444578170776, | |
| "learning_rate": 7.843131868131869e-05, | |
| "loss": 0.4622, | |
| "step": 8060 | |
| }, | |
| { | |
| "epoch": 2.2, | |
| "grad_norm": 0.8902627229690552, | |
| "learning_rate": 7.840384615384616e-05, | |
| "loss": 0.4439, | |
| "step": 8070 | |
| }, | |
| { | |
| "epoch": 2.21, | |
| "grad_norm": 0.7599775195121765, | |
| "learning_rate": 7.837637362637364e-05, | |
| "loss": 0.4537, | |
| "step": 8080 | |
| }, | |
| { | |
| "epoch": 2.21, | |
| "grad_norm": 0.9020054340362549, | |
| "learning_rate": 7.83489010989011e-05, | |
| "loss": 0.4658, | |
| "step": 8090 | |
| }, | |
| { | |
| "epoch": 2.21, | |
| "grad_norm": 0.8659080266952515, | |
| "learning_rate": 7.832142857142857e-05, | |
| "loss": 0.4553, | |
| "step": 8100 | |
| }, | |
| { | |
| "epoch": 2.22, | |
| "grad_norm": 0.6685529351234436, | |
| "learning_rate": 7.829395604395606e-05, | |
| "loss": 0.4604, | |
| "step": 8110 | |
| }, | |
| { | |
| "epoch": 2.22, | |
| "grad_norm": 0.6612065434455872, | |
| "learning_rate": 7.826648351648352e-05, | |
| "loss": 0.4522, | |
| "step": 8120 | |
| }, | |
| { | |
| "epoch": 2.22, | |
| "grad_norm": 0.7674763202667236, | |
| "learning_rate": 7.823901098901099e-05, | |
| "loss": 0.4523, | |
| "step": 8130 | |
| }, | |
| { | |
| "epoch": 2.22, | |
| "grad_norm": 0.5680274963378906, | |
| "learning_rate": 7.821153846153847e-05, | |
| "loss": 0.4649, | |
| "step": 8140 | |
| }, | |
| { | |
| "epoch": 2.23, | |
| "grad_norm": 1.0545960664749146, | |
| "learning_rate": 7.818406593406594e-05, | |
| "loss": 0.4507, | |
| "step": 8150 | |
| }, | |
| { | |
| "epoch": 2.23, | |
| "grad_norm": 0.5723569393157959, | |
| "learning_rate": 7.815659340659341e-05, | |
| "loss": 0.4418, | |
| "step": 8160 | |
| }, | |
| { | |
| "epoch": 2.23, | |
| "grad_norm": 0.9119620323181152, | |
| "learning_rate": 7.812912087912089e-05, | |
| "loss": 0.4535, | |
| "step": 8170 | |
| }, | |
| { | |
| "epoch": 2.23, | |
| "grad_norm": 1.0251758098602295, | |
| "learning_rate": 7.810164835164836e-05, | |
| "loss": 0.4503, | |
| "step": 8180 | |
| }, | |
| { | |
| "epoch": 2.24, | |
| "grad_norm": 0.6839396357536316, | |
| "learning_rate": 7.807417582417583e-05, | |
| "loss": 0.452, | |
| "step": 8190 | |
| }, | |
| { | |
| "epoch": 2.24, | |
| "grad_norm": 1.3468687534332275, | |
| "learning_rate": 7.804670329670331e-05, | |
| "loss": 0.4442, | |
| "step": 8200 | |
| }, | |
| { | |
| "epoch": 2.24, | |
| "grad_norm": 0.9562901854515076, | |
| "learning_rate": 7.801923076923078e-05, | |
| "loss": 0.4596, | |
| "step": 8210 | |
| }, | |
| { | |
| "epoch": 2.25, | |
| "grad_norm": 1.092677354812622, | |
| "learning_rate": 7.799175824175825e-05, | |
| "loss": 0.4454, | |
| "step": 8220 | |
| }, | |
| { | |
| "epoch": 2.25, | |
| "grad_norm": 0.7029426097869873, | |
| "learning_rate": 7.796428571428573e-05, | |
| "loss": 0.4463, | |
| "step": 8230 | |
| }, | |
| { | |
| "epoch": 2.25, | |
| "grad_norm": 0.7543437480926514, | |
| "learning_rate": 7.79368131868132e-05, | |
| "loss": 0.4588, | |
| "step": 8240 | |
| }, | |
| { | |
| "epoch": 2.25, | |
| "grad_norm": 1.5089532136917114, | |
| "learning_rate": 7.790934065934066e-05, | |
| "loss": 0.4531, | |
| "step": 8250 | |
| }, | |
| { | |
| "epoch": 2.26, | |
| "grad_norm": 0.6691949963569641, | |
| "learning_rate": 7.788186813186815e-05, | |
| "loss": 0.4494, | |
| "step": 8260 | |
| }, | |
| { | |
| "epoch": 2.26, | |
| "grad_norm": 1.0210888385772705, | |
| "learning_rate": 7.785439560439561e-05, | |
| "loss": 0.4548, | |
| "step": 8270 | |
| }, | |
| { | |
| "epoch": 2.26, | |
| "grad_norm": 0.9251496195793152, | |
| "learning_rate": 7.782692307692308e-05, | |
| "loss": 0.4576, | |
| "step": 8280 | |
| }, | |
| { | |
| "epoch": 2.26, | |
| "grad_norm": 1.2618598937988281, | |
| "learning_rate": 7.779945054945055e-05, | |
| "loss": 0.4478, | |
| "step": 8290 | |
| }, | |
| { | |
| "epoch": 2.27, | |
| "grad_norm": 1.2252585887908936, | |
| "learning_rate": 7.777197802197802e-05, | |
| "loss": 0.4573, | |
| "step": 8300 | |
| }, | |
| { | |
| "epoch": 2.27, | |
| "grad_norm": 0.6124866604804993, | |
| "learning_rate": 7.77445054945055e-05, | |
| "loss": 0.4506, | |
| "step": 8310 | |
| }, | |
| { | |
| "epoch": 2.27, | |
| "grad_norm": 1.0547674894332886, | |
| "learning_rate": 7.771703296703297e-05, | |
| "loss": 0.4418, | |
| "step": 8320 | |
| }, | |
| { | |
| "epoch": 2.28, | |
| "grad_norm": 1.1824958324432373, | |
| "learning_rate": 7.768956043956044e-05, | |
| "loss": 0.4529, | |
| "step": 8330 | |
| }, | |
| { | |
| "epoch": 2.28, | |
| "grad_norm": 0.9790034890174866, | |
| "learning_rate": 7.76620879120879e-05, | |
| "loss": 0.455, | |
| "step": 8340 | |
| }, | |
| { | |
| "epoch": 2.28, | |
| "grad_norm": 0.9925460815429688, | |
| "learning_rate": 7.763461538461539e-05, | |
| "loss": 0.4607, | |
| "step": 8350 | |
| }, | |
| { | |
| "epoch": 2.28, | |
| "grad_norm": 0.8586925864219666, | |
| "learning_rate": 7.760714285714285e-05, | |
| "loss": 0.4644, | |
| "step": 8360 | |
| }, | |
| { | |
| "epoch": 2.29, | |
| "grad_norm": 1.4445006847381592, | |
| "learning_rate": 7.757967032967032e-05, | |
| "loss": 0.4513, | |
| "step": 8370 | |
| }, | |
| { | |
| "epoch": 2.29, | |
| "grad_norm": 0.9150937795639038, | |
| "learning_rate": 7.75521978021978e-05, | |
| "loss": 0.4534, | |
| "step": 8380 | |
| }, | |
| { | |
| "epoch": 2.29, | |
| "grad_norm": 0.9148111939430237, | |
| "learning_rate": 7.752472527472527e-05, | |
| "loss": 0.4424, | |
| "step": 8390 | |
| }, | |
| { | |
| "epoch": 2.29, | |
| "grad_norm": 0.9973462224006653, | |
| "learning_rate": 7.749725274725274e-05, | |
| "loss": 0.4523, | |
| "step": 8400 | |
| }, | |
| { | |
| "epoch": 2.3, | |
| "grad_norm": 1.022926926612854, | |
| "learning_rate": 7.746978021978022e-05, | |
| "loss": 0.444, | |
| "step": 8410 | |
| }, | |
| { | |
| "epoch": 2.3, | |
| "grad_norm": 0.7249711155891418, | |
| "learning_rate": 7.744230769230769e-05, | |
| "loss": 0.4473, | |
| "step": 8420 | |
| }, | |
| { | |
| "epoch": 2.3, | |
| "grad_norm": 0.44306471943855286, | |
| "learning_rate": 7.741483516483516e-05, | |
| "loss": 0.4452, | |
| "step": 8430 | |
| }, | |
| { | |
| "epoch": 2.31, | |
| "grad_norm": 0.9625614881515503, | |
| "learning_rate": 7.738736263736264e-05, | |
| "loss": 0.4373, | |
| "step": 8440 | |
| }, | |
| { | |
| "epoch": 2.31, | |
| "grad_norm": 0.5116167664527893, | |
| "learning_rate": 7.735989010989011e-05, | |
| "loss": 0.4505, | |
| "step": 8450 | |
| }, | |
| { | |
| "epoch": 2.31, | |
| "grad_norm": 0.7496402859687805, | |
| "learning_rate": 7.733241758241758e-05, | |
| "loss": 0.4451, | |
| "step": 8460 | |
| }, | |
| { | |
| "epoch": 2.31, | |
| "grad_norm": 1.3204615116119385, | |
| "learning_rate": 7.730494505494506e-05, | |
| "loss": 0.4576, | |
| "step": 8470 | |
| }, | |
| { | |
| "epoch": 2.32, | |
| "grad_norm": 0.7599005699157715, | |
| "learning_rate": 7.727747252747253e-05, | |
| "loss": 0.4493, | |
| "step": 8480 | |
| }, | |
| { | |
| "epoch": 2.32, | |
| "grad_norm": 0.5699270367622375, | |
| "learning_rate": 7.725e-05, | |
| "loss": 0.4568, | |
| "step": 8490 | |
| }, | |
| { | |
| "epoch": 2.32, | |
| "grad_norm": 0.6626598238945007, | |
| "learning_rate": 7.722252747252748e-05, | |
| "loss": 0.4504, | |
| "step": 8500 | |
| }, | |
| { | |
| "epoch": 2.32, | |
| "grad_norm": 0.6977862119674683, | |
| "learning_rate": 7.719505494505494e-05, | |
| "loss": 0.4529, | |
| "step": 8510 | |
| }, | |
| { | |
| "epoch": 2.33, | |
| "grad_norm": 0.883980393409729, | |
| "learning_rate": 7.716758241758241e-05, | |
| "loss": 0.4527, | |
| "step": 8520 | |
| }, | |
| { | |
| "epoch": 2.33, | |
| "grad_norm": 0.9930477738380432, | |
| "learning_rate": 7.71401098901099e-05, | |
| "loss": 0.4524, | |
| "step": 8530 | |
| }, | |
| { | |
| "epoch": 2.33, | |
| "grad_norm": 0.5075715780258179, | |
| "learning_rate": 7.711263736263736e-05, | |
| "loss": 0.4436, | |
| "step": 8540 | |
| }, | |
| { | |
| "epoch": 2.34, | |
| "grad_norm": 0.7683893442153931, | |
| "learning_rate": 7.708516483516483e-05, | |
| "loss": 0.4608, | |
| "step": 8550 | |
| }, | |
| { | |
| "epoch": 2.34, | |
| "grad_norm": 1.6546239852905273, | |
| "learning_rate": 7.705769230769231e-05, | |
| "loss": 0.4521, | |
| "step": 8560 | |
| }, | |
| { | |
| "epoch": 2.34, | |
| "grad_norm": 0.8132153153419495, | |
| "learning_rate": 7.703021978021978e-05, | |
| "loss": 0.4542, | |
| "step": 8570 | |
| }, | |
| { | |
| "epoch": 2.34, | |
| "grad_norm": 0.5142685770988464, | |
| "learning_rate": 7.700274725274725e-05, | |
| "loss": 0.4548, | |
| "step": 8580 | |
| }, | |
| { | |
| "epoch": 2.35, | |
| "grad_norm": 1.100368618965149, | |
| "learning_rate": 7.697527472527473e-05, | |
| "loss": 0.4602, | |
| "step": 8590 | |
| }, | |
| { | |
| "epoch": 2.35, | |
| "grad_norm": 1.018351674079895, | |
| "learning_rate": 7.69478021978022e-05, | |
| "loss": 0.4424, | |
| "step": 8600 | |
| }, | |
| { | |
| "epoch": 2.35, | |
| "grad_norm": 0.557134211063385, | |
| "learning_rate": 7.692032967032967e-05, | |
| "loss": 0.4486, | |
| "step": 8610 | |
| }, | |
| { | |
| "epoch": 2.35, | |
| "grad_norm": 0.701932966709137, | |
| "learning_rate": 7.689285714285715e-05, | |
| "loss": 0.4543, | |
| "step": 8620 | |
| }, | |
| { | |
| "epoch": 2.36, | |
| "grad_norm": 1.0351616144180298, | |
| "learning_rate": 7.686538461538462e-05, | |
| "loss": 0.4628, | |
| "step": 8630 | |
| }, | |
| { | |
| "epoch": 2.36, | |
| "grad_norm": 0.5028539896011353, | |
| "learning_rate": 7.683791208791208e-05, | |
| "loss": 0.4566, | |
| "step": 8640 | |
| }, | |
| { | |
| "epoch": 2.36, | |
| "grad_norm": 0.7769728302955627, | |
| "learning_rate": 7.681043956043957e-05, | |
| "loss": 0.4429, | |
| "step": 8650 | |
| }, | |
| { | |
| "epoch": 2.37, | |
| "grad_norm": 0.7966272830963135, | |
| "learning_rate": 7.678296703296703e-05, | |
| "loss": 0.4377, | |
| "step": 8660 | |
| }, | |
| { | |
| "epoch": 2.37, | |
| "grad_norm": 1.1576814651489258, | |
| "learning_rate": 7.67554945054945e-05, | |
| "loss": 0.4507, | |
| "step": 8670 | |
| }, | |
| { | |
| "epoch": 2.37, | |
| "grad_norm": 0.8654575943946838, | |
| "learning_rate": 7.672802197802198e-05, | |
| "loss": 0.4598, | |
| "step": 8680 | |
| }, | |
| { | |
| "epoch": 2.37, | |
| "grad_norm": 0.7794680595397949, | |
| "learning_rate": 7.670054945054945e-05, | |
| "loss": 0.4424, | |
| "step": 8690 | |
| }, | |
| { | |
| "epoch": 2.38, | |
| "grad_norm": 0.8271937370300293, | |
| "learning_rate": 7.667307692307692e-05, | |
| "loss": 0.4586, | |
| "step": 8700 | |
| }, | |
| { | |
| "epoch": 2.38, | |
| "grad_norm": 0.8499754071235657, | |
| "learning_rate": 7.66456043956044e-05, | |
| "loss": 0.4476, | |
| "step": 8710 | |
| }, | |
| { | |
| "epoch": 2.38, | |
| "grad_norm": 0.7553250789642334, | |
| "learning_rate": 7.661813186813187e-05, | |
| "loss": 0.4435, | |
| "step": 8720 | |
| }, | |
| { | |
| "epoch": 2.38, | |
| "grad_norm": 0.5660779476165771, | |
| "learning_rate": 7.659065934065934e-05, | |
| "loss": 0.4518, | |
| "step": 8730 | |
| }, | |
| { | |
| "epoch": 2.39, | |
| "grad_norm": 1.4875767230987549, | |
| "learning_rate": 7.656318681318682e-05, | |
| "loss": 0.4542, | |
| "step": 8740 | |
| }, | |
| { | |
| "epoch": 2.39, | |
| "grad_norm": 1.1859349012374878, | |
| "learning_rate": 7.653846153846153e-05, | |
| "loss": 0.4528, | |
| "step": 8750 | |
| }, | |
| { | |
| "epoch": 2.39, | |
| "grad_norm": 0.6259100437164307, | |
| "learning_rate": 7.651098901098902e-05, | |
| "loss": 0.4629, | |
| "step": 8760 | |
| }, | |
| { | |
| "epoch": 2.4, | |
| "grad_norm": 0.4763346314430237, | |
| "learning_rate": 7.648351648351648e-05, | |
| "loss": 0.4479, | |
| "step": 8770 | |
| }, | |
| { | |
| "epoch": 2.4, | |
| "grad_norm": 0.6478772759437561, | |
| "learning_rate": 7.645604395604395e-05, | |
| "loss": 0.4313, | |
| "step": 8780 | |
| }, | |
| { | |
| "epoch": 2.4, | |
| "grad_norm": 1.1578466892242432, | |
| "learning_rate": 7.642857142857143e-05, | |
| "loss": 0.4499, | |
| "step": 8790 | |
| }, | |
| { | |
| "epoch": 2.4, | |
| "grad_norm": 0.5163312554359436, | |
| "learning_rate": 7.64010989010989e-05, | |
| "loss": 0.4479, | |
| "step": 8800 | |
| }, | |
| { | |
| "epoch": 2.41, | |
| "grad_norm": 1.183361530303955, | |
| "learning_rate": 7.637362637362637e-05, | |
| "loss": 0.4524, | |
| "step": 8810 | |
| }, | |
| { | |
| "epoch": 2.41, | |
| "grad_norm": 0.6872965097427368, | |
| "learning_rate": 7.634615384615385e-05, | |
| "loss": 0.4445, | |
| "step": 8820 | |
| }, | |
| { | |
| "epoch": 2.41, | |
| "grad_norm": 0.7339246273040771, | |
| "learning_rate": 7.631868131868132e-05, | |
| "loss": 0.4446, | |
| "step": 8830 | |
| }, | |
| { | |
| "epoch": 2.41, | |
| "grad_norm": 0.7101340889930725, | |
| "learning_rate": 7.629120879120879e-05, | |
| "loss": 0.45, | |
| "step": 8840 | |
| }, | |
| { | |
| "epoch": 2.42, | |
| "grad_norm": 0.5717099905014038, | |
| "learning_rate": 7.626373626373627e-05, | |
| "loss": 0.4503, | |
| "step": 8850 | |
| }, | |
| { | |
| "epoch": 2.42, | |
| "grad_norm": 0.6713473796844482, | |
| "learning_rate": 7.623626373626374e-05, | |
| "loss": 0.4528, | |
| "step": 8860 | |
| }, | |
| { | |
| "epoch": 2.42, | |
| "grad_norm": 0.9480137228965759, | |
| "learning_rate": 7.62087912087912e-05, | |
| "loss": 0.4569, | |
| "step": 8870 | |
| }, | |
| { | |
| "epoch": 2.43, | |
| "grad_norm": 0.594673752784729, | |
| "learning_rate": 7.618131868131869e-05, | |
| "loss": 0.4627, | |
| "step": 8880 | |
| }, | |
| { | |
| "epoch": 2.43, | |
| "grad_norm": 0.8691803216934204, | |
| "learning_rate": 7.615384615384616e-05, | |
| "loss": 0.4459, | |
| "step": 8890 | |
| }, | |
| { | |
| "epoch": 2.43, | |
| "grad_norm": 0.8882876634597778, | |
| "learning_rate": 7.612637362637362e-05, | |
| "loss": 0.4478, | |
| "step": 8900 | |
| }, | |
| { | |
| "epoch": 2.43, | |
| "grad_norm": 0.48794516921043396, | |
| "learning_rate": 7.60989010989011e-05, | |
| "loss": 0.4479, | |
| "step": 8910 | |
| }, | |
| { | |
| "epoch": 2.44, | |
| "grad_norm": 0.4966289699077606, | |
| "learning_rate": 7.607142857142857e-05, | |
| "loss": 0.4395, | |
| "step": 8920 | |
| }, | |
| { | |
| "epoch": 2.44, | |
| "grad_norm": 0.792293131351471, | |
| "learning_rate": 7.604395604395604e-05, | |
| "loss": 0.4353, | |
| "step": 8930 | |
| }, | |
| { | |
| "epoch": 2.44, | |
| "grad_norm": 0.6615070104598999, | |
| "learning_rate": 7.601648351648351e-05, | |
| "loss": 0.4478, | |
| "step": 8940 | |
| }, | |
| { | |
| "epoch": 2.44, | |
| "grad_norm": 0.8686419725418091, | |
| "learning_rate": 7.598901098901099e-05, | |
| "loss": 0.4516, | |
| "step": 8950 | |
| }, | |
| { | |
| "epoch": 2.45, | |
| "grad_norm": 1.2833001613616943, | |
| "learning_rate": 7.596153846153846e-05, | |
| "loss": 0.4526, | |
| "step": 8960 | |
| }, | |
| { | |
| "epoch": 2.45, | |
| "grad_norm": 0.7414630055427551, | |
| "learning_rate": 7.593406593406593e-05, | |
| "loss": 0.4549, | |
| "step": 8970 | |
| }, | |
| { | |
| "epoch": 2.45, | |
| "grad_norm": 0.9745138883590698, | |
| "learning_rate": 7.590659340659341e-05, | |
| "loss": 0.4412, | |
| "step": 8980 | |
| }, | |
| { | |
| "epoch": 2.46, | |
| "grad_norm": 0.8407401442527771, | |
| "learning_rate": 7.587912087912088e-05, | |
| "loss": 0.4511, | |
| "step": 8990 | |
| }, | |
| { | |
| "epoch": 2.46, | |
| "grad_norm": 1.1383010149002075, | |
| "learning_rate": 7.585164835164835e-05, | |
| "loss": 0.4556, | |
| "step": 9000 | |
| }, | |
| { | |
| "epoch": 2.46, | |
| "grad_norm": 1.0419915914535522, | |
| "learning_rate": 7.582417582417583e-05, | |
| "loss": 0.4504, | |
| "step": 9010 | |
| }, | |
| { | |
| "epoch": 2.46, | |
| "grad_norm": 0.931141197681427, | |
| "learning_rate": 7.57967032967033e-05, | |
| "loss": 0.4464, | |
| "step": 9020 | |
| }, | |
| { | |
| "epoch": 2.47, | |
| "grad_norm": 0.6264098286628723, | |
| "learning_rate": 7.576923076923076e-05, | |
| "loss": 0.4627, | |
| "step": 9030 | |
| }, | |
| { | |
| "epoch": 2.47, | |
| "grad_norm": 0.8912045359611511, | |
| "learning_rate": 7.574175824175825e-05, | |
| "loss": 0.4452, | |
| "step": 9040 | |
| }, | |
| { | |
| "epoch": 2.47, | |
| "grad_norm": 1.3010141849517822, | |
| "learning_rate": 7.571428571428571e-05, | |
| "loss": 0.4595, | |
| "step": 9050 | |
| }, | |
| { | |
| "epoch": 2.47, | |
| "grad_norm": 0.8275282979011536, | |
| "learning_rate": 7.568681318681318e-05, | |
| "loss": 0.4486, | |
| "step": 9060 | |
| }, | |
| { | |
| "epoch": 2.48, | |
| "grad_norm": 0.8537049293518066, | |
| "learning_rate": 7.565934065934066e-05, | |
| "loss": 0.4506, | |
| "step": 9070 | |
| }, | |
| { | |
| "epoch": 2.48, | |
| "grad_norm": 0.5747140645980835, | |
| "learning_rate": 7.563186813186813e-05, | |
| "loss": 0.4488, | |
| "step": 9080 | |
| }, | |
| { | |
| "epoch": 2.48, | |
| "grad_norm": 0.8508340716362, | |
| "learning_rate": 7.56043956043956e-05, | |
| "loss": 0.4436, | |
| "step": 9090 | |
| }, | |
| { | |
| "epoch": 2.49, | |
| "grad_norm": 1.1689362525939941, | |
| "learning_rate": 7.557692307692308e-05, | |
| "loss": 0.4586, | |
| "step": 9100 | |
| }, | |
| { | |
| "epoch": 2.49, | |
| "grad_norm": 1.1048357486724854, | |
| "learning_rate": 7.554945054945055e-05, | |
| "loss": 0.4568, | |
| "step": 9110 | |
| }, | |
| { | |
| "epoch": 2.49, | |
| "grad_norm": 1.1694691181182861, | |
| "learning_rate": 7.552197802197802e-05, | |
| "loss": 0.4571, | |
| "step": 9120 | |
| }, | |
| { | |
| "epoch": 2.49, | |
| "grad_norm": 0.9257465600967407, | |
| "learning_rate": 7.54945054945055e-05, | |
| "loss": 0.4544, | |
| "step": 9130 | |
| }, | |
| { | |
| "epoch": 2.5, | |
| "grad_norm": 1.3826684951782227, | |
| "learning_rate": 7.546703296703297e-05, | |
| "loss": 0.44, | |
| "step": 9140 | |
| }, | |
| { | |
| "epoch": 2.5, | |
| "grad_norm": 0.8815349340438843, | |
| "learning_rate": 7.543956043956044e-05, | |
| "loss": 0.4438, | |
| "step": 9150 | |
| }, | |
| { | |
| "epoch": 2.5, | |
| "grad_norm": 0.661222517490387, | |
| "learning_rate": 7.541208791208792e-05, | |
| "loss": 0.4479, | |
| "step": 9160 | |
| }, | |
| { | |
| "epoch": 2.5, | |
| "grad_norm": 1.222886085510254, | |
| "learning_rate": 7.538461538461539e-05, | |
| "loss": 0.4487, | |
| "step": 9170 | |
| }, | |
| { | |
| "epoch": 2.51, | |
| "grad_norm": 1.1454687118530273, | |
| "learning_rate": 7.535714285714285e-05, | |
| "loss": 0.4625, | |
| "step": 9180 | |
| }, | |
| { | |
| "epoch": 2.51, | |
| "grad_norm": 0.49560263752937317, | |
| "learning_rate": 7.532967032967034e-05, | |
| "loss": 0.4518, | |
| "step": 9190 | |
| }, | |
| { | |
| "epoch": 2.51, | |
| "grad_norm": 0.8997675776481628, | |
| "learning_rate": 7.53021978021978e-05, | |
| "loss": 0.4426, | |
| "step": 9200 | |
| }, | |
| { | |
| "epoch": 2.52, | |
| "grad_norm": 0.9074673056602478, | |
| "learning_rate": 7.527472527472527e-05, | |
| "loss": 0.4524, | |
| "step": 9210 | |
| }, | |
| { | |
| "epoch": 2.52, | |
| "grad_norm": 0.7328269481658936, | |
| "learning_rate": 7.524725274725275e-05, | |
| "loss": 0.4512, | |
| "step": 9220 | |
| }, | |
| { | |
| "epoch": 2.52, | |
| "grad_norm": 0.7607899904251099, | |
| "learning_rate": 7.521978021978022e-05, | |
| "loss": 0.4491, | |
| "step": 9230 | |
| }, | |
| { | |
| "epoch": 2.52, | |
| "grad_norm": 0.5627162456512451, | |
| "learning_rate": 7.519230769230769e-05, | |
| "loss": 0.4511, | |
| "step": 9240 | |
| }, | |
| { | |
| "epoch": 2.53, | |
| "grad_norm": 0.9358956813812256, | |
| "learning_rate": 7.516483516483517e-05, | |
| "loss": 0.453, | |
| "step": 9250 | |
| }, | |
| { | |
| "epoch": 2.53, | |
| "grad_norm": 1.2619479894638062, | |
| "learning_rate": 7.513736263736264e-05, | |
| "loss": 0.4432, | |
| "step": 9260 | |
| }, | |
| { | |
| "epoch": 2.53, | |
| "grad_norm": 0.9375318884849548, | |
| "learning_rate": 7.510989010989011e-05, | |
| "loss": 0.4539, | |
| "step": 9270 | |
| }, | |
| { | |
| "epoch": 2.53, | |
| "grad_norm": 0.8454050421714783, | |
| "learning_rate": 7.508241758241759e-05, | |
| "loss": 0.4515, | |
| "step": 9280 | |
| }, | |
| { | |
| "epoch": 2.54, | |
| "grad_norm": 0.9856541156768799, | |
| "learning_rate": 7.505494505494506e-05, | |
| "loss": 0.445, | |
| "step": 9290 | |
| }, | |
| { | |
| "epoch": 2.54, | |
| "grad_norm": 0.6597672700881958, | |
| "learning_rate": 7.502747252747253e-05, | |
| "loss": 0.4505, | |
| "step": 9300 | |
| }, | |
| { | |
| "epoch": 2.54, | |
| "grad_norm": 1.0249258279800415, | |
| "learning_rate": 7.500000000000001e-05, | |
| "loss": 0.4516, | |
| "step": 9310 | |
| }, | |
| { | |
| "epoch": 2.55, | |
| "grad_norm": 1.003083348274231, | |
| "learning_rate": 7.497252747252748e-05, | |
| "loss": 0.4457, | |
| "step": 9320 | |
| }, | |
| { | |
| "epoch": 2.55, | |
| "grad_norm": 1.236377477645874, | |
| "learning_rate": 7.494505494505494e-05, | |
| "loss": 0.4573, | |
| "step": 9330 | |
| }, | |
| { | |
| "epoch": 2.55, | |
| "grad_norm": 1.5360130071640015, | |
| "learning_rate": 7.491758241758242e-05, | |
| "loss": 0.4478, | |
| "step": 9340 | |
| }, | |
| { | |
| "epoch": 2.55, | |
| "grad_norm": 0.9379085898399353, | |
| "learning_rate": 7.489010989010989e-05, | |
| "loss": 0.4575, | |
| "step": 9350 | |
| }, | |
| { | |
| "epoch": 2.56, | |
| "grad_norm": 0.7734600901603699, | |
| "learning_rate": 7.486263736263736e-05, | |
| "loss": 0.4571, | |
| "step": 9360 | |
| }, | |
| { | |
| "epoch": 2.56, | |
| "grad_norm": 0.6792673468589783, | |
| "learning_rate": 7.483516483516484e-05, | |
| "loss": 0.4522, | |
| "step": 9370 | |
| }, | |
| { | |
| "epoch": 2.56, | |
| "grad_norm": 0.8936319351196289, | |
| "learning_rate": 7.480769230769231e-05, | |
| "loss": 0.4576, | |
| "step": 9380 | |
| }, | |
| { | |
| "epoch": 2.57, | |
| "grad_norm": 0.7541890144348145, | |
| "learning_rate": 7.478021978021978e-05, | |
| "loss": 0.4456, | |
| "step": 9390 | |
| }, | |
| { | |
| "epoch": 2.57, | |
| "grad_norm": 0.8661054968833923, | |
| "learning_rate": 7.475274725274726e-05, | |
| "loss": 0.4428, | |
| "step": 9400 | |
| }, | |
| { | |
| "epoch": 2.57, | |
| "grad_norm": 0.919201672077179, | |
| "learning_rate": 7.472527472527473e-05, | |
| "loss": 0.4507, | |
| "step": 9410 | |
| }, | |
| { | |
| "epoch": 2.57, | |
| "grad_norm": 1.0688129663467407, | |
| "learning_rate": 7.46978021978022e-05, | |
| "loss": 0.4469, | |
| "step": 9420 | |
| }, | |
| { | |
| "epoch": 2.58, | |
| "grad_norm": 7.527632236480713, | |
| "learning_rate": 7.467032967032968e-05, | |
| "loss": 0.4512, | |
| "step": 9430 | |
| }, | |
| { | |
| "epoch": 2.58, | |
| "grad_norm": 0.5668841600418091, | |
| "learning_rate": 7.464285714285715e-05, | |
| "loss": 0.4484, | |
| "step": 9440 | |
| }, | |
| { | |
| "epoch": 2.58, | |
| "grad_norm": 0.7174908518791199, | |
| "learning_rate": 7.461538461538462e-05, | |
| "loss": 0.4361, | |
| "step": 9450 | |
| }, | |
| { | |
| "epoch": 2.58, | |
| "grad_norm": 0.693144679069519, | |
| "learning_rate": 7.45879120879121e-05, | |
| "loss": 0.4483, | |
| "step": 9460 | |
| }, | |
| { | |
| "epoch": 2.59, | |
| "grad_norm": 1.2530773878097534, | |
| "learning_rate": 7.456043956043956e-05, | |
| "loss": 0.4495, | |
| "step": 9470 | |
| }, | |
| { | |
| "epoch": 2.59, | |
| "grad_norm": 0.7528218626976013, | |
| "learning_rate": 7.453296703296703e-05, | |
| "loss": 0.4429, | |
| "step": 9480 | |
| }, | |
| { | |
| "epoch": 2.59, | |
| "grad_norm": 0.596477746963501, | |
| "learning_rate": 7.450549450549451e-05, | |
| "loss": 0.4526, | |
| "step": 9490 | |
| }, | |
| { | |
| "epoch": 2.6, | |
| "grad_norm": 0.5093569755554199, | |
| "learning_rate": 7.447802197802198e-05, | |
| "loss": 0.4419, | |
| "step": 9500 | |
| }, | |
| { | |
| "epoch": 2.6, | |
| "grad_norm": 1.0681297779083252, | |
| "learning_rate": 7.445054945054945e-05, | |
| "loss": 0.4394, | |
| "step": 9510 | |
| }, | |
| { | |
| "epoch": 2.6, | |
| "grad_norm": 1.280721664428711, | |
| "learning_rate": 7.442307692307693e-05, | |
| "loss": 0.4425, | |
| "step": 9520 | |
| }, | |
| { | |
| "epoch": 2.6, | |
| "grad_norm": 0.8306493759155273, | |
| "learning_rate": 7.43956043956044e-05, | |
| "loss": 0.4464, | |
| "step": 9530 | |
| }, | |
| { | |
| "epoch": 2.61, | |
| "grad_norm": 0.7929884195327759, | |
| "learning_rate": 7.436813186813187e-05, | |
| "loss": 0.4532, | |
| "step": 9540 | |
| }, | |
| { | |
| "epoch": 2.61, | |
| "grad_norm": 0.9622000455856323, | |
| "learning_rate": 7.434065934065935e-05, | |
| "loss": 0.4516, | |
| "step": 9550 | |
| }, | |
| { | |
| "epoch": 2.61, | |
| "grad_norm": 0.8442659378051758, | |
| "learning_rate": 7.431318681318682e-05, | |
| "loss": 0.4417, | |
| "step": 9560 | |
| }, | |
| { | |
| "epoch": 2.61, | |
| "grad_norm": 0.6593378782272339, | |
| "learning_rate": 7.428571428571429e-05, | |
| "loss": 0.4417, | |
| "step": 9570 | |
| }, | |
| { | |
| "epoch": 2.62, | |
| "grad_norm": 0.8290765881538391, | |
| "learning_rate": 7.425824175824177e-05, | |
| "loss": 0.4471, | |
| "step": 9580 | |
| }, | |
| { | |
| "epoch": 2.62, | |
| "grad_norm": 0.9711737632751465, | |
| "learning_rate": 7.423076923076924e-05, | |
| "loss": 0.4403, | |
| "step": 9590 | |
| }, | |
| { | |
| "epoch": 2.62, | |
| "grad_norm": 0.9451545476913452, | |
| "learning_rate": 7.42032967032967e-05, | |
| "loss": 0.4522, | |
| "step": 9600 | |
| }, | |
| { | |
| "epoch": 2.63, | |
| "grad_norm": 1.1259006261825562, | |
| "learning_rate": 7.417582417582419e-05, | |
| "loss": 0.4557, | |
| "step": 9610 | |
| }, | |
| { | |
| "epoch": 2.63, | |
| "grad_norm": 0.6328985095024109, | |
| "learning_rate": 7.414835164835165e-05, | |
| "loss": 0.4454, | |
| "step": 9620 | |
| }, | |
| { | |
| "epoch": 2.63, | |
| "grad_norm": 0.9557839035987854, | |
| "learning_rate": 7.412087912087912e-05, | |
| "loss": 0.4627, | |
| "step": 9630 | |
| }, | |
| { | |
| "epoch": 2.63, | |
| "grad_norm": 0.6751061081886292, | |
| "learning_rate": 7.40934065934066e-05, | |
| "loss": 0.4521, | |
| "step": 9640 | |
| }, | |
| { | |
| "epoch": 2.64, | |
| "grad_norm": 0.49840301275253296, | |
| "learning_rate": 7.406593406593407e-05, | |
| "loss": 0.4541, | |
| "step": 9650 | |
| }, | |
| { | |
| "epoch": 2.64, | |
| "grad_norm": 1.2455097436904907, | |
| "learning_rate": 7.403846153846154e-05, | |
| "loss": 0.4466, | |
| "step": 9660 | |
| }, | |
| { | |
| "epoch": 2.64, | |
| "grad_norm": 1.1257638931274414, | |
| "learning_rate": 7.401098901098902e-05, | |
| "loss": 0.4492, | |
| "step": 9670 | |
| }, | |
| { | |
| "epoch": 2.64, | |
| "grad_norm": 0.623653769493103, | |
| "learning_rate": 7.398351648351649e-05, | |
| "loss": 0.4475, | |
| "step": 9680 | |
| }, | |
| { | |
| "epoch": 2.65, | |
| "grad_norm": 0.6021344661712646, | |
| "learning_rate": 7.395604395604396e-05, | |
| "loss": 0.4393, | |
| "step": 9690 | |
| }, | |
| { | |
| "epoch": 2.65, | |
| "grad_norm": 0.7180885672569275, | |
| "learning_rate": 7.392857142857144e-05, | |
| "loss": 0.4571, | |
| "step": 9700 | |
| }, | |
| { | |
| "epoch": 2.65, | |
| "grad_norm": 0.5711954832077026, | |
| "learning_rate": 7.390109890109891e-05, | |
| "loss": 0.4534, | |
| "step": 9710 | |
| }, | |
| { | |
| "epoch": 2.66, | |
| "grad_norm": 0.8202961683273315, | |
| "learning_rate": 7.387362637362638e-05, | |
| "loss": 0.4568, | |
| "step": 9720 | |
| }, | |
| { | |
| "epoch": 2.66, | |
| "grad_norm": 0.5622140765190125, | |
| "learning_rate": 7.384615384615386e-05, | |
| "loss": 0.4415, | |
| "step": 9730 | |
| }, | |
| { | |
| "epoch": 2.66, | |
| "grad_norm": 0.9798756837844849, | |
| "learning_rate": 7.381868131868133e-05, | |
| "loss": 0.4371, | |
| "step": 9740 | |
| }, | |
| { | |
| "epoch": 2.66, | |
| "grad_norm": 0.6842076778411865, | |
| "learning_rate": 7.37912087912088e-05, | |
| "loss": 0.4495, | |
| "step": 9750 | |
| }, | |
| { | |
| "epoch": 2.67, | |
| "grad_norm": 0.6150854229927063, | |
| "learning_rate": 7.376373626373628e-05, | |
| "loss": 0.4566, | |
| "step": 9760 | |
| }, | |
| { | |
| "epoch": 2.67, | |
| "grad_norm": 0.5068964958190918, | |
| "learning_rate": 7.373626373626374e-05, | |
| "loss": 0.4445, | |
| "step": 9770 | |
| }, | |
| { | |
| "epoch": 2.67, | |
| "grad_norm": 0.8547071218490601, | |
| "learning_rate": 7.370879120879121e-05, | |
| "loss": 0.4398, | |
| "step": 9780 | |
| }, | |
| { | |
| "epoch": 2.67, | |
| "grad_norm": 0.9119787216186523, | |
| "learning_rate": 7.36813186813187e-05, | |
| "loss": 0.4622, | |
| "step": 9790 | |
| }, | |
| { | |
| "epoch": 2.68, | |
| "grad_norm": 0.6831972002983093, | |
| "learning_rate": 7.365384615384616e-05, | |
| "loss": 0.4493, | |
| "step": 9800 | |
| }, | |
| { | |
| "epoch": 2.68, | |
| "grad_norm": 1.850950002670288, | |
| "learning_rate": 7.362637362637363e-05, | |
| "loss": 0.4418, | |
| "step": 9810 | |
| }, | |
| { | |
| "epoch": 2.68, | |
| "grad_norm": 1.3177289962768555, | |
| "learning_rate": 7.359890109890111e-05, | |
| "loss": 0.4514, | |
| "step": 9820 | |
| }, | |
| { | |
| "epoch": 2.69, | |
| "grad_norm": 0.8253613114356995, | |
| "learning_rate": 7.357142857142858e-05, | |
| "loss": 0.4536, | |
| "step": 9830 | |
| }, | |
| { | |
| "epoch": 2.69, | |
| "grad_norm": 0.9882771968841553, | |
| "learning_rate": 7.354395604395605e-05, | |
| "loss": 0.4388, | |
| "step": 9840 | |
| }, | |
| { | |
| "epoch": 2.69, | |
| "grad_norm": 1.1343753337860107, | |
| "learning_rate": 7.351648351648353e-05, | |
| "loss": 0.4495, | |
| "step": 9850 | |
| }, | |
| { | |
| "epoch": 2.69, | |
| "grad_norm": 0.9710454344749451, | |
| "learning_rate": 7.3489010989011e-05, | |
| "loss": 0.4546, | |
| "step": 9860 | |
| }, | |
| { | |
| "epoch": 2.7, | |
| "grad_norm": 1.0125503540039062, | |
| "learning_rate": 7.346153846153847e-05, | |
| "loss": 0.4436, | |
| "step": 9870 | |
| }, | |
| { | |
| "epoch": 2.7, | |
| "grad_norm": 0.972205400466919, | |
| "learning_rate": 7.343406593406593e-05, | |
| "loss": 0.4603, | |
| "step": 9880 | |
| }, | |
| { | |
| "epoch": 2.7, | |
| "grad_norm": 0.8217471837997437, | |
| "learning_rate": 7.34065934065934e-05, | |
| "loss": 0.4551, | |
| "step": 9890 | |
| }, | |
| { | |
| "epoch": 2.7, | |
| "grad_norm": 0.9497684836387634, | |
| "learning_rate": 7.337912087912087e-05, | |
| "loss": 0.4398, | |
| "step": 9900 | |
| }, | |
| { | |
| "epoch": 2.71, | |
| "grad_norm": 1.2341935634613037, | |
| "learning_rate": 7.335164835164835e-05, | |
| "loss": 0.4412, | |
| "step": 9910 | |
| }, | |
| { | |
| "epoch": 2.71, | |
| "grad_norm": 1.0097248554229736, | |
| "learning_rate": 7.332417582417582e-05, | |
| "loss": 0.4536, | |
| "step": 9920 | |
| }, | |
| { | |
| "epoch": 2.71, | |
| "grad_norm": 1.1135015487670898, | |
| "learning_rate": 7.329670329670329e-05, | |
| "loss": 0.4482, | |
| "step": 9930 | |
| }, | |
| { | |
| "epoch": 2.72, | |
| "grad_norm": 1.0130164623260498, | |
| "learning_rate": 7.326923076923077e-05, | |
| "loss": 0.4499, | |
| "step": 9940 | |
| }, | |
| { | |
| "epoch": 2.72, | |
| "grad_norm": 0.802757740020752, | |
| "learning_rate": 7.324175824175824e-05, | |
| "loss": 0.4544, | |
| "step": 9950 | |
| }, | |
| { | |
| "epoch": 2.72, | |
| "grad_norm": 0.5492908954620361, | |
| "learning_rate": 7.321428571428571e-05, | |
| "loss": 0.4457, | |
| "step": 9960 | |
| }, | |
| { | |
| "epoch": 2.72, | |
| "grad_norm": 1.1192032098770142, | |
| "learning_rate": 7.318681318681319e-05, | |
| "loss": 0.4464, | |
| "step": 9970 | |
| }, | |
| { | |
| "epoch": 2.73, | |
| "grad_norm": 0.5543531179428101, | |
| "learning_rate": 7.315934065934066e-05, | |
| "loss": 0.456, | |
| "step": 9980 | |
| }, | |
| { | |
| "epoch": 2.73, | |
| "grad_norm": 0.741788387298584, | |
| "learning_rate": 7.313186813186812e-05, | |
| "loss": 0.4384, | |
| "step": 9990 | |
| }, | |
| { | |
| "epoch": 2.73, | |
| "grad_norm": 0.6588950157165527, | |
| "learning_rate": 7.31043956043956e-05, | |
| "loss": 0.4606, | |
| "step": 10000 | |
| }, | |
| { | |
| "epoch": 2.73, | |
| "grad_norm": 0.5929410457611084, | |
| "learning_rate": 7.307692307692307e-05, | |
| "loss": 0.4474, | |
| "step": 10010 | |
| }, | |
| { | |
| "epoch": 2.74, | |
| "grad_norm": 0.624821662902832, | |
| "learning_rate": 7.304945054945054e-05, | |
| "loss": 0.4486, | |
| "step": 10020 | |
| }, | |
| { | |
| "epoch": 2.74, | |
| "grad_norm": 0.7971046566963196, | |
| "learning_rate": 7.302197802197802e-05, | |
| "loss": 0.4433, | |
| "step": 10030 | |
| }, | |
| { | |
| "epoch": 2.74, | |
| "grad_norm": 0.81767737865448, | |
| "learning_rate": 7.299450549450549e-05, | |
| "loss": 0.4583, | |
| "step": 10040 | |
| }, | |
| { | |
| "epoch": 2.75, | |
| "grad_norm": 0.7773674726486206, | |
| "learning_rate": 7.296703296703296e-05, | |
| "loss": 0.4488, | |
| "step": 10050 | |
| }, | |
| { | |
| "epoch": 2.75, | |
| "grad_norm": 0.7860414981842041, | |
| "learning_rate": 7.293956043956044e-05, | |
| "loss": 0.4548, | |
| "step": 10060 | |
| }, | |
| { | |
| "epoch": 2.75, | |
| "grad_norm": 0.627571702003479, | |
| "learning_rate": 7.291208791208791e-05, | |
| "loss": 0.4477, | |
| "step": 10070 | |
| }, | |
| { | |
| "epoch": 2.75, | |
| "grad_norm": 0.7049760818481445, | |
| "learning_rate": 7.288461538461538e-05, | |
| "loss": 0.4376, | |
| "step": 10080 | |
| }, | |
| { | |
| "epoch": 2.76, | |
| "grad_norm": 0.5372808575630188, | |
| "learning_rate": 7.285714285714286e-05, | |
| "loss": 0.4552, | |
| "step": 10090 | |
| }, | |
| { | |
| "epoch": 2.76, | |
| "grad_norm": 0.6712595224380493, | |
| "learning_rate": 7.282967032967033e-05, | |
| "loss": 0.4555, | |
| "step": 10100 | |
| }, | |
| { | |
| "epoch": 2.76, | |
| "grad_norm": 0.4722815752029419, | |
| "learning_rate": 7.28021978021978e-05, | |
| "loss": 0.4536, | |
| "step": 10110 | |
| }, | |
| { | |
| "epoch": 2.76, | |
| "grad_norm": 0.48171865940093994, | |
| "learning_rate": 7.277472527472528e-05, | |
| "loss": 0.4542, | |
| "step": 10120 | |
| }, | |
| { | |
| "epoch": 2.77, | |
| "grad_norm": 0.8346227407455444, | |
| "learning_rate": 7.274725274725275e-05, | |
| "loss": 0.454, | |
| "step": 10130 | |
| }, | |
| { | |
| "epoch": 2.77, | |
| "grad_norm": 0.42196372151374817, | |
| "learning_rate": 7.271978021978021e-05, | |
| "loss": 0.4514, | |
| "step": 10140 | |
| }, | |
| { | |
| "epoch": 2.77, | |
| "grad_norm": 0.655756950378418, | |
| "learning_rate": 7.26923076923077e-05, | |
| "loss": 0.458, | |
| "step": 10150 | |
| }, | |
| { | |
| "epoch": 2.78, | |
| "grad_norm": 1.0113352537155151, | |
| "learning_rate": 7.266483516483516e-05, | |
| "loss": 0.4537, | |
| "step": 10160 | |
| }, | |
| { | |
| "epoch": 2.78, | |
| "grad_norm": 0.5985142588615417, | |
| "learning_rate": 7.263736263736263e-05, | |
| "loss": 0.4408, | |
| "step": 10170 | |
| }, | |
| { | |
| "epoch": 2.78, | |
| "grad_norm": 1.50113844871521, | |
| "learning_rate": 7.260989010989011e-05, | |
| "loss": 0.4516, | |
| "step": 10180 | |
| }, | |
| { | |
| "epoch": 2.78, | |
| "grad_norm": 1.536289095878601, | |
| "learning_rate": 7.258241758241758e-05, | |
| "loss": 0.4465, | |
| "step": 10190 | |
| }, | |
| { | |
| "epoch": 2.79, | |
| "grad_norm": 0.6726506948471069, | |
| "learning_rate": 7.255494505494505e-05, | |
| "loss": 0.4365, | |
| "step": 10200 | |
| }, | |
| { | |
| "epoch": 2.79, | |
| "grad_norm": 0.6390563249588013, | |
| "learning_rate": 7.252747252747253e-05, | |
| "loss": 0.4401, | |
| "step": 10210 | |
| }, | |
| { | |
| "epoch": 2.79, | |
| "grad_norm": 0.7105517983436584, | |
| "learning_rate": 7.25e-05, | |
| "loss": 0.445, | |
| "step": 10220 | |
| }, | |
| { | |
| "epoch": 2.79, | |
| "grad_norm": 0.9596685767173767, | |
| "learning_rate": 7.247252747252747e-05, | |
| "loss": 0.4367, | |
| "step": 10230 | |
| }, | |
| { | |
| "epoch": 2.8, | |
| "grad_norm": 1.161080002784729, | |
| "learning_rate": 7.244505494505495e-05, | |
| "loss": 0.4372, | |
| "step": 10240 | |
| }, | |
| { | |
| "epoch": 2.8, | |
| "grad_norm": 0.9476664662361145, | |
| "learning_rate": 7.241758241758242e-05, | |
| "loss": 0.4416, | |
| "step": 10250 | |
| }, | |
| { | |
| "epoch": 2.8, | |
| "grad_norm": 0.7320071458816528, | |
| "learning_rate": 7.239010989010989e-05, | |
| "loss": 0.4549, | |
| "step": 10260 | |
| }, | |
| { | |
| "epoch": 2.81, | |
| "grad_norm": 1.2132612466812134, | |
| "learning_rate": 7.236263736263737e-05, | |
| "loss": 0.4573, | |
| "step": 10270 | |
| }, | |
| { | |
| "epoch": 2.81, | |
| "grad_norm": 0.5772107839584351, | |
| "learning_rate": 7.233516483516484e-05, | |
| "loss": 0.4502, | |
| "step": 10280 | |
| }, | |
| { | |
| "epoch": 2.81, | |
| "grad_norm": 1.0478837490081787, | |
| "learning_rate": 7.23076923076923e-05, | |
| "loss": 0.452, | |
| "step": 10290 | |
| }, | |
| { | |
| "epoch": 2.81, | |
| "grad_norm": 0.6411312222480774, | |
| "learning_rate": 7.228021978021979e-05, | |
| "loss": 0.4424, | |
| "step": 10300 | |
| }, | |
| { | |
| "epoch": 2.82, | |
| "grad_norm": 0.7831696271896362, | |
| "learning_rate": 7.225274725274725e-05, | |
| "loss": 0.4525, | |
| "step": 10310 | |
| }, | |
| { | |
| "epoch": 2.82, | |
| "grad_norm": 0.45390376448631287, | |
| "learning_rate": 7.222527472527472e-05, | |
| "loss": 0.4401, | |
| "step": 10320 | |
| }, | |
| { | |
| "epoch": 2.82, | |
| "grad_norm": 0.8081623911857605, | |
| "learning_rate": 7.21978021978022e-05, | |
| "loss": 0.4396, | |
| "step": 10330 | |
| }, | |
| { | |
| "epoch": 2.82, | |
| "grad_norm": 1.091840147972107, | |
| "learning_rate": 7.217032967032967e-05, | |
| "loss": 0.4481, | |
| "step": 10340 | |
| }, | |
| { | |
| "epoch": 2.83, | |
| "grad_norm": 1.3704334497451782, | |
| "learning_rate": 7.214285714285714e-05, | |
| "loss": 0.4563, | |
| "step": 10350 | |
| }, | |
| { | |
| "epoch": 2.83, | |
| "grad_norm": 0.8897873163223267, | |
| "learning_rate": 7.211538461538462e-05, | |
| "loss": 0.4537, | |
| "step": 10360 | |
| }, | |
| { | |
| "epoch": 2.83, | |
| "grad_norm": 1.2123398780822754, | |
| "learning_rate": 7.208791208791209e-05, | |
| "loss": 0.4421, | |
| "step": 10370 | |
| }, | |
| { | |
| "epoch": 2.84, | |
| "grad_norm": 0.7756679058074951, | |
| "learning_rate": 7.206043956043956e-05, | |
| "loss": 0.4455, | |
| "step": 10380 | |
| }, | |
| { | |
| "epoch": 2.84, | |
| "grad_norm": 0.5030421614646912, | |
| "learning_rate": 7.203296703296704e-05, | |
| "loss": 0.4482, | |
| "step": 10390 | |
| }, | |
| { | |
| "epoch": 2.84, | |
| "grad_norm": 0.5804952383041382, | |
| "learning_rate": 7.200549450549451e-05, | |
| "loss": 0.4468, | |
| "step": 10400 | |
| }, | |
| { | |
| "epoch": 2.84, | |
| "grad_norm": 0.5755320191383362, | |
| "learning_rate": 7.197802197802198e-05, | |
| "loss": 0.4473, | |
| "step": 10410 | |
| }, | |
| { | |
| "epoch": 2.85, | |
| "grad_norm": 0.7598744034767151, | |
| "learning_rate": 7.195054945054946e-05, | |
| "loss": 0.4609, | |
| "step": 10420 | |
| }, | |
| { | |
| "epoch": 2.85, | |
| "grad_norm": 0.5830199122428894, | |
| "learning_rate": 7.192307692307693e-05, | |
| "loss": 0.4574, | |
| "step": 10430 | |
| }, | |
| { | |
| "epoch": 2.85, | |
| "grad_norm": 0.6908131837844849, | |
| "learning_rate": 7.18956043956044e-05, | |
| "loss": 0.4419, | |
| "step": 10440 | |
| }, | |
| { | |
| "epoch": 2.85, | |
| "grad_norm": 1.064139485359192, | |
| "learning_rate": 7.186813186813188e-05, | |
| "loss": 0.4467, | |
| "step": 10450 | |
| }, | |
| { | |
| "epoch": 2.86, | |
| "grad_norm": 1.5694611072540283, | |
| "learning_rate": 7.184065934065934e-05, | |
| "loss": 0.4638, | |
| "step": 10460 | |
| }, | |
| { | |
| "epoch": 2.86, | |
| "grad_norm": 0.6071065664291382, | |
| "learning_rate": 7.181318681318681e-05, | |
| "loss": 0.4449, | |
| "step": 10470 | |
| }, | |
| { | |
| "epoch": 2.86, | |
| "grad_norm": 0.7417973279953003, | |
| "learning_rate": 7.17857142857143e-05, | |
| "loss": 0.4452, | |
| "step": 10480 | |
| }, | |
| { | |
| "epoch": 2.87, | |
| "grad_norm": 1.0433728694915771, | |
| "learning_rate": 7.175824175824176e-05, | |
| "loss": 0.4413, | |
| "step": 10490 | |
| }, | |
| { | |
| "epoch": 2.87, | |
| "grad_norm": 0.5444348454475403, | |
| "learning_rate": 7.173076923076923e-05, | |
| "loss": 0.453, | |
| "step": 10500 | |
| }, | |
| { | |
| "epoch": 2.87, | |
| "grad_norm": 1.0602647066116333, | |
| "learning_rate": 7.170329670329671e-05, | |
| "loss": 0.4489, | |
| "step": 10510 | |
| }, | |
| { | |
| "epoch": 2.87, | |
| "grad_norm": 0.7549176216125488, | |
| "learning_rate": 7.167582417582418e-05, | |
| "loss": 0.4322, | |
| "step": 10520 | |
| }, | |
| { | |
| "epoch": 2.88, | |
| "grad_norm": 0.7759855389595032, | |
| "learning_rate": 7.164835164835165e-05, | |
| "loss": 0.4459, | |
| "step": 10530 | |
| }, | |
| { | |
| "epoch": 2.88, | |
| "grad_norm": 0.6525812149047852, | |
| "learning_rate": 7.162087912087913e-05, | |
| "loss": 0.4533, | |
| "step": 10540 | |
| }, | |
| { | |
| "epoch": 2.88, | |
| "grad_norm": 0.7948151230812073, | |
| "learning_rate": 7.15934065934066e-05, | |
| "loss": 0.4412, | |
| "step": 10550 | |
| }, | |
| { | |
| "epoch": 2.88, | |
| "grad_norm": 0.8408129215240479, | |
| "learning_rate": 7.156593406593407e-05, | |
| "loss": 0.4441, | |
| "step": 10560 | |
| }, | |
| { | |
| "epoch": 2.89, | |
| "grad_norm": 0.8035346865653992, | |
| "learning_rate": 7.153846153846155e-05, | |
| "loss": 0.463, | |
| "step": 10570 | |
| }, | |
| { | |
| "epoch": 2.89, | |
| "grad_norm": 0.9992803335189819, | |
| "learning_rate": 7.151098901098902e-05, | |
| "loss": 0.4426, | |
| "step": 10580 | |
| }, | |
| { | |
| "epoch": 2.89, | |
| "grad_norm": 0.7550234794616699, | |
| "learning_rate": 7.148351648351648e-05, | |
| "loss": 0.4455, | |
| "step": 10590 | |
| }, | |
| { | |
| "epoch": 2.9, | |
| "grad_norm": 0.47757139801979065, | |
| "learning_rate": 7.145604395604397e-05, | |
| "loss": 0.4487, | |
| "step": 10600 | |
| }, | |
| { | |
| "epoch": 2.9, | |
| "grad_norm": 0.6475192308425903, | |
| "learning_rate": 7.142857142857143e-05, | |
| "loss": 0.4493, | |
| "step": 10610 | |
| }, | |
| { | |
| "epoch": 2.9, | |
| "grad_norm": 1.0151840448379517, | |
| "learning_rate": 7.14010989010989e-05, | |
| "loss": 0.432, | |
| "step": 10620 | |
| }, | |
| { | |
| "epoch": 2.9, | |
| "grad_norm": 0.586617648601532, | |
| "learning_rate": 7.137362637362638e-05, | |
| "loss": 0.44, | |
| "step": 10630 | |
| }, | |
| { | |
| "epoch": 2.91, | |
| "grad_norm": 0.49713996052742004, | |
| "learning_rate": 7.134615384615385e-05, | |
| "loss": 0.4411, | |
| "step": 10640 | |
| }, | |
| { | |
| "epoch": 2.91, | |
| "grad_norm": 0.8117252588272095, | |
| "learning_rate": 7.131868131868132e-05, | |
| "loss": 0.4505, | |
| "step": 10650 | |
| }, | |
| { | |
| "epoch": 2.91, | |
| "grad_norm": 0.9269289374351501, | |
| "learning_rate": 7.12912087912088e-05, | |
| "loss": 0.4481, | |
| "step": 10660 | |
| }, | |
| { | |
| "epoch": 2.91, | |
| "grad_norm": 1.0942814350128174, | |
| "learning_rate": 7.126373626373627e-05, | |
| "loss": 0.4407, | |
| "step": 10670 | |
| }, | |
| { | |
| "epoch": 2.92, | |
| "grad_norm": 0.6903828382492065, | |
| "learning_rate": 7.123626373626374e-05, | |
| "loss": 0.439, | |
| "step": 10680 | |
| }, | |
| { | |
| "epoch": 2.92, | |
| "grad_norm": 0.8993271589279175, | |
| "learning_rate": 7.120879120879122e-05, | |
| "loss": 0.4421, | |
| "step": 10690 | |
| }, | |
| { | |
| "epoch": 2.92, | |
| "grad_norm": 0.8367154002189636, | |
| "learning_rate": 7.118131868131869e-05, | |
| "loss": 0.4492, | |
| "step": 10700 | |
| }, | |
| { | |
| "epoch": 2.93, | |
| "grad_norm": 0.7720317244529724, | |
| "learning_rate": 7.115384615384616e-05, | |
| "loss": 0.4521, | |
| "step": 10710 | |
| }, | |
| { | |
| "epoch": 2.93, | |
| "grad_norm": 1.0028263330459595, | |
| "learning_rate": 7.112637362637364e-05, | |
| "loss": 0.4537, | |
| "step": 10720 | |
| }, | |
| { | |
| "epoch": 2.93, | |
| "grad_norm": 0.663088321685791, | |
| "learning_rate": 7.10989010989011e-05, | |
| "loss": 0.4484, | |
| "step": 10730 | |
| }, | |
| { | |
| "epoch": 2.93, | |
| "grad_norm": 1.360903263092041, | |
| "learning_rate": 7.107142857142857e-05, | |
| "loss": 0.4663, | |
| "step": 10740 | |
| }, | |
| { | |
| "epoch": 2.94, | |
| "grad_norm": 1.5924625396728516, | |
| "learning_rate": 7.104395604395606e-05, | |
| "loss": 0.4599, | |
| "step": 10750 | |
| }, | |
| { | |
| "epoch": 2.94, | |
| "grad_norm": 0.7223337888717651, | |
| "learning_rate": 7.101648351648352e-05, | |
| "loss": 0.4528, | |
| "step": 10760 | |
| }, | |
| { | |
| "epoch": 2.94, | |
| "grad_norm": 0.8362312316894531, | |
| "learning_rate": 7.098901098901099e-05, | |
| "loss": 0.4469, | |
| "step": 10770 | |
| }, | |
| { | |
| "epoch": 2.94, | |
| "grad_norm": 0.5594087839126587, | |
| "learning_rate": 7.096153846153847e-05, | |
| "loss": 0.4484, | |
| "step": 10780 | |
| }, | |
| { | |
| "epoch": 2.95, | |
| "grad_norm": 0.8571014404296875, | |
| "learning_rate": 7.093406593406594e-05, | |
| "loss": 0.4414, | |
| "step": 10790 | |
| }, | |
| { | |
| "epoch": 2.95, | |
| "grad_norm": 0.9150867462158203, | |
| "learning_rate": 7.090659340659341e-05, | |
| "loss": 0.453, | |
| "step": 10800 | |
| }, | |
| { | |
| "epoch": 2.95, | |
| "grad_norm": 0.6607587337493896, | |
| "learning_rate": 7.087912087912089e-05, | |
| "loss": 0.4507, | |
| "step": 10810 | |
| }, | |
| { | |
| "epoch": 2.96, | |
| "grad_norm": 0.7428970336914062, | |
| "learning_rate": 7.085164835164836e-05, | |
| "loss": 0.4498, | |
| "step": 10820 | |
| }, | |
| { | |
| "epoch": 2.96, | |
| "grad_norm": 0.9267810583114624, | |
| "learning_rate": 7.082417582417583e-05, | |
| "loss": 0.4536, | |
| "step": 10830 | |
| }, | |
| { | |
| "epoch": 2.96, | |
| "grad_norm": 0.4540146291255951, | |
| "learning_rate": 7.079670329670331e-05, | |
| "loss": 0.4396, | |
| "step": 10840 | |
| }, | |
| { | |
| "epoch": 2.96, | |
| "grad_norm": 1.3146427869796753, | |
| "learning_rate": 7.076923076923078e-05, | |
| "loss": 0.4439, | |
| "step": 10850 | |
| }, | |
| { | |
| "epoch": 2.97, | |
| "grad_norm": 0.7954951524734497, | |
| "learning_rate": 7.074175824175825e-05, | |
| "loss": 0.4452, | |
| "step": 10860 | |
| }, | |
| { | |
| "epoch": 2.97, | |
| "grad_norm": 0.9023503661155701, | |
| "learning_rate": 7.071428571428573e-05, | |
| "loss": 0.4248, | |
| "step": 10870 | |
| }, | |
| { | |
| "epoch": 2.97, | |
| "grad_norm": 0.7970243692398071, | |
| "learning_rate": 7.06868131868132e-05, | |
| "loss": 0.4546, | |
| "step": 10880 | |
| }, | |
| { | |
| "epoch": 2.97, | |
| "grad_norm": 0.6860815286636353, | |
| "learning_rate": 7.065934065934066e-05, | |
| "loss": 0.4393, | |
| "step": 10890 | |
| }, | |
| { | |
| "epoch": 2.98, | |
| "grad_norm": 0.6077113747596741, | |
| "learning_rate": 7.063186813186815e-05, | |
| "loss": 0.4438, | |
| "step": 10900 | |
| }, | |
| { | |
| "epoch": 2.98, | |
| "grad_norm": 0.5479728579521179, | |
| "learning_rate": 7.060439560439561e-05, | |
| "loss": 0.4474, | |
| "step": 10910 | |
| }, | |
| { | |
| "epoch": 2.98, | |
| "grad_norm": 0.8500062823295593, | |
| "learning_rate": 7.057692307692308e-05, | |
| "loss": 0.4403, | |
| "step": 10920 | |
| }, | |
| { | |
| "epoch": 2.99, | |
| "grad_norm": 0.8599127531051636, | |
| "learning_rate": 7.054945054945056e-05, | |
| "loss": 0.4367, | |
| "step": 10930 | |
| }, | |
| { | |
| "epoch": 2.99, | |
| "grad_norm": 1.3294146060943604, | |
| "learning_rate": 7.052197802197803e-05, | |
| "loss": 0.4352, | |
| "step": 10940 | |
| }, | |
| { | |
| "epoch": 2.99, | |
| "grad_norm": 0.7617499828338623, | |
| "learning_rate": 7.04945054945055e-05, | |
| "loss": 0.4454, | |
| "step": 10950 | |
| }, | |
| { | |
| "epoch": 2.99, | |
| "grad_norm": 1.2405683994293213, | |
| "learning_rate": 7.046703296703298e-05, | |
| "loss": 0.4513, | |
| "step": 10960 | |
| }, | |
| { | |
| "epoch": 3.0, | |
| "grad_norm": 0.768068253993988, | |
| "learning_rate": 7.043956043956045e-05, | |
| "loss": 0.4448, | |
| "step": 10970 | |
| }, | |
| { | |
| "epoch": 3.0, | |
| "grad_norm": 1.0528051853179932, | |
| "learning_rate": 7.041208791208792e-05, | |
| "loss": 0.4436, | |
| "step": 10980 | |
| }, | |
| { | |
| "epoch": 3.0, | |
| "eval_loss": 0.44340217113494873, | |
| "eval_runtime": 1239.9169, | |
| "eval_samples_per_second": 235.588, | |
| "eval_steps_per_second": 3.682, | |
| "step": 10982 | |
| } | |
| ], | |
| "logging_steps": 10, | |
| "max_steps": 36600, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 10, | |
| "save_steps": 500, | |
| "total_flos": 4.683801644117262e+18, | |
| "train_batch_size": 6, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |
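The file above appears to be the `trainer_state.json` that `transformers.Trainer` writes into each checkpoint directory. A minimal sketch of loading and summarizing it, assuming the file sits in the working directory (the path is an assumption); note that Python's `json` module tolerates the non-standard `NaN` token that strict JSON parsers reject:

```python
import json
import math

# Load the Trainer state. json.load parses the non-standard NaN token
# to float('nan') by default, so the file reads fine in Python even
# though strict JSON parsers fail on it.
with open("trainer_state.json") as f:  # hypothetical path
    state = json.load(f)

# log_history mixes training entries ("loss") and eval entries ("eval_loss").
train_log = [e for e in state["log_history"] if "loss" in e]
eval_log = [e for e in state["log_history"] if "eval_loss" in e]

# Ignore steps whose gradient norm was logged as NaN.
finite = [e for e in train_log if not math.isnan(e.get("grad_norm", 0.0))]

print(f"logged train steps: {len(train_log)} ({len(finite)} with finite grad_norm)")
print(f"progress: {train_log[-1]['step']} of {state['max_steps']} steps "
      f"({state['num_train_epochs']} epochs planned)")
for e in eval_log:
    print(f"epoch {e['epoch']:.2f}: eval_loss={e['eval_loss']:.4f}")
```

Run against this file, the summary would report the single eval entry at epoch 3.0 with eval_loss ≈ 0.4434, after 10,980 logged training steps of the 36,600 planned.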