{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.0,
  "eval_steps": 500,
  "global_step": 5943,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0.0016826518593303045, "grad_norm": 5.367858933563703, "learning_rate": 4.9999999999999996e-06, "loss": 0.9537, "step": 10 },
    { "epoch": 0.003365303718660609, "grad_norm": 9.386746384686745, "learning_rate": 9.999999999999999e-06, "loss": 0.943, "step": 20 },
    { "epoch": 0.005047955577990914, "grad_norm": 7.387362447577942, "learning_rate": 1.5e-05, "loss": 0.934, "step": 30 },
    { "epoch": 0.006730607437321218, "grad_norm": 6.9256319824932655, "learning_rate": 1.9999999999999998e-05, "loss": 0.8376, "step": 40 },
    { "epoch": 0.008413259296651522, "grad_norm": 9.1148382590838, "learning_rate": 2.5e-05, "loss": 0.8484, "step": 50 },
    { "epoch": 0.010095911155981827, "grad_norm": 3.9989232759892426, "learning_rate": 3e-05, "loss": 0.8097, "step": 60 },
    { "epoch": 0.011778563015312132, "grad_norm": 3.892371218590039, "learning_rate": 2.9999786123888308e-05, "loss": 0.7811, "step": 70 },
    { "epoch": 0.013461214874642436, "grad_norm": 8.096662196282066, "learning_rate": 2.9999144501652298e-05, "loss": 0.7446, "step": 80 },
    { "epoch": 0.01514386673397274, "grad_norm": 1.5769306611206149, "learning_rate": 2.9998075151588992e-05, "loss": 0.7258, "step": 90 },
    { "epoch": 0.016826518593303044, "grad_norm": 8.47430485487969, "learning_rate": 2.999657810419285e-05, "loss": 0.7052, "step": 100 },
    { "epoch": 0.01850917045263335, "grad_norm": 2.363071299913598, "learning_rate": 2.999465340215489e-05, "loss": 0.7657, "step": 110 },
    { "epoch": 0.020191822311963654, "grad_norm": 1.9252385425154874, "learning_rate": 2.999230110036149e-05, "loss": 0.7329, "step": 120 },
    { "epoch": 0.02187447417129396, "grad_norm": 8.946028475031488, "learning_rate": 2.99895212658928e-05, "loss": 0.7304, "step": 130 },
    { "epoch": 0.023557126030624265, "grad_norm": 6.877609312630206, "learning_rate": 2.9986313978020846e-05, "loss": 0.7453, "step": 140 },
    { "epoch": 0.02523977788995457, "grad_norm": 2.5256324882367993, "learning_rate": 2.9982679328207262e-05, "loss": 0.7366, "step": 150 },
    { "epoch": 0.02692242974928487, "grad_norm": 2.709550398238738, "learning_rate": 2.9978617420100692e-05, "loss": 0.7258, "step": 160 },
    { "epoch": 0.028605081608615177, "grad_norm": 1.543550019689774, "learning_rate": 2.9974128369533805e-05, "loss": 0.7372, "step": 170 },
    { "epoch": 0.03028773346794548, "grad_norm": 3.3453966881155504, "learning_rate": 2.9969212304520034e-05, "loss": 0.743, "step": 180 },
    { "epoch": 0.03197038532727579, "grad_norm": 1.922001656181265, "learning_rate": 2.9963869365249895e-05, "loss": 0.7819, "step": 190 },
    { "epoch": 0.03365303718660609, "grad_norm": 2.0611188483400036, "learning_rate": 2.995809970408699e-05, "loss": 0.7155, "step": 200 },
    { "epoch": 0.0353356890459364, "grad_norm": 1.5313041833127259, "learning_rate": 2.9951903485563685e-05, "loss": 0.7322, "step": 210 },
    { "epoch": 0.0370183409052667, "grad_norm": 2.0124191694435085, "learning_rate": 2.99452808863764e-05, "loss": 0.6759, "step": 220 },
    { "epoch": 0.03870099276459701, "grad_norm": 3.182123324389477, "learning_rate": 2.993823209538056e-05, "loss": 0.6953, "step": 230 },
    { "epoch": 0.04038364462392731, "grad_norm": 1.6122782177661379, "learning_rate": 2.9930757313585238e-05, "loss": 0.6953, "step": 240 },
    { "epoch": 0.04206629648325761, "grad_norm": 2.2027482596695647, "learning_rate": 2.9922856754147406e-05, "loss": 0.7301, "step": 250 },
    { "epoch": 0.04374894834258792, "grad_norm": 2.6782477155989213, "learning_rate": 2.9914530642365852e-05, "loss": 0.6891, "step": 260 },
    { "epoch": 0.04543160020191822, "grad_norm": 1.9740401144541417, "learning_rate": 2.990577921567476e-05, "loss": 0.7231, "step": 270 },
    { "epoch": 0.04711425206124853, "grad_norm": 1.719874620968932, "learning_rate": 2.989660272363696e-05, "loss": 0.7505, "step": 280 },
    { "epoch": 0.04879690392057883, "grad_norm": 1.3138364164203409, "learning_rate": 2.988700142793676e-05, "loss": 0.7116, "step": 290 },
    { "epoch": 0.05047955577990914, "grad_norm": 5.853627389344256, "learning_rate": 2.9876975602372536e-05, "loss": 0.719, "step": 300 },
    { "epoch": 0.05216220763923944, "grad_norm": 2.347259437170711, "learning_rate": 2.9866525532848906e-05, "loss": 0.6803, "step": 310 },
    { "epoch": 0.05384485949856974, "grad_norm": 1.937679220955038, "learning_rate": 2.9855651517368567e-05, "loss": 0.7461, "step": 320 },
    { "epoch": 0.05552751135790005, "grad_norm": 1.6661300351569575, "learning_rate": 2.9844353866023802e-05, "loss": 0.7472, "step": 330 },
    { "epoch": 0.05721016321723035, "grad_norm": 2.357915869204484, "learning_rate": 2.9832632900987642e-05, "loss": 0.7148, "step": 340 },
    { "epoch": 0.05889281507656066, "grad_norm": 4.398815186243292, "learning_rate": 2.982048895650468e-05, "loss": 0.6992, "step": 350 },
    { "epoch": 0.06057546693589096, "grad_norm": 12.662682224480092, "learning_rate": 2.9807922378881537e-05, "loss": 0.7539, "step": 360 },
    { "epoch": 0.06225811879522127, "grad_norm": 0.8642696401357872, "learning_rate": 2.979493352647697e-05, "loss": 0.7212, "step": 370 },
    { "epoch": 0.06394077065455157, "grad_norm": 27.047937858232604, "learning_rate": 2.9781522769691686e-05, "loss": 0.722, "step": 380 },
    { "epoch": 0.06562342251388188, "grad_norm": 2.598805292448644, "learning_rate": 2.9767690490957758e-05, "loss": 0.7065, "step": 390 },
    { "epoch": 0.06730607437321218, "grad_norm": 1.2314762895092763, "learning_rate": 2.9753437084727713e-05, "loss": 0.7498, "step": 400 },
    { "epoch": 0.06898872623254249, "grad_norm": 1.6421909669790502, "learning_rate": 2.9738762957463292e-05, "loss": 0.6992, "step": 410 },
    { "epoch": 0.0706713780918728, "grad_norm": 2.023552968622588, "learning_rate": 2.9723668527623877e-05, "loss": 0.6943, "step": 420 },
    { "epoch": 0.0723540299512031, "grad_norm": 1.5172337910969138, "learning_rate": 2.9708154225654526e-05, "loss": 0.6987, "step": 430 },
    { "epoch": 0.0740366818105334, "grad_norm": 1.197852135730745, "learning_rate": 2.9692220493973712e-05, "loss": 0.7302, "step": 440 },
    { "epoch": 0.0757193336698637, "grad_norm": 2.4396443837967183, "learning_rate": 2.9675867786960718e-05, "loss": 0.7318, "step": 450 },
    { "epoch": 0.07740198552919401, "grad_norm": 1.4599851880563282, "learning_rate": 2.9659096570942654e-05, "loss": 0.6941, "step": 460 },
    { "epoch": 0.07908463738852431, "grad_norm": 1.117755825364562, "learning_rate": 2.9641907324181194e-05, "loss": 0.7399, "step": 470 },
    { "epoch": 0.08076728924785462, "grad_norm": 2.9235378164576242, "learning_rate": 2.96243005368589e-05, "loss": 0.7207, "step": 480 },
    { "epoch": 0.08244994110718493, "grad_norm": 7.308883163781362, "learning_rate": 2.960627671106527e-05, "loss": 0.682, "step": 490 },
    { "epoch": 0.08413259296651522, "grad_norm": 3.4394827932955234, "learning_rate": 2.9587836360782405e-05, "loss": 0.708, "step": 500 },
    { "epoch": 0.08581524482584553, "grad_norm": 3.2314529856927634, "learning_rate": 2.9568980011870357e-05, "loss": 0.7335, "step": 510 },
    { "epoch": 0.08749789668517584, "grad_norm": 1.825724533695325, "learning_rate": 2.954970820205214e-05, "loss": 0.6951, "step": 520 },
    { "epoch": 0.08918054854450615, "grad_norm": 3.3231741746640076, "learning_rate": 2.9530021480898393e-05, "loss": 0.7793, "step": 530 },
    { "epoch": 0.09086320040383644, "grad_norm": 1.3097651462571123, "learning_rate": 2.9509920409811696e-05, "loss": 0.7087, "step": 540 },
    { "epoch": 0.09254585226316675, "grad_norm": 6.685911471215255, "learning_rate": 2.9489405562010565e-05, "loss": 0.6906, "step": 550 },
    { "epoch": 0.09422850412249706, "grad_norm": 2.870746617513948, "learning_rate": 2.9468477522513132e-05, "loss": 0.7028, "step": 560 },
    { "epoch": 0.09591115598182735, "grad_norm": 1.782555352805469, "learning_rate": 2.9447136888120408e-05, "loss": 0.6901, "step": 570 },
    { "epoch": 0.09759380784115766, "grad_norm": 2.336519711000487, "learning_rate": 2.9425384267399327e-05, "loss": 0.7779, "step": 580 },
    { "epoch": 0.09927645970048797, "grad_norm": 8.935574410818228, "learning_rate": 2.940322028066534e-05, "loss": 0.7503, "step": 590 },
    { "epoch": 0.10095911155981828, "grad_norm": 2.754713786882031, "learning_rate": 2.938064555996476e-05, "loss": 0.7208, "step": 600 },
    { "epoch": 0.10264176341914857, "grad_norm": 1.5082503557652136, "learning_rate": 2.9357660749056713e-05, "loss": 0.7169, "step": 610 },
    { "epoch": 0.10432441527847888, "grad_norm": 9.04522194526273, "learning_rate": 2.9334266503394803e-05, "loss": 0.6927, "step": 620 },
    { "epoch": 0.10600706713780919, "grad_norm": 55.28278686388287, "learning_rate": 2.9310463490108397e-05, "loss": 0.7107, "step": 630 },
    { "epoch": 0.10768971899713949, "grad_norm": 3.721916069105249, "learning_rate": 2.928625238798362e-05, "loss": 0.6951, "step": 640 },
    { "epoch": 0.1093723708564698, "grad_norm": 2.5040797323750112, "learning_rate": 2.9261633887443993e-05, "loss": 0.6916, "step": 650 },
    { "epoch": 0.1110550227158001, "grad_norm": 3.5468924769840617, "learning_rate": 2.9236608690530738e-05, "loss": 0.7077, "step": 660 },
    { "epoch": 0.11273767457513041, "grad_norm": 3.0266819778200746, "learning_rate": 2.921117751088276e-05, "loss": 0.6952, "step": 670 },
    { "epoch": 0.1144203264344607, "grad_norm": 1.634743894298146, "learning_rate": 2.91853410737163e-05, "loss": 0.6936, "step": 680 },
    { "epoch": 0.11610297829379101, "grad_norm": 1.0925365801520501, "learning_rate": 2.915910011580426e-05, "loss": 0.7317, "step": 690 },
    { "epoch": 0.11778563015312132, "grad_norm": 1.6959112138540386, "learning_rate": 2.9132455385455176e-05, "loss": 0.6917, "step": 700 },
    { "epoch": 0.11946828201245162, "grad_norm": 1.9723433746891168, "learning_rate": 2.9105407642491895e-05, "loss": 0.7209, "step": 710 },
    { "epoch": 0.12115093387178193, "grad_norm": 2.1537215293733833, "learning_rate": 2.907795765822989e-05, "loss": 0.7488, "step": 720 },
    { "epoch": 0.12283358573111224, "grad_norm": 3.227101869737169, "learning_rate": 2.9050106215455283e-05, "loss": 0.7152, "step": 730 },
    { "epoch": 0.12451623759044254, "grad_norm": 2.7222358893572554, "learning_rate": 2.9021854108402516e-05, "loss": 0.708, "step": 740 },
    { "epoch": 0.12619888944977284, "grad_norm": 2.1054843767538136, "learning_rate": 2.8993202142731693e-05, "loss": 0.7251, "step": 750 },
    { "epoch": 0.12788154130910315, "grad_norm": 2.11845883419618, "learning_rate": 2.8964151135505616e-05, "loss": 0.7405, "step": 760 },
    { "epoch": 0.12956419316843346, "grad_norm": 13.171512404187755, "learning_rate": 2.8934701915166477e-05, "loss": 0.6844, "step": 770 },
    { "epoch": 0.13124684502776376, "grad_norm": 2.7633375632879127, "learning_rate": 2.890485532151225e-05, "loss": 0.6766, "step": 780 },
    { "epoch": 0.13292949688709407, "grad_norm": 1.8420785342693768, "learning_rate": 2.887461220567271e-05, "loss": 0.7037, "step": 790 },
    { "epoch": 0.13461214874642435, "grad_norm": 1.5557447509529954, "learning_rate": 2.8843973430085204e-05, "loss": 0.6991, "step": 800 },
    { "epoch": 0.13629480060575466, "grad_norm": 1.9295826624758823, "learning_rate": 2.8812939868470016e-05, "loss": 0.6956, "step": 810 },
    { "epoch": 0.13797745246508497, "grad_norm": 3.3211216557707126, "learning_rate": 2.878151240580548e-05, "loss": 0.6774, "step": 820 },
    { "epoch": 0.13966010432441528, "grad_norm": 4.196064403930616, "learning_rate": 2.874969193830274e-05, "loss": 0.6752, "step": 830 },
    { "epoch": 0.1413427561837456, "grad_norm": 5.574976270137628, "learning_rate": 2.871747937338016e-05, "loss": 0.6553, "step": 840 },
    { "epoch": 0.1430254080430759, "grad_norm": 1.6494038718740478, "learning_rate": 2.8684875629637505e-05, "loss": 0.7152, "step": 850 },
    { "epoch": 0.1447080599024062, "grad_norm": 1.3061892609414858, "learning_rate": 2.8651881636829698e-05, "loss": 0.7462, "step": 860 },
    { "epoch": 0.1463907117617365, "grad_norm": 4.321044418392694, "learning_rate": 2.861849833584032e-05, "loss": 0.6902, "step": 870 },
    { "epoch": 0.1480733636210668, "grad_norm": 2.9444722968009764, "learning_rate": 2.8584726678654787e-05, "loss": 0.6813, "step": 880 },
    { "epoch": 0.1497560154803971, "grad_norm": 1.4940245340163587, "learning_rate": 2.85505676283332e-05, "loss": 0.689, "step": 890 },
    { "epoch": 0.1514386673397274, "grad_norm": 3.3704010040589565, "learning_rate": 2.851602215898287e-05, "loss": 0.6953, "step": 900 },
    { "epoch": 0.15312131919905772, "grad_norm": 1.6597144402924948, "learning_rate": 2.8481091255730552e-05, "loss": 0.7277, "step": 910 },
    { "epoch": 0.15480397105838803, "grad_norm": 10.969872224353953, "learning_rate": 2.844577591469435e-05, "loss": 0.7142, "step": 920 },
    { "epoch": 0.15648662291771834, "grad_norm": 8.45616831264245, "learning_rate": 2.8410077142955304e-05, "loss": 0.7197, "step": 930 },
    { "epoch": 0.15816927477704862, "grad_norm": 2.9594258901214427, "learning_rate": 2.8373995958528683e-05, "loss": 0.7351, "step": 940 },
    { "epoch": 0.15985192663637893, "grad_norm": 2.168676312428759, "learning_rate": 2.8337533390334942e-05, "loss": 0.7544, "step": 950 },
    { "epoch": 0.16153457849570924, "grad_norm": 7.898767360662744, "learning_rate": 2.8300690478170388e-05, "loss": 0.7015, "step": 960 },
    { "epoch": 0.16321723035503954, "grad_norm": 16.83650212945308, "learning_rate": 2.826346827267753e-05, "loss": 0.7139, "step": 970 },
    { "epoch": 0.16489988221436985, "grad_norm": 2.3791337429068977, "learning_rate": 2.8225867835315114e-05, "loss": 0.7053, "step": 980 },
    { "epoch": 0.16658253407370016, "grad_norm": 1.9679363325295285, "learning_rate": 2.8187890238327842e-05, "loss": 0.7313, "step": 990 },
    { "epoch": 0.16826518593303044, "grad_norm": 1.4822625638777076, "learning_rate": 2.814953656471583e-05, "loss": 0.7085, "step": 1000 },
    { "epoch": 0.16994783779236075, "grad_norm": 2.647291447509443, "learning_rate": 2.8110807908203682e-05, "loss": 0.6638, "step": 1010 },
    { "epoch": 0.17163048965169106, "grad_norm": 2.969379719654364, "learning_rate": 2.8071705373209328e-05, "loss": 0.6884, "step": 1020 },
    { "epoch": 0.17331314151102137, "grad_norm": 1.1163745403124403, "learning_rate": 2.803223007481252e-05, "loss": 0.6885, "step": 1030 },
    { "epoch": 0.17499579337035168, "grad_norm": 1.2686557979094786, "learning_rate": 2.7992383138723034e-05, "loss": 0.7037, "step": 1040 },
    { "epoch": 0.17667844522968199, "grad_norm": 4.648945448875594, "learning_rate": 2.7952165701248573e-05, "loss": 0.6933, "step": 1050 },
    { "epoch": 0.1783610970890123, "grad_norm": 4.723564874595428, "learning_rate": 2.7911578909262353e-05, "loss": 0.7144, "step": 1060 },
    { "epoch": 0.18004374894834257, "grad_norm": 5.211806926801946, "learning_rate": 2.787062392017041e-05, "loss": 0.7266, "step": 1070 },
    { "epoch": 0.18172640080767288, "grad_norm": 1.3725560316172503, "learning_rate": 2.7829301901878592e-05, "loss": 0.7445, "step": 1080 },
    { "epoch": 0.1834090526670032, "grad_norm": 0.9012241436004484, "learning_rate": 2.7787614032759243e-05, "loss": 0.6986, "step": 1090 },
    { "epoch": 0.1850917045263335, "grad_norm": 2.912544243603394, "learning_rate": 2.7745561501617605e-05, "loss": 0.7173, "step": 1100 },
    { "epoch": 0.1867743563856638, "grad_norm": 1.4248442614931247, "learning_rate": 2.7703145507657923e-05, "loss": 0.7035, "step": 1110 },
    { "epoch": 0.18845700824499412, "grad_norm": 2.186609904533333, "learning_rate": 2.766036726044926e-05, "loss": 0.7371, "step": 1120 },
    { "epoch": 0.19013966010432443, "grad_norm": 2.0524595532166603, "learning_rate": 2.7617227979890957e-05, "loss": 0.6986, "step": 1130 },
    { "epoch": 0.1918223119636547, "grad_norm": 1.8227045280907195, "learning_rate": 2.7573728896177897e-05, "loss": 0.7075, "step": 1140 },
    { "epoch": 0.19350496382298502, "grad_norm": 1.8425998009576734, "learning_rate": 2.7529871249765397e-05, "loss": 0.6897, "step": 1150 },
    { "epoch": 0.19518761568231532, "grad_norm": 5.3035191638420836, "learning_rate": 2.7485656291333845e-05, "loss": 0.7027, "step": 1160 },
    { "epoch": 0.19687026754164563, "grad_norm": 3.3228474353685504, "learning_rate": 2.7441085281753028e-05, "loss": 0.7091, "step": 1170 },
    { "epoch": 0.19855291940097594, "grad_norm": 3.5016968564731283, "learning_rate": 2.739615949204617e-05, "loss": 0.7241, "step": 1180 },
    { "epoch": 0.20023557126030625, "grad_norm": 1.7190048028902127, "learning_rate": 2.7350880203353703e-05, "loss": 0.7192, "step": 1190 },
    { "epoch": 0.20191822311963656, "grad_norm": 3.7186824247487515, "learning_rate": 2.7305248706896722e-05, "loss": 0.7063, "step": 1200 },
    { "epoch": 0.20360087497896684, "grad_norm": 4.1717869895766935, "learning_rate": 2.7259266303940164e-05, "loss": 0.7088, "step": 1210 },
    { "epoch": 0.20528352683829715, "grad_norm": 2.5124857963805804, "learning_rate": 2.7212934305755697e-05, "loss": 0.7198, "step": 1220 },
    { "epoch": 0.20696617869762746, "grad_norm": 2.095136268936366, "learning_rate": 2.7166254033584343e-05, "loss": 0.753, "step": 1230 },
    { "epoch": 0.20864883055695777, "grad_norm": 3.2661098868577256, "learning_rate": 2.7119226818598784e-05, "loss": 0.6779, "step": 1240 },
    { "epoch": 0.21033148241628807, "grad_norm": 3.055506603735091, "learning_rate": 2.7071854001865402e-05, "loss": 0.7013, "step": 1250 },
    { "epoch": 0.21201413427561838, "grad_norm": 12.522953778477769, "learning_rate": 2.702413693430604e-05, "loss": 0.7088, "step": 1260 },
    { "epoch": 0.2136967861349487, "grad_norm": 3.476240301739368, "learning_rate": 2.697607697665948e-05, "loss": 0.689, "step": 1270 },
    { "epoch": 0.21537943799427897, "grad_norm": 1.1862686197570156, "learning_rate": 2.6927675499442648e-05, "loss": 0.7243, "step": 1280 },
    { "epoch": 0.21706208985360928, "grad_norm": 1.6505042403801382, "learning_rate": 2.68789338829115e-05, "loss": 0.7083, "step": 1290 },
    { "epoch": 0.2187447417129396, "grad_norm": 4.74071740077375, "learning_rate": 2.6829853517021698e-05, "loss": 0.7016, "step": 1300 },
    { "epoch": 0.2204273935722699, "grad_norm": 4.124079283639458, "learning_rate": 2.6780435801388945e-05, "loss": 0.7077, "step": 1310 },
    { "epoch": 0.2221100454316002, "grad_norm": 1.9487864410536297, "learning_rate": 2.6730682145249093e-05, "loss": 0.7355, "step": 1320 },
    { "epoch": 0.22379269729093051, "grad_norm": 2.4839241050514733, "learning_rate": 2.668059396741795e-05, "loss": 0.7092, "step": 1330 },
    { "epoch": 0.22547534915026082, "grad_norm": 2.841913657394254, "learning_rate": 2.6630172696250804e-05, "loss": 0.7303, "step": 1340 },
    { "epoch": 0.2271580010095911, "grad_norm": 2.7442870185873347, "learning_rate": 2.6579419769601715e-05, "loss": 0.6739, "step": 1350 },
    { "epoch": 0.2288406528689214, "grad_norm": 1.3854365909071105, "learning_rate": 2.6528336634782493e-05, "loss": 0.7073, "step": 1360 },
    { "epoch": 0.23052330472825172, "grad_norm": 3.115941001607779, "learning_rate": 2.6476924748521443e-05, "loss": 0.7267, "step": 1370 },
    { "epoch": 0.23220595658758203, "grad_norm": 6.9185951332741, "learning_rate": 2.6425185576921812e-05, "loss": 0.7456, "step": 1380 },
    { "epoch": 0.23388860844691234, "grad_norm": 2.378601355345996, "learning_rate": 2.637312059541997e-05, "loss": 0.6912, "step": 1390 },
    { "epoch": 0.23557126030624265, "grad_norm": 2.7929947858543906, "learning_rate": 2.632073128874336e-05, "loss": 0.7184, "step": 1400 },
    { "epoch": 0.23725391216557296, "grad_norm": 1.5382855773213957, "learning_rate": 2.6268019150868144e-05, "loss": 0.7099, "step": 1410 },
    { "epoch": 0.23893656402490324, "grad_norm": 6.1010563795570025, "learning_rate": 2.62149856849766e-05, "loss": 0.6895, "step": 1420 },
    { "epoch": 0.24061921588423354, "grad_norm": 5.999491987974443, "learning_rate": 2.616163240341426e-05, "loss": 0.7493, "step": 1430 },
    { "epoch": 0.24230186774356385, "grad_norm": 2.837037600849311, "learning_rate": 2.6107960827646774e-05, "loss": 0.7176, "step": 1440 },
    { "epoch": 0.24398451960289416, "grad_norm": 1.7029089834427125, "learning_rate": 2.6053972488216538e-05, "loss": 0.6852, "step": 1450 },
    { "epoch": 0.24566717146222447, "grad_norm": 1.382189249222589, "learning_rate": 2.5999668924699035e-05, "loss": 0.685, "step": 1460 },
    { "epoch": 0.24734982332155478, "grad_norm": 1.9496045543050813, "learning_rate": 2.5945051685658923e-05, "loss": 0.6591, "step": 1470 },
    { "epoch": 0.2490324751808851, "grad_norm": 5.479390805764353, "learning_rate": 2.5890122328605908e-05, "loss": 0.7085, "step": 1480 },
    { "epoch": 0.25071512704021537, "grad_norm": 1.7567995670915637, "learning_rate": 2.5834882419950295e-05, "loss": 0.7091, "step": 1490 },
    { "epoch": 0.2523977788995457, "grad_norm": 1.9685911084195309, "learning_rate": 2.577933353495833e-05, "loss": 0.7218, "step": 1500 },
    { "epoch": 0.254080430758876, "grad_norm": 3.400633915540874, "learning_rate": 2.5723477257707293e-05, "loss": 0.7148, "step": 1510 },
    { "epoch": 0.2557630826182063, "grad_norm": 1.2116738326443663, "learning_rate": 2.566731518104029e-05, "loss": 0.7321, "step": 1520 },
    { "epoch": 0.2574457344775366, "grad_norm": 1.3376343864594256, "learning_rate": 2.5610848906520878e-05, "loss": 0.748, "step": 1530 },
    { "epoch": 0.2591283863368669, "grad_norm": 2.6089861003232055, "learning_rate": 2.5554080044387344e-05, "loss": 0.7127, "step": 1540 },
    { "epoch": 0.2608110381961972, "grad_norm": 3.2047926120640526, "learning_rate": 2.5497010213506825e-05, "loss": 0.7262, "step": 1550 },
    { "epoch": 0.26249369005552753, "grad_norm": 1.4899957348295265, "learning_rate": 2.5439641041329128e-05, "loss": 0.7122, "step": 1560 },
    { "epoch": 0.26417634191485784, "grad_norm": 3.595968473922136, "learning_rate": 2.5381974163840313e-05, "loss": 0.7092, "step": 1570 },
    { "epoch": 0.26585899377418815, "grad_norm": 3.5232117574234003, "learning_rate": 2.532401122551605e-05, "loss": 0.6924, "step": 1580 },
    { "epoch": 0.2675416456335184, "grad_norm": 2.618947453668302, "learning_rate": 2.526575387927473e-05, "loss": 0.7067, "step": 1590 },
    { "epoch": 0.2692242974928487, "grad_norm": 3.6282673284589566, "learning_rate": 2.52072037864303e-05, "loss": 0.6945, "step": 1600 },
    { "epoch": 0.270906949352179, "grad_norm": 2.2274379147013, "learning_rate": 2.5148362616644926e-05, "loss": 0.6727, "step": 1610 },
    { "epoch": 0.2725896012115093, "grad_norm": 2.823867881580523, "learning_rate": 2.508923204788135e-05, "loss": 0.7158, "step": 1620 },
    { "epoch": 0.27427225307083963, "grad_norm": 2.0118901151982245, "learning_rate": 2.5029813766355062e-05, "loss": 0.7422, "step": 1630 },
    { "epoch": 0.27595490493016994, "grad_norm": 1.2843584175617246, "learning_rate": 2.4970109466486202e-05, "loss": 0.7099, "step": 1640 },
    { "epoch": 0.27763755678950025, "grad_norm": 3.5059277881120914, "learning_rate": 2.491012085085122e-05, "loss": 0.7164, "step": 1650 },
    { "epoch": 0.27932020864883056, "grad_norm": 1.7458993688338285, "learning_rate": 2.4849849630134384e-05, "loss": 0.6901, "step": 1660 },
    { "epoch": 0.28100286050816087, "grad_norm": 5.813346226937464, "learning_rate": 2.4789297523078924e-05, "loss": 0.7181, "step": 1670 },
    { "epoch": 0.2826855123674912, "grad_norm": 2.0515286491489237, "learning_rate": 2.4728466256438072e-05, "loss": 0.7431, "step": 1680 },
    { "epoch": 0.2843681642268215, "grad_norm": 2.6702746679350375, "learning_rate": 2.4667357564925798e-05, "loss": 0.701, "step": 1690 },
    { "epoch": 0.2860508160861518, "grad_norm": 2.707565805299449, "learning_rate": 2.460597319116735e-05, "loss": 0.6725, "step": 1700 },
    { "epoch": 0.2877334679454821, "grad_norm": 1.7994267796032153, "learning_rate": 2.4544314885649552e-05, "loss": 0.7043, "step": 1710 },
    { "epoch": 0.2894161198048124, "grad_norm": 2.240627477157692, "learning_rate": 2.4482384406670883e-05, "loss": 0.7337, "step": 1720 },
    { "epoch": 0.29109877166414266, "grad_norm": 1.4093208691675285, "learning_rate": 2.4420183520291354e-05, "loss": 0.706, "step": 1730 },
    { "epoch": 0.292781423523473, "grad_norm": 1.5799653304195502, "learning_rate": 2.4357714000282127e-05, "loss": 0.7254, "step": 1740 },
    { "epoch": 0.2944640753828033, "grad_norm": 1.8282839714116759, "learning_rate": 2.4294977628074938e-05, "loss": 0.68, "step": 1750 },
    { "epoch": 0.2961467272421336, "grad_norm": 13.490769798309381, "learning_rate": 2.42319761927113e-05, "loss": 0.6984, "step": 1760 },
    { "epoch": 0.2978293791014639, "grad_norm": 1.1660842236351188, "learning_rate": 2.4168711490791484e-05, "loss": 0.6893, "step": 1770 },
    { "epoch": 0.2995120309607942, "grad_norm": 1.4880113732457052, "learning_rate": 2.4105185326423286e-05, "loss": 0.7371, "step": 1780 },
    { "epoch": 0.3011946828201245, "grad_norm": 1.9796491202207207, "learning_rate": 2.4041399511170574e-05, "loss": 0.7372, "step": 1790 },
    { "epoch": 0.3028773346794548, "grad_norm": 3.2861914347482846, "learning_rate": 2.3977355864001635e-05, "loss": 0.7145, "step": 1800 },
    { "epoch": 0.30455998653878513, "grad_norm": 3.8536888582450595, "learning_rate": 2.3913056211237304e-05, "loss": 0.7244, "step": 1810 },
    { "epoch": 0.30624263839811544, "grad_norm": 2.250827213388724, "learning_rate": 2.3848502386498866e-05, "loss": 0.7444, "step": 1820 },
    { "epoch": 0.30792529025744575, "grad_norm": 1.6760548188250846, "learning_rate": 2.3783696230655802e-05, "loss": 0.7415, "step": 1830 },
    { "epoch": 0.30960794211677606, "grad_norm": 2.83690011157284, "learning_rate": 2.371863959177326e-05, "loss": 0.6769, "step": 1840 },
    { "epoch": 0.31129059397610637, "grad_norm": 3.6586666108883037, "learning_rate": 2.365333432505937e-05, "loss": 0.6981, "step": 1850 },
    { "epoch": 0.3129732458354367, "grad_norm": 2.967916913846329, "learning_rate": 2.3587782292812323e-05, "loss": 0.7235, "step": 1860 },
    { "epoch": 0.31465589769476693, "grad_norm": 2.7607388194454607, "learning_rate": 2.35219853643673e-05, "loss": 0.7202, "step": 1870 },
    { "epoch": 0.31633854955409724, "grad_norm": 2.5793375573884925, "learning_rate": 2.3455945416043132e-05, "loss": 0.7437, "step": 1880 },
    { "epoch": 0.31802120141342755, "grad_norm": 1.6474727320404343, "learning_rate": 2.338966433108879e-05, "loss": 0.6664, "step": 1890 },
    { "epoch": 0.31970385327275785, "grad_norm": 2.8252072958720102, "learning_rate": 2.3323143999629712e-05, "loss": 0.6641, "step": 1900 },
    { "epoch": 0.32138650513208816, "grad_norm": 1.8240997471681801, "learning_rate": 2.3256386318613877e-05, "loss": 0.7029, "step": 1910 },
    { "epoch": 0.32306915699141847, "grad_norm": 1.7867386563705459, "learning_rate": 2.318939319175771e-05, "loss": 0.6806, "step": 1920 },
    { "epoch": 0.3247518088507488, "grad_norm": 2.519605910503542, "learning_rate": 2.3122166529491822e-05, "loss": 0.6837, "step": 1930 },
    { "epoch": 0.3264344607100791, "grad_norm": 1.5090617010699425, "learning_rate": 2.3054708248906483e-05, "loss": 0.7201, "step": 1940 },
    { "epoch": 0.3281171125694094, "grad_norm": 1.85373627743108, "learning_rate": 2.2987020273696996e-05, "loss": 0.7007, "step": 1950 },
    { "epoch": 0.3297997644287397, "grad_norm": 3.1668783585579714, "learning_rate": 2.2919104534108825e-05, "loss": 0.6827, "step": 1960 },
    { "epoch": 0.33148241628807, "grad_norm": 2.802801151344103, "learning_rate": 2.2850962966882547e-05, "loss": 0.733, "step": 1970 },
    { "epoch": 0.3331650681474003, "grad_norm": 4.351080547606847, "learning_rate": 2.278259751519861e-05, "loss": 0.7125, "step": 1980 },
    { "epoch": 0.33484772000673063, "grad_norm": 1.4284076903376268, "learning_rate": 2.2714010128621957e-05, "loss": 0.7166, "step": 1990 },
    { "epoch": 0.3365303718660609, "grad_norm": 1.4047557097137526, "learning_rate": 2.2645202763046385e-05, "loss": 0.7306, "step": 2000 },
    { "epoch": 0.3382130237253912, "grad_norm": 1.7792478988054032, "learning_rate": 2.2576177380638808e-05, "loss": 0.6819, "step": 2010 },
    { "epoch": 0.3398956755847215, "grad_norm": 1.541280861427235, "learning_rate": 2.2506935949783277e-05, "loss": 0.7188, "step": 2020 },
    { "epoch": 0.3415783274440518, "grad_norm": 3.1010621866032717, "learning_rate": 2.243748044502485e-05, "loss": 0.7262, "step": 2030 },
    { "epoch": 0.3432609793033821, "grad_norm": 2.3564311476470245, "learning_rate": 2.236781284701332e-05, "loss": 0.6862, "step": 2040 },
    { "epoch": 0.3449436311627124, "grad_norm": 2.277474191898381, "learning_rate": 2.229793514244666e-05, "loss": 0.7086, "step": 2050 },
    { "epoch": 0.34662628302204274, "grad_norm": 1.9189792309740905, "learning_rate": 2.222784932401445e-05, "loss": 0.6785, "step": 2060 },
    { "epoch": 0.34830893488137304, "grad_norm": 8.557409847942546, "learning_rate": 2.2157557390341e-05, "loss": 0.7162, "step": 2070 },
    { "epoch": 0.34999158674070335, "grad_norm": 2.0580462138378324, "learning_rate": 2.2087061345928375e-05, "loss": 0.663, "step": 2080 },
    { "epoch": 0.35167423860003366, "grad_norm": 11.262172640870563, "learning_rate": 2.2016363201099205e-05, "loss": 0.7363, "step": 2090 },
    { "epoch": 0.35335689045936397, "grad_norm": 12.967220942427355, "learning_rate": 2.1945464971939424e-05, "loss": 0.7169, "step": 2100 },
    { "epoch": 0.3550395423186943, "grad_norm": 31.131995950709616, "learning_rate": 2.1874368680240692e-05, "loss": 0.7109, "step": 2110 },
    { "epoch": 0.3567221941780246, "grad_norm": 3.759967527046392, "learning_rate": 2.1803076353442806e-05, "loss": 0.72, "step": 2120 },
    { "epoch": 0.3584048460373549, "grad_norm": 2.0854209231683494, "learning_rate": 2.1731590024575848e-05, "loss": 0.7443, "step": 2130 },
    { "epoch": 0.36008749789668515, "grad_norm": 1.550491190575623, "learning_rate": 2.165991173220223e-05, "loss": 0.7272, "step": 2140 },
    { "epoch": 0.36177014975601546, "grad_norm": 3.184378797474582, "learning_rate": 2.158804352035855e-05, "loss": 0.6796, "step": 2150 },
    { "epoch": 0.36345280161534577, "grad_norm": 2.2017388277936623, "learning_rate": 2.1515987438497295e-05, "loss": 0.7079, "step": 2160 },
    { "epoch": 0.3651354534746761, "grad_norm": 1.8785352439752243, "learning_rate": 2.1443745541428416e-05, "loss": 0.7157, "step": 2170 },
    { "epoch": 0.3668181053340064, "grad_norm": 5.799376408169486, "learning_rate": 2.137131988926072e-05, "loss": 0.7147, "step": 2180 },
    { "epoch": 0.3685007571933367, "grad_norm": 1.463705775425984, "learning_rate": 2.129871254734312e-05, "loss": 0.6965, "step": 2190 },
    { "epoch": 0.370183409052667, "grad_norm": 2.8778655656985435, "learning_rate": 2.122592558620575e-05, "loss": 0.6853, "step": 2200 },
    { "epoch": 0.3718660609119973, "grad_norm": 2.830232793716089, "learning_rate": 2.1152961081500906e-05, "loss": 0.6682, "step": 2210 },
    { "epoch": 0.3735487127713276, "grad_norm": 7.790226305121679, "learning_rate": 2.1079821113943863e-05, "loss": 0.6855, "step": 2220 },
    { "epoch": 0.3752313646306579, "grad_norm": 1.635629077245975, "learning_rate": 2.100650776925353e-05, "loss": 0.7156, "step": 2230 },
    { "epoch": 0.37691401648998824, "grad_norm": 1.6353332190474046, "learning_rate": 2.0933023138092995e-05, "loss": 0.7083, "step": 2240 },
    { "epoch": 0.37859666834931854, "grad_norm": 2.4020219788955237, "learning_rate": 2.0859369316009877e-05, "loss": 0.6873, "step": 2250 },
    { "epoch": 0.38027932020864885, "grad_norm": 1.5183766349594623, "learning_rate": 2.0785548403376592e-05, "loss": 0.6882, "step": 2260 },
    { "epoch": 0.38196197206797916, "grad_norm": 3.9509980884297313, "learning_rate": 2.0711562505330437e-05, "loss": 0.7037, "step": 2270 },
    { "epoch": 0.3836446239273094, "grad_norm": 1.080804508141519, "learning_rate": 2.063741373171357e-05, "loss": 0.7124, "step": 2280 },
    { "epoch": 0.3853272757866397, "grad_norm": 2.4771901968792975, "learning_rate": 2.0563104197012847e-05, "loss": 0.7348, "step": 2290 },
    { "epoch": 0.38700992764597003, "grad_norm": 1.4012276716265397, "learning_rate": 2.048863602029951e-05, "loss": 0.7157, "step": 2300 },
    { "epoch": 0.38869257950530034, "grad_norm": 1.8764316814985353, "learning_rate": 2.0414011325168777e-05, "loss": 0.6932, "step": 2310 },
    { "epoch": 0.39037523136463065, "grad_norm": 3.2158348437646183, "learning_rate": 2.0339232239679252e-05, "loss": 0.6789, "step": 2320 },
    { "epoch": 0.39205788322396096, "grad_norm": 2.468508068736803, "learning_rate": 2.026430089629229e-05, "loss": 0.7163, "step": 2330 },
    { "epoch": 0.39374053508329127, "grad_norm": 2.0179845911955656, "learning_rate": 2.0189219431811123e-05, "loss": 0.6566, "step": 2340 },
    { "epoch": 0.3954231869426216, "grad_norm": 0.9653345936734927, "learning_rate": 2.0113989987319988e-05, "loss": 0.6939, "step": 2350 },
    { "epoch": 0.3971058388019519, "grad_norm": 1.568403155522187, "learning_rate": 2.0038614708123023e-05, "loss": 0.7288, "step": 2360 },
    { "epoch": 0.3987884906612822, "grad_norm": 5.098482519541986, "learning_rate": 1.996309574368311e-05, "loss": 0.7217, "step": 2370 },
    { "epoch": 0.4004711425206125, "grad_norm": 1.656770228609968, "learning_rate": 1.9887435247560586e-05, "loss": 0.743, "step": 2380 },
    { "epoch": 0.4021537943799428, "grad_norm": 1.679661595344395, "learning_rate": 1.981163537735181e-05, "loss": 0.724, "step": 2390 },
    { "epoch": 0.4038364462392731, "grad_norm": 2.057009585071239, "learning_rate": 1.9735698294627644e-05, "loss": 0.7228, "step": 2400 },
    { "epoch": 0.4055190980986034, "grad_norm": 1.6262124746744704, "learning_rate": 1.9659626164871828e-05, "loss": 0.6621, "step": 2410 },
    { "epoch": 0.4072017499579337, "grad_norm": 8.087231459721274, "learning_rate": 1.95834211574192e-05, "loss": 0.6988, "step": 2420 },
    { "epoch": 0.408884401817264, "grad_norm": 2.053339294676776, "learning_rate": 1.9507085445393855e-05, "loss": 0.7056, "step": 2430 },
    { "epoch": 0.4105670536765943, "grad_norm": 2.295299690475712, "learning_rate": 1.9430621205647156e-05, "loss": 0.7231, "step": 2440 },
    { "epoch": 0.4122497055359246, "grad_norm": 2.419261777687713, "learning_rate": 1.935403061869568e-05, "loss": 0.7786, "step": 2450 },
    { "epoch": 0.4139323573952549, "grad_norm": 1.2971369467313154, "learning_rate": 1.9277315868659017e-05, "loss": 0.6857, "step": 2460 },
    { "epoch": 0.4156150092545852, "grad_norm": 3.89724254450292, "learning_rate": 1.920047914319749e-05, "loss": 0.6795, "step": 2470 },
    { "epoch": 0.41729766111391553, "grad_norm": 1.8276445458438462, "learning_rate": 1.9123522633449772e-05, "loss": 0.7249, "step": 2480 },
    { "epoch": 0.41898031297324584, "grad_norm": 1.7262471459079933, "learning_rate": 1.9046448533970423e-05, "loss": 0.7055, "step": 2490 },
    { "epoch": 0.42066296483257615, "grad_norm": 1.5815796760993692, "learning_rate": 1.8969259042667255e-05, "loss": 0.7012, "step": 2500 },
    { "epoch": 0.42234561669190646, "grad_norm": 1.0102816357132662, "learning_rate": 1.8891956360738706e-05, "loss": 0.6803, "step": 2510 },
    { "epoch": 0.42402826855123676, "grad_norm": 8.978060365810029, "learning_rate": 1.881454269261105e-05, "loss": 0.6791, "step": 2520 },
    { "epoch": 0.4257109204105671, "grad_norm": 1.7740952536546388, "learning_rate": 1.873702024587553e-05, "loss": 0.7118, "step": 2530 },
    { "epoch": 0.4273935722698974, "grad_norm": 16.141334008508057, "learning_rate": 1.865939123122541e-05, "loss": 0.7053, "step": 2540 },
    { "epoch": 0.4290762241292277, "grad_norm": 1.7964747107424808, "learning_rate": 1.858165786239293e-05, "loss": 0.706, "step": 2550 },
    { "epoch": 0.43075887598855794, "grad_norm": 2.1026202521481743, "learning_rate": 1.850382235608618e-05, "loss": 0.7057, "step": 2560 },
    { "epoch": 0.43244152784788825, "grad_norm": 2.8681027149657226, "learning_rate": 1.842588693192589e-05, "loss": 0.7111, "step": 2570 },
    { "epoch": 0.43412417970721856, "grad_norm": 1.2285580771772646, "learning_rate": 1.834785381238212e-05, "loss": 0.7023, "step": 2580 },
    { "epoch": 0.43580683156654887, "grad_norm": 1.86184784397222, "learning_rate": 1.8269725222710895e-05, "loss": 0.6614, "step": 2590 },
    { "epoch": 0.4374894834258792, "grad_norm": 1.7349607835894336, "learning_rate": 1.8191503390890745e-05, "loss": 0.6976, "step": 2600 },
    { "epoch": 0.4391721352852095, "grad_norm": 4.623472425426236, "learning_rate": 1.8113190547559167e-05, "loss": 0.7147, "step": 2610 },
    { "epoch": 0.4408547871445398, "grad_norm": 1.71339692622417, "learning_rate": 1.8034788925949024e-05, "loss": 0.6664, "step": 2620 },
    { "epoch": 0.4425374390038701, "grad_norm": 3.0050627586678975, "learning_rate": 1.795630076182484e-05, "loss": 0.6802, "step": 2630 },
    { "epoch": 0.4442200908632004, "grad_norm": 3.1349481547325992, "learning_rate": 1.7877728293419067e-05, "loss": 0.6981, "step": 2640 },
    { "epoch": 0.4459027427225307, "grad_norm": 3.3593498761324336, "learning_rate": 1.7799073761368236e-05, "loss": 0.7219, "step": 2650 },
    { "epoch": 0.44758539458186103, "grad_norm": 1.7981704686011195, "learning_rate": 1.7720339408649084e-05, "loss": 0.7163, "step": 2660 },
    { "epoch": 0.44926804644119134, "grad_norm": 1.2590076147308225, "learning_rate": 1.7641527480514575e-05, "loss": 0.7086, "step": 2670 },
    { "epoch": 0.45095069830052165, "grad_norm": 5.385441773268162, "learning_rate": 1.756264022442987e-05, "loss": 0.6695, "step": 2680 },
    { "epoch": 0.4526333501598519, "grad_norm": 1.6481056783365962, "learning_rate": 1.7483679890008247e-05, "loss": 0.7314, "step": 2690 },
    { "epoch": 0.4543160020191822, "grad_norm": 1.6160788306327938, "learning_rate": 1.740464872894695e-05, "loss": 0.7336, "step": 2700 },
    { "epoch": 0.4559986538785125, "grad_norm": 1.4049942343651247, "learning_rate": 1.732554899496297e-05, "loss": 0.6399, "step": 2710 },
    { "epoch": 0.4576813057378428, "grad_norm": 1.0231254626105863, "learning_rate": 1.7246382943728777e-05, "loss": 0.7213, "step": 2720 },
    { "epoch": 0.45936395759717313, "grad_norm": 1.938670327906363, "learning_rate": 1.7167152832807997e-05, "loss": 0.6879, "step": 2730 },
    { "epoch": 0.46104660945650344, "grad_norm": 3.044969011471669, "learning_rate": 1.7087860921591047e-05, "loss": 0.7113, "step": 2740 },
    { "epoch": 0.46272926131583375, "grad_norm": 2.2385420988451394, "learning_rate": 1.7008509471230673e-05, "loss": 0.6928, "step": 2750 },
    { "epoch": 0.46441191317516406, "grad_norm": 2.281718793802536, "learning_rate": 1.69291007445775e-05, "loss": 0.7608, "step": 2760 },
    { "epoch": 0.46609456503449437, "grad_norm": 2.588995378875439, "learning_rate": 1.684963700611548e-05, "loss": 0.6455, "step": 2770 },
    { "epoch": 0.4677772168938247, "grad_norm": 2.8099788810235027, "learning_rate": 1.677012052189734e-05, "loss": 0.7263, "step": 2780 },
    { "epoch": 0.469459868753155, "grad_norm": 4.221047912947499, "learning_rate": 1.6690553559479946e-05, "loss": 0.6781, "step": 2790 },
    { "epoch": 0.4711425206124853, "grad_norm": 2.1824366142795304, "learning_rate": 1.6610938387859623e-05, "loss": 0.6756, "step": 2800 },
    { "epoch": 0.4728251724718156, "grad_norm": 3.1830476070083282, "learning_rate": 1.6531277277407497e-05, "loss": 0.6975, "step": 2810 },
    { "epoch": 0.4745078243311459, "grad_norm": 1.6848222892389952, "learning_rate": 1.64515724998047e-05, "loss": 0.7008, "step": 2820 },
    { "epoch": 0.47619047619047616, "grad_norm": 2.127082253311685, "learning_rate": 1.6371826327977624e-05, "loss": 0.7062, "step": 2830 },
    { "epoch": 0.4778731280498065, "grad_norm": 1.6518089926121424, "learning_rate": 1.6292041036033088e-05, "loss": 0.7074, "step": 2840 },
    { "epoch": 0.4795557799091368, "grad_norm": 4.7973964544416035, "learning_rate": 1.6212218899193497e-05, "loss": 0.7636, "step": 2850 },
    { "epoch": 0.4812384317684671, "grad_norm": 1.618278098822002, "learning_rate": 1.6132362193731943e-05, "loss": 0.6747, "step": 2860 },
    { "epoch": 0.4829210836277974, "grad_norm": 1.8105398299358957, "learning_rate": 1.605247319690732e-05, "loss": 0.698, "step": 2870 },
    { "epoch": 0.4846037354871277, "grad_norm": 3.1211446249471404, "learning_rate": 1.5972554186899366e-05, "loss": 0.6739, "step": 2880 },
    { "epoch": 0.486286387346458, "grad_norm": 5.621021474409387, "learning_rate": 1.5892607442743688e-05, "loss": 0.7058, "step": 2890 },
    { "epoch": 0.4879690392057883, "grad_norm": 6.624570724686767, "learning_rate": 1.5812635244266796e-05, "loss": 0.7004, "step": 2900 },
    { "epoch": 0.48965169106511863, "grad_norm": 7.030770382947687, "learning_rate": 1.573263987202107e-05, "loss": 0.7288, "step": 2910 },
    { "epoch": 0.49133434292444894, "grad_norm": 1.5061023877257589, "learning_rate": 1.5652623607219725e-05, "loss": 0.7259, "step": 2920 },
    { "epoch": 0.49301699478377925, "grad_norm": 1.9928266509751398, "learning_rate": 1.5572588731671784e-05, "loss": 0.7403, "step": 2930 },
    { "epoch": 0.49469964664310956, "grad_norm": 1.6276048909503598, "learning_rate": 1.549253752771697e-05, "loss": 0.695, "step": 2940 },
    { "epoch": 0.49638229850243987, "grad_norm": 2.49890274764129, "learning_rate": 1.5412472278160655e-05, "loss": 0.6716, "step": 2950 },
    { "epoch": 0.4980649503617702, "grad_norm": 4.745602630196767, "learning_rate": 1.5332395266208732e-05, "loss": 0.7174, "step": 2960 },
    { "epoch": 0.49974760222110043, "grad_norm": 4.953610176848769, "learning_rate": 1.5252308775402532e-05, "loss": 0.7433, "step": 2970 },
    { "epoch": 0.5014302540804307, "grad_norm": 2.3863439187560505, "learning_rate": 1.5172215089553686e-05, "loss": 0.6696, "step": 2980 },
    { "epoch": 0.5031129059397611, "grad_norm": 3.620682084963547, "learning_rate": 1.5092116492679014e-05, "loss": 0.6845, "step": 2990 },
    { "epoch": 0.5047955577990914, "grad_norm": 1.9658764046153223, "learning_rate": 1.5012015268935365e-05, "loss": 0.6859, "step": 3000 },
    { "epoch": 0.5064782096584217, "grad_norm": 3.220698387276685, "learning_rate": 1.4931913702554521e-05, "loss": 0.6906, "step": 3010 },
    { "epoch": 0.508160861517752, "grad_norm": 3.621796474971464, "learning_rate": 1.4851814077778017e-05, "loss": 0.6979, "step": 3020 },
    { "epoch": 0.5098435133770823, "grad_norm": 1.9814230521405456, "learning_rate": 1.4771718678792025e-05, "loss": 0.6867, "step": 3030 },
    { "epoch": 0.5115261652364126, "grad_norm": 2.2044982958739685, "learning_rate": 1.4691629789662213e-05, "loss": 0.7341, "step": 3040 },
    { "epoch": 0.5132088170957428, "grad_norm": 2.780933062036216, "learning_rate": 1.4611549694268613e-05, "loss": 0.733, "step": 3050 },
    { "epoch": 0.5148914689550732, "grad_norm": 1.0871434822173816, "learning_rate": 1.4531480676240473e-05, "loss": 0.6575, "step": 3060 },
    { "epoch": 0.5165741208144035, "grad_norm": 3.8009580157459486, "learning_rate": 1.445142501889116e-05, "loss": 0.6651, "step": 3070 },
    { "epoch": 0.5182567726737338, "grad_norm": 2.016432134793962, "learning_rate": 1.4371385005153037e-05, "loss": 0.6977, "step": 3080 },
    { "epoch": 0.5199394245330641, "grad_norm": 2.3845094735358727, "learning_rate": 1.4291362917512366e-05, "loss": 0.6977, "step": 3090 },
    { "epoch": 0.5216220763923944, "grad_norm": 2.671148993475096, "learning_rate": 1.4211361037944197e-05, "loss": 0.6943, "step": 3100 },
    { "epoch": 0.5233047282517247, "grad_norm": 1.0531838895819492, "learning_rate": 1.4131381647847327e-05, "loss": 0.7189, "step": 3110 },
    { "epoch": 0.5249873801110551, "grad_norm": 2.1694895865671135, "learning_rate": 1.4051427027979219e-05, "loss": 0.6862, "step": 3120 },
    { "epoch": 0.5266700319703853, "grad_norm": 17.604939617214857, "learning_rate": 1.3971499458390965e-05, "loss": 0.6712, "step": 3130 },
    { "epoch": 0.5283526838297157, "grad_norm": 1.8630173214339067, "learning_rate": 1.3891601218362272e-05, "loss": 0.723, "step": 3140 },
    { "epoch": 0.5300353356890459, "grad_norm": 1.079975309531233, "learning_rate": 1.3811734586336462e-05, "loss": 0.6921, "step": 3150 },
    { "epoch": 0.5317179875483763, "grad_norm": 5.31265761197723, "learning_rate": 1.3731901839855496e-05, "loss": 0.7146, "step": 3160 },
    { "epoch": 0.5334006394077065, "grad_norm": 1.369250501286093, "learning_rate": 1.3652105255495033e-05, "loss": 0.6715, "step": 3170 },
    { "epoch": 0.5350832912670368, "grad_norm": 10.056582399023032, "learning_rate": 1.3572347108799487e-05, "loss": 0.7043, "step": 3180 },
    { "epoch": 0.5367659431263672, "grad_norm": 1.3823052893137193, "learning_rate": 1.3492629674217172e-05, "loss": 0.7382, "step": 3190 },
    { "epoch": 0.5384485949856974, "grad_norm": 2.066836912567277, "learning_rate": 1.341295522503541e-05, "loss": 0.6904, "step": 3200 },
    { "epoch": 0.5401312468450278, "grad_norm": 0.9347211662205901, "learning_rate": 1.3333326033315728e-05, "loss": 0.7414, "step": 3210 },
    { "epoch": 0.541813898704358, "grad_norm": 2.7359707780749822, "learning_rate": 1.3253744369829032e-05, "loss": 0.6792, "step": 3220 },
    { "epoch": 0.5434965505636884, "grad_norm": 1.6145156682099768, "learning_rate": 1.3174212503990897e-05, "loss": 0.7462, "step": 3230 },
    { "epoch": 0.5451792024230186, "grad_norm": 3.4914058348134125, "learning_rate": 1.3094732703796819e-05, "loss": 0.7357, "step": 3240 },
    { "epoch": 0.546861854282349, "grad_norm": 1.8454832561666616, "learning_rate": 1.3015307235757551e-05, "loss": 0.6908, "step": 3250 },
    { "epoch": 0.5485445061416793, "grad_norm": 3.3205926754493027, "learning_rate": 1.2935938364834456e-05, "loss": 0.7159, "step": 3260 },
    { "epoch": 0.5502271580010096, "grad_norm": 1.2994315445098243, "learning_rate": 1.2856628354374937e-05, "loss": 0.694, "step": 3270 },
    { "epoch": 0.5519098098603399, "grad_norm": 1.7940735581626492, "learning_rate": 1.277737946604788e-05, "loss": 0.681, "step": 3280 },
    { "epoch": 0.5535924617196702, "grad_norm": 2.7901617710152684, "learning_rate": 1.2698193959779166e-05, "loss": 0.7389, "step": 3290 },
    { "epoch": 0.5552751135790005, "grad_norm": 1.9365531084227832, "learning_rate": 1.2619074093687222e-05, "loss": 0.7161, "step": 3300 },
    { "epoch": 0.5569577654383308, "grad_norm": 2.135764737178054, "learning_rate": 1.2540022124018616e-05, "loss": 0.7054, "step": 3310 },
    { "epoch": 0.5586404172976611, "grad_norm": 2.1837448586467274, "learning_rate": 1.2461040305083738e-05, "loss": 0.669, "step": 3320 },
    { "epoch": 0.5603230691569914, "grad_norm": 2.191220090306169, "learning_rate": 1.2382130889192504e-05, "loss": 0.7048, "step": 3330 },
    { "epoch": 0.5620057210163217, "grad_norm": 1.5430867210389865, "learning_rate": 1.2303296126590116e-05, "loss": 0.6741, "step": 3340 },
    { "epoch": 0.563688372875652, "grad_norm": 2.6660882401969745, "learning_rate": 1.22245382653929e-05, "loss": 0.6805, "step": 3350 },
    { "epoch": 0.5653710247349824, "grad_norm": 3.635510958056303, "learning_rate": 1.2145859551524212e-05, "loss": 0.7429, "step": 3360 },
    { "epoch": 0.5670536765943126, "grad_norm": 5.210284099191435, "learning_rate": 1.2067262228650367e-05, "loss": 0.7131, "step": 3370 },
| { | |
| "epoch": 0.568736328453643, | |
| "grad_norm": 2.0053597397223926, | |
| "learning_rate": 1.1988748538116684e-05, | |
| "loss": 0.6727, | |
| "step": 3380 | |
| }, | |
| { | |
| "epoch": 0.5704189803129732, | |
| "grad_norm": 0.9841565389270883, | |
| "learning_rate": 1.1910320718883527e-05, | |
| "loss": 0.688, | |
| "step": 3390 | |
| }, | |
| { | |
| "epoch": 0.5721016321723036, | |
| "grad_norm": 3.5502835335131984, | |
| "learning_rate": 1.1831981007462505e-05, | |
| "loss": 0.7127, | |
| "step": 3400 | |
| }, | |
| { | |
| "epoch": 0.5737842840316338, | |
| "grad_norm": 19.100507224173477, | |
| "learning_rate": 1.175373163785267e-05, | |
| "loss": 0.7516, | |
| "step": 3410 | |
| }, | |
| { | |
| "epoch": 0.5754669358909642, | |
| "grad_norm": 6.488529354452652, | |
| "learning_rate": 1.1675574841476812e-05, | |
| "loss": 0.7213, | |
| "step": 3420 | |
| }, | |
| { | |
| "epoch": 0.5771495877502945, | |
| "grad_norm": 1.3195988910473815, | |
| "learning_rate": 1.1597512847117818e-05, | |
| "loss": 0.6716, | |
| "step": 3430 | |
| }, | |
| { | |
| "epoch": 0.5788322396096248, | |
| "grad_norm": 2.627355517798598, | |
| "learning_rate": 1.1519547880855138e-05, | |
| "loss": 0.7214, | |
| "step": 3440 | |
| }, | |
| { | |
| "epoch": 0.5805148914689551, | |
| "grad_norm": 7.467588454068418, | |
| "learning_rate": 1.144168216600128e-05, | |
| "loss": 0.7147, | |
| "step": 3450 | |
| }, | |
| { | |
| "epoch": 0.5821975433282853, | |
| "grad_norm": 11.274227587849943, | |
| "learning_rate": 1.1363917923038428e-05, | |
| "loss": 0.6986, | |
| "step": 3460 | |
| }, | |
| { | |
| "epoch": 0.5838801951876157, | |
| "grad_norm": 3.364065045729195, | |
| "learning_rate": 1.1286257369555091e-05, | |
| "loss": 0.6948, | |
| "step": 3470 | |
| }, | |
| { | |
| "epoch": 0.585562847046946, | |
| "grad_norm": 3.731255541211076, | |
| "learning_rate": 1.1208702720182901e-05, | |
| "loss": 0.73, | |
| "step": 3480 | |
| }, | |
| { | |
| "epoch": 0.5872454989062763, | |
| "grad_norm": 2.0213050947070488, | |
| "learning_rate": 1.1131256186533446e-05, | |
| "loss": 0.7232, | |
| "step": 3490 | |
| }, | |
| { | |
| "epoch": 0.5889281507656066, | |
| "grad_norm": 1.7237707844698982, | |
| "learning_rate": 1.1053919977135187e-05, | |
| "loss": 0.7131, | |
| "step": 3500 | |
| }, | |
| { | |
| "epoch": 0.5906108026249369, | |
| "grad_norm": 1.2277608957803374, | |
| "learning_rate": 1.0976696297370486e-05, | |
| "loss": 0.6737, | |
| "step": 3510 | |
| }, | |
| { | |
| "epoch": 0.5922934544842672, | |
| "grad_norm": 1.687899648178338, | |
| "learning_rate": 1.0899587349412728e-05, | |
| "loss": 0.7072, | |
| "step": 3520 | |
| }, | |
| { | |
| "epoch": 0.5939761063435975, | |
| "grad_norm": 3.125010833721455, | |
| "learning_rate": 1.0822595332163511e-05, | |
| "loss": 0.6994, | |
| "step": 3530 | |
| }, | |
| { | |
| "epoch": 0.5956587582029278, | |
| "grad_norm": 2.2807698201693247, | |
| "learning_rate": 1.0745722441189938e-05, | |
| "loss": 0.6933, | |
| "step": 3540 | |
| }, | |
| { | |
| "epoch": 0.5973414100622582, | |
| "grad_norm": 1.8478465718160726, | |
| "learning_rate": 1.0668970868662008e-05, | |
| "loss": 0.7002, | |
| "step": 3550 | |
| }, | |
| { | |
| "epoch": 0.5990240619215884, | |
| "grad_norm": 12.056048787970695, | |
| "learning_rate": 1.0592342803290111e-05, | |
| "loss": 0.7155, | |
| "step": 3560 | |
| }, | |
| { | |
| "epoch": 0.6007067137809188, | |
| "grad_norm": 6.748779186476062, | |
| "learning_rate": 1.0515840430262598e-05, | |
| "loss": 0.7305, | |
| "step": 3570 | |
| }, | |
| { | |
| "epoch": 0.602389365640249, | |
| "grad_norm": 1.9804031993242994, | |
| "learning_rate": 1.0439465931183482e-05, | |
| "loss": 0.6783, | |
| "step": 3580 | |
| }, | |
| { | |
| "epoch": 0.6040720174995793, | |
| "grad_norm": 1.751295087344201, | |
| "learning_rate": 1.0363221484010223e-05, | |
| "loss": 0.6992, | |
| "step": 3590 | |
| }, | |
| { | |
| "epoch": 0.6057546693589096, | |
| "grad_norm": 20.49442717936162, | |
| "learning_rate": 1.0287109262991594e-05, | |
| "loss": 0.7148, | |
| "step": 3600 | |
| }, | |
| { | |
| "epoch": 0.6074373212182399, | |
| "grad_norm": 2.404555975357852, | |
| "learning_rate": 1.0211131438605721e-05, | |
| "loss": 0.6831, | |
| "step": 3610 | |
| }, | |
| { | |
| "epoch": 0.6091199730775703, | |
| "grad_norm": 4.232783732944882, | |
| "learning_rate": 1.0135290177498157e-05, | |
| "loss": 0.7313, | |
| "step": 3620 | |
| }, | |
| { | |
| "epoch": 0.6108026249369005, | |
| "grad_norm": 2.3278170388349433, | |
| "learning_rate": 1.0059587642420111e-05, | |
| "loss": 0.7, | |
| "step": 3630 | |
| }, | |
| { | |
| "epoch": 0.6124852767962309, | |
| "grad_norm": 1.2677891442467946, | |
| "learning_rate": 9.984025992166752e-06, | |
| "loss": 0.7049, | |
| "step": 3640 | |
| }, | |
| { | |
| "epoch": 0.6141679286555611, | |
| "grad_norm": 1.3834489123001021, | |
| "learning_rate": 9.908607381515677e-06, | |
| "loss": 0.7256, | |
| "step": 3650 | |
| }, | |
| { | |
| "epoch": 0.6158505805148915, | |
| "grad_norm": 3.1687877645254887, | |
| "learning_rate": 9.83333396116545e-06, | |
| "loss": 0.706, | |
| "step": 3660 | |
| }, | |
| { | |
| "epoch": 0.6175332323742218, | |
| "grad_norm": 3.611540701981002, | |
| "learning_rate": 9.758207877674268e-06, | |
| "loss": 0.7062, | |
| "step": 3670 | |
| }, | |
| { | |
| "epoch": 0.6192158842335521, | |
| "grad_norm": 2.1085401225045897, | |
| "learning_rate": 9.683231273398734e-06, | |
| "loss": 0.7202, | |
| "step": 3680 | |
| }, | |
| { | |
| "epoch": 0.6208985360928824, | |
| "grad_norm": 4.048470318925133, | |
| "learning_rate": 9.608406286432803e-06, | |
| "loss": 0.7409, | |
| "step": 3690 | |
| }, | |
| { | |
| "epoch": 0.6225811879522127, | |
| "grad_norm": 3.4384556222292644, | |
| "learning_rate": 9.533735050546776e-06, | |
| "loss": 0.7092, | |
| "step": 3700 | |
| }, | |
| { | |
| "epoch": 0.624263839811543, | |
| "grad_norm": 1.4713683631234602, | |
| "learning_rate": 9.459219695126468e-06, | |
| "loss": 0.6641, | |
| "step": 3710 | |
| }, | |
| { | |
| "epoch": 0.6259464916708734, | |
| "grad_norm": 1.6516067628874913, | |
| "learning_rate": 9.384862345112468e-06, | |
| "loss": 0.694, | |
| "step": 3720 | |
| }, | |
| { | |
| "epoch": 0.6276291435302036, | |
| "grad_norm": 6.815718424286625, | |
| "learning_rate": 9.310665120939569e-06, | |
| "loss": 0.6987, | |
| "step": 3730 | |
| }, | |
| { | |
| "epoch": 0.6293117953895339, | |
| "grad_norm": 5.49915185804474, | |
| "learning_rate": 9.236630138476274e-06, | |
| "loss": 0.6884, | |
| "step": 3740 | |
| }, | |
| { | |
| "epoch": 0.6309944472488642, | |
| "grad_norm": 2.7594651411633033, | |
| "learning_rate": 9.162759508964484e-06, | |
| "loss": 0.6936, | |
| "step": 3750 | |
| }, | |
| { | |
| "epoch": 0.6326770991081945, | |
| "grad_norm": 2.3122209947688614, | |
| "learning_rate": 9.08905533895925e-06, | |
| "loss": 0.7161, | |
| "step": 3760 | |
| }, | |
| { | |
| "epoch": 0.6343597509675248, | |
| "grad_norm": 3.136134339043737, | |
| "learning_rate": 9.015519730268755e-06, | |
| "loss": 0.7163, | |
| "step": 3770 | |
| }, | |
| { | |
| "epoch": 0.6360424028268551, | |
| "grad_norm": 1.596660947543887, | |
| "learning_rate": 8.942154779894339e-06, | |
| "loss": 0.6992, | |
| "step": 3780 | |
| }, | |
| { | |
| "epoch": 0.6377250546861855, | |
| "grad_norm": 1.6027158724492945, | |
| "learning_rate": 8.86896257997071e-06, | |
| "loss": 0.7212, | |
| "step": 3790 | |
| }, | |
| { | |
| "epoch": 0.6394077065455157, | |
| "grad_norm": 2.1735047736544386, | |
| "learning_rate": 8.79594521770629e-06, | |
| "loss": 0.6952, | |
| "step": 3800 | |
| }, | |
| { | |
| "epoch": 0.6410903584048461, | |
| "grad_norm": 3.2675280706809526, | |
| "learning_rate": 8.723104775323672e-06, | |
| "loss": 0.6465, | |
| "step": 3810 | |
| }, | |
| { | |
| "epoch": 0.6427730102641763, | |
| "grad_norm": 3.2863543816926244, | |
| "learning_rate": 8.650443330000277e-06, | |
| "loss": 0.7207, | |
| "step": 3820 | |
| }, | |
| { | |
| "epoch": 0.6444556621235067, | |
| "grad_norm": 2.890016728917051, | |
| "learning_rate": 8.577962953809086e-06, | |
| "loss": 0.6738, | |
| "step": 3830 | |
| }, | |
| { | |
| "epoch": 0.6461383139828369, | |
| "grad_norm": 1.362053654556354, | |
| "learning_rate": 8.505665713659581e-06, | |
| "loss": 0.7282, | |
| "step": 3840 | |
| }, | |
| { | |
| "epoch": 0.6478209658421673, | |
| "grad_norm": 2.21989690661548, | |
| "learning_rate": 8.433553671238758e-06, | |
| "loss": 0.6898, | |
| "step": 3850 | |
| }, | |
| { | |
| "epoch": 0.6495036177014976, | |
| "grad_norm": 1.681070867021593, | |
| "learning_rate": 8.361628882952395e-06, | |
| "loss": 0.6989, | |
| "step": 3860 | |
| }, | |
| { | |
| "epoch": 0.6511862695608278, | |
| "grad_norm": 4.528153407163741, | |
| "learning_rate": 8.289893399866363e-06, | |
| "loss": 0.6938, | |
| "step": 3870 | |
| }, | |
| { | |
| "epoch": 0.6528689214201582, | |
| "grad_norm": 1.6045963488044075, | |
| "learning_rate": 8.218349267648159e-06, | |
| "loss": 0.6943, | |
| "step": 3880 | |
| }, | |
| { | |
| "epoch": 0.6545515732794884, | |
| "grad_norm": 3.6806684672632173, | |
| "learning_rate": 8.146998526508548e-06, | |
| "loss": 0.6765, | |
| "step": 3890 | |
| }, | |
| { | |
| "epoch": 0.6562342251388188, | |
| "grad_norm": 47.37957081347139, | |
| "learning_rate": 8.075843211143412e-06, | |
| "loss": 0.7285, | |
| "step": 3900 | |
| }, | |
| { | |
| "epoch": 0.657916876998149, | |
| "grad_norm": 2.180165228225439, | |
| "learning_rate": 8.00488535067571e-06, | |
| "loss": 0.6827, | |
| "step": 3910 | |
| }, | |
| { | |
| "epoch": 0.6595995288574794, | |
| "grad_norm": 2.7463208643191934, | |
| "learning_rate": 7.93412696859762e-06, | |
| "loss": 0.7303, | |
| "step": 3920 | |
| }, | |
| { | |
| "epoch": 0.6612821807168097, | |
| "grad_norm": 3.3891720801120204, | |
| "learning_rate": 7.86357008271281e-06, | |
| "loss": 0.7184, | |
| "step": 3930 | |
| }, | |
| { | |
| "epoch": 0.66296483257614, | |
| "grad_norm": 1.9690570289137448, | |
| "learning_rate": 7.793216705078945e-06, | |
| "loss": 0.6967, | |
| "step": 3940 | |
| }, | |
| { | |
| "epoch": 0.6646474844354703, | |
| "grad_norm": 2.063266380571361, | |
| "learning_rate": 7.72306884195027e-06, | |
| "loss": 0.6969, | |
| "step": 3950 | |
| }, | |
| { | |
| "epoch": 0.6663301362948006, | |
| "grad_norm": 1.908562867410139, | |
| "learning_rate": 7.653128493720417e-06, | |
| "loss": 0.6885, | |
| "step": 3960 | |
| }, | |
| { | |
| "epoch": 0.6680127881541309, | |
| "grad_norm": 2.7355519404862156, | |
| "learning_rate": 7.58339765486534e-06, | |
| "loss": 0.7686, | |
| "step": 3970 | |
| }, | |
| { | |
| "epoch": 0.6696954400134613, | |
| "grad_norm": 3.2037677399154956, | |
| "learning_rate": 7.51387831388647e-06, | |
| "loss": 0.6937, | |
| "step": 3980 | |
| }, | |
| { | |
| "epoch": 0.6713780918727915, | |
| "grad_norm": 2.8428120166568065, | |
| "learning_rate": 7.444572453253983e-06, | |
| "loss": 0.7294, | |
| "step": 3990 | |
| }, | |
| { | |
| "epoch": 0.6730607437321218, | |
| "grad_norm": 11.203176756330937, | |
| "learning_rate": 7.375482049350279e-06, | |
| "loss": 0.7171, | |
| "step": 4000 | |
| }, | |
| { | |
| "epoch": 0.6747433955914521, | |
| "grad_norm": 2.8609073056055294, | |
| "learning_rate": 7.306609072413616e-06, | |
| "loss": 0.7118, | |
| "step": 4010 | |
| }, | |
| { | |
| "epoch": 0.6764260474507824, | |
| "grad_norm": 1.7309044834010798, | |
| "learning_rate": 7.237955486481934e-06, | |
| "loss": 0.7212, | |
| "step": 4020 | |
| }, | |
| { | |
| "epoch": 0.6781086993101128, | |
| "grad_norm": 9.5794380121902, | |
| "learning_rate": 7.169523249336824e-06, | |
| "loss": 0.6779, | |
| "step": 4030 | |
| }, | |
| { | |
| "epoch": 0.679791351169443, | |
| "grad_norm": 1.0470303662149365, | |
| "learning_rate": 7.101314312447732e-06, | |
| "loss": 0.7227, | |
| "step": 4040 | |
| }, | |
| { | |
| "epoch": 0.6814740030287734, | |
| "grad_norm": 2.938737811376039, | |
| "learning_rate": 7.033330620916281e-06, | |
| "loss": 0.6765, | |
| "step": 4050 | |
| }, | |
| { | |
| "epoch": 0.6831566548881036, | |
| "grad_norm": 1.8142414277489092, | |
| "learning_rate": 6.965574113420825e-06, | |
| "loss": 0.6759, | |
| "step": 4060 | |
| }, | |
| { | |
| "epoch": 0.684839306747434, | |
| "grad_norm": 3.306432673522347, | |
| "learning_rate": 6.89804672216114e-06, | |
| "loss": 0.6777, | |
| "step": 4070 | |
| }, | |
| { | |
| "epoch": 0.6865219586067642, | |
| "grad_norm": 4.412738972965688, | |
| "learning_rate": 6.830750372803344e-06, | |
| "loss": 0.6929, | |
| "step": 4080 | |
| }, | |
| { | |
| "epoch": 0.6882046104660946, | |
| "grad_norm": 1.8179503082139996, | |
| "learning_rate": 6.763686984424981e-06, | |
| "loss": 0.7213, | |
| "step": 4090 | |
| }, | |
| { | |
| "epoch": 0.6898872623254249, | |
| "grad_norm": 8.559675878645226, | |
| "learning_rate": 6.6968584694602745e-06, | |
| "loss": 0.7118, | |
| "step": 4100 | |
| }, | |
| { | |
| "epoch": 0.6915699141847552, | |
| "grad_norm": 1.5235263113886581, | |
| "learning_rate": 6.630266733645619e-06, | |
| "loss": 0.6961, | |
| "step": 4110 | |
| }, | |
| { | |
| "epoch": 0.6932525660440855, | |
| "grad_norm": 1.7534150453148492, | |
| "learning_rate": 6.563913675965215e-06, | |
| "loss": 0.6613, | |
| "step": 4120 | |
| }, | |
| { | |
| "epoch": 0.6949352179034158, | |
| "grad_norm": 2.1016655084827125, | |
| "learning_rate": 6.497801188596935e-06, | |
| "loss": 0.7291, | |
| "step": 4130 | |
| }, | |
| { | |
| "epoch": 0.6966178697627461, | |
| "grad_norm": 1.7644912995977626, | |
| "learning_rate": 6.4319311568583325e-06, | |
| "loss": 0.7308, | |
| "step": 4140 | |
| }, | |
| { | |
| "epoch": 0.6983005216220763, | |
| "grad_norm": 2.069543429346417, | |
| "learning_rate": 6.366305459152913e-06, | |
| "loss": 0.6921, | |
| "step": 4150 | |
| }, | |
| { | |
| "epoch": 0.6999831734814067, | |
| "grad_norm": 5.3985111656628195, | |
| "learning_rate": 6.30092596691655e-06, | |
| "loss": 0.7021, | |
| "step": 4160 | |
| }, | |
| { | |
| "epoch": 0.701665825340737, | |
| "grad_norm": 9.934488475975845, | |
| "learning_rate": 6.23579454456413e-06, | |
| "loss": 0.6776, | |
| "step": 4170 | |
| }, | |
| { | |
| "epoch": 0.7033484772000673, | |
| "grad_norm": 1.7631176725206237, | |
| "learning_rate": 6.170913049436354e-06, | |
| "loss": 0.7004, | |
| "step": 4180 | |
| }, | |
| { | |
| "epoch": 0.7050311290593976, | |
| "grad_norm": 2.36679174558339, | |
| "learning_rate": 6.106283331746816e-06, | |
| "loss": 0.6863, | |
| "step": 4190 | |
| }, | |
| { | |
| "epoch": 0.7067137809187279, | |
| "grad_norm": 2.65918219794226, | |
| "learning_rate": 6.0419072345292096e-06, | |
| "loss": 0.6662, | |
| "step": 4200 | |
| }, | |
| { | |
| "epoch": 0.7083964327780582, | |
| "grad_norm": 2.301967761790869, | |
| "learning_rate": 5.977786593584789e-06, | |
| "loss": 0.7096, | |
| "step": 4210 | |
| }, | |
| { | |
| "epoch": 0.7100790846373886, | |
| "grad_norm": 2.533286417560157, | |
| "learning_rate": 5.913923237429993e-06, | |
| "loss": 0.7017, | |
| "step": 4220 | |
| }, | |
| { | |
| "epoch": 0.7117617364967188, | |
| "grad_norm": 3.70662346899352, | |
| "learning_rate": 5.850318987244331e-06, | |
| "loss": 0.7055, | |
| "step": 4230 | |
| }, | |
| { | |
| "epoch": 0.7134443883560492, | |
| "grad_norm": 8.767648827056764, | |
| "learning_rate": 5.786975656818433e-06, | |
| "loss": 0.703, | |
| "step": 4240 | |
| }, | |
| { | |
| "epoch": 0.7151270402153794, | |
| "grad_norm": 1.6090692025921287, | |
| "learning_rate": 5.7238950525023284e-06, | |
| "loss": 0.6942, | |
| "step": 4250 | |
| }, | |
| { | |
| "epoch": 0.7168096920747098, | |
| "grad_norm": 2.647849228603198, | |
| "learning_rate": 5.661078973153938e-06, | |
| "loss": 0.7167, | |
| "step": 4260 | |
| }, | |
| { | |
| "epoch": 0.71849234393404, | |
| "grad_norm": 5.378380355668809, | |
| "learning_rate": 5.598529210087758e-06, | |
| "loss": 0.6901, | |
| "step": 4270 | |
| }, | |
| { | |
| "epoch": 0.7201749957933703, | |
| "grad_norm": 1.9043651705501667, | |
| "learning_rate": 5.5362475470238095e-06, | |
| "loss": 0.6859, | |
| "step": 4280 | |
| }, | |
| { | |
| "epoch": 0.7218576476527007, | |
| "grad_norm": 2.4742127833100223, | |
| "learning_rate": 5.474235760036748e-06, | |
| "loss": 0.6876, | |
| "step": 4290 | |
| }, | |
| { | |
| "epoch": 0.7235402995120309, | |
| "grad_norm": 1.8107278644915739, | |
| "learning_rate": 5.41249561750523e-06, | |
| "loss": 0.6497, | |
| "step": 4300 | |
| }, | |
| { | |
| "epoch": 0.7252229513713613, | |
| "grad_norm": 1.5303336535147203, | |
| "learning_rate": 5.3510288800614605e-06, | |
| "loss": 0.7242, | |
| "step": 4310 | |
| }, | |
| { | |
| "epoch": 0.7269056032306915, | |
| "grad_norm": 3.390771537462041, | |
| "learning_rate": 5.28983730054102e-06, | |
| "loss": 0.6897, | |
| "step": 4320 | |
| }, | |
| { | |
| "epoch": 0.7285882550900219, | |
| "grad_norm": 4.623477582122852, | |
| "learning_rate": 5.228922623932854e-06, | |
| "loss": 0.6922, | |
| "step": 4330 | |
| }, | |
| { | |
| "epoch": 0.7302709069493521, | |
| "grad_norm": 4.443913942041135, | |
| "learning_rate": 5.168286587329523e-06, | |
| "loss": 0.6885, | |
| "step": 4340 | |
| }, | |
| { | |
| "epoch": 0.7319535588086825, | |
| "grad_norm": 2.0513158482891267, | |
| "learning_rate": 5.10793091987765e-06, | |
| "loss": 0.6756, | |
| "step": 4350 | |
| }, | |
| { | |
| "epoch": 0.7336362106680128, | |
| "grad_norm": 8.824148285049553, | |
| "learning_rate": 5.047857342728636e-06, | |
| "loss": 0.7012, | |
| "step": 4360 | |
| }, | |
| { | |
| "epoch": 0.7353188625273431, | |
| "grad_norm": 2.588274219801613, | |
| "learning_rate": 4.988067568989562e-06, | |
| "loss": 0.7103, | |
| "step": 4370 | |
| }, | |
| { | |
| "epoch": 0.7370015143866734, | |
| "grad_norm": 1.2936808305024126, | |
| "learning_rate": 4.928563303674341e-06, | |
| "loss": 0.7106, | |
| "step": 4380 | |
| }, | |
| { | |
| "epoch": 0.7386841662460037, | |
| "grad_norm": 2.831170397180483, | |
| "learning_rate": 4.869346243655084e-06, | |
| "loss": 0.7047, | |
| "step": 4390 | |
| }, | |
| { | |
| "epoch": 0.740366818105334, | |
| "grad_norm": 1.0752465342448296, | |
| "learning_rate": 4.810418077613734e-06, | |
| "loss": 0.6829, | |
| "step": 4400 | |
| }, | |
| { | |
| "epoch": 0.7420494699646644, | |
| "grad_norm": 1.3052985764725058, | |
| "learning_rate": 4.751780485993894e-06, | |
| "loss": 0.7079, | |
| "step": 4410 | |
| }, | |
| { | |
| "epoch": 0.7437321218239946, | |
| "grad_norm": 1.5689263749994986, | |
| "learning_rate": 4.693435140952909e-06, | |
| "loss": 0.6813, | |
| "step": 4420 | |
| }, | |
| { | |
| "epoch": 0.7454147736833249, | |
| "grad_norm": 1.7182295271298886, | |
| "learning_rate": 4.635383706314186e-06, | |
| "loss": 0.7427, | |
| "step": 4430 | |
| }, | |
| { | |
| "epoch": 0.7470974255426552, | |
| "grad_norm": 2.4517333341343064, | |
| "learning_rate": 4.577627837519744e-06, | |
| "loss": 0.6984, | |
| "step": 4440 | |
| }, | |
| { | |
| "epoch": 0.7487800774019855, | |
| "grad_norm": 1.2680047350352834, | |
| "learning_rate": 4.520169181582992e-06, | |
| "loss": 0.667, | |
| "step": 4450 | |
| }, | |
| { | |
| "epoch": 0.7504627292613159, | |
| "grad_norm": 6.546209551875827, | |
| "learning_rate": 4.463009377041792e-06, | |
| "loss": 0.6849, | |
| "step": 4460 | |
| }, | |
| { | |
| "epoch": 0.7521453811206461, | |
| "grad_norm": 1.8969159720456337, | |
| "learning_rate": 4.4061500539117075e-06, | |
| "loss": 0.7007, | |
| "step": 4470 | |
| }, | |
| { | |
| "epoch": 0.7538280329799765, | |
| "grad_norm": 2.3612018106530206, | |
| "learning_rate": 4.349592833639533e-06, | |
| "loss": 0.6959, | |
| "step": 4480 | |
| }, | |
| { | |
| "epoch": 0.7555106848393067, | |
| "grad_norm": 3.27754190821155, | |
| "learning_rate": 4.293339329057048e-06, | |
| "loss": 0.7339, | |
| "step": 4490 | |
| }, | |
| { | |
| "epoch": 0.7571933366986371, | |
| "grad_norm": 1.6912599989319876, | |
| "learning_rate": 4.237391144335029e-06, | |
| "loss": 0.7306, | |
| "step": 4500 | |
| }, | |
| { | |
| "epoch": 0.7588759885579673, | |
| "grad_norm": 2.47269722903074, | |
| "learning_rate": 4.181749874937512e-06, | |
| "loss": 0.7192, | |
| "step": 4510 | |
| }, | |
| { | |
| "epoch": 0.7605586404172977, | |
| "grad_norm": 2.4270239455743647, | |
| "learning_rate": 4.126417107576264e-06, | |
| "loss": 0.7108, | |
| "step": 4520 | |
| }, | |
| { | |
| "epoch": 0.762241292276628, | |
| "grad_norm": 2.0691359145600763, | |
| "learning_rate": 4.071394420165575e-06, | |
| "loss": 0.6692, | |
| "step": 4530 | |
| }, | |
| { | |
| "epoch": 0.7639239441359583, | |
| "grad_norm": 4.316429669761126, | |
| "learning_rate": 4.0166833817772355e-06, | |
| "loss": 0.6288, | |
| "step": 4540 | |
| }, | |
| { | |
| "epoch": 0.7656065959952886, | |
| "grad_norm": 7.112125476704325, | |
| "learning_rate": 3.9622855525958075e-06, | |
| "loss": 0.6884, | |
| "step": 4550 | |
| }, | |
| { | |
| "epoch": 0.7672892478546188, | |
| "grad_norm": 1.6166181806129818, | |
| "learning_rate": 3.908202483874104e-06, | |
| "loss": 0.7253, | |
| "step": 4560 | |
| }, | |
| { | |
| "epoch": 0.7689718997139492, | |
| "grad_norm": 1.3786177725170563, | |
| "learning_rate": 3.85443571788899e-06, | |
| "loss": 0.691, | |
| "step": 4570 | |
| }, | |
| { | |
| "epoch": 0.7706545515732794, | |
| "grad_norm": 4.162975405559696, | |
| "learning_rate": 3.800986787897379e-06, | |
| "loss": 0.6519, | |
| "step": 4580 | |
| }, | |
| { | |
| "epoch": 0.7723372034326098, | |
| "grad_norm": 1.6443311700956842, | |
| "learning_rate": 3.747857218092518e-06, | |
| "loss": 0.6957, | |
| "step": 4590 | |
| }, | |
| { | |
| "epoch": 0.7740198552919401, | |
| "grad_norm": 2.1764445363958447, | |
| "learning_rate": 3.695048523560506e-06, | |
| "loss": 0.6941, | |
| "step": 4600 | |
| }, | |
| { | |
| "epoch": 0.7757025071512704, | |
| "grad_norm": 2.7741275412482937, | |
| "learning_rate": 3.642562210237112e-06, | |
| "loss": 0.7096, | |
| "step": 4610 | |
| }, | |
| { | |
| "epoch": 0.7773851590106007, | |
| "grad_norm": 1.8479566269445717, | |
| "learning_rate": 3.59039977486482e-06, | |
| "loss": 0.683, | |
| "step": 4620 | |
| }, | |
| { | |
| "epoch": 0.779067810869931, | |
| "grad_norm": 4.010334492952094, | |
| "learning_rate": 3.5385627049501475e-06, | |
| "loss": 0.7032, | |
| "step": 4630 | |
| }, | |
| { | |
| "epoch": 0.7807504627292613, | |
| "grad_norm": 3.6006058024268937, | |
| "learning_rate": 3.487052478721213e-06, | |
| "loss": 0.7196, | |
| "step": 4640 | |
| }, | |
| { | |
| "epoch": 0.7824331145885917, | |
| "grad_norm": 1.8050166899102325, | |
| "learning_rate": 3.435870565085605e-06, | |
| "loss": 0.7108, | |
| "step": 4650 | |
| }, | |
| { | |
| "epoch": 0.7841157664479219, | |
| "grad_norm": 1.5899705391079808, | |
| "learning_rate": 3.3850184235884853e-06, | |
| "loss": 0.7064, | |
| "step": 4660 | |
| }, | |
| { | |
| "epoch": 0.7857984183072523, | |
| "grad_norm": 1.8557370283024723, | |
| "learning_rate": 3.334497504370959e-06, | |
| "loss": 0.6606, | |
| "step": 4670 | |
| }, | |
| { | |
| "epoch": 0.7874810701665825, | |
| "grad_norm": 1.208005641481982, | |
| "learning_rate": 3.284309248128723e-06, | |
| "loss": 0.6884, | |
| "step": 4680 | |
| }, | |
| { | |
| "epoch": 0.7891637220259128, | |
| "grad_norm": 9.341318892290367, | |
| "learning_rate": 3.2344550860709924e-06, | |
| "loss": 0.6748, | |
| "step": 4690 | |
| }, | |
| { | |
| "epoch": 0.7908463738852431, | |
| "grad_norm": 2.1358641985110385, | |
| "learning_rate": 3.184936439879679e-06, | |
| "loss": 0.6675, | |
| "step": 4700 | |
| }, | |
| { | |
| "epoch": 0.7925290257445734, | |
| "grad_norm": 1.7392534944543885, | |
| "learning_rate": 3.1357547216688537e-06, | |
| "loss": 0.6825, | |
| "step": 4710 | |
| }, | |
| { | |
| "epoch": 0.7942116776039038, | |
| "grad_norm": 3.4771245266390567, | |
| "learning_rate": 3.0869113339444637e-06, | |
| "loss": 0.6702, | |
| "step": 4720 | |
| }, | |
| { | |
| "epoch": 0.795894329463234, | |
| "grad_norm": 2.316111862748991, | |
| "learning_rate": 3.038407669564358e-06, | |
| "loss": 0.7001, | |
| "step": 4730 | |
| }, | |
| { | |
| "epoch": 0.7975769813225644, | |
| "grad_norm": 4.426615946911938, | |
| "learning_rate": 2.9902451116985553e-06, | |
| "loss": 0.7226, | |
| "step": 4740 | |
| }, | |
| { | |
| "epoch": 0.7992596331818946, | |
| "grad_norm": 2.5743881424551964, | |
| "learning_rate": 2.9424250337898045e-06, | |
| "loss": 0.6813, | |
| "step": 4750 | |
| }, | |
| { | |
| "epoch": 0.800942285041225, | |
| "grad_norm": 1.6110016833153897, | |
| "learning_rate": 2.8949487995144197e-06, | |
| "loss": 0.7067, | |
| "step": 4760 | |
| }, | |
| { | |
| "epoch": 0.8026249369005553, | |
| "grad_norm": 5.352560710393416, | |
| "learning_rate": 2.8478177627433742e-06, | |
| "loss": 0.6565, | |
| "step": 4770 | |
| }, | |
| { | |
| "epoch": 0.8043075887598856, | |
| "grad_norm": 1.4057981400907735, | |
| "learning_rate": 2.8010332675037263e-06, | |
| "loss": 0.6934, | |
| "step": 4780 | |
| }, | |
| { | |
| "epoch": 0.8059902406192159, | |
| "grad_norm": 3.691333782150777, | |
| "learning_rate": 2.754596647940267e-06, | |
| "loss": 0.7209, | |
| "step": 4790 | |
| }, | |
| { | |
| "epoch": 0.8076728924785462, | |
| "grad_norm": 2.288564373928713, | |
| "learning_rate": 2.708509228277482e-06, | |
| "loss": 0.7232, | |
| "step": 4800 | |
| }, | |
| { | |
| "epoch": 0.8093555443378765, | |
| "grad_norm": 1.8419234716731552, | |
| "learning_rate": 2.6627723227817813e-06, | |
| "loss": 0.6955, | |
| "step": 4810 | |
| }, | |
| { | |
| "epoch": 0.8110381961972069, | |
| "grad_norm": 11.213190172742888, | |
| "learning_rate": 2.6173872357240345e-06, | |
| "loss": 0.7024, | |
| "step": 4820 | |
| }, | |
| { | |
| "epoch": 0.8127208480565371, | |
| "grad_norm": 4.455859392122162, | |
| "learning_rate": 2.572355261342369e-06, | |
| "loss": 0.7039, | |
| "step": 4830 | |
| }, | |
| { | |
| "epoch": 0.8144034999158674, | |
| "grad_norm": 3.0854085348169358, | |
| "learning_rate": 2.5276776838052624e-06, | |
| "loss": 0.7247, | |
| "step": 4840 | |
| }, | |
| { | |
| "epoch": 0.8160861517751977, | |
| "grad_norm": 3.3436474650696213, | |
| "learning_rate": 2.483355777174924e-06, | |
| "loss": 0.7175, | |
| "step": 4850 | |
| }, | |
| { | |
| "epoch": 0.817768803634528, | |
| "grad_norm": 1.356163440526268, | |
| "learning_rate": 2.439390805370964e-06, | |
| "loss": 0.6906, | |
| "step": 4860 | |
| }, | |
| { | |
| "epoch": 0.8194514554938583, | |
| "grad_norm": 1.47779447638527, | |
| "learning_rate": 2.3957840221343376e-06, | |
| "loss": 0.7205, | |
| "step": 4870 | |
| }, | |
| { | |
| "epoch": 0.8211341073531886, | |
| "grad_norm": 7.054023719896974, | |
| "learning_rate": 2.3525366709916123e-06, | |
| "loss": 0.6832, | |
| "step": 4880 | |
| }, | |
| { | |
| "epoch": 0.822816759212519, | |
| "grad_norm": 3.8814864475141904, | |
| "learning_rate": 2.3096499852194995e-06, | |
| "loss": 0.7336, | |
| "step": 4890 | |
| }, | |
| { | |
| "epoch": 0.8244994110718492, | |
| "grad_norm": 6.776255653827943, | |
| "learning_rate": 2.267125187809674e-06, | |
| "loss": 0.7018, | |
| "step": 4900 | |
| }, | |
| { | |
| "epoch": 0.8261820629311796, | |
| "grad_norm": 1.8228193057263617, | |
| "learning_rate": 2.224963491433916e-06, | |
| "loss": 0.703, | |
| "step": 4910 | |
| }, | |
| { | |
| "epoch": 0.8278647147905098, | |
| "grad_norm": 3.626342130356556, | |
| "learning_rate": 2.183166098409516e-06, | |
| "loss": 0.7023, | |
| "step": 4920 | |
| }, | |
| { | |
| "epoch": 0.8295473666498402, | |
| "grad_norm": 3.26362029970282, | |
| "learning_rate": 2.1417342006649905e-06, | |
| "loss": 0.6721, | |
| "step": 4930 | |
| }, | |
| { | |
| "epoch": 0.8312300185091704, | |
| "grad_norm": 2.2999383130301334, | |
| "learning_rate": 2.1006689797060997e-06, | |
| "loss": 0.6786, | |
| "step": 4940 | |
| }, | |
| { | |
| "epoch": 0.8329126703685008, | |
| "grad_norm": 2.9057170420259975, | |
| "learning_rate": 2.059971606582148e-06, | |
| "loss": 0.6815, | |
| "step": 4950 | |
| }, | |
| { | |
| "epoch": 0.8345953222278311, | |
| "grad_norm": 1.9626001854854767, | |
| "learning_rate": 2.019643241852595e-06, | |
| "loss": 0.7499, | |
| "step": 4960 | |
| }, | |
| { | |
| "epoch": 0.8362779740871613, | |
| "grad_norm": 329.7310930895053, | |
| "learning_rate": 1.9796850355539476e-06, | |
| "loss": 0.6736, | |
| "step": 4970 | |
| }, | |
| { | |
| "epoch": 0.8379606259464917, | |
| "grad_norm": 1.1972269920102658, | |
| "learning_rate": 1.9400981271669798e-06, | |
| "loss": 0.6985, | |
| "step": 4980 | |
| }, | |
| { | |
| "epoch": 0.8396432778058219, | |
| "grad_norm": 7.776076224561801, | |
| "learning_rate": 1.90088364558423e-06, | |
| "loss": 0.6954, | |
| "step": 4990 | |
| }, | |
| { | |
| "epoch": 0.8413259296651523, | |
| "grad_norm": 2.4624684249480167, | |
| "learning_rate": 1.8620427090778124e-06, | |
| "loss": 0.6708, | |
| "step": 5000 | |
| }, | |
| { | |
| "epoch": 0.8430085815244825, | |
| "grad_norm": 5.736712818540104, | |
| "learning_rate": 1.8235764252675236e-06, | |
| "loss": 0.6974, | |
| "step": 5010 | |
| }, | |
| { | |
| "epoch": 0.8446912333838129, | |
| "grad_norm": 0.7730890124741077, | |
| "learning_rate": 1.785485891089255e-06, | |
| "loss": 0.6788, | |
| "step": 5020 | |
| }, | |
| { | |
| "epoch": 0.8463738852431432, | |
| "grad_norm": 2.9811414207023206, | |
| "learning_rate": 1.7477721927637225e-06, | |
| "loss": 0.6832, | |
| "step": 5030 | |
| }, | |
| { | |
| "epoch": 0.8480565371024735, | |
| "grad_norm": 3.216350957632891, | |
| "learning_rate": 1.710436405765478e-06, | |
| "loss": 0.7281, | |
| "step": 5040 | |
| }, | |
| { | |
| "epoch": 0.8497391889618038, | |
| "grad_norm": 3.4668104918306524, | |
| "learning_rate": 1.6734795947922522e-06, | |
| "loss": 0.6975, | |
| "step": 5050 | |
| }, | |
| { | |
| "epoch": 0.8514218408211341, | |
| "grad_norm": 1.795221845364889, | |
| "learning_rate": 1.6369028137345776e-06, | |
| "loss": 0.6917, | |
| "step": 5060 | |
| }, | |
| { | |
| "epoch": 0.8531044926804644, | |
| "grad_norm": 2.549182338144749, | |
| "learning_rate": 1.60070710564575e-06, | |
| "loss": 0.6944, | |
| "step": 5070 | |
| }, | |
| { | |
| "epoch": 0.8547871445397948, | |
| "grad_norm": 22.1866919142017, | |
| "learning_rate": 1.5648935027120775e-06, | |
| "loss": 0.6802, | |
| "step": 5080 | |
| }, | |
| { | |
| "epoch": 0.856469796399125, | |
| "grad_norm": 1.687641844262827, | |
| "learning_rate": 1.5294630262234493e-06, | |
| "loss": 0.7476, | |
| "step": 5090 | |
| }, | |
| { | |
| "epoch": 0.8581524482584554, | |
| "grad_norm": 1.2836006280455892, | |
| "learning_rate": 1.4944166865441994e-06, | |
| "loss": 0.6956, | |
| "step": 5100 | |
| }, | |
| { | |
| "epoch": 0.8598351001177856, | |
| "grad_norm": 2.516350114463321, | |
| "learning_rate": 1.4597554830843107e-06, | |
| "loss": 0.7205, | |
| "step": 5110 | |
| }, | |
| { | |
| "epoch": 0.8615177519771159, | |
| "grad_norm": 9.19196693665457, | |
| "learning_rate": 1.4254804042709068e-06, | |
| "loss": 0.6662, | |
| "step": 5120 | |
| }, | |
| { | |
| "epoch": 0.8632004038364463, | |
| "grad_norm": 3.954066952143847, | |
| "learning_rate": 1.3915924275200675e-06, | |
| "loss": 0.7031, | |
| "step": 5130 | |
| }, | |
| { | |
| "epoch": 0.8648830556957765, | |
| "grad_norm": 2.2061938832524417, | |
| "learning_rate": 1.3580925192089493e-06, | |
| "loss": 0.6784, | |
| "step": 5140 | |
| }, | |
| { | |
| "epoch": 0.8665657075551069, | |
| "grad_norm": 3.455046382432621, | |
| "learning_rate": 1.324981634648238e-06, | |
| "loss": 0.6664, | |
| "step": 5150 | |
| }, | |
| { | |
| "epoch": 0.8682483594144371, | |
| "grad_norm": 5.8040509908212154, | |
| "learning_rate": 1.2922607180548995e-06, | |
| "loss": 0.6653, | |
| "step": 5160 | |
| }, | |
| { | |
| "epoch": 0.8699310112737675, | |
| "grad_norm": 2.697273491938428, | |
| "learning_rate": 1.2599307025252576e-06, | |
| "loss": 0.704, | |
| "step": 5170 | |
| }, | |
| { | |
| "epoch": 0.8716136631330977, | |
| "grad_norm": 2.6088244377011702, | |
| "learning_rate": 1.2279925100083762e-06, | |
| "loss": 0.6865, | |
| "step": 5180 | |
| }, | |
| { | |
| "epoch": 0.8732963149924281, | |
| "grad_norm": 2.0817334962394285, | |
| "learning_rate": 1.1964470512797832e-06, | |
| "loss": 0.7205, | |
| "step": 5190 | |
| }, | |
| { | |
| "epoch": 0.8749789668517584, | |
| "grad_norm": 1.7958470597004403, | |
| "learning_rate": 1.1652952259154826e-06, | |
| "loss": 0.7099, | |
| "step": 5200 | |
| }, | |
| { | |
| "epoch": 0.8766616187110887, | |
| "grad_norm": 3.2733151360632036, | |
| "learning_rate": 1.1345379222663171e-06, | |
| "loss": 0.6987, | |
| "step": 5210 | |
| }, | |
| { | |
| "epoch": 0.878344270570419, | |
| "grad_norm": 1.6087614204576393, | |
| "learning_rate": 1.1041760174326143e-06, | |
| "loss": 0.6978, | |
| "step": 5220 | |
| }, | |
| { | |
| "epoch": 0.8800269224297493, | |
| "grad_norm": 3.297188115731345, | |
| "learning_rate": 1.0742103772391992e-06, | |
| "loss": 0.6781, | |
| "step": 5230 | |
| }, | |
| { | |
| "epoch": 0.8817095742890796, | |
| "grad_norm": 2.917636673707956, | |
| "learning_rate": 1.044641856210683e-06, | |
| "loss": 0.7437, | |
| "step": 5240 | |
| }, | |
| { | |
| "epoch": 0.8833922261484098, | |
| "grad_norm": 8.019619725218233, | |
| "learning_rate": 1.0154712975471102e-06, | |
| "loss": 0.758, | |
| "step": 5250 | |
| }, | |
| { | |
| "epoch": 0.8850748780077402, | |
| "grad_norm": 7.193736679369726, | |
| "learning_rate": 9.86699533099899e-07, | |
| "loss": 0.6678, | |
| "step": 5260 | |
| }, | |
| { | |
| "epoch": 0.8867575298670705, | |
| "grad_norm": 4.75197740855129, | |
| "learning_rate": 9.583273833481353e-07, | |
| "loss": 0.6939, | |
| "step": 5270 | |
| }, | |
| { | |
| "epoch": 0.8884401817264008, | |
| "grad_norm": 1.3875395588777142, | |
| "learning_rate": 9.303556573751565e-07, | |
| "loss": 0.6715, | |
| "step": 5280 | |
| }, | |
| { | |
| "epoch": 0.8901228335857311, | |
| "grad_norm": 7.269926639485838, | |
| "learning_rate": 9.027851528454966e-07, | |
| "loss": 0.6991, | |
| "step": 5290 | |
| }, | |
| { | |
| "epoch": 0.8918054854450614, | |
| "grad_norm": 4.075839399698397, | |
| "learning_rate": 8.756166559821277e-07, | |
| "loss": 0.7314, | |
| "step": 5300 | |
| }, | |
| { | |
| "epoch": 0.8934881373043917, | |
| "grad_norm": 1.3826784402983028, | |
| "learning_rate": 8.488509415440482e-07, | |
| "loss": 0.7293, | |
| "step": 5310 | |
| }, | |
| { | |
| "epoch": 0.8951707891637221, | |
| "grad_norm": 1.4652147153970938, | |
| "learning_rate": 8.224887728041813e-07, | |
| "loss": 0.6606, | |
| "step": 5320 | |
| }, | |
| { | |
| "epoch": 0.8968534410230523, | |
| "grad_norm": 2.0863978417309323, | |
| "learning_rate": 7.96530901527614e-07, | |
| "loss": 0.7296, | |
| "step": 5330 | |
| }, | |
| { | |
| "epoch": 0.8985360928823827, | |
| "grad_norm": 1.9386591366517139, | |
| "learning_rate": 7.709780679501572e-07, | |
| "loss": 0.6946, | |
| "step": 5340 | |
| }, | |
| { | |
| "epoch": 0.9002187447417129, | |
| "grad_norm": 3.0390055437806285, | |
| "learning_rate": 7.458310007572328e-07, | |
| "loss": 0.6847, | |
| "step": 5350 | |
| }, | |
| { | |
| "epoch": 0.9019013966010433, | |
| "grad_norm": 1.163325287527846, | |
| "learning_rate": 7.210904170631022e-07, | |
| "loss": 0.7207, | |
| "step": 5360 | |
| }, | |
| { | |
| "epoch": 0.9035840484603735, | |
| "grad_norm": 5.48257877681163, | |
| "learning_rate": 6.967570223904124e-07, | |
| "loss": 0.7263, | |
| "step": 5370 | |
| }, | |
| { | |
| "epoch": 0.9052667003197038, | |
| "grad_norm": 6.123097026808169, | |
| "learning_rate": 6.728315106500754e-07, | |
| "loss": 0.7256, | |
| "step": 5380 | |
| }, | |
| { | |
| "epoch": 0.9069493521790342, | |
| "grad_norm": 3.0929718580276124, | |
| "learning_rate": 6.493145641214759e-07, | |
| "loss": 0.6962, | |
| "step": 5390 | |
| }, | |
| { | |
| "epoch": 0.9086320040383644, | |
| "grad_norm": 3.166026920813825, | |
| "learning_rate": 6.2620685343303e-07, | |
| "loss": 0.6831, | |
| "step": 5400 | |
| }, | |
| { | |
| "epoch": 0.9103146558976948, | |
| "grad_norm": 2.630461596894742, | |
| "learning_rate": 6.035090375430457e-07, | |
| "loss": 0.7223, | |
| "step": 5410 | |
| }, | |
| { | |
| "epoch": 0.911997307757025, | |
| "grad_norm": 1.7352101832444968, | |
| "learning_rate": 5.81221763720936e-07, | |
| "loss": 0.7472, | |
| "step": 5420 | |
| }, | |
| { | |
| "epoch": 0.9136799596163554, | |
| "grad_norm": 1.1751102801576652, | |
| "learning_rate": 5.593456675287606e-07, | |
| "loss": 0.6748, | |
| "step": 5430 | |
| }, | |
| { | |
| "epoch": 0.9153626114756857, | |
| "grad_norm": 1.8578887898122585, | |
| "learning_rate": 5.378813728031084e-07, | |
| "loss": 0.7157, | |
| "step": 5440 | |
| }, | |
| { | |
| "epoch": 0.917045263335016, | |
| "grad_norm": 3.038430900939285, | |
| "learning_rate": 5.168294916372973e-07, | |
| "loss": 0.6973, | |
| "step": 5450 | |
| }, | |
| { | |
| "epoch": 0.9187279151943463, | |
| "grad_norm": 5.115909422632603, | |
| "learning_rate": 4.961906243639275e-07, | |
| "loss": 0.7049, | |
| "step": 5460 | |
| }, | |
| { | |
| "epoch": 0.9204105670536766, | |
| "grad_norm": 2.398364535038852, | |
| "learning_rate": 4.759653595377539e-07, | |
| "loss": 0.7006, | |
| "step": 5470 | |
| }, | |
| { | |
| "epoch": 0.9220932189130069, | |
| "grad_norm": 3.3023283633867946, | |
| "learning_rate": 4.5615427391891116e-07, | |
| "loss": 0.7014, | |
| "step": 5480 | |
| }, | |
| { | |
| "epoch": 0.9237758707723372, | |
| "grad_norm": 1.1475766127698004, | |
| "learning_rate": 4.3675793245646025e-07, | |
| "loss": 0.684, | |
| "step": 5490 | |
| }, | |
| { | |
| "epoch": 0.9254585226316675, | |
| "grad_norm": 3.6822695086224826, | |
| "learning_rate": 4.17776888272281e-07, | |
| "loss": 0.6958, | |
| "step": 5500 | |
| }, | |
| { | |
| "epoch": 0.9271411744909979, | |
| "grad_norm": 1.718740428551119, | |
| "learning_rate": 3.992116826452985e-07, | |
| "loss": 0.6899, | |
| "step": 5510 | |
| }, | |
| { | |
| "epoch": 0.9288238263503281, | |
| "grad_norm": 1.599672421061828, | |
| "learning_rate": 3.810628449960418e-07, | |
| "loss": 0.7165, | |
| "step": 5520 | |
| }, | |
| { | |
| "epoch": 0.9305064782096584, | |
| "grad_norm": 2.5587015609740624, | |
| "learning_rate": 3.633308928715545e-07, | |
| "loss": 0.7062, | |
| "step": 5530 | |
| }, | |
| { | |
| "epoch": 0.9321891300689887, | |
| "grad_norm": 1.3444954334832635, | |
| "learning_rate": 3.4601633193063473e-07, | |
| "loss": 0.6733, | |
| "step": 5540 | |
| }, | |
| { | |
| "epoch": 0.933871781928319, | |
| "grad_norm": 1.3838657312145675, | |
| "learning_rate": 3.291196559294135e-07, | |
| "loss": 0.7021, | |
| "step": 5550 | |
| }, | |
| { | |
| "epoch": 0.9355544337876494, | |
| "grad_norm": 2.2439673400854927, | |
| "learning_rate": 3.1264134670726916e-07, | |
| "loss": 0.697, | |
| "step": 5560 | |
| }, | |
| { | |
| "epoch": 0.9372370856469796, | |
| "grad_norm": 2.57298478462355, | |
| "learning_rate": 2.965818741730969e-07, | |
| "loss": 0.6808, | |
| "step": 5570 | |
| }, | |
| { | |
| "epoch": 0.93891973750631, | |
| "grad_norm": 1.9748099778905932, | |
| "learning_rate": 2.8094169629190093e-07, | |
| "loss": 0.6898, | |
| "step": 5580 | |
| }, | |
| { | |
| "epoch": 0.9406023893656402, | |
| "grad_norm": 1.7598873523945464, | |
| "learning_rate": 2.6572125907174336e-07, | |
| "loss": 0.7443, | |
| "step": 5590 | |
| }, | |
| { | |
| "epoch": 0.9422850412249706, | |
| "grad_norm": 1.0267616624562237, | |
| "learning_rate": 2.5092099655100953e-07, | |
| "loss": 0.6702, | |
| "step": 5600 | |
| }, | |
| { | |
| "epoch": 0.9439676930843008, | |
| "grad_norm": 4.051418037354739, | |
| "learning_rate": 2.3654133078604756e-07, | |
| "loss": 0.7195, | |
| "step": 5610 | |
| }, | |
| { | |
| "epoch": 0.9456503449436312, | |
| "grad_norm": 2.9487347944707674, | |
| "learning_rate": 2.225826718391266e-07, | |
| "loss": 0.719, | |
| "step": 5620 | |
| }, | |
| { | |
| "epoch": 0.9473329968029615, | |
| "grad_norm": 1.3722015426706022, | |
| "learning_rate": 2.0904541776673947e-07, | |
| "loss": 0.7412, | |
| "step": 5630 | |
| }, | |
| { | |
| "epoch": 0.9490156486622918, | |
| "grad_norm": 2.8036706297716463, | |
| "learning_rate": 1.9592995460825158e-07, | |
| "loss": 0.7019, | |
| "step": 5640 | |
| }, | |
| { | |
| "epoch": 0.9506983005216221, | |
| "grad_norm": 9.103926061132224, | |
| "learning_rate": 1.8323665637489994e-07, | |
| "loss": 0.7174, | |
| "step": 5650 | |
| }, | |
| { | |
| "epoch": 0.9523809523809523, | |
| "grad_norm": 1.7592200853027784, | |
| "learning_rate": 1.7096588503912157e-07, | |
| "loss": 0.7195, | |
| "step": 5660 | |
| }, | |
| { | |
| "epoch": 0.9540636042402827, | |
| "grad_norm": 3.1270828934952344, | |
| "learning_rate": 1.5911799052423181e-07, | |
| "loss": 0.6657, | |
| "step": 5670 | |
| }, | |
| { | |
| "epoch": 0.955746256099613, | |
| "grad_norm": 2.1567590085070623, | |
| "learning_rate": 1.4769331069444225e-07, | |
| "loss": 0.7052, | |
| "step": 5680 | |
| }, | |
| { | |
| "epoch": 0.9574289079589433, | |
| "grad_norm": 1.1902859282588563, | |
| "learning_rate": 1.3669217134523515e-07, | |
| "loss": 0.6877, | |
| "step": 5690 | |
| }, | |
| { | |
| "epoch": 0.9591115598182736, | |
| "grad_norm": 7.922649277989734, | |
| "learning_rate": 1.261148861940642e-07, | |
| "loss": 0.686, | |
| "step": 5700 | |
| }, | |
| { | |
| "epoch": 0.9607942116776039, | |
| "grad_norm": 1.615937417848637, | |
| "learning_rate": 1.159617568714133e-07, | |
| "loss": 0.7074, | |
| "step": 5710 | |
| }, | |
| { | |
| "epoch": 0.9624768635369342, | |
| "grad_norm": 15.858587476598592, | |
| "learning_rate": 1.062330729121902e-07, | |
| "loss": 0.6786, | |
| "step": 5720 | |
| }, | |
| { | |
| "epoch": 0.9641595153962645, | |
| "grad_norm": 1.637566321973649, | |
| "learning_rate": 9.692911174747465e-08, | |
| "loss": 0.716, | |
| "step": 5730 | |
| }, | |
| { | |
| "epoch": 0.9658421672555948, | |
| "grad_norm": 2.761244845163171, | |
| "learning_rate": 8.80501386966065e-08, | |
| "loss": 0.6758, | |
| "step": 5740 | |
| }, | |
| { | |
| "epoch": 0.9675248191149252, | |
| "grad_norm": 5.5241655574023865, | |
| "learning_rate": 7.959640695961667e-08, | |
| "loss": 0.7456, | |
| "step": 5750 | |
| }, | |
| { | |
| "epoch": 0.9692074709742554, | |
| "grad_norm": 1.5337162009745597, | |
| "learning_rate": 7.156815761000635e-08, | |
| "loss": 0.7126, | |
| "step": 5760 | |
| }, | |
| { | |
| "epoch": 0.9708901228335858, | |
| "grad_norm": 1.7912409816389805, | |
| "learning_rate": 6.396561958787739e-08, | |
| "loss": 0.6826, | |
| "step": 5770 | |
| }, | |
| { | |
| "epoch": 0.972572774692916, | |
| "grad_norm": 1.8515929529446165, | |
| "learning_rate": 5.678900969339762e-08, | |
| "loss": 0.6876, | |
| "step": 5780 | |
| }, | |
| { | |
| "epoch": 0.9742554265522464, | |
| "grad_norm": 1.420161377910867, | |
| "learning_rate": 5.003853258062574e-08, | |
| "loss": 0.7281, | |
| "step": 5790 | |
| }, | |
| { | |
| "epoch": 0.9759380784115766, | |
| "grad_norm": 1.366745752085519, | |
| "learning_rate": 4.371438075166767e-08, | |
| "loss": 0.7176, | |
| "step": 5800 | |
| }, | |
| { | |
| "epoch": 0.9776207302709069, | |
| "grad_norm": 3.204422850633802, | |
| "learning_rate": 3.7816734551187614e-08, | |
| "loss": 0.7202, | |
| "step": 5810 | |
| }, | |
| { | |
| "epoch": 0.9793033821302373, | |
| "grad_norm": 1.628769098083527, | |
| "learning_rate": 3.234576216127216e-08, | |
| "loss": 0.7259, | |
| "step": 5820 | |
| }, | |
| { | |
| "epoch": 0.9809860339895675, | |
| "grad_norm": 3.9642094146966222, | |
| "learning_rate": 2.7301619596629136e-08, | |
| "loss": 0.6832, | |
| "step": 5830 | |
| }, | |
| { | |
| "epoch": 0.9826686858488979, | |
| "grad_norm": 1.88831762326948, | |
| "learning_rate": 2.2684450700137805e-08, | |
| "loss": 0.7115, | |
| "step": 5840 | |
| }, | |
| { | |
| "epoch": 0.9843513377082281, | |
| "grad_norm": 3.6088675270194783, | |
| "learning_rate": 1.8494387138748848e-08, | |
| "loss": 0.7047, | |
| "step": 5850 | |
| }, | |
| { | |
| "epoch": 0.9860339895675585, | |
| "grad_norm": 1.8877205726129367, | |
| "learning_rate": 1.4731548399729011e-08, | |
| "loss": 0.6976, | |
| "step": 5860 | |
| }, | |
| { | |
| "epoch": 0.9877166414268888, | |
| "grad_norm": 2.042194635543674, | |
| "learning_rate": 1.1396041787255506e-08, | |
| "loss": 0.7131, | |
| "step": 5870 | |
| }, | |
| { | |
| "epoch": 0.9893992932862191, | |
| "grad_norm": 3.1156490038039886, | |
| "learning_rate": 8.487962419350126e-09, | |
| "loss": 0.6616, | |
| "step": 5880 | |
| }, | |
| { | |
| "epoch": 0.9910819451455494, | |
| "grad_norm": 1.8176538264310005, | |
| "learning_rate": 6.007393225176405e-09, | |
| "loss": 0.6745, | |
| "step": 5890 | |
| }, | |
| { | |
| "epoch": 0.9927645970048797, | |
| "grad_norm": 2.325273854524485, | |
| "learning_rate": 3.954404942664858e-09, | |
| "loss": 0.6781, | |
| "step": 5900 | |
| }, | |
| { | |
| "epoch": 0.99444724886421, | |
| "grad_norm": 2.5926710895230207, | |
| "learning_rate": 2.3290561165029144e-09, | |
| "loss": 0.6723, | |
| "step": 5910 | |
| }, | |
| { | |
| "epoch": 0.9961299007235404, | |
| "grad_norm": 1.4959217102097229, | |
| "learning_rate": 1.1313930964645946e-09, | |
| "loss": 0.7133, | |
| "step": 5920 | |
| }, | |
| { | |
| "epoch": 0.9978125525828706, | |
| "grad_norm": 1.7034564614382208, | |
| "learning_rate": 3.614500360815676e-10, | |
| "loss": 0.6896, | |
| "step": 5930 | |
| }, | |
| { | |
| "epoch": 0.9994952044422009, | |
| "grad_norm": 1.1236420488964254, | |
| "learning_rate": 1.924889167892463e-11, | |
| "loss": 0.6763, | |
| "step": 5940 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "step": 5943, | |
| "total_flos": 2.7076016696375378e+19, | |
| "train_loss": 0.7068847330174581, | |
| "train_runtime": 81427.1666, | |
| "train_samples_per_second": 4.671, | |
| "train_steps_per_second": 0.073 | |
| } | |
| ], | |
| "logging_steps": 10, | |
| "max_steps": 5943, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 1, | |
| "save_steps": 400, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": false | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 2.7076016696375378e+19, | |
| "train_batch_size": 4, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |
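
The log above is a Hugging Face Trainer `trainer_state.json`: `log_history` is a list of per-logging-event records (`epoch`, `step`, `loss`, `learning_rate`, `grad_norm`, emitted every 10 optimizer steps per `logging_steps`), closed by a summary record that carries run-level aggregates (`train_loss`, `train_runtime`, throughput) instead of a per-step loss. A minimal sketch for extracting and plotting the curves — the filename and the plotting choices are assumptions for illustration, not part of the log itself:

```python
import json

import matplotlib.pyplot as plt

# Path is an assumption: the Trainer writes this file as
# `trainer_state.json` inside its output directory.
with open("trainer_state.json") as f:
    state = json.load(f)

# Keep only per-step records; the final entry in `log_history` reports
# run-level aggregates ("train_loss", "train_runtime", ...) and has no
# per-step "loss" key, so this filter drops it.
history = [h for h in state["log_history"] if "loss" in h]

steps = [h["step"] for h in history]
losses = [h["loss"] for h in history]
lrs = [h["learning_rate"] for h in history]

fig, ax_loss = plt.subplots()
ax_loss.plot(steps, losses, label="train loss")
ax_loss.set_xlabel("step")
ax_loss.set_ylabel("loss")

# Second y-axis for the learning-rate schedule, since it spans
# several orders of magnitude less than the loss.
ax_lr = ax_loss.twinx()
ax_lr.plot(steps, lrs, color="tab:orange", label="learning rate")
ax_lr.set_ylabel("learning rate")

fig.tight_layout()
fig.savefig("training_curves.png")
```

Filtering on the presence of the `loss` key, rather than slicing off the last element, also stays correct if evaluation records (which report `eval_loss`) are interleaved into `log_history`.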