{
  "best_metric": 0.7099470604625244,
  "best_model_checkpoint": "Emotion_DF_Image_VIT_V1/checkpoint-5385",
  "epoch": 3.0,
  "eval_steps": 500,
  "global_step": 5385,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.005571030640668524,
      "grad_norm": 1.7724220752716064,
      "learning_rate": 4.990714948932219e-05,
      "loss": 1.9052,
      "step": 10
    },
    {
      "epoch": 0.011142061281337047,
      "grad_norm": 2.042982578277588,
      "learning_rate": 4.981429897864439e-05,
      "loss": 1.8026,
      "step": 20
    },
    {
      "epoch": 0.016713091922005572,
      "grad_norm": 1.7499141693115234,
      "learning_rate": 4.972144846796657e-05,
      "loss": 1.7188,
      "step": 30
    },
    {
      "epoch": 0.022284122562674095,
      "grad_norm": 1.839320421218872,
      "learning_rate": 4.962859795728877e-05,
      "loss": 1.6334,
      "step": 40
    },
    {
      "epoch": 0.027855153203342618,
      "grad_norm": 2.4035816192626953,
      "learning_rate": 4.953574744661096e-05,
      "loss": 1.5432,
      "step": 50
    },
    {
      "epoch": 0.033426183844011144,
      "grad_norm": 2.6126906871795654,
      "learning_rate": 4.9442896935933144e-05,
      "loss": 1.489,
      "step": 60
    },
    {
      "epoch": 0.03899721448467967,
      "grad_norm": 2.406311273574829,
      "learning_rate": 4.9350046425255343e-05,
      "loss": 1.4671,
      "step": 70
    },
    {
      "epoch": 0.04456824512534819,
      "grad_norm": 2.1221556663513184,
      "learning_rate": 4.925719591457753e-05,
      "loss": 1.4329,
      "step": 80
    },
    {
      "epoch": 0.05013927576601671,
      "grad_norm": 2.5221104621887207,
      "learning_rate": 4.916434540389973e-05,
      "loss": 1.3217,
      "step": 90
    },
    {
      "epoch": 0.055710306406685235,
      "grad_norm": 2.779143810272217,
      "learning_rate": 4.9071494893221914e-05,
      "loss": 1.3284,
      "step": 100
    },
    {
      "epoch": 0.06128133704735376,
      "grad_norm": 2.719045400619507,
      "learning_rate": 4.897864438254411e-05,
      "loss": 1.2146,
      "step": 110
    },
    {
      "epoch": 0.06685236768802229,
      "grad_norm": 3.121081590652466,
      "learning_rate": 4.88857938718663e-05,
      "loss": 1.3155,
      "step": 120
    },
    {
      "epoch": 0.07242339832869081,
      "grad_norm": 3.414604425430298,
      "learning_rate": 4.8792943361188485e-05,
      "loss": 1.3054,
      "step": 130
    },
    {
      "epoch": 0.07799442896935933,
      "grad_norm": 4.522671222686768,
      "learning_rate": 4.8700092850510685e-05,
      "loss": 1.3444,
      "step": 140
    },
    {
      "epoch": 0.08356545961002786,
      "grad_norm": 3.215402603149414,
      "learning_rate": 4.860724233983287e-05,
      "loss": 1.3849,
      "step": 150
    },
    {
      "epoch": 0.08913649025069638,
      "grad_norm": 2.473184108734131,
      "learning_rate": 4.851439182915506e-05,
      "loss": 1.309,
      "step": 160
    },
    {
      "epoch": 0.0947075208913649,
      "grad_norm": 3.685305595397949,
      "learning_rate": 4.8421541318477255e-05,
      "loss": 1.176,
      "step": 170
    },
    {
      "epoch": 0.10027855153203342,
      "grad_norm": 2.6124424934387207,
      "learning_rate": 4.832869080779944e-05,
      "loss": 1.1778,
      "step": 180
    },
    {
      "epoch": 0.10584958217270195,
      "grad_norm": 2.4001147747039795,
      "learning_rate": 4.823584029712164e-05,
      "loss": 1.3552,
      "step": 190
    },
    {
      "epoch": 0.11142061281337047,
      "grad_norm": 3.7218260765075684,
      "learning_rate": 4.8142989786443826e-05,
      "loss": 1.1984,
      "step": 200
    },
    {
      "epoch": 0.116991643454039,
      "grad_norm": 2.5889687538146973,
      "learning_rate": 4.805013927576602e-05,
      "loss": 1.2019,
      "step": 210
    },
    {
      "epoch": 0.12256267409470752,
      "grad_norm": 1.5679919719696045,
      "learning_rate": 4.795728876508821e-05,
      "loss": 1.2056,
      "step": 220
    },
    {
      "epoch": 0.12813370473537605,
      "grad_norm": 3.407369375228882,
      "learning_rate": 4.7864438254410404e-05,
      "loss": 1.2292,
      "step": 230
    },
    {
      "epoch": 0.13370473537604458,
      "grad_norm": 3.2745847702026367,
      "learning_rate": 4.7771587743732597e-05,
      "loss": 1.0303,
      "step": 240
    },
    {
      "epoch": 0.1392757660167131,
      "grad_norm": 2.460291862487793,
      "learning_rate": 4.767873723305478e-05,
      "loss": 1.0992,
      "step": 250
    },
    {
      "epoch": 0.14484679665738162,
      "grad_norm": 3.020585775375366,
      "learning_rate": 4.7585886722376975e-05,
      "loss": 1.173,
      "step": 260
    },
    {
      "epoch": 0.15041782729805014,
      "grad_norm": 3.79120135307312,
      "learning_rate": 4.749303621169917e-05,
      "loss": 1.2393,
      "step": 270
    },
    {
      "epoch": 0.15598885793871867,
      "grad_norm": 2.429368019104004,
      "learning_rate": 4.740018570102136e-05,
      "loss": 1.1848,
      "step": 280
    },
    {
      "epoch": 0.1615598885793872,
      "grad_norm": 3.2640347480773926,
      "learning_rate": 4.7307335190343546e-05,
      "loss": 1.1227,
      "step": 290
    },
    {
      "epoch": 0.1671309192200557,
      "grad_norm": 5.605760097503662,
      "learning_rate": 4.721448467966574e-05,
      "loss": 1.1953,
      "step": 300
    },
    {
      "epoch": 0.17270194986072424,
      "grad_norm": 3.241220474243164,
      "learning_rate": 4.712163416898793e-05,
      "loss": 1.2692,
      "step": 310
    },
    {
      "epoch": 0.17827298050139276,
      "grad_norm": 1.84876549243927,
      "learning_rate": 4.702878365831012e-05,
      "loss": 1.0829,
      "step": 320
    },
    {
      "epoch": 0.18384401114206128,
      "grad_norm": 4.22404670715332,
      "learning_rate": 4.6935933147632316e-05,
      "loss": 1.1851,
      "step": 330
    },
    {
      "epoch": 0.1894150417827298,
      "grad_norm": 2.8334600925445557,
      "learning_rate": 4.68430826369545e-05,
      "loss": 1.1582,
      "step": 340
    },
    {
      "epoch": 0.19498607242339833,
      "grad_norm": 2.526007890701294,
      "learning_rate": 4.6750232126276694e-05,
      "loss": 1.1031,
      "step": 350
    },
    {
      "epoch": 0.20055710306406685,
      "grad_norm": 2.2652316093444824,
      "learning_rate": 4.665738161559889e-05,
      "loss": 1.0609,
      "step": 360
    },
    {
      "epoch": 0.20612813370473537,
      "grad_norm": 3.004528284072876,
      "learning_rate": 4.656453110492108e-05,
      "loss": 1.1383,
      "step": 370
    },
    {
      "epoch": 0.2116991643454039,
      "grad_norm": 1.7893799543380737,
      "learning_rate": 4.647168059424327e-05,
      "loss": 1.0547,
      "step": 380
    },
    {
      "epoch": 0.21727019498607242,
      "grad_norm": 2.7427473068237305,
      "learning_rate": 4.637883008356546e-05,
      "loss": 1.085,
      "step": 390
    },
    {
      "epoch": 0.22284122562674094,
      "grad_norm": 3.2746737003326416,
      "learning_rate": 4.628597957288766e-05,
      "loss": 1.0598,
      "step": 400
    },
    {
      "epoch": 0.22841225626740946,
      "grad_norm": 3.6754062175750732,
      "learning_rate": 4.619312906220984e-05,
      "loss": 1.1675,
      "step": 410
    },
    {
      "epoch": 0.233983286908078,
      "grad_norm": 2.6632232666015625,
      "learning_rate": 4.6100278551532035e-05,
      "loss": 0.9843,
      "step": 420
    },
    {
      "epoch": 0.2395543175487465,
      "grad_norm": 3.018148899078369,
      "learning_rate": 4.600742804085423e-05,
      "loss": 1.2118,
      "step": 430
    },
    {
      "epoch": 0.24512534818941503,
      "grad_norm": 4.811627388000488,
      "learning_rate": 4.5914577530176414e-05,
      "loss": 1.0827,
      "step": 440
    },
    {
      "epoch": 0.25069637883008355,
      "grad_norm": 3.8523404598236084,
      "learning_rate": 4.582172701949861e-05,
      "loss": 1.1762,
      "step": 450
    },
    {
      "epoch": 0.2562674094707521,
      "grad_norm": 2.6023926734924316,
      "learning_rate": 4.57288765088208e-05,
      "loss": 1.0629,
      "step": 460
    },
    {
      "epoch": 0.2618384401114206,
      "grad_norm": 3.7375123500823975,
      "learning_rate": 4.563602599814299e-05,
      "loss": 1.0148,
      "step": 470
    },
    {
      "epoch": 0.26740947075208915,
      "grad_norm": 3.1458210945129395,
      "learning_rate": 4.5543175487465184e-05,
      "loss": 1.0659,
      "step": 480
    },
    {
      "epoch": 0.27298050139275765,
      "grad_norm": 4.204915523529053,
      "learning_rate": 4.545032497678737e-05,
      "loss": 1.2268,
      "step": 490
    },
    {
      "epoch": 0.2785515320334262,
      "grad_norm": 4.100999355316162,
      "learning_rate": 4.535747446610957e-05,
      "loss": 1.1576,
      "step": 500
    },
    {
      "epoch": 0.2841225626740947,
      "grad_norm": 2.977987051010132,
      "learning_rate": 4.5264623955431755e-05,
      "loss": 1.0668,
      "step": 510
    },
    {
      "epoch": 0.28969359331476324,
      "grad_norm": 2.7143728733062744,
      "learning_rate": 4.5171773444753954e-05,
      "loss": 1.2645,
      "step": 520
    },
    {
      "epoch": 0.29526462395543174,
      "grad_norm": 3.3356759548187256,
      "learning_rate": 4.507892293407614e-05,
      "loss": 1.0379,
      "step": 530
    },
    {
      "epoch": 0.3008356545961003,
      "grad_norm": 4.00338077545166,
      "learning_rate": 4.4986072423398326e-05,
      "loss": 1.0597,
      "step": 540
    },
    {
      "epoch": 0.3064066852367688,
      "grad_norm": 2.7598226070404053,
      "learning_rate": 4.4893221912720525e-05,
      "loss": 1.0865,
      "step": 550
    },
    {
      "epoch": 0.31197771587743733,
      "grad_norm": 3.0780742168426514,
      "learning_rate": 4.480037140204271e-05,
      "loss": 1.0278,
      "step": 560
    },
    {
      "epoch": 0.31754874651810583,
      "grad_norm": 2.746990442276001,
      "learning_rate": 4.470752089136491e-05,
      "loss": 1.0914,
      "step": 570
    },
    {
      "epoch": 0.3231197771587744,
      "grad_norm": 3.7979509830474854,
      "learning_rate": 4.4614670380687096e-05,
      "loss": 1.115,
      "step": 580
    },
    {
      "epoch": 0.3286908077994429,
      "grad_norm": 2.0203397274017334,
      "learning_rate": 4.452181987000928e-05,
      "loss": 0.9985,
      "step": 590
    },
    {
      "epoch": 0.3342618384401114,
      "grad_norm": 2.2943918704986572,
      "learning_rate": 4.442896935933148e-05,
      "loss": 0.9477,
      "step": 600
    },
    {
      "epoch": 0.3398328690807799,
      "grad_norm": 3.32389760017395,
      "learning_rate": 4.433611884865367e-05,
      "loss": 1.015,
      "step": 610
    },
    {
      "epoch": 0.34540389972144847,
      "grad_norm": 4.5039801597595215,
      "learning_rate": 4.4243268337975866e-05,
      "loss": 1.024,
      "step": 620
    },
    {
      "epoch": 0.35097493036211697,
      "grad_norm": 4.04258918762207,
      "learning_rate": 4.415041782729805e-05,
      "loss": 1.093,
      "step": 630
    },
    {
      "epoch": 0.3565459610027855,
      "grad_norm": 3.79727840423584,
      "learning_rate": 4.4057567316620244e-05,
      "loss": 1.0203,
      "step": 640
    },
    {
      "epoch": 0.362116991643454,
      "grad_norm": 3.895496129989624,
      "learning_rate": 4.396471680594244e-05,
      "loss": 1.0243,
      "step": 650
    },
    {
      "epoch": 0.36768802228412256,
      "grad_norm": 4.588969707489014,
      "learning_rate": 4.387186629526462e-05,
      "loss": 0.9466,
      "step": 660
    },
    {
      "epoch": 0.3732590529247911,
      "grad_norm": 2.5958499908447266,
      "learning_rate": 4.377901578458682e-05,
      "loss": 0.9206,
      "step": 670
    },
    {
      "epoch": 0.3788300835654596,
      "grad_norm": 3.2079598903656006,
      "learning_rate": 4.368616527390901e-05,
      "loss": 1.1272,
      "step": 680
    },
    {
      "epoch": 0.38440111420612816,
      "grad_norm": 3.1226675510406494,
      "learning_rate": 4.35933147632312e-05,
      "loss": 0.9476,
      "step": 690
    },
    {
      "epoch": 0.38997214484679665,
      "grad_norm": 4.5104475021362305,
      "learning_rate": 4.350046425255339e-05,
      "loss": 1.0093,
      "step": 700
    },
    {
      "epoch": 0.3955431754874652,
      "grad_norm": 3.53946852684021,
      "learning_rate": 4.340761374187558e-05,
      "loss": 1.0781,
      "step": 710
    },
    {
      "epoch": 0.4011142061281337,
      "grad_norm": 2.8066279888153076,
      "learning_rate": 4.331476323119778e-05,
      "loss": 1.0066,
      "step": 720
    },
    {
      "epoch": 0.40668523676880225,
      "grad_norm": 3.863245725631714,
      "learning_rate": 4.3221912720519964e-05,
      "loss": 0.9463,
      "step": 730
    },
    {
      "epoch": 0.41225626740947074,
      "grad_norm": 2.5134997367858887,
      "learning_rate": 4.3129062209842156e-05,
      "loss": 1.0134,
      "step": 740
    },
    {
      "epoch": 0.4178272980501393,
      "grad_norm": 3.644773006439209,
      "learning_rate": 4.303621169916435e-05,
      "loss": 1.0764,
      "step": 750
    },
    {
      "epoch": 0.4233983286908078,
      "grad_norm": 3.082054376602173,
      "learning_rate": 4.2943361188486535e-05,
      "loss": 0.9475,
      "step": 760
    },
    {
      "epoch": 0.42896935933147634,
      "grad_norm": 3.091280937194824,
      "learning_rate": 4.2850510677808734e-05,
      "loss": 1.0513,
      "step": 770
    },
    {
      "epoch": 0.43454038997214484,
      "grad_norm": 2.4878854751586914,
      "learning_rate": 4.275766016713092e-05,
      "loss": 0.9297,
      "step": 780
    },
    {
      "epoch": 0.4401114206128134,
      "grad_norm": 2.918303966522217,
      "learning_rate": 4.266480965645311e-05,
      "loss": 1.151,
      "step": 790
    },
    {
      "epoch": 0.4456824512534819,
      "grad_norm": 3.757899284362793,
      "learning_rate": 4.2571959145775305e-05,
      "loss": 0.9776,
      "step": 800
    },
    {
      "epoch": 0.45125348189415043,
      "grad_norm": 2.8936688899993896,
      "learning_rate": 4.24791086350975e-05,
      "loss": 1.1334,
      "step": 810
    },
    {
      "epoch": 0.4568245125348189,
      "grad_norm": 3.672881841659546,
      "learning_rate": 4.238625812441969e-05,
      "loss": 0.9685,
      "step": 820
    },
    {
      "epoch": 0.4623955431754875,
      "grad_norm": 3.355109691619873,
      "learning_rate": 4.2293407613741876e-05,
      "loss": 0.9977,
      "step": 830
    },
    {
      "epoch": 0.467966573816156,
      "grad_norm": 4.297019958496094,
      "learning_rate": 4.220055710306407e-05,
      "loss": 0.9713,
      "step": 840
    },
    {
      "epoch": 0.4735376044568245,
      "grad_norm": 2.775784730911255,
      "learning_rate": 4.210770659238626e-05,
      "loss": 0.995,
      "step": 850
    },
    {
      "epoch": 0.479108635097493,
      "grad_norm": 2.9572598934173584,
      "learning_rate": 4.201485608170845e-05,
      "loss": 0.9962,
      "step": 860
    },
    {
      "epoch": 0.48467966573816157,
      "grad_norm": 2.668698310852051,
      "learning_rate": 4.192200557103064e-05,
      "loss": 0.9233,
      "step": 870
    },
    {
      "epoch": 0.49025069637883006,
      "grad_norm": 3.679955005645752,
      "learning_rate": 4.182915506035283e-05,
      "loss": 1.1134,
      "step": 880
    },
    {
      "epoch": 0.4958217270194986,
      "grad_norm": 3.2309141159057617,
      "learning_rate": 4.1736304549675024e-05,
      "loss": 0.9775,
      "step": 890
    },
    {
      "epoch": 0.5013927576601671,
      "grad_norm": 4.367251396179199,
      "learning_rate": 4.164345403899722e-05,
      "loss": 0.9837,
      "step": 900
    },
    {
      "epoch": 0.5069637883008357,
      "grad_norm": 3.036268711090088,
      "learning_rate": 4.155060352831941e-05,
      "loss": 1.0347,
      "step": 910
    },
    {
      "epoch": 0.5125348189415042,
      "grad_norm": 3.834496021270752,
      "learning_rate": 4.1457753017641595e-05,
      "loss": 0.9134,
      "step": 920
    },
    {
      "epoch": 0.5181058495821727,
      "grad_norm": 4.641286849975586,
      "learning_rate": 4.1364902506963794e-05,
      "loss": 0.976,
      "step": 930
    },
    {
      "epoch": 0.5236768802228412,
      "grad_norm": 5.904285430908203,
      "learning_rate": 4.127205199628598e-05,
      "loss": 0.9421,
      "step": 940
    },
    {
      "epoch": 0.5292479108635098,
      "grad_norm": 2.7388248443603516,
      "learning_rate": 4.117920148560817e-05,
      "loss": 0.8805,
      "step": 950
    },
    {
      "epoch": 0.5348189415041783,
      "grad_norm": 4.416590690612793,
      "learning_rate": 4.1086350974930365e-05,
      "loss": 0.9017,
      "step": 960
    },
    {
      "epoch": 0.5403899721448467,
      "grad_norm": 5.286828994750977,
      "learning_rate": 4.099350046425255e-05,
      "loss": 1.0434,
      "step": 970
    },
    {
      "epoch": 0.5459610027855153,
      "grad_norm": 3.1228902339935303,
      "learning_rate": 4.090064995357475e-05,
      "loss": 0.953,
      "step": 980
    },
    {
      "epoch": 0.5515320334261838,
      "grad_norm": 3.2225730419158936,
      "learning_rate": 4.0807799442896936e-05,
      "loss": 0.8677,
      "step": 990
    },
    {
      "epoch": 0.5571030640668524,
      "grad_norm": 4.216846942901611,
      "learning_rate": 4.071494893221913e-05,
      "loss": 1.0036,
      "step": 1000
    },
    {
      "epoch": 0.5626740947075209,
      "grad_norm": 2.7185237407684326,
      "learning_rate": 4.062209842154132e-05,
      "loss": 0.9435,
      "step": 1010
    },
    {
      "epoch": 0.5682451253481894,
      "grad_norm": 3.1824262142181396,
      "learning_rate": 4.052924791086351e-05,
      "loss": 1.0351,
      "step": 1020
    },
    {
      "epoch": 0.5738161559888579,
      "grad_norm": 3.7866647243499756,
      "learning_rate": 4.0436397400185706e-05,
      "loss": 0.9876,
      "step": 1030
    },
    {
      "epoch": 0.5793871866295265,
      "grad_norm": 3.192934989929199,
      "learning_rate": 4.034354688950789e-05,
      "loss": 0.8558,
      "step": 1040
    },
    {
      "epoch": 0.584958217270195,
      "grad_norm": 3.7569358348846436,
      "learning_rate": 4.0250696378830085e-05,
      "loss": 0.9368,
      "step": 1050
    },
    {
      "epoch": 0.5905292479108635,
      "grad_norm": 3.53072190284729,
      "learning_rate": 4.015784586815228e-05,
      "loss": 0.9955,
      "step": 1060
    },
    {
      "epoch": 0.596100278551532,
      "grad_norm": 2.880999803543091,
      "learning_rate": 4.006499535747446e-05,
      "loss": 0.9874,
      "step": 1070
    },
    {
      "epoch": 0.6016713091922006,
      "grad_norm": 3.211893320083618,
      "learning_rate": 3.997214484679666e-05,
      "loss": 0.8937,
      "step": 1080
    },
    {
      "epoch": 0.6072423398328691,
      "grad_norm": 3.6051464080810547,
      "learning_rate": 3.987929433611885e-05,
      "loss": 0.9925,
      "step": 1090
    },
    {
      "epoch": 0.6128133704735376,
      "grad_norm": 3.4191086292266846,
      "learning_rate": 3.978644382544105e-05,
      "loss": 0.9265,
      "step": 1100
    },
    {
      "epoch": 0.6183844011142061,
      "grad_norm": 3.484645128250122,
      "learning_rate": 3.969359331476323e-05,
      "loss": 0.9116,
      "step": 1110
    },
    {
      "epoch": 0.6239554317548747,
      "grad_norm": 4.512744426727295,
      "learning_rate": 3.960074280408542e-05,
      "loss": 0.9396,
      "step": 1120
    },
    {
      "epoch": 0.6295264623955432,
      "grad_norm": 3.186840772628784,
      "learning_rate": 3.950789229340762e-05,
      "loss": 0.9437,
      "step": 1130
    },
    {
      "epoch": 0.6350974930362117,
      "grad_norm": 3.2555806636810303,
      "learning_rate": 3.9415041782729804e-05,
      "loss": 0.9879,
      "step": 1140
    },
    {
      "epoch": 0.6406685236768802,
      "grad_norm": 3.2986598014831543,
      "learning_rate": 3.9322191272052003e-05,
      "loss": 0.8755,
      "step": 1150
    },
    {
      "epoch": 0.6462395543175488,
      "grad_norm": 3.292611837387085,
      "learning_rate": 3.922934076137419e-05,
      "loss": 0.9109,
      "step": 1160
    },
    {
      "epoch": 0.6518105849582173,
      "grad_norm": 2.7159316539764404,
      "learning_rate": 3.9136490250696375e-05,
      "loss": 1.0027,
      "step": 1170
    },
    {
      "epoch": 0.6573816155988857,
      "grad_norm": 4.204648971557617,
      "learning_rate": 3.9043639740018574e-05,
      "loss": 0.9761,
      "step": 1180
    },
    {
      "epoch": 0.6629526462395543,
      "grad_norm": 2.5838115215301514,
      "learning_rate": 3.895078922934076e-05,
      "loss": 0.875,
      "step": 1190
    },
    {
      "epoch": 0.6685236768802229,
      "grad_norm": 3.0180771350860596,
      "learning_rate": 3.885793871866296e-05,
      "loss": 1.0162,
      "step": 1200
    },
    {
      "epoch": 0.6740947075208914,
      "grad_norm": 3.756748914718628,
      "learning_rate": 3.8765088207985145e-05,
      "loss": 1.0033,
      "step": 1210
    },
    {
      "epoch": 0.6796657381615598,
      "grad_norm": 4.750040531158447,
      "learning_rate": 3.867223769730734e-05,
      "loss": 1.003,
      "step": 1220
    },
    {
      "epoch": 0.6852367688022284,
      "grad_norm": 2.008877992630005,
      "learning_rate": 3.857938718662953e-05,
      "loss": 0.7859,
      "step": 1230
    },
    {
      "epoch": 0.6908077994428969,
      "grad_norm": 3.9416215419769287,
      "learning_rate": 3.8486536675951716e-05,
      "loss": 0.971,
      "step": 1240
    },
    {
      "epoch": 0.6963788300835655,
      "grad_norm": 3.8396077156066895,
      "learning_rate": 3.8393686165273915e-05,
      "loss": 1.0351,
      "step": 1250
    },
    {
      "epoch": 0.7019498607242339,
      "grad_norm": 3.6760761737823486,
      "learning_rate": 3.83008356545961e-05,
      "loss": 0.9603,
      "step": 1260
    },
    {
      "epoch": 0.7075208913649025,
      "grad_norm": 3.7321598529815674,
      "learning_rate": 3.8207985143918294e-05,
      "loss": 0.9743,
      "step": 1270
    },
    {
      "epoch": 0.713091922005571,
      "grad_norm": 2.594165325164795,
      "learning_rate": 3.8115134633240486e-05,
      "loss": 0.9199,
      "step": 1280
    },
    {
      "epoch": 0.7186629526462396,
      "grad_norm": 3.0368616580963135,
      "learning_rate": 3.802228412256267e-05,
      "loss": 0.8403,
      "step": 1290
    },
    {
      "epoch": 0.724233983286908,
      "grad_norm": 2.8159286975860596,
      "learning_rate": 3.792943361188487e-05,
      "loss": 0.9106,
      "step": 1300
    },
    {
      "epoch": 0.7298050139275766,
      "grad_norm": 4.351017475128174,
      "learning_rate": 3.783658310120706e-05,
      "loss": 1.0043,
      "step": 1310
    },
    {
      "epoch": 0.7353760445682451,
      "grad_norm": 2.638453960418701,
      "learning_rate": 3.774373259052925e-05,
      "loss": 0.8995,
      "step": 1320
    },
    {
      "epoch": 0.7409470752089137,
      "grad_norm": 3.244128465652466,
      "learning_rate": 3.765088207985144e-05,
      "loss": 0.8562,
      "step": 1330
    },
    {
      "epoch": 0.7465181058495822,
      "grad_norm": 3.379518747329712,
      "learning_rate": 3.755803156917363e-05,
      "loss": 0.7807,
      "step": 1340
    },
    {
      "epoch": 0.7520891364902507,
      "grad_norm": 3.127863645553589,
      "learning_rate": 3.746518105849583e-05,
      "loss": 0.9695,
      "step": 1350
    },
    {
      "epoch": 0.7576601671309192,
      "grad_norm": 4.407602310180664,
      "learning_rate": 3.737233054781801e-05,
      "loss": 0.9514,
      "step": 1360
    },
    {
      "epoch": 0.7632311977715878,
      "grad_norm": 2.0727860927581787,
      "learning_rate": 3.7279480037140206e-05,
      "loss": 0.9497,
      "step": 1370
    },
    {
      "epoch": 0.7688022284122563,
      "grad_norm": 4.9297590255737305,
      "learning_rate": 3.71866295264624e-05,
      "loss": 1.0568,
      "step": 1380
    },
    {
      "epoch": 0.7743732590529248,
      "grad_norm": 3.622457981109619,
      "learning_rate": 3.709377901578459e-05,
      "loss": 0.9383,
      "step": 1390
    },
    {
      "epoch": 0.7799442896935933,
      "grad_norm": 2.3367862701416016,
      "learning_rate": 3.700092850510678e-05,
      "loss": 0.929,
      "step": 1400
    },
    {
      "epoch": 0.7855153203342619,
      "grad_norm": 3.38315486907959,
      "learning_rate": 3.690807799442897e-05,
      "loss": 0.8984,
      "step": 1410
    },
    {
      "epoch": 0.7910863509749304,
      "grad_norm": 5.530807018280029,
      "learning_rate": 3.681522748375116e-05,
      "loss": 1.0223,
      "step": 1420
    },
    {
      "epoch": 0.7966573816155988,
      "grad_norm": 2.0339179039001465,
      "learning_rate": 3.6722376973073354e-05,
      "loss": 1.0331,
      "step": 1430
    },
    {
      "epoch": 0.8022284122562674,
      "grad_norm": 3.6576170921325684,
      "learning_rate": 3.662952646239555e-05,
      "loss": 0.8678,
      "step": 1440
    },
    {
      "epoch": 0.807799442896936,
      "grad_norm": 3.3415591716766357,
      "learning_rate": 3.653667595171773e-05,
      "loss": 1.0513,
      "step": 1450
    },
    {
      "epoch": 0.8133704735376045,
      "grad_norm": 2.7140233516693115,
      "learning_rate": 3.6443825441039925e-05,
      "loss": 0.916,
      "step": 1460
    },
    {
      "epoch": 0.8189415041782729,
      "grad_norm": 3.1542391777038574,
      "learning_rate": 3.635097493036212e-05,
      "loss": 1.0301,
      "step": 1470
    },
    {
      "epoch": 0.8245125348189415,
      "grad_norm": 2.5809764862060547,
      "learning_rate": 3.625812441968431e-05,
      "loss": 0.9945,
      "step": 1480
    },
    {
      "epoch": 0.83008356545961,
      "grad_norm": 3.00136661529541,
      "learning_rate": 3.61652739090065e-05,
      "loss": 0.9597,
      "step": 1490
    },
    {
      "epoch": 0.8356545961002786,
      "grad_norm": 2.6830506324768066,
      "learning_rate": 3.607242339832869e-05,
      "loss": 0.9092,
      "step": 1500
    },
    {
      "epoch": 0.841225626740947,
      "grad_norm": 2.1529722213745117,
      "learning_rate": 3.597957288765089e-05,
      "loss": 0.9479,
      "step": 1510
    },
    {
      "epoch": 0.8467966573816156,
      "grad_norm": 2.4851369857788086,
      "learning_rate": 3.5886722376973074e-05,
      "loss": 0.9432,
      "step": 1520
    },
    {
      "epoch": 0.8523676880222841,
      "grad_norm": 3.878922939300537,
      "learning_rate": 3.5793871866295266e-05,
      "loss": 0.9956,
      "step": 1530
    },
    {
      "epoch": 0.8579387186629527,
      "grad_norm": 4.410084247589111,
      "learning_rate": 3.570102135561746e-05,
      "loss": 1.0484,
      "step": 1540
    },
    {
      "epoch": 0.8635097493036211,
      "grad_norm": 2.1463232040405273,
      "learning_rate": 3.5608170844939645e-05,
      "loss": 0.9908,
      "step": 1550
    },
    {
      "epoch": 0.8690807799442897,
      "grad_norm": 2.9710140228271484,
      "learning_rate": 3.5515320334261844e-05,
      "loss": 0.9709,
      "step": 1560
    },
    {
      "epoch": 0.8746518105849582,
      "grad_norm": 2.664862871170044,
      "learning_rate": 3.542246982358403e-05,
      "loss": 0.8727,
      "step": 1570
    },
    {
      "epoch": 0.8802228412256268,
      "grad_norm": 2.054464101791382,
      "learning_rate": 3.532961931290622e-05,
      "loss": 0.8774,
      "step": 1580
    },
    {
      "epoch": 0.8857938718662952,
      "grad_norm": 3.988269090652466,
      "learning_rate": 3.5236768802228415e-05,
      "loss": 0.9397,
      "step": 1590
    },
    {
      "epoch": 0.8913649025069638,
      "grad_norm": 4.200674533843994,
      "learning_rate": 3.51439182915506e-05,
      "loss": 0.8342,
      "step": 1600
    },
    {
      "epoch": 0.8969359331476323,
      "grad_norm": 3.2249672412872314,
      "learning_rate": 3.50510677808728e-05,
      "loss": 0.9048,
      "step": 1610
    },
    {
      "epoch": 0.9025069637883009,
      "grad_norm": 4.159034729003906,
      "learning_rate": 3.4958217270194986e-05,
      "loss": 1.0362,
      "step": 1620
    },
    {
      "epoch": 0.9080779944289693,
      "grad_norm": 3.2033538818359375,
      "learning_rate": 3.4865366759517185e-05,
      "loss": 0.8371,
      "step": 1630
    },
    {
      "epoch": 0.9136490250696379,
      "grad_norm": 3.175391674041748,
      "learning_rate": 3.477251624883937e-05,
      "loss": 0.9104,
      "step": 1640
    },
    {
      "epoch": 0.9192200557103064,
      "grad_norm": 4.99779748916626,
      "learning_rate": 3.4679665738161556e-05,
      "loss": 0.8844,
      "step": 1650
    },
    {
      "epoch": 0.924791086350975,
      "grad_norm": 3.133185625076294,
      "learning_rate": 3.4586815227483756e-05,
      "loss": 0.9252,
      "step": 1660
    },
    {
      "epoch": 0.9303621169916435,
      "grad_norm": 2.974994659423828,
      "learning_rate": 3.449396471680594e-05,
      "loss": 0.9909,
      "step": 1670
    },
    {
      "epoch": 0.935933147632312,
      "grad_norm": 2.782477617263794,
      "learning_rate": 3.440111420612814e-05,
      "loss": 0.9024,
      "step": 1680
    },
    {
      "epoch": 0.9415041782729805,
      "grad_norm": 4.230593681335449,
      "learning_rate": 3.430826369545033e-05,
      "loss": 0.9197,
      "step": 1690
    },
    {
      "epoch": 0.947075208913649,
      "grad_norm": 4.659527778625488,
      "learning_rate": 3.421541318477251e-05,
      "loss": 0.8859,
      "step": 1700
    },
    {
      "epoch": 0.9526462395543176,
      "grad_norm": 2.4073703289031982,
      "learning_rate": 3.412256267409471e-05,
      "loss": 1.04,
      "step": 1710
    },
    {
      "epoch": 0.958217270194986,
      "grad_norm": 3.6646125316619873,
      "learning_rate": 3.40297121634169e-05,
      "loss": 0.8362,
      "step": 1720
    },
    {
      "epoch": 0.9637883008356546,
      "grad_norm": 2.379714012145996,
      "learning_rate": 3.39368616527391e-05,
      "loss": 0.8265,
      "step": 1730
    },
    {
      "epoch": 0.9693593314763231,
      "grad_norm": 2.8113396167755127,
      "learning_rate": 3.384401114206128e-05,
      "loss": 0.8607,
      "step": 1740
    },
    {
      "epoch": 0.9749303621169917,
      "grad_norm": 4.4973955154418945,
      "learning_rate": 3.375116063138347e-05,
      "loss": 0.8007,
      "step": 1750
    },
    {
      "epoch": 0.9805013927576601,
      "grad_norm": 2.4099206924438477,
      "learning_rate": 3.365831012070567e-05,
      "loss": 0.903,
      "step": 1760
    },
    {
      "epoch": 0.9860724233983287,
      "grad_norm": 2.6482093334198,
      "learning_rate": 3.3565459610027854e-05,
      "loss": 1.0105,
      "step": 1770
    },
    {
      "epoch": 0.9916434540389972,
      "grad_norm": 3.1650354862213135,
      "learning_rate": 3.347260909935005e-05,
      "loss": 0.9164,
      "step": 1780
    },
    {
      "epoch": 0.9972144846796658,
      "grad_norm": 3.000230550765991,
      "learning_rate": 3.337975858867224e-05,
      "loss": 0.9584,
      "step": 1790
    },
    {
      "epoch": 1.0,
      "eval_accuracy": 0.6581220395653385,
      "eval_loss": 0.9149981141090393,
      "eval_runtime": 55.2638,
      "eval_samples_per_second": 64.943,
      "eval_steps_per_second": 4.071,
      "step": 1795
    },
    {
      "epoch": 1.0027855153203342,
      "grad_norm": 2.146998167037964,
      "learning_rate": 3.328690807799443e-05,
      "loss": 0.8704,
      "step": 1800
    },
    {
      "epoch": 1.0083565459610029,
      "grad_norm": 3.532796621322632,
      "learning_rate": 3.3194057567316624e-05,
      "loss": 0.7819,
      "step": 1810
    },
    {
      "epoch": 1.0139275766016713,
      "grad_norm": 3.2359650135040283,
      "learning_rate": 3.310120705663881e-05,
      "loss": 0.7653,
      "step": 1820
    },
    {
      "epoch": 1.0194986072423398,
      "grad_norm": 2.2468202114105225,
      "learning_rate": 3.300835654596101e-05,
      "loss": 0.5893,
      "step": 1830
    },
    {
      "epoch": 1.0250696378830084,
      "grad_norm": 2.938746213912964,
      "learning_rate": 3.2915506035283195e-05,
      "loss": 0.754,
      "step": 1840
    },
    {
      "epoch": 1.0306406685236769,
      "grad_norm": 6.2073774337768555,
      "learning_rate": 3.282265552460539e-05,
      "loss": 0.7421,
      "step": 1850
    },
    {
      "epoch": 1.0362116991643453,
      "grad_norm": 2.68640398979187,
      "learning_rate": 3.272980501392758e-05,
      "loss": 0.8489,
      "step": 1860
    },
    {
      "epoch": 1.041782729805014,
      "grad_norm": 2.984046697616577,
      "learning_rate": 3.2636954503249766e-05,
      "loss": 0.7622,
      "step": 1870
    },
    {
      "epoch": 1.0473537604456824,
      "grad_norm": 4.016560077667236,
      "learning_rate": 3.2544103992571965e-05,
      "loss": 0.8421,
      "step": 1880
    },
    {
      "epoch": 1.052924791086351,
      "grad_norm": 4.0480427742004395,
      "learning_rate": 3.245125348189415e-05,
      "loss": 0.7281,
      "step": 1890
    },
    {
      "epoch": 1.0584958217270195,
      "grad_norm": 3.571326494216919,
      "learning_rate": 3.235840297121634e-05,
      "loss": 0.7151,
      "step": 1900
    },
    {
      "epoch": 1.064066852367688,
      "grad_norm": 2.5628035068511963,
      "learning_rate": 3.2265552460538536e-05,
      "loss": 0.7681,
      "step": 1910
    },
    {
      "epoch": 1.0696378830083566,
      "grad_norm": 3.5606746673583984,
      "learning_rate": 3.217270194986073e-05,
      "loss": 0.8217,
      "step": 1920
    },
    {
      "epoch": 1.075208913649025,
      "grad_norm": 2.901460647583008,
      "learning_rate": 3.207985143918292e-05,
      "loss": 0.6321,
      "step": 1930
    },
    {
      "epoch": 1.0807799442896937,
      "grad_norm": 4.441089153289795,
      "learning_rate": 3.1987000928505107e-05,
      "loss": 0.8465,
      "step": 1940
    },
    {
      "epoch": 1.0863509749303621,
      "grad_norm": 2.9387269020080566,
      "learning_rate": 3.18941504178273e-05,
      "loss": 0.7423,
      "step": 1950
    },
    {
      "epoch": 1.0919220055710306,
      "grad_norm": 4.128583908081055,
      "learning_rate": 3.180129990714949e-05,
      "loss": 0.7409,
      "step": 1960
    },
    {
      "epoch": 1.0974930362116992,
      "grad_norm": 5.6328911781311035,
      "learning_rate": 3.1708449396471684e-05,
      "loss": 0.7903,
      "step": 1970
    },
    {
      "epoch": 1.1030640668523677,
      "grad_norm": 3.1918294429779053,
      "learning_rate": 3.161559888579388e-05,
      "loss": 0.7739,
      "step": 1980
    },
    {
      "epoch": 1.1086350974930361,
      "grad_norm": 3.357419967651367,
      "learning_rate": 3.152274837511606e-05,
      "loss": 0.8122,
      "step": 1990
    },
    {
      "epoch": 1.1142061281337048,
      "grad_norm": 5.250940322875977,
      "learning_rate": 3.1429897864438255e-05,
      "loss": 0.7817,
      "step": 2000
    },
    {
      "epoch": 1.1197771587743732,
      "grad_norm": 4.065739631652832,
      "learning_rate": 3.133704735376045e-05,
      "loss": 0.6688,
      "step": 2010
    },
    {
      "epoch": 1.1253481894150417,
      "grad_norm": 3.3966140747070312,
      "learning_rate": 3.124419684308264e-05,
      "loss": 0.9103,
      "step": 2020
    },
    {
      "epoch": 1.1309192200557103,
      "grad_norm": 3.450237274169922,
      "learning_rate": 3.1151346332404826e-05,
      "loss": 0.6775,
      "step": 2030
    },
    {
      "epoch": 1.1364902506963788,
      "grad_norm": 2.966513156890869,
      "learning_rate": 3.105849582172702e-05,
      "loss": 0.7393,
      "step": 2040
    },
    {
      "epoch": 1.1420612813370474,
      "grad_norm": 4.148128986358643,
      "learning_rate": 3.096564531104921e-05,
      "loss": 0.7087,
      "step": 2050
    },
    {
      "epoch": 1.1476323119777159,
      "grad_norm": 3.3900933265686035,
      "learning_rate": 3.0872794800371404e-05,
      "loss": 0.8012,
      "step": 2060
    },
    {
      "epoch": 1.1532033426183843,
      "grad_norm": 1.1098343133926392,
      "learning_rate": 3.0779944289693596e-05,
      "loss": 0.5613,
      "step": 2070
    },
    {
      "epoch": 1.158774373259053,
      "grad_norm": 2.9282333850860596,
      "learning_rate": 3.068709377901578e-05,
      "loss": 0.7911,
      "step": 2080
    },
    {
      "epoch": 1.1643454038997214,
      "grad_norm": 3.531545639038086,
      "learning_rate": 3.059424326833798e-05,
      "loss": 0.6876,
      "step": 2090
    },
    {
      "epoch": 1.16991643454039,
      "grad_norm": 4.742493152618408,
      "learning_rate": 3.050139275766017e-05,
      "loss": 0.7992,
      "step": 2100
    },
    {
      "epoch": 1.1754874651810585,
      "grad_norm": 4.898960590362549,
      "learning_rate": 3.0408542246982356e-05,
      "loss": 0.7948,
      "step": 2110
    },
    {
      "epoch": 1.181058495821727,
      "grad_norm": 4.260847568511963,
      "learning_rate": 3.0315691736304552e-05,
      "loss": 0.7129,
      "step": 2120
    },
    {
      "epoch": 1.1866295264623956,
      "grad_norm": 2.722090005874634,
      "learning_rate": 3.022284122562674e-05,
      "loss": 0.627,
      "step": 2130
    },
    {
      "epoch": 1.192200557103064,
      "grad_norm": 2.056386947631836,
      "learning_rate": 3.0129990714948937e-05,
      "loss": 0.5682,
      "step": 2140
    },
    {
      "epoch": 1.1977715877437327,
      "grad_norm": 3.16398024559021,
      "learning_rate": 3.0037140204271123e-05,
      "loss": 0.7183,
      "step": 2150
    },
    {
      "epoch": 1.2033426183844012,
      "grad_norm": 2.589129686355591,
      "learning_rate": 2.9944289693593312e-05,
      "loss": 0.7993,
      "step": 2160
    },
    {
      "epoch": 1.2089136490250696,
      "grad_norm": 6.259389877319336,
      "learning_rate": 2.9851439182915508e-05,
      "loss": 0.9054,
      "step": 2170
    },
    {
      "epoch": 1.2144846796657383,
      "grad_norm": 4.096316814422607,
      "learning_rate": 2.9758588672237697e-05,
      "loss": 0.7831,
      "step": 2180
    },
    {
      "epoch": 1.2200557103064067,
      "grad_norm": 4.087615966796875,
      "learning_rate": 2.9665738161559893e-05,
      "loss": 0.7216,
      "step": 2190
    },
    {
      "epoch": 1.2256267409470751,
      "grad_norm": 4.487613201141357,
      "learning_rate": 2.957288765088208e-05,
      "loss": 0.6588,
      "step": 2200
    },
    {
      "epoch": 1.2311977715877438,
      "grad_norm": 3.7831146717071533,
      "learning_rate": 2.9480037140204275e-05,
      "loss": 0.8339,
      "step": 2210
    },
    {
      "epoch": 1.2367688022284122,
      "grad_norm": 5.81228494644165,
      "learning_rate": 2.9387186629526464e-05,
      "loss": 0.721,
      "step": 2220
    },
    {
      "epoch": 1.2423398328690807,
      "grad_norm": 5.113112449645996,
      "learning_rate": 2.9294336118848653e-05,
      "loss": 0.6777,
      "step": 2230
    },
    {
      "epoch": 1.2479108635097493,
      "grad_norm": 4.003824710845947,
      "learning_rate": 2.920148560817085e-05,
      "loss": 0.8218,
      "step": 2240
    },
    {
      "epoch": 1.2534818941504178,
      "grad_norm": 4.8881120681762695,
      "learning_rate": 2.9108635097493035e-05,
      "loss": 0.714,
      "step": 2250
    },
    {
      "epoch": 1.2590529247910864,
      "grad_norm": 6.676164627075195,
      "learning_rate": 2.901578458681523e-05,
      "loss": 0.7644,
      "step": 2260
    },
    {
      "epoch": 1.2646239554317549,
      "grad_norm": 2.559022903442383,
      "learning_rate": 2.892293407613742e-05,
      "loss": 0.7641,
      "step": 2270
    },
    {
      "epoch": 1.2701949860724233,
      "grad_norm": 2.7222700119018555,
      "learning_rate": 2.883008356545961e-05,
      "loss": 0.7616,
      "step": 2280
    },
    {
      "epoch": 1.275766016713092,
      "grad_norm": 4.195894241333008,
      "learning_rate": 2.8737233054781805e-05,
      "loss": 0.8697,
      "step": 2290
    },
    {
      "epoch": 1.2813370473537604,
      "grad_norm": 5.248186111450195,
      "learning_rate": 2.864438254410399e-05,
      "loss": 0.7673,
      "step": 2300
    },
    {
      "epoch": 1.286908077994429,
      "grad_norm": 5.167591571807861,
      "learning_rate": 2.8551532033426187e-05,
      "loss": 0.6413,
      "step": 2310
    },
    {
      "epoch": 1.2924791086350975,
      "grad_norm": 2.6084582805633545,
      "learning_rate": 2.8458681522748376e-05,
      "loss": 0.6684,
      "step": 2320
    },
    {
      "epoch": 1.298050139275766,
      "grad_norm": 2.669848918914795,
      "learning_rate": 2.8365831012070565e-05,
      "loss": 0.6509,
      "step": 2330
    },
    {
      "epoch": 1.3036211699164346,
      "grad_norm": 4.120858192443848,
      "learning_rate": 2.827298050139276e-05,
      "loss": 0.8132,
      "step": 2340
    },
    {
      "epoch": 1.309192200557103,
      "grad_norm": 3.833299160003662,
      "learning_rate": 2.8180129990714947e-05,
      "loss": 0.8605,
      "step": 2350
    },
    {
      "epoch": 1.3147632311977717,
      "grad_norm": 3.052611827850342,
      "learning_rate": 2.8087279480037143e-05,
      "loss": 0.6801,
      "step": 2360
    },
    {
      "epoch": 1.3203342618384402,
      "grad_norm": 2.838963031768799,
      "learning_rate": 2.7994428969359332e-05,
      "loss": 0.7063,
      "step": 2370
    },
    {
      "epoch": 1.3259052924791086,
      "grad_norm": 5.363685131072998,
      "learning_rate": 2.7901578458681528e-05,
      "loss": 0.7922,
      "step": 2380
    },
    {
      "epoch": 1.331476323119777,
      "grad_norm": 4.10276460647583,
      "learning_rate": 2.7808727948003717e-05,
      "loss": 0.8215,
      "step": 2390
    },
    {
      "epoch": 1.3370473537604457,
      "grad_norm": 2.9703421592712402,
      "learning_rate": 2.7715877437325903e-05,
      "loss": 0.6872,
      "step": 2400
    },
    {
      "epoch": 1.3426183844011141,
      "grad_norm": 2.325404644012451,
      "learning_rate": 2.76230269266481e-05,
      "loss": 0.7106,
      "step": 2410
    },
    {
      "epoch": 1.3481894150417828,
      "grad_norm": 3.2471485137939453,
      "learning_rate": 2.7530176415970288e-05,
      "loss": 0.7266,
      "step": 2420
    },
    {
      "epoch": 1.3537604456824512,
      "grad_norm": 4.239513874053955,
      "learning_rate": 2.7437325905292484e-05,
      "loss": 0.6973,
      "step": 2430
    },
    {
      "epoch": 1.3593314763231197,
      "grad_norm": 2.0118629932403564,
      "learning_rate": 2.734447539461467e-05,
      "loss": 0.7247,
      "step": 2440
    },
    {
      "epoch": 1.3649025069637883,
      "grad_norm": 3.550149440765381,
      "learning_rate": 2.725162488393686e-05,
      "loss": 0.7266,
      "step": 2450
    },
    {
      "epoch": 1.3704735376044568,
      "grad_norm": 4.473415851593018,
      "learning_rate": 2.7158774373259055e-05,
      "loss": 0.6131,
      "step": 2460
    },
    {
      "epoch": 1.3760445682451254,
      "grad_norm": 3.9009509086608887,
      "learning_rate": 2.7065923862581244e-05,
      "loss": 0.8645,
      "step": 2470
    },
    {
      "epoch": 1.3816155988857939,
      "grad_norm": 4.10238790512085,
      "learning_rate": 2.697307335190344e-05,
      "loss": 0.8191,
      "step": 2480
    },
    {
      "epoch": 1.3871866295264623,
      "grad_norm": 4.880800247192383,
      "learning_rate": 2.6880222841225626e-05,
      "loss": 0.7277,
      "step": 2490
    },
    {
      "epoch": 1.392757660167131,
      "grad_norm": 4.270191669464111,
      "learning_rate": 2.678737233054782e-05,
      "loss": 0.6846,
      "step": 2500
    },
    {
      "epoch": 1.3983286908077994,
      "grad_norm": 3.650736093521118,
      "learning_rate": 2.669452181987001e-05,
      "loss": 0.6887,
      "step": 2510
    },
    {
      "epoch": 1.403899721448468,
      "grad_norm": 3.844275712966919,
      "learning_rate": 2.66016713091922e-05,
      "loss": 0.8149,
      "step": 2520
    },
    {
      "epoch": 1.4094707520891365,
      "grad_norm": 2.4047205448150635,
      "learning_rate": 2.6518105849582174e-05,
      "loss": 0.6856,
      "step": 2530
    },
    {
      "epoch": 1.415041782729805,
      "grad_norm": 4.748696804046631,
      "learning_rate": 2.6425255338904363e-05,
      "loss": 0.7345,
      "step": 2540
    },
    {
      "epoch": 1.4206128133704734,
      "grad_norm": 5.907768249511719,
      "learning_rate": 2.633240482822656e-05,
      "loss": 0.7076,
      "step": 2550
    },
    {
      "epoch": 1.426183844011142,
      "grad_norm": 4.654882907867432,
      "learning_rate": 2.6239554317548748e-05,
      "loss": 0.7767,
      "step": 2560
    },
    {
      "epoch": 1.4317548746518105,
      "grad_norm": 3.9069807529449463,
      "learning_rate": 2.6146703806870937e-05,
      "loss": 0.7012,
      "step": 2570
    },
    {
      "epoch": 1.4373259052924792,
      "grad_norm": 4.143378734588623,
      "learning_rate": 2.605385329619313e-05,
      "loss": 0.7515,
      "step": 2580
    },
    {
      "epoch": 1.4428969359331476,
      "grad_norm": 2.7219676971435547,
      "learning_rate": 2.596100278551532e-05,
      "loss": 0.6977,
      "step": 2590
    },
    {
      "epoch": 1.448467966573816,
      "grad_norm": 2.6653151512145996,
      "learning_rate": 2.5868152274837515e-05,
      "loss": 0.6582,
      "step": 2600
    },
    {
      "epoch": 1.4540389972144847,
      "grad_norm": 3.6775689125061035,
      "learning_rate": 2.5775301764159704e-05,
      "loss": 0.6233,
      "step": 2610
    },
    {
      "epoch": 1.4596100278551531,
      "grad_norm": 4.783838272094727,
      "learning_rate": 2.5682451253481893e-05,
      "loss": 0.807,
      "step": 2620
    },
    {
      "epoch": 1.4651810584958218,
      "grad_norm": 2.852348566055298,
      "learning_rate": 2.5589600742804086e-05,
      "loss": 0.6957,
      "step": 2630
    },
    {
      "epoch": 1.4707520891364902,
      "grad_norm": 3.3512966632843018,
      "learning_rate": 2.5496750232126275e-05,
      "loss": 0.7498,
      "step": 2640
    },
    {
      "epoch": 1.4763231197771587,
      "grad_norm": 5.658099174499512,
      "learning_rate": 2.540389972144847e-05,
      "loss": 0.8034,
      "step": 2650
    },
    {
      "epoch": 1.4818941504178273,
      "grad_norm": 5.467595100402832,
      "learning_rate": 2.531104921077066e-05,
      "loss": 0.6955,
      "step": 2660
    },
    {
      "epoch": 1.4874651810584958,
      "grad_norm": 3.4584598541259766,
      "learning_rate": 2.5218198700092853e-05,
      "loss": 0.7959,
      "step": 2670
    },
    {
      "epoch": 1.4930362116991645,
      "grad_norm": 4.718947887420654,
      "learning_rate": 2.5125348189415042e-05,
      "loss": 0.7106,
      "step": 2680
    },
    {
      "epoch": 1.498607242339833,
      "grad_norm": 3.158700942993164,
      "learning_rate": 2.503249767873723e-05,
      "loss": 0.8245,
      "step": 2690
    },
    {
      "epoch": 1.5041782729805013,
      "grad_norm": 3.9719223976135254,
      "learning_rate": 2.4939647168059427e-05,
      "loss": 0.8293,
      "step": 2700
    },
    {
      "epoch": 1.5097493036211698,
      "grad_norm": 2.6596412658691406,
      "learning_rate": 2.4846796657381616e-05,
      "loss": 0.497,
      "step": 2710
    },
    {
      "epoch": 1.5153203342618384,
      "grad_norm": 2.3325681686401367,
      "learning_rate": 2.475394614670381e-05,
      "loss": 0.6574,
      "step": 2720
    },
    {
      "epoch": 1.520891364902507,
      "grad_norm": 6.3681254386901855,
      "learning_rate": 2.4661095636025998e-05,
      "loss": 0.8019,
      "step": 2730
    },
    {
      "epoch": 1.5264623955431755,
      "grad_norm": 2.895540237426758,
      "learning_rate": 2.456824512534819e-05,
      "loss": 0.687,
      "step": 2740
    },
    {
      "epoch": 1.532033426183844,
      "grad_norm": 4.242018222808838,
      "learning_rate": 2.4475394614670383e-05,
      "loss": 0.6777,
      "step": 2750
    },
    {
      "epoch": 1.5376044568245124,
      "grad_norm": 4.966222763061523,
      "learning_rate": 2.4382544103992576e-05,
      "loss": 0.6506,
      "step": 2760
    },
    {
      "epoch": 1.543175487465181,
      "grad_norm": 3.686152458190918,
      "learning_rate": 2.4289693593314765e-05,
      "loss": 0.7752,
      "step": 2770
    },
    {
      "epoch": 1.5487465181058497,
      "grad_norm": 3.1768383979797363,
      "learning_rate": 2.4196843082636954e-05,
      "loss": 0.7676,
      "step": 2780
    },
    {
      "epoch": 1.5543175487465182,
      "grad_norm": 4.296181678771973,
      "learning_rate": 2.4103992571959146e-05,
      "loss": 0.7853,
      "step": 2790
    },
    {
      "epoch": 1.5598885793871866,
      "grad_norm": 5.16563606262207,
      "learning_rate": 2.401114206128134e-05,
      "loss": 0.8417,
      "step": 2800
    },
    {
      "epoch": 1.565459610027855,
      "grad_norm": 4.229692459106445,
      "learning_rate": 2.391829155060353e-05,
      "loss": 0.6923,
      "step": 2810
    },
    {
      "epoch": 1.5710306406685237,
      "grad_norm": 3.268195390701294,
      "learning_rate": 2.382544103992572e-05,
      "loss": 0.7702,
      "step": 2820
    },
    {
      "epoch": 1.5766016713091922,
      "grad_norm": 2.733262300491333,
      "learning_rate": 2.373259052924791e-05,
      "loss": 0.621,
      "step": 2830
    },
    {
      "epoch": 1.5821727019498608,
      "grad_norm": 2.770465850830078,
      "learning_rate": 2.3639740018570102e-05,
      "loss": 0.6921,
      "step": 2840
    },
    {
      "epoch": 1.5877437325905293,
      "grad_norm": 5.763338088989258,
      "learning_rate": 2.3546889507892295e-05,
      "loss": 0.6394,
      "step": 2850
    },
    {
      "epoch": 1.5933147632311977,
      "grad_norm": 3.826629161834717,
      "learning_rate": 2.3454038997214488e-05,
      "loss": 0.7547,
      "step": 2860
    },
    {
      "epoch": 1.5988857938718661,
      "grad_norm": 4.690371036529541,
      "learning_rate": 2.3361188486536677e-05,
      "loss": 0.6925,
      "step": 2870
    },
    {
      "epoch": 1.6044568245125348,
      "grad_norm": 2.971897602081299,
      "learning_rate": 2.3268337975858866e-05,
      "loss": 0.5923,
      "step": 2880
    },
    {
      "epoch": 1.6100278551532035,
      "grad_norm": 6.178647518157959,
      "learning_rate": 2.317548746518106e-05,
      "loss": 0.7312,
      "step": 2890
    },
    {
      "epoch": 1.615598885793872,
      "grad_norm": 5.96959114074707,
      "learning_rate": 2.308263695450325e-05,
      "loss": 0.7223,
      "step": 2900
    },
    {
      "epoch": 1.6211699164345403,
      "grad_norm": 5.036275386810303,
      "learning_rate": 2.2989786443825444e-05,
      "loss": 0.6623,
      "step": 2910
    },
    {
      "epoch": 1.6267409470752088,
      "grad_norm": 4.670976638793945,
      "learning_rate": 2.2896935933147633e-05,
      "loss": 0.705,
      "step": 2920
    },
    {
      "epoch": 1.6323119777158774,
      "grad_norm": 4.005093574523926,
      "learning_rate": 2.2804085422469825e-05,
      "loss": 0.6657,
      "step": 2930
    },
    {
      "epoch": 1.637883008356546,
      "grad_norm": 5.3175368309021,
      "learning_rate": 2.2711234911792014e-05,
      "loss": 0.6608,
      "step": 2940
    },
    {
      "epoch": 1.6434540389972145,
      "grad_norm": 5.519412040710449,
      "learning_rate": 2.2618384401114207e-05,
      "loss": 0.6161,
      "step": 2950
    },
    {
      "epoch": 1.649025069637883,
      "grad_norm": 6.778628349304199,
      "learning_rate": 2.25255338904364e-05,
      "loss": 0.799,
      "step": 2960
    },
    {
      "epoch": 1.6545961002785514,
      "grad_norm": 8.406840324401855,
      "learning_rate": 2.243268337975859e-05,
      "loss": 0.8115,
      "step": 2970
    },
    {
      "epoch": 1.66016713091922,
      "grad_norm": 3.8956949710845947,
      "learning_rate": 2.233983286908078e-05,
      "loss": 0.6207,
      "step": 2980
    },
    {
      "epoch": 1.6657381615598887,
      "grad_norm": 6.5204176902771,
      "learning_rate": 2.2246982358402974e-05,
      "loss": 0.7214,
      "step": 2990
    },
    {
      "epoch": 1.6713091922005572,
      "grad_norm": 2.431002140045166,
      "learning_rate": 2.2154131847725163e-05,
      "loss": 0.5765,
      "step": 3000
    },
    {
      "epoch": 1.6768802228412256,
      "grad_norm": 3.933286428451538,
      "learning_rate": 2.2061281337047355e-05,
      "loss": 0.6506,
      "step": 3010
    },
    {
      "epoch": 1.682451253481894,
      "grad_norm": 4.694821357727051,
      "learning_rate": 2.1968430826369545e-05,
      "loss": 0.7904,
      "step": 3020
    },
    {
      "epoch": 1.6880222841225627,
      "grad_norm": 5.494531154632568,
      "learning_rate": 2.1875580315691737e-05,
      "loss": 0.7732,
      "step": 3030
    },
    {
      "epoch": 1.6935933147632312,
      "grad_norm": 4.451411247253418,
      "learning_rate": 2.178272980501393e-05,
      "loss": 0.711,
      "step": 3040
    },
    {
      "epoch": 1.6991643454038998,
      "grad_norm": 5.802227020263672,
      "learning_rate": 2.1689879294336122e-05,
      "loss": 0.5912,
      "step": 3050
    },
    {
      "epoch": 1.7047353760445683,
      "grad_norm": 3.4462058544158936,
      "learning_rate": 2.159702878365831e-05,
      "loss": 0.664,
      "step": 3060
    },
    {
      "epoch": 1.7103064066852367,
      "grad_norm": 5.039124965667725,
      "learning_rate": 2.15041782729805e-05,
      "loss": 0.8159,
      "step": 3070
    },
    {
      "epoch": 1.7158774373259051,
      "grad_norm": 7.094409942626953,
      "learning_rate": 2.1411327762302693e-05,
      "loss": 0.6654,
      "step": 3080
    },
    {
      "epoch": 1.7214484679665738,
      "grad_norm": 5.430813789367676,
      "learning_rate": 2.1318477251624886e-05,
      "loss": 0.6988,
      "step": 3090
    },
    {
      "epoch": 1.7270194986072425,
      "grad_norm": 4.098752021789551,
      "learning_rate": 2.1225626740947078e-05,
      "loss": 0.802,
      "step": 3100
    },
    {
      "epoch": 1.732590529247911,
      "grad_norm": 2.234248161315918,
      "learning_rate": 2.1132776230269267e-05,
      "loss": 0.6296,
      "step": 3110
    },
    {
      "epoch": 1.7381615598885793,
      "grad_norm": 4.9986114501953125,
      "learning_rate": 2.1039925719591457e-05,
      "loss": 0.6501,
      "step": 3120
    },
    {
      "epoch": 1.7437325905292478,
      "grad_norm": 6.444380760192871,
      "learning_rate": 2.094707520891365e-05,
      "loss": 0.6722,
      "step": 3130
    },
    {
      "epoch": 1.7493036211699164,
      "grad_norm": 2.4304497241973877,
      "learning_rate": 2.085422469823584e-05,
      "loss": 0.9041,
      "step": 3140
    },
    {
      "epoch": 1.754874651810585,
      "grad_norm": 4.071289539337158,
      "learning_rate": 2.0761374187558034e-05,
      "loss": 0.6974,
      "step": 3150
    },
    {
      "epoch": 1.7604456824512535,
      "grad_norm": 3.203155994415283,
      "learning_rate": 2.0668523676880223e-05,
      "loss": 0.7301,
      "step": 3160
    },
    {
      "epoch": 1.766016713091922,
      "grad_norm": 7.236194610595703,
      "learning_rate": 2.0575673166202413e-05,
      "loss": 0.7152,
      "step": 3170
    },
    {
      "epoch": 1.7715877437325904,
      "grad_norm": 5.022251605987549,
      "learning_rate": 2.0482822655524605e-05,
      "loss": 0.6484,
      "step": 3180
    },
    {
      "epoch": 1.777158774373259,
      "grad_norm": 4.954112529754639,
      "learning_rate": 2.0389972144846798e-05,
      "loss": 0.8323,
      "step": 3190
    },
    {
      "epoch": 1.7827298050139275,
      "grad_norm": 7.8344221115112305,
      "learning_rate": 2.029712163416899e-05,
      "loss": 0.6161,
      "step": 3200
    },
    {
      "epoch": 1.7883008356545962,
      "grad_norm": 5.728160858154297,
      "learning_rate": 2.020427112349118e-05,
      "loss": 0.6358,
      "step": 3210
    },
    {
      "epoch": 1.7938718662952646,
      "grad_norm": 3.981351137161255,
      "learning_rate": 2.0111420612813372e-05,
      "loss": 0.7321,
      "step": 3220
    },
    {
      "epoch": 1.799442896935933,
      "grad_norm": 2.969008207321167,
      "learning_rate": 2.001857010213556e-05,
      "loss": 0.6447,
      "step": 3230
    },
    {
      "epoch": 1.8050139275766015,
      "grad_norm": 5.601192951202393,
      "learning_rate": 1.9925719591457754e-05,
      "loss": 0.5966,
      "step": 3240
    },
    {
      "epoch": 1.8105849582172702,
      "grad_norm": 2.57912015914917,
      "learning_rate": 1.9832869080779946e-05,
      "loss": 0.7527,
      "step": 3250
    },
    {
      "epoch": 1.8161559888579388,
      "grad_norm": 4.265676498413086,
      "learning_rate": 1.9740018570102135e-05,
      "loss": 0.6568,
      "step": 3260
    },
    {
      "epoch": 1.8217270194986073,
      "grad_norm": 3.1897799968719482,
      "learning_rate": 1.9647168059424328e-05,
      "loss": 0.5621,
      "step": 3270
    },
    {
      "epoch": 1.8272980501392757,
      "grad_norm": 4.776169300079346,
      "learning_rate": 1.955431754874652e-05,
      "loss": 0.6414,
      "step": 3280
    },
    {
      "epoch": 1.8328690807799441,
      "grad_norm": 5.068026542663574,
      "learning_rate": 1.946146703806871e-05,
      "loss": 0.5563,
      "step": 3290
    },
    {
      "epoch": 1.8384401114206128,
      "grad_norm": 4.400922775268555,
      "learning_rate": 1.9368616527390902e-05,
      "loss": 0.571,
      "step": 3300
    },
    {
      "epoch": 1.8440111420612815,
      "grad_norm": 5.004662990570068,
      "learning_rate": 1.927576601671309e-05,
      "loss": 0.6671,
      "step": 3310
    },
    {
      "epoch": 1.84958217270195,
      "grad_norm": 4.169490337371826,
      "learning_rate": 1.9182915506035284e-05,
      "loss": 0.6829,
      "step": 3320
    },
    {
      "epoch": 1.8551532033426184,
      "grad_norm": 3.9044158458709717,
      "learning_rate": 1.9090064995357476e-05,
      "loss": 0.704,
      "step": 3330
    },
    {
      "epoch": 1.8607242339832868,
      "grad_norm": 5.892938613891602,
      "learning_rate": 1.899721448467967e-05,
      "loss": 0.7547,
      "step": 3340
    },
    {
      "epoch": 1.8662952646239555,
      "grad_norm": 6.47431755065918,
      "learning_rate": 1.8904363974001858e-05,
      "loss": 0.7811,
      "step": 3350
    },
    {
      "epoch": 1.8718662952646241,
      "grad_norm": 5.054313659667969,
      "learning_rate": 1.8811513463324047e-05,
      "loss": 0.6151,
      "step": 3360
    },
    {
      "epoch": 1.8774373259052926,
      "grad_norm": 6.2499098777771,
      "learning_rate": 1.871866295264624e-05,
      "loss": 0.6831,
      "step": 3370
    },
    {
      "epoch": 1.883008356545961,
      "grad_norm": 5.934417724609375,
      "learning_rate": 1.8625812441968432e-05,
      "loss": 0.7428,
| "step": 3380 | |
| }, | |
| { | |
| "epoch": 1.8885793871866294, | |
| "grad_norm": 5.0701704025268555, | |
| "learning_rate": 1.8532961931290625e-05, | |
| "loss": 0.5764, | |
| "step": 3390 | |
| }, | |
| { | |
| "epoch": 1.894150417827298, | |
| "grad_norm": 5.690725803375244, | |
| "learning_rate": 1.8440111420612814e-05, | |
| "loss": 0.7749, | |
| "step": 3400 | |
| }, | |
| { | |
| "epoch": 1.8997214484679665, | |
| "grad_norm": 5.534970760345459, | |
| "learning_rate": 1.8347260909935003e-05, | |
| "loss": 0.5587, | |
| "step": 3410 | |
| }, | |
| { | |
| "epoch": 1.9052924791086352, | |
| "grad_norm": 5.369340419769287, | |
| "learning_rate": 1.8254410399257196e-05, | |
| "loss": 0.8287, | |
| "step": 3420 | |
| }, | |
| { | |
| "epoch": 1.9108635097493036, | |
| "grad_norm": 4.442090034484863, | |
| "learning_rate": 1.816155988857939e-05, | |
| "loss": 0.6117, | |
| "step": 3430 | |
| }, | |
| { | |
| "epoch": 1.916434540389972, | |
| "grad_norm": 4.4989423751831055, | |
| "learning_rate": 1.806870937790158e-05, | |
| "loss": 0.4946, | |
| "step": 3440 | |
| }, | |
| { | |
| "epoch": 1.9220055710306405, | |
| "grad_norm": 2.9878928661346436, | |
| "learning_rate": 1.797585886722377e-05, | |
| "loss": 0.6236, | |
| "step": 3450 | |
| }, | |
| { | |
| "epoch": 1.9275766016713092, | |
| "grad_norm": 5.484077453613281, | |
| "learning_rate": 1.788300835654596e-05, | |
| "loss": 0.7101, | |
| "step": 3460 | |
| }, | |
| { | |
| "epoch": 1.9331476323119778, | |
| "grad_norm": 4.663434028625488, | |
| "learning_rate": 1.7790157845868152e-05, | |
| "loss": 0.7605, | |
| "step": 3470 | |
| }, | |
| { | |
| "epoch": 1.9387186629526463, | |
| "grad_norm": 5.077414512634277, | |
| "learning_rate": 1.7697307335190344e-05, | |
| "loss": 0.7128, | |
| "step": 3480 | |
| }, | |
| { | |
| "epoch": 1.9442896935933147, | |
| "grad_norm": 2.7661216259002686, | |
| "learning_rate": 1.7604456824512537e-05, | |
| "loss": 0.7643, | |
| "step": 3490 | |
| }, | |
| { | |
| "epoch": 1.9498607242339832, | |
| "grad_norm": 3.1606996059417725, | |
| "learning_rate": 1.7511606313834726e-05, | |
| "loss": 0.6893, | |
| "step": 3500 | |
| }, | |
| { | |
| "epoch": 1.9554317548746518, | |
| "grad_norm": 3.2165732383728027, | |
| "learning_rate": 1.741875580315692e-05, | |
| "loss": 0.7209, | |
| "step": 3510 | |
| }, | |
| { | |
| "epoch": 1.9610027855153205, | |
| "grad_norm": 5.69070291519165, | |
| "learning_rate": 1.7325905292479108e-05, | |
| "loss": 0.6691, | |
| "step": 3520 | |
| }, | |
| { | |
| "epoch": 1.966573816155989, | |
| "grad_norm": 3.7449820041656494, | |
| "learning_rate": 1.72330547818013e-05, | |
| "loss": 0.616, | |
| "step": 3530 | |
| }, | |
| { | |
| "epoch": 1.9721448467966574, | |
| "grad_norm": 3.9729132652282715, | |
| "learning_rate": 1.7140204271123493e-05, | |
| "loss": 0.6138, | |
| "step": 3540 | |
| }, | |
| { | |
| "epoch": 1.9777158774373258, | |
| "grad_norm": 6.665079593658447, | |
| "learning_rate": 1.7047353760445682e-05, | |
| "loss": 0.7017, | |
| "step": 3550 | |
| }, | |
| { | |
| "epoch": 1.9832869080779945, | |
| "grad_norm": 8.24975872039795, | |
| "learning_rate": 1.6954503249767875e-05, | |
| "loss": 0.5986, | |
| "step": 3560 | |
| }, | |
| { | |
| "epoch": 1.988857938718663, | |
| "grad_norm": 4.6404290199279785, | |
| "learning_rate": 1.6861652739090067e-05, | |
| "loss": 0.6759, | |
| "step": 3570 | |
| }, | |
| { | |
| "epoch": 1.9944289693593316, | |
| "grad_norm": 5.132004737854004, | |
| "learning_rate": 1.6768802228412256e-05, | |
| "loss": 0.6387, | |
| "step": 3580 | |
| }, | |
| { | |
| "epoch": 2.0, | |
| "grad_norm": 8.930645942687988, | |
| "learning_rate": 1.667595171773445e-05, | |
| "loss": 0.5361, | |
| "step": 3590 | |
| }, | |
| { | |
| "epoch": 2.0, | |
| "eval_accuracy": 0.7049317358595709, | |
| "eval_loss": 0.8222558498382568, | |
| "eval_runtime": 39.047, | |
| "eval_samples_per_second": 91.915, | |
| "eval_steps_per_second": 5.762, | |
| "step": 3590 | |
| }, | |
| { | |
| "epoch": 2.0055710306406684, | |
| "grad_norm": 3.889174222946167, | |
| "learning_rate": 1.6583101207056638e-05, | |
| "loss": 0.5788, | |
| "step": 3600 | |
| }, | |
| { | |
| "epoch": 2.011142061281337, | |
| "grad_norm": 3.227085590362549, | |
| "learning_rate": 1.649025069637883e-05, | |
| "loss": 0.4455, | |
| "step": 3610 | |
| }, | |
| { | |
| "epoch": 2.0167130919220058, | |
| "grad_norm": 4.082145690917969, | |
| "learning_rate": 1.6397400185701023e-05, | |
| "loss": 0.4816, | |
| "step": 3620 | |
| }, | |
| { | |
| "epoch": 2.022284122562674, | |
| "grad_norm": 4.98834228515625, | |
| "learning_rate": 1.6304549675023216e-05, | |
| "loss": 0.5477, | |
| "step": 3630 | |
| }, | |
| { | |
| "epoch": 2.0278551532033426, | |
| "grad_norm": 3.9467737674713135, | |
| "learning_rate": 1.6211699164345405e-05, | |
| "loss": 0.605, | |
| "step": 3640 | |
| }, | |
| { | |
| "epoch": 2.033426183844011, | |
| "grad_norm": 7.22300386428833, | |
| "learning_rate": 1.6118848653667594e-05, | |
| "loss": 0.5354, | |
| "step": 3650 | |
| }, | |
| { | |
| "epoch": 2.0389972144846795, | |
| "grad_norm": 4.983692169189453, | |
| "learning_rate": 1.6025998142989787e-05, | |
| "loss": 0.4666, | |
| "step": 3660 | |
| }, | |
| { | |
| "epoch": 2.0445682451253484, | |
| "grad_norm": 3.4938926696777344, | |
| "learning_rate": 1.593314763231198e-05, | |
| "loss": 0.4547, | |
| "step": 3670 | |
| }, | |
| { | |
| "epoch": 2.050139275766017, | |
| "grad_norm": 5.80803918838501, | |
| "learning_rate": 1.584029712163417e-05, | |
| "loss": 0.3969, | |
| "step": 3680 | |
| }, | |
| { | |
| "epoch": 2.0557103064066853, | |
| "grad_norm": 5.283419132232666, | |
| "learning_rate": 1.574744661095636e-05, | |
| "loss": 0.4851, | |
| "step": 3690 | |
| }, | |
| { | |
| "epoch": 2.0612813370473537, | |
| "grad_norm": 5.452208518981934, | |
| "learning_rate": 1.565459610027855e-05, | |
| "loss": 0.5, | |
| "step": 3700 | |
| }, | |
| { | |
| "epoch": 2.066852367688022, | |
| "grad_norm": 4.245022773742676, | |
| "learning_rate": 1.5561745589600743e-05, | |
| "loss": 0.5629, | |
| "step": 3710 | |
| }, | |
| { | |
| "epoch": 2.0724233983286906, | |
| "grad_norm": 3.675978660583496, | |
| "learning_rate": 1.5468895078922935e-05, | |
| "loss": 0.5837, | |
| "step": 3720 | |
| }, | |
| { | |
| "epoch": 2.0779944289693595, | |
| "grad_norm": 3.861818552017212, | |
| "learning_rate": 1.5376044568245128e-05, | |
| "loss": 0.4685, | |
| "step": 3730 | |
| }, | |
| { | |
| "epoch": 2.083565459610028, | |
| "grad_norm": 3.467484474182129, | |
| "learning_rate": 1.5283194057567317e-05, | |
| "loss": 0.3894, | |
| "step": 3740 | |
| }, | |
| { | |
| "epoch": 2.0891364902506964, | |
| "grad_norm": 0.9584236741065979, | |
| "learning_rate": 1.519034354688951e-05, | |
| "loss": 0.506, | |
| "step": 3750 | |
| }, | |
| { | |
| "epoch": 2.094707520891365, | |
| "grad_norm": 3.1029670238494873, | |
| "learning_rate": 1.5097493036211699e-05, | |
| "loss": 0.4932, | |
| "step": 3760 | |
| }, | |
| { | |
| "epoch": 2.1002785515320332, | |
| "grad_norm": 3.9961135387420654, | |
| "learning_rate": 1.5004642525533891e-05, | |
| "loss": 0.4128, | |
| "step": 3770 | |
| }, | |
| { | |
| "epoch": 2.105849582172702, | |
| "grad_norm": 5.93654203414917, | |
| "learning_rate": 1.4911792014856082e-05, | |
| "loss": 0.5624, | |
| "step": 3780 | |
| }, | |
| { | |
| "epoch": 2.1114206128133706, | |
| "grad_norm": 3.2904577255249023, | |
| "learning_rate": 1.4818941504178274e-05, | |
| "loss": 0.4743, | |
| "step": 3790 | |
| }, | |
| { | |
| "epoch": 2.116991643454039, | |
| "grad_norm": 3.4849934577941895, | |
| "learning_rate": 1.4726090993500465e-05, | |
| "loss": 0.3288, | |
| "step": 3800 | |
| }, | |
| { | |
| "epoch": 2.1225626740947074, | |
| "grad_norm": 4.94980001449585, | |
| "learning_rate": 1.4633240482822655e-05, | |
| "loss": 0.4052, | |
| "step": 3810 | |
| }, | |
| { | |
| "epoch": 2.128133704735376, | |
| "grad_norm": 4.073460102081299, | |
| "learning_rate": 1.4540389972144847e-05, | |
| "loss": 0.4061, | |
| "step": 3820 | |
| }, | |
| { | |
| "epoch": 2.1337047353760448, | |
| "grad_norm": 5.284788131713867, | |
| "learning_rate": 1.4447539461467038e-05, | |
| "loss": 0.5306, | |
| "step": 3830 | |
| }, | |
| { | |
| "epoch": 2.139275766016713, | |
| "grad_norm": 3.1698975563049316, | |
| "learning_rate": 1.435468895078923e-05, | |
| "loss": 0.4429, | |
| "step": 3840 | |
| }, | |
| { | |
| "epoch": 2.1448467966573816, | |
| "grad_norm": 4.465912342071533, | |
| "learning_rate": 1.4261838440111421e-05, | |
| "loss": 0.6064, | |
| "step": 3850 | |
| }, | |
| { | |
| "epoch": 2.15041782729805, | |
| "grad_norm": 1.6745854616165161, | |
| "learning_rate": 1.4168987929433614e-05, | |
| "loss": 0.336, | |
| "step": 3860 | |
| }, | |
| { | |
| "epoch": 2.1559888579387185, | |
| "grad_norm": 5.794722080230713, | |
| "learning_rate": 1.4076137418755803e-05, | |
| "loss": 0.3712, | |
| "step": 3870 | |
| }, | |
| { | |
| "epoch": 2.1615598885793874, | |
| "grad_norm": 3.66721773147583, | |
| "learning_rate": 1.3983286908077994e-05, | |
| "loss": 0.4622, | |
| "step": 3880 | |
| }, | |
| { | |
| "epoch": 2.167130919220056, | |
| "grad_norm": 4.879077434539795, | |
| "learning_rate": 1.3890436397400186e-05, | |
| "loss": 0.4695, | |
| "step": 3890 | |
| }, | |
| { | |
| "epoch": 2.1727019498607243, | |
| "grad_norm": 5.087412357330322, | |
| "learning_rate": 1.3797585886722377e-05, | |
| "loss": 0.4698, | |
| "step": 3900 | |
| }, | |
| { | |
| "epoch": 2.1782729805013927, | |
| "grad_norm": 4.329985618591309, | |
| "learning_rate": 1.370473537604457e-05, | |
| "loss": 0.5304, | |
| "step": 3910 | |
| }, | |
| { | |
| "epoch": 2.183844011142061, | |
| "grad_norm": 5.840746879577637, | |
| "learning_rate": 1.361188486536676e-05, | |
| "loss": 0.4619, | |
| "step": 3920 | |
| }, | |
| { | |
| "epoch": 2.1894150417827296, | |
| "grad_norm": 6.990253925323486, | |
| "learning_rate": 1.351903435468895e-05, | |
| "loss": 0.5303, | |
| "step": 3930 | |
| }, | |
| { | |
| "epoch": 2.1949860724233985, | |
| "grad_norm": 5.975421905517578, | |
| "learning_rate": 1.3426183844011142e-05, | |
| "loss": 0.4523, | |
| "step": 3940 | |
| }, | |
| { | |
| "epoch": 2.200557103064067, | |
| "grad_norm": 4.276307106018066, | |
| "learning_rate": 1.3333333333333333e-05, | |
| "loss": 0.5446, | |
| "step": 3950 | |
| }, | |
| { | |
| "epoch": 2.2061281337047354, | |
| "grad_norm": 2.314199686050415, | |
| "learning_rate": 1.3240482822655526e-05, | |
| "loss": 0.3849, | |
| "step": 3960 | |
| }, | |
| { | |
| "epoch": 2.211699164345404, | |
| "grad_norm": 2.7282533645629883, | |
| "learning_rate": 1.3147632311977717e-05, | |
| "loss": 0.5092, | |
| "step": 3970 | |
| }, | |
| { | |
| "epoch": 2.2172701949860723, | |
| "grad_norm": 5.947664737701416, | |
| "learning_rate": 1.305478180129991e-05, | |
| "loss": 0.4737, | |
| "step": 3980 | |
| }, | |
| { | |
| "epoch": 2.222841225626741, | |
| "grad_norm": 3.190096378326416, | |
| "learning_rate": 1.2961931290622098e-05, | |
| "loss": 0.4186, | |
| "step": 3990 | |
| }, | |
| { | |
| "epoch": 2.2284122562674096, | |
| "grad_norm": 6.639342308044434, | |
| "learning_rate": 1.286908077994429e-05, | |
| "loss": 0.5871, | |
| "step": 4000 | |
| }, | |
| { | |
| "epoch": 2.233983286908078, | |
| "grad_norm": 2.7380943298339844, | |
| "learning_rate": 1.2776230269266482e-05, | |
| "loss": 0.4876, | |
| "step": 4010 | |
| }, | |
| { | |
| "epoch": 2.2395543175487465, | |
| "grad_norm": 4.760270118713379, | |
| "learning_rate": 1.2683379758588673e-05, | |
| "loss": 0.4603, | |
| "step": 4020 | |
| }, | |
| { | |
| "epoch": 2.245125348189415, | |
| "grad_norm": 3.747488260269165, | |
| "learning_rate": 1.2590529247910865e-05, | |
| "loss": 0.548, | |
| "step": 4030 | |
| }, | |
| { | |
| "epoch": 2.2506963788300833, | |
| "grad_norm": 3.5704824924468994, | |
| "learning_rate": 1.2497678737233056e-05, | |
| "loss": 0.4724, | |
| "step": 4040 | |
| }, | |
| { | |
| "epoch": 2.256267409470752, | |
| "grad_norm": 4.090631484985352, | |
| "learning_rate": 1.2404828226555247e-05, | |
| "loss": 0.4502, | |
| "step": 4050 | |
| }, | |
| { | |
| "epoch": 2.2618384401114207, | |
| "grad_norm": 2.886296272277832, | |
| "learning_rate": 1.2311977715877438e-05, | |
| "loss": 0.4281, | |
| "step": 4060 | |
| }, | |
| { | |
| "epoch": 2.267409470752089, | |
| "grad_norm": 3.407174587249756, | |
| "learning_rate": 1.2219127205199629e-05, | |
| "loss": 0.5625, | |
| "step": 4070 | |
| }, | |
| { | |
| "epoch": 2.2729805013927575, | |
| "grad_norm": 1.6675838232040405, | |
| "learning_rate": 1.2126276694521821e-05, | |
| "loss": 0.4449, | |
| "step": 4080 | |
| }, | |
| { | |
| "epoch": 2.2785515320334264, | |
| "grad_norm": 3.721078634262085, | |
| "learning_rate": 1.2033426183844012e-05, | |
| "loss": 0.3982, | |
| "step": 4090 | |
| }, | |
| { | |
| "epoch": 2.284122562674095, | |
| "grad_norm": 3.702766180038452, | |
| "learning_rate": 1.1940575673166203e-05, | |
| "loss": 0.3637, | |
| "step": 4100 | |
| }, | |
| { | |
| "epoch": 2.2896935933147633, | |
| "grad_norm": 7.1998748779296875, | |
| "learning_rate": 1.1847725162488395e-05, | |
| "loss": 0.5362, | |
| "step": 4110 | |
| }, | |
| { | |
| "epoch": 2.2952646239554317, | |
| "grad_norm": 1.0540097951889038, | |
| "learning_rate": 1.1754874651810585e-05, | |
| "loss": 0.3079, | |
| "step": 4120 | |
| }, | |
| { | |
| "epoch": 2.3008356545961, | |
| "grad_norm": 6.923671722412109, | |
| "learning_rate": 1.1662024141132777e-05, | |
| "loss": 0.3345, | |
| "step": 4130 | |
| }, | |
| { | |
| "epoch": 2.3064066852367686, | |
| "grad_norm": 4.8872575759887695, | |
| "learning_rate": 1.1569173630454968e-05, | |
| "loss": 0.5205, | |
| "step": 4140 | |
| }, | |
| { | |
| "epoch": 2.3119777158774375, | |
| "grad_norm": 4.3670244216918945, | |
| "learning_rate": 1.1476323119777159e-05, | |
| "loss": 0.4622, | |
| "step": 4150 | |
| }, | |
| { | |
| "epoch": 2.317548746518106, | |
| "grad_norm": 3.558671474456787, | |
| "learning_rate": 1.1383472609099351e-05, | |
| "loss": 0.5876, | |
| "step": 4160 | |
| }, | |
| { | |
| "epoch": 2.3231197771587744, | |
| "grad_norm": 3.1379001140594482, | |
| "learning_rate": 1.1290622098421542e-05, | |
| "loss": 0.4676, | |
| "step": 4170 | |
| }, | |
| { | |
| "epoch": 2.328690807799443, | |
| "grad_norm": 5.652819633483887, | |
| "learning_rate": 1.1197771587743733e-05, | |
| "loss": 0.4292, | |
| "step": 4180 | |
| }, | |
| { | |
| "epoch": 2.3342618384401113, | |
| "grad_norm": 3.546596050262451, | |
| "learning_rate": 1.1104921077065924e-05, | |
| "loss": 0.4429, | |
| "step": 4190 | |
| }, | |
| { | |
| "epoch": 2.33983286908078, | |
| "grad_norm": 3.5589184761047363, | |
| "learning_rate": 1.1012070566388117e-05, | |
| "loss": 0.4568, | |
| "step": 4200 | |
| }, | |
| { | |
| "epoch": 2.3454038997214486, | |
| "grad_norm": 4.787576198577881, | |
| "learning_rate": 1.0919220055710307e-05, | |
| "loss": 0.4462, | |
| "step": 4210 | |
| }, | |
| { | |
| "epoch": 2.350974930362117, | |
| "grad_norm": 4.410435199737549, | |
| "learning_rate": 1.0826369545032498e-05, | |
| "loss": 0.4565, | |
| "step": 4220 | |
| }, | |
| { | |
| "epoch": 2.3565459610027855, | |
| "grad_norm": 2.8344180583953857, | |
| "learning_rate": 1.073351903435469e-05, | |
| "loss": 0.4908, | |
| "step": 4230 | |
| }, | |
| { | |
| "epoch": 2.362116991643454, | |
| "grad_norm": 3.592573404312134, | |
| "learning_rate": 1.064066852367688e-05, | |
| "loss": 0.5487, | |
| "step": 4240 | |
| }, | |
| { | |
| "epoch": 2.3676880222841223, | |
| "grad_norm": 3.614192485809326, | |
| "learning_rate": 1.0547818012999073e-05, | |
| "loss": 0.6118, | |
| "step": 4250 | |
| }, | |
| { | |
| "epoch": 2.3732590529247912, | |
| "grad_norm": 3.9269447326660156, | |
| "learning_rate": 1.0454967502321263e-05, | |
| "loss": 0.2933, | |
| "step": 4260 | |
| }, | |
| { | |
| "epoch": 2.3788300835654597, | |
| "grad_norm": 7.300518035888672, | |
| "learning_rate": 1.0362116991643454e-05, | |
| "loss": 0.6262, | |
| "step": 4270 | |
| }, | |
| { | |
| "epoch": 2.384401114206128, | |
| "grad_norm": 7.274951934814453, | |
| "learning_rate": 1.0269266480965647e-05, | |
| "loss": 0.4862, | |
| "step": 4280 | |
| }, | |
| { | |
| "epoch": 2.3899721448467965, | |
| "grad_norm": 4.726593971252441, | |
| "learning_rate": 1.0176415970287836e-05, | |
| "loss": 0.437, | |
| "step": 4290 | |
| }, | |
| { | |
| "epoch": 2.3955431754874654, | |
| "grad_norm": 5.178894519805908, | |
| "learning_rate": 1.0083565459610029e-05, | |
| "loss": 0.5367, | |
| "step": 4300 | |
| }, | |
| { | |
| "epoch": 2.401114206128134, | |
| "grad_norm": 5.717586040496826, | |
| "learning_rate": 9.99071494893222e-06, | |
| "loss": 0.3972, | |
| "step": 4310 | |
| }, | |
| { | |
| "epoch": 2.4066852367688023, | |
| "grad_norm": 4.987515926361084, | |
| "learning_rate": 9.89786443825441e-06, | |
| "loss": 0.3347, | |
| "step": 4320 | |
| }, | |
| { | |
| "epoch": 2.4122562674094707, | |
| "grad_norm": 1.9475771188735962, | |
| "learning_rate": 9.805013927576603e-06, | |
| "loss": 0.4143, | |
| "step": 4330 | |
| }, | |
| { | |
| "epoch": 2.417827298050139, | |
| "grad_norm": 3.4363367557525635, | |
| "learning_rate": 9.712163416898794e-06, | |
| "loss": 0.4253, | |
| "step": 4340 | |
| }, | |
| { | |
| "epoch": 2.4233983286908076, | |
| "grad_norm": 3.5124638080596924, | |
| "learning_rate": 9.619312906220985e-06, | |
| "loss": 0.4637, | |
| "step": 4350 | |
| }, | |
| { | |
| "epoch": 2.4289693593314765, | |
| "grad_norm": 5.052842617034912, | |
| "learning_rate": 9.526462395543175e-06, | |
| "loss": 0.4427, | |
| "step": 4360 | |
| }, | |
| { | |
| "epoch": 2.434540389972145, | |
| "grad_norm": 3.195524215698242, | |
| "learning_rate": 9.433611884865368e-06, | |
| "loss": 0.5245, | |
| "step": 4370 | |
| }, | |
| { | |
| "epoch": 2.4401114206128134, | |
| "grad_norm": 5.07759952545166, | |
| "learning_rate": 9.340761374187559e-06, | |
| "loss": 0.4723, | |
| "step": 4380 | |
| }, | |
| { | |
| "epoch": 2.445682451253482, | |
| "grad_norm": 5.409955978393555, | |
| "learning_rate": 9.24791086350975e-06, | |
| "loss": 0.3694, | |
| "step": 4390 | |
| }, | |
| { | |
| "epoch": 2.4512534818941503, | |
| "grad_norm": 6.566311359405518, | |
| "learning_rate": 9.155060352831942e-06, | |
| "loss": 0.5223, | |
| "step": 4400 | |
| }, | |
| { | |
| "epoch": 2.456824512534819, | |
| "grad_norm": 4.150636672973633, | |
| "learning_rate": 9.062209842154131e-06, | |
| "loss": 0.5151, | |
| "step": 4410 | |
| }, | |
| { | |
| "epoch": 2.4623955431754876, | |
| "grad_norm": 3.1298868656158447, | |
| "learning_rate": 8.969359331476324e-06, | |
| "loss": 0.4783, | |
| "step": 4420 | |
| }, | |
| { | |
| "epoch": 2.467966573816156, | |
| "grad_norm": 4.833754062652588, | |
| "learning_rate": 8.876508820798515e-06, | |
| "loss": 0.4791, | |
| "step": 4430 | |
| }, | |
| { | |
| "epoch": 2.4735376044568245, | |
| "grad_norm": 2.8329479694366455, | |
| "learning_rate": 8.783658310120706e-06, | |
| "loss": 0.3898, | |
| "step": 4440 | |
| }, | |
| { | |
| "epoch": 2.479108635097493, | |
| "grad_norm": 5.188013076782227, | |
| "learning_rate": 8.690807799442898e-06, | |
| "loss": 0.406, | |
| "step": 4450 | |
| }, | |
| { | |
| "epoch": 2.4846796657381613, | |
| "grad_norm": 5.230032920837402, | |
| "learning_rate": 8.597957288765089e-06, | |
| "loss": 0.4813, | |
| "step": 4460 | |
| }, | |
| { | |
| "epoch": 2.4902506963788302, | |
| "grad_norm": 6.618623733520508, | |
| "learning_rate": 8.50510677808728e-06, | |
| "loss": 0.3411, | |
| "step": 4470 | |
| }, | |
| { | |
| "epoch": 2.4958217270194987, | |
| "grad_norm": 6.0990800857543945, | |
| "learning_rate": 8.41225626740947e-06, | |
| "loss": 0.5043, | |
| "step": 4480 | |
| }, | |
| { | |
| "epoch": 2.501392757660167, | |
| "grad_norm": 3.5568933486938477, | |
| "learning_rate": 8.319405756731663e-06, | |
| "loss": 0.4675, | |
| "step": 4490 | |
| }, | |
| { | |
| "epoch": 2.5069637883008355, | |
| "grad_norm": 5.779839515686035, | |
| "learning_rate": 8.226555246053854e-06, | |
| "loss": 0.4414, | |
| "step": 4500 | |
| }, | |
| { | |
| "epoch": 2.5125348189415044, | |
| "grad_norm": 6.266837120056152, | |
| "learning_rate": 8.133704735376045e-06, | |
| "loss": 0.4811, | |
| "step": 4510 | |
| }, | |
| { | |
| "epoch": 2.518105849582173, | |
| "grad_norm": 4.401478290557861, | |
| "learning_rate": 8.040854224698238e-06, | |
| "loss": 0.4353, | |
| "step": 4520 | |
| }, | |
| { | |
| "epoch": 2.5236768802228413, | |
| "grad_norm": 4.887507915496826, | |
| "learning_rate": 7.948003714020427e-06, | |
| "loss": 0.4586, | |
| "step": 4530 | |
| }, | |
| { | |
| "epoch": 2.5292479108635098, | |
| "grad_norm": 4.332605361938477, | |
| "learning_rate": 7.85515320334262e-06, | |
| "loss": 0.3896, | |
| "step": 4540 | |
| }, | |
| { | |
| "epoch": 2.534818941504178, | |
| "grad_norm": 4.483108997344971, | |
| "learning_rate": 7.76230269266481e-06, | |
| "loss": 0.3309, | |
| "step": 4550 | |
| }, | |
| { | |
| "epoch": 2.5403899721448466, | |
| "grad_norm": 4.728216171264648, | |
| "learning_rate": 7.669452181987001e-06, | |
| "loss": 0.4499, | |
| "step": 4560 | |
| }, | |
| { | |
| "epoch": 2.545961002785515, | |
| "grad_norm": 4.626336574554443, | |
| "learning_rate": 7.576601671309193e-06, | |
| "loss": 0.438, | |
| "step": 4570 | |
| }, | |
| { | |
| "epoch": 2.551532033426184, | |
| "grad_norm": 8.29094409942627, | |
| "learning_rate": 7.4837511606313835e-06, | |
| "loss": 0.479, | |
| "step": 4580 | |
| }, | |
| { | |
| "epoch": 2.5571030640668524, | |
| "grad_norm": 6.774120330810547, | |
| "learning_rate": 7.390900649953575e-06, | |
| "loss": 0.4587, | |
| "step": 4590 | |
| }, | |
| { | |
| "epoch": 2.562674094707521, | |
| "grad_norm": 4.699492454528809, | |
| "learning_rate": 7.298050139275767e-06, | |
| "loss": 0.4226, | |
| "step": 4600 | |
| }, | |
| { | |
| "epoch": 2.5682451253481893, | |
| "grad_norm": 8.156336784362793, | |
| "learning_rate": 7.205199628597957e-06, | |
| "loss": 0.4484, | |
| "step": 4610 | |
| }, | |
| { | |
| "epoch": 2.573816155988858, | |
| "grad_norm": 5.780287265777588, | |
| "learning_rate": 7.112349117920149e-06, | |
| "loss": 0.4439, | |
| "step": 4620 | |
| }, | |
| { | |
| "epoch": 2.5793871866295266, | |
| "grad_norm": 4.5614752769470215, | |
| "learning_rate": 7.01949860724234e-06, | |
| "loss": 0.5142, | |
| "step": 4630 | |
| }, | |
| { | |
| "epoch": 2.584958217270195, | |
| "grad_norm": 5.965260028839111, | |
| "learning_rate": 6.926648096564531e-06, | |
| "loss": 0.4245, | |
| "step": 4640 | |
| }, | |
| { | |
| "epoch": 2.5905292479108635, | |
| "grad_norm": 3.2824435234069824, | |
| "learning_rate": 6.833797585886723e-06, | |
| "loss": 0.3454, | |
| "step": 4650 | |
| }, | |
| { | |
| "epoch": 2.596100278551532, | |
| "grad_norm": 5.84599494934082, | |
| "learning_rate": 6.740947075208915e-06, | |
| "loss": 0.5685, | |
| "step": 4660 | |
| }, | |
| { | |
| "epoch": 2.6016713091922004, | |
| "grad_norm": 2.833799362182617, | |
| "learning_rate": 6.648096564531105e-06, | |
| "loss": 0.4198, | |
| "step": 4670 | |
| }, | |
| { | |
| "epoch": 2.6072423398328692, | |
| "grad_norm": 5.735970973968506, | |
| "learning_rate": 6.555246053853296e-06, | |
| "loss": 0.3875, | |
| "step": 4680 | |
| }, | |
| { | |
| "epoch": 2.6128133704735377, | |
| "grad_norm": 3.7901411056518555, | |
| "learning_rate": 6.4716805942432695e-06, | |
| "loss": 0.5831, | |
| "step": 4690 | |
| }, | |
| { | |
| "epoch": 2.618384401114206, | |
| "grad_norm": 5.210600852966309, | |
| "learning_rate": 6.3788300835654595e-06, | |
| "loss": 0.4029, | |
| "step": 4700 | |
| }, | |
| { | |
| "epoch": 2.6239554317548746, | |
| "grad_norm": 4.811458110809326, | |
| "learning_rate": 6.285979572887651e-06, | |
| "loss": 0.4125, | |
| "step": 4710 | |
| }, | |
| { | |
| "epoch": 2.6295264623955434, | |
| "grad_norm": 4.759501934051514, | |
| "learning_rate": 6.193129062209842e-06, | |
| "loss": 0.3436, | |
| "step": 4720 | |
| }, | |
| { | |
| "epoch": 2.635097493036212, | |
| "grad_norm": 6.578216552734375, | |
| "learning_rate": 6.100278551532034e-06, | |
| "loss": 0.4117, | |
| "step": 4730 | |
| }, | |
| { | |
| "epoch": 2.6406685236768803, | |
| "grad_norm": 6.116782188415527, | |
| "learning_rate": 6.0074280408542255e-06, | |
| "loss": 0.5337, | |
| "step": 4740 | |
| }, | |
| { | |
| "epoch": 2.6462395543175488, | |
| "grad_norm": 6.460090637207031, | |
| "learning_rate": 5.914577530176416e-06, | |
| "loss": 0.5183, | |
| "step": 4750 | |
| }, | |
| { | |
| "epoch": 2.651810584958217, | |
| "grad_norm": 4.072648525238037, | |
| "learning_rate": 5.821727019498607e-06, | |
| "loss": 0.3908, | |
| "step": 4760 | |
| }, | |
| { | |
| "epoch": 2.6573816155988856, | |
| "grad_norm": 3.010607957839966, | |
| "learning_rate": 5.728876508820799e-06, | |
| "loss": 0.4368, | |
| "step": 4770 | |
| }, | |
| { | |
| "epoch": 2.662952646239554, | |
| "grad_norm": 4.1488213539123535, | |
| "learning_rate": 5.63602599814299e-06, | |
| "loss": 0.3858, | |
| "step": 4780 | |
| }, | |
| { | |
| "epoch": 2.668523676880223, | |
| "grad_norm": 5.021961212158203, | |
| "learning_rate": 5.5431754874651814e-06, | |
| "loss": 0.4922, | |
| "step": 4790 | |
| }, | |
| { | |
| "epoch": 2.6740947075208914, | |
| "grad_norm": 7.602121829986572, | |
| "learning_rate": 5.450324976787373e-06, | |
| "loss": 0.4895, | |
| "step": 4800 | |
| }, | |
| { | |
| "epoch": 2.67966573816156, | |
| "grad_norm": 4.0199384689331055, | |
| "learning_rate": 5.357474466109564e-06, | |
| "loss": 0.2544, | |
| "step": 4810 | |
| }, | |
| { | |
| "epoch": 2.6852367688022283, | |
| "grad_norm": 5.216213703155518, | |
| "learning_rate": 5.264623955431755e-06, | |
| "loss": 0.4402, | |
| "step": 4820 | |
| }, | |
| { | |
| "epoch": 2.690807799442897, | |
| "grad_norm": 3.985248565673828, | |
| "learning_rate": 5.171773444753946e-06, | |
| "loss": 0.4524, | |
| "step": 4830 | |
| }, | |
| { | |
| "epoch": 2.6963788300835656, | |
| "grad_norm": 6.052826881408691, | |
| "learning_rate": 5.078922934076137e-06, | |
| "loss": 0.4937, | |
| "step": 4840 | |
| }, | |
| { | |
| "epoch": 2.701949860724234, | |
| "grad_norm": 4.246743202209473, | |
| "learning_rate": 4.986072423398329e-06, | |
| "loss": 0.5177, | |
| "step": 4850 | |
| }, | |
| { | |
| "epoch": 2.7075208913649025, | |
| "grad_norm": 3.1840410232543945, | |
| "learning_rate": 4.89322191272052e-06, | |
| "loss": 0.3973, | |
| "step": 4860 | |
| }, | |
| { | |
| "epoch": 2.713091922005571, | |
| "grad_norm": 4.7223591804504395, | |
| "learning_rate": 4.800371402042712e-06, | |
| "loss": 0.4977, | |
| "step": 4870 | |
| }, | |
| { | |
| "epoch": 2.7186629526462394, | |
| "grad_norm": 7.073842525482178, | |
| "learning_rate": 4.7075208913649025e-06, | |
| "loss": 0.4918, | |
| "step": 4880 | |
| }, | |
| { | |
| "epoch": 2.724233983286908, | |
| "grad_norm": 8.0216646194458, | |
| "learning_rate": 4.614670380687093e-06, | |
| "loss": 0.4601, | |
| "step": 4890 | |
| }, | |
| { | |
| "epoch": 2.7298050139275767, | |
| "grad_norm": 3.22691011428833, | |
| "learning_rate": 4.521819870009285e-06, | |
| "loss": 0.3823, | |
| "step": 4900 | |
| }, | |
| { | |
| "epoch": 2.735376044568245, | |
| "grad_norm": 2.334157943725586, | |
| "learning_rate": 4.428969359331477e-06, | |
| "loss": 0.3607, | |
| "step": 4910 | |
| }, | |
| { | |
| "epoch": 2.7409470752089136, | |
| "grad_norm": 2.9531102180480957, | |
| "learning_rate": 4.336118848653668e-06, | |
| "loss": 0.4263, | |
| "step": 4920 | |
| }, | |
| { | |
| "epoch": 2.7465181058495824, | |
| "grad_norm": 4.451778888702393, | |
| "learning_rate": 4.243268337975859e-06, | |
| "loss": 0.3662, | |
| "step": 4930 | |
| }, | |
| { | |
| "epoch": 2.752089136490251, | |
| "grad_norm": 5.060407638549805, | |
| "learning_rate": 4.15041782729805e-06, | |
| "loss": 0.4455, | |
| "step": 4940 | |
| }, | |
| { | |
| "epoch": 2.7576601671309193, | |
| "grad_norm": 4.149548530578613, | |
| "learning_rate": 4.057567316620241e-06, | |
| "loss": 0.4136, | |
| "step": 4950 | |
| }, | |
| { | |
| "epoch": 2.7632311977715878, | |
| "grad_norm": 4.839876174926758, | |
| "learning_rate": 3.964716805942433e-06, | |
| "loss": 0.4169, | |
| "step": 4960 | |
| }, | |
| { | |
| "epoch": 2.768802228412256, | |
| "grad_norm": 5.978821754455566, | |
| "learning_rate": 3.8718662952646245e-06, | |
| "loss": 0.4595, | |
| "step": 4970 | |
| }, | |
| { | |
| "epoch": 2.7743732590529246, | |
| "grad_norm": 3.7743992805480957, | |
| "learning_rate": 3.7790157845868153e-06, | |
| "loss": 0.4597, | |
| "step": 4980 | |
| }, | |
| { | |
| "epoch": 2.779944289693593, | |
| "grad_norm": 4.5495829582214355, | |
| "learning_rate": 3.686165273909006e-06, | |
| "loss": 0.3729, | |
| "step": 4990 | |
| }, | |
| { | |
| "epoch": 2.785515320334262, | |
| "grad_norm": 2.0721793174743652, | |
| "learning_rate": 3.593314763231198e-06, | |
| "loss": 0.3199, | |
| "step": 5000 | |
| }, | |
| { | |
| "epoch": 2.7910863509749304, | |
| "grad_norm": 3.427213668823242, | |
| "learning_rate": 3.50974930362117e-06, | |
| "loss": 0.4138, | |
| "step": 5010 | |
| }, | |
| { | |
| "epoch": 2.796657381615599, | |
| "grad_norm": 3.8407859802246094, | |
| "learning_rate": 3.4168987929433615e-06, | |
| "loss": 0.4224, | |
| "step": 5020 | |
| }, | |
| { | |
| "epoch": 2.8022284122562673, | |
| "grad_norm": 4.26210355758667, | |
| "learning_rate": 3.3240482822655523e-06, | |
| "loss": 0.4787, | |
| "step": 5030 | |
| }, | |
| { | |
| "epoch": 2.807799442896936, | |
| "grad_norm": 7.723387241363525, | |
| "learning_rate": 3.231197771587744e-06, | |
| "loss": 0.3606, | |
| "step": 5040 | |
| }, | |
| { | |
| "epoch": 2.8133704735376046, | |
| "grad_norm": 5.821560859680176, | |
| "learning_rate": 3.1383472609099353e-06, | |
| "loss": 0.4126, | |
| "step": 5050 | |
| }, | |
| { | |
| "epoch": 2.818941504178273, | |
| "grad_norm": 4.950361251831055, | |
| "learning_rate": 3.0454967502321266e-06, | |
| "loss": 0.3101, | |
| "step": 5060 | |
| }, | |
| { | |
| "epoch": 2.8245125348189415, | |
| "grad_norm": 5.746707916259766, | |
| "learning_rate": 2.9526462395543174e-06, | |
| "loss": 0.3703, | |
| "step": 5070 | |
| }, | |
| { | |
| "epoch": 2.83008356545961, | |
| "grad_norm": 2.0385026931762695, | |
| "learning_rate": 2.859795728876509e-06, | |
| "loss": 0.3529, | |
| "step": 5080 | |
| }, | |
| { | |
| "epoch": 2.8356545961002784, | |
| "grad_norm": 6.230392932891846, | |
| "learning_rate": 2.7669452181987004e-06, | |
| "loss": 0.3917, | |
| "step": 5090 | |
| }, | |
| { | |
| "epoch": 2.841225626740947, | |
| "grad_norm": 4.094936370849609, | |
| "learning_rate": 2.6740947075208913e-06, | |
| "loss": 0.3203, | |
| "step": 5100 | |
| }, | |
| { | |
| "epoch": 2.8467966573816157, | |
| "grad_norm": 7.037858009338379, | |
| "learning_rate": 2.581244196843083e-06, | |
| "loss": 0.4476, | |
| "step": 5110 | |
| }, | |
| { | |
| "epoch": 2.852367688022284, | |
| "grad_norm": 7.54578161239624, | |
| "learning_rate": 2.488393686165274e-06, | |
| "loss": 0.4672, | |
| "step": 5120 | |
| }, | |
| { | |
| "epoch": 2.8579387186629526, | |
| "grad_norm": 7.049654006958008, | |
| "learning_rate": 2.395543175487465e-06, | |
| "loss": 0.4612, | |
| "step": 5130 | |
| }, | |
| { | |
| "epoch": 2.863509749303621, | |
| "grad_norm": 5.218736171722412, | |
| "learning_rate": 2.302692664809657e-06, | |
| "loss": 0.5456, | |
| "step": 5140 | |
| }, | |
| { | |
| "epoch": 2.86908077994429, | |
| "grad_norm": 7.270079135894775, | |
| "learning_rate": 2.2098421541318477e-06, | |
| "loss": 0.3675, | |
| "step": 5150 | |
| }, | |
| { | |
| "epoch": 2.8746518105849583, | |
| "grad_norm": 4.587645053863525, | |
| "learning_rate": 2.116991643454039e-06, | |
| "loss": 0.4582, | |
| "step": 5160 | |
| }, | |
| { | |
| "epoch": 2.8802228412256268, | |
| "grad_norm": 3.36704683303833, | |
| "learning_rate": 2.0241411327762307e-06, | |
| "loss": 0.3136, | |
| "step": 5170 | |
| }, | |
| { | |
| "epoch": 2.885793871866295, | |
| "grad_norm": 6.552951335906982, | |
| "learning_rate": 1.9312906220984215e-06, | |
| "loss": 0.3797, | |
| "step": 5180 | |
| }, | |
| { | |
| "epoch": 2.8913649025069637, | |
| "grad_norm": 3.7569034099578857, | |
| "learning_rate": 1.838440111420613e-06, | |
| "loss": 0.3851, | |
| "step": 5190 | |
| }, | |
| { | |
| "epoch": 2.896935933147632, | |
| "grad_norm": 5.141645908355713, | |
| "learning_rate": 1.745589600742804e-06, | |
| "loss": 0.3947, | |
| "step": 5200 | |
| }, | |
| { | |
| "epoch": 2.902506963788301, | |
| "grad_norm": 6.302481651306152, | |
| "learning_rate": 1.6527390900649954e-06, | |
| "loss": 0.3634, | |
| "step": 5210 | |
| }, | |
| { | |
| "epoch": 2.9080779944289694, | |
| "grad_norm": 5.418778419494629, | |
| "learning_rate": 1.5598885793871867e-06, | |
| "loss": 0.5314, | |
| "step": 5220 | |
| }, | |
| { | |
| "epoch": 2.913649025069638, | |
| "grad_norm": 4.124113082885742, | |
| "learning_rate": 1.467038068709378e-06, | |
| "loss": 0.4834, | |
| "step": 5230 | |
| }, | |
| { | |
| "epoch": 2.9192200557103063, | |
| "grad_norm": 5.347223281860352, | |
| "learning_rate": 1.3741875580315692e-06, | |
| "loss": 0.5451, | |
| "step": 5240 | |
| }, | |
| { | |
| "epoch": 2.924791086350975, | |
| "grad_norm": 3.6875739097595215, | |
| "learning_rate": 1.2813370473537605e-06, | |
| "loss": 0.4952, | |
| "step": 5250 | |
| }, | |
| { | |
| "epoch": 2.9303621169916436, | |
| "grad_norm": 4.22422456741333, | |
| "learning_rate": 1.1884865366759518e-06, | |
| "loss": 0.5036, | |
| "step": 5260 | |
| }, | |
| { | |
| "epoch": 2.935933147632312, | |
| "grad_norm": 5.798152923583984, | |
| "learning_rate": 1.095636025998143e-06, | |
| "loss": 0.5213, | |
| "step": 5270 | |
| }, | |
| { | |
| "epoch": 2.9415041782729805, | |
| "grad_norm": 3.032393217086792, | |
| "learning_rate": 1.0027855153203343e-06, | |
| "loss": 0.4278, | |
| "step": 5280 | |
| }, | |
| { | |
| "epoch": 2.947075208913649, | |
| "grad_norm": 2.0774576663970947, | |
| "learning_rate": 9.099350046425255e-07, | |
| "loss": 0.3567, | |
| "step": 5290 | |
| }, | |
| { | |
| "epoch": 2.9526462395543174, | |
| "grad_norm": 4.3096137046813965, | |
| "learning_rate": 8.170844939647168e-07, | |
| "loss": 0.4126, | |
| "step": 5300 | |
| }, | |
| { | |
| "epoch": 2.958217270194986, | |
| "grad_norm": 4.806858539581299, | |
| "learning_rate": 7.242339832869081e-07, | |
| "loss": 0.5211, | |
| "step": 5310 | |
| }, | |
| { | |
| "epoch": 2.9637883008356547, | |
| "grad_norm": 7.838840484619141, | |
| "learning_rate": 6.313834726090994e-07, | |
| "loss": 0.4344, | |
| "step": 5320 | |
| }, | |
| { | |
| "epoch": 2.969359331476323, | |
| "grad_norm": 7.3093037605285645, | |
| "learning_rate": 5.385329619312906e-07, | |
| "loss": 0.3714, | |
| "step": 5330 | |
| }, | |
| { | |
| "epoch": 2.9749303621169916, | |
| "grad_norm": 3.43082857131958, | |
| "learning_rate": 4.456824512534819e-07, | |
| "loss": 0.3626, | |
| "step": 5340 | |
| }, | |
| { | |
| "epoch": 2.98050139275766, | |
| "grad_norm": 4.765067100524902, | |
| "learning_rate": 3.528319405756732e-07, | |
| "loss": 0.5246, | |
| "step": 5350 | |
| }, | |
| { | |
| "epoch": 2.986072423398329, | |
| "grad_norm": 5.970546245574951, | |
| "learning_rate": 2.599814298978645e-07, | |
| "loss": 0.3315, | |
| "step": 5360 | |
| }, | |
| { | |
| "epoch": 2.9916434540389973, | |
| "grad_norm": 7.789907455444336, | |
| "learning_rate": 1.671309192200557e-07, | |
| "loss": 0.4864, | |
| "step": 5370 | |
| }, | |
| { | |
| "epoch": 2.997214484679666, | |
| "grad_norm": 7.811412811279297, | |
| "learning_rate": 7.428040854224699e-08, | |
| "loss": 0.483, | |
| "step": 5380 | |
| }, | |
| { | |
| "epoch": 3.0, | |
| "eval_accuracy": 0.7099470604625244, | |
| "eval_loss": 0.8620737195014954, | |
| "eval_runtime": 38.7741, | |
| "eval_samples_per_second": 92.562, | |
| "eval_steps_per_second": 5.803, | |
| "step": 5385 | |
| }, | |
| { | |
| "epoch": 3.0, | |
| "step": 5385, | |
| "total_flos": 6.674451681129007e+18, | |
| "train_loss": 0.7367175497827202, | |
| "train_runtime": 2278.8111, | |
| "train_samples_per_second": 37.795, | |
| "train_steps_per_second": 2.363 | |
| } | |
| ], | |
| "logging_steps": 10, | |
| "max_steps": 5385, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 3, | |
| "save_steps": 500, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": true | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 6.674451681129007e+18, | |
| "train_batch_size": 16, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |
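
The log above is a raw Hugging Face `trainer_state.json`, so it is easier to read once summarized programmatically. The sketch below is a minimal example, not part of the original run: it assumes the state file sits at the checkpoint path named in `best_model_checkpoint` (adjust `STATE_PATH` for your own output directory), loads the JSON, and prints the per-epoch eval metrics plus the checkpoint with the highest `eval_accuracy`.

```python
import json

# Minimal sketch: summarize a Hugging Face trainer_state.json like the one above.
# STATE_PATH is an assumption; point it at the checkpoint directory of your run.
STATE_PATH = "Emotion_DF_Image_VIT_V1/checkpoint-5385/trainer_state.json"

with open(STATE_PATH) as f:
    state = json.load(f)

# Training entries log "loss" every `logging_steps` steps; eval entries log
# "eval_accuracy"/"eval_loss" once per epoch; the final summary entry has neither key.
train_logs = [e for e in state["log_history"] if "loss" in e]
eval_logs = [e for e in state["log_history"] if "eval_accuracy" in e]

print(f"training entries logged: {len(train_logs)}")
print(f"final training loss    : {train_logs[-1]['loss']:.4f}")

for e in eval_logs:
    print(f"epoch {e['epoch']:.1f}  eval_accuracy={e['eval_accuracy']:.4f}  "
          f"eval_loss={e['eval_loss']:.4f}  step={e['step']}")

best = max(eval_logs, key=lambda e: e["eval_accuracy"])
print(f"best eval_accuracy {best['eval_accuracy']:.4f} at step {best['step']} "
      f"(should match best_metric / best_model_checkpoint in the state file)")
```

For this run the printout should show eval accuracy rising to about 0.710 at step 5385 (epoch 3.0), matching the `best_metric` and `best_model_checkpoint` recorded in the header of the state file.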