{
  "best_global_step": null,
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.0,
  "eval_steps": 500,
  "global_step": 10482,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.019080328181644724,
      "grad_norm": 3213.42529296875,
      "learning_rate": 4.990555237550087e-05,
      "loss": 15.0258,
      "step": 100
    },
    {
      "epoch": 0.03816065636328945,
      "grad_norm": 57.823360443115234,
      "learning_rate": 4.981015073459264e-05,
      "loss": 10.8207,
      "step": 200
    },
    {
      "epoch": 0.057240984544934176,
      "grad_norm": 21.06822967529297,
      "learning_rate": 4.971474909368441e-05,
      "loss": 8.1947,
      "step": 300
    },
    {
      "epoch": 0.0763213127265789,
      "grad_norm": 25.405773162841797,
      "learning_rate": 4.9619347452776186e-05,
      "loss": 6.9094,
      "step": 400
    },
    {
      "epoch": 0.09540164090822362,
      "grad_norm": 10.772579193115234,
      "learning_rate": 4.9523945811867966e-05,
      "loss": 6.1739,
      "step": 500
    },
    {
      "epoch": 0.11448196908986835,
      "grad_norm": 7.486720085144043,
      "learning_rate": 4.9428544170959746e-05,
      "loss": 5.7799,
      "step": 600
    },
    {
      "epoch": 0.13356229727151306,
      "grad_norm": 17.71492576599121,
      "learning_rate": 4.933314253005152e-05,
      "loss": 5.4888,
      "step": 700
    },
    {
      "epoch": 0.1526426254531578,
      "grad_norm": 7.538534164428711,
      "learning_rate": 4.92377408891433e-05,
      "loss": 5.1287,
      "step": 800
    },
    {
      "epoch": 0.17172295363480253,
      "grad_norm": 4.852605819702148,
      "learning_rate": 4.914233924823507e-05,
      "loss": 5.0529,
      "step": 900
    },
    {
      "epoch": 0.19080328181644723,
      "grad_norm": 13.604716300964355,
      "learning_rate": 4.9046937607326846e-05,
      "loss": 4.918,
      "step": 1000
    },
    {
      "epoch": 0.20988360999809197,
      "grad_norm": 7.916113376617432,
      "learning_rate": 4.8951535966418626e-05,
      "loss": 4.676,
      "step": 1100
    },
    {
      "epoch": 0.2289639381797367,
      "grad_norm": 9.100526809692383,
      "learning_rate": 4.88561343255104e-05,
      "loss": 4.5912,
      "step": 1200
    },
    {
      "epoch": 0.2480442663613814,
      "grad_norm": 7.34255313873291,
      "learning_rate": 4.876073268460218e-05,
      "loss": 4.58,
      "step": 1300
    },
    {
      "epoch": 0.2671245945430261,
      "grad_norm": 11.250228881835938,
      "learning_rate": 4.866533104369395e-05,
      "loss": 4.3254,
      "step": 1400
    },
    {
      "epoch": 0.28620492272467085,
      "grad_norm": 10.732246398925781,
      "learning_rate": 4.856992940278573e-05,
      "loss": 4.2089,
      "step": 1500
    },
    {
      "epoch": 0.3052852509063156,
      "grad_norm": 5.343315124511719,
      "learning_rate": 4.847452776187751e-05,
      "loss": 4.2465,
      "step": 1600
    },
    {
      "epoch": 0.3243655790879603,
      "grad_norm": 4.741443157196045,
      "learning_rate": 4.837912612096928e-05,
      "loss": 4.0806,
      "step": 1700
    },
    {
      "epoch": 0.34344590726960506,
      "grad_norm": 5.156026363372803,
      "learning_rate": 4.828372448006106e-05,
      "loss": 3.9851,
      "step": 1800
    },
    {
      "epoch": 0.36252623545124973,
      "grad_norm": 5.886138916015625,
      "learning_rate": 4.818832283915283e-05,
      "loss": 3.9438,
      "step": 1900
    },
    {
      "epoch": 0.38160656363289447,
      "grad_norm": 8.471810340881348,
      "learning_rate": 4.809292119824461e-05,
      "loss": 3.9255,
      "step": 2000
    },
    {
      "epoch": 0.4006868918145392,
      "grad_norm": 6.079995155334473,
      "learning_rate": 4.799751955733639e-05,
      "loss": 3.8405,
      "step": 2100
    },
    {
      "epoch": 0.41976721999618394,
      "grad_norm": 8.389578819274902,
      "learning_rate": 4.7902117916428164e-05,
      "loss": 3.8096,
      "step": 2200
    },
    {
      "epoch": 0.4388475481778287,
      "grad_norm": 5.557002544403076,
      "learning_rate": 4.7806716275519944e-05,
      "loss": 3.8691,
      "step": 2300
    },
    {
      "epoch": 0.4579278763594734,
      "grad_norm": 22.748138427734375,
      "learning_rate": 4.771131463461172e-05,
      "loss": 3.8514,
      "step": 2400
    },
    {
      "epoch": 0.4770082045411181,
      "grad_norm": 8.196257591247559,
      "learning_rate": 4.761591299370349e-05,
      "loss": 3.6942,
      "step": 2500
    },
    {
      "epoch": 0.4960885327227628,
      "grad_norm": 5.773292064666748,
      "learning_rate": 4.752051135279527e-05,
      "loss": 3.5213,
      "step": 2600
    },
    {
      "epoch": 0.5151688609044076,
      "grad_norm": 8.48924446105957,
      "learning_rate": 4.7425109711887044e-05,
      "loss": 3.4898,
      "step": 2700
    },
    {
      "epoch": 0.5342491890860522,
      "grad_norm": 4.775367736816406,
      "learning_rate": 4.7329708070978824e-05,
      "loss": 3.6168,
      "step": 2800
    },
    {
      "epoch": 0.553329517267697,
      "grad_norm": 8.374899864196777,
      "learning_rate": 4.7234306430070604e-05,
      "loss": 3.3548,
      "step": 2900
    },
    {
      "epoch": 0.5724098454493417,
      "grad_norm": 8.98188591003418,
      "learning_rate": 4.713890478916238e-05,
      "loss": 3.4166,
      "step": 3000
    },
    {
      "epoch": 0.5914901736309864,
      "grad_norm": 5.574390411376953,
      "learning_rate": 4.704350314825415e-05,
      "loss": 3.3954,
      "step": 3100
    },
    {
      "epoch": 0.6105705018126312,
      "grad_norm": 6.005886554718018,
      "learning_rate": 4.694810150734592e-05,
      "loss": 3.3784,
      "step": 3200
    },
    {
      "epoch": 0.6296508299942759,
      "grad_norm": 5.605027675628662,
      "learning_rate": 4.68526998664377e-05,
      "loss": 3.4084,
      "step": 3300
    },
    {
      "epoch": 0.6487311581759206,
      "grad_norm": 6.774117946624756,
      "learning_rate": 4.675729822552948e-05,
      "loss": 3.2394,
      "step": 3400
    },
    {
      "epoch": 0.6678114863575654,
      "grad_norm": 6.457028865814209,
      "learning_rate": 4.6661896584621256e-05,
      "loss": 3.1922,
      "step": 3500
    },
    {
      "epoch": 0.6868918145392101,
      "grad_norm": 5.8072028160095215,
      "learning_rate": 4.6566494943713036e-05,
      "loss": 3.2006,
      "step": 3600
    },
    {
      "epoch": 0.7059721427208548,
      "grad_norm": 5.757169723510742,
      "learning_rate": 4.647109330280481e-05,
      "loss": 3.2241,
      "step": 3700
    },
    {
      "epoch": 0.7250524709024995,
      "grad_norm": 7.030402660369873,
      "learning_rate": 4.637569166189659e-05,
      "loss": 3.1935,
      "step": 3800
    },
    {
      "epoch": 0.7441327990841442,
      "grad_norm": 5.983602523803711,
      "learning_rate": 4.628029002098836e-05,
      "loss": 3.2654,
      "step": 3900
    },
    {
      "epoch": 0.7632131272657889,
      "grad_norm": 5.327564239501953,
      "learning_rate": 4.6184888380080136e-05,
      "loss": 3.1183,
      "step": 4000
    },
    {
      "epoch": 0.7822934554474337,
      "grad_norm": 5.376145362854004,
      "learning_rate": 4.6089486739171916e-05,
      "loss": 2.9999,
      "step": 4100
    },
    {
      "epoch": 0.8013737836290784,
      "grad_norm": 7.061012268066406,
      "learning_rate": 4.5994085098263696e-05,
      "loss": 2.9889,
      "step": 4200
    },
    {
      "epoch": 0.8204541118107231,
      "grad_norm": 7.72469425201416,
      "learning_rate": 4.589868345735547e-05,
      "loss": 3.1113,
      "step": 4300
    },
    {
      "epoch": 0.8395344399923679,
      "grad_norm": 7.000074863433838,
      "learning_rate": 4.580328181644725e-05,
      "loss": 3.0533,
      "step": 4400
    },
    {
      "epoch": 0.8586147681740126,
      "grad_norm": 4.172057628631592,
      "learning_rate": 4.570788017553902e-05,
      "loss": 3.0605,
      "step": 4500
    },
    {
      "epoch": 0.8776950963556573,
      "grad_norm": 7.515087604522705,
      "learning_rate": 4.5612478534630795e-05,
      "loss": 3.0591,
      "step": 4600
    },
    {
      "epoch": 0.8967754245373021,
      "grad_norm": 5.259555339813232,
      "learning_rate": 4.5517076893722575e-05,
      "loss": 3.0083,
      "step": 4700
    },
    {
      "epoch": 0.9158557527189468,
      "grad_norm": 6.854462146759033,
      "learning_rate": 4.542167525281435e-05,
      "loss": 2.8963,
      "step": 4800
    },
    {
      "epoch": 0.9349360809005914,
      "grad_norm": 7.629165172576904,
      "learning_rate": 4.532627361190613e-05,
      "loss": 2.9848,
      "step": 4900
    },
    {
      "epoch": 0.9540164090822362,
      "grad_norm": 7.160517692565918,
      "learning_rate": 4.52308719709979e-05,
      "loss": 2.989,
      "step": 5000
    },
    {
      "epoch": 0.9730967372638809,
      "grad_norm": 5.499044895172119,
      "learning_rate": 4.513547033008968e-05,
      "loss": 2.8799,
      "step": 5100
    },
    {
      "epoch": 0.9921770654455256,
      "grad_norm": 7.522376537322998,
      "learning_rate": 4.504006868918146e-05,
      "loss": 2.8484,
      "step": 5200
    },
    {
      "epoch": 1.0,
      "eval_loss": 2.272996425628662,
      "eval_runtime": 202.0635,
      "eval_samples_per_second": 23.052,
      "eval_steps_per_second": 2.885,
      "step": 5241
    },
    {
      "epoch": 1.0112573936271705,
      "grad_norm": 6.653578281402588,
      "learning_rate": 4.4944667048273234e-05,
      "loss": 2.8934,
      "step": 5300
    },
    {
      "epoch": 1.0303377218088152,
      "grad_norm": 4.845913887023926,
      "learning_rate": 4.484926540736501e-05,
      "loss": 2.6501,
      "step": 5400
    },
    {
      "epoch": 1.0494180499904597,
      "grad_norm": 5.232843399047852,
      "learning_rate": 4.475386376645678e-05,
      "loss": 2.7856,
      "step": 5500
    },
    {
      "epoch": 1.0684983781721045,
      "grad_norm": 6.629660129547119,
      "learning_rate": 4.465846212554856e-05,
      "loss": 2.8155,
      "step": 5600
    },
    {
      "epoch": 1.0875787063537492,
      "grad_norm": 4.096762180328369,
      "learning_rate": 4.456306048464034e-05,
      "loss": 2.789,
      "step": 5700
    },
    {
      "epoch": 1.106659034535394,
      "grad_norm": 6.521574020385742,
      "learning_rate": 4.4467658843732114e-05,
      "loss": 2.749,
      "step": 5800
    },
    {
      "epoch": 1.1257393627170387,
      "grad_norm": 4.849175930023193,
      "learning_rate": 4.4372257202823894e-05,
      "loss": 2.7133,
      "step": 5900
    },
    {
      "epoch": 1.1448196908986834,
      "grad_norm": 5.988980770111084,
      "learning_rate": 4.427685556191567e-05,
      "loss": 2.704,
      "step": 6000
    },
    {
      "epoch": 1.1639000190803281,
      "grad_norm": 4.623400688171387,
      "learning_rate": 4.418145392100744e-05,
      "loss": 2.6576,
      "step": 6100
    },
    {
      "epoch": 1.1829803472619729,
      "grad_norm": 4.707732200622559,
      "learning_rate": 4.408605228009922e-05,
      "loss": 2.7199,
      "step": 6200
    },
    {
      "epoch": 1.2020606754436176,
      "grad_norm": 6.403053283691406,
      "learning_rate": 4.399065063919099e-05,
      "loss": 2.6899,
      "step": 6300
    },
    {
      "epoch": 1.2211410036252623,
      "grad_norm": 6.6001152992248535,
      "learning_rate": 4.389524899828277e-05,
      "loss": 2.7606,
      "step": 6400
    },
    {
      "epoch": 1.240221331806907,
      "grad_norm": 5.213536739349365,
      "learning_rate": 4.379984735737455e-05,
      "loss": 2.7052,
      "step": 6500
    },
    {
      "epoch": 1.2593016599885518,
      "grad_norm": 4.794836044311523,
      "learning_rate": 4.3704445716466326e-05,
      "loss": 2.7579,
      "step": 6600
    },
    {
      "epoch": 1.2783819881701965,
      "grad_norm": 5.8824143409729,
      "learning_rate": 4.3609044075558106e-05,
      "loss": 2.6076,
      "step": 6700
    },
    {
      "epoch": 1.2974623163518413,
      "grad_norm": 6.338265419006348,
      "learning_rate": 4.351364243464988e-05,
      "loss": 2.5968,
      "step": 6800
    },
    {
      "epoch": 1.316542644533486,
      "grad_norm": 6.901667594909668,
      "learning_rate": 4.341824079374165e-05,
      "loss": 2.6482,
      "step": 6900
    },
    {
      "epoch": 1.3356229727151308,
      "grad_norm": 6.267462730407715,
      "learning_rate": 4.332283915283343e-05,
      "loss": 2.6434,
      "step": 7000
    },
    {
      "epoch": 1.3547033008967755,
      "grad_norm": 5.581694602966309,
      "learning_rate": 4.3227437511925206e-05,
      "loss": 2.5478,
      "step": 7100
    },
    {
      "epoch": 1.3737836290784202,
      "grad_norm": 4.70138692855835,
      "learning_rate": 4.3132035871016985e-05,
      "loss": 2.5637,
      "step": 7200
    },
    {
      "epoch": 1.392863957260065,
      "grad_norm": 6.99065637588501,
      "learning_rate": 4.303663423010876e-05,
      "loss": 2.5464,
      "step": 7300
    },
    {
      "epoch": 1.4119442854417095,
      "grad_norm": 6.660660743713379,
      "learning_rate": 4.294123258920054e-05,
      "loss": 2.5766,
      "step": 7400
    },
    {
      "epoch": 1.4310246136233542,
      "grad_norm": 5.83965539932251,
      "learning_rate": 4.284583094829231e-05,
      "loss": 2.5757,
      "step": 7500
    },
    {
      "epoch": 1.450104941804999,
      "grad_norm": 5.41910982131958,
      "learning_rate": 4.2750429307384085e-05,
      "loss": 2.6146,
      "step": 7600
    },
    {
      "epoch": 1.4691852699866437,
      "grad_norm": 4.368034839630127,
      "learning_rate": 4.2655027666475865e-05,
      "loss": 2.4073,
      "step": 7700
    },
    {
      "epoch": 1.4882655981682884,
      "grad_norm": 3.716670036315918,
      "learning_rate": 4.255962602556764e-05,
      "loss": 2.5729,
      "step": 7800
    },
    {
      "epoch": 1.5073459263499331,
      "grad_norm": 4.219146251678467,
      "learning_rate": 4.246422438465942e-05,
      "loss": 2.4766,
      "step": 7900
    },
    {
      "epoch": 1.5264262545315779,
      "grad_norm": 5.474557399749756,
      "learning_rate": 4.23688227437512e-05,
      "loss": 2.5176,
      "step": 8000
    },
    {
      "epoch": 1.5455065827132226,
      "grad_norm": 93.11466217041016,
      "learning_rate": 4.227342110284297e-05,
      "loss": 2.3887,
      "step": 8100
    },
    {
      "epoch": 1.5645869108948673,
      "grad_norm": 6.055609703063965,
      "learning_rate": 4.217801946193475e-05,
      "loss": 2.5206,
      "step": 8200
    },
    {
      "epoch": 1.583667239076512,
      "grad_norm": 6.243997573852539,
      "learning_rate": 4.2082617821026524e-05,
      "loss": 2.4598,
      "step": 8300
    },
    {
      "epoch": 1.6027475672581568,
      "grad_norm": 5.589599132537842,
      "learning_rate": 4.19872161801183e-05,
      "loss": 2.4932,
      "step": 8400
    },
    {
      "epoch": 1.6218278954398015,
      "grad_norm": 6.761661052703857,
      "learning_rate": 4.189181453921008e-05,
      "loss": 2.4582,
      "step": 8500
    },
    {
      "epoch": 1.6409082236214463,
      "grad_norm": 4.9730963706970215,
      "learning_rate": 4.179641289830185e-05,
      "loss": 2.4221,
      "step": 8600
    },
    {
      "epoch": 1.659988551803091,
      "grad_norm": 6.11653995513916,
      "learning_rate": 4.170101125739363e-05,
      "loss": 2.4959,
      "step": 8700
    },
    {
      "epoch": 1.6790688799847358,
      "grad_norm": 5.8039398193359375,
      "learning_rate": 4.160560961648541e-05,
      "loss": 2.4431,
      "step": 8800
    },
    {
      "epoch": 1.6981492081663805,
      "grad_norm": 4.404674530029297,
      "learning_rate": 4.1510207975577184e-05,
      "loss": 2.5057,
      "step": 8900
    },
    {
      "epoch": 1.7172295363480252,
      "grad_norm": 4.7745256423950195,
      "learning_rate": 4.141480633466896e-05,
      "loss": 2.4474,
      "step": 9000
    },
    {
      "epoch": 1.73630986452967,
      "grad_norm": 4.619002819061279,
      "learning_rate": 4.131940469376073e-05,
      "loss": 2.3865,
      "step": 9100
    },
    {
      "epoch": 1.7553901927113147,
      "grad_norm": 5.063472270965576,
      "learning_rate": 4.122400305285251e-05,
      "loss": 2.3381,
      "step": 9200
    },
    {
      "epoch": 1.7744705208929594,
      "grad_norm": 5.410485744476318,
      "learning_rate": 4.112860141194429e-05,
      "loss": 2.4036,
      "step": 9300
    },
    {
      "epoch": 1.7935508490746042,
      "grad_norm": 5.242465496063232,
      "learning_rate": 4.103319977103606e-05,
      "loss": 2.3774,
      "step": 9400
    },
    {
      "epoch": 1.812631177256249,
      "grad_norm": 5.760533809661865,
      "learning_rate": 4.093779813012784e-05,
      "loss": 2.3344,
      "step": 9500
    },
    {
      "epoch": 1.8317115054378936,
      "grad_norm": 6.042536735534668,
      "learning_rate": 4.0842396489219616e-05,
      "loss": 2.37,
      "step": 9600
    },
    {
      "epoch": 1.8507918336195384,
      "grad_norm": 5.290925025939941,
      "learning_rate": 4.0746994848311396e-05,
      "loss": 2.4738,
      "step": 9700
    },
    {
      "epoch": 1.869872161801183,
      "grad_norm": 4.640475273132324,
      "learning_rate": 4.065159320740317e-05,
      "loss": 2.3234,
      "step": 9800
    },
    {
      "epoch": 1.8889524899828278,
      "grad_norm": 6.546270847320557,
      "learning_rate": 4.055619156649494e-05,
      "loss": 2.412,
      "step": 9900
    },
    {
      "epoch": 1.9080328181644726,
      "grad_norm": 4.5001325607299805,
      "learning_rate": 4.046078992558672e-05,
      "loss": 2.3115,
      "step": 10000
    },
    {
      "epoch": 1.9271131463461173,
      "grad_norm": 4.442992210388184,
      "learning_rate": 4.0365388284678495e-05,
      "loss": 2.3329,
      "step": 10100
    },
    {
      "epoch": 1.946193474527762,
      "grad_norm": 4.229004383087158,
      "learning_rate": 4.0269986643770275e-05,
      "loss": 2.2672,
      "step": 10200
    },
    {
      "epoch": 1.9652738027094065,
      "grad_norm": 5.293257713317871,
      "learning_rate": 4.0174585002862055e-05,
      "loss": 2.3171,
      "step": 10300
    },
    {
      "epoch": 1.9843541308910513,
      "grad_norm": 5.781225681304932,
      "learning_rate": 4.007918336195383e-05,
      "loss": 2.4069,
      "step": 10400
    },
    {
      "epoch": 2.0,
      "eval_loss": 1.8545597791671753,
      "eval_runtime": 201.9056,
      "eval_samples_per_second": 23.07,
      "eval_steps_per_second": 2.887,
      "step": 10482
    }
  ],
  "logging_steps": 100,
  "max_steps": 52410,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 10,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 2.870402397555917e+16,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}