{
  "best_global_step": null,
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 24.271844660194176,
  "eval_steps": 500,
  "global_step": 5000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.24271844660194175,
      "grad_norm": 2.8362834453582764,
      "learning_rate": 4.960355987055016e-05,
      "loss": 6.0742,
      "step": 50
    },
    {
      "epoch": 0.4854368932038835,
      "grad_norm": 2.9484496116638184,
      "learning_rate": 4.9199029126213595e-05,
      "loss": 5.7218,
      "step": 100
    },
    {
      "epoch": 0.7281553398058253,
      "grad_norm": 3.542572259902954,
      "learning_rate": 4.879449838187702e-05,
      "loss": 5.4227,
      "step": 150
    },
    {
      "epoch": 0.970873786407767,
      "grad_norm": 4.634098052978516,
      "learning_rate": 4.8389967637540455e-05,
      "loss": 5.1655,
      "step": 200
    },
    {
      "epoch": 1.2135922330097086,
      "grad_norm": 5.699330806732178,
      "learning_rate": 4.798543689320388e-05,
      "loss": 4.8009,
      "step": 250
    },
    {
      "epoch": 1.4563106796116505,
      "grad_norm": 5.711933135986328,
      "learning_rate": 4.7580906148867315e-05,
      "loss": 4.607,
      "step": 300
    },
    {
      "epoch": 1.6990291262135924,
      "grad_norm": 5.113691806793213,
      "learning_rate": 4.717637540453075e-05,
      "loss": 4.462,
      "step": 350
    },
    {
      "epoch": 1.941747572815534,
      "grad_norm": 5.632521152496338,
      "learning_rate": 4.6771844660194174e-05,
      "loss": 4.3695,
      "step": 400
    },
    {
      "epoch": 2.1844660194174756,
      "grad_norm": 5.428906440734863,
      "learning_rate": 4.636731391585761e-05,
      "loss": 4.1549,
      "step": 450
    },
    {
      "epoch": 2.4271844660194173,
      "grad_norm": 5.037013530731201,
      "learning_rate": 4.596278317152104e-05,
      "loss": 4.0921,
      "step": 500
    },
    {
      "epoch": 2.6699029126213594,
      "grad_norm": 5.231846809387207,
      "learning_rate": 4.555825242718447e-05,
      "loss": 4.0662,
      "step": 550
    },
    {
      "epoch": 2.912621359223301,
      "grad_norm": 5.506278038024902,
      "learning_rate": 4.51537216828479e-05,
      "loss": 4.0403,
      "step": 600
    },
    {
      "epoch": 3.1553398058252426,
      "grad_norm": 5.410216331481934,
      "learning_rate": 4.4749190938511334e-05,
      "loss": 3.8375,
      "step": 650
    },
    {
      "epoch": 3.3980582524271843,
      "grad_norm": 5.637699127197266,
      "learning_rate": 4.434466019417476e-05,
      "loss": 3.7473,
      "step": 700
    },
    {
      "epoch": 3.6407766990291264,
      "grad_norm": 5.575273513793945,
      "learning_rate": 4.3940129449838194e-05,
      "loss": 3.7835,
      "step": 750
    },
    {
      "epoch": 3.883495145631068,
      "grad_norm": 5.737198352813721,
      "learning_rate": 4.353559870550162e-05,
      "loss": 3.7919,
      "step": 800
    },
    {
      "epoch": 4.12621359223301,
      "grad_norm": 5.645532608032227,
      "learning_rate": 4.313106796116505e-05,
      "loss": 3.6425,
      "step": 850
    },
    {
      "epoch": 4.368932038834951,
      "grad_norm": 6.308927059173584,
      "learning_rate": 4.272653721682848e-05,
      "loss": 3.4785,
      "step": 900
    },
    {
      "epoch": 4.611650485436893,
      "grad_norm": 5.863094329833984,
      "learning_rate": 4.232200647249191e-05,
      "loss": 3.5268,
      "step": 950
    },
    {
      "epoch": 4.854368932038835,
      "grad_norm": 6.120258331298828,
      "learning_rate": 4.191747572815534e-05,
      "loss": 3.5296,
      "step": 1000
    },
    {
      "epoch": 5.097087378640777,
      "grad_norm": 6.231515884399414,
      "learning_rate": 4.1512944983818774e-05,
      "loss": 3.3802,
      "step": 1050
    },
    {
      "epoch": 5.339805825242719,
      "grad_norm": 6.211188316345215,
      "learning_rate": 4.11084142394822e-05,
      "loss": 3.2148,
      "step": 1100
    },
    {
      "epoch": 5.58252427184466,
      "grad_norm": 6.8676934242248535,
      "learning_rate": 4.0703883495145634e-05,
      "loss": 3.2111,
      "step": 1150
    },
    {
      "epoch": 5.825242718446602,
      "grad_norm": 6.851133346557617,
      "learning_rate": 4.029935275080906e-05,
      "loss": 3.2771,
      "step": 1200
    },
    {
      "epoch": 6.067961165048544,
      "grad_norm": 6.708765506744385,
      "learning_rate": 3.9894822006472494e-05,
      "loss": 3.1587,
      "step": 1250
    },
    {
      "epoch": 6.310679611650485,
      "grad_norm": 7.136553764343262,
      "learning_rate": 3.949029126213593e-05,
      "loss": 2.9192,
      "step": 1300
    },
    {
      "epoch": 6.553398058252427,
      "grad_norm": 7.370823383331299,
      "learning_rate": 3.9085760517799354e-05,
      "loss": 2.922,
      "step": 1350
    },
    {
      "epoch": 6.796116504854369,
      "grad_norm": 7.630361557006836,
      "learning_rate": 3.868122977346279e-05,
      "loss": 2.9417,
      "step": 1400
    },
    {
      "epoch": 7.038834951456311,
      "grad_norm": 7.765511989593506,
      "learning_rate": 3.827669902912622e-05,
      "loss": 2.8669,
      "step": 1450
    },
    {
      "epoch": 7.281553398058253,
      "grad_norm": 7.85439920425415,
      "learning_rate": 3.787216828478965e-05,
      "loss": 2.5412,
      "step": 1500
    },
    {
      "epoch": 7.524271844660194,
      "grad_norm": 8.064071655273438,
      "learning_rate": 3.746763754045307e-05,
      "loss": 2.6131,
      "step": 1550
    },
    {
      "epoch": 7.766990291262136,
      "grad_norm": 8.692522048950195,
      "learning_rate": 3.7063106796116507e-05,
      "loss": 2.6356,
      "step": 1600
    },
    {
      "epoch": 8.009708737864077,
      "grad_norm": 8.012410163879395,
      "learning_rate": 3.665857605177993e-05,
      "loss": 2.599,
      "step": 1650
    },
    {
      "epoch": 8.25242718446602,
      "grad_norm": 8.6799898147583,
      "learning_rate": 3.6254045307443366e-05,
      "loss": 2.1992,
      "step": 1700
    },
    {
      "epoch": 8.495145631067961,
      "grad_norm": 8.963507652282715,
      "learning_rate": 3.584951456310679e-05,
      "loss": 2.2701,
      "step": 1750
    },
    {
      "epoch": 8.737864077669903,
      "grad_norm": 9.833822250366211,
      "learning_rate": 3.5444983818770226e-05,
      "loss": 2.2837,
      "step": 1800
    },
    {
      "epoch": 8.980582524271846,
      "grad_norm": 9.47242259979248,
      "learning_rate": 3.504045307443366e-05,
      "loss": 2.2854,
      "step": 1850
    },
    {
      "epoch": 9.223300970873787,
      "grad_norm": 8.856402397155762,
      "learning_rate": 3.4635922330097086e-05,
      "loss": 1.8862,
      "step": 1900
    },
    {
      "epoch": 9.466019417475728,
      "grad_norm": 9.948341369628906,
      "learning_rate": 3.423139158576052e-05,
      "loss": 1.922,
      "step": 1950
    },
    {
      "epoch": 9.70873786407767,
      "grad_norm": 10.28939151763916,
      "learning_rate": 3.382686084142395e-05,
      "loss": 1.9622,
      "step": 2000
    },
    {
      "epoch": 9.951456310679612,
      "grad_norm": 10.114766120910645,
      "learning_rate": 3.342233009708738e-05,
      "loss": 1.9741,
      "step": 2050
    },
    {
      "epoch": 10.194174757281553,
      "grad_norm": 9.489270210266113,
      "learning_rate": 3.301779935275081e-05,
      "loss": 1.6509,
      "step": 2100
    },
    {
      "epoch": 10.436893203883495,
      "grad_norm": 10.16482162475586,
      "learning_rate": 3.2613268608414246e-05,
      "loss": 1.5883,
      "step": 2150
    },
    {
      "epoch": 10.679611650485437,
      "grad_norm": 11.778059005737305,
      "learning_rate": 3.220873786407767e-05,
      "loss": 1.6234,
      "step": 2200
    },
    {
      "epoch": 10.922330097087379,
      "grad_norm": 10.798454284667969,
      "learning_rate": 3.1804207119741106e-05,
      "loss": 1.667,
      "step": 2250
    },
    {
      "epoch": 11.16504854368932,
      "grad_norm": 9.751585006713867,
      "learning_rate": 3.139967637540453e-05,
      "loss": 1.3785,
      "step": 2300
    },
    {
      "epoch": 11.407766990291263,
      "grad_norm": 10.266364097595215,
      "learning_rate": 3.099514563106796e-05,
      "loss": 1.2956,
      "step": 2350
    },
    {
      "epoch": 11.650485436893204,
      "grad_norm": 10.660856246948242,
      "learning_rate": 3.059061488673139e-05,
      "loss": 1.3321,
      "step": 2400
    },
    {
      "epoch": 11.893203883495145,
      "grad_norm": 10.896187782287598,
      "learning_rate": 3.0186084142394822e-05,
      "loss": 1.3533,
      "step": 2450
    },
    {
      "epoch": 12.135922330097088,
      "grad_norm": 10.952502250671387,
      "learning_rate": 2.9781553398058252e-05,
      "loss": 1.1486,
      "step": 2500
    },
    {
      "epoch": 12.37864077669903,
      "grad_norm": 10.766463279724121,
      "learning_rate": 2.9377022653721686e-05,
      "loss": 1.0227,
      "step": 2550
    },
    {
      "epoch": 12.62135922330097,
      "grad_norm": 9.385764122009277,
      "learning_rate": 2.8972491909385112e-05,
      "loss": 1.0651,
      "step": 2600
    },
    {
      "epoch": 12.864077669902912,
      "grad_norm": 11.573925018310547,
      "learning_rate": 2.8567961165048546e-05,
      "loss": 1.1013,
      "step": 2650
    },
    {
      "epoch": 13.106796116504855,
      "grad_norm": 10.138091087341309,
      "learning_rate": 2.816343042071198e-05,
      "loss": 0.9545,
      "step": 2700
    },
    {
      "epoch": 13.349514563106796,
      "grad_norm": 9.867119789123535,
      "learning_rate": 2.7758899676375405e-05,
      "loss": 0.7892,
      "step": 2750
    },
    {
      "epoch": 13.592233009708737,
      "grad_norm": 10.443971633911133,
      "learning_rate": 2.735436893203884e-05,
      "loss": 0.8432,
      "step": 2800
    },
    {
      "epoch": 13.83495145631068,
      "grad_norm": 10.633298873901367,
      "learning_rate": 2.6949838187702265e-05,
      "loss": 0.8543,
      "step": 2850
    },
    {
      "epoch": 14.077669902912621,
      "grad_norm": 8.640830039978027,
      "learning_rate": 2.6545307443365695e-05,
      "loss": 0.7751,
      "step": 2900
    },
    {
      "epoch": 14.320388349514563,
      "grad_norm": 10.461247444152832,
      "learning_rate": 2.614077669902913e-05,
      "loss": 0.6074,
      "step": 2950
    },
    {
      "epoch": 14.563106796116505,
      "grad_norm": 10.757479667663574,
      "learning_rate": 2.5736245954692555e-05,
      "loss": 0.6354,
      "step": 3000
    },
    {
      "epoch": 14.805825242718447,
      "grad_norm": 11.599132537841797,
      "learning_rate": 2.533171521035599e-05,
      "loss": 0.6694,
      "step": 3050
    },
    {
      "epoch": 15.048543689320388,
      "grad_norm": 8.085358619689941,
      "learning_rate": 2.492718446601942e-05,
      "loss": 0.6306,
      "step": 3100
    },
    {
      "epoch": 15.29126213592233,
      "grad_norm": 8.327988624572754,
      "learning_rate": 2.452265372168285e-05,
      "loss": 0.4628,
      "step": 3150
    },
    {
      "epoch": 15.533980582524272,
      "grad_norm": 8.545391082763672,
      "learning_rate": 2.411812297734628e-05,
      "loss": 0.4915,
      "step": 3200
    },
    {
      "epoch": 15.776699029126213,
      "grad_norm": 10.176375389099121,
      "learning_rate": 2.3713592233009708e-05,
      "loss": 0.5105,
      "step": 3250
    },
    {
      "epoch": 16.019417475728154,
      "grad_norm": 7.821159362792969,
      "learning_rate": 2.3309061488673138e-05,
      "loss": 0.5055,
      "step": 3300
    },
    {
      "epoch": 16.262135922330096,
      "grad_norm": 7.797800064086914,
      "learning_rate": 2.290453074433657e-05,
      "loss": 0.3486,
      "step": 3350
    },
    {
      "epoch": 16.50485436893204,
      "grad_norm": 8.497370719909668,
      "learning_rate": 2.25e-05,
      "loss": 0.3627,
      "step": 3400
    },
    {
      "epoch": 16.74757281553398,
      "grad_norm": 9.842249870300293,
      "learning_rate": 2.209546925566343e-05,
      "loss": 0.3969,
      "step": 3450
    },
    {
      "epoch": 16.990291262135923,
      "grad_norm": 10.705739974975586,
      "learning_rate": 2.169093851132686e-05,
      "loss": 0.4028,
      "step": 3500
    },
    {
      "epoch": 17.233009708737864,
      "grad_norm": 7.950681686401367,
      "learning_rate": 2.1286407766990295e-05,
      "loss": 0.284,
      "step": 3550
    },
    {
      "epoch": 17.475728155339805,
      "grad_norm": 8.526535034179688,
      "learning_rate": 2.0881877022653725e-05,
      "loss": 0.2934,
      "step": 3600
    },
    {
      "epoch": 17.718446601941746,
      "grad_norm": 8.999290466308594,
      "learning_rate": 2.047734627831715e-05,
      "loss": 0.2964,
      "step": 3650
    },
    {
      "epoch": 17.96116504854369,
      "grad_norm": 9.228137969970703,
      "learning_rate": 2.007281553398058e-05,
      "loss": 0.311,
      "step": 3700
    },
    {
      "epoch": 18.203883495145632,
      "grad_norm": 6.715802192687988,
      "learning_rate": 1.9668284789644014e-05,
      "loss": 0.2263,
      "step": 3750
    },
    {
      "epoch": 18.446601941747574,
      "grad_norm": 7.834264278411865,
      "learning_rate": 1.9263754045307444e-05,
      "loss": 0.2293,
      "step": 3800
    },
    {
      "epoch": 18.689320388349515,
      "grad_norm": 6.83228874206543,
      "learning_rate": 1.8859223300970874e-05,
      "loss": 0.2392,
      "step": 3850
    },
    {
      "epoch": 18.932038834951456,
      "grad_norm": 8.507209777832031,
      "learning_rate": 1.8454692556634304e-05,
      "loss": 0.2377,
      "step": 3900
    },
    {
      "epoch": 19.174757281553397,
      "grad_norm": 6.565855026245117,
      "learning_rate": 1.8050161812297738e-05,
      "loss": 0.1905,
      "step": 3950
    },
    {
      "epoch": 19.41747572815534,
      "grad_norm": 7.470388889312744,
      "learning_rate": 1.7645631067961167e-05,
      "loss": 0.1846,
      "step": 4000
    },
    {
      "epoch": 19.660194174757283,
      "grad_norm": 7.033578395843506,
      "learning_rate": 1.7241100323624594e-05,
      "loss": 0.1911,
      "step": 4050
    },
    {
      "epoch": 19.902912621359224,
      "grad_norm": 8.110376358032227,
      "learning_rate": 1.6836569579288027e-05,
      "loss": 0.1954,
      "step": 4100
    },
    {
      "epoch": 20.145631067961165,
      "grad_norm": 7.335451602935791,
      "learning_rate": 1.6432038834951457e-05,
      "loss": 0.1619,
      "step": 4150
    },
    {
      "epoch": 20.388349514563107,
      "grad_norm": 7.478648662567139,
      "learning_rate": 1.6027508090614887e-05,
      "loss": 0.1497,
      "step": 4200
    },
    {
      "epoch": 20.631067961165048,
      "grad_norm": 7.764925479888916,
      "learning_rate": 1.5622977346278317e-05,
      "loss": 0.1559,
      "step": 4250
    },
    {
      "epoch": 20.87378640776699,
      "grad_norm": 6.432894706726074,
      "learning_rate": 1.5218446601941749e-05,
      "loss": 0.1601,
      "step": 4300
    },
    {
      "epoch": 21.116504854368934,
      "grad_norm": 7.728542327880859,
      "learning_rate": 1.4813915857605179e-05,
      "loss": 0.1416,
      "step": 4350
    },
    {
      "epoch": 21.359223300970875,
      "grad_norm": 6.370733737945557,
      "learning_rate": 1.4409385113268609e-05,
      "loss": 0.1211,
      "step": 4400
    },
    {
      "epoch": 21.601941747572816,
      "grad_norm": 5.939419746398926,
      "learning_rate": 1.4004854368932039e-05,
      "loss": 0.1278,
      "step": 4450
    },
    {
      "epoch": 21.844660194174757,
      "grad_norm": 6.0013957023620605,
      "learning_rate": 1.360032362459547e-05,
      "loss": 0.1274,
      "step": 4500
    },
    {
      "epoch": 22.0873786407767,
      "grad_norm": 6.1365966796875,
      "learning_rate": 1.31957928802589e-05,
      "loss": 0.1205,
      "step": 4550
    },
    {
      "epoch": 22.33009708737864,
      "grad_norm": 5.679115295410156,
      "learning_rate": 1.279126213592233e-05,
      "loss": 0.106,
      "step": 4600
    },
    {
      "epoch": 22.57281553398058,
      "grad_norm": 4.750304698944092,
      "learning_rate": 1.2386731391585762e-05,
      "loss": 0.108,
      "step": 4650
    },
    {
      "epoch": 22.815533980582526,
      "grad_norm": 6.115650653839111,
      "learning_rate": 1.1982200647249192e-05,
      "loss": 0.1086,
      "step": 4700
    },
    {
      "epoch": 23.058252427184467,
      "grad_norm": 5.252938270568848,
      "learning_rate": 1.1577669902912622e-05,
      "loss": 0.1041,
      "step": 4750
    },
    {
      "epoch": 23.300970873786408,
      "grad_norm": 5.175116062164307,
      "learning_rate": 1.1173139158576053e-05,
      "loss": 0.0872,
      "step": 4800
    },
    {
      "epoch": 23.54368932038835,
      "grad_norm": 5.768143653869629,
      "learning_rate": 1.0768608414239483e-05,
      "loss": 0.0945,
      "step": 4850
    },
    {
      "epoch": 23.78640776699029,
      "grad_norm": 5.235666275024414,
      "learning_rate": 1.0364077669902913e-05,
      "loss": 0.0969,
      "step": 4900
    },
    {
      "epoch": 24.02912621359223,
      "grad_norm": 4.407253265380859,
      "learning_rate": 9.959546925566343e-06,
      "loss": 0.0933,
      "step": 4950
    },
    {
      "epoch": 24.271844660194176,
      "grad_norm": 5.28978157043457,
      "learning_rate": 9.555016181229775e-06,
      "loss": 0.0766,
      "step": 5000
    }
  ],
  "logging_steps": 50,
  "max_steps": 6180,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 30,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 2321767464960000.0,
  "train_batch_size": 4,
  "trial_name": null,
  "trial_params": null
}