{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.0,
  "eval_steps": 500,
  "global_step": 266,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0.01, "learning_rate": 2.5e-06, "loss": 2.0742, "step": 1 },
    { "epoch": 0.02, "learning_rate": 5e-06, "loss": 2.0547, "step": 2 },
    { "epoch": 0.02, "learning_rate": 7.500000000000001e-06, "loss": 2.0146, "step": 3 },
    { "epoch": 0.03, "learning_rate": 1e-05, "loss": 1.8975, "step": 4 },
    { "epoch": 0.04, "learning_rate": 1.25e-05, "loss": 1.3799, "step": 5 },
    { "epoch": 0.05, "learning_rate": 1.5000000000000002e-05, "loss": 0.6846, "step": 6 },
    { "epoch": 0.05, "learning_rate": 1.7500000000000002e-05, "loss": 0.5581, "step": 7 },
    { "epoch": 0.06, "learning_rate": 2e-05, "loss": 0.4543, "step": 8 },
    { "epoch": 0.07, "learning_rate": 1.9999258647132645e-05, "loss": 0.4529, "step": 9 },
    { "epoch": 0.08, "learning_rate": 1.9997034698451396e-05, "loss": 0.6243, "step": 10 },
    { "epoch": 0.08, "learning_rate": 1.9993328483702393e-05, "loss": 0.3416, "step": 11 },
    { "epoch": 0.09, "learning_rate": 1.998814055240823e-05, "loss": 0.3396, "step": 12 },
    { "epoch": 0.1, "learning_rate": 1.998147167378645e-05, "loss": 0.3477, "step": 13 },
    { "epoch": 0.11, "learning_rate": 1.9973322836635517e-05, "loss": 0.2388, "step": 14 },
    { "epoch": 0.11, "learning_rate": 1.9963695249188185e-05, "loss": 0.2283, "step": 15 },
    { "epoch": 0.12, "learning_rate": 1.995259033893236e-05, "loss": 0.231, "step": 16 },
    { "epoch": 0.13, "learning_rate": 1.9940009752399462e-05, "loss": 0.2572, "step": 17 },
    { "epoch": 0.14, "learning_rate": 1.9925955354920265e-05, "loss": 0.2421, "step": 18 },
    { "epoch": 0.14, "learning_rate": 1.9910429230348348e-05, "loss": 0.1833, "step": 19 },
    { "epoch": 0.15, "learning_rate": 1.9893433680751105e-05, "loss": 0.2149, "step": 20 },
    { "epoch": 0.16, "learning_rate": 1.9874971226068417e-05, "loss": 0.2416, "step": 21 },
    { "epoch": 0.17, "learning_rate": 1.985504460373903e-05, "loss": 0.2078, "step": 22 },
    { "epoch": 0.17, "learning_rate": 1.983365676829466e-05, "loss": 0.2142, "step": 23 },
    { "epoch": 0.18, "learning_rate": 1.9810810890921943e-05, "loss": 0.2382, "step": 24 },
    { "epoch": 0.19, "learning_rate": 1.9786510358992213e-05, "loss": 0.2029, "step": 25 },
    { "epoch": 0.2, "learning_rate": 1.9760758775559275e-05, "loss": 0.2825, "step": 26 },
    { "epoch": 0.2, "learning_rate": 1.9733559958825167e-05, "loss": 0.1848, "step": 27 },
    { "epoch": 0.21, "learning_rate": 1.9704917941574053e-05, "loss": 0.2075, "step": 28 },
    { "epoch": 0.22, "learning_rate": 1.9674836970574253e-05, "loss": 0.2456, "step": 29 },
    { "epoch": 0.23, "learning_rate": 1.9643321505948588e-05, "loss": 0.2137, "step": 30 },
    { "epoch": 0.23, "learning_rate": 1.9610376220513067e-05, "loss": 0.202, "step": 31 },
    { "epoch": 0.24, "learning_rate": 1.957600599908406e-05, "loss": 0.1826, "step": 32 },
    { "epoch": 0.25, "learning_rate": 1.954021593775401e-05, "loss": 0.1988, "step": 33 },
    { "epoch": 0.26, "learning_rate": 1.9503011343135828e-05, "loss": 0.2067, "step": 34 },
    { "epoch": 0.26, "learning_rate": 1.9464397731576093e-05, "loss": 0.2019, "step": 35 },
    { "epoch": 0.27, "learning_rate": 1.9424380828337146e-05, "loss": 0.2025, "step": 36 },
    { "epoch": 0.28, "learning_rate": 1.938296656674817e-05, "loss": 0.1572, "step": 37 },
    { "epoch": 0.29, "learning_rate": 1.9340161087325483e-05, "loss": 0.1804, "step": 38 },
    { "epoch": 0.29, "learning_rate": 1.9295970736862063e-05, "loss": 0.2239, "step": 39 },
    { "epoch": 0.3, "learning_rate": 1.9250402067486523e-05, "loss": 0.1693, "step": 40 },
    { "epoch": 0.31, "learning_rate": 1.9203461835691596e-05, "loss": 0.1964, "step": 41 },
    { "epoch": 0.32, "learning_rate": 1.9155157001332374e-05, "loss": 0.1744, "step": 42 },
    { "epoch": 0.32, "learning_rate": 1.9105494726594344e-05, "loss": 0.2098, "step": 43 },
    { "epoch": 0.33, "learning_rate": 1.905448237493147e-05, "loss": 0.1784, "step": 44 },
    { "epoch": 0.34, "learning_rate": 1.9002127509974376e-05, "loss": 0.1815, "step": 45 },
    { "epoch": 0.35, "learning_rate": 1.894843789440892e-05, "loss": 0.1642, "step": 46 },
    { "epoch": 0.35, "learning_rate": 1.889342148882519e-05, "loss": 0.1773, "step": 47 },
    { "epoch": 0.36, "learning_rate": 1.8837086450537195e-05, "loss": 0.1722, "step": 48 },
    { "epoch": 0.37, "learning_rate": 1.877944113237336e-05, "loss": 0.1935, "step": 49 },
    { "epoch": 0.38, "learning_rate": 1.872049408143808e-05, "loss": 0.1766, "step": 50 },
    { "epoch": 0.38, "learning_rate": 1.866025403784439e-05, "loss": 0.1514, "step": 51 },
    { "epoch": 0.39, "learning_rate": 1.8598729933418102e-05, "loss": 0.1553, "step": 52 },
    { "epoch": 0.4, "learning_rate": 1.8535930890373467e-05, "loss": 0.1625, "step": 53 },
    { "epoch": 0.41, "learning_rate": 1.8471866219960604e-05, "loss": 0.1744, "step": 54 },
    { "epoch": 0.41, "learning_rate": 1.840654542108494e-05, "loss": 0.1664, "step": 55 },
    { "epoch": 0.42, "learning_rate": 1.833997817889878e-05, "loss": 0.1796, "step": 56 },
    { "epoch": 0.43, "learning_rate": 1.82721743633653e-05, "loss": 0.1835, "step": 57 },
    { "epoch": 0.44, "learning_rate": 1.820314402779511e-05, "loss": 0.1347, "step": 58 },
    { "epoch": 0.44, "learning_rate": 1.8132897407355657e-05, "loss": 0.1498, "step": 59 },
    { "epoch": 0.45, "learning_rate": 1.806144491755363e-05, "loss": 0.1874, "step": 60 },
    { "epoch": 0.46, "learning_rate": 1.798879715269067e-05, "loss": 0.1813, "step": 61 },
    { "epoch": 0.47, "learning_rate": 1.7914964884292543e-05, "loss": 0.1652, "step": 62 },
    { "epoch": 0.47, "learning_rate": 1.7839959059512016e-05, "loss": 0.1758, "step": 63 },
    { "epoch": 0.48, "learning_rate": 1.7763790799505746e-05, "loss": 0.1517, "step": 64 },
    { "epoch": 0.49, "learning_rate": 1.7686471397785322e-05, "loss": 0.1331, "step": 65 },
    { "epoch": 0.5, "learning_rate": 1.760801231854278e-05, "loss": 0.1804, "step": 66 },
    { "epoch": 0.5, "learning_rate": 1.7528425194950794e-05, "loss": 0.1702, "step": 67 },
    { "epoch": 0.51, "learning_rate": 1.744772182743782e-05, "loss": 0.1869, "step": 68 },
    { "epoch": 0.52, "learning_rate": 1.736591418193844e-05, "loss": 0.164, "step": 69 },
    { "epoch": 0.53, "learning_rate": 1.728301438811916e-05, "loss": 0.153, "step": 70 },
    { "epoch": 0.53, "learning_rate": 1.7199034737579962e-05, "loss": 0.1337, "step": 71 },
    { "epoch": 0.54, "learning_rate": 1.711398768203178e-05, "loss": 0.1545, "step": 72 },
    { "epoch": 0.55, "learning_rate": 1.7027885831450318e-05, "loss": 0.147, "step": 73 },
    { "epoch": 0.56, "learning_rate": 1.6940741952206342e-05, "loss": 0.1668, "step": 74 },
    { "epoch": 0.56, "learning_rate": 1.6852568965172794e-05, "loss": 0.1582, "step": 75 },
    { "epoch": 0.57, "learning_rate": 1.676337994380903e-05, "loss": 0.1497, "step": 76 },
    { "epoch": 0.58, "learning_rate": 1.6673188112222394e-05, "loss": 0.1652, "step": 77 },
    { "epoch": 0.59, "learning_rate": 1.658200684320748e-05, "loss": 0.1369, "step": 78 },
    { "epoch": 0.59, "learning_rate": 1.6489849656263336e-05, "loss": 0.1319, "step": 79 },
    { "epoch": 0.6, "learning_rate": 1.6396730215588913e-05, "loss": 0.142, "step": 80 },
    { "epoch": 0.61, "learning_rate": 1.630266232805709e-05, "loss": 0.1476, "step": 81 },
    { "epoch": 0.62, "learning_rate": 1.6207659941167485e-05, "loss": 0.1442, "step": 82 },
    { "epoch": 0.62, "learning_rate": 1.6111737140978495e-05, "loss": 0.1619, "step": 83 },
    { "epoch": 0.63, "learning_rate": 1.6014908150018703e-05, "loss": 0.1325, "step": 84 },
    { "epoch": 0.64, "learning_rate": 1.591718732517814e-05, "loss": 0.1619, "step": 85 },
    { "epoch": 0.65, "learning_rate": 1.581858915557953e-05, "loss": 0.1843, "step": 86 },
    { "epoch": 0.65, "learning_rate": 1.571912826043003e-05, "loss": 0.1698, "step": 87 },
    { "epoch": 0.66, "learning_rate": 1.5618819386853607e-05, "loss": 0.1383, "step": 88 },
    { "epoch": 0.67, "learning_rate": 1.551767740770446e-05, "loss": 0.1434, "step": 89 },
    { "epoch": 0.68, "learning_rate": 1.541571731936185e-05, "loss": 0.1462, "step": 90 },
    { "epoch": 0.68, "learning_rate": 1.5312954239506536e-05, "loss": 0.1529, "step": 91 },
    { "epoch": 0.69, "learning_rate": 1.5209403404879305e-05, "loss": 0.1664, "step": 92 },
    { "epoch": 0.7, "learning_rate": 1.5105080169021792e-05, "loss": 0.1861, "step": 93 },
    { "epoch": 0.71, "learning_rate": 1.5000000000000002e-05, "loss": 0.1297, "step": 94 },
    { "epoch": 0.71, "learning_rate": 1.4894178478110856e-05, "loss": 0.1562, "step": 95 },
    { "epoch": 0.72, "learning_rate": 1.4787631293572094e-05, "loss": 0.1788, "step": 96 },
    { "epoch": 0.73, "learning_rate": 1.4680374244195861e-05, "loss": 0.1285, "step": 97 },
    { "epoch": 0.74, "learning_rate": 1.4572423233046386e-05, "loss": 0.1382, "step": 98 },
    { "epoch": 0.74, "learning_rate": 1.4463794266081994e-05, "loss": 0.1442, "step": 99 },
    { "epoch": 0.75, "learning_rate": 1.4354503449781914e-05, "loss": 0.1635, "step": 100 },
    { "epoch": 0.76, "learning_rate": 1.4244566988758152e-05, "loss": 0.1526, "step": 101 },
    { "epoch": 0.77, "learning_rate": 1.4134001183352833e-05, "loss": 0.1498, "step": 102 },
    { "epoch": 0.77, "learning_rate": 1.4022822427221325e-05, "loss": 0.1404, "step": 103 },
    { "epoch": 0.78, "learning_rate": 1.391104720490156e-05, "loss": 0.1343, "step": 104 },
    { "epoch": 0.79, "learning_rate": 1.3798692089369855e-05, "loss": 0.1554, "step": 105 },
    { "epoch": 0.8, "learning_rate": 1.368577373958362e-05, "loss": 0.1398, "step": 106 },
    { "epoch": 0.8, "learning_rate": 1.3572308898011328e-05, "loss": 0.1562, "step": 107 },
    { "epoch": 0.81, "learning_rate": 1.3458314388150115e-05, "loss": 0.1654, "step": 108 },
    { "epoch": 0.82, "learning_rate": 1.3343807112031329e-05, "loss": 0.1649, "step": 109 },
    { "epoch": 0.83, "learning_rate": 1.3228804047714462e-05, "loss": 0.1546, "step": 110 },
    { "epoch": 0.83, "learning_rate": 1.3113322246769817e-05, "loss": 0.1614, "step": 111 },
    { "epoch": 0.84, "learning_rate": 1.2997378831750242e-05, "loss": 0.1662, "step": 112 },
    { "epoch": 0.85, "learning_rate": 1.2880990993652379e-05, "loss": 0.1458, "step": 113 },
    { "epoch": 0.86, "learning_rate": 1.2764175989367717e-05, "loss": 0.1398, "step": 114 },
    { "epoch": 0.86, "learning_rate": 1.2646951139123935e-05, "loss": 0.1199, "step": 115 },
    { "epoch": 0.87, "learning_rate": 1.2529333823916807e-05, "loss": 0.139, "step": 116 },
    { "epoch": 0.88, "learning_rate": 1.241134148293311e-05, "loss": 0.1296, "step": 117 },
    { "epoch": 0.89, "learning_rate": 1.2292991610964902e-05, "loss": 0.1225, "step": 118 },
    { "epoch": 0.89, "learning_rate": 1.2174301755815572e-05, "loss": 0.1279, "step": 119 },
    { "epoch": 0.9, "learning_rate": 1.2055289515698008e-05, "loss": 0.1222, "step": 120 },
    { "epoch": 0.91, "learning_rate": 1.1935972536625302e-05, "loss": 0.1698, "step": 121 },
    { "epoch": 0.92, "learning_rate": 1.1816368509794365e-05, "loss": 0.172, "step": 122 },
    { "epoch": 0.92, "learning_rate": 1.1696495168962848e-05, "loss": 0.1402, "step": 123 },
    { "epoch": 0.93, "learning_rate": 1.1576370287819737e-05, "loss": 0.1676, "step": 124 },
    { "epoch": 0.94, "learning_rate": 1.1456011677350052e-05, "loss": 0.1151, "step": 125 },
    { "epoch": 0.95, "learning_rate": 1.133543718319398e-05, "loss": 0.1575, "step": 126 },
    { "epoch": 0.95, "learning_rate": 1.1214664683000927e-05, "loss": 0.129, "step": 127 },
    { "epoch": 0.96, "learning_rate": 1.1093712083778748e-05, "loss": 0.1293, "step": 128 },
    { "epoch": 0.97, "learning_rate": 1.0972597319238692e-05, "loss": 0.1532, "step": 129 },
    { "epoch": 0.98, "learning_rate": 1.0851338347136358e-05, "loss": 0.1334, "step": 130 },
    { "epoch": 0.98, "learning_rate": 1.0729953146609076e-05, "loss": 0.1505, "step": 131 },
    { "epoch": 0.99, "learning_rate": 1.060845971551014e-05, "loss": 0.1459, "step": 132 },
    { "epoch": 1.0, "learning_rate": 1.0486876067740253e-05, "loss": 0.135, "step": 133 },
    { "epoch": 1.01, "learning_rate": 1.0365220230576592e-05, "loss": 0.12, "step": 134 },
    { "epoch": 1.02, "learning_rate": 1.0243510241999898e-05, "loss": 0.1477, "step": 135 },
    { "epoch": 1.02, "learning_rate": 1.0121764148019977e-05, "loss": 0.1238, "step": 136 },
    { "epoch": 1.03, "learning_rate": 1e-05, "loss": 0.1257, "step": 137 },
    { "epoch": 1.04, "learning_rate": 9.878235851980027e-06, "loss": 0.1036, "step": 138 },
    { "epoch": 1.05, "learning_rate": 9.756489758000105e-06, "loss": 0.1221, "step": 139 },
    { "epoch": 1.05, "learning_rate": 9.634779769423412e-06, "loss": 0.122, "step": 140 },
    { "epoch": 1.06, "learning_rate": 9.51312393225975e-06, "loss": 0.1373, "step": 141 },
    { "epoch": 1.07, "learning_rate": 9.391540284489862e-06, "loss": 0.139, "step": 142 },
    { "epoch": 1.08, "learning_rate": 9.270046853390924e-06, "loss": 0.1224, "step": 143 },
    { "epoch": 1.08, "learning_rate": 9.148661652863644e-06, "loss": 0.1137, "step": 144 },
    { "epoch": 1.09, "learning_rate": 9.02740268076131e-06, "loss": 0.1207, "step": 145 },
    { "epoch": 1.1, "learning_rate": 8.906287916221259e-06, "loss": 0.118, "step": 146 },
    { "epoch": 1.11, "learning_rate": 8.785335316999078e-06, "loss": 0.1257, "step": 147 },
    { "epoch": 1.11, "learning_rate": 8.664562816806022e-06, "loss": 0.1539, "step": 148 },
    { "epoch": 1.12, "learning_rate": 8.543988322649954e-06, "loss": 0.1462, "step": 149 },
    { "epoch": 1.13, "learning_rate": 8.423629712180265e-06, "loss": 0.11, "step": 150 },
    { "epoch": 1.14, "learning_rate": 8.303504831037154e-06, "loss": 0.1104, "step": 151 },
    { "epoch": 1.14, "learning_rate": 8.183631490205636e-06, "loss": 0.1078, "step": 152 },
    { "epoch": 1.15, "learning_rate": 8.064027463374702e-06, "loss": 0.1068, "step": 153 },
    { "epoch": 1.16, "learning_rate": 7.944710484301995e-06, "loss": 0.1045, "step": 154 },
    { "epoch": 1.17, "learning_rate": 7.825698244184432e-06, "loss": 0.1284, "step": 155 },
    { "epoch": 1.17, "learning_rate": 7.707008389035102e-06, "loss": 0.1277, "step": 156 },
    { "epoch": 1.18, "learning_rate": 7.588658517066893e-06, "loss": 0.1289, "step": 157 },
    { "epoch": 1.19, "learning_rate": 7.470666176083193e-06, "loss": 0.109, "step": 158 },
    { "epoch": 1.2, "learning_rate": 7.3530488608760645e-06, "loss": 0.1153, "step": 159 },
    { "epoch": 1.2, "learning_rate": 7.235824010632284e-06, "loss": 0.1118, "step": 160 },
    { "epoch": 1.21, "learning_rate": 7.119009006347625e-06, "loss": 0.1049, "step": 161 },
    { "epoch": 1.22, "learning_rate": 7.002621168249759e-06, "loss": 0.1234, "step": 162 },
    { "epoch": 1.23, "learning_rate": 6.886677753230184e-06, "loss": 0.1219, "step": 163 },
    { "epoch": 1.23, "learning_rate": 6.771195952285541e-06, "loss": 0.1054, "step": 164 },
    { "epoch": 1.24, "learning_rate": 6.656192887968675e-06, "loss": 0.1237, "step": 165 },
    { "epoch": 1.25, "learning_rate": 6.5416856118498874e-06, "loss": 0.1561, "step": 166 },
    { "epoch": 1.26, "learning_rate": 6.427691101988673e-06, "loss": 0.0963, "step": 167 },
    { "epoch": 1.26, "learning_rate": 6.314226260416383e-06, "loss": 0.1124, "step": 168 },
    { "epoch": 1.27, "learning_rate": 6.2013079106301454e-06, "loss": 0.1276, "step": 169 },
    { "epoch": 1.28, "learning_rate": 6.088952795098442e-06, "loss": 0.1115, "step": 170 },
    { "epoch": 1.29, "learning_rate": 5.977177572778679e-06, "loss": 0.0958, "step": 171 },
    { "epoch": 1.29, "learning_rate": 5.8659988166471715e-06, "loss": 0.1177, "step": 172 },
    { "epoch": 1.3, "learning_rate": 5.755433011241851e-06, "loss": 0.1144, "step": 173 },
    { "epoch": 1.31, "learning_rate": 5.645496550218089e-06, "loss": 0.108, "step": 174 },
    { "epoch": 1.32, "learning_rate": 5.5362057339180075e-06, "loss": 0.1177, "step": 175 },
    { "epoch": 1.32, "learning_rate": 5.427576766953615e-06, "loss": 0.092, "step": 176 },
    { "epoch": 1.33, "learning_rate": 5.3196257558041386e-06, "loss": 0.1225, "step": 177 },
    { "epoch": 1.34, "learning_rate": 5.212368706427913e-06, "loss": 0.1392, "step": 178 },
    { "epoch": 1.35, "learning_rate": 5.105821521889147e-06, "loss": 0.1061, "step": 179 },
    { "epoch": 1.35, "learning_rate": 5.000000000000003e-06, "loss": 0.118, "step": 180 },
    { "epoch": 1.36, "learning_rate": 4.894919830978212e-06, "loss": 0.1248, "step": 181 },
    { "epoch": 1.37, "learning_rate": 4.790596595120699e-06, "loss": 0.1234, "step": 182 },
    { "epoch": 1.38, "learning_rate": 4.687045760493468e-06, "loss": 0.1224, "step": 183 },
    { "epoch": 1.38, "learning_rate": 4.584282680638155e-06, "loss": 0.133, "step": 184 },
    { "epoch": 1.39, "learning_rate": 4.482322592295541e-06, "loss": 0.1417, "step": 185 },
    { "epoch": 1.4, "learning_rate": 4.381180613146396e-06, "loss": 0.1147, "step": 186 },
    { "epoch": 1.41, "learning_rate": 4.280871739569972e-06, "loss": 0.1213, "step": 187 },
    { "epoch": 1.41, "learning_rate": 4.181410844420473e-06, "loss": 0.0836, "step": 188 },
    { "epoch": 1.42, "learning_rate": 4.082812674821865e-06, "loss": 0.0853, "step": 189 },
    { "epoch": 1.43, "learning_rate": 3.9850918499812976e-06, "loss": 0.1254, "step": 190 },
    { "epoch": 1.44, "learning_rate": 3.888262859021508e-06, "loss": 0.1277, "step": 191 },
    { "epoch": 1.44, "learning_rate": 3.7923400588325156e-06, "loss": 0.0917, "step": 192 },
    { "epoch": 1.45, "learning_rate": 3.6973376719429134e-06, "loss": 0.1456, "step": 193 },
    { "epoch": 1.46, "learning_rate": 3.6032697844110896e-06, "loss": 0.124, "step": 194 },
    { "epoch": 1.47, "learning_rate": 3.5101503437366678e-06, "loss": 0.0943, "step": 195 },
    { "epoch": 1.47, "learning_rate": 3.4179931567925216e-06, "loss": 0.1146, "step": 196 },
    { "epoch": 1.48, "learning_rate": 3.326811887777607e-06, "loss": 0.1019, "step": 197 },
    { "epoch": 1.49, "learning_rate": 3.236620056190972e-06, "loss": 0.1053, "step": 198 },
    { "epoch": 1.5, "learning_rate": 3.1474310348272084e-06, "loss": 0.1224, "step": 199 },
    { "epoch": 1.5, "learning_rate": 3.0592580477936606e-06, "loss": 0.1314, "step": 200 },
    { "epoch": 1.51, "learning_rate": 2.9721141685496825e-06, "loss": 0.0914, "step": 201 },
    { "epoch": 1.52, "learning_rate": 2.8860123179682244e-06, "loss": 0.1222, "step": 202 },
    { "epoch": 1.53, "learning_rate": 2.8009652624200436e-06, "loss": 0.1019, "step": 203 },
    { "epoch": 1.53, "learning_rate": 2.7169856118808414e-06, "loss": 0.1086, "step": 204 },
    { "epoch": 1.54, "learning_rate": 2.634085818061565e-06, "loss": 0.108, "step": 205 },
    { "epoch": 1.55, "learning_rate": 2.5522781725621814e-06, "loss": 0.1176, "step": 206 },
    { "epoch": 1.56, "learning_rate": 2.471574805049206e-06, "loss": 0.1178, "step": 207 },
    { "epoch": 1.56, "learning_rate": 2.3919876814572197e-06, "loss": 0.1147, "step": 208 },
    { "epoch": 1.57, "learning_rate": 2.3135286022146785e-06, "loss": 0.106, "step": 209 },
    { "epoch": 1.58, "learning_rate": 2.2362092004942583e-06, "loss": 0.1154, "step": 210 },
    { "epoch": 1.59, "learning_rate": 2.1600409404879875e-06, "loss": 0.0973, "step": 211 },
    { "epoch": 1.59, "learning_rate": 2.08503511570746e-06, "loss": 0.1172, "step": 212 },
    { "epoch": 1.6, "learning_rate": 2.0112028473093294e-06, "loss": 0.1213, "step": 213 },
    { "epoch": 1.61, "learning_rate": 1.9385550824463727e-06, "loss": 0.1039, "step": 214 },
    { "epoch": 1.62, "learning_rate": 1.8671025926443464e-06, "loss": 0.1249, "step": 215 },
    { "epoch": 1.62, "learning_rate": 1.7968559722048906e-06, "loss": 0.1056, "step": 216 },
    { "epoch": 1.63, "learning_rate": 1.7278256366347034e-06, "loss": 0.1184, "step": 217 },
    { "epoch": 1.64, "learning_rate": 1.660021821101222e-06, "loss": 0.1202, "step": 218 },
    { "epoch": 1.65, "learning_rate": 1.5934545789150625e-06, "loss": 0.1163, "step": 219 },
    { "epoch": 1.65, "learning_rate": 1.528133780039397e-06, "loss": 0.1143, "step": 220 },
    { "epoch": 1.66, "learning_rate": 1.4640691096265358e-06, "loss": 0.1092, "step": 221 },
    { "epoch": 1.67, "learning_rate": 1.401270066581899e-06, "loss": 0.1157, "step": 222 },
    { "epoch": 1.68, "learning_rate": 1.339745962155613e-06, "loss": 0.0928, "step": 223 },
    { "epoch": 1.68, "learning_rate": 1.279505918561923e-06, "loss": 0.1296, "step": 224 },
    { "epoch": 1.69, "learning_rate": 1.220558867626639e-06, "loss": 0.1038, "step": 225 },
    { "epoch": 1.7, "learning_rate": 1.1629135494628097e-06, "loss": 0.141, "step": 226 },
    { "epoch": 1.71, "learning_rate": 1.1065785111748117e-06, "loss": 0.1101, "step": 227 },
    { "epoch": 1.71, "learning_rate": 1.051562105591082e-06, "loss": 0.1127, "step": 228 },
    { "epoch": 1.72, "learning_rate": 9.978724900256265e-07, "loss": 0.1355, "step": 229 },
    { "epoch": 1.73, "learning_rate": 9.455176250685338e-07, "loss": 0.0976, "step": 230 },
    { "epoch": 1.74, "learning_rate": 8.945052734056581e-07, "loss": 0.1107, "step": 231 },
    { "epoch": 1.74, "learning_rate": 8.448429986676298e-07, "loss": 0.1114, "step": 232 },
    { "epoch": 1.75, "learning_rate": 7.965381643084069e-07, "loss": 0.1155, "step": 233 },
    { "epoch": 1.76, "learning_rate": 7.495979325134806e-07, "loss": 0.122, "step": 234 },
    { "epoch": 1.77, "learning_rate": 7.040292631379386e-07, "loss": 0.0782, "step": 235 },
    { "epoch": 1.77, "learning_rate": 6.598389126745209e-07, "loss": 0.1371, "step": 236 },
    { "epoch": 1.78, "learning_rate": 6.170334332518325e-07, "loss": 0.1231, "step": 237 },
    { "epoch": 1.79, "learning_rate": 5.756191716628556e-07, "loss": 0.1062, "step": 238 },
    { "epoch": 1.8, "learning_rate": 5.35602268423906e-07, "loss": 0.1248, "step": 239 },
    { "epoch": 1.8, "learning_rate": 4.969886568641757e-07, "loss": 0.1003, "step": 240 },
    { "epoch": 1.81, "learning_rate": 4.597840622459937e-07, "loss": 0.1039, "step": 241 },
    { "epoch": 1.82, "learning_rate": 4.2399400091594154e-07, "loss": 0.0974, "step": 242 },
    { "epoch": 1.83, "learning_rate": 3.8962377948693395e-07, "loss": 0.1321, "step": 243 },
    { "epoch": 1.83, "learning_rate": 3.566784940514145e-07, "loss": 0.1183, "step": 244 },
    { "epoch": 1.84, "learning_rate": 3.2516302942574794e-07, "loss": 0.125, "step": 245 },
    { "epoch": 1.85, "learning_rate": 2.9508205842594727e-07, "loss": 0.1149, "step": 246 },
    { "epoch": 1.86, "learning_rate": 2.6644004117483357e-07, "loss": 0.1093, "step": 247 },
    { "epoch": 1.86, "learning_rate": 2.392412244407294e-07, "loss": 0.1132, "step": 248 },
    { "epoch": 1.87, "learning_rate": 2.1348964100778914e-07, "loss": 0.1186, "step": 249 },
    { "epoch": 1.88, "learning_rate": 1.8918910907805733e-07, "loss": 0.1076, "step": 250 },
    { "epoch": 1.89, "learning_rate": 1.6634323170533928e-07, "loss": 0.1241, "step": 251 },
    { "epoch": 1.89, "learning_rate": 1.4495539626097289e-07, "loss": 0.0967, "step": 252 },
    { "epoch": 1.9, "learning_rate": 1.2502877393158587e-07, "loss": 0.1174, "step": 253 },
    { "epoch": 1.91, "learning_rate": 1.0656631924889749e-07, "loss": 0.1166, "step": 254 },
    { "epoch": 1.92, "learning_rate": 8.957076965165234e-08, "loss": 0.1073, "step": 255 },
    { "epoch": 1.92, "learning_rate": 7.404464507973608e-08, "loss": 0.1028, "step": 256 },
    { "epoch": 1.93, "learning_rate": 5.999024760054095e-08, "loss": 0.1204, "step": 257 },
    { "epoch": 1.94, "learning_rate": 4.740966106764222e-08, "loss": 0.1044, "step": 258 },
    { "epoch": 1.95, "learning_rate": 3.630475081181861e-08, "loss": 0.1172, "step": 259 },
    { "epoch": 1.95, "learning_rate": 2.667716336448356e-08, "loss": 0.0876, "step": 260 },
    { "epoch": 1.96, "learning_rate": 1.8528326213548276e-08, "loss": 0.098, "step": 261 },
    { "epoch": 1.97, "learning_rate": 1.1859447591769934e-08, "loss": 0.1297, "step": 262 },
    { "epoch": 1.98, "learning_rate": 6.671516297606095e-09, "loss": 0.1068, "step": 263 },
    { "epoch": 1.98, "learning_rate": 2.9653015486064143e-09, "loss": 0.1143, "step": 264 },
    { "epoch": 1.99, "learning_rate": 7.413528673549941e-10, "loss": 0.1131, "step": 265 },
    { "epoch": 2.0, "learning_rate": 0.0, "loss": 0.1078, "step": 266 },
    { "epoch": 2.0, "step": 266, "total_flos": 48998327648256.0, "train_loss": 0.18359931429525964, "train_runtime": 932.4861, "train_samples_per_second": 36.462, "train_steps_per_second": 0.285 }
  ],
  "logging_steps": 1.0,
  "max_steps": 266,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 2,
  "save_steps": 1000,
  "total_flos": 48998327648256.0,
  "train_batch_size": 16,
  "trial_name": null,
  "trial_params": null
}