{"current_steps": 5, "total_steps": 453, "loss": 0.6997, "learning_rate": 4.998497170031657e-05, "epoch": 0.033112582781456956, "percentage": 1.1, "elapsed_time": "0:00:11", "remaining_time": "0:17:06", "throughput": 428.77, "total_tokens": 4912}
{"current_steps": 10, "total_steps": 453, "loss": 0.3509, "learning_rate": 4.9939904869249616e-05, "epoch": 0.06622516556291391, "percentage": 2.21, "elapsed_time": "0:00:22", "remaining_time": "0:16:36", "throughput": 441.91, "total_tokens": 9936}
{"current_steps": 15, "total_steps": 453, "loss": 0.2853, "learning_rate": 4.9864853689026556e-05, "epoch": 0.09933774834437085, "percentage": 3.31, "elapsed_time": "0:00:33", "remaining_time": "0:16:17", "throughput": 443.85, "total_tokens": 14864}
{"current_steps": 20, "total_steps": 453, "loss": 0.2456, "learning_rate": 4.975990839097764e-05, "epoch": 0.13245033112582782, "percentage": 4.42, "elapsed_time": "0:00:44", "remaining_time": "0:15:56", "throughput": 448.8, "total_tokens": 19824}
{"current_steps": 25, "total_steps": 453, "loss": 0.2084, "learning_rate": 4.9625195147054034e-05, "epoch": 0.16556291390728478, "percentage": 5.52, "elapsed_time": "0:00:55", "remaining_time": "0:15:43", "throughput": 444.22, "total_tokens": 24480}
{"current_steps": 30, "total_steps": 453, "loss": 0.1626, "learning_rate": 4.9460875918135804e-05, "epoch": 0.1986754966887417, "percentage": 6.62, "elapsed_time": "0:01:05", "remaining_time": "0:15:30", "throughput": 444.12, "total_tokens": 29296}
{"current_steps": 35, "total_steps": 453, "loss": 0.1405, "learning_rate": 4.9267148259312224e-05, "epoch": 0.23178807947019867, "percentage": 7.73, "elapsed_time": "0:01:16", "remaining_time": "0:15:15", "throughput": 442.72, "total_tokens": 33936}
{"current_steps": 40, "total_steps": 453, "loss": 0.1182, "learning_rate": 4.9044245082368415e-05, "epoch": 0.26490066225165565, "percentage": 8.83, "elapsed_time": "0:01:27", "remaining_time": "0:15:07", "throughput": 444.57, "total_tokens": 39056}
{"current_steps": 45, "total_steps": 453, "loss": 0.1285, "learning_rate": 4.879243437576383e-05, "epoch": 0.2980132450331126, "percentage": 9.93, "elapsed_time": "0:01:39", "remaining_time": "0:15:00", "throughput": 438.23, "total_tokens": 43520}
{"current_steps": 50, "total_steps": 453, "loss": 0.0906, "learning_rate": 4.8512018882439475e-05, "epoch": 0.33112582781456956, "percentage": 11.04, "elapsed_time": "0:01:50", "remaining_time": "0:14:50", "throughput": 440.48, "total_tokens": 48656}
{"current_steps": 55, "total_steps": 453, "loss": 0.054, "learning_rate": 4.820333573584091e-05, "epoch": 0.36423841059602646, "percentage": 12.14, "elapsed_time": "0:02:01", "remaining_time": "0:14:37", "throughput": 441.77, "total_tokens": 53600}
{"current_steps": 60, "total_steps": 453, "loss": 0.073, "learning_rate": 4.786675605459487e-05, "epoch": 0.3973509933774834, "percentage": 13.25, "elapsed_time": "0:02:12", "remaining_time": "0:14:25", "throughput": 442.07, "total_tokens": 58384}
{"current_steps": 65, "total_steps": 453, "loss": 0.0524, "learning_rate": 4.7502684496326746e-05, "epoch": 0.4304635761589404, "percentage": 14.35, "elapsed_time": "0:02:22", "remaining_time": "0:14:11", "throughput": 442.81, "total_tokens": 63152}
{"current_steps": 70, "total_steps": 453, "loss": 0.0534, "learning_rate": 4.711155877115523e-05, "epoch": 0.46357615894039733, "percentage": 15.45, "elapsed_time": "0:02:33", "remaining_time": "0:13:58", "throughput": 442.95, "total_tokens": 67888}
{"current_steps": 75, "total_steps": 453, "loss": 0.0759, "learning_rate": 4.669384911544927e-05, "epoch": 0.4966887417218543, "percentage": 16.56, "elapsed_time": "0:02:43", "remaining_time": "0:13:46", "throughput": 442.11, "total_tokens": 72480}
{"current_steps": 80, "total_steps": 453, "loss": 0.0766, "learning_rate": 4.625005772647979e-05, "epoch": 0.5298013245033113, "percentage": 17.66, "elapsed_time": "0:02:54", "remaining_time": "0:13:35", "throughput": 440.9, "total_tokens": 77120}
{"current_steps": 85, "total_steps": 453, "loss": 0.0383, "learning_rate": 4.578071815864602e-05, "epoch": 0.5629139072847682, "percentage": 18.76, "elapsed_time": "0:03:05", "remaining_time": "0:13:25", "throughput": 438.74, "total_tokens": 81584}
{"current_steps": 90, "total_steps": 453, "loss": 0.0393, "learning_rate": 4.528639468200226e-05, "epoch": 0.5960264900662252, "percentage": 19.87, "elapsed_time": "0:03:16", "remaining_time": "0:13:13", "throughput": 439.35, "total_tokens": 86416}
{"current_steps": 95, "total_steps": 453, "loss": 0.0589, "learning_rate": 4.476768160385632e-05, "epoch": 0.6291390728476821, "percentage": 20.97, "elapsed_time": "0:03:27", "remaining_time": "0:13:00", "throughput": 440.4, "total_tokens": 91248}
{"current_steps": 100, "total_steps": 453, "loss": 0.0351, "learning_rate": 4.4225202554255227e-05, "epoch": 0.6622516556291391, "percentage": 22.08, "elapsed_time": "0:03:37", "remaining_time": "0:12:49", "throughput": 440.1, "total_tokens": 95936}
{"current_steps": 105, "total_steps": 453, "loss": 0.0417, "learning_rate": 4.3659609736217344e-05, "epoch": 0.695364238410596, "percentage": 23.18, "elapsed_time": "0:03:49", "remaining_time": "0:12:39", "throughput": 439.61, "total_tokens": 100704}
{"current_steps": 110, "total_steps": 453, "loss": 0.0437, "learning_rate": 4.3071583141612135e-05, "epoch": 0.7284768211920529, "percentage": 24.28, "elapsed_time": "0:03:59", "remaining_time": "0:12:28", "throughput": 439.17, "total_tokens": 105376}
{"current_steps": 115, "total_steps": 453, "loss": 0.0498, "learning_rate": 4.2461829733630435e-05, "epoch": 0.7615894039735099, "percentage": 25.39, "elapsed_time": "0:04:10", "remaining_time": "0:12:17", "throughput": 439.15, "total_tokens": 110208}
{"current_steps": 120, "total_steps": 453, "loss": 0.0239, "learning_rate": 4.1831082596828106e-05, "epoch": 0.7947019867549668, "percentage": 26.49, "elapsed_time": "0:04:21", "remaining_time": "0:12:06", "throughput": 438.31, "total_tokens": 114704}
{"current_steps": 125, "total_steps": 453, "loss": 0.0228, "learning_rate": 4.118010005576485e-05, "epoch": 0.8278145695364238, "percentage": 27.59, "elapsed_time": "0:04:32", "remaining_time": "0:11:54", "throughput": 439.58, "total_tokens": 119744}
{"current_steps": 130, "total_steps": 453, "loss": 0.039, "learning_rate": 4.050966476329793e-05, "epoch": 0.8609271523178808, "percentage": 28.7, "elapsed_time": "0:04:43", "remaining_time": "0:11:44", "throughput": 440.13, "total_tokens": 124736}
{"current_steps": 135, "total_steps": 453, "loss": 0.05, "learning_rate": 3.9820582759626825e-05, "epoch": 0.8940397350993378, "percentage": 29.8, "elapsed_time": "0:04:54", "remaining_time": "0:11:32", "throughput": 440.36, "total_tokens": 129552}
{"current_steps": 140, "total_steps": 453, "loss": 0.02, "learning_rate": 3.911368250322014e-05, "epoch": 0.9271523178807947, "percentage": 30.91, "elapsed_time": "0:05:04", "remaining_time": "0:11:21", "throughput": 441.12, "total_tokens": 134400}
{"current_steps": 145, "total_steps": 453, "loss": 0.0159, "learning_rate": 3.8389813874789856e-05, "epoch": 0.9602649006622517, "percentage": 32.01, "elapsed_time": "0:05:15", "remaining_time": "0:11:10", "throughput": 441.95, "total_tokens": 139424}
{"current_steps": 150, "total_steps": 453, "loss": 0.017, "learning_rate": 3.764984715551032e-05, "epoch": 0.9933774834437086, "percentage": 33.11, "elapsed_time": "0:05:25", "remaining_time": "0:10:58", "throughput": 442.86, "total_tokens": 144368}
{"current_steps": 155, "total_steps": 453, "loss": 0.0298, "learning_rate": 3.6894671980710574e-05, "epoch": 1.0264900662251655, "percentage": 34.22, "elapsed_time": "0:05:36", "remaining_time": "0:10:46", "throughput": 443.06, "total_tokens": 149040}
{"current_steps": 160, "total_steps": 453, "loss": 0.0357, "learning_rate": 3.612519627029787e-05, "epoch": 1.0596026490066226, "percentage": 35.32, "elapsed_time": "0:05:46", "remaining_time": "0:10:35", "throughput": 443.05, "total_tokens": 153712}
{"current_steps": 165, "total_steps": 453, "loss": 0.0185, "learning_rate": 3.534234513719821e-05, "epoch": 1.0927152317880795, "percentage": 36.42, "elapsed_time": "0:05:57", "remaining_time": "0:10:23", "throughput": 444.01, "total_tokens": 158640}
{"current_steps": 170, "total_steps": 453, "loss": 0.0339, "learning_rate": 3.4547059775126445e-05, "epoch": 1.1258278145695364, "percentage": 37.53, "elapsed_time": "0:06:08", "remaining_time": "0:10:12", "throughput": 444.2, "total_tokens": 163552}
{"current_steps": 175, "total_steps": 453, "loss": 0.0224, "learning_rate": 3.3740296327022984e-05, "epoch": 1.1589403973509933, "percentage": 38.63, "elapsed_time": "0:06:19", "remaining_time": "0:10:02", "throughput": 444.02, "total_tokens": 168352}
{"current_steps": 180, "total_steps": 453, "loss": 0.0192, "learning_rate": 3.292302473551757e-05, "epoch": 1.1920529801324504, "percentage": 39.74, "elapsed_time": "0:06:29", "remaining_time": "0:09:51", "throughput": 444.69, "total_tokens": 173312}
{"current_steps": 185, "total_steps": 453, "loss": 0.0275, "learning_rate": 3.20962275768022e-05, "epoch": 1.2251655629139073, "percentage": 40.84, "elapsed_time": "0:06:40", "remaining_time": "0:09:40", "throughput": 444.69, "total_tokens": 178160}
{"current_steps": 190, "total_steps": 453, "loss": 0.0086, "learning_rate": 3.126089887931515e-05, "epoch": 1.2582781456953642, "percentage": 41.94, "elapsed_time": "0:06:51", "remaining_time": "0:09:29", "throughput": 444.59, "total_tokens": 182944}
{"current_steps": 195, "total_steps": 453, "loss": 0.0166, "learning_rate": 3.0418042928656414e-05, "epoch": 1.2913907284768211, "percentage": 43.05, "elapsed_time": "0:07:02", "remaining_time": "0:09:18", "throughput": 445.07, "total_tokens": 187920}
{"current_steps": 200, "total_steps": 453, "loss": 0.0167, "learning_rate": 2.9568673060171326e-05, "epoch": 1.3245033112582782, "percentage": 44.15, "elapsed_time": "0:07:13", "remaining_time": "0:09:08", "throughput": 445.5, "total_tokens": 192992}
{"current_steps": 205, "total_steps": 453, "loss": 0.0137, "learning_rate": 2.8713810440653926e-05, "epoch": 1.3576158940397351, "percentage": 45.25, "elapsed_time": "0:07:24", "remaining_time": "0:08:57", "throughput": 444.49, "total_tokens": 197616}
{"current_steps": 210, "total_steps": 453, "loss": 0.0142, "learning_rate": 2.7854482840634965e-05, "epoch": 1.390728476821192, "percentage": 46.36, "elapsed_time": "0:07:35", "remaining_time": "0:08:46", "throughput": 444.44, "total_tokens": 202400}
{"current_steps": 215, "total_steps": 453, "loss": 0.0136, "learning_rate": 2.6991723398730383e-05, "epoch": 1.423841059602649, "percentage": 47.46, "elapsed_time": "0:07:46", "remaining_time": "0:08:36", "throughput": 444.3, "total_tokens": 207216}
{"current_steps": 220, "total_steps": 453, "loss": 0.0184, "learning_rate": 2.6126569379535985e-05, "epoch": 1.4569536423841059, "percentage": 48.57, "elapsed_time": "0:07:57", "remaining_time": "0:08:25", "throughput": 443.79, "total_tokens": 211744}
{"current_steps": 225, "total_steps": 453, "loss": 0.0098, "learning_rate": 2.526006092656161e-05, "epoch": 1.490066225165563, "percentage": 49.67, "elapsed_time": "0:08:08", "remaining_time": "0:08:14", "throughput": 443.82, "total_tokens": 216608}
{"current_steps": 230, "total_steps": 453, "loss": 0.012, "learning_rate": 2.4393239811704e-05, "epoch": 1.5231788079470199, "percentage": 50.77, "elapsed_time": "0:08:18", "remaining_time": "0:08:03", "throughput": 444.28, "total_tokens": 221552}
{"current_steps": 235, "total_steps": 453, "loss": 0.0106, "learning_rate": 2.3527148182762054e-05, "epoch": 1.5562913907284768, "percentage": 51.88, "elapsed_time": "0:08:29", "remaining_time": "0:07:52", "throughput": 443.85, "total_tokens": 226272}
{"current_steps": 240, "total_steps": 453, "loss": 0.0128, "learning_rate": 2.2662827310499995e-05, "epoch": 1.589403973509934, "percentage": 52.98, "elapsed_time": "0:08:40", "remaining_time": "0:07:42", "throughput": 443.72, "total_tokens": 231072}
{"current_steps": 245, "total_steps": 453, "loss": 0.0139, "learning_rate": 2.1801316336765126e-05, "epoch": 1.6225165562913908, "percentage": 54.08, "elapsed_time": "0:08:51", "remaining_time": "0:07:31", "throughput": 443.33, "total_tokens": 235728}
{"current_steps": 250, "total_steps": 453, "loss": 0.0084, "learning_rate": 2.0943651025164932e-05, "epoch": 1.6556291390728477, "percentage": 55.19, "elapsed_time": "0:09:02", "remaining_time": "0:07:20", "throughput": 443.48, "total_tokens": 240560}
{"current_steps": 255, "total_steps": 453, "loss": 0.0143, "learning_rate": 2.0090862515805898e-05, "epoch": 1.6887417218543046, "percentage": 56.29, "elapsed_time": "0:09:13", "remaining_time": "0:07:09", "throughput": 443.53, "total_tokens": 245408}
{"current_steps": 260, "total_steps": 453, "loss": 0.011, "learning_rate": 1.9243976085590824e-05, "epoch": 1.7218543046357615, "percentage": 57.4, "elapsed_time": "0:09:24", "remaining_time": "0:06:58", "throughput": 443.94, "total_tokens": 250400}
{"current_steps": 265, "total_steps": 453, "loss": 0.0127, "learning_rate": 1.840400991556541e-05, "epoch": 1.7549668874172184, "percentage": 58.5, "elapsed_time": "0:09:35", "remaining_time": "0:06:48", "throughput": 443.72, "total_tokens": 255216}
{"current_steps": 270, "total_steps": 453, "loss": 0.0127, "learning_rate": 1.7571973866795815e-05, "epoch": 1.7880794701986755, "percentage": 59.6, "elapsed_time": "0:09:45", "remaining_time": "0:06:37", "throughput": 443.93, "total_tokens": 260080}
{"current_steps": 275, "total_steps": 453, "loss": 0.0122, "learning_rate": 1.6748868266249114e-05, "epoch": 1.8211920529801324, "percentage": 60.71, "elapsed_time": "0:09:56", "remaining_time": "0:06:26", "throughput": 443.72, "total_tokens": 264848}
{"current_steps": 280, "total_steps": 453, "loss": 0.0169, "learning_rate": 1.5935682704136183e-05, "epoch": 1.8543046357615895, "percentage": 61.81, "elapsed_time": "0:10:07", "remaining_time": "0:06:15", "throughput": 443.77, "total_tokens": 269776}
{"current_steps": 285, "total_steps": 453, "loss": 0.0242, "learning_rate": 1.5133394844163093e-05, "epoch": 1.8874172185430464, "percentage": 62.91, "elapsed_time": "0:10:18", "remaining_time": "0:06:04", "throughput": 444.05, "total_tokens": 274752}
{"current_steps": 290, "total_steps": 453, "loss": 0.0079, "learning_rate": 1.4342969248121185e-05, "epoch": 1.9205298013245033, "percentage": 64.02, "elapsed_time": "0:10:29", "remaining_time": "0:05:53", "throughput": 443.8, "total_tokens": 279440}
{"current_steps": 295, "total_steps": 453, "loss": 0.0194, "learning_rate": 1.3565356216229268e-05, "epoch": 1.9536423841059603, "percentage": 65.12, "elapsed_time": "0:10:41", "remaining_time": "0:05:43", "throughput": 443.36, "total_tokens": 284288}
{"current_steps": 300, "total_steps": 453, "loss": 0.0133, "learning_rate": 1.2801490644621789e-05, "epoch": 1.9867549668874172, "percentage": 66.23, "elapsed_time": "0:10:52", "remaining_time": "0:05:32", "throughput": 443.09, "total_tokens": 289216}
{"current_steps": 305, "total_steps": 453, "loss": 0.0049, "learning_rate": 1.2052290901357025e-05, "epoch": 2.019867549668874, "percentage": 67.33, "elapsed_time": "0:11:04", "remaining_time": "0:05:22", "throughput": 442.42, "total_tokens": 293992}
{"current_steps": 310, "total_steps": 453, "loss": 0.0121, "learning_rate": 1.1318657722296097e-05, "epoch": 2.052980132450331, "percentage": 68.43, "elapsed_time": "0:11:15", "remaining_time": "0:05:11", "throughput": 442.9, "total_tokens": 299048}
{"current_steps": 315, "total_steps": 453, "loss": 0.0102, "learning_rate": 1.0601473128180855e-05, "epoch": 2.0860927152317883, "percentage": 69.54, "elapsed_time": "0:11:25", "remaining_time": "0:05:00", "throughput": 442.89, "total_tokens": 303768}
{"current_steps": 320, "total_steps": 453, "loss": 0.0072, "learning_rate": 9.90159936421197e-06, "epoch": 2.119205298013245, "percentage": 70.64, "elapsed_time": "0:11:37", "remaining_time": "0:04:49", "throughput": 443.13, "total_tokens": 308872}
{"current_steps": 325, "total_steps": 453, "loss": 0.0066, "learning_rate": 9.219877863402682e-06, "epoch": 2.152317880794702, "percentage": 71.74, "elapsed_time": "0:11:48", "remaining_time": "0:04:38", "throughput": 443.2, "total_tokens": 313800}
{"current_steps": 330, "total_steps": 453, "loss": 0.0119, "learning_rate": 8.55712823495419e-06, "epoch": 2.185430463576159, "percentage": 72.85, "elapsed_time": "0:11:59", "remaining_time": "0:04:28", "throughput": 442.86, "total_tokens": 318536}
{"current_steps": 335, "total_steps": 453, "loss": 0.0057, "learning_rate": 7.91414727886898e-06, "epoch": 2.218543046357616, "percentage": 73.95, "elapsed_time": "0:12:10", "remaining_time": "0:04:17", "throughput": 442.91, "total_tokens": 323512}
{"current_steps": 340, "total_steps": 453, "loss": 0.0136, "learning_rate": 7.291708027986988e-06, "epoch": 2.251655629139073, "percentage": 75.06, "elapsed_time": "0:12:21", "remaining_time": "0:04:06", "throughput": 443.28, "total_tokens": 328552}
{"current_steps": 345, "total_steps": 453, "loss": 0.0078, "learning_rate": 6.690558818595943e-06, "epoch": 2.2847682119205297, "percentage": 76.16, "elapsed_time": "0:12:32", "remaining_time": "0:03:55", "throughput": 443.17, "total_tokens": 333272}
{"current_steps": 350, "total_steps": 453, "loss": 0.0141, "learning_rate": 6.111422390733715e-06, "epoch": 2.3178807947019866, "percentage": 77.26, "elapsed_time": "0:12:43", "remaining_time": "0:03:44", "throughput": 442.62, "total_tokens": 337816}
{"current_steps": 355, "total_steps": 453, "loss": 0.014, "learning_rate": 5.55499501926394e-06, "epoch": 2.3509933774834435, "percentage": 78.37, "elapsed_time": "0:12:54", "remaining_time": "0:03:33", "throughput": 442.57, "total_tokens": 342792}
{"current_steps": 360, "total_steps": 453, "loss": 0.0045, "learning_rate": 5.02194567676986e-06, "epoch": 2.384105960264901, "percentage": 79.47, "elapsed_time": "0:13:05", "remaining_time": "0:03:23", "throughput": 441.94, "total_tokens": 347288}
{"current_steps": 365, "total_steps": 453, "loss": 0.0062, "learning_rate": 4.51291522927268e-06, "epoch": 2.4172185430463577, "percentage": 80.57, "elapsed_time": "0:13:16", "remaining_time": "0:03:12", "throughput": 441.94, "total_tokens": 352088}
{"current_steps": 370, "total_steps": 453, "loss": 0.0028, "learning_rate": 4.028515665741439e-06, "epoch": 2.4503311258278146, "percentage": 81.68, "elapsed_time": "0:13:28", "remaining_time": "0:03:01", "throughput": 441.91, "total_tokens": 357096}
{"current_steps": 375, "total_steps": 453, "loss": 0.0022, "learning_rate": 3.5693293623207086e-06, "epoch": 2.4834437086092715, "percentage": 82.78, "elapsed_time": "0:13:39", "remaining_time": "0:02:50", "throughput": 441.82, "total_tokens": 361928}
{"current_steps": 380, "total_steps": 453, "loss": 0.0061, "learning_rate": 3.135908382160771e-06, "epoch": 2.5165562913907285, "percentage": 83.89, "elapsed_time": "0:13:49", "remaining_time": "0:02:39", "throughput": 441.87, "total_tokens": 366632}
{"current_steps": 385, "total_steps": 453, "loss": 0.0077, "learning_rate": 2.728773811691923e-06, "epoch": 2.5496688741721854, "percentage": 84.99, "elapsed_time": "0:14:00", "remaining_time": "0:02:28", "throughput": 441.94, "total_tokens": 371352}
{"current_steps": 390, "total_steps": 453, "loss": 0.0053, "learning_rate": 2.348415134141102e-06, "epoch": 2.5827814569536423, "percentage": 86.09, "elapsed_time": "0:14:10", "remaining_time": "0:02:17", "throughput": 441.82, "total_tokens": 375976}
{"current_steps": 395, "total_steps": 453, "loss": 0.0021, "learning_rate": 1.995289641043768e-06, "epoch": 2.6158940397350996, "percentage": 87.2, "elapsed_time": "0:14:21", "remaining_time": "0:02:06", "throughput": 442.19, "total_tokens": 381016}
{"current_steps": 400, "total_steps": 453, "loss": 0.0063, "learning_rate": 1.6698218824588164e-06, "epoch": 2.6490066225165565, "percentage": 88.3, "elapsed_time": "0:14:32", "remaining_time": "0:01:55", "throughput": 442.11, "total_tokens": 385688}
{"current_steps": 405, "total_steps": 453, "loss": 0.0052, "learning_rate": 1.3724031565473112e-06, "epoch": 2.6821192052980134, "percentage": 89.4, "elapsed_time": "0:14:43", "remaining_time": "0:01:44", "throughput": 442.09, "total_tokens": 390584}
{"current_steps": 410, "total_steps": 453, "loss": 0.0051, "learning_rate": 1.1033910391288065e-06, "epoch": 2.7152317880794703, "percentage": 90.51, "elapsed_time": "0:14:54", "remaining_time": "0:01:33", "throughput": 442.47, "total_tokens": 395576}
{"current_steps": 415, "total_steps": 453, "loss": 0.0084, "learning_rate": 8.631089537808307e-07, "epoch": 2.748344370860927, "percentage": 91.61, "elapsed_time": "0:15:04", "remaining_time": "0:01:22", "throughput": 442.54, "total_tokens": 400328}
{"current_steps": 420, "total_steps": 453, "loss": 0.0122, "learning_rate": 6.51845782998356e-07, "epoch": 2.781456953642384, "percentage": 92.72, "elapsed_time": "0:15:15", "remaining_time": "0:01:11", "throughput": 442.77, "total_tokens": 405192}
{"current_steps": 425, "total_steps": 453, "loss": 0.006, "learning_rate": 4.698555208807853e-07, "epoch": 2.814569536423841, "percentage": 93.82, "elapsed_time": "0:15:25", "remaining_time": "0:01:01", "throughput": 442.71, "total_tokens": 409912}
{"current_steps": 430, "total_steps": 453, "loss": 0.0044, "learning_rate": 3.1735696776400703e-07, "epoch": 2.847682119205298, "percentage": 94.92, "elapsed_time": "0:15:36", "remaining_time": "0:00:50", "throughput": 442.88, "total_tokens": 414792}
{"current_steps": 435, "total_steps": 453, "loss": 0.0091, "learning_rate": 1.9453346716462317e-07, "epoch": 2.880794701986755, "percentage": 96.03, "elapsed_time": "0:15:47", "remaining_time": "0:00:39", "throughput": 442.96, "total_tokens": 419752}
{"current_steps": 440, "total_steps": 453, "loss": 0.005, "learning_rate": 1.0153268535264827e-07, "epoch": 2.9139072847682117, "percentage": 97.13, "elapsed_time": "0:15:58", "remaining_time": "0:00:28", "throughput": 443.07, "total_tokens": 424808}
{"current_steps": 445, "total_steps": 453, "loss": 0.0051, "learning_rate": 3.846643381766879e-08, "epoch": 2.9470198675496686, "percentage": 98.23, "elapsed_time": "0:16:09", "remaining_time": "0:00:17", "throughput": 443.03, "total_tokens": 429704}
{"current_steps": 450, "total_steps": 453, "loss": 0.0049, "learning_rate": 5.41053484192644e-09, "epoch": 2.980132450331126, "percentage": 99.34, "elapsed_time": "0:16:21", "remaining_time": "0:00:06", "throughput": 442.9, "total_tokens": 434536}
{"current_steps": 453, "total_steps": 453, "epoch": 3.0, "percentage": 100.0, "elapsed_time": "0:16:28", "remaining_time": "0:00:00", "throughput": 442.38, "total_tokens": 437216}