{
  "best_metric": 1.5331557989120483,
  "best_model_checkpoint": "output/og-buda/checkpoint-750",
  "epoch": 5.0,
  "global_step": 750,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.03,
      "learning_rate": 0.00013683401763576845,
      "loss": 2.7577,
      "step": 5
    },
    {
      "epoch": 0.07,
      "learning_rate": 0.0001357399755894565,
      "loss": 2.4782,
      "step": 10
    },
    {
      "epoch": 0.1,
      "learning_rate": 0.00013392954733321228,
      "loss": 2.3702,
      "step": 15
    },
    {
      "epoch": 0.13,
      "learning_rate": 0.00013142205020853694,
      "loss": 2.2696,
      "step": 20
    },
    {
      "epoch": 0.16,
      "learning_rate": 0.00012824423930948275,
      "loss": 2.2909,
      "step": 25
    },
    {
      "epoch": 0.2,
      "learning_rate": 0.00012443002200473538,
      "loss": 2.2339,
      "step": 30
    },
    {
      "epoch": 0.23,
      "learning_rate": 0.00012002009614464126,
      "loss": 2.1468,
      "step": 35
    },
    {
      "epoch": 0.26,
      "learning_rate": 0.00011506151581352585,
      "loss": 2.2085,
      "step": 40
    },
    {
      "epoch": 0.3,
      "learning_rate": 0.00010960718926074231,
      "loss": 2.0772,
      "step": 45
    },
    {
      "epoch": 0.33,
      "learning_rate": 0.00010371531436754662,
      "loss": 2.1209,
      "step": 50
    },
    {
      "epoch": 0.36,
      "learning_rate": 9.744875767338973e-05,
      "loss": 2.1212,
      "step": 55
    },
    {
      "epoch": 0.39,
      "learning_rate": 9.08743835874413e-05,
      "loss": 2.1052,
      "step": 60
    },
    {
      "epoch": 0.43,
      "learning_rate": 8.40623409426843e-05,
      "loss": 2.0228,
      "step": 65
    },
    {
      "epoch": 0.46,
      "learning_rate": 7.708531450507727e-05,
      "loss": 2.0757,
      "step": 70
    },
    {
      "epoch": 0.49,
      "learning_rate": 7.001774942421023e-05,
      "loss": 2.001,
      "step": 75
    },
    {
      "epoch": 0.53,
      "learning_rate": 6.293505690059801e-05,
      "loss": 1.9417,
      "step": 80
    },
    {
      "epoch": 0.56,
      "learning_rate": 5.59128095451711e-05,
      "loss": 2.0215,
      "step": 85
    },
    {
      "epoch": 0.59,
      "learning_rate": 4.902593501651139e-05,
      "loss": 2.0056,
      "step": 90
    },
    {
      "epoch": 0.62,
      "learning_rate": 4.2347916539754844e-05,
      "loss": 2.0711,
      "step": 95
    },
    {
      "epoch": 0.66,
      "learning_rate": 3.5950008837656755e-05,
      "loss": 2.0311,
      "step": 100
    },
    {
      "epoch": 0.69,
      "learning_rate": 2.9900477839865778e-05,
      "loss": 1.9157,
      "step": 105
    },
    {
      "epoch": 0.72,
      "learning_rate": 2.4263872282739445e-05,
      "loss": 2.0822,
      "step": 110
    },
    {
      "epoch": 0.76,
      "learning_rate": 1.910033497175995e-05,
      "loss": 2.0431,
      "step": 115
    },
    {
      "epoch": 0.79,
      "learning_rate": 1.4464961055407408e-05,
      "loss": 1.9706,
      "step": 120
    },
    {
      "epoch": 0.82,
      "learning_rate": 1.040721015773445e-05,
      "loss": 1.9533,
      "step": 125
    },
    {
      "epoch": 0.86,
      "learning_rate": 6.97037864221025e-06,
      "loss": 1.968,
      "step": 130
    },
    {
      "epoch": 0.89,
      "learning_rate": 4.1911376378003306e-06,
      "loss": 1.9282,
      "step": 135
    },
    {
      "epoch": 0.92,
      "learning_rate": 2.0991417565619363e-06,
      "loss": 1.9382,
      "step": 140
    },
    {
      "epoch": 0.95,
      "learning_rate": 7.167126777538922e-07,
      "loss": 1.9811,
      "step": 145
    },
    {
      "epoch": 0.99,
      "learning_rate": 5.860097463115006e-08,
      "loss": 1.9187,
      "step": 150
    },
    {
      "epoch": 1.0,
      "eval_loss": 1.8772083520889282,
      "eval_runtime": 9.7424,
      "eval_samples_per_second": 22.274,
      "eval_steps_per_second": 2.874,
      "step": 152
    },
    {
      "epoch": 1.02,
      "learning_rate": 1.3182872524144475e-07,
      "loss": 1.9052,
      "step": 155
    },
    {
      "epoch": 1.05,
      "learning_rate": 9.35614586573241e-07,
      "loss": 1.9644,
      "step": 160
    },
    {
      "epoch": 1.09,
      "learning_rate": 2.461382131516746e-06,
      "loss": 1.9317,
      "step": 165
    },
    {
      "epoch": 1.12,
      "learning_rate": 4.692851359682261e-06,
      "loss": 2.0608,
      "step": 170
    },
    {
      "epoch": 1.15,
      "learning_rate": 7.606212405651899e-06,
      "loss": 1.8731,
      "step": 175
    },
    {
      "epoch": 1.18,
      "learning_rate": 1.1170379591190527e-05,
      "loss": 1.8801,
      "step": 180
    },
    {
      "epoch": 1.22,
      "learning_rate": 1.5347323110669033e-05,
      "loss": 1.8913,
      "step": 185
    },
    {
      "epoch": 1.25,
      "learning_rate": 2.009247481060283e-05,
      "loss": 1.9579,
      "step": 190
    },
    {
      "epoch": 1.28,
      "learning_rate": 2.5355203733622272e-05,
      "loss": 1.9638,
      "step": 195
    },
    {
      "epoch": 1.32,
      "learning_rate": 3.1079356352801514e-05,
      "loss": 1.9192,
      "step": 200
    },
    {
      "epoch": 1.35,
      "learning_rate": 3.7203855732024184e-05,
      "loss": 1.9342,
      "step": 205
    },
    {
      "epoch": 1.38,
      "learning_rate": 4.3663353219321276e-05,
      "loss": 1.9085,
      "step": 210
    },
    {
      "epoch": 1.41,
      "learning_rate": 5.038892571958626e-05,
      "loss": 1.9788,
      "step": 215
    },
    {
      "epoch": 1.45,
      "learning_rate": 5.7308811106741675e-05,
      "loss": 1.9669,
      "step": 220
    },
    {
      "epoch": 1.48,
      "learning_rate": 6.434917392847842e-05,
      "loss": 1.927,
      "step": 225
    },
    {
      "epoch": 1.51,
      "learning_rate": 7.143489323346855e-05,
      "loss": 1.9403,
      "step": 230
    },
    {
      "epoch": 1.55,
      "learning_rate": 7.849036411490396e-05,
      "loss": 1.8539,
      "step": 235
    },
    {
      "epoch": 1.58,
      "learning_rate": 8.54403044178588e-05,
      "loss": 1.9165,
      "step": 240
    },
    {
      "epoch": 1.61,
      "learning_rate": 9.221055800287699e-05,
      "loss": 1.906,
      "step": 245
    },
    {
      "epoch": 1.64,
      "learning_rate": 9.87288859949296e-05,
      "loss": 1.852,
      "step": 250
    },
    {
      "epoch": 1.68,
      "learning_rate": 0.00010492573757508462,
      "loss": 1.8733,
      "step": 255
    },
    {
      "epoch": 1.71,
      "learning_rate": 0.00011073499209051121,
      "loss": 1.874,
      "step": 260
    },
    {
      "epoch": 1.74,
      "learning_rate": 0.00011609466456447531,
      "loss": 1.8853,
      "step": 265
    },
    {
      "epoch": 1.78,
      "learning_rate": 0.00012094756707850673,
      "loss": 1.9022,
      "step": 270
    },
    {
      "epoch": 1.81,
      "learning_rate": 0.0001252419189697637,
      "loss": 1.8298,
      "step": 275
    },
    {
      "epoch": 1.84,
      "learning_rate": 0.00012893189933276512,
      "loss": 1.9105,
      "step": 280
    },
    {
      "epoch": 1.88,
      "learning_rate": 0.00013197813593027427,
      "loss": 1.8641,
      "step": 285
    },
    {
      "epoch": 1.91,
      "learning_rate": 0.00013434812529663614,
      "loss": 1.8622,
      "step": 290
    },
    {
      "epoch": 1.94,
      "learning_rate": 0.00013601657955104887,
      "loss": 1.906,
      "step": 295
    },
    {
      "epoch": 1.97,
      "learning_rate": 0.00013696569622025754,
      "loss": 1.9195,
      "step": 300
    },
    {
      "epoch": 2.0,
      "eval_loss": 1.8195273876190186,
      "eval_runtime": 9.7405,
      "eval_samples_per_second": 22.278,
      "eval_steps_per_second": 2.875,
      "step": 304
    },
    {
      "epoch": 2.01,
      "learning_rate": 0.0001371853481916521,
      "loss": 1.7633,
      "step": 305
    },
    {
      "epoch": 2.04,
      "learning_rate": 0.00013667319176996132,
      "loss": 1.8122,
      "step": 310
    },
    {
      "epoch": 2.07,
      "learning_rate": 0.00013543469168457496,
      "loss": 1.8677,
      "step": 315
    },
    {
      "epoch": 2.11,
      "learning_rate": 0.00013348306278066356,
      "loss": 1.8531,
      "step": 320
    },
    {
      "epoch": 2.14,
      "learning_rate": 0.000130839129016255,
      "loss": 1.8181,
      "step": 325
    },
    {
      "epoch": 2.17,
      "learning_rate": 0.00012753110126977412,
      "loss": 1.8011,
      "step": 330
    },
    {
      "epoch": 2.2,
      "learning_rate": 0.0001235942763288477,
      "loss": 1.8196,
      "step": 335
    },
    {
      "epoch": 2.24,
      "learning_rate": 0.00011907066027217684,
      "loss": 1.8093,
      "step": 340
    },
    {
      "epoch": 2.27,
      "learning_rate": 0.00011400852026300626,
      "loss": 1.8397,
      "step": 345
    },
    {
      "epoch": 2.3,
      "learning_rate": 0.00010846186953657296,
      "loss": 1.7362,
      "step": 350
    },
    {
      "epoch": 2.34,
      "learning_rate": 0.00010248989107673776,
      "loss": 1.6683,
      "step": 355
    },
    {
      "epoch": 2.37,
      "learning_rate": 9.615630613119375e-05,
      "loss": 1.8356,
      "step": 360
    },
    {
      "epoch": 2.4,
      "learning_rate": 8.952869430321868e-05,
      "loss": 1.8106,
      "step": 365
    },
    {
      "epoch": 2.43,
      "learning_rate": 8.267777247462053e-05,
      "loss": 1.8027,
      "step": 370
    },
    {
      "epoch": 2.47,
      "learning_rate": 7.567664025379571e-05,
      "loss": 1.7443,
      "step": 375
    },
    {
      "epoch": 2.5,
      "learning_rate": 6.860000000000001e-05,
      "loss": 1.7579,
      "step": 380
    },
    {
      "epoch": 2.53,
      "learning_rate": 6.152335974620434e-05,
      "loss": 1.7847,
      "step": 385
    },
    {
      "epoch": 2.57,
      "learning_rate": 5.45222275253795e-05,
      "loss": 1.7549,
      "step": 390
    },
    {
      "epoch": 2.6,
      "learning_rate": 4.76713056967813e-05,
      "loss": 1.7157,
      "step": 395
    },
    {
      "epoch": 2.63,
      "learning_rate": 4.1043693868806304e-05,
      "loss": 1.7905,
      "step": 400
    },
    {
      "epoch": 2.66,
      "learning_rate": 3.471010892326228e-05,
      "loss": 1.7419,
      "step": 405
    },
    {
      "epoch": 2.7,
      "learning_rate": 2.873813046342713e-05,
      "loss": 1.7189,
      "step": 410
    },
    {
      "epoch": 2.73,
      "learning_rate": 2.3191479736993775e-05,
      "loss": 1.7705,
      "step": 415
    },
    {
      "epoch": 2.76,
      "learning_rate": 1.812933972782316e-05,
      "loss": 1.6987,
      "step": 420
    },
    {
      "epoch": 2.8,
      "learning_rate": 1.3605723671152311e-05,
      "loss": 1.7951,
      "step": 425
    },
    {
      "epoch": 2.83,
      "learning_rate": 9.668898730225861e-06,
      "loss": 1.7204,
      "step": 430
    },
    {
      "epoch": 2.86,
      "learning_rate": 6.360870983745002e-06,
      "loss": 1.8063,
      "step": 435
    },
    {
      "epoch": 2.89,
      "learning_rate": 3.7169372193364486e-06,
      "loss": 1.6961,
      "step": 440
    },
    {
      "epoch": 2.93,
      "learning_rate": 1.7653083154250801e-06,
      "loss": 1.6979,
      "step": 445
    },
    {
      "epoch": 2.96,
      "learning_rate": 5.268082300387043e-07,
      "loss": 1.7594,
      "step": 450
    },
    {
      "epoch": 2.99,
      "learning_rate": 1.465180834793276e-08,
      "loss": 1.7887,
      "step": 455
    },
    {
      "epoch": 3.0,
      "eval_loss": 1.7267966270446777,
      "eval_runtime": 9.7478,
      "eval_samples_per_second": 22.262,
      "eval_steps_per_second": 2.872,
      "step": 456
    },
    {
      "epoch": 2.99,
      "learning_rate": 5.708896496161406e-08,
      "loss": 1.777,
      "step": 460
    },
    {
      "epoch": 3.0,
      "eval_loss": 1.6688779592514038,
      "eval_runtime": 9.6717,
      "eval_samples_per_second": 20.576,
      "eval_steps_per_second": 2.585,
      "step": 462
    },
    {
      "epoch": 3.1,
      "learning_rate": 3.35752298215246e-06,
      "loss": 1.6773,
      "step": 465
    },
    {
      "epoch": 3.13,
      "learning_rate": 5.930781605717588e-06,
      "loss": 1.7743,
      "step": 470
    },
    {
      "epoch": 3.17,
      "learning_rate": 9.190657300387474e-06,
      "loss": 1.6782,
      "step": 475
    },
    {
      "epoch": 3.2,
      "learning_rate": 1.310143418587859e-05,
      "loss": 1.7254,
      "step": 480
    },
    {
      "epoch": 3.23,
      "learning_rate": 1.7620264972250762e-05,
      "loss": 1.7116,
      "step": 485
    },
    {
      "epoch": 3.27,
      "learning_rate": 2.269764040378228e-05,
      "loss": 1.737,
      "step": 490
    },
    {
      "epoch": 3.3,
      "learning_rate": 2.827793169273632e-05,
      "loss": 1.6298,
      "step": 495
    },
    {
      "epoch": 3.33,
      "learning_rate": 3.4300000000000014e-05,
      "loss": 1.784,
      "step": 500
    },
    {
      "epoch": 3.37,
      "learning_rate": 4.069786628500004e-05,
      "loss": 1.7561,
      "step": 505
    },
    {
      "epoch": 3.4,
      "learning_rate": 4.740143418587858e-05,
      "loss": 1.7454,
      "step": 510
    },
    {
      "epoch": 3.43,
      "learning_rate": 5.4337258009901596e-05,
      "loss": 1.8009,
      "step": 515
    },
    {
      "epoch": 3.47,
      "learning_rate": 6.142934741983902e-05,
      "loss": 1.7686,
      "step": 520
    },
    {
      "epoch": 3.5,
      "learning_rate": 6.859999999999997e-05,
      "loss": 1.697,
      "step": 525
    },
    {
      "epoch": 3.53,
      "learning_rate": 7.577065258016093e-05,
      "loss": 1.6804,
      "step": 530
    },
    {
      "epoch": 3.57,
      "learning_rate": 8.286274199009834e-05,
      "loss": 1.7485,
      "step": 535
    },
    {
      "epoch": 3.6,
      "learning_rate": 8.979856581412137e-05,
      "loss": 1.7389,
      "step": 540
    },
    {
      "epoch": 3.63,
      "learning_rate": 9.650213371499991e-05,
      "loss": 1.6993,
      "step": 545
    },
    {
      "epoch": 3.67,
      "learning_rate": 0.00010289999999999994,
      "loss": 1.7811,
      "step": 550
    },
    {
      "epoch": 3.7,
      "learning_rate": 0.00010892206830726364,
      "loss": 1.6838,
      "step": 555
    },
    {
      "epoch": 3.73,
      "learning_rate": 0.00011450235959621768,
      "loss": 1.76,
      "step": 560
    },
    {
      "epoch": 3.77,
      "learning_rate": 0.0001195797350277492,
      "loss": 1.7419,
      "step": 565
    },
    {
      "epoch": 3.8,
      "learning_rate": 0.00012409856581412136,
      "loss": 1.7739,
      "step": 570
    },
    {
      "epoch": 3.83,
      "learning_rate": 0.0001280093426996125,
      "loss": 1.7108,
      "step": 575
    },
    {
      "epoch": 3.87,
      "learning_rate": 0.0001312692183942824,
      "loss": 1.7828,
      "step": 580
    },
    {
      "epoch": 3.9,
      "learning_rate": 0.00013384247701784751,
      "loss": 1.7588,
      "step": 585
    },
    {
      "epoch": 3.93,
      "learning_rate": 0.00013570092541033904,
      "loss": 1.6845,
      "step": 590
    },
    {
      "epoch": 3.97,
      "learning_rate": 0.00013682420202226357,
      "loss": 1.7817,
      "step": 595
    },
    {
      "epoch": 4.0,
      "learning_rate": 0.0001372,
      "loss": 1.7611,
      "step": 600
    },
    {
      "epoch": 4.0,
      "eval_loss": 1.6418354511260986,
      "eval_runtime": 10.0255,
      "eval_samples_per_second": 22.942,
      "eval_steps_per_second": 2.893,
      "step": 600
    },
    {
      "epoch": 4.03,
      "learning_rate": 0.00013682420202226357,
      "loss": 1.7067,
      "step": 605
    },
    {
      "epoch": 4.07,
      "learning_rate": 0.0001357009254103391,
      "loss": 1.7111,
      "step": 610
    },
    {
      "epoch": 4.1,
      "learning_rate": 0.0001338424770178476,
      "loss": 1.6736,
      "step": 615
    },
    {
      "epoch": 4.13,
      "learning_rate": 0.00013126921839428241,
      "loss": 1.7152,
      "step": 620
    },
    {
      "epoch": 4.17,
      "learning_rate": 0.00012800934269961248,
      "loss": 1.7257,
      "step": 625
    },
    {
      "epoch": 4.2,
      "learning_rate": 0.00012409856581412142,
      "loss": 1.7207,
      "step": 630
    },
    {
      "epoch": 4.23,
      "learning_rate": 0.00011957973502774922,
      "loss": 1.6457,
      "step": 635
    },
    {
      "epoch": 4.27,
      "learning_rate": 0.00011450235959621773,
      "loss": 1.713,
      "step": 640
    },
    {
      "epoch": 4.3,
      "learning_rate": 0.00010892206830726369,
      "loss": 1.7242,
      "step": 645
    },
    {
      "epoch": 4.33,
      "learning_rate": 0.00010290000000000009,
      "loss": 1.6386,
      "step": 650
    },
    {
      "epoch": 4.37,
      "learning_rate": 9.650213371499996e-05,
      "loss": 1.6539,
      "step": 655
    },
    {
      "epoch": 4.4,
      "learning_rate": 8.97985658141213e-05,
      "loss": 1.7034,
      "step": 660
    },
    {
      "epoch": 4.43,
      "learning_rate": 8.286274199009828e-05,
      "loss": 1.6681,
      "step": 665
    },
    {
      "epoch": 4.47,
      "learning_rate": 7.577065258016099e-05,
      "loss": 1.6742,
      "step": 670
    },
    {
      "epoch": 4.5,
      "learning_rate": 6.860000000000003e-05,
      "loss": 1.687,
      "step": 675
    },
    {
      "epoch": 4.53,
      "learning_rate": 6.14293474198391e-05,
      "loss": 1.7066,
      "step": 680
    },
    {
      "epoch": 4.57,
      "learning_rate": 5.433725800990179e-05,
      "loss": 1.6622,
      "step": 685
    },
    {
      "epoch": 4.6,
      "learning_rate": 4.740143418587876e-05,
      "loss": 1.6647,
      "step": 690
    },
    {
      "epoch": 4.63,
      "learning_rate": 4.069786628500011e-05,
      "loss": 1.6251,
      "step": 695
    },
    {
      "epoch": 4.67,
      "learning_rate": 3.429999999999996e-05,
      "loss": 1.6732,
      "step": 700
    },
    {
      "epoch": 4.7,
      "learning_rate": 2.8277931692736372e-05,
      "loss": 1.5917,
      "step": 705
    },
    {
      "epoch": 4.73,
      "learning_rate": 2.2697640403782324e-05,
      "loss": 1.5938,
      "step": 710
    },
    {
      "epoch": 4.77,
      "learning_rate": 1.762026497225081e-05,
      "loss": 1.7145,
      "step": 715
    },
    {
      "epoch": 4.8,
      "learning_rate": 1.3101434185878628e-05,
      "loss": 1.6437,
      "step": 720
    },
    {
      "epoch": 4.83,
      "learning_rate": 9.190657300387574e-06,
      "loss": 1.5361,
      "step": 725
    },
    {
      "epoch": 4.87,
      "learning_rate": 5.930781605717611e-06,
      "loss": 1.5239,
      "step": 730
    },
    {
      "epoch": 4.9,
      "learning_rate": 3.3575229821524373e-06,
      "loss": 1.656,
      "step": 735
    },
    {
      "epoch": 4.93,
      "learning_rate": 1.4990745896609297e-06,
      "loss": 1.6033,
      "step": 740
    },
    {
      "epoch": 4.97,
      "learning_rate": 3.757979777364447e-07,
      "loss": 1.6277,
      "step": 745
    },
    {
      "epoch": 5.0,
      "learning_rate": 0.0,
      "loss": 1.5745,
      "step": 750
    },
    {
      "epoch": 5.0,
      "eval_loss": 1.5331557989120483,
      "eval_runtime": 11.0656,
      "eval_samples_per_second": 21.327,
      "eval_steps_per_second": 2.711,
      "step": 750
    }
  ],
  "max_steps": 750,
  "num_train_epochs": 5,
  "total_flos": 781001883648000.0,
  "trial_name": null,
  "trial_params": null
}