{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 10.0,
  "global_step": 1920,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {"epoch": 0.03, "learning_rate": 1.25e-05, "loss": 2.1623, "step": 5},
    {"epoch": 0.05, "learning_rate": 2.5e-05, "loss": 2.0298, "step": 10},
    {"epoch": 0.08, "learning_rate": 3.7500000000000003e-05, "loss": 1.9422, "step": 15},
    {"epoch": 0.1, "learning_rate": 5e-05, "loss": 2.0168, "step": 20},
    {"epoch": 0.13, "learning_rate": 4.999914564160437e-05, "loss": 1.9103, "step": 25},
    {"epoch": 0.16, "learning_rate": 4.9996582624811725e-05, "loss": 2.1249, "step": 30},
    {"epoch": 0.18, "learning_rate": 4.9992311124800875e-05, "loss": 2.002, "step": 35},
    {"epoch": 0.21, "learning_rate": 4.9986331433523156e-05, "loss": 2.004, "step": 40},
    {"epoch": 0.23, "learning_rate": 4.997864395968252e-05, "loss": 1.9562, "step": 45},
    {"epoch": 0.26, "learning_rate": 4.996924922870762e-05, "loss": 2.078, "step": 50},
    {"epoch": 0.29, "learning_rate": 4.995814788271582e-05, "loss": 1.9552, "step": 55},
    {"epoch": 0.31, "learning_rate": 4.994534068046937e-05, "loss": 2.033, "step": 60},
    {"epoch": 0.34, "learning_rate": 4.9930828497323526e-05, "loss": 1.9229, "step": 65},
    {"epoch": 0.36, "learning_rate": 4.991461232516675e-05, "loss": 1.8923, "step": 70},
    {"epoch": 0.39, "learning_rate": 4.9896693272352846e-05, "loss": 2.0122, "step": 75},
    {"epoch": 0.42, "learning_rate": 4.9877072563625285e-05, "loss": 1.8244, "step": 80},
    {"epoch": 0.44, "learning_rate": 4.985575154003345e-05, "loss": 1.976, "step": 85},
    {"epoch": 0.47, "learning_rate": 4.9832731658840956e-05, "loss": 1.8806, "step": 90},
    {"epoch": 0.49, "learning_rate": 4.980801449342613e-05, "loss": 1.7659, "step": 95},
    {"epoch": 0.52, "learning_rate": 4.978160173317438e-05, "loss": 1.8351, "step": 100},
    {"epoch": 0.55, "learning_rate": 4.9753495183362796e-05, "loss": 1.7661, "step": 105},
    {"epoch": 0.57, "learning_rate": 4.972369676503672e-05, "loss": 1.9104, "step": 110},
    {"epoch": 0.6, "learning_rate": 4.9692208514878444e-05, "loss": 1.9618, "step": 115},
    {"epoch": 0.62, "learning_rate": 4.965903258506806e-05, "loss": 1.874, "step": 120},
    {"epoch": 0.65, "learning_rate": 4.96241712431363e-05, "loss": 1.7711, "step": 125},
    {"epoch": 0.68, "learning_rate": 4.958762687180956e-05, "loss": 1.7548, "step": 130},
    {"epoch": 0.7, "learning_rate": 4.95494019688471e-05, "loss": 1.9196, "step": 135},
    {"epoch": 0.73, "learning_rate": 4.9509499146870236e-05, "loss": 1.8337, "step": 140},
    {"epoch": 0.76, "learning_rate": 4.946792113318386e-05, "loss": 2.0006, "step": 145},
    {"epoch": 0.78, "learning_rate": 4.9424670769589984e-05, "loss": 1.8703, "step": 150},
    {"epoch": 0.81, "learning_rate": 4.93797510121935e-05, "loss": 1.8343, "step": 155},
    {"epoch": 0.83, "learning_rate": 4.933316493120015e-05, "loss": 1.8636, "step": 160},
    {"epoch": 0.86, "learning_rate": 4.9284915710706695e-05, "loss": 1.9264, "step": 165},
    {"epoch": 0.89, "learning_rate": 4.923500664848326e-05, "loss": 1.9231, "step": 170},
    {"epoch": 0.91, "learning_rate": 4.918344115574796e-05, "loss": 1.6458, "step": 175},
    {"epoch": 0.94, "learning_rate": 4.913022275693372e-05, "loss": 1.8824, "step": 180},
    {"epoch": 0.96, "learning_rate": 4.907535508944741e-05, "loss": 1.7897, "step": 185},
    {"epoch": 0.99, "learning_rate": 4.901884190342121e-05, "loss": 1.8587, "step": 190},
    {"epoch": 1.02, "learning_rate": 4.8960687061456324e-05, "loss": 1.4371, "step": 195},
    {"epoch": 1.04, "learning_rate": 4.8900894538358944e-05, "loss": 1.3405, "step": 200},
    {"epoch": 1.07, "learning_rate": 4.8839468420868606e-05, "loss": 1.1991, "step": 205},
    {"epoch": 1.09, "learning_rate": 4.877641290737884e-05, "loss": 1.2276, "step": 210},
    {"epoch": 1.12, "learning_rate": 4.871173230765024e-05, "loss": 1.3785, "step": 215},
    {"epoch": 1.15, "learning_rate": 4.864543104251587e-05, "loss": 1.2454, "step": 220},
    {"epoch": 1.17, "learning_rate": 4.857751364357913e-05, "loss": 1.1988, "step": 225},
    {"epoch": 1.2, "learning_rate": 4.850798475290403e-05, "loss": 1.2279, "step": 230},
    {"epoch": 1.22, "learning_rate": 4.8436849122697883e-05, "loss": 1.2478, "step": 235},
    {"epoch": 1.25, "learning_rate": 4.8364111614986527e-05, "loss": 1.3029, "step": 240},
    {"epoch": 1.28, "learning_rate": 4.8289777201281974e-05, "loss": 1.1754, "step": 245},
    {"epoch": 1.3, "learning_rate": 4.821385096224268e-05, "loss": 1.2921, "step": 250},
    {"epoch": 1.33, "learning_rate": 4.8136338087326216e-05, "loss": 1.3902, "step": 255},
    {"epoch": 1.35, "learning_rate": 4.805724387443462e-05, "loss": 1.3123, "step": 260},
    {"epoch": 1.38, "learning_rate": 4.797657372955228e-05, "loss": 1.2955, "step": 265},
    {"epoch": 1.41, "learning_rate": 4.789433316637644e-05, "loss": 1.3642, "step": 270},
    {"epoch": 1.43, "learning_rate": 4.781052780594034e-05, "loss": 1.3868, "step": 275},
    {"epoch": 1.46, "learning_rate": 4.7725163376229064e-05, "loss": 1.2758, "step": 280},
    {"epoch": 1.48, "learning_rate": 4.763824571178798e-05, "loss": 1.327, "step": 285},
    {"epoch": 1.51, "learning_rate": 4.754978075332398e-05, "loss": 1.1598, "step": 290},
    {"epoch": 1.54, "learning_rate": 4.7459774547299475e-05, "loss": 1.2948, "step": 295},
    {"epoch": 1.56, "learning_rate": 4.736823324551909e-05, "loss": 1.2907, "step": 300},
    {"epoch": 1.59, "learning_rate": 4.72751631047092e-05, "loss": 1.2316, "step": 305},
    {"epoch": 1.61, "learning_rate": 4.71805704860903e-05, "loss": 1.3136, "step": 310},
    {"epoch": 1.64, "learning_rate": 4.708446185494222e-05, "loss": 1.3186, "step": 315},
    {"epoch": 1.67, "learning_rate": 4.698684378016222e-05, "loss": 1.4358, "step": 320},
    {"epoch": 1.69, "learning_rate": 4.6887722933816076e-05, "loss": 1.2789, "step": 325},
    {"epoch": 1.72, "learning_rate": 4.678710609068193e-05, "loss": 1.3904, "step": 330},
    {"epoch": 1.74, "learning_rate": 4.668500012778738e-05, "loss": 1.3282, "step": 335},
    {"epoch": 1.77, "learning_rate": 4.6581412023939354e-05, "loss": 1.1926, "step": 340},
    {"epoch": 1.8, "learning_rate": 4.6476348859247134e-05, "loss": 1.3098, "step": 345},
    {"epoch": 1.82, "learning_rate": 4.6369817814638475e-05, "loss": 1.1817, "step": 350},
    {"epoch": 1.85, "learning_rate": 4.6261826171368774e-05, "loss": 1.1436, "step": 355},
    {"epoch": 1.88, "learning_rate": 4.6152381310523387e-05, "loss": 1.1436, "step": 360},
    {"epoch": 1.9, "learning_rate": 4.604149071251318e-05, "loss": 1.2308, "step": 365},
    {"epoch": 1.93, "learning_rate": 4.592916195656322e-05, "loss": 1.2855, "step": 370},
    {"epoch": 1.95, "learning_rate": 4.581540272019476e-05, "loss": 1.2501, "step": 375},
    {"epoch": 1.98, "learning_rate": 4.5700220778700504e-05, "loss": 1.3435, "step": 380},
    {"epoch": 2.01, "learning_rate": 4.5583624004613145e-05, "loss": 1.2623, "step": 385},
    {"epoch": 2.03, "learning_rate": 4.546562036716732e-05, "loss": 0.6914, "step": 390},
    {"epoch": 2.06, "learning_rate": 4.534621793175487e-05, "loss": 0.6295, "step": 395},
    {"epoch": 2.08, "learning_rate": 4.522542485937369e-05, "loss": 0.7132, "step": 400},
    {"epoch": 2.11, "learning_rate": 4.510324940606979e-05, "loss": 0.6092, "step": 405},
    {"epoch": 2.14, "learning_rate": 4.497969992237312e-05, "loss": 0.6314, "step": 410},
    {"epoch": 2.16, "learning_rate": 4.4854784852726776e-05, "loss": 0.7211, "step": 415},
    {"epoch": 2.19, "learning_rate": 4.4728512734909844e-05, "loss": 0.6992, "step": 420},
    {"epoch": 2.21, "learning_rate": 4.460089219945382e-05, "loss": 0.7503, "step": 425},
    {"epoch": 2.24, "learning_rate": 4.4471931969052816e-05, "loss": 0.6703, "step": 430},
    {"epoch": 2.27, "learning_rate": 4.434164085796724e-05, "loss": 0.6396, "step": 435},
    {"epoch": 2.29, "learning_rate": 4.421002777142148e-05, "loss": 0.6972, "step": 440},
    {"epoch": 2.32, "learning_rate": 4.4077101704995166e-05, "loss": 0.6558, "step": 445},
    {"epoch": 2.34, "learning_rate": 4.3942871744008374e-05, "loss": 0.7016, "step": 450},
    {"epoch": 2.37, "learning_rate": 4.3807347062900624e-05, "loss": 0.6722, "step": 455},
    {"epoch": 2.4, "learning_rate": 4.367053692460385e-05, "loss": 0.7364, "step": 460},
    {"epoch": 2.42, "learning_rate": 4.3532450679909274e-05, "loss": 0.6927, "step": 465},
    {"epoch": 2.45, "learning_rate": 4.3393097766828293e-05, "loss": 0.6704, "step": 470},
    {"epoch": 2.47, "learning_rate": 4.325248770994741e-05, "loss": 0.6888, "step": 475},
    {"epoch": 2.5, "learning_rate": 4.311063011977723e-05, "loss": 0.6381, "step": 480},
    {"epoch": 2.53, "learning_rate": 4.29675346920956e-05, "loss": 0.7126, "step": 485},
    {"epoch": 2.55, "learning_rate": 4.282321120728493e-05, "loss": 0.7115, "step": 490},
    {"epoch": 2.58, "learning_rate": 4.267766952966369e-05, "loss": 0.7172, "step": 495},
    {"epoch": 2.6, "learning_rate": 4.2530919606812216e-05, "loss": 0.6488, "step": 500},
    {"epoch": 2.63, "learning_rate": 4.2382971468892806e-05, "loss": 0.6716, "step": 505},
    {"epoch": 2.66, "learning_rate": 4.223383522796415e-05, "loss": 0.6279, "step": 510},
    {"epoch": 2.68, "learning_rate": 4.2083521077290213e-05, "loss": 0.6189, "step": 515},
    {"epoch": 2.71, "learning_rate": 4.193203929064353e-05, "loss": 0.671, "step": 520},
    {"epoch": 2.73, "learning_rate": 4.177940022160299e-05, "loss": 0.7658, "step": 525},
    {"epoch": 2.76, "learning_rate": 4.16256143028462e-05, "loss": 0.673, "step": 530},
    {"epoch": 2.79, "learning_rate": 4.147069204543645e-05, "loss": 0.7758, "step": 535},
    {"epoch": 2.81, "learning_rate": 4.131464403810422e-05, "loss": 0.6823, "step": 540},
    {"epoch": 2.84, "learning_rate": 4.115748094652352e-05, "loss": 0.7035, "step": 545},
    {"epoch": 2.86, "learning_rate": 4.099921351258292e-05, "loss": 0.6478, "step": 550},
    {"epoch": 2.89, "learning_rate": 4.0839852553651265e-05, "loss": 0.8168, "step": 555},
    {"epoch": 2.92, "learning_rate": 4.067940896183843e-05, "loss": 0.7212, "step": 560},
    {"epoch": 2.94, "learning_rate": 4.051789370325078e-05, "loss": 0.6609, "step": 565},
    {"epoch": 2.97, "learning_rate": 4.03553178172417e-05, "loss": 0.7815, "step": 570},
    {"epoch": 2.99, "learning_rate": 4.019169241565703e-05, "loss": 0.6535, "step": 575},
    {"epoch": 3.02, "learning_rate": 4.002702868207563e-05, "loss": 0.4454, "step": 580},
    {"epoch": 3.05, "learning_rate": 3.9861337871044954e-05, "loss": 0.3374, "step": 585},
    {"epoch": 3.07, "learning_rate": 3.969463130731183e-05, "loss": 0.3974, "step": 590},
    {"epoch": 3.1, "learning_rate": 3.952692038504846e-05, "loss": 0.3395, "step": 595},
    {"epoch": 3.12, "learning_rate": 3.935821656707359e-05, "loss": 0.3572, "step": 600},
    {"epoch": 3.15, "learning_rate": 3.9188531384069096e-05, "loss": 0.3072, "step": 605},
    {"epoch": 3.18, "learning_rate": 3.901787643379182e-05, "loss": 0.3527, "step": 610},
    {"epoch": 3.2, "learning_rate": 3.884626338028094e-05, "loss": 0.338, "step": 615},
    {"epoch": 3.23, "learning_rate": 3.867370395306068e-05, "loss": 0.357, "step": 620},
    {"epoch": 3.26, "learning_rate": 3.850020994633868e-05, "loss": 0.3689, "step": 625},
    {"epoch": 3.28, "learning_rate": 3.832579321819985e-05, "loss": 0.3469, "step": 630},
    {"epoch": 3.31, "learning_rate": 3.8150465689795854e-05, "loss": 0.3679, "step": 635},
    {"epoch": 3.33, "learning_rate": 3.797423934453038e-05, "loss": 0.3651, "step": 640},
    {"epoch": 3.36, "learning_rate": 3.779712622724003e-05, "loss": 0.3828, "step": 645},
    {"epoch": 3.39, "learning_rate": 3.76191384433711e-05, "loss": 0.3727, "step": 650},
    {"epoch": 3.41, "learning_rate": 3.7440288158152187e-05, "loss": 0.3554, "step": 655},
    {"epoch": 3.44, "learning_rate": 3.726058759576271e-05, "loss": 0.3658, "step": 660},
    {"epoch": 3.46, "learning_rate": 3.708004903849741e-05, "loss": 0.3765, "step": 665},
    {"epoch": 3.49, "learning_rate": 3.689868482592684e-05, "loss": 0.3733, "step": 670},
    {"epoch": 3.52, "learning_rate": 3.671650735405404e-05, "loss": 0.4086, "step": 675},
    {"epoch": 3.54, "learning_rate": 3.65335290744672e-05, "loss": 0.3753, "step": 680},
    {"epoch": 3.57, "learning_rate": 3.634976249348867e-05, "loss": 0.3704, "step": 685},
    {"epoch": 3.59, "learning_rate": 3.616522017132017e-05, "loss": 0.3939, "step": 690},
    {"epoch": 3.62, "learning_rate": 3.597991472118426e-05, "loss": 0.3691, "step": 695},
    {"epoch": 3.65, "learning_rate": 3.579385880846232e-05, "loss": 0.3804, "step": 700},
    {"epoch": 3.67, "learning_rate": 3.5607065149828843e-05, "loss": 0.385, "step": 705},
    {"epoch": 3.7, "learning_rate": 3.5419546512382266e-05, "loss": 0.3773, "step": 710},
    {"epoch": 3.72, "learning_rate": 3.523131571277235e-05, "loss": 0.402, "step": 715},
    {"epoch": 3.75, "learning_rate": 3.504238561632424e-05, "loss": 0.3561, "step": 720},
    {"epoch": 3.78, "learning_rate": 3.485276913615905e-05, "loss": 0.3542, "step": 725},
    {"epoch": 3.8, "learning_rate": 3.4662479232311306e-05, "loss": 0.354, "step": 730},
    {"epoch": 3.83, "learning_rate": 3.447152891084319e-05, "loss": 0.4085, "step": 735},
    {"epoch": 3.85, "learning_rate": 3.427993122295552e-05, "loss": 0.3853, "step": 740},
    {"epoch": 3.88, "learning_rate": 3.4087699264095745e-05, "loss": 0.3909, "step": 745},
    {"epoch": 3.91, "learning_rate": 3.389484617306292e-05, "loss": 0.3882, "step": 750},
    {"epoch": 3.93, "learning_rate": 3.3701385131109616e-05, "loss": 0.3917, "step": 755},
    {"epoch": 3.96, "learning_rate": 3.350732936104108e-05, "loss": 0.3825, "step": 760},
    {"epoch": 3.98, "learning_rate": 3.3312692126311425e-05, "loss": 0.379, "step": 765},
    {"epoch": 4.01, "learning_rate": 3.311748673011709e-05, "loss": 0.3357, "step": 770},
    {"epoch": 4.04, "learning_rate": 3.2921726514487614e-05, "loss": 0.2284, "step": 775},
    {"epoch": 4.06, "learning_rate": 3.272542485937369e-05, "loss": 0.2536, "step": 780},
    {"epoch": 4.09, "learning_rate": 3.252859518173269e-05, "loss": 0.2258, "step": 785},
    {"epoch": 4.11, "learning_rate": 3.2331250934611624e-05, "loss": 0.2522, "step": 790},
    {"epoch": 4.14, "learning_rate": 3.213340560622763e-05, "loss": 0.2166, "step": 795},
    {"epoch": 4.17, "learning_rate": 3.1935072719046115e-05, "loss": 0.2441, "step": 800},
    {"epoch": 4.19, "learning_rate": 3.173626582885645e-05, "loss": 0.2381, "step": 805},
    {"epoch": 4.22, "learning_rate": 3.1536998523845494e-05, "loss": 0.2642, "step": 810},
    {"epoch": 4.24, "learning_rate": 3.133728442366885e-05, "loss": 0.2315, "step": 815},
    {"epoch": 4.27, "learning_rate": 3.1137137178519985e-05, "loss": 0.2342, "step": 820},
    {"epoch": 4.3, "learning_rate": 3.093657046819722e-05, "loss": 0.2378, "step": 825},
    {"epoch": 4.32, "learning_rate": 3.073559800116879e-05, "loss": 0.2383, "step": 830},
    {"epoch": 4.35, "learning_rate": 3.053423351363586e-05, "loss": 0.2597, "step": 835},
    {"epoch": 4.38, "learning_rate": 3.0332490768593675e-05, "loss": 0.2262, "step": 840},
    {"epoch": 4.4, "learning_rate": 3.0130383554890856e-05, "loss": 0.2244, "step": 845},
    {"epoch": 4.43, "learning_rate": 2.9927925686287006e-05, "loss": 0.2385, "step": 850},
    {"epoch": 4.45, "learning_rate": 2.972513100050851e-05, "loss": 0.2521, "step": 855},
    {"epoch": 4.48, "learning_rate": 2.952201335830275e-05, "loss": 0.2357, "step": 860},
    {"epoch": 4.51, "learning_rate": 2.9318586642490763e-05, "loss": 0.249, "step": 865},
    {"epoch": 4.53, "learning_rate": 2.9114864757018352e-05, "loss": 0.2807, "step": 870},
    {"epoch": 4.56, "learning_rate": 2.8910861626005776e-05, "loss": 0.2273, "step": 875},
    {"epoch": 4.58, "learning_rate": 2.870659119279605e-05, "loss": 0.253, "step": 880},
    {"epoch": 4.61, "learning_rate": 2.850206741900195e-05, "loss": 0.2429, "step": 885},
    {"epoch": 4.64, "learning_rate": 2.8297304283551728e-05, "loss": 0.2613, "step": 890},
    {"epoch": 4.66, "learning_rate": 2.8092315781733696e-05, "loss": 0.223, "step": 895},
    {"epoch": 4.69, "learning_rate": 2.788711592423966e-05, "loss": 0.2265, "step": 900},
    {"epoch": 4.71, "learning_rate": 2.7681718736207298e-05, "loss": 0.2338, "step": 905},
    {"epoch": 4.74, "learning_rate": 2.7476138256261575e-05, "loss": 0.2393, "step": 910},
    {"epoch": 4.77, "learning_rate": 2.727038853555521e-05, "loss": 0.2447, "step": 915},
    {"epoch": 4.79, "learning_rate": 2.7064483636808313e-05, "loss": 0.2319, "step": 920},
    {"epoch": 4.82, "learning_rate": 2.6858437633347194e-05, "loss": 0.2762, "step": 925},
    {"epoch": 4.84, "learning_rate": 2.6652264608142484e-05, "loss": 0.24, "step": 930},
    {"epoch": 4.87, "learning_rate": 2.6445978652846602e-05, "loss": 0.2366, "step": 935},
    {"epoch": 4.9, "learning_rate": 2.623959386683056e-05, "loss": 0.2354, "step": 940},
    {"epoch": 4.92, "learning_rate": 2.6033124356220328e-05, "loss": 0.247, "step": 945},
    {"epoch": 4.95, "learning_rate": 2.5826584232932706e-05, "loss": 0.2143, "step": 950},
    {"epoch": 4.97, "learning_rate": 2.5619987613710756e-05, "loss": 0.2412, "step": 955},
    {"epoch": 5.0, "learning_rate": 2.5413348619158967e-05, "loss": 0.2407, "step": 960},
    {"epoch": 5.03, "learning_rate": 2.5206681372778124e-05, "loss": 0.1533, "step": 965},
    {"epoch": 5.05, "learning_rate": 2.5e-05, "loss": 0.1639, "step": 970},
    {"epoch": 5.08, "learning_rate": 2.4793318627221878e-05, "loss": 0.1375, "step": 975},
    {"epoch": 5.1, "learning_rate": 2.458665138084104e-05, "loss": 0.1514, "step": 980},
    {"epoch": 5.13, "learning_rate": 2.438001238628925e-05, "loss": 0.1482, "step": 985},
    {"epoch": 5.16, "learning_rate": 2.4173415767067297e-05, "loss": 0.1462, "step": 990},
    {"epoch": 5.18, "learning_rate": 2.3966875643779667e-05, "loss": 0.1428, "step": 995},
    {"epoch": 5.21, "learning_rate": 2.3760406133169443e-05, "loss": 0.1342, "step": 1000},
    {"epoch": 5.23, "learning_rate": 2.3554021347153403e-05, "loss": 0.1363, "step": 1005},
    {"epoch": 5.26, "learning_rate": 2.334773539185752e-05, "loss": 0.1293, "step": 1010},
    {"epoch": 5.29, "learning_rate": 2.3141562366652812e-05, "loss": 0.1418, "step": 1015},
    {"epoch": 5.31, "learning_rate": 2.2935516363191693e-05, "loss": 0.14, "step": 1020},
    {"epoch": 5.34, "learning_rate": 2.2729611464444794e-05, "loss": 0.1384, "step": 1025},
    {"epoch": 5.36, "learning_rate": 2.2523861743738434e-05, "loss": 0.1404, "step": 1030},
    {"epoch": 5.39, "learning_rate": 2.231828126379271e-05, "loss": 0.1367, "step": 1035},
    {"epoch": 5.42, "learning_rate": 2.2112884075760347e-05, "loss": 0.1497, "step": 1040},
    {"epoch": 5.44, "learning_rate": 2.190768421826631e-05, "loss": 0.1347, "step": 1045},
    {"epoch": 5.47, "learning_rate": 2.1702695716448278e-05, "loss": 0.1437, "step": 1050},
    {"epoch": 5.49, "learning_rate": 2.1497932580998053e-05, "loss": 0.1393, "step": 1055},
    {"epoch": 5.52, "learning_rate": 2.1293408807203947e-05, "loss": 0.1673, "step": 1060},
    {"epoch": 5.55, "learning_rate": 2.1089138373994223e-05, "loss": 0.1413, "step": 1065},
    {"epoch": 5.57, "learning_rate": 2.088513524298165e-05, "loss": 0.1402, "step": 1070},
    {"epoch": 5.6, "learning_rate": 2.068141335750925e-05, "loss": 0.1345, "step": 1075},
    {"epoch": 5.62, "learning_rate": 2.047798664169726e-05, "loss": 0.144, "step": 1080},
    {"epoch": 5.65, "learning_rate": 2.02748689994915e-05, "loss": 0.1436, "step": 1085},
    {"epoch": 5.68, "learning_rate": 2.0072074313712997e-05, "loss": 0.1416, "step": 1090},
    {"epoch": 5.7, "learning_rate": 1.9869616445109147e-05, "loss": 0.1463, "step": 1095},
    {"epoch": 5.73, "learning_rate": 1.9667509231406334e-05, "loss": 0.1383, "step": 1100},
    {"epoch": 5.76, "learning_rate": 1.9465766486364143e-05, "loss": 0.1288, "step": 1105},
    {"epoch": 5.78, "learning_rate": 1.9264401998831213e-05, "loss": 0.147, "step": 1110},
    {"epoch": 5.81, "learning_rate": 1.9063429531802786e-05, "loss": 0.1297, "step": 1115},
    {"epoch": 5.83, "learning_rate": 1.8862862821480025e-05, "loss": 0.1242, "step": 1120},
    {"epoch": 5.86, "learning_rate": 1.866271557633115e-05, "loss": 0.14, "step": 1125},
    {"epoch": 5.89, "learning_rate": 1.8463001476154508e-05, "loss": 0.1331, "step": 1130},
    {"epoch": 5.91, "learning_rate": 1.826373417114355e-05, "loss": 0.1461, "step": 1135},
    {"epoch": 5.94, "learning_rate": 1.806492728095389e-05, "loss": 0.1305, "step": 1140},
    {"epoch": 5.96, "learning_rate": 1.7866594393772373e-05, "loss": 0.1244, "step": 1145},
    {"epoch": 5.99, "learning_rate": 1.7668749065388385e-05, "loss": 0.1427, "step": 1150},
    {"epoch": 6.02, "learning_rate": 1.7471404818267316e-05, "loss": 0.0886, "step": 1155},
    {"epoch": 6.04, "learning_rate": 1.7274575140626318e-05, "loss": 0.0576, "step": 1160},
    {"epoch": 6.07, "learning_rate": 1.7078273485512392e-05, "loss": 0.0725, "step": 1165},
    {"epoch": 6.09, "learning_rate": 1.6882513269882917e-05, "loss": 0.0488, "step": 1170},
    {"epoch": 6.12, "learning_rate": 1.668730787368858e-05, "loss": 0.0484, "step": 1175},
    {"epoch": 6.15, "learning_rate": 1.6492670638958924e-05, "loss": 0.0465, "step": 1180},
    {"epoch": 6.17, "learning_rate": 1.6298614868890387e-05, "loss": 0.0607, "step": 1185},
    {"epoch": 6.2, "learning_rate": 1.6105153826937085e-05, "loss": 0.0452, "step": 1190},
    {"epoch": 6.22, "learning_rate": 1.591230073590425e-05, "loss": 0.0572, "step": 1195},
    {"epoch": 6.25, "learning_rate": 1.5720068777044476e-05, "loss": 0.0626, "step": 1200},
    {"epoch": 6.28, "learning_rate": 1.5528471089156804e-05, "loss": 0.0487, "step": 1205},
    {"epoch": 6.3, "learning_rate": 1.5337520767688703e-05, "loss": 0.0539, "step": 1210},
    {"epoch": 6.33, "learning_rate": 1.5147230863840966e-05, "loss": 0.0514, "step": 1215},
    {"epoch": 6.35, "learning_rate": 1.495761438367577e-05, "loss": 0.0626, "step": 1220},
    {"epoch": 6.38, "learning_rate": 1.4768684287227652e-05, "loss": 0.0611, "step": 1225},
    {"epoch": 6.41, "learning_rate": 1.4580453487617745e-05, "loss": 0.0717, "step": 1230},
    {"epoch": 6.43, "learning_rate": 1.439293485017116e-05, "loss": 0.0608, "step": 1235},
    {"epoch": 6.46, "learning_rate": 1.4206141191537682e-05, "loss": 0.0665, "step": 1240},
    {"epoch": 6.48, "learning_rate": 1.4020085278815745e-05, "loss": 0.0489, "step": 1245},
    {"epoch": 6.51, "learning_rate": 1.383477982867984e-05, "loss": 0.052, "step": 1250},
    {"epoch": 6.54, "learning_rate": 1.3650237506511331e-05, "loss": 0.0664, "step": 1255},
    {"epoch": 6.56, "learning_rate": 1.346647092553281e-05, "loss": 0.0651, "step": 1260},
    {"epoch": 6.59, "learning_rate": 1.3283492645945966e-05, "loss": 0.0488, "step": 1265},
    {"epoch": 6.61, "learning_rate": 1.3101315174073162e-05, "loss": 0.0669, "step": 1270},
    {"epoch": 6.64, "learning_rate": 1.2919950961502603e-05, "loss": 0.0561, "step": 1275},
    {"epoch": 6.67, "learning_rate": 1.2739412404237306e-05, "loss": 0.0586, "step": 1280},
    {"epoch": 6.69, "learning_rate": 1.255971184184783e-05, "loss": 0.0632, "step": 1285},
    {"epoch": 6.72, "learning_rate": 1.2380861556628915e-05, "loss": 0.0618, "step": 1290},
    {"epoch": 6.74, "learning_rate": 1.2202873772759981e-05, "loss": 0.0526, "step": 1295},
    {"epoch": 6.77, "learning_rate": 1.202576065546963e-05, "loss": 0.0604, "step": 1300},
    {"epoch": 6.8, "learning_rate": 1.1849534310204152e-05, "loss": 0.0538, "step": 1305},
    {"epoch": 6.82, "learning_rate": 1.1674206781800162e-05, "loss": 0.062, "step": 1310},
    {"epoch": 6.85, "learning_rate": 1.1499790053661327e-05, "loss": 0.062, "step": 1315},
    {"epoch": 6.88, "learning_rate": 1.1326296046939333e-05, "loss": 0.0588, "step": 1320},
    {"epoch": 6.9, "learning_rate": 1.1153736619719077e-05, "loss": 0.0506, "step": 1325},
    {"epoch": 6.93, "learning_rate": 1.0982123566208185e-05, "loss": 0.0606, "step": 1330},
    {"epoch": 6.95, "learning_rate": 1.0811468615930911e-05, "loss": 0.0594, "step": 1335},
    {"epoch": 6.98, "learning_rate": 1.064178343292641e-05, "loss": 0.0514, "step": 1340},
    {"epoch": 7.01, "learning_rate": 1.0473079614951545e-05, "loss": 0.0521, "step": 1345},
    {"epoch": 7.03, "learning_rate": 1.0305368692688174e-05, "loss": 0.0196, "step": 1350},
    {"epoch": 7.06, "learning_rate": 1.0138662128955053e-05, "loss": 0.0296, "step": 1355},
    {"epoch": 7.08, "learning_rate": 9.972971317924374e-06, "loss": 0.0168, "step": 1360},
    {"epoch": 7.11, "learning_rate": 9.808307584342971e-06, "loss": 0.0255, "step": 1365},
    {"epoch": 7.14, "learning_rate": 9.644682182758306e-06, "loss": 0.0287, "step": 1370},
    {"epoch": 7.16, "learning_rate": 9.482106296749221e-06, "loss": 0.0238, "step": 1375},
    {"epoch": 7.19, "learning_rate": 9.320591038161574e-06, "loss": 0.0243, "step": 1380},
    {"epoch": 7.21, "learning_rate": 9.160147446348739e-06, "loss": 0.0198, "step": 1385},
    {"epoch": 7.24, "learning_rate": 9.000786487417085e-06, "loss": 0.0231, "step": 1390},
    {"epoch": 7.27, "learning_rate": 8.842519053476476e-06, "loss": 0.0266, "step": 1395},
    {"epoch": 7.29, "learning_rate": 8.685355961895784e-06, "loss": 0.0184, "step": 1400},
    {"epoch": 7.32, "learning_rate": 8.52930795456355e-06, "loss": 0.03, "step": 1405},
    {"epoch": 7.34, "learning_rate": 8.374385697153792e-06, "loss": 0.0203, "step": 1410},
    {"epoch": 7.37, "learning_rate": 8.220599778397017e-06, "loss": 0.0264, "step": 1415},
    {"epoch": 7.4, "learning_rate": 8.067960709356478e-06, "loss": 0.0237, "step": 1420},
    {"epoch": 7.42, "learning_rate": 7.91647892270979e-06, "loss": 0.0251, "step": 1425},
    {"epoch": 7.45, "learning_rate": 7.766164772035856e-06, "loss": 0.0265, "step": 1430},
    {"epoch": 7.47, "learning_rate": 7.617028531107201e-06, "loss": 0.0248, "step": 1435},
    {"epoch": 7.5, "learning_rate": 7.469080393187786e-06, "loss": 0.0242, "step": 1440},
    {"epoch": 7.53, "learning_rate": 7.3223304703363135e-06, "loss": 0.0234, "step": 1445},
    {"epoch": 7.55, "learning_rate": 7.176788792715075e-06, "loss": 0.0193, "step": 1450},
    {"epoch": 7.58, "learning_rate": 7.032465307904404e-06, "loss": 0.0322, "step": 1455},
    {"epoch": 7.6, "learning_rate": 6.889369880222776e-06, "loss": 0.0263, "step": 1460},
    {"epoch": 7.63, "learning_rate": 6.747512290052596e-06, "loss": 0.0209, "step": 1465},
    {"epoch": 7.66, "learning_rate": 6.606902233171711e-06, "loss": 0.023, "step": 1470},
    {"epoch": 7.68, "learning_rate": 6.46754932009073e-06, "loss": 0.0266, "step": 1475},
    {"epoch": 7.71, "learning_rate": 6.329463075396161e-06, "loss": 0.0228, "step": 1480},
    {"epoch": 7.73, "learning_rate": 6.192652937099388e-06, "loss": 0.0268, "step": 1485},
    {"epoch": 7.76, "learning_rate": 6.057128255991637e-06, "loss": 0.0261, "step": 1490},
    {"epoch": 7.79, "learning_rate": 5.9228982950048416e-06, "loss": 0.0198, "step": 1495},
    {"epoch": 7.81, "learning_rate": 5.78997222857853e-06, "loss": 0.0219, "step": 1500},
    {"epoch": 7.84, "learning_rate": 5.6583591420327684e-06, "loss": 0.0194, "step": 1505},
    {"epoch": 7.86, "learning_rate": 5.528068030947192e-06, "loss": 0.0227, "step": 1510},
    {"epoch": 7.89, "learning_rate": 5.399107800546177e-06, "loss": 0.0297, "step": 1515},
    {"epoch": 7.92, "learning_rate": 5.271487265090163e-06, "loss": 0.0197, "step": 1520},
    {"epoch": 7.94, "learning_rate": 5.145215147273224e-06, "loss": 0.0336, "step": 1525},
    {"epoch": 7.97, "learning_rate": 5.0203000776268825e-06, "loss": 0.0132, "step": 1530},
    {"epoch": 7.99, "learning_rate": 4.896750593930216e-06, "loss": 0.0226, "step": 1535},
    {"epoch": 8.02, "learning_rate": 4.7745751406263165e-06, "loss": 0.0075, "step": 1540},
    {"epoch": 8.05, "learning_rate": 4.653782068245127e-06, "loss": 0.0124, "step": 1545},
    {"epoch": 8.07, "learning_rate": 4.534379632832692e-06, "loss": 0.0083, "step": 1550},
    {"epoch": 8.1, "learning_rate": 4.416375995386857e-06, "loss": 0.0104, "step": 1555},
    {"epoch": 8.12, "learning_rate": 4.299779221299499e-06, "loss": 0.009, "step": 1560},
    {"epoch": 8.15, "learning_rate": 4.184597279805241e-06, "loss": 0.0115, "step": 1565},
    {"epoch": 8.18, "learning_rate": 4.070838043436786e-06, "loss": 0.013, "step": 1570},
    {"epoch": 8.2, "learning_rate": 3.958509287486823e-06, "loss": 0.0132, "step": 1575},
    {"epoch": 8.23, "learning_rate": 3.847618689476612e-06, "loss": 0.01, "step": 1580},
    {"epoch": 8.26, "learning_rate": 3.738173828631228e-06, "loss": 0.0113, "step": 1585},
    {"epoch": 8.28, "learning_rate": 3.630182185361522e-06, "loss": 0.0105, "step": 1590},
    {"epoch": 8.31, "learning_rate": 3.523651140752868e-06, "loss": 0.0085, "step": 1595},
    {"epoch": 8.33, "learning_rate": 3.418587976060653e-06, "loss": 0.0201, "step": 1600},
    {"epoch": 8.36, "learning_rate": 3.314999872212618e-06, "loss": 0.011, "step": 1605},
    {"epoch": 8.39, "learning_rate": 3.2128939093180655e-06, "loss": 0.0106, "step": 1610},
    {"epoch": 8.41, "learning_rate": 3.11227706618393e-06, "loss": 0.0146, "step": 1615},
    {"epoch": 8.44, "learning_rate": 3.013156219837776e-06, "loss": 0.0123, "step": 1620},
    {"epoch": 8.46, "learning_rate": 2.9155381450577863e-06, "loss": 0.0128, "step": 1625},
    {"epoch": 8.49, "learning_rate": 2.8194295139097048e-06, "loss": 0.0093, "step": 1630},
    {"epoch": 8.52, "learning_rate": 2.7248368952908053e-06, "loss": 0.0085, "step": 1635},
    {"epoch": 8.54, "learning_rate": 2.6317667544809134e-06, "loss": 0.0072, "step": 1640},
    {"epoch": 8.57, "learning_rate": 2.5402254527005287e-06, "loss": 0.0103, "step": 1645},
    {"epoch": 8.59, "learning_rate": 2.4502192466760276e-06, "loss": 0.0156, "step": 1650},
    {"epoch": 8.62, "learning_rate": 2.361754288212031e-06, "loss": 0.0128, "step": 1655},
    {"epoch": 8.65, "learning_rate": 2.2748366237709374e-06, "loss": 0.0113, "step": 1660},
    {"epoch": 8.67, "learning_rate": 2.1894721940596554e-06, "loss": 0.0076, "step": 1665},
    {"epoch": 8.7, "learning_rate": 2.1056668336235622e-06, "loss": 0.01, "step": 1670},
    {"epoch": 8.72, "learning_rate": 2.023426270447723e-06, "loss": 0.0104, "step": 1675},
    {"epoch": 8.75, "learning_rate": 1.9427561255653816e-06, "loss": 0.0078, "step": 1680},
    {"epoch": 8.78, "learning_rate": 1.8636619126737892e-06, "loss": 0.0122, "step": 1685},
    {"epoch": 8.8, "learning_rate": 1.7861490377573258e-06, "loss": 0.008, "step": 1690},
    {"epoch": 8.83, "learning_rate": 1.710222798718028e-06, "loss": 0.0112, "step": 1695},
    {"epoch": 8.85, "learning_rate": 1.6358883850134816e-06, "loss": 0.0087, "step": 1700},
    {"epoch": 8.88, "learning_rate": 1.5631508773021165e-06, "loss": 0.0102, "step": 1705},
    {"epoch": 8.91, "learning_rate": 1.4920152470959707e-06, "loss": 0.0115, "step": 1710},
    {"epoch": 8.93, "learning_rate": 1.4224863564208684e-06, "loss": 0.0081, "step": 1715},
    {"epoch": 8.96, "learning_rate": 1.3545689574841342e-06, "loss": 0.0113, "step": 1720},
    {"epoch": 8.98, "learning_rate": 1.288267692349765e-06, "loss": 0.0119, "step": 1725},
    {"epoch": 9.01, "learning_rate": 1.2235870926211619e-06, "loss": 0.0062, "step": 1730},
    {"epoch": 9.04, "learning_rate": 1.1605315791313964e-06, "loss": 0.0063, "step": 1735},
    {"epoch": 9.06, "learning_rate": 1.0991054616410589e-06, "loss": 0.005, "step": 1740},
    {"epoch": 9.09, "learning_rate": 1.0393129385436824e-06, "loss": 0.0064, "step": 1745},
    {"epoch": 9.11, "learning_rate": 9.811580965787965e-07, "loss": 0.0061, "step": 1750},
    {"epoch": 9.14, "learning_rate": 9.246449105525995e-07, "loss": 0.0054, "step": 1755},
    {"epoch": 9.17, "learning_rate": 8.697772430662859e-07, "loss": 0.0052, "step": 1760},
    {"epoch": 9.19, "learning_rate": 8.165588442520439e-07, "loss": 0.004, "step": 1765},
    {"epoch": 9.22, "learning_rate": 7.649933515167407e-07, "loss": 0.0041, "step": 1770},
    {"epoch": 9.24, "learning_rate": 7.150842892933107e-07, "loss": 0.006, "step": 1775},
    {"epoch": 9.27, "learning_rate": 6.668350687998565e-07, "loss": 0.0059, "step": 1780},
    {"epoch": 9.3, "learning_rate": 6.202489878065071e-07, "loss": 0.0059, "step": 1785},
    {"epoch": 9.32, "learning_rate": 5.753292304100183e-07, "loss": 0.007, "step": 1790},
    {"epoch": 9.35, "learning_rate": 5.32078866816138e-07, "loss": 0.0077, "step": 1795},
    {"epoch": 9.38, "learning_rate": 4.905008531297661e-07, "loss": 0.0072, "step": 1800},
    {"epoch": 9.4, "learning_rate": 4.505980311529101e-07, "loss": 0.0028, "step": 1805},
    {"epoch": 9.43, "learning_rate": 4.1237312819044085e-07, "loss": 0.0073, "step": 1810},
    {"epoch": 9.45, "learning_rate": 3.758287568637081e-07, "loss": 0.0058, "step": 1815},
    {"epoch": 9.48, "learning_rate": 3.4096741493194197e-07, "loss": 0.0044, "step": 1820},
    {"epoch": 9.51, "learning_rate": 3.077914851215585e-07, "loss": 0.0078, "step": 1825},
    {"epoch": 9.53, "learning_rate": 2.763032349632877e-07, "loss": 0.0075, "step": 1830},
    {"epoch": 9.56, "learning_rate": 2.4650481663720525e-07, "loss": 0.0054, "step": 1835},
    {"epoch": 9.58, "learning_rate": 2.1839826682562015e-07, "loss": 0.0061, "step": 1840},
    {"epoch": 9.61, "learning_rate": 1.919855065738746e-07, "loss": 0.0041, "step": 1845},
    {"epoch": 9.64, "learning_rate": 1.6726834115904643e-07, "loss": 0.0068, "step": 1850},
    {"epoch": 9.66, "learning_rate": 1.4424845996655888e-07, "loss": 0.008, "step": 1855},
    {"epoch": 9.69, "learning_rate": 1.229274363747146e-07, "loss": 0.0107, "step": 1860},
    {"epoch": 9.71, "learning_rate": 1.0330672764715387e-07, "loss": 0.0071, "step": 1865},
    {"epoch": 9.74, "learning_rate": 8.538767483325383e-08, "loss": 0.0091, "step": 1870},
    {"epoch": 9.77, "learning_rate": 6.91715026764711e-08, "loss": 0.0057, "step": 1875},
    {"epoch": 9.79, "learning_rate": 5.4659319530636633e-08, "loss": 0.0064, "step": 1880},
    {"epoch": 9.82, "learning_rate": 4.18521172841857e-08, "loss": 0.0073, "step": 1885},
    {"epoch": 9.84, "learning_rate": 3.075077129238158e-08, "loss": 0.0066, "step": 1890},
    {"epoch": 9.87, "learning_rate": 2.1356040317474512e-08, "loss": 0.0052, "step": 1895},
    {"epoch": 9.9, "learning_rate": 1.3668566476848777e-08, "loss": 0.0054, "step": 1900},
    {"epoch": 9.92, "learning_rate": 7.688875199132751e-09, "loss": 0.0069, "step": 1905},
    {"epoch": 9.95, "learning_rate": 3.417375188274896e-09, "loss": 0.0045, "step": 1910},
    {"epoch": 9.97, "learning_rate": 8.543583956355239e-10, "loss": 0.0043, "step": 1915},
    {"epoch": 10.0, "learning_rate": 0.0, "loss": 0.0066, "step": 1920},
    {"epoch": 10.0, "step": 1920, "total_flos": 3556947756318720.0, "train_loss": 0.47322619671273664, "train_runtime": 757.4177, "train_samples_per_second": 5.057, "train_steps_per_second": 2.535}
  ],
  "max_steps": 1920,
  "num_train_epochs": 10,
  "total_flos": 3556947756318720.0,
  "trial_name": null,
  "trial_params": null
}