| { | |
| "best_metric": 1.2845630645751953, | |
| "best_model_checkpoint": "outputs/checkpoint-5000", | |
| "epoch": 1.7111567419575633, | |
| "global_step": 5000, | |
| "is_hyper_param_search": false, | |
| "is_local_process_zero": true, | |
| "is_world_process_zero": true, | |
| "log_history": [ | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 1e-05, | |
| "loss": 1.8606, | |
| "step": 5 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 2e-05, | |
| "loss": 1.8806, | |
| "step": 10 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 3e-05, | |
| "loss": 1.8434, | |
| "step": 15 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 4e-05, | |
| "loss": 1.7431, | |
| "step": 20 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 5e-05, | |
| "loss": 1.7721, | |
| "step": 25 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 6e-05, | |
| "loss": 1.6826, | |
| "step": 30 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 7e-05, | |
| "loss": 1.5892, | |
| "step": 35 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 8e-05, | |
| "loss": 1.6333, | |
| "step": 40 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 9e-05, | |
| "loss": 1.5955, | |
| "step": 45 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 0.0001, | |
| "loss": 1.5504, | |
| "step": 50 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 0.00011000000000000002, | |
| "loss": 1.5621, | |
| "step": 55 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 0.00012, | |
| "loss": 1.5142, | |
| "step": 60 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 0.00013000000000000002, | |
| "loss": 1.4386, | |
| "step": 65 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 0.00014, | |
| "loss": 1.5017, | |
| "step": 70 | |
| }, | |
| { | |
| "epoch": 0.03, | |
| "learning_rate": 0.00015000000000000001, | |
| "loss": 1.4666, | |
| "step": 75 | |
| }, | |
| { | |
| "epoch": 0.03, | |
| "learning_rate": 0.00016, | |
| "loss": 1.4295, | |
| "step": 80 | |
| }, | |
| { | |
| "epoch": 0.03, | |
| "learning_rate": 0.00017, | |
| "loss": 1.4402, | |
| "step": 85 | |
| }, | |
| { | |
| "epoch": 0.03, | |
| "learning_rate": 0.00018, | |
| "loss": 1.4795, | |
| "step": 90 | |
| }, | |
| { | |
| "epoch": 0.03, | |
| "learning_rate": 0.00019, | |
| "loss": 1.4138, | |
| "step": 95 | |
| }, | |
| { | |
| "epoch": 0.03, | |
| "learning_rate": 0.0002, | |
| "loss": 1.4493, | |
| "step": 100 | |
| }, | |
| { | |
| "epoch": 0.04, | |
| "learning_rate": 0.0001998259052924791, | |
| "loss": 1.4918, | |
| "step": 105 | |
| }, | |
| { | |
| "epoch": 0.04, | |
| "learning_rate": 0.00019965181058495822, | |
| "loss": 1.4659, | |
| "step": 110 | |
| }, | |
| { | |
| "epoch": 0.04, | |
| "learning_rate": 0.00019947771587743734, | |
| "loss": 1.4573, | |
| "step": 115 | |
| }, | |
| { | |
| "epoch": 0.04, | |
| "learning_rate": 0.00019930362116991646, | |
| "loss": 1.4274, | |
| "step": 120 | |
| }, | |
| { | |
| "epoch": 0.04, | |
| "learning_rate": 0.00019912952646239555, | |
| "loss": 1.4064, | |
| "step": 125 | |
| }, | |
| { | |
| "epoch": 0.04, | |
| "learning_rate": 0.00019895543175487465, | |
| "loss": 1.4048, | |
| "step": 130 | |
| }, | |
| { | |
| "epoch": 0.05, | |
| "learning_rate": 0.00019878133704735376, | |
| "loss": 1.4493, | |
| "step": 135 | |
| }, | |
| { | |
| "epoch": 0.05, | |
| "learning_rate": 0.00019860724233983288, | |
| "loss": 1.3317, | |
| "step": 140 | |
| }, | |
| { | |
| "epoch": 0.05, | |
| "learning_rate": 0.00019843314763231198, | |
| "loss": 1.4041, | |
| "step": 145 | |
| }, | |
| { | |
| "epoch": 0.05, | |
| "learning_rate": 0.0001982590529247911, | |
| "loss": 1.4311, | |
| "step": 150 | |
| }, | |
| { | |
| "epoch": 0.05, | |
| "learning_rate": 0.0001980849582172702, | |
| "loss": 1.3626, | |
| "step": 155 | |
| }, | |
| { | |
| "epoch": 0.05, | |
| "learning_rate": 0.00019791086350974934, | |
| "loss": 1.4296, | |
| "step": 160 | |
| }, | |
| { | |
| "epoch": 0.06, | |
| "learning_rate": 0.00019773676880222843, | |
| "loss": 1.3888, | |
| "step": 165 | |
| }, | |
| { | |
| "epoch": 0.06, | |
| "learning_rate": 0.00019756267409470752, | |
| "loss": 1.3389, | |
| "step": 170 | |
| }, | |
| { | |
| "epoch": 0.06, | |
| "learning_rate": 0.00019738857938718664, | |
| "loss": 1.4544, | |
| "step": 175 | |
| }, | |
| { | |
| "epoch": 0.06, | |
| "learning_rate": 0.00019721448467966573, | |
| "loss": 1.4382, | |
| "step": 180 | |
| }, | |
| { | |
| "epoch": 0.06, | |
| "learning_rate": 0.00019704038997214485, | |
| "loss": 1.4953, | |
| "step": 185 | |
| }, | |
| { | |
| "epoch": 0.07, | |
| "learning_rate": 0.00019686629526462397, | |
| "loss": 1.4026, | |
| "step": 190 | |
| }, | |
| { | |
| "epoch": 0.07, | |
| "learning_rate": 0.00019669220055710306, | |
| "loss": 1.3757, | |
| "step": 195 | |
| }, | |
| { | |
| "epoch": 0.07, | |
| "learning_rate": 0.00019651810584958218, | |
| "loss": 1.3247, | |
| "step": 200 | |
| }, | |
| { | |
| "epoch": 0.07, | |
| "eval_loss": 1.3710952997207642, | |
| "eval_runtime": 738.1164, | |
| "eval_samples_per_second": 7.038, | |
| "eval_steps_per_second": 0.881, | |
| "step": 200 | |
| }, | |
| { | |
| "epoch": 0.07, | |
| "learning_rate": 0.0001963440111420613, | |
| "loss": 1.4409, | |
| "step": 205 | |
| }, | |
| { | |
| "epoch": 0.07, | |
| "learning_rate": 0.0001961699164345404, | |
| "loss": 1.4023, | |
| "step": 210 | |
| }, | |
| { | |
| "epoch": 0.07, | |
| "learning_rate": 0.00019599582172701951, | |
| "loss": 1.3736, | |
| "step": 215 | |
| }, | |
| { | |
| "epoch": 0.08, | |
| "learning_rate": 0.0001958217270194986, | |
| "loss": 1.4321, | |
| "step": 220 | |
| }, | |
| { | |
| "epoch": 0.08, | |
| "learning_rate": 0.00019564763231197773, | |
| "loss": 1.4344, | |
| "step": 225 | |
| }, | |
| { | |
| "epoch": 0.08, | |
| "learning_rate": 0.00019547353760445685, | |
| "loss": 1.3539, | |
| "step": 230 | |
| }, | |
| { | |
| "epoch": 0.08, | |
| "learning_rate": 0.00019529944289693594, | |
| "loss": 1.438, | |
| "step": 235 | |
| }, | |
| { | |
| "epoch": 0.08, | |
| "learning_rate": 0.00019512534818941506, | |
| "loss": 1.457, | |
| "step": 240 | |
| }, | |
| { | |
| "epoch": 0.08, | |
| "learning_rate": 0.00019495125348189415, | |
| "loss": 1.429, | |
| "step": 245 | |
| }, | |
| { | |
| "epoch": 0.09, | |
| "learning_rate": 0.00019477715877437327, | |
| "loss": 1.4465, | |
| "step": 250 | |
| }, | |
| { | |
| "epoch": 0.09, | |
| "learning_rate": 0.0001946030640668524, | |
| "loss": 1.4202, | |
| "step": 255 | |
| }, | |
| { | |
| "epoch": 0.09, | |
| "learning_rate": 0.00019442896935933148, | |
| "loss": 1.343, | |
| "step": 260 | |
| }, | |
| { | |
| "epoch": 0.09, | |
| "learning_rate": 0.0001942548746518106, | |
| "loss": 1.4127, | |
| "step": 265 | |
| }, | |
| { | |
| "epoch": 0.09, | |
| "learning_rate": 0.0001940807799442897, | |
| "loss": 1.3171, | |
| "step": 270 | |
| }, | |
| { | |
| "epoch": 0.09, | |
| "learning_rate": 0.0001939066852367688, | |
| "loss": 1.4012, | |
| "step": 275 | |
| }, | |
| { | |
| "epoch": 0.1, | |
| "learning_rate": 0.00019373259052924793, | |
| "loss": 1.2933, | |
| "step": 280 | |
| }, | |
| { | |
| "epoch": 0.1, | |
| "learning_rate": 0.00019355849582172702, | |
| "loss": 1.326, | |
| "step": 285 | |
| }, | |
| { | |
| "epoch": 0.1, | |
| "learning_rate": 0.00019338440111420614, | |
| "loss": 1.3951, | |
| "step": 290 | |
| }, | |
| { | |
| "epoch": 0.1, | |
| "learning_rate": 0.00019321030640668524, | |
| "loss": 1.3149, | |
| "step": 295 | |
| }, | |
| { | |
| "epoch": 0.1, | |
| "learning_rate": 0.00019303621169916436, | |
| "loss": 1.4046, | |
| "step": 300 | |
| }, | |
| { | |
| "epoch": 0.1, | |
| "learning_rate": 0.00019286211699164348, | |
| "loss": 1.343, | |
| "step": 305 | |
| }, | |
| { | |
| "epoch": 0.11, | |
| "learning_rate": 0.00019268802228412257, | |
| "loss": 1.3676, | |
| "step": 310 | |
| }, | |
| { | |
| "epoch": 0.11, | |
| "learning_rate": 0.00019251392757660166, | |
| "loss": 1.3593, | |
| "step": 315 | |
| }, | |
| { | |
| "epoch": 0.11, | |
| "learning_rate": 0.0001923398328690808, | |
| "loss": 1.3869, | |
| "step": 320 | |
| }, | |
| { | |
| "epoch": 0.11, | |
| "learning_rate": 0.0001921657381615599, | |
| "loss": 1.3841, | |
| "step": 325 | |
| }, | |
| { | |
| "epoch": 0.11, | |
| "learning_rate": 0.00019199164345403902, | |
| "loss": 1.3806, | |
| "step": 330 | |
| }, | |
| { | |
| "epoch": 0.11, | |
| "learning_rate": 0.0001918175487465181, | |
| "loss": 1.4527, | |
| "step": 335 | |
| }, | |
| { | |
| "epoch": 0.12, | |
| "learning_rate": 0.0001916434540389972, | |
| "loss": 1.4161, | |
| "step": 340 | |
| }, | |
| { | |
| "epoch": 0.12, | |
| "learning_rate": 0.00019146935933147635, | |
| "loss": 1.3019, | |
| "step": 345 | |
| }, | |
| { | |
| "epoch": 0.12, | |
| "learning_rate": 0.00019129526462395544, | |
| "loss": 1.4082, | |
| "step": 350 | |
| }, | |
| { | |
| "epoch": 0.12, | |
| "learning_rate": 0.00019112116991643454, | |
| "loss": 1.4261, | |
| "step": 355 | |
| }, | |
| { | |
| "epoch": 0.12, | |
| "learning_rate": 0.00019094707520891365, | |
| "loss": 1.4152, | |
| "step": 360 | |
| }, | |
| { | |
| "epoch": 0.12, | |
| "learning_rate": 0.00019077298050139277, | |
| "loss": 1.4289, | |
| "step": 365 | |
| }, | |
| { | |
| "epoch": 0.13, | |
| "learning_rate": 0.0001905988857938719, | |
| "loss": 1.3951, | |
| "step": 370 | |
| }, | |
| { | |
| "epoch": 0.13, | |
| "learning_rate": 0.00019042479108635099, | |
| "loss": 1.3578, | |
| "step": 375 | |
| }, | |
| { | |
| "epoch": 0.13, | |
| "learning_rate": 0.00019025069637883008, | |
| "loss": 1.3342, | |
| "step": 380 | |
| }, | |
| { | |
| "epoch": 0.13, | |
| "learning_rate": 0.0001900766016713092, | |
| "loss": 1.3959, | |
| "step": 385 | |
| }, | |
| { | |
| "epoch": 0.13, | |
| "learning_rate": 0.00018990250696378832, | |
| "loss": 1.4336, | |
| "step": 390 | |
| }, | |
| { | |
| "epoch": 0.14, | |
| "learning_rate": 0.00018972841225626744, | |
| "loss": 1.464, | |
| "step": 395 | |
| }, | |
| { | |
| "epoch": 0.14, | |
| "learning_rate": 0.00018955431754874653, | |
| "loss": 1.4053, | |
| "step": 400 | |
| }, | |
| { | |
| "epoch": 0.14, | |
| "eval_loss": 1.3470327854156494, | |
| "eval_runtime": 738.9389, | |
| "eval_samples_per_second": 7.03, | |
| "eval_steps_per_second": 0.88, | |
| "step": 400 | |
| }, | |
| { | |
| "epoch": 0.14, | |
| "learning_rate": 0.00018938022284122562, | |
| "loss": 1.3582, | |
| "step": 405 | |
| }, | |
| { | |
| "epoch": 0.14, | |
| "learning_rate": 0.00018920612813370474, | |
| "loss": 1.375, | |
| "step": 410 | |
| }, | |
| { | |
| "epoch": 0.14, | |
| "learning_rate": 0.00018903203342618386, | |
| "loss": 1.4229, | |
| "step": 415 | |
| }, | |
| { | |
| "epoch": 0.14, | |
| "learning_rate": 0.00018885793871866295, | |
| "loss": 1.3843, | |
| "step": 420 | |
| }, | |
| { | |
| "epoch": 0.15, | |
| "learning_rate": 0.00018868384401114207, | |
| "loss": 1.39, | |
| "step": 425 | |
| }, | |
| { | |
| "epoch": 0.15, | |
| "learning_rate": 0.00018850974930362116, | |
| "loss": 1.3863, | |
| "step": 430 | |
| }, | |
| { | |
| "epoch": 0.15, | |
| "learning_rate": 0.00018833565459610028, | |
| "loss": 1.3597, | |
| "step": 435 | |
| }, | |
| { | |
| "epoch": 0.15, | |
| "learning_rate": 0.0001881615598885794, | |
| "loss": 1.4114, | |
| "step": 440 | |
| }, | |
| { | |
| "epoch": 0.15, | |
| "learning_rate": 0.0001879874651810585, | |
| "loss": 1.2929, | |
| "step": 445 | |
| }, | |
| { | |
| "epoch": 0.15, | |
| "learning_rate": 0.00018781337047353762, | |
| "loss": 1.3087, | |
| "step": 450 | |
| }, | |
| { | |
| "epoch": 0.16, | |
| "learning_rate": 0.0001876392757660167, | |
| "loss": 1.3748, | |
| "step": 455 | |
| }, | |
| { | |
| "epoch": 0.16, | |
| "learning_rate": 0.00018746518105849583, | |
| "loss": 1.3716, | |
| "step": 460 | |
| }, | |
| { | |
| "epoch": 0.16, | |
| "learning_rate": 0.00018729108635097495, | |
| "loss": 1.404, | |
| "step": 465 | |
| }, | |
| { | |
| "epoch": 0.16, | |
| "learning_rate": 0.00018711699164345404, | |
| "loss": 1.2931, | |
| "step": 470 | |
| }, | |
| { | |
| "epoch": 0.16, | |
| "learning_rate": 0.00018694289693593316, | |
| "loss": 1.3345, | |
| "step": 475 | |
| }, | |
| { | |
| "epoch": 0.16, | |
| "learning_rate": 0.00018676880222841225, | |
| "loss": 1.3836, | |
| "step": 480 | |
| }, | |
| { | |
| "epoch": 0.17, | |
| "learning_rate": 0.00018659470752089137, | |
| "loss": 1.3923, | |
| "step": 485 | |
| }, | |
| { | |
| "epoch": 0.17, | |
| "learning_rate": 0.0001864206128133705, | |
| "loss": 1.3166, | |
| "step": 490 | |
| }, | |
| { | |
| "epoch": 0.17, | |
| "learning_rate": 0.00018624651810584958, | |
| "loss": 1.3697, | |
| "step": 495 | |
| }, | |
| { | |
| "epoch": 0.17, | |
| "learning_rate": 0.0001860724233983287, | |
| "loss": 1.437, | |
| "step": 500 | |
| }, | |
| { | |
| "epoch": 0.17, | |
| "learning_rate": 0.00018589832869080782, | |
| "loss": 1.3579, | |
| "step": 505 | |
| }, | |
| { | |
| "epoch": 0.17, | |
| "learning_rate": 0.00018572423398328691, | |
| "loss": 1.3769, | |
| "step": 510 | |
| }, | |
| { | |
| "epoch": 0.18, | |
| "learning_rate": 0.00018555013927576603, | |
| "loss": 1.3989, | |
| "step": 515 | |
| }, | |
| { | |
| "epoch": 0.18, | |
| "learning_rate": 0.00018537604456824513, | |
| "loss": 1.331, | |
| "step": 520 | |
| }, | |
| { | |
| "epoch": 0.18, | |
| "learning_rate": 0.00018520194986072425, | |
| "loss": 1.3678, | |
| "step": 525 | |
| }, | |
| { | |
| "epoch": 0.18, | |
| "learning_rate": 0.00018502785515320337, | |
| "loss": 1.3475, | |
| "step": 530 | |
| }, | |
| { | |
| "epoch": 0.18, | |
| "learning_rate": 0.00018485376044568246, | |
| "loss": 1.4465, | |
| "step": 535 | |
| }, | |
| { | |
| "epoch": 0.18, | |
| "learning_rate": 0.00018467966573816158, | |
| "loss": 1.3304, | |
| "step": 540 | |
| }, | |
| { | |
| "epoch": 0.19, | |
| "learning_rate": 0.00018450557103064067, | |
| "loss": 1.4006, | |
| "step": 545 | |
| }, | |
| { | |
| "epoch": 0.19, | |
| "learning_rate": 0.0001843314763231198, | |
| "loss": 1.3645, | |
| "step": 550 | |
| }, | |
| { | |
| "epoch": 0.19, | |
| "learning_rate": 0.0001841573816155989, | |
| "loss": 1.3217, | |
| "step": 555 | |
| }, | |
| { | |
| "epoch": 0.19, | |
| "learning_rate": 0.000183983286908078, | |
| "loss": 1.3421, | |
| "step": 560 | |
| }, | |
| { | |
| "epoch": 0.19, | |
| "learning_rate": 0.0001838091922005571, | |
| "loss": 1.3283, | |
| "step": 565 | |
| }, | |
| { | |
| "epoch": 0.2, | |
| "learning_rate": 0.0001836350974930362, | |
| "loss": 1.4101, | |
| "step": 570 | |
| }, | |
| { | |
| "epoch": 0.2, | |
| "learning_rate": 0.00018346100278551533, | |
| "loss": 1.3333, | |
| "step": 575 | |
| }, | |
| { | |
| "epoch": 0.2, | |
| "learning_rate": 0.00018328690807799445, | |
| "loss": 1.4574, | |
| "step": 580 | |
| }, | |
| { | |
| "epoch": 0.2, | |
| "learning_rate": 0.00018311281337047354, | |
| "loss": 1.3755, | |
| "step": 585 | |
| }, | |
| { | |
| "epoch": 0.2, | |
| "learning_rate": 0.00018293871866295264, | |
| "loss": 1.3537, | |
| "step": 590 | |
| }, | |
| { | |
| "epoch": 0.2, | |
| "learning_rate": 0.00018276462395543176, | |
| "loss": 1.3734, | |
| "step": 595 | |
| }, | |
| { | |
| "epoch": 0.21, | |
| "learning_rate": 0.00018259052924791088, | |
| "loss": 1.3431, | |
| "step": 600 | |
| }, | |
| { | |
| "epoch": 0.21, | |
| "eval_loss": 1.3354507684707642, | |
| "eval_runtime": 738.7441, | |
| "eval_samples_per_second": 7.032, | |
| "eval_steps_per_second": 0.88, | |
| "step": 600 | |
| }, | |
| { | |
| "epoch": 0.21, | |
| "learning_rate": 0.00018241643454039, | |
| "loss": 1.3685, | |
| "step": 605 | |
| }, | |
| { | |
| "epoch": 0.21, | |
| "learning_rate": 0.0001822423398328691, | |
| "loss": 1.377, | |
| "step": 610 | |
| }, | |
| { | |
| "epoch": 0.21, | |
| "learning_rate": 0.00018206824512534818, | |
| "loss": 1.4289, | |
| "step": 615 | |
| }, | |
| { | |
| "epoch": 0.21, | |
| "learning_rate": 0.00018189415041782733, | |
| "loss": 1.398, | |
| "step": 620 | |
| }, | |
| { | |
| "epoch": 0.21, | |
| "learning_rate": 0.00018172005571030642, | |
| "loss": 1.3915, | |
| "step": 625 | |
| }, | |
| { | |
| "epoch": 0.22, | |
| "learning_rate": 0.0001815459610027855, | |
| "loss": 1.4608, | |
| "step": 630 | |
| }, | |
| { | |
| "epoch": 0.22, | |
| "learning_rate": 0.00018137186629526463, | |
| "loss": 1.4408, | |
| "step": 635 | |
| }, | |
| { | |
| "epoch": 0.22, | |
| "learning_rate": 0.00018119777158774372, | |
| "loss": 1.3443, | |
| "step": 640 | |
| }, | |
| { | |
| "epoch": 0.22, | |
| "learning_rate": 0.00018102367688022287, | |
| "loss": 1.4033, | |
| "step": 645 | |
| }, | |
| { | |
| "epoch": 0.22, | |
| "learning_rate": 0.00018084958217270196, | |
| "loss": 1.3086, | |
| "step": 650 | |
| }, | |
| { | |
| "epoch": 0.22, | |
| "learning_rate": 0.00018067548746518105, | |
| "loss": 1.3581, | |
| "step": 655 | |
| }, | |
| { | |
| "epoch": 0.23, | |
| "learning_rate": 0.00018050139275766017, | |
| "loss": 1.3457, | |
| "step": 660 | |
| }, | |
| { | |
| "epoch": 0.23, | |
| "learning_rate": 0.0001803272980501393, | |
| "loss": 1.4488, | |
| "step": 665 | |
| }, | |
| { | |
| "epoch": 0.23, | |
| "learning_rate": 0.0001801532033426184, | |
| "loss": 1.3765, | |
| "step": 670 | |
| }, | |
| { | |
| "epoch": 0.23, | |
| "learning_rate": 0.0001799791086350975, | |
| "loss": 1.4061, | |
| "step": 675 | |
| }, | |
| { | |
| "epoch": 0.23, | |
| "learning_rate": 0.0001798050139275766, | |
| "loss": 1.3556, | |
| "step": 680 | |
| }, | |
| { | |
| "epoch": 0.23, | |
| "learning_rate": 0.00017963091922005572, | |
| "loss": 1.3359, | |
| "step": 685 | |
| }, | |
| { | |
| "epoch": 0.24, | |
| "learning_rate": 0.00017945682451253484, | |
| "loss": 1.2965, | |
| "step": 690 | |
| }, | |
| { | |
| "epoch": 0.24, | |
| "learning_rate": 0.00017928272980501393, | |
| "loss": 1.3211, | |
| "step": 695 | |
| }, | |
| { | |
| "epoch": 0.24, | |
| "learning_rate": 0.00017910863509749305, | |
| "loss": 1.4063, | |
| "step": 700 | |
| }, | |
| { | |
| "epoch": 0.24, | |
| "learning_rate": 0.00017893454038997214, | |
| "loss": 1.3837, | |
| "step": 705 | |
| }, | |
| { | |
| "epoch": 0.24, | |
| "learning_rate": 0.00017876044568245126, | |
| "loss": 1.397, | |
| "step": 710 | |
| }, | |
| { | |
| "epoch": 0.24, | |
| "learning_rate": 0.00017858635097493038, | |
| "loss": 1.3671, | |
| "step": 715 | |
| }, | |
| { | |
| "epoch": 0.25, | |
| "learning_rate": 0.00017841225626740947, | |
| "loss": 1.3666, | |
| "step": 720 | |
| }, | |
| { | |
| "epoch": 0.25, | |
| "learning_rate": 0.0001782381615598886, | |
| "loss": 1.3875, | |
| "step": 725 | |
| }, | |
| { | |
| "epoch": 0.25, | |
| "learning_rate": 0.00017806406685236768, | |
| "loss": 1.3613, | |
| "step": 730 | |
| }, | |
| { | |
| "epoch": 0.25, | |
| "learning_rate": 0.0001778899721448468, | |
| "loss": 1.3611, | |
| "step": 735 | |
| }, | |
| { | |
| "epoch": 0.25, | |
| "learning_rate": 0.00017771587743732592, | |
| "loss": 1.357, | |
| "step": 740 | |
| }, | |
| { | |
| "epoch": 0.25, | |
| "learning_rate": 0.00017754178272980502, | |
| "loss": 1.3561, | |
| "step": 745 | |
| }, | |
| { | |
| "epoch": 0.26, | |
| "learning_rate": 0.00017736768802228414, | |
| "loss": 1.4103, | |
| "step": 750 | |
| }, | |
| { | |
| "epoch": 0.26, | |
| "learning_rate": 0.00017719359331476323, | |
| "loss": 1.4355, | |
| "step": 755 | |
| }, | |
| { | |
| "epoch": 0.26, | |
| "learning_rate": 0.00017701949860724235, | |
| "loss": 1.3901, | |
| "step": 760 | |
| }, | |
| { | |
| "epoch": 0.26, | |
| "learning_rate": 0.00017684540389972147, | |
| "loss": 1.4161, | |
| "step": 765 | |
| }, | |
| { | |
| "epoch": 0.26, | |
| "learning_rate": 0.00017667130919220056, | |
| "loss": 1.4353, | |
| "step": 770 | |
| }, | |
| { | |
| "epoch": 0.27, | |
| "learning_rate": 0.00017649721448467968, | |
| "loss": 1.3647, | |
| "step": 775 | |
| }, | |
| { | |
| "epoch": 0.27, | |
| "learning_rate": 0.0001763231197771588, | |
| "loss": 1.3503, | |
| "step": 780 | |
| }, | |
| { | |
| "epoch": 0.27, | |
| "learning_rate": 0.0001761490250696379, | |
| "loss": 1.3034, | |
| "step": 785 | |
| }, | |
| { | |
| "epoch": 0.27, | |
| "learning_rate": 0.000175974930362117, | |
| "loss": 1.3908, | |
| "step": 790 | |
| }, | |
| { | |
| "epoch": 0.27, | |
| "learning_rate": 0.0001758008356545961, | |
| "loss": 1.3361, | |
| "step": 795 | |
| }, | |
| { | |
| "epoch": 0.27, | |
| "learning_rate": 0.0001756267409470752, | |
| "loss": 1.4012, | |
| "step": 800 | |
| }, | |
| { | |
| "epoch": 0.27, | |
| "eval_loss": 1.3277837038040161, | |
| "eval_runtime": 737.1439, | |
| "eval_samples_per_second": 7.047, | |
| "eval_steps_per_second": 0.882, | |
| "step": 800 | |
| }, | |
| { | |
| "epoch": 0.28, | |
| "learning_rate": 0.00017545264623955434, | |
| "loss": 1.3631, | |
| "step": 805 | |
| }, | |
| { | |
| "epoch": 0.28, | |
| "learning_rate": 0.00017527855153203343, | |
| "loss": 1.3789, | |
| "step": 810 | |
| }, | |
| { | |
| "epoch": 0.28, | |
| "learning_rate": 0.00017510445682451255, | |
| "loss": 1.3418, | |
| "step": 815 | |
| }, | |
| { | |
| "epoch": 0.28, | |
| "learning_rate": 0.00017493036211699165, | |
| "loss": 1.3556, | |
| "step": 820 | |
| }, | |
| { | |
| "epoch": 0.28, | |
| "learning_rate": 0.00017475626740947077, | |
| "loss": 1.339, | |
| "step": 825 | |
| }, | |
| { | |
| "epoch": 0.28, | |
| "learning_rate": 0.00017458217270194989, | |
| "loss": 1.3419, | |
| "step": 830 | |
| }, | |
| { | |
| "epoch": 0.29, | |
| "learning_rate": 0.00017440807799442898, | |
| "loss": 1.2919, | |
| "step": 835 | |
| }, | |
| { | |
| "epoch": 0.29, | |
| "learning_rate": 0.00017423398328690807, | |
| "loss": 1.3757, | |
| "step": 840 | |
| }, | |
| { | |
| "epoch": 0.29, | |
| "learning_rate": 0.0001740598885793872, | |
| "loss": 1.3165, | |
| "step": 845 | |
| }, | |
| { | |
| "epoch": 0.29, | |
| "learning_rate": 0.0001738857938718663, | |
| "loss": 1.325, | |
| "step": 850 | |
| }, | |
| { | |
| "epoch": 0.29, | |
| "learning_rate": 0.00017371169916434543, | |
| "loss": 1.3846, | |
| "step": 855 | |
| }, | |
| { | |
| "epoch": 0.29, | |
| "learning_rate": 0.00017353760445682452, | |
| "loss": 1.393, | |
| "step": 860 | |
| }, | |
| { | |
| "epoch": 0.3, | |
| "learning_rate": 0.0001733635097493036, | |
| "loss": 1.3176, | |
| "step": 865 | |
| }, | |
| { | |
| "epoch": 0.3, | |
| "learning_rate": 0.00017318941504178273, | |
| "loss": 1.3603, | |
| "step": 870 | |
| }, | |
| { | |
| "epoch": 0.3, | |
| "learning_rate": 0.00017301532033426185, | |
| "loss": 1.3858, | |
| "step": 875 | |
| }, | |
| { | |
| "epoch": 0.3, | |
| "learning_rate": 0.00017284122562674097, | |
| "loss": 1.3801, | |
| "step": 880 | |
| }, | |
| { | |
| "epoch": 0.3, | |
| "learning_rate": 0.00017266713091922006, | |
| "loss": 1.3258, | |
| "step": 885 | |
| }, | |
| { | |
| "epoch": 0.3, | |
| "learning_rate": 0.00017249303621169916, | |
| "loss": 1.3958, | |
| "step": 890 | |
| }, | |
| { | |
| "epoch": 0.31, | |
| "learning_rate": 0.00017231894150417828, | |
| "loss": 1.3352, | |
| "step": 895 | |
| }, | |
| { | |
| "epoch": 0.31, | |
| "learning_rate": 0.0001721448467966574, | |
| "loss": 1.3546, | |
| "step": 900 | |
| }, | |
| { | |
| "epoch": 0.31, | |
| "learning_rate": 0.0001719707520891365, | |
| "loss": 1.3576, | |
| "step": 905 | |
| }, | |
| { | |
| "epoch": 0.31, | |
| "learning_rate": 0.0001717966573816156, | |
| "loss": 1.5061, | |
| "step": 910 | |
| }, | |
| { | |
| "epoch": 0.31, | |
| "learning_rate": 0.0001716225626740947, | |
| "loss": 1.3496, | |
| "step": 915 | |
| }, | |
| { | |
| "epoch": 0.31, | |
| "learning_rate": 0.00017144846796657385, | |
| "loss": 1.4537, | |
| "step": 920 | |
| }, | |
| { | |
| "epoch": 0.32, | |
| "learning_rate": 0.00017127437325905294, | |
| "loss": 1.4241, | |
| "step": 925 | |
| }, | |
| { | |
| "epoch": 0.32, | |
| "learning_rate": 0.00017110027855153203, | |
| "loss": 1.4109, | |
| "step": 930 | |
| }, | |
| { | |
| "epoch": 0.32, | |
| "learning_rate": 0.00017092618384401115, | |
| "loss": 1.4104, | |
| "step": 935 | |
| }, | |
| { | |
| "epoch": 0.32, | |
| "learning_rate": 0.00017075208913649024, | |
| "loss": 1.4555, | |
| "step": 940 | |
| }, | |
| { | |
| "epoch": 0.32, | |
| "learning_rate": 0.00017057799442896936, | |
| "loss": 1.2743, | |
| "step": 945 | |
| }, | |
| { | |
| "epoch": 0.33, | |
| "learning_rate": 0.00017040389972144848, | |
| "loss": 1.3573, | |
| "step": 950 | |
| }, | |
| { | |
| "epoch": 0.33, | |
| "learning_rate": 0.00017022980501392757, | |
| "loss": 1.3463, | |
| "step": 955 | |
| }, | |
| { | |
| "epoch": 0.33, | |
| "learning_rate": 0.0001700557103064067, | |
| "loss": 1.3817, | |
| "step": 960 | |
| }, | |
| { | |
| "epoch": 0.33, | |
| "learning_rate": 0.0001698816155988858, | |
| "loss": 1.4315, | |
| "step": 965 | |
| }, | |
| { | |
| "epoch": 0.33, | |
| "learning_rate": 0.0001697075208913649, | |
| "loss": 1.3112, | |
| "step": 970 | |
| }, | |
| { | |
| "epoch": 0.33, | |
| "learning_rate": 0.00016953342618384403, | |
| "loss": 1.3998, | |
| "step": 975 | |
| }, | |
| { | |
| "epoch": 0.34, | |
| "learning_rate": 0.00016935933147632312, | |
| "loss": 1.3697, | |
| "step": 980 | |
| }, | |
| { | |
| "epoch": 0.34, | |
| "learning_rate": 0.00016918523676880224, | |
| "loss": 1.3726, | |
| "step": 985 | |
| }, | |
| { | |
| "epoch": 0.34, | |
| "learning_rate": 0.00016901114206128136, | |
| "loss": 1.4137, | |
| "step": 990 | |
| }, | |
| { | |
| "epoch": 0.34, | |
| "learning_rate": 0.00016883704735376045, | |
| "loss": 1.3371, | |
| "step": 995 | |
| }, | |
| { | |
| "epoch": 0.34, | |
| "learning_rate": 0.00016866295264623957, | |
| "loss": 1.3189, | |
| "step": 1000 | |
| }, | |
| { | |
| "epoch": 0.34, | |
| "eval_loss": 1.3234615325927734, | |
| "eval_runtime": 736.9447, | |
| "eval_samples_per_second": 7.049, | |
| "eval_steps_per_second": 0.882, | |
| "step": 1000 | |
| }, | |
| { | |
| "epoch": 0.34, | |
| "learning_rate": 0.00016848885793871866, | |
| "loss": 1.3207, | |
| "step": 1005 | |
| }, | |
| { | |
| "epoch": 0.35, | |
| "learning_rate": 0.00016831476323119778, | |
| "loss": 1.3369, | |
| "step": 1010 | |
| }, | |
| { | |
| "epoch": 0.35, | |
| "learning_rate": 0.0001681406685236769, | |
| "loss": 1.3205, | |
| "step": 1015 | |
| }, | |
| { | |
| "epoch": 0.35, | |
| "learning_rate": 0.000167966573816156, | |
| "loss": 1.3124, | |
| "step": 1020 | |
| }, | |
| { | |
| "epoch": 0.35, | |
| "learning_rate": 0.0001677924791086351, | |
| "loss": 1.3317, | |
| "step": 1025 | |
| }, | |
| { | |
| "epoch": 0.35, | |
| "learning_rate": 0.0001676183844011142, | |
| "loss": 1.3713, | |
| "step": 1030 | |
| }, | |
| { | |
| "epoch": 0.35, | |
| "learning_rate": 0.00016744428969359332, | |
| "loss": 1.3276, | |
| "step": 1035 | |
| }, | |
| { | |
| "epoch": 0.36, | |
| "learning_rate": 0.00016727019498607244, | |
| "loss": 1.2705, | |
| "step": 1040 | |
| }, | |
| { | |
| "epoch": 0.36, | |
| "learning_rate": 0.00016709610027855154, | |
| "loss": 1.4057, | |
| "step": 1045 | |
| }, | |
| { | |
| "epoch": 0.36, | |
| "learning_rate": 0.00016692200557103066, | |
| "loss": 1.4044, | |
| "step": 1050 | |
| }, | |
| { | |
| "epoch": 0.36, | |
| "learning_rate": 0.00016674791086350975, | |
| "loss": 1.3438, | |
| "step": 1055 | |
| }, | |
| { | |
| "epoch": 0.36, | |
| "learning_rate": 0.00016657381615598887, | |
| "loss": 1.3871, | |
| "step": 1060 | |
| }, | |
| { | |
| "epoch": 0.36, | |
| "learning_rate": 0.000166399721448468, | |
| "loss": 1.3212, | |
| "step": 1065 | |
| }, | |
| { | |
| "epoch": 0.37, | |
| "learning_rate": 0.00016622562674094708, | |
| "loss": 1.4142, | |
| "step": 1070 | |
| }, | |
| { | |
| "epoch": 0.37, | |
| "learning_rate": 0.00016605153203342617, | |
| "loss": 1.3637, | |
| "step": 1075 | |
| }, | |
| { | |
| "epoch": 0.37, | |
| "learning_rate": 0.00016587743732590532, | |
| "loss": 1.2748, | |
| "step": 1080 | |
| }, | |
| { | |
| "epoch": 0.37, | |
| "learning_rate": 0.0001657033426183844, | |
| "loss": 1.4044, | |
| "step": 1085 | |
| }, | |
| { | |
| "epoch": 0.37, | |
| "learning_rate": 0.00016552924791086353, | |
| "loss": 1.3374, | |
| "step": 1090 | |
| }, | |
| { | |
| "epoch": 0.37, | |
| "learning_rate": 0.00016535515320334262, | |
| "loss": 1.3647, | |
| "step": 1095 | |
| }, | |
| { | |
| "epoch": 0.38, | |
| "learning_rate": 0.00016518105849582171, | |
| "loss": 1.4259, | |
| "step": 1100 | |
| }, | |
| { | |
| "epoch": 0.38, | |
| "learning_rate": 0.00016500696378830086, | |
| "loss": 1.2926, | |
| "step": 1105 | |
| }, | |
| { | |
| "epoch": 0.38, | |
| "learning_rate": 0.00016483286908077995, | |
| "loss": 1.4621, | |
| "step": 1110 | |
| }, | |
| { | |
| "epoch": 0.38, | |
| "learning_rate": 0.00016465877437325905, | |
| "loss": 1.3746, | |
| "step": 1115 | |
| }, | |
| { | |
| "epoch": 0.38, | |
| "learning_rate": 0.00016448467966573817, | |
| "loss": 1.4036, | |
| "step": 1120 | |
| }, | |
| { | |
| "epoch": 0.39, | |
| "learning_rate": 0.00016431058495821729, | |
| "loss": 1.4062, | |
| "step": 1125 | |
| }, | |
| { | |
| "epoch": 0.39, | |
| "learning_rate": 0.0001641364902506964, | |
| "loss": 1.348, | |
| "step": 1130 | |
| }, | |
| { | |
| "epoch": 0.39, | |
| "learning_rate": 0.0001639623955431755, | |
| "loss": 1.3271, | |
| "step": 1135 | |
| }, | |
| { | |
| "epoch": 0.39, | |
| "learning_rate": 0.0001637883008356546, | |
| "loss": 1.3663, | |
| "step": 1140 | |
| }, | |
| { | |
| "epoch": 0.39, | |
| "learning_rate": 0.0001636142061281337, | |
| "loss": 1.3276, | |
| "step": 1145 | |
| }, | |
| { | |
| "epoch": 0.39, | |
| "learning_rate": 0.00016344011142061283, | |
| "loss": 1.2809, | |
| "step": 1150 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "learning_rate": 0.00016326601671309195, | |
| "loss": 1.3586, | |
| "step": 1155 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "learning_rate": 0.00016309192200557104, | |
| "loss": 1.3864, | |
| "step": 1160 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "learning_rate": 0.00016291782729805013, | |
| "loss": 1.3163, | |
| "step": 1165 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "learning_rate": 0.00016274373259052925, | |
| "loss": 1.3677, | |
| "step": 1170 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "learning_rate": 0.00016256963788300837, | |
| "loss": 1.2984, | |
| "step": 1175 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "learning_rate": 0.00016239554317548746, | |
| "loss": 1.4083, | |
| "step": 1180 | |
| }, | |
| { | |
| "epoch": 0.41, | |
| "learning_rate": 0.00016222144846796658, | |
| "loss": 1.445, | |
| "step": 1185 | |
| }, | |
| { | |
| "epoch": 0.41, | |
| "learning_rate": 0.00016204735376044568, | |
| "loss": 1.3306, | |
| "step": 1190 | |
| }, | |
| { | |
| "epoch": 0.41, | |
| "learning_rate": 0.0001618732590529248, | |
| "loss": 1.4185, | |
| "step": 1195 | |
| }, | |
| { | |
| "epoch": 0.41, | |
| "learning_rate": 0.00016169916434540392, | |
| "loss": 1.3581, | |
| "step": 1200 | |
| }, | |
| { | |
| "epoch": 0.41, | |
| "eval_loss": 1.318132996559143, | |
| "eval_runtime": 737.5524, | |
| "eval_samples_per_second": 7.044, | |
| "eval_steps_per_second": 0.881, | |
| "step": 1200 | |
| }, | |
| { | |
| "epoch": 0.41, | |
| "learning_rate": 0.000161525069637883, | |
| "loss": 1.383, | |
| "step": 1205 | |
| }, | |
| { | |
| "epoch": 0.41, | |
| "learning_rate": 0.00016135097493036213, | |
| "loss": 1.4161, | |
| "step": 1210 | |
| }, | |
| { | |
| "epoch": 0.42, | |
| "learning_rate": 0.00016117688022284122, | |
| "loss": 1.3449, | |
| "step": 1215 | |
| }, | |
| { | |
| "epoch": 0.42, | |
| "learning_rate": 0.00016100278551532034, | |
| "loss": 1.3172, | |
| "step": 1220 | |
| }, | |
| { | |
| "epoch": 0.42, | |
| "learning_rate": 0.00016082869080779946, | |
| "loss": 1.3987, | |
| "step": 1225 | |
| }, | |
| { | |
| "epoch": 0.42, | |
| "learning_rate": 0.00016065459610027855, | |
| "loss": 1.3752, | |
| "step": 1230 | |
| }, | |
| { | |
| "epoch": 0.42, | |
| "learning_rate": 0.00016048050139275767, | |
| "loss": 1.4369, | |
| "step": 1235 | |
| }, | |
| { | |
| "epoch": 0.42, | |
| "learning_rate": 0.00016030640668523676, | |
| "loss": 1.3258, | |
| "step": 1240 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "learning_rate": 0.00016013231197771588, | |
| "loss": 1.3256, | |
| "step": 1245 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "learning_rate": 0.000159958217270195, | |
| "loss": 1.3357, | |
| "step": 1250 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "learning_rate": 0.0001597841225626741, | |
| "loss": 1.3671, | |
| "step": 1255 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "learning_rate": 0.00015961002785515321, | |
| "loss": 1.3605, | |
| "step": 1260 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "learning_rate": 0.00015943593314763233, | |
| "loss": 1.3404, | |
| "step": 1265 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "learning_rate": 0.00015926183844011143, | |
| "loss": 1.3915, | |
| "step": 1270 | |
| }, | |
| { | |
| "epoch": 0.44, | |
| "learning_rate": 0.00015908774373259055, | |
| "loss": 1.3559, | |
| "step": 1275 | |
| }, | |
| { | |
| "epoch": 0.44, | |
| "learning_rate": 0.00015891364902506964, | |
| "loss": 1.3285, | |
| "step": 1280 | |
| }, | |
| { | |
| "epoch": 0.44, | |
| "learning_rate": 0.00015873955431754876, | |
| "loss": 1.3949, | |
| "step": 1285 | |
| }, | |
| { | |
| "epoch": 0.44, | |
| "learning_rate": 0.00015856545961002788, | |
| "loss": 1.3825, | |
| "step": 1290 | |
| }, | |
| { | |
| "epoch": 0.44, | |
| "learning_rate": 0.00015839136490250697, | |
| "loss": 1.3211, | |
| "step": 1295 | |
| }, | |
| { | |
| "epoch": 0.44, | |
| "learning_rate": 0.0001582172701949861, | |
| "loss": 1.332, | |
| "step": 1300 | |
| }, | |
| { | |
| "epoch": 0.45, | |
| "learning_rate": 0.00015804317548746518, | |
| "loss": 1.4379, | |
| "step": 1305 | |
| }, | |
| { | |
| "epoch": 0.45, | |
| "learning_rate": 0.0001578690807799443, | |
| "loss": 1.3406, | |
| "step": 1310 | |
| }, | |
| { | |
| "epoch": 0.45, | |
| "learning_rate": 0.00015769498607242342, | |
| "loss": 1.3467, | |
| "step": 1315 | |
| }, | |
| { | |
| "epoch": 0.45, | |
| "learning_rate": 0.0001575208913649025, | |
| "loss": 1.374, | |
| "step": 1320 | |
| }, | |
| { | |
| "epoch": 0.45, | |
| "learning_rate": 0.0001573467966573816, | |
| "loss": 1.3525, | |
| "step": 1325 | |
| }, | |
| { | |
| "epoch": 0.46, | |
| "learning_rate": 0.00015717270194986072, | |
| "loss": 1.3739, | |
| "step": 1330 | |
| }, | |
| { | |
| "epoch": 0.46, | |
| "learning_rate": 0.00015699860724233984, | |
| "loss": 1.3458, | |
| "step": 1335 | |
| }, | |
| { | |
| "epoch": 0.46, | |
| "learning_rate": 0.00015682451253481896, | |
| "loss": 1.3588, | |
| "step": 1340 | |
| }, | |
| { | |
| "epoch": 0.46, | |
| "learning_rate": 0.00015665041782729806, | |
| "loss": 1.3362, | |
| "step": 1345 | |
| }, | |
| { | |
| "epoch": 0.46, | |
| "learning_rate": 0.00015647632311977715, | |
| "loss": 1.3553, | |
| "step": 1350 | |
| }, | |
| { | |
| "epoch": 0.46, | |
| "learning_rate": 0.00015630222841225627, | |
| "loss": 1.4328, | |
| "step": 1355 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "learning_rate": 0.0001561281337047354, | |
| "loss": 1.2763, | |
| "step": 1360 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "learning_rate": 0.0001559540389972145, | |
| "loss": 1.3359, | |
| "step": 1365 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "learning_rate": 0.0001557799442896936, | |
| "loss": 1.3164, | |
| "step": 1370 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "learning_rate": 0.0001556058495821727, | |
| "loss": 1.29, | |
| "step": 1375 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "learning_rate": 0.00015543175487465184, | |
| "loss": 1.4188, | |
| "step": 1380 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "learning_rate": 0.00015525766016713093, | |
| "loss": 1.3594, | |
| "step": 1385 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "learning_rate": 0.00015508356545961002, | |
| "loss": 1.3535, | |
| "step": 1390 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "learning_rate": 0.00015490947075208914, | |
| "loss": 1.3415, | |
| "step": 1395 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "learning_rate": 0.00015473537604456823, | |
| "loss": 1.3417, | |
| "step": 1400 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "eval_loss": 1.313808798789978, | |
| "eval_runtime": 733.0569, | |
| "eval_samples_per_second": 7.087, | |
| "eval_steps_per_second": 0.887, | |
| "step": 1400 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "learning_rate": 0.00015456128133704738, | |
| "loss": 1.3421, | |
| "step": 1405 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "learning_rate": 0.00015438718662952647, | |
| "loss": 1.3915, | |
| "step": 1410 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "learning_rate": 0.00015421309192200557, | |
| "loss": 1.3097, | |
| "step": 1415 | |
| }, | |
| { | |
| "epoch": 0.49, | |
| "learning_rate": 0.00015403899721448469, | |
| "loss": 1.2905, | |
| "step": 1420 | |
| }, | |
| { | |
| "epoch": 0.49, | |
| "learning_rate": 0.0001538649025069638, | |
| "loss": 1.3719, | |
| "step": 1425 | |
| }, | |
| { | |
| "epoch": 0.49, | |
| "learning_rate": 0.00015369080779944292, | |
| "loss": 1.2958, | |
| "step": 1430 | |
| }, | |
| { | |
| "epoch": 0.49, | |
| "learning_rate": 0.00015351671309192202, | |
| "loss": 1.2922, | |
| "step": 1435 | |
| }, | |
| { | |
| "epoch": 0.49, | |
| "learning_rate": 0.0001533426183844011, | |
| "loss": 1.3139, | |
| "step": 1440 | |
| }, | |
| { | |
| "epoch": 0.49, | |
| "learning_rate": 0.00015316852367688023, | |
| "loss": 1.3396, | |
| "step": 1445 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "learning_rate": 0.00015299442896935935, | |
| "loss": 1.2605, | |
| "step": 1450 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "learning_rate": 0.00015282033426183844, | |
| "loss": 1.4605, | |
| "step": 1455 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "learning_rate": 0.00015264623955431756, | |
| "loss": 1.4972, | |
| "step": 1460 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "learning_rate": 0.00015247214484679665, | |
| "loss": 1.3068, | |
| "step": 1465 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "learning_rate": 0.00015229805013927577, | |
| "loss": 1.3249, | |
| "step": 1470 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "learning_rate": 0.0001521239554317549, | |
| "loss": 1.3316, | |
| "step": 1475 | |
| }, | |
| { | |
| "epoch": 0.51, | |
| "learning_rate": 0.00015194986072423398, | |
| "loss": 1.434, | |
| "step": 1480 | |
| }, | |
| { | |
| "epoch": 0.51, | |
| "learning_rate": 0.0001517757660167131, | |
| "loss": 1.4266, | |
| "step": 1485 | |
| }, | |
| { | |
| "epoch": 0.51, | |
| "learning_rate": 0.0001516016713091922, | |
| "loss": 1.3159, | |
| "step": 1490 | |
| }, | |
| { | |
| "epoch": 0.51, | |
| "learning_rate": 0.00015142757660167132, | |
| "loss": 1.2304, | |
| "step": 1495 | |
| }, | |
| { | |
| "epoch": 0.51, | |
| "learning_rate": 0.00015125348189415043, | |
| "loss": 1.3826, | |
| "step": 1500 | |
| }, | |
| { | |
| "epoch": 0.52, | |
| "learning_rate": 0.00015107938718662953, | |
| "loss": 1.4042, | |
| "step": 1505 | |
| }, | |
| { | |
| "epoch": 0.52, | |
| "learning_rate": 0.00015090529247910865, | |
| "loss": 1.335, | |
| "step": 1510 | |
| }, | |
| { | |
| "epoch": 0.52, | |
| "learning_rate": 0.00015073119777158774, | |
| "loss": 1.3166, | |
| "step": 1515 | |
| }, | |
| { | |
| "epoch": 0.52, | |
| "learning_rate": 0.00015055710306406686, | |
| "loss": 1.284, | |
| "step": 1520 | |
| }, | |
| { | |
| "epoch": 0.52, | |
| "learning_rate": 0.00015038300835654598, | |
| "loss": 1.2927, | |
| "step": 1525 | |
| }, | |
| { | |
| "epoch": 0.52, | |
| "learning_rate": 0.00015020891364902507, | |
| "loss": 1.4464, | |
| "step": 1530 | |
| }, | |
| { | |
| "epoch": 0.53, | |
| "learning_rate": 0.0001500348189415042, | |
| "loss": 1.319, | |
| "step": 1535 | |
| }, | |
| { | |
| "epoch": 0.53, | |
| "learning_rate": 0.0001498607242339833, | |
| "loss": 1.3213, | |
| "step": 1540 | |
| }, | |
| { | |
| "epoch": 0.53, | |
| "learning_rate": 0.0001496866295264624, | |
| "loss": 1.4009, | |
| "step": 1545 | |
| }, | |
| { | |
| "epoch": 0.53, | |
| "learning_rate": 0.00014951253481894152, | |
| "loss": 1.3166, | |
| "step": 1550 | |
| }, | |
| { | |
| "epoch": 0.53, | |
| "learning_rate": 0.00014933844011142061, | |
| "loss": 1.3605, | |
| "step": 1555 | |
| }, | |
| { | |
| "epoch": 0.53, | |
| "learning_rate": 0.0001491643454038997, | |
| "loss": 1.401, | |
| "step": 1560 | |
| }, | |
| { | |
| "epoch": 0.54, | |
| "learning_rate": 0.00014899025069637885, | |
| "loss": 1.3219, | |
| "step": 1565 | |
| }, | |
| { | |
| "epoch": 0.54, | |
| "learning_rate": 0.00014881615598885795, | |
| "loss": 1.3484, | |
| "step": 1570 | |
| }, | |
| { | |
| "epoch": 0.54, | |
| "learning_rate": 0.00014864206128133706, | |
| "loss": 1.3743, | |
| "step": 1575 | |
| }, | |
| { | |
| "epoch": 0.54, | |
| "learning_rate": 0.00014846796657381616, | |
| "loss": 1.3498, | |
| "step": 1580 | |
| }, | |
| { | |
| "epoch": 0.54, | |
| "learning_rate": 0.00014829387186629528, | |
| "loss": 1.2985, | |
| "step": 1585 | |
| }, | |
| { | |
| "epoch": 0.54, | |
| "learning_rate": 0.0001481197771587744, | |
| "loss": 1.3355, | |
| "step": 1590 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "learning_rate": 0.0001479456824512535, | |
| "loss": 1.3278, | |
| "step": 1595 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "learning_rate": 0.00014777158774373258, | |
| "loss": 1.3111, | |
| "step": 1600 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "eval_loss": 1.3110042810440063, | |
| "eval_runtime": 733.0309, | |
| "eval_samples_per_second": 7.087, | |
| "eval_steps_per_second": 0.887, | |
| "step": 1600 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "learning_rate": 0.0001475974930362117, | |
| "loss": 1.3197, | |
| "step": 1605 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "learning_rate": 0.00014742339832869082, | |
| "loss": 1.4121, | |
| "step": 1610 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "learning_rate": 0.00014724930362116994, | |
| "loss": 1.34, | |
| "step": 1615 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "learning_rate": 0.00014707520891364903, | |
| "loss": 1.3669, | |
| "step": 1620 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "learning_rate": 0.00014690111420612812, | |
| "loss": 1.3248, | |
| "step": 1625 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "learning_rate": 0.00014672701949860724, | |
| "loss": 1.2567, | |
| "step": 1630 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "learning_rate": 0.00014655292479108636, | |
| "loss": 1.3399, | |
| "step": 1635 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "learning_rate": 0.00014637883008356548, | |
| "loss": 1.315, | |
| "step": 1640 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "learning_rate": 0.00014620473537604458, | |
| "loss": 1.2658, | |
| "step": 1645 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "learning_rate": 0.00014603064066852367, | |
| "loss": 1.3854, | |
| "step": 1650 | |
| }, | |
| { | |
| "epoch": 0.57, | |
| "learning_rate": 0.0001458565459610028, | |
| "loss": 1.3889, | |
| "step": 1655 | |
| }, | |
| { | |
| "epoch": 0.57, | |
| "learning_rate": 0.0001456824512534819, | |
| "loss": 1.3587, | |
| "step": 1660 | |
| }, | |
| { | |
| "epoch": 0.57, | |
| "learning_rate": 0.000145508356545961, | |
| "loss": 1.3507, | |
| "step": 1665 | |
| }, | |
| { | |
| "epoch": 0.57, | |
| "learning_rate": 0.00014533426183844012, | |
| "loss": 1.3647, | |
| "step": 1670 | |
| }, | |
| { | |
| "epoch": 0.57, | |
| "learning_rate": 0.0001451601671309192, | |
| "loss": 1.2777, | |
| "step": 1675 | |
| }, | |
| { | |
| "epoch": 0.57, | |
| "learning_rate": 0.00014498607242339836, | |
| "loss": 1.3376, | |
| "step": 1680 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "learning_rate": 0.00014481197771587745, | |
| "loss": 1.2869, | |
| "step": 1685 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "learning_rate": 0.00014463788300835654, | |
| "loss": 1.2855, | |
| "step": 1690 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "learning_rate": 0.00014446378830083566, | |
| "loss": 1.3387, | |
| "step": 1695 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "learning_rate": 0.00014428969359331475, | |
| "loss": 1.3416, | |
| "step": 1700 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "learning_rate": 0.00014411559888579387, | |
| "loss": 1.3574, | |
| "step": 1705 | |
| }, | |
| { | |
| "epoch": 0.59, | |
| "learning_rate": 0.000143941504178273, | |
| "loss": 1.375, | |
| "step": 1710 | |
| }, | |
| { | |
| "epoch": 0.59, | |
| "learning_rate": 0.00014376740947075209, | |
| "loss": 1.3952, | |
| "step": 1715 | |
| }, | |
| { | |
| "epoch": 0.59, | |
| "learning_rate": 0.0001435933147632312, | |
| "loss": 1.3429, | |
| "step": 1720 | |
| }, | |
| { | |
| "epoch": 0.59, | |
| "learning_rate": 0.00014341922005571032, | |
| "loss": 1.3726, | |
| "step": 1725 | |
| }, | |
| { | |
| "epoch": 0.59, | |
| "learning_rate": 0.00014324512534818942, | |
| "loss": 1.2442, | |
| "step": 1730 | |
| }, | |
| { | |
| "epoch": 0.59, | |
| "learning_rate": 0.00014307103064066854, | |
| "loss": 1.2478, | |
| "step": 1735 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "learning_rate": 0.00014289693593314763, | |
| "loss": 1.3317, | |
| "step": 1740 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "learning_rate": 0.00014272284122562675, | |
| "loss": 1.3172, | |
| "step": 1745 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "learning_rate": 0.00014254874651810587, | |
| "loss": 1.3594, | |
| "step": 1750 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "learning_rate": 0.00014237465181058496, | |
| "loss": 1.3597, | |
| "step": 1755 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "learning_rate": 0.00014220055710306408, | |
| "loss": 1.3468, | |
| "step": 1760 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "learning_rate": 0.00014202646239554317, | |
| "loss": 1.369, | |
| "step": 1765 | |
| }, | |
| { | |
| "epoch": 0.61, | |
| "learning_rate": 0.0001418523676880223, | |
| "loss": 1.2807, | |
| "step": 1770 | |
| }, | |
| { | |
| "epoch": 0.61, | |
| "learning_rate": 0.0001416782729805014, | |
| "loss": 1.3454, | |
| "step": 1775 | |
| }, | |
| { | |
| "epoch": 0.61, | |
| "learning_rate": 0.0001415041782729805, | |
| "loss": 1.3576, | |
| "step": 1780 | |
| }, | |
| { | |
| "epoch": 0.61, | |
| "learning_rate": 0.00014133008356545962, | |
| "loss": 1.397, | |
| "step": 1785 | |
| }, | |
| { | |
| "epoch": 0.61, | |
| "learning_rate": 0.00014115598885793872, | |
| "loss": 1.2672, | |
| "step": 1790 | |
| }, | |
| { | |
| "epoch": 0.61, | |
| "learning_rate": 0.00014098189415041784, | |
| "loss": 1.3121, | |
| "step": 1795 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "learning_rate": 0.00014080779944289695, | |
| "loss": 1.3547, | |
| "step": 1800 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "eval_loss": 1.306809425354004, | |
| "eval_runtime": 733.2137, | |
| "eval_samples_per_second": 7.085, | |
| "eval_steps_per_second": 0.887, | |
| "step": 1800 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "learning_rate": 0.00014063370473537605, | |
| "loss": 1.3878, | |
| "step": 1805 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "learning_rate": 0.00014045961002785517, | |
| "loss": 1.2847, | |
| "step": 1810 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "learning_rate": 0.00014028551532033426, | |
| "loss": 1.3002, | |
| "step": 1815 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "learning_rate": 0.00014011142061281338, | |
| "loss": 1.29, | |
| "step": 1820 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "learning_rate": 0.0001399373259052925, | |
| "loss": 1.3643, | |
| "step": 1825 | |
| }, | |
| { | |
| "epoch": 0.63, | |
| "learning_rate": 0.0001397632311977716, | |
| "loss": 1.2768, | |
| "step": 1830 | |
| }, | |
| { | |
| "epoch": 0.63, | |
| "learning_rate": 0.00013958913649025068, | |
| "loss": 1.3229, | |
| "step": 1835 | |
| }, | |
| { | |
| "epoch": 0.63, | |
| "learning_rate": 0.00013941504178272983, | |
| "loss": 1.3588, | |
| "step": 1840 | |
| }, | |
| { | |
| "epoch": 0.63, | |
| "learning_rate": 0.00013924094707520892, | |
| "loss": 1.3441, | |
| "step": 1845 | |
| }, | |
| { | |
| "epoch": 0.63, | |
| "learning_rate": 0.00013906685236768804, | |
| "loss": 1.3363, | |
| "step": 1850 | |
| }, | |
| { | |
| "epoch": 0.63, | |
| "learning_rate": 0.00013889275766016713, | |
| "loss": 1.2517, | |
| "step": 1855 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "learning_rate": 0.00013871866295264623, | |
| "loss": 1.2044, | |
| "step": 1860 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "learning_rate": 0.00013854456824512537, | |
| "loss": 1.382, | |
| "step": 1865 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "learning_rate": 0.00013837047353760446, | |
| "loss": 1.2676, | |
| "step": 1870 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "learning_rate": 0.00013819637883008356, | |
| "loss": 1.3313, | |
| "step": 1875 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "learning_rate": 0.00013802228412256268, | |
| "loss": 1.3407, | |
| "step": 1880 | |
| }, | |
| { | |
| "epoch": 0.65, | |
| "learning_rate": 0.0001378481894150418, | |
| "loss": 1.3454, | |
| "step": 1885 | |
| }, | |
| { | |
| "epoch": 0.65, | |
| "learning_rate": 0.00013767409470752092, | |
| "loss": 1.3517, | |
| "step": 1890 | |
| }, | |
| { | |
| "epoch": 0.65, | |
| "learning_rate": 0.0001375, | |
| "loss": 1.3419, | |
| "step": 1895 | |
| }, | |
| { | |
| "epoch": 0.65, | |
| "learning_rate": 0.0001373259052924791, | |
| "loss": 1.323, | |
| "step": 1900 | |
| }, | |
| { | |
| "epoch": 0.65, | |
| "learning_rate": 0.00013715181058495822, | |
| "loss": 1.3523, | |
| "step": 1905 | |
| }, | |
| { | |
| "epoch": 0.65, | |
| "learning_rate": 0.00013697771587743734, | |
| "loss": 1.3689, | |
| "step": 1910 | |
| }, | |
| { | |
| "epoch": 0.66, | |
| "learning_rate": 0.00013680362116991646, | |
| "loss": 1.2813, | |
| "step": 1915 | |
| }, | |
| { | |
| "epoch": 0.66, | |
| "learning_rate": 0.00013662952646239555, | |
| "loss": 1.2873, | |
| "step": 1920 | |
| }, | |
| { | |
| "epoch": 0.66, | |
| "learning_rate": 0.00013645543175487464, | |
| "loss": 1.3936, | |
| "step": 1925 | |
| }, | |
| { | |
| "epoch": 0.66, | |
| "learning_rate": 0.00013628133704735376, | |
| "loss": 1.4143, | |
| "step": 1930 | |
| }, | |
| { | |
| "epoch": 0.66, | |
| "learning_rate": 0.00013610724233983288, | |
| "loss": 1.3495, | |
| "step": 1935 | |
| }, | |
| { | |
| "epoch": 0.66, | |
| "learning_rate": 0.00013593314763231198, | |
| "loss": 1.3029, | |
| "step": 1940 | |
| }, | |
| { | |
| "epoch": 0.67, | |
| "learning_rate": 0.0001357590529247911, | |
| "loss": 1.3457, | |
| "step": 1945 | |
| }, | |
| { | |
| "epoch": 0.67, | |
| "learning_rate": 0.0001355849582172702, | |
| "loss": 1.3519, | |
| "step": 1950 | |
| }, | |
| { | |
| "epoch": 0.67, | |
| "learning_rate": 0.0001354108635097493, | |
| "loss": 1.313, | |
| "step": 1955 | |
| }, | |
| { | |
| "epoch": 0.67, | |
| "learning_rate": 0.00013523676880222843, | |
| "loss": 1.3421, | |
| "step": 1960 | |
| }, | |
| { | |
| "epoch": 0.67, | |
| "learning_rate": 0.00013506267409470752, | |
| "loss": 1.4057, | |
| "step": 1965 | |
| }, | |
| { | |
| "epoch": 0.67, | |
| "learning_rate": 0.00013488857938718664, | |
| "loss": 1.3156, | |
| "step": 1970 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "learning_rate": 0.00013471448467966573, | |
| "loss": 1.3147, | |
| "step": 1975 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "learning_rate": 0.00013454038997214485, | |
| "loss": 1.3831, | |
| "step": 1980 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "learning_rate": 0.00013436629526462397, | |
| "loss": 1.2535, | |
| "step": 1985 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "learning_rate": 0.00013419220055710306, | |
| "loss": 1.3173, | |
| "step": 1990 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "learning_rate": 0.00013401810584958218, | |
| "loss": 1.3519, | |
| "step": 1995 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "learning_rate": 0.00013384401114206127, | |
| "loss": 1.3725, | |
| "step": 2000 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "eval_loss": 1.3058445453643799, | |
| "eval_runtime": 733.4218, | |
| "eval_samples_per_second": 7.083, | |
| "eval_steps_per_second": 0.886, | |
| "step": 2000 | |
| }, | |
| { | |
| "epoch": 0.69, | |
| "learning_rate": 0.0001336699164345404, | |
| "loss": 1.4021, | |
| "step": 2005 | |
| }, | |
| { | |
| "epoch": 0.69, | |
| "learning_rate": 0.0001334958217270195, | |
| "loss": 1.2751, | |
| "step": 2010 | |
| }, | |
| { | |
| "epoch": 0.69, | |
| "learning_rate": 0.0001333217270194986, | |
| "loss": 1.3458, | |
| "step": 2015 | |
| }, | |
| { | |
| "epoch": 0.69, | |
| "learning_rate": 0.00013314763231197772, | |
| "loss": 1.272, | |
| "step": 2020 | |
| }, | |
| { | |
| "epoch": 0.69, | |
| "learning_rate": 0.00013297353760445684, | |
| "loss": 1.3671, | |
| "step": 2025 | |
| }, | |
| { | |
| "epoch": 0.69, | |
| "learning_rate": 0.00013279944289693594, | |
| "loss": 1.3424, | |
| "step": 2030 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "learning_rate": 0.00013262534818941506, | |
| "loss": 1.3302, | |
| "step": 2035 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "learning_rate": 0.00013245125348189415, | |
| "loss": 1.3084, | |
| "step": 2040 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "learning_rate": 0.00013227715877437327, | |
| "loss": 1.3688, | |
| "step": 2045 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "learning_rate": 0.0001321030640668524, | |
| "loss": 1.3742, | |
| "step": 2050 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "learning_rate": 0.00013192896935933148, | |
| "loss": 1.3327, | |
| "step": 2055 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "learning_rate": 0.0001317548746518106, | |
| "loss": 1.3348, | |
| "step": 2060 | |
| }, | |
| { | |
| "epoch": 0.71, | |
| "learning_rate": 0.0001315807799442897, | |
| "loss": 1.3634, | |
| "step": 2065 | |
| }, | |
| { | |
| "epoch": 0.71, | |
| "learning_rate": 0.0001314066852367688, | |
| "loss": 1.4085, | |
| "step": 2070 | |
| }, | |
| { | |
| "epoch": 0.71, | |
| "learning_rate": 0.00013123259052924793, | |
| "loss": 1.37, | |
| "step": 2075 | |
| }, | |
| { | |
| "epoch": 0.71, | |
| "learning_rate": 0.00013105849582172702, | |
| "loss": 1.379, | |
| "step": 2080 | |
| }, | |
| { | |
| "epoch": 0.71, | |
| "learning_rate": 0.00013088440111420614, | |
| "loss": 1.3195, | |
| "step": 2085 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "learning_rate": 0.00013071030640668524, | |
| "loss": 1.3618, | |
| "step": 2090 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "learning_rate": 0.00013053621169916435, | |
| "loss": 1.2982, | |
| "step": 2095 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "learning_rate": 0.00013036211699164347, | |
| "loss": 1.3429, | |
| "step": 2100 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "learning_rate": 0.00013018802228412257, | |
| "loss": 1.2857, | |
| "step": 2105 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "learning_rate": 0.00013001392757660166, | |
| "loss": 1.3079, | |
| "step": 2110 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "learning_rate": 0.00012983983286908078, | |
| "loss": 1.3331, | |
| "step": 2115 | |
| }, | |
| { | |
| "epoch": 0.73, | |
| "learning_rate": 0.0001296657381615599, | |
| "loss": 1.3805, | |
| "step": 2120 | |
| }, | |
| { | |
| "epoch": 0.73, | |
| "learning_rate": 0.00012949164345403902, | |
| "loss": 1.3735, | |
| "step": 2125 | |
| }, | |
| { | |
| "epoch": 0.73, | |
| "learning_rate": 0.0001293175487465181, | |
| "loss": 1.3509, | |
| "step": 2130 | |
| }, | |
| { | |
| "epoch": 0.73, | |
| "learning_rate": 0.0001291434540389972, | |
| "loss": 1.3057, | |
| "step": 2135 | |
| }, | |
| { | |
| "epoch": 0.73, | |
| "learning_rate": 0.00012896935933147635, | |
| "loss": 1.3151, | |
| "step": 2140 | |
| }, | |
| { | |
| "epoch": 0.73, | |
| "learning_rate": 0.00012879526462395544, | |
| "loss": 1.3399, | |
| "step": 2145 | |
| }, | |
| { | |
| "epoch": 0.74, | |
| "learning_rate": 0.00012862116991643453, | |
| "loss": 1.3866, | |
| "step": 2150 | |
| }, | |
| { | |
| "epoch": 0.74, | |
| "learning_rate": 0.00012844707520891365, | |
| "loss": 1.3843, | |
| "step": 2155 | |
| }, | |
| { | |
| "epoch": 0.74, | |
| "learning_rate": 0.00012827298050139275, | |
| "loss": 1.3211, | |
| "step": 2160 | |
| }, | |
| { | |
| "epoch": 0.74, | |
| "learning_rate": 0.0001280988857938719, | |
| "loss": 1.303, | |
| "step": 2165 | |
| }, | |
| { | |
| "epoch": 0.74, | |
| "learning_rate": 0.00012792479108635098, | |
| "loss": 1.4432, | |
| "step": 2170 | |
| }, | |
| { | |
| "epoch": 0.74, | |
| "learning_rate": 0.00012775069637883008, | |
| "loss": 1.3461, | |
| "step": 2175 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "learning_rate": 0.0001275766016713092, | |
| "loss": 1.3664, | |
| "step": 2180 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "learning_rate": 0.00012740250696378832, | |
| "loss": 1.3216, | |
| "step": 2185 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "learning_rate": 0.00012722841225626744, | |
| "loss": 1.3525, | |
| "step": 2190 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "learning_rate": 0.00012705431754874653, | |
| "loss": 1.2865, | |
| "step": 2195 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "learning_rate": 0.00012688022284122562, | |
| "loss": 1.3574, | |
| "step": 2200 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "eval_loss": 1.302751064300537, | |
| "eval_runtime": 734.76, | |
| "eval_samples_per_second": 7.07, | |
| "eval_steps_per_second": 0.885, | |
| "step": 2200 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "learning_rate": 0.00012670612813370474, | |
| "loss": 1.3621, | |
| "step": 2205 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "learning_rate": 0.00012653203342618386, | |
| "loss": 1.4331, | |
| "step": 2210 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "learning_rate": 0.00012635793871866295, | |
| "loss": 1.3252, | |
| "step": 2215 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "learning_rate": 0.00012618384401114207, | |
| "loss": 1.2945, | |
| "step": 2220 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "learning_rate": 0.00012600974930362116, | |
| "loss": 1.3286, | |
| "step": 2225 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "learning_rate": 0.00012583565459610028, | |
| "loss": 1.2922, | |
| "step": 2230 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "learning_rate": 0.0001256615598885794, | |
| "loss": 1.2644, | |
| "step": 2235 | |
| }, | |
| { | |
| "epoch": 0.77, | |
| "learning_rate": 0.0001254874651810585, | |
| "loss": 1.3052, | |
| "step": 2240 | |
| }, | |
| { | |
| "epoch": 0.77, | |
| "learning_rate": 0.00012531337047353761, | |
| "loss": 1.2918, | |
| "step": 2245 | |
| }, | |
| { | |
| "epoch": 0.77, | |
| "learning_rate": 0.0001251392757660167, | |
| "loss": 1.3455, | |
| "step": 2250 | |
| }, | |
| { | |
| "epoch": 0.77, | |
| "learning_rate": 0.00012496518105849583, | |
| "loss": 1.3509, | |
| "step": 2255 | |
| }, | |
| { | |
| "epoch": 0.77, | |
| "learning_rate": 0.00012479108635097495, | |
| "loss": 1.3038, | |
| "step": 2260 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "learning_rate": 0.00012461699164345404, | |
| "loss": 1.2552, | |
| "step": 2265 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "learning_rate": 0.00012444289693593316, | |
| "loss": 1.3511, | |
| "step": 2270 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "learning_rate": 0.00012426880222841225, | |
| "loss": 1.3912, | |
| "step": 2275 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "learning_rate": 0.00012409470752089137, | |
| "loss": 1.358, | |
| "step": 2280 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "learning_rate": 0.0001239206128133705, | |
| "loss": 1.3028, | |
| "step": 2285 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "learning_rate": 0.00012374651810584958, | |
| "loss": 1.3647, | |
| "step": 2290 | |
| }, | |
| { | |
| "epoch": 0.79, | |
| "learning_rate": 0.0001235724233983287, | |
| "loss": 1.2736, | |
| "step": 2295 | |
| }, | |
| { | |
| "epoch": 0.79, | |
| "learning_rate": 0.00012339832869080782, | |
| "loss": 1.2978, | |
| "step": 2300 | |
| }, | |
| { | |
| "epoch": 0.79, | |
| "learning_rate": 0.0001232242339832869, | |
| "loss": 1.3406, | |
| "step": 2305 | |
| }, | |
| { | |
| "epoch": 0.79, | |
| "learning_rate": 0.00012305013927576603, | |
| "loss": 1.3424, | |
| "step": 2310 | |
| }, | |
| { | |
| "epoch": 0.79, | |
| "learning_rate": 0.00012287604456824513, | |
| "loss": 1.3182, | |
| "step": 2315 | |
| }, | |
| { | |
| "epoch": 0.79, | |
| "learning_rate": 0.00012270194986072422, | |
| "loss": 1.3059, | |
| "step": 2320 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "learning_rate": 0.00012252785515320336, | |
| "loss": 1.2854, | |
| "step": 2325 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "learning_rate": 0.00012235376044568246, | |
| "loss": 1.2978, | |
| "step": 2330 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "learning_rate": 0.00012217966573816158, | |
| "loss": 1.2419, | |
| "step": 2335 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "learning_rate": 0.00012200557103064068, | |
| "loss": 1.3188, | |
| "step": 2340 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "learning_rate": 0.00012183147632311977, | |
| "loss": 1.2925, | |
| "step": 2345 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "learning_rate": 0.0001216573816155989, | |
| "loss": 1.3548, | |
| "step": 2350 | |
| }, | |
| { | |
| "epoch": 0.81, | |
| "learning_rate": 0.000121483286908078, | |
| "loss": 1.3569, | |
| "step": 2355 | |
| }, | |
| { | |
| "epoch": 0.81, | |
| "learning_rate": 0.00012130919220055709, | |
| "loss": 1.3378, | |
| "step": 2360 | |
| }, | |
| { | |
| "epoch": 0.81, | |
| "learning_rate": 0.00012113509749303623, | |
| "loss": 1.2776, | |
| "step": 2365 | |
| }, | |
| { | |
| "epoch": 0.81, | |
| "learning_rate": 0.00012096100278551532, | |
| "loss": 1.3191, | |
| "step": 2370 | |
| }, | |
| { | |
| "epoch": 0.81, | |
| "learning_rate": 0.00012078690807799444, | |
| "loss": 1.3439, | |
| "step": 2375 | |
| }, | |
| { | |
| "epoch": 0.81, | |
| "learning_rate": 0.00012061281337047354, | |
| "loss": 1.3, | |
| "step": 2380 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "learning_rate": 0.00012043871866295265, | |
| "loss": 1.3663, | |
| "step": 2385 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "learning_rate": 0.00012026462395543177, | |
| "loss": 1.3978, | |
| "step": 2390 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "learning_rate": 0.00012009052924791086, | |
| "loss": 1.3929, | |
| "step": 2395 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "learning_rate": 0.00011991643454039, | |
| "loss": 1.3773, | |
| "step": 2400 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "eval_loss": 1.2999300956726074, | |
| "eval_runtime": 732.8294, | |
| "eval_samples_per_second": 7.089, | |
| "eval_steps_per_second": 0.887, | |
| "step": 2400 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "learning_rate": 0.00011974233983286909, | |
| "loss": 1.3097, | |
| "step": 2405 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "learning_rate": 0.00011956824512534819, | |
| "loss": 1.3452, | |
| "step": 2410 | |
| }, | |
| { | |
| "epoch": 0.83, | |
| "learning_rate": 0.00011939415041782731, | |
| "loss": 1.3555, | |
| "step": 2415 | |
| }, | |
| { | |
| "epoch": 0.83, | |
| "learning_rate": 0.0001192200557103064, | |
| "loss": 1.3182, | |
| "step": 2420 | |
| }, | |
| { | |
| "epoch": 0.83, | |
| "learning_rate": 0.00011904596100278551, | |
| "loss": 1.3665, | |
| "step": 2425 | |
| }, | |
| { | |
| "epoch": 0.83, | |
| "learning_rate": 0.00011887186629526463, | |
| "loss": 1.2431, | |
| "step": 2430 | |
| }, | |
| { | |
| "epoch": 0.83, | |
| "learning_rate": 0.00011869777158774374, | |
| "loss": 1.3146, | |
| "step": 2435 | |
| }, | |
| { | |
| "epoch": 0.84, | |
| "learning_rate": 0.00011852367688022286, | |
| "loss": 1.3499, | |
| "step": 2440 | |
| }, | |
| { | |
| "epoch": 0.84, | |
| "learning_rate": 0.00011834958217270196, | |
| "loss": 1.3353, | |
| "step": 2445 | |
| }, | |
| { | |
| "epoch": 0.84, | |
| "learning_rate": 0.00011817548746518105, | |
| "loss": 1.3442, | |
| "step": 2450 | |
| }, | |
| { | |
| "epoch": 0.84, | |
| "learning_rate": 0.00011800139275766017, | |
| "loss": 1.3142, | |
| "step": 2455 | |
| }, | |
| { | |
| "epoch": 0.84, | |
| "learning_rate": 0.00011782729805013928, | |
| "loss": 1.3017, | |
| "step": 2460 | |
| }, | |
| { | |
| "epoch": 0.84, | |
| "learning_rate": 0.0001176532033426184, | |
| "loss": 1.3205, | |
| "step": 2465 | |
| }, | |
| { | |
| "epoch": 0.85, | |
| "learning_rate": 0.0001174791086350975, | |
| "loss": 1.3205, | |
| "step": 2470 | |
| }, | |
| { | |
| "epoch": 0.85, | |
| "learning_rate": 0.0001173050139275766, | |
| "loss": 1.3755, | |
| "step": 2475 | |
| }, | |
| { | |
| "epoch": 0.85, | |
| "learning_rate": 0.00011713091922005573, | |
| "loss": 1.4142, | |
| "step": 2480 | |
| }, | |
| { | |
| "epoch": 0.85, | |
| "learning_rate": 0.00011695682451253482, | |
| "loss": 1.3415, | |
| "step": 2485 | |
| }, | |
| { | |
| "epoch": 0.85, | |
| "learning_rate": 0.00011678272980501393, | |
| "loss": 1.3956, | |
| "step": 2490 | |
| }, | |
| { | |
| "epoch": 0.85, | |
| "learning_rate": 0.00011660863509749305, | |
| "loss": 1.3654, | |
| "step": 2495 | |
| }, | |
| { | |
| "epoch": 0.86, | |
| "learning_rate": 0.00011643454038997214, | |
| "loss": 1.3569, | |
| "step": 2500 | |
| }, | |
| { | |
| "epoch": 0.86, | |
| "learning_rate": 0.00011626044568245127, | |
| "loss": 1.339, | |
| "step": 2505 | |
| }, | |
| { | |
| "epoch": 0.86, | |
| "learning_rate": 0.00011608635097493037, | |
| "loss": 1.3619, | |
| "step": 2510 | |
| }, | |
| { | |
| "epoch": 0.86, | |
| "learning_rate": 0.00011591225626740947, | |
| "loss": 1.3503, | |
| "step": 2515 | |
| }, | |
| { | |
| "epoch": 0.86, | |
| "learning_rate": 0.00011573816155988859, | |
| "loss": 1.2833, | |
| "step": 2520 | |
| }, | |
| { | |
| "epoch": 0.86, | |
| "learning_rate": 0.0001155640668523677, | |
| "loss": 1.3404, | |
| "step": 2525 | |
| }, | |
| { | |
| "epoch": 0.87, | |
| "learning_rate": 0.00011538997214484679, | |
| "loss": 1.3855, | |
| "step": 2530 | |
| }, | |
| { | |
| "epoch": 0.87, | |
| "learning_rate": 0.00011521587743732591, | |
| "loss": 1.3225, | |
| "step": 2535 | |
| }, | |
| { | |
| "epoch": 0.87, | |
| "learning_rate": 0.00011504178272980501, | |
| "loss": 1.413, | |
| "step": 2540 | |
| }, | |
| { | |
| "epoch": 0.87, | |
| "learning_rate": 0.00011486768802228413, | |
| "loss": 1.3811, | |
| "step": 2545 | |
| }, | |
| { | |
| "epoch": 0.87, | |
| "learning_rate": 0.00011469359331476324, | |
| "loss": 1.311, | |
| "step": 2550 | |
| }, | |
| { | |
| "epoch": 0.87, | |
| "learning_rate": 0.00011451949860724233, | |
| "loss": 1.3874, | |
| "step": 2555 | |
| }, | |
| { | |
| "epoch": 0.88, | |
| "learning_rate": 0.00011434540389972147, | |
| "loss": 1.4261, | |
| "step": 2560 | |
| }, | |
| { | |
| "epoch": 0.88, | |
| "learning_rate": 0.00011417130919220056, | |
| "loss": 1.3014, | |
| "step": 2565 | |
| }, | |
| { | |
| "epoch": 0.88, | |
| "learning_rate": 0.00011399721448467968, | |
| "loss": 1.3277, | |
| "step": 2570 | |
| }, | |
| { | |
| "epoch": 0.88, | |
| "learning_rate": 0.00011382311977715878, | |
| "loss": 1.2828, | |
| "step": 2575 | |
| }, | |
| { | |
| "epoch": 0.88, | |
| "learning_rate": 0.00011364902506963788, | |
| "loss": 1.3378, | |
| "step": 2580 | |
| }, | |
| { | |
| "epoch": 0.88, | |
| "learning_rate": 0.00011347493036211701, | |
| "loss": 1.2749, | |
| "step": 2585 | |
| }, | |
| { | |
| "epoch": 0.89, | |
| "learning_rate": 0.0001133008356545961, | |
| "loss": 1.2597, | |
| "step": 2590 | |
| }, | |
| { | |
| "epoch": 0.89, | |
| "learning_rate": 0.00011312674094707521, | |
| "loss": 1.3568, | |
| "step": 2595 | |
| }, | |
| { | |
| "epoch": 0.89, | |
| "learning_rate": 0.00011295264623955433, | |
| "loss": 1.2944, | |
| "step": 2600 | |
| }, | |
| { | |
| "epoch": 0.89, | |
| "eval_loss": 1.2979168891906738, | |
| "eval_runtime": 738.7601, | |
| "eval_samples_per_second": 7.032, | |
| "eval_steps_per_second": 0.88, | |
| "step": 2600 | |
| }, | |
| { | |
| "epoch": 0.89, | |
| "learning_rate": 0.00011277855153203343, | |
| "loss": 1.2839, | |
| "step": 2605 | |
| }, | |
| { | |
| "epoch": 0.89, | |
| "learning_rate": 0.00011260445682451255, | |
| "loss": 1.267, | |
| "step": 2610 | |
| }, | |
| { | |
| "epoch": 0.89, | |
| "learning_rate": 0.00011243036211699164, | |
| "loss": 1.3281, | |
| "step": 2615 | |
| }, | |
| { | |
| "epoch": 0.9, | |
| "learning_rate": 0.00011225626740947075, | |
| "loss": 1.3206, | |
| "step": 2620 | |
| }, | |
| { | |
| "epoch": 0.9, | |
| "learning_rate": 0.00011208217270194987, | |
| "loss": 1.3363, | |
| "step": 2625 | |
| }, | |
| { | |
| "epoch": 0.9, | |
| "learning_rate": 0.00011190807799442898, | |
| "loss": 1.3629, | |
| "step": 2630 | |
| }, | |
| { | |
| "epoch": 0.9, | |
| "learning_rate": 0.00011173398328690807, | |
| "loss": 1.2982, | |
| "step": 2635 | |
| }, | |
| { | |
| "epoch": 0.9, | |
| "learning_rate": 0.0001115598885793872, | |
| "loss": 1.318, | |
| "step": 2640 | |
| }, | |
| { | |
| "epoch": 0.91, | |
| "learning_rate": 0.0001113857938718663, | |
| "loss": 1.3328, | |
| "step": 2645 | |
| }, | |
| { | |
| "epoch": 0.91, | |
| "learning_rate": 0.00011121169916434541, | |
| "loss": 1.4585, | |
| "step": 2650 | |
| }, | |
| { | |
| "epoch": 0.91, | |
| "learning_rate": 0.00011103760445682452, | |
| "loss": 1.3041, | |
| "step": 2655 | |
| }, | |
| { | |
| "epoch": 0.91, | |
| "learning_rate": 0.00011086350974930361, | |
| "loss": 1.3435, | |
| "step": 2660 | |
| }, | |
| { | |
| "epoch": 0.91, | |
| "learning_rate": 0.00011068941504178274, | |
| "loss": 1.3172, | |
| "step": 2665 | |
| }, | |
| { | |
| "epoch": 0.91, | |
| "learning_rate": 0.00011051532033426184, | |
| "loss": 1.335, | |
| "step": 2670 | |
| }, | |
| { | |
| "epoch": 0.92, | |
| "learning_rate": 0.00011034122562674096, | |
| "loss": 1.3163, | |
| "step": 2675 | |
| }, | |
| { | |
| "epoch": 0.92, | |
| "learning_rate": 0.00011016713091922006, | |
| "loss": 1.2974, | |
| "step": 2680 | |
| }, | |
| { | |
| "epoch": 0.92, | |
| "learning_rate": 0.00010999303621169917, | |
| "loss": 1.3252, | |
| "step": 2685 | |
| }, | |
| { | |
| "epoch": 0.92, | |
| "learning_rate": 0.00010981894150417829, | |
| "loss": 1.2321, | |
| "step": 2690 | |
| }, | |
| { | |
| "epoch": 0.92, | |
| "learning_rate": 0.00010964484679665738, | |
| "loss": 1.3204, | |
| "step": 2695 | |
| }, | |
| { | |
| "epoch": 0.92, | |
| "learning_rate": 0.00010947075208913649, | |
| "loss": 1.3555, | |
| "step": 2700 | |
| }, | |
| { | |
| "epoch": 0.93, | |
| "learning_rate": 0.0001092966573816156, | |
| "loss": 1.334, | |
| "step": 2705 | |
| }, | |
| { | |
| "epoch": 0.93, | |
| "learning_rate": 0.00010912256267409471, | |
| "loss": 1.393, | |
| "step": 2710 | |
| }, | |
| { | |
| "epoch": 0.93, | |
| "learning_rate": 0.00010894846796657383, | |
| "loss": 1.3994, | |
| "step": 2715 | |
| }, | |
| { | |
| "epoch": 0.93, | |
| "learning_rate": 0.00010877437325905294, | |
| "loss": 1.3573, | |
| "step": 2720 | |
| }, | |
| { | |
| "epoch": 0.93, | |
| "learning_rate": 0.00010860027855153203, | |
| "loss": 1.3483, | |
| "step": 2725 | |
| }, | |
| { | |
| "epoch": 0.93, | |
| "learning_rate": 0.00010842618384401115, | |
| "loss": 1.273, | |
| "step": 2730 | |
| }, | |
| { | |
| "epoch": 0.94, | |
| "learning_rate": 0.00010825208913649026, | |
| "loss": 1.3775, | |
| "step": 2735 | |
| }, | |
| { | |
| "epoch": 0.94, | |
| "learning_rate": 0.00010807799442896935, | |
| "loss": 1.2803, | |
| "step": 2740 | |
| }, | |
| { | |
| "epoch": 0.94, | |
| "learning_rate": 0.00010790389972144848, | |
| "loss": 1.2606, | |
| "step": 2745 | |
| }, | |
| { | |
| "epoch": 0.94, | |
| "learning_rate": 0.00010772980501392757, | |
| "loss": 1.2591, | |
| "step": 2750 | |
| }, | |
| { | |
| "epoch": 0.94, | |
| "learning_rate": 0.00010755571030640669, | |
| "loss": 1.2515, | |
| "step": 2755 | |
| }, | |
| { | |
| "epoch": 0.94, | |
| "learning_rate": 0.0001073816155988858, | |
| "loss": 1.4989, | |
| "step": 2760 | |
| }, | |
| { | |
| "epoch": 0.95, | |
| "learning_rate": 0.0001072075208913649, | |
| "loss": 1.3527, | |
| "step": 2765 | |
| }, | |
| { | |
| "epoch": 0.95, | |
| "learning_rate": 0.00010703342618384402, | |
| "loss": 1.4096, | |
| "step": 2770 | |
| }, | |
| { | |
| "epoch": 0.95, | |
| "learning_rate": 0.00010685933147632312, | |
| "loss": 1.3479, | |
| "step": 2775 | |
| }, | |
| { | |
| "epoch": 0.95, | |
| "learning_rate": 0.00010668523676880225, | |
| "loss": 1.3262, | |
| "step": 2780 | |
| }, | |
| { | |
| "epoch": 0.95, | |
| "learning_rate": 0.00010651114206128134, | |
| "loss": 1.3421, | |
| "step": 2785 | |
| }, | |
| { | |
| "epoch": 0.95, | |
| "learning_rate": 0.00010633704735376045, | |
| "loss": 1.3808, | |
| "step": 2790 | |
| }, | |
| { | |
| "epoch": 0.96, | |
| "learning_rate": 0.00010616295264623957, | |
| "loss": 1.3222, | |
| "step": 2795 | |
| }, | |
| { | |
| "epoch": 0.96, | |
| "learning_rate": 0.00010598885793871866, | |
| "loss": 1.2433, | |
| "step": 2800 | |
| }, | |
| { | |
| "epoch": 0.96, | |
| "eval_loss": 1.2961724996566772, | |
| "eval_runtime": 732.0495, | |
| "eval_samples_per_second": 7.097, | |
| "eval_steps_per_second": 0.888, | |
| "step": 2800 | |
| }, | |
| { | |
| "epoch": 0.96, | |
| "learning_rate": 0.00010581476323119777, | |
| "loss": 1.2961, | |
| "step": 2805 | |
| }, | |
| { | |
| "epoch": 0.96, | |
| "learning_rate": 0.00010564066852367689, | |
| "loss": 1.3239, | |
| "step": 2810 | |
| }, | |
| { | |
| "epoch": 0.96, | |
| "learning_rate": 0.00010546657381615599, | |
| "loss": 1.4174, | |
| "step": 2815 | |
| }, | |
| { | |
| "epoch": 0.97, | |
| "learning_rate": 0.00010529247910863511, | |
| "loss": 1.4159, | |
| "step": 2820 | |
| }, | |
| { | |
| "epoch": 0.97, | |
| "learning_rate": 0.00010511838440111422, | |
| "loss": 1.2958, | |
| "step": 2825 | |
| }, | |
| { | |
| "epoch": 0.97, | |
| "learning_rate": 0.00010494428969359331, | |
| "loss": 1.3396, | |
| "step": 2830 | |
| }, | |
| { | |
| "epoch": 0.97, | |
| "learning_rate": 0.00010477019498607243, | |
| "loss": 1.3403, | |
| "step": 2835 | |
| }, | |
| { | |
| "epoch": 0.97, | |
| "learning_rate": 0.00010459610027855153, | |
| "loss": 1.3568, | |
| "step": 2840 | |
| }, | |
| { | |
| "epoch": 0.97, | |
| "learning_rate": 0.00010442200557103065, | |
| "loss": 1.2791, | |
| "step": 2845 | |
| }, | |
| { | |
| "epoch": 0.98, | |
| "learning_rate": 0.00010424791086350976, | |
| "loss": 1.3467, | |
| "step": 2850 | |
| }, | |
| { | |
| "epoch": 0.98, | |
| "learning_rate": 0.00010407381615598885, | |
| "loss": 1.3204, | |
| "step": 2855 | |
| }, | |
| { | |
| "epoch": 0.98, | |
| "learning_rate": 0.00010389972144846799, | |
| "loss": 1.3143, | |
| "step": 2860 | |
| }, | |
| { | |
| "epoch": 0.98, | |
| "learning_rate": 0.00010372562674094708, | |
| "loss": 1.3111, | |
| "step": 2865 | |
| }, | |
| { | |
| "epoch": 0.98, | |
| "learning_rate": 0.00010355153203342618, | |
| "loss": 1.2282, | |
| "step": 2870 | |
| }, | |
| { | |
| "epoch": 0.98, | |
| "learning_rate": 0.0001033774373259053, | |
| "loss": 1.3535, | |
| "step": 2875 | |
| }, | |
| { | |
| "epoch": 0.99, | |
| "learning_rate": 0.0001032033426183844, | |
| "loss": 1.3183, | |
| "step": 2880 | |
| }, | |
| { | |
| "epoch": 0.99, | |
| "learning_rate": 0.00010302924791086353, | |
| "loss": 1.2829, | |
| "step": 2885 | |
| }, | |
| { | |
| "epoch": 0.99, | |
| "learning_rate": 0.00010285515320334262, | |
| "loss": 1.1567, | |
| "step": 2890 | |
| }, | |
| { | |
| "epoch": 0.99, | |
| "learning_rate": 0.00010268105849582173, | |
| "loss": 1.3606, | |
| "step": 2895 | |
| }, | |
| { | |
| "epoch": 0.99, | |
| "learning_rate": 0.00010250696378830085, | |
| "loss": 1.3111, | |
| "step": 2900 | |
| }, | |
| { | |
| "epoch": 0.99, | |
| "learning_rate": 0.00010233286908077995, | |
| "loss": 1.3117, | |
| "step": 2905 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 0.00010215877437325904, | |
| "loss": 1.3033, | |
| "step": 2910 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 0.00010198467966573816, | |
| "loss": 1.3488, | |
| "step": 2915 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 0.00010181058495821727, | |
| "loss": 1.2972, | |
| "step": 2920 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 0.00010163649025069639, | |
| "loss": 1.3485, | |
| "step": 2925 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 0.0001014623955431755, | |
| "loss": 1.371, | |
| "step": 2930 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 0.00010128830083565459, | |
| "loss": 1.3131, | |
| "step": 2935 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 0.00010111420612813372, | |
| "loss": 1.3348, | |
| "step": 2940 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 0.00010094011142061281, | |
| "loss": 1.4028, | |
| "step": 2945 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 0.00010076601671309193, | |
| "loss": 1.2784, | |
| "step": 2950 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 0.00010059192200557104, | |
| "loss": 1.3039, | |
| "step": 2955 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 0.00010041782729805013, | |
| "loss": 1.4023, | |
| "step": 2960 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 0.00010024373259052926, | |
| "loss": 1.3074, | |
| "step": 2965 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 0.00010006963788300836, | |
| "loss": 1.259, | |
| "step": 2970 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 9.989554317548748e-05, | |
| "loss": 1.3248, | |
| "step": 2975 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 9.972144846796658e-05, | |
| "loss": 1.3227, | |
| "step": 2980 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 9.954735376044569e-05, | |
| "loss": 1.294, | |
| "step": 2985 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 9.93732590529248e-05, | |
| "loss": 1.2245, | |
| "step": 2990 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 9.91991643454039e-05, | |
| "loss": 1.2731, | |
| "step": 2995 | |
| }, | |
| { | |
| "epoch": 1.03, | |
| "learning_rate": 9.902506963788302e-05, | |
| "loss": 1.2349, | |
| "step": 3000 | |
| }, | |
| { | |
| "epoch": 1.03, | |
| "eval_loss": 1.2951756715774536, | |
| "eval_runtime": 732.4846, | |
| "eval_samples_per_second": 7.092, | |
| "eval_steps_per_second": 0.887, | |
| "step": 3000 | |
| }, | |
| { | |
| "epoch": 1.03, | |
| "learning_rate": 9.885097493036211e-05, | |
| "loss": 1.3739, | |
| "step": 3005 | |
| }, | |
| { | |
| "epoch": 1.03, | |
| "learning_rate": 9.867688022284123e-05, | |
| "loss": 1.3928, | |
| "step": 3010 | |
| }, | |
| { | |
| "epoch": 1.03, | |
| "learning_rate": 9.850278551532034e-05, | |
| "loss": 1.2705, | |
| "step": 3015 | |
| }, | |
| { | |
| "epoch": 1.03, | |
| "learning_rate": 9.832869080779946e-05, | |
| "loss": 1.306, | |
| "step": 3020 | |
| }, | |
| { | |
| "epoch": 1.04, | |
| "learning_rate": 9.815459610027855e-05, | |
| "loss": 1.262, | |
| "step": 3025 | |
| }, | |
| { | |
| "epoch": 1.04, | |
| "learning_rate": 9.798050139275766e-05, | |
| "loss": 1.2923, | |
| "step": 3030 | |
| }, | |
| { | |
| "epoch": 1.04, | |
| "learning_rate": 9.780640668523678e-05, | |
| "loss": 1.3027, | |
| "step": 3035 | |
| }, | |
| { | |
| "epoch": 1.04, | |
| "learning_rate": 9.763231197771588e-05, | |
| "loss": 1.3312, | |
| "step": 3040 | |
| }, | |
| { | |
| "epoch": 1.04, | |
| "learning_rate": 9.7458217270195e-05, | |
| "loss": 1.3507, | |
| "step": 3045 | |
| }, | |
| { | |
| "epoch": 1.04, | |
| "learning_rate": 9.728412256267409e-05, | |
| "loss": 1.2724, | |
| "step": 3050 | |
| }, | |
| { | |
| "epoch": 1.05, | |
| "learning_rate": 9.711002785515321e-05, | |
| "loss": 1.2655, | |
| "step": 3055 | |
| }, | |
| { | |
| "epoch": 1.05, | |
| "learning_rate": 9.693593314763232e-05, | |
| "loss": 1.352, | |
| "step": 3060 | |
| }, | |
| { | |
| "epoch": 1.05, | |
| "learning_rate": 9.676183844011142e-05, | |
| "loss": 1.3074, | |
| "step": 3065 | |
| }, | |
| { | |
| "epoch": 1.05, | |
| "learning_rate": 9.658774373259053e-05, | |
| "loss": 1.3014, | |
| "step": 3070 | |
| }, | |
| { | |
| "epoch": 1.05, | |
| "learning_rate": 9.641364902506964e-05, | |
| "loss": 1.3362, | |
| "step": 3075 | |
| }, | |
| { | |
| "epoch": 1.05, | |
| "learning_rate": 9.623955431754876e-05, | |
| "loss": 1.3764, | |
| "step": 3080 | |
| }, | |
| { | |
| "epoch": 1.06, | |
| "learning_rate": 9.606545961002786e-05, | |
| "loss": 1.2733, | |
| "step": 3085 | |
| }, | |
| { | |
| "epoch": 1.06, | |
| "learning_rate": 9.589136490250697e-05, | |
| "loss": 1.2318, | |
| "step": 3090 | |
| }, | |
| { | |
| "epoch": 1.06, | |
| "learning_rate": 9.571727019498607e-05, | |
| "loss": 1.3198, | |
| "step": 3095 | |
| }, | |
| { | |
| "epoch": 1.06, | |
| "learning_rate": 9.554317548746519e-05, | |
| "loss": 1.3222, | |
| "step": 3100 | |
| }, | |
| { | |
| "epoch": 1.06, | |
| "learning_rate": 9.53690807799443e-05, | |
| "loss": 1.2183, | |
| "step": 3105 | |
| }, | |
| { | |
| "epoch": 1.06, | |
| "learning_rate": 9.519498607242339e-05, | |
| "loss": 1.342, | |
| "step": 3110 | |
| }, | |
| { | |
| "epoch": 1.07, | |
| "learning_rate": 9.502089136490251e-05, | |
| "loss": 1.268, | |
| "step": 3115 | |
| }, | |
| { | |
| "epoch": 1.07, | |
| "learning_rate": 9.484679665738162e-05, | |
| "loss": 1.373, | |
| "step": 3120 | |
| }, | |
| { | |
| "epoch": 1.07, | |
| "learning_rate": 9.467270194986074e-05, | |
| "loss": 1.326, | |
| "step": 3125 | |
| }, | |
| { | |
| "epoch": 1.07, | |
| "learning_rate": 9.449860724233984e-05, | |
| "loss": 1.3662, | |
| "step": 3130 | |
| }, | |
| { | |
| "epoch": 1.07, | |
| "learning_rate": 9.432451253481895e-05, | |
| "loss": 1.327, | |
| "step": 3135 | |
| }, | |
| { | |
| "epoch": 1.07, | |
| "learning_rate": 9.415041782729805e-05, | |
| "loss": 1.2587, | |
| "step": 3140 | |
| }, | |
| { | |
| "epoch": 1.08, | |
| "learning_rate": 9.397632311977716e-05, | |
| "loss": 1.2852, | |
| "step": 3145 | |
| }, | |
| { | |
| "epoch": 1.08, | |
| "learning_rate": 9.380222841225628e-05, | |
| "loss": 1.2778, | |
| "step": 3150 | |
| }, | |
| { | |
| "epoch": 1.08, | |
| "learning_rate": 9.362813370473537e-05, | |
| "loss": 1.3912, | |
| "step": 3155 | |
| }, | |
| { | |
| "epoch": 1.08, | |
| "learning_rate": 9.345403899721449e-05, | |
| "loss": 1.2293, | |
| "step": 3160 | |
| }, | |
| { | |
| "epoch": 1.08, | |
| "learning_rate": 9.32799442896936e-05, | |
| "loss": 1.3164, | |
| "step": 3165 | |
| }, | |
| { | |
| "epoch": 1.08, | |
| "learning_rate": 9.310584958217272e-05, | |
| "loss": 1.3641, | |
| "step": 3170 | |
| }, | |
| { | |
| "epoch": 1.09, | |
| "learning_rate": 9.293175487465181e-05, | |
| "loss": 1.2796, | |
| "step": 3175 | |
| }, | |
| { | |
| "epoch": 1.09, | |
| "learning_rate": 9.275766016713092e-05, | |
| "loss": 1.3221, | |
| "step": 3180 | |
| }, | |
| { | |
| "epoch": 1.09, | |
| "learning_rate": 9.258356545961003e-05, | |
| "loss": 1.3635, | |
| "step": 3185 | |
| }, | |
| { | |
| "epoch": 1.09, | |
| "learning_rate": 9.240947075208914e-05, | |
| "loss": 1.2781, | |
| "step": 3190 | |
| }, | |
| { | |
| "epoch": 1.09, | |
| "learning_rate": 9.223537604456825e-05, | |
| "loss": 1.3838, | |
| "step": 3195 | |
| }, | |
| { | |
| "epoch": 1.1, | |
| "learning_rate": 9.206128133704735e-05, | |
| "loss": 1.3126, | |
| "step": 3200 | |
| }, | |
| { | |
| "epoch": 1.1, | |
| "eval_loss": 1.2933902740478516, | |
| "eval_runtime": 732.1293, | |
| "eval_samples_per_second": 7.096, | |
| "eval_steps_per_second": 0.888, | |
| "step": 3200 | |
| }, | |
| { | |
| "epoch": 1.1, | |
| "learning_rate": 9.188718662952647e-05, | |
| "loss": 1.2489, | |
| "step": 3205 | |
| }, | |
| { | |
| "epoch": 1.1, | |
| "learning_rate": 9.171309192200558e-05, | |
| "loss": 1.3376, | |
| "step": 3210 | |
| }, | |
| { | |
| "epoch": 1.1, | |
| "learning_rate": 9.153899721448468e-05, | |
| "loss": 1.2556, | |
| "step": 3215 | |
| }, | |
| { | |
| "epoch": 1.1, | |
| "learning_rate": 9.136490250696379e-05, | |
| "loss": 1.3112, | |
| "step": 3220 | |
| }, | |
| { | |
| "epoch": 1.1, | |
| "learning_rate": 9.11908077994429e-05, | |
| "loss": 1.2835, | |
| "step": 3225 | |
| }, | |
| { | |
| "epoch": 1.11, | |
| "learning_rate": 9.101671309192202e-05, | |
| "loss": 1.2976, | |
| "step": 3230 | |
| }, | |
| { | |
| "epoch": 1.11, | |
| "learning_rate": 9.084261838440112e-05, | |
| "loss": 1.266, | |
| "step": 3235 | |
| }, | |
| { | |
| "epoch": 1.11, | |
| "learning_rate": 9.066852367688023e-05, | |
| "loss": 1.3171, | |
| "step": 3240 | |
| }, | |
| { | |
| "epoch": 1.11, | |
| "learning_rate": 9.049442896935933e-05, | |
| "loss": 1.4022, | |
| "step": 3245 | |
| }, | |
| { | |
| "epoch": 1.11, | |
| "learning_rate": 9.032033426183845e-05, | |
| "loss": 1.2727, | |
| "step": 3250 | |
| }, | |
| { | |
| "epoch": 1.11, | |
| "learning_rate": 9.014623955431756e-05, | |
| "loss": 1.3149, | |
| "step": 3255 | |
| }, | |
| { | |
| "epoch": 1.12, | |
| "learning_rate": 8.997214484679665e-05, | |
| "loss": 1.3104, | |
| "step": 3260 | |
| }, | |
| { | |
| "epoch": 1.12, | |
| "learning_rate": 8.979805013927577e-05, | |
| "loss": 1.2367, | |
| "step": 3265 | |
| }, | |
| { | |
| "epoch": 1.12, | |
| "learning_rate": 8.962395543175488e-05, | |
| "loss": 1.2615, | |
| "step": 3270 | |
| }, | |
| { | |
| "epoch": 1.12, | |
| "learning_rate": 8.9449860724234e-05, | |
| "loss": 1.3045, | |
| "step": 3275 | |
| }, | |
| { | |
| "epoch": 1.12, | |
| "learning_rate": 8.927576601671309e-05, | |
| "loss": 1.2793, | |
| "step": 3280 | |
| }, | |
| { | |
| "epoch": 1.12, | |
| "learning_rate": 8.910167130919221e-05, | |
| "loss": 1.3093, | |
| "step": 3285 | |
| }, | |
| { | |
| "epoch": 1.13, | |
| "learning_rate": 8.892757660167131e-05, | |
| "loss": 1.3182, | |
| "step": 3290 | |
| }, | |
| { | |
| "epoch": 1.13, | |
| "learning_rate": 8.875348189415042e-05, | |
| "loss": 1.2849, | |
| "step": 3295 | |
| }, | |
| { | |
| "epoch": 1.13, | |
| "learning_rate": 8.857938718662953e-05, | |
| "loss": 1.3269, | |
| "step": 3300 | |
| }, | |
| { | |
| "epoch": 1.13, | |
| "learning_rate": 8.840529247910863e-05, | |
| "loss": 1.2924, | |
| "step": 3305 | |
| }, | |
| { | |
| "epoch": 1.13, | |
| "learning_rate": 8.823119777158775e-05, | |
| "loss": 1.2871, | |
| "step": 3310 | |
| }, | |
| { | |
| "epoch": 1.13, | |
| "learning_rate": 8.805710306406686e-05, | |
| "loss": 1.3247, | |
| "step": 3315 | |
| }, | |
| { | |
| "epoch": 1.14, | |
| "learning_rate": 8.788300835654598e-05, | |
| "loss": 1.4262, | |
| "step": 3320 | |
| }, | |
| { | |
| "epoch": 1.14, | |
| "learning_rate": 8.770891364902507e-05, | |
| "loss": 1.4257, | |
| "step": 3325 | |
| }, | |
| { | |
| "epoch": 1.14, | |
| "learning_rate": 8.753481894150418e-05, | |
| "loss": 1.2515, | |
| "step": 3330 | |
| }, | |
| { | |
| "epoch": 1.14, | |
| "learning_rate": 8.73607242339833e-05, | |
| "loss": 1.3327, | |
| "step": 3335 | |
| }, | |
| { | |
| "epoch": 1.14, | |
| "learning_rate": 8.71866295264624e-05, | |
| "loss": 1.2946, | |
| "step": 3340 | |
| }, | |
| { | |
| "epoch": 1.14, | |
| "learning_rate": 8.70125348189415e-05, | |
| "loss": 1.3422, | |
| "step": 3345 | |
| }, | |
| { | |
| "epoch": 1.15, | |
| "learning_rate": 8.683844011142061e-05, | |
| "loss": 1.3492, | |
| "step": 3350 | |
| }, | |
| { | |
| "epoch": 1.15, | |
| "learning_rate": 8.666434540389973e-05, | |
| "loss": 1.3511, | |
| "step": 3355 | |
| }, | |
| { | |
| "epoch": 1.15, | |
| "learning_rate": 8.649025069637884e-05, | |
| "loss": 1.2687, | |
| "step": 3360 | |
| }, | |
| { | |
| "epoch": 1.15, | |
| "learning_rate": 8.631615598885794e-05, | |
| "loss": 1.3712, | |
| "step": 3365 | |
| }, | |
| { | |
| "epoch": 1.15, | |
| "learning_rate": 8.614206128133705e-05, | |
| "loss": 1.3567, | |
| "step": 3370 | |
| }, | |
| { | |
| "epoch": 1.16, | |
| "learning_rate": 8.596796657381616e-05, | |
| "loss": 1.3005, | |
| "step": 3375 | |
| }, | |
| { | |
| "epoch": 1.16, | |
| "learning_rate": 8.579387186629528e-05, | |
| "loss": 1.213, | |
| "step": 3380 | |
| }, | |
| { | |
| "epoch": 1.16, | |
| "learning_rate": 8.561977715877437e-05, | |
| "loss": 1.2404, | |
| "step": 3385 | |
| }, | |
| { | |
| "epoch": 1.16, | |
| "learning_rate": 8.544568245125349e-05, | |
| "loss": 1.3713, | |
| "step": 3390 | |
| }, | |
| { | |
| "epoch": 1.16, | |
| "learning_rate": 8.52715877437326e-05, | |
| "loss": 1.3294, | |
| "step": 3395 | |
| }, | |
| { | |
| "epoch": 1.16, | |
| "learning_rate": 8.509749303621171e-05, | |
| "loss": 1.284, | |
| "step": 3400 | |
| }, | |
| { | |
| "epoch": 1.16, | |
| "eval_loss": 1.2925370931625366, | |
| "eval_runtime": 732.6042, | |
| "eval_samples_per_second": 7.091, | |
| "eval_steps_per_second": 0.887, | |
| "step": 3400 | |
| }, | |
| { | |
| "epoch": 1.17, | |
| "learning_rate": 8.49233983286908e-05, | |
| "loss": 1.355, | |
| "step": 3405 | |
| }, | |
| { | |
| "epoch": 1.17, | |
| "learning_rate": 8.474930362116991e-05, | |
| "loss": 1.3339, | |
| "step": 3410 | |
| }, | |
| { | |
| "epoch": 1.17, | |
| "learning_rate": 8.457520891364903e-05, | |
| "loss": 1.2847, | |
| "step": 3415 | |
| }, | |
| { | |
| "epoch": 1.17, | |
| "learning_rate": 8.440111420612814e-05, | |
| "loss": 1.3614, | |
| "step": 3420 | |
| }, | |
| { | |
| "epoch": 1.17, | |
| "learning_rate": 8.422701949860726e-05, | |
| "loss": 1.2328, | |
| "step": 3425 | |
| }, | |
| { | |
| "epoch": 1.17, | |
| "learning_rate": 8.405292479108635e-05, | |
| "loss": 1.3357, | |
| "step": 3430 | |
| }, | |
| { | |
| "epoch": 1.18, | |
| "learning_rate": 8.387883008356547e-05, | |
| "loss": 1.2801, | |
| "step": 3435 | |
| }, | |
| { | |
| "epoch": 1.18, | |
| "learning_rate": 8.370473537604457e-05, | |
| "loss": 1.2555, | |
| "step": 3440 | |
| }, | |
| { | |
| "epoch": 1.18, | |
| "learning_rate": 8.353064066852368e-05, | |
| "loss": 1.2358, | |
| "step": 3445 | |
| }, | |
| { | |
| "epoch": 1.18, | |
| "learning_rate": 8.335654596100279e-05, | |
| "loss": 1.3348, | |
| "step": 3450 | |
| }, | |
| { | |
| "epoch": 1.18, | |
| "learning_rate": 8.318245125348189e-05, | |
| "loss": 1.3154, | |
| "step": 3455 | |
| }, | |
| { | |
| "epoch": 1.18, | |
| "learning_rate": 8.300835654596101e-05, | |
| "loss": 1.2447, | |
| "step": 3460 | |
| }, | |
| { | |
| "epoch": 1.19, | |
| "learning_rate": 8.283426183844012e-05, | |
| "loss": 1.3446, | |
| "step": 3465 | |
| }, | |
| { | |
| "epoch": 1.19, | |
| "learning_rate": 8.266016713091922e-05, | |
| "loss": 1.2923, | |
| "step": 3470 | |
| }, | |
| { | |
| "epoch": 1.19, | |
| "learning_rate": 8.248607242339833e-05, | |
| "loss": 1.2936, | |
| "step": 3475 | |
| }, | |
| { | |
| "epoch": 1.19, | |
| "learning_rate": 8.231197771587745e-05, | |
| "loss": 1.2774, | |
| "step": 3480 | |
| }, | |
| { | |
| "epoch": 1.19, | |
| "learning_rate": 8.213788300835655e-05, | |
| "loss": 1.2417, | |
| "step": 3485 | |
| }, | |
| { | |
| "epoch": 1.19, | |
| "learning_rate": 8.196378830083565e-05, | |
| "loss": 1.3392, | |
| "step": 3490 | |
| }, | |
| { | |
| "epoch": 1.2, | |
| "learning_rate": 8.178969359331477e-05, | |
| "loss": 1.2959, | |
| "step": 3495 | |
| }, | |
| { | |
| "epoch": 1.2, | |
| "learning_rate": 8.161559888579387e-05, | |
| "loss": 1.3219, | |
| "step": 3500 | |
| }, | |
| { | |
| "epoch": 1.2, | |
| "learning_rate": 8.144150417827299e-05, | |
| "loss": 1.3028, | |
| "step": 3505 | |
| }, | |
| { | |
| "epoch": 1.2, | |
| "learning_rate": 8.12674094707521e-05, | |
| "loss": 1.3052, | |
| "step": 3510 | |
| }, | |
| { | |
| "epoch": 1.2, | |
| "learning_rate": 8.10933147632312e-05, | |
| "loss": 1.311, | |
| "step": 3515 | |
| }, | |
| { | |
| "epoch": 1.2, | |
| "learning_rate": 8.091922005571031e-05, | |
| "loss": 1.262, | |
| "step": 3520 | |
| }, | |
| { | |
| "epoch": 1.21, | |
| "learning_rate": 8.074512534818942e-05, | |
| "loss": 1.317, | |
| "step": 3525 | |
| }, | |
| { | |
| "epoch": 1.21, | |
| "learning_rate": 8.057103064066854e-05, | |
| "loss": 1.2233, | |
| "step": 3530 | |
| }, | |
| { | |
| "epoch": 1.21, | |
| "learning_rate": 8.039693593314763e-05, | |
| "loss": 1.3121, | |
| "step": 3535 | |
| }, | |
| { | |
| "epoch": 1.21, | |
| "learning_rate": 8.022284122562675e-05, | |
| "loss": 1.3265, | |
| "step": 3540 | |
| }, | |
| { | |
| "epoch": 1.21, | |
| "learning_rate": 8.004874651810585e-05, | |
| "loss": 1.323, | |
| "step": 3545 | |
| }, | |
| { | |
| "epoch": 1.21, | |
| "learning_rate": 7.987465181058497e-05, | |
| "loss": 1.3179, | |
| "step": 3550 | |
| }, | |
| { | |
| "epoch": 1.22, | |
| "learning_rate": 7.970055710306406e-05, | |
| "loss": 1.2651, | |
| "step": 3555 | |
| }, | |
| { | |
| "epoch": 1.22, | |
| "learning_rate": 7.952646239554317e-05, | |
| "loss": 1.2636, | |
| "step": 3560 | |
| }, | |
| { | |
| "epoch": 1.22, | |
| "learning_rate": 7.935236768802229e-05, | |
| "loss": 1.3234, | |
| "step": 3565 | |
| }, | |
| { | |
| "epoch": 1.22, | |
| "learning_rate": 7.91782729805014e-05, | |
| "loss": 1.2657, | |
| "step": 3570 | |
| }, | |
| { | |
| "epoch": 1.22, | |
| "learning_rate": 7.90041782729805e-05, | |
| "loss": 1.2825, | |
| "step": 3575 | |
| }, | |
| { | |
| "epoch": 1.23, | |
| "learning_rate": 7.883008356545961e-05, | |
| "loss": 1.3906, | |
| "step": 3580 | |
| }, | |
| { | |
| "epoch": 1.23, | |
| "learning_rate": 7.865598885793873e-05, | |
| "loss": 1.3652, | |
| "step": 3585 | |
| }, | |
| { | |
| "epoch": 1.23, | |
| "learning_rate": 7.848189415041783e-05, | |
| "loss": 1.3134, | |
| "step": 3590 | |
| }, | |
| { | |
| "epoch": 1.23, | |
| "learning_rate": 7.830779944289694e-05, | |
| "loss": 1.3358, | |
| "step": 3595 | |
| }, | |
| { | |
| "epoch": 1.23, | |
| "learning_rate": 7.813370473537605e-05, | |
| "loss": 1.2194, | |
| "step": 3600 | |
| }, | |
| { | |
| "epoch": 1.23, | |
| "eval_loss": 1.2912499904632568, | |
| "eval_runtime": 732.4213, | |
| "eval_samples_per_second": 7.093, | |
| "eval_steps_per_second": 0.887, | |
| "step": 3600 | |
| }, | |
| { | |
| "epoch": 1.23, | |
| "learning_rate": 7.795961002785515e-05, | |
| "loss": 1.3917, | |
| "step": 3605 | |
| }, | |
| { | |
| "epoch": 1.24, | |
| "learning_rate": 7.778551532033427e-05, | |
| "loss": 1.2922, | |
| "step": 3610 | |
| }, | |
| { | |
| "epoch": 1.24, | |
| "learning_rate": 7.761142061281338e-05, | |
| "loss": 1.3084, | |
| "step": 3615 | |
| }, | |
| { | |
| "epoch": 1.24, | |
| "learning_rate": 7.743732590529248e-05, | |
| "loss": 1.3974, | |
| "step": 3620 | |
| }, | |
| { | |
| "epoch": 1.24, | |
| "learning_rate": 7.726323119777159e-05, | |
| "loss": 1.3267, | |
| "step": 3625 | |
| }, | |
| { | |
| "epoch": 1.24, | |
| "learning_rate": 7.708913649025071e-05, | |
| "loss": 1.2931, | |
| "step": 3630 | |
| }, | |
| { | |
| "epoch": 1.24, | |
| "learning_rate": 7.691504178272981e-05, | |
| "loss": 1.2374, | |
| "step": 3635 | |
| }, | |
| { | |
| "epoch": 1.25, | |
| "learning_rate": 7.674094707520891e-05, | |
| "loss": 1.2701, | |
| "step": 3640 | |
| }, | |
| { | |
| "epoch": 1.25, | |
| "learning_rate": 7.656685236768803e-05, | |
| "loss": 1.3962, | |
| "step": 3645 | |
| }, | |
| { | |
| "epoch": 1.25, | |
| "learning_rate": 7.639275766016713e-05, | |
| "loss": 1.3269, | |
| "step": 3650 | |
| }, | |
| { | |
| "epoch": 1.25, | |
| "learning_rate": 7.621866295264625e-05, | |
| "loss": 1.3424, | |
| "step": 3655 | |
| }, | |
| { | |
| "epoch": 1.25, | |
| "learning_rate": 7.604456824512534e-05, | |
| "loss": 1.285, | |
| "step": 3660 | |
| }, | |
| { | |
| "epoch": 1.25, | |
| "learning_rate": 7.587047353760446e-05, | |
| "loss": 1.3711, | |
| "step": 3665 | |
| }, | |
| { | |
| "epoch": 1.26, | |
| "learning_rate": 7.569637883008357e-05, | |
| "loss": 1.3403, | |
| "step": 3670 | |
| }, | |
| { | |
| "epoch": 1.26, | |
| "learning_rate": 7.552228412256268e-05, | |
| "loss": 1.3563, | |
| "step": 3675 | |
| }, | |
| { | |
| "epoch": 1.26, | |
| "learning_rate": 7.534818941504178e-05, | |
| "loss": 1.2999, | |
| "step": 3680 | |
| }, | |
| { | |
| "epoch": 1.26, | |
| "learning_rate": 7.517409470752089e-05, | |
| "loss": 1.2715, | |
| "step": 3685 | |
| }, | |
| { | |
| "epoch": 1.26, | |
| "learning_rate": 7.500000000000001e-05, | |
| "loss": 1.3427, | |
| "step": 3690 | |
| }, | |
| { | |
| "epoch": 1.26, | |
| "learning_rate": 7.482590529247911e-05, | |
| "loss": 1.2558, | |
| "step": 3695 | |
| }, | |
| { | |
| "epoch": 1.27, | |
| "learning_rate": 7.465181058495823e-05, | |
| "loss": 1.3071, | |
| "step": 3700 | |
| }, | |
| { | |
| "epoch": 1.27, | |
| "learning_rate": 7.447771587743732e-05, | |
| "loss": 1.2598, | |
| "step": 3705 | |
| }, | |
| { | |
| "epoch": 1.27, | |
| "learning_rate": 7.430362116991643e-05, | |
| "loss": 1.3291, | |
| "step": 3710 | |
| }, | |
| { | |
| "epoch": 1.27, | |
| "learning_rate": 7.412952646239555e-05, | |
| "loss": 1.3219, | |
| "step": 3715 | |
| }, | |
| { | |
| "epoch": 1.27, | |
| "learning_rate": 7.395543175487466e-05, | |
| "loss": 1.346, | |
| "step": 3720 | |
| }, | |
| { | |
| "epoch": 1.27, | |
| "learning_rate": 7.378133704735376e-05, | |
| "loss": 1.3308, | |
| "step": 3725 | |
| }, | |
| { | |
| "epoch": 1.28, | |
| "learning_rate": 7.360724233983287e-05, | |
| "loss": 1.2899, | |
| "step": 3730 | |
| }, | |
| { | |
| "epoch": 1.28, | |
| "learning_rate": 7.343314763231199e-05, | |
| "loss": 1.2938, | |
| "step": 3735 | |
| }, | |
| { | |
| "epoch": 1.28, | |
| "learning_rate": 7.32590529247911e-05, | |
| "loss": 1.2433, | |
| "step": 3740 | |
| }, | |
| { | |
| "epoch": 1.28, | |
| "learning_rate": 7.30849582172702e-05, | |
| "loss": 1.3381, | |
| "step": 3745 | |
| }, | |
| { | |
| "epoch": 1.28, | |
| "learning_rate": 7.29108635097493e-05, | |
| "loss": 1.3111, | |
| "step": 3750 | |
| }, | |
| { | |
| "epoch": 1.29, | |
| "learning_rate": 7.273676880222841e-05, | |
| "loss": 1.3363, | |
| "step": 3755 | |
| }, | |
| { | |
| "epoch": 1.29, | |
| "learning_rate": 7.256267409470753e-05, | |
| "loss": 1.273, | |
| "step": 3760 | |
| }, | |
| { | |
| "epoch": 1.29, | |
| "learning_rate": 7.238857938718662e-05, | |
| "loss": 1.3039, | |
| "step": 3765 | |
| }, | |
| { | |
| "epoch": 1.29, | |
| "learning_rate": 7.221448467966574e-05, | |
| "loss": 1.2538, | |
| "step": 3770 | |
| }, | |
| { | |
| "epoch": 1.29, | |
| "learning_rate": 7.204038997214485e-05, | |
| "loss": 1.2903, | |
| "step": 3775 | |
| }, | |
| { | |
| "epoch": 1.29, | |
| "learning_rate": 7.186629526462397e-05, | |
| "loss": 1.3772, | |
| "step": 3780 | |
| }, | |
| { | |
| "epoch": 1.3, | |
| "learning_rate": 7.169220055710307e-05, | |
| "loss": 1.215, | |
| "step": 3785 | |
| }, | |
| { | |
| "epoch": 1.3, | |
| "learning_rate": 7.151810584958217e-05, | |
| "loss": 1.3577, | |
| "step": 3790 | |
| }, | |
| { | |
| "epoch": 1.3, | |
| "learning_rate": 7.134401114206129e-05, | |
| "loss": 1.3559, | |
| "step": 3795 | |
| }, | |
| { | |
| "epoch": 1.3, | |
| "learning_rate": 7.116991643454039e-05, | |
| "loss": 1.2885, | |
| "step": 3800 | |
| }, | |
| { | |
| "epoch": 1.3, | |
| "eval_loss": 1.2902462482452393, | |
| "eval_runtime": 737.6841, | |
| "eval_samples_per_second": 7.042, | |
| "eval_steps_per_second": 0.881, | |
| "step": 3800 | |
| }, | |
| { | |
| "epoch": 1.3, | |
| "learning_rate": 7.099582172701951e-05, | |
| "loss": 1.322, | |
| "step": 3805 | |
| }, | |
| { | |
| "epoch": 1.3, | |
| "learning_rate": 7.08217270194986e-05, | |
| "loss": 1.2344, | |
| "step": 3810 | |
| }, | |
| { | |
| "epoch": 1.31, | |
| "learning_rate": 7.064763231197772e-05, | |
| "loss": 1.2122, | |
| "step": 3815 | |
| }, | |
| { | |
| "epoch": 1.31, | |
| "learning_rate": 7.047353760445683e-05, | |
| "loss": 1.3973, | |
| "step": 3820 | |
| }, | |
| { | |
| "epoch": 1.31, | |
| "learning_rate": 7.029944289693594e-05, | |
| "loss": 1.2172, | |
| "step": 3825 | |
| }, | |
| { | |
| "epoch": 1.31, | |
| "learning_rate": 7.012534818941504e-05, | |
| "loss": 1.263, | |
| "step": 3830 | |
| }, | |
| { | |
| "epoch": 1.31, | |
| "learning_rate": 6.995125348189415e-05, | |
| "loss": 1.3488, | |
| "step": 3835 | |
| }, | |
| { | |
| "epoch": 1.31, | |
| "learning_rate": 6.977715877437327e-05, | |
| "loss": 1.391, | |
| "step": 3840 | |
| }, | |
| { | |
| "epoch": 1.32, | |
| "learning_rate": 6.960306406685237e-05, | |
| "loss": 1.3284, | |
| "step": 3845 | |
| }, | |
| { | |
| "epoch": 1.32, | |
| "learning_rate": 6.942896935933148e-05, | |
| "loss": 1.2766, | |
| "step": 3850 | |
| }, | |
| { | |
| "epoch": 1.32, | |
| "learning_rate": 6.925487465181058e-05, | |
| "loss": 1.3801, | |
| "step": 3855 | |
| }, | |
| { | |
| "epoch": 1.32, | |
| "learning_rate": 6.90807799442897e-05, | |
| "loss": 1.3323, | |
| "step": 3860 | |
| }, | |
| { | |
| "epoch": 1.32, | |
| "learning_rate": 6.890668523676881e-05, | |
| "loss": 1.2127, | |
| "step": 3865 | |
| }, | |
| { | |
| "epoch": 1.32, | |
| "learning_rate": 6.87325905292479e-05, | |
| "loss": 1.2669, | |
| "step": 3870 | |
| }, | |
| { | |
| "epoch": 1.33, | |
| "learning_rate": 6.855849582172702e-05, | |
| "loss": 1.3096, | |
| "step": 3875 | |
| }, | |
| { | |
| "epoch": 1.33, | |
| "learning_rate": 6.838440111420613e-05, | |
| "loss": 1.3761, | |
| "step": 3880 | |
| }, | |
| { | |
| "epoch": 1.33, | |
| "learning_rate": 6.821030640668525e-05, | |
| "loss": 1.3147, | |
| "step": 3885 | |
| }, | |
| { | |
| "epoch": 1.33, | |
| "learning_rate": 6.803621169916435e-05, | |
| "loss": 1.3225, | |
| "step": 3890 | |
| }, | |
| { | |
| "epoch": 1.33, | |
| "learning_rate": 6.786211699164346e-05, | |
| "loss": 1.3215, | |
| "step": 3895 | |
| }, | |
| { | |
| "epoch": 1.33, | |
| "learning_rate": 6.768802228412257e-05, | |
| "loss": 1.3797, | |
| "step": 3900 | |
| }, | |
| { | |
| "epoch": 1.34, | |
| "learning_rate": 6.751392757660167e-05, | |
| "loss": 1.2743, | |
| "step": 3905 | |
| }, | |
| { | |
| "epoch": 1.34, | |
| "learning_rate": 6.733983286908079e-05, | |
| "loss": 1.2811, | |
| "step": 3910 | |
| }, | |
| { | |
| "epoch": 1.34, | |
| "learning_rate": 6.716573816155988e-05, | |
| "loss": 1.3509, | |
| "step": 3915 | |
| }, | |
| { | |
| "epoch": 1.34, | |
| "learning_rate": 6.6991643454039e-05, | |
| "loss": 1.2394, | |
| "step": 3920 | |
| }, | |
| { | |
| "epoch": 1.34, | |
| "learning_rate": 6.681754874651811e-05, | |
| "loss": 1.2732, | |
| "step": 3925 | |
| }, | |
| { | |
| "epoch": 1.34, | |
| "learning_rate": 6.664345403899723e-05, | |
| "loss": 1.3003, | |
| "step": 3930 | |
| }, | |
| { | |
| "epoch": 1.35, | |
| "learning_rate": 6.646935933147632e-05, | |
| "loss": 1.2553, | |
| "step": 3935 | |
| }, | |
| { | |
| "epoch": 1.35, | |
| "learning_rate": 6.629526462395543e-05, | |
| "loss": 1.3428, | |
| "step": 3940 | |
| }, | |
| { | |
| "epoch": 1.35, | |
| "learning_rate": 6.612116991643455e-05, | |
| "loss": 1.3241, | |
| "step": 3945 | |
| }, | |
| { | |
| "epoch": 1.35, | |
| "learning_rate": 6.594707520891365e-05, | |
| "loss": 1.3564, | |
| "step": 3950 | |
| }, | |
| { | |
| "epoch": 1.35, | |
| "learning_rate": 6.577298050139276e-05, | |
| "loss": 1.3064, | |
| "step": 3955 | |
| }, | |
| { | |
| "epoch": 1.36, | |
| "learning_rate": 6.559888579387186e-05, | |
| "loss": 1.3194, | |
| "step": 3960 | |
| }, | |
| { | |
| "epoch": 1.36, | |
| "learning_rate": 6.542479108635098e-05, | |
| "loss": 1.3197, | |
| "step": 3965 | |
| }, | |
| { | |
| "epoch": 1.36, | |
| "learning_rate": 6.525069637883009e-05, | |
| "loss": 1.3256, | |
| "step": 3970 | |
| }, | |
| { | |
| "epoch": 1.36, | |
| "learning_rate": 6.50766016713092e-05, | |
| "loss": 1.2681, | |
| "step": 3975 | |
| }, | |
| { | |
| "epoch": 1.36, | |
| "learning_rate": 6.49025069637883e-05, | |
| "loss": 1.2085, | |
| "step": 3980 | |
| }, | |
| { | |
| "epoch": 1.36, | |
| "learning_rate": 6.472841225626741e-05, | |
| "loss": 1.3024, | |
| "step": 3985 | |
| }, | |
| { | |
| "epoch": 1.37, | |
| "learning_rate": 6.455431754874653e-05, | |
| "loss": 1.2928, | |
| "step": 3990 | |
| }, | |
| { | |
| "epoch": 1.37, | |
| "learning_rate": 6.438022284122563e-05, | |
| "loss": 1.2967, | |
| "step": 3995 | |
| }, | |
| { | |
| "epoch": 1.37, | |
| "learning_rate": 6.420612813370474e-05, | |
| "loss": 1.2159, | |
| "step": 4000 | |
| }, | |
| { | |
| "epoch": 1.37, | |
| "eval_loss": 1.288769245147705, | |
| "eval_runtime": 737.8329, | |
| "eval_samples_per_second": 7.041, | |
| "eval_steps_per_second": 0.881, | |
| "step": 4000 | |
| }, | |
| { | |
| "epoch": 1.37, | |
| "learning_rate": 6.403203342618384e-05, | |
| "loss": 1.3855, | |
| "step": 4005 | |
| }, | |
| { | |
| "epoch": 1.37, | |
| "learning_rate": 6.385793871866296e-05, | |
| "loss": 1.3212, | |
| "step": 4010 | |
| }, | |
| { | |
| "epoch": 1.37, | |
| "learning_rate": 6.368384401114207e-05, | |
| "loss": 1.3354, | |
| "step": 4015 | |
| }, | |
| { | |
| "epoch": 1.38, | |
| "learning_rate": 6.350974930362116e-05, | |
| "loss": 1.294, | |
| "step": 4020 | |
| }, | |
| { | |
| "epoch": 1.38, | |
| "learning_rate": 6.333565459610028e-05, | |
| "loss": 1.2667, | |
| "step": 4025 | |
| }, | |
| { | |
| "epoch": 1.38, | |
| "learning_rate": 6.316155988857939e-05, | |
| "loss": 1.3167, | |
| "step": 4030 | |
| }, | |
| { | |
| "epoch": 1.38, | |
| "learning_rate": 6.298746518105851e-05, | |
| "loss": 1.3521, | |
| "step": 4035 | |
| }, | |
| { | |
| "epoch": 1.38, | |
| "learning_rate": 6.28133704735376e-05, | |
| "loss": 1.248, | |
| "step": 4040 | |
| }, | |
| { | |
| "epoch": 1.38, | |
| "learning_rate": 6.263927576601672e-05, | |
| "loss": 1.2941, | |
| "step": 4045 | |
| }, | |
| { | |
| "epoch": 1.39, | |
| "learning_rate": 6.246518105849583e-05, | |
| "loss": 1.3428, | |
| "step": 4050 | |
| }, | |
| { | |
| "epoch": 1.39, | |
| "learning_rate": 6.229108635097493e-05, | |
| "loss": 1.1704, | |
| "step": 4055 | |
| }, | |
| { | |
| "epoch": 1.39, | |
| "learning_rate": 6.211699164345404e-05, | |
| "loss": 1.3148, | |
| "step": 4060 | |
| }, | |
| { | |
| "epoch": 1.39, | |
| "learning_rate": 6.194289693593314e-05, | |
| "loss": 1.275, | |
| "step": 4065 | |
| }, | |
| { | |
| "epoch": 1.39, | |
| "learning_rate": 6.176880222841226e-05, | |
| "loss": 1.3346, | |
| "step": 4070 | |
| }, | |
| { | |
| "epoch": 1.39, | |
| "learning_rate": 6.159470752089137e-05, | |
| "loss": 1.3067, | |
| "step": 4075 | |
| }, | |
| { | |
| "epoch": 1.4, | |
| "learning_rate": 6.142061281337049e-05, | |
| "loss": 1.3264, | |
| "step": 4080 | |
| }, | |
| { | |
| "epoch": 1.4, | |
| "learning_rate": 6.124651810584958e-05, | |
| "loss": 1.2819, | |
| "step": 4085 | |
| }, | |
| { | |
| "epoch": 1.4, | |
| "learning_rate": 6.107242339832869e-05, | |
| "loss": 1.3631, | |
| "step": 4090 | |
| }, | |
| { | |
| "epoch": 1.4, | |
| "learning_rate": 6.0898328690807806e-05, | |
| "loss": 1.2887, | |
| "step": 4095 | |
| }, | |
| { | |
| "epoch": 1.4, | |
| "learning_rate": 6.072423398328692e-05, | |
| "loss": 1.3059, | |
| "step": 4100 | |
| }, | |
| { | |
| "epoch": 1.4, | |
| "learning_rate": 6.055013927576602e-05, | |
| "loss": 1.2826, | |
| "step": 4105 | |
| }, | |
| { | |
| "epoch": 1.41, | |
| "learning_rate": 6.0376044568245124e-05, | |
| "loss": 1.3932, | |
| "step": 4110 | |
| }, | |
| { | |
| "epoch": 1.41, | |
| "learning_rate": 6.0201949860724236e-05, | |
| "loss": 1.2938, | |
| "step": 4115 | |
| }, | |
| { | |
| "epoch": 1.41, | |
| "learning_rate": 6.002785515320335e-05, | |
| "loss": 1.3074, | |
| "step": 4120 | |
| }, | |
| { | |
| "epoch": 1.41, | |
| "learning_rate": 5.985376044568245e-05, | |
| "loss": 1.2826, | |
| "step": 4125 | |
| }, | |
| { | |
| "epoch": 1.41, | |
| "learning_rate": 5.967966573816156e-05, | |
| "loss": 1.3634, | |
| "step": 4130 | |
| }, | |
| { | |
| "epoch": 1.42, | |
| "learning_rate": 5.9505571030640674e-05, | |
| "loss": 1.3375, | |
| "step": 4135 | |
| }, | |
| { | |
| "epoch": 1.42, | |
| "learning_rate": 5.9331476323119787e-05, | |
| "loss": 1.3556, | |
| "step": 4140 | |
| }, | |
| { | |
| "epoch": 1.42, | |
| "learning_rate": 5.9157381615598886e-05, | |
| "loss": 1.2967, | |
| "step": 4145 | |
| }, | |
| { | |
| "epoch": 1.42, | |
| "learning_rate": 5.898328690807799e-05, | |
| "loss": 1.3048, | |
| "step": 4150 | |
| }, | |
| { | |
| "epoch": 1.42, | |
| "learning_rate": 5.8809192200557104e-05, | |
| "loss": 1.2729, | |
| "step": 4155 | |
| }, | |
| { | |
| "epoch": 1.42, | |
| "learning_rate": 5.863509749303622e-05, | |
| "loss": 1.1831, | |
| "step": 4160 | |
| }, | |
| { | |
| "epoch": 1.43, | |
| "learning_rate": 5.846100278551533e-05, | |
| "loss": 1.3411, | |
| "step": 4165 | |
| }, | |
| { | |
| "epoch": 1.43, | |
| "learning_rate": 5.828690807799443e-05, | |
| "loss": 1.2939, | |
| "step": 4170 | |
| }, | |
| { | |
| "epoch": 1.43, | |
| "learning_rate": 5.811281337047354e-05, | |
| "loss": 1.3429, | |
| "step": 4175 | |
| }, | |
| { | |
| "epoch": 1.43, | |
| "learning_rate": 5.7938718662952654e-05, | |
| "loss": 1.3192, | |
| "step": 4180 | |
| }, | |
| { | |
| "epoch": 1.43, | |
| "learning_rate": 5.776462395543176e-05, | |
| "loss": 1.2657, | |
| "step": 4185 | |
| }, | |
| { | |
| "epoch": 1.43, | |
| "learning_rate": 5.759052924791086e-05, | |
| "loss": 1.3071, | |
| "step": 4190 | |
| }, | |
| { | |
| "epoch": 1.44, | |
| "learning_rate": 5.741643454038997e-05, | |
| "loss": 1.3702, | |
| "step": 4195 | |
| }, | |
| { | |
| "epoch": 1.44, | |
| "learning_rate": 5.7242339832869085e-05, | |
| "loss": 1.2832, | |
| "step": 4200 | |
| }, | |
| { | |
| "epoch": 1.44, | |
| "eval_loss": 1.288108468055725, | |
| "eval_runtime": 737.6272, | |
| "eval_samples_per_second": 7.043, | |
| "eval_steps_per_second": 0.881, | |
| "step": 4200 | |
| }, | |
| { | |
| "epoch": 1.44, | |
| "learning_rate": 5.70682451253482e-05, | |
| "loss": 1.2143, | |
| "step": 4205 | |
| }, | |
| { | |
| "epoch": 1.44, | |
| "learning_rate": 5.68941504178273e-05, | |
| "loss": 1.2698, | |
| "step": 4210 | |
| }, | |
| { | |
| "epoch": 1.44, | |
| "learning_rate": 5.672005571030641e-05, | |
| "loss": 1.3745, | |
| "step": 4215 | |
| }, | |
| { | |
| "epoch": 1.44, | |
| "learning_rate": 5.654596100278552e-05, | |
| "loss": 1.303, | |
| "step": 4220 | |
| }, | |
| { | |
| "epoch": 1.45, | |
| "learning_rate": 5.637186629526463e-05, | |
| "loss": 1.3291, | |
| "step": 4225 | |
| }, | |
| { | |
| "epoch": 1.45, | |
| "learning_rate": 5.619777158774373e-05, | |
| "loss": 1.3113, | |
| "step": 4230 | |
| }, | |
| { | |
| "epoch": 1.45, | |
| "learning_rate": 5.602367688022284e-05, | |
| "loss": 1.355, | |
| "step": 4235 | |
| }, | |
| { | |
| "epoch": 1.45, | |
| "learning_rate": 5.584958217270195e-05, | |
| "loss": 1.3275, | |
| "step": 4240 | |
| }, | |
| { | |
| "epoch": 1.45, | |
| "learning_rate": 5.5675487465181066e-05, | |
| "loss": 1.2216, | |
| "step": 4245 | |
| }, | |
| { | |
| "epoch": 1.45, | |
| "learning_rate": 5.5501392757660165e-05, | |
| "loss": 1.2785, | |
| "step": 4250 | |
| }, | |
| { | |
| "epoch": 1.46, | |
| "learning_rate": 5.532729805013928e-05, | |
| "loss": 1.3324, | |
| "step": 4255 | |
| }, | |
| { | |
| "epoch": 1.46, | |
| "learning_rate": 5.5153203342618384e-05, | |
| "loss": 1.3222, | |
| "step": 4260 | |
| }, | |
| { | |
| "epoch": 1.46, | |
| "learning_rate": 5.4979108635097496e-05, | |
| "loss": 1.3249, | |
| "step": 4265 | |
| }, | |
| { | |
| "epoch": 1.46, | |
| "learning_rate": 5.480501392757661e-05, | |
| "loss": 1.2889, | |
| "step": 4270 | |
| }, | |
| { | |
| "epoch": 1.46, | |
| "learning_rate": 5.463091922005571e-05, | |
| "loss": 1.286, | |
| "step": 4275 | |
| }, | |
| { | |
| "epoch": 1.46, | |
| "learning_rate": 5.445682451253482e-05, | |
| "loss": 1.3139, | |
| "step": 4280 | |
| }, | |
| { | |
| "epoch": 1.47, | |
| "learning_rate": 5.4282729805013934e-05, | |
| "loss": 1.341, | |
| "step": 4285 | |
| }, | |
| { | |
| "epoch": 1.47, | |
| "learning_rate": 5.4108635097493046e-05, | |
| "loss": 1.3668, | |
| "step": 4290 | |
| }, | |
| { | |
| "epoch": 1.47, | |
| "learning_rate": 5.3934540389972146e-05, | |
| "loss": 1.3228, | |
| "step": 4295 | |
| }, | |
| { | |
| "epoch": 1.47, | |
| "learning_rate": 5.376044568245125e-05, | |
| "loss": 1.3442, | |
| "step": 4300 | |
| }, | |
| { | |
| "epoch": 1.47, | |
| "learning_rate": 5.3586350974930364e-05, | |
| "loss": 1.2981, | |
| "step": 4305 | |
| }, | |
| { | |
| "epoch": 1.48, | |
| "learning_rate": 5.341225626740948e-05, | |
| "loss": 1.3438, | |
| "step": 4310 | |
| }, | |
| { | |
| "epoch": 1.48, | |
| "learning_rate": 5.3238161559888576e-05, | |
| "loss": 1.2201, | |
| "step": 4315 | |
| }, | |
| { | |
| "epoch": 1.48, | |
| "learning_rate": 5.306406685236769e-05, | |
| "loss": 1.2575, | |
| "step": 4320 | |
| }, | |
| { | |
| "epoch": 1.48, | |
| "learning_rate": 5.28899721448468e-05, | |
| "loss": 1.3145, | |
| "step": 4325 | |
| }, | |
| { | |
| "epoch": 1.48, | |
| "learning_rate": 5.2715877437325914e-05, | |
| "loss": 1.314, | |
| "step": 4330 | |
| }, | |
| { | |
| "epoch": 1.48, | |
| "learning_rate": 5.2541782729805013e-05, | |
| "loss": 1.2871, | |
| "step": 4335 | |
| }, | |
| { | |
| "epoch": 1.49, | |
| "learning_rate": 5.236768802228412e-05, | |
| "loss": 1.3151, | |
| "step": 4340 | |
| }, | |
| { | |
| "epoch": 1.49, | |
| "learning_rate": 5.219359331476323e-05, | |
| "loss": 1.3453, | |
| "step": 4345 | |
| }, | |
| { | |
| "epoch": 1.49, | |
| "learning_rate": 5.2019498607242345e-05, | |
| "loss": 1.3629, | |
| "step": 4350 | |
| }, | |
| { | |
| "epoch": 1.49, | |
| "learning_rate": 5.184540389972146e-05, | |
| "loss": 1.3649, | |
| "step": 4355 | |
| }, | |
| { | |
| "epoch": 1.49, | |
| "learning_rate": 5.167130919220056e-05, | |
| "loss": 1.3063, | |
| "step": 4360 | |
| }, | |
| { | |
| "epoch": 1.49, | |
| "learning_rate": 5.149721448467967e-05, | |
| "loss": 1.3669, | |
| "step": 4365 | |
| }, | |
| { | |
| "epoch": 1.5, | |
| "learning_rate": 5.132311977715878e-05, | |
| "loss": 1.2711, | |
| "step": 4370 | |
| }, | |
| { | |
| "epoch": 1.5, | |
| "learning_rate": 5.114902506963789e-05, | |
| "loss": 1.2629, | |
| "step": 4375 | |
| }, | |
| { | |
| "epoch": 1.5, | |
| "learning_rate": 5.097493036211699e-05, | |
| "loss": 1.2919, | |
| "step": 4380 | |
| }, | |
| { | |
| "epoch": 1.5, | |
| "learning_rate": 5.08008356545961e-05, | |
| "loss": 1.3312, | |
| "step": 4385 | |
| }, | |
| { | |
| "epoch": 1.5, | |
| "learning_rate": 5.062674094707521e-05, | |
| "loss": 1.3027, | |
| "step": 4390 | |
| }, | |
| { | |
| "epoch": 1.5, | |
| "learning_rate": 5.0452646239554326e-05, | |
| "loss": 1.2713, | |
| "step": 4395 | |
| }, | |
| { | |
| "epoch": 1.51, | |
| "learning_rate": 5.0278551532033425e-05, | |
| "loss": 1.2233, | |
| "step": 4400 | |
| }, | |
| { | |
| "epoch": 1.51, | |
| "eval_loss": 1.286835789680481, | |
| "eval_runtime": 737.7513, | |
| "eval_samples_per_second": 7.042, | |
| "eval_steps_per_second": 0.881, | |
| "step": 4400 | |
| }, | |
| { | |
| "epoch": 1.51, | |
| "learning_rate": 5.010445682451254e-05, | |
| "loss": 1.2238, | |
| "step": 4405 | |
| }, | |
| { | |
| "epoch": 1.51, | |
| "learning_rate": 4.993036211699165e-05, | |
| "loss": 1.2575, | |
| "step": 4410 | |
| }, | |
| { | |
| "epoch": 1.51, | |
| "learning_rate": 4.975626740947075e-05, | |
| "loss": 1.2281, | |
| "step": 4415 | |
| }, | |
| { | |
| "epoch": 1.51, | |
| "learning_rate": 4.958217270194986e-05, | |
| "loss": 1.372, | |
| "step": 4420 | |
| }, | |
| { | |
| "epoch": 1.51, | |
| "learning_rate": 4.940807799442897e-05, | |
| "loss": 1.2199, | |
| "step": 4425 | |
| }, | |
| { | |
| "epoch": 1.52, | |
| "learning_rate": 4.923398328690808e-05, | |
| "loss": 1.2096, | |
| "step": 4430 | |
| }, | |
| { | |
| "epoch": 1.52, | |
| "learning_rate": 4.905988857938719e-05, | |
| "loss": 1.3134, | |
| "step": 4435 | |
| }, | |
| { | |
| "epoch": 1.52, | |
| "learning_rate": 4.88857938718663e-05, | |
| "loss": 1.2629, | |
| "step": 4440 | |
| }, | |
| { | |
| "epoch": 1.52, | |
| "learning_rate": 4.8711699164345405e-05, | |
| "loss": 1.3521, | |
| "step": 4445 | |
| }, | |
| { | |
| "epoch": 1.52, | |
| "learning_rate": 4.853760445682451e-05, | |
| "loss": 1.2982, | |
| "step": 4450 | |
| }, | |
| { | |
| "epoch": 1.52, | |
| "learning_rate": 4.8363509749303624e-05, | |
| "loss": 1.3329, | |
| "step": 4455 | |
| }, | |
| { | |
| "epoch": 1.53, | |
| "learning_rate": 4.818941504178273e-05, | |
| "loss": 1.3278, | |
| "step": 4460 | |
| }, | |
| { | |
| "epoch": 1.53, | |
| "learning_rate": 4.801532033426184e-05, | |
| "loss": 1.3551, | |
| "step": 4465 | |
| }, | |
| { | |
| "epoch": 1.53, | |
| "learning_rate": 4.784122562674095e-05, | |
| "loss": 1.3016, | |
| "step": 4470 | |
| }, | |
| { | |
| "epoch": 1.53, | |
| "learning_rate": 4.766713091922006e-05, | |
| "loss": 1.2918, | |
| "step": 4475 | |
| }, | |
| { | |
| "epoch": 1.53, | |
| "learning_rate": 4.749303621169917e-05, | |
| "loss": 1.3242, | |
| "step": 4480 | |
| }, | |
| { | |
| "epoch": 1.53, | |
| "learning_rate": 4.731894150417828e-05, | |
| "loss": 1.3174, | |
| "step": 4485 | |
| }, | |
| { | |
| "epoch": 1.54, | |
| "learning_rate": 4.714484679665738e-05, | |
| "loss": 1.3922, | |
| "step": 4490 | |
| }, | |
| { | |
| "epoch": 1.54, | |
| "learning_rate": 4.697075208913649e-05, | |
| "loss": 1.3263, | |
| "step": 4495 | |
| }, | |
| { | |
| "epoch": 1.54, | |
| "learning_rate": 4.67966573816156e-05, | |
| "loss": 1.2848, | |
| "step": 4500 | |
| }, | |
| { | |
| "epoch": 1.54, | |
| "learning_rate": 4.662256267409471e-05, | |
| "loss": 1.3717, | |
| "step": 4505 | |
| }, | |
| { | |
| "epoch": 1.54, | |
| "learning_rate": 4.644846796657382e-05, | |
| "loss": 1.2208, | |
| "step": 4510 | |
| }, | |
| { | |
| "epoch": 1.55, | |
| "learning_rate": 4.627437325905293e-05, | |
| "loss": 1.3238, | |
| "step": 4515 | |
| }, | |
| { | |
| "epoch": 1.55, | |
| "learning_rate": 4.6100278551532035e-05, | |
| "loss": 1.3045, | |
| "step": 4520 | |
| }, | |
| { | |
| "epoch": 1.55, | |
| "learning_rate": 4.592618384401115e-05, | |
| "loss": 1.3534, | |
| "step": 4525 | |
| }, | |
| { | |
| "epoch": 1.55, | |
| "learning_rate": 4.575208913649025e-05, | |
| "loss": 1.2493, | |
| "step": 4530 | |
| }, | |
| { | |
| "epoch": 1.55, | |
| "learning_rate": 4.557799442896936e-05, | |
| "loss": 1.3286, | |
| "step": 4535 | |
| }, | |
| { | |
| "epoch": 1.55, | |
| "learning_rate": 4.5403899721448466e-05, | |
| "loss": 1.3154, | |
| "step": 4540 | |
| }, | |
| { | |
| "epoch": 1.56, | |
| "learning_rate": 4.522980501392758e-05, | |
| "loss": 1.2903, | |
| "step": 4545 | |
| }, | |
| { | |
| "epoch": 1.56, | |
| "learning_rate": 4.505571030640669e-05, | |
| "loss": 1.287, | |
| "step": 4550 | |
| }, | |
| { | |
| "epoch": 1.56, | |
| "learning_rate": 4.48816155988858e-05, | |
| "loss": 1.3266, | |
| "step": 4555 | |
| }, | |
| { | |
| "epoch": 1.56, | |
| "learning_rate": 4.470752089136491e-05, | |
| "loss": 1.284, | |
| "step": 4560 | |
| }, | |
| { | |
| "epoch": 1.56, | |
| "learning_rate": 4.453342618384401e-05, | |
| "loss": 1.2744, | |
| "step": 4565 | |
| }, | |
| { | |
| "epoch": 1.56, | |
| "learning_rate": 4.435933147632312e-05, | |
| "loss": 1.2628, | |
| "step": 4570 | |
| }, | |
| { | |
| "epoch": 1.57, | |
| "learning_rate": 4.418523676880223e-05, | |
| "loss": 1.2512, | |
| "step": 4575 | |
| }, | |
| { | |
| "epoch": 1.57, | |
| "learning_rate": 4.401114206128134e-05, | |
| "loss": 1.2431, | |
| "step": 4580 | |
| }, | |
| { | |
| "epoch": 1.57, | |
| "learning_rate": 4.3837047353760447e-05, | |
| "loss": 1.304, | |
| "step": 4585 | |
| }, | |
| { | |
| "epoch": 1.57, | |
| "learning_rate": 4.366295264623956e-05, | |
| "loss": 1.2657, | |
| "step": 4590 | |
| }, | |
| { | |
| "epoch": 1.57, | |
| "learning_rate": 4.3488857938718665e-05, | |
| "loss": 1.3176, | |
| "step": 4595 | |
| }, | |
| { | |
| "epoch": 1.57, | |
| "learning_rate": 4.331476323119778e-05, | |
| "loss": 1.2565, | |
| "step": 4600 | |
| }, | |
| { | |
| "epoch": 1.57, | |
| "eval_loss": 1.2856769561767578, | |
| "eval_runtime": 737.7813, | |
| "eval_samples_per_second": 7.041, | |
| "eval_steps_per_second": 0.881, | |
| "step": 4600 | |
| }, | |
| { | |
| "epoch": 1.58, | |
| "learning_rate": 4.314066852367688e-05, | |
| "loss": 1.2888, | |
| "step": 4605 | |
| }, | |
| { | |
| "epoch": 1.58, | |
| "learning_rate": 4.296657381615599e-05, | |
| "loss": 1.2709, | |
| "step": 4610 | |
| }, | |
| { | |
| "epoch": 1.58, | |
| "learning_rate": 4.2792479108635096e-05, | |
| "loss": 1.337, | |
| "step": 4615 | |
| }, | |
| { | |
| "epoch": 1.58, | |
| "learning_rate": 4.261838440111421e-05, | |
| "loss": 1.2852, | |
| "step": 4620 | |
| }, | |
| { | |
| "epoch": 1.58, | |
| "learning_rate": 4.2444289693593315e-05, | |
| "loss": 1.2833, | |
| "step": 4625 | |
| }, | |
| { | |
| "epoch": 1.58, | |
| "learning_rate": 4.227019498607243e-05, | |
| "loss": 1.3159, | |
| "step": 4630 | |
| }, | |
| { | |
| "epoch": 1.59, | |
| "learning_rate": 4.209610027855154e-05, | |
| "loss": 1.2542, | |
| "step": 4635 | |
| }, | |
| { | |
| "epoch": 1.59, | |
| "learning_rate": 4.192200557103064e-05, | |
| "loss": 1.2685, | |
| "step": 4640 | |
| }, | |
| { | |
| "epoch": 1.59, | |
| "learning_rate": 4.174791086350975e-05, | |
| "loss": 1.3122, | |
| "step": 4645 | |
| }, | |
| { | |
| "epoch": 1.59, | |
| "learning_rate": 4.157381615598886e-05, | |
| "loss": 1.3031, | |
| "step": 4650 | |
| }, | |
| { | |
| "epoch": 1.59, | |
| "learning_rate": 4.139972144846797e-05, | |
| "loss": 1.2935, | |
| "step": 4655 | |
| }, | |
| { | |
| "epoch": 1.59, | |
| "learning_rate": 4.1225626740947077e-05, | |
| "loss": 1.2937, | |
| "step": 4660 | |
| }, | |
| { | |
| "epoch": 1.6, | |
| "learning_rate": 4.105153203342619e-05, | |
| "loss": 1.2152, | |
| "step": 4665 | |
| }, | |
| { | |
| "epoch": 1.6, | |
| "learning_rate": 4.0877437325905295e-05, | |
| "loss": 1.2792, | |
| "step": 4670 | |
| }, | |
| { | |
| "epoch": 1.6, | |
| "learning_rate": 4.070334261838441e-05, | |
| "loss": 1.3382, | |
| "step": 4675 | |
| }, | |
| { | |
| "epoch": 1.6, | |
| "learning_rate": 4.052924791086351e-05, | |
| "loss": 1.4042, | |
| "step": 4680 | |
| }, | |
| { | |
| "epoch": 1.6, | |
| "learning_rate": 4.035515320334262e-05, | |
| "loss": 1.2896, | |
| "step": 4685 | |
| }, | |
| { | |
| "epoch": 1.61, | |
| "learning_rate": 4.0181058495821726e-05, | |
| "loss": 1.3844, | |
| "step": 4690 | |
| }, | |
| { | |
| "epoch": 1.61, | |
| "learning_rate": 4.000696378830084e-05, | |
| "loss": 1.2777, | |
| "step": 4695 | |
| }, | |
| { | |
| "epoch": 1.61, | |
| "learning_rate": 3.9832869080779944e-05, | |
| "loss": 1.2975, | |
| "step": 4700 | |
| }, | |
| { | |
| "epoch": 1.61, | |
| "learning_rate": 3.965877437325906e-05, | |
| "loss": 1.34, | |
| "step": 4705 | |
| }, | |
| { | |
| "epoch": 1.61, | |
| "learning_rate": 3.948467966573816e-05, | |
| "loss": 1.2717, | |
| "step": 4710 | |
| }, | |
| { | |
| "epoch": 1.61, | |
| "learning_rate": 3.9310584958217276e-05, | |
| "loss": 1.3807, | |
| "step": 4715 | |
| }, | |
| { | |
| "epoch": 1.62, | |
| "learning_rate": 3.9136490250696375e-05, | |
| "loss": 1.3388, | |
| "step": 4720 | |
| }, | |
| { | |
| "epoch": 1.62, | |
| "learning_rate": 3.896239554317549e-05, | |
| "loss": 1.3167, | |
| "step": 4725 | |
| }, | |
| { | |
| "epoch": 1.62, | |
| "learning_rate": 3.87883008356546e-05, | |
| "loss": 1.2485, | |
| "step": 4730 | |
| }, | |
| { | |
| "epoch": 1.62, | |
| "learning_rate": 3.8614206128133706e-05, | |
| "loss": 1.3248, | |
| "step": 4735 | |
| }, | |
| { | |
| "epoch": 1.62, | |
| "learning_rate": 3.844011142061282e-05, | |
| "loss": 1.2582, | |
| "step": 4740 | |
| }, | |
| { | |
| "epoch": 1.62, | |
| "learning_rate": 3.8266016713091925e-05, | |
| "loss": 1.3264, | |
| "step": 4745 | |
| }, | |
| { | |
| "epoch": 1.63, | |
| "learning_rate": 3.809192200557104e-05, | |
| "loss": 1.3874, | |
| "step": 4750 | |
| }, | |
| { | |
| "epoch": 1.63, | |
| "learning_rate": 3.791782729805014e-05, | |
| "loss": 1.316, | |
| "step": 4755 | |
| }, | |
| { | |
| "epoch": 1.63, | |
| "learning_rate": 3.774373259052925e-05, | |
| "loss": 1.2986, | |
| "step": 4760 | |
| }, | |
| { | |
| "epoch": 1.63, | |
| "learning_rate": 3.7569637883008356e-05, | |
| "loss": 1.3555, | |
| "step": 4765 | |
| }, | |
| { | |
| "epoch": 1.63, | |
| "learning_rate": 3.739554317548747e-05, | |
| "loss": 1.3543, | |
| "step": 4770 | |
| }, | |
| { | |
| "epoch": 1.63, | |
| "learning_rate": 3.7221448467966574e-05, | |
| "loss": 1.3566, | |
| "step": 4775 | |
| }, | |
| { | |
| "epoch": 1.64, | |
| "learning_rate": 3.704735376044569e-05, | |
| "loss": 1.3386, | |
| "step": 4780 | |
| }, | |
| { | |
| "epoch": 1.64, | |
| "learning_rate": 3.687325905292479e-05, | |
| "loss": 1.2482, | |
| "step": 4785 | |
| }, | |
| { | |
| "epoch": 1.64, | |
| "learning_rate": 3.6699164345403906e-05, | |
| "loss": 1.3113, | |
| "step": 4790 | |
| }, | |
| { | |
| "epoch": 1.64, | |
| "learning_rate": 3.6525069637883005e-05, | |
| "loss": 1.3643, | |
| "step": 4795 | |
| }, | |
| { | |
| "epoch": 1.64, | |
| "learning_rate": 3.635097493036212e-05, | |
| "loss": 1.2618, | |
| "step": 4800 | |
| }, | |
| { | |
| "epoch": 1.64, | |
| "eval_loss": 1.2854821681976318, | |
| "eval_runtime": 737.0009, | |
| "eval_samples_per_second": 7.049, | |
| "eval_steps_per_second": 0.882, | |
| "step": 4800 | |
| }, | |
| { | |
| "epoch": 1.64, | |
| "learning_rate": 3.6176880222841224e-05, | |
| "loss": 1.3645, | |
| "step": 4805 | |
| }, | |
| { | |
| "epoch": 1.65, | |
| "learning_rate": 3.6002785515320336e-05, | |
| "loss": 1.2816, | |
| "step": 4810 | |
| }, | |
| { | |
| "epoch": 1.65, | |
| "learning_rate": 3.582869080779944e-05, | |
| "loss": 1.2479, | |
| "step": 4815 | |
| }, | |
| { | |
| "epoch": 1.65, | |
| "learning_rate": 3.5654596100278555e-05, | |
| "loss": 1.2798, | |
| "step": 4820 | |
| }, | |
| { | |
| "epoch": 1.65, | |
| "learning_rate": 3.548050139275767e-05, | |
| "loss": 1.2691, | |
| "step": 4825 | |
| }, | |
| { | |
| "epoch": 1.65, | |
| "learning_rate": 3.530640668523677e-05, | |
| "loss": 1.3186, | |
| "step": 4830 | |
| }, | |
| { | |
| "epoch": 1.65, | |
| "learning_rate": 3.513231197771588e-05, | |
| "loss": 1.2871, | |
| "step": 4835 | |
| }, | |
| { | |
| "epoch": 1.66, | |
| "learning_rate": 3.4958217270194986e-05, | |
| "loss": 1.2623, | |
| "step": 4840 | |
| }, | |
| { | |
| "epoch": 1.66, | |
| "learning_rate": 3.47841225626741e-05, | |
| "loss": 1.2977, | |
| "step": 4845 | |
| }, | |
| { | |
| "epoch": 1.66, | |
| "learning_rate": 3.4610027855153204e-05, | |
| "loss": 1.319, | |
| "step": 4850 | |
| }, | |
| { | |
| "epoch": 1.66, | |
| "learning_rate": 3.443593314763232e-05, | |
| "loss": 1.2389, | |
| "step": 4855 | |
| }, | |
| { | |
| "epoch": 1.66, | |
| "learning_rate": 3.426183844011142e-05, | |
| "loss": 1.2725, | |
| "step": 4860 | |
| }, | |
| { | |
| "epoch": 1.66, | |
| "learning_rate": 3.4087743732590536e-05, | |
| "loss": 1.3228, | |
| "step": 4865 | |
| }, | |
| { | |
| "epoch": 1.67, | |
| "learning_rate": 3.3913649025069635e-05, | |
| "loss": 1.286, | |
| "step": 4870 | |
| }, | |
| { | |
| "epoch": 1.67, | |
| "learning_rate": 3.373955431754875e-05, | |
| "loss": 1.3151, | |
| "step": 4875 | |
| }, | |
| { | |
| "epoch": 1.67, | |
| "learning_rate": 3.3565459610027854e-05, | |
| "loss": 1.3726, | |
| "step": 4880 | |
| }, | |
| { | |
| "epoch": 1.67, | |
| "learning_rate": 3.3391364902506966e-05, | |
| "loss": 1.2479, | |
| "step": 4885 | |
| }, | |
| { | |
| "epoch": 1.67, | |
| "learning_rate": 3.321727019498607e-05, | |
| "loss": 1.2454, | |
| "step": 4890 | |
| }, | |
| { | |
| "epoch": 1.68, | |
| "learning_rate": 3.3043175487465185e-05, | |
| "loss": 1.2835, | |
| "step": 4895 | |
| }, | |
| { | |
| "epoch": 1.68, | |
| "learning_rate": 3.286908077994429e-05, | |
| "loss": 1.3041, | |
| "step": 4900 | |
| }, | |
| { | |
| "epoch": 1.68, | |
| "learning_rate": 3.2694986072423404e-05, | |
| "loss": 1.3424, | |
| "step": 4905 | |
| }, | |
| { | |
| "epoch": 1.68, | |
| "learning_rate": 3.25208913649025e-05, | |
| "loss": 1.2531, | |
| "step": 4910 | |
| }, | |
| { | |
| "epoch": 1.68, | |
| "learning_rate": 3.2346796657381616e-05, | |
| "loss": 1.2488, | |
| "step": 4915 | |
| }, | |
| { | |
| "epoch": 1.68, | |
| "learning_rate": 3.217270194986073e-05, | |
| "loss": 1.2859, | |
| "step": 4920 | |
| }, | |
| { | |
| "epoch": 1.69, | |
| "learning_rate": 3.1998607242339834e-05, | |
| "loss": 1.2757, | |
| "step": 4925 | |
| }, | |
| { | |
| "epoch": 1.69, | |
| "learning_rate": 3.182451253481895e-05, | |
| "loss": 1.3261, | |
| "step": 4930 | |
| }, | |
| { | |
| "epoch": 1.69, | |
| "learning_rate": 3.165041782729805e-05, | |
| "loss": 1.3386, | |
| "step": 4935 | |
| }, | |
| { | |
| "epoch": 1.69, | |
| "learning_rate": 3.1476323119777166e-05, | |
| "loss": 1.3418, | |
| "step": 4940 | |
| }, | |
| { | |
| "epoch": 1.69, | |
| "learning_rate": 3.1302228412256265e-05, | |
| "loss": 1.2768, | |
| "step": 4945 | |
| }, | |
| { | |
| "epoch": 1.69, | |
| "learning_rate": 3.112813370473538e-05, | |
| "loss": 1.3141, | |
| "step": 4950 | |
| }, | |
| { | |
| "epoch": 1.7, | |
| "learning_rate": 3.0954038997214483e-05, | |
| "loss": 1.2161, | |
| "step": 4955 | |
| }, | |
| { | |
| "epoch": 1.7, | |
| "learning_rate": 3.0779944289693596e-05, | |
| "loss": 1.2889, | |
| "step": 4960 | |
| }, | |
| { | |
| "epoch": 1.7, | |
| "learning_rate": 3.06058495821727e-05, | |
| "loss": 1.438, | |
| "step": 4965 | |
| }, | |
| { | |
| "epoch": 1.7, | |
| "learning_rate": 3.0431754874651815e-05, | |
| "loss": 1.3216, | |
| "step": 4970 | |
| }, | |
| { | |
| "epoch": 1.7, | |
| "learning_rate": 3.0257660167130917e-05, | |
| "loss": 1.2821, | |
| "step": 4975 | |
| }, | |
| { | |
| "epoch": 1.7, | |
| "learning_rate": 3.008356545961003e-05, | |
| "loss": 1.3392, | |
| "step": 4980 | |
| }, | |
| { | |
| "epoch": 1.71, | |
| "learning_rate": 2.9909470752089136e-05, | |
| "loss": 1.2746, | |
| "step": 4985 | |
| }, | |
| { | |
| "epoch": 1.71, | |
| "learning_rate": 2.973537604456825e-05, | |
| "loss": 1.3234, | |
| "step": 4990 | |
| }, | |
| { | |
| "epoch": 1.71, | |
| "learning_rate": 2.956128133704735e-05, | |
| "loss": 1.3653, | |
| "step": 4995 | |
| }, | |
| { | |
| "epoch": 1.71, | |
| "learning_rate": 2.9387186629526464e-05, | |
| "loss": 1.3954, | |
| "step": 5000 | |
| }, | |
| { | |
| "epoch": 1.71, | |
| "eval_loss": 1.2845630645751953, | |
| "eval_runtime": 737.0566, | |
| "eval_samples_per_second": 7.048, | |
| "eval_steps_per_second": 0.882, | |
| "step": 5000 | |
| } | |
| ], | |
| "max_steps": 5844, | |
| "num_train_epochs": 2, | |
| "total_flos": 2.977806625328333e+17, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |
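
The record above appears to be a Hugging Face Transformers `trainer_state.json` log. As a minimal sketch of how one might inspect such a file, the snippet below loads it, splits `log_history` into training and evaluation entries, and reports the best recorded `eval_loss`. The file path is an assumption; adjust it to wherever the state file was saved for your own run.

```python
# Minimal sketch (assumption: this is a Transformers "trainer_state.json"
# saved with a checkpoint; the path below is hypothetical).
import json

STATE_PATH = "outputs/checkpoint-5000/trainer_state.json"  # hypothetical path

with open(STATE_PATH) as f:
    state = json.load(f)

# "log_history" mixes periodic training logs (with "loss") and evaluation
# logs (with "eval_loss"); split them so each can be inspected separately.
train_logs = [e for e in state["log_history"] if "loss" in e]
eval_logs = [e for e in state["log_history"] if "eval_loss" in e]

# Best evaluation point according to the recorded eval losses.
best = min(eval_logs, key=lambda e: e["eval_loss"])
print(f"best eval_loss {best['eval_loss']:.4f} at step {best['step']}")
print(f"checkpoint reported as best: {state.get('best_model_checkpoint')}")

# Final training-loss reading and the learning rate it was logged with.
last = train_logs[-1]
print(f"last logged loss {last['loss']:.4f} "
      f"(lr {last['learning_rate']:.2e}, step {last['step']})")
```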