{
  "best_global_step": null,
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.998956158663883,
  "eval_steps": 500,
  "global_step": 1276,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0015657620041753654,
      "grad_norm": 658.1201782226562,
      "learning_rate": 5.0000000000000004e-08,
      "loss": 1.3039,
      "step": 1
    },
    {
      "epoch": 0.003131524008350731,
      "grad_norm": 749.5796508789062,
      "learning_rate": 1.0000000000000001e-07,
      "loss": 1.4164,
      "step": 2
    },
    {
      "epoch": 0.004697286012526096,
      "grad_norm": 394.2350769042969,
      "learning_rate": 1.5000000000000002e-07,
      "loss": 1.3367,
      "step": 3
    },
    {
      "epoch": 0.006263048016701462,
      "grad_norm": 384.08319091796875,
      "learning_rate": 2.0000000000000002e-07,
      "loss": 1.3192,
      "step": 4
    },
    {
      "epoch": 0.007828810020876827,
      "grad_norm": 376.9790954589844,
      "learning_rate": 2.5000000000000004e-07,
      "loss": 1.2756,
      "step": 5
    },
    {
      "epoch": 0.009394572025052192,
      "grad_norm": 175.25262451171875,
      "learning_rate": 3.0000000000000004e-07,
      "loss": 1.3992,
      "step": 6
    },
    {
      "epoch": 0.010960334029227558,
      "grad_norm": 100.1114730834961,
      "learning_rate": 3.5000000000000004e-07,
      "loss": 1.2348,
      "step": 7
    },
    {
      "epoch": 0.012526096033402923,
      "grad_norm": 131.44432067871094,
      "learning_rate": 4.0000000000000003e-07,
      "loss": 1.2685,
      "step": 8
    },
    {
      "epoch": 0.014091858037578288,
      "grad_norm": 67.03341674804688,
      "learning_rate": 4.5000000000000003e-07,
      "loss": 1.3272,
      "step": 9
    },
    {
      "epoch": 0.015657620041753653,
      "grad_norm": 32.0869140625,
      "learning_rate": 5.000000000000001e-07,
      "loss": 1.3458,
      "step": 10
    },
    {
      "epoch": 0.01722338204592902,
      "grad_norm": 17.701913833618164,
      "learning_rate": 5.5e-07,
      "loss": 1.3608,
      "step": 11
    },
    {
      "epoch": 0.018789144050104383,
      "grad_norm": 9.164765357971191,
      "learning_rate": 6.000000000000001e-07,
      "loss": 1.2961,
      "step": 12
    },
    {
      "epoch": 0.02035490605427975,
      "grad_norm": 4.315349578857422,
      "learning_rate": 6.5e-07,
      "loss": 1.2069,
      "step": 13
    },
    {
      "epoch": 0.021920668058455117,
      "grad_norm": 6.652385234832764,
      "learning_rate": 7.000000000000001e-07,
      "loss": 1.2588,
      "step": 14
    },
    {
      "epoch": 0.02348643006263048,
      "grad_norm": 3.3322548866271973,
      "learning_rate": 7.5e-07,
      "loss": 1.2505,
      "step": 15
    },
    {
      "epoch": 0.025052192066805846,
      "grad_norm": 9.599331855773926,
      "learning_rate": 8.000000000000001e-07,
      "loss": 1.1343,
      "step": 16
    },
    {
      "epoch": 0.02661795407098121,
      "grad_norm": 10.33918285369873,
      "learning_rate": 8.500000000000001e-07,
      "loss": 1.3334,
      "step": 17
    },
    {
      "epoch": 0.028183716075156576,
      "grad_norm": 9.453723907470703,
      "learning_rate": 9.000000000000001e-07,
      "loss": 1.3099,
      "step": 18
    },
    {
      "epoch": 0.029749478079331943,
      "grad_norm": 7.075769901275635,
      "learning_rate": 9.500000000000001e-07,
      "loss": 1.1121,
      "step": 19
    },
    {
      "epoch": 0.031315240083507306,
      "grad_norm": 7.770864009857178,
      "learning_rate": 1.0000000000000002e-06,
      "loss": 1.1836,
      "step": 20
    },
    {
      "epoch": 0.03288100208768267,
      "grad_norm": 9.582944869995117,
      "learning_rate": 1.0500000000000001e-06,
      "loss": 1.1939,
      "step": 21
    },
    {
      "epoch": 0.03444676409185804,
      "grad_norm": 9.496583938598633,
      "learning_rate": 1.1e-06,
      "loss": 1.2395,
      "step": 22
    },
    {
      "epoch": 0.0360125260960334,
      "grad_norm": 5.912876129150391,
      "learning_rate": 1.1500000000000002e-06,
      "loss": 1.1769,
      "step": 23
    },
    {
      "epoch": 0.037578288100208766,
      "grad_norm": 2.6761083602905273,
      "learning_rate": 1.2000000000000002e-06,
      "loss": 1.204,
      "step": 24
    },
    {
      "epoch": 0.03914405010438413,
      "grad_norm": 11.136463165283203,
      "learning_rate": 1.25e-06,
      "loss": 1.2271,
      "step": 25
    },
    {
      "epoch": 0.0407098121085595,
      "grad_norm": 23.912893295288086,
      "learning_rate": 1.3e-06,
      "loss": 1.1771,
      "step": 26
    },
    {
      "epoch": 0.042275574112734866,
      "grad_norm": 57.08906555175781,
      "learning_rate": 1.3500000000000002e-06,
      "loss": 1.2253,
      "step": 27
    },
    {
      "epoch": 0.04384133611691023,
      "grad_norm": 36.11408615112305,
      "learning_rate": 1.4000000000000001e-06,
      "loss": 1.1014,
      "step": 28
    },
    {
      "epoch": 0.04540709812108559,
      "grad_norm": 51.39445495605469,
      "learning_rate": 1.45e-06,
      "loss": 1.1978,
      "step": 29
    },
    {
      "epoch": 0.04697286012526096,
      "grad_norm": 27.394500732421875,
      "learning_rate": 1.5e-06,
      "loss": 1.1155,
      "step": 30
    },
    {
      "epoch": 0.048538622129436326,
      "grad_norm": 6.844716548919678,
      "learning_rate": 1.5500000000000002e-06,
      "loss": 1.1798,
      "step": 31
    },
    {
      "epoch": 0.05010438413361169,
      "grad_norm": 2.397536277770996,
      "learning_rate": 1.6000000000000001e-06,
      "loss": 1.1846,
      "step": 32
    },
    {
      "epoch": 0.05167014613778706,
      "grad_norm": 7.821156024932861,
      "learning_rate": 1.6500000000000003e-06,
      "loss": 1.1312,
      "step": 33
    },
    {
      "epoch": 0.05323590814196242,
      "grad_norm": 5.455922603607178,
      "learning_rate": 1.7000000000000002e-06,
      "loss": 1.2153,
      "step": 34
    },
    {
      "epoch": 0.054801670146137786,
      "grad_norm": 5.377617835998535,
      "learning_rate": 1.75e-06,
      "loss": 1.1292,
      "step": 35
    },
    {
      "epoch": 0.05636743215031315,
      "grad_norm": 7.172031879425049,
      "learning_rate": 1.8000000000000001e-06,
      "loss": 1.1459,
      "step": 36
    },
    {
      "epoch": 0.05793319415448852,
      "grad_norm": 2.1464905738830566,
      "learning_rate": 1.85e-06,
      "loss": 1.1649,
      "step": 37
    },
    {
      "epoch": 0.059498956158663886,
      "grad_norm": 2.7782726287841797,
      "learning_rate": 1.9000000000000002e-06,
      "loss": 1.159,
      "step": 38
    },
    {
      "epoch": 0.061064718162839246,
      "grad_norm": 20.818374633789062,
      "learning_rate": 1.9500000000000004e-06,
      "loss": 1.0174,
      "step": 39
    },
    {
      "epoch": 0.06263048016701461,
      "grad_norm": 22.776456832885742,
      "learning_rate": 2.0000000000000003e-06,
      "loss": 1.1425,
      "step": 40
    },
    {
      "epoch": 0.06419624217118998,
      "grad_norm": 33.601287841796875,
      "learning_rate": 2.05e-06,
      "loss": 1.0498,
      "step": 41
    },
    {
      "epoch": 0.06576200417536535,
      "grad_norm": 12.884684562683105,
      "learning_rate": 2.1000000000000002e-06,
      "loss": 1.1231,
      "step": 42
    },
    {
      "epoch": 0.06732776617954071,
      "grad_norm": 1.951470971107483,
      "learning_rate": 2.15e-06,
      "loss": 1.1256,
      "step": 43
    },
    {
      "epoch": 0.06889352818371608,
      "grad_norm": 2.584127426147461,
      "learning_rate": 2.2e-06,
      "loss": 1.1242,
      "step": 44
    },
    {
      "epoch": 0.07045929018789145,
      "grad_norm": 6.982508659362793,
      "learning_rate": 2.25e-06,
      "loss": 1.0769,
      "step": 45
    },
    {
      "epoch": 0.0720250521920668,
      "grad_norm": 7.940657138824463,
      "learning_rate": 2.3000000000000004e-06,
      "loss": 1.0516,
      "step": 46
    },
    {
      "epoch": 0.07359081419624217,
      "grad_norm": 5.456567287445068,
      "learning_rate": 2.35e-06,
      "loss": 0.9894,
      "step": 47
    },
    {
      "epoch": 0.07515657620041753,
      "grad_norm": 4.623636722564697,
      "learning_rate": 2.4000000000000003e-06,
      "loss": 0.9912,
      "step": 48
    },
    {
      "epoch": 0.0767223382045929,
      "grad_norm": 2.182708978652954,
      "learning_rate": 2.4500000000000003e-06,
      "loss": 1.1303,
      "step": 49
    },
    {
      "epoch": 0.07828810020876827,
      "grad_norm": 2.388091802597046,
      "learning_rate": 2.5e-06,
      "loss": 1.0725,
      "step": 50
    },
    {
      "epoch": 0.07985386221294363,
      "grad_norm": 2.2816522121429443,
      "learning_rate": 2.55e-06,
      "loss": 1.0895,
      "step": 51
    },
    {
      "epoch": 0.081419624217119,
      "grad_norm": 3.8391623497009277,
      "learning_rate": 2.6e-06,
      "loss": 1.0587,
      "step": 52
    },
    {
      "epoch": 0.08298538622129437,
      "grad_norm": 3.700389862060547,
      "learning_rate": 2.6500000000000005e-06,
      "loss": 0.9795,
      "step": 53
    },
    {
      "epoch": 0.08455114822546973,
      "grad_norm": 3.180649995803833,
      "learning_rate": 2.7000000000000004e-06,
      "loss": 1.0434,
      "step": 54
    },
    {
      "epoch": 0.0861169102296451,
      "grad_norm": 3.790774345397949,
      "learning_rate": 2.7500000000000004e-06,
      "loss": 0.9652,
      "step": 55
    },
    {
      "epoch": 0.08768267223382047,
      "grad_norm": 1.8411438465118408,
      "learning_rate": 2.8000000000000003e-06,
      "loss": 0.947,
      "step": 56
    },
    {
      "epoch": 0.08924843423799582,
      "grad_norm": 2.100008010864258,
      "learning_rate": 2.85e-06,
      "loss": 1.0129,
      "step": 57
    },
    {
      "epoch": 0.09081419624217119,
      "grad_norm": 1.103857159614563,
      "learning_rate": 2.9e-06,
      "loss": 0.9993,
      "step": 58
    },
    {
      "epoch": 0.09237995824634655,
      "grad_norm": 1.232283353805542,
      "learning_rate": 2.95e-06,
      "loss": 0.9988,
      "step": 59
    },
    {
      "epoch": 0.09394572025052192,
      "grad_norm": 2.4731297492980957,
      "learning_rate": 3e-06,
      "loss": 0.9873,
      "step": 60
    },
    {
      "epoch": 0.09551148225469729,
      "grad_norm": 1.1080410480499268,
      "learning_rate": 3.05e-06,
      "loss": 1.0657,
      "step": 61
    },
    {
      "epoch": 0.09707724425887265,
      "grad_norm": 1.1155121326446533,
      "learning_rate": 3.1000000000000004e-06,
      "loss": 0.974,
      "step": 62
    },
    {
      "epoch": 0.09864300626304802,
      "grad_norm": 1.763231873512268,
      "learning_rate": 3.1500000000000003e-06,
      "loss": 1.0215,
      "step": 63
    },
    {
      "epoch": 0.10020876826722339,
      "grad_norm": 1.1961321830749512,
      "learning_rate": 3.2000000000000003e-06,
      "loss": 0.9955,
      "step": 64
    },
    {
      "epoch": 0.10177453027139875,
      "grad_norm": 1.377040982246399,
      "learning_rate": 3.2500000000000002e-06,
      "loss": 0.9834,
      "step": 65
    },
    {
      "epoch": 0.10334029227557412,
      "grad_norm": 1.373260498046875,
      "learning_rate": 3.3000000000000006e-06,
      "loss": 0.984,
      "step": 66
    },
    {
      "epoch": 0.10490605427974947,
      "grad_norm": 2.034064292907715,
      "learning_rate": 3.3500000000000005e-06,
      "loss": 0.973,
      "step": 67
    },
    {
      "epoch": 0.10647181628392484,
      "grad_norm": 1.41798734664917,
      "learning_rate": 3.4000000000000005e-06,
      "loss": 0.925,
      "step": 68
    },
    {
      "epoch": 0.1080375782881002,
      "grad_norm": 1.7425439357757568,
      "learning_rate": 3.45e-06,
      "loss": 0.9884,
      "step": 69
    },
    {
      "epoch": 0.10960334029227557,
      "grad_norm": 2.369657278060913,
      "learning_rate": 3.5e-06,
      "loss": 1.0048,
      "step": 70
    },
    {
      "epoch": 0.11116910229645094,
      "grad_norm": 1.4699137210845947,
      "learning_rate": 3.5500000000000003e-06,
      "loss": 0.9155,
      "step": 71
    },
    {
      "epoch": 0.1127348643006263,
      "grad_norm": 1.1473263502120972,
      "learning_rate": 3.6000000000000003e-06,
      "loss": 0.8445,
      "step": 72
    },
    {
      "epoch": 0.11430062630480167,
      "grad_norm": 2.315568685531616,
      "learning_rate": 3.65e-06,
      "loss": 0.977,
      "step": 73
    },
    {
      "epoch": 0.11586638830897704,
      "grad_norm": 1.8554916381835938,
      "learning_rate": 3.7e-06,
      "loss": 0.841,
      "step": 74
    },
    {
      "epoch": 0.1174321503131524,
      "grad_norm": 1.1517397165298462,
      "learning_rate": 3.7500000000000005e-06,
      "loss": 0.9478,
      "step": 75
    },
    {
      "epoch": 0.11899791231732777,
      "grad_norm": 2.799316883087158,
      "learning_rate": 3.8000000000000005e-06,
      "loss": 0.9873,
      "step": 76
    },
    {
      "epoch": 0.12056367432150313,
      "grad_norm": 1.11930251121521,
      "learning_rate": 3.85e-06,
      "loss": 0.9214,
      "step": 77
    },
    {
      "epoch": 0.12212943632567849,
      "grad_norm": 1.0082950592041016,
      "learning_rate": 3.900000000000001e-06,
      "loss": 0.9115,
      "step": 78
    },
    {
      "epoch": 0.12369519832985386,
      "grad_norm": 1.5534541606903076,
      "learning_rate": 3.95e-06,
      "loss": 1.04,
      "step": 79
    },
    {
      "epoch": 0.12526096033402923,
      "grad_norm": 1.3469066619873047,
      "learning_rate": 4.000000000000001e-06,
      "loss": 0.9091,
      "step": 80
    },
    {
      "epoch": 0.1268267223382046,
      "grad_norm": 1.1149636507034302,
      "learning_rate": 4.05e-06,
      "loss": 0.8829,
      "step": 81
    },
    {
      "epoch": 0.12839248434237996,
      "grad_norm": 2.5933806896209717,
      "learning_rate": 4.1e-06,
      "loss": 0.9343,
      "step": 82
    },
    {
      "epoch": 0.1299582463465553,
      "grad_norm": 1.980901837348938,
      "learning_rate": 4.15e-06,
      "loss": 0.9028,
      "step": 83
    },
    {
      "epoch": 0.1315240083507307,
      "grad_norm": 0.8476768732070923,
      "learning_rate": 4.2000000000000004e-06,
      "loss": 0.9038,
      "step": 84
    },
    {
      "epoch": 0.13308977035490605,
      "grad_norm": 1.1239815950393677,
      "learning_rate": 4.25e-06,
      "loss": 1.0359,
      "step": 85
    },
    {
      "epoch": 0.13465553235908143,
      "grad_norm": 1.0159156322479248,
      "learning_rate": 4.3e-06,
      "loss": 0.8705,
      "step": 86
    },
    {
      "epoch": 0.13622129436325678,
      "grad_norm": 2.180856466293335,
      "learning_rate": 4.350000000000001e-06,
      "loss": 0.9834,
      "step": 87
    },
    {
      "epoch": 0.13778705636743216,
      "grad_norm": 1.5725276470184326,
      "learning_rate": 4.4e-06,
      "loss": 0.9449,
      "step": 88
    },
    {
      "epoch": 0.1393528183716075,
      "grad_norm": 1.5560917854309082,
      "learning_rate": 4.450000000000001e-06,
      "loss": 1.0109,
      "step": 89
    },
    {
      "epoch": 0.1409185803757829,
      "grad_norm": 1.1096960306167603,
      "learning_rate": 4.5e-06,
      "loss": 0.9081,
      "step": 90
    },
    {
      "epoch": 0.14248434237995825,
      "grad_norm": 1.2525160312652588,
      "learning_rate": 4.5500000000000005e-06,
      "loss": 0.9907,
      "step": 91
    },
    {
      "epoch": 0.1440501043841336,
      "grad_norm": 0.9198770523071289,
      "learning_rate": 4.600000000000001e-06,
      "loss": 0.9266,
      "step": 92
    },
    {
      "epoch": 0.14561586638830898,
      "grad_norm": 0.992057740688324,
      "learning_rate": 4.65e-06,
      "loss": 0.9556,
      "step": 93
    },
    {
      "epoch": 0.14718162839248433,
      "grad_norm": 1.00990891456604,
      "learning_rate": 4.7e-06,
      "loss": 0.9712,
      "step": 94
    },
    {
      "epoch": 0.1487473903966597,
      "grad_norm": 1.1176848411560059,
      "learning_rate": 4.75e-06,
      "loss": 0.9604,
      "step": 95
    },
    {
      "epoch": 0.15031315240083507,
      "grad_norm": 0.9323613047599792,
      "learning_rate": 4.800000000000001e-06,
      "loss": 0.9708,
      "step": 96
    },
    {
      "epoch": 0.15187891440501045,
      "grad_norm": 0.9800642728805542,
      "learning_rate": 4.85e-06,
      "loss": 0.9868,
      "step": 97
    },
    {
      "epoch": 0.1534446764091858,
      "grad_norm": 1.487841010093689,
      "learning_rate": 4.9000000000000005e-06,
      "loss": 0.8679,
      "step": 98
    },
    {
      "epoch": 0.15501043841336118,
      "grad_norm": 0.95517897605896,
      "learning_rate": 4.95e-06,
      "loss": 0.7949,
      "step": 99
    },
    {
      "epoch": 0.15657620041753653,
      "grad_norm": 0.9686336517333984,
      "learning_rate": 5e-06,
      "loss": 0.8655,
      "step": 100
    },
    {
      "epoch": 0.1581419624217119,
      "grad_norm": 1.9481035470962524,
      "learning_rate": 4.999999112316954e-06,
      "loss": 0.9328,
      "step": 101
    },
    {
      "epoch": 0.15970772442588727,
      "grad_norm": 1.272631287574768,
      "learning_rate": 4.9999964492684465e-06,
      "loss": 0.7869,
      "step": 102
    },
    {
      "epoch": 0.16127348643006262,
      "grad_norm": 1.1117815971374512,
      "learning_rate": 4.999992010856368e-06,
      "loss": 0.949,
      "step": 103
    },
    {
      "epoch": 0.162839248434238,
      "grad_norm": 0.9681039452552795,
      "learning_rate": 4.999985797083871e-06,
      "loss": 1.0396,
      "step": 104
    },
    {
      "epoch": 0.16440501043841335,
      "grad_norm": 0.9797878265380859,
      "learning_rate": 4.999977807955368e-06,
      "loss": 0.9393,
      "step": 105
    },
    {
      "epoch": 0.16597077244258873,
      "grad_norm": 1.0239158868789673,
      "learning_rate": 4.999968043476532e-06,
      "loss": 0.8766,
      "step": 106
    },
    {
      "epoch": 0.16753653444676408,
      "grad_norm": 0.9314751029014587,
      "learning_rate": 4.999956503654299e-06,
      "loss": 0.8545,
      "step": 107
    },
    {
      "epoch": 0.16910229645093947,
      "grad_norm": 1.1627179384231567,
      "learning_rate": 4.999943188496862e-06,
      "loss": 0.9241,
      "step": 108
    },
    {
      "epoch": 0.17066805845511482,
      "grad_norm": 1.022752285003662,
      "learning_rate": 4.9999280980136765e-06,
      "loss": 0.9662,
      "step": 109
    },
    {
      "epoch": 0.1722338204592902,
      "grad_norm": 1.0513653755187988,
      "learning_rate": 4.99991123221546e-06,
      "loss": 0.9054,
      "step": 110
    },
    {
      "epoch": 0.17379958246346555,
      "grad_norm": 0.9488444328308105,
      "learning_rate": 4.99989259111419e-06,
      "loss": 0.8921,
      "step": 111
    },
    {
      "epoch": 0.17536534446764093,
      "grad_norm": 0.9934154748916626,
      "learning_rate": 4.999872174723104e-06,
      "loss": 0.9848,
      "step": 112
    },
    {
      "epoch": 0.17693110647181629,
      "grad_norm": 1.2035515308380127,
      "learning_rate": 4.9998499830567e-06,
      "loss": 0.9833,
      "step": 113
    },
    {
      "epoch": 0.17849686847599164,
      "grad_norm": 1.027538537979126,
      "learning_rate": 4.999826016130739e-06,
      "loss": 1.0089,
      "step": 114
    },
    {
      "epoch": 0.18006263048016702,
      "grad_norm": 1.0635300874710083,
      "learning_rate": 4.999800273962238e-06,
      "loss": 0.8863,
      "step": 115
    },
    {
      "epoch": 0.18162839248434237,
      "grad_norm": 0.9978796243667603,
      "learning_rate": 4.999772756569482e-06,
      "loss": 0.899,
      "step": 116
    },
    {
      "epoch": 0.18319415448851775,
      "grad_norm": 0.9971838593482971,
      "learning_rate": 4.999743463972008e-06,
      "loss": 0.9515,
      "step": 117
    },
    {
      "epoch": 0.1847599164926931,
      "grad_norm": 1.2436699867248535,
      "learning_rate": 4.999712396190622e-06,
      "loss": 0.9101,
      "step": 118
    },
    {
      "epoch": 0.18632567849686849,
      "grad_norm": 0.9573557376861572,
      "learning_rate": 4.999679553247383e-06,
      "loss": 0.8817,
      "step": 119
    },
    {
      "epoch": 0.18789144050104384,
      "grad_norm": 0.9105668067932129,
      "learning_rate": 4.9996449351656165e-06,
      "loss": 1.0077,
      "step": 120
    },
    {
      "epoch": 0.18945720250521922,
      "grad_norm": 0.8741400241851807,
      "learning_rate": 4.999608541969905e-06,
      "loss": 0.912,
      "step": 121
    },
    {
      "epoch": 0.19102296450939457,
      "grad_norm": 1.1172688007354736,
      "learning_rate": 4.999570373686095e-06,
      "loss": 0.8062,
      "step": 122
    },
    {
      "epoch": 0.19258872651356992,
      "grad_norm": 0.940539538860321,
      "learning_rate": 4.9995304303412905e-06,
      "loss": 0.8944,
      "step": 123
    },
    {
      "epoch": 0.1941544885177453,
      "grad_norm": 2.1082208156585693,
      "learning_rate": 4.999488711963857e-06,
      "loss": 0.9668,
      "step": 124
    },
    {
      "epoch": 0.19572025052192066,
      "grad_norm": 1.0358761548995972,
      "learning_rate": 4.99944521858342e-06,
      "loss": 0.9032,
      "step": 125
    },
    {
      "epoch": 0.19728601252609604,
      "grad_norm": 2.0804238319396973,
      "learning_rate": 4.999399950230867e-06,
      "loss": 0.9819,
      "step": 126
    },
    {
      "epoch": 0.1988517745302714,
      "grad_norm": 1.0986278057098389,
      "learning_rate": 4.9993529069383465e-06,
      "loss": 0.9034,
      "step": 127
    },
    {
      "epoch": 0.20041753653444677,
      "grad_norm": 1.2374722957611084,
      "learning_rate": 4.999304088739263e-06,
      "loss": 0.9255,
      "step": 128
    },
    {
      "epoch": 0.20198329853862212,
      "grad_norm": 1.0443146228790283,
      "learning_rate": 4.999253495668287e-06,
      "loss": 0.9352,
      "step": 129
    },
    {
      "epoch": 0.2035490605427975,
      "grad_norm": 1.009800910949707,
      "learning_rate": 4.999201127761346e-06,
      "loss": 0.8324,
      "step": 130
    },
    {
      "epoch": 0.20511482254697286,
      "grad_norm": 0.892861008644104,
      "learning_rate": 4.9991469850556295e-06,
      "loss": 0.886,
      "step": 131
    },
    {
      "epoch": 0.20668058455114824,
      "grad_norm": 1.1675922870635986,
      "learning_rate": 4.999091067589587e-06,
      "loss": 0.967,
      "step": 132
    },
    {
      "epoch": 0.2082463465553236,
      "grad_norm": 1.0169954299926758,
      "learning_rate": 4.999033375402926e-06,
      "loss": 0.8498,
      "step": 133
    },
    {
      "epoch": 0.20981210855949894,
      "grad_norm": 1.0006227493286133,
      "learning_rate": 4.99897390853662e-06,
      "loss": 0.8481,
      "step": 134
    },
    {
      "epoch": 0.21137787056367432,
      "grad_norm": 0.9049347639083862,
      "learning_rate": 4.9989126670328945e-06,
      "loss": 0.9186,
      "step": 135
    },
    {
      "epoch": 0.21294363256784968,
      "grad_norm": 1.2151603698730469,
      "learning_rate": 4.9988496509352444e-06,
      "loss": 0.9067,
      "step": 136
    },
    {
      "epoch": 0.21450939457202506,
      "grad_norm": 1.0112006664276123,
      "learning_rate": 4.998784860288417e-06,
      "loss": 0.8843,
      "step": 137
    },
    {
      "epoch": 0.2160751565762004,
      "grad_norm": 0.9316662549972534,
      "learning_rate": 4.9987182951384264e-06,
      "loss": 0.8635,
      "step": 138
    },
    {
      "epoch": 0.2176409185803758,
      "grad_norm": 0.942743718624115,
      "learning_rate": 4.9986499555325406e-06,
      "loss": 0.8481,
      "step": 139
    },
    {
      "epoch": 0.21920668058455114,
      "grad_norm": 1.0298817157745361,
      "learning_rate": 4.998579841519292e-06,
      "loss": 0.865,
      "step": 140
    },
    {
      "epoch": 0.22077244258872653,
      "grad_norm": 1.1556729078292847,
      "learning_rate": 4.998507953148472e-06,
      "loss": 0.8731,
      "step": 141
    },
    {
      "epoch": 0.22233820459290188,
      "grad_norm": 0.997958779335022,
      "learning_rate": 4.9984342904711315e-06,
      "loss": 0.7949,
      "step": 142
    },
    {
      "epoch": 0.22390396659707723,
      "grad_norm": 0.9565998315811157,
      "learning_rate": 4.998358853539582e-06,
      "loss": 0.8875,
      "step": 143
    },
    {
      "epoch": 0.2254697286012526,
      "grad_norm": 0.9667196273803711,
      "learning_rate": 4.998281642407394e-06,
      "loss": 0.8537,
      "step": 144
    },
    {
      "epoch": 0.22703549060542796,
      "grad_norm": 0.9701303839683533,
      "learning_rate": 4.998202657129401e-06,
      "loss": 0.873,
      "step": 145
    },
    {
      "epoch": 0.22860125260960334,
      "grad_norm": 0.9936159253120422,
      "learning_rate": 4.998121897761692e-06,
      "loss": 0.8915,
      "step": 146
    },
    {
      "epoch": 0.2301670146137787,
      "grad_norm": 1.5054311752319336,
      "learning_rate": 4.998039364361617e-06,
      "loss": 0.9944,
      "step": 147
    },
    {
      "epoch": 0.23173277661795408,
      "grad_norm": 1.5895891189575195,
      "learning_rate": 4.99795505698779e-06,
      "loss": 0.9282,
      "step": 148
    },
    {
      "epoch": 0.23329853862212943,
      "grad_norm": 1.0141469240188599,
      "learning_rate": 4.9978689757000785e-06,
      "loss": 0.9166,
      "step": 149
    },
    {
      "epoch": 0.2348643006263048,
      "grad_norm": 1.1720311641693115,
      "learning_rate": 4.997781120559615e-06,
      "loss": 0.9404,
      "step": 150
    },
    {
      "epoch": 0.23643006263048016,
      "grad_norm": 0.9536433815956116,
      "learning_rate": 4.997691491628789e-06,
      "loss": 0.9339,
      "step": 151
    },
    {
      "epoch": 0.23799582463465555,
      "grad_norm": 0.9407872557640076,
      "learning_rate": 4.997600088971249e-06,
      "loss": 0.9019,
      "step": 152
    },
    {
      "epoch": 0.2395615866388309,
      "grad_norm": 0.9202889204025269,
      "learning_rate": 4.997506912651906e-06,
      "loss": 0.9598,
      "step": 153
    },
    {
      "epoch": 0.24112734864300625,
      "grad_norm": 1.0630513429641724,
      "learning_rate": 4.997411962736927e-06,
      "loss": 0.8554,
      "step": 154
    },
    {
      "epoch": 0.24269311064718163,
      "grad_norm": 0.8378801941871643,
      "learning_rate": 4.997315239293742e-06,
      "loss": 0.8583,
      "step": 155
    },
    {
      "epoch": 0.24425887265135698,
      "grad_norm": 0.9442815184593201,
      "learning_rate": 4.997216742391038e-06,
      "loss": 0.8518,
      "step": 156
    },
    {
      "epoch": 0.24582463465553236,
      "grad_norm": 0.9273308515548706,
      "learning_rate": 4.997116472098763e-06,
      "loss": 0.8942,
      "step": 157
    },
    {
      "epoch": 0.24739039665970772,
      "grad_norm": 0.9028997421264648,
      "learning_rate": 4.997014428488121e-06,
      "loss": 0.8592,
      "step": 158
    },
    {
      "epoch": 0.2489561586638831,
      "grad_norm": 0.9877567887306213,
      "learning_rate": 4.996910611631582e-06,
      "loss": 0.9316,
      "step": 159
    },
    {
      "epoch": 0.25052192066805845,
      "grad_norm": 0.961592435836792,
      "learning_rate": 4.996805021602868e-06,
      "loss": 0.8875,
      "step": 160
    },
    {
      "epoch": 0.2520876826722338,
      "grad_norm": 0.9923007488250732,
      "learning_rate": 4.996697658476965e-06,
      "loss": 0.8554,
      "step": 161
    },
    {
      "epoch": 0.2536534446764092,
      "grad_norm": 0.956763744354248,
      "learning_rate": 4.996588522330114e-06,
      "loss": 0.9381,
      "step": 162
    },
    {
      "epoch": 0.25521920668058456,
      "grad_norm": 0.919992208480835,
      "learning_rate": 4.996477613239822e-06,
      "loss": 0.9016,
      "step": 163
    },
    {
      "epoch": 0.2567849686847599,
      "grad_norm": 0.9655581712722778,
      "learning_rate": 4.996364931284847e-06,
      "loss": 0.896,
      "step": 164
    },
    {
      "epoch": 0.25835073068893527,
      "grad_norm": 0.9159606099128723,
      "learning_rate": 4.996250476545212e-06,
      "loss": 0.9475,
      "step": 165
    },
    {
      "epoch": 0.2599164926931106,
      "grad_norm": 1.1147626638412476,
      "learning_rate": 4.996134249102195e-06,
      "loss": 0.8793,
      "step": 166
    },
    {
      "epoch": 0.26148225469728603,
      "grad_norm": 1.112069010734558,
      "learning_rate": 4.996016249038334e-06,
      "loss": 0.8765,
      "step": 167
    },
    {
      "epoch": 0.2630480167014614,
      "grad_norm": 1.0366944074630737,
      "learning_rate": 4.995896476437428e-06,
      "loss": 0.9356,
      "step": 168
    },
    {
      "epoch": 0.26461377870563674,
      "grad_norm": 0.9494239091873169,
      "learning_rate": 4.995774931384534e-06,
      "loss": 0.8246,
      "step": 169
    },
    {
      "epoch": 0.2661795407098121,
      "grad_norm": 0.9819200038909912,
      "learning_rate": 4.995651613965964e-06,
      "loss": 0.9403,
      "step": 170
    },
    {
      "epoch": 0.2677453027139875,
      "grad_norm": 1.7329354286193848,
      "learning_rate": 4.995526524269293e-06,
      "loss": 0.9091,
      "step": 171
    },
    {
      "epoch": 0.26931106471816285,
      "grad_norm": 1.1415139436721802,
      "learning_rate": 4.995399662383352e-06,
      "loss": 0.9106,
      "step": 172
    },
    {
      "epoch": 0.2708768267223382,
      "grad_norm": 0.9471906423568726,
      "learning_rate": 4.995271028398233e-06,
      "loss": 0.8858,
      "step": 173
    },
    {
      "epoch": 0.27244258872651356,
      "grad_norm": 1.096053123474121,
      "learning_rate": 4.995140622405284e-06,
      "loss": 0.7502,
      "step": 174
    },
    {
      "epoch": 0.2740083507306889,
      "grad_norm": 1.285410761833191,
      "learning_rate": 4.995008444497113e-06,
      "loss": 0.9288,
      "step": 175
    },
    {
      "epoch": 0.2755741127348643,
      "grad_norm": 0.9714595079421997,
      "learning_rate": 4.994874494767585e-06,
      "loss": 0.8824,
      "step": 176
    },
    {
      "epoch": 0.27713987473903967,
      "grad_norm": 0.8399011492729187,
      "learning_rate": 4.994738773311824e-06,
      "loss": 0.8703,
      "step": 177
    },
    {
      "epoch": 0.278705636743215,
      "grad_norm": 0.9454224109649658,
      "learning_rate": 4.994601280226212e-06,
      "loss": 0.9627,
      "step": 178
    },
    {
      "epoch": 0.2802713987473904,
      "grad_norm": 0.8983661532402039,
      "learning_rate": 4.99446201560839e-06,
      "loss": 0.9086,
      "step": 179
    },
    {
      "epoch": 0.2818371607515658,
      "grad_norm": 0.9814153909683228,
      "learning_rate": 4.994320979557256e-06,
      "loss": 0.908,
      "step": 180
    },
    {
      "epoch": 0.28340292275574114,
      "grad_norm": 0.8405355215072632,
      "learning_rate": 4.9941781721729645e-06,
      "loss": 0.8414,
      "step": 181
    },
    {
      "epoch": 0.2849686847599165,
      "grad_norm": 0.950950026512146,
      "learning_rate": 4.994033593556933e-06,
      "loss": 0.9091,
      "step": 182
    },
    {
      "epoch": 0.28653444676409184,
      "grad_norm": 1.1019747257232666,
      "learning_rate": 4.99388724381183e-06,
      "loss": 0.8305,
      "step": 183
    },
    {
      "epoch": 0.2881002087682672,
      "grad_norm": 0.9339331984519958,
      "learning_rate": 4.9937391230415875e-06,
      "loss": 0.8542,
      "step": 184
    },
    {
      "epoch": 0.2896659707724426,
      "grad_norm": 0.8813616633415222,
      "learning_rate": 4.993589231351394e-06,
      "loss": 0.848,
      "step": 185
    },
    {
      "epoch": 0.29123173277661796,
      "grad_norm": 0.8916418552398682,
      "learning_rate": 4.9934375688476905e-06,
      "loss": 0.8032,
      "step": 186
    },
    {
      "epoch": 0.2927974947807933,
      "grad_norm": 0.9891005754470825,
      "learning_rate": 4.993284135638182e-06,
      "loss": 0.7937,
      "step": 187
    },
    {
      "epoch": 0.29436325678496866,
      "grad_norm": 0.9842082262039185,
      "learning_rate": 4.99312893183183e-06,
      "loss": 0.9532,
      "step": 188
    },
    {
      "epoch": 0.29592901878914407,
      "grad_norm": 0.993478536605835,
      "learning_rate": 4.99297195753885e-06,
      "loss": 0.9406,
      "step": 189
    },
    {
      "epoch": 0.2974947807933194,
      "grad_norm": 1.0654981136322021,
      "learning_rate": 4.992813212870718e-06,
      "loss": 0.907,
      "step": 190
    },
    {
      "epoch": 0.2990605427974948,
      "grad_norm": 0.8937190771102905,
      "learning_rate": 4.992652697940164e-06,
      "loss": 0.9307,
      "step": 191
    },
    {
      "epoch": 0.30062630480167013,
      "grad_norm": 0.9006052613258362,
      "learning_rate": 4.992490412861178e-06,
      "loss": 0.8808,
      "step": 192
    },
    {
      "epoch": 0.30219206680584554,
      "grad_norm": 0.9601522088050842,
      "learning_rate": 4.992326357749007e-06,
      "loss": 0.8495,
      "step": 193
    },
    {
      "epoch": 0.3037578288100209,
      "grad_norm": 0.9814243316650391,
      "learning_rate": 4.992160532720153e-06,
      "loss": 0.8204,
      "step": 194
    },
    {
      "epoch": 0.30532359081419624,
      "grad_norm": 0.9660456776618958,
      "learning_rate": 4.991992937892377e-06,
      "loss": 0.8209,
      "step": 195
    },
    {
      "epoch": 0.3068893528183716,
      "grad_norm": 0.8504270315170288,
      "learning_rate": 4.991823573384695e-06,
      "loss": 0.8415,
      "step": 196
    },
    {
      "epoch": 0.30845511482254695,
      "grad_norm": 0.8733190894126892,
      "learning_rate": 4.991652439317382e-06,
      "loss": 0.8753,
      "step": 197
    },
    {
      "epoch": 0.31002087682672236,
      "grad_norm": 0.9761794805526733,
      "learning_rate": 4.991479535811967e-06,
      "loss": 0.8923,
      "step": 198
    },
    {
      "epoch": 0.3115866388308977,
      "grad_norm": 1.0400285720825195,
      "learning_rate": 4.991304862991238e-06,
      "loss": 0.899,
      "step": 199
    },
    {
      "epoch": 0.31315240083507306,
      "grad_norm": 0.8561192750930786,
      "learning_rate": 4.991128420979237e-06,
      "loss": 0.9117,
      "step": 200
    },
    {
      "epoch": 0.3147181628392484,
      "grad_norm": 0.8839634656906128,
      "learning_rate": 4.990950209901263e-06,
      "loss": 0.9022,
      "step": 201
    },
    {
      "epoch": 0.3162839248434238,
      "grad_norm": 0.9569464921951294,
      "learning_rate": 4.990770229883873e-06,
      "loss": 0.9013,
      "step": 202
    },
    {
      "epoch": 0.3178496868475992,
      "grad_norm": 1.0600861310958862,
      "learning_rate": 4.990588481054881e-06,
      "loss": 0.8387,
      "step": 203
    },
    {
      "epoch": 0.31941544885177453,
      "grad_norm": 0.9492761492729187,
      "learning_rate": 4.990404963543352e-06,
      "loss": 0.8521,
      "step": 204
    },
    {
      "epoch": 0.3209812108559499,
      "grad_norm": 0.9534817934036255,
      "learning_rate": 4.990219677479612e-06,
      "loss": 0.8009,
      "step": 205
    },
    {
      "epoch": 0.32254697286012524,
      "grad_norm": 0.9918960928916931,
      "learning_rate": 4.990032622995242e-06,
      "loss": 0.8215,
      "step": 206
    },
    {
      "epoch": 0.32411273486430064,
      "grad_norm": 0.8751630783081055,
      "learning_rate": 4.989843800223077e-06,
      "loss": 0.8744,
      "step": 207
    },
    {
      "epoch": 0.325678496868476,
      "grad_norm": 1.0034785270690918,
      "learning_rate": 4.9896532092972085e-06,
      "loss": 0.884,
      "step": 208
    },
    {
      "epoch": 0.32724425887265135,
      "grad_norm": 1.2693432569503784,
      "learning_rate": 4.989460850352984e-06,
      "loss": 0.9689,
      "step": 209
    },
    {
      "epoch": 0.3288100208768267,
      "grad_norm": 0.9923811554908752,
      "learning_rate": 4.989266723527007e-06,
      "loss": 0.9622,
      "step": 210
    },
    {
      "epoch": 0.3303757828810021,
      "grad_norm": 0.9439074397087097,
      "learning_rate": 4.989070828957137e-06,
      "loss": 0.9155,
      "step": 211
    },
    {
      "epoch": 0.33194154488517746,
      "grad_norm": 1.2564488649368286,
      "learning_rate": 4.988873166782485e-06,
      "loss": 0.8712,
      "step": 212
    },
    {
      "epoch": 0.3335073068893528,
      "grad_norm": 1.1441881656646729,
      "learning_rate": 4.988673737143423e-06,
      "loss": 0.8895,
      "step": 213
    },
    {
      "epoch": 0.33507306889352817,
      "grad_norm": 0.972155749797821,
      "learning_rate": 4.988472540181574e-06,
      "loss": 0.9158,
      "step": 214
    },
    {
      "epoch": 0.3366388308977035,
      "grad_norm": 0.9536120295524597,
      "learning_rate": 4.988269576039817e-06,
      "loss": 0.7963,
      "step": 215
    },
    {
      "epoch": 0.33820459290187893,
      "grad_norm": 0.8956594467163086,
      "learning_rate": 4.988064844862288e-06,
      "loss": 0.8947,
      "step": 216
    },
    {
      "epoch": 0.3397703549060543,
      "grad_norm": 0.9503691792488098,
      "learning_rate": 4.987858346794373e-06,
      "loss": 0.7857,
      "step": 217
    },
    {
      "epoch": 0.34133611691022964,
      "grad_norm": 0.9471456408500671,
      "learning_rate": 4.987650081982719e-06,
      "loss": 0.9118,
      "step": 218
    },
    {
      "epoch": 0.342901878914405,
      "grad_norm": 0.9485713839530945,
      "learning_rate": 4.987440050575222e-06,
      "loss": 0.8188,
      "step": 219
    },
    {
      "epoch": 0.3444676409185804,
      "grad_norm": 1.0775927305221558,
      "learning_rate": 4.987228252721037e-06,
      "loss": 0.9469,
      "step": 220
    },
    {
      "epoch": 0.34603340292275575,
      "grad_norm": 0.9173988103866577,
      "learning_rate": 4.9870146885705705e-06,
      "loss": 0.8566,
      "step": 221
    },
    {
      "epoch": 0.3475991649269311,
      "grad_norm": 1.008735179901123,
      "learning_rate": 4.986799358275485e-06,
      "loss": 0.8637,
      "step": 222
    },
    {
      "epoch": 0.34916492693110646,
      "grad_norm": 0.8183198571205139,
      "learning_rate": 4.986582261988695e-06,
      "loss": 0.9321,
      "step": 223
    },
    {
      "epoch": 0.35073068893528186,
      "grad_norm": 0.8951883316040039,
      "learning_rate": 4.986363399864372e-06,
      "loss": 0.8015,
      "step": 224
    },
    {
      "epoch": 0.3522964509394572,
      "grad_norm": 0.9275704622268677,
      "learning_rate": 4.986142772057941e-06,
      "loss": 0.8242,
      "step": 225
    },
    {
      "epoch": 0.35386221294363257,
      "grad_norm": 0.9548516273498535,
      "learning_rate": 4.985920378726077e-06,
      "loss": 0.7889,
      "step": 226
    },
    {
      "epoch": 0.3554279749478079,
      "grad_norm": 1.1203585863113403,
      "learning_rate": 4.985696220026714e-06,
      "loss": 0.835,
      "step": 227
    },
    {
      "epoch": 0.3569937369519833,
      "grad_norm": 0.9525771141052246,
      "learning_rate": 4.985470296119038e-06,
      "loss": 0.9162,
      "step": 228
    },
    {
      "epoch": 0.3585594989561587,
      "grad_norm": 1.150404930114746,
      "learning_rate": 4.985242607163488e-06,
      "loss": 0.9456,
      "step": 229
    },
    {
      "epoch": 0.36012526096033404,
      "grad_norm": 1.01652991771698,
      "learning_rate": 4.985013153321755e-06,
      "loss": 0.87,
      "step": 230
    },
    {
      "epoch": 0.3616910229645094,
      "grad_norm": 0.9455218315124512,
      "learning_rate": 4.984781934756786e-06,
      "loss": 0.8466,
      "step": 231
    },
    {
      "epoch": 0.36325678496868474,
      "grad_norm": 0.9125218391418457,
      "learning_rate": 4.984548951632779e-06,
      "loss": 0.8764,
      "step": 232
    },
    {
      "epoch": 0.36482254697286015,
      "grad_norm": 2.0192980766296387,
      "learning_rate": 4.984314204115187e-06,
      "loss": 0.7866,
      "step": 233
    },
    {
      "epoch": 0.3663883089770355,
      "grad_norm": 1.8118515014648438,
      "learning_rate": 4.984077692370716e-06,
      "loss": 0.8703,
      "step": 234
    },
    {
      "epoch": 0.36795407098121086,
      "grad_norm": 1.2995024919509888,
      "learning_rate": 4.983839416567322e-06,
      "loss": 0.9459,
      "step": 235
    },
    {
      "epoch": 0.3695198329853862,
      "grad_norm": 1.0611528158187866,
      "learning_rate": 4.983599376874216e-06,
      "loss": 0.8456,
      "step": 236
    },
    {
      "epoch": 0.37108559498956156,
      "grad_norm": 1.2018043994903564,
      "learning_rate": 4.983357573461863e-06,
      "loss": 0.914,
      "step": 237
    },
    {
      "epoch": 0.37265135699373697,
      "grad_norm": 0.9091911315917969,
      "learning_rate": 4.983114006501978e-06,
      "loss": 0.791,
      "step": 238
    },
    {
      "epoch": 0.3742171189979123,
      "grad_norm": 0.980873167514801,
      "learning_rate": 4.982868676167528e-06,
      "loss": 0.77,
      "step": 239
    },
    {
      "epoch": 0.3757828810020877,
      "grad_norm": 0.8945695161819458,
      "learning_rate": 4.982621582632735e-06,
      "loss": 0.8038,
      "step": 240
    },
    {
      "epoch": 0.37734864300626303,
      "grad_norm": 0.9325360655784607,
      "learning_rate": 4.9823727260730705e-06,
      "loss": 0.8908,
      "step": 241
    },
    {
      "epoch": 0.37891440501043844,
      "grad_norm": 0.987630307674408,
      "learning_rate": 4.98212210666526e-06,
      "loss": 0.8642,
      "step": 242
    },
    {
      "epoch": 0.3804801670146138,
      "grad_norm": 0.9151357412338257,
      "learning_rate": 4.98186972458728e-06,
      "loss": 0.8437,
      "step": 243
    },
    {
      "epoch": 0.38204592901878914,
      "grad_norm": 0.9084850549697876,
      "learning_rate": 4.981615580018358e-06,
      "loss": 0.8775,
      "step": 244
    },
    {
      "epoch": 0.3836116910229645,
      "grad_norm": 0.9786385297775269,
      "learning_rate": 4.981359673138974e-06,
      "loss": 0.8251,
      "step": 245
    },
    {
      "epoch": 0.38517745302713985,
      "grad_norm": 0.8947877883911133,
      "learning_rate": 4.98110200413086e-06,
      "loss": 0.7709,
      "step": 246
    },
    {
      "epoch": 0.38674321503131526,
      "grad_norm": 0.967019259929657,
      "learning_rate": 4.9808425731769985e-06,
      "loss": 0.8954,
      "step": 247
    },
    {
      "epoch": 0.3883089770354906,
      "grad_norm": 1.1066704988479614,
      "learning_rate": 4.980581380461622e-06,
      "loss": 0.8544,
      "step": 248
    },
    {
      "epoch": 0.38987473903966596,
      "grad_norm": 0.9750981330871582,
      "learning_rate": 4.980318426170218e-06,
      "loss": 0.8354,
      "step": 249
    },
    {
      "epoch": 0.3914405010438413,
      "grad_norm": 0.8727232217788696,
      "learning_rate": 4.980053710489521e-06,
      "loss": 0.8911,
      "step": 250
    },
    {
      "epoch": 0.3930062630480167,
      "grad_norm": 0.9337188601493835,
      "learning_rate": 4.979787233607518e-06,
      "loss": 0.8567,
      "step": 251
    },
    {
      "epoch": 0.3945720250521921,
      "grad_norm": 0.890701174736023,
      "learning_rate": 4.979518995713448e-06,
      "loss": 0.8327,
      "step": 252
    },
    {
      "epoch": 0.39613778705636743,
      "grad_norm": 0.9973195195198059,
      "learning_rate": 4.979248996997797e-06,
      "loss": 0.9065,
      "step": 253
    },
    {
      "epoch": 0.3977035490605428,
      "grad_norm": 0.9894055128097534,
      "learning_rate": 4.978977237652306e-06,
      "loss": 0.8729,
      "step": 254
    },
    {
      "epoch": 0.39926931106471814,
      "grad_norm": 1.0398536920547485,
      "learning_rate": 4.978703717869961e-06,
      "loss": 0.861,
      "step": 255
    },
    {
      "epoch": 0.40083507306889354,
      "grad_norm": 1.1713942289352417,
      "learning_rate": 4.978428437845003e-06,
      "loss": 0.8183,
      "step": 256
    },
    {
      "epoch": 0.4024008350730689,
      "grad_norm": 0.9868776202201843,
      "learning_rate": 4.9781513977729225e-06,
      "loss": 0.941,
      "step": 257
    },
    {
      "epoch": 0.40396659707724425,
      "grad_norm": 0.9260863065719604,
      "learning_rate": 4.977872597850455e-06,
      "loss": 0.8963,
      "step": 258
    },
    {
      "epoch": 0.4055323590814196,
      "grad_norm": 0.9553239941596985,
      "learning_rate": 4.977592038275592e-06,
      "loss": 0.8102,
      "step": 259
    },
    {
      "epoch": 0.407098121085595,
      "grad_norm": 0.9426907300949097,
      "learning_rate": 4.977309719247571e-06,
      "loss": 0.89,
      "step": 260
    },
    {
      "epoch": 0.40866388308977036,
      "grad_norm": 0.9691527485847473,
      "learning_rate": 4.97702564096688e-06,
      "loss": 0.7967,
      "step": 261
    },
    {
      "epoch": 0.4102296450939457,
      "grad_norm": 0.9917487502098083,
      "learning_rate": 4.976739803635256e-06,
      "loss": 0.8327,
      "step": 262
    },
    {
      "epoch": 0.41179540709812107,
      "grad_norm": 0.992340087890625,
      "learning_rate": 4.976452207455686e-06,
      "loss": 0.9462,
      "step": 263
    },
    {
      "epoch": 0.4133611691022965,
      "grad_norm": 0.9554483890533447,
      "learning_rate": 4.9761628526324035e-06,
      "loss": 0.8294,
      "step": 264
    },
    {
      "epoch": 0.41492693110647183,
      "grad_norm": 0.9318789839744568,
      "learning_rate": 4.975871739370895e-06,
      "loss": 0.8079,
      "step": 265
    },
    {
      "epoch": 0.4164926931106472,
      "grad_norm": 0.9158215522766113,
      "learning_rate": 4.975578867877893e-06,
      "loss": 0.8632,
      "step": 266
    },
    {
      "epoch": 0.41805845511482254,
      "grad_norm": 0.9444370865821838,
      "learning_rate": 4.975284238361378e-06,
      "loss": 0.8897,
      "step": 267
    },
    {
      "epoch": 0.4196242171189979,
      "grad_norm": 0.9949231743812561,
      "learning_rate": 4.974987851030581e-06,
      "loss": 0.7905,
      "step": 268
    },
    {
      "epoch": 0.4211899791231733,
      "grad_norm": 0.9819440245628357,
      "learning_rate": 4.974689706095981e-06,
      "loss": 0.9183,
      "step": 269
    },
    {
      "epoch": 0.42275574112734865,
      "grad_norm": 1.0787206888198853,
      "learning_rate": 4.974389803769304e-06,
      "loss": 0.7739,
      "step": 270
    },
    {
      "epoch": 0.424321503131524,
      "grad_norm": 0.8871361017227173,
      "learning_rate": 4.9740881442635235e-06,
      "loss": 0.9454,
      "step": 271
    },
    {
      "epoch": 0.42588726513569936,
      "grad_norm": 0.9125893115997314,
      "learning_rate": 4.973784727792863e-06,
      "loss": 0.8017,
      "step": 272
    },
    {
      "epoch": 0.42745302713987476,
      "grad_norm": 0.9102838635444641,
      "learning_rate": 4.973479554572793e-06,
      "loss": 0.8307,
      "step": 273
    },
    {
      "epoch": 0.4290187891440501,
      "grad_norm": 0.8780474662780762,
      "learning_rate": 4.973172624820031e-06,
      "loss": 0.9402,
      "step": 274
    },
    {
      "epoch": 0.43058455114822547,
      "grad_norm": 1.0059378147125244,
      "learning_rate": 4.972863938752541e-06,
      "loss": 0.7438,
      "step": 275
    },
    {
      "epoch": 0.4321503131524008,
      "grad_norm": 0.9684615135192871,
      "learning_rate": 4.972553496589537e-06,
      "loss": 0.8897,
      "step": 276
    },
    {
      "epoch": 0.4337160751565762,
      "grad_norm": 0.9615057110786438,
      "learning_rate": 4.972241298551477e-06,
      "loss": 0.8157,
      "step": 277
    },
    {
      "epoch": 0.4352818371607516,
      "grad_norm": 0.8589702844619751,
      "learning_rate": 4.971927344860068e-06,
      "loss": 0.7813,
      "step": 278
    },
    {
      "epoch": 0.43684759916492694,
      "grad_norm": 0.900324285030365,
      "learning_rate": 4.971611635738263e-06,
      "loss": 0.7534,
      "step": 279
    },
    {
      "epoch": 0.4384133611691023,
      "grad_norm": 0.8881833553314209,
      "learning_rate": 4.971294171410261e-06,
      "loss": 0.8255,
      "step": 280
    },
    {
      "epoch": 0.43997912317327764,
      "grad_norm": 0.9723751544952393,
      "learning_rate": 4.9709749521015085e-06,
      "loss": 0.8796,
      "step": 281
    },
    {
      "epoch": 0.44154488517745305,
      "grad_norm": 0.9341713190078735,
      "learning_rate": 4.9706539780386995e-06,
      "loss": 0.7429,
      "step": 282
    },
    {
      "epoch": 0.4431106471816284,
      "grad_norm": 0.9354205131530762,
      "learning_rate": 4.97033124944977e-06,
      "loss": 0.8545,
      "step": 283
    },
    {
      "epoch": 0.44467640918580376,
      "grad_norm": 0.9294624328613281,
      "learning_rate": 4.970006766563906e-06,
      "loss": 0.7783,
      "step": 284
    },
    {
      "epoch": 0.4462421711899791,
      "grad_norm": 0.9584887027740479,
      "learning_rate": 4.969680529611538e-06,
      "loss": 0.8068,
      "step": 285
    },
    {
      "epoch": 0.44780793319415446,
      "grad_norm": 0.9525735378265381,
      "learning_rate": 4.9693525388243424e-06,
      "loss": 0.8055,
      "step": 286
    },
    {
      "epoch": 0.44937369519832987,
      "grad_norm": 0.8845317959785461,
      "learning_rate": 4.96902279443524e-06,
      "loss": 0.8318,
      "step": 287
    },
    {
      "epoch": 0.4509394572025052,
      "grad_norm": 0.8932276964187622,
      "learning_rate": 4.9686912966783965e-06,
      "loss": 0.8185,
      "step": 288
    },
    {
      "epoch": 0.4525052192066806,
      "grad_norm": 0.9881951808929443,
      "learning_rate": 4.968358045789225e-06,
      "loss": 0.9001,
      "step": 289
    },
    {
      "epoch": 0.45407098121085593,
      "grad_norm": 0.995212197303772,
      "learning_rate": 4.968023042004384e-06,
      "loss": 0.75,
      "step": 290
    },
    {
      "epoch": 0.45563674321503134,
      "grad_norm": 0.9997846484184265,
      "learning_rate": 4.967686285561773e-06,
      "loss": 0.8993,
      "step": 291
    },
    {
      "epoch": 0.4572025052192067,
      "grad_norm": 0.9438545107841492,
      "learning_rate": 4.967347776700538e-06,
      "loss": 0.8964,
      "step": 292
    },
    {
      "epoch": 0.45876826722338204,
      "grad_norm": 0.9164395332336426,
      "learning_rate": 4.967007515661072e-06,
      "loss": 0.8671,
      "step": 293
    },
    {
      "epoch": 0.4603340292275574,
      "grad_norm": 0.895302951335907,
      "learning_rate": 4.966665502685009e-06,
      "loss": 0.8785,
      "step": 294
    },
    {
      "epoch": 0.4618997912317328,
      "grad_norm": 0.9122863411903381,
      "learning_rate": 4.966321738015227e-06,
      "loss": 0.8499,
      "step": 295
    },
    {
      "epoch": 0.46346555323590816,
      "grad_norm": 1.0097336769104004,
      "learning_rate": 4.965976221895852e-06,
      "loss": 0.83,
      "step": 296
    },
    {
      "epoch": 0.4650313152400835,
      "grad_norm": 1.0664499998092651,
      "learning_rate": 4.965628954572249e-06,
      "loss": 0.7799,
      "step": 297
    },
    {
      "epoch": 0.46659707724425886,
      "grad_norm": 0.9602341055870056,
      "learning_rate": 4.96527993629103e-06,
      "loss": 0.8061,
      "step": 298
    },
    {
      "epoch": 0.4681628392484342,
      "grad_norm": 1.0133620500564575,
      "learning_rate": 4.964929167300048e-06,
      "loss": 0.9148,
      "step": 299
    },
    {
      "epoch": 0.4697286012526096,
      "grad_norm": 0.9351868033409119,
      "learning_rate": 4.964576647848401e-06,
      "loss": 0.83,
      "step": 300
    },
    {
      "epoch": 0.471294363256785,
      "grad_norm": 1.0078860521316528,
      "learning_rate": 4.964222378186429e-06,
      "loss": 0.8391,
      "step": 301
    },
    {
      "epoch": 0.47286012526096033,
      "grad_norm": 0.8966493606567383,
      "learning_rate": 4.963866358565716e-06,
      "loss": 0.8677,
      "step": 302
    },
    {
      "epoch": 0.4744258872651357,
      "grad_norm": 0.9283657670021057,
      "learning_rate": 4.963508589239087e-06,
      "loss": 0.8787,
      "step": 303
    },
    {
      "epoch": 0.4759916492693111,
      "grad_norm": 0.9540023803710938,
      "learning_rate": 4.963149070460611e-06,
      "loss": 0.8667,
      "step": 304
    },
    {
      "epoch": 0.47755741127348644,
      "grad_norm": 0.9580283164978027,
      "learning_rate": 4.9627878024855995e-06,
      "loss": 0.8442,
      "step": 305
    },
    {
      "epoch": 0.4791231732776618,
      "grad_norm": 0.9008278846740723,
      "learning_rate": 4.962424785570605e-06,
      "loss": 0.8605,
      "step": 306
    },
    {
      "epoch": 0.48068893528183715,
      "grad_norm": 0.9681883454322815,
      "learning_rate": 4.962060019973423e-06,
      "loss": 0.7865,
      "step": 307
    },
    {
      "epoch": 0.4822546972860125,
      "grad_norm": 0.8492597341537476,
      "learning_rate": 4.9616935059530915e-06,
      "loss": 0.8578,
      "step": 308
    },
    {
      "epoch": 0.4838204592901879,
      "grad_norm": 0.9323164820671082,
      "learning_rate": 4.9613252437698865e-06,
      "loss": 0.8491,
      "step": 309
    },
    {
      "epoch": 0.48538622129436326,
      "grad_norm": 0.9417833685874939,
      "learning_rate": 4.960955233685331e-06,
      "loss": 0.8108,
      "step": 310
    },
    {
      "epoch": 0.4869519832985386,
      "grad_norm": 0.901364803314209,
      "learning_rate": 4.960583475962185e-06,
      "loss": 0.8958,
      "step": 311
    },
    {
      "epoch": 0.48851774530271397,
      "grad_norm": 0.8929979205131531,
      "learning_rate": 4.96020997086445e-06,
      "loss": 0.8237,
      "step": 312
    },
    {
      "epoch": 0.4900835073068894,
      "grad_norm": 0.9422337412834167,
      "learning_rate": 4.9598347186573716e-06,
      "loss": 0.8024,
      "step": 313
    },
    {
      "epoch": 0.49164926931106473,
      "grad_norm": 0.9165504574775696,
      "learning_rate": 4.959457719607432e-06,
      "loss": 0.8124,
      "step": 314
    },
    {
      "epoch": 0.4932150313152401,
      "grad_norm": 1.0233370065689087,
      "learning_rate": 4.959078973982356e-06,
      "loss": 0.788,
      "step": 315
    },
    {
      "epoch": 0.49478079331941544,
      "grad_norm": 0.9375972151756287,
      "learning_rate": 4.958698482051109e-06,
      "loss": 0.839,
      "step": 316
    },
    {
      "epoch": 0.4963465553235908,
      "grad_norm": 0.8805437684059143,
      "learning_rate": 4.958316244083895e-06,
      "loss": 0.7445,
      "step": 317
    },
    {
      "epoch": 0.4979123173277662,
      "grad_norm": 1.0231893062591553,
      "learning_rate": 4.95793226035216e-06,
      "loss": 0.8257,
      "step": 318
    },
    {
      "epoch": 0.49947807933194155,
      "grad_norm": 0.9125928282737732,
      "learning_rate": 4.957546531128589e-06,
      "loss": 0.7699,
      "step": 319
    },
    {
      "epoch": 0.5010438413361169,
      "grad_norm": 0.8844016790390015,
      "learning_rate": 4.957159056687104e-06,
      "loss": 0.8784,
      "step": 320
    },
    {
      "epoch": 0.5026096033402923,
      "grad_norm": 0.9770572781562805,
      "learning_rate": 4.956769837302871e-06,
      "loss": 0.8611,
      "step": 321
    },
    {
      "epoch": 0.5041753653444676,
      "grad_norm": 0.9413725733757019,
      "learning_rate": 4.956378873252293e-06,
      "loss": 0.8056,
      "step": 322
    },
    {
      "epoch": 0.505741127348643,
      "grad_norm": 0.9058850407600403,
      "learning_rate": 4.955986164813008e-06,
      "loss": 0.8162,
      "step": 323
    },
    {
      "epoch": 0.5073068893528184,
      "grad_norm": 0.893100917339325,
      "learning_rate": 4.955591712263901e-06,
      "loss": 0.7554,
      "step": 324
    },
    {
      "epoch": 0.5088726513569938,
      "grad_norm": 0.8967075943946838,
      "learning_rate": 4.955195515885089e-06,
      "loss": 0.8143,
      "step": 325
    },
    {
      "epoch": 0.5104384133611691,
      "grad_norm": 0.9542785286903381,
      "learning_rate": 4.954797575957929e-06,
      "loss": 0.8102,
      "step": 326
    },
    {
      "epoch": 0.5120041753653445,
      "grad_norm": 1.006451964378357,
      "learning_rate": 4.954397892765018e-06,
      "loss": 0.86,
      "step": 327
    },
    {
      "epoch": 0.5135699373695198,
      "grad_norm": 0.9964715838432312,
      "learning_rate": 4.953996466590189e-06,
      "loss": 0.7166,
      "step": 328
    },
    {
      "epoch": 0.5151356993736952,
      "grad_norm": 0.9568808674812317,
      "learning_rate": 4.953593297718512e-06,
      "loss": 0.8753,
      "step": 329
    },
    {
      "epoch": 0.5167014613778705,
      "grad_norm": 0.9435825347900391,
      "learning_rate": 4.953188386436298e-06,
      "loss": 0.8961,
      "step": 330
    },
    {
      "epoch": 0.5182672233820459,
      "grad_norm": 0.9386457800865173,
      "learning_rate": 4.952781733031092e-06,
      "loss": 0.8468,
      "step": 331
    },
    {
      "epoch": 0.5198329853862212,
      "grad_norm": 0.9916554689407349,
      "learning_rate": 4.952373337791678e-06,
      "loss": 0.8274,
      "step": 332
    },
    {
      "epoch": 0.5213987473903967,
      "grad_norm": 0.9351572394371033,
      "learning_rate": 4.9519632010080765e-06,
      "loss": 0.7964,
      "step": 333
    },
    {
      "epoch": 0.5229645093945721,
      "grad_norm": 0.9936269521713257,
      "learning_rate": 4.951551322971545e-06,
      "loss": 0.8354,
      "step": 334
    },
    {
      "epoch": 0.5245302713987474,
      "grad_norm": 0.9678221940994263,
      "learning_rate": 4.951137703974576e-06,
      "loss": 0.9115,
      "step": 335
    },
    {
      "epoch": 0.5260960334029228,
      "grad_norm": 0.9453133940696716,
      "learning_rate": 4.9507223443109005e-06,
      "loss": 0.7725,
      "step": 336
    },
    {
      "epoch": 0.5276617954070981,
      "grad_norm": 0.9577226638793945,
      "learning_rate": 4.950305244275484e-06,
      "loss": 0.8679,
      "step": 337
    },
    {
      "epoch": 0.5292275574112735,
      "grad_norm": 0.998489260673523,
      "learning_rate": 4.94988640416453e-06,
      "loss": 0.7505,
      "step": 338
    },
    {
      "epoch": 0.5307933194154488,
      "grad_norm": 0.976026713848114,
      "learning_rate": 4.949465824275475e-06,
      "loss": 0.9083,
      "step": 339
    },
    {
      "epoch": 0.5323590814196242,
      "grad_norm": 1.0041502714157104,
      "learning_rate": 4.9490435049069925e-06,
      "loss": 0.8815,
      "step": 340
    },
    {
      "epoch": 0.5339248434237995,
      "grad_norm": 0.9839801788330078,
      "learning_rate": 4.948619446358991e-06,
      "loss": 0.8517,
      "step": 341
    },
    {
      "epoch": 0.535490605427975,
      "grad_norm": 0.9995140433311462,
      "learning_rate": 4.948193648932616e-06,
      "loss": 0.8157,
      "step": 342
    },
    {
      "epoch": 0.5370563674321504,
      "grad_norm": 1.0782225131988525,
      "learning_rate": 4.947766112930243e-06,
      "loss": 0.9206,
| "step": 343 | |
| }, | |
| { | |
| "epoch": 0.5386221294363257, | |
| "grad_norm": 0.9981802105903625, | |
| "learning_rate": 4.947336838655487e-06, | |
| "loss": 0.8389, | |
| "step": 344 | |
| }, | |
| { | |
| "epoch": 0.5401878914405011, | |
| "grad_norm": 0.9261236190795898, | |
| "learning_rate": 4.946905826413195e-06, | |
| "loss": 0.8035, | |
| "step": 345 | |
| }, | |
| { | |
| "epoch": 0.5417536534446764, | |
| "grad_norm": 0.9195284247398376, | |
| "learning_rate": 4.946473076509449e-06, | |
| "loss": 0.8265, | |
| "step": 346 | |
| }, | |
| { | |
| "epoch": 0.5433194154488518, | |
| "grad_norm": 0.9535123109817505, | |
| "learning_rate": 4.946038589251566e-06, | |
| "loss": 0.7528, | |
| "step": 347 | |
| }, | |
| { | |
| "epoch": 0.5448851774530271, | |
| "grad_norm": 0.9969532489776611, | |
| "learning_rate": 4.9456023649480935e-06, | |
| "loss": 0.7787, | |
| "step": 348 | |
| }, | |
| { | |
| "epoch": 0.5464509394572025, | |
| "grad_norm": 0.9612399935722351, | |
| "learning_rate": 4.945164403908816e-06, | |
| "loss": 0.8298, | |
| "step": 349 | |
| }, | |
| { | |
| "epoch": 0.5480167014613778, | |
| "grad_norm": 0.9541627764701843, | |
| "learning_rate": 4.944724706444749e-06, | |
| "loss": 0.8039, | |
| "step": 350 | |
| }, | |
| { | |
| "epoch": 0.5495824634655533, | |
| "grad_norm": 0.9916557669639587, | |
| "learning_rate": 4.944283272868143e-06, | |
| "loss": 0.9012, | |
| "step": 351 | |
| }, | |
| { | |
| "epoch": 0.5511482254697286, | |
| "grad_norm": 0.8807661533355713, | |
| "learning_rate": 4.94384010349248e-06, | |
| "loss": 0.9005, | |
| "step": 352 | |
| }, | |
| { | |
| "epoch": 0.552713987473904, | |
| "grad_norm": 1.0179708003997803, | |
| "learning_rate": 4.943395198632476e-06, | |
| "loss": 0.8845, | |
| "step": 353 | |
| }, | |
| { | |
| "epoch": 0.5542797494780793, | |
| "grad_norm": 0.923014223575592, | |
| "learning_rate": 4.942948558604077e-06, | |
| "loss": 0.8173, | |
| "step": 354 | |
| }, | |
| { | |
| "epoch": 0.5558455114822547, | |
| "grad_norm": 0.9318749308586121, | |
| "learning_rate": 4.942500183724463e-06, | |
| "loss": 0.8828, | |
| "step": 355 | |
| }, | |
| { | |
| "epoch": 0.55741127348643, | |
| "grad_norm": 1.0907000303268433, | |
| "learning_rate": 4.942050074312048e-06, | |
| "loss": 0.7951, | |
| "step": 356 | |
| }, | |
| { | |
| "epoch": 0.5589770354906054, | |
| "grad_norm": 0.9811620712280273, | |
| "learning_rate": 4.9415982306864744e-06, | |
| "loss": 0.8826, | |
| "step": 357 | |
| }, | |
| { | |
| "epoch": 0.5605427974947808, | |
| "grad_norm": 0.9888150691986084, | |
| "learning_rate": 4.9411446531686156e-06, | |
| "loss": 0.8338, | |
| "step": 358 | |
| }, | |
| { | |
| "epoch": 0.5621085594989561, | |
| "grad_norm": 0.9389257431030273, | |
| "learning_rate": 4.9406893420805804e-06, | |
| "loss": 0.8316, | |
| "step": 359 | |
| }, | |
| { | |
| "epoch": 0.5636743215031316, | |
| "grad_norm": 0.9500603675842285, | |
| "learning_rate": 4.940232297745705e-06, | |
| "loss": 0.8764, | |
| "step": 360 | |
| }, | |
| { | |
| "epoch": 0.5652400835073069, | |
| "grad_norm": 0.9748769998550415, | |
| "learning_rate": 4.939773520488559e-06, | |
| "loss": 0.8308, | |
| "step": 361 | |
| }, | |
| { | |
| "epoch": 0.5668058455114823, | |
| "grad_norm": 0.9179681539535522, | |
| "learning_rate": 4.93931301063494e-06, | |
| "loss": 0.789, | |
| "step": 362 | |
| }, | |
| { | |
| "epoch": 0.5683716075156576, | |
| "grad_norm": 0.8609325289726257, | |
| "learning_rate": 4.938850768511878e-06, | |
| "loss": 0.7368, | |
| "step": 363 | |
| }, | |
| { | |
| "epoch": 0.569937369519833, | |
| "grad_norm": 0.9375531673431396, | |
| "learning_rate": 4.9383867944476325e-06, | |
| "loss": 0.8164, | |
| "step": 364 | |
| }, | |
| { | |
| "epoch": 0.5715031315240083, | |
| "grad_norm": 0.9386453032493591, | |
| "learning_rate": 4.937921088771694e-06, | |
| "loss": 0.7898, | |
| "step": 365 | |
| }, | |
| { | |
| "epoch": 0.5730688935281837, | |
| "grad_norm": 1.025040864944458, | |
| "learning_rate": 4.937453651814781e-06, | |
| "loss": 0.8465, | |
| "step": 366 | |
| }, | |
| { | |
| "epoch": 0.574634655532359, | |
| "grad_norm": 1.1037245988845825, | |
| "learning_rate": 4.936984483908842e-06, | |
| "loss": 0.7935, | |
| "step": 367 | |
| }, | |
| { | |
| "epoch": 0.5762004175365344, | |
| "grad_norm": 1.230821132659912, | |
| "learning_rate": 4.936513585387055e-06, | |
| "loss": 0.8043, | |
| "step": 368 | |
| }, | |
| { | |
| "epoch": 0.5777661795407099, | |
| "grad_norm": 1.090235948562622, | |
| "learning_rate": 4.9360409565838265e-06, | |
| "loss": 0.8726, | |
| "step": 369 | |
| }, | |
| { | |
| "epoch": 0.5793319415448852, | |
| "grad_norm": 0.8977810144424438, | |
| "learning_rate": 4.935566597834793e-06, | |
| "loss": 0.8667, | |
| "step": 370 | |
| }, | |
| { | |
| "epoch": 0.5808977035490606, | |
| "grad_norm": 0.96401447057724, | |
| "learning_rate": 4.935090509476818e-06, | |
| "loss": 0.8494, | |
| "step": 371 | |
| }, | |
| { | |
| "epoch": 0.5824634655532359, | |
| "grad_norm": 0.9290149211883545, | |
| "learning_rate": 4.934612691847995e-06, | |
| "loss": 0.7678, | |
| "step": 372 | |
| }, | |
| { | |
| "epoch": 0.5840292275574113, | |
| "grad_norm": 0.9765584468841553, | |
| "learning_rate": 4.934133145287644e-06, | |
| "loss": 0.7947, | |
| "step": 373 | |
| }, | |
| { | |
| "epoch": 0.5855949895615866, | |
| "grad_norm": 0.8939430117607117, | |
| "learning_rate": 4.933651870136313e-06, | |
| "loss": 0.885, | |
| "step": 374 | |
| }, | |
| { | |
| "epoch": 0.587160751565762, | |
| "grad_norm": 0.9512760043144226, | |
| "learning_rate": 4.933168866735777e-06, | |
| "loss": 0.8863, | |
| "step": 375 | |
| }, | |
| { | |
| "epoch": 0.5887265135699373, | |
| "grad_norm": 1.0334441661834717, | |
| "learning_rate": 4.932684135429041e-06, | |
| "loss": 0.8362, | |
| "step": 376 | |
| }, | |
| { | |
| "epoch": 0.5902922755741128, | |
| "grad_norm": 0.9402525424957275, | |
| "learning_rate": 4.932197676560334e-06, | |
| "loss": 0.8758, | |
| "step": 377 | |
| }, | |
| { | |
| "epoch": 0.5918580375782881, | |
| "grad_norm": 0.9615777730941772, | |
| "learning_rate": 4.931709490475113e-06, | |
| "loss": 0.8324, | |
| "step": 378 | |
| }, | |
| { | |
| "epoch": 0.5934237995824635, | |
| "grad_norm": 0.9407138228416443, | |
| "learning_rate": 4.931219577520061e-06, | |
| "loss": 0.8454, | |
| "step": 379 | |
| }, | |
| { | |
| "epoch": 0.5949895615866388, | |
| "grad_norm": 1.0415815114974976, | |
| "learning_rate": 4.930727938043091e-06, | |
| "loss": 0.8649, | |
| "step": 380 | |
| }, | |
| { | |
| "epoch": 0.5965553235908142, | |
| "grad_norm": 1.0946165323257446, | |
| "learning_rate": 4.9302345723933344e-06, | |
| "loss": 0.7794, | |
| "step": 381 | |
| }, | |
| { | |
| "epoch": 0.5981210855949896, | |
| "grad_norm": 1.0532523393630981, | |
| "learning_rate": 4.929739480921157e-06, | |
| "loss": 0.8555, | |
| "step": 382 | |
| }, | |
| { | |
| "epoch": 0.5996868475991649, | |
| "grad_norm": 0.9441399574279785, | |
| "learning_rate": 4.929242663978144e-06, | |
| "loss": 0.7613, | |
| "step": 383 | |
| }, | |
| { | |
| "epoch": 0.6012526096033403, | |
| "grad_norm": 0.9970793128013611, | |
| "learning_rate": 4.92874412191711e-06, | |
| "loss": 0.9037, | |
| "step": 384 | |
| }, | |
| { | |
| "epoch": 0.6028183716075156, | |
| "grad_norm": 0.9777354001998901, | |
| "learning_rate": 4.928243855092091e-06, | |
| "loss": 0.9044, | |
| "step": 385 | |
| }, | |
| { | |
| "epoch": 0.6043841336116911, | |
| "grad_norm": 0.9507855772972107, | |
| "learning_rate": 4.92774186385835e-06, | |
| "loss": 0.8359, | |
| "step": 386 | |
| }, | |
| { | |
| "epoch": 0.6059498956158664, | |
| "grad_norm": 0.9827553033828735, | |
| "learning_rate": 4.9272381485723765e-06, | |
| "loss": 0.7938, | |
| "step": 387 | |
| }, | |
| { | |
| "epoch": 0.6075156576200418, | |
| "grad_norm": 0.8896475434303284, | |
| "learning_rate": 4.926732709591879e-06, | |
| "loss": 0.8545, | |
| "step": 388 | |
| }, | |
| { | |
| "epoch": 0.6090814196242171, | |
| "grad_norm": 0.9817705750465393, | |
| "learning_rate": 4.926225547275795e-06, | |
| "loss": 0.8456, | |
| "step": 389 | |
| }, | |
| { | |
| "epoch": 0.6106471816283925, | |
| "grad_norm": 0.9646008610725403, | |
| "learning_rate": 4.925716661984283e-06, | |
| "loss": 0.7906, | |
| "step": 390 | |
| }, | |
| { | |
| "epoch": 0.6122129436325678, | |
| "grad_norm": 0.8940444588661194, | |
| "learning_rate": 4.9252060540787275e-06, | |
| "loss": 0.8371, | |
| "step": 391 | |
| }, | |
| { | |
| "epoch": 0.6137787056367432, | |
| "grad_norm": 1.0812394618988037, | |
| "learning_rate": 4.924693723921734e-06, | |
| "loss": 0.8056, | |
| "step": 392 | |
| }, | |
| { | |
| "epoch": 0.6153444676409185, | |
| "grad_norm": 0.9953856468200684, | |
| "learning_rate": 4.924179671877132e-06, | |
| "loss": 0.8651, | |
| "step": 393 | |
| }, | |
| { | |
| "epoch": 0.6169102296450939, | |
| "grad_norm": 0.9760703444480896, | |
| "learning_rate": 4.9236638983099735e-06, | |
| "loss": 0.845, | |
| "step": 394 | |
| }, | |
| { | |
| "epoch": 0.6184759916492694, | |
| "grad_norm": 1.0127615928649902, | |
| "learning_rate": 4.923146403586533e-06, | |
| "loss": 0.7987, | |
| "step": 395 | |
| }, | |
| { | |
| "epoch": 0.6200417536534447, | |
| "grad_norm": 0.8771396279335022, | |
| "learning_rate": 4.9226271880743086e-06, | |
| "loss": 0.8092, | |
| "step": 396 | |
| }, | |
| { | |
| "epoch": 0.6216075156576201, | |
| "grad_norm": 0.8906831741333008, | |
| "learning_rate": 4.9221062521420196e-06, | |
| "loss": 0.8039, | |
| "step": 397 | |
| }, | |
| { | |
| "epoch": 0.6231732776617954, | |
| "grad_norm": 1.0218392610549927, | |
| "learning_rate": 4.9215835961596045e-06, | |
| "loss": 0.8354, | |
| "step": 398 | |
| }, | |
| { | |
| "epoch": 0.6247390396659708, | |
| "grad_norm": 1.0382038354873657, | |
| "learning_rate": 4.921059220498227e-06, | |
| "loss": 0.8021, | |
| "step": 399 | |
| }, | |
| { | |
| "epoch": 0.6263048016701461, | |
| "grad_norm": 0.8997588157653809, | |
| "learning_rate": 4.920533125530272e-06, | |
| "loss": 0.8716, | |
| "step": 400 | |
| }, | |
| { | |
| "epoch": 0.6278705636743215, | |
| "grad_norm": 0.8725724220275879, | |
| "learning_rate": 4.920005311629342e-06, | |
| "loss": 0.7934, | |
| "step": 401 | |
| }, | |
| { | |
| "epoch": 0.6294363256784968, | |
| "grad_norm": 0.9289800524711609, | |
| "learning_rate": 4.9194757791702625e-06, | |
| "loss": 0.812, | |
| "step": 402 | |
| }, | |
| { | |
| "epoch": 0.6310020876826722, | |
| "grad_norm": 0.9969078898429871, | |
| "learning_rate": 4.918944528529079e-06, | |
| "loss": 0.866, | |
| "step": 403 | |
| }, | |
| { | |
| "epoch": 0.6325678496868476, | |
| "grad_norm": 0.9544941186904907, | |
| "learning_rate": 4.918411560083058e-06, | |
| "loss": 0.8035, | |
| "step": 404 | |
| }, | |
| { | |
| "epoch": 0.634133611691023, | |
| "grad_norm": 0.9678053855895996, | |
| "learning_rate": 4.917876874210686e-06, | |
| "loss": 0.8254, | |
| "step": 405 | |
| }, | |
| { | |
| "epoch": 0.6356993736951984, | |
| "grad_norm": 0.9782965779304504, | |
| "learning_rate": 4.917340471291666e-06, | |
| "loss": 0.7954, | |
| "step": 406 | |
| }, | |
| { | |
| "epoch": 0.6372651356993737, | |
| "grad_norm": 0.9380600452423096, | |
| "learning_rate": 4.916802351706923e-06, | |
| "loss": 0.8287, | |
| "step": 407 | |
| }, | |
| { | |
| "epoch": 0.6388308977035491, | |
| "grad_norm": 0.9455891847610474, | |
| "learning_rate": 4.916262515838603e-06, | |
| "loss": 0.8714, | |
| "step": 408 | |
| }, | |
| { | |
| "epoch": 0.6403966597077244, | |
| "grad_norm": 0.8937395811080933, | |
| "learning_rate": 4.915720964070065e-06, | |
| "loss": 0.7618, | |
| "step": 409 | |
| }, | |
| { | |
| "epoch": 0.6419624217118998, | |
| "grad_norm": 1.0304689407348633, | |
| "learning_rate": 4.915177696785894e-06, | |
| "loss": 0.8436, | |
| "step": 410 | |
| }, | |
| { | |
| "epoch": 0.6435281837160751, | |
| "grad_norm": 0.9623110294342041, | |
| "learning_rate": 4.914632714371885e-06, | |
| "loss": 0.865, | |
| "step": 411 | |
| }, | |
| { | |
| "epoch": 0.6450939457202505, | |
| "grad_norm": 0.9212867021560669, | |
| "learning_rate": 4.914086017215059e-06, | |
| "loss": 0.8232, | |
| "step": 412 | |
| }, | |
| { | |
| "epoch": 0.6466597077244259, | |
| "grad_norm": 0.9817205667495728, | |
| "learning_rate": 4.9135376057036485e-06, | |
| "loss": 0.7926, | |
| "step": 413 | |
| }, | |
| { | |
| "epoch": 0.6482254697286013, | |
| "grad_norm": 0.9576719403266907, | |
| "learning_rate": 4.912987480227108e-06, | |
| "loss": 0.8812, | |
| "step": 414 | |
| }, | |
| { | |
| "epoch": 0.6497912317327766, | |
| "grad_norm": 0.9415407180786133, | |
| "learning_rate": 4.912435641176106e-06, | |
| "loss": 0.8985, | |
| "step": 415 | |
| }, | |
| { | |
| "epoch": 0.651356993736952, | |
| "grad_norm": 0.9084275364875793, | |
| "learning_rate": 4.91188208894253e-06, | |
| "loss": 0.8085, | |
| "step": 416 | |
| }, | |
| { | |
| "epoch": 0.6529227557411273, | |
| "grad_norm": 0.8890969753265381, | |
| "learning_rate": 4.911326823919482e-06, | |
| "loss": 0.8127, | |
| "step": 417 | |
| }, | |
| { | |
| "epoch": 0.6544885177453027, | |
| "grad_norm": 0.8727678656578064, | |
| "learning_rate": 4.9107698465012815e-06, | |
| "loss": 0.8656, | |
| "step": 418 | |
| }, | |
| { | |
| "epoch": 0.656054279749478, | |
| "grad_norm": 1.0174667835235596, | |
| "learning_rate": 4.910211157083465e-06, | |
| "loss": 0.8358, | |
| "step": 419 | |
| }, | |
| { | |
| "epoch": 0.6576200417536534, | |
| "grad_norm": 1.045121431350708, | |
| "learning_rate": 4.909650756062782e-06, | |
| "loss": 0.8239, | |
| "step": 420 | |
| }, | |
| { | |
| "epoch": 0.6591858037578288, | |
| "grad_norm": 1.0688676834106445, | |
| "learning_rate": 4.909088643837202e-06, | |
| "loss": 0.9246, | |
| "step": 421 | |
| }, | |
| { | |
| "epoch": 0.6607515657620042, | |
| "grad_norm": 0.9371877908706665, | |
| "learning_rate": 4.908524820805905e-06, | |
| "loss": 0.7862, | |
| "step": 422 | |
| }, | |
| { | |
| "epoch": 0.6623173277661796, | |
| "grad_norm": 0.9567264318466187, | |
| "learning_rate": 4.907959287369288e-06, | |
| "loss": 0.8164, | |
| "step": 423 | |
| }, | |
| { | |
| "epoch": 0.6638830897703549, | |
| "grad_norm": 0.9109166860580444, | |
| "learning_rate": 4.907392043928964e-06, | |
| "loss": 0.7016, | |
| "step": 424 | |
| }, | |
| { | |
| "epoch": 0.6654488517745303, | |
| "grad_norm": 1.0378203392028809, | |
| "learning_rate": 4.906823090887757e-06, | |
| "loss": 0.8986, | |
| "step": 425 | |
| }, | |
| { | |
| "epoch": 0.6670146137787056, | |
| "grad_norm": 0.9871951341629028, | |
| "learning_rate": 4.906252428649708e-06, | |
| "loss": 0.8366, | |
| "step": 426 | |
| }, | |
| { | |
| "epoch": 0.668580375782881, | |
| "grad_norm": 0.8682780265808105, | |
| "learning_rate": 4.905680057620072e-06, | |
| "loss": 0.738, | |
| "step": 427 | |
| }, | |
| { | |
| "epoch": 0.6701461377870563, | |
| "grad_norm": 0.8834647536277771, | |
| "learning_rate": 4.9051059782053125e-06, | |
| "loss": 0.8304, | |
| "step": 428 | |
| }, | |
| { | |
| "epoch": 0.6717118997912317, | |
| "grad_norm": 1.0133378505706787, | |
| "learning_rate": 4.904530190813113e-06, | |
| "loss": 0.9369, | |
| "step": 429 | |
| }, | |
| { | |
| "epoch": 0.673277661795407, | |
| "grad_norm": 0.9967373013496399, | |
| "learning_rate": 4.9039526958523676e-06, | |
| "loss": 0.782, | |
| "step": 430 | |
| }, | |
| { | |
| "epoch": 0.6748434237995825, | |
| "grad_norm": 1.0258032083511353, | |
| "learning_rate": 4.90337349373318e-06, | |
| "loss": 0.8214, | |
| "step": 431 | |
| }, | |
| { | |
| "epoch": 0.6764091858037579, | |
| "grad_norm": 0.9814366102218628, | |
| "learning_rate": 4.90279258486687e-06, | |
| "loss": 0.8173, | |
| "step": 432 | |
| }, | |
| { | |
| "epoch": 0.6779749478079332, | |
| "grad_norm": 0.9351440668106079, | |
| "learning_rate": 4.902209969665966e-06, | |
| "loss": 0.8815, | |
| "step": 433 | |
| }, | |
| { | |
| "epoch": 0.6795407098121086, | |
| "grad_norm": 0.9490398168563843, | |
| "learning_rate": 4.901625648544212e-06, | |
| "loss": 0.8295, | |
| "step": 434 | |
| }, | |
| { | |
| "epoch": 0.6811064718162839, | |
| "grad_norm": 1.0183534622192383, | |
| "learning_rate": 4.9010396219165614e-06, | |
| "loss": 0.8538, | |
| "step": 435 | |
| }, | |
| { | |
| "epoch": 0.6826722338204593, | |
| "grad_norm": 0.9546058773994446, | |
| "learning_rate": 4.900451890199179e-06, | |
| "loss": 0.8385, | |
| "step": 436 | |
| }, | |
| { | |
| "epoch": 0.6842379958246346, | |
| "grad_norm": 1.076120138168335, | |
| "learning_rate": 4.8998624538094394e-06, | |
| "loss": 0.8958, | |
| "step": 437 | |
| }, | |
| { | |
| "epoch": 0.68580375782881, | |
| "grad_norm": 0.8935732841491699, | |
| "learning_rate": 4.899271313165929e-06, | |
| "loss": 0.8674, | |
| "step": 438 | |
| }, | |
| { | |
| "epoch": 0.6873695198329853, | |
| "grad_norm": 0.9574604630470276, | |
| "learning_rate": 4.898678468688445e-06, | |
| "loss": 0.8245, | |
| "step": 439 | |
| }, | |
| { | |
| "epoch": 0.6889352818371608, | |
| "grad_norm": 0.931751549243927, | |
| "learning_rate": 4.898083920797993e-06, | |
| "loss": 0.8409, | |
| "step": 440 | |
| }, | |
| { | |
| "epoch": 0.6905010438413361, | |
| "grad_norm": 0.937874436378479, | |
| "learning_rate": 4.89748766991679e-06, | |
| "loss": 0.8222, | |
| "step": 441 | |
| }, | |
| { | |
| "epoch": 0.6920668058455115, | |
| "grad_norm": 0.9606375694274902, | |
| "learning_rate": 4.896889716468259e-06, | |
| "loss": 0.8123, | |
| "step": 442 | |
| }, | |
| { | |
| "epoch": 0.6936325678496869, | |
| "grad_norm": 0.8692249059677124, | |
| "learning_rate": 4.896290060877038e-06, | |
| "loss": 0.8532, | |
| "step": 443 | |
| }, | |
| { | |
| "epoch": 0.6951983298538622, | |
| "grad_norm": 0.9597892165184021, | |
| "learning_rate": 4.895688703568968e-06, | |
| "loss": 0.8365, | |
| "step": 444 | |
| }, | |
| { | |
| "epoch": 0.6967640918580376, | |
| "grad_norm": 0.958342432975769, | |
| "learning_rate": 4.895085644971102e-06, | |
| "loss": 0.8535, | |
| "step": 445 | |
| }, | |
| { | |
| "epoch": 0.6983298538622129, | |
| "grad_norm": 0.9603601098060608, | |
| "learning_rate": 4.894480885511699e-06, | |
| "loss": 0.8227, | |
| "step": 446 | |
| }, | |
| { | |
| "epoch": 0.6998956158663883, | |
| "grad_norm": 1.0010526180267334, | |
| "learning_rate": 4.893874425620226e-06, | |
| "loss": 0.8785, | |
| "step": 447 | |
| }, | |
| { | |
| "epoch": 0.7014613778705637, | |
| "grad_norm": 1.0061219930648804, | |
| "learning_rate": 4.893266265727361e-06, | |
| "loss": 0.8336, | |
| "step": 448 | |
| }, | |
| { | |
| "epoch": 0.7030271398747391, | |
| "grad_norm": 0.8967173099517822, | |
| "learning_rate": 4.892656406264984e-06, | |
| "loss": 0.8305, | |
| "step": 449 | |
| }, | |
| { | |
| "epoch": 0.7045929018789144, | |
| "grad_norm": 0.962729275226593, | |
| "learning_rate": 4.892044847666186e-06, | |
| "loss": 0.7843, | |
| "step": 450 | |
| }, | |
| { | |
| "epoch": 0.7061586638830898, | |
| "grad_norm": 1.0067342519760132, | |
| "learning_rate": 4.891431590365262e-06, | |
| "loss": 0.8111, | |
| "step": 451 | |
| }, | |
| { | |
| "epoch": 0.7077244258872651, | |
| "grad_norm": 1.0252268314361572, | |
| "learning_rate": 4.890816634797716e-06, | |
| "loss": 0.8253, | |
| "step": 452 | |
| }, | |
| { | |
| "epoch": 0.7092901878914405, | |
| "grad_norm": 0.9098564386367798, | |
| "learning_rate": 4.890199981400255e-06, | |
| "loss": 0.8526, | |
| "step": 453 | |
| }, | |
| { | |
| "epoch": 0.7108559498956158, | |
| "grad_norm": 0.893135130405426, | |
| "learning_rate": 4.889581630610793e-06, | |
| "loss": 0.8217, | |
| "step": 454 | |
| }, | |
| { | |
| "epoch": 0.7124217118997912, | |
| "grad_norm": 0.9225817322731018, | |
| "learning_rate": 4.888961582868453e-06, | |
| "loss": 0.8296, | |
| "step": 455 | |
| }, | |
| { | |
| "epoch": 0.7139874739039666, | |
| "grad_norm": 0.973386824131012, | |
| "learning_rate": 4.888339838613555e-06, | |
| "loss": 0.7429, | |
| "step": 456 | |
| }, | |
| { | |
| "epoch": 0.715553235908142, | |
| "grad_norm": 0.9829993844032288, | |
| "learning_rate": 4.8877163982876306e-06, | |
| "loss": 0.7405, | |
| "step": 457 | |
| }, | |
| { | |
| "epoch": 0.7171189979123174, | |
| "grad_norm": 0.927728533744812, | |
| "learning_rate": 4.887091262333414e-06, | |
| "loss": 0.8257, | |
| "step": 458 | |
| }, | |
| { | |
| "epoch": 0.7186847599164927, | |
| "grad_norm": 0.9419897794723511, | |
| "learning_rate": 4.886464431194844e-06, | |
| "loss": 0.7762, | |
| "step": 459 | |
| }, | |
| { | |
| "epoch": 0.7202505219206681, | |
| "grad_norm": 0.9358823299407959, | |
| "learning_rate": 4.885835905317061e-06, | |
| "loss": 0.7794, | |
| "step": 460 | |
| }, | |
| { | |
| "epoch": 0.7218162839248434, | |
| "grad_norm": 1.0290783643722534, | |
| "learning_rate": 4.88520568514641e-06, | |
| "loss": 0.8723, | |
| "step": 461 | |
| }, | |
| { | |
| "epoch": 0.7233820459290188, | |
| "grad_norm": 0.9909068942070007, | |
| "learning_rate": 4.884573771130442e-06, | |
| "loss": 0.8302, | |
| "step": 462 | |
| }, | |
| { | |
| "epoch": 0.7249478079331941, | |
| "grad_norm": 0.9972653985023499, | |
| "learning_rate": 4.883940163717906e-06, | |
| "loss": 0.7961, | |
| "step": 463 | |
| }, | |
| { | |
| "epoch": 0.7265135699373695, | |
| "grad_norm": 0.9245638251304626, | |
| "learning_rate": 4.883304863358757e-06, | |
| "loss": 0.8057, | |
| "step": 464 | |
| }, | |
| { | |
| "epoch": 0.7280793319415448, | |
| "grad_norm": 1.0097148418426514, | |
| "learning_rate": 4.882667870504153e-06, | |
| "loss": 0.7296, | |
| "step": 465 | |
| }, | |
| { | |
| "epoch": 0.7296450939457203, | |
| "grad_norm": 0.9792670607566833, | |
| "learning_rate": 4.882029185606448e-06, | |
| "loss": 0.8006, | |
| "step": 466 | |
| }, | |
| { | |
| "epoch": 0.7312108559498957, | |
| "grad_norm": 0.9308120608329773, | |
| "learning_rate": 4.881388809119206e-06, | |
| "loss": 0.7957, | |
| "step": 467 | |
| }, | |
| { | |
| "epoch": 0.732776617954071, | |
| "grad_norm": 0.9472301602363586, | |
| "learning_rate": 4.880746741497187e-06, | |
| "loss": 0.8984, | |
| "step": 468 | |
| }, | |
| { | |
| "epoch": 0.7343423799582464, | |
| "grad_norm": 0.9898937344551086, | |
| "learning_rate": 4.880102983196351e-06, | |
| "loss": 0.8365, | |
| "step": 469 | |
| }, | |
| { | |
| "epoch": 0.7359081419624217, | |
| "grad_norm": 0.9376612901687622, | |
| "learning_rate": 4.879457534673863e-06, | |
| "loss": 0.7838, | |
| "step": 470 | |
| }, | |
| { | |
| "epoch": 0.7374739039665971, | |
| "grad_norm": 1.030616283416748, | |
| "learning_rate": 4.878810396388085e-06, | |
| "loss": 0.7698, | |
| "step": 471 | |
| }, | |
| { | |
| "epoch": 0.7390396659707724, | |
| "grad_norm": 0.9583387970924377, | |
| "learning_rate": 4.87816156879858e-06, | |
| "loss": 0.7381, | |
| "step": 472 | |
| }, | |
| { | |
| "epoch": 0.7406054279749478, | |
| "grad_norm": 1.00180184841156, | |
| "learning_rate": 4.87751105236611e-06, | |
| "loss": 0.8497, | |
| "step": 473 | |
| }, | |
| { | |
| "epoch": 0.7421711899791231, | |
| "grad_norm": 0.9382562637329102, | |
| "learning_rate": 4.876858847552638e-06, | |
| "loss": 0.8698, | |
| "step": 474 | |
| }, | |
| { | |
| "epoch": 0.7437369519832986, | |
| "grad_norm": 1.0104063749313354, | |
| "learning_rate": 4.8762049548213255e-06, | |
| "loss": 0.8444, | |
| "step": 475 | |
| }, | |
| { | |
| "epoch": 0.7453027139874739, | |
| "grad_norm": 0.9321765303611755, | |
| "learning_rate": 4.87554937463653e-06, | |
| "loss": 0.8027, | |
| "step": 476 | |
| }, | |
| { | |
| "epoch": 0.7468684759916493, | |
| "grad_norm": 0.9527666568756104, | |
| "learning_rate": 4.874892107463811e-06, | |
| "loss": 0.8069, | |
| "step": 477 | |
| }, | |
| { | |
| "epoch": 0.7484342379958246, | |
| "grad_norm": 0.9576907157897949, | |
| "learning_rate": 4.874233153769924e-06, | |
| "loss": 0.774, | |
| "step": 478 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "grad_norm": 1.0516161918640137, | |
| "learning_rate": 4.873572514022822e-06, | |
| "loss": 0.8176, | |
| "step": 479 | |
| }, | |
| { | |
| "epoch": 0.7515657620041754, | |
| "grad_norm": 1.026928186416626, | |
| "learning_rate": 4.872910188691657e-06, | |
| "loss": 0.8797, | |
| "step": 480 | |
| }, | |
| { | |
| "epoch": 0.7531315240083507, | |
| "grad_norm": 0.9312179088592529, | |
| "learning_rate": 4.872246178246776e-06, | |
| "loss": 0.7747, | |
| "step": 481 | |
| }, | |
| { | |
| "epoch": 0.7546972860125261, | |
| "grad_norm": 1.0676255226135254, | |
| "learning_rate": 4.871580483159725e-06, | |
| "loss": 0.7801, | |
| "step": 482 | |
| }, | |
| { | |
| "epoch": 0.7562630480167014, | |
| "grad_norm": 1.066056489944458, | |
| "learning_rate": 4.870913103903245e-06, | |
| "loss": 0.7837, | |
| "step": 483 | |
| }, | |
| { | |
| "epoch": 0.7578288100208769, | |
| "grad_norm": 0.9388124346733093, | |
| "learning_rate": 4.87024404095127e-06, | |
| "loss": 0.7801, | |
| "step": 484 | |
| }, | |
| { | |
| "epoch": 0.7593945720250522, | |
| "grad_norm": 0.9879791140556335, | |
| "learning_rate": 4.8695732947789356e-06, | |
| "loss": 0.7849, | |
| "step": 485 | |
| }, | |
| { | |
| "epoch": 0.7609603340292276, | |
| "grad_norm": 1.0002318620681763, | |
| "learning_rate": 4.868900865862569e-06, | |
| "loss": 0.8219, | |
| "step": 486 | |
| }, | |
| { | |
| "epoch": 0.7625260960334029, | |
| "grad_norm": 0.9599125385284424, | |
| "learning_rate": 4.8682267546796945e-06, | |
| "loss": 0.8888, | |
| "step": 487 | |
| }, | |
| { | |
| "epoch": 0.7640918580375783, | |
| "grad_norm": 0.9099973440170288, | |
| "learning_rate": 4.867550961709027e-06, | |
| "loss": 0.7585, | |
| "step": 488 | |
| }, | |
| { | |
| "epoch": 0.7656576200417536, | |
| "grad_norm": 0.9715955257415771, | |
| "learning_rate": 4.866873487430481e-06, | |
| "loss": 0.8237, | |
| "step": 489 | |
| }, | |
| { | |
| "epoch": 0.767223382045929, | |
| "grad_norm": 1.0194706916809082, | |
| "learning_rate": 4.86619433232516e-06, | |
| "loss": 0.7708, | |
| "step": 490 | |
| }, | |
| { | |
| "epoch": 0.7687891440501043, | |
| "grad_norm": 1.044005036354065, | |
| "learning_rate": 4.865513496875367e-06, | |
| "loss": 0.8, | |
| "step": 491 | |
| }, | |
| { | |
| "epoch": 0.7703549060542797, | |
| "grad_norm": 0.9776361584663391, | |
| "learning_rate": 4.8648309815645915e-06, | |
| "loss": 0.7696, | |
| "step": 492 | |
| }, | |
| { | |
| "epoch": 0.7719206680584552, | |
| "grad_norm": 0.9213030934333801, | |
| "learning_rate": 4.864146786877521e-06, | |
| "loss": 0.8558, | |
| "step": 493 | |
| }, | |
| { | |
| "epoch": 0.7734864300626305, | |
| "grad_norm": 0.9361440539360046, | |
| "learning_rate": 4.8634609133000336e-06, | |
| "loss": 0.7851, | |
| "step": 494 | |
| }, | |
| { | |
| "epoch": 0.7750521920668059, | |
| "grad_norm": 1.0199540853500366, | |
| "learning_rate": 4.8627733613192e-06, | |
| "loss": 0.8229, | |
| "step": 495 | |
| }, | |
| { | |
| "epoch": 0.7766179540709812, | |
| "grad_norm": 1.0244604349136353, | |
| "learning_rate": 4.862084131423283e-06, | |
| "loss": 0.772, | |
| "step": 496 | |
| }, | |
| { | |
| "epoch": 0.7781837160751566, | |
| "grad_norm": 0.9414368271827698, | |
| "learning_rate": 4.861393224101736e-06, | |
| "loss": 0.7579, | |
| "step": 497 | |
| }, | |
| { | |
| "epoch": 0.7797494780793319, | |
| "grad_norm": 0.9198965430259705, | |
| "learning_rate": 4.860700639845205e-06, | |
| "loss": 0.855, | |
| "step": 498 | |
| }, | |
| { | |
| "epoch": 0.7813152400835073, | |
| "grad_norm": 0.9398712515830994, | |
| "learning_rate": 4.8600063791455275e-06, | |
| "loss": 0.7824, | |
| "step": 499 | |
| }, | |
| { | |
| "epoch": 0.7828810020876826, | |
| "grad_norm": 0.9544969201087952, | |
| "learning_rate": 4.8593104424957275e-06, | |
| "loss": 0.8104, | |
| "step": 500 | |
| }, | |
| { | |
| "epoch": 0.784446764091858, | |
| "grad_norm": 0.9522370100021362, | |
| "learning_rate": 4.858612830390024e-06, | |
| "loss": 0.813, | |
| "step": 501 | |
| }, | |
| { | |
| "epoch": 0.7860125260960334, | |
| "grad_norm": 0.9830234050750732, | |
| "learning_rate": 4.857913543323823e-06, | |
| "loss": 0.8656, | |
| "step": 502 | |
| }, | |
| { | |
| "epoch": 0.7875782881002088, | |
| "grad_norm": 1.0527008771896362, | |
| "learning_rate": 4.8572125817937195e-06, | |
| "loss": 0.8417, | |
| "step": 503 | |
| }, | |
| { | |
| "epoch": 0.7891440501043842, | |
| "grad_norm": 0.9772782921791077, | |
| "learning_rate": 4.856509946297501e-06, | |
| "loss": 0.7983, | |
| "step": 504 | |
| }, | |
| { | |
| "epoch": 0.7907098121085595, | |
| "grad_norm": 0.9250578880310059, | |
| "learning_rate": 4.855805637334141e-06, | |
| "loss": 0.7391, | |
| "step": 505 | |
| }, | |
| { | |
| "epoch": 0.7922755741127349, | |
| "grad_norm": 0.9792859554290771, | |
| "learning_rate": 4.855099655403802e-06, | |
| "loss": 0.7753, | |
| "step": 506 | |
| }, | |
| { | |
| "epoch": 0.7938413361169102, | |
| "grad_norm": 0.8876479864120483, | |
| "learning_rate": 4.854392001007834e-06, | |
| "loss": 0.8249, | |
| "step": 507 | |
| }, | |
| { | |
| "epoch": 0.7954070981210856, | |
| "grad_norm": 0.9336771368980408, | |
| "learning_rate": 4.853682674648775e-06, | |
| "loss": 0.8462, | |
| "step": 508 | |
| }, | |
| { | |
| "epoch": 0.7969728601252609, | |
| "grad_norm": 0.9732388854026794, | |
| "learning_rate": 4.852971676830352e-06, | |
| "loss": 0.8791, | |
| "step": 509 | |
| }, | |
| { | |
| "epoch": 0.7985386221294363, | |
| "grad_norm": 0.9135870337486267, | |
| "learning_rate": 4.852259008057476e-06, | |
| "loss": 0.7279, | |
| "step": 510 | |
| }, | |
| { | |
| "epoch": 0.8001043841336117, | |
| "grad_norm": 0.9782464504241943, | |
| "learning_rate": 4.8515446688362476e-06, | |
| "loss": 0.7797, | |
| "step": 511 | |
| }, | |
| { | |
| "epoch": 0.8016701461377871, | |
| "grad_norm": 0.9571824073791504, | |
| "learning_rate": 4.85082865967395e-06, | |
| "loss": 0.8349, | |
| "step": 512 | |
| }, | |
| { | |
| "epoch": 0.8032359081419624, | |
| "grad_norm": 1.011786937713623, | |
| "learning_rate": 4.850110981079057e-06, | |
| "loss": 0.8057, | |
| "step": 513 | |
| }, | |
| { | |
| "epoch": 0.8048016701461378, | |
| "grad_norm": 0.9418787956237793, | |
| "learning_rate": 4.8493916335612255e-06, | |
| "loss": 0.8592, | |
| "step": 514 | |
| }, | |
| { | |
| "epoch": 0.8063674321503131, | |
| "grad_norm": 0.9292604923248291, | |
| "learning_rate": 4.848670617631296e-06, | |
| "loss": 0.7471, | |
| "step": 515 | |
| }, | |
| { | |
| "epoch": 0.8079331941544885, | |
| "grad_norm": 0.9303188323974609, | |
| "learning_rate": 4.847947933801296e-06, | |
| "loss": 0.8768, | |
| "step": 516 | |
| }, | |
| { | |
| "epoch": 0.8094989561586639, | |
| "grad_norm": 0.9897359609603882, | |
| "learning_rate": 4.847223582584437e-06, | |
| "loss": 0.8343, | |
| "step": 517 | |
| }, | |
| { | |
| "epoch": 0.8110647181628392, | |
| "grad_norm": 0.9695891737937927, | |
| "learning_rate": 4.846497564495114e-06, | |
| "loss": 0.8318, | |
| "step": 518 | |
| }, | |
| { | |
| "epoch": 0.8126304801670147, | |
| "grad_norm": 1.0175273418426514, | |
| "learning_rate": 4.845769880048906e-06, | |
| "loss": 0.9137, | |
| "step": 519 | |
| }, | |
| { | |
| "epoch": 0.81419624217119, | |
| "grad_norm": 0.9497922658920288, | |
| "learning_rate": 4.845040529762577e-06, | |
| "loss": 0.6549, | |
| "step": 520 | |
| }, | |
| { | |
| "epoch": 0.8157620041753654, | |
| "grad_norm": 1.0647574663162231, | |
| "learning_rate": 4.844309514154071e-06, | |
| "loss": 0.8243, | |
| "step": 521 | |
| }, | |
| { | |
| "epoch": 0.8173277661795407, | |
| "grad_norm": 0.9710965156555176, | |
| "learning_rate": 4.843576833742517e-06, | |
| "loss": 0.7864, | |
| "step": 522 | |
| }, | |
| { | |
| "epoch": 0.8188935281837161, | |
| "grad_norm": 0.9881441593170166, | |
| "learning_rate": 4.842842489048225e-06, | |
| "loss": 0.7799, | |
| "step": 523 | |
| }, | |
| { | |
| "epoch": 0.8204592901878914, | |
| "grad_norm": 0.9455597400665283, | |
| "learning_rate": 4.842106480592687e-06, | |
| "loss": 0.7943, | |
| "step": 524 | |
| }, | |
| { | |
| "epoch": 0.8220250521920668, | |
| "grad_norm": 0.9948442578315735, | |
| "learning_rate": 4.841368808898577e-06, | |
| "loss": 0.8587, | |
| "step": 525 | |
| }, | |
| { | |
| "epoch": 0.8235908141962421, | |
| "grad_norm": 0.9959186315536499, | |
| "learning_rate": 4.84062947448975e-06, | |
| "loss": 0.8477, | |
| "step": 526 | |
| }, | |
| { | |
| "epoch": 0.8251565762004175, | |
| "grad_norm": 0.9791848659515381, | |
| "learning_rate": 4.839888477891243e-06, | |
| "loss": 0.7568, | |
| "step": 527 | |
| }, | |
| { | |
| "epoch": 0.826722338204593, | |
| "grad_norm": 1.1499426364898682, | |
| "learning_rate": 4.839145819629269e-06, | |
| "loss": 0.8296, | |
| "step": 528 | |
| }, | |
| { | |
| "epoch": 0.8282881002087683, | |
| "grad_norm": 0.9269506335258484, | |
| "learning_rate": 4.8384015002312265e-06, | |
| "loss": 0.7935, | |
| "step": 529 | |
| }, | |
| { | |
| "epoch": 0.8298538622129437, | |
| "grad_norm": 0.970565140247345, | |
| "learning_rate": 4.837655520225692e-06, | |
| "loss": 0.8138, | |
| "step": 530 | |
| }, | |
| { | |
| "epoch": 0.831419624217119, | |
| "grad_norm": 0.9852119088172913, | |
| "learning_rate": 4.836907880142418e-06, | |
| "loss": 0.7909, | |
| "step": 531 | |
| }, | |
| { | |
| "epoch": 0.8329853862212944, | |
| "grad_norm": 0.927129328250885, | |
| "learning_rate": 4.836158580512339e-06, | |
| "loss": 0.8105, | |
| "step": 532 | |
| }, | |
| { | |
| "epoch": 0.8345511482254697, | |
| "grad_norm": 1.0711781978607178, | |
| "learning_rate": 4.835407621867569e-06, | |
| "loss": 0.8241, | |
| "step": 533 | |
| }, | |
| { | |
| "epoch": 0.8361169102296451, | |
| "grad_norm": 1.0472421646118164, | |
| "learning_rate": 4.834655004741397e-06, | |
| "loss": 0.7923, | |
| "step": 534 | |
| }, | |
| { | |
| "epoch": 0.8376826722338204, | |
| "grad_norm": 0.9761579632759094, | |
| "learning_rate": 4.833900729668293e-06, | |
| "loss": 0.8203, | |
| "step": 535 | |
| }, | |
| { | |
| "epoch": 0.8392484342379958, | |
| "grad_norm": 1.0559637546539307, | |
| "learning_rate": 4.833144797183902e-06, | |
| "loss": 0.8915, | |
| "step": 536 | |
| }, | |
| { | |
| "epoch": 0.8408141962421712, | |
| "grad_norm": 0.9951052069664001, | |
| "learning_rate": 4.832387207825045e-06, | |
| "loss": 0.8253, | |
| "step": 537 | |
| }, | |
| { | |
| "epoch": 0.8423799582463466, | |
| "grad_norm": 0.894079864025116, | |
| "learning_rate": 4.831627962129725e-06, | |
| "loss": 0.7479, | |
| "step": 538 | |
| }, | |
| { | |
| "epoch": 0.843945720250522, | |
| "grad_norm": 0.9832531213760376, | |
| "learning_rate": 4.830867060637115e-06, | |
| "loss": 0.8484, | |
| "step": 539 | |
| }, | |
| { | |
| "epoch": 0.8455114822546973, | |
| "grad_norm": 1.0477591753005981, | |
| "learning_rate": 4.8301045038875665e-06, | |
| "loss": 0.8052, | |
| "step": 540 | |
| }, | |
| { | |
| "epoch": 0.8470772442588727, | |
| "grad_norm": 0.9388875365257263, | |
| "learning_rate": 4.829340292422607e-06, | |
| "loss": 0.7998, | |
| "step": 541 | |
| }, | |
| { | |
| "epoch": 0.848643006263048, | |
| "grad_norm": 0.8975532054901123, | |
| "learning_rate": 4.8285744267849395e-06, | |
| "loss": 0.848, | |
| "step": 542 | |
| }, | |
| { | |
| "epoch": 0.8502087682672234, | |
| "grad_norm": 0.9211076498031616, | |
| "learning_rate": 4.8278069075184396e-06, | |
| "loss": 0.8337, | |
| "step": 543 | |
| }, | |
| { | |
| "epoch": 0.8517745302713987, | |
| "grad_norm": 0.970134437084198, | |
| "learning_rate": 4.827037735168159e-06, | |
| "loss": 0.7843, | |
| "step": 544 | |
| }, | |
| { | |
| "epoch": 0.8533402922755741, | |
| "grad_norm": 0.9714353084564209, | |
| "learning_rate": 4.826266910280322e-06, | |
| "loss": 0.6991, | |
| "step": 545 | |
| }, | |
| { | |
| "epoch": 0.8549060542797495, | |
| "grad_norm": 0.8607689142227173, | |
| "learning_rate": 4.8254944334023265e-06, | |
| "loss": 0.7314, | |
| "step": 546 | |
| }, | |
| { | |
| "epoch": 0.8564718162839249, | |
| "grad_norm": 0.9843030571937561, | |
| "learning_rate": 4.824720305082747e-06, | |
| "loss": 0.7497, | |
| "step": 547 | |
| }, | |
| { | |
| "epoch": 0.8580375782881002, | |
| "grad_norm": 0.9638474583625793, | |
| "learning_rate": 4.823944525871324e-06, | |
| "loss": 0.776, | |
| "step": 548 | |
| }, | |
| { | |
| "epoch": 0.8596033402922756, | |
| "grad_norm": 0.9342365860939026, | |
| "learning_rate": 4.823167096318979e-06, | |
| "loss": 0.77, | |
| "step": 549 | |
| }, | |
| { | |
| "epoch": 0.8611691022964509, | |
| "grad_norm": 0.9608579874038696, | |
| "learning_rate": 4.822388016977797e-06, | |
| "loss": 0.8281, | |
| "step": 550 | |
| }, | |
| { | |
| "epoch": 0.8627348643006263, | |
| "grad_norm": 0.8996383547782898, | |
| "learning_rate": 4.8216072884010395e-06, | |
| "loss": 0.7519, | |
| "step": 551 | |
| }, | |
| { | |
| "epoch": 0.8643006263048016, | |
| "grad_norm": 0.9922810196876526, | |
| "learning_rate": 4.820824911143139e-06, | |
| "loss": 0.8714, | |
| "step": 552 | |
| }, | |
| { | |
| "epoch": 0.865866388308977, | |
| "grad_norm": 0.9861842393875122, | |
| "learning_rate": 4.820040885759697e-06, | |
| "loss": 0.7998, | |
| "step": 553 | |
| }, | |
| { | |
| "epoch": 0.8674321503131524, | |
| "grad_norm": 1.013168215751648, | |
| "learning_rate": 4.819255212807486e-06, | |
| "loss": 0.8291, | |
| "step": 554 | |
| }, | |
| { | |
| "epoch": 0.8689979123173278, | |
| "grad_norm": 0.9695484638214111, | |
| "learning_rate": 4.818467892844451e-06, | |
| "loss": 0.8257, | |
| "step": 555 | |
| }, | |
| { | |
| "epoch": 0.8705636743215032, | |
| "grad_norm": 1.0018948316574097, | |
| "learning_rate": 4.817678926429702e-06, | |
| "loss": 0.7551, | |
| "step": 556 | |
| }, | |
| { | |
| "epoch": 0.8721294363256785, | |
| "grad_norm": 1.0259422063827515, | |
| "learning_rate": 4.816888314123521e-06, | |
| "loss": 0.8582, | |
| "step": 557 | |
| }, | |
| { | |
| "epoch": 0.8736951983298539, | |
| "grad_norm": 0.9045076966285706, | |
| "learning_rate": 4.81609605648736e-06, | |
| "loss": 0.811, | |
| "step": 558 | |
| }, | |
| { | |
| "epoch": 0.8752609603340292, | |
| "grad_norm": 0.9112316370010376, | |
| "learning_rate": 4.815302154083837e-06, | |
| "loss": 0.818, | |
| "step": 559 | |
| }, | |
| { | |
| "epoch": 0.8768267223382046, | |
| "grad_norm": 1.0060032606124878, | |
| "learning_rate": 4.814506607476739e-06, | |
| "loss": 0.766, | |
| "step": 560 | |
| }, | |
| { | |
| "epoch": 0.8783924843423799, | |
| "grad_norm": 0.9126970171928406, | |
| "learning_rate": 4.813709417231021e-06, | |
| "loss": 0.8903, | |
| "step": 561 | |
| }, | |
| { | |
| "epoch": 0.8799582463465553, | |
| "grad_norm": 1.0825906991958618, | |
| "learning_rate": 4.812910583912804e-06, | |
| "loss": 0.8466, | |
| "step": 562 | |
| }, | |
| { | |
| "epoch": 0.8815240083507306, | |
| "grad_norm": 0.9530975818634033, | |
| "learning_rate": 4.812110108089377e-06, | |
| "loss": 0.8163, | |
| "step": 563 | |
| }, | |
| { | |
| "epoch": 0.8830897703549061, | |
| "grad_norm": 0.9566405415534973, | |
| "learning_rate": 4.8113079903291955e-06, | |
| "loss": 0.8244, | |
| "step": 564 | |
| }, | |
| { | |
| "epoch": 0.8846555323590815, | |
| "grad_norm": 0.919980525970459, | |
| "learning_rate": 4.81050423120188e-06, | |
| "loss": 0.8335, | |
| "step": 565 | |
| }, | |
| { | |
| "epoch": 0.8862212943632568, | |
| "grad_norm": 0.9123868346214294, | |
| "learning_rate": 4.809698831278217e-06, | |
| "loss": 0.7712, | |
| "step": 566 | |
| }, | |
| { | |
| "epoch": 0.8877870563674322, | |
| "grad_norm": 0.9589663743972778, | |
| "learning_rate": 4.8088917911301595e-06, | |
| "loss": 0.8467, | |
| "step": 567 | |
| }, | |
| { | |
| "epoch": 0.8893528183716075, | |
| "grad_norm": 0.9052798748016357, | |
| "learning_rate": 4.808083111330823e-06, | |
| "loss": 0.8078, | |
| "step": 568 | |
| }, | |
| { | |
| "epoch": 0.8909185803757829, | |
| "grad_norm": 0.9250494241714478, | |
| "learning_rate": 4.807272792454489e-06, | |
| "loss": 0.8536, | |
| "step": 569 | |
| }, | |
| { | |
| "epoch": 0.8924843423799582, | |
| "grad_norm": 0.8451298475265503, | |
| "learning_rate": 4.806460835076603e-06, | |
| "loss": 0.7752, | |
| "step": 570 | |
| }, | |
| { | |
| "epoch": 0.8940501043841336, | |
| "grad_norm": 1.0324240922927856, | |
| "learning_rate": 4.805647239773773e-06, | |
| "loss": 0.7871, | |
| "step": 571 | |
| }, | |
| { | |
| "epoch": 0.8956158663883089, | |
| "grad_norm": 0.9338528513908386, | |
| "learning_rate": 4.804832007123771e-06, | |
| "loss": 0.8082, | |
| "step": 572 | |
| }, | |
| { | |
| "epoch": 0.8971816283924844, | |
| "grad_norm": 1.0080739259719849, | |
| "learning_rate": 4.804015137705531e-06, | |
| "loss": 0.7636, | |
| "step": 573 | |
| }, | |
| { | |
| "epoch": 0.8987473903966597, | |
| "grad_norm": 0.9817036986351013, | |
| "learning_rate": 4.803196632099152e-06, | |
| "loss": 0.8599, | |
| "step": 574 | |
| }, | |
| { | |
| "epoch": 0.9003131524008351, | |
| "grad_norm": 0.9284219145774841, | |
| "learning_rate": 4.80237649088589e-06, | |
| "loss": 0.8152, | |
| "step": 575 | |
| }, | |
| { | |
| "epoch": 0.9018789144050104, | |
| "grad_norm": 0.8922317028045654, | |
| "learning_rate": 4.801554714648166e-06, | |
| "loss": 0.8146, | |
| "step": 576 | |
| }, | |
| { | |
| "epoch": 0.9034446764091858, | |
| "grad_norm": 0.9465014338493347, | |
| "learning_rate": 4.800731303969565e-06, | |
| "loss": 0.7663, | |
| "step": 577 | |
| }, | |
| { | |
| "epoch": 0.9050104384133612, | |
| "grad_norm": 1.0325525999069214, | |
| "learning_rate": 4.799906259434824e-06, | |
| "loss": 0.8458, | |
| "step": 578 | |
| }, | |
| { | |
| "epoch": 0.9065762004175365, | |
| "grad_norm": 0.9366307258605957, | |
| "learning_rate": 4.7990795816298485e-06, | |
| "loss": 0.7992, | |
| "step": 579 | |
| }, | |
| { | |
| "epoch": 0.9081419624217119, | |
| "grad_norm": 0.9923847317695618, | |
| "learning_rate": 4.7982512711416995e-06, | |
| "loss": 0.7256, | |
| "step": 580 | |
| }, | |
| { | |
| "epoch": 0.9097077244258872, | |
| "grad_norm": 1.0387133359909058, | |
| "learning_rate": 4.7974213285586e-06, | |
| "loss": 0.7766, | |
| "step": 581 | |
| }, | |
| { | |
| "epoch": 0.9112734864300627, | |
| "grad_norm": 0.9617043733596802, | |
| "learning_rate": 4.79658975446993e-06, | |
| "loss": 0.7602, | |
| "step": 582 | |
| }, | |
| { | |
| "epoch": 0.912839248434238, | |
| "grad_norm": 0.9530353546142578, | |
| "learning_rate": 4.795756549466229e-06, | |
| "loss": 0.7922, | |
| "step": 583 | |
| }, | |
| { | |
| "epoch": 0.9144050104384134, | |
| "grad_norm": 0.9305918216705322, | |
| "learning_rate": 4.794921714139194e-06, | |
| "loss": 0.8377, | |
| "step": 584 | |
| }, | |
| { | |
| "epoch": 0.9159707724425887, | |
| "grad_norm": 0.899860143661499, | |
| "learning_rate": 4.794085249081682e-06, | |
| "loss": 0.8343, | |
| "step": 585 | |
| }, | |
| { | |
| "epoch": 0.9175365344467641, | |
| "grad_norm": 0.9024940729141235, | |
| "learning_rate": 4.793247154887703e-06, | |
| "loss": 0.8271, | |
| "step": 586 | |
| }, | |
| { | |
| "epoch": 0.9191022964509394, | |
| "grad_norm": 0.980869472026825, | |
| "learning_rate": 4.7924074321524286e-06, | |
| "loss": 0.7468, | |
| "step": 587 | |
| }, | |
| { | |
| "epoch": 0.9206680584551148, | |
| "grad_norm": 1.0506541728973389, | |
| "learning_rate": 4.791566081472185e-06, | |
| "loss": 0.9207, | |
| "step": 588 | |
| }, | |
| { | |
| "epoch": 0.9222338204592901, | |
| "grad_norm": 1.0723917484283447, | |
| "learning_rate": 4.790723103444454e-06, | |
| "loss": 0.7615, | |
| "step": 589 | |
| }, | |
| { | |
| "epoch": 0.9237995824634656, | |
| "grad_norm": 1.0455081462860107, | |
| "learning_rate": 4.789878498667873e-06, | |
| "loss": 0.8115, | |
| "step": 590 | |
| }, | |
| { | |
| "epoch": 0.925365344467641, | |
| "grad_norm": 0.9729639291763306, | |
| "learning_rate": 4.789032267742234e-06, | |
| "loss": 0.8622, | |
| "step": 591 | |
| }, | |
| { | |
| "epoch": 0.9269311064718163, | |
| "grad_norm": 0.9695239067077637, | |
| "learning_rate": 4.788184411268488e-06, | |
| "loss": 0.8165, | |
| "step": 592 | |
| }, | |
| { | |
| "epoch": 0.9284968684759917, | |
| "grad_norm": 1.0099278688430786, | |
| "learning_rate": 4.7873349298487345e-06, | |
| "loss": 0.759, | |
| "step": 593 | |
| }, | |
| { | |
| "epoch": 0.930062630480167, | |
| "grad_norm": 0.9377149939537048, | |
| "learning_rate": 4.786483824086231e-06, | |
| "loss": 0.8323, | |
| "step": 594 | |
| }, | |
| { | |
| "epoch": 0.9316283924843424, | |
| "grad_norm": 0.8885690569877625, | |
| "learning_rate": 4.785631094585387e-06, | |
| "loss": 0.8448, | |
| "step": 595 | |
| }, | |
| { | |
| "epoch": 0.9331941544885177, | |
| "grad_norm": 0.9133430123329163, | |
| "learning_rate": 4.784776741951766e-06, | |
| "loss": 0.8135, | |
| "step": 596 | |
| }, | |
| { | |
| "epoch": 0.9347599164926931, | |
| "grad_norm": 1.015851378440857, | |
| "learning_rate": 4.783920766792082e-06, | |
| "loss": 0.8464, | |
| "step": 597 | |
| }, | |
| { | |
| "epoch": 0.9363256784968684, | |
| "grad_norm": 0.8883825540542603, | |
| "learning_rate": 4.783063169714203e-06, | |
| "loss": 0.7445, | |
| "step": 598 | |
| }, | |
| { | |
| "epoch": 0.9378914405010439, | |
| "grad_norm": 0.9872002005577087, | |
| "learning_rate": 4.782203951327149e-06, | |
| "loss": 0.8293, | |
| "step": 599 | |
| }, | |
| { | |
| "epoch": 0.9394572025052192, | |
| "grad_norm": 1.0305813550949097, | |
| "learning_rate": 4.781343112241091e-06, | |
| "loss": 0.8606, | |
| "step": 600 | |
| }, | |
| { | |
| "epoch": 0.9410229645093946, | |
| "grad_norm": 0.9667104482650757, | |
| "learning_rate": 4.780480653067351e-06, | |
| "loss": 0.8057, | |
| "step": 601 | |
| }, | |
| { | |
| "epoch": 0.94258872651357, | |
| "grad_norm": 1.067185640335083, | |
| "learning_rate": 4.7796165744184e-06, | |
| "loss": 0.849, | |
| "step": 602 | |
| }, | |
| { | |
| "epoch": 0.9441544885177453, | |
| "grad_norm": 0.9017406105995178, | |
| "learning_rate": 4.778750876907862e-06, | |
| "loss": 0.7927, | |
| "step": 603 | |
| }, | |
| { | |
| "epoch": 0.9457202505219207, | |
| "grad_norm": 0.9827666878700256, | |
| "learning_rate": 4.777883561150508e-06, | |
| "loss": 0.8087, | |
| "step": 604 | |
| }, | |
| { | |
| "epoch": 0.947286012526096, | |
| "grad_norm": 0.9627811908721924, | |
| "learning_rate": 4.77701462776226e-06, | |
| "loss": 0.8488, | |
| "step": 605 | |
| }, | |
| { | |
| "epoch": 0.9488517745302714, | |
| "grad_norm": 1.0321338176727295, | |
| "learning_rate": 4.776144077360186e-06, | |
| "loss": 0.8289, | |
| "step": 606 | |
| }, | |
| { | |
| "epoch": 0.9504175365344467, | |
| "grad_norm": 1.0012633800506592, | |
| "learning_rate": 4.775271910562508e-06, | |
| "loss": 0.7184, | |
| "step": 607 | |
| }, | |
| { | |
| "epoch": 0.9519832985386222, | |
| "grad_norm": 0.9186056852340698, | |
| "learning_rate": 4.7743981279885875e-06, | |
| "loss": 0.8727, | |
| "step": 608 | |
| }, | |
| { | |
| "epoch": 0.9535490605427975, | |
| "grad_norm": 0.910044252872467, | |
| "learning_rate": 4.773522730258942e-06, | |
| "loss": 0.8455, | |
| "step": 609 | |
| }, | |
| { | |
| "epoch": 0.9551148225469729, | |
| "grad_norm": 0.9342944025993347, | |
| "learning_rate": 4.77264571799523e-06, | |
| "loss": 0.7548, | |
| "step": 610 | |
| }, | |
| { | |
| "epoch": 0.9566805845511482, | |
| "grad_norm": 0.9609628319740295, | |
| "learning_rate": 4.771767091820259e-06, | |
| "loss": 0.8294, | |
| "step": 611 | |
| }, | |
| { | |
| "epoch": 0.9582463465553236, | |
| "grad_norm": 0.9708508253097534, | |
| "learning_rate": 4.770886852357983e-06, | |
| "loss": 0.7857, | |
| "step": 612 | |
| }, | |
| { | |
| "epoch": 0.9598121085594989, | |
| "grad_norm": 1.0910807847976685, | |
| "learning_rate": 4.7700050002334995e-06, | |
| "loss": 0.8162, | |
| "step": 613 | |
| }, | |
| { | |
| "epoch": 0.9613778705636743, | |
| "grad_norm": 0.9703771471977234, | |
| "learning_rate": 4.769121536073054e-06, | |
| "loss": 0.7205, | |
| "step": 614 | |
| }, | |
| { | |
| "epoch": 0.9629436325678496, | |
| "grad_norm": 1.027938961982727, | |
| "learning_rate": 4.768236460504035e-06, | |
| "loss": 0.7641, | |
| "step": 615 | |
| }, | |
| { | |
| "epoch": 0.964509394572025, | |
| "grad_norm": 1.0003989934921265, | |
| "learning_rate": 4.767349774154974e-06, | |
| "loss": 0.7588, | |
| "step": 616 | |
| }, | |
| { | |
| "epoch": 0.9660751565762005, | |
| "grad_norm": 0.9572570323944092, | |
| "learning_rate": 4.766461477655552e-06, | |
| "loss": 0.8274, | |
| "step": 617 | |
| }, | |
| { | |
| "epoch": 0.9676409185803758, | |
| "grad_norm": 0.977287769317627, | |
| "learning_rate": 4.765571571636586e-06, | |
| "loss": 0.7856, | |
| "step": 618 | |
| }, | |
| { | |
| "epoch": 0.9692066805845512, | |
| "grad_norm": 0.9582562446594238, | |
| "learning_rate": 4.764680056730042e-06, | |
| "loss": 0.8102, | |
| "step": 619 | |
| }, | |
| { | |
| "epoch": 0.9707724425887265, | |
| "grad_norm": 0.9842986464500427, | |
| "learning_rate": 4.763786933569025e-06, | |
| "loss": 0.7515, | |
| "step": 620 | |
| }, | |
| { | |
| "epoch": 0.9723382045929019, | |
| "grad_norm": 0.9955743551254272, | |
| "learning_rate": 4.762892202787782e-06, | |
| "loss": 0.7398, | |
| "step": 621 | |
| }, | |
| { | |
| "epoch": 0.9739039665970772, | |
| "grad_norm": 1.0011107921600342, | |
| "learning_rate": 4.761995865021706e-06, | |
| "loss": 0.8182, | |
| "step": 622 | |
| }, | |
| { | |
| "epoch": 0.9754697286012526, | |
| "grad_norm": 0.9586448669433594, | |
| "learning_rate": 4.761097920907325e-06, | |
| "loss": 0.8434, | |
| "step": 623 | |
| }, | |
| { | |
| "epoch": 0.9770354906054279, | |
| "grad_norm": 0.979123055934906, | |
| "learning_rate": 4.760198371082312e-06, | |
| "loss": 0.7954, | |
| "step": 624 | |
| }, | |
| { | |
| "epoch": 0.9786012526096033, | |
| "grad_norm": 1.0081250667572021, | |
| "learning_rate": 4.75929721618548e-06, | |
| "loss": 0.7931, | |
| "step": 625 | |
| }, | |
| { | |
| "epoch": 0.9801670146137788, | |
| "grad_norm": 1.027185320854187, | |
| "learning_rate": 4.75839445685678e-06, | |
| "loss": 0.7339, | |
| "step": 626 | |
| }, | |
| { | |
| "epoch": 0.9817327766179541, | |
| "grad_norm": 1.0553215742111206, | |
| "learning_rate": 4.757490093737303e-06, | |
| "loss": 0.7639, | |
| "step": 627 | |
| }, | |
| { | |
| "epoch": 0.9832985386221295, | |
| "grad_norm": 0.8578981757164001, | |
| "learning_rate": 4.75658412746928e-06, | |
| "loss": 0.8128, | |
| "step": 628 | |
| }, | |
| { | |
| "epoch": 0.9848643006263048, | |
| "grad_norm": 1.0143071413040161, | |
| "learning_rate": 4.75567655869608e-06, | |
| "loss": 0.8169, | |
| "step": 629 | |
| }, | |
| { | |
| "epoch": 0.9864300626304802, | |
| "grad_norm": 0.9743554592132568, | |
| "learning_rate": 4.754767388062208e-06, | |
| "loss": 0.8179, | |
| "step": 630 | |
| }, | |
| { | |
| "epoch": 0.9879958246346555, | |
| "grad_norm": 1.0276602506637573, | |
| "learning_rate": 4.7538566162133105e-06, | |
| "loss": 0.8009, | |
| "step": 631 | |
| }, | |
| { | |
| "epoch": 0.9895615866388309, | |
| "grad_norm": 0.9296634793281555, | |
| "learning_rate": 4.752944243796167e-06, | |
| "loss": 0.8262, | |
| "step": 632 | |
| }, | |
| { | |
| "epoch": 0.9911273486430062, | |
| "grad_norm": 0.9216791987419128, | |
| "learning_rate": 4.752030271458696e-06, | |
| "loss": 0.8929, | |
| "step": 633 | |
| }, | |
| { | |
| "epoch": 0.9926931106471816, | |
| "grad_norm": 0.9272832274436951, | |
| "learning_rate": 4.751114699849954e-06, | |
| "loss": 0.7396, | |
| "step": 634 | |
| }, | |
| { | |
| "epoch": 0.994258872651357, | |
| "grad_norm": 0.9121203422546387, | |
| "learning_rate": 4.750197529620127e-06, | |
| "loss": 0.8311, | |
| "step": 635 | |
| }, | |
| { | |
| "epoch": 0.9958246346555324, | |
| "grad_norm": 0.999600887298584, | |
| "learning_rate": 4.7492787614205425e-06, | |
| "loss": 0.8125, | |
| "step": 636 | |
| }, | |
| { | |
| "epoch": 0.9973903966597077, | |
| "grad_norm": 1.012978196144104, | |
| "learning_rate": 4.748358395903661e-06, | |
| "loss": 0.7966, | |
| "step": 637 | |
| }, | |
| { | |
| "epoch": 0.9989561586638831, | |
| "grad_norm": 0.9767918586730957, | |
| "learning_rate": 4.747436433723075e-06, | |
| "loss": 0.8024, | |
| "step": 638 | |
| }, | |
| { | |
| "epoch": 1.0015657620041754, | |
| "grad_norm": 1.8302738666534424, | |
| "learning_rate": 4.7465128755335135e-06, | |
| "loss": 1.6427, | |
| "step": 639 | |
| }, | |
| { | |
| "epoch": 1.0031315240083507, | |
| "grad_norm": 0.8704890012741089, | |
| "learning_rate": 4.7455877219908386e-06, | |
| "loss": 0.7766, | |
| "step": 640 | |
| }, | |
| { | |
| "epoch": 1.004697286012526, | |
| "grad_norm": 0.9848434925079346, | |
| "learning_rate": 4.744660973752044e-06, | |
| "loss": 0.8314, | |
| "step": 641 | |
| }, | |
| { | |
| "epoch": 1.0062630480167014, | |
| "grad_norm": 1.054824709892273, | |
| "learning_rate": 4.743732631475258e-06, | |
| "loss": 0.797, | |
| "step": 642 | |
| }, | |
| { | |
| "epoch": 1.0078288100208768, | |
| "grad_norm": 0.9286114573478699, | |
| "learning_rate": 4.742802695819737e-06, | |
| "loss": 0.764, | |
| "step": 643 | |
| }, | |
| { | |
| "epoch": 1.0093945720250521, | |
| "grad_norm": 0.8625781536102295, | |
| "learning_rate": 4.7418711674458735e-06, | |
| "loss": 0.6691, | |
| "step": 644 | |
| }, | |
| { | |
| "epoch": 1.0109603340292275, | |
| "grad_norm": 0.8710452914237976, | |
| "learning_rate": 4.740938047015188e-06, | |
| "loss": 0.7277, | |
| "step": 645 | |
| }, | |
| { | |
| "epoch": 1.0125260960334028, | |
| "grad_norm": 0.8873130679130554, | |
| "learning_rate": 4.740003335190334e-06, | |
| "loss": 0.6531, | |
| "step": 646 | |
| }, | |
| { | |
| "epoch": 1.0140918580375784, | |
| "grad_norm": 0.9777164459228516, | |
| "learning_rate": 4.7390670326350925e-06, | |
| "loss": 0.8047, | |
| "step": 647 | |
| }, | |
| { | |
| "epoch": 1.0156576200417538, | |
| "grad_norm": 0.886025071144104, | |
| "learning_rate": 4.738129140014377e-06, | |
| "loss": 0.715, | |
| "step": 648 | |
| }, | |
| { | |
| "epoch": 1.017223382045929, | |
| "grad_norm": 0.9825332164764404, | |
| "learning_rate": 4.737189657994226e-06, | |
| "loss": 0.7783, | |
| "step": 649 | |
| }, | |
| { | |
| "epoch": 1.0187891440501045, | |
| "grad_norm": 0.9331428408622742, | |
| "learning_rate": 4.736248587241812e-06, | |
| "loss": 0.7598, | |
| "step": 650 | |
| }, | |
| { | |
| "epoch": 1.0203549060542798, | |
| "grad_norm": 0.9406535625457764, | |
| "learning_rate": 4.735305928425431e-06, | |
| "loss": 0.7989, | |
| "step": 651 | |
| }, | |
| { | |
| "epoch": 1.0219206680584552, | |
| "grad_norm": 1.0644745826721191, | |
| "learning_rate": 4.734361682214511e-06, | |
| "loss": 0.8135, | |
| "step": 652 | |
| }, | |
| { | |
| "epoch": 1.0234864300626305, | |
| "grad_norm": 0.9037052392959595, | |
| "learning_rate": 4.733415849279603e-06, | |
| "loss": 0.7051, | |
| "step": 653 | |
| }, | |
| { | |
| "epoch": 1.0250521920668059, | |
| "grad_norm": 1.000563144683838, | |
| "learning_rate": 4.7324684302923865e-06, | |
| "loss": 0.7427, | |
| "step": 654 | |
| }, | |
| { | |
| "epoch": 1.0266179540709812, | |
| "grad_norm": 0.9181354641914368, | |
| "learning_rate": 4.731519425925671e-06, | |
| "loss": 0.7236, | |
| "step": 655 | |
| }, | |
| { | |
| "epoch": 1.0281837160751566, | |
| "grad_norm": 1.0283046960830688, | |
| "learning_rate": 4.730568836853384e-06, | |
| "loss": 0.7998, | |
| "step": 656 | |
| }, | |
| { | |
| "epoch": 1.029749478079332, | |
| "grad_norm": 0.8692315220832825, | |
| "learning_rate": 4.729616663750587e-06, | |
| "loss": 0.7974, | |
| "step": 657 | |
| }, | |
| { | |
| "epoch": 1.0313152400835073, | |
| "grad_norm": 0.928920567035675, | |
| "learning_rate": 4.72866290729346e-06, | |
| "loss": 0.7199, | |
| "step": 658 | |
| }, | |
| { | |
| "epoch": 1.0328810020876826, | |
| "grad_norm": 0.9066957235336304, | |
| "learning_rate": 4.727707568159311e-06, | |
| "loss": 0.791, | |
| "step": 659 | |
| }, | |
| { | |
| "epoch": 1.034446764091858, | |
| "grad_norm": 1.0086729526519775, | |
| "learning_rate": 4.726750647026569e-06, | |
| "loss": 0.7113, | |
| "step": 660 | |
| }, | |
| { | |
| "epoch": 1.0360125260960333, | |
| "grad_norm": 0.931856632232666, | |
| "learning_rate": 4.72579214457479e-06, | |
| "loss": 0.7977, | |
| "step": 661 | |
| }, | |
| { | |
| "epoch": 1.0375782881002087, | |
| "grad_norm": 0.9191203713417053, | |
| "learning_rate": 4.724832061484651e-06, | |
| "loss": 0.7979, | |
| "step": 662 | |
| }, | |
| { | |
| "epoch": 1.039144050104384, | |
| "grad_norm": 0.9614707827568054, | |
| "learning_rate": 4.72387039843795e-06, | |
| "loss": 0.6994, | |
| "step": 663 | |
| }, | |
| { | |
| "epoch": 1.0407098121085594, | |
| "grad_norm": 1.014823317527771, | |
| "learning_rate": 4.72290715611761e-06, | |
| "loss": 0.7539, | |
| "step": 664 | |
| }, | |
| { | |
| "epoch": 1.042275574112735, | |
| "grad_norm": 0.9177424311637878, | |
| "learning_rate": 4.721942335207673e-06, | |
| "loss": 0.6625, | |
| "step": 665 | |
| }, | |
| { | |
| "epoch": 1.0438413361169103, | |
| "grad_norm": 1.0253031253814697, | |
| "learning_rate": 4.720975936393305e-06, | |
| "loss": 0.7747, | |
| "step": 666 | |
| }, | |
| { | |
| "epoch": 1.0454070981210857, | |
| "grad_norm": 1.0004568099975586, | |
| "learning_rate": 4.720007960360788e-06, | |
| "loss": 0.7008, | |
| "step": 667 | |
| }, | |
| { | |
| "epoch": 1.046972860125261, | |
| "grad_norm": 1.1344349384307861, | |
| "learning_rate": 4.719038407797529e-06, | |
| "loss": 0.7493, | |
| "step": 668 | |
| }, | |
| { | |
| "epoch": 1.0485386221294364, | |
| "grad_norm": 0.9944799542427063, | |
| "learning_rate": 4.718067279392052e-06, | |
| "loss": 0.7823, | |
| "step": 669 | |
| }, | |
| { | |
| "epoch": 1.0501043841336117, | |
| "grad_norm": 0.9469346404075623, | |
| "learning_rate": 4.717094575833999e-06, | |
| "loss": 0.7419, | |
| "step": 670 | |
| }, | |
| { | |
| "epoch": 1.051670146137787, | |
| "grad_norm": 1.015339732170105, | |
| "learning_rate": 4.716120297814133e-06, | |
| "loss": 0.7168, | |
| "step": 671 | |
| }, | |
| { | |
| "epoch": 1.0532359081419624, | |
| "grad_norm": 0.9544686675071716, | |
| "learning_rate": 4.715144446024333e-06, | |
| "loss": 0.791, | |
| "step": 672 | |
| }, | |
| { | |
| "epoch": 1.0548016701461378, | |
| "grad_norm": 1.0285652875900269, | |
| "learning_rate": 4.714167021157599e-06, | |
| "loss": 0.7649, | |
| "step": 673 | |
| }, | |
| { | |
| "epoch": 1.0563674321503131, | |
| "grad_norm": 0.9366872906684875, | |
| "learning_rate": 4.713188023908044e-06, | |
| "loss": 0.7524, | |
| "step": 674 | |
| }, | |
| { | |
| "epoch": 1.0579331941544885, | |
| "grad_norm": 1.0348883867263794, | |
| "learning_rate": 4.712207454970899e-06, | |
| "loss": 0.6965, | |
| "step": 675 | |
| }, | |
| { | |
| "epoch": 1.0594989561586639, | |
| "grad_norm": 0.8716043829917908, | |
| "learning_rate": 4.711225315042513e-06, | |
| "loss": 0.6264, | |
| "step": 676 | |
| }, | |
| { | |
| "epoch": 1.0610647181628392, | |
| "grad_norm": 1.0421313047409058, | |
| "learning_rate": 4.710241604820348e-06, | |
| "loss": 0.7548, | |
| "step": 677 | |
| }, | |
| { | |
| "epoch": 1.0626304801670146, | |
| "grad_norm": 1.0886270999908447, | |
| "learning_rate": 4.709256325002983e-06, | |
| "loss": 0.6352, | |
| "step": 678 | |
| }, | |
| { | |
| "epoch": 1.06419624217119, | |
| "grad_norm": 0.9752539396286011, | |
| "learning_rate": 4.708269476290111e-06, | |
| "loss": 0.7764, | |
| "step": 679 | |
| }, | |
| { | |
| "epoch": 1.0657620041753653, | |
| "grad_norm": 1.0658339262008667, | |
| "learning_rate": 4.7072810593825385e-06, | |
| "loss": 0.766, | |
| "step": 680 | |
| }, | |
| { | |
| "epoch": 1.0673277661795406, | |
| "grad_norm": 1.0299617052078247, | |
| "learning_rate": 4.706291074982187e-06, | |
| "loss": 0.7558, | |
| "step": 681 | |
| }, | |
| { | |
| "epoch": 1.068893528183716, | |
| "grad_norm": 0.9171326160430908, | |
| "learning_rate": 4.705299523792089e-06, | |
| "loss": 0.7084, | |
| "step": 682 | |
| }, | |
| { | |
| "epoch": 1.0704592901878915, | |
| "grad_norm": 0.9924243688583374, | |
| "learning_rate": 4.7043064065163925e-06, | |
| "loss": 0.8393, | |
| "step": 683 | |
| }, | |
| { | |
| "epoch": 1.072025052192067, | |
| "grad_norm": 1.0243877172470093, | |
| "learning_rate": 4.703311723860356e-06, | |
| "loss": 0.7755, | |
| "step": 684 | |
| }, | |
| { | |
| "epoch": 1.0735908141962422, | |
| "grad_norm": 1.0019137859344482, | |
| "learning_rate": 4.702315476530349e-06, | |
| "loss": 0.7254, | |
| "step": 685 | |
| }, | |
| { | |
| "epoch": 1.0751565762004176, | |
| "grad_norm": 0.8541996479034424, | |
| "learning_rate": 4.701317665233853e-06, | |
| "loss": 0.7389, | |
| "step": 686 | |
| }, | |
| { | |
| "epoch": 1.076722338204593, | |
| "grad_norm": 1.1119922399520874, | |
| "learning_rate": 4.700318290679462e-06, | |
| "loss": 0.7803, | |
| "step": 687 | |
| }, | |
| { | |
| "epoch": 1.0782881002087683, | |
| "grad_norm": 1.078452706336975, | |
| "learning_rate": 4.699317353576876e-06, | |
| "loss": 0.756, | |
| "step": 688 | |
| }, | |
| { | |
| "epoch": 1.0798538622129437, | |
| "grad_norm": 1.0079457759857178, | |
| "learning_rate": 4.698314854636908e-06, | |
| "loss": 0.7365, | |
| "step": 689 | |
| }, | |
| { | |
| "epoch": 1.081419624217119, | |
| "grad_norm": 0.931423544883728, | |
| "learning_rate": 4.6973107945714795e-06, | |
| "loss": 0.7881, | |
| "step": 690 | |
| }, | |
| { | |
| "epoch": 1.0829853862212944, | |
| "grad_norm": 1.0135142803192139, | |
| "learning_rate": 4.696305174093619e-06, | |
| "loss": 0.7884, | |
| "step": 691 | |
| }, | |
| { | |
| "epoch": 1.0845511482254697, | |
| "grad_norm": 1.1194610595703125, | |
| "learning_rate": 4.695297993917465e-06, | |
| "loss": 0.7969, | |
| "step": 692 | |
| }, | |
| { | |
| "epoch": 1.086116910229645, | |
| "grad_norm": 0.9972780346870422, | |
| "learning_rate": 4.6942892547582625e-06, | |
| "loss": 0.5873, | |
| "step": 693 | |
| }, | |
| { | |
| "epoch": 1.0876826722338204, | |
| "grad_norm": 1.0711641311645508, | |
| "learning_rate": 4.693278957332364e-06, | |
| "loss": 0.7273, | |
| "step": 694 | |
| }, | |
| { | |
| "epoch": 1.0892484342379958, | |
| "grad_norm": 0.9708715081214905, | |
| "learning_rate": 4.6922671023572305e-06, | |
| "loss": 0.6786, | |
| "step": 695 | |
| }, | |
| { | |
| "epoch": 1.0908141962421711, | |
| "grad_norm": 0.9773644804954529, | |
| "learning_rate": 4.691253690551424e-06, | |
| "loss": 0.7839, | |
| "step": 696 | |
| }, | |
| { | |
| "epoch": 1.0923799582463465, | |
| "grad_norm": 1.039009690284729, | |
| "learning_rate": 4.690238722634618e-06, | |
| "loss": 0.6807, | |
| "step": 697 | |
| }, | |
| { | |
| "epoch": 1.0939457202505218, | |
| "grad_norm": 1.140777349472046, | |
| "learning_rate": 4.689222199327586e-06, | |
| "loss": 0.7506, | |
| "step": 698 | |
| }, | |
| { | |
| "epoch": 1.0955114822546972, | |
| "grad_norm": 0.9779882431030273, | |
| "learning_rate": 4.688204121352211e-06, | |
| "loss": 0.7732, | |
| "step": 699 | |
| }, | |
| { | |
| "epoch": 1.0970772442588728, | |
| "grad_norm": 1.0412248373031616, | |
| "learning_rate": 4.687184489431476e-06, | |
| "loss": 0.7034, | |
| "step": 700 | |
| }, | |
| { | |
| "epoch": 1.0986430062630481, | |
| "grad_norm": 0.9802400469779968, | |
| "learning_rate": 4.686163304289469e-06, | |
| "loss": 0.729, | |
| "step": 701 | |
| }, | |
| { | |
| "epoch": 1.1002087682672235, | |
| "grad_norm": 1.0531584024429321, | |
| "learning_rate": 4.6851405666513804e-06, | |
| "loss": 0.7635, | |
| "step": 702 | |
| }, | |
| { | |
| "epoch": 1.1017745302713988, | |
| "grad_norm": 0.983923077583313, | |
| "learning_rate": 4.684116277243505e-06, | |
| "loss": 0.7062, | |
| "step": 703 | |
| }, | |
| { | |
| "epoch": 1.1033402922755742, | |
| "grad_norm": 0.9886301159858704, | |
| "learning_rate": 4.683090436793238e-06, | |
| "loss": 0.7299, | |
| "step": 704 | |
| }, | |
| { | |
| "epoch": 1.1049060542797495, | |
| "grad_norm": 1.0247817039489746, | |
| "learning_rate": 4.682063046029075e-06, | |
| "loss": 0.729, | |
| "step": 705 | |
| }, | |
| { | |
| "epoch": 1.1064718162839249, | |
| "grad_norm": 0.9860950708389282, | |
| "learning_rate": 4.681034105680616e-06, | |
| "loss": 0.7392, | |
| "step": 706 | |
| }, | |
| { | |
| "epoch": 1.1080375782881002, | |
| "grad_norm": 0.9543380737304688, | |
| "learning_rate": 4.680003616478556e-06, | |
| "loss": 0.7061, | |
| "step": 707 | |
| }, | |
| { | |
| "epoch": 1.1096033402922756, | |
| "grad_norm": 1.0253225564956665, | |
| "learning_rate": 4.678971579154698e-06, | |
| "loss": 0.6577, | |
| "step": 708 | |
| }, | |
| { | |
| "epoch": 1.111169102296451, | |
| "grad_norm": 0.9013616442680359, | |
| "learning_rate": 4.677937994441935e-06, | |
| "loss": 0.7422, | |
| "step": 709 | |
| }, | |
| { | |
| "epoch": 1.1127348643006263, | |
| "grad_norm": 0.9716471433639526, | |
| "learning_rate": 4.676902863074268e-06, | |
| "loss": 0.7353, | |
| "step": 710 | |
| }, | |
| { | |
| "epoch": 1.1143006263048016, | |
| "grad_norm": 1.040898084640503, | |
| "learning_rate": 4.675866185786787e-06, | |
| "loss": 0.7396, | |
| "step": 711 | |
| }, | |
| { | |
| "epoch": 1.115866388308977, | |
| "grad_norm": 0.9931808114051819, | |
| "learning_rate": 4.674827963315688e-06, | |
| "loss": 0.7354, | |
| "step": 712 | |
| }, | |
| { | |
| "epoch": 1.1174321503131524, | |
| "grad_norm": 1.020512580871582, | |
| "learning_rate": 4.6737881963982605e-06, | |
| "loss": 0.7366, | |
| "step": 713 | |
| }, | |
| { | |
| "epoch": 1.1189979123173277, | |
| "grad_norm": 1.0201045274734497, | |
| "learning_rate": 4.67274688577289e-06, | |
| "loss": 0.7907, | |
| "step": 714 | |
| }, | |
| { | |
| "epoch": 1.120563674321503, | |
| "grad_norm": 1.0163429975509644, | |
| "learning_rate": 4.671704032179061e-06, | |
| "loss": 0.7452, | |
| "step": 715 | |
| }, | |
| { | |
| "epoch": 1.1221294363256784, | |
| "grad_norm": 0.9667377471923828, | |
| "learning_rate": 4.670659636357352e-06, | |
| "loss": 0.7983, | |
| "step": 716 | |
| }, | |
| { | |
| "epoch": 1.1236951983298538, | |
| "grad_norm": 1.0788301229476929, | |
| "learning_rate": 4.669613699049436e-06, | |
| "loss": 0.7861, | |
| "step": 717 | |
| }, | |
| { | |
| "epoch": 1.1252609603340291, | |
| "grad_norm": 1.0512142181396484, | |
| "learning_rate": 4.668566220998082e-06, | |
| "loss": 0.7582, | |
| "step": 718 | |
| }, | |
| { | |
| "epoch": 1.1268267223382047, | |
| "grad_norm": 0.9225794076919556, | |
| "learning_rate": 4.667517202947154e-06, | |
| "loss": 0.7801, | |
| "step": 719 | |
| }, | |
| { | |
| "epoch": 1.12839248434238, | |
| "grad_norm": 1.1193747520446777, | |
| "learning_rate": 4.666466645641607e-06, | |
| "loss": 0.8073, | |
| "step": 720 | |
| }, | |
| { | |
| "epoch": 1.1299582463465554, | |
| "grad_norm": 0.934438943862915, | |
| "learning_rate": 4.6654145498274915e-06, | |
| "loss": 0.774, | |
| "step": 721 | |
| }, | |
| { | |
| "epoch": 1.1315240083507307, | |
| "grad_norm": 1.0614968538284302, | |
| "learning_rate": 4.664360916251949e-06, | |
| "loss": 0.7216, | |
| "step": 722 | |
| }, | |
| { | |
| "epoch": 1.133089770354906, | |
| "grad_norm": 1.0807424783706665, | |
| "learning_rate": 4.663305745663212e-06, | |
| "loss": 0.6434, | |
| "step": 723 | |
| }, | |
| { | |
| "epoch": 1.1346555323590815, | |
| "grad_norm": 0.9515354633331299, | |
| "learning_rate": 4.66224903881061e-06, | |
| "loss": 0.7336, | |
| "step": 724 | |
| }, | |
| { | |
| "epoch": 1.1362212943632568, | |
| "grad_norm": 0.9852489233016968, | |
| "learning_rate": 4.661190796444557e-06, | |
| "loss": 0.8156, | |
| "step": 725 | |
| }, | |
| { | |
| "epoch": 1.1377870563674322, | |
| "grad_norm": 1.0071501731872559, | |
| "learning_rate": 4.66013101931656e-06, | |
| "loss": 0.8836, | |
| "step": 726 | |
| }, | |
| { | |
| "epoch": 1.1393528183716075, | |
| "grad_norm": 0.9622960686683655, | |
| "learning_rate": 4.6590697081792176e-06, | |
| "loss": 0.7631, | |
| "step": 727 | |
| }, | |
| { | |
| "epoch": 1.1409185803757829, | |
| "grad_norm": 1.0588105916976929, | |
| "learning_rate": 4.6580068637862144e-06, | |
| "loss": 0.7874, | |
| "step": 728 | |
| }, | |
| { | |
| "epoch": 1.1424843423799582, | |
| "grad_norm": 0.9920447468757629, | |
| "learning_rate": 4.656942486892326e-06, | |
| "loss": 0.7161, | |
| "step": 729 | |
| }, | |
| { | |
| "epoch": 1.1440501043841336, | |
| "grad_norm": 1.0036561489105225, | |
| "learning_rate": 4.655876578253416e-06, | |
| "loss": 0.7116, | |
| "step": 730 | |
| }, | |
| { | |
| "epoch": 1.145615866388309, | |
| "grad_norm": 1.0208396911621094, | |
| "learning_rate": 4.654809138626436e-06, | |
| "loss": 0.818, | |
| "step": 731 | |
| }, | |
| { | |
| "epoch": 1.1471816283924843, | |
| "grad_norm": 1.1095491647720337, | |
| "learning_rate": 4.653740168769424e-06, | |
| "loss": 0.7319, | |
| "step": 732 | |
| }, | |
| { | |
| "epoch": 1.1487473903966596, | |
| "grad_norm": 0.8151328563690186, | |
| "learning_rate": 4.652669669441505e-06, | |
| "loss": 0.774, | |
| "step": 733 | |
| }, | |
| { | |
| "epoch": 1.150313152400835, | |
| "grad_norm": 1.0747915506362915, | |
| "learning_rate": 4.651597641402891e-06, | |
| "loss": 0.8275, | |
| "step": 734 | |
| }, | |
| { | |
| "epoch": 1.1518789144050103, | |
| "grad_norm": 0.9265272617340088, | |
| "learning_rate": 4.650524085414878e-06, | |
| "loss": 0.6946, | |
| "step": 735 | |
| }, | |
| { | |
| "epoch": 1.153444676409186, | |
| "grad_norm": 0.9531672596931458, | |
| "learning_rate": 4.649449002239849e-06, | |
| "loss": 0.7899, | |
| "step": 736 | |
| }, | |
| { | |
| "epoch": 1.1550104384133613, | |
| "grad_norm": 1.0496262311935425, | |
| "learning_rate": 4.648372392641269e-06, | |
| "loss": 0.8237, | |
| "step": 737 | |
| }, | |
| { | |
| "epoch": 1.1565762004175366, | |
| "grad_norm": 0.8967193961143494, | |
| "learning_rate": 4.64729425738369e-06, | |
| "loss": 0.736, | |
| "step": 738 | |
| }, | |
| { | |
| "epoch": 1.158141962421712, | |
| "grad_norm": 1.061038851737976, | |
| "learning_rate": 4.646214597232745e-06, | |
| "loss": 0.7308, | |
| "step": 739 | |
| }, | |
| { | |
| "epoch": 1.1597077244258873, | |
| "grad_norm": 0.9556939601898193, | |
| "learning_rate": 4.64513341295515e-06, | |
| "loss": 0.7526, | |
| "step": 740 | |
| }, | |
| { | |
| "epoch": 1.1612734864300627, | |
| "grad_norm": 1.000093936920166, | |
| "learning_rate": 4.6440507053187075e-06, | |
| "loss": 0.7488, | |
| "step": 741 | |
| }, | |
| { | |
| "epoch": 1.162839248434238, | |
| "grad_norm": 0.9552261233329773, | |
| "learning_rate": 4.642966475092294e-06, | |
| "loss": 0.8291, | |
| "step": 742 | |
| }, | |
| { | |
| "epoch": 1.1644050104384134, | |
| "grad_norm": 1.0215847492218018, | |
| "learning_rate": 4.641880723045875e-06, | |
| "loss": 0.756, | |
| "step": 743 | |
| }, | |
| { | |
| "epoch": 1.1659707724425887, | |
| "grad_norm": 1.0543124675750732, | |
| "learning_rate": 4.640793449950493e-06, | |
| "loss": 0.7668, | |
| "step": 744 | |
| }, | |
| { | |
| "epoch": 1.167536534446764, | |
| "grad_norm": 0.9648609161376953, | |
| "learning_rate": 4.639704656578269e-06, | |
| "loss": 0.6531, | |
| "step": 745 | |
| }, | |
| { | |
| "epoch": 1.1691022964509394, | |
| "grad_norm": 1.0117427110671997, | |
| "learning_rate": 4.638614343702408e-06, | |
| "loss": 0.7084, | |
| "step": 746 | |
| }, | |
| { | |
| "epoch": 1.1706680584551148, | |
| "grad_norm": 0.9475041627883911, | |
| "learning_rate": 4.6375225120971915e-06, | |
| "loss": 0.8578, | |
| "step": 747 | |
| }, | |
| { | |
| "epoch": 1.1722338204592901, | |
| "grad_norm": 1.0122299194335938, | |
| "learning_rate": 4.6364291625379785e-06, | |
| "loss": 0.7541, | |
| "step": 748 | |
| }, | |
| { | |
| "epoch": 1.1737995824634655, | |
| "grad_norm": 0.9487535357475281, | |
| "learning_rate": 4.635334295801208e-06, | |
| "loss": 0.7203, | |
| "step": 749 | |
| }, | |
| { | |
| "epoch": 1.1753653444676408, | |
| "grad_norm": 1.0628844499588013, | |
| "learning_rate": 4.6342379126643965e-06, | |
| "loss": 0.7532, | |
| "step": 750 | |
| }, | |
| { | |
| "epoch": 1.1769311064718162, | |
| "grad_norm": 0.9647781848907471, | |
| "learning_rate": 4.633140013906136e-06, | |
| "loss": 0.5918, | |
| "step": 751 | |
| }, | |
| { | |
| "epoch": 1.1784968684759916, | |
| "grad_norm": 0.9011932611465454, | |
| "learning_rate": 4.632040600306095e-06, | |
| "loss": 0.7395, | |
| "step": 752 | |
| }, | |
| { | |
| "epoch": 1.1800626304801671, | |
| "grad_norm": 1.0524142980575562, | |
| "learning_rate": 4.630939672645018e-06, | |
| "loss": 0.7487, | |
| "step": 753 | |
| }, | |
| { | |
| "epoch": 1.1816283924843423, | |
| "grad_norm": 1.165077567100525, | |
| "learning_rate": 4.6298372317047265e-06, | |
| "loss": 0.6817, | |
| "step": 754 | |
| }, | |
| { | |
| "epoch": 1.1831941544885178, | |
| "grad_norm": 0.9301269054412842, | |
| "learning_rate": 4.628733278268113e-06, | |
| "loss": 0.8446, | |
| "step": 755 | |
| }, | |
| { | |
| "epoch": 1.1847599164926932, | |
| "grad_norm": 0.9762471318244934, | |
| "learning_rate": 4.627627813119147e-06, | |
| "loss": 0.7527, | |
| "step": 756 | |
| }, | |
| { | |
| "epoch": 1.1863256784968685, | |
| "grad_norm": 0.8634734153747559, | |
| "learning_rate": 4.626520837042871e-06, | |
| "loss": 0.7177, | |
| "step": 757 | |
| }, | |
| { | |
| "epoch": 1.187891440501044, | |
| "grad_norm": 1.0592188835144043, | |
| "learning_rate": 4.625412350825399e-06, | |
| "loss": 0.7293, | |
| "step": 758 | |
| }, | |
| { | |
| "epoch": 1.1894572025052192, | |
| "grad_norm": 1.1318559646606445, | |
| "learning_rate": 4.62430235525392e-06, | |
| "loss": 0.6592, | |
| "step": 759 | |
| }, | |
| { | |
| "epoch": 1.1910229645093946, | |
| "grad_norm": 0.9689339399337769, | |
| "learning_rate": 4.623190851116692e-06, | |
| "loss": 0.7667, | |
| "step": 760 | |
| }, | |
| { | |
| "epoch": 1.19258872651357, | |
| "grad_norm": 0.9965885281562805, | |
| "learning_rate": 4.6220778392030464e-06, | |
| "loss": 0.6982, | |
| "step": 761 | |
| }, | |
| { | |
| "epoch": 1.1941544885177453, | |
| "grad_norm": 1.1428629159927368, | |
| "learning_rate": 4.6209633203033846e-06, | |
| "loss": 0.7342, | |
| "step": 762 | |
| }, | |
| { | |
| "epoch": 1.1957202505219207, | |
| "grad_norm": 0.901360809803009, | |
| "learning_rate": 4.6198472952091785e-06, | |
| "loss": 0.8371, | |
| "step": 763 | |
| }, | |
| { | |
| "epoch": 1.197286012526096, | |
| "grad_norm": 0.9882062077522278, | |
| "learning_rate": 4.618729764712969e-06, | |
| "loss": 0.6882, | |
| "step": 764 | |
| }, | |
| { | |
| "epoch": 1.1988517745302714, | |
| "grad_norm": 1.0692517757415771, | |
| "learning_rate": 4.617610729608366e-06, | |
| "loss": 0.7327, | |
| "step": 765 | |
| }, | |
| { | |
| "epoch": 1.2004175365344467, | |
| "grad_norm": 1.1056455373764038, | |
| "learning_rate": 4.616490190690049e-06, | |
| "loss": 0.781, | |
| "step": 766 | |
| }, | |
| { | |
| "epoch": 1.201983298538622, | |
| "grad_norm": 0.9928885698318481, | |
| "learning_rate": 4.615368148753765e-06, | |
| "loss": 0.7463, | |
| "step": 767 | |
| }, | |
| { | |
| "epoch": 1.2035490605427974, | |
| "grad_norm": 1.0006698369979858, | |
| "learning_rate": 4.614244604596327e-06, | |
| "loss": 0.6994, | |
| "step": 768 | |
| }, | |
| { | |
| "epoch": 1.2051148225469728, | |
| "grad_norm": 0.907508909702301, | |
| "learning_rate": 4.613119559015616e-06, | |
| "loss": 0.7117, | |
| "step": 769 | |
| }, | |
| { | |
| "epoch": 1.2066805845511483, | |
| "grad_norm": 1.1037627458572388, | |
| "learning_rate": 4.61199301281058e-06, | |
| "loss": 0.772, | |
| "step": 770 | |
| }, | |
| { | |
| "epoch": 1.2082463465553235, | |
| "grad_norm": 0.9474150538444519, | |
| "learning_rate": 4.610864966781231e-06, | |
| "loss": 0.7601, | |
| "step": 771 | |
| }, | |
| { | |
| "epoch": 1.209812108559499, | |
| "grad_norm": 1.003667950630188, | |
| "learning_rate": 4.609735421728647e-06, | |
| "loss": 0.7141, | |
| "step": 772 | |
| }, | |
| { | |
| "epoch": 1.2113778705636744, | |
| "grad_norm": 0.9724787473678589, | |
| "learning_rate": 4.608604378454971e-06, | |
| "loss": 0.7841, | |
| "step": 773 | |
| }, | |
| { | |
| "epoch": 1.2129436325678498, | |
| "grad_norm": 1.0460760593414307, | |
| "learning_rate": 4.607471837763409e-06, | |
| "loss": 0.7563, | |
| "step": 774 | |
| }, | |
| { | |
| "epoch": 1.2145093945720251, | |
| "grad_norm": 0.9798458218574524, | |
| "learning_rate": 4.606337800458231e-06, | |
| "loss": 0.6812, | |
| "step": 775 | |
| }, | |
| { | |
| "epoch": 1.2160751565762005, | |
| "grad_norm": 1.1637914180755615, | |
| "learning_rate": 4.605202267344768e-06, | |
| "loss": 0.7571, | |
| "step": 776 | |
| }, | |
| { | |
| "epoch": 1.2176409185803758, | |
| "grad_norm": 0.9384608864784241, | |
| "learning_rate": 4.6040652392294175e-06, | |
| "loss": 0.7842, | |
| "step": 777 | |
| }, | |
| { | |
| "epoch": 1.2192066805845512, | |
| "grad_norm": 1.0490702390670776, | |
| "learning_rate": 4.602926716919633e-06, | |
| "loss": 0.7157, | |
| "step": 778 | |
| }, | |
| { | |
| "epoch": 1.2207724425887265, | |
| "grad_norm": 1.131402611732483, | |
| "learning_rate": 4.601786701223934e-06, | |
| "loss": 0.7724, | |
| "step": 779 | |
| }, | |
| { | |
| "epoch": 1.2223382045929019, | |
| "grad_norm": 1.0159722566604614, | |
| "learning_rate": 4.600645192951898e-06, | |
| "loss": 0.7666, | |
| "step": 780 | |
| }, | |
| { | |
| "epoch": 1.2239039665970772, | |
| "grad_norm": 1.039806604385376, | |
| "learning_rate": 4.599502192914164e-06, | |
| "loss": 0.7845, | |
| "step": 781 | |
| }, | |
| { | |
| "epoch": 1.2254697286012526, | |
| "grad_norm": 1.0722614526748657, | |
| "learning_rate": 4.598357701922428e-06, | |
| "loss": 0.782, | |
| "step": 782 | |
| }, | |
| { | |
| "epoch": 1.227035490605428, | |
| "grad_norm": 0.9443033933639526, | |
| "learning_rate": 4.5972117207894465e-06, | |
| "loss": 0.6573, | |
| "step": 783 | |
| }, | |
| { | |
| "epoch": 1.2286012526096033, | |
| "grad_norm": 0.9738026857376099, | |
| "learning_rate": 4.596064250329035e-06, | |
| "loss": 0.7475, | |
| "step": 784 | |
| }, | |
| { | |
| "epoch": 1.2301670146137786, | |
| "grad_norm": 0.9932008385658264, | |
| "learning_rate": 4.594915291356063e-06, | |
| "loss": 0.757, | |
| "step": 785 | |
| }, | |
| { | |
| "epoch": 1.231732776617954, | |
| "grad_norm": 1.0612564086914062, | |
| "learning_rate": 4.5937648446864625e-06, | |
| "loss": 0.7824, | |
| "step": 786 | |
| }, | |
| { | |
| "epoch": 1.2332985386221293, | |
| "grad_norm": 1.0500893592834473, | |
| "learning_rate": 4.592612911137218e-06, | |
| "loss": 0.7646, | |
| "step": 787 | |
| }, | |
| { | |
| "epoch": 1.2348643006263047, | |
| "grad_norm": 0.9607855081558228, | |
| "learning_rate": 4.591459491526371e-06, | |
| "loss": 0.754, | |
| "step": 788 | |
| }, | |
| { | |
| "epoch": 1.2364300626304803, | |
| "grad_norm": 1.0871951580047607, | |
| "learning_rate": 4.590304586673018e-06, | |
| "loss": 0.7226, | |
| "step": 789 | |
| }, | |
| { | |
| "epoch": 1.2379958246346556, | |
| "grad_norm": 1.0030092000961304, | |
| "learning_rate": 4.589148197397311e-06, | |
| "loss": 0.7357, | |
| "step": 790 | |
| }, | |
| { | |
| "epoch": 1.239561586638831, | |
| "grad_norm": 1.0297991037368774, | |
| "learning_rate": 4.587990324520455e-06, | |
| "loss": 0.7724, | |
| "step": 791 | |
| }, | |
| { | |
| "epoch": 1.2411273486430063, | |
| "grad_norm": 1.0192303657531738, | |
| "learning_rate": 4.586830968864711e-06, | |
| "loss": 0.6921, | |
| "step": 792 | |
| }, | |
| { | |
| "epoch": 1.2426931106471817, | |
| "grad_norm": 0.9617307186126709, | |
| "learning_rate": 4.585670131253389e-06, | |
| "loss": 0.7443, | |
| "step": 793 | |
| }, | |
| { | |
| "epoch": 1.244258872651357, | |
| "grad_norm": 1.0355238914489746, | |
| "learning_rate": 4.584507812510856e-06, | |
| "loss": 0.7741, | |
| "step": 794 | |
| }, | |
| { | |
| "epoch": 1.2458246346555324, | |
| "grad_norm": 0.9938539862632751, | |
| "learning_rate": 4.583344013462526e-06, | |
| "loss": 0.7372, | |
| "step": 795 | |
| }, | |
| { | |
| "epoch": 1.2473903966597077, | |
| "grad_norm": 1.0573335886001587, | |
| "learning_rate": 4.582178734934869e-06, | |
| "loss": 0.7191, | |
| "step": 796 | |
| }, | |
| { | |
| "epoch": 1.248956158663883, | |
| "grad_norm": 0.963642954826355, | |
| "learning_rate": 4.581011977755402e-06, | |
| "loss": 0.6283, | |
| "step": 797 | |
| }, | |
| { | |
| "epoch": 1.2505219206680585, | |
| "grad_norm": 1.0119287967681885, | |
| "learning_rate": 4.579843742752694e-06, | |
| "loss": 0.7599, | |
| "step": 798 | |
| }, | |
| { | |
| "epoch": 1.2520876826722338, | |
| "grad_norm": 1.0628082752227783, | |
| "learning_rate": 4.578674030756364e-06, | |
| "loss": 0.7741, | |
| "step": 799 | |
| }, | |
| { | |
| "epoch": 1.2536534446764092, | |
| "grad_norm": 1.0510241985321045, | |
| "learning_rate": 4.577502842597077e-06, | |
| "loss": 0.7615, | |
| "step": 800 | |
| }, | |
| { | |
| "epoch": 1.2552192066805845, | |
| "grad_norm": 1.1180559396743774, | |
| "learning_rate": 4.5763301791065474e-06, | |
| "loss": 0.7119, | |
| "step": 801 | |
| }, | |
| { | |
| "epoch": 1.2567849686847599, | |
| "grad_norm": 1.0198049545288086, | |
| "learning_rate": 4.575156041117541e-06, | |
| "loss": 0.7487, | |
| "step": 802 | |
| }, | |
| { | |
| "epoch": 1.2583507306889352, | |
| "grad_norm": 1.018902063369751, | |
| "learning_rate": 4.5739804294638655e-06, | |
| "loss": 0.7461, | |
| "step": 803 | |
| }, | |
| { | |
| "epoch": 1.2599164926931106, | |
| "grad_norm": 0.9230141639709473, | |
| "learning_rate": 4.572803344980378e-06, | |
| "loss": 0.7295, | |
| "step": 804 | |
| }, | |
| { | |
| "epoch": 1.261482254697286, | |
| "grad_norm": 1.0011619329452515, | |
| "learning_rate": 4.571624788502981e-06, | |
| "loss": 0.7325, | |
| "step": 805 | |
| }, | |
| { | |
| "epoch": 1.2630480167014615, | |
| "grad_norm": 0.9569756388664246, | |
| "learning_rate": 4.570444760868622e-06, | |
| "loss": 0.7494, | |
| "step": 806 | |
| }, | |
| { | |
| "epoch": 1.2646137787056366, | |
| "grad_norm": 0.9860491156578064, | |
| "learning_rate": 4.569263262915293e-06, | |
| "loss": 0.6536, | |
| "step": 807 | |
| }, | |
| { | |
| "epoch": 1.2661795407098122, | |
| "grad_norm": 1.0986074209213257, | |
| "learning_rate": 4.568080295482032e-06, | |
| "loss": 0.7337, | |
| "step": 808 | |
| }, | |
| { | |
| "epoch": 1.2677453027139876, | |
| "grad_norm": 1.0782687664031982, | |
| "learning_rate": 4.566895859408916e-06, | |
| "loss": 0.7193, | |
| "step": 809 | |
| }, | |
| { | |
| "epoch": 1.269311064718163, | |
| "grad_norm": 1.1424410343170166, | |
| "learning_rate": 4.565709955537071e-06, | |
| "loss": 0.6153, | |
| "step": 810 | |
| }, | |
| { | |
| "epoch": 1.2708768267223383, | |
| "grad_norm": 0.9178900718688965, | |
| "learning_rate": 4.564522584708662e-06, | |
| "loss": 0.7802, | |
| "step": 811 | |
| }, | |
| { | |
| "epoch": 1.2724425887265136, | |
| "grad_norm": 1.0006638765335083, | |
| "learning_rate": 4.563333747766896e-06, | |
| "loss": 0.7882, | |
| "step": 812 | |
| }, | |
| { | |
| "epoch": 1.274008350730689, | |
| "grad_norm": 1.0139516592025757, | |
| "learning_rate": 4.56214344555602e-06, | |
| "loss": 0.6562, | |
| "step": 813 | |
| }, | |
| { | |
| "epoch": 1.2755741127348643, | |
| "grad_norm": 1.0360203981399536, | |
| "learning_rate": 4.560951678921324e-06, | |
| "loss": 0.7951, | |
| "step": 814 | |
| }, | |
| { | |
| "epoch": 1.2771398747390397, | |
| "grad_norm": 1.042457103729248, | |
| "learning_rate": 4.559758448709138e-06, | |
| "loss": 0.7572, | |
| "step": 815 | |
| }, | |
| { | |
| "epoch": 1.278705636743215, | |
| "grad_norm": 1.0056653022766113, | |
| "learning_rate": 4.558563755766827e-06, | |
| "loss": 0.7679, | |
| "step": 816 | |
| }, | |
| { | |
| "epoch": 1.2802713987473904, | |
| "grad_norm": 0.9838577508926392, | |
| "learning_rate": 4.5573676009428e-06, | |
| "loss": 0.7245, | |
| "step": 817 | |
| }, | |
| { | |
| "epoch": 1.2818371607515657, | |
| "grad_norm": 1.0474631786346436, | |
| "learning_rate": 4.556169985086503e-06, | |
| "loss": 0.7823, | |
| "step": 818 | |
| }, | |
| { | |
| "epoch": 1.283402922755741, | |
| "grad_norm": 1.0177569389343262, | |
| "learning_rate": 4.554970909048417e-06, | |
| "loss": 0.7599, | |
| "step": 819 | |
| }, | |
| { | |
| "epoch": 1.2849686847599164, | |
| "grad_norm": 1.0120493173599243, | |
| "learning_rate": 4.553770373680062e-06, | |
| "loss": 0.7842, | |
| "step": 820 | |
| }, | |
| { | |
| "epoch": 1.2865344467640918, | |
| "grad_norm": 1.1045918464660645, | |
| "learning_rate": 4.5525683798339946e-06, | |
| "loss": 0.7075, | |
| "step": 821 | |
| }, | |
| { | |
| "epoch": 1.2881002087682671, | |
| "grad_norm": 1.097714900970459, | |
| "learning_rate": 4.551364928363805e-06, | |
| "loss": 0.7571, | |
| "step": 822 | |
| }, | |
| { | |
| "epoch": 1.2896659707724427, | |
| "grad_norm": 0.9125490188598633, | |
| "learning_rate": 4.550160020124121e-06, | |
| "loss": 0.7626, | |
| "step": 823 | |
| }, | |
| { | |
| "epoch": 1.2912317327766178, | |
| "grad_norm": 1.1127262115478516, | |
| "learning_rate": 4.548953655970604e-06, | |
| "loss": 0.6618, | |
| "step": 824 | |
| }, | |
| { | |
| "epoch": 1.2927974947807934, | |
| "grad_norm": 1.0372540950775146, | |
| "learning_rate": 4.547745836759949e-06, | |
| "loss": 0.7467, | |
| "step": 825 | |
| }, | |
| { | |
| "epoch": 1.2943632567849686, | |
| "grad_norm": 1.0158213376998901, | |
| "learning_rate": 4.546536563349885e-06, | |
| "loss": 0.6421, | |
| "step": 826 | |
| }, | |
| { | |
| "epoch": 1.2959290187891441, | |
| "grad_norm": 1.0057018995285034, | |
| "learning_rate": 4.545325836599171e-06, | |
| "loss": 0.7252, | |
| "step": 827 | |
| }, | |
| { | |
| "epoch": 1.2974947807933195, | |
| "grad_norm": 1.0552566051483154, | |
| "learning_rate": 4.544113657367604e-06, | |
| "loss": 0.6645, | |
| "step": 828 | |
| }, | |
| { | |
| "epoch": 1.2990605427974948, | |
| "grad_norm": 1.0798696279525757, | |
| "learning_rate": 4.542900026516006e-06, | |
| "loss": 0.7627, | |
| "step": 829 | |
| }, | |
| { | |
| "epoch": 1.3006263048016702, | |
| "grad_norm": 1.0450047254562378, | |
| "learning_rate": 4.541684944906233e-06, | |
| "loss": 0.705, | |
| "step": 830 | |
| }, | |
| { | |
| "epoch": 1.3021920668058455, | |
| "grad_norm": 0.9975817203521729, | |
| "learning_rate": 4.540468413401171e-06, | |
| "loss": 0.7639, | |
| "step": 831 | |
| }, | |
| { | |
| "epoch": 1.303757828810021, | |
| "grad_norm": 0.9081487059593201, | |
| "learning_rate": 4.539250432864736e-06, | |
| "loss": 0.8129, | |
| "step": 832 | |
| }, | |
| { | |
| "epoch": 1.3053235908141962, | |
| "grad_norm": 1.228684902191162, | |
| "learning_rate": 4.538031004161872e-06, | |
| "loss": 0.6984, | |
| "step": 833 | |
| }, | |
| { | |
| "epoch": 1.3068893528183716, | |
| "grad_norm": 1.0577794313430786, | |
| "learning_rate": 4.536810128158552e-06, | |
| "loss": 0.7436, | |
| "step": 834 | |
| }, | |
| { | |
| "epoch": 1.308455114822547, | |
| "grad_norm": 1.1270350217819214, | |
| "learning_rate": 4.535587805721777e-06, | |
| "loss": 0.7589, | |
| "step": 835 | |
| }, | |
| { | |
| "epoch": 1.3100208768267223, | |
| "grad_norm": 1.1473027467727661, | |
| "learning_rate": 4.5343640377195766e-06, | |
| "loss": 0.7988, | |
| "step": 836 | |
| }, | |
| { | |
| "epoch": 1.3115866388308977, | |
| "grad_norm": 0.9700750112533569, | |
| "learning_rate": 4.533138825021001e-06, | |
| "loss": 0.7388, | |
| "step": 837 | |
| }, | |
| { | |
| "epoch": 1.313152400835073, | |
| "grad_norm": 0.9596260786056519, | |
| "learning_rate": 4.531912168496135e-06, | |
| "loss": 0.8177, | |
| "step": 838 | |
| }, | |
| { | |
| "epoch": 1.3147181628392484, | |
| "grad_norm": 0.9888957738876343, | |
| "learning_rate": 4.530684069016081e-06, | |
| "loss": 0.809, | |
| "step": 839 | |
| }, | |
| { | |
| "epoch": 1.316283924843424, | |
| "grad_norm": 1.219224214553833, | |
| "learning_rate": 4.529454527452972e-06, | |
| "loss": 0.7761, | |
| "step": 840 | |
| }, | |
| { | |
| "epoch": 1.317849686847599, | |
| "grad_norm": 0.9730296730995178, | |
| "learning_rate": 4.5282235446799624e-06, | |
| "loss": 0.8044, | |
| "step": 841 | |
| }, | |
| { | |
| "epoch": 1.3194154488517746, | |
| "grad_norm": 1.1046907901763916, | |
| "learning_rate": 4.526991121571228e-06, | |
| "loss": 0.7452, | |
| "step": 842 | |
| }, | |
| { | |
| "epoch": 1.3209812108559498, | |
| "grad_norm": 0.9707220196723938, | |
| "learning_rate": 4.525757259001972e-06, | |
| "loss": 0.7112, | |
| "step": 843 | |
| }, | |
| { | |
| "epoch": 1.3225469728601253, | |
| "grad_norm": 1.0085593461990356, | |
| "learning_rate": 4.524521957848416e-06, | |
| "loss": 0.6266, | |
| "step": 844 | |
| }, | |
| { | |
| "epoch": 1.3241127348643007, | |
| "grad_norm": 1.131439447402954, | |
| "learning_rate": 4.5232852189878055e-06, | |
| "loss": 0.7393, | |
| "step": 845 | |
| }, | |
| { | |
| "epoch": 1.325678496868476, | |
| "grad_norm": 1.1238782405853271, | |
| "learning_rate": 4.522047043298406e-06, | |
| "loss": 0.7541, | |
| "step": 846 | |
| }, | |
| { | |
| "epoch": 1.3272442588726514, | |
| "grad_norm": 0.9149152636528015, | |
| "learning_rate": 4.520807431659504e-06, | |
| "loss": 0.8162, | |
| "step": 847 | |
| }, | |
| { | |
| "epoch": 1.3288100208768268, | |
| "grad_norm": 1.0428732633590698, | |
| "learning_rate": 4.519566384951403e-06, | |
| "loss": 0.7511, | |
| "step": 848 | |
| }, | |
| { | |
| "epoch": 1.3303757828810021, | |
| "grad_norm": 1.0263090133666992, | |
| "learning_rate": 4.518323904055432e-06, | |
| "loss": 0.7482, | |
| "step": 849 | |
| }, | |
| { | |
| "epoch": 1.3319415448851775, | |
| "grad_norm": 1.113810420036316, | |
| "learning_rate": 4.517079989853931e-06, | |
| "loss": 0.797, | |
| "step": 850 | |
| }, | |
| { | |
| "epoch": 1.3335073068893528, | |
| "grad_norm": 1.0090736150741577, | |
| "learning_rate": 4.515834643230263e-06, | |
| "loss": 0.688, | |
| "step": 851 | |
| }, | |
| { | |
| "epoch": 1.3350730688935282, | |
| "grad_norm": 1.012824296951294, | |
| "learning_rate": 4.514587865068806e-06, | |
| "loss": 0.6683, | |
| "step": 852 | |
| }, | |
| { | |
| "epoch": 1.3366388308977035, | |
| "grad_norm": 0.9442676305770874, | |
| "learning_rate": 4.513339656254954e-06, | |
| "loss": 0.7812, | |
| "step": 853 | |
| }, | |
| { | |
| "epoch": 1.3382045929018789, | |
| "grad_norm": 1.0870879888534546, | |
| "learning_rate": 4.512090017675119e-06, | |
| "loss": 0.6943, | |
| "step": 854 | |
| }, | |
| { | |
| "epoch": 1.3397703549060542, | |
| "grad_norm": 0.9352210164070129, | |
| "learning_rate": 4.510838950216727e-06, | |
| "loss": 0.7571, | |
| "step": 855 | |
| }, | |
| { | |
| "epoch": 1.3413361169102296, | |
| "grad_norm": 1.173621654510498, | |
| "learning_rate": 4.509586454768221e-06, | |
| "loss": 0.6985, | |
| "step": 856 | |
| }, | |
| { | |
| "epoch": 1.342901878914405, | |
| "grad_norm": 0.9790180921554565, | |
| "learning_rate": 4.508332532219053e-06, | |
| "loss": 0.7132, | |
| "step": 857 | |
| }, | |
| { | |
| "epoch": 1.3444676409185803, | |
| "grad_norm": 1.0999163389205933, | |
| "learning_rate": 4.507077183459694e-06, | |
| "loss": 0.7858, | |
| "step": 858 | |
| }, | |
| { | |
| "epoch": 1.3460334029227559, | |
| "grad_norm": 1.018182635307312, | |
| "learning_rate": 4.505820409381625e-06, | |
| "loss": 0.712, | |
| "step": 859 | |
| }, | |
| { | |
| "epoch": 1.347599164926931, | |
| "grad_norm": 1.1015701293945312, | |
| "learning_rate": 4.504562210877338e-06, | |
| "loss": 0.8004, | |
| "step": 860 | |
| }, | |
| { | |
| "epoch": 1.3491649269311066, | |
| "grad_norm": 1.0872273445129395, | |
| "learning_rate": 4.50330258884034e-06, | |
| "loss": 0.7478, | |
| "step": 861 | |
| }, | |
| { | |
| "epoch": 1.350730688935282, | |
| "grad_norm": 1.1098262071609497, | |
| "learning_rate": 4.502041544165148e-06, | |
| "loss": 0.7894, | |
| "step": 862 | |
| }, | |
| { | |
| "epoch": 1.3522964509394573, | |
| "grad_norm": 1.0535573959350586, | |
| "learning_rate": 4.500779077747285e-06, | |
| "loss": 0.7185, | |
| "step": 863 | |
| }, | |
| { | |
| "epoch": 1.3538622129436326, | |
| "grad_norm": 1.0424474477767944, | |
| "learning_rate": 4.499515190483291e-06, | |
| "loss": 0.748, | |
| "step": 864 | |
| }, | |
| { | |
| "epoch": 1.355427974947808, | |
| "grad_norm": 1.005065679550171, | |
| "learning_rate": 4.498249883270708e-06, | |
| "loss": 0.7948, | |
| "step": 865 | |
| }, | |
| { | |
| "epoch": 1.3569937369519833, | |
| "grad_norm": 1.0052721500396729, | |
| "learning_rate": 4.49698315700809e-06, | |
| "loss": 0.7619, | |
| "step": 866 | |
| }, | |
| { | |
| "epoch": 1.3585594989561587, | |
| "grad_norm": 1.0953692197799683, | |
| "learning_rate": 4.495715012594999e-06, | |
| "loss": 0.7363, | |
| "step": 867 | |
| }, | |
| { | |
| "epoch": 1.360125260960334, | |
| "grad_norm": 1.1238093376159668, | |
| "learning_rate": 4.494445450932003e-06, | |
| "loss": 0.6576, | |
| "step": 868 | |
| }, | |
| { | |
| "epoch": 1.3616910229645094, | |
| "grad_norm": 0.8546008467674255, | |
| "learning_rate": 4.493174472920676e-06, | |
| "loss": 0.7513, | |
| "step": 869 | |
| }, | |
| { | |
| "epoch": 1.3632567849686847, | |
| "grad_norm": 1.0863126516342163, | |
| "learning_rate": 4.4919020794635995e-06, | |
| "loss": 0.6886, | |
| "step": 870 | |
| }, | |
| { | |
| "epoch": 1.36482254697286, | |
| "grad_norm": 0.9943820834159851, | |
| "learning_rate": 4.490628271464359e-06, | |
| "loss": 0.7253, | |
| "step": 871 | |
| }, | |
| { | |
| "epoch": 1.3663883089770354, | |
| "grad_norm": 0.9377979636192322, | |
| "learning_rate": 4.4893530498275436e-06, | |
| "loss": 0.7026, | |
| "step": 872 | |
| }, | |
| { | |
| "epoch": 1.3679540709812108, | |
| "grad_norm": 1.100799798965454, | |
| "learning_rate": 4.488076415458749e-06, | |
| "loss": 0.719, | |
| "step": 873 | |
| }, | |
| { | |
| "epoch": 1.3695198329853862, | |
| "grad_norm": 1.015074610710144, | |
| "learning_rate": 4.486798369264572e-06, | |
| "loss": 0.7513, | |
| "step": 874 | |
| }, | |
| { | |
| "epoch": 1.3710855949895615, | |
| "grad_norm": 1.047530174255371, | |
| "learning_rate": 4.485518912152611e-06, | |
| "loss": 0.7757, | |
| "step": 875 | |
| }, | |
| { | |
| "epoch": 1.372651356993737, | |
| "grad_norm": 1.0264610052108765, | |
| "learning_rate": 4.484238045031471e-06, | |
| "loss": 0.7712, | |
| "step": 876 | |
| }, | |
| { | |
| "epoch": 1.3742171189979122, | |
| "grad_norm": 0.9848430156707764, | |
| "learning_rate": 4.482955768810752e-06, | |
| "loss": 0.7569, | |
| "step": 877 | |
| }, | |
| { | |
| "epoch": 1.3757828810020878, | |
| "grad_norm": 1.070816159248352, | |
| "learning_rate": 4.48167208440106e-06, | |
| "loss": 0.7857, | |
| "step": 878 | |
| }, | |
| { | |
| "epoch": 1.377348643006263, | |
| "grad_norm": 1.0134745836257935, | |
| "learning_rate": 4.480386992713998e-06, | |
| "loss": 0.7372, | |
| "step": 879 | |
| }, | |
| { | |
| "epoch": 1.3789144050104385, | |
| "grad_norm": 1.0736140012741089, | |
| "learning_rate": 4.47910049466217e-06, | |
| "loss": 0.7794, | |
| "step": 880 | |
| }, | |
| { | |
| "epoch": 1.3804801670146138, | |
| "grad_norm": 1.0163054466247559, | |
| "learning_rate": 4.477812591159176e-06, | |
| "loss": 0.7341, | |
| "step": 881 | |
| }, | |
| { | |
| "epoch": 1.3820459290187892, | |
| "grad_norm": 1.074578881263733, | |
| "learning_rate": 4.476523283119619e-06, | |
| "loss": 0.6924, | |
| "step": 882 | |
| }, | |
| { | |
| "epoch": 1.3836116910229646, | |
| "grad_norm": 1.183667540550232, | |
| "learning_rate": 4.475232571459095e-06, | |
| "loss": 0.7866, | |
| "step": 883 | |
| }, | |
| { | |
| "epoch": 1.38517745302714, | |
| "grad_norm": 1.030827522277832, | |
| "learning_rate": 4.473940457094199e-06, | |
| "loss": 0.7845, | |
| "step": 884 | |
| }, | |
| { | |
| "epoch": 1.3867432150313153, | |
| "grad_norm": 1.0043658018112183, | |
| "learning_rate": 4.47264694094252e-06, | |
| "loss": 0.7399, | |
| "step": 885 | |
| }, | |
| { | |
| "epoch": 1.3883089770354906, | |
| "grad_norm": 1.1135472059249878, | |
| "learning_rate": 4.471352023922645e-06, | |
| "loss": 0.771, | |
| "step": 886 | |
| }, | |
| { | |
| "epoch": 1.389874739039666, | |
| "grad_norm": 1.0338670015335083, | |
| "learning_rate": 4.470055706954154e-06, | |
| "loss": 0.7282, | |
| "step": 887 | |
| }, | |
| { | |
| "epoch": 1.3914405010438413, | |
| "grad_norm": 0.918855607509613, | |
| "learning_rate": 4.468757990957623e-06, | |
| "loss": 0.728, | |
| "step": 888 | |
| }, | |
| { | |
| "epoch": 1.3930062630480167, | |
| "grad_norm": 1.06833016872406, | |
| "learning_rate": 4.467458876854619e-06, | |
| "loss": 0.7012, | |
| "step": 889 | |
| }, | |
| { | |
| "epoch": 1.394572025052192, | |
| "grad_norm": 1.0103005170822144, | |
| "learning_rate": 4.4661583655677045e-06, | |
| "loss": 0.7074, | |
| "step": 890 | |
| }, | |
| { | |
| "epoch": 1.3961377870563674, | |
| "grad_norm": 1.0418543815612793, | |
| "learning_rate": 4.464856458020433e-06, | |
| "loss": 0.6858, | |
| "step": 891 | |
| }, | |
| { | |
| "epoch": 1.3977035490605427, | |
| "grad_norm": 0.9720668196678162, | |
| "learning_rate": 4.463553155137348e-06, | |
| "loss": 0.7024, | |
| "step": 892 | |
| }, | |
| { | |
| "epoch": 1.399269311064718, | |
| "grad_norm": 1.1870561838150024, | |
| "learning_rate": 4.462248457843986e-06, | |
| "loss": 0.8069, | |
| "step": 893 | |
| }, | |
| { | |
| "epoch": 1.4008350730688934, | |
| "grad_norm": 0.9888094663619995, | |
| "learning_rate": 4.460942367066874e-06, | |
| "loss": 0.7281, | |
| "step": 894 | |
| }, | |
| { | |
| "epoch": 1.402400835073069, | |
| "grad_norm": 0.9120670557022095, | |
| "learning_rate": 4.459634883733527e-06, | |
| "loss": 0.7992, | |
| "step": 895 | |
| }, | |
| { | |
| "epoch": 1.4039665970772441, | |
| "grad_norm": 1.0239481925964355, | |
| "learning_rate": 4.45832600877245e-06, | |
| "loss": 0.7312, | |
| "step": 896 | |
| }, | |
| { | |
| "epoch": 1.4055323590814197, | |
| "grad_norm": 0.9379130005836487, | |
| "learning_rate": 4.4570157431131355e-06, | |
| "loss": 0.7019, | |
| "step": 897 | |
| }, | |
| { | |
| "epoch": 1.407098121085595, | |
| "grad_norm": 1.0789109468460083, | |
| "learning_rate": 4.455704087686063e-06, | |
| "loss": 0.8149, | |
| "step": 898 | |
| }, | |
| { | |
| "epoch": 1.4086638830897704, | |
| "grad_norm": 1.0521917343139648, | |
| "learning_rate": 4.454391043422703e-06, | |
| "loss": 0.7873, | |
| "step": 899 | |
| }, | |
| { | |
| "epoch": 1.4102296450939458, | |
| "grad_norm": 0.9582643508911133, | |
| "learning_rate": 4.453076611255507e-06, | |
| "loss": 0.7815, | |
| "step": 900 | |
| }, | |
| { | |
| "epoch": 1.4117954070981211, | |
| "grad_norm": 1.1096739768981934, | |
| "learning_rate": 4.451760792117914e-06, | |
| "loss": 0.7636, | |
| "step": 901 | |
| }, | |
| { | |
| "epoch": 1.4133611691022965, | |
| "grad_norm": 1.043574333190918, | |
| "learning_rate": 4.450443586944349e-06, | |
| "loss": 0.7411, | |
| "step": 902 | |
| }, | |
| { | |
| "epoch": 1.4149269311064718, | |
| "grad_norm": 1.0280033349990845, | |
| "learning_rate": 4.449124996670221e-06, | |
| "loss": 0.6525, | |
| "step": 903 | |
| }, | |
| { | |
| "epoch": 1.4164926931106472, | |
| "grad_norm": 1.0329992771148682, | |
| "learning_rate": 4.44780502223192e-06, | |
| "loss": 0.7133, | |
| "step": 904 | |
| }, | |
| { | |
| "epoch": 1.4180584551148225, | |
| "grad_norm": 1.016168475151062, | |
| "learning_rate": 4.446483664566825e-06, | |
| "loss": 0.7098, | |
| "step": 905 | |
| }, | |
| { | |
| "epoch": 1.4196242171189979, | |
| "grad_norm": 1.2022786140441895, | |
| "learning_rate": 4.4451609246132895e-06, | |
| "loss": 0.7303, | |
| "step": 906 | |
| }, | |
| { | |
| "epoch": 1.4211899791231732, | |
| "grad_norm": 1.0311683416366577, | |
| "learning_rate": 4.443836803310655e-06, | |
| "loss": 0.7475, | |
| "step": 907 | |
| }, | |
| { | |
| "epoch": 1.4227557411273486, | |
| "grad_norm": 1.0073823928833008, | |
| "learning_rate": 4.442511301599241e-06, | |
| "loss": 0.7513, | |
| "step": 908 | |
| }, | |
| { | |
| "epoch": 1.424321503131524, | |
| "grad_norm": 1.0702322721481323, | |
| "learning_rate": 4.441184420420347e-06, | |
| "loss": 0.6918, | |
| "step": 909 | |
| }, | |
| { | |
| "epoch": 1.4258872651356993, | |
| "grad_norm": 1.1416171789169312, | |
| "learning_rate": 4.439856160716254e-06, | |
| "loss": 0.6324, | |
| "step": 910 | |
| }, | |
| { | |
| "epoch": 1.4274530271398747, | |
| "grad_norm": 1.0560253858566284, | |
| "learning_rate": 4.438526523430221e-06, | |
| "loss": 0.749, | |
| "step": 911 | |
| }, | |
| { | |
| "epoch": 1.4290187891440502, | |
| "grad_norm": 1.021008849143982, | |
| "learning_rate": 4.4371955095064835e-06, | |
| "loss": 0.7666, | |
| "step": 912 | |
| }, | |
| { | |
| "epoch": 1.4305845511482254, | |
| "grad_norm": 1.151904821395874, | |
| "learning_rate": 4.435863119890258e-06, | |
| "loss": 0.6904, | |
| "step": 913 | |
| }, | |
| { | |
| "epoch": 1.432150313152401, | |
| "grad_norm": 1.1216702461242676, | |
| "learning_rate": 4.434529355527736e-06, | |
| "loss": 0.7068, | |
| "step": 914 | |
| }, | |
| { | |
| "epoch": 1.433716075156576, | |
| "grad_norm": 1.1130588054656982, | |
| "learning_rate": 4.433194217366085e-06, | |
| "loss": 0.6524, | |
| "step": 915 | |
| }, | |
| { | |
| "epoch": 1.4352818371607516, | |
| "grad_norm": 1.07612144947052, | |
| "learning_rate": 4.431857706353449e-06, | |
| "loss": 0.8078, | |
| "step": 916 | |
| }, | |
| { | |
| "epoch": 1.436847599164927, | |
| "grad_norm": 1.0220447778701782, | |
| "learning_rate": 4.430519823438946e-06, | |
| "loss": 0.6736, | |
| "step": 917 | |
| }, | |
| { | |
| "epoch": 1.4384133611691023, | |
| "grad_norm": 1.0657141208648682, | |
| "learning_rate": 4.429180569572669e-06, | |
| "loss": 0.7457, | |
| "step": 918 | |
| }, | |
| { | |
| "epoch": 1.4399791231732777, | |
| "grad_norm": 1.0646854639053345, | |
| "learning_rate": 4.427839945705684e-06, | |
| "loss": 0.7925, | |
| "step": 919 | |
| }, | |
| { | |
| "epoch": 1.441544885177453, | |
| "grad_norm": 1.0619502067565918, | |
| "learning_rate": 4.426497952790031e-06, | |
| "loss": 0.6968, | |
| "step": 920 | |
| }, | |
| { | |
| "epoch": 1.4431106471816284, | |
| "grad_norm": 0.9684338569641113, | |
| "learning_rate": 4.425154591778722e-06, | |
| "loss": 0.7411, | |
| "step": 921 | |
| }, | |
| { | |
| "epoch": 1.4446764091858038, | |
| "grad_norm": 1.1225167512893677, | |
| "learning_rate": 4.423809863625739e-06, | |
| "loss": 0.7623, | |
| "step": 922 | |
| }, | |
| { | |
| "epoch": 1.446242171189979, | |
| "grad_norm": 0.9310746788978577, | |
| "learning_rate": 4.422463769286036e-06, | |
| "loss": 0.8219, | |
| "step": 923 | |
| }, | |
| { | |
| "epoch": 1.4478079331941545, | |
| "grad_norm": 1.1107957363128662, | |
| "learning_rate": 4.4211163097155375e-06, | |
| "loss": 0.7755, | |
| "step": 924 | |
| }, | |
| { | |
| "epoch": 1.4493736951983298, | |
| "grad_norm": 0.9827596545219421, | |
| "learning_rate": 4.419767485871136e-06, | |
| "loss": 0.7102, | |
| "step": 925 | |
| }, | |
| { | |
| "epoch": 1.4509394572025052, | |
| "grad_norm": 1.1075087785720825, | |
| "learning_rate": 4.418417298710695e-06, | |
| "loss": 0.6399, | |
| "step": 926 | |
| }, | |
| { | |
| "epoch": 1.4525052192066805, | |
| "grad_norm": 0.9575802087783813, | |
| "learning_rate": 4.417065749193045e-06, | |
| "loss": 0.721, | |
| "step": 927 | |
| }, | |
| { | |
| "epoch": 1.4540709812108559, | |
| "grad_norm": 1.0016006231307983, | |
| "learning_rate": 4.415712838277985e-06, | |
| "loss": 0.762, | |
| "step": 928 | |
| }, | |
| { | |
| "epoch": 1.4556367432150314, | |
| "grad_norm": 1.1907106637954712, | |
| "learning_rate": 4.414358566926279e-06, | |
| "loss": 0.7904, | |
| "step": 929 | |
| }, | |
| { | |
| "epoch": 1.4572025052192066, | |
| "grad_norm": 1.0011543035507202, | |
| "learning_rate": 4.413002936099656e-06, | |
| "loss": 0.7179, | |
| "step": 930 | |
| }, | |
| { | |
| "epoch": 1.4587682672233822, | |
| "grad_norm": 0.980684757232666, | |
| "learning_rate": 4.411645946760817e-06, | |
| "loss": 0.6845, | |
| "step": 931 | |
| }, | |
| { | |
| "epoch": 1.4603340292275573, | |
| "grad_norm": 0.9458135366439819, | |
| "learning_rate": 4.4102875998734176e-06, | |
| "loss": 0.6911, | |
| "step": 932 | |
| }, | |
| { | |
| "epoch": 1.4618997912317329, | |
| "grad_norm": 1.020966649055481, | |
| "learning_rate": 4.408927896402087e-06, | |
| "loss": 0.7284, | |
| "step": 933 | |
| }, | |
| { | |
| "epoch": 1.4634655532359082, | |
| "grad_norm": 1.041833519935608, | |
| "learning_rate": 4.407566837312413e-06, | |
| "loss": 0.7451, | |
| "step": 934 | |
| }, | |
| { | |
| "epoch": 1.4650313152400836, | |
| "grad_norm": 0.9076465964317322, | |
| "learning_rate": 4.406204423570946e-06, | |
| "loss": 0.7553, | |
| "step": 935 | |
| }, | |
| { | |
| "epoch": 1.466597077244259, | |
| "grad_norm": 1.1963952779769897, | |
| "learning_rate": 4.404840656145199e-06, | |
| "loss": 0.7437, | |
| "step": 936 | |
| }, | |
| { | |
| "epoch": 1.4681628392484343, | |
| "grad_norm": 1.0181536674499512, | |
| "learning_rate": 4.403475536003648e-06, | |
| "loss": 0.7283, | |
| "step": 937 | |
| }, | |
| { | |
| "epoch": 1.4697286012526096, | |
| "grad_norm": 0.9563003778457642, | |
| "learning_rate": 4.402109064115727e-06, | |
| "loss": 0.7375, | |
| "step": 938 | |
| }, | |
| { | |
| "epoch": 1.471294363256785, | |
| "grad_norm": 1.05129873752594, | |
| "learning_rate": 4.400741241451832e-06, | |
| "loss": 0.6805, | |
| "step": 939 | |
| }, | |
| { | |
| "epoch": 1.4728601252609603, | |
| "grad_norm": 1.0912166833877563, | |
| "learning_rate": 4.399372068983317e-06, | |
| "loss": 0.7076, | |
| "step": 940 | |
| }, | |
| { | |
| "epoch": 1.4744258872651357, | |
| "grad_norm": 1.108500599861145, | |
| "learning_rate": 4.398001547682494e-06, | |
| "loss": 0.6836, | |
| "step": 941 | |
| }, | |
| { | |
| "epoch": 1.475991649269311, | |
| "grad_norm": 1.0741651058197021, | |
| "learning_rate": 4.396629678522636e-06, | |
| "loss": 0.7405, | |
| "step": 942 | |
| }, | |
| { | |
| "epoch": 1.4775574112734864, | |
| "grad_norm": 1.0603930950164795, | |
| "learning_rate": 4.39525646247797e-06, | |
| "loss": 0.7602, | |
| "step": 943 | |
| }, | |
| { | |
| "epoch": 1.4791231732776617, | |
| "grad_norm": 0.9814990162849426, | |
| "learning_rate": 4.39388190052368e-06, | |
| "loss": 0.7324, | |
| "step": 944 | |
| }, | |
| { | |
| "epoch": 1.480688935281837, | |
| "grad_norm": 0.9643695950508118, | |
| "learning_rate": 4.392505993635906e-06, | |
| "loss": 0.7034, | |
| "step": 945 | |
| }, | |
| { | |
| "epoch": 1.4822546972860124, | |
| "grad_norm": 1.0490994453430176, | |
| "learning_rate": 4.391128742791743e-06, | |
| "loss": 0.7358, | |
| "step": 946 | |
| }, | |
| { | |
| "epoch": 1.4838204592901878, | |
| "grad_norm": 0.9223467111587524, | |
| "learning_rate": 4.389750148969244e-06, | |
| "loss": 0.698, | |
| "step": 947 | |
| }, | |
| { | |
| "epoch": 1.4853862212943634, | |
| "grad_norm": 1.1107758283615112, | |
| "learning_rate": 4.388370213147409e-06, | |
| "loss": 0.7264, | |
| "step": 948 | |
| }, | |
| { | |
| "epoch": 1.4869519832985385, | |
| "grad_norm": 1.05939781665802, | |
| "learning_rate": 4.386988936306196e-06, | |
| "loss": 0.7209, | |
| "step": 949 | |
| }, | |
| { | |
| "epoch": 1.488517745302714, | |
| "grad_norm": 1.024686574935913, | |
| "learning_rate": 4.385606319426512e-06, | |
| "loss": 0.7798, | |
| "step": 950 | |
| }, | |
| { | |
| "epoch": 1.4900835073068894, | |
| "grad_norm": 0.9920977354049683, | |
| "learning_rate": 4.3842223634902205e-06, | |
| "loss": 0.8045, | |
| "step": 951 | |
| }, | |
| { | |
| "epoch": 1.4916492693110648, | |
| "grad_norm": 0.950927197933197, | |
| "learning_rate": 4.382837069480131e-06, | |
| "loss": 0.6777, | |
| "step": 952 | |
| }, | |
| { | |
| "epoch": 1.4932150313152401, | |
| "grad_norm": 1.0072081089019775, | |
| "learning_rate": 4.3814504383800056e-06, | |
| "loss": 0.7458, | |
| "step": 953 | |
| }, | |
| { | |
| "epoch": 1.4947807933194155, | |
| "grad_norm": 0.9982030987739563, | |
| "learning_rate": 4.3800624711745546e-06, | |
| "loss": 0.8215, | |
| "step": 954 | |
| }, | |
| { | |
| "epoch": 1.4963465553235908, | |
| "grad_norm": 0.9682126641273499, | |
| "learning_rate": 4.3786731688494396e-06, | |
| "loss": 0.7157, | |
| "step": 955 | |
| }, | |
| { | |
| "epoch": 1.4979123173277662, | |
| "grad_norm": 1.1599830389022827, | |
| "learning_rate": 4.377282532391267e-06, | |
| "loss": 0.8091, | |
| "step": 956 | |
| }, | |
| { | |
| "epoch": 1.4994780793319415, | |
| "grad_norm": 1.0624948740005493, | |
| "learning_rate": 4.3758905627875934e-06, | |
| "loss": 0.7428, | |
| "step": 957 | |
| }, | |
| { | |
| "epoch": 1.501043841336117, | |
| "grad_norm": 1.14694082736969, | |
| "learning_rate": 4.374497261026921e-06, | |
| "loss": 0.7752, | |
| "step": 958 | |
| }, | |
| { | |
| "epoch": 1.5026096033402923, | |
| "grad_norm": 1.0355825424194336, | |
| "learning_rate": 4.373102628098697e-06, | |
| "loss": 0.7348, | |
| "step": 959 | |
| }, | |
| { | |
| "epoch": 1.5041753653444676, | |
| "grad_norm": 1.036026954650879, | |
| "learning_rate": 4.371706664993316e-06, | |
| "loss": 0.7536, | |
| "step": 960 | |
| }, | |
| { | |
| "epoch": 1.505741127348643, | |
| "grad_norm": 1.2319790124893188, | |
| "learning_rate": 4.370309372702116e-06, | |
| "loss": 0.6985, | |
| "step": 961 | |
| }, | |
| { | |
| "epoch": 1.5073068893528183, | |
| "grad_norm": 0.908687949180603, | |
| "learning_rate": 4.36891075221738e-06, | |
| "loss": 0.7173, | |
| "step": 962 | |
| }, | |
| { | |
| "epoch": 1.5088726513569939, | |
| "grad_norm": 1.0482099056243896, | |
| "learning_rate": 4.367510804532331e-06, | |
| "loss": 0.7514, | |
| "step": 963 | |
| }, | |
| { | |
| "epoch": 1.510438413361169, | |
| "grad_norm": 1.0434144735336304, | |
| "learning_rate": 4.36610953064114e-06, | |
| "loss": 0.754, | |
| "step": 964 | |
| }, | |
| { | |
| "epoch": 1.5120041753653446, | |
| "grad_norm": 1.0122450590133667, | |
| "learning_rate": 4.364706931538914e-06, | |
| "loss": 0.7767, | |
| "step": 965 | |
| }, | |
| { | |
| "epoch": 1.5135699373695197, | |
| "grad_norm": 1.0471900701522827, | |
| "learning_rate": 4.363303008221705e-06, | |
| "loss": 0.7337, | |
| "step": 966 | |
| }, | |
| { | |
| "epoch": 1.5151356993736953, | |
| "grad_norm": 1.0691189765930176, | |
| "learning_rate": 4.3618977616865045e-06, | |
| "loss": 0.8047, | |
| "step": 967 | |
| }, | |
| { | |
| "epoch": 1.5167014613778704, | |
| "grad_norm": 1.015114188194275, | |
| "learning_rate": 4.360491192931242e-06, | |
| "loss": 0.726, | |
| "step": 968 | |
| }, | |
| { | |
| "epoch": 1.518267223382046, | |
| "grad_norm": 0.9770560264587402, | |
| "learning_rate": 4.359083302954788e-06, | |
| "loss": 0.7692, | |
| "step": 969 | |
| }, | |
| { | |
| "epoch": 1.5198329853862211, | |
| "grad_norm": 1.079991340637207, | |
| "learning_rate": 4.357674092756951e-06, | |
| "loss": 0.8424, | |
| "step": 970 | |
| }, | |
| { | |
| "epoch": 1.5213987473903967, | |
| "grad_norm": 1.068708896636963, | |
| "learning_rate": 4.356263563338476e-06, | |
| "loss": 0.758, | |
| "step": 971 | |
| }, | |
| { | |
| "epoch": 1.522964509394572, | |
| "grad_norm": 1.1005477905273438, | |
| "learning_rate": 4.354851715701046e-06, | |
| "loss": 0.7209, | |
| "step": 972 | |
| }, | |
| { | |
| "epoch": 1.5245302713987474, | |
| "grad_norm": 0.9870902299880981, | |
| "learning_rate": 4.3534385508472786e-06, | |
| "loss": 0.7272, | |
| "step": 973 | |
| }, | |
| { | |
| "epoch": 1.5260960334029228, | |
| "grad_norm": 0.9616238474845886, | |
| "learning_rate": 4.352024069780729e-06, | |
| "loss": 0.8124, | |
| "step": 974 | |
| }, | |
| { | |
| "epoch": 1.5276617954070981, | |
| "grad_norm": 1.0341622829437256, | |
| "learning_rate": 4.3506082735058855e-06, | |
| "loss": 0.7434, | |
| "step": 975 | |
| }, | |
| { | |
| "epoch": 1.5292275574112735, | |
| "grad_norm": 1.0157302618026733, | |
| "learning_rate": 4.349191163028171e-06, | |
| "loss": 0.7534, | |
| "step": 976 | |
| }, | |
| { | |
| "epoch": 1.5307933194154488, | |
| "grad_norm": 1.041379690170288, | |
| "learning_rate": 4.347772739353939e-06, | |
| "loss": 0.6942, | |
| "step": 977 | |
| }, | |
| { | |
| "epoch": 1.5323590814196242, | |
| "grad_norm": 1.016205072402954, | |
| "learning_rate": 4.346353003490481e-06, | |
| "loss": 0.6707, | |
| "step": 978 | |
| }, | |
| { | |
| "epoch": 1.5339248434237995, | |
| "grad_norm": 1.0034714937210083, | |
| "learning_rate": 4.344931956446017e-06, | |
| "loss": 0.6358, | |
| "step": 979 | |
| }, | |
| { | |
| "epoch": 1.535490605427975, | |
| "grad_norm": 0.9671767354011536, | |
| "learning_rate": 4.343509599229697e-06, | |
| "loss": 0.6333, | |
| "step": 980 | |
| }, | |
| { | |
| "epoch": 1.5370563674321502, | |
| "grad_norm": 1.1384506225585938, | |
| "learning_rate": 4.342085932851605e-06, | |
| "loss": 0.803, | |
| "step": 981 | |
| }, | |
| { | |
| "epoch": 1.5386221294363258, | |
| "grad_norm": 1.023031234741211, | |
| "learning_rate": 4.34066095832275e-06, | |
| "loss": 0.7801, | |
| "step": 982 | |
| }, | |
| { | |
| "epoch": 1.540187891440501, | |
| "grad_norm": 1.0675241947174072, | |
| "learning_rate": 4.339234676655075e-06, | |
| "loss": 0.7335, | |
| "step": 983 | |
| }, | |
| { | |
| "epoch": 1.5417536534446765, | |
| "grad_norm": 1.0397543907165527, | |
| "learning_rate": 4.337807088861447e-06, | |
| "loss": 0.7722, | |
| "step": 984 | |
| }, | |
| { | |
| "epoch": 1.5433194154488517, | |
| "grad_norm": 1.0892809629440308, | |
| "learning_rate": 4.3363781959556635e-06, | |
| "loss": 0.7603, | |
| "step": 985 | |
| }, | |
| { | |
| "epoch": 1.5448851774530272, | |
| "grad_norm": 0.9335037469863892, | |
| "learning_rate": 4.334947998952448e-06, | |
| "loss": 0.7678, | |
| "step": 986 | |
| }, | |
| { | |
| "epoch": 1.5464509394572024, | |
| "grad_norm": 1.0991342067718506, | |
| "learning_rate": 4.333516498867449e-06, | |
| "loss": 0.7461, | |
| "step": 987 | |
| }, | |
| { | |
| "epoch": 1.548016701461378, | |
| "grad_norm": 1.112265944480896, | |
| "learning_rate": 4.332083696717242e-06, | |
| "loss": 0.7593, | |
| "step": 988 | |
| }, | |
| { | |
| "epoch": 1.5495824634655533, | |
| "grad_norm": 0.9525822997093201, | |
| "learning_rate": 4.330649593519325e-06, | |
| "loss": 0.7629, | |
| "step": 989 | |
| }, | |
| { | |
| "epoch": 1.5511482254697286, | |
| "grad_norm": 1.0624464750289917, | |
| "learning_rate": 4.329214190292122e-06, | |
| "loss": 0.7261, | |
| "step": 990 | |
| }, | |
| { | |
| "epoch": 1.552713987473904, | |
| "grad_norm": 1.1969516277313232, | |
| "learning_rate": 4.327777488054979e-06, | |
| "loss": 0.7483, | |
| "step": 991 | |
| }, | |
| { | |
| "epoch": 1.5542797494780793, | |
| "grad_norm": 1.0239863395690918, | |
| "learning_rate": 4.326339487828167e-06, | |
| "loss": 0.7265, | |
| "step": 992 | |
| }, | |
| { | |
| "epoch": 1.5558455114822547, | |
| "grad_norm": 1.0881083011627197, | |
| "learning_rate": 4.324900190632874e-06, | |
| "loss": 0.724, | |
| "step": 993 | |
| }, | |
| { | |
| "epoch": 1.55741127348643, | |
| "grad_norm": 1.098827838897705, | |
| "learning_rate": 4.323459597491214e-06, | |
| "loss": 0.7222, | |
| "step": 994 | |
| }, | |
| { | |
| "epoch": 1.5589770354906054, | |
| "grad_norm": 1.105057716369629, | |
| "learning_rate": 4.322017709426217e-06, | |
| "loss": 0.7827, | |
| "step": 995 | |
| }, | |
| { | |
| "epoch": 1.5605427974947808, | |
| "grad_norm": 0.9830679297447205, | |
| "learning_rate": 4.3205745274618365e-06, | |
| "loss": 0.7316, | |
| "step": 996 | |
| }, | |
| { | |
| "epoch": 1.562108559498956, | |
| "grad_norm": 0.9951156973838806, | |
| "learning_rate": 4.319130052622942e-06, | |
| "loss": 0.7181, | |
| "step": 997 | |
| }, | |
| { | |
| "epoch": 1.5636743215031315, | |
| "grad_norm": 1.0374284982681274, | |
| "learning_rate": 4.317684285935323e-06, | |
| "loss": 0.7355, | |
| "step": 998 | |
| }, | |
| { | |
| "epoch": 1.565240083507307, | |
| "grad_norm": 1.0139906406402588, | |
| "learning_rate": 4.316237228425683e-06, | |
| "loss": 0.7249, | |
| "step": 999 | |
| }, | |
| { | |
| "epoch": 1.5668058455114822, | |
| "grad_norm": 0.9555441737174988, | |
| "learning_rate": 4.3147888811216496e-06, | |
| "loss": 0.7387, | |
| "step": 1000 | |
| }, | |
| { | |
| "epoch": 1.5683716075156577, | |
| "grad_norm": 1.0600265264511108, | |
| "learning_rate": 4.313339245051758e-06, | |
| "loss": 0.7445, | |
| "step": 1001 | |
| }, | |
| { | |
| "epoch": 1.5699373695198329, | |
| "grad_norm": 1.0070478916168213, | |
| "learning_rate": 4.311888321245461e-06, | |
| "loss": 0.7605, | |
| "step": 1002 | |
| }, | |
| { | |
| "epoch": 1.5715031315240084, | |
| "grad_norm": 1.019710898399353, | |
| "learning_rate": 4.31043611073313e-06, | |
| "loss": 0.7262, | |
| "step": 1003 | |
| }, | |
| { | |
| "epoch": 1.5730688935281836, | |
| "grad_norm": 1.0476088523864746, | |
| "learning_rate": 4.308982614546045e-06, | |
| "loss": 0.6766, | |
| "step": 1004 | |
| }, | |
| { | |
| "epoch": 1.5746346555323592, | |
| "grad_norm": 1.0670830011367798, | |
| "learning_rate": 4.307527833716403e-06, | |
| "loss": 0.7465, | |
| "step": 1005 | |
| }, | |
| { | |
| "epoch": 1.5762004175365343, | |
| "grad_norm": 1.1807290315628052, | |
| "learning_rate": 4.30607176927731e-06, | |
| "loss": 0.7032, | |
| "step": 1006 | |
| }, | |
| { | |
| "epoch": 1.5777661795407099, | |
| "grad_norm": 1.0482438802719116, | |
| "learning_rate": 4.3046144222627855e-06, | |
| "loss": 0.7156, | |
| "step": 1007 | |
| }, | |
| { | |
| "epoch": 1.5793319415448852, | |
| "grad_norm": 0.9434391856193542, | |
| "learning_rate": 4.303155793707759e-06, | |
| "loss": 0.7816, | |
| "step": 1008 | |
| }, | |
| { | |
| "epoch": 1.5808977035490606, | |
| "grad_norm": 1.0497010946273804, | |
| "learning_rate": 4.3016958846480706e-06, | |
| "loss": 0.735, | |
| "step": 1009 | |
| }, | |
| { | |
| "epoch": 1.582463465553236, | |
| "grad_norm": 0.955949068069458, | |
| "learning_rate": 4.300234696120469e-06, | |
| "loss": 0.6414, | |
| "step": 1010 | |
| }, | |
| { | |
| "epoch": 1.5840292275574113, | |
| "grad_norm": 1.0462027788162231, | |
| "learning_rate": 4.298772229162613e-06, | |
| "loss": 0.6949, | |
| "step": 1011 | |
| }, | |
| { | |
| "epoch": 1.5855949895615866, | |
| "grad_norm": 1.0634411573410034, | |
| "learning_rate": 4.297308484813067e-06, | |
| "loss": 0.7736, | |
| "step": 1012 | |
| }, | |
| { | |
| "epoch": 1.587160751565762, | |
| "grad_norm": 1.0463027954101562, | |
| "learning_rate": 4.295843464111306e-06, | |
| "loss": 0.6812, | |
| "step": 1013 | |
| }, | |
| { | |
| "epoch": 1.5887265135699373, | |
| "grad_norm": 1.0409252643585205, | |
| "learning_rate": 4.294377168097706e-06, | |
| "loss": 0.7319, | |
| "step": 1014 | |
| }, | |
| { | |
| "epoch": 1.5902922755741127, | |
| "grad_norm": 0.9714667201042175, | |
| "learning_rate": 4.292909597813555e-06, | |
| "loss": 0.7473, | |
| "step": 1015 | |
| }, | |
| { | |
| "epoch": 1.5918580375782883, | |
| "grad_norm": 1.03997802734375, | |
| "learning_rate": 4.291440754301041e-06, | |
| "loss": 0.7261, | |
| "step": 1016 | |
| }, | |
| { | |
| "epoch": 1.5934237995824634, | |
| "grad_norm": 1.063293218612671, | |
| "learning_rate": 4.289970638603258e-06, | |
| "loss": 0.7909, | |
| "step": 1017 | |
| }, | |
| { | |
| "epoch": 1.594989561586639, | |
| "grad_norm": 1.110007643699646, | |
| "learning_rate": 4.288499251764205e-06, | |
| "loss": 0.7, | |
| "step": 1018 | |
| }, | |
| { | |
| "epoch": 1.596555323590814, | |
| "grad_norm": 0.981261134147644, | |
| "learning_rate": 4.2870265948287806e-06, | |
| "loss": 0.7516, | |
| "step": 1019 | |
| }, | |
| { | |
| "epoch": 1.5981210855949897, | |
| "grad_norm": 1.0653973817825317, | |
| "learning_rate": 4.2855526688427875e-06, | |
| "loss": 0.7278, | |
| "step": 1020 | |
| }, | |
| { | |
| "epoch": 1.5996868475991648, | |
| "grad_norm": 0.9212517738342285, | |
| "learning_rate": 4.284077474852928e-06, | |
| "loss": 0.7251, | |
| "step": 1021 | |
| }, | |
| { | |
| "epoch": 1.6012526096033404, | |
| "grad_norm": 1.0887659788131714, | |
| "learning_rate": 4.282601013906808e-06, | |
| "loss": 0.7271, | |
| "step": 1022 | |
| }, | |
| { | |
| "epoch": 1.6028183716075155, | |
| "grad_norm": 1.0753779411315918, | |
| "learning_rate": 4.281123287052928e-06, | |
| "loss": 0.7679, | |
| "step": 1023 | |
| }, | |
| { | |
| "epoch": 1.604384133611691, | |
| "grad_norm": 1.0282115936279297, | |
| "learning_rate": 4.2796442953406935e-06, | |
| "loss": 0.695, | |
| "step": 1024 | |
| }, | |
| { | |
| "epoch": 1.6059498956158664, | |
| "grad_norm": 0.9782281517982483, | |
| "learning_rate": 4.2781640398204036e-06, | |
| "loss": 0.7717, | |
| "step": 1025 | |
| }, | |
| { | |
| "epoch": 1.6075156576200418, | |
| "grad_norm": 1.06228768825531, | |
| "learning_rate": 4.276682521543256e-06, | |
| "loss": 0.8069, | |
| "step": 1026 | |
| }, | |
| { | |
| "epoch": 1.6090814196242171, | |
| "grad_norm": 0.8853397965431213, | |
| "learning_rate": 4.2751997415613465e-06, | |
| "loss": 0.8174, | |
| "step": 1027 | |
| }, | |
| { | |
| "epoch": 1.6106471816283925, | |
| "grad_norm": 1.1525455713272095, | |
| "learning_rate": 4.273715700927666e-06, | |
| "loss": 0.7149, | |
| "step": 1028 | |
| }, | |
| { | |
| "epoch": 1.6122129436325678, | |
| "grad_norm": 0.8743720650672913, | |
| "learning_rate": 4.272230400696101e-06, | |
| "loss": 0.7296, | |
| "step": 1029 | |
| }, | |
| { | |
| "epoch": 1.6137787056367432, | |
| "grad_norm": 1.1136301755905151, | |
| "learning_rate": 4.270743841921431e-06, | |
| "loss": 0.7539, | |
| "step": 1030 | |
| }, | |
| { | |
| "epoch": 1.6153444676409185, | |
| "grad_norm": 0.9830031991004944, | |
| "learning_rate": 4.269256025659332e-06, | |
| "loss": 0.6545, | |
| "step": 1031 | |
| }, | |
| { | |
| "epoch": 1.616910229645094, | |
| "grad_norm": 1.1221113204956055, | |
| "learning_rate": 4.267766952966369e-06, | |
| "loss": 0.6144, | |
| "step": 1032 | |
| }, | |
| { | |
| "epoch": 1.6184759916492695, | |
| "grad_norm": 0.975713849067688, | |
| "learning_rate": 4.266276624900004e-06, | |
| "loss": 0.746, | |
| "step": 1033 | |
| }, | |
| { | |
| "epoch": 1.6200417536534446, | |
| "grad_norm": 1.1131149530410767, | |
| "learning_rate": 4.264785042518587e-06, | |
| "loss": 0.705, | |
| "step": 1034 | |
| }, | |
| { | |
| "epoch": 1.6216075156576202, | |
| "grad_norm": 1.0574990510940552, | |
| "learning_rate": 4.263292206881361e-06, | |
| "loss": 0.7681, | |
| "step": 1035 | |
| }, | |
| { | |
| "epoch": 1.6231732776617953, | |
| "grad_norm": 0.9931074976921082, | |
| "learning_rate": 4.261798119048456e-06, | |
| "loss": 0.7854, | |
| "step": 1036 | |
| }, | |
| { | |
| "epoch": 1.6247390396659709, | |
| "grad_norm": 1.1598386764526367, | |
| "learning_rate": 4.260302780080896e-06, | |
| "loss": 0.7406, | |
| "step": 1037 | |
| }, | |
| { | |
| "epoch": 1.626304801670146, | |
| "grad_norm": 0.9910677671432495, | |
| "learning_rate": 4.258806191040587e-06, | |
| "loss": 0.7107, | |
| "step": 1038 | |
| }, | |
| { | |
| "epoch": 1.6278705636743216, | |
| "grad_norm": 1.1023072004318237, | |
| "learning_rate": 4.2573083529903294e-06, | |
| "loss": 0.7964, | |
| "step": 1039 | |
| }, | |
| { | |
| "epoch": 1.6294363256784967, | |
| "grad_norm": 0.991082489490509, | |
| "learning_rate": 4.255809266993806e-06, | |
| "loss": 0.762, | |
| "step": 1040 | |
| }, | |
| { | |
| "epoch": 1.6310020876826723, | |
| "grad_norm": 0.9281193017959595, | |
| "learning_rate": 4.254308934115589e-06, | |
| "loss": 0.7272, | |
| "step": 1041 | |
| }, | |
| { | |
| "epoch": 1.6325678496868476, | |
| "grad_norm": 1.20292067527771, | |
| "learning_rate": 4.252807355421133e-06, | |
| "loss": 0.7535, | |
| "step": 1042 | |
| }, | |
| { | |
| "epoch": 1.634133611691023, | |
| "grad_norm": 1.0362178087234497, | |
| "learning_rate": 4.251304531976778e-06, | |
| "loss": 0.7524, | |
| "step": 1043 | |
| }, | |
| { | |
| "epoch": 1.6356993736951984, | |
| "grad_norm": 1.0267268419265747, | |
| "learning_rate": 4.249800464849751e-06, | |
| "loss": 0.6588, | |
| "step": 1044 | |
| }, | |
| { | |
| "epoch": 1.6372651356993737, | |
| "grad_norm": 1.0531187057495117, | |
| "learning_rate": 4.248295155108158e-06, | |
| "loss": 0.7328, | |
| "step": 1045 | |
| }, | |
| { | |
| "epoch": 1.638830897703549, | |
| "grad_norm": 1.0008069276809692, | |
| "learning_rate": 4.246788603820989e-06, | |
| "loss": 0.719, | |
| "step": 1046 | |
| }, | |
| { | |
| "epoch": 1.6403966597077244, | |
| "grad_norm": 1.064780831336975, | |
| "learning_rate": 4.245280812058119e-06, | |
| "loss": 0.7388, | |
| "step": 1047 | |
| }, | |
| { | |
| "epoch": 1.6419624217118998, | |
| "grad_norm": 1.06492018699646, | |
| "learning_rate": 4.243771780890298e-06, | |
| "loss": 0.7495, | |
| "step": 1048 | |
| }, | |
| { | |
| "epoch": 1.6435281837160751, | |
| "grad_norm": 0.9472903609275818, | |
| "learning_rate": 4.24226151138916e-06, | |
| "loss": 0.7409, | |
| "step": 1049 | |
| }, | |
| { | |
| "epoch": 1.6450939457202505, | |
| "grad_norm": 1.0612895488739014, | |
| "learning_rate": 4.240750004627217e-06, | |
| "loss": 0.7605, | |
| "step": 1050 | |
| }, | |
| { | |
| "epoch": 1.6466597077244258, | |
| "grad_norm": 0.9634467959403992, | |
| "learning_rate": 4.239237261677863e-06, | |
| "loss": 0.7109, | |
| "step": 1051 | |
| }, | |
| { | |
| "epoch": 1.6482254697286014, | |
| "grad_norm": 1.1347044706344604, | |
| "learning_rate": 4.2377232836153635e-06, | |
| "loss": 0.693, | |
| "step": 1052 | |
| }, | |
| { | |
| "epoch": 1.6497912317327765, | |
| "grad_norm": 1.0814287662506104, | |
| "learning_rate": 4.236208071514865e-06, | |
| "loss": 0.7159, | |
| "step": 1053 | |
| }, | |
| { | |
| "epoch": 1.651356993736952, | |
| "grad_norm": 0.9427600502967834, | |
| "learning_rate": 4.234691626452392e-06, | |
| "loss": 0.7014, | |
| "step": 1054 | |
| }, | |
| { | |
| "epoch": 1.6529227557411272, | |
| "grad_norm": 1.0177332162857056, | |
| "learning_rate": 4.233173949504842e-06, | |
| "loss": 0.7717, | |
| "step": 1055 | |
| }, | |
| { | |
| "epoch": 1.6544885177453028, | |
| "grad_norm": 1.0621635913848877, | |
| "learning_rate": 4.231655041749985e-06, | |
| "loss": 0.7078, | |
| "step": 1056 | |
| }, | |
| { | |
| "epoch": 1.656054279749478, | |
| "grad_norm": 1.0474066734313965, | |
| "learning_rate": 4.230134904266472e-06, | |
| "loss": 0.7869, | |
| "step": 1057 | |
| }, | |
| { | |
| "epoch": 1.6576200417536535, | |
| "grad_norm": 0.9170332551002502, | |
| "learning_rate": 4.228613538133821e-06, | |
| "loss": 0.7806, | |
| "step": 1058 | |
| }, | |
| { | |
| "epoch": 1.6591858037578286, | |
| "grad_norm": 1.010465383529663, | |
| "learning_rate": 4.227090944432426e-06, | |
| "loss": 0.7352, | |
| "step": 1059 | |
| }, | |
| { | |
| "epoch": 1.6607515657620042, | |
| "grad_norm": 0.988068163394928, | |
| "learning_rate": 4.22556712424355e-06, | |
| "loss": 0.7173, | |
| "step": 1060 | |
| }, | |
| { | |
| "epoch": 1.6623173277661796, | |
| "grad_norm": 1.0753575563430786, | |
| "learning_rate": 4.22404207864933e-06, | |
| "loss": 0.6981, | |
| "step": 1061 | |
| }, | |
| { | |
| "epoch": 1.663883089770355, | |
| "grad_norm": 1.0075691938400269, | |
| "learning_rate": 4.222515808732769e-06, | |
| "loss": 0.7279, | |
| "step": 1062 | |
| }, | |
| { | |
| "epoch": 1.6654488517745303, | |
| "grad_norm": 1.0954983234405518, | |
| "learning_rate": 4.220988315577745e-06, | |
| "loss": 0.7675, | |
| "step": 1063 | |
| }, | |
| { | |
| "epoch": 1.6670146137787056, | |
| "grad_norm": 1.0368746519088745, | |
| "learning_rate": 4.2194596002690016e-06, | |
| "loss": 0.7466, | |
| "step": 1064 | |
| }, | |
| { | |
| "epoch": 1.668580375782881, | |
| "grad_norm": 1.083906650543213, | |
| "learning_rate": 4.217929663892149e-06, | |
| "loss": 0.7517, | |
| "step": 1065 | |
| }, | |
| { | |
| "epoch": 1.6701461377870563, | |
| "grad_norm": 1.1356595754623413, | |
| "learning_rate": 4.216398507533667e-06, | |
| "loss": 0.7601, | |
| "step": 1066 | |
| }, | |
| { | |
| "epoch": 1.6717118997912317, | |
| "grad_norm": 1.1610008478164673, | |
| "learning_rate": 4.2148661322809e-06, | |
| "loss": 0.7356, | |
| "step": 1067 | |
| }, | |
| { | |
| "epoch": 1.673277661795407, | |
| "grad_norm": 1.019673466682434, | |
| "learning_rate": 4.21333253922206e-06, | |
| "loss": 0.7618, | |
| "step": 1068 | |
| }, | |
| { | |
| "epoch": 1.6748434237995826, | |
| "grad_norm": 1.1470829248428345, | |
| "learning_rate": 4.2117977294462235e-06, | |
| "loss": 0.8001, | |
| "step": 1069 | |
| }, | |
| { | |
| "epoch": 1.6764091858037578, | |
| "grad_norm": 0.9750358462333679, | |
| "learning_rate": 4.210261704043327e-06, | |
| "loss": 0.6736, | |
| "step": 1070 | |
| }, | |
| { | |
| "epoch": 1.6779749478079333, | |
| "grad_norm": 1.061862826347351, | |
| "learning_rate": 4.208724464104176e-06, | |
| "loss": 0.7751, | |
| "step": 1071 | |
| }, | |
| { | |
| "epoch": 1.6795407098121085, | |
| "grad_norm": 1.0311503410339355, | |
| "learning_rate": 4.2071860107204346e-06, | |
| "loss": 0.7629, | |
| "step": 1072 | |
| }, | |
| { | |
| "epoch": 1.681106471816284, | |
| "grad_norm": 1.0023622512817383, | |
| "learning_rate": 4.2056463449846305e-06, | |
| "loss": 0.6935, | |
| "step": 1073 | |
| }, | |
| { | |
| "epoch": 1.6826722338204592, | |
| "grad_norm": 1.1624165773391724, | |
| "learning_rate": 4.204105467990153e-06, | |
| "loss": 0.7451, | |
| "step": 1074 | |
| }, | |
| { | |
| "epoch": 1.6842379958246347, | |
| "grad_norm": 0.9852432608604431, | |
| "learning_rate": 4.202563380831249e-06, | |
| "loss": 0.7446, | |
| "step": 1075 | |
| }, | |
| { | |
| "epoch": 1.6858037578288099, | |
| "grad_norm": 1.0009260177612305, | |
| "learning_rate": 4.201020084603027e-06, | |
| "loss": 0.726, | |
| "step": 1076 | |
| }, | |
| { | |
| "epoch": 1.6873695198329854, | |
| "grad_norm": 1.0374701023101807, | |
| "learning_rate": 4.199475580401452e-06, | |
| "loss": 0.6687, | |
| "step": 1077 | |
| }, | |
| { | |
| "epoch": 1.6889352818371608, | |
| "grad_norm": 1.1437046527862549, | |
| "learning_rate": 4.19792986932335e-06, | |
| "loss": 0.7802, | |
| "step": 1078 | |
| }, | |
| { | |
| "epoch": 1.6905010438413361, | |
| "grad_norm": 0.9866487383842468, | |
| "learning_rate": 4.1963829524664e-06, | |
| "loss": 0.7117, | |
| "step": 1079 | |
| }, | |
| { | |
| "epoch": 1.6920668058455115, | |
| "grad_norm": 0.9720652103424072, | |
| "learning_rate": 4.194834830929142e-06, | |
| "loss": 0.778, | |
| "step": 1080 | |
| }, | |
| { | |
| "epoch": 1.6936325678496869, | |
| "grad_norm": 1.009422779083252, | |
| "learning_rate": 4.1932855058109675e-06, | |
| "loss": 0.6783, | |
| "step": 1081 | |
| }, | |
| { | |
| "epoch": 1.6951983298538622, | |
| "grad_norm": 1.024781346321106, | |
| "learning_rate": 4.191734978212124e-06, | |
| "loss": 0.6697, | |
| "step": 1082 | |
| }, | |
| { | |
| "epoch": 1.6967640918580376, | |
| "grad_norm": 1.060354471206665, | |
| "learning_rate": 4.190183249233715e-06, | |
| "loss": 0.7151, | |
| "step": 1083 | |
| }, | |
| { | |
| "epoch": 1.698329853862213, | |
| "grad_norm": 1.118486762046814, | |
| "learning_rate": 4.1886303199776924e-06, | |
| "loss": 0.7408, | |
| "step": 1084 | |
| }, | |
| { | |
| "epoch": 1.6998956158663883, | |
| "grad_norm": 0.9334209561347961, | |
| "learning_rate": 4.187076191546865e-06, | |
| "loss": 0.7646, | |
| "step": 1085 | |
| }, | |
| { | |
| "epoch": 1.7014613778705638, | |
| "grad_norm": 1.0506161451339722, | |
| "learning_rate": 4.185520865044892e-06, | |
| "loss": 0.7299, | |
| "step": 1086 | |
| }, | |
| { | |
| "epoch": 1.703027139874739, | |
| "grad_norm": 1.1261264085769653, | |
| "learning_rate": 4.1839643415762825e-06, | |
| "loss": 0.6813, | |
| "step": 1087 | |
| }, | |
| { | |
| "epoch": 1.7045929018789145, | |
| "grad_norm": 0.9690954685211182, | |
| "learning_rate": 4.182406622246396e-06, | |
| "loss": 0.7417, | |
| "step": 1088 | |
| }, | |
| { | |
| "epoch": 1.7061586638830897, | |
| "grad_norm": 1.0872795581817627, | |
| "learning_rate": 4.180847708161442e-06, | |
| "loss": 0.676, | |
| "step": 1089 | |
| }, | |
| { | |
| "epoch": 1.7077244258872653, | |
| "grad_norm": 1.054091453552246, | |
| "learning_rate": 4.179287600428476e-06, | |
| "loss": 0.785, | |
| "step": 1090 | |
| }, | |
| { | |
| "epoch": 1.7092901878914404, | |
| "grad_norm": 1.109984278678894, | |
| "learning_rate": 4.177726300155404e-06, | |
| "loss": 0.8033, | |
| "step": 1091 | |
| }, | |
| { | |
| "epoch": 1.710855949895616, | |
| "grad_norm": 1.0984348058700562, | |
| "learning_rate": 4.176163808450978e-06, | |
| "loss": 0.6964, | |
| "step": 1092 | |
| }, | |
| { | |
| "epoch": 1.712421711899791, | |
| "grad_norm": 1.0788499116897583, | |
| "learning_rate": 4.174600126424797e-06, | |
| "loss": 0.7596, | |
| "step": 1093 | |
| }, | |
| { | |
| "epoch": 1.7139874739039667, | |
| "grad_norm": 1.0846409797668457, | |
| "learning_rate": 4.173035255187302e-06, | |
| "loss": 0.7322, | |
| "step": 1094 | |
| }, | |
| { | |
| "epoch": 1.715553235908142, | |
| "grad_norm": 1.1376627683639526, | |
| "learning_rate": 4.171469195849782e-06, | |
| "loss": 0.6934, | |
| "step": 1095 | |
| }, | |
| { | |
| "epoch": 1.7171189979123174, | |
| "grad_norm": 1.003432035446167, | |
| "learning_rate": 4.169901949524368e-06, | |
| "loss": 0.7663, | |
| "step": 1096 | |
| }, | |
| { | |
| "epoch": 1.7186847599164927, | |
| "grad_norm": 0.9824592471122742, | |
| "learning_rate": 4.168333517324036e-06, | |
| "loss": 0.8253, | |
| "step": 1097 | |
| }, | |
| { | |
| "epoch": 1.720250521920668, | |
| "grad_norm": 1.0256636142730713, | |
| "learning_rate": 4.1667639003626e-06, | |
| "loss": 0.6614, | |
| "step": 1098 | |
| }, | |
| { | |
| "epoch": 1.7218162839248434, | |
| "grad_norm": 1.0290439128875732, | |
| "learning_rate": 4.16519309975472e-06, | |
| "loss": 0.7403, | |
| "step": 1099 | |
| }, | |
| { | |
| "epoch": 1.7233820459290188, | |
| "grad_norm": 1.0358202457427979, | |
| "learning_rate": 4.163621116615892e-06, | |
| "loss": 0.7332, | |
| "step": 1100 | |
| }, | |
| { | |
| "epoch": 1.7249478079331941, | |
| "grad_norm": 1.1046626567840576, | |
| "learning_rate": 4.162047952062456e-06, | |
| "loss": 0.6672, | |
| "step": 1101 | |
| }, | |
| { | |
| "epoch": 1.7265135699373695, | |
| "grad_norm": 1.116408109664917, | |
| "learning_rate": 4.16047360721159e-06, | |
| "loss": 0.6997, | |
| "step": 1102 | |
| }, | |
| { | |
| "epoch": 1.7280793319415448, | |
| "grad_norm": 0.98244309425354, | |
| "learning_rate": 4.158898083181308e-06, | |
| "loss": 0.754, | |
| "step": 1103 | |
| }, | |
| { | |
| "epoch": 1.7296450939457202, | |
| "grad_norm": 1.033761978149414, | |
| "learning_rate": 4.1573213810904635e-06, | |
| "loss": 0.7317, | |
| "step": 1104 | |
| }, | |
| { | |
| "epoch": 1.7312108559498958, | |
| "grad_norm": 1.1415889263153076, | |
| "learning_rate": 4.155743502058745e-06, | |
| "loss": 0.8361, | |
| "step": 1105 | |
| }, | |
| { | |
| "epoch": 1.732776617954071, | |
| "grad_norm": 0.9897739291191101, | |
| "learning_rate": 4.154164447206679e-06, | |
| "loss": 0.7361, | |
| "step": 1106 | |
| }, | |
| { | |
| "epoch": 1.7343423799582465, | |
| "grad_norm": 1.0837332010269165, | |
| "learning_rate": 4.1525842176556235e-06, | |
| "loss": 0.7007, | |
| "step": 1107 | |
| }, | |
| { | |
| "epoch": 1.7359081419624216, | |
| "grad_norm": 1.021332025527954, | |
| "learning_rate": 4.151002814527774e-06, | |
| "loss": 0.723, | |
| "step": 1108 | |
| }, | |
| { | |
| "epoch": 1.7374739039665972, | |
| "grad_norm": 0.9808626770973206, | |
| "learning_rate": 4.14942023894616e-06, | |
| "loss": 0.6121, | |
| "step": 1109 | |
| }, | |
| { | |
| "epoch": 1.7390396659707723, | |
| "grad_norm": 1.0266727209091187, | |
| "learning_rate": 4.14783649203464e-06, | |
| "loss": 0.8221, | |
| "step": 1110 | |
| }, | |
| { | |
| "epoch": 1.7406054279749479, | |
| "grad_norm": 1.0880744457244873, | |
| "learning_rate": 4.146251574917907e-06, | |
| "loss": 0.7159, | |
| "step": 1111 | |
| }, | |
| { | |
| "epoch": 1.742171189979123, | |
| "grad_norm": 1.0125631093978882, | |
| "learning_rate": 4.144665488721483e-06, | |
| "loss": 0.7228, | |
| "step": 1112 | |
| }, | |
| { | |
| "epoch": 1.7437369519832986, | |
| "grad_norm": 0.9339308142662048, | |
| "learning_rate": 4.143078234571723e-06, | |
| "loss": 0.7472, | |
| "step": 1113 | |
| }, | |
| { | |
| "epoch": 1.745302713987474, | |
| "grad_norm": 1.1234889030456543, | |
| "learning_rate": 4.141489813595809e-06, | |
| "loss": 0.7875, | |
| "step": 1114 | |
| }, | |
| { | |
| "epoch": 1.7468684759916493, | |
| "grad_norm": 1.0862795114517212, | |
| "learning_rate": 4.139900226921753e-06, | |
| "loss": 0.7219, | |
| "step": 1115 | |
| }, | |
| { | |
| "epoch": 1.7484342379958246, | |
| "grad_norm": 1.027912974357605, | |
| "learning_rate": 4.138309475678393e-06, | |
| "loss": 0.6616, | |
| "step": 1116 | |
| }, | |
| { | |
| "epoch": 1.75, | |
| "grad_norm": 1.0271416902542114, | |
| "learning_rate": 4.136717560995398e-06, | |
| "loss": 0.7245, | |
| "step": 1117 | |
| }, | |
| { | |
| "epoch": 1.7515657620041754, | |
| "grad_norm": 0.9430286288261414, | |
| "learning_rate": 4.135124484003258e-06, | |
| "loss": 0.7347, | |
| "step": 1118 | |
| }, | |
| { | |
| "epoch": 1.7531315240083507, | |
| "grad_norm": 1.1191871166229248, | |
| "learning_rate": 4.133530245833292e-06, | |
| "loss": 0.6713, | |
| "step": 1119 | |
| }, | |
| { | |
| "epoch": 1.754697286012526, | |
| "grad_norm": 1.0734976530075073, | |
| "learning_rate": 4.131934847617642e-06, | |
| "loss": 0.7268, | |
| "step": 1120 | |
| }, | |
| { | |
| "epoch": 1.7562630480167014, | |
| "grad_norm": 1.0727618932724, | |
| "learning_rate": 4.130338290489276e-06, | |
| "loss": 0.7855, | |
| "step": 1121 | |
| }, | |
| { | |
| "epoch": 1.757828810020877, | |
| "grad_norm": 1.0351287126541138, | |
| "learning_rate": 4.1287405755819805e-06, | |
| "loss": 0.7297, | |
| "step": 1122 | |
| }, | |
| { | |
| "epoch": 1.7593945720250521, | |
| "grad_norm": 1.046382188796997, | |
| "learning_rate": 4.127141704030371e-06, | |
| "loss": 0.6508, | |
| "step": 1123 | |
| }, | |
| { | |
| "epoch": 1.7609603340292277, | |
| "grad_norm": 1.0040491819381714, | |
| "learning_rate": 4.125541676969876e-06, | |
| "loss": 0.7707, | |
| "step": 1124 | |
| }, | |
| { | |
| "epoch": 1.7625260960334028, | |
| "grad_norm": 1.1626756191253662, | |
| "learning_rate": 4.123940495536752e-06, | |
| "loss": 0.7366, | |
| "step": 1125 | |
| }, | |
| { | |
| "epoch": 1.7640918580375784, | |
| "grad_norm": 0.9720292091369629, | |
| "learning_rate": 4.122338160868071e-06, | |
| "loss": 0.7641, | |
| "step": 1126 | |
| }, | |
| { | |
| "epoch": 1.7656576200417535, | |
| "grad_norm": 0.9957835078239441, | |
| "learning_rate": 4.1207346741017265e-06, | |
| "loss": 0.7398, | |
| "step": 1127 | |
| }, | |
| { | |
| "epoch": 1.767223382045929, | |
| "grad_norm": 0.972172737121582, | |
| "learning_rate": 4.1191300363764276e-06, | |
| "loss": 0.6565, | |
| "step": 1128 | |
| }, | |
| { | |
| "epoch": 1.7687891440501042, | |
| "grad_norm": 1.0128676891326904, | |
| "learning_rate": 4.117524248831702e-06, | |
| "loss": 0.7433, | |
| "step": 1129 | |
| }, | |
| { | |
| "epoch": 1.7703549060542798, | |
| "grad_norm": 1.1161550283432007, | |
| "learning_rate": 4.115917312607895e-06, | |
| "loss": 0.8241, | |
| "step": 1130 | |
| }, | |
| { | |
| "epoch": 1.7719206680584552, | |
| "grad_norm": 1.009810209274292, | |
| "learning_rate": 4.114309228846167e-06, | |
| "loss": 0.6939, | |
| "step": 1131 | |
| }, | |
| { | |
| "epoch": 1.7734864300626305, | |
| "grad_norm": 1.0082894563674927, | |
| "learning_rate": 4.112699998688492e-06, | |
| "loss": 0.6772, | |
| "step": 1132 | |
| }, | |
| { | |
| "epoch": 1.7750521920668059, | |
| "grad_norm": 0.9453067183494568, | |
| "learning_rate": 4.111089623277658e-06, | |
| "loss": 0.6982, | |
| "step": 1133 | |
| }, | |
| { | |
| "epoch": 1.7766179540709812, | |
| "grad_norm": 1.074747920036316, | |
| "learning_rate": 4.1094781037572696e-06, | |
| "loss": 0.7296, | |
| "step": 1134 | |
| }, | |
| { | |
| "epoch": 1.7781837160751566, | |
| "grad_norm": 1.0477592945098877, | |
| "learning_rate": 4.1078654412717404e-06, | |
| "loss": 0.8162, | |
| "step": 1135 | |
| }, | |
| { | |
| "epoch": 1.779749478079332, | |
| "grad_norm": 1.1362730264663696, | |
| "learning_rate": 4.106251636966297e-06, | |
| "loss": 0.6942, | |
| "step": 1136 | |
| }, | |
| { | |
| "epoch": 1.7813152400835073, | |
| "grad_norm": 0.9564191102981567, | |
| "learning_rate": 4.104636691986977e-06, | |
| "loss": 0.7613, | |
| "step": 1137 | |
| }, | |
| { | |
| "epoch": 1.7828810020876826, | |
| "grad_norm": 1.0384364128112793, | |
| "learning_rate": 4.103020607480628e-06, | |
| "loss": 0.6482, | |
| "step": 1138 | |
| }, | |
| { | |
| "epoch": 1.784446764091858, | |
| "grad_norm": 1.1258740425109863, | |
| "learning_rate": 4.101403384594905e-06, | |
| "loss": 0.7736, | |
| "step": 1139 | |
| }, | |
| { | |
| "epoch": 1.7860125260960333, | |
| "grad_norm": 1.0740283727645874, | |
| "learning_rate": 4.099785024478276e-06, | |
| "loss": 0.7219, | |
| "step": 1140 | |
| }, | |
| { | |
| "epoch": 1.787578288100209, | |
| "grad_norm": 1.0377696752548218, | |
| "learning_rate": 4.098165528280012e-06, | |
| "loss": 0.7132, | |
| "step": 1141 | |
| }, | |
| { | |
| "epoch": 1.789144050104384, | |
| "grad_norm": 1.0551276206970215, | |
| "learning_rate": 4.096544897150192e-06, | |
| "loss": 0.7879, | |
| "step": 1142 | |
| }, | |
| { | |
| "epoch": 1.7907098121085596, | |
| "grad_norm": 1.096389651298523, | |
| "learning_rate": 4.0949231322397036e-06, | |
| "loss": 0.7516, | |
| "step": 1143 | |
| }, | |
| { | |
| "epoch": 1.7922755741127347, | |
| "grad_norm": 0.9602596163749695, | |
| "learning_rate": 4.093300234700234e-06, | |
| "loss": 0.7197, | |
| "step": 1144 | |
| }, | |
| { | |
| "epoch": 1.7938413361169103, | |
| "grad_norm": 1.1918690204620361, | |
| "learning_rate": 4.09167620568428e-06, | |
| "loss": 0.7634, | |
| "step": 1145 | |
| }, | |
| { | |
| "epoch": 1.7954070981210855, | |
| "grad_norm": 1.1249059438705444, | |
| "learning_rate": 4.09005104634514e-06, | |
| "loss": 0.7718, | |
| "step": 1146 | |
| }, | |
| { | |
| "epoch": 1.796972860125261, | |
| "grad_norm": 1.0596246719360352, | |
| "learning_rate": 4.0884247578369164e-06, | |
| "loss": 0.7258, | |
| "step": 1147 | |
| }, | |
| { | |
| "epoch": 1.7985386221294362, | |
| "grad_norm": 1.026201844215393, | |
| "learning_rate": 4.086797341314509e-06, | |
| "loss": 0.6426, | |
| "step": 1148 | |
| }, | |
| { | |
| "epoch": 1.8001043841336117, | |
| "grad_norm": 1.0791343450546265, | |
| "learning_rate": 4.085168797933625e-06, | |
| "loss": 0.7147, | |
| "step": 1149 | |
| }, | |
| { | |
| "epoch": 1.801670146137787, | |
| "grad_norm": 0.9660344123840332, | |
| "learning_rate": 4.083539128850766e-06, | |
| "loss": 0.7335, | |
| "step": 1150 | |
| }, | |
| { | |
| "epoch": 1.8032359081419624, | |
| "grad_norm": 1.0022811889648438, | |
| "learning_rate": 4.081908335223238e-06, | |
| "loss": 0.6824, | |
| "step": 1151 | |
| }, | |
| { | |
| "epoch": 1.8048016701461378, | |
| "grad_norm": 0.9261506199836731, | |
| "learning_rate": 4.080276418209143e-06, | |
| "loss": 0.7538, | |
| "step": 1152 | |
| }, | |
| { | |
| "epoch": 1.8063674321503131, | |
| "grad_norm": 1.088552713394165, | |
| "learning_rate": 4.078643378967379e-06, | |
| "loss": 0.6988, | |
| "step": 1153 | |
| }, | |
| { | |
| "epoch": 1.8079331941544885, | |
| "grad_norm": 1.0581461191177368, | |
| "learning_rate": 4.077009218657645e-06, | |
| "loss": 0.7075, | |
| "step": 1154 | |
| }, | |
| { | |
| "epoch": 1.8094989561586639, | |
| "grad_norm": 1.0722301006317139, | |
| "learning_rate": 4.0753739384404335e-06, | |
| "loss": 0.6974, | |
| "step": 1155 | |
| }, | |
| { | |
| "epoch": 1.8110647181628392, | |
| "grad_norm": 0.9519115090370178, | |
| "learning_rate": 4.073737539477033e-06, | |
| "loss": 0.6898, | |
| "step": 1156 | |
| }, | |
| { | |
| "epoch": 1.8126304801670146, | |
| "grad_norm": 1.1201752424240112, | |
| "learning_rate": 4.072100022929526e-06, | |
| "loss": 0.8572, | |
| "step": 1157 | |
| }, | |
| { | |
| "epoch": 1.8141962421711901, | |
| "grad_norm": 1.24104642868042, | |
| "learning_rate": 4.070461389960789e-06, | |
| "loss": 0.7407, | |
| "step": 1158 | |
| }, | |
| { | |
| "epoch": 1.8157620041753653, | |
| "grad_norm": 1.0625057220458984, | |
| "learning_rate": 4.068821641734492e-06, | |
| "loss": 0.655, | |
| "step": 1159 | |
| }, | |
| { | |
| "epoch": 1.8173277661795408, | |
| "grad_norm": 1.1174049377441406, | |
| "learning_rate": 4.067180779415097e-06, | |
| "loss": 0.7214, | |
| "step": 1160 | |
| }, | |
| { | |
| "epoch": 1.818893528183716, | |
| "grad_norm": 1.0683097839355469, | |
| "learning_rate": 4.065538804167855e-06, | |
| "loss": 0.7863, | |
| "step": 1161 | |
| }, | |
| { | |
| "epoch": 1.8204592901878915, | |
| "grad_norm": 1.031198263168335, | |
| "learning_rate": 4.063895717158809e-06, | |
| "loss": 0.7663, | |
| "step": 1162 | |
| }, | |
| { | |
| "epoch": 1.8220250521920667, | |
| "grad_norm": 1.1091554164886475, | |
| "learning_rate": 4.062251519554791e-06, | |
| "loss": 0.7234, | |
| "step": 1163 | |
| }, | |
| { | |
| "epoch": 1.8235908141962422, | |
| "grad_norm": 1.0818198919296265, | |
| "learning_rate": 4.060606212523425e-06, | |
| "loss": 0.7709, | |
| "step": 1164 | |
| }, | |
| { | |
| "epoch": 1.8251565762004174, | |
| "grad_norm": 1.0104292631149292, | |
| "learning_rate": 4.058959797233116e-06, | |
| "loss": 0.7225, | |
| "step": 1165 | |
| }, | |
| { | |
| "epoch": 1.826722338204593, | |
| "grad_norm": 1.1726491451263428, | |
| "learning_rate": 4.057312274853063e-06, | |
| "loss": 0.6831, | |
| "step": 1166 | |
| }, | |
| { | |
| "epoch": 1.8282881002087683, | |
| "grad_norm": 0.9735565185546875, | |
| "learning_rate": 4.055663646553248e-06, | |
| "loss": 0.7123, | |
| "step": 1167 | |
| }, | |
| { | |
| "epoch": 1.8298538622129437, | |
| "grad_norm": 1.011709213256836, | |
| "learning_rate": 4.054013913504436e-06, | |
| "loss": 0.6885, | |
| "step": 1168 | |
| }, | |
| { | |
| "epoch": 1.831419624217119, | |
| "grad_norm": 1.0374364852905273, | |
| "learning_rate": 4.05236307687818e-06, | |
| "loss": 0.782, | |
| "step": 1169 | |
| }, | |
| { | |
| "epoch": 1.8329853862212944, | |
| "grad_norm": 1.011202096939087, | |
| "learning_rate": 4.050711137846817e-06, | |
| "loss": 0.7381, | |
| "step": 1170 | |
| }, | |
| { | |
| "epoch": 1.8345511482254697, | |
| "grad_norm": 1.0984755754470825, | |
| "learning_rate": 4.049058097583464e-06, | |
| "loss": 0.7472, | |
| "step": 1171 | |
| }, | |
| { | |
| "epoch": 1.836116910229645, | |
| "grad_norm": 1.090078353881836, | |
| "learning_rate": 4.047403957262024e-06, | |
| "loss": 0.6773, | |
| "step": 1172 | |
| }, | |
| { | |
| "epoch": 1.8376826722338204, | |
| "grad_norm": 0.9866096377372742, | |
| "learning_rate": 4.045748718057176e-06, | |
| "loss": 0.6812, | |
| "step": 1173 | |
| }, | |
| { | |
| "epoch": 1.8392484342379958, | |
| "grad_norm": 1.1006139516830444, | |
| "learning_rate": 4.0440923811443846e-06, | |
| "loss": 0.7953, | |
| "step": 1174 | |
| }, | |
| { | |
| "epoch": 1.8408141962421714, | |
| "grad_norm": 0.9621697664260864, | |
| "learning_rate": 4.04243494769989e-06, | |
| "loss": 0.7557, | |
| "step": 1175 | |
| }, | |
| { | |
| "epoch": 1.8423799582463465, | |
| "grad_norm": 1.1520715951919556, | |
| "learning_rate": 4.040776418900714e-06, | |
| "loss": 0.7422, | |
| "step": 1176 | |
| }, | |
| { | |
| "epoch": 1.843945720250522, | |
| "grad_norm": 1.0679237842559814, | |
| "learning_rate": 4.039116795924653e-06, | |
| "loss": 0.6738, | |
| "step": 1177 | |
| }, | |
| { | |
| "epoch": 1.8455114822546972, | |
| "grad_norm": 1.026510238647461, | |
| "learning_rate": 4.037456079950284e-06, | |
| "loss": 0.7741, | |
| "step": 1178 | |
| }, | |
| { | |
| "epoch": 1.8470772442588728, | |
| "grad_norm": 1.0111522674560547, | |
| "learning_rate": 4.0357942721569585e-06, | |
| "loss": 0.6968, | |
| "step": 1179 | |
| }, | |
| { | |
| "epoch": 1.848643006263048, | |
| "grad_norm": 1.0290493965148926, | |
| "learning_rate": 4.034131373724802e-06, | |
| "loss": 0.7131, | |
| "step": 1180 | |
| }, | |
| { | |
| "epoch": 1.8502087682672235, | |
| "grad_norm": 0.9878649711608887, | |
| "learning_rate": 4.032467385834718e-06, | |
| "loss": 0.7477, | |
| "step": 1181 | |
| }, | |
| { | |
| "epoch": 1.8517745302713986, | |
| "grad_norm": 1.0317823886871338, | |
| "learning_rate": 4.030802309668379e-06, | |
| "loss": 0.6318, | |
| "step": 1182 | |
| }, | |
| { | |
| "epoch": 1.8533402922755742, | |
| "grad_norm": 1.0163817405700684, | |
| "learning_rate": 4.029136146408235e-06, | |
| "loss": 0.7027, | |
| "step": 1183 | |
| }, | |
| { | |
| "epoch": 1.8549060542797495, | |
| "grad_norm": 0.9590600728988647, | |
| "learning_rate": 4.027468897237505e-06, | |
| "loss": 0.6988, | |
| "step": 1184 | |
| }, | |
| { | |
| "epoch": 1.8564718162839249, | |
| "grad_norm": 1.0645931959152222, | |
| "learning_rate": 4.0258005633401795e-06, | |
| "loss": 0.7588, | |
| "step": 1185 | |
| }, | |
| { | |
| "epoch": 1.8580375782881002, | |
| "grad_norm": 1.0380210876464844, | |
| "learning_rate": 4.024131145901022e-06, | |
| "loss": 0.6984, | |
| "step": 1186 | |
| }, | |
| { | |
| "epoch": 1.8596033402922756, | |
| "grad_norm": 1.0558087825775146, | |
| "learning_rate": 4.022460646105561e-06, | |
| "loss": 0.6729, | |
| "step": 1187 | |
| }, | |
| { | |
| "epoch": 1.861169102296451, | |
| "grad_norm": 1.0865014791488647, | |
| "learning_rate": 4.020789065140097e-06, | |
| "loss": 0.7208, | |
| "step": 1188 | |
| }, | |
| { | |
| "epoch": 1.8627348643006263, | |
| "grad_norm": 1.0456374883651733, | |
| "learning_rate": 4.019116404191697e-06, | |
| "loss": 0.7759, | |
| "step": 1189 | |
| }, | |
| { | |
| "epoch": 1.8643006263048016, | |
| "grad_norm": 0.9724026918411255, | |
| "learning_rate": 4.017442664448197e-06, | |
| "loss": 0.7475, | |
| "step": 1190 | |
| }, | |
| { | |
| "epoch": 1.865866388308977, | |
| "grad_norm": 1.1170289516448975, | |
| "learning_rate": 4.015767847098194e-06, | |
| "loss": 0.6993, | |
| "step": 1191 | |
| }, | |
| { | |
| "epoch": 1.8674321503131524, | |
| "grad_norm": 1.1052978038787842, | |
| "learning_rate": 4.014091953331056e-06, | |
| "loss": 0.748, | |
| "step": 1192 | |
| }, | |
| { | |
| "epoch": 1.8689979123173277, | |
| "grad_norm": 1.1223454475402832, | |
| "learning_rate": 4.012414984336912e-06, | |
| "loss": 0.6921, | |
| "step": 1193 | |
| }, | |
| { | |
| "epoch": 1.8705636743215033, | |
| "grad_norm": 1.0611138343811035, | |
| "learning_rate": 4.010736941306655e-06, | |
| "loss": 0.7859, | |
| "step": 1194 | |
| }, | |
| { | |
| "epoch": 1.8721294363256784, | |
| "grad_norm": 1.169189691543579, | |
| "learning_rate": 4.009057825431944e-06, | |
| "loss": 0.5874, | |
| "step": 1195 | |
| }, | |
| { | |
| "epoch": 1.873695198329854, | |
| "grad_norm": 1.036626935005188, | |
| "learning_rate": 4.0073776379051945e-06, | |
| "loss": 0.6801, | |
| "step": 1196 | |
| }, | |
| { | |
| "epoch": 1.8752609603340291, | |
| "grad_norm": 0.9431620240211487, | |
| "learning_rate": 4.005696379919586e-06, | |
| "loss": 0.7476, | |
| "step": 1197 | |
| }, | |
| { | |
| "epoch": 1.8768267223382047, | |
| "grad_norm": 1.0381059646606445, | |
| "learning_rate": 4.0040140526690585e-06, | |
| "loss": 0.6691, | |
| "step": 1198 | |
| }, | |
| { | |
| "epoch": 1.8783924843423798, | |
| "grad_norm": 1.120033860206604, | |
| "learning_rate": 4.0023306573483105e-06, | |
| "loss": 0.6669, | |
| "step": 1199 | |
| }, | |
| { | |
| "epoch": 1.8799582463465554, | |
| "grad_norm": 1.0298967361450195, | |
| "learning_rate": 4.000646195152798e-06, | |
| "loss": 0.7355, | |
| "step": 1200 | |
| }, | |
| { | |
| "epoch": 1.8815240083507305, | |
| "grad_norm": 1.0036594867706299, | |
| "learning_rate": 3.99896066727874e-06, | |
| "loss": 0.7545, | |
| "step": 1201 | |
| }, | |
| { | |
| "epoch": 1.883089770354906, | |
| "grad_norm": 1.1166342496871948, | |
| "learning_rate": 3.997274074923103e-06, | |
| "loss": 0.7468, | |
| "step": 1202 | |
| }, | |
| { | |
| "epoch": 1.8846555323590815, | |
| "grad_norm": 1.0512094497680664, | |
| "learning_rate": 3.995586419283617e-06, | |
| "loss": 0.7647, | |
| "step": 1203 | |
| }, | |
| { | |
| "epoch": 1.8862212943632568, | |
| "grad_norm": 1.0115973949432373, | |
| "learning_rate": 3.993897701558764e-06, | |
| "loss": 0.7893, | |
| "step": 1204 | |
| }, | |
| { | |
| "epoch": 1.8877870563674322, | |
| "grad_norm": 1.0839310884475708, | |
| "learning_rate": 3.992207922947783e-06, | |
| "loss": 0.6824, | |
| "step": 1205 | |
| }, | |
| { | |
| "epoch": 1.8893528183716075, | |
| "grad_norm": 0.9987578988075256, | |
| "learning_rate": 3.99051708465066e-06, | |
| "loss": 0.8045, | |
| "step": 1206 | |
| }, | |
| { | |
| "epoch": 1.8909185803757829, | |
| "grad_norm": 1.0759059190750122, | |
| "learning_rate": 3.9888251878681425e-06, | |
| "loss": 0.7756, | |
| "step": 1207 | |
| }, | |
| { | |
| "epoch": 1.8924843423799582, | |
| "grad_norm": 1.1055301427841187, | |
| "learning_rate": 3.987132233801722e-06, | |
| "loss": 0.6947, | |
| "step": 1208 | |
| }, | |
| { | |
| "epoch": 1.8940501043841336, | |
| "grad_norm": 1.042575478553772, | |
| "learning_rate": 3.985438223653644e-06, | |
| "loss": 0.6669, | |
| "step": 1209 | |
| }, | |
| { | |
| "epoch": 1.895615866388309, | |
| "grad_norm": 1.0595242977142334, | |
| "learning_rate": 3.983743158626904e-06, | |
| "loss": 0.6937, | |
| "step": 1210 | |
| }, | |
| { | |
| "epoch": 1.8971816283924845, | |
| "grad_norm": 1.0498789548873901, | |
| "learning_rate": 3.982047039925247e-06, | |
| "loss": 0.7244, | |
| "step": 1211 | |
| }, | |
| { | |
| "epoch": 1.8987473903966596, | |
| "grad_norm": 1.0960590839385986, | |
| "learning_rate": 3.980349868753166e-06, | |
| "loss": 0.7661, | |
| "step": 1212 | |
| }, | |
| { | |
| "epoch": 1.9003131524008352, | |
| "grad_norm": 1.108823299407959, | |
| "learning_rate": 3.9786516463159e-06, | |
| "loss": 0.6937, | |
| "step": 1213 | |
| }, | |
| { | |
| "epoch": 1.9018789144050103, | |
| "grad_norm": 1.051416039466858, | |
| "learning_rate": 3.976952373819436e-06, | |
| "loss": 0.7263, | |
| "step": 1214 | |
| }, | |
| { | |
| "epoch": 1.903444676409186, | |
| "grad_norm": 0.9884061217308044, | |
| "learning_rate": 3.9752520524705065e-06, | |
| "loss": 0.6577, | |
| "step": 1215 | |
| }, | |
| { | |
| "epoch": 1.905010438413361, | |
| "grad_norm": 1.0950369834899902, | |
| "learning_rate": 3.9735506834765885e-06, | |
| "loss": 0.7376, | |
| "step": 1216 | |
| }, | |
| { | |
| "epoch": 1.9065762004175366, | |
| "grad_norm": 1.1140044927597046, | |
| "learning_rate": 3.971848268045902e-06, | |
| "loss": 0.7125, | |
| "step": 1217 | |
| }, | |
| { | |
| "epoch": 1.9081419624217117, | |
| "grad_norm": 1.0908716917037964, | |
| "learning_rate": 3.970144807387414e-06, | |
| "loss": 0.7796, | |
| "step": 1218 | |
| }, | |
| { | |
| "epoch": 1.9097077244258873, | |
| "grad_norm": 1.1068549156188965, | |
| "learning_rate": 3.968440302710829e-06, | |
| "loss": 0.7454, | |
| "step": 1219 | |
| }, | |
| { | |
| "epoch": 1.9112734864300627, | |
| "grad_norm": 1.102594017982483, | |
| "learning_rate": 3.9667347552265945e-06, | |
| "loss": 0.757, | |
| "step": 1220 | |
| }, | |
| { | |
| "epoch": 1.912839248434238, | |
| "grad_norm": 1.1588326692581177, | |
| "learning_rate": 3.9650281661459015e-06, | |
| "loss": 0.6491, | |
| "step": 1221 | |
| }, | |
| { | |
| "epoch": 1.9144050104384134, | |
| "grad_norm": 0.9896461367607117, | |
| "learning_rate": 3.963320536680675e-06, | |
| "loss": 0.773, | |
| "step": 1222 | |
| }, | |
| { | |
| "epoch": 1.9159707724425887, | |
| "grad_norm": 1.0855952501296997, | |
| "learning_rate": 3.961611868043583e-06, | |
| "loss": 0.6906, | |
| "step": 1223 | |
| }, | |
| { | |
| "epoch": 1.917536534446764, | |
| "grad_norm": 1.0556972026824951, | |
| "learning_rate": 3.959902161448032e-06, | |
| "loss": 0.7577, | |
| "step": 1224 | |
| }, | |
| { | |
| "epoch": 1.9191022964509394, | |
| "grad_norm": 1.0284528732299805, | |
| "learning_rate": 3.958191418108162e-06, | |
| "loss": 0.6844, | |
| "step": 1225 | |
| }, | |
| { | |
| "epoch": 1.9206680584551148, | |
| "grad_norm": 0.9578842520713806, | |
| "learning_rate": 3.956479639238852e-06, | |
| "loss": 0.7216, | |
| "step": 1226 | |
| }, | |
| { | |
| "epoch": 1.9222338204592901, | |
| "grad_norm": 1.0366833209991455, | |
| "learning_rate": 3.954766826055717e-06, | |
| "loss": 0.7365, | |
| "step": 1227 | |
| }, | |
| { | |
| "epoch": 1.9237995824634657, | |
| "grad_norm": 1.0498946905136108, | |
| "learning_rate": 3.953052979775103e-06, | |
| "loss": 0.6788, | |
| "step": 1228 | |
| }, | |
| { | |
| "epoch": 1.9253653444676408, | |
| "grad_norm": 1.010860562324524, | |
| "learning_rate": 3.951338101614094e-06, | |
| "loss": 0.7181, | |
| "step": 1229 | |
| }, | |
| { | |
| "epoch": 1.9269311064718164, | |
| "grad_norm": 1.0203677415847778, | |
| "learning_rate": 3.949622192790502e-06, | |
| "loss": 0.7277, | |
| "step": 1230 | |
| }, | |
| { | |
| "epoch": 1.9284968684759916, | |
| "grad_norm": 0.9907971024513245, | |
| "learning_rate": 3.947905254522876e-06, | |
| "loss": 0.6858, | |
| "step": 1231 | |
| }, | |
| { | |
| "epoch": 1.9300626304801671, | |
| "grad_norm": 0.9824337363243103, | |
| "learning_rate": 3.946187288030493e-06, | |
| "loss": 0.7517, | |
| "step": 1232 | |
| }, | |
| { | |
| "epoch": 1.9316283924843423, | |
| "grad_norm": 1.0206327438354492, | |
| "learning_rate": 3.944468294533359e-06, | |
| "loss": 0.7225, | |
| "step": 1233 | |
| }, | |
| { | |
| "epoch": 1.9331941544885178, | |
| "grad_norm": 1.0529900789260864, | |
| "learning_rate": 3.942748275252214e-06, | |
| "loss": 0.7965, | |
| "step": 1234 | |
| }, | |
| { | |
| "epoch": 1.934759916492693, | |
| "grad_norm": 1.0501983165740967, | |
| "learning_rate": 3.941027231408521e-06, | |
| "loss": 0.7646, | |
| "step": 1235 | |
| }, | |
| { | |
| "epoch": 1.9363256784968685, | |
| "grad_norm": 0.9482662081718445, | |
| "learning_rate": 3.939305164224474e-06, | |
| "loss": 0.7449, | |
| "step": 1236 | |
| }, | |
| { | |
| "epoch": 1.937891440501044, | |
| "grad_norm": 1.096723198890686, | |
| "learning_rate": 3.937582074922993e-06, | |
| "loss": 0.7464, | |
| "step": 1237 | |
| }, | |
| { | |
| "epoch": 1.9394572025052192, | |
| "grad_norm": 1.0417059659957886, | |
| "learning_rate": 3.935857964727725e-06, | |
| "loss": 0.6586, | |
| "step": 1238 | |
| }, | |
| { | |
| "epoch": 1.9410229645093946, | |
| "grad_norm": 1.1008849143981934, | |
| "learning_rate": 3.934132834863039e-06, | |
| "loss": 0.6236, | |
| "step": 1239 | |
| }, | |
| { | |
| "epoch": 1.94258872651357, | |
| "grad_norm": 0.9107227921485901, | |
| "learning_rate": 3.93240668655403e-06, | |
| "loss": 0.7069, | |
| "step": 1240 | |
| }, | |
| { | |
| "epoch": 1.9441544885177453, | |
| "grad_norm": 1.0516945123672485, | |
| "learning_rate": 3.930679521026517e-06, | |
| "loss": 0.773, | |
| "step": 1241 | |
| }, | |
| { | |
| "epoch": 1.9457202505219207, | |
| "grad_norm": 0.9941951632499695, | |
| "learning_rate": 3.92895133950704e-06, | |
| "loss": 0.6002, | |
| "step": 1242 | |
| }, | |
| { | |
| "epoch": 1.947286012526096, | |
| "grad_norm": 1.031784176826477, | |
| "learning_rate": 3.927222143222859e-06, | |
| "loss": 0.7737, | |
| "step": 1243 | |
| }, | |
| { | |
| "epoch": 1.9488517745302714, | |
| "grad_norm": 0.986473798751831, | |
| "learning_rate": 3.925491933401961e-06, | |
| "loss": 0.7455, | |
| "step": 1244 | |
| }, | |
| { | |
| "epoch": 1.9504175365344467, | |
| "grad_norm": 1.109964370727539, | |
| "learning_rate": 3.923760711273044e-06, | |
| "loss": 0.7048, | |
| "step": 1245 | |
| }, | |
| { | |
| "epoch": 1.951983298538622, | |
| "grad_norm": 1.1303372383117676, | |
| "learning_rate": 3.922028478065532e-06, | |
| "loss": 0.749, | |
| "step": 1246 | |
| }, | |
| { | |
| "epoch": 1.9535490605427976, | |
| "grad_norm": 0.999824583530426, | |
| "learning_rate": 3.920295235009562e-06, | |
| "loss": 0.7596, | |
| "step": 1247 | |
| }, | |
| { | |
| "epoch": 1.9551148225469728, | |
| "grad_norm": 1.0440820455551147, | |
| "learning_rate": 3.918560983335993e-06, | |
| "loss": 0.7841, | |
| "step": 1248 | |
| }, | |
| { | |
| "epoch": 1.9566805845511483, | |
| "grad_norm": 1.0420323610305786, | |
| "learning_rate": 3.916825724276395e-06, | |
| "loss": 0.7837, | |
| "step": 1249 | |
| }, | |
| { | |
| "epoch": 1.9582463465553235, | |
| "grad_norm": 1.001997947692871, | |
| "learning_rate": 3.915089459063058e-06, | |
| "loss": 0.6956, | |
| "step": 1250 | |
| }, | |
| { | |
| "epoch": 1.959812108559499, | |
| "grad_norm": 1.030626893043518, | |
| "learning_rate": 3.913352188928984e-06, | |
| "loss": 0.74, | |
| "step": 1251 | |
| }, | |
| { | |
| "epoch": 1.9613778705636742, | |
| "grad_norm": 1.0981760025024414, | |
| "learning_rate": 3.911613915107888e-06, | |
| "loss": 0.713, | |
| "step": 1252 | |
| }, | |
| { | |
| "epoch": 1.9629436325678498, | |
| "grad_norm": 1.020772933959961, | |
| "learning_rate": 3.9098746388342006e-06, | |
| "loss": 0.7605, | |
| "step": 1253 | |
| }, | |
| { | |
| "epoch": 1.964509394572025, | |
| "grad_norm": 1.0206503868103027, | |
| "learning_rate": 3.908134361343062e-06, | |
| "loss": 0.7176, | |
| "step": 1254 | |
| }, | |
| { | |
| "epoch": 1.9660751565762005, | |
| "grad_norm": 1.0471689701080322, | |
| "learning_rate": 3.906393083870325e-06, | |
| "loss": 0.7698, | |
| "step": 1255 | |
| }, | |
| { | |
| "epoch": 1.9676409185803758, | |
| "grad_norm": 1.0658202171325684, | |
| "learning_rate": 3.904650807652549e-06, | |
| "loss": 0.7179, | |
| "step": 1256 | |
| }, | |
| { | |
| "epoch": 1.9692066805845512, | |
| "grad_norm": 1.196192741394043, | |
| "learning_rate": 3.902907533927007e-06, | |
| "loss": 0.7801, | |
| "step": 1257 | |
| }, | |
| { | |
| "epoch": 1.9707724425887265, | |
| "grad_norm": 1.0874967575073242, | |
| "learning_rate": 3.90116326393168e-06, | |
| "loss": 0.7672, | |
| "step": 1258 | |
| }, | |
| { | |
| "epoch": 1.9723382045929019, | |
| "grad_norm": 1.153617024421692, | |
| "learning_rate": 3.899417998905253e-06, | |
| "loss": 0.752, | |
| "step": 1259 | |
| }, | |
| { | |
| "epoch": 1.9739039665970772, | |
| "grad_norm": 1.0750453472137451, | |
| "learning_rate": 3.89767174008712e-06, | |
| "loss": 0.7471, | |
| "step": 1260 | |
| }, | |
| { | |
| "epoch": 1.9754697286012526, | |
| "grad_norm": 1.0693799257278442, | |
| "learning_rate": 3.895924488717381e-06, | |
| "loss": 0.6704, | |
| "step": 1261 | |
| }, | |
| { | |
| "epoch": 1.977035490605428, | |
| "grad_norm": 0.9552210569381714, | |
| "learning_rate": 3.89417624603684e-06, | |
| "loss": 0.7793, | |
| "step": 1262 | |
| }, | |
| { | |
| "epoch": 1.9786012526096033, | |
| "grad_norm": 0.96712726354599, | |
| "learning_rate": 3.892427013287007e-06, | |
| "loss": 0.6843, | |
| "step": 1263 | |
| }, | |
| { | |
| "epoch": 1.9801670146137789, | |
| "grad_norm": 1.1453518867492676, | |
| "learning_rate": 3.890676791710091e-06, | |
| "loss": 0.7532, | |
| "step": 1264 | |
| }, | |
| { | |
| "epoch": 1.981732776617954, | |
| "grad_norm": 1.0294159650802612, | |
| "learning_rate": 3.888925582549006e-06, | |
| "loss": 0.703, | |
| "step": 1265 | |
| }, | |
| { | |
| "epoch": 1.9832985386221296, | |
| "grad_norm": 1.0790715217590332, | |
| "learning_rate": 3.8871733870473686e-06, | |
| "loss": 0.6731, | |
| "step": 1266 | |
| }, | |
| { | |
| "epoch": 1.9848643006263047, | |
| "grad_norm": 1.052460789680481, | |
| "learning_rate": 3.8854202064494915e-06, | |
| "loss": 0.748, | |
| "step": 1267 | |
| }, | |
| { | |
| "epoch": 1.9864300626304803, | |
| "grad_norm": 1.112462043762207, | |
| "learning_rate": 3.883666042000392e-06, | |
| "loss": 0.7408, | |
| "step": 1268 | |
| }, | |
| { | |
| "epoch": 1.9879958246346554, | |
| "grad_norm": 1.0738476514816284, | |
| "learning_rate": 3.881910894945783e-06, | |
| "loss": 0.759, | |
| "step": 1269 | |
| }, | |
| { | |
| "epoch": 1.989561586638831, | |
| "grad_norm": 1.136472463607788, | |
| "learning_rate": 3.8801547665320775e-06, | |
| "loss": 0.754, | |
| "step": 1270 | |
| }, | |
| { | |
| "epoch": 1.9911273486430061, | |
| "grad_norm": 1.0640734434127808, | |
| "learning_rate": 3.87839765800638e-06, | |
| "loss": 0.719, | |
| "step": 1271 | |
| }, | |
| { | |
| "epoch": 1.9926931106471817, | |
| "grad_norm": 0.9155470132827759, | |
| "learning_rate": 3.876639570616498e-06, | |
| "loss": 0.762, | |
| "step": 1272 | |
| }, | |
| { | |
| "epoch": 1.994258872651357, | |
| "grad_norm": 1.015743374824524, | |
| "learning_rate": 3.87488050561093e-06, | |
| "loss": 0.6926, | |
| "step": 1273 | |
| }, | |
| { | |
| "epoch": 1.9958246346555324, | |
| "grad_norm": 1.0829862356185913, | |
| "learning_rate": 3.87312046423887e-06, | |
| "loss": 0.7644, | |
| "step": 1274 | |
| }, | |
| { | |
| "epoch": 1.9973903966597077, | |
| "grad_norm": 1.0962239503860474, | |
| "learning_rate": 3.871359447750205e-06, | |
| "loss": 0.7038, | |
| "step": 1275 | |
| }, | |
| { | |
| "epoch": 1.998956158663883, | |
| "grad_norm": 1.0470675230026245, | |
| "learning_rate": 3.869597457395514e-06, | |
| "loss": 0.7629, | |
| "step": 1276 | |
| } | |
| ], | |
| "logging_steps": 1, | |
| "max_steps": 3828, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 6, | |
| "save_steps": 638, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": false | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 3.224064837298422e+18, | |
| "train_batch_size": 4, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |