| { | |
| "best_global_step": null, | |
| "best_metric": null, | |
| "best_model_checkpoint": null, | |
| "epoch": 2.0, | |
| "eval_steps": 500, | |
| "global_step": 7728, | |
| "is_hyper_param_search": false, | |
| "is_local_process_zero": true, | |
| "is_world_process_zero": true, | |
| "log_history": [ | |
| { | |
| "epoch": 0.002587991718426501, | |
| "grad_norm": 0.6931016445159912, | |
| "learning_rate": 4.9961180124223606e-05, | |
| "loss": 1.2213, | |
| "step": 10 | |
| }, | |
| { | |
| "epoch": 0.005175983436853002, | |
| "grad_norm": 0.48186811804771423, | |
| "learning_rate": 4.9918046928916494e-05, | |
| "loss": 0.6617, | |
| "step": 20 | |
| }, | |
| { | |
| "epoch": 0.007763975155279503, | |
| "grad_norm": 0.4067845642566681, | |
| "learning_rate": 4.987491373360939e-05, | |
| "loss": 0.5725, | |
| "step": 30 | |
| }, | |
| { | |
| "epoch": 0.010351966873706004, | |
| "grad_norm": 0.4268130958080292, | |
| "learning_rate": 4.9831780538302284e-05, | |
| "loss": 0.5456, | |
| "step": 40 | |
| }, | |
| { | |
| "epoch": 0.012939958592132506, | |
| "grad_norm": 0.4061700701713562, | |
| "learning_rate": 4.978864734299517e-05, | |
| "loss": 0.5057, | |
| "step": 50 | |
| }, | |
| { | |
| "epoch": 0.015527950310559006, | |
| "grad_norm": 0.4203609526157379, | |
| "learning_rate": 4.974551414768806e-05, | |
| "loss": 0.493, | |
| "step": 60 | |
| }, | |
| { | |
| "epoch": 0.018115942028985508, | |
| "grad_norm": 0.4072072505950928, | |
| "learning_rate": 4.9702380952380955e-05, | |
| "loss": 0.5016, | |
| "step": 70 | |
| }, | |
| { | |
| "epoch": 0.020703933747412008, | |
| "grad_norm": 0.35524892807006836, | |
| "learning_rate": 4.965924775707384e-05, | |
| "loss": 0.4801, | |
| "step": 80 | |
| }, | |
| { | |
| "epoch": 0.023291925465838508, | |
| "grad_norm": 0.422678142786026, | |
| "learning_rate": 4.961611456176674e-05, | |
| "loss": 0.4675, | |
| "step": 90 | |
| }, | |
| { | |
| "epoch": 0.025879917184265012, | |
| "grad_norm": 0.7197648882865906, | |
| "learning_rate": 4.957298136645963e-05, | |
| "loss": 0.4504, | |
| "step": 100 | |
| }, | |
| { | |
| "epoch": 0.028467908902691512, | |
| "grad_norm": 0.38139602541923523, | |
| "learning_rate": 4.952984817115252e-05, | |
| "loss": 0.4661, | |
| "step": 110 | |
| }, | |
| { | |
| "epoch": 0.031055900621118012, | |
| "grad_norm": 0.3637668490409851, | |
| "learning_rate": 4.948671497584541e-05, | |
| "loss": 0.4291, | |
| "step": 120 | |
| }, | |
| { | |
| "epoch": 0.03364389233954451, | |
| "grad_norm": 0.3413495421409607, | |
| "learning_rate": 4.94435817805383e-05, | |
| "loss": 0.4337, | |
| "step": 130 | |
| }, | |
| { | |
| "epoch": 0.036231884057971016, | |
| "grad_norm": 0.3850350081920624, | |
| "learning_rate": 4.94004485852312e-05, | |
| "loss": 0.4353, | |
| "step": 140 | |
| }, | |
| { | |
| "epoch": 0.03881987577639751, | |
| "grad_norm": 0.3495863378047943, | |
| "learning_rate": 4.9357315389924086e-05, | |
| "loss": 0.429, | |
| "step": 150 | |
| }, | |
| { | |
| "epoch": 0.041407867494824016, | |
| "grad_norm": 0.3324839472770691, | |
| "learning_rate": 4.931418219461698e-05, | |
| "loss": 0.4254, | |
| "step": 160 | |
| }, | |
| { | |
| "epoch": 0.04399585921325052, | |
| "grad_norm": 0.3240760862827301, | |
| "learning_rate": 4.927104899930987e-05, | |
| "loss": 0.418, | |
| "step": 170 | |
| }, | |
| { | |
| "epoch": 0.046583850931677016, | |
| "grad_norm": 0.32717740535736084, | |
| "learning_rate": 4.922791580400276e-05, | |
| "loss": 0.4181, | |
| "step": 180 | |
| }, | |
| { | |
| "epoch": 0.04917184265010352, | |
| "grad_norm": 0.37864601612091064, | |
| "learning_rate": 4.918478260869566e-05, | |
| "loss": 0.416, | |
| "step": 190 | |
| }, | |
| { | |
| "epoch": 0.051759834368530024, | |
| "grad_norm": 0.3354615569114685, | |
| "learning_rate": 4.9141649413388546e-05, | |
| "loss": 0.4171, | |
| "step": 200 | |
| }, | |
| { | |
| "epoch": 0.05434782608695652, | |
| "grad_norm": 0.3466266095638275, | |
| "learning_rate": 4.9098516218081434e-05, | |
| "loss": 0.4143, | |
| "step": 210 | |
| }, | |
| { | |
| "epoch": 0.056935817805383024, | |
| "grad_norm": 0.31229594349861145, | |
| "learning_rate": 4.905538302277433e-05, | |
| "loss": 0.3969, | |
| "step": 220 | |
| }, | |
| { | |
| "epoch": 0.05952380952380952, | |
| "grad_norm": 0.33017608523368835, | |
| "learning_rate": 4.9012249827467224e-05, | |
| "loss": 0.4029, | |
| "step": 230 | |
| }, | |
| { | |
| "epoch": 0.062111801242236024, | |
| "grad_norm": 0.3223104774951935, | |
| "learning_rate": 4.896911663216011e-05, | |
| "loss": 0.4077, | |
| "step": 240 | |
| }, | |
| { | |
| "epoch": 0.06469979296066253, | |
| "grad_norm": 0.3475039601325989, | |
| "learning_rate": 4.8925983436853006e-05, | |
| "loss": 0.4054, | |
| "step": 250 | |
| }, | |
| { | |
| "epoch": 0.06728778467908902, | |
| "grad_norm": 0.3325633108615875, | |
| "learning_rate": 4.8882850241545894e-05, | |
| "loss": 0.3936, | |
| "step": 260 | |
| }, | |
| { | |
| "epoch": 0.06987577639751552, | |
| "grad_norm": 0.3039849102497101, | |
| "learning_rate": 4.883971704623879e-05, | |
| "loss": 0.3983, | |
| "step": 270 | |
| }, | |
| { | |
| "epoch": 0.07246376811594203, | |
| "grad_norm": 0.3026624023914337, | |
| "learning_rate": 4.8796583850931684e-05, | |
| "loss": 0.3917, | |
| "step": 280 | |
| }, | |
| { | |
| "epoch": 0.07505175983436853, | |
| "grad_norm": 0.28852379322052, | |
| "learning_rate": 4.875345065562457e-05, | |
| "loss": 0.386, | |
| "step": 290 | |
| }, | |
| { | |
| "epoch": 0.07763975155279502, | |
| "grad_norm": 0.29932525753974915, | |
| "learning_rate": 4.871031746031746e-05, | |
| "loss": 0.3878, | |
| "step": 300 | |
| }, | |
| { | |
| "epoch": 0.08022774327122154, | |
| "grad_norm": 0.2904777526855469, | |
| "learning_rate": 4.8667184265010355e-05, | |
| "loss": 0.3827, | |
| "step": 310 | |
| }, | |
| { | |
| "epoch": 0.08281573498964803, | |
| "grad_norm": 0.29514485597610474, | |
| "learning_rate": 4.862405106970325e-05, | |
| "loss": 0.3917, | |
| "step": 320 | |
| }, | |
| { | |
| "epoch": 0.08540372670807453, | |
| "grad_norm": 0.29739001393318176, | |
| "learning_rate": 4.858091787439614e-05, | |
| "loss": 0.3873, | |
| "step": 330 | |
| }, | |
| { | |
| "epoch": 0.08799171842650104, | |
| "grad_norm": 0.2792510688304901, | |
| "learning_rate": 4.8537784679089025e-05, | |
| "loss": 0.3815, | |
| "step": 340 | |
| }, | |
| { | |
| "epoch": 0.09057971014492754, | |
| "grad_norm": 0.3052826225757599, | |
| "learning_rate": 4.849465148378192e-05, | |
| "loss": 0.3818, | |
| "step": 350 | |
| }, | |
| { | |
| "epoch": 0.09316770186335403, | |
| "grad_norm": 0.28388407826423645, | |
| "learning_rate": 4.8451518288474815e-05, | |
| "loss": 0.3865, | |
| "step": 360 | |
| }, | |
| { | |
| "epoch": 0.09575569358178054, | |
| "grad_norm": 0.313395619392395, | |
| "learning_rate": 4.84083850931677e-05, | |
| "loss": 0.3747, | |
| "step": 370 | |
| }, | |
| { | |
| "epoch": 0.09834368530020704, | |
| "grad_norm": 0.30021408200263977, | |
| "learning_rate": 4.83652518978606e-05, | |
| "loss": 0.3838, | |
| "step": 380 | |
| }, | |
| { | |
| "epoch": 0.10093167701863354, | |
| "grad_norm": 0.2741802930831909, | |
| "learning_rate": 4.8322118702553486e-05, | |
| "loss": 0.3713, | |
| "step": 390 | |
| }, | |
| { | |
| "epoch": 0.10351966873706005, | |
| "grad_norm": 0.29940980672836304, | |
| "learning_rate": 4.8278985507246374e-05, | |
| "loss": 0.3916, | |
| "step": 400 | |
| }, | |
| { | |
| "epoch": 0.10610766045548654, | |
| "grad_norm": 0.29653653502464294, | |
| "learning_rate": 4.8235852311939275e-05, | |
| "loss": 0.3836, | |
| "step": 410 | |
| }, | |
| { | |
| "epoch": 0.10869565217391304, | |
| "grad_norm": 0.31111636757850647, | |
| "learning_rate": 4.819271911663216e-05, | |
| "loss": 0.3723, | |
| "step": 420 | |
| }, | |
| { | |
| "epoch": 0.11128364389233955, | |
| "grad_norm": 0.30432841181755066, | |
| "learning_rate": 4.814958592132505e-05, | |
| "loss": 0.391, | |
| "step": 430 | |
| }, | |
| { | |
| "epoch": 0.11387163561076605, | |
| "grad_norm": 0.2829114496707916, | |
| "learning_rate": 4.8106452726017946e-05, | |
| "loss": 0.3831, | |
| "step": 440 | |
| }, | |
| { | |
| "epoch": 0.11645962732919254, | |
| "grad_norm": 0.2797948718070984, | |
| "learning_rate": 4.806331953071084e-05, | |
| "loss": 0.3729, | |
| "step": 450 | |
| }, | |
| { | |
| "epoch": 0.11904761904761904, | |
| "grad_norm": 0.29887112975120544, | |
| "learning_rate": 4.802018633540373e-05, | |
| "loss": 0.3693, | |
| "step": 460 | |
| }, | |
| { | |
| "epoch": 0.12163561076604555, | |
| "grad_norm": 0.2550595700740814, | |
| "learning_rate": 4.7977053140096624e-05, | |
| "loss": 0.3694, | |
| "step": 470 | |
| }, | |
| { | |
| "epoch": 0.12422360248447205, | |
| "grad_norm": 0.28271788358688354, | |
| "learning_rate": 4.793391994478951e-05, | |
| "loss": 0.3769, | |
| "step": 480 | |
| }, | |
| { | |
| "epoch": 0.12681159420289856, | |
| "grad_norm": 0.27268651127815247, | |
| "learning_rate": 4.78907867494824e-05, | |
| "loss": 0.3717, | |
| "step": 490 | |
| }, | |
| { | |
| "epoch": 0.12939958592132506, | |
| "grad_norm": 0.3002064824104309, | |
| "learning_rate": 4.78476535541753e-05, | |
| "loss": 0.3648, | |
| "step": 500 | |
| }, | |
| { | |
| "epoch": 0.13198757763975155, | |
| "grad_norm": 0.28463032841682434, | |
| "learning_rate": 4.780452035886819e-05, | |
| "loss": 0.3633, | |
| "step": 510 | |
| }, | |
| { | |
| "epoch": 0.13457556935817805, | |
| "grad_norm": 0.29805323481559753, | |
| "learning_rate": 4.776138716356108e-05, | |
| "loss": 0.3642, | |
| "step": 520 | |
| }, | |
| { | |
| "epoch": 0.13716356107660455, | |
| "grad_norm": 0.2904551327228546, | |
| "learning_rate": 4.771825396825397e-05, | |
| "loss": 0.3696, | |
| "step": 530 | |
| }, | |
| { | |
| "epoch": 0.13975155279503104, | |
| "grad_norm": 0.2726401388645172, | |
| "learning_rate": 4.767512077294686e-05, | |
| "loss": 0.3629, | |
| "step": 540 | |
| }, | |
| { | |
| "epoch": 0.14233954451345757, | |
| "grad_norm": 0.2827688753604889, | |
| "learning_rate": 4.7631987577639755e-05, | |
| "loss": 0.3613, | |
| "step": 550 | |
| }, | |
| { | |
| "epoch": 0.14492753623188406, | |
| "grad_norm": 0.25535234808921814, | |
| "learning_rate": 4.758885438233265e-05, | |
| "loss": 0.3691, | |
| "step": 560 | |
| }, | |
| { | |
| "epoch": 0.14751552795031056, | |
| "grad_norm": 0.3313714563846588, | |
| "learning_rate": 4.754572118702554e-05, | |
| "loss": 0.3719, | |
| "step": 570 | |
| }, | |
| { | |
| "epoch": 0.15010351966873706, | |
| "grad_norm": 0.3062281906604767, | |
| "learning_rate": 4.7502587991718425e-05, | |
| "loss": 0.359, | |
| "step": 580 | |
| }, | |
| { | |
| "epoch": 0.15269151138716355, | |
| "grad_norm": 0.26143062114715576, | |
| "learning_rate": 4.745945479641132e-05, | |
| "loss": 0.3633, | |
| "step": 590 | |
| }, | |
| { | |
| "epoch": 0.15527950310559005, | |
| "grad_norm": 0.2947162091732025, | |
| "learning_rate": 4.7416321601104215e-05, | |
| "loss": 0.3534, | |
| "step": 600 | |
| }, | |
| { | |
| "epoch": 0.15786749482401657, | |
| "grad_norm": 0.280143678188324, | |
| "learning_rate": 4.73731884057971e-05, | |
| "loss": 0.3681, | |
| "step": 610 | |
| }, | |
| { | |
| "epoch": 0.16045548654244307, | |
| "grad_norm": 0.2904067039489746, | |
| "learning_rate": 4.733005521048999e-05, | |
| "loss": 0.3669, | |
| "step": 620 | |
| }, | |
| { | |
| "epoch": 0.16304347826086957, | |
| "grad_norm": 0.2655963897705078, | |
| "learning_rate": 4.7286922015182886e-05, | |
| "loss": 0.364, | |
| "step": 630 | |
| }, | |
| { | |
| "epoch": 0.16563146997929606, | |
| "grad_norm": 0.2548169493675232, | |
| "learning_rate": 4.724378881987578e-05, | |
| "loss": 0.3604, | |
| "step": 640 | |
| }, | |
| { | |
| "epoch": 0.16821946169772256, | |
| "grad_norm": 0.26068902015686035, | |
| "learning_rate": 4.720065562456867e-05, | |
| "loss": 0.3573, | |
| "step": 650 | |
| }, | |
| { | |
| "epoch": 0.17080745341614906, | |
| "grad_norm": 0.2658487558364868, | |
| "learning_rate": 4.715752242926156e-05, | |
| "loss": 0.3769, | |
| "step": 660 | |
| }, | |
| { | |
| "epoch": 0.17339544513457558, | |
| "grad_norm": 0.2844776213169098, | |
| "learning_rate": 4.711438923395445e-05, | |
| "loss": 0.3538, | |
| "step": 670 | |
| }, | |
| { | |
| "epoch": 0.17598343685300208, | |
| "grad_norm": 0.2560563385486603, | |
| "learning_rate": 4.7071256038647346e-05, | |
| "loss": 0.3525, | |
| "step": 680 | |
| }, | |
| { | |
| "epoch": 0.17857142857142858, | |
| "grad_norm": 0.278079092502594, | |
| "learning_rate": 4.702812284334024e-05, | |
| "loss": 0.3497, | |
| "step": 690 | |
| }, | |
| { | |
| "epoch": 0.18115942028985507, | |
| "grad_norm": 0.2821943461894989, | |
| "learning_rate": 4.698498964803313e-05, | |
| "loss": 0.3561, | |
| "step": 700 | |
| }, | |
| { | |
| "epoch": 0.18374741200828157, | |
| "grad_norm": 0.2607298195362091, | |
| "learning_rate": 4.694185645272602e-05, | |
| "loss": 0.3616, | |
| "step": 710 | |
| }, | |
| { | |
| "epoch": 0.18633540372670807, | |
| "grad_norm": 0.27013707160949707, | |
| "learning_rate": 4.689872325741891e-05, | |
| "loss": 0.3502, | |
| "step": 720 | |
| }, | |
| { | |
| "epoch": 0.18892339544513456, | |
| "grad_norm": 0.265015184879303, | |
| "learning_rate": 4.6855590062111806e-05, | |
| "loss": 0.3463, | |
| "step": 730 | |
| }, | |
| { | |
| "epoch": 0.1915113871635611, | |
| "grad_norm": 0.27620813250541687, | |
| "learning_rate": 4.6812456866804694e-05, | |
| "loss": 0.3567, | |
| "step": 740 | |
| }, | |
| { | |
| "epoch": 0.19409937888198758, | |
| "grad_norm": 0.2566974461078644, | |
| "learning_rate": 4.676932367149759e-05, | |
| "loss": 0.3521, | |
| "step": 750 | |
| }, | |
| { | |
| "epoch": 0.19668737060041408, | |
| "grad_norm": 0.2620496153831482, | |
| "learning_rate": 4.672619047619048e-05, | |
| "loss": 0.3474, | |
| "step": 760 | |
| }, | |
| { | |
| "epoch": 0.19927536231884058, | |
| "grad_norm": 0.2598465383052826, | |
| "learning_rate": 4.668305728088337e-05, | |
| "loss": 0.3525, | |
| "step": 770 | |
| }, | |
| { | |
| "epoch": 0.20186335403726707, | |
| "grad_norm": 0.28414371609687805, | |
| "learning_rate": 4.663992408557627e-05, | |
| "loss": 0.3415, | |
| "step": 780 | |
| }, | |
| { | |
| "epoch": 0.20445134575569357, | |
| "grad_norm": 0.27969351410865784, | |
| "learning_rate": 4.6596790890269155e-05, | |
| "loss": 0.3597, | |
| "step": 790 | |
| }, | |
| { | |
| "epoch": 0.2070393374741201, | |
| "grad_norm": 0.26387929916381836, | |
| "learning_rate": 4.655365769496204e-05, | |
| "loss": 0.3452, | |
| "step": 800 | |
| }, | |
| { | |
| "epoch": 0.2096273291925466, | |
| "grad_norm": 0.27556657791137695, | |
| "learning_rate": 4.651052449965494e-05, | |
| "loss": 0.3418, | |
| "step": 810 | |
| }, | |
| { | |
| "epoch": 0.2122153209109731, | |
| "grad_norm": 0.25666093826293945, | |
| "learning_rate": 4.646739130434783e-05, | |
| "loss": 0.3494, | |
| "step": 820 | |
| }, | |
| { | |
| "epoch": 0.21480331262939958, | |
| "grad_norm": 0.24122317135334015, | |
| "learning_rate": 4.642425810904072e-05, | |
| "loss": 0.3667, | |
| "step": 830 | |
| }, | |
| { | |
| "epoch": 0.21739130434782608, | |
| "grad_norm": 0.2530350983142853, | |
| "learning_rate": 4.638112491373361e-05, | |
| "loss": 0.3543, | |
| "step": 840 | |
| }, | |
| { | |
| "epoch": 0.21997929606625258, | |
| "grad_norm": 0.26871258020401, | |
| "learning_rate": 4.63379917184265e-05, | |
| "loss": 0.3432, | |
| "step": 850 | |
| }, | |
| { | |
| "epoch": 0.2225672877846791, | |
| "grad_norm": 0.2797186076641083, | |
| "learning_rate": 4.629485852311939e-05, | |
| "loss": 0.3451, | |
| "step": 860 | |
| }, | |
| { | |
| "epoch": 0.2251552795031056, | |
| "grad_norm": 0.2654234766960144, | |
| "learning_rate": 4.6251725327812286e-05, | |
| "loss": 0.3495, | |
| "step": 870 | |
| }, | |
| { | |
| "epoch": 0.2277432712215321, | |
| "grad_norm": 0.2519735097885132, | |
| "learning_rate": 4.620859213250518e-05, | |
| "loss": 0.3499, | |
| "step": 880 | |
| }, | |
| { | |
| "epoch": 0.2303312629399586, | |
| "grad_norm": 0.24566596746444702, | |
| "learning_rate": 4.616545893719807e-05, | |
| "loss": 0.3555, | |
| "step": 890 | |
| }, | |
| { | |
| "epoch": 0.2329192546583851, | |
| "grad_norm": 0.27151158452033997, | |
| "learning_rate": 4.6122325741890956e-05, | |
| "loss": 0.3573, | |
| "step": 900 | |
| }, | |
| { | |
| "epoch": 0.23550724637681159, | |
| "grad_norm": 0.27960848808288574, | |
| "learning_rate": 4.607919254658386e-05, | |
| "loss": 0.3593, | |
| "step": 910 | |
| }, | |
| { | |
| "epoch": 0.23809523809523808, | |
| "grad_norm": 0.23981152474880219, | |
| "learning_rate": 4.6036059351276746e-05, | |
| "loss": 0.3459, | |
| "step": 920 | |
| }, | |
| { | |
| "epoch": 0.2406832298136646, | |
| "grad_norm": 0.25497132539749146, | |
| "learning_rate": 4.5992926155969634e-05, | |
| "loss": 0.3509, | |
| "step": 930 | |
| }, | |
| { | |
| "epoch": 0.2432712215320911, | |
| "grad_norm": 0.27302148938179016, | |
| "learning_rate": 4.594979296066253e-05, | |
| "loss": 0.3426, | |
| "step": 940 | |
| }, | |
| { | |
| "epoch": 0.2458592132505176, | |
| "grad_norm": 0.2586531639099121, | |
| "learning_rate": 4.590665976535542e-05, | |
| "loss": 0.3445, | |
| "step": 950 | |
| }, | |
| { | |
| "epoch": 0.2484472049689441, | |
| "grad_norm": 0.22465559840202332, | |
| "learning_rate": 4.586352657004831e-05, | |
| "loss": 0.3415, | |
| "step": 960 | |
| }, | |
| { | |
| "epoch": 0.2510351966873706, | |
| "grad_norm": 0.25479137897491455, | |
| "learning_rate": 4.5820393374741206e-05, | |
| "loss": 0.3355, | |
| "step": 970 | |
| }, | |
| { | |
| "epoch": 0.2536231884057971, | |
| "grad_norm": 0.2489393651485443, | |
| "learning_rate": 4.5777260179434094e-05, | |
| "loss": 0.3427, | |
| "step": 980 | |
| }, | |
| { | |
| "epoch": 0.2562111801242236, | |
| "grad_norm": 0.24661508202552795, | |
| "learning_rate": 4.573412698412698e-05, | |
| "loss": 0.3311, | |
| "step": 990 | |
| }, | |
| { | |
| "epoch": 0.2587991718426501, | |
| "grad_norm": 0.24593059718608856, | |
| "learning_rate": 4.569099378881988e-05, | |
| "loss": 0.3388, | |
| "step": 1000 | |
| }, | |
| { | |
| "epoch": 0.2613871635610766, | |
| "grad_norm": 0.2396044135093689, | |
| "learning_rate": 4.564786059351277e-05, | |
| "loss": 0.3508, | |
| "step": 1010 | |
| }, | |
| { | |
| "epoch": 0.2639751552795031, | |
| "grad_norm": 0.275534063577652, | |
| "learning_rate": 4.560472739820566e-05, | |
| "loss": 0.3451, | |
| "step": 1020 | |
| }, | |
| { | |
| "epoch": 0.2665631469979296, | |
| "grad_norm": 0.2593844532966614, | |
| "learning_rate": 4.5561594202898555e-05, | |
| "loss": 0.3359, | |
| "step": 1030 | |
| }, | |
| { | |
| "epoch": 0.2691511387163561, | |
| "grad_norm": 0.2667211592197418, | |
| "learning_rate": 4.551846100759144e-05, | |
| "loss": 0.3446, | |
| "step": 1040 | |
| }, | |
| { | |
| "epoch": 0.2717391304347826, | |
| "grad_norm": 0.26292136311531067, | |
| "learning_rate": 4.547532781228434e-05, | |
| "loss": 0.3418, | |
| "step": 1050 | |
| }, | |
| { | |
| "epoch": 0.2743271221532091, | |
| "grad_norm": 0.2429954707622528, | |
| "learning_rate": 4.543219461697723e-05, | |
| "loss": 0.3411, | |
| "step": 1060 | |
| }, | |
| { | |
| "epoch": 0.2769151138716356, | |
| "grad_norm": 0.23935279250144958, | |
| "learning_rate": 4.538906142167012e-05, | |
| "loss": 0.3479, | |
| "step": 1070 | |
| }, | |
| { | |
| "epoch": 0.2795031055900621, | |
| "grad_norm": 0.24318240582942963, | |
| "learning_rate": 4.534592822636301e-05, | |
| "loss": 0.3363, | |
| "step": 1080 | |
| }, | |
| { | |
| "epoch": 0.28209109730848864, | |
| "grad_norm": 0.23401808738708496, | |
| "learning_rate": 4.53027950310559e-05, | |
| "loss": 0.3507, | |
| "step": 1090 | |
| }, | |
| { | |
| "epoch": 0.28467908902691513, | |
| "grad_norm": 0.235195592045784, | |
| "learning_rate": 4.52596618357488e-05, | |
| "loss": 0.3452, | |
| "step": 1100 | |
| }, | |
| { | |
| "epoch": 0.28726708074534163, | |
| "grad_norm": 0.25817984342575073, | |
| "learning_rate": 4.5216528640441686e-05, | |
| "loss": 0.3227, | |
| "step": 1110 | |
| }, | |
| { | |
| "epoch": 0.2898550724637681, | |
| "grad_norm": 0.2499350905418396, | |
| "learning_rate": 4.5173395445134574e-05, | |
| "loss": 0.341, | |
| "step": 1120 | |
| }, | |
| { | |
| "epoch": 0.2924430641821946, | |
| "grad_norm": 0.23882877826690674, | |
| "learning_rate": 4.513026224982747e-05, | |
| "loss": 0.3484, | |
| "step": 1130 | |
| }, | |
| { | |
| "epoch": 0.2950310559006211, | |
| "grad_norm": 0.2699326276779175, | |
| "learning_rate": 4.508712905452036e-05, | |
| "loss": 0.3427, | |
| "step": 1140 | |
| }, | |
| { | |
| "epoch": 0.2976190476190476, | |
| "grad_norm": 0.2389804571866989, | |
| "learning_rate": 4.504399585921325e-05, | |
| "loss": 0.3188, | |
| "step": 1150 | |
| }, | |
| { | |
| "epoch": 0.3002070393374741, | |
| "grad_norm": 0.24539288878440857, | |
| "learning_rate": 4.5000862663906146e-05, | |
| "loss": 0.3434, | |
| "step": 1160 | |
| }, | |
| { | |
| "epoch": 0.3027950310559006, | |
| "grad_norm": 0.25557300448417664, | |
| "learning_rate": 4.4957729468599034e-05, | |
| "loss": 0.3202, | |
| "step": 1170 | |
| }, | |
| { | |
| "epoch": 0.3053830227743271, | |
| "grad_norm": 0.2359408438205719, | |
| "learning_rate": 4.491459627329193e-05, | |
| "loss": 0.3416, | |
| "step": 1180 | |
| }, | |
| { | |
| "epoch": 0.3079710144927536, | |
| "grad_norm": 0.2432994544506073, | |
| "learning_rate": 4.4871463077984824e-05, | |
| "loss": 0.3386, | |
| "step": 1190 | |
| }, | |
| { | |
| "epoch": 0.3105590062111801, | |
| "grad_norm": 0.24209333956241608, | |
| "learning_rate": 4.482832988267771e-05, | |
| "loss": 0.3364, | |
| "step": 1200 | |
| }, | |
| { | |
| "epoch": 0.31314699792960665, | |
| "grad_norm": 0.25228288769721985, | |
| "learning_rate": 4.47851966873706e-05, | |
| "loss": 0.3245, | |
| "step": 1210 | |
| }, | |
| { | |
| "epoch": 0.31573498964803315, | |
| "grad_norm": 0.21681800484657288, | |
| "learning_rate": 4.4742063492063494e-05, | |
| "loss": 0.3417, | |
| "step": 1220 | |
| }, | |
| { | |
| "epoch": 0.31832298136645965, | |
| "grad_norm": 0.23574399948120117, | |
| "learning_rate": 4.469893029675639e-05, | |
| "loss": 0.3399, | |
| "step": 1230 | |
| }, | |
| { | |
| "epoch": 0.32091097308488614, | |
| "grad_norm": 0.22972829639911652, | |
| "learning_rate": 4.465579710144928e-05, | |
| "loss": 0.3245, | |
| "step": 1240 | |
| }, | |
| { | |
| "epoch": 0.32349896480331264, | |
| "grad_norm": 0.22471453249454498, | |
| "learning_rate": 4.461266390614217e-05, | |
| "loss": 0.3273, | |
| "step": 1250 | |
| }, | |
| { | |
| "epoch": 0.32608695652173914, | |
| "grad_norm": 0.2130855917930603, | |
| "learning_rate": 4.456953071083506e-05, | |
| "loss": 0.3279, | |
| "step": 1260 | |
| }, | |
| { | |
| "epoch": 0.32867494824016563, | |
| "grad_norm": 0.22941111028194427, | |
| "learning_rate": 4.452639751552795e-05, | |
| "loss": 0.3263, | |
| "step": 1270 | |
| }, | |
| { | |
| "epoch": 0.33126293995859213, | |
| "grad_norm": 0.24117586016654968, | |
| "learning_rate": 4.448326432022085e-05, | |
| "loss": 0.3337, | |
| "step": 1280 | |
| }, | |
| { | |
| "epoch": 0.3338509316770186, | |
| "grad_norm": 0.2593615651130676, | |
| "learning_rate": 4.444013112491374e-05, | |
| "loss": 0.3368, | |
| "step": 1290 | |
| }, | |
| { | |
| "epoch": 0.3364389233954451, | |
| "grad_norm": 0.23699291050434113, | |
| "learning_rate": 4.4396997929606625e-05, | |
| "loss": 0.332, | |
| "step": 1300 | |
| }, | |
| { | |
| "epoch": 0.3390269151138716, | |
| "grad_norm": 0.2599596381187439, | |
| "learning_rate": 4.435386473429952e-05, | |
| "loss": 0.3303, | |
| "step": 1310 | |
| }, | |
| { | |
| "epoch": 0.3416149068322981, | |
| "grad_norm": 0.23753949999809265, | |
| "learning_rate": 4.4310731538992415e-05, | |
| "loss": 0.3349, | |
| "step": 1320 | |
| }, | |
| { | |
| "epoch": 0.3442028985507246, | |
| "grad_norm": 0.2286694347858429, | |
| "learning_rate": 4.42675983436853e-05, | |
| "loss": 0.3342, | |
| "step": 1330 | |
| }, | |
| { | |
| "epoch": 0.34679089026915116, | |
| "grad_norm": 0.23095780611038208, | |
| "learning_rate": 4.422446514837819e-05, | |
| "loss": 0.3242, | |
| "step": 1340 | |
| }, | |
| { | |
| "epoch": 0.34937888198757766, | |
| "grad_norm": 0.2591758072376251, | |
| "learning_rate": 4.4181331953071086e-05, | |
| "loss": 0.3307, | |
| "step": 1350 | |
| }, | |
| { | |
| "epoch": 0.35196687370600416, | |
| "grad_norm": 0.23518985509872437, | |
| "learning_rate": 4.4138198757763974e-05, | |
| "loss": 0.3186, | |
| "step": 1360 | |
| }, | |
| { | |
| "epoch": 0.35455486542443065, | |
| "grad_norm": 0.2448560744524002, | |
| "learning_rate": 4.409506556245687e-05, | |
| "loss": 0.3198, | |
| "step": 1370 | |
| }, | |
| { | |
| "epoch": 0.35714285714285715, | |
| "grad_norm": 0.24746116995811462, | |
| "learning_rate": 4.405193236714976e-05, | |
| "loss": 0.3346, | |
| "step": 1380 | |
| }, | |
| { | |
| "epoch": 0.35973084886128365, | |
| "grad_norm": 0.2440098077058792, | |
| "learning_rate": 4.400879917184265e-05, | |
| "loss": 0.3164, | |
| "step": 1390 | |
| }, | |
| { | |
| "epoch": 0.36231884057971014, | |
| "grad_norm": 0.2528471350669861, | |
| "learning_rate": 4.396566597653554e-05, | |
| "loss": 0.3293, | |
| "step": 1400 | |
| }, | |
| { | |
| "epoch": 0.36490683229813664, | |
| "grad_norm": 0.25530192255973816, | |
| "learning_rate": 4.3922532781228434e-05, | |
| "loss": 0.3368, | |
| "step": 1410 | |
| }, | |
| { | |
| "epoch": 0.36749482401656314, | |
| "grad_norm": 0.21804003417491913, | |
| "learning_rate": 4.387939958592133e-05, | |
| "loss": 0.3204, | |
| "step": 1420 | |
| }, | |
| { | |
| "epoch": 0.37008281573498963, | |
| "grad_norm": 0.22486785054206848, | |
| "learning_rate": 4.383626639061422e-05, | |
| "loss": 0.3309, | |
| "step": 1430 | |
| }, | |
| { | |
| "epoch": 0.37267080745341613, | |
| "grad_norm": 0.2279578149318695, | |
| "learning_rate": 4.379313319530711e-05, | |
| "loss": 0.33, | |
| "step": 1440 | |
| }, | |
| { | |
| "epoch": 0.3752587991718426, | |
| "grad_norm": 0.23841218650341034, | |
| "learning_rate": 4.375e-05, | |
| "loss": 0.3144, | |
| "step": 1450 | |
| }, | |
| { | |
| "epoch": 0.3778467908902691, | |
| "grad_norm": 0.2441750168800354, | |
| "learning_rate": 4.3706866804692894e-05, | |
| "loss": 0.336, | |
| "step": 1460 | |
| }, | |
| { | |
| "epoch": 0.3804347826086957, | |
| "grad_norm": 0.22941255569458008, | |
| "learning_rate": 4.366373360938579e-05, | |
| "loss": 0.3419, | |
| "step": 1470 | |
| }, | |
| { | |
| "epoch": 0.3830227743271222, | |
| "grad_norm": 0.22717274725437164, | |
| "learning_rate": 4.362060041407868e-05, | |
| "loss": 0.3341, | |
| "step": 1480 | |
| }, | |
| { | |
| "epoch": 0.38561076604554867, | |
| "grad_norm": 0.22173310816287994, | |
| "learning_rate": 4.3577467218771565e-05, | |
| "loss": 0.3285, | |
| "step": 1490 | |
| }, | |
| { | |
| "epoch": 0.38819875776397517, | |
| "grad_norm": 0.26800036430358887, | |
| "learning_rate": 4.353433402346446e-05, | |
| "loss": 0.3263, | |
| "step": 1500 | |
| }, | |
| { | |
| "epoch": 0.39078674948240166, | |
| "grad_norm": 0.2275414764881134, | |
| "learning_rate": 4.3491200828157355e-05, | |
| "loss": 0.3393, | |
| "step": 1510 | |
| }, | |
| { | |
| "epoch": 0.39337474120082816, | |
| "grad_norm": 0.23832468688488007, | |
| "learning_rate": 4.344806763285024e-05, | |
| "loss": 0.3207, | |
| "step": 1520 | |
| }, | |
| { | |
| "epoch": 0.39596273291925466, | |
| "grad_norm": 0.24205411970615387, | |
| "learning_rate": 4.340493443754314e-05, | |
| "loss": 0.3174, | |
| "step": 1530 | |
| }, | |
| { | |
| "epoch": 0.39855072463768115, | |
| "grad_norm": 0.23856307566165924, | |
| "learning_rate": 4.3361801242236025e-05, | |
| "loss": 0.3286, | |
| "step": 1540 | |
| }, | |
| { | |
| "epoch": 0.40113871635610765, | |
| "grad_norm": 0.22671808302402496, | |
| "learning_rate": 4.331866804692892e-05, | |
| "loss": 0.3274, | |
| "step": 1550 | |
| }, | |
| { | |
| "epoch": 0.40372670807453415, | |
| "grad_norm": 0.21958813071250916, | |
| "learning_rate": 4.3275534851621815e-05, | |
| "loss": 0.3205, | |
| "step": 1560 | |
| }, | |
| { | |
| "epoch": 0.40631469979296064, | |
| "grad_norm": 0.2193451225757599, | |
| "learning_rate": 4.32324016563147e-05, | |
| "loss": 0.3153, | |
| "step": 1570 | |
| }, | |
| { | |
| "epoch": 0.40890269151138714, | |
| "grad_norm": 0.22171197831630707, | |
| "learning_rate": 4.318926846100759e-05, | |
| "loss": 0.3314, | |
| "step": 1580 | |
| }, | |
| { | |
| "epoch": 0.4114906832298137, | |
| "grad_norm": 0.23245662450790405, | |
| "learning_rate": 4.3146135265700486e-05, | |
| "loss": 0.3202, | |
| "step": 1590 | |
| }, | |
| { | |
| "epoch": 0.4140786749482402, | |
| "grad_norm": 0.20662301778793335, | |
| "learning_rate": 4.310300207039338e-05, | |
| "loss": 0.3112, | |
| "step": 1600 | |
| }, | |
| { | |
| "epoch": 0.4166666666666667, | |
| "grad_norm": 0.2323458194732666, | |
| "learning_rate": 4.305986887508627e-05, | |
| "loss": 0.3208, | |
| "step": 1610 | |
| }, | |
| { | |
| "epoch": 0.4192546583850932, | |
| "grad_norm": 0.21535032987594604, | |
| "learning_rate": 4.3016735679779156e-05, | |
| "loss": 0.3248, | |
| "step": 1620 | |
| }, | |
| { | |
| "epoch": 0.4218426501035197, | |
| "grad_norm": 0.2329694777727127, | |
| "learning_rate": 4.297360248447205e-05, | |
| "loss": 0.3265, | |
| "step": 1630 | |
| }, | |
| { | |
| "epoch": 0.4244306418219462, | |
| "grad_norm": 0.22817054390907288, | |
| "learning_rate": 4.2930469289164946e-05, | |
| "loss": 0.333, | |
| "step": 1640 | |
| }, | |
| { | |
| "epoch": 0.42701863354037267, | |
| "grad_norm": 0.22383803129196167, | |
| "learning_rate": 4.2887336093857834e-05, | |
| "loss": 0.3293, | |
| "step": 1650 | |
| }, | |
| { | |
| "epoch": 0.42960662525879917, | |
| "grad_norm": 0.2358909696340561, | |
| "learning_rate": 4.284420289855073e-05, | |
| "loss": 0.3214, | |
| "step": 1660 | |
| }, | |
| { | |
| "epoch": 0.43219461697722567, | |
| "grad_norm": 0.2268248051404953, | |
| "learning_rate": 4.280106970324362e-05, | |
| "loss": 0.3223, | |
| "step": 1670 | |
| }, | |
| { | |
| "epoch": 0.43478260869565216, | |
| "grad_norm": 0.22738555073738098, | |
| "learning_rate": 4.2757936507936505e-05, | |
| "loss": 0.3326, | |
| "step": 1680 | |
| }, | |
| { | |
| "epoch": 0.43737060041407866, | |
| "grad_norm": 0.206409752368927, | |
| "learning_rate": 4.2714803312629406e-05, | |
| "loss": 0.3324, | |
| "step": 1690 | |
| }, | |
| { | |
| "epoch": 0.43995859213250516, | |
| "grad_norm": 0.24513110518455505, | |
| "learning_rate": 4.2671670117322294e-05, | |
| "loss": 0.324, | |
| "step": 1700 | |
| }, | |
| { | |
| "epoch": 0.44254658385093165, | |
| "grad_norm": 0.21265286207199097, | |
| "learning_rate": 4.262853692201518e-05, | |
| "loss": 0.3196, | |
| "step": 1710 | |
| }, | |
| { | |
| "epoch": 0.4451345755693582, | |
| "grad_norm": 0.24437908828258514, | |
| "learning_rate": 4.258540372670808e-05, | |
| "loss": 0.3248, | |
| "step": 1720 | |
| }, | |
| { | |
| "epoch": 0.4477225672877847, | |
| "grad_norm": 0.2429364174604416, | |
| "learning_rate": 4.2542270531400965e-05, | |
| "loss": 0.3271, | |
| "step": 1730 | |
| }, | |
| { | |
| "epoch": 0.4503105590062112, | |
| "grad_norm": 0.2221539318561554, | |
| "learning_rate": 4.249913733609386e-05, | |
| "loss": 0.3208, | |
| "step": 1740 | |
| }, | |
| { | |
| "epoch": 0.4528985507246377, | |
| "grad_norm": 0.24334411323070526, | |
| "learning_rate": 4.2456004140786755e-05, | |
| "loss": 0.3265, | |
| "step": 1750 | |
| }, | |
| { | |
| "epoch": 0.4554865424430642, | |
| "grad_norm": 0.23080968856811523, | |
| "learning_rate": 4.241287094547964e-05, | |
| "loss": 0.3151, | |
| "step": 1760 | |
| }, | |
| { | |
| "epoch": 0.4580745341614907, | |
| "grad_norm": 0.21543864905834198, | |
| "learning_rate": 4.236973775017253e-05, | |
| "loss": 0.3307, | |
| "step": 1770 | |
| }, | |
| { | |
| "epoch": 0.4606625258799172, | |
| "grad_norm": 0.25965455174446106, | |
| "learning_rate": 4.232660455486543e-05, | |
| "loss": 0.3257, | |
| "step": 1780 | |
| }, | |
| { | |
| "epoch": 0.4632505175983437, | |
| "grad_norm": 0.22642041742801666, | |
| "learning_rate": 4.228347135955832e-05, | |
| "loss": 0.3128, | |
| "step": 1790 | |
| }, | |
| { | |
| "epoch": 0.4658385093167702, | |
| "grad_norm": 0.24067285656929016, | |
| "learning_rate": 4.224033816425121e-05, | |
| "loss": 0.3051, | |
| "step": 1800 | |
| }, | |
| { | |
| "epoch": 0.4684265010351967, | |
| "grad_norm": 0.21340426802635193, | |
| "learning_rate": 4.21972049689441e-05, | |
| "loss": 0.3247, | |
| "step": 1810 | |
| }, | |
| { | |
| "epoch": 0.47101449275362317, | |
| "grad_norm": 0.22851435840129852, | |
| "learning_rate": 4.215407177363699e-05, | |
| "loss": 0.3161, | |
| "step": 1820 | |
| }, | |
| { | |
| "epoch": 0.47360248447204967, | |
| "grad_norm": 0.22671709954738617, | |
| "learning_rate": 4.2110938578329886e-05, | |
| "loss": 0.3197, | |
| "step": 1830 | |
| }, | |
| { | |
| "epoch": 0.47619047619047616, | |
| "grad_norm": 0.22201639413833618, | |
| "learning_rate": 4.2067805383022774e-05, | |
| "loss": 0.3166, | |
| "step": 1840 | |
| }, | |
| { | |
| "epoch": 0.4787784679089027, | |
| "grad_norm": 0.22487780451774597, | |
| "learning_rate": 4.202467218771567e-05, | |
| "loss": 0.3101, | |
| "step": 1850 | |
| }, | |
| { | |
| "epoch": 0.4813664596273292, | |
| "grad_norm": 0.2108740210533142, | |
| "learning_rate": 4.1981538992408556e-05, | |
| "loss": 0.3208, | |
| "step": 1860 | |
| }, | |
| { | |
| "epoch": 0.4839544513457557, | |
| "grad_norm": 0.22232213616371155, | |
| "learning_rate": 4.193840579710145e-05, | |
| "loss": 0.3248, | |
| "step": 1870 | |
| }, | |
| { | |
| "epoch": 0.4865424430641822, | |
| "grad_norm": 0.2102160006761551, | |
| "learning_rate": 4.1895272601794346e-05, | |
| "loss": 0.3112, | |
| "step": 1880 | |
| }, | |
| { | |
| "epoch": 0.4891304347826087, | |
| "grad_norm": 0.2171991467475891, | |
| "learning_rate": 4.1852139406487234e-05, | |
| "loss": 0.3201, | |
| "step": 1890 | |
| }, | |
| { | |
| "epoch": 0.4917184265010352, | |
| "grad_norm": 0.22287575900554657, | |
| "learning_rate": 4.180900621118012e-05, | |
| "loss": 0.3178, | |
| "step": 1900 | |
| }, | |
| { | |
| "epoch": 0.4943064182194617, | |
| "grad_norm": 0.23184508085250854, | |
| "learning_rate": 4.176587301587302e-05, | |
| "loss": 0.3156, | |
| "step": 1910 | |
| }, | |
| { | |
| "epoch": 0.4968944099378882, | |
| "grad_norm": 0.23387432098388672, | |
| "learning_rate": 4.172273982056591e-05, | |
| "loss": 0.3067, | |
| "step": 1920 | |
| }, | |
| { | |
| "epoch": 0.4994824016563147, | |
| "grad_norm": 0.2195172756910324, | |
| "learning_rate": 4.16796066252588e-05, | |
| "loss": 0.3149, | |
| "step": 1930 | |
| }, | |
| { | |
| "epoch": 0.5020703933747412, | |
| "grad_norm": 0.23636245727539062, | |
| "learning_rate": 4.1636473429951694e-05, | |
| "loss": 0.3335, | |
| "step": 1940 | |
| }, | |
| { | |
| "epoch": 0.5046583850931677, | |
| "grad_norm": 0.23955215513706207, | |
| "learning_rate": 4.159334023464458e-05, | |
| "loss": 0.3207, | |
| "step": 1950 | |
| }, | |
| { | |
| "epoch": 0.5072463768115942, | |
| "grad_norm": 0.23382526636123657, | |
| "learning_rate": 4.155020703933748e-05, | |
| "loss": 0.3203, | |
| "step": 1960 | |
| }, | |
| { | |
| "epoch": 0.5098343685300207, | |
| "grad_norm": 0.20842687785625458, | |
| "learning_rate": 4.150707384403037e-05, | |
| "loss": 0.3144, | |
| "step": 1970 | |
| }, | |
| { | |
| "epoch": 0.5124223602484472, | |
| "grad_norm": 0.230192631483078, | |
| "learning_rate": 4.146394064872326e-05, | |
| "loss": 0.3133, | |
| "step": 1980 | |
| }, | |
| { | |
| "epoch": 0.5150103519668737, | |
| "grad_norm": 0.29996928572654724, | |
| "learning_rate": 4.142080745341615e-05, | |
| "loss": 0.3145, | |
| "step": 1990 | |
| }, | |
| { | |
| "epoch": 0.5175983436853002, | |
| "grad_norm": 0.2318979650735855, | |
| "learning_rate": 4.137767425810904e-05, | |
| "loss": 0.3245, | |
| "step": 2000 | |
| }, | |
| { | |
| "epoch": 0.5201863354037267, | |
| "grad_norm": 0.25185373425483704, | |
| "learning_rate": 4.133454106280194e-05, | |
| "loss": 0.3179, | |
| "step": 2010 | |
| }, | |
| { | |
| "epoch": 0.5227743271221532, | |
| "grad_norm": 0.2322726994752884, | |
| "learning_rate": 4.1291407867494825e-05, | |
| "loss": 0.3242, | |
| "step": 2020 | |
| }, | |
| { | |
| "epoch": 0.5253623188405797, | |
| "grad_norm": 0.2243122011423111, | |
| "learning_rate": 4.124827467218772e-05, | |
| "loss": 0.3083, | |
| "step": 2030 | |
| }, | |
| { | |
| "epoch": 0.5279503105590062, | |
| "grad_norm": 0.23717275261878967, | |
| "learning_rate": 4.120514147688061e-05, | |
| "loss": 0.3168, | |
| "step": 2040 | |
| }, | |
| { | |
| "epoch": 0.5305383022774327, | |
| "grad_norm": 0.20267385244369507, | |
| "learning_rate": 4.11620082815735e-05, | |
| "loss": 0.3193, | |
| "step": 2050 | |
| }, | |
| { | |
| "epoch": 0.5331262939958592, | |
| "grad_norm": 0.2284848392009735, | |
| "learning_rate": 4.11188750862664e-05, | |
| "loss": 0.3189, | |
| "step": 2060 | |
| }, | |
| { | |
| "epoch": 0.5357142857142857, | |
| "grad_norm": 0.24308684468269348, | |
| "learning_rate": 4.1075741890959286e-05, | |
| "loss": 0.3135, | |
| "step": 2070 | |
| }, | |
| { | |
| "epoch": 0.5383022774327122, | |
| "grad_norm": 0.2117205113172531, | |
| "learning_rate": 4.1032608695652174e-05, | |
| "loss": 0.3168, | |
| "step": 2080 | |
| }, | |
| { | |
| "epoch": 0.5408902691511387, | |
| "grad_norm": 0.2043170928955078, | |
| "learning_rate": 4.098947550034506e-05, | |
| "loss": 0.3124, | |
| "step": 2090 | |
| }, | |
| { | |
| "epoch": 0.5434782608695652, | |
| "grad_norm": 0.22353848814964294, | |
| "learning_rate": 4.094634230503796e-05, | |
| "loss": 0.3148, | |
| "step": 2100 | |
| }, | |
| { | |
| "epoch": 0.5460662525879917, | |
| "grad_norm": 0.218248650431633, | |
| "learning_rate": 4.090320910973085e-05, | |
| "loss": 0.3152, | |
| "step": 2110 | |
| }, | |
| { | |
| "epoch": 0.5486542443064182, | |
| "grad_norm": 0.2373238354921341, | |
| "learning_rate": 4.086007591442374e-05, | |
| "loss": 0.3225, | |
| "step": 2120 | |
| }, | |
| { | |
| "epoch": 0.5512422360248447, | |
| "grad_norm": 0.213827446103096, | |
| "learning_rate": 4.0816942719116634e-05, | |
| "loss": 0.3058, | |
| "step": 2130 | |
| }, | |
| { | |
| "epoch": 0.5538302277432712, | |
| "grad_norm": 0.22780118882656097, | |
| "learning_rate": 4.077380952380952e-05, | |
| "loss": 0.3082, | |
| "step": 2140 | |
| }, | |
| { | |
| "epoch": 0.5564182194616977, | |
| "grad_norm": 0.22952698171138763, | |
| "learning_rate": 4.073067632850242e-05, | |
| "loss": 0.3186, | |
| "step": 2150 | |
| }, | |
| { | |
| "epoch": 0.5590062111801242, | |
| "grad_norm": 0.21455378830432892, | |
| "learning_rate": 4.068754313319531e-05, | |
| "loss": 0.3075, | |
| "step": 2160 | |
| }, | |
| { | |
| "epoch": 0.5615942028985508, | |
| "grad_norm": 0.24111777544021606, | |
| "learning_rate": 4.06444099378882e-05, | |
| "loss": 0.308, | |
| "step": 2170 | |
| }, | |
| { | |
| "epoch": 0.5641821946169773, | |
| "grad_norm": 0.21158650517463684, | |
| "learning_rate": 4.060127674258109e-05, | |
| "loss": 0.3235, | |
| "step": 2180 | |
| }, | |
| { | |
| "epoch": 0.5667701863354038, | |
| "grad_norm": 0.2563655972480774, | |
| "learning_rate": 4.055814354727398e-05, | |
| "loss": 0.3259, | |
| "step": 2190 | |
| }, | |
| { | |
| "epoch": 0.5693581780538303, | |
| "grad_norm": 0.2210429161787033, | |
| "learning_rate": 4.051501035196688e-05, | |
| "loss": 0.3127, | |
| "step": 2200 | |
| }, | |
| { | |
| "epoch": 0.5719461697722568, | |
| "grad_norm": 0.22102981805801392, | |
| "learning_rate": 4.0471877156659765e-05, | |
| "loss": 0.3369, | |
| "step": 2210 | |
| }, | |
| { | |
| "epoch": 0.5745341614906833, | |
| "grad_norm": 0.22597503662109375, | |
| "learning_rate": 4.042874396135266e-05, | |
| "loss": 0.3301, | |
| "step": 2220 | |
| }, | |
| { | |
| "epoch": 0.5771221532091098, | |
| "grad_norm": 0.23010863363742828, | |
| "learning_rate": 4.038561076604555e-05, | |
| "loss": 0.3317, | |
| "step": 2230 | |
| }, | |
| { | |
| "epoch": 0.5797101449275363, | |
| "grad_norm": 0.22111916542053223, | |
| "learning_rate": 4.034247757073844e-05, | |
| "loss": 0.3237, | |
| "step": 2240 | |
| }, | |
| { | |
| "epoch": 0.5822981366459627, | |
| "grad_norm": 0.2222849726676941, | |
| "learning_rate": 4.029934437543134e-05, | |
| "loss": 0.3071, | |
| "step": 2250 | |
| }, | |
| { | |
| "epoch": 0.5848861283643892, | |
| "grad_norm": 0.2242390662431717, | |
| "learning_rate": 4.0256211180124225e-05, | |
| "loss": 0.3149, | |
| "step": 2260 | |
| }, | |
| { | |
| "epoch": 0.5874741200828157, | |
| "grad_norm": 0.23385004699230194, | |
| "learning_rate": 4.0213077984817113e-05, | |
| "loss": 0.3046, | |
| "step": 2270 | |
| }, | |
| { | |
| "epoch": 0.5900621118012422, | |
| "grad_norm": 0.21285676956176758, | |
| "learning_rate": 4.016994478951001e-05, | |
| "loss": 0.311, | |
| "step": 2280 | |
| }, | |
| { | |
| "epoch": 0.5926501035196687, | |
| "grad_norm": 0.2448578029870987, | |
| "learning_rate": 4.01268115942029e-05, | |
| "loss": 0.3218, | |
| "step": 2290 | |
| }, | |
| { | |
| "epoch": 0.5952380952380952, | |
| "grad_norm": 0.21924273669719696, | |
| "learning_rate": 4.008367839889579e-05, | |
| "loss": 0.3103, | |
| "step": 2300 | |
| }, | |
| { | |
| "epoch": 0.5978260869565217, | |
| "grad_norm": 0.21449491381645203, | |
| "learning_rate": 4.0040545203588686e-05, | |
| "loss": 0.3096, | |
| "step": 2310 | |
| }, | |
| { | |
| "epoch": 0.6004140786749482, | |
| "grad_norm": 0.20816919207572937, | |
| "learning_rate": 3.9997412008281574e-05, | |
| "loss": 0.3173, | |
| "step": 2320 | |
| }, | |
| { | |
| "epoch": 0.6030020703933747, | |
| "grad_norm": 0.22972124814987183, | |
| "learning_rate": 3.995427881297447e-05, | |
| "loss": 0.317, | |
| "step": 2330 | |
| }, | |
| { | |
| "epoch": 0.6055900621118012, | |
| "grad_norm": 0.2285618633031845, | |
| "learning_rate": 3.9911145617667356e-05, | |
| "loss": 0.3058, | |
| "step": 2340 | |
| }, | |
| { | |
| "epoch": 0.6081780538302277, | |
| "grad_norm": 0.21451903879642487, | |
| "learning_rate": 3.986801242236025e-05, | |
| "loss": 0.3146, | |
| "step": 2350 | |
| }, | |
| { | |
| "epoch": 0.6107660455486542, | |
| "grad_norm": 0.21774156391620636, | |
| "learning_rate": 3.982487922705314e-05, | |
| "loss": 0.309, | |
| "step": 2360 | |
| }, | |
| { | |
| "epoch": 0.6133540372670807, | |
| "grad_norm": 0.22307203710079193, | |
| "learning_rate": 3.9781746031746034e-05, | |
| "loss": 0.3156, | |
| "step": 2370 | |
| }, | |
| { | |
| "epoch": 0.6159420289855072, | |
| "grad_norm": 0.24002742767333984, | |
| "learning_rate": 3.973861283643893e-05, | |
| "loss": 0.3051, | |
| "step": 2380 | |
| }, | |
| { | |
| "epoch": 0.6185300207039337, | |
| "grad_norm": 0.21402250230312347, | |
| "learning_rate": 3.969547964113182e-05, | |
| "loss": 0.3051, | |
| "step": 2390 | |
| }, | |
| { | |
| "epoch": 0.6211180124223602, | |
| "grad_norm": 0.2224249541759491, | |
| "learning_rate": 3.9652346445824705e-05, | |
| "loss": 0.3063, | |
| "step": 2400 | |
| }, | |
| { | |
| "epoch": 0.6237060041407867, | |
| "grad_norm": 0.22487872838974, | |
| "learning_rate": 3.96092132505176e-05, | |
| "loss": 0.3111, | |
| "step": 2410 | |
| }, | |
| { | |
| "epoch": 0.6262939958592133, | |
| "grad_norm": 0.2085420936346054, | |
| "learning_rate": 3.9566080055210494e-05, | |
| "loss": 0.3085, | |
| "step": 2420 | |
| }, | |
| { | |
| "epoch": 0.6288819875776398, | |
| "grad_norm": 0.25433823466300964, | |
| "learning_rate": 3.952294685990338e-05, | |
| "loss": 0.2949, | |
| "step": 2430 | |
| }, | |
| { | |
| "epoch": 0.6314699792960663, | |
| "grad_norm": 0.23129135370254517, | |
| "learning_rate": 3.947981366459628e-05, | |
| "loss": 0.3101, | |
| "step": 2440 | |
| }, | |
| { | |
| "epoch": 0.6340579710144928, | |
| "grad_norm": 0.20423346757888794, | |
| "learning_rate": 3.9436680469289165e-05, | |
| "loss": 0.3079, | |
| "step": 2450 | |
| }, | |
| { | |
| "epoch": 0.6366459627329193, | |
| "grad_norm": 0.21036268770694733, | |
| "learning_rate": 3.939354727398206e-05, | |
| "loss": 0.3095, | |
| "step": 2460 | |
| }, | |
| { | |
| "epoch": 0.6392339544513458, | |
| "grad_norm": 0.2136411815881729, | |
| "learning_rate": 3.9350414078674955e-05, | |
| "loss": 0.3132, | |
| "step": 2470 | |
| }, | |
| { | |
| "epoch": 0.6418219461697723, | |
| "grad_norm": 0.2161836177110672, | |
| "learning_rate": 3.930728088336784e-05, | |
| "loss": 0.3127, | |
| "step": 2480 | |
| }, | |
| { | |
| "epoch": 0.6444099378881988, | |
| "grad_norm": 0.2031654566526413, | |
| "learning_rate": 3.926414768806073e-05, | |
| "loss": 0.3076, | |
| "step": 2490 | |
| }, | |
| { | |
| "epoch": 0.6469979296066253, | |
| "grad_norm": 0.22428163886070251, | |
| "learning_rate": 3.9221014492753625e-05, | |
| "loss": 0.3169, | |
| "step": 2500 | |
| }, | |
| { | |
| "epoch": 0.6495859213250518, | |
| "grad_norm": 0.21044711768627167, | |
| "learning_rate": 3.917788129744652e-05, | |
| "loss": 0.3131, | |
| "step": 2510 | |
| }, | |
| { | |
| "epoch": 0.6521739130434783, | |
| "grad_norm": 0.22925981879234314, | |
| "learning_rate": 3.913474810213941e-05, | |
| "loss": 0.3048, | |
| "step": 2520 | |
| }, | |
| { | |
| "epoch": 0.6547619047619048, | |
| "grad_norm": 0.20247575640678406, | |
| "learning_rate": 3.90916149068323e-05, | |
| "loss": 0.3126, | |
| "step": 2530 | |
| }, | |
| { | |
| "epoch": 0.6573498964803313, | |
| "grad_norm": 0.22203323245048523, | |
| "learning_rate": 3.904848171152519e-05, | |
| "loss": 0.3129, | |
| "step": 2540 | |
| }, | |
| { | |
| "epoch": 0.6599378881987578, | |
| "grad_norm": 0.20326444506645203, | |
| "learning_rate": 3.900534851621808e-05, | |
| "loss": 0.3032, | |
| "step": 2550 | |
| }, | |
| { | |
| "epoch": 0.6625258799171843, | |
| "grad_norm": 0.20740875601768494, | |
| "learning_rate": 3.896221532091098e-05, | |
| "loss": 0.311, | |
| "step": 2560 | |
| }, | |
| { | |
| "epoch": 0.6651138716356108, | |
| "grad_norm": 0.2004554718732834, | |
| "learning_rate": 3.891908212560387e-05, | |
| "loss": 0.2979, | |
| "step": 2570 | |
| }, | |
| { | |
| "epoch": 0.6677018633540373, | |
| "grad_norm": 0.2173566371202469, | |
| "learning_rate": 3.8875948930296756e-05, | |
| "loss": 0.3123, | |
| "step": 2580 | |
| }, | |
| { | |
| "epoch": 0.6702898550724637, | |
| "grad_norm": 0.22329995036125183, | |
| "learning_rate": 3.883281573498965e-05, | |
| "loss": 0.307, | |
| "step": 2590 | |
| }, | |
| { | |
| "epoch": 0.6728778467908902, | |
| "grad_norm": 0.2122991532087326, | |
| "learning_rate": 3.878968253968254e-05, | |
| "loss": 0.3136, | |
| "step": 2600 | |
| }, | |
| { | |
| "epoch": 0.6754658385093167, | |
| "grad_norm": 0.2094232439994812, | |
| "learning_rate": 3.8746549344375434e-05, | |
| "loss": 0.3026, | |
| "step": 2610 | |
| }, | |
| { | |
| "epoch": 0.6780538302277432, | |
| "grad_norm": 0.23355460166931152, | |
| "learning_rate": 3.870341614906832e-05, | |
| "loss": 0.3047, | |
| "step": 2620 | |
| }, | |
| { | |
| "epoch": 0.6806418219461697, | |
| "grad_norm": 0.22046047449111938, | |
| "learning_rate": 3.866028295376122e-05, | |
| "loss": 0.3081, | |
| "step": 2630 | |
| }, | |
| { | |
| "epoch": 0.6832298136645962, | |
| "grad_norm": 0.20763075351715088, | |
| "learning_rate": 3.8617149758454105e-05, | |
| "loss": 0.2954, | |
| "step": 2640 | |
| }, | |
| { | |
| "epoch": 0.6858178053830227, | |
| "grad_norm": 0.2164374738931656, | |
| "learning_rate": 3.8574016563147e-05, | |
| "loss": 0.3046, | |
| "step": 2650 | |
| }, | |
| { | |
| "epoch": 0.6884057971014492, | |
| "grad_norm": 0.39815595746040344, | |
| "learning_rate": 3.8530883367839894e-05, | |
| "loss": 0.3071, | |
| "step": 2660 | |
| }, | |
| { | |
| "epoch": 0.6909937888198758, | |
| "grad_norm": 0.22524535655975342, | |
| "learning_rate": 3.848775017253278e-05, | |
| "loss": 0.3077, | |
| "step": 2670 | |
| }, | |
| { | |
| "epoch": 0.6935817805383023, | |
| "grad_norm": 0.2300335317850113, | |
| "learning_rate": 3.844461697722567e-05, | |
| "loss": 0.3087, | |
| "step": 2680 | |
| }, | |
| { | |
| "epoch": 0.6961697722567288, | |
| "grad_norm": 0.2301955223083496, | |
| "learning_rate": 3.8401483781918565e-05, | |
| "loss": 0.3141, | |
| "step": 2690 | |
| }, | |
| { | |
| "epoch": 0.6987577639751553, | |
| "grad_norm": 0.2139981985092163, | |
| "learning_rate": 3.835835058661146e-05, | |
| "loss": 0.2946, | |
| "step": 2700 | |
| }, | |
| { | |
| "epoch": 0.7013457556935818, | |
| "grad_norm": 0.2879856526851654, | |
| "learning_rate": 3.831521739130435e-05, | |
| "loss": 0.305, | |
| "step": 2710 | |
| }, | |
| { | |
| "epoch": 0.7039337474120083, | |
| "grad_norm": 0.21703338623046875, | |
| "learning_rate": 3.827208419599724e-05, | |
| "loss": 0.3122, | |
| "step": 2720 | |
| }, | |
| { | |
| "epoch": 0.7065217391304348, | |
| "grad_norm": 0.21222138404846191, | |
| "learning_rate": 3.822895100069013e-05, | |
| "loss": 0.3105, | |
| "step": 2730 | |
| }, | |
| { | |
| "epoch": 0.7091097308488613, | |
| "grad_norm": 0.22085881233215332, | |
| "learning_rate": 3.8185817805383025e-05, | |
| "loss": 0.3051, | |
| "step": 2740 | |
| }, | |
| { | |
| "epoch": 0.7116977225672878, | |
| "grad_norm": 0.2120286077260971, | |
| "learning_rate": 3.814268461007592e-05, | |
| "loss": 0.3161, | |
| "step": 2750 | |
| }, | |
| { | |
| "epoch": 0.7142857142857143, | |
| "grad_norm": 0.2191096544265747, | |
| "learning_rate": 3.809955141476881e-05, | |
| "loss": 0.3036, | |
| "step": 2760 | |
| }, | |
| { | |
| "epoch": 0.7168737060041408, | |
| "grad_norm": 0.19695864617824554, | |
| "learning_rate": 3.8056418219461696e-05, | |
| "loss": 0.3033, | |
| "step": 2770 | |
| }, | |
| { | |
| "epoch": 0.7194616977225673, | |
| "grad_norm": 0.2128693163394928, | |
| "learning_rate": 3.801328502415459e-05, | |
| "loss": 0.3106, | |
| "step": 2780 | |
| }, | |
| { | |
| "epoch": 0.7220496894409938, | |
| "grad_norm": 0.2155313342809677, | |
| "learning_rate": 3.7970151828847486e-05, | |
| "loss": 0.3061, | |
| "step": 2790 | |
| }, | |
| { | |
| "epoch": 0.7246376811594203, | |
| "grad_norm": 0.212551549077034, | |
| "learning_rate": 3.7927018633540374e-05, | |
| "loss": 0.3094, | |
| "step": 2800 | |
| }, | |
| { | |
| "epoch": 0.7272256728778468, | |
| "grad_norm": 0.21915146708488464, | |
| "learning_rate": 3.788388543823327e-05, | |
| "loss": 0.3208, | |
| "step": 2810 | |
| }, | |
| { | |
| "epoch": 0.7298136645962733, | |
| "grad_norm": 0.20482341945171356, | |
| "learning_rate": 3.7840752242926156e-05, | |
| "loss": 0.3107, | |
| "step": 2820 | |
| }, | |
| { | |
| "epoch": 0.7324016563146998, | |
| "grad_norm": 0.20442645251750946, | |
| "learning_rate": 3.779761904761905e-05, | |
| "loss": 0.3091, | |
| "step": 2830 | |
| }, | |
| { | |
| "epoch": 0.7349896480331263, | |
| "grad_norm": 0.22753652930259705, | |
| "learning_rate": 3.7754485852311946e-05, | |
| "loss": 0.2987, | |
| "step": 2840 | |
| }, | |
| { | |
| "epoch": 0.7375776397515528, | |
| "grad_norm": 0.21697330474853516, | |
| "learning_rate": 3.7711352657004834e-05, | |
| "loss": 0.3187, | |
| "step": 2850 | |
| }, | |
| { | |
| "epoch": 0.7401656314699793, | |
| "grad_norm": 0.23610110580921173, | |
| "learning_rate": 3.766821946169772e-05, | |
| "loss": 0.3155, | |
| "step": 2860 | |
| }, | |
| { | |
| "epoch": 0.7427536231884058, | |
| "grad_norm": 0.20602217316627502, | |
| "learning_rate": 3.762508626639062e-05, | |
| "loss": 0.3174, | |
| "step": 2870 | |
| }, | |
| { | |
| "epoch": 0.7453416149068323, | |
| "grad_norm": 0.207588329911232, | |
| "learning_rate": 3.758195307108351e-05, | |
| "loss": 0.3043, | |
| "step": 2880 | |
| }, | |
| { | |
| "epoch": 0.7479296066252588, | |
| "grad_norm": 0.2304246574640274, | |
| "learning_rate": 3.75388198757764e-05, | |
| "loss": 0.3038, | |
| "step": 2890 | |
| }, | |
| { | |
| "epoch": 0.7505175983436853, | |
| "grad_norm": 0.201001837849617, | |
| "learning_rate": 3.749568668046929e-05, | |
| "loss": 0.2988, | |
| "step": 2900 | |
| }, | |
| { | |
| "epoch": 0.7531055900621118, | |
| "grad_norm": 0.2201271653175354, | |
| "learning_rate": 3.745255348516218e-05, | |
| "loss": 0.2997, | |
| "step": 2910 | |
| }, | |
| { | |
| "epoch": 0.7556935817805382, | |
| "grad_norm": 0.18496201932430267, | |
| "learning_rate": 3.740942028985508e-05, | |
| "loss": 0.3031, | |
| "step": 2920 | |
| }, | |
| { | |
| "epoch": 0.7582815734989649, | |
| "grad_norm": 0.21594169735908508, | |
| "learning_rate": 3.7366287094547965e-05, | |
| "loss": 0.3067, | |
| "step": 2930 | |
| }, | |
| { | |
| "epoch": 0.7608695652173914, | |
| "grad_norm": 0.21081900596618652, | |
| "learning_rate": 3.732315389924086e-05, | |
| "loss": 0.2986, | |
| "step": 2940 | |
| }, | |
| { | |
| "epoch": 0.7634575569358178, | |
| "grad_norm": 0.2343406081199646, | |
| "learning_rate": 3.728002070393375e-05, | |
| "loss": 0.2988, | |
| "step": 2950 | |
| }, | |
| { | |
| "epoch": 0.7660455486542443, | |
| "grad_norm": 0.22097930312156677, | |
| "learning_rate": 3.7236887508626636e-05, | |
| "loss": 0.2976, | |
| "step": 2960 | |
| }, | |
| { | |
| "epoch": 0.7686335403726708, | |
| "grad_norm": 0.20536422729492188, | |
| "learning_rate": 3.719375431331954e-05, | |
| "loss": 0.3097, | |
| "step": 2970 | |
| }, | |
| { | |
| "epoch": 0.7712215320910973, | |
| "grad_norm": 0.21830050647258759, | |
| "learning_rate": 3.7150621118012425e-05, | |
| "loss": 0.3014, | |
| "step": 2980 | |
| }, | |
| { | |
| "epoch": 0.7738095238095238, | |
| "grad_norm": 0.2149060070514679, | |
| "learning_rate": 3.7107487922705313e-05, | |
| "loss": 0.3075, | |
| "step": 2990 | |
| }, | |
| { | |
| "epoch": 0.7763975155279503, | |
| "grad_norm": 0.19886595010757446, | |
| "learning_rate": 3.706435472739821e-05, | |
| "loss": 0.3144, | |
| "step": 3000 | |
| }, | |
| { | |
| "epoch": 0.7789855072463768, | |
| "grad_norm": 0.20575444400310516, | |
| "learning_rate": 3.7021221532091096e-05, | |
| "loss": 0.2999, | |
| "step": 3010 | |
| }, | |
| { | |
| "epoch": 0.7815734989648033, | |
| "grad_norm": 0.2196149230003357, | |
| "learning_rate": 3.697808833678399e-05, | |
| "loss": 0.3094, | |
| "step": 3020 | |
| }, | |
| { | |
| "epoch": 0.7841614906832298, | |
| "grad_norm": 0.21629364788532257, | |
| "learning_rate": 3.6934955141476886e-05, | |
| "loss": 0.308, | |
| "step": 3030 | |
| }, | |
| { | |
| "epoch": 0.7867494824016563, | |
| "grad_norm": 0.2137310653924942, | |
| "learning_rate": 3.6891821946169774e-05, | |
| "loss": 0.2986, | |
| "step": 3040 | |
| }, | |
| { | |
| "epoch": 0.7893374741200828, | |
| "grad_norm": 0.20199188590049744, | |
| "learning_rate": 3.684868875086266e-05, | |
| "loss": 0.3064, | |
| "step": 3050 | |
| }, | |
| { | |
| "epoch": 0.7919254658385093, | |
| "grad_norm": 0.21547473967075348, | |
| "learning_rate": 3.6805555555555556e-05, | |
| "loss": 0.3013, | |
| "step": 3060 | |
| }, | |
| { | |
| "epoch": 0.7945134575569358, | |
| "grad_norm": 0.2733719050884247, | |
| "learning_rate": 3.676242236024845e-05, | |
| "loss": 0.3029, | |
| "step": 3070 | |
| }, | |
| { | |
| "epoch": 0.7971014492753623, | |
| "grad_norm": 0.29441699385643005, | |
| "learning_rate": 3.671928916494134e-05, | |
| "loss": 0.3011, | |
| "step": 3080 | |
| }, | |
| { | |
| "epoch": 0.7996894409937888, | |
| "grad_norm": 0.21802516281604767, | |
| "learning_rate": 3.6676155969634234e-05, | |
| "loss": 0.296, | |
| "step": 3090 | |
| }, | |
| { | |
| "epoch": 0.8022774327122153, | |
| "grad_norm": 0.20233801007270813, | |
| "learning_rate": 3.663302277432712e-05, | |
| "loss": 0.306, | |
| "step": 3100 | |
| }, | |
| { | |
| "epoch": 0.8048654244306418, | |
| "grad_norm": 0.20554248988628387, | |
| "learning_rate": 3.658988957902002e-05, | |
| "loss": 0.309, | |
| "step": 3110 | |
| }, | |
| { | |
| "epoch": 0.8074534161490683, | |
| "grad_norm": 0.20972499251365662, | |
| "learning_rate": 3.6546756383712905e-05, | |
| "loss": 0.2979, | |
| "step": 3120 | |
| }, | |
| { | |
| "epoch": 0.8100414078674948, | |
| "grad_norm": 0.22347532212734222, | |
| "learning_rate": 3.65036231884058e-05, | |
| "loss": 0.3086, | |
| "step": 3130 | |
| }, | |
| { | |
| "epoch": 0.8126293995859213, | |
| "grad_norm": 0.20977842807769775, | |
| "learning_rate": 3.646048999309869e-05, | |
| "loss": 0.3035, | |
| "step": 3140 | |
| }, | |
| { | |
| "epoch": 0.8152173913043478, | |
| "grad_norm": 0.21505331993103027, | |
| "learning_rate": 3.641735679779158e-05, | |
| "loss": 0.2932, | |
| "step": 3150 | |
| }, | |
| { | |
| "epoch": 0.8178053830227743, | |
| "grad_norm": 0.19368258118629456, | |
| "learning_rate": 3.637422360248448e-05, | |
| "loss": 0.3048, | |
| "step": 3160 | |
| }, | |
| { | |
| "epoch": 0.8203933747412008, | |
| "grad_norm": 0.19016410410404205, | |
| "learning_rate": 3.6331090407177365e-05, | |
| "loss": 0.3049, | |
| "step": 3170 | |
| }, | |
| { | |
| "epoch": 0.8229813664596274, | |
| "grad_norm": 0.21808278560638428, | |
| "learning_rate": 3.628795721187025e-05, | |
| "loss": 0.2878, | |
| "step": 3180 | |
| }, | |
| { | |
| "epoch": 0.8255693581780539, | |
| "grad_norm": 0.20243072509765625, | |
| "learning_rate": 3.624482401656315e-05, | |
| "loss": 0.3085, | |
| "step": 3190 | |
| }, | |
| { | |
| "epoch": 0.8281573498964804, | |
| "grad_norm": 0.21477043628692627, | |
| "learning_rate": 3.620169082125604e-05, | |
| "loss": 0.3004, | |
| "step": 3200 | |
| }, | |
| { | |
| "epoch": 0.8307453416149069, | |
| "grad_norm": 0.1993860900402069, | |
| "learning_rate": 3.615855762594893e-05, | |
| "loss": 0.3021, | |
| "step": 3210 | |
| }, | |
| { | |
| "epoch": 0.8333333333333334, | |
| "grad_norm": 0.20048989355564117, | |
| "learning_rate": 3.6115424430641825e-05, | |
| "loss": 0.2965, | |
| "step": 3220 | |
| }, | |
| { | |
| "epoch": 0.8359213250517599, | |
| "grad_norm": 0.2104330211877823, | |
| "learning_rate": 3.6072291235334713e-05, | |
| "loss": 0.2941, | |
| "step": 3230 | |
| }, | |
| { | |
| "epoch": 0.8385093167701864, | |
| "grad_norm": 0.22518469393253326, | |
| "learning_rate": 3.602915804002761e-05, | |
| "loss": 0.3117, | |
| "step": 3240 | |
| }, | |
| { | |
| "epoch": 0.8410973084886129, | |
| "grad_norm": 0.20357628166675568, | |
| "learning_rate": 3.59860248447205e-05, | |
| "loss": 0.297, | |
| "step": 3250 | |
| }, | |
| { | |
| "epoch": 0.8436853002070394, | |
| "grad_norm": 0.23179523646831512, | |
| "learning_rate": 3.594289164941339e-05, | |
| "loss": 0.2973, | |
| "step": 3260 | |
| }, | |
| { | |
| "epoch": 0.8462732919254659, | |
| "grad_norm": 0.22814355790615082, | |
| "learning_rate": 3.589975845410628e-05, | |
| "loss": 0.2988, | |
| "step": 3270 | |
| }, | |
| { | |
| "epoch": 0.8488612836438924, | |
| "grad_norm": 0.2123464047908783, | |
| "learning_rate": 3.5856625258799174e-05, | |
| "loss": 0.295, | |
| "step": 3280 | |
| }, | |
| { | |
| "epoch": 0.8514492753623188, | |
| "grad_norm": 0.216710165143013, | |
| "learning_rate": 3.581349206349207e-05, | |
| "loss": 0.2984, | |
| "step": 3290 | |
| }, | |
| { | |
| "epoch": 0.8540372670807453, | |
| "grad_norm": 0.22064447402954102, | |
| "learning_rate": 3.5770358868184956e-05, | |
| "loss": 0.3082, | |
| "step": 3300 | |
| }, | |
| { | |
| "epoch": 0.8566252587991718, | |
| "grad_norm": 0.21648381650447845, | |
| "learning_rate": 3.572722567287785e-05, | |
| "loss": 0.2935, | |
| "step": 3310 | |
| }, | |
| { | |
| "epoch": 0.8592132505175983, | |
| "grad_norm": 0.20240244269371033, | |
| "learning_rate": 3.568409247757074e-05, | |
| "loss": 0.3108, | |
| "step": 3320 | |
| }, | |
| { | |
| "epoch": 0.8618012422360248, | |
| "grad_norm": 0.2264847606420517, | |
| "learning_rate": 3.5640959282263634e-05, | |
| "loss": 0.2998, | |
| "step": 3330 | |
| }, | |
| { | |
| "epoch": 0.8643892339544513, | |
| "grad_norm": 0.22666993737220764, | |
| "learning_rate": 3.559782608695653e-05, | |
| "loss": 0.313, | |
| "step": 3340 | |
| }, | |
| { | |
| "epoch": 0.8669772256728778, | |
| "grad_norm": 0.19765187799930573, | |
| "learning_rate": 3.555469289164942e-05, | |
| "loss": 0.3089, | |
| "step": 3350 | |
| }, | |
| { | |
| "epoch": 0.8695652173913043, | |
| "grad_norm": 0.21958757936954498, | |
| "learning_rate": 3.5511559696342305e-05, | |
| "loss": 0.2925, | |
| "step": 3360 | |
| }, | |
| { | |
| "epoch": 0.8721532091097308, | |
| "grad_norm": 0.21202827990055084, | |
| "learning_rate": 3.546842650103519e-05, | |
| "loss": 0.3076, | |
| "step": 3370 | |
| }, | |
| { | |
| "epoch": 0.8747412008281573, | |
| "grad_norm": 0.2199050486087799, | |
| "learning_rate": 3.5425293305728094e-05, | |
| "loss": 0.3054, | |
| "step": 3380 | |
| }, | |
| { | |
| "epoch": 0.8773291925465838, | |
| "grad_norm": 0.21413950622081757, | |
| "learning_rate": 3.538216011042098e-05, | |
| "loss": 0.3118, | |
| "step": 3390 | |
| }, | |
| { | |
| "epoch": 0.8799171842650103, | |
| "grad_norm": 0.24077115952968597, | |
| "learning_rate": 3.533902691511387e-05, | |
| "loss": 0.2901, | |
| "step": 3400 | |
| }, | |
| { | |
| "epoch": 0.8825051759834368, | |
| "grad_norm": 0.19935116171836853, | |
| "learning_rate": 3.5295893719806765e-05, | |
| "loss": 0.3031, | |
| "step": 3410 | |
| }, | |
| { | |
| "epoch": 0.8850931677018633, | |
| "grad_norm": 0.19705252349376678, | |
| "learning_rate": 3.525276052449965e-05, | |
| "loss": 0.3059, | |
| "step": 3420 | |
| }, | |
| { | |
| "epoch": 0.8876811594202898, | |
| "grad_norm": 0.19009386003017426, | |
| "learning_rate": 3.520962732919255e-05, | |
| "loss": 0.3127, | |
| "step": 3430 | |
| }, | |
| { | |
| "epoch": 0.8902691511387164, | |
| "grad_norm": 0.21051499247550964, | |
| "learning_rate": 3.516649413388544e-05, | |
| "loss": 0.2901, | |
| "step": 3440 | |
| }, | |
| { | |
| "epoch": 0.8928571428571429, | |
| "grad_norm": 0.20724424719810486, | |
| "learning_rate": 3.512336093857833e-05, | |
| "loss": 0.2997, | |
| "step": 3450 | |
| }, | |
| { | |
| "epoch": 0.8954451345755694, | |
| "grad_norm": 0.21805281937122345, | |
| "learning_rate": 3.508022774327122e-05, | |
| "loss": 0.3128, | |
| "step": 3460 | |
| }, | |
| { | |
| "epoch": 0.8980331262939959, | |
| "grad_norm": 0.21885521709918976, | |
| "learning_rate": 3.5037094547964113e-05, | |
| "loss": 0.3124, | |
| "step": 3470 | |
| }, | |
| { | |
| "epoch": 0.9006211180124224, | |
| "grad_norm": 0.2068813443183899, | |
| "learning_rate": 3.499396135265701e-05, | |
| "loss": 0.3038, | |
| "step": 3480 | |
| }, | |
| { | |
| "epoch": 0.9032091097308489, | |
| "grad_norm": 0.19752545654773712, | |
| "learning_rate": 3.4950828157349896e-05, | |
| "loss": 0.2945, | |
| "step": 3490 | |
| }, | |
| { | |
| "epoch": 0.9057971014492754, | |
| "grad_norm": 0.215012788772583, | |
| "learning_rate": 3.490769496204279e-05, | |
| "loss": 0.292, | |
| "step": 3500 | |
| }, | |
| { | |
| "epoch": 0.9083850931677019, | |
| "grad_norm": 0.20747481286525726, | |
| "learning_rate": 3.486456176673568e-05, | |
| "loss": 0.3053, | |
| "step": 3510 | |
| }, | |
| { | |
| "epoch": 0.9109730848861284, | |
| "grad_norm": 0.20236340165138245, | |
| "learning_rate": 3.4821428571428574e-05, | |
| "loss": 0.2889, | |
| "step": 3520 | |
| }, | |
| { | |
| "epoch": 0.9135610766045549, | |
| "grad_norm": 0.20136864483356476, | |
| "learning_rate": 3.477829537612147e-05, | |
| "loss": 0.3006, | |
| "step": 3530 | |
| }, | |
| { | |
| "epoch": 0.9161490683229814, | |
| "grad_norm": 0.191153421998024, | |
| "learning_rate": 3.4735162180814356e-05, | |
| "loss": 0.3053, | |
| "step": 3540 | |
| }, | |
| { | |
| "epoch": 0.9187370600414079, | |
| "grad_norm": 0.2002895325422287, | |
| "learning_rate": 3.4692028985507244e-05, | |
| "loss": 0.31, | |
| "step": 3550 | |
| }, | |
| { | |
| "epoch": 0.9213250517598344, | |
| "grad_norm": 0.20413583517074585, | |
| "learning_rate": 3.464889579020014e-05, | |
| "loss": 0.302, | |
| "step": 3560 | |
| }, | |
| { | |
| "epoch": 0.9239130434782609, | |
| "grad_norm": 0.2050822377204895, | |
| "learning_rate": 3.4605762594893034e-05, | |
| "loss": 0.2897, | |
| "step": 3570 | |
| }, | |
| { | |
| "epoch": 0.9265010351966874, | |
| "grad_norm": 0.22073592245578766, | |
| "learning_rate": 3.456262939958592e-05, | |
| "loss": 0.2921, | |
| "step": 3580 | |
| }, | |
| { | |
| "epoch": 0.9290890269151139, | |
| "grad_norm": 0.20891468226909637, | |
| "learning_rate": 3.451949620427882e-05, | |
| "loss": 0.3029, | |
| "step": 3590 | |
| }, | |
| { | |
| "epoch": 0.9316770186335404, | |
| "grad_norm": 0.19531622529029846, | |
| "learning_rate": 3.4476363008971705e-05, | |
| "loss": 0.3053, | |
| "step": 3600 | |
| }, | |
| { | |
| "epoch": 0.9342650103519669, | |
| "grad_norm": 0.19616830348968506, | |
| "learning_rate": 3.44332298136646e-05, | |
| "loss": 0.2917, | |
| "step": 3610 | |
| }, | |
| { | |
| "epoch": 0.9368530020703933, | |
| "grad_norm": 0.18781477212905884, | |
| "learning_rate": 3.439009661835749e-05, | |
| "loss": 0.2925, | |
| "step": 3620 | |
| }, | |
| { | |
| "epoch": 0.9394409937888198, | |
| "grad_norm": 0.1944524645805359, | |
| "learning_rate": 3.434696342305038e-05, | |
| "loss": 0.3008, | |
| "step": 3630 | |
| }, | |
| { | |
| "epoch": 0.9420289855072463, | |
| "grad_norm": 0.22709321975708008, | |
| "learning_rate": 3.430383022774327e-05, | |
| "loss": 0.2905, | |
| "step": 3640 | |
| }, | |
| { | |
| "epoch": 0.9446169772256728, | |
| "grad_norm": 0.20355206727981567, | |
| "learning_rate": 3.4260697032436165e-05, | |
| "loss": 0.2929, | |
| "step": 3650 | |
| }, | |
| { | |
| "epoch": 0.9472049689440993, | |
| "grad_norm": 0.18706360459327698, | |
| "learning_rate": 3.421756383712906e-05, | |
| "loss": 0.2871, | |
| "step": 3660 | |
| }, | |
| { | |
| "epoch": 0.9497929606625258, | |
| "grad_norm": 0.21115171909332275, | |
| "learning_rate": 3.417443064182195e-05, | |
| "loss": 0.3022, | |
| "step": 3670 | |
| }, | |
| { | |
| "epoch": 0.9523809523809523, | |
| "grad_norm": 0.19551020860671997, | |
| "learning_rate": 3.4131297446514836e-05, | |
| "loss": 0.3045, | |
| "step": 3680 | |
| }, | |
| { | |
| "epoch": 0.9549689440993789, | |
| "grad_norm": 0.20669394731521606, | |
| "learning_rate": 3.408816425120773e-05, | |
| "loss": 0.2981, | |
| "step": 3690 | |
| }, | |
| { | |
| "epoch": 0.9575569358178054, | |
| "grad_norm": 0.20334820449352264, | |
| "learning_rate": 3.4045031055900625e-05, | |
| "loss": 0.2926, | |
| "step": 3700 | |
| }, | |
| { | |
| "epoch": 0.9601449275362319, | |
| "grad_norm": 0.2088611125946045, | |
| "learning_rate": 3.4001897860593513e-05, | |
| "loss": 0.3081, | |
| "step": 3710 | |
| }, | |
| { | |
| "epoch": 0.9627329192546584, | |
| "grad_norm": 0.19300073385238647, | |
| "learning_rate": 3.395876466528641e-05, | |
| "loss": 0.2918, | |
| "step": 3720 | |
| }, | |
| { | |
| "epoch": 0.9653209109730849, | |
| "grad_norm": 0.20454266667366028, | |
| "learning_rate": 3.3915631469979296e-05, | |
| "loss": 0.301, | |
| "step": 3730 | |
| }, | |
| { | |
| "epoch": 0.9679089026915114, | |
| "grad_norm": 0.20170961320400238, | |
| "learning_rate": 3.3872498274672184e-05, | |
| "loss": 0.291, | |
| "step": 3740 | |
| }, | |
| { | |
| "epoch": 0.9704968944099379, | |
| "grad_norm": 0.18805700540542603, | |
| "learning_rate": 3.3829365079365086e-05, | |
| "loss": 0.3003, | |
| "step": 3750 | |
| }, | |
| { | |
| "epoch": 0.9730848861283644, | |
| "grad_norm": 0.2026686817407608, | |
| "learning_rate": 3.3786231884057974e-05, | |
| "loss": 0.3019, | |
| "step": 3760 | |
| }, | |
| { | |
| "epoch": 0.9756728778467909, | |
| "grad_norm": 0.21092383563518524, | |
| "learning_rate": 3.374309868875086e-05, | |
| "loss": 0.299, | |
| "step": 3770 | |
| }, | |
| { | |
| "epoch": 0.9782608695652174, | |
| "grad_norm": 0.20221301913261414, | |
| "learning_rate": 3.3699965493443756e-05, | |
| "loss": 0.3063, | |
| "step": 3780 | |
| }, | |
| { | |
| "epoch": 0.9808488612836439, | |
| "grad_norm": 0.20995080471038818, | |
| "learning_rate": 3.365683229813665e-05, | |
| "loss": 0.2861, | |
| "step": 3790 | |
| }, | |
| { | |
| "epoch": 0.9834368530020704, | |
| "grad_norm": 0.20308387279510498, | |
| "learning_rate": 3.361369910282954e-05, | |
| "loss": 0.2995, | |
| "step": 3800 | |
| }, | |
| { | |
| "epoch": 0.9860248447204969, | |
| "grad_norm": 0.2107851803302765, | |
| "learning_rate": 3.3570565907522434e-05, | |
| "loss": 0.3118, | |
| "step": 3810 | |
| }, | |
| { | |
| "epoch": 0.9886128364389234, | |
| "grad_norm": 0.22725960612297058, | |
| "learning_rate": 3.352743271221532e-05, | |
| "loss": 0.2925, | |
| "step": 3820 | |
| }, | |
| { | |
| "epoch": 0.9912008281573499, | |
| "grad_norm": 0.2126699984073639, | |
| "learning_rate": 3.348429951690821e-05, | |
| "loss": 0.2965, | |
| "step": 3830 | |
| }, | |
| { | |
| "epoch": 0.9937888198757764, | |
| "grad_norm": 0.1876717060804367, | |
| "learning_rate": 3.344116632160111e-05, | |
| "loss": 0.2863, | |
| "step": 3840 | |
| }, | |
| { | |
| "epoch": 0.9963768115942029, | |
| "grad_norm": 0.19136710464954376, | |
| "learning_rate": 3.3398033126294e-05, | |
| "loss": 0.2893, | |
| "step": 3850 | |
| }, | |
| { | |
| "epoch": 0.9989648033126294, | |
| "grad_norm": 0.20953720808029175, | |
| "learning_rate": 3.335489993098689e-05, | |
| "loss": 0.2902, | |
| "step": 3860 | |
| }, | |
| { | |
| "epoch": 1.0015527950310559, | |
| "grad_norm": 0.1966995745897293, | |
| "learning_rate": 3.3311766735679776e-05, | |
| "loss": 0.2897, | |
| "step": 3870 | |
| }, | |
| { | |
| "epoch": 1.0041407867494825, | |
| "grad_norm": 0.20064373314380646, | |
| "learning_rate": 3.326863354037267e-05, | |
| "loss": 0.2857, | |
| "step": 3880 | |
| }, | |
| { | |
| "epoch": 1.0067287784679089, | |
| "grad_norm": 0.21787600219249725, | |
| "learning_rate": 3.3225500345065565e-05, | |
| "loss": 0.2868, | |
| "step": 3890 | |
| }, | |
| { | |
| "epoch": 1.0093167701863355, | |
| "grad_norm": 0.20619697868824005, | |
| "learning_rate": 3.318236714975845e-05, | |
| "loss": 0.2872, | |
| "step": 3900 | |
| }, | |
| { | |
| "epoch": 1.0119047619047619, | |
| "grad_norm": 0.1993846893310547, | |
| "learning_rate": 3.313923395445135e-05, | |
| "loss": 0.2867, | |
| "step": 3910 | |
| }, | |
| { | |
| "epoch": 1.0144927536231885, | |
| "grad_norm": 0.21129782497882843, | |
| "learning_rate": 3.3096100759144236e-05, | |
| "loss": 0.2811, | |
| "step": 3920 | |
| }, | |
| { | |
| "epoch": 1.0170807453416149, | |
| "grad_norm": 0.20419564843177795, | |
| "learning_rate": 3.305296756383713e-05, | |
| "loss": 0.2745, | |
| "step": 3930 | |
| }, | |
| { | |
| "epoch": 1.0196687370600415, | |
| "grad_norm": 0.18639621138572693, | |
| "learning_rate": 3.3009834368530025e-05, | |
| "loss": 0.2866, | |
| "step": 3940 | |
| }, | |
| { | |
| "epoch": 1.0222567287784678, | |
| "grad_norm": 0.21988655626773834, | |
| "learning_rate": 3.2966701173222913e-05, | |
| "loss": 0.2854, | |
| "step": 3950 | |
| }, | |
| { | |
| "epoch": 1.0248447204968945, | |
| "grad_norm": 0.1930243968963623, | |
| "learning_rate": 3.29235679779158e-05, | |
| "loss": 0.2776, | |
| "step": 3960 | |
| }, | |
| { | |
| "epoch": 1.0274327122153208, | |
| "grad_norm": 0.1990118771791458, | |
| "learning_rate": 3.2880434782608696e-05, | |
| "loss": 0.2814, | |
| "step": 3970 | |
| }, | |
| { | |
| "epoch": 1.0300207039337475, | |
| "grad_norm": 0.19535651803016663, | |
| "learning_rate": 3.283730158730159e-05, | |
| "loss": 0.2853, | |
| "step": 3980 | |
| }, | |
| { | |
| "epoch": 1.0326086956521738, | |
| "grad_norm": 0.2245965152978897, | |
| "learning_rate": 3.279416839199448e-05, | |
| "loss": 0.2845, | |
| "step": 3990 | |
| }, | |
| { | |
| "epoch": 1.0351966873706004, | |
| "grad_norm": 0.20509561896324158, | |
| "learning_rate": 3.2751035196687374e-05, | |
| "loss": 0.2899, | |
| "step": 4000 | |
| }, | |
| { | |
| "epoch": 1.0377846790890268, | |
| "grad_norm": 0.21699312329292297, | |
| "learning_rate": 3.270790200138026e-05, | |
| "loss": 0.2811, | |
| "step": 4010 | |
| }, | |
| { | |
| "epoch": 1.0403726708074534, | |
| "grad_norm": 0.21619072556495667, | |
| "learning_rate": 3.2664768806073156e-05, | |
| "loss": 0.285, | |
| "step": 4020 | |
| }, | |
| { | |
| "epoch": 1.0429606625258798, | |
| "grad_norm": 0.20293161273002625, | |
| "learning_rate": 3.262163561076605e-05, | |
| "loss": 0.279, | |
| "step": 4030 | |
| }, | |
| { | |
| "epoch": 1.0455486542443064, | |
| "grad_norm": 0.2242215871810913, | |
| "learning_rate": 3.257850241545894e-05, | |
| "loss": 0.2835, | |
| "step": 4040 | |
| }, | |
| { | |
| "epoch": 1.0481366459627328, | |
| "grad_norm": 0.20812055468559265, | |
| "learning_rate": 3.253536922015183e-05, | |
| "loss": 0.2855, | |
| "step": 4050 | |
| }, | |
| { | |
| "epoch": 1.0507246376811594, | |
| "grad_norm": 0.2371463179588318, | |
| "learning_rate": 3.249223602484472e-05, | |
| "loss": 0.2925, | |
| "step": 4060 | |
| }, | |
| { | |
| "epoch": 1.0533126293995858, | |
| "grad_norm": 0.20706847310066223, | |
| "learning_rate": 3.244910282953762e-05, | |
| "loss": 0.2687, | |
| "step": 4070 | |
| }, | |
| { | |
| "epoch": 1.0559006211180124, | |
| "grad_norm": 0.23508736491203308, | |
| "learning_rate": 3.2405969634230505e-05, | |
| "loss": 0.2829, | |
| "step": 4080 | |
| }, | |
| { | |
| "epoch": 1.058488612836439, | |
| "grad_norm": 0.2040328085422516, | |
| "learning_rate": 3.23628364389234e-05, | |
| "loss": 0.2789, | |
| "step": 4090 | |
| }, | |
| { | |
| "epoch": 1.0610766045548654, | |
| "grad_norm": 0.20989084243774414, | |
| "learning_rate": 3.231970324361629e-05, | |
| "loss": 0.2866, | |
| "step": 4100 | |
| }, | |
| { | |
| "epoch": 1.063664596273292, | |
| "grad_norm": 0.20459486544132233, | |
| "learning_rate": 3.227657004830918e-05, | |
| "loss": 0.2839, | |
| "step": 4110 | |
| }, | |
| { | |
| "epoch": 1.0662525879917184, | |
| "grad_norm": 0.20525234937667847, | |
| "learning_rate": 3.223343685300207e-05, | |
| "loss": 0.2772, | |
| "step": 4120 | |
| }, | |
| { | |
| "epoch": 1.068840579710145, | |
| "grad_norm": 0.22750186920166016, | |
| "learning_rate": 3.2190303657694965e-05, | |
| "loss": 0.2868, | |
| "step": 4130 | |
| }, | |
| { | |
| "epoch": 1.0714285714285714, | |
| "grad_norm": 0.20014546811580658, | |
| "learning_rate": 3.214717046238785e-05, | |
| "loss": 0.2851, | |
| "step": 4140 | |
| }, | |
| { | |
| "epoch": 1.074016563146998, | |
| "grad_norm": 0.21684785187244415, | |
| "learning_rate": 3.210403726708074e-05, | |
| "loss": 0.2736, | |
| "step": 4150 | |
| }, | |
| { | |
| "epoch": 1.0766045548654244, | |
| "grad_norm": 0.21611499786376953, | |
| "learning_rate": 3.206090407177364e-05, | |
| "loss": 0.2838, | |
| "step": 4160 | |
| }, | |
| { | |
| "epoch": 1.079192546583851, | |
| "grad_norm": 0.22447840869426727, | |
| "learning_rate": 3.201777087646653e-05, | |
| "loss": 0.2759, | |
| "step": 4170 | |
| }, | |
| { | |
| "epoch": 1.0817805383022774, | |
| "grad_norm": 0.20259734988212585, | |
| "learning_rate": 3.197463768115942e-05, | |
| "loss": 0.2773, | |
| "step": 4180 | |
| }, | |
| { | |
| "epoch": 1.084368530020704, | |
| "grad_norm": 0.20831549167633057, | |
| "learning_rate": 3.1931504485852313e-05, | |
| "loss": 0.2791, | |
| "step": 4190 | |
| }, | |
| { | |
| "epoch": 1.0869565217391304, | |
| "grad_norm": 0.19910798966884613, | |
| "learning_rate": 3.188837129054521e-05, | |
| "loss": 0.2775, | |
| "step": 4200 | |
| }, | |
| { | |
| "epoch": 1.089544513457557, | |
| "grad_norm": 0.21417391300201416, | |
| "learning_rate": 3.1845238095238096e-05, | |
| "loss": 0.2813, | |
| "step": 4210 | |
| }, | |
| { | |
| "epoch": 1.0921325051759834, | |
| "grad_norm": 0.2036023885011673, | |
| "learning_rate": 3.180210489993099e-05, | |
| "loss": 0.2818, | |
| "step": 4220 | |
| }, | |
| { | |
| "epoch": 1.09472049689441, | |
| "grad_norm": 0.19122059643268585, | |
| "learning_rate": 3.175897170462388e-05, | |
| "loss": 0.2794, | |
| "step": 4230 | |
| }, | |
| { | |
| "epoch": 1.0973084886128364, | |
| "grad_norm": 0.2032182216644287, | |
| "learning_rate": 3.171583850931677e-05, | |
| "loss": 0.2797, | |
| "step": 4240 | |
| }, | |
| { | |
| "epoch": 1.099896480331263, | |
| "grad_norm": 0.22476878762245178, | |
| "learning_rate": 3.167270531400967e-05, | |
| "loss": 0.2919, | |
| "step": 4250 | |
| }, | |
| { | |
| "epoch": 1.1024844720496894, | |
| "grad_norm": 0.21088774502277374, | |
| "learning_rate": 3.1629572118702556e-05, | |
| "loss": 0.2816, | |
| "step": 4260 | |
| }, | |
| { | |
| "epoch": 1.105072463768116, | |
| "grad_norm": 0.2437673956155777, | |
| "learning_rate": 3.1586438923395444e-05, | |
| "loss": 0.2831, | |
| "step": 4270 | |
| }, | |
| { | |
| "epoch": 1.1076604554865424, | |
| "grad_norm": 0.21279916167259216, | |
| "learning_rate": 3.154330572808834e-05, | |
| "loss": 0.284, | |
| "step": 4280 | |
| }, | |
| { | |
| "epoch": 1.110248447204969, | |
| "grad_norm": 0.20391355454921722, | |
| "learning_rate": 3.150017253278123e-05, | |
| "loss": 0.2825, | |
| "step": 4290 | |
| }, | |
| { | |
| "epoch": 1.1128364389233953, | |
| "grad_norm": 0.21171489357948303, | |
| "learning_rate": 3.145703933747412e-05, | |
| "loss": 0.2775, | |
| "step": 4300 | |
| }, | |
| { | |
| "epoch": 1.115424430641822, | |
| "grad_norm": 0.2001040130853653, | |
| "learning_rate": 3.141390614216702e-05, | |
| "loss": 0.2831, | |
| "step": 4310 | |
| }, | |
| { | |
| "epoch": 1.1180124223602483, | |
| "grad_norm": 0.18857017159461975, | |
| "learning_rate": 3.1370772946859905e-05, | |
| "loss": 0.2695, | |
| "step": 4320 | |
| }, | |
| { | |
| "epoch": 1.120600414078675, | |
| "grad_norm": 0.21368324756622314, | |
| "learning_rate": 3.132763975155279e-05, | |
| "loss": 0.2731, | |
| "step": 4330 | |
| }, | |
| { | |
| "epoch": 1.1231884057971016, | |
| "grad_norm": 0.20488175749778748, | |
| "learning_rate": 3.128450655624569e-05, | |
| "loss": 0.284, | |
| "step": 4340 | |
| }, | |
| { | |
| "epoch": 1.125776397515528, | |
| "grad_norm": 0.18714292347431183, | |
| "learning_rate": 3.124137336093858e-05, | |
| "loss": 0.2845, | |
| "step": 4350 | |
| }, | |
| { | |
| "epoch": 1.1283643892339545, | |
| "grad_norm": 0.20056986808776855, | |
| "learning_rate": 3.119824016563147e-05, | |
| "loss": 0.2759, | |
| "step": 4360 | |
| }, | |
| { | |
| "epoch": 1.130952380952381, | |
| "grad_norm": 0.1975480616092682, | |
| "learning_rate": 3.1155106970324365e-05, | |
| "loss": 0.2874, | |
| "step": 4370 | |
| }, | |
| { | |
| "epoch": 1.1335403726708075, | |
| "grad_norm": 0.20651063323020935, | |
| "learning_rate": 3.111197377501725e-05, | |
| "loss": 0.2783, | |
| "step": 4380 | |
| }, | |
| { | |
| "epoch": 1.136128364389234, | |
| "grad_norm": 0.212539941072464, | |
| "learning_rate": 3.106884057971015e-05, | |
| "loss": 0.2946, | |
| "step": 4390 | |
| }, | |
| { | |
| "epoch": 1.1387163561076605, | |
| "grad_norm": 0.20712485909461975, | |
| "learning_rate": 3.1025707384403036e-05, | |
| "loss": 0.2874, | |
| "step": 4400 | |
| }, | |
| { | |
| "epoch": 1.141304347826087, | |
| "grad_norm": 0.20916777849197388, | |
| "learning_rate": 3.098257418909593e-05, | |
| "loss": 0.2902, | |
| "step": 4410 | |
| }, | |
| { | |
| "epoch": 1.1438923395445135, | |
| "grad_norm": 0.18801477551460266, | |
| "learning_rate": 3.093944099378882e-05, | |
| "loss": 0.2807, | |
| "step": 4420 | |
| }, | |
| { | |
| "epoch": 1.14648033126294, | |
| "grad_norm": 0.21907158195972443, | |
| "learning_rate": 3.0896307798481713e-05, | |
| "loss": 0.2837, | |
| "step": 4430 | |
| }, | |
| { | |
| "epoch": 1.1490683229813665, | |
| "grad_norm": 0.20964211225509644, | |
| "learning_rate": 3.085317460317461e-05, | |
| "loss": 0.2811, | |
| "step": 4440 | |
| }, | |
| { | |
| "epoch": 1.151656314699793, | |
| "grad_norm": 0.19841931760311127, | |
| "learning_rate": 3.0810041407867496e-05, | |
| "loss": 0.2816, | |
| "step": 4450 | |
| }, | |
| { | |
| "epoch": 1.1542443064182195, | |
| "grad_norm": 0.20696336030960083, | |
| "learning_rate": 3.0766908212560384e-05, | |
| "loss": 0.2857, | |
| "step": 4460 | |
| }, | |
| { | |
| "epoch": 1.156832298136646, | |
| "grad_norm": 0.21968398988246918, | |
| "learning_rate": 3.072377501725328e-05, | |
| "loss": 0.2911, | |
| "step": 4470 | |
| }, | |
| { | |
| "epoch": 1.1594202898550725, | |
| "grad_norm": 0.2119816243648529, | |
| "learning_rate": 3.0680641821946174e-05, | |
| "loss": 0.2836, | |
| "step": 4480 | |
| }, | |
| { | |
| "epoch": 1.162008281573499, | |
| "grad_norm": 0.20051179826259613, | |
| "learning_rate": 3.063750862663906e-05, | |
| "loss": 0.2807, | |
| "step": 4490 | |
| }, | |
| { | |
| "epoch": 1.1645962732919255, | |
| "grad_norm": 0.20480915904045105, | |
| "learning_rate": 3.0594375431331956e-05, | |
| "loss": 0.2799, | |
| "step": 4500 | |
| }, | |
| { | |
| "epoch": 1.1671842650103519, | |
| "grad_norm": 0.1936984360218048, | |
| "learning_rate": 3.0551242236024844e-05, | |
| "loss": 0.2841, | |
| "step": 4510 | |
| }, | |
| { | |
| "epoch": 1.1697722567287785, | |
| "grad_norm": 0.17650452256202698, | |
| "learning_rate": 3.0508109040717736e-05, | |
| "loss": 0.2796, | |
| "step": 4520 | |
| }, | |
| { | |
| "epoch": 1.1723602484472049, | |
| "grad_norm": 0.2266344577074051, | |
| "learning_rate": 3.046497584541063e-05, | |
| "loss": 0.2839, | |
| "step": 4530 | |
| }, | |
| { | |
| "epoch": 1.1749482401656315, | |
| "grad_norm": 0.21390888094902039, | |
| "learning_rate": 3.0421842650103522e-05, | |
| "loss": 0.29, | |
| "step": 4540 | |
| }, | |
| { | |
| "epoch": 1.177536231884058, | |
| "grad_norm": 0.20884661376476288, | |
| "learning_rate": 3.037870945479641e-05, | |
| "loss": 0.2954, | |
| "step": 4550 | |
| }, | |
| { | |
| "epoch": 1.1801242236024845, | |
| "grad_norm": 0.20614804327487946, | |
| "learning_rate": 3.0335576259489308e-05, | |
| "loss": 0.284, | |
| "step": 4560 | |
| }, | |
| { | |
| "epoch": 1.1827122153209109, | |
| "grad_norm": 0.22237960994243622, | |
| "learning_rate": 3.0292443064182196e-05, | |
| "loss": 0.2769, | |
| "step": 4570 | |
| }, | |
| { | |
| "epoch": 1.1853002070393375, | |
| "grad_norm": 0.20414669811725616, | |
| "learning_rate": 3.0249309868875088e-05, | |
| "loss": 0.288, | |
| "step": 4580 | |
| }, | |
| { | |
| "epoch": 1.187888198757764, | |
| "grad_norm": 0.21255841851234436, | |
| "learning_rate": 3.0206176673567982e-05, | |
| "loss": 0.2908, | |
| "step": 4590 | |
| }, | |
| { | |
| "epoch": 1.1904761904761905, | |
| "grad_norm": 0.1876077502965927, | |
| "learning_rate": 3.016304347826087e-05, | |
| "loss": 0.2814, | |
| "step": 4600 | |
| }, | |
| { | |
| "epoch": 1.193064182194617, | |
| "grad_norm": 0.2036699801683426, | |
| "learning_rate": 3.0119910282953762e-05, | |
| "loss": 0.2773, | |
| "step": 4610 | |
| }, | |
| { | |
| "epoch": 1.1956521739130435, | |
| "grad_norm": 0.206691175699234, | |
| "learning_rate": 3.0076777087646656e-05, | |
| "loss": 0.2873, | |
| "step": 4620 | |
| }, | |
| { | |
| "epoch": 1.19824016563147, | |
| "grad_norm": 0.1894364207983017, | |
| "learning_rate": 3.0033643892339548e-05, | |
| "loss": 0.2776, | |
| "step": 4630 | |
| }, | |
| { | |
| "epoch": 1.2008281573498965, | |
| "grad_norm": 0.2230517864227295, | |
| "learning_rate": 2.9990510697032436e-05, | |
| "loss": 0.2913, | |
| "step": 4640 | |
| }, | |
| { | |
| "epoch": 1.203416149068323, | |
| "grad_norm": 0.21492069959640503, | |
| "learning_rate": 2.9947377501725327e-05, | |
| "loss": 0.2735, | |
| "step": 4650 | |
| }, | |
| { | |
| "epoch": 1.2060041407867494, | |
| "grad_norm": 0.22130122780799866, | |
| "learning_rate": 2.9904244306418222e-05, | |
| "loss": 0.2805, | |
| "step": 4660 | |
| }, | |
| { | |
| "epoch": 1.208592132505176, | |
| "grad_norm": 0.2193043977022171, | |
| "learning_rate": 2.9861111111111113e-05, | |
| "loss": 0.2823, | |
| "step": 4670 | |
| }, | |
| { | |
| "epoch": 1.2111801242236024, | |
| "grad_norm": 0.2020953744649887, | |
| "learning_rate": 2.9817977915804e-05, | |
| "loss": 0.2746, | |
| "step": 4680 | |
| }, | |
| { | |
| "epoch": 1.213768115942029, | |
| "grad_norm": 0.19789685308933258, | |
| "learning_rate": 2.9774844720496896e-05, | |
| "loss": 0.2789, | |
| "step": 4690 | |
| }, | |
| { | |
| "epoch": 1.2163561076604554, | |
| "grad_norm": 0.23847202956676483, | |
| "learning_rate": 2.9731711525189788e-05, | |
| "loss": 0.2858, | |
| "step": 4700 | |
| }, | |
| { | |
| "epoch": 1.218944099378882, | |
| "grad_norm": 0.21631298959255219, | |
| "learning_rate": 2.9688578329882676e-05, | |
| "loss": 0.2783, | |
| "step": 4710 | |
| }, | |
| { | |
| "epoch": 1.2215320910973084, | |
| "grad_norm": 0.19795972108840942, | |
| "learning_rate": 2.9645445134575574e-05, | |
| "loss": 0.2852, | |
| "step": 4720 | |
| }, | |
| { | |
| "epoch": 1.224120082815735, | |
| "grad_norm": 0.2208348512649536, | |
| "learning_rate": 2.9602311939268462e-05, | |
| "loss": 0.2757, | |
| "step": 4730 | |
| }, | |
| { | |
| "epoch": 1.2267080745341614, | |
| "grad_norm": 0.2082773745059967, | |
| "learning_rate": 2.9559178743961353e-05, | |
| "loss": 0.2688, | |
| "step": 4740 | |
| }, | |
| { | |
| "epoch": 1.229296066252588, | |
| "grad_norm": 0.208180770277977, | |
| "learning_rate": 2.9516045548654248e-05, | |
| "loss": 0.2778, | |
| "step": 4750 | |
| }, | |
| { | |
| "epoch": 1.2318840579710144, | |
| "grad_norm": 0.2316160351037979, | |
| "learning_rate": 2.9472912353347136e-05, | |
| "loss": 0.283, | |
| "step": 4760 | |
| }, | |
| { | |
| "epoch": 1.234472049689441, | |
| "grad_norm": 0.20870406925678253, | |
| "learning_rate": 2.9429779158040027e-05, | |
| "loss": 0.2833, | |
| "step": 4770 | |
| }, | |
| { | |
| "epoch": 1.2370600414078674, | |
| "grad_norm": 0.18954645097255707, | |
| "learning_rate": 2.9386645962732922e-05, | |
| "loss": 0.2747, | |
| "step": 4780 | |
| }, | |
| { | |
| "epoch": 1.239648033126294, | |
| "grad_norm": 0.19666893780231476, | |
| "learning_rate": 2.9343512767425813e-05, | |
| "loss": 0.2795, | |
| "step": 4790 | |
| }, | |
| { | |
| "epoch": 1.2422360248447206, | |
| "grad_norm": 0.20144566893577576, | |
| "learning_rate": 2.93003795721187e-05, | |
| "loss": 0.2831, | |
| "step": 4800 | |
| }, | |
| { | |
| "epoch": 1.244824016563147, | |
| "grad_norm": 0.20662471652030945, | |
| "learning_rate": 2.92572463768116e-05, | |
| "loss": 0.2806, | |
| "step": 4810 | |
| }, | |
| { | |
| "epoch": 1.2474120082815734, | |
| "grad_norm": 0.20573922991752625, | |
| "learning_rate": 2.9214113181504488e-05, | |
| "loss": 0.2781, | |
| "step": 4820 | |
| }, | |
| { | |
| "epoch": 1.25, | |
| "grad_norm": 0.1996840089559555, | |
| "learning_rate": 2.917097998619738e-05, | |
| "loss": 0.2767, | |
| "step": 4830 | |
| }, | |
| { | |
| "epoch": 1.2525879917184266, | |
| "grad_norm": 0.22875703871250153, | |
| "learning_rate": 2.9127846790890274e-05, | |
| "loss": 0.2793, | |
| "step": 4840 | |
| }, | |
| { | |
| "epoch": 1.255175983436853, | |
| "grad_norm": 0.19148211181163788, | |
| "learning_rate": 2.9084713595583162e-05, | |
| "loss": 0.2734, | |
| "step": 4850 | |
| }, | |
| { | |
| "epoch": 1.2577639751552794, | |
| "grad_norm": 0.2000264972448349, | |
| "learning_rate": 2.9041580400276053e-05, | |
| "loss": 0.2747, | |
| "step": 4860 | |
| }, | |
| { | |
| "epoch": 1.260351966873706, | |
| "grad_norm": 0.20301419496536255, | |
| "learning_rate": 2.8998447204968948e-05, | |
| "loss": 0.2824, | |
| "step": 4870 | |
| }, | |
| { | |
| "epoch": 1.2629399585921326, | |
| "grad_norm": 0.20807796716690063, | |
| "learning_rate": 2.895531400966184e-05, | |
| "loss": 0.2737, | |
| "step": 4880 | |
| }, | |
| { | |
| "epoch": 1.265527950310559, | |
| "grad_norm": 0.21532008051872253, | |
| "learning_rate": 2.8912180814354727e-05, | |
| "loss": 0.2951, | |
| "step": 4890 | |
| }, | |
| { | |
| "epoch": 1.2681159420289856, | |
| "grad_norm": 0.20902346074581146, | |
| "learning_rate": 2.886904761904762e-05, | |
| "loss": 0.2806, | |
| "step": 4900 | |
| }, | |
| { | |
| "epoch": 1.270703933747412, | |
| "grad_norm": 0.20580431818962097, | |
| "learning_rate": 2.8825914423740513e-05, | |
| "loss": 0.2942, | |
| "step": 4910 | |
| }, | |
| { | |
| "epoch": 1.2732919254658386, | |
| "grad_norm": 0.22487808763980865, | |
| "learning_rate": 2.87827812284334e-05, | |
| "loss": 0.2797, | |
| "step": 4920 | |
| }, | |
| { | |
| "epoch": 1.275879917184265, | |
| "grad_norm": 0.20283977687358856, | |
| "learning_rate": 2.8739648033126293e-05, | |
| "loss": 0.2718, | |
| "step": 4930 | |
| }, | |
| { | |
| "epoch": 1.2784679089026916, | |
| "grad_norm": 0.18678729236125946, | |
| "learning_rate": 2.8696514837819188e-05, | |
| "loss": 0.287, | |
| "step": 4940 | |
| }, | |
| { | |
| "epoch": 1.281055900621118, | |
| "grad_norm": 0.20551638305187225, | |
| "learning_rate": 2.865338164251208e-05, | |
| "loss": 0.2768, | |
| "step": 4950 | |
| }, | |
| { | |
| "epoch": 1.2836438923395446, | |
| "grad_norm": 0.208012193441391, | |
| "learning_rate": 2.8610248447204967e-05, | |
| "loss": 0.2756, | |
| "step": 4960 | |
| }, | |
| { | |
| "epoch": 1.286231884057971, | |
| "grad_norm": 0.19745701551437378, | |
| "learning_rate": 2.8567115251897865e-05, | |
| "loss": 0.2877, | |
| "step": 4970 | |
| }, | |
| { | |
| "epoch": 1.2888198757763976, | |
| "grad_norm": 0.19310729205608368, | |
| "learning_rate": 2.8523982056590753e-05, | |
| "loss": 0.2751, | |
| "step": 4980 | |
| }, | |
| { | |
| "epoch": 1.291407867494824, | |
| "grad_norm": 0.19812437891960144, | |
| "learning_rate": 2.8480848861283644e-05, | |
| "loss": 0.2637, | |
| "step": 4990 | |
| }, | |
| { | |
| "epoch": 1.2939958592132506, | |
| "grad_norm": 0.20688922703266144, | |
| "learning_rate": 2.843771566597654e-05, | |
| "loss": 0.2833, | |
| "step": 5000 | |
| }, | |
| { | |
| "epoch": 1.296583850931677, | |
| "grad_norm": 0.19578444957733154, | |
| "learning_rate": 2.8394582470669427e-05, | |
| "loss": 0.2741, | |
| "step": 5010 | |
| }, | |
| { | |
| "epoch": 1.2991718426501035, | |
| "grad_norm": 0.2112204134464264, | |
| "learning_rate": 2.835144927536232e-05, | |
| "loss": 0.2827, | |
| "step": 5020 | |
| }, | |
| { | |
| "epoch": 1.30175983436853, | |
| "grad_norm": 0.20422010123729706, | |
| "learning_rate": 2.8308316080055213e-05, | |
| "loss": 0.2806, | |
| "step": 5030 | |
| }, | |
| { | |
| "epoch": 1.3043478260869565, | |
| "grad_norm": 0.22117148339748383, | |
| "learning_rate": 2.8265182884748105e-05, | |
| "loss": 0.2828, | |
| "step": 5040 | |
| }, | |
| { | |
| "epoch": 1.3069358178053831, | |
| "grad_norm": 0.20923538506031036, | |
| "learning_rate": 2.8222049689440993e-05, | |
| "loss": 0.2896, | |
| "step": 5050 | |
| }, | |
| { | |
| "epoch": 1.3095238095238095, | |
| "grad_norm": 0.2134866565465927, | |
| "learning_rate": 2.8178916494133888e-05, | |
| "loss": 0.2799, | |
| "step": 5060 | |
| }, | |
| { | |
| "epoch": 1.312111801242236, | |
| "grad_norm": 0.20939190685749054, | |
| "learning_rate": 2.813578329882678e-05, | |
| "loss": 0.2778, | |
| "step": 5070 | |
| }, | |
| { | |
| "epoch": 1.3146997929606625, | |
| "grad_norm": 0.20852287113666534, | |
| "learning_rate": 2.809265010351967e-05, | |
| "loss": 0.2712, | |
| "step": 5080 | |
| }, | |
| { | |
| "epoch": 1.3172877846790891, | |
| "grad_norm": 0.18680156767368317, | |
| "learning_rate": 2.8049516908212565e-05, | |
| "loss": 0.288, | |
| "step": 5090 | |
| }, | |
| { | |
| "epoch": 1.3198757763975155, | |
| "grad_norm": 0.19748736917972565, | |
| "learning_rate": 2.8006383712905453e-05, | |
| "loss": 0.2727, | |
| "step": 5100 | |
| }, | |
| { | |
| "epoch": 1.322463768115942, | |
| "grad_norm": 0.19935381412506104, | |
| "learning_rate": 2.7963250517598344e-05, | |
| "loss": 0.2727, | |
| "step": 5110 | |
| }, | |
| { | |
| "epoch": 1.3250517598343685, | |
| "grad_norm": 0.20346301794052124, | |
| "learning_rate": 2.792011732229124e-05, | |
| "loss": 0.2852, | |
| "step": 5120 | |
| }, | |
| { | |
| "epoch": 1.3276397515527951, | |
| "grad_norm": 0.20282778143882751, | |
| "learning_rate": 2.787698412698413e-05, | |
| "loss": 0.29, | |
| "step": 5130 | |
| }, | |
| { | |
| "epoch": 1.3302277432712215, | |
| "grad_norm": 0.20782440900802612, | |
| "learning_rate": 2.783385093167702e-05, | |
| "loss": 0.2741, | |
| "step": 5140 | |
| }, | |
| { | |
| "epoch": 1.3328157349896481, | |
| "grad_norm": 0.20595425367355347, | |
| "learning_rate": 2.779071773636991e-05, | |
| "loss": 0.2819, | |
| "step": 5150 | |
| }, | |
| { | |
| "epoch": 1.3354037267080745, | |
| "grad_norm": 0.18362931907176971, | |
| "learning_rate": 2.7747584541062805e-05, | |
| "loss": 0.2826, | |
| "step": 5160 | |
| }, | |
| { | |
| "epoch": 1.337991718426501, | |
| "grad_norm": 0.20354659855365753, | |
| "learning_rate": 2.7704451345755693e-05, | |
| "loss": 0.2703, | |
| "step": 5170 | |
| }, | |
| { | |
| "epoch": 1.3405797101449275, | |
| "grad_norm": 0.2250794768333435, | |
| "learning_rate": 2.7665631469979297e-05, | |
| "loss": 0.269, | |
| "step": 5180 | |
| }, | |
| { | |
| "epoch": 1.343167701863354, | |
| "grad_norm": 0.18797485530376434, | |
| "learning_rate": 2.7622498274672188e-05, | |
| "loss": 0.2834, | |
| "step": 5190 | |
| }, | |
| { | |
| "epoch": 1.3457556935817805, | |
| "grad_norm": 0.18781961500644684, | |
| "learning_rate": 2.7579365079365083e-05, | |
| "loss": 0.2626, | |
| "step": 5200 | |
| }, | |
| { | |
| "epoch": 1.348343685300207, | |
| "grad_norm": 0.21970486640930176, | |
| "learning_rate": 2.753623188405797e-05, | |
| "loss": 0.2794, | |
| "step": 5210 | |
| }, | |
| { | |
| "epoch": 1.3509316770186335, | |
| "grad_norm": 0.1888144314289093, | |
| "learning_rate": 2.7493098688750862e-05, | |
| "loss": 0.2756, | |
| "step": 5220 | |
| }, | |
| { | |
| "epoch": 1.35351966873706, | |
| "grad_norm": 0.21122311055660248, | |
| "learning_rate": 2.7449965493443757e-05, | |
| "loss": 0.2759, | |
| "step": 5230 | |
| }, | |
| { | |
| "epoch": 1.3561076604554865, | |
| "grad_norm": 0.20177538692951202, | |
| "learning_rate": 2.740683229813665e-05, | |
| "loss": 0.2759, | |
| "step": 5240 | |
| }, | |
| { | |
| "epoch": 1.358695652173913, | |
| "grad_norm": 0.20434828102588654, | |
| "learning_rate": 2.7363699102829536e-05, | |
| "loss": 0.2769, | |
| "step": 5250 | |
| }, | |
| { | |
| "epoch": 1.3612836438923395, | |
| "grad_norm": 0.19432629644870758, | |
| "learning_rate": 2.7320565907522435e-05, | |
| "loss": 0.2755, | |
| "step": 5260 | |
| }, | |
| { | |
| "epoch": 1.363871635610766, | |
| "grad_norm": 0.21479980647563934, | |
| "learning_rate": 2.7277432712215323e-05, | |
| "loss": 0.2882, | |
| "step": 5270 | |
| }, | |
| { | |
| "epoch": 1.3664596273291925, | |
| "grad_norm": 0.20082104206085205, | |
| "learning_rate": 2.7234299516908214e-05, | |
| "loss": 0.2759, | |
| "step": 5280 | |
| }, | |
| { | |
| "epoch": 1.369047619047619, | |
| "grad_norm": 0.22913457453250885, | |
| "learning_rate": 2.719116632160111e-05, | |
| "loss": 0.2841, | |
| "step": 5290 | |
| }, | |
| { | |
| "epoch": 1.3716356107660457, | |
| "grad_norm": 0.19600068032741547, | |
| "learning_rate": 2.7148033126293997e-05, | |
| "loss": 0.2819, | |
| "step": 5300 | |
| }, | |
| { | |
| "epoch": 1.374223602484472, | |
| "grad_norm": 0.18606539070606232, | |
| "learning_rate": 2.7104899930986888e-05, | |
| "loss": 0.2712, | |
| "step": 5310 | |
| }, | |
| { | |
| "epoch": 1.3768115942028984, | |
| "grad_norm": 0.2032352238893509, | |
| "learning_rate": 2.7061766735679776e-05, | |
| "loss": 0.265, | |
| "step": 5320 | |
| }, | |
| { | |
| "epoch": 1.379399585921325, | |
| "grad_norm": 0.20583760738372803, | |
| "learning_rate": 2.7018633540372674e-05, | |
| "loss": 0.2814, | |
| "step": 5330 | |
| }, | |
| { | |
| "epoch": 1.3819875776397517, | |
| "grad_norm": 0.20506004989147186, | |
| "learning_rate": 2.6975500345065562e-05, | |
| "loss": 0.2849, | |
| "step": 5340 | |
| }, | |
| { | |
| "epoch": 1.384575569358178, | |
| "grad_norm": 0.20336943864822388, | |
| "learning_rate": 2.6932367149758454e-05, | |
| "loss": 0.2861, | |
| "step": 5350 | |
| }, | |
| { | |
| "epoch": 1.3871635610766044, | |
| "grad_norm": 0.21229679882526398, | |
| "learning_rate": 2.688923395445135e-05, | |
| "loss": 0.2848, | |
| "step": 5360 | |
| }, | |
| { | |
| "epoch": 1.389751552795031, | |
| "grad_norm": 0.1943487524986267, | |
| "learning_rate": 2.684610075914424e-05, | |
| "loss": 0.2813, | |
| "step": 5370 | |
| }, | |
| { | |
| "epoch": 1.3923395445134576, | |
| "grad_norm": 0.2057686150074005, | |
| "learning_rate": 2.6802967563837128e-05, | |
| "loss": 0.2774, | |
| "step": 5380 | |
| }, | |
| { | |
| "epoch": 1.394927536231884, | |
| "grad_norm": 0.18183177709579468, | |
| "learning_rate": 2.6759834368530023e-05, | |
| "loss": 0.2814, | |
| "step": 5390 | |
| }, | |
| { | |
| "epoch": 1.3975155279503104, | |
| "grad_norm": 0.21541506052017212, | |
| "learning_rate": 2.6716701173222914e-05, | |
| "loss": 0.2788, | |
| "step": 5400 | |
| }, | |
| { | |
| "epoch": 1.400103519668737, | |
| "grad_norm": 0.19158130884170532, | |
| "learning_rate": 2.6673567977915802e-05, | |
| "loss": 0.2898, | |
| "step": 5410 | |
| }, | |
| { | |
| "epoch": 1.4026915113871636, | |
| "grad_norm": 0.19460628926753998, | |
| "learning_rate": 2.66304347826087e-05, | |
| "loss": 0.276, | |
| "step": 5420 | |
| }, | |
| { | |
| "epoch": 1.40527950310559, | |
| "grad_norm": 0.19198273122310638, | |
| "learning_rate": 2.6587301587301588e-05, | |
| "loss": 0.2766, | |
| "step": 5430 | |
| }, | |
| { | |
| "epoch": 1.4078674948240166, | |
| "grad_norm": 0.20514032244682312, | |
| "learning_rate": 2.654416839199448e-05, | |
| "loss": 0.2893, | |
| "step": 5440 | |
| }, | |
| { | |
| "epoch": 1.410455486542443, | |
| "grad_norm": 0.20267118513584137, | |
| "learning_rate": 2.6501035196687374e-05, | |
| "loss": 0.2787, | |
| "step": 5450 | |
| }, | |
| { | |
| "epoch": 1.4130434782608696, | |
| "grad_norm": 0.2155541032552719, | |
| "learning_rate": 2.6457902001380262e-05, | |
| "loss": 0.2823, | |
| "step": 5460 | |
| }, | |
| { | |
| "epoch": 1.415631469979296, | |
| "grad_norm": 0.21240340173244476, | |
| "learning_rate": 2.6414768806073154e-05, | |
| "loss": 0.2793, | |
| "step": 5470 | |
| }, | |
| { | |
| "epoch": 1.4182194616977226, | |
| "grad_norm": 0.20921996235847473, | |
| "learning_rate": 2.637163561076605e-05, | |
| "loss": 0.2802, | |
| "step": 5480 | |
| }, | |
| { | |
| "epoch": 1.420807453416149, | |
| "grad_norm": 0.20031003654003143, | |
| "learning_rate": 2.632850241545894e-05, | |
| "loss": 0.2691, | |
| "step": 5490 | |
| }, | |
| { | |
| "epoch": 1.4233954451345756, | |
| "grad_norm": 0.1978559046983719, | |
| "learning_rate": 2.6285369220151828e-05, | |
| "loss": 0.2783, | |
| "step": 5500 | |
| }, | |
| { | |
| "epoch": 1.425983436853002, | |
| "grad_norm": 0.20545430481433868, | |
| "learning_rate": 2.6242236024844723e-05, | |
| "loss": 0.283, | |
| "step": 5510 | |
| }, | |
| { | |
| "epoch": 1.4285714285714286, | |
| "grad_norm": 0.24126268923282623, | |
| "learning_rate": 2.6199102829537614e-05, | |
| "loss": 0.2828, | |
| "step": 5520 | |
| }, | |
| { | |
| "epoch": 1.431159420289855, | |
| "grad_norm": 0.21742559969425201, | |
| "learning_rate": 2.6155969634230505e-05, | |
| "loss": 0.2789, | |
| "step": 5530 | |
| }, | |
| { | |
| "epoch": 1.4337474120082816, | |
| "grad_norm": 0.2103467881679535, | |
| "learning_rate": 2.61128364389234e-05, | |
| "loss": 0.2822, | |
| "step": 5540 | |
| }, | |
| { | |
| "epoch": 1.4363354037267082, | |
| "grad_norm": 0.19214099645614624, | |
| "learning_rate": 2.6069703243616288e-05, | |
| "loss": 0.2697, | |
| "step": 5550 | |
| }, | |
| { | |
| "epoch": 1.4389233954451346, | |
| "grad_norm": 0.21156474947929382, | |
| "learning_rate": 2.602657004830918e-05, | |
| "loss": 0.2765, | |
| "step": 5560 | |
| }, | |
| { | |
| "epoch": 1.441511387163561, | |
| "grad_norm": 0.2017282396554947, | |
| "learning_rate": 2.5983436853002067e-05, | |
| "loss": 0.2782, | |
| "step": 5570 | |
| }, | |
| { | |
| "epoch": 1.4440993788819876, | |
| "grad_norm": 0.20170588791370392, | |
| "learning_rate": 2.5940303657694966e-05, | |
| "loss": 0.2781, | |
| "step": 5580 | |
| }, | |
| { | |
| "epoch": 1.4466873706004142, | |
| "grad_norm": 0.27326133847236633, | |
| "learning_rate": 2.5897170462387854e-05, | |
| "loss": 0.2826, | |
| "step": 5590 | |
| }, | |
| { | |
| "epoch": 1.4492753623188406, | |
| "grad_norm": 0.19904714822769165, | |
| "learning_rate": 2.5854037267080745e-05, | |
| "loss": 0.2753, | |
| "step": 5600 | |
| }, | |
| { | |
| "epoch": 1.451863354037267, | |
| "grad_norm": 0.205663800239563, | |
| "learning_rate": 2.581090407177364e-05, | |
| "loss": 0.2897, | |
| "step": 5610 | |
| }, | |
| { | |
| "epoch": 1.4544513457556936, | |
| "grad_norm": 0.18975305557250977, | |
| "learning_rate": 2.5767770876466528e-05, | |
| "loss": 0.2815, | |
| "step": 5620 | |
| }, | |
| { | |
| "epoch": 1.4570393374741202, | |
| "grad_norm": 0.21230220794677734, | |
| "learning_rate": 2.572463768115942e-05, | |
| "loss": 0.2919, | |
| "step": 5630 | |
| }, | |
| { | |
| "epoch": 1.4596273291925466, | |
| "grad_norm": 0.20411543548107147, | |
| "learning_rate": 2.5681504485852314e-05, | |
| "loss": 0.2901, | |
| "step": 5640 | |
| }, | |
| { | |
| "epoch": 1.462215320910973, | |
| "grad_norm": 0.1978524625301361, | |
| "learning_rate": 2.5638371290545205e-05, | |
| "loss": 0.2842, | |
| "step": 5650 | |
| }, | |
| { | |
| "epoch": 1.4648033126293996, | |
| "grad_norm": 0.18695814907550812, | |
| "learning_rate": 2.5595238095238093e-05, | |
| "loss": 0.2761, | |
| "step": 5660 | |
| }, | |
| { | |
| "epoch": 1.4673913043478262, | |
| "grad_norm": 0.21033859252929688, | |
| "learning_rate": 2.5552104899930988e-05, | |
| "loss": 0.2782, | |
| "step": 5670 | |
| }, | |
| { | |
| "epoch": 1.4699792960662525, | |
| "grad_norm": 0.19550427794456482, | |
| "learning_rate": 2.550897170462388e-05, | |
| "loss": 0.2802, | |
| "step": 5680 | |
| }, | |
| { | |
| "epoch": 1.4725672877846792, | |
| "grad_norm": 0.21018573641777039, | |
| "learning_rate": 2.546583850931677e-05, | |
| "loss": 0.2751, | |
| "step": 5690 | |
| }, | |
| { | |
| "epoch": 1.4751552795031055, | |
| "grad_norm": 0.1967909336090088, | |
| "learning_rate": 2.5422705314009666e-05, | |
| "loss": 0.2759, | |
| "step": 5700 | |
| }, | |
| { | |
| "epoch": 1.4777432712215322, | |
| "grad_norm": 0.1938430666923523, | |
| "learning_rate": 2.5379572118702554e-05, | |
| "loss": 0.2837, | |
| "step": 5710 | |
| }, | |
| { | |
| "epoch": 1.4803312629399585, | |
| "grad_norm": 0.2072574943304062, | |
| "learning_rate": 2.5336438923395445e-05, | |
| "loss": 0.2814, | |
| "step": 5720 | |
| }, | |
| { | |
| "epoch": 1.4829192546583851, | |
| "grad_norm": 0.19119413197040558, | |
| "learning_rate": 2.529330572808834e-05, | |
| "loss": 0.2781, | |
| "step": 5730 | |
| }, | |
| { | |
| "epoch": 1.4855072463768115, | |
| "grad_norm": 0.19592414796352386, | |
| "learning_rate": 2.525017253278123e-05, | |
| "loss": 0.2761, | |
| "step": 5740 | |
| }, | |
| { | |
| "epoch": 1.4880952380952381, | |
| "grad_norm": 0.20186804234981537, | |
| "learning_rate": 2.520703933747412e-05, | |
| "loss": 0.2721, | |
| "step": 5750 | |
| }, | |
| { | |
| "epoch": 1.4906832298136645, | |
| "grad_norm": 0.19301468133926392, | |
| "learning_rate": 2.5163906142167014e-05, | |
| "loss": 0.2759, | |
| "step": 5760 | |
| }, | |
| { | |
| "epoch": 1.4932712215320911, | |
| "grad_norm": 0.19003848731517792, | |
| "learning_rate": 2.5120772946859905e-05, | |
| "loss": 0.2808, | |
| "step": 5770 | |
| }, | |
| { | |
| "epoch": 1.4958592132505175, | |
| "grad_norm": 0.21416200697422028, | |
| "learning_rate": 2.5077639751552793e-05, | |
| "loss": 0.2679, | |
| "step": 5780 | |
| }, | |
| { | |
| "epoch": 1.4984472049689441, | |
| "grad_norm": 0.21335680782794952, | |
| "learning_rate": 2.503450655624569e-05, | |
| "loss": 0.2737, | |
| "step": 5790 | |
| }, | |
| { | |
| "epoch": 1.5010351966873707, | |
| "grad_norm": 0.1855931133031845, | |
| "learning_rate": 2.499137336093858e-05, | |
| "loss": 0.2787, | |
| "step": 5800 | |
| }, | |
| { | |
| "epoch": 1.5036231884057971, | |
| "grad_norm": 0.20533353090286255, | |
| "learning_rate": 2.494824016563147e-05, | |
| "loss": 0.2781, | |
| "step": 5810 | |
| }, | |
| { | |
| "epoch": 1.5062111801242235, | |
| "grad_norm": 0.18676544725894928, | |
| "learning_rate": 2.4905106970324362e-05, | |
| "loss": 0.2737, | |
| "step": 5820 | |
| }, | |
| { | |
| "epoch": 1.50879917184265, | |
| "grad_norm": 0.2079932987689972, | |
| "learning_rate": 2.4861973775017257e-05, | |
| "loss": 0.2647, | |
| "step": 5830 | |
| }, | |
| { | |
| "epoch": 1.5113871635610767, | |
| "grad_norm": 0.1954832375049591, | |
| "learning_rate": 2.4818840579710145e-05, | |
| "loss": 0.2774, | |
| "step": 5840 | |
| }, | |
| { | |
| "epoch": 1.513975155279503, | |
| "grad_norm": 0.21631035208702087, | |
| "learning_rate": 2.4775707384403036e-05, | |
| "loss": 0.2775, | |
| "step": 5850 | |
| }, | |
| { | |
| "epoch": 1.5165631469979295, | |
| "grad_norm": 0.19149789214134216, | |
| "learning_rate": 2.473257418909593e-05, | |
| "loss": 0.2783, | |
| "step": 5860 | |
| }, | |
| { | |
| "epoch": 1.519151138716356, | |
| "grad_norm": 0.21757644414901733, | |
| "learning_rate": 2.468944099378882e-05, | |
| "loss": 0.2729, | |
| "step": 5870 | |
| }, | |
| { | |
| "epoch": 1.5217391304347827, | |
| "grad_norm": 0.20997533202171326, | |
| "learning_rate": 2.4646307798481714e-05, | |
| "loss": 0.2821, | |
| "step": 5880 | |
| }, | |
| { | |
| "epoch": 1.524327122153209, | |
| "grad_norm": 0.19383445382118225, | |
| "learning_rate": 2.4603174603174602e-05, | |
| "loss": 0.2778, | |
| "step": 5890 | |
| }, | |
| { | |
| "epoch": 1.5269151138716355, | |
| "grad_norm": 0.19153636693954468, | |
| "learning_rate": 2.4560041407867497e-05, | |
| "loss": 0.2823, | |
| "step": 5900 | |
| }, | |
| { | |
| "epoch": 1.529503105590062, | |
| "grad_norm": 0.20183731615543365, | |
| "learning_rate": 2.4516908212560388e-05, | |
| "loss": 0.284, | |
| "step": 5910 | |
| }, | |
| { | |
| "epoch": 1.5320910973084887, | |
| "grad_norm": 0.19239327311515808, | |
| "learning_rate": 2.447377501725328e-05, | |
| "loss": 0.2744, | |
| "step": 5920 | |
| }, | |
| { | |
| "epoch": 1.534679089026915, | |
| "grad_norm": 0.19152939319610596, | |
| "learning_rate": 2.443064182194617e-05, | |
| "loss": 0.274, | |
| "step": 5930 | |
| }, | |
| { | |
| "epoch": 1.5372670807453415, | |
| "grad_norm": 0.20555827021598816, | |
| "learning_rate": 2.4387508626639062e-05, | |
| "loss": 0.2755, | |
| "step": 5940 | |
| }, | |
| { | |
| "epoch": 1.539855072463768, | |
| "grad_norm": 0.1939895898103714, | |
| "learning_rate": 2.4344375431331954e-05, | |
| "loss": 0.2671, | |
| "step": 5950 | |
| }, | |
| { | |
| "epoch": 1.5424430641821947, | |
| "grad_norm": 0.20898254215717316, | |
| "learning_rate": 2.4301242236024845e-05, | |
| "loss": 0.2698, | |
| "step": 5960 | |
| }, | |
| { | |
| "epoch": 1.545031055900621, | |
| "grad_norm": 0.20771819353103638, | |
| "learning_rate": 2.425810904071774e-05, | |
| "loss": 0.2766, | |
| "step": 5970 | |
| }, | |
| { | |
| "epoch": 1.5476190476190477, | |
| "grad_norm": 0.23418374359607697, | |
| "learning_rate": 2.4214975845410628e-05, | |
| "loss": 0.2756, | |
| "step": 5980 | |
| }, | |
| { | |
| "epoch": 1.550207039337474, | |
| "grad_norm": 0.20295991003513336, | |
| "learning_rate": 2.4171842650103523e-05, | |
| "loss": 0.2734, | |
| "step": 5990 | |
| }, | |
| { | |
| "epoch": 1.5527950310559007, | |
| "grad_norm": 0.20638880133628845, | |
| "learning_rate": 2.4128709454796414e-05, | |
| "loss": 0.2745, | |
| "step": 6000 | |
| }, | |
| { | |
| "epoch": 1.5553830227743273, | |
| "grad_norm": 0.20611557364463806, | |
| "learning_rate": 2.4085576259489305e-05, | |
| "loss": 0.2868, | |
| "step": 6010 | |
| }, | |
| { | |
| "epoch": 1.5579710144927537, | |
| "grad_norm": 0.21041472256183624, | |
| "learning_rate": 2.4042443064182197e-05, | |
| "loss": 0.2775, | |
| "step": 6020 | |
| }, | |
| { | |
| "epoch": 1.56055900621118, | |
| "grad_norm": 0.1944635957479477, | |
| "learning_rate": 2.3999309868875085e-05, | |
| "loss": 0.2753, | |
| "step": 6030 | |
| }, | |
| { | |
| "epoch": 1.5631469979296067, | |
| "grad_norm": 0.19726218283176422, | |
| "learning_rate": 2.395617667356798e-05, | |
| "loss": 0.2823, | |
| "step": 6040 | |
| }, | |
| { | |
| "epoch": 1.5657349896480333, | |
| "grad_norm": 0.20652392506599426, | |
| "learning_rate": 2.391304347826087e-05, | |
| "loss": 0.2689, | |
| "step": 6050 | |
| }, | |
| { | |
| "epoch": 1.5683229813664596, | |
| "grad_norm": 0.1904771625995636, | |
| "learning_rate": 2.3869910282953762e-05, | |
| "loss": 0.273, | |
| "step": 6060 | |
| }, | |
| { | |
| "epoch": 1.570910973084886, | |
| "grad_norm": 0.20847630500793457, | |
| "learning_rate": 2.3826777087646654e-05, | |
| "loss": 0.2734, | |
| "step": 6070 | |
| }, | |
| { | |
| "epoch": 1.5734989648033126, | |
| "grad_norm": 0.18985170125961304, | |
| "learning_rate": 2.3783643892339545e-05, | |
| "loss": 0.2716, | |
| "step": 6080 | |
| }, | |
| { | |
| "epoch": 1.5760869565217392, | |
| "grad_norm": 0.21426165103912354, | |
| "learning_rate": 2.3740510697032436e-05, | |
| "loss": 0.2698, | |
| "step": 6090 | |
| }, | |
| { | |
| "epoch": 1.5786749482401656, | |
| "grad_norm": 0.2173684686422348, | |
| "learning_rate": 2.3697377501725328e-05, | |
| "loss": 0.285, | |
| "step": 6100 | |
| }, | |
| { | |
| "epoch": 1.581262939958592, | |
| "grad_norm": 0.19787850975990295, | |
| "learning_rate": 2.3654244306418223e-05, | |
| "loss": 0.27, | |
| "step": 6110 | |
| }, | |
| { | |
| "epoch": 1.5838509316770186, | |
| "grad_norm": 0.19278573989868164, | |
| "learning_rate": 2.361111111111111e-05, | |
| "loss": 0.272, | |
| "step": 6120 | |
| }, | |
| { | |
| "epoch": 1.5864389233954452, | |
| "grad_norm": 0.21055203676223755, | |
| "learning_rate": 2.3567977915804005e-05, | |
| "loss": 0.2826, | |
| "step": 6130 | |
| }, | |
| { | |
| "epoch": 1.5890269151138716, | |
| "grad_norm": 0.20687265694141388, | |
| "learning_rate": 2.3524844720496897e-05, | |
| "loss": 0.2714, | |
| "step": 6140 | |
| }, | |
| { | |
| "epoch": 1.591614906832298, | |
| "grad_norm": 0.21255378425121307, | |
| "learning_rate": 2.3481711525189788e-05, | |
| "loss": 0.2808, | |
| "step": 6150 | |
| }, | |
| { | |
| "epoch": 1.5942028985507246, | |
| "grad_norm": 0.20416966080665588, | |
| "learning_rate": 2.343857832988268e-05, | |
| "loss": 0.2805, | |
| "step": 6160 | |
| }, | |
| { | |
| "epoch": 1.5967908902691512, | |
| "grad_norm": 0.20834051072597504, | |
| "learning_rate": 2.339544513457557e-05, | |
| "loss": 0.2752, | |
| "step": 6170 | |
| }, | |
| { | |
| "epoch": 1.5993788819875776, | |
| "grad_norm": 0.19759848713874817, | |
| "learning_rate": 2.3352311939268462e-05, | |
| "loss": 0.2732, | |
| "step": 6180 | |
| }, | |
| { | |
| "epoch": 1.601966873706004, | |
| "grad_norm": 0.21195858716964722, | |
| "learning_rate": 2.3309178743961354e-05, | |
| "loss": 0.2878, | |
| "step": 6190 | |
| }, | |
| { | |
| "epoch": 1.6045548654244306, | |
| "grad_norm": 0.18112218379974365, | |
| "learning_rate": 2.3266045548654245e-05, | |
| "loss": 0.277, | |
| "step": 6200 | |
| }, | |
| { | |
| "epoch": 1.6071428571428572, | |
| "grad_norm": 0.2216740995645523, | |
| "learning_rate": 2.3222912353347136e-05, | |
| "loss": 0.2743, | |
| "step": 6210 | |
| }, | |
| { | |
| "epoch": 1.6097308488612836, | |
| "grad_norm": 0.19420769810676575, | |
| "learning_rate": 2.317977915804003e-05, | |
| "loss": 0.2687, | |
| "step": 6220 | |
| }, | |
| { | |
| "epoch": 1.6123188405797102, | |
| "grad_norm": 0.20138053596019745, | |
| "learning_rate": 2.313664596273292e-05, | |
| "loss": 0.2867, | |
| "step": 6230 | |
| }, | |
| { | |
| "epoch": 1.6149068322981366, | |
| "grad_norm": 0.19675815105438232, | |
| "learning_rate": 2.3093512767425814e-05, | |
| "loss": 0.2632, | |
| "step": 6240 | |
| }, | |
| { | |
| "epoch": 1.6174948240165632, | |
| "grad_norm": 0.18572789430618286, | |
| "learning_rate": 2.3050379572118705e-05, | |
| "loss": 0.2763, | |
| "step": 6250 | |
| }, | |
| { | |
| "epoch": 1.6200828157349898, | |
| "grad_norm": 0.22142665088176727, | |
| "learning_rate": 2.3007246376811593e-05, | |
| "loss": 0.2843, | |
| "step": 6260 | |
| }, | |
| { | |
| "epoch": 1.6226708074534162, | |
| "grad_norm": 0.19245979189872742, | |
| "learning_rate": 2.2964113181504488e-05, | |
| "loss": 0.282, | |
| "step": 6270 | |
| }, | |
| { | |
| "epoch": 1.6252587991718426, | |
| "grad_norm": 0.19960767030715942, | |
| "learning_rate": 2.2920979986197376e-05, | |
| "loss": 0.2757, | |
| "step": 6280 | |
| }, | |
| { | |
| "epoch": 1.6278467908902692, | |
| "grad_norm": 0.1969170719385147, | |
| "learning_rate": 2.287784679089027e-05, | |
| "loss": 0.2834, | |
| "step": 6290 | |
| }, | |
| { | |
| "epoch": 1.6304347826086958, | |
| "grad_norm": 0.20468299090862274, | |
| "learning_rate": 2.2834713595583162e-05, | |
| "loss": 0.2768, | |
| "step": 6300 | |
| }, | |
| { | |
| "epoch": 1.6330227743271222, | |
| "grad_norm": 0.21196874976158142, | |
| "learning_rate": 2.2791580400276054e-05, | |
| "loss": 0.2768, | |
| "step": 6310 | |
| }, | |
| { | |
| "epoch": 1.6356107660455486, | |
| "grad_norm": 0.20282064378261566, | |
| "learning_rate": 2.2748447204968945e-05, | |
| "loss": 0.2705, | |
| "step": 6320 | |
| }, | |
| { | |
| "epoch": 1.6381987577639752, | |
| "grad_norm": 0.19569437205791473, | |
| "learning_rate": 2.2705314009661836e-05, | |
| "loss": 0.2825, | |
| "step": 6330 | |
| }, | |
| { | |
| "epoch": 1.6407867494824018, | |
| "grad_norm": 0.21149498224258423, | |
| "learning_rate": 2.2662180814354728e-05, | |
| "loss": 0.2781, | |
| "step": 6340 | |
| }, | |
| { | |
| "epoch": 1.6433747412008282, | |
| "grad_norm": 0.19336047768592834, | |
| "learning_rate": 2.261904761904762e-05, | |
| "loss": 0.2722, | |
| "step": 6350 | |
| }, | |
| { | |
| "epoch": 1.6459627329192545, | |
| "grad_norm": 0.19740383327007294, | |
| "learning_rate": 2.2575914423740514e-05, | |
| "loss": 0.2749, | |
| "step": 6360 | |
| }, | |
| { | |
| "epoch": 1.6485507246376812, | |
| "grad_norm": 0.20502516627311707, | |
| "learning_rate": 2.2532781228433402e-05, | |
| "loss": 0.2709, | |
| "step": 6370 | |
| }, | |
| { | |
| "epoch": 1.6511387163561078, | |
| "grad_norm": 0.1982286423444748, | |
| "learning_rate": 2.2489648033126297e-05, | |
| "loss": 0.2721, | |
| "step": 6380 | |
| }, | |
| { | |
| "epoch": 1.6537267080745341, | |
| "grad_norm": 0.20207327604293823, | |
| "learning_rate": 2.2446514837819188e-05, | |
| "loss": 0.285, | |
| "step": 6390 | |
| }, | |
| { | |
| "epoch": 1.6563146997929605, | |
| "grad_norm": 0.19832180440425873, | |
| "learning_rate": 2.240338164251208e-05, | |
| "loss": 0.283, | |
| "step": 6400 | |
| }, | |
| { | |
| "epoch": 1.6589026915113871, | |
| "grad_norm": 0.23319384455680847, | |
| "learning_rate": 2.236024844720497e-05, | |
| "loss": 0.2753, | |
| "step": 6410 | |
| }, | |
| { | |
| "epoch": 1.6614906832298137, | |
| "grad_norm": 0.19634345173835754, | |
| "learning_rate": 2.231711525189786e-05, | |
| "loss": 0.2718, | |
| "step": 6420 | |
| }, | |
| { | |
| "epoch": 1.6640786749482401, | |
| "grad_norm": 0.2094293087720871, | |
| "learning_rate": 2.2273982056590754e-05, | |
| "loss": 0.2676, | |
| "step": 6430 | |
| }, | |
| { | |
| "epoch": 1.6666666666666665, | |
| "grad_norm": 0.19552995264530182, | |
| "learning_rate": 2.2230848861283645e-05, | |
| "loss": 0.267, | |
| "step": 6440 | |
| }, | |
| { | |
| "epoch": 1.6692546583850931, | |
| "grad_norm": 0.19639810919761658, | |
| "learning_rate": 2.2187715665976536e-05, | |
| "loss": 0.2789, | |
| "step": 6450 | |
| }, | |
| { | |
| "epoch": 1.6718426501035197, | |
| "grad_norm": 0.1981237679719925, | |
| "learning_rate": 2.2144582470669428e-05, | |
| "loss": 0.2815, | |
| "step": 6460 | |
| }, | |
| { | |
| "epoch": 1.6744306418219461, | |
| "grad_norm": 0.21538740396499634, | |
| "learning_rate": 2.2101449275362323e-05, | |
| "loss": 0.2787, | |
| "step": 6470 | |
| }, | |
| { | |
| "epoch": 1.6770186335403725, | |
| "grad_norm": 0.23236703872680664, | |
| "learning_rate": 2.205831608005521e-05, | |
| "loss": 0.2694, | |
| "step": 6480 | |
| }, | |
| { | |
| "epoch": 1.6796066252587991, | |
| "grad_norm": 0.1944168359041214, | |
| "learning_rate": 2.2015182884748102e-05, | |
| "loss": 0.2736, | |
| "step": 6490 | |
| }, | |
| { | |
| "epoch": 1.6821946169772257, | |
| "grad_norm": 0.19319669902324677, | |
| "learning_rate": 2.1972049689440997e-05, | |
| "loss": 0.2637, | |
| "step": 6500 | |
| }, | |
| { | |
| "epoch": 1.6847826086956523, | |
| "grad_norm": 0.1919366717338562, | |
| "learning_rate": 2.1928916494133885e-05, | |
| "loss": 0.2771, | |
| "step": 6510 | |
| }, | |
| { | |
| "epoch": 1.6873706004140787, | |
| "grad_norm": 0.2012830674648285, | |
| "learning_rate": 2.188578329882678e-05, | |
| "loss": 0.2652, | |
| "step": 6520 | |
| }, | |
| { | |
| "epoch": 1.689958592132505, | |
| "grad_norm": 0.20478034019470215, | |
| "learning_rate": 2.1842650103519667e-05, | |
| "loss": 0.2749, | |
| "step": 6530 | |
| }, | |
| { | |
| "epoch": 1.6925465838509317, | |
| "grad_norm": 0.17857521772384644, | |
| "learning_rate": 2.1799516908212562e-05, | |
| "loss": 0.2779, | |
| "step": 6540 | |
| }, | |
| { | |
| "epoch": 1.6951345755693583, | |
| "grad_norm": 0.198260098695755, | |
| "learning_rate": 2.1756383712905454e-05, | |
| "loss": 0.2692, | |
| "step": 6550 | |
| }, | |
| { | |
| "epoch": 1.6977225672877847, | |
| "grad_norm": 0.2041163593530655, | |
| "learning_rate": 2.1713250517598345e-05, | |
| "loss": 0.2876, | |
| "step": 6560 | |
| }, | |
| { | |
| "epoch": 1.700310559006211, | |
| "grad_norm": 0.19581981003284454, | |
| "learning_rate": 2.1670117322291236e-05, | |
| "loss": 0.2732, | |
| "step": 6570 | |
| }, | |
| { | |
| "epoch": 1.7028985507246377, | |
| "grad_norm": 0.21552933752536774, | |
| "learning_rate": 2.1626984126984128e-05, | |
| "loss": 0.2755, | |
| "step": 6580 | |
| }, | |
| { | |
| "epoch": 1.7054865424430643, | |
| "grad_norm": 0.18474365770816803, | |
| "learning_rate": 2.158385093167702e-05, | |
| "loss": 0.2742, | |
| "step": 6590 | |
| }, | |
| { | |
| "epoch": 1.7080745341614907, | |
| "grad_norm": 0.1958833485841751, | |
| "learning_rate": 2.154071773636991e-05, | |
| "loss": 0.2761, | |
| "step": 6600 | |
| }, | |
| { | |
| "epoch": 1.710662525879917, | |
| "grad_norm": 0.21241448819637299, | |
| "learning_rate": 2.1497584541062805e-05, | |
| "loss": 0.2755, | |
| "step": 6610 | |
| }, | |
| { | |
| "epoch": 1.7132505175983437, | |
| "grad_norm": 0.1859632134437561, | |
| "learning_rate": 2.1454451345755693e-05, | |
| "loss": 0.2675, | |
| "step": 6620 | |
| }, | |
| { | |
| "epoch": 1.7158385093167703, | |
| "grad_norm": 0.18138276040554047, | |
| "learning_rate": 2.1411318150448588e-05, | |
| "loss": 0.277, | |
| "step": 6630 | |
| }, | |
| { | |
| "epoch": 1.7184265010351967, | |
| "grad_norm": 0.21567761898040771, | |
| "learning_rate": 2.136818495514148e-05, | |
| "loss": 0.2751, | |
| "step": 6640 | |
| }, | |
| { | |
| "epoch": 1.721014492753623, | |
| "grad_norm": 0.2187628597021103, | |
| "learning_rate": 2.1325051759834367e-05, | |
| "loss": 0.2786, | |
| "step": 6650 | |
| }, | |
| { | |
| "epoch": 1.7236024844720497, | |
| "grad_norm": 0.1880040168762207, | |
| "learning_rate": 2.1281918564527262e-05, | |
| "loss": 0.2861, | |
| "step": 6660 | |
| }, | |
| { | |
| "epoch": 1.7261904761904763, | |
| "grad_norm": 0.18701395392417908, | |
| "learning_rate": 2.123878536922015e-05, | |
| "loss": 0.261, | |
| "step": 6670 | |
| }, | |
| { | |
| "epoch": 1.7287784679089027, | |
| "grad_norm": 0.19089701771736145, | |
| "learning_rate": 2.1195652173913045e-05, | |
| "loss": 0.2784, | |
| "step": 6680 | |
| }, | |
| { | |
| "epoch": 1.731366459627329, | |
| "grad_norm": 0.1895458698272705, | |
| "learning_rate": 2.1152518978605936e-05, | |
| "loss": 0.2841, | |
| "step": 6690 | |
| }, | |
| { | |
| "epoch": 1.7339544513457557, | |
| "grad_norm": 0.19079262018203735, | |
| "learning_rate": 2.1109385783298828e-05, | |
| "loss": 0.2799, | |
| "step": 6700 | |
| }, | |
| { | |
| "epoch": 1.7365424430641823, | |
| "grad_norm": 0.19622786343097687, | |
| "learning_rate": 2.106625258799172e-05, | |
| "loss": 0.2688, | |
| "step": 6710 | |
| }, | |
| { | |
| "epoch": 1.7391304347826086, | |
| "grad_norm": 0.21237607300281525, | |
| "learning_rate": 2.102311939268461e-05, | |
| "loss": 0.278, | |
| "step": 6720 | |
| }, | |
| { | |
| "epoch": 1.741718426501035, | |
| "grad_norm": 0.20015180110931396, | |
| "learning_rate": 2.0979986197377502e-05, | |
| "loss": 0.2825, | |
| "step": 6730 | |
| }, | |
| { | |
| "epoch": 1.7443064182194616, | |
| "grad_norm": 0.22043476998806, | |
| "learning_rate": 2.0936853002070393e-05, | |
| "loss": 0.2697, | |
| "step": 6740 | |
| }, | |
| { | |
| "epoch": 1.7468944099378882, | |
| "grad_norm": 0.1935938447713852, | |
| "learning_rate": 2.0893719806763288e-05, | |
| "loss": 0.2715, | |
| "step": 6750 | |
| }, | |
| { | |
| "epoch": 1.7494824016563149, | |
| "grad_norm": 0.21081463992595673, | |
| "learning_rate": 2.0850586611456176e-05, | |
| "loss": 0.2761, | |
| "step": 6760 | |
| }, | |
| { | |
| "epoch": 1.7520703933747412, | |
| "grad_norm": 0.19441653788089752, | |
| "learning_rate": 2.080745341614907e-05, | |
| "loss": 0.275, | |
| "step": 6770 | |
| }, | |
| { | |
| "epoch": 1.7546583850931676, | |
| "grad_norm": 0.21199271082878113, | |
| "learning_rate": 2.076432022084196e-05, | |
| "loss": 0.2592, | |
| "step": 6780 | |
| }, | |
| { | |
| "epoch": 1.7572463768115942, | |
| "grad_norm": 0.19131699204444885, | |
| "learning_rate": 2.0721187025534854e-05, | |
| "loss": 0.2775, | |
| "step": 6790 | |
| }, | |
| { | |
| "epoch": 1.7598343685300208, | |
| "grad_norm": 0.20551328361034393, | |
| "learning_rate": 2.0678053830227745e-05, | |
| "loss": 0.2642, | |
| "step": 6800 | |
| }, | |
| { | |
| "epoch": 1.7624223602484472, | |
| "grad_norm": 0.1989978551864624, | |
| "learning_rate": 2.0634920634920636e-05, | |
| "loss": 0.2744, | |
| "step": 6810 | |
| }, | |
| { | |
| "epoch": 1.7650103519668736, | |
| "grad_norm": 0.20336061716079712, | |
| "learning_rate": 2.0591787439613528e-05, | |
| "loss": 0.2645, | |
| "step": 6820 | |
| }, | |
| { | |
| "epoch": 1.7675983436853002, | |
| "grad_norm": 0.1846802830696106, | |
| "learning_rate": 2.054865424430642e-05, | |
| "loss": 0.2826, | |
| "step": 6830 | |
| }, | |
| { | |
| "epoch": 1.7701863354037268, | |
| "grad_norm": 0.2088494598865509, | |
| "learning_rate": 2.050552104899931e-05, | |
| "loss": 0.2765, | |
| "step": 6840 | |
| }, | |
| { | |
| "epoch": 1.7727743271221532, | |
| "grad_norm": 0.22022396326065063, | |
| "learning_rate": 2.0462387853692202e-05, | |
| "loss": 0.2668, | |
| "step": 6850 | |
| }, | |
| { | |
| "epoch": 1.7753623188405796, | |
| "grad_norm": 0.1923859864473343, | |
| "learning_rate": 2.0419254658385097e-05, | |
| "loss": 0.272, | |
| "step": 6860 | |
| }, | |
| { | |
| "epoch": 1.7779503105590062, | |
| "grad_norm": 0.21392539143562317, | |
| "learning_rate": 2.0376121463077985e-05, | |
| "loss": 0.2761, | |
| "step": 6870 | |
| }, | |
| { | |
| "epoch": 1.7805383022774328, | |
| "grad_norm": 0.19041626155376434, | |
| "learning_rate": 2.0332988267770876e-05, | |
| "loss": 0.2735, | |
| "step": 6880 | |
| }, | |
| { | |
| "epoch": 1.7831262939958592, | |
| "grad_norm": 0.21040987968444824, | |
| "learning_rate": 2.028985507246377e-05, | |
| "loss": 0.2742, | |
| "step": 6890 | |
| }, | |
| { | |
| "epoch": 1.7857142857142856, | |
| "grad_norm": 0.19723209738731384, | |
| "learning_rate": 2.024672187715666e-05, | |
| "loss": 0.2676, | |
| "step": 6900 | |
| }, | |
| { | |
| "epoch": 1.7883022774327122, | |
| "grad_norm": 0.21089300513267517, | |
| "learning_rate": 2.0203588681849554e-05, | |
| "loss": 0.2703, | |
| "step": 6910 | |
| }, | |
| { | |
| "epoch": 1.7908902691511388, | |
| "grad_norm": 0.18884050846099854, | |
| "learning_rate": 2.016045548654244e-05, | |
| "loss": 0.2719, | |
| "step": 6920 | |
| }, | |
| { | |
| "epoch": 1.7934782608695652, | |
| "grad_norm": 0.19870036840438843, | |
| "learning_rate": 2.0117322291235336e-05, | |
| "loss": 0.2744, | |
| "step": 6930 | |
| }, | |
| { | |
| "epoch": 1.7960662525879916, | |
| "grad_norm": 0.20100484788417816, | |
| "learning_rate": 2.0074189095928228e-05, | |
| "loss": 0.2731, | |
| "step": 6940 | |
| }, | |
| { | |
| "epoch": 1.7986542443064182, | |
| "grad_norm": 0.1931971311569214, | |
| "learning_rate": 2.003105590062112e-05, | |
| "loss": 0.2769, | |
| "step": 6950 | |
| }, | |
| { | |
| "epoch": 1.8012422360248448, | |
| "grad_norm": 0.2118457704782486, | |
| "learning_rate": 1.998792270531401e-05, | |
| "loss": 0.2766, | |
| "step": 6960 | |
| }, | |
| { | |
| "epoch": 1.8038302277432712, | |
| "grad_norm": 0.1875993311405182, | |
| "learning_rate": 1.9944789510006902e-05, | |
| "loss": 0.2711, | |
| "step": 6970 | |
| }, | |
| { | |
| "epoch": 1.8064182194616976, | |
| "grad_norm": 0.19404591619968414, | |
| "learning_rate": 1.9901656314699793e-05, | |
| "loss": 0.2691, | |
| "step": 6980 | |
| }, | |
| { | |
| "epoch": 1.8090062111801242, | |
| "grad_norm": 0.20889738202095032, | |
| "learning_rate": 1.9858523119392685e-05, | |
| "loss": 0.2792, | |
| "step": 6990 | |
| }, | |
| { | |
| "epoch": 1.8115942028985508, | |
| "grad_norm": 0.19283007085323334, | |
| "learning_rate": 1.981538992408558e-05, | |
| "loss": 0.2707, | |
| "step": 7000 | |
| }, | |
| { | |
| "epoch": 1.8141821946169774, | |
| "grad_norm": 0.2058730572462082, | |
| "learning_rate": 1.9772256728778467e-05, | |
| "loss": 0.2768, | |
| "step": 7010 | |
| }, | |
| { | |
| "epoch": 1.8167701863354038, | |
| "grad_norm": 0.1965474933385849, | |
| "learning_rate": 1.9729123533471362e-05, | |
| "loss": 0.2704, | |
| "step": 7020 | |
| }, | |
| { | |
| "epoch": 1.8193581780538302, | |
| "grad_norm": 0.21187111735343933, | |
| "learning_rate": 1.9685990338164254e-05, | |
| "loss": 0.2637, | |
| "step": 7030 | |
| }, | |
| { | |
| "epoch": 1.8219461697722568, | |
| "grad_norm": 0.20169232785701752, | |
| "learning_rate": 1.9642857142857145e-05, | |
| "loss": 0.2698, | |
| "step": 7040 | |
| }, | |
| { | |
| "epoch": 1.8245341614906834, | |
| "grad_norm": 0.2075551301240921, | |
| "learning_rate": 1.9599723947550036e-05, | |
| "loss": 0.2606, | |
| "step": 7050 | |
| }, | |
| { | |
| "epoch": 1.8271221532091098, | |
| "grad_norm": 0.19754406809806824, | |
| "learning_rate": 1.9556590752242924e-05, | |
| "loss": 0.2706, | |
| "step": 7060 | |
| }, | |
| { | |
| "epoch": 1.8297101449275361, | |
| "grad_norm": 0.20495067536830902, | |
| "learning_rate": 1.951345755693582e-05, | |
| "loss": 0.27, | |
| "step": 7070 | |
| }, | |
| { | |
| "epoch": 1.8322981366459627, | |
| "grad_norm": 0.20887307822704315, | |
| "learning_rate": 1.947032436162871e-05, | |
| "loss": 0.2775, | |
| "step": 7080 | |
| }, | |
| { | |
| "epoch": 1.8348861283643894, | |
| "grad_norm": 0.19987981021404266, | |
| "learning_rate": 1.9427191166321602e-05, | |
| "loss": 0.2742, | |
| "step": 7090 | |
| }, | |
| { | |
| "epoch": 1.8374741200828157, | |
| "grad_norm": 0.21831856667995453, | |
| "learning_rate": 1.9384057971014493e-05, | |
| "loss": 0.2751, | |
| "step": 7100 | |
| }, | |
| { | |
| "epoch": 1.8400621118012421, | |
| "grad_norm": 0.19449429214000702, | |
| "learning_rate": 1.9340924775707385e-05, | |
| "loss": 0.2779, | |
| "step": 7110 | |
| }, | |
| { | |
| "epoch": 1.8426501035196687, | |
| "grad_norm": 0.2008129507303238, | |
| "learning_rate": 1.9297791580400276e-05, | |
| "loss": 0.2707, | |
| "step": 7120 | |
| }, | |
| { | |
| "epoch": 1.8452380952380953, | |
| "grad_norm": 0.20419888198375702, | |
| "learning_rate": 1.9254658385093167e-05, | |
| "loss": 0.2602, | |
| "step": 7130 | |
| }, | |
| { | |
| "epoch": 1.8478260869565217, | |
| "grad_norm": 0.20065145194530487, | |
| "learning_rate": 1.9211525189786062e-05, | |
| "loss": 0.2683, | |
| "step": 7140 | |
| }, | |
| { | |
| "epoch": 1.8504140786749481, | |
| "grad_norm": 0.1982513964176178, | |
| "learning_rate": 1.916839199447895e-05, | |
| "loss": 0.2808, | |
| "step": 7150 | |
| }, | |
| { | |
| "epoch": 1.8530020703933747, | |
| "grad_norm": 0.18309429287910461, | |
| "learning_rate": 1.9125258799171845e-05, | |
| "loss": 0.2795, | |
| "step": 7160 | |
| }, | |
| { | |
| "epoch": 1.8555900621118013, | |
| "grad_norm": 0.21153387427330017, | |
| "learning_rate": 1.9082125603864733e-05, | |
| "loss": 0.2712, | |
| "step": 7170 | |
| }, | |
| { | |
| "epoch": 1.8581780538302277, | |
| "grad_norm": 0.1983746439218521, | |
| "learning_rate": 1.9038992408557628e-05, | |
| "loss": 0.27, | |
| "step": 7180 | |
| }, | |
| { | |
| "epoch": 1.860766045548654, | |
| "grad_norm": 0.18508397042751312, | |
| "learning_rate": 1.899585921325052e-05, | |
| "loss": 0.2703, | |
| "step": 7190 | |
| }, | |
| { | |
| "epoch": 1.8633540372670807, | |
| "grad_norm": 0.1981285661458969, | |
| "learning_rate": 1.895272601794341e-05, | |
| "loss": 0.2685, | |
| "step": 7200 | |
| }, | |
| { | |
| "epoch": 1.8659420289855073, | |
| "grad_norm": 0.22482812404632568, | |
| "learning_rate": 1.8909592822636302e-05, | |
| "loss": 0.2722, | |
| "step": 7210 | |
| }, | |
| { | |
| "epoch": 1.8685300207039337, | |
| "grad_norm": 0.17429296672344208, | |
| "learning_rate": 1.8866459627329193e-05, | |
| "loss": 0.2706, | |
| "step": 7220 | |
| }, | |
| { | |
| "epoch": 1.87111801242236, | |
| "grad_norm": 0.1882694512605667, | |
| "learning_rate": 1.8823326432022085e-05, | |
| "loss": 0.2713, | |
| "step": 7230 | |
| }, | |
| { | |
| "epoch": 1.8737060041407867, | |
| "grad_norm": 0.1910208910703659, | |
| "learning_rate": 1.8780193236714976e-05, | |
| "loss": 0.2684, | |
| "step": 7240 | |
| }, | |
| { | |
| "epoch": 1.8762939958592133, | |
| "grad_norm": 0.19706323742866516, | |
| "learning_rate": 1.873706004140787e-05, | |
| "loss": 0.2647, | |
| "step": 7250 | |
| }, | |
| { | |
| "epoch": 1.87888198757764, | |
| "grad_norm": 0.21187639236450195, | |
| "learning_rate": 1.869392684610076e-05, | |
| "loss": 0.2797, | |
| "step": 7260 | |
| }, | |
| { | |
| "epoch": 1.8814699792960663, | |
| "grad_norm": 0.20016558468341827, | |
| "learning_rate": 1.8650793650793654e-05, | |
| "loss": 0.2731, | |
| "step": 7270 | |
| }, | |
| { | |
| "epoch": 1.8840579710144927, | |
| "grad_norm": 0.19838570058345795, | |
| "learning_rate": 1.8607660455486545e-05, | |
| "loss": 0.2735, | |
| "step": 7280 | |
| }, | |
| { | |
| "epoch": 1.8866459627329193, | |
| "grad_norm": 0.20789624750614166, | |
| "learning_rate": 1.8564527260179433e-05, | |
| "loss": 0.2716, | |
| "step": 7290 | |
| }, | |
| { | |
| "epoch": 1.889233954451346, | |
| "grad_norm": 0.20426179468631744, | |
| "learning_rate": 1.8521394064872328e-05, | |
| "loss": 0.2682, | |
| "step": 7300 | |
| }, | |
| { | |
| "epoch": 1.8918219461697723, | |
| "grad_norm": 0.19371093809604645, | |
| "learning_rate": 1.8478260869565216e-05, | |
| "loss": 0.2762, | |
| "step": 7310 | |
| }, | |
| { | |
| "epoch": 1.8944099378881987, | |
| "grad_norm": 0.1775299459695816, | |
| "learning_rate": 1.843512767425811e-05, | |
| "loss": 0.2746, | |
| "step": 7320 | |
| }, | |
| { | |
| "epoch": 1.8969979296066253, | |
| "grad_norm": 0.21058380603790283, | |
| "learning_rate": 1.8391994478951002e-05, | |
| "loss": 0.2652, | |
| "step": 7330 | |
| }, | |
| { | |
| "epoch": 1.8995859213250519, | |
| "grad_norm": 0.19346512854099274, | |
| "learning_rate": 1.8348861283643893e-05, | |
| "loss": 0.272, | |
| "step": 7340 | |
| }, | |
| { | |
| "epoch": 1.9021739130434783, | |
| "grad_norm": 0.18280893564224243, | |
| "learning_rate": 1.8305728088336785e-05, | |
| "loss": 0.2744, | |
| "step": 7350 | |
| }, | |
| { | |
| "epoch": 1.9047619047619047, | |
| "grad_norm": 0.20643116533756256, | |
| "learning_rate": 1.8262594893029676e-05, | |
| "loss": 0.2779, | |
| "step": 7360 | |
| }, | |
| { | |
| "epoch": 1.9073498964803313, | |
| "grad_norm": 0.182168647646904, | |
| "learning_rate": 1.8219461697722567e-05, | |
| "loss": 0.2708, | |
| "step": 7370 | |
| }, | |
| { | |
| "epoch": 1.9099378881987579, | |
| "grad_norm": 0.19439440965652466, | |
| "learning_rate": 1.817632850241546e-05, | |
| "loss": 0.2682, | |
| "step": 7380 | |
| }, | |
| { | |
| "epoch": 1.9125258799171843, | |
| "grad_norm": 0.18823355436325073, | |
| "learning_rate": 1.8133195307108354e-05, | |
| "loss": 0.2629, | |
| "step": 7390 | |
| }, | |
| { | |
| "epoch": 1.9151138716356106, | |
| "grad_norm": 0.18337075412273407, | |
| "learning_rate": 1.809006211180124e-05, | |
| "loss": 0.2675, | |
| "step": 7400 | |
| }, | |
| { | |
| "epoch": 1.9177018633540373, | |
| "grad_norm": 0.19854582846164703, | |
| "learning_rate": 1.8046928916494136e-05, | |
| "loss": 0.2745, | |
| "step": 7410 | |
| }, | |
| { | |
| "epoch": 1.9202898550724639, | |
| "grad_norm": 0.19245949387550354, | |
| "learning_rate": 1.8003795721187024e-05, | |
| "loss": 0.2707, | |
| "step": 7420 | |
| }, | |
| { | |
| "epoch": 1.9228778467908902, | |
| "grad_norm": 0.20844367146492004, | |
| "learning_rate": 1.796066252587992e-05, | |
| "loss": 0.273, | |
| "step": 7430 | |
| }, | |
| { | |
| "epoch": 1.9254658385093166, | |
| "grad_norm": 0.20758184790611267, | |
| "learning_rate": 1.791752933057281e-05, | |
| "loss": 0.2718, | |
| "step": 7440 | |
| }, | |
| { | |
| "epoch": 1.9280538302277432, | |
| "grad_norm": 0.19265791773796082, | |
| "learning_rate": 1.78743961352657e-05, | |
| "loss": 0.278, | |
| "step": 7450 | |
| }, | |
| { | |
| "epoch": 1.9306418219461698, | |
| "grad_norm": 0.21054716408252716, | |
| "learning_rate": 1.7831262939958593e-05, | |
| "loss": 0.2798, | |
| "step": 7460 | |
| }, | |
| { | |
| "epoch": 1.9332298136645962, | |
| "grad_norm": 0.21932169795036316, | |
| "learning_rate": 1.7788129744651485e-05, | |
| "loss": 0.2619, | |
| "step": 7470 | |
| }, | |
| { | |
| "epoch": 1.9358178053830226, | |
| "grad_norm": 0.19597266614437103, | |
| "learning_rate": 1.7744996549344376e-05, | |
| "loss": 0.2686, | |
| "step": 7480 | |
| }, | |
| { | |
| "epoch": 1.9384057971014492, | |
| "grad_norm": 0.18867290019989014, | |
| "learning_rate": 1.7701863354037267e-05, | |
| "loss": 0.2758, | |
| "step": 7490 | |
| }, | |
| { | |
| "epoch": 1.9409937888198758, | |
| "grad_norm": 0.20822232961654663, | |
| "learning_rate": 1.7658730158730162e-05, | |
| "loss": 0.264, | |
| "step": 7500 | |
| }, | |
| { | |
| "epoch": 1.9435817805383024, | |
| "grad_norm": 0.20569127798080444, | |
| "learning_rate": 1.761559696342305e-05, | |
| "loss": 0.2685, | |
| "step": 7510 | |
| }, | |
| { | |
| "epoch": 1.9461697722567288, | |
| "grad_norm": 0.19487345218658447, | |
| "learning_rate": 1.757246376811594e-05, | |
| "loss": 0.2704, | |
| "step": 7520 | |
| }, | |
| { | |
| "epoch": 1.9487577639751552, | |
| "grad_norm": 0.19858413934707642, | |
| "learning_rate": 1.7529330572808836e-05, | |
| "loss": 0.2811, | |
| "step": 7530 | |
| }, | |
| { | |
| "epoch": 1.9513457556935818, | |
| "grad_norm": 0.2053307443857193, | |
| "learning_rate": 1.7486197377501724e-05, | |
| "loss": 0.2706, | |
| "step": 7540 | |
| }, | |
| { | |
| "epoch": 1.9539337474120084, | |
| "grad_norm": 0.2085588127374649, | |
| "learning_rate": 1.744306418219462e-05, | |
| "loss": 0.267, | |
| "step": 7550 | |
| }, | |
| { | |
| "epoch": 1.9565217391304348, | |
| "grad_norm": 0.20521822571754456, | |
| "learning_rate": 1.7399930986887507e-05, | |
| "loss": 0.2733, | |
| "step": 7560 | |
| }, | |
| { | |
| "epoch": 1.9591097308488612, | |
| "grad_norm": 0.20257830619812012, | |
| "learning_rate": 1.7356797791580402e-05, | |
| "loss": 0.2795, | |
| "step": 7570 | |
| }, | |
| { | |
| "epoch": 1.9616977225672878, | |
| "grad_norm": 0.20972435176372528, | |
| "learning_rate": 1.7313664596273293e-05, | |
| "loss": 0.261, | |
| "step": 7580 | |
| }, | |
| { | |
| "epoch": 1.9642857142857144, | |
| "grad_norm": 0.1922076791524887, | |
| "learning_rate": 1.7270531400966185e-05, | |
| "loss": 0.259, | |
| "step": 7590 | |
| }, | |
| { | |
| "epoch": 1.9668737060041408, | |
| "grad_norm": 0.2023407369852066, | |
| "learning_rate": 1.7227398205659076e-05, | |
| "loss": 0.26, | |
| "step": 7600 | |
| }, | |
| { | |
| "epoch": 1.9694616977225672, | |
| "grad_norm": 0.2070726901292801, | |
| "learning_rate": 1.7184265010351967e-05, | |
| "loss": 0.2756, | |
| "step": 7610 | |
| }, | |
| { | |
| "epoch": 1.9720496894409938, | |
| "grad_norm": 0.19282975792884827, | |
| "learning_rate": 1.714113181504486e-05, | |
| "loss": 0.2667, | |
| "step": 7620 | |
| }, | |
| { | |
| "epoch": 1.9746376811594204, | |
| "grad_norm": 0.1871107816696167, | |
| "learning_rate": 1.709799861973775e-05, | |
| "loss": 0.2596, | |
| "step": 7630 | |
| }, | |
| { | |
| "epoch": 1.9772256728778468, | |
| "grad_norm": 0.18695834279060364, | |
| "learning_rate": 1.7054865424430645e-05, | |
| "loss": 0.2713, | |
| "step": 7640 | |
| }, | |
| { | |
| "epoch": 1.9798136645962732, | |
| "grad_norm": 0.1977001577615738, | |
| "learning_rate": 1.7011732229123533e-05, | |
| "loss": 0.2708, | |
| "step": 7650 | |
| }, | |
| { | |
| "epoch": 1.9824016563146998, | |
| "grad_norm": 0.2192411720752716, | |
| "learning_rate": 1.6968599033816428e-05, | |
| "loss": 0.2642, | |
| "step": 7660 | |
| }, | |
| { | |
| "epoch": 1.9849896480331264, | |
| "grad_norm": 0.20032544434070587, | |
| "learning_rate": 1.6925465838509316e-05, | |
| "loss": 0.2604, | |
| "step": 7670 | |
| }, | |
| { | |
| "epoch": 1.9875776397515528, | |
| "grad_norm": 0.20344774425029755, | |
| "learning_rate": 1.688233264320221e-05, | |
| "loss": 0.2743, | |
| "step": 7680 | |
| }, | |
| { | |
| "epoch": 1.9901656314699792, | |
| "grad_norm": 0.21191217005252838, | |
| "learning_rate": 1.6839199447895102e-05, | |
| "loss": 0.2688, | |
| "step": 7690 | |
| }, | |
| { | |
| "epoch": 1.9927536231884058, | |
| "grad_norm": 0.17852450907230377, | |
| "learning_rate": 1.679606625258799e-05, | |
| "loss": 0.2762, | |
| "step": 7700 | |
| }, | |
| { | |
| "epoch": 1.9953416149068324, | |
| "grad_norm": 0.2022544890642166, | |
| "learning_rate": 1.6752933057280885e-05, | |
| "loss": 0.2823, | |
| "step": 7710 | |
| }, | |
| { | |
| "epoch": 1.9979296066252588, | |
| "grad_norm": 0.1877758651971817, | |
| "learning_rate": 1.6709799861973776e-05, | |
| "loss": 0.2638, | |
| "step": 7720 | |
| } | |
| ], | |
| "logging_steps": 10, | |
| "max_steps": 11592, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 3, | |
| "save_steps": 500, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": false | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 8.072391866659635e+17, | |
| "train_batch_size": 6, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |
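
The table above is the complete serialized trainer state for this checkpoint (training log entries plus the closing run configuration). As a purely illustrative aid, the sketch below shows one way the `log_history` entries recorded above could be loaded and summarised with only the standard library; the filename `trainer_state.json` is an assumption based on the usual Hugging Face Trainer checkpoint layout, and the snippet is not part of the checkpoint itself.

```python
# Minimal sketch: read the checkpoint state shown above and summarise the loss log.
# Assumes this document is saved as "trainer_state.json" (hypothetical path).
import json

with open("trainer_state.json") as f:
    state = json.load(f)

# Each entry in log_history mirrors the rows above: epoch, grad_norm, learning_rate, loss, step.
logged = [e for e in state["log_history"] if "loss" in e]
steps = [e["step"] for e in logged]
losses = [e["loss"] for e in logged]

print(f"logged points : {len(logged)}")
print(f"first loss    : {losses[0]:.4f} at step {steps[0]}")
print(f"latest loss   : {losses[-1]:.4f} at step {steps[-1]}")
print(f"global step   : {state['global_step']} of {state['max_steps']} "
      f"({state['num_train_epochs']} planned epochs)")
```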