{
  "best_global_step": 2000,
  "best_metric": 0.854835983797369,
  "best_model_checkpoint": "./SALAMA_NEWMEDTTT/checkpoint-2000",
  "epoch": 0.7990012484394506,
  "eval_steps": 2000,
  "global_step": 2000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.003995006242197253,
      "grad_norm": 0.9641304016113281,
      "learning_rate": 1.8e-07,
      "loss": 0.006,
      "step": 10
    },
    {
      "epoch": 0.007990012484394507,
      "grad_norm": 1.8198537826538086,
      "learning_rate": 3.8e-07,
      "loss": 0.0069,
      "step": 20
    },
    {
      "epoch": 0.01198501872659176,
      "grad_norm": 0.9285380244255066,
      "learning_rate": 5.800000000000001e-07,
      "loss": 0.0055,
      "step": 30
    },
    {
      "epoch": 0.015980024968789013,
      "grad_norm": 0.5802440643310547,
      "learning_rate": 7.8e-07,
      "loss": 0.0033,
      "step": 40
    },
    {
      "epoch": 0.019975031210986267,
      "grad_norm": 1.8845840692520142,
      "learning_rate": 9.800000000000001e-07,
      "loss": 0.0076,
      "step": 50
    },
    {
      "epoch": 0.02397003745318352,
      "grad_norm": 3.1090924739837646,
      "learning_rate": 1.1800000000000001e-06,
      "loss": 0.0049,
      "step": 60
    },
    {
      "epoch": 0.027965043695380776,
      "grad_norm": 2.336266040802002,
      "learning_rate": 1.3800000000000001e-06,
      "loss": 0.0052,
      "step": 70
    },
    {
      "epoch": 0.031960049937578026,
      "grad_norm": 1.0518473386764526,
      "learning_rate": 1.5800000000000001e-06,
      "loss": 0.0031,
      "step": 80
    },
    {
      "epoch": 0.035955056179775284,
      "grad_norm": 0.6219454407691956,
      "learning_rate": 1.7800000000000001e-06,
      "loss": 0.0048,
      "step": 90
    },
    {
      "epoch": 0.039950062421972535,
      "grad_norm": 0.6421394348144531,
      "learning_rate": 1.98e-06,
      "loss": 0.0081,
      "step": 100
    },
    {
      "epoch": 0.043945068664169785,
      "grad_norm": 1.3314197063446045,
      "learning_rate": 2.1800000000000003e-06,
      "loss": 0.0061,
      "step": 110
    },
    {
      "epoch": 0.04794007490636704,
      "grad_norm": 1.8596506118774414,
      "learning_rate": 2.38e-06,
      "loss": 0.0044,
      "step": 120
    },
    {
      "epoch": 0.051935081148564294,
      "grad_norm": 1.2010821104049683,
      "learning_rate": 2.5800000000000003e-06,
      "loss": 0.0068,
      "step": 130
    },
    {
      "epoch": 0.05593008739076155,
      "grad_norm": 1.134049415588379,
      "learning_rate": 2.7800000000000005e-06,
      "loss": 0.0057,
      "step": 140
    },
    {
      "epoch": 0.0599250936329588,
      "grad_norm": 1.1893360614776611,
      "learning_rate": 2.9800000000000003e-06,
      "loss": 0.0065,
      "step": 150
    },
    {
      "epoch": 0.06392009987515605,
      "grad_norm": 0.11562289297580719,
      "learning_rate": 3.1800000000000005e-06,
      "loss": 0.0052,
      "step": 160
    },
    {
      "epoch": 0.06791510611735331,
      "grad_norm": 1.5058082342147827,
      "learning_rate": 3.3800000000000007e-06,
      "loss": 0.0061,
      "step": 170
    },
    {
      "epoch": 0.07191011235955057,
      "grad_norm": 1.4590610265731812,
      "learning_rate": 3.58e-06,
      "loss": 0.0117,
      "step": 180
    },
    {
      "epoch": 0.07590511860174781,
      "grad_norm": 0.17685647308826447,
      "learning_rate": 3.7800000000000002e-06,
      "loss": 0.0052,
      "step": 190
    },
    {
      "epoch": 0.07990012484394507,
      "grad_norm": 1.6710718870162964,
      "learning_rate": 3.980000000000001e-06,
      "loss": 0.0083,
      "step": 200
    },
    {
      "epoch": 0.08389513108614233,
      "grad_norm": 1.475885033607483,
      "learning_rate": 4.18e-06,
      "loss": 0.0119,
      "step": 210
    },
    {
      "epoch": 0.08789013732833957,
      "grad_norm": 0.43519771099090576,
      "learning_rate": 4.38e-06,
      "loss": 0.0077,
      "step": 220
    },
    {
      "epoch": 0.09188514357053683,
      "grad_norm": 1.2100317478179932,
      "learning_rate": 4.58e-06,
      "loss": 0.0057,
      "step": 230
    },
    {
      "epoch": 0.09588014981273409,
      "grad_norm": 0.39092394709587097,
      "learning_rate": 4.78e-06,
      "loss": 0.0053,
      "step": 240
    },
    {
      "epoch": 0.09987515605493133,
      "grad_norm": 1.807553768157959,
      "learning_rate": 4.980000000000001e-06,
      "loss": 0.0094,
      "step": 250
    },
    {
      "epoch": 0.10387016229712859,
      "grad_norm": 1.791165828704834,
      "learning_rate": 5.18e-06,
      "loss": 0.0098,
      "step": 260
    },
    {
      "epoch": 0.10786516853932585,
      "grad_norm": 0.7668461203575134,
      "learning_rate": 5.380000000000001e-06,
      "loss": 0.0066,
      "step": 270
    },
    {
      "epoch": 0.1118601747815231,
      "grad_norm": 2.34199595451355,
      "learning_rate": 5.580000000000001e-06,
      "loss": 0.0111,
      "step": 280
    },
    {
      "epoch": 0.11585518102372035,
      "grad_norm": 1.3942313194274902,
      "learning_rate": 5.78e-06,
      "loss": 0.0087,
      "step": 290
    },
    {
      "epoch": 0.1198501872659176,
      "grad_norm": 0.5836958885192871,
      "learning_rate": 5.98e-06,
      "loss": 0.0077,
      "step": 300
    },
    {
      "epoch": 0.12384519350811486,
      "grad_norm": 0.9044429659843445,
      "learning_rate": 6.18e-06,
      "loss": 0.0064,
      "step": 310
    },
    {
      "epoch": 0.1278401997503121,
      "grad_norm": 1.130627989768982,
      "learning_rate": 6.380000000000001e-06,
      "loss": 0.0075,
      "step": 320
    },
    {
      "epoch": 0.13183520599250936,
      "grad_norm": 1.221282720565796,
      "learning_rate": 6.5800000000000005e-06,
      "loss": 0.0068,
      "step": 330
    },
    {
      "epoch": 0.13583021223470662,
      "grad_norm": 2.746778964996338,
      "learning_rate": 6.780000000000001e-06,
      "loss": 0.0102,
      "step": 340
    },
    {
      "epoch": 0.13982521847690388,
      "grad_norm": 1.063391089439392,
      "learning_rate": 6.98e-06,
      "loss": 0.0076,
      "step": 350
    },
    {
      "epoch": 0.14382022471910114,
      "grad_norm": 0.9570655822753906,
      "learning_rate": 7.180000000000001e-06,
      "loss": 0.0099,
      "step": 360
    },
    {
      "epoch": 0.14781523096129837,
      "grad_norm": 0.4893503785133362,
      "learning_rate": 7.3800000000000005e-06,
      "loss": 0.0061,
      "step": 370
    },
    {
      "epoch": 0.15181023720349562,
      "grad_norm": 0.4272705018520355,
      "learning_rate": 7.58e-06,
      "loss": 0.009,
      "step": 380
    },
    {
      "epoch": 0.15580524344569288,
      "grad_norm": 1.3758041858673096,
      "learning_rate": 7.78e-06,
      "loss": 0.0161,
      "step": 390
    },
    {
      "epoch": 0.15980024968789014,
      "grad_norm": 1.4578285217285156,
      "learning_rate": 7.980000000000002e-06,
      "loss": 0.0076,
      "step": 400
    },
    {
      "epoch": 0.1637952559300874,
      "grad_norm": 1.4418432712554932,
      "learning_rate": 8.18e-06,
      "loss": 0.0067,
      "step": 410
    },
    {
      "epoch": 0.16779026217228465,
      "grad_norm": 1.1867637634277344,
      "learning_rate": 8.380000000000001e-06,
      "loss": 0.0088,
      "step": 420
    },
    {
      "epoch": 0.17178526841448188,
      "grad_norm": 1.5421844720840454,
      "learning_rate": 8.580000000000001e-06,
      "loss": 0.0095,
      "step": 430
    },
    {
      "epoch": 0.17578027465667914,
      "grad_norm": 1.0062193870544434,
      "learning_rate": 8.78e-06,
      "loss": 0.0079,
      "step": 440
    },
    {
      "epoch": 0.1797752808988764,
      "grad_norm": 2.0979905128479004,
      "learning_rate": 8.98e-06,
      "loss": 0.0124,
      "step": 450
    },
    {
      "epoch": 0.18377028714107366,
      "grad_norm": 1.6434396505355835,
      "learning_rate": 9.180000000000002e-06,
      "loss": 0.0067,
      "step": 460
    },
    {
      "epoch": 0.18776529338327091,
      "grad_norm": 0.8114656209945679,
      "learning_rate": 9.38e-06,
      "loss": 0.0152,
      "step": 470
    },
    {
      "epoch": 0.19176029962546817,
      "grad_norm": 1.19154691696167,
      "learning_rate": 9.58e-06,
      "loss": 0.0153,
      "step": 480
    },
    {
      "epoch": 0.19575530586766543,
      "grad_norm": 1.7915692329406738,
      "learning_rate": 9.780000000000001e-06,
      "loss": 0.007,
      "step": 490
    },
    {
      "epoch": 0.19975031210986266,
      "grad_norm": 1.6148704290390015,
      "learning_rate": 9.980000000000001e-06,
      "loss": 0.0081,
      "step": 500
    },
    {
      "epoch": 0.20374531835205992,
      "grad_norm": 4.897802352905273,
      "learning_rate": 9.980035492457854e-06,
      "loss": 0.0148,
      "step": 510
    },
    {
      "epoch": 0.20774032459425718,
      "grad_norm": 1.0882717370986938,
      "learning_rate": 9.957852706299913e-06,
      "loss": 0.0168,
      "step": 520
    },
    {
      "epoch": 0.21173533083645443,
      "grad_norm": 2.1700711250305176,
      "learning_rate": 9.935669920141971e-06,
      "loss": 0.0127,
      "step": 530
    },
    {
      "epoch": 0.2157303370786517,
      "grad_norm": 1.8156019449234009,
      "learning_rate": 9.91348713398403e-06,
      "loss": 0.0216,
      "step": 540
    },
    {
      "epoch": 0.21972534332084895,
      "grad_norm": 0.6672806739807129,
      "learning_rate": 9.891304347826088e-06,
      "loss": 0.0133,
      "step": 550
    },
    {
      "epoch": 0.2237203495630462,
      "grad_norm": 1.5110219717025757,
      "learning_rate": 9.869121561668146e-06,
      "loss": 0.0149,
      "step": 560
    },
    {
      "epoch": 0.22771535580524344,
      "grad_norm": 1.7372881174087524,
      "learning_rate": 9.846938775510205e-06,
      "loss": 0.0186,
      "step": 570
    },
    {
      "epoch": 0.2317103620474407,
      "grad_norm": 2.117297887802124,
      "learning_rate": 9.824755989352263e-06,
      "loss": 0.0178,
      "step": 580
    },
    {
      "epoch": 0.23570536828963795,
      "grad_norm": 0.7649470567703247,
      "learning_rate": 9.802573203194321e-06,
      "loss": 0.0175,
      "step": 590
    },
    {
      "epoch": 0.2397003745318352,
      "grad_norm": 1.8739404678344727,
      "learning_rate": 9.78039041703638e-06,
      "loss": 0.0151,
      "step": 600
    },
    {
      "epoch": 0.24369538077403247,
      "grad_norm": 1.6858923435211182,
      "learning_rate": 9.758207630878438e-06,
      "loss": 0.0182,
      "step": 610
    },
    {
      "epoch": 0.24769038701622972,
      "grad_norm": 1.2428107261657715,
      "learning_rate": 9.736024844720497e-06,
      "loss": 0.0137,
      "step": 620
    },
    {
      "epoch": 0.251685393258427,
      "grad_norm": 1.4081534147262573,
      "learning_rate": 9.713842058562555e-06,
      "loss": 0.0124,
      "step": 630
    },
    {
      "epoch": 0.2556803995006242,
      "grad_norm": 0.9433392286300659,
      "learning_rate": 9.691659272404614e-06,
      "loss": 0.015,
      "step": 640
    },
    {
      "epoch": 0.2596754057428215,
      "grad_norm": 1.1751147508621216,
      "learning_rate": 9.669476486246674e-06,
      "loss": 0.0142,
      "step": 650
    },
    {
      "epoch": 0.2636704119850187,
      "grad_norm": 1.8343169689178467,
      "learning_rate": 9.647293700088732e-06,
      "loss": 0.0135,
      "step": 660
    },
    {
      "epoch": 0.26766541822721596,
      "grad_norm": 1.0744661092758179,
      "learning_rate": 9.62511091393079e-06,
      "loss": 0.0115,
      "step": 670
    },
    {
      "epoch": 0.27166042446941324,
      "grad_norm": 2.02150559425354,
      "learning_rate": 9.602928127772849e-06,
      "loss": 0.0247,
      "step": 680
    },
    {
      "epoch": 0.27565543071161047,
      "grad_norm": 1.059798002243042,
      "learning_rate": 9.580745341614907e-06,
      "loss": 0.0126,
      "step": 690
    },
    {
      "epoch": 0.27965043695380776,
      "grad_norm": 2.3814563751220703,
      "learning_rate": 9.558562555456966e-06,
      "loss": 0.0187,
      "step": 700
    },
    {
      "epoch": 0.283645443196005,
      "grad_norm": 2.136800527572632,
      "learning_rate": 9.536379769299024e-06,
      "loss": 0.021,
      "step": 710
    },
    {
      "epoch": 0.2876404494382023,
      "grad_norm": 1.2156870365142822,
      "learning_rate": 9.514196983141083e-06,
      "loss": 0.0138,
      "step": 720
    },
    {
      "epoch": 0.2916354556803995,
      "grad_norm": 1.7681819200515747,
      "learning_rate": 9.492014196983141e-06,
      "loss": 0.0139,
      "step": 730
    },
    {
      "epoch": 0.29563046192259673,
      "grad_norm": 1.086654543876648,
      "learning_rate": 9.469831410825201e-06,
      "loss": 0.0215,
      "step": 740
    },
    {
      "epoch": 0.299625468164794,
      "grad_norm": 3.1630606651306152,
      "learning_rate": 9.44764862466726e-06,
      "loss": 0.014,
      "step": 750
    },
    {
      "epoch": 0.30362047440699125,
      "grad_norm": 1.9664831161499023,
      "learning_rate": 9.425465838509318e-06,
      "loss": 0.0133,
      "step": 760
    },
    {
      "epoch": 0.30761548064918853,
      "grad_norm": 1.4888386726379395,
      "learning_rate": 9.403283052351377e-06,
      "loss": 0.021,
      "step": 770
    },
    {
      "epoch": 0.31161048689138576,
      "grad_norm": 1.404789686203003,
      "learning_rate": 9.381100266193435e-06,
      "loss": 0.0099,
      "step": 780
    },
    {
      "epoch": 0.31560549313358305,
      "grad_norm": 2.4319567680358887,
      "learning_rate": 9.358917480035493e-06,
      "loss": 0.0132,
      "step": 790
    },
    {
      "epoch": 0.3196004993757803,
      "grad_norm": 3.325315475463867,
      "learning_rate": 9.336734693877552e-06,
      "loss": 0.0179,
      "step": 800
    },
    {
      "epoch": 0.3235955056179775,
      "grad_norm": 0.7138023376464844,
      "learning_rate": 9.31455190771961e-06,
      "loss": 0.0159,
      "step": 810
    },
    {
      "epoch": 0.3275905118601748,
      "grad_norm": 2.6261703968048096,
      "learning_rate": 9.292369121561669e-06,
      "loss": 0.0126,
      "step": 820
    },
    {
      "epoch": 0.331585518102372,
      "grad_norm": 1.8155118227005005,
      "learning_rate": 9.270186335403727e-06,
      "loss": 0.0148,
      "step": 830
    },
    {
      "epoch": 0.3355805243445693,
      "grad_norm": 3.166321277618408,
      "learning_rate": 9.248003549245787e-06,
      "loss": 0.0152,
      "step": 840
    },
    {
      "epoch": 0.33957553058676654,
      "grad_norm": 1.0673789978027344,
      "learning_rate": 9.225820763087846e-06,
      "loss": 0.0199,
      "step": 850
    },
    {
      "epoch": 0.34357053682896377,
      "grad_norm": 1.273202657699585,
      "learning_rate": 9.203637976929904e-06,
      "loss": 0.0134,
      "step": 860
    },
    {
      "epoch": 0.34756554307116105,
      "grad_norm": 1.2098419666290283,
      "learning_rate": 9.181455190771963e-06,
      "loss": 0.0119,
      "step": 870
    },
    {
      "epoch": 0.3515605493133583,
      "grad_norm": 1.6141830682754517,
      "learning_rate": 9.159272404614021e-06,
      "loss": 0.0164,
      "step": 880
    },
    {
      "epoch": 0.35555555555555557,
      "grad_norm": 1.0257773399353027,
      "learning_rate": 9.13708961845608e-06,
      "loss": 0.0204,
      "step": 890
    },
    {
      "epoch": 0.3595505617977528,
      "grad_norm": 1.8587626218795776,
      "learning_rate": 9.114906832298138e-06,
      "loss": 0.0217,
      "step": 900
    },
    {
      "epoch": 0.3635455680399501,
      "grad_norm": 2.3578131198883057,
      "learning_rate": 9.092724046140196e-06,
      "loss": 0.0197,
      "step": 910
    },
    {
      "epoch": 0.3675405742821473,
      "grad_norm": 1.582905888557434,
      "learning_rate": 9.070541259982255e-06,
      "loss": 0.0128,
      "step": 920
    },
    {
      "epoch": 0.37153558052434454,
      "grad_norm": 1.1242965459823608,
      "learning_rate": 9.048358473824313e-06,
      "loss": 0.0174,
      "step": 930
    },
    {
      "epoch": 0.37553058676654183,
      "grad_norm": 1.2778273820877075,
      "learning_rate": 9.026175687666371e-06,
      "loss": 0.0173,
      "step": 940
    },
    {
      "epoch": 0.37952559300873906,
      "grad_norm": 1.7987346649169922,
      "learning_rate": 9.00399290150843e-06,
      "loss": 0.0136,
      "step": 950
    },
    {
      "epoch": 0.38352059925093634,
      "grad_norm": 1.511250376701355,
      "learning_rate": 8.981810115350488e-06,
      "loss": 0.0223,
      "step": 960
    },
    {
      "epoch": 0.3875156054931336,
      "grad_norm": 1.457127571105957,
      "learning_rate": 8.959627329192547e-06,
      "loss": 0.0147,
      "step": 970
    },
    {
      "epoch": 0.39151061173533086,
      "grad_norm": 1.9975931644439697,
      "learning_rate": 8.937444543034605e-06,
      "loss": 0.0165,
      "step": 980
    },
    {
      "epoch": 0.3955056179775281,
      "grad_norm": 1.9559694528579712,
      "learning_rate": 8.915261756876664e-06,
      "loss": 0.0166,
      "step": 990
    },
    {
      "epoch": 0.3995006242197253,
      "grad_norm": 1.8788564205169678,
      "learning_rate": 8.893078970718722e-06,
      "loss": 0.0202,
      "step": 1000
    },
    {
      "epoch": 0.4034956304619226,
      "grad_norm": 1.40276038646698,
      "learning_rate": 8.87089618456078e-06,
      "loss": 0.0194,
      "step": 1010
    },
    {
      "epoch": 0.40749063670411984,
      "grad_norm": 1.9642316102981567,
      "learning_rate": 8.84871339840284e-06,
      "loss": 0.0152,
      "step": 1020
    },
    {
      "epoch": 0.4114856429463171,
      "grad_norm": 2.1016628742218018,
      "learning_rate": 8.826530612244899e-06,
      "loss": 0.016,
      "step": 1030
    },
    {
      "epoch": 0.41548064918851435,
      "grad_norm": 1.6866837739944458,
      "learning_rate": 8.804347826086957e-06,
      "loss": 0.0192,
      "step": 1040
    },
    {
      "epoch": 0.41947565543071164,
      "grad_norm": 2.214118480682373,
      "learning_rate": 8.782165039929016e-06,
      "loss": 0.0157,
      "step": 1050
    },
    {
      "epoch": 0.42347066167290887,
      "grad_norm": 1.396916389465332,
      "learning_rate": 8.759982253771074e-06,
      "loss": 0.0171,
      "step": 1060
    },
    {
      "epoch": 0.4274656679151061,
      "grad_norm": 2.7234904766082764,
      "learning_rate": 8.737799467613133e-06,
      "loss": 0.017,
      "step": 1070
    },
    {
      "epoch": 0.4314606741573034,
      "grad_norm": 1.3929657936096191,
      "learning_rate": 8.715616681455191e-06,
      "loss": 0.0152,
      "step": 1080
    },
    {
      "epoch": 0.4354556803995006,
      "grad_norm": 1.807311773300171,
      "learning_rate": 8.69343389529725e-06,
      "loss": 0.0149,
      "step": 1090
    },
    {
      "epoch": 0.4394506866416979,
      "grad_norm": 2.0680670738220215,
      "learning_rate": 8.671251109139308e-06,
      "loss": 0.0124,
      "step": 1100
    },
    {
      "epoch": 0.4434456928838951,
      "grad_norm": 1.3359425067901611,
      "learning_rate": 8.649068322981368e-06,
      "loss": 0.0132,
      "step": 1110
    },
    {
      "epoch": 0.4474406991260924,
      "grad_norm": 1.089568018913269,
      "learning_rate": 8.626885536823427e-06,
      "loss": 0.0127,
      "step": 1120
    },
    {
      "epoch": 0.45143570536828964,
      "grad_norm": 1.5060057640075684,
      "learning_rate": 8.604702750665485e-06,
      "loss": 0.0172,
      "step": 1130
    },
    {
      "epoch": 0.45543071161048687,
      "grad_norm": 1.3111387491226196,
      "learning_rate": 8.582519964507543e-06,
      "loss": 0.01,
      "step": 1140
    },
    {
      "epoch": 0.45942571785268416,
      "grad_norm": 1.7749875783920288,
      "learning_rate": 8.560337178349602e-06,
      "loss": 0.0176,
      "step": 1150
    },
    {
      "epoch": 0.4634207240948814,
      "grad_norm": 0.6430302262306213,
      "learning_rate": 8.53815439219166e-06,
      "loss": 0.0103,
      "step": 1160
    },
    {
      "epoch": 0.46741573033707867,
      "grad_norm": 2.322556257247925,
      "learning_rate": 8.515971606033719e-06,
      "loss": 0.0143,
      "step": 1170
    },
    {
      "epoch": 0.4714107365792759,
      "grad_norm": 1.683770775794983,
      "learning_rate": 8.493788819875777e-06,
      "loss": 0.0139,
      "step": 1180
    },
    {
      "epoch": 0.47540574282147313,
      "grad_norm": 1.8032125234603882,
      "learning_rate": 8.471606033717836e-06,
      "loss": 0.0157,
      "step": 1190
    },
    {
      "epoch": 0.4794007490636704,
      "grad_norm": 2.082831859588623,
      "learning_rate": 8.449423247559894e-06,
      "loss": 0.014,
      "step": 1200
    },
    {
      "epoch": 0.48339575530586765,
      "grad_norm": 1.434509038925171,
      "learning_rate": 8.427240461401952e-06,
      "loss": 0.0218,
      "step": 1210
    },
    {
      "epoch": 0.48739076154806493,
      "grad_norm": 1.7403631210327148,
      "learning_rate": 8.40505767524401e-06,
      "loss": 0.0142,
      "step": 1220
    },
    {
      "epoch": 0.49138576779026216,
      "grad_norm": 0.542819082736969,
      "learning_rate": 8.38287488908607e-06,
      "loss": 0.0184,
      "step": 1230
    },
    {
      "epoch": 0.49538077403245945,
      "grad_norm": 1.7680611610412598,
      "learning_rate": 8.360692102928128e-06,
      "loss": 0.0128,
      "step": 1240
    },
    {
      "epoch": 0.4993757802746567,
      "grad_norm": 1.9297124147415161,
      "learning_rate": 8.338509316770186e-06,
      "loss": 0.0145,
      "step": 1250
    },
    {
      "epoch": 0.503370786516854,
      "grad_norm": 1.825976848602295,
      "learning_rate": 8.316326530612246e-06,
      "loss": 0.0152,
      "step": 1260
    },
    {
      "epoch": 0.5073657927590511,
      "grad_norm": 2.373891592025757,
      "learning_rate": 8.294143744454305e-06,
      "loss": 0.0113,
      "step": 1270
    },
    {
      "epoch": 0.5113607990012484,
      "grad_norm": 1.0525145530700684,
      "learning_rate": 8.271960958296363e-06,
      "loss": 0.0102,
      "step": 1280
    },
    {
      "epoch": 0.5153558052434457,
      "grad_norm": 0.9661758542060852,
      "learning_rate": 8.249778172138421e-06,
      "loss": 0.0158,
      "step": 1290
    },
    {
      "epoch": 0.519350811485643,
      "grad_norm": 1.7053468227386475,
      "learning_rate": 8.22759538598048e-06,
      "loss": 0.0167,
      "step": 1300
    },
    {
      "epoch": 0.5233458177278402,
      "grad_norm": 1.5533392429351807,
      "learning_rate": 8.205412599822538e-06,
      "loss": 0.0123,
      "step": 1310
    },
    {
      "epoch": 0.5273408239700375,
      "grad_norm": 1.5681849718093872,
      "learning_rate": 8.183229813664597e-06,
      "loss": 0.015,
      "step": 1320
    },
    {
      "epoch": 0.5313358302122347,
      "grad_norm": 1.7171717882156372,
      "learning_rate": 8.161047027506655e-06,
      "loss": 0.0226,
      "step": 1330
    },
    {
      "epoch": 0.5353308364544319,
      "grad_norm": 1.3488409519195557,
      "learning_rate": 8.138864241348714e-06,
      "loss": 0.0133,
      "step": 1340
    },
    {
      "epoch": 0.5393258426966292,
      "grad_norm": 1.7044317722320557,
      "learning_rate": 8.116681455190772e-06,
      "loss": 0.0141,
      "step": 1350
    },
    {
      "epoch": 0.5433208489388265,
      "grad_norm": 1.6619277000427246,
      "learning_rate": 8.09449866903283e-06,
      "loss": 0.0191,
      "step": 1360
    },
    {
      "epoch": 0.5473158551810238,
      "grad_norm": 1.2049981355667114,
      "learning_rate": 8.072315882874889e-06,
      "loss": 0.0166,
      "step": 1370
    },
    {
      "epoch": 0.5513108614232209,
      "grad_norm": 2.1938767433166504,
      "learning_rate": 8.050133096716947e-06,
      "loss": 0.0308,
      "step": 1380
    },
    {
      "epoch": 0.5553058676654182,
      "grad_norm": 0.9096195697784424,
      "learning_rate": 8.027950310559007e-06,
      "loss": 0.0133,
      "step": 1390
    },
    {
      "epoch": 0.5593008739076155,
      "grad_norm": 0.7625906467437744,
      "learning_rate": 8.005767524401066e-06,
      "loss": 0.0132,
      "step": 1400
    },
    {
      "epoch": 0.5632958801498127,
      "grad_norm": 0.9563636183738708,
      "learning_rate": 7.983584738243124e-06,
      "loss": 0.0099,
      "step": 1410
    },
    {
      "epoch": 0.56729088639201,
      "grad_norm": 1.9605094194412231,
      "learning_rate": 7.961401952085183e-06,
      "loss": 0.0132,
      "step": 1420
    },
    {
      "epoch": 0.5712858926342073,
      "grad_norm": 1.3071681261062622,
      "learning_rate": 7.939219165927241e-06,
      "loss": 0.0141,
      "step": 1430
    },
    {
      "epoch": 0.5752808988764045,
      "grad_norm": 1.344677209854126,
      "learning_rate": 7.9170363797693e-06,
      "loss": 0.0119,
      "step": 1440
    },
    {
      "epoch": 0.5792759051186017,
      "grad_norm": 0.7557487487792969,
      "learning_rate": 7.894853593611358e-06,
      "loss": 0.012,
      "step": 1450
    },
    {
      "epoch": 0.583270911360799,
      "grad_norm": 0.9158011078834534,
      "learning_rate": 7.872670807453416e-06,
      "loss": 0.0161,
      "step": 1460
    },
    {
      "epoch": 0.5872659176029963,
      "grad_norm": 1.8613120317459106,
      "learning_rate": 7.850488021295475e-06,
      "loss": 0.0176,
      "step": 1470
    },
    {
      "epoch": 0.5912609238451935,
      "grad_norm": 1.1209336519241333,
      "learning_rate": 7.828305235137535e-06,
      "loss": 0.0079,
      "step": 1480
    },
    {
      "epoch": 0.5952559300873907,
      "grad_norm": 1.9394909143447876,
      "learning_rate": 7.806122448979593e-06,
      "loss": 0.0141,
      "step": 1490
    },
    {
      "epoch": 0.599250936329588,
      "grad_norm": 1.1817519664764404,
      "learning_rate": 7.783939662821652e-06,
      "loss": 0.0194,
      "step": 1500
    },
    {
      "epoch": 0.6032459425717853,
      "grad_norm": 1.9992880821228027,
      "learning_rate": 7.76175687666371e-06,
      "loss": 0.0181,
      "step": 1510
    },
    {
      "epoch": 0.6072409488139825,
      "grad_norm": 1.5693268775939941,
      "learning_rate": 7.739574090505769e-06,
      "loss": 0.015,
      "step": 1520
    },
    {
      "epoch": 0.6112359550561798,
      "grad_norm": 0.8529360294342041,
      "learning_rate": 7.717391304347827e-06,
      "loss": 0.0195,
      "step": 1530
    },
    {
      "epoch": 0.6152309612983771,
      "grad_norm": 2.413498878479004,
      "learning_rate": 7.695208518189886e-06,
      "loss": 0.012,
      "step": 1540
    },
    {
      "epoch": 0.6192259675405742,
      "grad_norm": 0.7738659977912903,
      "learning_rate": 7.673025732031944e-06,
      "loss": 0.0113,
      "step": 1550
    },
    {
      "epoch": 0.6232209737827715,
      "grad_norm": 2.782437562942505,
      "learning_rate": 7.650842945874002e-06,
      "loss": 0.0128,
      "step": 1560
    },
    {
      "epoch": 0.6272159800249688,
      "grad_norm": 1.7309448719024658,
      "learning_rate": 7.628660159716062e-06,
      "loss": 0.0108,
      "step": 1570
    },
    {
      "epoch": 0.6312109862671661,
      "grad_norm": 1.4984791278839111,
      "learning_rate": 7.60647737355812e-06,
      "loss": 0.0201,
      "step": 1580
    },
    {
      "epoch": 0.6352059925093633,
      "grad_norm": 1.505699872970581,
      "learning_rate": 7.5842945874001785e-06,
      "loss": 0.0109,
      "step": 1590
    },
    {
      "epoch": 0.6392009987515606,
      "grad_norm": 2.4657981395721436,
      "learning_rate": 7.562111801242237e-06,
      "loss": 0.0154,
      "step": 1600
    },
    {
      "epoch": 0.6431960049937578,
      "grad_norm": 1.2779576778411865,
      "learning_rate": 7.539929015084295e-06,
      "loss": 0.0167,
      "step": 1610
    },
    {
      "epoch": 0.647191011235955,
      "grad_norm": 1.947884440422058,
      "learning_rate": 7.517746228926354e-06,
      "loss": 0.0115,
      "step": 1620
    },
    {
      "epoch": 0.6511860174781523,
      "grad_norm": 1.5985134840011597,
      "learning_rate": 7.495563442768412e-06,
      "loss": 0.014,
      "step": 1630
    },
    {
      "epoch": 0.6551810237203496,
      "grad_norm": 1.1401286125183105,
      "learning_rate": 7.473380656610471e-06,
      "loss": 0.0118,
      "step": 1640
    },
    {
      "epoch": 0.6591760299625468,
      "grad_norm": 0.7950101494789124,
      "learning_rate": 7.451197870452529e-06,
      "loss": 0.0144,
      "step": 1650
    },
    {
      "epoch": 0.663171036204744,
      "grad_norm": 0.976190984249115,
      "learning_rate": 7.429015084294588e-06,
      "loss": 0.0113,
      "step": 1660
    },
    {
      "epoch": 0.6671660424469413,
      "grad_norm": 1.813292145729065,
      "learning_rate": 7.406832298136647e-06,
      "loss": 0.0172,
      "step": 1670
    },
    {
      "epoch": 0.6711610486891386,
      "grad_norm": 1.2922966480255127,
      "learning_rate": 7.384649511978705e-06,
      "loss": 0.014,
      "step": 1680
    },
    {
      "epoch": 0.6751560549313358,
      "grad_norm": 1.797882318496704,
      "learning_rate": 7.362466725820764e-06,
      "loss": 0.0159,
      "step": 1690
    },
    {
      "epoch": 0.6791510611735331,
      "grad_norm": 1.7543209791183472,
      "learning_rate": 7.340283939662822e-06,
      "loss": 0.0142,
      "step": 1700
    },
    {
      "epoch": 0.6831460674157304,
      "grad_norm": 1.8360003232955933,
      "learning_rate": 7.3181011535048805e-06,
      "loss": 0.0165,
      "step": 1710
    },
    {
      "epoch": 0.6871410736579275,
      "grad_norm": 1.775307059288025,
      "learning_rate": 7.295918367346939e-06,
      "loss": 0.0151,
      "step": 1720
    },
    {
      "epoch": 0.6911360799001248,
      "grad_norm": 1.120833158493042,
      "learning_rate": 7.273735581188997e-06,
      "loss": 0.0105,
      "step": 1730
    },
    {
      "epoch": 0.6951310861423221,
      "grad_norm": 2.0523838996887207,
      "learning_rate": 7.251552795031056e-06,
      "loss": 0.0127,
      "step": 1740
    },
    {
      "epoch": 0.6991260923845194,
      "grad_norm": 0.9151129126548767,
      "learning_rate": 7.229370008873116e-06,
      "loss": 0.0132,
      "step": 1750
    },
    {
      "epoch": 0.7031210986267166,
      "grad_norm": 1.4018059968948364,
      "learning_rate": 7.207187222715174e-06,
      "loss": 0.0123,
      "step": 1760
    },
    {
      "epoch": 0.7071161048689139,
      "grad_norm": 0.31861627101898193,
      "learning_rate": 7.185004436557233e-06,
      "loss": 0.0125,
      "step": 1770
    },
    {
      "epoch": 0.7111111111111111,
      "grad_norm": 0.8622251152992249,
      "learning_rate": 7.162821650399291e-06,
      "loss": 0.0134,
      "step": 1780
    },
    {
      "epoch": 0.7151061173533083,
      "grad_norm": 2.0537431240081787,
      "learning_rate": 7.1406388642413496e-06,
      "loss": 0.0121,
      "step": 1790
    },
    {
      "epoch": 0.7191011235955056,
      "grad_norm": 1.9600417613983154,
      "learning_rate": 7.118456078083408e-06,
      "loss": 0.0117,
      "step": 1800
    },
    {
      "epoch": 0.7230961298377029,
      "grad_norm": 0.7996333241462708,
      "learning_rate": 7.0962732919254664e-06,
      "loss": 0.0111,
      "step": 1810
    },
    {
      "epoch": 0.7270911360799002,
      "grad_norm": 1.1933099031448364,
      "learning_rate": 7.074090505767525e-06,
      "loss": 0.0153,
      "step": 1820
    },
    {
      "epoch": 0.7310861423220973,
      "grad_norm": 3.3150546550750732,
      "learning_rate": 7.051907719609583e-06,
      "loss": 0.0147,
      "step": 1830
    },
    {
      "epoch": 0.7350811485642946,
      "grad_norm": 1.1746463775634766,
      "learning_rate": 7.0297249334516426e-06,
      "loss": 0.0148,
      "step": 1840
    },
    {
      "epoch": 0.7390761548064919,
      "grad_norm": 0.9147731065750122,
      "learning_rate": 7.007542147293701e-06,
      "loss": 0.0141,
      "step": 1850
    },
    {
      "epoch": 0.7430711610486891,
      "grad_norm": 1.264098882675171,
      "learning_rate": 6.985359361135759e-06,
      "loss": 0.0122,
      "step": 1860
    },
    {
      "epoch": 0.7470661672908864,
      "grad_norm": 1.3051644563674927,
      "learning_rate": 6.963176574977818e-06,
      "loss": 0.0169,
      "step": 1870
    },
    {
      "epoch": 0.7510611735330837,
      "grad_norm": 1.8435847759246826,
      "learning_rate": 6.940993788819876e-06,
      "loss": 0.0149,
      "step": 1880
    },
    {
      "epoch": 0.755056179775281,
      "grad_norm": 1.2256033420562744,
      "learning_rate": 6.918811002661935e-06,
      "loss": 0.0118,
      "step": 1890
    },
    {
      "epoch": 0.7590511860174781,
      "grad_norm": 1.3870049715042114,
      "learning_rate": 6.896628216503993e-06,
      "loss": 0.0077,
      "step": 1900
    },
    {
      "epoch": 0.7630461922596754,
      "grad_norm": 1.935509443283081,
      "learning_rate": 6.8744454303460515e-06,
      "loss": 0.0114,
      "step": 1910
    },
    {
      "epoch": 0.7670411985018727,
      "grad_norm": 2.061938524246216,
      "learning_rate": 6.85226264418811e-06,
      "loss": 0.0124,
      "step": 1920
    },
    {
      "epoch": 0.7710362047440699,
      "grad_norm": 2.2215144634246826,
      "learning_rate": 6.830079858030168e-06,
      "loss": 0.0142,
      "step": 1930
    },
    {
      "epoch": 0.7750312109862671,
      "grad_norm": 2.1934878826141357,
      "learning_rate": 6.8078970718722285e-06,
      "loss": 0.0136,
      "step": 1940
    },
    {
      "epoch": 0.7790262172284644,
      "grad_norm": 2.866685152053833,
      "learning_rate": 6.785714285714287e-06,
      "loss": 0.0172,
      "step": 1950
    },
    {
      "epoch": 0.7830212234706617,
      "grad_norm": 0.7049480676651001,
      "learning_rate": 6.763531499556345e-06,
      "loss": 0.0083,
      "step": 1960
    },
    {
      "epoch": 0.7870162297128589,
      "grad_norm": 0.8357182145118713,
      "learning_rate": 6.741348713398404e-06,
      "loss": 0.0099,
      "step": 1970
    },
    {
      "epoch": 0.7910112359550562,
      "grad_norm": 2.4045839309692383,
      "learning_rate": 6.719165927240462e-06,
      "loss": 0.0097,
      "step": 1980
    },
    {
      "epoch": 0.7950062421972535,
      "grad_norm": 1.0173354148864746,
      "learning_rate": 6.696983141082521e-06,
      "loss": 0.0145,
      "step": 1990
    },
    {
      "epoch": 0.7990012484394506,
      "grad_norm": 1.5352550745010376,
      "learning_rate": 6.674800354924579e-06,
      "loss": 0.0111,
      "step": 2000
    },
    {
      "epoch": 0.7990012484394506,
      "eval_loss": 0.008306492120027542,
      "eval_runtime": 14435.9601,
      "eval_samples_per_second": 1.387,
      "eval_steps_per_second": 0.173,
      "eval_wer": 0.854835983797369,
      "step": 2000
    }
  ],
  "logging_steps": 10,
  "max_steps": 5008,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 2,
  "save_steps": 2000,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 6.531871408128e+19,
  "train_batch_size": 4,
  "trial_name": null,
  "trial_params": null
}