{
  "best_global_step": 300,
  "best_metric": 0.5966701953613704,
  "best_model_checkpoint": "./SALAMA_NEW88/checkpoint-300",
  "epoch": 1.9559543230016314,
  "eval_steps": 300,
  "global_step": 600,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.03262642740619902,
      "grad_norm": 2.3551647663116455,
      "learning_rate": 1.8e-07,
      "loss": 0.0153,
      "step": 10
    },
    {
      "epoch": 0.06525285481239804,
      "grad_norm": 1.2086862325668335,
      "learning_rate": 3.8e-07,
      "loss": 0.012,
      "step": 20
    },
    {
      "epoch": 0.09787928221859707,
      "grad_norm": 2.0345568656921387,
      "learning_rate": 5.800000000000001e-07,
      "loss": 0.0145,
      "step": 30
    },
    {
      "epoch": 0.13050570962479607,
      "grad_norm": 1.01999032497406,
      "learning_rate": 7.8e-07,
      "loss": 0.0091,
      "step": 40
    },
    {
      "epoch": 0.1631321370309951,
      "grad_norm": 0.9040848612785339,
      "learning_rate": 9.800000000000001e-07,
      "loss": 0.0112,
      "step": 50
    },
    {
      "epoch": 0.19575856443719414,
      "grad_norm": 1.4125545024871826,
      "learning_rate": 1.1800000000000001e-06,
      "loss": 0.0064,
      "step": 60
    },
    {
      "epoch": 0.22838499184339314,
      "grad_norm": 0.22181180119514465,
      "learning_rate": 1.3800000000000001e-06,
      "loss": 0.0089,
      "step": 70
    },
    {
      "epoch": 0.26101141924959215,
      "grad_norm": 0.6269964575767517,
      "learning_rate": 1.5800000000000001e-06,
      "loss": 0.0069,
      "step": 80
    },
    {
      "epoch": 0.2936378466557912,
      "grad_norm": 0.1682853251695633,
      "learning_rate": 1.7800000000000001e-06,
      "loss": 0.0077,
      "step": 90
    },
    {
      "epoch": 0.3262642740619902,
      "grad_norm": 0.14965638518333435,
      "learning_rate": 1.98e-06,
      "loss": 0.0026,
      "step": 100
    },
    {
      "epoch": 0.35889070146818924,
      "grad_norm": 0.34541550278663635,
      "learning_rate": 2.1800000000000003e-06,
      "loss": 0.0061,
      "step": 110
    },
    {
      "epoch": 0.3915171288743883,
      "grad_norm": 0.45224177837371826,
      "learning_rate": 2.38e-06,
      "loss": 0.0034,
      "step": 120
    },
    {
      "epoch": 0.42414355628058725,
      "grad_norm": 0.19250553846359253,
      "learning_rate": 2.5800000000000003e-06,
      "loss": 0.0038,
      "step": 130
    },
    {
      "epoch": 0.4567699836867863,
      "grad_norm": 1.2514749765396118,
      "learning_rate": 2.7800000000000005e-06,
      "loss": 0.0058,
      "step": 140
    },
    {
      "epoch": 0.4893964110929853,
      "grad_norm": 0.26269763708114624,
      "learning_rate": 2.9800000000000003e-06,
      "loss": 0.0046,
      "step": 150
    },
    {
      "epoch": 0.5220228384991843,
      "grad_norm": 0.5023095607757568,
      "learning_rate": 3.1800000000000005e-06,
      "loss": 0.008,
      "step": 160
    },
    {
      "epoch": 0.5546492659053833,
      "grad_norm": 0.5606697797775269,
      "learning_rate": 3.3800000000000007e-06,
      "loss": 0.0054,
      "step": 170
    },
    {
      "epoch": 0.5872756933115824,
      "grad_norm": 0.11588776856660843,
      "learning_rate": 3.58e-06,
      "loss": 0.0039,
      "step": 180
    },
    {
      "epoch": 0.6199021207177814,
      "grad_norm": 0.4340250492095947,
      "learning_rate": 3.7800000000000002e-06,
      "loss": 0.0053,
      "step": 190
    },
    {
      "epoch": 0.6525285481239804,
      "grad_norm": 0.1779269576072693,
      "learning_rate": 3.980000000000001e-06,
      "loss": 0.0046,
      "step": 200
    },
    {
      "epoch": 0.6851549755301795,
      "grad_norm": 0.42044180631637573,
      "learning_rate": 4.18e-06,
      "loss": 0.0076,
      "step": 210
    },
    {
      "epoch": 0.7177814029363785,
      "grad_norm": 0.2029053121805191,
      "learning_rate": 4.38e-06,
      "loss": 0.005,
      "step": 220
    },
    {
      "epoch": 0.7504078303425775,
      "grad_norm": 0.12045332789421082,
      "learning_rate": 4.58e-06,
      "loss": 0.0087,
      "step": 230
    },
    {
      "epoch": 0.7830342577487766,
      "grad_norm": 0.11535464227199554,
      "learning_rate": 4.78e-06,
      "loss": 0.0036,
      "step": 240
    },
    {
      "epoch": 0.8156606851549756,
      "grad_norm": 0.21480712294578552,
      "learning_rate": 4.980000000000001e-06,
      "loss": 0.006,
      "step": 250
    },
    {
      "epoch": 0.8482871125611745,
      "grad_norm": 0.23418085277080536,
      "learning_rate": 5.18e-06,
      "loss": 0.0111,
      "step": 260
    },
    {
      "epoch": 0.8809135399673735,
      "grad_norm": 0.8250060081481934,
      "learning_rate": 5.380000000000001e-06,
      "loss": 0.005,
      "step": 270
    },
    {
      "epoch": 0.9135399673735726,
      "grad_norm": 0.6729530692100525,
      "learning_rate": 5.580000000000001e-06,
      "loss": 0.0042,
      "step": 280
    },
    {
      "epoch": 0.9461663947797716,
      "grad_norm": 2.4627511501312256,
      "learning_rate": 5.78e-06,
      "loss": 0.0147,
      "step": 290
    },
    {
      "epoch": 0.9787928221859706,
      "grad_norm": 2.2341601848602295,
      "learning_rate": 5.98e-06,
      "loss": 0.0153,
      "step": 300
    },
    {
      "epoch": 0.9787928221859706,
      "eval_loss": 0.0076320902444422245,
      "eval_runtime": 875.0468,
      "eval_samples_per_second": 2.801,
      "eval_steps_per_second": 0.351,
      "eval_wer": 0.5966701953613704,
      "step": 300
    },
    {
      "epoch": 1.0097879282218598,
      "grad_norm": 0.2666170001029968,
      "learning_rate": 6.18e-06,
      "loss": 0.0106,
      "step": 310
    },
    {
      "epoch": 1.0424143556280587,
      "grad_norm": 0.3426216244697571,
      "learning_rate": 6.380000000000001e-06,
      "loss": 0.0051,
      "step": 320
    },
    {
      "epoch": 1.0750407830342577,
      "grad_norm": 0.26094576716423035,
      "learning_rate": 6.5800000000000005e-06,
      "loss": 0.0053,
      "step": 330
    },
    {
      "epoch": 1.1076672104404568,
      "grad_norm": 0.83148592710495,
      "learning_rate": 6.780000000000001e-06,
      "loss": 0.007,
      "step": 340
    },
    {
      "epoch": 1.1402936378466557,
      "grad_norm": 1.1708325147628784,
      "learning_rate": 6.98e-06,
      "loss": 0.0056,
      "step": 350
    },
    {
      "epoch": 1.1729200652528549,
      "grad_norm": 0.7598049640655518,
      "learning_rate": 7.180000000000001e-06,
      "loss": 0.0056,
      "step": 360
    },
    {
      "epoch": 1.2055464926590538,
      "grad_norm": 0.29737839102745056,
      "learning_rate": 7.3800000000000005e-06,
      "loss": 0.0048,
      "step": 370
    },
    {
      "epoch": 1.238172920065253,
      "grad_norm": 0.2751404643058777,
      "learning_rate": 7.58e-06,
      "loss": 0.0075,
      "step": 380
    },
    {
      "epoch": 1.2707993474714518,
      "grad_norm": 0.89850252866745,
      "learning_rate": 7.78e-06,
      "loss": 0.0054,
      "step": 390
    },
    {
      "epoch": 1.3034257748776508,
      "grad_norm": 2.9854891300201416,
      "learning_rate": 7.980000000000002e-06,
      "loss": 0.0049,
      "step": 400
    },
    {
      "epoch": 1.33605220228385,
      "grad_norm": 0.9358381628990173,
      "learning_rate": 8.18e-06,
      "loss": 0.0109,
      "step": 410
    },
    {
      "epoch": 1.368678629690049,
      "grad_norm": 2.8082876205444336,
      "learning_rate": 8.380000000000001e-06,
      "loss": 0.0082,
      "step": 420
    },
    {
      "epoch": 1.401305057096248,
      "grad_norm": 1.3110146522521973,
      "learning_rate": 8.580000000000001e-06,
      "loss": 0.0073,
      "step": 430
    },
    {
      "epoch": 1.433931484502447,
      "grad_norm": 1.0960301160812378,
      "learning_rate": 8.78e-06,
      "loss": 0.0141,
      "step": 440
    },
    {
      "epoch": 1.466557911908646,
      "grad_norm": 2.337661027908325,
      "learning_rate": 8.98e-06,
      "loss": 0.0076,
      "step": 450
    },
    {
      "epoch": 1.499184339314845,
      "grad_norm": 1.3074886798858643,
      "learning_rate": 9.180000000000002e-06,
      "loss": 0.0094,
      "step": 460
    },
    {
      "epoch": 1.531810766721044,
      "grad_norm": 2.7374138832092285,
      "learning_rate": 9.38e-06,
      "loss": 0.012,
      "step": 470
    },
    {
      "epoch": 1.564437194127243,
      "grad_norm": 1.2662121057510376,
      "learning_rate": 9.58e-06,
      "loss": 0.0101,
      "step": 480
    },
    {
      "epoch": 1.597063621533442,
      "grad_norm": 1.4484268426895142,
      "learning_rate": 9.780000000000001e-06,
      "loss": 0.0096,
      "step": 490
    },
    {
      "epoch": 1.629690048939641,
      "grad_norm": 1.6117146015167236,
      "learning_rate": 9.980000000000001e-06,
      "loss": 0.0103,
      "step": 500
    },
    {
      "epoch": 1.6623164763458402,
      "grad_norm": 1.0795536041259766,
      "learning_rate": 9.78622327790974e-06,
      "loss": 0.0103,
      "step": 510
    },
    {
      "epoch": 1.6949429037520392,
      "grad_norm": 2.0443150997161865,
      "learning_rate": 9.548693586698338e-06,
      "loss": 0.0081,
      "step": 520
    },
    {
      "epoch": 1.727569331158238,
      "grad_norm": 1.6950815916061401,
      "learning_rate": 9.311163895486937e-06,
      "loss": 0.0099,
      "step": 530
    },
    {
      "epoch": 1.7601957585644372,
      "grad_norm": 1.0695880651474,
      "learning_rate": 9.073634204275536e-06,
      "loss": 0.01,
      "step": 540
    },
    {
      "epoch": 1.7928221859706364,
      "grad_norm": 0.755652666091919,
      "learning_rate": 8.836104513064134e-06,
      "loss": 0.0098,
      "step": 550
    },
    {
      "epoch": 1.8254486133768353,
      "grad_norm": 1.6938477754592896,
      "learning_rate": 8.598574821852733e-06,
      "loss": 0.0122,
      "step": 560
    },
    {
      "epoch": 1.8580750407830342,
      "grad_norm": 1.686536192893982,
      "learning_rate": 8.36104513064133e-06,
      "loss": 0.0122,
      "step": 570
    },
    {
      "epoch": 1.8907014681892331,
      "grad_norm": 0.8220322728157043,
      "learning_rate": 8.12351543942993e-06,
      "loss": 0.009,
      "step": 580
    },
    {
      "epoch": 1.9233278955954323,
      "grad_norm": 0.788307785987854,
      "learning_rate": 7.885985748218527e-06,
      "loss": 0.0093,
      "step": 590
    },
    {
      "epoch": 1.9559543230016314,
      "grad_norm": 1.414797306060791,
      "learning_rate": 7.648456057007126e-06,
      "loss": 0.0079,
      "step": 600
    },
    {
      "epoch": 1.9559543230016314,
      "eval_loss": 0.009851695038378239,
      "eval_runtime": 884.3358,
      "eval_samples_per_second": 2.772,
      "eval_steps_per_second": 0.347,
      "eval_wer": 0.986430564911943,
      "step": 600
    }
  ],
  "logging_steps": 10,
  "max_steps": 921,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 300,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 5.53449080733696e+18,
  "train_batch_size": 16,
  "trial_name": null,
  "trial_params": null
}