{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.9230769230769234,
  "eval_steps": 500,
  "global_step": 90,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.03296703296703297,
      "grad_norm": 31.073850631713867,
      "learning_rate": 5.0000000000000004e-08,
      "loss": 2.5967,
      "step": 1
    },
    {
      "epoch": 0.06593406593406594,
      "grad_norm": 32.91181182861328,
      "learning_rate": 1.0000000000000001e-07,
      "loss": 2.723,
      "step": 2
    },
    {
      "epoch": 0.0989010989010989,
      "grad_norm": 31.494897842407227,
      "learning_rate": 1.5000000000000002e-07,
      "loss": 2.6125,
      "step": 3
    },
    {
      "epoch": 0.13186813186813187,
      "grad_norm": 30.80953598022461,
      "learning_rate": 2.0000000000000002e-07,
      "loss": 2.58,
      "step": 4
    },
    {
      "epoch": 0.16483516483516483,
      "grad_norm": 31.269071578979492,
      "learning_rate": 2.5000000000000004e-07,
      "loss": 2.5977,
      "step": 5
    },
    {
      "epoch": 0.1978021978021978,
      "grad_norm": 30.687875747680664,
      "learning_rate": 3.0000000000000004e-07,
      "loss": 2.5588,
      "step": 6
    },
    {
      "epoch": 0.23076923076923078,
      "grad_norm": 31.30279541015625,
      "learning_rate": 3.5000000000000004e-07,
      "loss": 2.5731,
      "step": 7
    },
    {
      "epoch": 0.26373626373626374,
      "grad_norm": 31.384830474853516,
      "learning_rate": 4.0000000000000003e-07,
      "loss": 2.561,
      "step": 8
    },
    {
      "epoch": 0.2967032967032967,
      "grad_norm": 30.58422088623047,
      "learning_rate": 4.5000000000000003e-07,
      "loss": 2.4872,
      "step": 9
    },
    {
      "epoch": 0.32967032967032966,
      "grad_norm": 30.883068084716797,
      "learning_rate": 5.000000000000001e-07,
      "loss": 2.5257,
      "step": 10
    },
    {
      "epoch": 0.3626373626373626,
      "grad_norm": 32.198814392089844,
      "learning_rate": 5.5e-07,
      "loss": 2.6286,
      "step": 11
    },
    {
      "epoch": 0.3956043956043956,
      "grad_norm": 31.001300811767578,
      "learning_rate": 6.000000000000001e-07,
      "loss": 2.4632,
      "step": 12
    },
    {
      "epoch": 0.42857142857142855,
      "grad_norm": 31.106016159057617,
      "learning_rate": 6.5e-07,
      "loss": 2.4274,
      "step": 13
    },
    {
      "epoch": 0.46153846153846156,
      "grad_norm": 31.180011749267578,
      "learning_rate": 7.000000000000001e-07,
      "loss": 2.3864,
      "step": 14
    },
    {
      "epoch": 0.4945054945054945,
      "grad_norm": 30.95736312866211,
      "learning_rate": 7.5e-07,
      "loss": 2.2977,
      "step": 15
    },
    {
      "epoch": 0.5274725274725275,
      "grad_norm": 31.67963218688965,
      "learning_rate": 8.000000000000001e-07,
      "loss": 2.2632,
      "step": 16
    },
    {
      "epoch": 0.5604395604395604,
      "grad_norm": 32.420562744140625,
      "learning_rate": 8.500000000000001e-07,
      "loss": 2.1983,
      "step": 17
    },
    {
      "epoch": 0.5934065934065934,
      "grad_norm": 32.46091079711914,
      "learning_rate": 9.000000000000001e-07,
      "loss": 2.0623,
      "step": 18
    },
    {
      "epoch": 0.6263736263736264,
      "grad_norm": 31.34447479248047,
      "learning_rate": 9.500000000000001e-07,
      "loss": 1.8633,
      "step": 19
    },
    {
      "epoch": 0.6593406593406593,
      "grad_norm": 31.65386962890625,
      "learning_rate": 1.0000000000000002e-06,
      "loss": 1.7861,
      "step": 20
    },
    {
      "epoch": 0.6923076923076923,
      "grad_norm": 30.12110137939453,
      "learning_rate": 1.0500000000000001e-06,
      "loss": 1.6408,
      "step": 21
    },
    {
      "epoch": 0.7252747252747253,
      "grad_norm": 28.824857711791992,
      "learning_rate": 1.1e-06,
      "loss": 1.5022,
      "step": 22
    },
    {
      "epoch": 0.7582417582417582,
      "grad_norm": 27.37271499633789,
      "learning_rate": 1.1500000000000002e-06,
      "loss": 1.3799,
      "step": 23
    },
    {
      "epoch": 0.7912087912087912,
      "grad_norm": 26.869949340820312,
      "learning_rate": 1.2000000000000002e-06,
      "loss": 1.2699,
      "step": 24
    },
    {
      "epoch": 0.8241758241758241,
      "grad_norm": 26.696306228637695,
      "learning_rate": 1.25e-06,
      "loss": 1.107,
      "step": 25
    },
    {
      "epoch": 0.8571428571428571,
      "grad_norm": 29.182300567626953,
      "learning_rate": 1.3e-06,
      "loss": 0.9981,
      "step": 26
    },
    {
      "epoch": 0.8901098901098901,
      "grad_norm": 28.117998123168945,
      "learning_rate": 1.3500000000000002e-06,
      "loss": 0.7886,
      "step": 27
    },
    {
      "epoch": 0.9230769230769231,
      "grad_norm": 27.153093338012695,
      "learning_rate": 1.4000000000000001e-06,
      "loss": 0.6565,
      "step": 28
    },
    {
      "epoch": 0.9560439560439561,
      "grad_norm": 24.350711822509766,
      "learning_rate": 1.45e-06,
      "loss": 0.5143,
      "step": 29
    },
    {
      "epoch": 0.989010989010989,
      "grad_norm": 21.189594268798828,
      "learning_rate": 1.5e-06,
      "loss": 0.3953,
      "step": 30
    },
    {
      "epoch": 1.0,
      "grad_norm": 21.189594268798828,
      "learning_rate": 1.5500000000000002e-06,
      "loss": 0.2703,
      "step": 31
    },
    {
      "epoch": 1.032967032967033,
      "grad_norm": 25.850399017333984,
      "learning_rate": 1.6000000000000001e-06,
      "loss": 0.2846,
      "step": 32
    },
    {
      "epoch": 1.065934065934066,
      "grad_norm": 7.641750335693359,
      "learning_rate": 1.6500000000000003e-06,
      "loss": 0.1964,
      "step": 33
    },
    {
      "epoch": 1.098901098901099,
      "grad_norm": 5.204587459564209,
      "learning_rate": 1.7000000000000002e-06,
      "loss": 0.195,
      "step": 34
    },
    {
      "epoch": 1.1318681318681318,
      "grad_norm": 5.610931396484375,
      "learning_rate": 1.75e-06,
      "loss": 0.2097,
      "step": 35
    },
    {
      "epoch": 1.164835164835165,
      "grad_norm": 3.679949998855591,
      "learning_rate": 1.8000000000000001e-06,
      "loss": 0.1428,
      "step": 36
    },
    {
      "epoch": 1.1978021978021978,
      "grad_norm": 2.877136707305908,
      "learning_rate": 1.85e-06,
      "loss": 0.1506,
      "step": 37
    },
    {
      "epoch": 1.2307692307692308,
      "grad_norm": 1.788109302520752,
      "learning_rate": 1.9000000000000002e-06,
      "loss": 0.1384,
      "step": 38
    },
    {
      "epoch": 1.2637362637362637,
      "grad_norm": 1.154598355293274,
      "learning_rate": 1.9500000000000004e-06,
      "loss": 0.1255,
      "step": 39
    },
    {
      "epoch": 1.2967032967032968,
      "grad_norm": 1.1099777221679688,
      "learning_rate": 2.0000000000000003e-06,
      "loss": 0.1291,
      "step": 40
    },
    {
      "epoch": 1.3296703296703296,
      "grad_norm": 1.022336483001709,
      "learning_rate": 2.05e-06,
      "loss": 0.1149,
      "step": 41
    },
    {
      "epoch": 1.3626373626373627,
      "grad_norm": 0.8197290897369385,
      "learning_rate": 2.1000000000000002e-06,
      "loss": 0.1175,
      "step": 42
    },
    {
      "epoch": 1.3956043956043955,
      "grad_norm": 1.283385992050171,
      "learning_rate": 2.15e-06,
      "loss": 0.1137,
      "step": 43
    },
    {
      "epoch": 1.4285714285714286,
      "grad_norm": 0.9875780344009399,
      "learning_rate": 2.2e-06,
      "loss": 0.117,
      "step": 44
    },
    {
      "epoch": 1.4615384615384617,
      "grad_norm": 0.5812683701515198,
      "learning_rate": 2.25e-06,
      "loss": 0.1089,
      "step": 45
    },
    {
      "epoch": 1.4945054945054945,
      "grad_norm": 0.8154236674308777,
      "learning_rate": 2.3000000000000004e-06,
      "loss": 0.1102,
      "step": 46
    },
    {
      "epoch": 1.5274725274725274,
      "grad_norm": 0.6170194149017334,
      "learning_rate": 2.35e-06,
      "loss": 0.1108,
      "step": 47
    },
    {
      "epoch": 1.5604395604395604,
      "grad_norm": 0.8121249675750732,
      "learning_rate": 2.4000000000000003e-06,
      "loss": 0.1077,
      "step": 48
    },
    {
      "epoch": 1.5934065934065935,
      "grad_norm": 0.7454224824905396,
      "learning_rate": 2.4500000000000003e-06,
      "loss": 0.1089,
      "step": 49
    },
    {
      "epoch": 1.6263736263736264,
      "grad_norm": 1.021628499031067,
      "learning_rate": 2.5e-06,
      "loss": 0.1075,
      "step": 50
    },
    {
      "epoch": 1.6593406593406592,
      "grad_norm": 0.8242588639259338,
      "learning_rate": 2.55e-06,
      "loss": 0.1056,
      "step": 51
    },
    {
      "epoch": 1.6923076923076923,
      "grad_norm": 0.7174047827720642,
      "learning_rate": 2.6e-06,
      "loss": 0.1062,
      "step": 52
    },
    {
      "epoch": 1.7252747252747254,
      "grad_norm": 0.676979660987854,
      "learning_rate": 2.6500000000000005e-06,
      "loss": 0.1062,
      "step": 53
    },
    {
      "epoch": 1.7582417582417582,
      "grad_norm": 0.44768354296684265,
      "learning_rate": 2.7000000000000004e-06,
      "loss": 0.1073,
      "step": 54
    },
    {
      "epoch": 1.791208791208791,
      "grad_norm": 1.0064852237701416,
      "learning_rate": 2.7500000000000004e-06,
      "loss": 0.1033,
      "step": 55
    },
    {
      "epoch": 1.8241758241758241,
      "grad_norm": 1.3364107608795166,
      "learning_rate": 2.8000000000000003e-06,
      "loss": 0.1041,
      "step": 56
    },
    {
      "epoch": 1.8571428571428572,
      "grad_norm": 1.187800645828247,
      "learning_rate": 2.85e-06,
      "loss": 0.1054,
      "step": 57
    },
    {
      "epoch": 1.89010989010989,
      "grad_norm": 1.457709550857544,
      "learning_rate": 2.9e-06,
      "loss": 0.1163,
      "step": 58
    },
    {
      "epoch": 1.9230769230769231,
      "grad_norm": 1.1929093599319458,
      "learning_rate": 2.95e-06,
      "loss": 0.1049,
      "step": 59
    },
    {
      "epoch": 1.9560439560439562,
      "grad_norm": 0.6893891096115112,
      "learning_rate": 3e-06,
      "loss": 0.1023,
      "step": 60
    },
    {
      "epoch": 1.989010989010989,
      "grad_norm": 0.7837490439414978,
      "learning_rate": 3.05e-06,
      "loss": 0.1014,
      "step": 61
    },
    {
      "epoch": 2.0,
      "grad_norm": 0.7837490439414978,
      "learning_rate": 3.1000000000000004e-06,
      "loss": 0.1324,
      "step": 62
    },
    {
      "epoch": 2.032967032967033,
      "grad_norm": 2.488649845123291,
      "learning_rate": 3.1500000000000003e-06,
      "loss": 0.1017,
      "step": 63
    },
    {
      "epoch": 2.065934065934066,
      "grad_norm": 1.104415774345398,
      "learning_rate": 3.2000000000000003e-06,
      "loss": 0.1019,
      "step": 64
    },
    {
      "epoch": 2.098901098901099,
      "grad_norm": 0.9327191710472107,
      "learning_rate": 3.2500000000000002e-06,
      "loss": 0.0996,
      "step": 65
    },
    {
      "epoch": 2.131868131868132,
      "grad_norm": 1.17020583152771,
      "learning_rate": 3.3000000000000006e-06,
      "loss": 0.0979,
      "step": 66
    },
    {
      "epoch": 2.1648351648351647,
      "grad_norm": 0.6745622754096985,
      "learning_rate": 3.3500000000000005e-06,
      "loss": 0.0924,
      "step": 67
    },
    {
      "epoch": 2.197802197802198,
      "grad_norm": 0.8487798571586609,
      "learning_rate": 3.4000000000000005e-06,
      "loss": 0.0993,
      "step": 68
    },
    {
      "epoch": 2.230769230769231,
      "grad_norm": 1.7320159673690796,
      "learning_rate": 3.45e-06,
      "loss": 0.0929,
      "step": 69
    },
    {
      "epoch": 2.2637362637362637,
      "grad_norm": 1.8361762762069702,
      "learning_rate": 3.5e-06,
      "loss": 0.0931,
      "step": 70
    },
    {
      "epoch": 2.2967032967032965,
      "grad_norm": 0.8804886341094971,
      "learning_rate": 3.5500000000000003e-06,
      "loss": 0.0905,
      "step": 71
    },
    {
      "epoch": 2.32967032967033,
      "grad_norm": 1.29473876953125,
      "learning_rate": 3.6000000000000003e-06,
      "loss": 0.0959,
      "step": 72
    },
    {
      "epoch": 2.3626373626373627,
      "grad_norm": 1.2560906410217285,
      "learning_rate": 3.65e-06,
      "loss": 0.0954,
      "step": 73
    },
    {
      "epoch": 2.3956043956043955,
      "grad_norm": 1.2681041955947876,
      "learning_rate": 3.7e-06,
      "loss": 0.0935,
      "step": 74
    },
    {
      "epoch": 2.4285714285714284,
      "grad_norm": 1.6790293455123901,
      "learning_rate": 3.7500000000000005e-06,
      "loss": 0.0958,
      "step": 75
    },
    {
      "epoch": 2.4615384615384617,
      "grad_norm": 1.250998854637146,
      "learning_rate": 3.8000000000000005e-06,
      "loss": 0.0916,
      "step": 76
    },
    {
      "epoch": 2.4945054945054945,
      "grad_norm": 1.1952019929885864,
      "learning_rate": 3.85e-06,
      "loss": 0.094,
      "step": 77
    },
    {
      "epoch": 2.5274725274725274,
      "grad_norm": 1.381598711013794,
      "learning_rate": 3.900000000000001e-06,
      "loss": 0.0892,
      "step": 78
    },
    {
      "epoch": 2.5604395604395602,
      "grad_norm": 0.9761469960212708,
      "learning_rate": 3.95e-06,
      "loss": 0.091,
      "step": 79
    },
    {
      "epoch": 2.5934065934065935,
      "grad_norm": 1.5026003122329712,
      "learning_rate": 4.000000000000001e-06,
      "loss": 0.0917,
      "step": 80
    },
    {
      "epoch": 2.6263736263736264,
      "grad_norm": 3.112177848815918,
      "learning_rate": 4.05e-06,
      "loss": 0.0891,
      "step": 81
    },
    {
      "epoch": 2.659340659340659,
      "grad_norm": 1.4188305139541626,
      "learning_rate": 4.1e-06,
      "loss": 0.0856,
      "step": 82
    },
    {
      "epoch": 2.6923076923076925,
      "grad_norm": 1.1930654048919678,
      "learning_rate": 4.15e-06,
      "loss": 0.0796,
      "step": 83
    },
    {
      "epoch": 2.7252747252747254,
      "grad_norm": 1.2071819305419922,
      "learning_rate": 4.2000000000000004e-06,
      "loss": 0.0849,
      "step": 84
    },
    {
      "epoch": 2.758241758241758,
      "grad_norm": 1.8237779140472412,
      "learning_rate": 4.25e-06,
      "loss": 0.0809,
      "step": 85
    },
    {
      "epoch": 2.791208791208791,
      "grad_norm": 1.8066545724868774,
      "learning_rate": 4.3e-06,
      "loss": 0.0822,
      "step": 86
    },
    {
      "epoch": 2.824175824175824,
      "grad_norm": 1.541412591934204,
      "learning_rate": 4.350000000000001e-06,
      "loss": 0.0737,
      "step": 87
    },
    {
      "epoch": 2.857142857142857,
      "grad_norm": 1.6204791069030762,
      "learning_rate": 4.4e-06,
      "loss": 0.0701,
      "step": 88
    },
    {
      "epoch": 2.89010989010989,
      "grad_norm": 1.6418696641921997,
      "learning_rate": 4.450000000000001e-06,
      "loss": 0.0693,
      "step": 89
    },
    {
      "epoch": 2.9230769230769234,
      "grad_norm": 1.4522324800491333,
      "learning_rate": 4.5e-06,
      "loss": 0.0701,
      "step": 90
    }
  ],
  "logging_steps": 1,
  "max_steps": 180,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 6,
  "save_steps": 30,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 2.231332415471616e+17,
  "train_batch_size": 4,
  "trial_name": null,
  "trial_params": null
}
|
|