{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 6.72,
  "eval_steps": 500,
  "global_step": 70,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.096,
      "grad_norm": 8.423216225170513,
      "learning_rate": 2.8571428571428573e-06,
      "loss": 1.2598,
      "step": 1
    },
    {
      "epoch": 0.192,
      "grad_norm": 8.33678434566309,
      "learning_rate": 5.7142857142857145e-06,
      "loss": 1.2284,
      "step": 2
    },
    {
      "epoch": 0.288,
      "grad_norm": 7.532278886182312,
      "learning_rate": 8.571428571428571e-06,
      "loss": 1.1884,
      "step": 3
    },
    {
      "epoch": 0.384,
      "grad_norm": 3.706996231992246,
      "learning_rate": 1.1428571428571429e-05,
      "loss": 1.0817,
      "step": 4
    },
    {
      "epoch": 0.48,
      "grad_norm": 4.591879137093918,
      "learning_rate": 1.4285714285714287e-05,
      "loss": 1.0358,
      "step": 5
    },
    {
      "epoch": 0.576,
      "grad_norm": 4.72748644477042,
      "learning_rate": 1.7142857142857142e-05,
      "loss": 1.0017,
      "step": 6
    },
    {
      "epoch": 0.672,
      "grad_norm": 5.475220172528173,
      "learning_rate": 2e-05,
      "loss": 1.0114,
      "step": 7
    },
    {
      "epoch": 0.768,
      "grad_norm": 4.558828887797451,
      "learning_rate": 1.9987569212189224e-05,
      "loss": 1.0131,
      "step": 8
    },
    {
      "epoch": 0.864,
      "grad_norm": 4.05497430045768,
      "learning_rate": 1.9950307753654016e-05,
      "loss": 0.9482,
      "step": 9
    },
    {
      "epoch": 0.96,
      "grad_norm": 3.17791442177505,
      "learning_rate": 1.9888308262251286e-05,
      "loss": 0.9109,
      "step": 10
    },
    {
      "epoch": 1.056,
      "grad_norm": 2.7142917050834545,
      "learning_rate": 1.9801724878485438e-05,
      "loss": 0.8737,
      "step": 11
    },
    {
      "epoch": 1.152,
      "grad_norm": 2.16379506722022,
      "learning_rate": 1.969077286229078e-05,
      "loss": 0.8412,
      "step": 12
    },
    {
      "epoch": 1.248,
      "grad_norm": 2.205232338366624,
      "learning_rate": 1.955572805786141e-05,
      "loss": 0.8201,
      "step": 13
    },
    {
      "epoch": 1.3439999999999999,
      "grad_norm": 1.6468537757972845,
      "learning_rate": 1.9396926207859085e-05,
      "loss": 0.756,
      "step": 14
    },
    {
      "epoch": 1.44,
      "grad_norm": 1.1854534540746406,
      "learning_rate": 1.921476211870408e-05,
      "loss": 0.7557,
      "step": 15
    },
    {
      "epoch": 1.536,
      "grad_norm": 1.4591777578572527,
      "learning_rate": 1.900968867902419e-05,
      "loss": 0.7607,
      "step": 16
    },
    {
      "epoch": 1.6320000000000001,
      "grad_norm": 1.189899554241067,
      "learning_rate": 1.8782215733702286e-05,
      "loss": 0.7388,
      "step": 17
    },
    {
      "epoch": 1.728,
      "grad_norm": 1.101803186609227,
      "learning_rate": 1.8532908816321557e-05,
      "loss": 0.7282,
      "step": 18
    },
    {
      "epoch": 1.8239999999999998,
      "grad_norm": 1.084511474506394,
      "learning_rate": 1.826238774315995e-05,
      "loss": 0.705,
      "step": 19
    },
    {
      "epoch": 1.92,
      "grad_norm": 1.1294004898635919,
      "learning_rate": 1.7971325072229227e-05,
      "loss": 0.6931,
      "step": 20
    },
    {
      "epoch": 2.016,
      "grad_norm": 1.691793825975685,
      "learning_rate": 1.766044443118978e-05,
      "loss": 0.7037,
      "step": 21
    },
    {
      "epoch": 2.112,
      "grad_norm": 1.1476755655110455,
      "learning_rate": 1.7330518718298263e-05,
      "loss": 0.5664,
      "step": 22
    },
    {
      "epoch": 2.208,
      "grad_norm": 1.127945633341472,
      "learning_rate": 1.698236818086073e-05,
      "loss": 0.5469,
      "step": 23
    },
    {
      "epoch": 2.304,
      "grad_norm": 1.0710484586703242,
      "learning_rate": 1.6616858375968596e-05,
      "loss": 0.5424,
      "step": 24
    },
    {
      "epoch": 2.4,
      "grad_norm": 5.8684571760905495,
      "learning_rate": 1.6234898018587336e-05,
      "loss": 0.5233,
      "step": 25
    },
    {
      "epoch": 2.496,
      "grad_norm": 2.1493758912669247,
      "learning_rate": 1.5837436722347902e-05,
      "loss": 0.5102,
      "step": 26
    },
    {
      "epoch": 2.592,
      "grad_norm": 2.0304648569378934,
      "learning_rate": 1.5425462638657597e-05,
      "loss": 0.5346,
      "step": 27
    },
    {
      "epoch": 2.6879999999999997,
      "grad_norm": 0.9595779238277304,
      "learning_rate": 1.5000000000000002e-05,
      "loss": 0.5088,
      "step": 28
    },
    {
      "epoch": 2.784,
      "grad_norm": 0.8647596733497533,
      "learning_rate": 1.4562106573531632e-05,
      "loss": 0.509,
      "step": 29
    },
    {
      "epoch": 2.88,
      "grad_norm": 0.946031655717741,
      "learning_rate": 1.4112871031306118e-05,
      "loss": 0.5166,
      "step": 30
    },
    {
      "epoch": 2.976,
      "grad_norm": 0.9118468403598151,
      "learning_rate": 1.3653410243663953e-05,
      "loss": 0.4981,
      "step": 31
    },
    {
      "epoch": 3.072,
      "grad_norm": 0.9238751962575951,
      "learning_rate": 1.3184866502516846e-05,
      "loss": 0.4426,
      "step": 32
    },
    {
      "epoch": 3.168,
      "grad_norm": 0.7465446293366775,
      "learning_rate": 1.2708404681430054e-05,
      "loss": 0.3905,
      "step": 33
    },
    {
      "epoch": 3.2640000000000002,
      "grad_norm": 0.8360099214224513,
      "learning_rate": 1.2225209339563144e-05,
      "loss": 0.4066,
      "step": 34
    },
    {
      "epoch": 3.36,
      "grad_norm": 0.850002009049129,
      "learning_rate": 1.1736481776669307e-05,
      "loss": 0.329,
      "step": 35
    },
    {
      "epoch": 3.456,
      "grad_norm": 0.8133436219622251,
      "learning_rate": 1.1243437046474854e-05,
      "loss": 0.3676,
      "step": 36
    },
    {
      "epoch": 3.552,
      "grad_norm": 0.8001314684702371,
      "learning_rate": 1.0747300935864245e-05,
      "loss": 0.4153,
      "step": 37
    },
    {
      "epoch": 3.648,
      "grad_norm": 0.8481071589521082,
      "learning_rate": 1.0249306917380731e-05,
      "loss": 0.332,
      "step": 38
    },
    {
      "epoch": 3.7439999999999998,
      "grad_norm": 0.6902948880341699,
      "learning_rate": 9.750693082619274e-06,
      "loss": 0.3497,
      "step": 39
    },
    {
      "epoch": 3.84,
      "grad_norm": 0.6816078599970087,
      "learning_rate": 9.252699064135759e-06,
      "loss": 0.3311,
      "step": 40
    },
    {
      "epoch": 3.936,
      "grad_norm": 0.7426242359132978,
      "learning_rate": 8.756562953525151e-06,
      "loss": 0.3064,
      "step": 41
    },
    {
      "epoch": 4.032,
      "grad_norm": 0.6674435493390191,
      "learning_rate": 8.263518223330698e-06,
      "loss": 0.3332,
      "step": 42
    },
    {
      "epoch": 4.128,
      "grad_norm": 0.7109661261198361,
      "learning_rate": 7.774790660436857e-06,
      "loss": 0.2485,
      "step": 43
    },
    {
      "epoch": 4.224,
      "grad_norm": 0.5535332073784425,
      "learning_rate": 7.291595318569951e-06,
      "loss": 0.2595,
      "step": 44
    },
    {
      "epoch": 4.32,
      "grad_norm": 0.6270493072946656,
      "learning_rate": 6.815133497483157e-06,
      "loss": 0.2534,
      "step": 45
    },
    {
      "epoch": 4.416,
      "grad_norm": 0.7599802144838778,
      "learning_rate": 6.34658975633605e-06,
      "loss": 0.2479,
      "step": 46
    },
    {
      "epoch": 4.5120000000000005,
      "grad_norm": 0.7832712093800479,
      "learning_rate": 5.887128968693887e-06,
      "loss": 0.2665,
      "step": 47
    },
    {
      "epoch": 4.608,
      "grad_norm": 0.6090402734947341,
      "learning_rate": 5.43789342646837e-06,
      "loss": 0.2441,
      "step": 48
    },
    {
      "epoch": 4.704,
      "grad_norm": 0.5941740692546298,
      "learning_rate": 5.000000000000003e-06,
      "loss": 0.2581,
      "step": 49
    },
    {
      "epoch": 4.8,
      "grad_norm": 0.5941297758940043,
      "learning_rate": 4.5745373613424075e-06,
      "loss": 0.2573,
      "step": 50
    },
    {
      "epoch": 4.896,
      "grad_norm": 0.5857774477861153,
      "learning_rate": 4.162563277652104e-06,
      "loss": 0.2447,
      "step": 51
    },
    {
      "epoch": 4.992,
      "grad_norm": 0.5237471505051408,
      "learning_rate": 3.7651019814126656e-06,
      "loss": 0.2559,
      "step": 52
    },
    {
      "epoch": 5.088,
      "grad_norm": 0.5777761027009243,
      "learning_rate": 3.3831416240314085e-06,
      "loss": 0.195,
      "step": 53
    },
    {
      "epoch": 5.184,
      "grad_norm": 0.5310934613228839,
      "learning_rate": 3.017631819139273e-06,
      "loss": 0.209,
      "step": 54
    },
    {
      "epoch": 5.28,
      "grad_norm": 0.4876656343048685,
      "learning_rate": 2.669481281701739e-06,
      "loss": 0.2093,
      "step": 55
    },
    {
      "epoch": 5.376,
      "grad_norm": 0.5185644841880239,
      "learning_rate": 2.339555568810221e-06,
      "loss": 0.2167,
      "step": 56
    },
    {
      "epoch": 5.4719999999999995,
      "grad_norm": 0.48894415973299077,
      "learning_rate": 2.0286749277707783e-06,
      "loss": 0.194,
      "step": 57
    },
    {
      "epoch": 5.568,
      "grad_norm": 0.4963782748475688,
      "learning_rate": 1.7376122568400533e-06,
      "loss": 0.1979,
      "step": 58
    },
    {
      "epoch": 5.664,
      "grad_norm": 0.4846643998509256,
      "learning_rate": 1.467091183678444e-06,
      "loss": 0.166,
      "step": 59
    },
    {
      "epoch": 5.76,
      "grad_norm": 0.5027220362891996,
      "learning_rate": 1.2177842662977136e-06,
      "loss": 0.2245,
      "step": 60
    },
    {
      "epoch": 5.856,
      "grad_norm": 0.4386370282284896,
      "learning_rate": 9.903113209758098e-07,
      "loss": 0.1892,
      "step": 61
    },
    {
      "epoch": 5.952,
      "grad_norm": 0.4274658732911449,
      "learning_rate": 7.852378812959227e-07,
      "loss": 0.2155,
      "step": 62
    },
    {
      "epoch": 6.048,
      "grad_norm": 0.4068546881239251,
      "learning_rate": 6.030737921409169e-07,
      "loss": 0.1633,
      "step": 63
    },
    {
      "epoch": 6.144,
      "grad_norm": 0.4113236804830393,
      "learning_rate": 4.4427194213859216e-07,
      "loss": 0.2247,
      "step": 64
    },
    {
      "epoch": 6.24,
      "grad_norm": 0.37483369029013164,
      "learning_rate": 3.0922713770922155e-07,
      "loss": 0.1903,
      "step": 65
    },
    {
      "epoch": 6.336,
      "grad_norm": 0.3772952210076009,
      "learning_rate": 1.9827512151456175e-07,
      "loss": 0.1746,
      "step": 66
    },
    {
      "epoch": 6.432,
      "grad_norm": 0.3796414105569942,
      "learning_rate": 1.1169173774871478e-07,
      "loss": 0.1732,
      "step": 67
    },
    {
      "epoch": 6.5280000000000005,
      "grad_norm": 0.3968524427917974,
      "learning_rate": 4.9692246345985905e-08,
      "loss": 0.1448,
      "step": 68
    },
    {
      "epoch": 6.624,
      "grad_norm": 0.38713908845265427,
      "learning_rate": 1.2430787810776556e-08,
      "loss": 0.1779,
      "step": 69
    },
    {
      "epoch": 6.72,
      "grad_norm": 0.4228631990131007,
      "learning_rate": 0.0,
      "loss": 0.1992,
      "step": 70
    },
    {
      "epoch": 6.72,
      "step": 70,
      "total_flos": 6.183626038522675e+16,
      "train_loss": 0.49023831082241875,
      "train_runtime": 2743.1173,
      "train_samples_per_second": 2.552,
      "train_steps_per_second": 0.026
    }
  ],
  "logging_steps": 1,
  "max_steps": 70,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 7,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 6.183626038522675e+16,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}