{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.0776942355889725,
  "eval_steps": 500,
  "global_step": 430,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.02506265664160401,
      "grad_norm": 1.0692139863967896,
      "learning_rate": 2.5e-06,
      "loss": 4.188,
      "step": 10
    },
    {
      "epoch": 0.05012531328320802,
      "grad_norm": 1.1046545505523682,
      "learning_rate": 5e-06,
      "loss": 4.1657,
      "step": 20
    },
    {
      "epoch": 0.07518796992481203,
      "grad_norm": 1.2392791509628296,
      "learning_rate": 7.500000000000001e-06,
      "loss": 4.1205,
      "step": 30
    },
    {
      "epoch": 0.10025062656641603,
      "grad_norm": 1.4656062126159668,
      "learning_rate": 1e-05,
      "loss": 4.0411,
      "step": 40
    },
    {
      "epoch": 0.12531328320802004,
      "grad_norm": 1.5381596088409424,
      "learning_rate": 9.722222222222223e-06,
      "loss": 3.8527,
      "step": 50
    },
    {
      "epoch": 0.15037593984962405,
      "grad_norm": 1.220762014389038,
      "learning_rate": 9.444444444444445e-06,
      "loss": 3.6254,
      "step": 60
    },
    {
      "epoch": 0.17543859649122806,
      "grad_norm": 1.0793465375900269,
      "learning_rate": 9.166666666666666e-06,
      "loss": 3.4326,
      "step": 70
    },
    {
      "epoch": 0.20050125313283207,
      "grad_norm": 0.9771835803985596,
      "learning_rate": 8.888888888888888e-06,
      "loss": 3.2486,
      "step": 80
    },
    {
      "epoch": 0.22556390977443608,
      "grad_norm": 0.9600415825843811,
      "learning_rate": 8.611111111111112e-06,
      "loss": 3.0867,
      "step": 90
    },
    {
      "epoch": 0.2506265664160401,
      "grad_norm": 0.789818525314331,
      "learning_rate": 8.333333333333334e-06,
      "loss": 2.9344,
      "step": 100
    },
    {
      "epoch": 0.2756892230576441,
      "grad_norm": 0.7392975091934204,
      "learning_rate": 9.365079365079366e-06,
      "loss": 2.8172,
      "step": 110
    },
    {
      "epoch": 0.3007518796992481,
      "grad_norm": 0.7141592502593994,
      "learning_rate": 9.206349206349207e-06,
      "loss": 2.6949,
      "step": 120
    },
    {
      "epoch": 0.3258145363408521,
      "grad_norm": 0.6999505162239075,
      "learning_rate": 9.047619047619049e-06,
      "loss": 2.5975,
      "step": 130
    },
    {
      "epoch": 0.3508771929824561,
      "grad_norm": 0.7100343704223633,
      "learning_rate": 8.888888888888888e-06,
      "loss": 2.4933,
      "step": 140
    },
    {
      "epoch": 0.37593984962406013,
      "grad_norm": 0.7077847719192505,
      "learning_rate": 8.730158730158731e-06,
      "loss": 2.3849,
      "step": 150
    },
    {
      "epoch": 0.40100250626566414,
      "grad_norm": 0.7145490050315857,
      "learning_rate": 8.571428571428571e-06,
      "loss": 2.2824,
      "step": 160
    },
    {
      "epoch": 0.42606516290726815,
      "grad_norm": 0.7200222611427307,
      "learning_rate": 8.412698412698414e-06,
      "loss": 2.1871,
      "step": 170
    },
    {
      "epoch": 0.45112781954887216,
      "grad_norm": 0.7174162864685059,
      "learning_rate": 8.253968253968254e-06,
      "loss": 2.0916,
      "step": 180
    },
    {
      "epoch": 0.47619047619047616,
      "grad_norm": 0.7255663871765137,
      "learning_rate": 8.095238095238097e-06,
      "loss": 1.9964,
      "step": 190
    },
    {
      "epoch": 0.5012531328320802,
      "grad_norm": 0.7197927236557007,
      "learning_rate": 7.936507936507936e-06,
      "loss": 1.8927,
      "step": 200
    },
    {
      "epoch": 0.5263157894736842,
      "grad_norm": 0.7157548666000366,
      "learning_rate": 7.77777777777778e-06,
      "loss": 1.8088,
      "step": 210
    },
    {
      "epoch": 0.5513784461152882,
      "grad_norm": 0.7149109840393066,
      "learning_rate": 7.61904761904762e-06,
      "loss": 1.7166,
      "step": 220
    },
    {
      "epoch": 0.5764411027568922,
      "grad_norm": 0.7100406289100647,
      "learning_rate": 7.460317460317461e-06,
      "loss": 1.643,
      "step": 230
    },
    {
      "epoch": 0.6015037593984962,
      "grad_norm": 0.6912670135498047,
      "learning_rate": 7.301587301587301e-06,
      "loss": 1.5751,
      "step": 240
    },
    {
      "epoch": 0.6265664160401002,
      "grad_norm": 0.6892037987709045,
      "learning_rate": 7.1428571428571436e-06,
      "loss": 1.4972,
      "step": 250
    },
    {
      "epoch": 0.6516290726817042,
      "grad_norm": 0.6863360404968262,
      "learning_rate": 6.984126984126984e-06,
      "loss": 1.4356,
      "step": 260
    },
    {
      "epoch": 0.6766917293233082,
      "grad_norm": 0.6862374544143677,
      "learning_rate": 6.825396825396826e-06,
      "loss": 1.36,
      "step": 270
    },
    {
      "epoch": 0.7017543859649122,
      "grad_norm": 0.7181079387664795,
      "learning_rate": 6.666666666666667e-06,
      "loss": 1.3035,
      "step": 280
    },
    {
      "epoch": 0.7268170426065163,
      "grad_norm": 0.7799757719039917,
      "learning_rate": 6.507936507936509e-06,
      "loss": 1.2459,
      "step": 290
    },
    {
      "epoch": 0.7518796992481203,
      "grad_norm": 0.8159253001213074,
      "learning_rate": 6.349206349206349e-06,
      "loss": 1.1798,
      "step": 300
    },
    {
      "epoch": 0.7769423558897243,
      "grad_norm": 0.745711088180542,
      "learning_rate": 6.1904761904761914e-06,
      "loss": 1.1132,
      "step": 310
    },
    {
      "epoch": 0.8020050125313283,
      "grad_norm": 0.7495912909507751,
      "learning_rate": 6.031746031746032e-06,
      "loss": 1.0668,
      "step": 320
    },
    {
      "epoch": 0.8270676691729323,
      "grad_norm": 0.7342067360877991,
      "learning_rate": 5.873015873015874e-06,
      "loss": 1.0187,
      "step": 330
    },
    {
      "epoch": 0.8521303258145363,
      "grad_norm": 0.7345426082611084,
      "learning_rate": 5.7142857142857145e-06,
      "loss": 0.987,
      "step": 340
    },
    {
      "epoch": 0.8771929824561403,
      "grad_norm": 0.7322160601615906,
      "learning_rate": 5.555555555555557e-06,
      "loss": 0.9476,
      "step": 350
    },
    {
      "epoch": 0.9022556390977443,
      "grad_norm": 0.7495856881141663,
      "learning_rate": 5.396825396825397e-06,
      "loss": 0.9219,
      "step": 360
    },
    {
      "epoch": 0.9273182957393483,
      "grad_norm": 0.7894755005836487,
      "learning_rate": 5.2380952380952384e-06,
      "loss": 0.8988,
      "step": 370
    },
    {
      "epoch": 0.9523809523809523,
      "grad_norm": 0.8114584684371948,
      "learning_rate": 5.07936507936508e-06,
      "loss": 0.8827,
      "step": 380
    },
    {
      "epoch": 0.9774436090225563,
      "grad_norm": 0.8420091271400452,
      "learning_rate": 4.920634920634921e-06,
      "loss": 0.8516,
      "step": 390
    },
    {
      "epoch": 1.0025062656641603,
      "grad_norm": 0.8762117624282837,
      "learning_rate": 4.761904761904762e-06,
      "loss": 0.8343,
      "step": 400
    },
    {
      "epoch": 1.0275689223057645,
      "grad_norm": 0.8553557991981506,
      "learning_rate": 4.603174603174604e-06,
      "loss": 0.8124,
      "step": 410
    },
    {
      "epoch": 1.0526315789473684,
      "grad_norm": 0.7959764003753662,
      "learning_rate": 4.444444444444444e-06,
      "loss": 0.8053,
      "step": 420
    },
    {
      "epoch": 1.0776942355889725,
      "grad_norm": 0.6741518378257751,
      "learning_rate": 4.2857142857142855e-06,
      "loss": 0.7804,
      "step": 430
    }
  ],
  "logging_steps": 10,
  "max_steps": 700,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 2,
  "save_steps": 10,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 4.6364045077533696e+17,
  "train_batch_size": 64,
  "trial_name": null,
  "trial_params": null
}