{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 10.0,
  "global_step": 9980,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.1,
      "learning_rate": 9.997522904167843e-07,
      "loss": 26.4633,
      "step": 100
    },
    {
      "epoch": 0.2,
      "learning_rate": 9.990094071072878e-07,
      "loss": 9.8101,
      "step": 200
    },
    {
      "epoch": 0.3,
      "learning_rate": 9.977720861487699e-07,
      "loss": 5.8569,
      "step": 300
    },
    {
      "epoch": 0.4,
      "learning_rate": 9.96041553526267e-07,
      "loss": 5.5213,
      "step": 400
    },
    {
      "epoch": 0.5,
      "learning_rate": 9.938195239178374e-07,
      "loss": 5.1344,
      "step": 500
    },
    {
      "epoch": 0.6,
      "learning_rate": 9.911081989955939e-07,
      "loss": 4.9245,
      "step": 600
    },
    {
      "epoch": 0.7,
      "learning_rate": 9.879102652442023e-07,
      "loss": 4.5925,
      "step": 700
    },
    {
      "epoch": 0.8,
      "learning_rate": 9.842288912990095e-07,
      "loss": 4.676,
      "step": 800
    },
    {
      "epoch": 0.9,
      "learning_rate": 9.80067724806438e-07,
      "loss": 4.4576,
      "step": 900
    },
    {
      "epoch": 1.0,
      "learning_rate": 9.754308888097582e-07,
      "loss": 4.2986,
      "step": 1000
    },
    {
      "epoch": 1.1,
      "learning_rate": 9.703229776638185e-07,
      "loss": 3.8739,
      "step": 1100
    },
    {
      "epoch": 1.2,
      "learning_rate": 9.647490524827833e-07,
      "loss": 3.5629,
      "step": 1200
    },
    {
      "epoch": 1.3,
      "learning_rate": 9.587146361253867e-07,
      "loss": 2.9575,
      "step": 1300
    },
    {
      "epoch": 1.4,
      "learning_rate": 9.522257077226716e-07,
      "loss": 2.8326,
      "step": 1400
    },
    {
      "epoch": 1.5,
      "learning_rate": 9.452886967536388e-07,
      "loss": 2.7891,
      "step": 1500
    },
    {
      "epoch": 1.6,
      "learning_rate": 9.379104766746722e-07,
      "loss": 2.4971,
      "step": 1600
    },
    {
      "epoch": 1.7,
      "learning_rate": 9.30098358109054e-07,
      "loss": 2.4297,
      "step": 1700
    },
    {
      "epoch": 1.8,
      "learning_rate": 9.218600816033199e-07,
      "loss": 2.183,
      "step": 1800
    },
    {
      "epoch": 1.9,
      "learning_rate": 9.132038099576289e-07,
      "loss": 2.0732,
      "step": 1900
    },
    {
      "epoch": 2.0,
      "learning_rate": 9.041381201377467e-07,
      "loss": 2.2732,
      "step": 2000
    },
    {
      "epoch": 2.1,
      "learning_rate": 8.94671994776661e-07,
      "loss": 1.7384,
      "step": 2100
    },
    {
      "epoch": 2.2,
      "learning_rate": 8.84814813274243e-07,
      "loss": 1.8955,
      "step": 2200
    },
    {
      "epoch": 2.3,
      "learning_rate": 8.745763425037795e-07,
      "loss": 2.0546,
      "step": 2300
    },
    {
      "epoch": 2.4,
      "learning_rate": 8.639667271345798e-07,
      "loss": 1.6855,
      "step": 2400
    },
    {
      "epoch": 2.51,
      "learning_rate": 8.529964795802484e-07,
      "loss": 1.7512,
      "step": 2500
    },
    {
      "epoch": 2.61,
      "learning_rate": 8.416764695825834e-07,
      "loss": 1.7856,
      "step": 2600
    },
    {
      "epoch": 2.71,
      "learning_rate": 8.300179134414187e-07,
      "loss": 1.7027,
      "step": 2700
    },
    {
      "epoch": 2.81,
      "learning_rate": 8.180323629010848e-07,
      "loss": 1.5552,
      "step": 2800
    },
    {
      "epoch": 2.91,
      "learning_rate": 8.057316937044976e-07,
      "loss": 1.6427,
      "step": 2900
    },
    {
      "epoch": 3.01,
      "learning_rate": 7.931280938262168e-07,
      "loss": 1.7996,
      "step": 3000
    },
    {
      "epoch": 3.11,
      "learning_rate": 7.802340513961341e-07,
      "loss": 1.4622,
      "step": 3100
    },
    {
      "epoch": 3.21,
      "learning_rate": 7.670623423257547e-07,
      "loss": 1.4137,
      "step": 3200
    },
    {
      "epoch": 3.31,
      "learning_rate": 7.536260176493347e-07,
      "loss": 1.4894,
      "step": 3300
    },
    {
      "epoch": 3.41,
      "learning_rate": 7.399383905924165e-07,
      "loss": 1.4263,
      "step": 3400
    },
    {
      "epoch": 3.51,
      "learning_rate": 7.26013023380574e-07,
      "loss": 1.491,
      "step": 3500
    },
    {
      "epoch": 3.61,
      "learning_rate": 7.118637138014395e-07,
      "loss": 1.2858,
      "step": 3600
    },
    {
      "epoch": 3.71,
      "learning_rate": 6.975044815333281e-07,
      "loss": 1.2851,
      "step": 3700
    },
    {
      "epoch": 3.81,
      "learning_rate": 6.829495542540013e-07,
      "loss": 1.3229,
      "step": 3800
    },
    {
      "epoch": 3.91,
      "learning_rate": 6.682133535433393e-07,
      "loss": 1.3675,
      "step": 3900
    },
    {
      "epoch": 4.01,
      "learning_rate": 6.533104805938873e-07,
      "loss": 1.3686,
      "step": 4000
    },
    {
      "epoch": 4.11,
      "learning_rate": 6.382557017434331e-07,
      "loss": 1.0781,
      "step": 4100
    },
    {
      "epoch": 4.21,
      "learning_rate": 6.230639338439549e-07,
      "loss": 1.1388,
      "step": 4200
    },
    {
      "epoch": 4.31,
      "learning_rate": 6.077502294814311e-07,
      "loss": 1.0966,
      "step": 4300
    },
    {
      "epoch": 4.41,
      "learning_rate": 5.923297620611622e-07,
      "loss": 1.1764,
      "step": 4400
    },
    {
      "epoch": 4.51,
      "learning_rate": 5.76817810773379e-07,
      "loss": 1.2625,
      "step": 4500
    },
    {
      "epoch": 4.61,
      "learning_rate": 5.612297454540351e-07,
      "loss": 1.1718,
      "step": 4600
    },
    {
      "epoch": 4.71,
      "learning_rate": 5.455810113557839e-07,
      "loss": 1.218,
      "step": 4700
    },
    {
      "epoch": 4.81,
      "learning_rate": 5.298871138442307e-07,
      "loss": 1.2203,
      "step": 4800
    },
    {
      "epoch": 4.91,
      "learning_rate": 5.14163603034622e-07,
      "loss": 1.0743,
      "step": 4900
    },
    {
      "epoch": 5.01,
      "learning_rate": 4.984260583841952e-07,
      "loss": 1.1825,
      "step": 5000
    },
    {
      "epoch": 5.11,
      "learning_rate": 4.82690073255455e-07,
      "loss": 0.8899,
      "step": 5100
    },
    {
      "epoch": 5.21,
      "learning_rate": 4.6697123946567224e-07,
      "loss": 0.9867,
      "step": 5200
    },
    {
      "epoch": 5.31,
      "learning_rate": 4.512851318379138e-07,
      "loss": 0.9842,
      "step": 5300
    },
    {
      "epoch": 5.41,
      "learning_rate": 4.3564729276891087e-07,
      "loss": 1.1441,
      "step": 5400
    },
    {
      "epoch": 5.51,
      "learning_rate": 4.2007321682905594e-07,
      "loss": 0.9965,
      "step": 5500
    },
    {
      "epoch": 5.61,
      "learning_rate": 4.0457833540978923e-07,
      "loss": 0.9929,
      "step": 5600
    },
    {
      "epoch": 5.71,
      "learning_rate": 3.89178001433584e-07,
      "loss": 1.152,
      "step": 5700
    },
    {
      "epoch": 5.81,
      "learning_rate": 3.7388747414168295e-07,
      "loss": 0.8896,
      "step": 5800
    },
    {
      "epoch": 5.91,
      "learning_rate": 3.5872190397465635e-07,
      "loss": 1.1022,
      "step": 5900
    },
    {
      "epoch": 6.01,
      "learning_rate": 3.436963175607656e-07,
      "loss": 0.8925,
      "step": 6000
    },
    {
      "epoch": 6.11,
      "learning_rate": 3.288256028270033e-07,
      "loss": 0.9647,
      "step": 6100
    },
    {
      "epoch": 6.21,
      "learning_rate": 3.141244942475647e-07,
      "loss": 0.7659,
      "step": 6200
    },
    {
      "epoch": 6.31,
      "learning_rate": 2.996075582443658e-07,
      "loss": 1.0098,
      "step": 6300
    },
    {
      "epoch": 6.41,
      "learning_rate": 2.8528917875407433e-07,
      "loss": 1.0704,
      "step": 6400
    },
    {
      "epoch": 6.51,
      "learning_rate": 2.711835429759539e-07,
      "loss": 1.0039,
      "step": 6500
    },
    {
      "epoch": 6.61,
      "learning_rate": 2.573046273146427e-07,
      "loss": 0.9493,
      "step": 6600
    },
    {
      "epoch": 6.71,
      "learning_rate": 2.4366618353179644e-07,
      "loss": 0.9582,
      "step": 6700
    },
    {
      "epoch": 6.81,
      "learning_rate": 2.30281725120316e-07,
      "loss": 0.8875,
      "step": 6800
    },
    {
      "epoch": 6.91,
      "learning_rate": 2.1716451391466006e-07,
      "loss": 0.8132,
      "step": 6900
    },
    {
      "epoch": 7.01,
      "learning_rate": 2.0432754695051136e-07,
      "loss": 1.0703,
      "step": 7000
    },
    {
      "epoch": 7.11,
      "learning_rate": 1.9178354358681548e-07,
      "loss": 1.0679,
      "step": 7100
    },
    {
      "epoch": 7.21,
      "learning_rate": 1.7954493290295309e-07,
      "loss": 0.8124,
      "step": 7200
    },
    {
      "epoch": 7.31,
      "learning_rate": 1.6762384138353075e-07,
      "loss": 0.9779,
      "step": 7300
    },
    {
      "epoch": 7.41,
      "learning_rate": 1.5603208090299496e-07,
      "loss": 0.7439,
      "step": 7400
    },
    {
      "epoch": 7.52,
      "learning_rate": 1.4478113702197569e-07,
      "loss": 0.858,
      "step": 7500
    },
    {
      "epoch": 7.62,
      "learning_rate": 1.3388215760695098e-07,
      "loss": 1.0146,
      "step": 7600
    },
    {
      "epoch": 7.72,
      "learning_rate": 1.2334594178451424e-07,
      "loss": 0.7714,
      "step": 7700
    },
    {
      "epoch": 7.82,
      "learning_rate": 1.1318292924118584e-07,
      "loss": 0.8889,
      "step": 7800
    },
    {
      "epoch": 7.92,
      "learning_rate": 1.0340318987937097e-07,
      "loss": 1.0001,
      "step": 7900
    },
    {
      "epoch": 8.02,
      "learning_rate": 9.401641383971476e-08,
      "loss": 0.9567,
      "step": 8000
    },
    {
      "epoch": 8.12,
      "learning_rate": 8.503190189973914e-08,
      "loss": 0.834,
      "step": 8100
    },
    {
      "epoch": 8.22,
      "learning_rate": 7.645855625827657e-08,
      "loss": 0.9169,
      "step": 8200
    },
    {
      "epoch": 8.32,
      "learning_rate": 6.830487171482935e-08,
      "loss": 0.841,
      "step": 8300
    },
    {
      "epoch": 8.42,
      "learning_rate": 6.057892725259717e-08,
      "loss": 1.0763,
      "step": 8400
    },
    {
      "epoch": 8.52,
      "learning_rate": 5.328837803351083e-08,
      "loss": 0.8885,
      "step": 8500
    },
    {
      "epoch": 8.62,
      "learning_rate": 4.644044781320422e-08,
      "loss": 0.7986,
      "step": 8600
    },
    {
      "epoch": 8.72,
      "learning_rate": 4.004192178344029e-08,
      "loss": 0.701,
      "step": 8700
    },
    {
      "epoch": 8.82,
      "learning_rate": 3.4099139849083304e-08,
      "loss": 0.9347,
      "step": 8800
    },
    {
      "epoch": 8.92,
      "learning_rate": 2.8617990346277655e-08,
      "loss": 1.0685,
      "step": 8900
    },
    {
      "epoch": 9.02,
      "learning_rate": 2.3603904208058688e-08,
      "loss": 0.8046,
      "step": 9000
    },
    {
      "epoch": 9.12,
      "learning_rate": 1.9061849583176636e-08,
      "loss": 0.8515,
      "step": 9100
    },
    {
      "epoch": 9.22,
      "learning_rate": 1.499632691346375e-08,
      "loss": 0.8998,
      "step": 9200
    },
    {
      "epoch": 9.32,
      "learning_rate": 1.1411364474624264e-08,
      "loss": 0.8923,
      "step": 9300
    },
    {
      "epoch": 9.42,
      "learning_rate": 8.31051438486441e-09,
      "loss": 0.7543,
      "step": 9400
    },
    {
      "epoch": 9.52,
      "learning_rate": 5.696849085317645e-09,
      "loss": 0.883,
      "step": 9500
    },
    {
      "epoch": 9.62,
      "learning_rate": 3.5729582957520486e-09,
      "loss": 0.8342,
      "step": 9600
    },
    {
      "epoch": 9.72,
      "learning_rate": 1.9409464485766746e-09,
      "loss": 0.9553,
      "step": 9700
    },
    {
      "epoch": 9.82,
      "learning_rate": 8.024306036893968e-10,
      "loss": 0.9193,
      "step": 9800
    },
    {
      "epoch": 9.92,
      "learning_rate": 1.5853884623195922e-10,
      "loss": 0.8958,
      "step": 9900
    },
    {
      "epoch": 10.0,
      "step": 9980,
      "total_flos": 0.0,
      "train_runtime": 8323.4424,
      "train_samples_per_second": 1.199
    }
  ],
  "max_steps": 9980,
  "num_train_epochs": 10,
  "total_flos": 0.0,
  "trial_name": null,
  "trial_params": null
}