| { | |
| "best_metric": null, | |
| "best_model_checkpoint": null, | |
| "epoch": 0.36121264244248547, | |
| "global_step": 12600, | |
| "is_hyper_param_search": false, | |
| "is_local_process_zero": true, | |
| "is_world_process_zero": true, | |
| "log_history": [ | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 1.25e-06, | |
| "loss": 0.7035, | |
| "step": 50 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 2.5e-06, | |
| "loss": 0.7034, | |
| "step": 100 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 3.75e-06, | |
| "loss": 0.6974, | |
| "step": 150 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 5e-06, | |
| "loss": 0.7017, | |
| "step": 200 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 6.25e-06, | |
| "loss": 0.6972, | |
| "step": 250 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 7.5e-06, | |
| "loss": 0.6971, | |
| "step": 300 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 8.75e-06, | |
| "loss": 0.6918, | |
| "step": 350 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 1e-05, | |
| "loss": 0.6947, | |
| "step": 400 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 1.125e-05, | |
| "loss": 0.6979, | |
| "step": 450 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 1.25e-05, | |
| "loss": 0.6952, | |
| "step": 500 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 1.3750000000000002e-05, | |
| "loss": 0.6988, | |
| "step": 550 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 1.5e-05, | |
| "loss": 0.6919, | |
| "step": 600 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 1.6250000000000002e-05, | |
| "loss": 0.6955, | |
| "step": 650 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 1.75e-05, | |
| "loss": 0.6969, | |
| "step": 700 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 1.8750000000000002e-05, | |
| "loss": 0.6959, | |
| "step": 750 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 2e-05, | |
| "loss": 0.6958, | |
| "step": 800 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 2.125e-05, | |
| "loss": 0.6959, | |
| "step": 850 | |
| }, | |
| { | |
| "epoch": 0.03, | |
| "learning_rate": 2.25e-05, | |
| "loss": 0.6973, | |
| "step": 900 | |
| }, | |
| { | |
| "epoch": 0.03, | |
| "learning_rate": 2.375e-05, | |
| "loss": 0.6979, | |
| "step": 950 | |
| }, | |
| { | |
| "epoch": 0.03, | |
| "learning_rate": 2.5e-05, | |
| "loss": 0.6975, | |
| "step": 1000 | |
| }, | |
| { | |
| "epoch": 0.03, | |
| "learning_rate": 2.625e-05, | |
| "loss": 0.7012, | |
| "step": 1050 | |
| }, | |
| { | |
| "epoch": 0.03, | |
| "learning_rate": 2.7500000000000004e-05, | |
| "loss": 0.6973, | |
| "step": 1100 | |
| }, | |
| { | |
| "epoch": 0.03, | |
| "learning_rate": 2.8749999999999997e-05, | |
| "loss": 0.7027, | |
| "step": 1150 | |
| }, | |
| { | |
| "epoch": 0.03, | |
| "learning_rate": 3e-05, | |
| "loss": 0.703, | |
| "step": 1200 | |
| }, | |
| { | |
| "epoch": 0.04, | |
| "learning_rate": 3.125e-05, | |
| "loss": 0.6989, | |
| "step": 1250 | |
| }, | |
| { | |
| "epoch": 0.04, | |
| "learning_rate": 3.2500000000000004e-05, | |
| "loss": 0.7007, | |
| "step": 1300 | |
| }, | |
| { | |
| "epoch": 0.04, | |
| "learning_rate": 3.375000000000001e-05, | |
| "loss": 0.701, | |
| "step": 1350 | |
| }, | |
| { | |
| "epoch": 0.04, | |
| "learning_rate": 3.5e-05, | |
| "loss": 0.7015, | |
| "step": 1400 | |
| }, | |
| { | |
| "epoch": 0.04, | |
| "learning_rate": 3.625e-05, | |
| "loss": 0.701, | |
| "step": 1450 | |
| }, | |
| { | |
| "epoch": 0.04, | |
| "learning_rate": 3.7500000000000003e-05, | |
| "loss": 0.7018, | |
| "step": 1500 | |
| }, | |
| { | |
| "epoch": 0.04, | |
| "learning_rate": 3.875e-05, | |
| "loss": 0.7021, | |
| "step": 1550 | |
| }, | |
| { | |
| "epoch": 0.05, | |
| "learning_rate": 4e-05, | |
| "loss": 0.7063, | |
| "step": 1600 | |
| }, | |
| { | |
| "epoch": 0.05, | |
| "learning_rate": 4.125e-05, | |
| "loss": 0.7056, | |
| "step": 1650 | |
| }, | |
| { | |
| "epoch": 0.05, | |
| "learning_rate": 4.25e-05, | |
| "loss": 0.7017, | |
| "step": 1700 | |
| }, | |
| { | |
| "epoch": 0.05, | |
| "learning_rate": 4.375e-05, | |
| "loss": 0.7057, | |
| "step": 1750 | |
| }, | |
| { | |
| "epoch": 0.05, | |
| "learning_rate": 4.5e-05, | |
| "loss": 0.7073, | |
| "step": 1800 | |
| }, | |
| { | |
| "epoch": 0.05, | |
| "learning_rate": 4.6250000000000006e-05, | |
| "loss": 0.7049, | |
| "step": 1850 | |
| }, | |
| { | |
| "epoch": 0.05, | |
| "learning_rate": 4.75e-05, | |
| "loss": 0.7089, | |
| "step": 1900 | |
| }, | |
| { | |
| "epoch": 0.06, | |
| "learning_rate": 4.875e-05, | |
| "loss": 0.7049, | |
| "step": 1950 | |
| }, | |
| { | |
| "epoch": 0.06, | |
| "learning_rate": 5e-05, | |
| "loss": 0.7058, | |
| "step": 2000 | |
| }, | |
| { | |
| "epoch": 0.06, | |
| "learning_rate": 4.992397056140138e-05, | |
| "loss": 0.7086, | |
| "step": 2050 | |
| }, | |
| { | |
| "epoch": 0.06, | |
| "learning_rate": 4.984794112280275e-05, | |
| "loss": 0.7015, | |
| "step": 2100 | |
| }, | |
| { | |
| "epoch": 0.06, | |
| "learning_rate": 4.9771911684204125e-05, | |
| "loss": 0.7091, | |
| "step": 2150 | |
| }, | |
| { | |
| "epoch": 0.06, | |
| "learning_rate": 4.96958822456055e-05, | |
| "loss": 0.7096, | |
| "step": 2200 | |
| }, | |
| { | |
| "epoch": 0.06, | |
| "learning_rate": 4.961985280700688e-05, | |
| "loss": 0.7039, | |
| "step": 2250 | |
| }, | |
| { | |
| "epoch": 0.07, | |
| "learning_rate": 4.954382336840825e-05, | |
| "loss": 0.705, | |
| "step": 2300 | |
| }, | |
| { | |
| "epoch": 0.07, | |
| "learning_rate": 4.946779392980962e-05, | |
| "loss": 0.7051, | |
| "step": 2350 | |
| }, | |
| { | |
| "epoch": 0.07, | |
| "learning_rate": 4.9391764491211e-05, | |
| "loss": 0.7075, | |
| "step": 2400 | |
| }, | |
| { | |
| "epoch": 0.07, | |
| "learning_rate": 4.931573505261237e-05, | |
| "loss": 0.7064, | |
| "step": 2450 | |
| }, | |
| { | |
| "epoch": 0.07, | |
| "learning_rate": 4.9239705614013746e-05, | |
| "loss": 0.7095, | |
| "step": 2500 | |
| }, | |
| { | |
| "epoch": 0.07, | |
| "learning_rate": 4.916367617541512e-05, | |
| "loss": 0.7027, | |
| "step": 2550 | |
| }, | |
| { | |
| "epoch": 0.07, | |
| "learning_rate": 4.90876467368165e-05, | |
| "loss": 0.7041, | |
| "step": 2600 | |
| }, | |
| { | |
| "epoch": 0.08, | |
| "learning_rate": 4.901161729821787e-05, | |
| "loss": 0.7089, | |
| "step": 2650 | |
| }, | |
| { | |
| "epoch": 0.08, | |
| "learning_rate": 4.8935587859619244e-05, | |
| "loss": 0.7018, | |
| "step": 2700 | |
| }, | |
| { | |
| "epoch": 0.08, | |
| "learning_rate": 4.885955842102062e-05, | |
| "loss": 0.6996, | |
| "step": 2750 | |
| }, | |
| { | |
| "epoch": 0.08, | |
| "learning_rate": 4.8783528982422e-05, | |
| "loss": 0.7017, | |
| "step": 2800 | |
| }, | |
| { | |
| "epoch": 0.08, | |
| "learning_rate": 4.870749954382337e-05, | |
| "loss": 0.7058, | |
| "step": 2850 | |
| }, | |
| { | |
| "epoch": 0.08, | |
| "learning_rate": 4.863147010522475e-05, | |
| "loss": 0.7076, | |
| "step": 2900 | |
| }, | |
| { | |
| "epoch": 0.08, | |
| "learning_rate": 4.855544066662612e-05, | |
| "loss": 0.708, | |
| "step": 2950 | |
| }, | |
| { | |
| "epoch": 0.09, | |
| "learning_rate": 4.8479411228027496e-05, | |
| "loss": 0.7047, | |
| "step": 3000 | |
| }, | |
| { | |
| "epoch": 0.09, | |
| "learning_rate": 4.840338178942887e-05, | |
| "loss": 0.7026, | |
| "step": 3050 | |
| }, | |
| { | |
| "epoch": 0.09, | |
| "learning_rate": 4.832735235083025e-05, | |
| "loss": 0.7038, | |
| "step": 3100 | |
| }, | |
| { | |
| "epoch": 0.09, | |
| "learning_rate": 4.825132291223162e-05, | |
| "loss": 0.6992, | |
| "step": 3150 | |
| }, | |
| { | |
| "epoch": 0.09, | |
| "learning_rate": 4.8175293473632994e-05, | |
| "loss": 0.7007, | |
| "step": 3200 | |
| }, | |
| { | |
| "epoch": 0.09, | |
| "learning_rate": 4.809926403503437e-05, | |
| "loss": 0.7025, | |
| "step": 3250 | |
| }, | |
| { | |
| "epoch": 0.09, | |
| "learning_rate": 4.802323459643574e-05, | |
| "loss": 0.7019, | |
| "step": 3300 | |
| }, | |
| { | |
| "epoch": 0.1, | |
| "learning_rate": 4.7947205157837116e-05, | |
| "loss": 0.7, | |
| "step": 3350 | |
| }, | |
| { | |
| "epoch": 0.1, | |
| "learning_rate": 4.787117571923849e-05, | |
| "loss": 0.7024, | |
| "step": 3400 | |
| }, | |
| { | |
| "epoch": 0.1, | |
| "learning_rate": 4.779514628063987e-05, | |
| "loss": 0.6992, | |
| "step": 3450 | |
| }, | |
| { | |
| "epoch": 0.1, | |
| "learning_rate": 4.771911684204124e-05, | |
| "loss": 0.7028, | |
| "step": 3500 | |
| }, | |
| { | |
| "epoch": 0.1, | |
| "learning_rate": 4.7643087403442615e-05, | |
| "loss": 0.6987, | |
| "step": 3550 | |
| }, | |
| { | |
| "epoch": 0.1, | |
| "learning_rate": 4.756705796484399e-05, | |
| "loss": 0.7026, | |
| "step": 3600 | |
| }, | |
| { | |
| "epoch": 0.1, | |
| "learning_rate": 4.749102852624536e-05, | |
| "loss": 0.7036, | |
| "step": 3650 | |
| }, | |
| { | |
| "epoch": 0.11, | |
| "learning_rate": 4.741499908764674e-05, | |
| "loss": 0.7011, | |
| "step": 3700 | |
| }, | |
| { | |
| "epoch": 0.11, | |
| "learning_rate": 4.7338969649048114e-05, | |
| "loss": 0.7042, | |
| "step": 3750 | |
| }, | |
| { | |
| "epoch": 0.11, | |
| "learning_rate": 4.726294021044948e-05, | |
| "loss": 0.7045, | |
| "step": 3800 | |
| }, | |
| { | |
| "epoch": 0.11, | |
| "learning_rate": 4.718691077185086e-05, | |
| "loss": 0.698, | |
| "step": 3850 | |
| }, | |
| { | |
| "epoch": 0.11, | |
| "learning_rate": 4.7110881333252236e-05, | |
| "loss": 0.6974, | |
| "step": 3900 | |
| }, | |
| { | |
| "epoch": 0.11, | |
| "learning_rate": 4.703485189465361e-05, | |
| "loss": 0.698, | |
| "step": 3950 | |
| }, | |
| { | |
| "epoch": 0.11, | |
| "learning_rate": 4.695882245605498e-05, | |
| "loss": 0.7007, | |
| "step": 4000 | |
| }, | |
| { | |
| "epoch": 0.12, | |
| "learning_rate": 4.688279301745636e-05, | |
| "loss": 0.6989, | |
| "step": 4050 | |
| }, | |
| { | |
| "epoch": 0.12, | |
| "learning_rate": 4.6806763578857735e-05, | |
| "loss": 0.7001, | |
| "step": 4100 | |
| }, | |
| { | |
| "epoch": 0.12, | |
| "learning_rate": 4.673073414025911e-05, | |
| "loss": 0.6979, | |
| "step": 4150 | |
| }, | |
| { | |
| "epoch": 0.12, | |
| "learning_rate": 4.665470470166049e-05, | |
| "loss": 0.6971, | |
| "step": 4200 | |
| }, | |
| { | |
| "epoch": 0.12, | |
| "learning_rate": 4.6578675263061864e-05, | |
| "loss": 0.6956, | |
| "step": 4250 | |
| }, | |
| { | |
| "epoch": 0.12, | |
| "learning_rate": 4.650264582446324e-05, | |
| "loss": 0.6973, | |
| "step": 4300 | |
| }, | |
| { | |
| "epoch": 0.12, | |
| "learning_rate": 4.642661638586461e-05, | |
| "loss": 0.6987, | |
| "step": 4350 | |
| }, | |
| { | |
| "epoch": 0.13, | |
| "learning_rate": 4.6350586947265986e-05, | |
| "loss": 0.6948, | |
| "step": 4400 | |
| }, | |
| { | |
| "epoch": 0.13, | |
| "learning_rate": 4.627455750866736e-05, | |
| "loss": 0.6961, | |
| "step": 4450 | |
| }, | |
| { | |
| "epoch": 0.13, | |
| "learning_rate": 4.619852807006873e-05, | |
| "loss": 0.6944, | |
| "step": 4500 | |
| }, | |
| { | |
| "epoch": 0.13, | |
| "learning_rate": 4.612249863147011e-05, | |
| "loss": 0.6933, | |
| "step": 4550 | |
| }, | |
| { | |
| "epoch": 0.13, | |
| "learning_rate": 4.6046469192871485e-05, | |
| "loss": 0.6928, | |
| "step": 4600 | |
| }, | |
| { | |
| "epoch": 0.13, | |
| "learning_rate": 4.597043975427286e-05, | |
| "loss": 0.692, | |
| "step": 4650 | |
| }, | |
| { | |
| "epoch": 0.13, | |
| "learning_rate": 4.589441031567423e-05, | |
| "loss": 0.6965, | |
| "step": 4700 | |
| }, | |
| { | |
| "epoch": 0.14, | |
| "learning_rate": 4.581838087707561e-05, | |
| "loss": 0.6952, | |
| "step": 4750 | |
| }, | |
| { | |
| "epoch": 0.14, | |
| "learning_rate": 4.574235143847698e-05, | |
| "loss": 0.6902, | |
| "step": 4800 | |
| }, | |
| { | |
| "epoch": 0.14, | |
| "learning_rate": 4.566632199987835e-05, | |
| "loss": 0.6956, | |
| "step": 4850 | |
| }, | |
| { | |
| "epoch": 0.14, | |
| "learning_rate": 4.559029256127973e-05, | |
| "loss": 0.6939, | |
| "step": 4900 | |
| }, | |
| { | |
| "epoch": 0.14, | |
| "learning_rate": 4.5514263122681106e-05, | |
| "loss": 0.6973, | |
| "step": 4950 | |
| }, | |
| { | |
| "epoch": 0.14, | |
| "learning_rate": 4.5438233684082475e-05, | |
| "loss": 0.6945, | |
| "step": 5000 | |
| }, | |
| { | |
| "epoch": 0.14, | |
| "learning_rate": 4.536220424548385e-05, | |
| "loss": 0.6922, | |
| "step": 5050 | |
| }, | |
| { | |
| "epoch": 0.15, | |
| "learning_rate": 4.528617480688523e-05, | |
| "loss": 0.6951, | |
| "step": 5100 | |
| }, | |
| { | |
| "epoch": 0.15, | |
| "learning_rate": 4.5210145368286604e-05, | |
| "loss": 0.6907, | |
| "step": 5150 | |
| }, | |
| { | |
| "epoch": 0.15, | |
| "learning_rate": 4.5134115929687974e-05, | |
| "loss": 0.6922, | |
| "step": 5200 | |
| }, | |
| { | |
| "epoch": 0.15, | |
| "learning_rate": 4.505808649108935e-05, | |
| "loss": 0.6952, | |
| "step": 5250 | |
| }, | |
| { | |
| "epoch": 0.15, | |
| "learning_rate": 4.4982057052490726e-05, | |
| "loss": 0.692, | |
| "step": 5300 | |
| }, | |
| { | |
| "epoch": 0.15, | |
| "learning_rate": 4.4906027613892096e-05, | |
| "loss": 0.6964, | |
| "step": 5350 | |
| }, | |
| { | |
| "epoch": 0.15, | |
| "learning_rate": 4.482999817529347e-05, | |
| "loss": 0.692, | |
| "step": 5400 | |
| }, | |
| { | |
| "epoch": 0.16, | |
| "learning_rate": 4.475396873669485e-05, | |
| "loss": 0.6911, | |
| "step": 5450 | |
| }, | |
| { | |
| "epoch": 0.16, | |
| "learning_rate": 4.4677939298096225e-05, | |
| "loss": 0.6927, | |
| "step": 5500 | |
| }, | |
| { | |
| "epoch": 0.16, | |
| "learning_rate": 4.46019098594976e-05, | |
| "loss": 0.6915, | |
| "step": 5550 | |
| }, | |
| { | |
| "epoch": 0.16, | |
| "learning_rate": 4.452588042089897e-05, | |
| "loss": 0.6917, | |
| "step": 5600 | |
| }, | |
| { | |
| "epoch": 0.16, | |
| "learning_rate": 4.444985098230035e-05, | |
| "loss": 0.6914, | |
| "step": 5650 | |
| }, | |
| { | |
| "epoch": 0.16, | |
| "learning_rate": 4.4373821543701724e-05, | |
| "loss": 0.685, | |
| "step": 5700 | |
| }, | |
| { | |
| "epoch": 0.16, | |
| "learning_rate": 4.42977921051031e-05, | |
| "loss": 0.6898, | |
| "step": 5750 | |
| }, | |
| { | |
| "epoch": 0.17, | |
| "learning_rate": 4.4221762666504476e-05, | |
| "loss": 0.6875, | |
| "step": 5800 | |
| }, | |
| { | |
| "epoch": 0.17, | |
| "learning_rate": 4.414573322790585e-05, | |
| "loss": 0.6917, | |
| "step": 5850 | |
| }, | |
| { | |
| "epoch": 0.17, | |
| "learning_rate": 4.406970378930722e-05, | |
| "loss": 0.6894, | |
| "step": 5900 | |
| }, | |
| { | |
| "epoch": 0.17, | |
| "learning_rate": 4.39936743507086e-05, | |
| "loss": 0.6903, | |
| "step": 5950 | |
| }, | |
| { | |
| "epoch": 0.17, | |
| "learning_rate": 4.3917644912109975e-05, | |
| "loss": 0.6927, | |
| "step": 6000 | |
| }, | |
| { | |
| "epoch": 0.17, | |
| "learning_rate": 4.3841615473511345e-05, | |
| "loss": 0.6906, | |
| "step": 6050 | |
| }, | |
| { | |
| "epoch": 0.17, | |
| "learning_rate": 4.376558603491272e-05, | |
| "loss": 0.6858, | |
| "step": 6100 | |
| }, | |
| { | |
| "epoch": 0.18, | |
| "learning_rate": 4.36895565963141e-05, | |
| "loss": 0.69, | |
| "step": 6150 | |
| }, | |
| { | |
| "epoch": 0.18, | |
| "learning_rate": 4.361352715771547e-05, | |
| "loss": 0.6856, | |
| "step": 6200 | |
| }, | |
| { | |
| "epoch": 0.18, | |
| "learning_rate": 4.353749771911684e-05, | |
| "loss": 0.6822, | |
| "step": 6250 | |
| }, | |
| { | |
| "epoch": 0.18, | |
| "learning_rate": 4.346146828051822e-05, | |
| "loss": 0.6877, | |
| "step": 6300 | |
| }, | |
| { | |
| "epoch": 0.18, | |
| "learning_rate": 4.3385438841919596e-05, | |
| "loss": 0.6844, | |
| "step": 6350 | |
| }, | |
| { | |
| "epoch": 0.18, | |
| "learning_rate": 4.3309409403320966e-05, | |
| "loss": 0.6836, | |
| "step": 6400 | |
| }, | |
| { | |
| "epoch": 0.18, | |
| "learning_rate": 4.323337996472234e-05, | |
| "loss": 0.6891, | |
| "step": 6450 | |
| }, | |
| { | |
| "epoch": 0.19, | |
| "learning_rate": 4.315735052612372e-05, | |
| "loss": 0.6859, | |
| "step": 6500 | |
| }, | |
| { | |
| "epoch": 0.19, | |
| "learning_rate": 4.308132108752509e-05, | |
| "loss": 0.6879, | |
| "step": 6550 | |
| }, | |
| { | |
| "epoch": 0.19, | |
| "learning_rate": 4.3005291648926464e-05, | |
| "loss": 0.6864, | |
| "step": 6600 | |
| }, | |
| { | |
| "epoch": 0.19, | |
| "learning_rate": 4.292926221032784e-05, | |
| "loss": 0.6812, | |
| "step": 6650 | |
| }, | |
| { | |
| "epoch": 0.19, | |
| "learning_rate": 4.285323277172922e-05, | |
| "loss": 0.6859, | |
| "step": 6700 | |
| }, | |
| { | |
| "epoch": 0.19, | |
| "learning_rate": 4.2777203333130587e-05, | |
| "loss": 0.685, | |
| "step": 6750 | |
| }, | |
| { | |
| "epoch": 0.19, | |
| "learning_rate": 4.270117389453196e-05, | |
| "loss": 0.6864, | |
| "step": 6800 | |
| }, | |
| { | |
| "epoch": 0.2, | |
| "learning_rate": 4.262514445593334e-05, | |
| "loss": 0.6868, | |
| "step": 6850 | |
| }, | |
| { | |
| "epoch": 0.2, | |
| "learning_rate": 4.254911501733471e-05, | |
| "loss": 0.685, | |
| "step": 6900 | |
| }, | |
| { | |
| "epoch": 0.2, | |
| "learning_rate": 4.2473085578736085e-05, | |
| "loss": 0.6799, | |
| "step": 6950 | |
| }, | |
| { | |
| "epoch": 0.2, | |
| "learning_rate": 4.239705614013746e-05, | |
| "loss": 0.6868, | |
| "step": 7000 | |
| }, | |
| { | |
| "epoch": 0.2, | |
| "learning_rate": 4.232102670153884e-05, | |
| "loss": 0.6868, | |
| "step": 7050 | |
| }, | |
| { | |
| "epoch": 0.2, | |
| "learning_rate": 4.2244997262940214e-05, | |
| "loss": 0.6819, | |
| "step": 7100 | |
| }, | |
| { | |
| "epoch": 0.2, | |
| "learning_rate": 4.216896782434159e-05, | |
| "loss": 0.6877, | |
| "step": 7150 | |
| }, | |
| { | |
| "epoch": 0.21, | |
| "learning_rate": 4.209293838574297e-05, | |
| "loss": 0.6844, | |
| "step": 7200 | |
| }, | |
| { | |
| "epoch": 0.21, | |
| "learning_rate": 4.2016908947144336e-05, | |
| "loss": 0.6851, | |
| "step": 7250 | |
| }, | |
| { | |
| "epoch": 0.21, | |
| "learning_rate": 4.194087950854571e-05, | |
| "loss": 0.6827, | |
| "step": 7300 | |
| }, | |
| { | |
| "epoch": 0.21, | |
| "learning_rate": 4.186485006994709e-05, | |
| "loss": 0.6793, | |
| "step": 7350 | |
| }, | |
| { | |
| "epoch": 0.21, | |
| "learning_rate": 4.178882063134846e-05, | |
| "loss": 0.6818, | |
| "step": 7400 | |
| }, | |
| { | |
| "epoch": 0.21, | |
| "learning_rate": 4.1712791192749835e-05, | |
| "loss": 0.6855, | |
| "step": 7450 | |
| }, | |
| { | |
| "epoch": 0.22, | |
| "learning_rate": 4.163676175415121e-05, | |
| "loss": 0.6819, | |
| "step": 7500 | |
| }, | |
| { | |
| "epoch": 0.22, | |
| "learning_rate": 4.156073231555259e-05, | |
| "loss": 0.6797, | |
| "step": 7550 | |
| }, | |
| { | |
| "epoch": 0.22, | |
| "learning_rate": 4.148470287695396e-05, | |
| "loss": 0.6775, | |
| "step": 7600 | |
| }, | |
| { | |
| "epoch": 0.22, | |
| "learning_rate": 4.1408673438355334e-05, | |
| "loss": 0.6778, | |
| "step": 7650 | |
| }, | |
| { | |
| "epoch": 0.22, | |
| "learning_rate": 4.133264399975671e-05, | |
| "loss": 0.6776, | |
| "step": 7700 | |
| }, | |
| { | |
| "epoch": 0.22, | |
| "learning_rate": 4.125661456115808e-05, | |
| "loss": 0.6838, | |
| "step": 7750 | |
| }, | |
| { | |
| "epoch": 0.22, | |
| "learning_rate": 4.1180585122559456e-05, | |
| "loss": 0.6788, | |
| "step": 7800 | |
| }, | |
| { | |
| "epoch": 0.23, | |
| "learning_rate": 4.110455568396083e-05, | |
| "loss": 0.6845, | |
| "step": 7850 | |
| }, | |
| { | |
| "epoch": 0.23, | |
| "learning_rate": 4.102852624536221e-05, | |
| "loss": 0.681, | |
| "step": 7900 | |
| }, | |
| { | |
| "epoch": 0.23, | |
| "learning_rate": 4.095249680676358e-05, | |
| "loss": 0.6793, | |
| "step": 7950 | |
| }, | |
| { | |
| "epoch": 0.23, | |
| "learning_rate": 4.0876467368164955e-05, | |
| "loss": 0.6814, | |
| "step": 8000 | |
| }, | |
| { | |
| "epoch": 0.23, | |
| "learning_rate": 4.080043792956633e-05, | |
| "loss": 0.6791, | |
| "step": 8050 | |
| }, | |
| { | |
| "epoch": 0.23, | |
| "learning_rate": 4.07244084909677e-05, | |
| "loss": 0.6775, | |
| "step": 8100 | |
| }, | |
| { | |
| "epoch": 0.23, | |
| "learning_rate": 4.064837905236908e-05, | |
| "loss": 0.6754, | |
| "step": 8150 | |
| }, | |
| { | |
| "epoch": 0.24, | |
| "learning_rate": 4.057234961377045e-05, | |
| "loss": 0.6837, | |
| "step": 8200 | |
| }, | |
| { | |
| "epoch": 0.24, | |
| "learning_rate": 4.049632017517182e-05, | |
| "loss": 0.6747, | |
| "step": 8250 | |
| }, | |
| { | |
| "epoch": 0.24, | |
| "learning_rate": 4.04202907365732e-05, | |
| "loss": 0.6766, | |
| "step": 8300 | |
| }, | |
| { | |
| "epoch": 0.24, | |
| "learning_rate": 4.0344261297974576e-05, | |
| "loss": 0.6807, | |
| "step": 8350 | |
| }, | |
| { | |
| "epoch": 0.24, | |
| "learning_rate": 4.026823185937595e-05, | |
| "loss": 0.6749, | |
| "step": 8400 | |
| }, | |
| { | |
| "epoch": 0.24, | |
| "learning_rate": 4.019220242077733e-05, | |
| "loss": 0.6765, | |
| "step": 8450 | |
| }, | |
| { | |
| "epoch": 0.24, | |
| "learning_rate": 4.0116172982178705e-05, | |
| "loss": 0.6767, | |
| "step": 8500 | |
| }, | |
| { | |
| "epoch": 0.25, | |
| "learning_rate": 4.0040143543580074e-05, | |
| "loss": 0.6785, | |
| "step": 8550 | |
| }, | |
| { | |
| "epoch": 0.25, | |
| "learning_rate": 3.996411410498145e-05, | |
| "loss": 0.6729, | |
| "step": 8600 | |
| }, | |
| { | |
| "epoch": 0.25, | |
| "learning_rate": 3.988808466638283e-05, | |
| "loss": 0.6778, | |
| "step": 8650 | |
| }, | |
| { | |
| "epoch": 0.25, | |
| "learning_rate": 3.98120552277842e-05, | |
| "loss": 0.672, | |
| "step": 8700 | |
| }, | |
| { | |
| "epoch": 0.25, | |
| "learning_rate": 3.973602578918558e-05, | |
| "loss": 0.6784, | |
| "step": 8750 | |
| }, | |
| { | |
| "epoch": 0.25, | |
| "learning_rate": 3.965999635058695e-05, | |
| "loss": 0.6741, | |
| "step": 8800 | |
| }, | |
| { | |
| "epoch": 0.25, | |
| "learning_rate": 3.9583966911988326e-05, | |
| "loss": 0.6742, | |
| "step": 8850 | |
| }, | |
| { | |
| "epoch": 0.26, | |
| "learning_rate": 3.95079374733897e-05, | |
| "loss": 0.6769, | |
| "step": 8900 | |
| }, | |
| { | |
| "epoch": 0.26, | |
| "learning_rate": 3.943190803479107e-05, | |
| "loss": 0.6751, | |
| "step": 8950 | |
| }, | |
| { | |
| "epoch": 0.26, | |
| "learning_rate": 3.935587859619245e-05, | |
| "loss": 0.6755, | |
| "step": 9000 | |
| }, | |
| { | |
| "epoch": 0.26, | |
| "learning_rate": 3.9279849157593824e-05, | |
| "loss": 0.6745, | |
| "step": 9050 | |
| }, | |
| { | |
| "epoch": 0.26, | |
| "learning_rate": 3.92038197189952e-05, | |
| "loss": 0.6736, | |
| "step": 9100 | |
| }, | |
| { | |
| "epoch": 0.26, | |
| "learning_rate": 3.912779028039657e-05, | |
| "loss": 0.6737, | |
| "step": 9150 | |
| }, | |
| { | |
| "epoch": 0.26, | |
| "learning_rate": 3.9051760841797946e-05, | |
| "loss": 0.6706, | |
| "step": 9200 | |
| }, | |
| { | |
| "epoch": 0.27, | |
| "learning_rate": 3.897573140319932e-05, | |
| "loss": 0.6755, | |
| "step": 9250 | |
| }, | |
| { | |
| "epoch": 0.27, | |
| "learning_rate": 3.889970196460069e-05, | |
| "loss": 0.6713, | |
| "step": 9300 | |
| }, | |
| { | |
| "epoch": 0.27, | |
| "learning_rate": 3.882367252600207e-05, | |
| "loss": 0.6706, | |
| "step": 9350 | |
| }, | |
| { | |
| "epoch": 0.27, | |
| "learning_rate": 3.8747643087403445e-05, | |
| "loss": 0.6774, | |
| "step": 9400 | |
| }, | |
| { | |
| "epoch": 0.27, | |
| "learning_rate": 3.8671613648804815e-05, | |
| "loss": 0.6729, | |
| "step": 9450 | |
| }, | |
| { | |
| "epoch": 0.27, | |
| "learning_rate": 3.859558421020619e-05, | |
| "loss": 0.6778, | |
| "step": 9500 | |
| }, | |
| { | |
| "epoch": 0.27, | |
| "learning_rate": 3.851955477160757e-05, | |
| "loss": 0.6719, | |
| "step": 9550 | |
| }, | |
| { | |
| "epoch": 0.28, | |
| "learning_rate": 3.8443525333008944e-05, | |
| "loss": 0.6697, | |
| "step": 9600 | |
| }, | |
| { | |
| "epoch": 0.28, | |
| "learning_rate": 3.836749589441031e-05, | |
| "loss": 0.6759, | |
| "step": 9650 | |
| }, | |
| { | |
| "epoch": 0.28, | |
| "learning_rate": 3.829146645581169e-05, | |
| "loss": 0.6763, | |
| "step": 9700 | |
| }, | |
| { | |
| "epoch": 0.28, | |
| "learning_rate": 3.8215437017213066e-05, | |
| "loss": 0.6719, | |
| "step": 9750 | |
| }, | |
| { | |
| "epoch": 0.28, | |
| "learning_rate": 3.8139407578614436e-05, | |
| "loss": 0.6717, | |
| "step": 9800 | |
| }, | |
| { | |
| "epoch": 0.28, | |
| "learning_rate": 3.806337814001581e-05, | |
| "loss": 0.6756, | |
| "step": 9850 | |
| }, | |
| { | |
| "epoch": 0.28, | |
| "learning_rate": 3.798734870141719e-05, | |
| "loss": 0.6671, | |
| "step": 9900 | |
| }, | |
| { | |
| "epoch": 0.29, | |
| "learning_rate": 3.7911319262818565e-05, | |
| "loss": 0.6767, | |
| "step": 9950 | |
| }, | |
| { | |
| "epoch": 0.29, | |
| "learning_rate": 3.783528982421994e-05, | |
| "loss": 0.673, | |
| "step": 10000 | |
| }, | |
| { | |
| "epoch": 0.29, | |
| "eval_input_ids_loss": 0.6666935086250305, | |
| "eval_input_ids_runtime": 38.9546, | |
| "eval_input_ids_samples_per_second": 2567.088, | |
| "eval_input_ids_steps_per_second": 6.7, | |
| "step": 10000 | |
| }, | |
| { | |
| "epoch": 0.29, | |
| "eval_special_tokens_mask_loss": null, | |
| "eval_special_tokens_mask_runtime": 38.7839, | |
| "eval_special_tokens_mask_samples_per_second": 2578.389, | |
| "eval_special_tokens_mask_steps_per_second": 6.73, | |
| "step": 10000 | |
| }, | |
| { | |
| "epoch": 0.29, | |
| "eval_attention_mask_loss": null, | |
| "eval_attention_mask_runtime": 38.7271, | |
| "eval_attention_mask_samples_per_second": 2582.17, | |
| "eval_attention_mask_steps_per_second": 6.739, | |
| "step": 10000 | |
| }, | |
| { | |
| "epoch": 0.29, | |
| "learning_rate": 3.775926038562132e-05, | |
| "loss": 0.6733, | |
| "step": 10050 | |
| }, | |
| { | |
| "epoch": 0.29, | |
| "learning_rate": 3.7683230947022694e-05, | |
| "loss": 0.6712, | |
| "step": 10100 | |
| }, | |
| { | |
| "epoch": 0.29, | |
| "learning_rate": 3.760720150842406e-05, | |
| "loss": 0.6673, | |
| "step": 10150 | |
| }, | |
| { | |
| "epoch": 0.29, | |
| "learning_rate": 3.753117206982544e-05, | |
| "loss": 0.6745, | |
| "step": 10200 | |
| }, | |
| { | |
| "epoch": 0.29, | |
| "learning_rate": 3.7455142631226816e-05, | |
| "loss": 0.6647, | |
| "step": 10250 | |
| }, | |
| { | |
| "epoch": 0.3, | |
| "learning_rate": 3.737911319262819e-05, | |
| "loss": 0.6694, | |
| "step": 10300 | |
| }, | |
| { | |
| "epoch": 0.3, | |
| "learning_rate": 3.730308375402956e-05, | |
| "loss": 0.6694, | |
| "step": 10350 | |
| }, | |
| { | |
| "epoch": 0.3, | |
| "learning_rate": 3.722705431543094e-05, | |
| "loss": 0.6665, | |
| "step": 10400 | |
| }, | |
| { | |
| "epoch": 0.3, | |
| "learning_rate": 3.7151024876832315e-05, | |
| "loss": 0.6686, | |
| "step": 10450 | |
| }, | |
| { | |
| "epoch": 0.3, | |
| "learning_rate": 3.7074995438233684e-05, | |
| "loss": 0.6682, | |
| "step": 10500 | |
| }, | |
| { | |
| "epoch": 0.3, | |
| "learning_rate": 3.699896599963506e-05, | |
| "loss": 0.6683, | |
| "step": 10550 | |
| }, | |
| { | |
| "epoch": 0.3, | |
| "learning_rate": 3.692293656103644e-05, | |
| "loss": 0.6654, | |
| "step": 10600 | |
| }, | |
| { | |
| "epoch": 0.31, | |
| "learning_rate": 3.6846907122437807e-05, | |
| "loss": 0.6636, | |
| "step": 10650 | |
| }, | |
| { | |
| "epoch": 0.31, | |
| "learning_rate": 3.677087768383918e-05, | |
| "loss": 0.6726, | |
| "step": 10700 | |
| }, | |
| { | |
| "epoch": 0.31, | |
| "learning_rate": 3.669484824524056e-05, | |
| "loss": 0.6601, | |
| "step": 10750 | |
| }, | |
| { | |
| "epoch": 0.31, | |
| "learning_rate": 3.6618818806641936e-05, | |
| "loss": 0.6715, | |
| "step": 10800 | |
| }, | |
| { | |
| "epoch": 0.31, | |
| "learning_rate": 3.6542789368043305e-05, | |
| "loss": 0.6625, | |
| "step": 10850 | |
| }, | |
| { | |
| "epoch": 0.31, | |
| "learning_rate": 3.646675992944468e-05, | |
| "loss": 0.6662, | |
| "step": 10900 | |
| }, | |
| { | |
| "epoch": 0.31, | |
| "learning_rate": 3.639073049084606e-05, | |
| "loss": 0.6636, | |
| "step": 10950 | |
| }, | |
| { | |
| "epoch": 0.32, | |
| "learning_rate": 3.631470105224743e-05, | |
| "loss": 0.6722, | |
| "step": 11000 | |
| }, | |
| { | |
| "epoch": 0.32, | |
| "learning_rate": 3.6238671613648804e-05, | |
| "loss": 0.6648, | |
| "step": 11050 | |
| }, | |
| { | |
| "epoch": 0.32, | |
| "learning_rate": 3.616264217505018e-05, | |
| "loss": 0.6664, | |
| "step": 11100 | |
| }, | |
| { | |
| "epoch": 0.32, | |
| "learning_rate": 3.6086612736451556e-05, | |
| "loss": 0.6659, | |
| "step": 11150 | |
| }, | |
| { | |
| "epoch": 0.32, | |
| "learning_rate": 3.6010583297852926e-05, | |
| "loss": 0.6689, | |
| "step": 11200 | |
| }, | |
| { | |
| "epoch": 0.32, | |
| "learning_rate": 3.59345538592543e-05, | |
| "loss": 0.6656, | |
| "step": 11250 | |
| }, | |
| { | |
| "epoch": 0.32, | |
| "learning_rate": 3.585852442065568e-05, | |
| "loss": 0.6603, | |
| "step": 11300 | |
| }, | |
| { | |
| "epoch": 0.33, | |
| "learning_rate": 3.5782494982057055e-05, | |
| "loss": 0.6622, | |
| "step": 11350 | |
| }, | |
| { | |
| "epoch": 0.33, | |
| "learning_rate": 3.570646554345843e-05, | |
| "loss": 0.6636, | |
| "step": 11400 | |
| }, | |
| { | |
| "epoch": 0.33, | |
| "learning_rate": 3.563043610485981e-05, | |
| "loss": 0.6679, | |
| "step": 11450 | |
| }, | |
| { | |
| "epoch": 0.33, | |
| "learning_rate": 3.555440666626118e-05, | |
| "loss": 0.6645, | |
| "step": 11500 | |
| }, | |
| { | |
| "epoch": 0.33, | |
| "learning_rate": 3.5478377227662554e-05, | |
| "loss": 0.6623, | |
| "step": 11550 | |
| }, | |
| { | |
| "epoch": 0.33, | |
| "learning_rate": 3.540234778906393e-05, | |
| "loss": 0.6654, | |
| "step": 11600 | |
| }, | |
| { | |
| "epoch": 0.33, | |
| "learning_rate": 3.5326318350465306e-05, | |
| "loss": 0.6655, | |
| "step": 11650 | |
| }, | |
| { | |
| "epoch": 0.34, | |
| "learning_rate": 3.5250288911866676e-05, | |
| "loss": 0.6666, | |
| "step": 11700 | |
| }, | |
| { | |
| "epoch": 0.34, | |
| "learning_rate": 3.517425947326805e-05, | |
| "loss": 0.6598, | |
| "step": 11750 | |
| }, | |
| { | |
| "epoch": 0.34, | |
| "learning_rate": 3.509823003466943e-05, | |
| "loss": 0.6583, | |
| "step": 11800 | |
| }, | |
| { | |
| "epoch": 0.34, | |
| "learning_rate": 3.50222005960708e-05, | |
| "loss": 0.6623, | |
| "step": 11850 | |
| }, | |
| { | |
| "epoch": 0.34, | |
| "learning_rate": 3.4946171157472175e-05, | |
| "loss": 0.6597, | |
| "step": 11900 | |
| }, | |
| { | |
| "epoch": 0.34, | |
| "learning_rate": 3.487014171887355e-05, | |
| "loss": 0.6598, | |
| "step": 11950 | |
| }, | |
| { | |
| "epoch": 0.34, | |
| "learning_rate": 3.479411228027493e-05, | |
| "loss": 0.6683, | |
| "step": 12000 | |
| }, | |
| { | |
| "epoch": 0.35, | |
| "learning_rate": 3.47180828416763e-05, | |
| "loss": 0.6594, | |
| "step": 12050 | |
| }, | |
| { | |
| "epoch": 0.35, | |
| "learning_rate": 3.464205340307767e-05, | |
| "loss": 0.6621, | |
| "step": 12100 | |
| }, | |
| { | |
| "epoch": 0.35, | |
| "learning_rate": 3.456602396447905e-05, | |
| "loss": 0.6579, | |
| "step": 12150 | |
| }, | |
| { | |
| "epoch": 0.35, | |
| "learning_rate": 3.448999452588042e-05, | |
| "loss": 0.6572, | |
| "step": 12200 | |
| }, | |
| { | |
| "epoch": 0.35, | |
| "learning_rate": 3.4413965087281796e-05, | |
| "loss": 0.6581, | |
| "step": 12250 | |
| }, | |
| { | |
| "epoch": 0.35, | |
| "learning_rate": 3.433793564868317e-05, | |
| "loss": 0.6638, | |
| "step": 12300 | |
| }, | |
| { | |
| "epoch": 0.35, | |
| "learning_rate": 3.426190621008455e-05, | |
| "loss": 0.6624, | |
| "step": 12350 | |
| }, | |
| { | |
| "epoch": 0.36, | |
| "learning_rate": 3.418587677148592e-05, | |
| "loss": 0.6612, | |
| "step": 12400 | |
| }, | |
| { | |
| "epoch": 0.36, | |
| "learning_rate": 3.4109847332887294e-05, | |
| "loss": 0.6573, | |
| "step": 12450 | |
| }, | |
| { | |
| "epoch": 0.36, | |
| "learning_rate": 3.403381789428867e-05, | |
| "loss": 0.6634, | |
| "step": 12500 | |
| }, | |
| { | |
| "epoch": 0.36, | |
| "learning_rate": 3.395778845569004e-05, | |
| "loss": 0.6595, | |
| "step": 12550 | |
| }, | |
| { | |
| "epoch": 0.36, | |
| "learning_rate": 3.3881759017091417e-05, | |
| "loss": 0.6569, | |
| "step": 12600 | |
| } | |
| ], | |
| "max_steps": 34882, | |
| "num_train_epochs": 1, | |
| "total_flos": 1.2738094058962944e+18, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |