{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.999854310939306,
  "eval_steps": 5000,
  "global_step": 3217,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.01,
      "learning_rate": 8.000000000000001e-07,
      "loss": 0.1751,
      "step": 25
    },
    {
      "epoch": 0.02,
      "learning_rate": 1.6333333333333335e-06,
      "loss": 0.1343,
      "step": 50
    },
    {
      "epoch": 0.02,
      "learning_rate": 2.3333333333333336e-06,
      "loss": 0.1246,
      "step": 75
    },
    {
      "epoch": 0.03,
      "learning_rate": 3.133333333333334e-06,
      "loss": 0.1172,
      "step": 100
    },
    {
      "epoch": 0.04,
      "learning_rate": 3.866666666666667e-06,
      "loss": 0.1133,
      "step": 125
    },
    {
      "epoch": 0.05,
      "learning_rate": 4.566666666666667e-06,
      "loss": 0.1104,
      "step": 150
    },
    {
      "epoch": 0.05,
      "learning_rate": 5.366666666666666e-06,
      "loss": 0.1098,
      "step": 175
    },
    {
      "epoch": 0.06,
      "learning_rate": 6.1e-06,
      "loss": 0.1077,
      "step": 200
    },
    {
      "epoch": 0.07,
      "learning_rate": 6.833333333333334e-06,
      "loss": 0.1061,
      "step": 225
    },
    {
      "epoch": 0.08,
      "learning_rate": 7.566666666666667e-06,
      "loss": 0.1068,
      "step": 250
    },
    {
      "epoch": 0.09,
      "learning_rate": 8.3e-06,
      "loss": 0.1074,
      "step": 275
    },
    {
      "epoch": 0.09,
      "learning_rate": 9.033333333333334e-06,
      "loss": 0.107,
      "step": 300
    },
    {
      "epoch": 0.1,
      "learning_rate": 9.800000000000001e-06,
      "loss": 0.1048,
      "step": 325
    },
    {
      "epoch": 0.11,
      "learning_rate": 9.941720946177581e-06,
      "loss": 0.1062,
      "step": 350
    },
    {
      "epoch": 0.12,
      "learning_rate": 9.862872814535483e-06,
      "loss": 0.1061,
      "step": 375
    },
    {
      "epoch": 0.12,
      "learning_rate": 9.787452862529997e-06,
      "loss": 0.1051,
      "step": 400
    },
    {
      "epoch": 0.13,
      "learning_rate": 9.7086047308879e-06,
      "loss": 0.1056,
      "step": 425
    },
    {
      "epoch": 0.14,
      "learning_rate": 9.633184778882414e-06,
      "loss": 0.1045,
      "step": 450
    },
    {
      "epoch": 0.15,
      "learning_rate": 9.554336647240316e-06,
      "loss": 0.1042,
      "step": 475
    },
    {
      "epoch": 0.16,
      "learning_rate": 9.475488515598218e-06,
      "loss": 0.105,
      "step": 500
    },
    {
      "epoch": 0.16,
      "learning_rate": 9.39664038395612e-06,
      "loss": 0.104,
      "step": 525
    },
    {
      "epoch": 0.17,
      "learning_rate": 9.321220431950634e-06,
      "loss": 0.104,
      "step": 550
    },
    {
      "epoch": 0.18,
      "learning_rate": 9.242372300308537e-06,
      "loss": 0.1017,
      "step": 575
    },
    {
      "epoch": 0.19,
      "learning_rate": 9.166952348303052e-06,
      "loss": 0.104,
      "step": 600
    },
    {
      "epoch": 0.19,
      "learning_rate": 9.088104216660953e-06,
      "loss": 0.1021,
      "step": 625
    },
    {
      "epoch": 0.2,
      "learning_rate": 9.012684264655469e-06,
      "loss": 0.1027,
      "step": 650
    },
    {
      "epoch": 0.21,
      "learning_rate": 8.940692492286596e-06,
      "loss": 0.1041,
      "step": 675
    },
    {
      "epoch": 0.22,
      "learning_rate": 8.861844360644498e-06,
      "loss": 0.1022,
      "step": 700
    },
    {
      "epoch": 0.23,
      "learning_rate": 8.789852588275626e-06,
      "loss": 0.1021,
      "step": 725
    },
    {
      "epoch": 0.23,
      "learning_rate": 8.711004456633528e-06,
      "loss": 0.103,
      "step": 750
    },
    {
      "epoch": 0.24,
      "learning_rate": 8.63215632499143e-06,
      "loss": 0.1033,
      "step": 775
    },
    {
      "epoch": 0.25,
      "learning_rate": 8.560164552622558e-06,
      "loss": 0.1036,
      "step": 800
    },
    {
      "epoch": 0.26,
      "learning_rate": 8.484744600617072e-06,
      "loss": 0.101,
      "step": 825
    },
    {
      "epoch": 0.26,
      "learning_rate": 8.409324648611588e-06,
      "loss": 0.1022,
      "step": 850
    },
    {
      "epoch": 0.27,
      "learning_rate": 8.34076105587933e-06,
      "loss": 0.1014,
      "step": 875
    },
    {
      "epoch": 0.28,
      "learning_rate": 8.265341103873843e-06,
      "loss": 0.1005,
      "step": 900
    },
    {
      "epoch": 0.29,
      "learning_rate": 8.186492972231745e-06,
      "loss": 0.0999,
      "step": 925
    },
    {
      "epoch": 0.3,
      "learning_rate": 8.114501199862873e-06,
      "loss": 0.0993,
      "step": 950
    },
    {
      "epoch": 0.3,
      "learning_rate": 8.042509427494002e-06,
      "loss": 0.1017,
      "step": 975
    },
    {
      "epoch": 0.31,
      "learning_rate": 7.97051765512513e-06,
      "loss": 0.0997,
      "step": 1000
    },
    {
      "epoch": 0.32,
      "learning_rate": 7.895097703119644e-06,
      "loss": 0.1004,
      "step": 1025
    },
    {
      "epoch": 0.33,
      "learning_rate": 7.812821391840933e-06,
      "loss": 0.1005,
      "step": 1050
    },
    {
      "epoch": 0.33,
      "learning_rate": 7.737401439835449e-06,
      "loss": 0.0996,
      "step": 1075
    },
    {
      "epoch": 0.34,
      "learning_rate": 7.661981487829963e-06,
      "loss": 0.1,
      "step": 1100
    },
    {
      "epoch": 0.35,
      "learning_rate": 7.589989715461091e-06,
      "loss": 0.0994,
      "step": 1125
    },
    {
      "epoch": 0.36,
      "learning_rate": 7.5111415838189925e-06,
      "loss": 0.0995,
      "step": 1150
    },
    {
      "epoch": 0.37,
      "learning_rate": 7.43914981145012e-06,
      "loss": 0.1004,
      "step": 1175
    },
    {
      "epoch": 0.37,
      "learning_rate": 7.363729859444636e-06,
      "loss": 0.1011,
      "step": 1200
    },
    {
      "epoch": 0.38,
      "learning_rate": 7.2814535481659246e-06,
      "loss": 0.1002,
      "step": 1225
    },
    {
      "epoch": 0.39,
      "learning_rate": 7.209461775797052e-06,
      "loss": 0.099,
      "step": 1250
    },
    {
      "epoch": 0.4,
      "learning_rate": 7.137470003428181e-06,
      "loss": 0.0996,
      "step": 1275
    },
    {
      "epoch": 0.4,
      "learning_rate": 7.058621871786082e-06,
      "loss": 0.1,
      "step": 1300
    },
    {
      "epoch": 0.41,
      "learning_rate": 6.983201919780597e-06,
      "loss": 0.0992,
      "step": 1325
    },
    {
      "epoch": 0.42,
      "learning_rate": 6.904353788138499e-06,
      "loss": 0.0993,
      "step": 1350
    },
    {
      "epoch": 0.43,
      "learning_rate": 6.825505656496401e-06,
      "loss": 0.0975,
      "step": 1375
    },
    {
      "epoch": 0.44,
      "learning_rate": 6.756942063764142e-06,
      "loss": 0.097,
      "step": 1400
    },
    {
      "epoch": 0.44,
      "learning_rate": 6.68495029139527e-06,
      "loss": 0.0988,
      "step": 1425
    },
    {
      "epoch": 0.45,
      "learning_rate": 6.612958519026398e-06,
      "loss": 0.096,
      "step": 1450
    },
    {
      "epoch": 0.46,
      "learning_rate": 6.537538567020913e-06,
      "loss": 0.0972,
      "step": 1475
    },
    {
      "epoch": 0.47,
      "learning_rate": 6.462118615015427e-06,
      "loss": 0.0978,
      "step": 1500
    },
    {
      "epoch": 0.47,
      "learning_rate": 6.386698663009943e-06,
      "loss": 0.0985,
      "step": 1525
    },
    {
      "epoch": 0.48,
      "learning_rate": 6.3044223517312304e-06,
      "loss": 0.0982,
      "step": 1550
    },
    {
      "epoch": 0.49,
      "learning_rate": 6.235858758998972e-06,
      "loss": 0.098,
      "step": 1575
    },
    {
      "epoch": 0.5,
      "learning_rate": 6.1638669866301e-06,
      "loss": 0.0965,
      "step": 1600
    },
    {
      "epoch": 0.51,
      "learning_rate": 6.091875214261228e-06,
      "loss": 0.0963,
      "step": 1625
    },
    {
      "epoch": 0.51,
      "learning_rate": 6.01302708261913e-06,
      "loss": 0.097,
      "step": 1650
    },
    {
      "epoch": 0.52,
      "learning_rate": 5.937607130613645e-06,
      "loss": 0.0972,
      "step": 1675
    },
    {
      "epoch": 0.53,
      "learning_rate": 5.86218717860816e-06,
      "loss": 0.0977,
      "step": 1700
    },
    {
      "epoch": 0.54,
      "learning_rate": 5.783339046966061e-06,
      "loss": 0.0971,
      "step": 1725
    },
    {
      "epoch": 0.54,
      "learning_rate": 5.71134727459719e-06,
      "loss": 0.0965,
      "step": 1750
    },
    {
      "epoch": 0.55,
      "learning_rate": 5.632499142955091e-06,
      "loss": 0.0971,
      "step": 1775
    },
    {
      "epoch": 0.56,
      "learning_rate": 5.557079190949606e-06,
      "loss": 0.0967,
      "step": 1800
    },
    {
      "epoch": 0.57,
      "learning_rate": 5.485087418580734e-06,
      "loss": 0.0949,
      "step": 1825
    },
    {
      "epoch": 0.57,
      "learning_rate": 5.413095646211862e-06,
      "loss": 0.0968,
      "step": 1850
    },
    {
      "epoch": 0.58,
      "learning_rate": 5.337675694206377e-06,
      "loss": 0.0974,
      "step": 1875
    },
    {
      "epoch": 0.59,
      "learning_rate": 5.269112101474117e-06,
      "loss": 0.0952,
      "step": 1900
    },
    {
      "epoch": 0.6,
      "learning_rate": 5.193692149468632e-06,
      "loss": 0.0952,
      "step": 1925
    },
    {
      "epoch": 0.61,
      "learning_rate": 5.118272197463148e-06,
      "loss": 0.0967,
      "step": 1950
    },
    {
      "epoch": 0.61,
      "learning_rate": 5.0462804250942755e-06,
      "loss": 0.0948,
      "step": 1975
    },
    {
      "epoch": 0.62,
      "learning_rate": 4.9708604730887904e-06,
      "loss": 0.0967,
      "step": 2000
    },
    {
      "epoch": 0.63,
      "learning_rate": 4.892012341446693e-06,
      "loss": 0.0953,
      "step": 2025
    },
    {
      "epoch": 0.64,
      "learning_rate": 4.82002056907782e-06,
      "loss": 0.0951,
      "step": 2050
    },
    {
      "epoch": 0.64,
      "learning_rate": 4.737744257799109e-06,
      "loss": 0.0952,
      "step": 2075
    },
    {
      "epoch": 0.65,
      "learning_rate": 4.662324305793624e-06,
      "loss": 0.0962,
      "step": 2100
    },
    {
      "epoch": 0.66,
      "learning_rate": 4.586904353788139e-06,
      "loss": 0.0953,
      "step": 2125
    },
    {
      "epoch": 0.67,
      "learning_rate": 4.51834076105588e-06,
      "loss": 0.0942,
      "step": 2150
    },
    {
      "epoch": 0.68,
      "learning_rate": 4.439492629413782e-06,
      "loss": 0.0943,
      "step": 2175
    },
    {
      "epoch": 0.68,
      "learning_rate": 4.360644497771684e-06,
      "loss": 0.1057,
      "step": 2200
    },
    {
      "epoch": 0.69,
      "learning_rate": 4.288652725402811e-06,
      "loss": 0.1048,
      "step": 2225
    },
    {
      "epoch": 0.7,
      "learning_rate": 4.216660953033939e-06,
      "loss": 0.1063,
      "step": 2250
    },
    {
      "epoch": 0.71,
      "learning_rate": 4.144669180665067e-06,
      "loss": 0.1051,
      "step": 2275
    },
    {
      "epoch": 0.71,
      "learning_rate": 4.069249228659582e-06,
      "loss": 0.104,
      "step": 2300
    },
    {
      "epoch": 0.72,
      "learning_rate": 3.990401097017484e-06,
      "loss": 0.1064,
      "step": 2325
    },
    {
      "epoch": 0.73,
      "learning_rate": 3.914981145011999e-06,
      "loss": 0.1038,
      "step": 2350
    },
    {
      "epoch": 0.74,
      "learning_rate": 3.84641755227974e-06,
      "loss": 0.1038,
      "step": 2375
    },
    {
      "epoch": 0.75,
      "learning_rate": 3.764141241001029e-06,
      "loss": 0.1028,
      "step": 2400
    },
    {
      "epoch": 0.75,
      "learning_rate": 3.6887212889955437e-06,
      "loss": 0.1044,
      "step": 2425
    },
    {
      "epoch": 0.76,
      "learning_rate": 3.6167295166266713e-06,
      "loss": 0.1052,
      "step": 2450
    },
    {
      "epoch": 0.77,
      "learning_rate": 3.5413095646211863e-06,
      "loss": 0.1042,
      "step": 2475
    },
    {
      "epoch": 0.78,
      "learning_rate": 3.4693177922523143e-06,
      "loss": 0.104,
      "step": 2500
    },
    {
      "epoch": 0.78,
      "learning_rate": 3.3973260198834423e-06,
      "loss": 0.1025,
      "step": 2525
    },
    {
      "epoch": 0.79,
      "learning_rate": 3.321906067877957e-06,
      "loss": 0.1031,
      "step": 2550
    },
    {
      "epoch": 0.8,
      "learning_rate": 3.2430579362358587e-06,
      "loss": 0.1049,
      "step": 2575
    },
    {
      "epoch": 0.81,
      "learning_rate": 3.164209804593761e-06,
      "loss": 0.1039,
      "step": 2600
    },
    {
      "epoch": 0.82,
      "learning_rate": 3.088789852588276e-06,
      "loss": 0.1032,
      "step": 2625
    },
    {
      "epoch": 0.82,
      "learning_rate": 3.0167980802194034e-06,
      "loss": 0.1032,
      "step": 2650
    },
    {
      "epoch": 0.83,
      "learning_rate": 2.9413781282139188e-06,
      "loss": 0.1036,
      "step": 2675
    },
    {
      "epoch": 0.84,
      "learning_rate": 2.8625299965718206e-06,
      "loss": 0.1027,
      "step": 2700
    },
    {
      "epoch": 0.85,
      "learning_rate": 2.7836818649297224e-06,
      "loss": 0.1016,
      "step": 2725
    },
    {
      "epoch": 0.85,
      "learning_rate": 2.7082619129242378e-06,
      "loss": 0.1035,
      "step": 2750
    },
    {
      "epoch": 0.86,
      "learning_rate": 2.6294137812821396e-06,
      "loss": 0.1025,
      "step": 2775
    },
    {
      "epoch": 0.87,
      "learning_rate": 2.557422008913267e-06,
      "loss": 0.1013,
      "step": 2800
    },
    {
      "epoch": 0.88,
      "learning_rate": 2.471717517997943e-06,
      "loss": 0.1026,
      "step": 2825
    },
    {
      "epoch": 0.89,
      "learning_rate": 2.3997257456290712e-06,
      "loss": 0.1026,
      "step": 2850
    },
    {
      "epoch": 0.89,
      "learning_rate": 2.331162152896812e-06,
      "loss": 0.1019,
      "step": 2875
    },
    {
      "epoch": 0.9,
      "learning_rate": 2.252314021254714e-06,
      "loss": 0.101,
      "step": 2900
    },
    {
      "epoch": 0.91,
      "learning_rate": 2.183750428522455e-06,
      "loss": 0.1026,
      "step": 2925
    },
    {
      "epoch": 0.92,
      "learning_rate": 2.1083304765169694e-06,
      "loss": 0.1014,
      "step": 2950
    },
    {
      "epoch": 0.92,
      "learning_rate": 2.0363387041480975e-06,
      "loss": 0.1022,
      "step": 2975
    },
    {
      "epoch": 0.93,
      "learning_rate": 1.9574905725059997e-06,
      "loss": 0.1015,
      "step": 3000
    },
    {
      "epoch": 0.94,
      "learning_rate": 1.8786424408639015e-06,
      "loss": 0.1029,
      "step": 3025
    },
    {
      "epoch": 0.95,
      "learning_rate": 1.8066506684950293e-06,
      "loss": 0.1022,
      "step": 3050
    },
    {
      "epoch": 0.96,
      "learning_rate": 1.7312307164895443e-06,
      "loss": 0.1014,
      "step": 3075
    },
    {
      "epoch": 0.96,
      "learning_rate": 1.6523825848474463e-06,
      "loss": 0.1002,
      "step": 3100
    },
    {
      "epoch": 0.97,
      "learning_rate": 1.573534453205348e-06,
      "loss": 0.1017,
      "step": 3125
    },
    {
      "epoch": 0.98,
      "learning_rate": 1.504970860473089e-06,
      "loss": 0.1015,
      "step": 3150
    },
    {
      "epoch": 0.99,
      "learning_rate": 1.4226945491943777e-06,
      "loss": 0.101,
      "step": 3175
    },
    {
      "epoch": 0.99,
      "learning_rate": 1.3541309564621189e-06,
      "loss": 0.1017,
      "step": 3200
    },
    {
      "epoch": 1.0,
      "step": 3217,
      "total_flos": 1.3990082808835046e+22,
      "train_loss": 0.006861437764047724,
      "train_runtime": 11834.061,
      "train_samples_per_second": 348.009,
      "train_steps_per_second": 0.272
    }
  ],
  "logging_steps": 25,
  "max_steps": 3217,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 200,
  "total_flos": 1.3990082808835046e+22,
  "train_batch_size": 5,
  "trial_name": null,
  "trial_params": null
}
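For reference, a minimal sketch of how the `log_history` above can be inspected, assuming this file is saved as `trainer_state.json` (the standard name written by the Hugging Face `Trainer`) and that matplotlib is installed; the output filename is an illustrative choice, not part of the original run:

import json

import matplotlib.pyplot as plt

# Load the saved trainer state (path is an assumption for illustration).
with open("trainer_state.json") as f:
    state = json.load(f)

# Keep only per-step logging entries; the final entry holds run summaries
# (train_runtime, total_flos, ...) and has no "loss"/"learning_rate" keys.
logs = [e for e in state["log_history"] if "loss" in e and "learning_rate" in e]

steps = [e["step"] for e in logs]
losses = [e["loss"] for e in logs]
lrs = [e["learning_rate"] for e in logs]

# Plot training loss and the learning-rate schedule against global step.
fig, (ax_loss, ax_lr) = plt.subplots(2, 1, sharex=True)
ax_loss.plot(steps, losses)
ax_loss.set_ylabel("training loss")
ax_lr.plot(steps, lrs)
ax_lr.set_ylabel("learning rate")
ax_lr.set_xlabel("global step")
fig.tight_layout()
fig.savefig("trainer_state_curves.png")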