{
  "best_metric": 0.15087111294269562,
  "best_model_checkpoint": "./checkpoints/ultrafeedback_binarized/phi-2-ultrafeedback_binarized-lambda0.25-ORPO-17-4-15/checkpoint-5988",
  "epoch": 2.0,
  "eval_steps": 500,
  "global_step": 5988,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.016700066800267203,
      "grad_norm": 36.0,
      "learning_rate": 2.0000000000000002e-07,
      "loss": 0.5754,
      "step": 50
    },
    {
      "epoch": 0.033400133600534405,
      "grad_norm": 9.0,
      "learning_rate": 4.0000000000000003e-07,
      "loss": 0.3356,
      "step": 100
    },
    {
      "epoch": 0.050100200400801605,
      "grad_norm": 8.1875,
      "learning_rate": 6.000000000000001e-07,
      "loss": 0.2953,
      "step": 150
    },
    {
      "epoch": 0.06680026720106881,
      "grad_norm": 9.8125,
      "learning_rate": 8.000000000000001e-07,
      "loss": 0.2954,
      "step": 200
    },
    {
      "epoch": 0.08350033400133601,
      "grad_norm": 66.5,
      "learning_rate": 1.0000000000000002e-06,
      "loss": 0.2946,
      "step": 250
    },
    {
      "epoch": 0.10020040080160321,
      "grad_norm": 15.5625,
      "learning_rate": 1.2000000000000002e-06,
      "loss": 0.2982,
      "step": 300
    },
    {
      "epoch": 0.11690046760187041,
      "grad_norm": 9.0,
      "learning_rate": 1.4000000000000001e-06,
      "loss": 0.2912,
      "step": 350
    },
    {
      "epoch": 0.13360053440213762,
      "grad_norm": 18.125,
      "learning_rate": 1.6000000000000001e-06,
      "loss": 0.2994,
      "step": 400
    },
    {
      "epoch": 0.15030060120240482,
      "grad_norm": 14.75,
      "learning_rate": 1.8000000000000001e-06,
      "loss": 0.2869,
      "step": 450
    },
    {
      "epoch": 0.16700066800267202,
      "grad_norm": 7.4375,
      "learning_rate": 2.0000000000000003e-06,
      "loss": 0.2779,
      "step": 500
    },
    {
      "epoch": 0.18370073480293922,
      "grad_norm": 6.3125,
      "learning_rate": 2.2e-06,
      "loss": 0.2781,
      "step": 550
    },
    {
      "epoch": 0.20040080160320642,
      "grad_norm": 10.625,
      "learning_rate": 2.4000000000000003e-06,
      "loss": 0.2836,
      "step": 600
    },
    {
      "epoch": 0.21710086840347362,
      "grad_norm": 18.25,
      "learning_rate": 2.6e-06,
      "loss": 0.2825,
      "step": 650
    },
    {
      "epoch": 0.23380093520374082,
      "grad_norm": 9.875,
      "learning_rate": 2.8000000000000003e-06,
      "loss": 0.2932,
      "step": 700
    },
    {
      "epoch": 0.250501002004008,
      "grad_norm": 20.125,
      "learning_rate": 3e-06,
      "loss": 0.2681,
      "step": 750
    },
    {
      "epoch": 0.26720106880427524,
      "grad_norm": 9.8125,
      "learning_rate": 3.2000000000000003e-06,
      "loss": 0.2639,
      "step": 800
    },
    {
      "epoch": 0.28390113560454244,
      "grad_norm": 6.65625,
      "learning_rate": 3.4000000000000005e-06,
      "loss": 0.2706,
      "step": 850
    },
    {
      "epoch": 0.30060120240480964,
      "grad_norm": 15.875,
      "learning_rate": 3.6000000000000003e-06,
      "loss": 0.2742,
      "step": 900
    },
    {
      "epoch": 0.31730126920507684,
      "grad_norm": 8.375,
      "learning_rate": 3.8000000000000005e-06,
      "loss": 0.2729,
      "step": 950
    },
    {
      "epoch": 0.33400133600534404,
      "grad_norm": 6.65625,
      "learning_rate": 4.000000000000001e-06,
      "loss": 0.2595,
      "step": 1000
    },
    {
      "epoch": 0.35070140280561124,
      "grad_norm": 7.3125,
      "learning_rate": 4.2000000000000004e-06,
      "loss": 0.2596,
      "step": 1050
    },
    {
      "epoch": 0.36740146960587844,
      "grad_norm": 7.6875,
      "learning_rate": 4.4e-06,
      "loss": 0.2615,
      "step": 1100
    },
    {
      "epoch": 0.38410153640614564,
      "grad_norm": 12.125,
      "learning_rate": 4.600000000000001e-06,
      "loss": 0.2614,
      "step": 1150
    },
    {
      "epoch": 0.40080160320641284,
      "grad_norm": 20.375,
      "learning_rate": 4.800000000000001e-06,
      "loss": 0.2537,
      "step": 1200
    },
    {
      "epoch": 0.41750167000668004,
      "grad_norm": 8.75,
      "learning_rate": 5e-06,
      "loss": 0.2646,
      "step": 1250
    },
    {
      "epoch": 0.43420173680694724,
      "grad_norm": 14.9375,
      "learning_rate": 5.2e-06,
      "loss": 0.2609,
      "step": 1300
    },
    {
      "epoch": 0.45090180360721444,
      "grad_norm": 24.375,
      "learning_rate": 5.400000000000001e-06,
      "loss": 0.2541,
      "step": 1350
    },
    {
      "epoch": 0.46760187040748163,
      "grad_norm": 30.25,
      "learning_rate": 5.600000000000001e-06,
      "loss": 0.2584,
      "step": 1400
    },
    {
      "epoch": 0.48430193720774883,
      "grad_norm": 12.1875,
      "learning_rate": 5.8e-06,
      "loss": 0.2572,
      "step": 1450
    },
    {
      "epoch": 0.501002004008016,
      "grad_norm": 9.0625,
      "learning_rate": 6e-06,
      "loss": 0.2511,
      "step": 1500
    },
    {
      "epoch": 0.5177020708082832,
      "grad_norm": 26.25,
      "learning_rate": 6.200000000000001e-06,
      "loss": 0.2595,
      "step": 1550
    },
    {
      "epoch": 0.5344021376085505,
      "grad_norm": 12.1875,
      "learning_rate": 6.4000000000000006e-06,
      "loss": 0.2555,
      "step": 1600
    },
    {
      "epoch": 0.5511022044088176,
      "grad_norm": 14.375,
      "learning_rate": 6.600000000000001e-06,
      "loss": 0.2283,
      "step": 1650
    },
    {
      "epoch": 0.5678022712090849,
      "grad_norm": 15.6875,
      "learning_rate": 6.800000000000001e-06,
      "loss": 0.2375,
      "step": 1700
    },
    {
      "epoch": 0.584502338009352,
      "grad_norm": 7.3125,
      "learning_rate": 7e-06,
      "loss": 0.2378,
      "step": 1750
    },
    {
      "epoch": 0.6012024048096193,
      "grad_norm": 8.25,
      "learning_rate": 7.2000000000000005e-06,
      "loss": 0.231,
      "step": 1800
    },
    {
      "epoch": 0.6179024716098864,
      "grad_norm": 9.4375,
      "learning_rate": 7.4e-06,
      "loss": 0.2211,
      "step": 1850
    },
    {
      "epoch": 0.6346025384101537,
      "grad_norm": 10.1875,
      "learning_rate": 7.600000000000001e-06,
      "loss": 0.2427,
      "step": 1900
    },
    {
      "epoch": 0.6513026052104208,
      "grad_norm": 21.375,
      "learning_rate": 7.800000000000002e-06,
      "loss": 0.23,
      "step": 1950
    },
    {
      "epoch": 0.6680026720106881,
      "grad_norm": 6.0,
      "learning_rate": 8.000000000000001e-06,
      "loss": 0.2312,
      "step": 2000
    },
    {
      "epoch": 0.6847027388109552,
      "grad_norm": 12.1875,
      "learning_rate": 8.2e-06,
      "loss": 0.2334,
      "step": 2050
    },
    {
      "epoch": 0.7014028056112225,
      "grad_norm": 13.875,
      "learning_rate": 8.400000000000001e-06,
      "loss": 0.2316,
      "step": 2100
    },
    {
      "epoch": 0.7181028724114896,
      "grad_norm": 15.1875,
      "learning_rate": 8.6e-06,
      "loss": 0.2337,
      "step": 2150
    },
    {
      "epoch": 0.7348029392117569,
      "grad_norm": 9.0,
      "learning_rate": 8.8e-06,
      "loss": 0.2315,
      "step": 2200
    },
    {
      "epoch": 0.751503006012024,
      "grad_norm": 10.9375,
      "learning_rate": 9e-06,
      "loss": 0.2182,
      "step": 2250
    },
    {
      "epoch": 0.7682030728122913,
      "grad_norm": 10.4375,
      "learning_rate": 9.200000000000002e-06,
      "loss": 0.2326,
      "step": 2300
    },
    {
      "epoch": 0.7849031396125584,
      "grad_norm": 37.0,
      "learning_rate": 9.4e-06,
      "loss": 0.2186,
      "step": 2350
    },
    {
      "epoch": 0.8016032064128257,
      "grad_norm": 8.5,
      "learning_rate": 9.600000000000001e-06,
      "loss": 0.2195,
      "step": 2400
    },
    {
      "epoch": 0.8183032732130928,
      "grad_norm": 33.0,
      "learning_rate": 9.800000000000001e-06,
      "loss": 0.2154,
      "step": 2450
    },
    {
      "epoch": 0.8350033400133601,
      "grad_norm": 8.375,
      "learning_rate": 1e-05,
      "loss": 0.2301,
      "step": 2500
    },
    {
      "epoch": 0.8517034068136272,
      "grad_norm": 11.625,
      "learning_rate": 1.02e-05,
      "loss": 0.235,
      "step": 2550
    },
    {
      "epoch": 0.8684034736138945,
      "grad_norm": 5.25,
      "learning_rate": 1.04e-05,
      "loss": 0.2086,
      "step": 2600
    },
    {
      "epoch": 0.8851035404141616,
      "grad_norm": 10.0,
      "learning_rate": 1.0600000000000002e-05,
      "loss": 0.2183,
      "step": 2650
    },
    {
      "epoch": 0.9018036072144289,
      "grad_norm": 5.25,
      "learning_rate": 1.0800000000000002e-05,
      "loss": 0.2047,
      "step": 2700
    },
    {
      "epoch": 0.918503674014696,
      "grad_norm": 44.5,
      "learning_rate": 1.1000000000000001e-05,
      "loss": 0.206,
      "step": 2750
    },
    {
      "epoch": 0.9352037408149633,
      "grad_norm": 19.5,
      "learning_rate": 1.1200000000000001e-05,
      "loss": 0.2095,
      "step": 2800
    },
    {
      "epoch": 0.9519038076152304,
      "grad_norm": 9.5625,
      "learning_rate": 1.14e-05,
      "loss": 0.2039,
      "step": 2850
    },
    {
      "epoch": 0.9686038744154977,
      "grad_norm": 11.375,
      "learning_rate": 1.16e-05,
      "loss": 0.2096,
      "step": 2900
    },
    {
      "epoch": 0.9853039412157648,
      "grad_norm": 5.09375,
      "learning_rate": 1.18e-05,
      "loss": 0.2087,
      "step": 2950
    },
    {
      "epoch": 1.0,
      "eval_loss": 0.2021823674440384,
      "eval_runtime": 181.6869,
      "eval_samples_per_second": 8.872,
      "eval_steps_per_second": 0.556,
      "step": 2994
    },
    {
      "epoch": 1.002004008016032,
      "grad_norm": 19.0,
      "learning_rate": 1.2e-05,
      "loss": 0.2132,
      "step": 3000
    },
    {
      "epoch": 1.0187040748162992,
      "grad_norm": 7.46875,
      "learning_rate": 1.22e-05,
      "loss": 0.2114,
      "step": 3050
    },
    {
      "epoch": 1.0354041416165665,
      "grad_norm": 6.75,
      "learning_rate": 1.2400000000000002e-05,
      "loss": 0.2034,
      "step": 3100
    },
    {
      "epoch": 1.0521042084168337,
      "grad_norm": 6.46875,
      "learning_rate": 1.2600000000000001e-05,
      "loss": 0.1949,
      "step": 3150
    },
    {
      "epoch": 1.0688042752171008,
      "grad_norm": 5.625,
      "learning_rate": 1.2800000000000001e-05,
      "loss": 0.2028,
      "step": 3200
    },
    {
      "epoch": 1.085504342017368,
      "grad_norm": 5.34375,
      "learning_rate": 1.3000000000000001e-05,
      "loss": 0.2062,
      "step": 3250
    },
    {
      "epoch": 1.1022044088176353,
      "grad_norm": 7.4375,
      "learning_rate": 1.3200000000000002e-05,
      "loss": 0.1937,
      "step": 3300
    },
    {
      "epoch": 1.1189044756179025,
      "grad_norm": 44.25,
      "learning_rate": 1.3400000000000002e-05,
      "loss": 0.2108,
      "step": 3350
    },
    {
      "epoch": 1.1356045424181698,
      "grad_norm": 4.75,
      "learning_rate": 1.3600000000000002e-05,
      "loss": 0.201,
      "step": 3400
    },
    {
      "epoch": 1.1523046092184368,
      "grad_norm": 8.1875,
      "learning_rate": 1.38e-05,
      "loss": 0.2053,
      "step": 3450
    },
    {
      "epoch": 1.169004676018704,
      "grad_norm": 7.5625,
      "learning_rate": 1.4e-05,
      "loss": 0.1996,
      "step": 3500
    },
    {
      "epoch": 1.1857047428189713,
      "grad_norm": 7.125,
      "learning_rate": 1.4200000000000001e-05,
      "loss": 0.2019,
      "step": 3550
    },
    {
      "epoch": 1.2024048096192386,
      "grad_norm": 6.40625,
      "learning_rate": 1.4400000000000001e-05,
      "loss": 0.2104,
      "step": 3600
    },
    {
      "epoch": 1.2191048764195056,
      "grad_norm": 6.09375,
      "learning_rate": 1.46e-05,
      "loss": 0.1958,
      "step": 3650
    },
    {
      "epoch": 1.2358049432197729,
      "grad_norm": 10.6875,
      "learning_rate": 1.48e-05,
      "loss": 0.2022,
      "step": 3700
    },
    {
      "epoch": 1.25250501002004,
      "grad_norm": 7.71875,
      "learning_rate": 1.5000000000000002e-05,
      "loss": 0.2028,
      "step": 3750
    },
    {
      "epoch": 1.2692050768203074,
      "grad_norm": 10.5,
      "learning_rate": 1.5200000000000002e-05,
      "loss": 0.1977,
      "step": 3800
    },
    {
      "epoch": 1.2859051436205746,
      "grad_norm": 8.125,
      "learning_rate": 1.54e-05,
      "loss": 0.2056,
      "step": 3850
    },
    {
      "epoch": 1.3026052104208417,
      "grad_norm": 5.0625,
      "learning_rate": 1.5600000000000003e-05,
      "loss": 0.2007,
      "step": 3900
    },
    {
      "epoch": 1.319305277221109,
      "grad_norm": 9.3125,
      "learning_rate": 1.58e-05,
      "loss": 0.1993,
      "step": 3950
    },
    {
      "epoch": 1.3360053440213762,
      "grad_norm": 6.6875,
      "learning_rate": 1.6000000000000003e-05,
      "loss": 0.2112,
      "step": 4000
    },
    {
      "epoch": 1.3527054108216432,
      "grad_norm": 7.65625,
      "learning_rate": 1.62e-05,
      "loss": 0.2082,
      "step": 4050
    },
    {
      "epoch": 1.3694054776219104,
      "grad_norm": 4.875,
      "learning_rate": 1.64e-05,
      "loss": 0.2062,
      "step": 4100
    },
    {
      "epoch": 1.3861055444221777,
      "grad_norm": 6.0625,
      "learning_rate": 1.66e-05,
      "loss": 0.1875,
      "step": 4150
    },
    {
      "epoch": 1.402805611222445,
      "grad_norm": 6.71875,
      "learning_rate": 1.6800000000000002e-05,
      "loss": 0.2083,
      "step": 4200
    },
    {
      "epoch": 1.4195056780227122,
      "grad_norm": 5.21875,
      "learning_rate": 1.7e-05,
      "loss": 0.2101,
      "step": 4250
    },
    {
      "epoch": 1.4362057448229792,
      "grad_norm": 9.375,
      "learning_rate": 1.72e-05,
      "loss": 0.2163,
      "step": 4300
    },
    {
      "epoch": 1.4529058116232465,
      "grad_norm": 6.75,
      "learning_rate": 1.7400000000000003e-05,
      "loss": 0.2048,
      "step": 4350
    },
    {
      "epoch": 1.4696058784235138,
      "grad_norm": 6.75,
      "learning_rate": 1.76e-05,
      "loss": 0.2035,
      "step": 4400
    },
    {
      "epoch": 1.4863059452237808,
      "grad_norm": 10.75,
      "learning_rate": 1.7800000000000002e-05,
      "loss": 0.2062,
      "step": 4450
    },
    {
      "epoch": 1.503006012024048,
      "grad_norm": 6.71875,
      "learning_rate": 1.8e-05,
      "loss": 0.1976,
      "step": 4500
    },
    {
      "epoch": 1.5197060788243153,
      "grad_norm": 8.6875,
      "learning_rate": 1.8200000000000002e-05,
      "loss": 0.1941,
      "step": 4550
    },
    {
      "epoch": 1.5364061456245826,
      "grad_norm": 14.0,
      "learning_rate": 1.8400000000000003e-05,
      "loss": 0.1868,
      "step": 4600
    },
    {
      "epoch": 1.5531062124248498,
      "grad_norm": 4.875,
      "learning_rate": 1.86e-05,
      "loss": 0.2056,
      "step": 4650
    },
    {
      "epoch": 1.569806279225117,
      "grad_norm": 13.75,
      "learning_rate": 1.88e-05,
      "loss": 0.1937,
      "step": 4700
    },
    {
      "epoch": 1.586506346025384,
      "grad_norm": 9.25,
      "learning_rate": 1.9e-05,
      "loss": 0.2051,
      "step": 4750
    },
    {
      "epoch": 1.6032064128256514,
      "grad_norm": 16.375,
      "learning_rate": 1.9200000000000003e-05,
      "loss": 0.21,
      "step": 4800
    },
    {
      "epoch": 1.6199064796259184,
      "grad_norm": 10.125,
      "learning_rate": 1.94e-05,
      "loss": 0.1873,
      "step": 4850
    },
    {
      "epoch": 1.6366065464261856,
      "grad_norm": 9.625,
      "learning_rate": 1.9600000000000002e-05,
      "loss": 0.1909,
      "step": 4900
    },
    {
      "epoch": 1.653306613226453,
      "grad_norm": 11.0,
      "learning_rate": 1.98e-05,
      "loss": 0.1844,
      "step": 4950
    },
    {
      "epoch": 1.6700066800267201,
      "grad_norm": 5.65625,
      "learning_rate": 2e-05,
      "loss": 0.198,
      "step": 5000
    },
    {
      "epoch": 1.6867067468269874,
      "grad_norm": 4.625,
      "learning_rate": 1.9873880897766597e-05,
      "loss": 0.1871,
      "step": 5050
    },
    {
      "epoch": 1.7034068136272547,
      "grad_norm": 57.5,
      "learning_rate": 1.949870479665602e-05,
      "loss": 0.181,
      "step": 5100
    },
    {
      "epoch": 1.7201068804275217,
      "grad_norm": 16.75,
      "learning_rate": 1.888393507127856e-05,
      "loss": 0.166,
      "step": 5150
    },
    {
      "epoch": 1.736806947227789,
      "grad_norm": 502.0,
      "learning_rate": 1.8045078562803203e-05,
      "loss": 0.3851,
      "step": 5200
    },
    {
      "epoch": 1.753507014028056,
      "grad_norm": 135.0,
      "learning_rate": 1.7003294437180254e-05,
      "loss": 0.2137,
      "step": 5250
    },
    {
      "epoch": 1.7702070808283232,
      "grad_norm": 62.5,
      "learning_rate": 1.5784860470138633e-05,
      "loss": 0.1884,
      "step": 5300
    },
    {
      "epoch": 1.7869071476285905,
      "grad_norm": 5.53125,
      "learning_rate": 1.4420510221289137e-05,
      "loss": 0.1631,
      "step": 5350
    },
    {
      "epoch": 1.8036072144288577,
      "grad_norm": 175.0,
      "learning_rate": 1.2944657816335124e-05,
      "loss": 0.2435,
      "step": 5400
    },
    {
      "epoch": 1.820307281229125,
      "grad_norm": 109.0,
      "learning_rate": 1.139452989134496e-05,
      "loss": 0.2224,
      "step": 5450
    },
    {
      "epoch": 1.8370073480293923,
      "grad_norm": 140.0,
      "learning_rate": 9.809226594767979e-06,
      "loss": 0.1926,
      "step": 5500
    },
    {
      "epoch": 1.8537074148296593,
      "grad_norm": 129.0,
      "learning_rate": 8.228735332310575e-06,
      "loss": 0.1689,
      "step": 5550
    },
    {
      "epoch": 1.8704074816299265,
      "grad_norm": 30.25,
      "learning_rate": 6.692922131794517e-06,
      "loss": 0.143,
      "step": 5600
    },
    {
      "epoch": 1.8871075484301936,
      "grad_norm": 74.0,
      "learning_rate": 5.240526069629265e-06,
      "loss": 0.1634,
      "step": 5650
    },
    {
      "epoch": 1.9038076152304608,
      "grad_norm": 43.0,
      "learning_rate": 3.908182123304344e-06,
      "loss": 0.1372,
      "step": 5700
    },
    {
      "epoch": 1.920507682030728,
      "grad_norm": 42.5,
      "learning_rate": 2.729497097295075e-06,
      "loss": 0.1331,
      "step": 5750
    },
    {
      "epoch": 1.9372077488309953,
      "grad_norm": 448.0,
      "learning_rate": 1.7342019310607062e-06,
      "loss": 0.137,
      "step": 5800
    },
    {
      "epoch": 1.9539078156312626,
      "grad_norm": 44.5,
      "learning_rate": 9.474017711657835e-07,
      "loss": 0.1754,
      "step": 5850
    },
    {
      "epoch": 1.9706078824315298,
      "grad_norm": 64.0,
      "learning_rate": 3.889427235709153e-07,
      "loss": 0.1797,
      "step": 5900
    },
    {
      "epoch": 1.9873079492317969,
      "grad_norm": 241.0,
      "learning_rate": 7.291125901946027e-08,
      "loss": 0.1781,
      "step": 5950
    },
    {
      "epoch": 2.0,
      "eval_loss": 0.15087111294269562,
      "eval_runtime": 181.739,
      "eval_samples_per_second": 8.87,
      "eval_steps_per_second": 0.556,
      "step": 5988
    }
  ],
  "logging_steps": 50,
  "max_steps": 5988,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 2,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 1.5590253079717478e+18,
  "train_batch_size": 16,
  "trial_name": null,
  "trial_params": null
}