| { |
| "best_metric": null, |
| "best_model_checkpoint": null, |
| "epoch": 1.984375, |
| "eval_steps": 500, |
| "global_step": 170, |
| "is_hyper_param_search": false, |
| "is_local_process_zero": true, |
| "is_world_process_zero": true, |
| "log_history": [ |
| { |
| "epoch": 0.01171875, |
| "grad_norm": 34.936946868896484, |
| "learning_rate": 5.0000000000000004e-08, |
| "loss": 4.5195, |
| "step": 1 |
| }, |
| { |
| "epoch": 0.0234375, |
| "grad_norm": 35.17010498046875, |
| "learning_rate": 1.0000000000000001e-07, |
| "loss": 4.5214, |
| "step": 2 |
| }, |
| { |
| "epoch": 0.03515625, |
| "grad_norm": 36.44556427001953, |
| "learning_rate": 1.5000000000000002e-07, |
| "loss": 4.5497, |
| "step": 3 |
| }, |
| { |
| "epoch": 0.046875, |
| "grad_norm": 35.30234146118164, |
| "learning_rate": 2.0000000000000002e-07, |
| "loss": 4.5195, |
| "step": 4 |
| }, |
| { |
| "epoch": 0.05859375, |
| "grad_norm": 35.90467834472656, |
| "learning_rate": 2.5000000000000004e-07, |
| "loss": 4.5572, |
| "step": 5 |
| }, |
| { |
| "epoch": 0.0703125, |
| "grad_norm": 35.25331497192383, |
| "learning_rate": 3.0000000000000004e-07, |
| "loss": 4.522, |
| "step": 6 |
| }, |
| { |
| "epoch": 0.08203125, |
| "grad_norm": 35.77690124511719, |
| "learning_rate": 3.5000000000000004e-07, |
| "loss": 4.532, |
| "step": 7 |
| }, |
| { |
| "epoch": 0.09375, |
| "grad_norm": 35.59796905517578, |
| "learning_rate": 4.0000000000000003e-07, |
| "loss": 4.5413, |
| "step": 8 |
| }, |
| { |
| "epoch": 0.10546875, |
| "grad_norm": 34.550079345703125, |
| "learning_rate": 4.5000000000000003e-07, |
| "loss": 4.444, |
| "step": 9 |
| }, |
| { |
| "epoch": 0.1171875, |
| "grad_norm": 35.464324951171875, |
| "learning_rate": 5.000000000000001e-07, |
| "loss": 4.3859, |
| "step": 10 |
| }, |
| { |
| "epoch": 0.12890625, |
| "grad_norm": 34.73582458496094, |
| "learning_rate": 5.5e-07, |
| "loss": 4.4206, |
| "step": 11 |
| }, |
| { |
| "epoch": 0.140625, |
| "grad_norm": 34.19432067871094, |
| "learning_rate": 6.000000000000001e-07, |
| "loss": 4.3844, |
| "step": 12 |
| }, |
| { |
| "epoch": 0.15234375, |
| "grad_norm": 33.5693473815918, |
| "learning_rate": 6.5e-07, |
| "loss": 4.3484, |
| "step": 13 |
| }, |
| { |
| "epoch": 0.1640625, |
| "grad_norm": 33.903499603271484, |
| "learning_rate": 7.000000000000001e-07, |
| "loss": 4.3156, |
| "step": 14 |
| }, |
| { |
| "epoch": 0.17578125, |
| "grad_norm": 31.61307716369629, |
| "learning_rate": 7.5e-07, |
| "loss": 4.1674, |
| "step": 15 |
| }, |
| { |
| "epoch": 0.1875, |
| "grad_norm": 29.78888702392578, |
| "learning_rate": 8.000000000000001e-07, |
| "loss": 4.0061, |
| "step": 16 |
| }, |
| { |
| "epoch": 0.19921875, |
| "grad_norm": 28.59884262084961, |
| "learning_rate": 8.500000000000001e-07, |
| "loss": 3.9706, |
| "step": 17 |
| }, |
| { |
| "epoch": 0.2109375, |
| "grad_norm": 25.763689041137695, |
| "learning_rate": 9.000000000000001e-07, |
| "loss": 3.7961, |
| "step": 18 |
| }, |
| { |
| "epoch": 0.22265625, |
| "grad_norm": 24.51258659362793, |
| "learning_rate": 9.500000000000001e-07, |
| "loss": 3.7835, |
| "step": 19 |
| }, |
| { |
| "epoch": 0.234375, |
| "grad_norm": 22.484188079833984, |
| "learning_rate": 1.0000000000000002e-06, |
| "loss": 3.5655, |
| "step": 20 |
| }, |
| { |
| "epoch": 0.24609375, |
| "grad_norm": 20.98280143737793, |
| "learning_rate": 1.0500000000000001e-06, |
| "loss": 3.483, |
| "step": 21 |
| }, |
| { |
| "epoch": 0.2578125, |
| "grad_norm": 19.596670150756836, |
| "learning_rate": 1.1e-06, |
| "loss": 3.3479, |
| "step": 22 |
| }, |
| { |
| "epoch": 0.26953125, |
| "grad_norm": 18.059001922607422, |
| "learning_rate": 1.1500000000000002e-06, |
| "loss": 3.1264, |
| "step": 23 |
| }, |
| { |
| "epoch": 0.28125, |
| "grad_norm": 17.812040328979492, |
| "learning_rate": 1.2000000000000002e-06, |
| "loss": 3.1063, |
| "step": 24 |
| }, |
| { |
| "epoch": 0.29296875, |
| "grad_norm": 18.554685592651367, |
| "learning_rate": 1.25e-06, |
| "loss": 2.9743, |
| "step": 25 |
| }, |
| { |
| "epoch": 0.3046875, |
| "grad_norm": 19.290931701660156, |
| "learning_rate": 1.3e-06, |
| "loss": 2.7898, |
| "step": 26 |
| }, |
| { |
| "epoch": 0.31640625, |
| "grad_norm": 20.526958465576172, |
| "learning_rate": 1.3500000000000002e-06, |
| "loss": 2.6767, |
| "step": 27 |
| }, |
| { |
| "epoch": 0.328125, |
| "grad_norm": 20.140962600708008, |
| "learning_rate": 1.4000000000000001e-06, |
| "loss": 2.476, |
| "step": 28 |
| }, |
| { |
| "epoch": 0.33984375, |
| "grad_norm": 18.80613136291504, |
| "learning_rate": 1.45e-06, |
| "loss": 2.4107, |
| "step": 29 |
| }, |
| { |
| "epoch": 0.3515625, |
| "grad_norm": 16.77231788635254, |
| "learning_rate": 1.5e-06, |
| "loss": 2.2789, |
| "step": 30 |
| }, |
| { |
| "epoch": 0.36328125, |
| "grad_norm": 15.546438217163086, |
| "learning_rate": 1.5500000000000002e-06, |
| "loss": 2.118, |
| "step": 31 |
| }, |
| { |
| "epoch": 0.375, |
| "grad_norm": 15.454617500305176, |
| "learning_rate": 1.6000000000000001e-06, |
| "loss": 1.9487, |
| "step": 32 |
| }, |
| { |
| "epoch": 0.38671875, |
| "grad_norm": 15.873698234558105, |
| "learning_rate": 1.6500000000000003e-06, |
| "loss": 1.8438, |
| "step": 33 |
| }, |
| { |
| "epoch": 0.3984375, |
| "grad_norm": 15.691689491271973, |
| "learning_rate": 1.7000000000000002e-06, |
| "loss": 1.6935, |
| "step": 34 |
| }, |
| { |
| "epoch": 0.41015625, |
| "grad_norm": 15.127069473266602, |
| "learning_rate": 1.75e-06, |
| "loss": 1.5188, |
| "step": 35 |
| }, |
| { |
| "epoch": 0.421875, |
| "grad_norm": 14.441194534301758, |
| "learning_rate": 1.8000000000000001e-06, |
| "loss": 1.345, |
| "step": 36 |
| }, |
| { |
| "epoch": 0.43359375, |
| "grad_norm": 14.392739295959473, |
| "learning_rate": 1.85e-06, |
| "loss": 1.2395, |
| "step": 37 |
| }, |
| { |
| "epoch": 0.4453125, |
| "grad_norm": 14.283534049987793, |
| "learning_rate": 1.9000000000000002e-06, |
| "loss": 1.0954, |
| "step": 38 |
| }, |
| { |
| "epoch": 0.45703125, |
| "grad_norm": 13.821670532226562, |
| "learning_rate": 1.9500000000000004e-06, |
| "loss": 0.9594, |
| "step": 39 |
| }, |
| { |
| "epoch": 0.46875, |
| "grad_norm": 13.498543739318848, |
| "learning_rate": 2.0000000000000003e-06, |
| "loss": 0.7972, |
| "step": 40 |
| }, |
| { |
| "epoch": 0.48046875, |
| "grad_norm": 12.805685997009277, |
| "learning_rate": 2.05e-06, |
| "loss": 0.6578, |
| "step": 41 |
| }, |
| { |
| "epoch": 0.4921875, |
| "grad_norm": 11.584136962890625, |
| "learning_rate": 2.1000000000000002e-06, |
| "loss": 0.5148, |
| "step": 42 |
| }, |
| { |
| "epoch": 0.50390625, |
| "grad_norm": 9.801186561584473, |
| "learning_rate": 2.15e-06, |
| "loss": 0.416, |
| "step": 43 |
| }, |
| { |
| "epoch": 0.515625, |
| "grad_norm": 9.404278755187988, |
| "learning_rate": 2.2e-06, |
| "loss": 0.3419, |
| "step": 44 |
| }, |
| { |
| "epoch": 0.52734375, |
| "grad_norm": 7.185790061950684, |
| "learning_rate": 2.25e-06, |
| "loss": 0.24, |
| "step": 45 |
| }, |
| { |
| "epoch": 0.5390625, |
| "grad_norm": 4.915873050689697, |
| "learning_rate": 2.3000000000000004e-06, |
| "loss": 0.1769, |
| "step": 46 |
| }, |
| { |
| "epoch": 0.55078125, |
| "grad_norm": 2.8505806922912598, |
| "learning_rate": 2.35e-06, |
| "loss": 0.1435, |
| "step": 47 |
| }, |
| { |
| "epoch": 0.5625, |
| "grad_norm": 1.7713755369186401, |
| "learning_rate": 2.4000000000000003e-06, |
| "loss": 0.119, |
| "step": 48 |
| }, |
| { |
| "epoch": 0.57421875, |
| "grad_norm": 1.5673104524612427, |
| "learning_rate": 2.4500000000000003e-06, |
| "loss": 0.1064, |
| "step": 49 |
| }, |
| { |
| "epoch": 0.5859375, |
| "grad_norm": 1.3322486877441406, |
| "learning_rate": 2.5e-06, |
| "loss": 0.0995, |
| "step": 50 |
| }, |
| { |
| "epoch": 0.59765625, |
| "grad_norm": 1.2245675325393677, |
| "learning_rate": 2.55e-06, |
| "loss": 0.0889, |
| "step": 51 |
| }, |
| { |
| "epoch": 0.609375, |
| "grad_norm": 0.8981238007545471, |
| "learning_rate": 2.6e-06, |
| "loss": 0.0847, |
| "step": 52 |
| }, |
| { |
| "epoch": 0.62109375, |
| "grad_norm": 0.6747678518295288, |
| "learning_rate": 2.6500000000000005e-06, |
| "loss": 0.0731, |
| "step": 53 |
| }, |
| { |
| "epoch": 0.6328125, |
| "grad_norm": 0.7690777778625488, |
| "learning_rate": 2.7000000000000004e-06, |
| "loss": 0.0812, |
| "step": 54 |
| }, |
| { |
| "epoch": 0.64453125, |
| "grad_norm": 0.9659140706062317, |
| "learning_rate": 2.7500000000000004e-06, |
| "loss": 0.0827, |
| "step": 55 |
| }, |
| { |
| "epoch": 0.65625, |
| "grad_norm": 0.7463306784629822, |
| "learning_rate": 2.8000000000000003e-06, |
| "loss": 0.0734, |
| "step": 56 |
| }, |
| { |
| "epoch": 0.66796875, |
| "grad_norm": 0.6388681530952454, |
| "learning_rate": 2.85e-06, |
| "loss": 0.0741, |
| "step": 57 |
| }, |
| { |
| "epoch": 0.6796875, |
| "grad_norm": 0.5176859498023987, |
| "learning_rate": 2.9e-06, |
| "loss": 0.0687, |
| "step": 58 |
| }, |
| { |
| "epoch": 0.69140625, |
| "grad_norm": 0.5809365510940552, |
| "learning_rate": 2.95e-06, |
| "loss": 0.0759, |
| "step": 59 |
| }, |
| { |
| "epoch": 0.703125, |
| "grad_norm": 0.4900883436203003, |
| "learning_rate": 3e-06, |
| "loss": 0.0637, |
| "step": 60 |
| }, |
| { |
| "epoch": 0.71484375, |
| "grad_norm": 0.46506503224372864, |
| "learning_rate": 3.05e-06, |
| "loss": 0.066, |
| "step": 61 |
| }, |
| { |
| "epoch": 0.7265625, |
| "grad_norm": 0.4315766394138336, |
| "learning_rate": 3.1000000000000004e-06, |
| "loss": 0.0619, |
| "step": 62 |
| }, |
| { |
| "epoch": 0.73828125, |
| "grad_norm": 0.6561943888664246, |
| "learning_rate": 3.1500000000000003e-06, |
| "loss": 0.0718, |
| "step": 63 |
| }, |
| { |
| "epoch": 0.75, |
| "grad_norm": 0.44578853249549866, |
| "learning_rate": 3.2000000000000003e-06, |
| "loss": 0.0662, |
| "step": 64 |
| }, |
| { |
| "epoch": 0.76171875, |
| "grad_norm": 0.352586567401886, |
| "learning_rate": 3.2500000000000002e-06, |
| "loss": 0.0612, |
| "step": 65 |
| }, |
| { |
| "epoch": 0.7734375, |
| "grad_norm": 0.3779674172401428, |
| "learning_rate": 3.3000000000000006e-06, |
| "loss": 0.0696, |
| "step": 66 |
| }, |
| { |
| "epoch": 0.78515625, |
| "grad_norm": 0.45150119066238403, |
| "learning_rate": 3.3500000000000005e-06, |
| "loss": 0.0665, |
| "step": 67 |
| }, |
| { |
| "epoch": 0.796875, |
| "grad_norm": 0.32507383823394775, |
| "learning_rate": 3.4000000000000005e-06, |
| "loss": 0.0652, |
| "step": 68 |
| }, |
| { |
| "epoch": 0.80859375, |
| "grad_norm": 0.3733697533607483, |
| "learning_rate": 3.45e-06, |
| "loss": 0.061, |
| "step": 69 |
| }, |
| { |
| "epoch": 0.8203125, |
| "grad_norm": 0.3483797609806061, |
| "learning_rate": 3.5e-06, |
| "loss": 0.067, |
| "step": 70 |
| }, |
| { |
| "epoch": 0.83203125, |
| "grad_norm": 0.2772752642631531, |
| "learning_rate": 3.5500000000000003e-06, |
| "loss": 0.0597, |
| "step": 71 |
| }, |
| { |
| "epoch": 0.84375, |
| "grad_norm": 0.32659995555877686, |
| "learning_rate": 3.6000000000000003e-06, |
| "loss": 0.0657, |
| "step": 72 |
| }, |
| { |
| "epoch": 0.85546875, |
| "grad_norm": 0.3197411000728607, |
| "learning_rate": 3.65e-06, |
| "loss": 0.0649, |
| "step": 73 |
| }, |
| { |
| "epoch": 0.8671875, |
| "grad_norm": 0.36947256326675415, |
| "learning_rate": 3.7e-06, |
| "loss": 0.0666, |
| "step": 74 |
| }, |
| { |
| "epoch": 0.87890625, |
| "grad_norm": 0.2516244351863861, |
| "learning_rate": 3.7500000000000005e-06, |
| "loss": 0.0618, |
| "step": 75 |
| }, |
| { |
| "epoch": 0.890625, |
| "grad_norm": 0.27707308530807495, |
| "learning_rate": 3.8000000000000005e-06, |
| "loss": 0.0629, |
| "step": 76 |
| }, |
| { |
| "epoch": 0.90234375, |
| "grad_norm": 0.31011566519737244, |
| "learning_rate": 3.85e-06, |
| "loss": 0.0639, |
| "step": 77 |
| }, |
| { |
| "epoch": 0.9140625, |
| "grad_norm": 0.42964762449264526, |
| "learning_rate": 3.900000000000001e-06, |
| "loss": 0.0575, |
| "step": 78 |
| }, |
| { |
| "epoch": 0.92578125, |
| "grad_norm": 0.2871685326099396, |
| "learning_rate": 3.95e-06, |
| "loss": 0.056, |
| "step": 79 |
| }, |
| { |
| "epoch": 0.9375, |
| "grad_norm": 0.2593545615673065, |
| "learning_rate": 4.000000000000001e-06, |
| "loss": 0.0578, |
| "step": 80 |
| }, |
| { |
| "epoch": 0.94921875, |
| "grad_norm": 0.27979350090026855, |
| "learning_rate": 4.05e-06, |
| "loss": 0.0582, |
| "step": 81 |
| }, |
| { |
| "epoch": 0.9609375, |
| "grad_norm": 0.47774314880371094, |
| "learning_rate": 4.1e-06, |
| "loss": 0.0657, |
| "step": 82 |
| }, |
| { |
| "epoch": 0.97265625, |
| "grad_norm": 0.46003803610801697, |
| "learning_rate": 4.15e-06, |
| "loss": 0.0591, |
| "step": 83 |
| }, |
| { |
| "epoch": 0.984375, |
| "grad_norm": 0.28047817945480347, |
| "learning_rate": 4.2000000000000004e-06, |
| "loss": 0.0585, |
| "step": 84 |
| }, |
| { |
| "epoch": 0.99609375, |
| "grad_norm": 0.3024386763572693, |
| "learning_rate": 4.25e-06, |
| "loss": 0.0597, |
| "step": 85 |
| }, |
| { |
| "epoch": 1.0, |
| "grad_norm": 0.3024386763572693, |
| "learning_rate": 4.3e-06, |
| "loss": 0.0612, |
| "step": 86 |
| }, |
| { |
| "epoch": 1.01171875, |
| "grad_norm": 1.1462472677230835, |
| "learning_rate": 4.350000000000001e-06, |
| "loss": 0.0505, |
| "step": 87 |
| }, |
| { |
| "epoch": 1.0234375, |
| "grad_norm": 0.28574419021606445, |
| "learning_rate": 4.4e-06, |
| "loss": 0.0575, |
| "step": 88 |
| }, |
| { |
| "epoch": 1.03515625, |
| "grad_norm": 0.23733267188072205, |
| "learning_rate": 4.450000000000001e-06, |
| "loss": 0.0567, |
| "step": 89 |
| }, |
| { |
| "epoch": 1.046875, |
| "grad_norm": 0.25351643562316895, |
| "learning_rate": 4.5e-06, |
| "loss": 0.0595, |
| "step": 90 |
| }, |
| { |
| "epoch": 1.05859375, |
| "grad_norm": 0.3286800980567932, |
| "learning_rate": 4.5500000000000005e-06, |
| "loss": 0.056, |
| "step": 91 |
| }, |
| { |
| "epoch": 1.0703125, |
| "grad_norm": 0.2708681523799896, |
| "learning_rate": 4.600000000000001e-06, |
| "loss": 0.0541, |
| "step": 92 |
| }, |
| { |
| "epoch": 1.08203125, |
| "grad_norm": 0.30789315700531006, |
| "learning_rate": 4.65e-06, |
| "loss": 0.0583, |
| "step": 93 |
| }, |
| { |
| "epoch": 1.09375, |
| "grad_norm": 0.3800869882106781, |
| "learning_rate": 4.7e-06, |
| "loss": 0.059, |
| "step": 94 |
| }, |
| { |
| "epoch": 1.10546875, |
| "grad_norm": 0.2756796181201935, |
| "learning_rate": 4.75e-06, |
| "loss": 0.0587, |
| "step": 95 |
| }, |
| { |
| "epoch": 1.1171875, |
| "grad_norm": 0.29054075479507446, |
| "learning_rate": 4.800000000000001e-06, |
| "loss": 0.0545, |
| "step": 96 |
| }, |
| { |
| "epoch": 1.12890625, |
| "grad_norm": 0.24633410573005676, |
| "learning_rate": 4.85e-06, |
| "loss": 0.0576, |
| "step": 97 |
| }, |
| { |
| "epoch": 1.140625, |
| "grad_norm": 0.23658645153045654, |
| "learning_rate": 4.9000000000000005e-06, |
| "loss": 0.0566, |
| "step": 98 |
| }, |
| { |
| "epoch": 1.15234375, |
| "grad_norm": 0.2314174473285675, |
| "learning_rate": 4.95e-06, |
| "loss": 0.0535, |
| "step": 99 |
| }, |
| { |
| "epoch": 1.1640625, |
| "grad_norm": 0.1932941973209381, |
| "learning_rate": 5e-06, |
| "loss": 0.0576, |
| "step": 100 |
| }, |
| { |
| "epoch": 1.17578125, |
| "grad_norm": 0.22639156877994537, |
| "learning_rate": 4.999926609487568e-06, |
| "loss": 0.0519, |
| "step": 101 |
| }, |
| { |
| "epoch": 1.1875, |
| "grad_norm": 0.23717834055423737, |
| "learning_rate": 4.999706442259205e-06, |
| "loss": 0.0487, |
| "step": 102 |
| }, |
| { |
| "epoch": 1.19921875, |
| "grad_norm": 0.25690606236457825, |
| "learning_rate": 4.999339511241458e-06, |
| "loss": 0.0542, |
| "step": 103 |
| }, |
| { |
| "epoch": 1.2109375, |
| "grad_norm": 0.28705868124961853, |
| "learning_rate": 4.9988258379777334e-06, |
| "loss": 0.0534, |
| "step": 104 |
| }, |
| { |
| "epoch": 1.22265625, |
| "grad_norm": 0.24628855288028717, |
| "learning_rate": 4.998165452627025e-06, |
| "loss": 0.0536, |
| "step": 105 |
| }, |
| { |
| "epoch": 1.234375, |
| "grad_norm": 0.38239291310310364, |
| "learning_rate": 4.99735839396215e-06, |
| "loss": 0.0545, |
| "step": 106 |
| }, |
| { |
| "epoch": 1.24609375, |
| "grad_norm": 0.22199885547161102, |
| "learning_rate": 4.996404709367466e-06, |
| "loss": 0.0525, |
| "step": 107 |
| }, |
| { |
| "epoch": 1.2578125, |
| "grad_norm": 0.2303021103143692, |
| "learning_rate": 4.995304454836095e-06, |
| "loss": 0.0544, |
| "step": 108 |
| }, |
| { |
| "epoch": 1.26953125, |
| "grad_norm": 0.245191752910614, |
| "learning_rate": 4.994057694966632e-06, |
| "loss": 0.0521, |
| "step": 109 |
| }, |
| { |
| "epoch": 1.28125, |
| "grad_norm": 0.267315149307251, |
| "learning_rate": 4.992664502959351e-06, |
| "loss": 0.0503, |
| "step": 110 |
| }, |
| { |
| "epoch": 1.29296875, |
| "grad_norm": 0.23672722280025482, |
| "learning_rate": 4.991124960611916e-06, |
| "loss": 0.0471, |
| "step": 111 |
| }, |
| { |
| "epoch": 1.3046875, |
| "grad_norm": 0.2500607371330261, |
| "learning_rate": 4.989439158314566e-06, |
| "loss": 0.0547, |
| "step": 112 |
| }, |
| { |
| "epoch": 1.31640625, |
| "grad_norm": 0.27703896164894104, |
| "learning_rate": 4.9876071950448185e-06, |
| "loss": 0.0508, |
| "step": 113 |
| }, |
| { |
| "epoch": 1.328125, |
| "grad_norm": 0.271287739276886, |
| "learning_rate": 4.98562917836165e-06, |
| "loss": 0.049, |
| "step": 114 |
| }, |
| { |
| "epoch": 1.33984375, |
| "grad_norm": 0.25890663266181946, |
| "learning_rate": 4.983505224399188e-06, |
| "loss": 0.0509, |
| "step": 115 |
| }, |
| { |
| "epoch": 1.3515625, |
| "grad_norm": 0.2870018780231476, |
| "learning_rate": 4.9812354578598876e-06, |
| "loss": 0.0518, |
| "step": 116 |
| }, |
| { |
| "epoch": 1.36328125, |
| "grad_norm": 0.2725720703601837, |
| "learning_rate": 4.978820012007213e-06, |
| "loss": 0.047, |
| "step": 117 |
| }, |
| { |
| "epoch": 1.375, |
| "grad_norm": 0.19925664365291595, |
| "learning_rate": 4.976259028657812e-06, |
| "loss": 0.0493, |
| "step": 118 |
| }, |
| { |
| "epoch": 1.38671875, |
| "grad_norm": 0.22143645584583282, |
| "learning_rate": 4.973552658173186e-06, |
| "loss": 0.0514, |
| "step": 119 |
| }, |
| { |
| "epoch": 1.3984375, |
| "grad_norm": 0.22685761749744415, |
| "learning_rate": 4.970701059450872e-06, |
| "loss": 0.0525, |
| "step": 120 |
| }, |
| { |
| "epoch": 1.41015625, |
| "grad_norm": 0.21121999621391296, |
| "learning_rate": 4.9677043999151e-06, |
| "loss": 0.051, |
| "step": 121 |
| }, |
| { |
| "epoch": 1.421875, |
| "grad_norm": 0.23914238810539246, |
| "learning_rate": 4.964562855506976e-06, |
| "loss": 0.0469, |
| "step": 122 |
| }, |
| { |
| "epoch": 1.43359375, |
| "grad_norm": 0.24409078061580658, |
| "learning_rate": 4.961276610674141e-06, |
| "loss": 0.0508, |
| "step": 123 |
| }, |
| { |
| "epoch": 1.4453125, |
| "grad_norm": 0.23388482630252838, |
| "learning_rate": 4.9578458583599495e-06, |
| "loss": 0.051, |
| "step": 124 |
| }, |
| { |
| "epoch": 1.45703125, |
| "grad_norm": 0.23801520466804504, |
| "learning_rate": 4.954270799992138e-06, |
| "loss": 0.0506, |
| "step": 125 |
| }, |
| { |
| "epoch": 1.46875, |
| "grad_norm": 0.24522367119789124, |
| "learning_rate": 4.950551645470998e-06, |
| "loss": 0.0496, |
| "step": 126 |
| }, |
| { |
| "epoch": 1.48046875, |
| "grad_norm": 0.22810965776443481, |
| "learning_rate": 4.9466886131570565e-06, |
| "loss": 0.0493, |
| "step": 127 |
| }, |
| { |
| "epoch": 1.4921875, |
| "grad_norm": 0.21910196542739868, |
| "learning_rate": 4.942681929858249e-06, |
| "loss": 0.048, |
| "step": 128 |
| }, |
| { |
| "epoch": 1.50390625, |
| "grad_norm": 0.22025763988494873, |
| "learning_rate": 4.9385318308166065e-06, |
| "loss": 0.0448, |
| "step": 129 |
| }, |
| { |
| "epoch": 1.515625, |
| "grad_norm": 0.2766706049442291, |
| "learning_rate": 4.934238559694448e-06, |
| "loss": 0.0463, |
| "step": 130 |
| }, |
| { |
| "epoch": 1.52734375, |
| "grad_norm": 0.22292804718017578, |
| "learning_rate": 4.929802368560066e-06, |
| "loss": 0.0433, |
| "step": 131 |
| }, |
| { |
| "epoch": 1.5390625, |
| "grad_norm": 0.312200129032135, |
| "learning_rate": 4.925223517872934e-06, |
| "loss": 0.0477, |
| "step": 132 |
| }, |
| { |
| "epoch": 1.55078125, |
| "grad_norm": 0.21522150933742523, |
| "learning_rate": 4.920502276468408e-06, |
| "loss": 0.0476, |
| "step": 133 |
| }, |
| { |
| "epoch": 1.5625, |
| "grad_norm": 0.2599612772464752, |
| "learning_rate": 4.915638921541952e-06, |
| "loss": 0.0472, |
| "step": 134 |
| }, |
| { |
| "epoch": 1.57421875, |
| "grad_norm": 0.2079230546951294, |
| "learning_rate": 4.9106337386328524e-06, |
| "loss": 0.0437, |
| "step": 135 |
| }, |
| { |
| "epoch": 1.5859375, |
| "grad_norm": 0.23645609617233276, |
| "learning_rate": 4.905487021607462e-06, |
| "loss": 0.0418, |
| "step": 136 |
| }, |
| { |
| "epoch": 1.59765625, |
| "grad_norm": 0.24720944464206696, |
| "learning_rate": 4.900199072641937e-06, |
| "loss": 0.0471, |
| "step": 137 |
| }, |
| { |
| "epoch": 1.609375, |
| "grad_norm": 0.24982096254825592, |
| "learning_rate": 4.894770202204509e-06, |
| "loss": 0.0427, |
| "step": 138 |
| }, |
| { |
| "epoch": 1.62109375, |
| "grad_norm": 0.2666509747505188, |
| "learning_rate": 4.889200729037241e-06, |
| "loss": 0.0457, |
| "step": 139 |
| }, |
| { |
| "epoch": 1.6328125, |
| "grad_norm": 0.2664203345775604, |
| "learning_rate": 4.883490980137327e-06, |
| "loss": 0.0467, |
| "step": 140 |
| }, |
| { |
| "epoch": 1.64453125, |
| "grad_norm": 0.250724732875824, |
| "learning_rate": 4.8776412907378845e-06, |
| "loss": 0.0389, |
| "step": 141 |
| }, |
| { |
| "epoch": 1.65625, |
| "grad_norm": 0.3050763010978699, |
| "learning_rate": 4.871652004288275e-06, |
| "loss": 0.0447, |
| "step": 142 |
| }, |
| { |
| "epoch": 1.66796875, |
| "grad_norm": 0.3108248710632324, |
| "learning_rate": 4.865523472433942e-06, |
| "loss": 0.0486, |
| "step": 143 |
| }, |
| { |
| "epoch": 1.6796875, |
| "grad_norm": 0.24726933240890503, |
| "learning_rate": 4.859256054995758e-06, |
| "loss": 0.0425, |
| "step": 144 |
| }, |
| { |
| "epoch": 1.69140625, |
| "grad_norm": 0.2529606819152832, |
| "learning_rate": 4.8528501199489045e-06, |
| "loss": 0.0468, |
| "step": 145 |
| }, |
| { |
| "epoch": 1.703125, |
| "grad_norm": 0.2741091847419739, |
| "learning_rate": 4.846306043401268e-06, |
| "loss": 0.043, |
| "step": 146 |
| }, |
| { |
| "epoch": 1.71484375, |
| "grad_norm": 0.22653311491012573, |
| "learning_rate": 4.839624209571352e-06, |
| "loss": 0.0436, |
| "step": 147 |
| }, |
| { |
| "epoch": 1.7265625, |
| "grad_norm": 0.2258974015712738, |
| "learning_rate": 4.832805010765724e-06, |
| "loss": 0.0489, |
| "step": 148 |
| }, |
| { |
| "epoch": 1.73828125, |
| "grad_norm": 0.2763097584247589, |
| "learning_rate": 4.8258488473559794e-06, |
| "loss": 0.0408, |
| "step": 149 |
| }, |
| { |
| "epoch": 1.75, |
| "grad_norm": 0.23488503694534302, |
| "learning_rate": 4.8187561277552376e-06, |
| "loss": 0.0396, |
| "step": 150 |
| }, |
| { |
| "epoch": 1.76171875, |
| "grad_norm": 0.2815094590187073, |
| "learning_rate": 4.811527268394157e-06, |
| "loss": 0.0422, |
| "step": 151 |
| }, |
| { |
| "epoch": 1.7734375, |
| "grad_norm": 0.2870430648326874, |
| "learning_rate": 4.804162693696494e-06, |
| "loss": 0.0417, |
| "step": 152 |
| }, |
| { |
| "epoch": 1.78515625, |
| "grad_norm": 0.34226351976394653, |
| "learning_rate": 4.796662836054176e-06, |
| "loss": 0.0453, |
| "step": 153 |
| }, |
| { |
| "epoch": 1.796875, |
| "grad_norm": 0.3671925961971283, |
| "learning_rate": 4.789028135801919e-06, |
| "loss": 0.043, |
| "step": 154 |
| }, |
| { |
| "epoch": 1.80859375, |
| "grad_norm": 0.26639413833618164, |
| "learning_rate": 4.7812590411913755e-06, |
| "loss": 0.0437, |
| "step": 155 |
| }, |
| { |
| "epoch": 1.8203125, |
| "grad_norm": 0.24375365674495697, |
| "learning_rate": 4.773356008364812e-06, |
| "loss": 0.0376, |
| "step": 156 |
| }, |
| { |
| "epoch": 1.83203125, |
| "grad_norm": 0.23922622203826904, |
| "learning_rate": 4.765319501328332e-06, |
| "loss": 0.0459, |
| "step": 157 |
| }, |
| { |
| "epoch": 1.84375, |
| "grad_norm": 0.21677494049072266, |
| "learning_rate": 4.757149991924633e-06, |
| "loss": 0.0429, |
| "step": 158 |
| }, |
| { |
| "epoch": 1.85546875, |
| "grad_norm": 0.2717846632003784, |
| "learning_rate": 4.748847959805297e-06, |
| "loss": 0.0403, |
| "step": 159 |
| }, |
| { |
| "epoch": 1.8671875, |
| "grad_norm": 0.23092707991600037, |
| "learning_rate": 4.740413892402639e-06, |
| "loss": 0.0417, |
| "step": 160 |
| }, |
| { |
| "epoch": 1.87890625, |
| "grad_norm": 0.23728156089782715, |
| "learning_rate": 4.731848284901082e-06, |
| "loss": 0.0445, |
| "step": 161 |
| }, |
| { |
| "epoch": 1.890625, |
| "grad_norm": 0.22474321722984314, |
| "learning_rate": 4.723151640208084e-06, |
| "loss": 0.0409, |
| "step": 162 |
| }, |
| { |
| "epoch": 1.90234375, |
| "grad_norm": 0.22294431924819946, |
| "learning_rate": 4.714324468924614e-06, |
| "loss": 0.0387, |
| "step": 163 |
| }, |
| { |
| "epoch": 1.9140625, |
| "grad_norm": 0.21790871024131775, |
| "learning_rate": 4.705367289315172e-06, |
| "loss": 0.0424, |
| "step": 164 |
| }, |
| { |
| "epoch": 1.92578125, |
| "grad_norm": 0.24538278579711914, |
| "learning_rate": 4.696280627277356e-06, |
| "loss": 0.0399, |
| "step": 165 |
| }, |
| { |
| "epoch": 1.9375, |
| "grad_norm": 0.2369818389415741, |
| "learning_rate": 4.687065016310996e-06, |
| "loss": 0.0386, |
| "step": 166 |
| }, |
| { |
| "epoch": 1.94921875, |
| "grad_norm": 0.2826277017593384, |
| "learning_rate": 4.6777209974868194e-06, |
| "loss": 0.0404, |
| "step": 167 |
| }, |
| { |
| "epoch": 1.9609375, |
| "grad_norm": 0.26429814100265503, |
| "learning_rate": 4.668249119414692e-06, |
| "loss": 0.0407, |
| "step": 168 |
| }, |
| { |
| "epoch": 1.97265625, |
| "grad_norm": 0.2524208426475525, |
| "learning_rate": 4.6586499382113985e-06, |
| "loss": 0.037, |
| "step": 169 |
| }, |
| { |
| "epoch": 1.984375, |
| "grad_norm": 0.22663331031799316, |
| "learning_rate": 4.648924017468003e-06, |
| "loss": 0.0337, |
| "step": 170 |
| } |
| ], |
| "logging_steps": 1, |
| "max_steps": 510, |
| "num_input_tokens_seen": 0, |
| "num_train_epochs": 6, |
| "save_steps": 85, |
| "stateful_callbacks": { |
| "TrainerControl": { |
| "args": { |
| "should_epoch_stop": false, |
| "should_evaluate": false, |
| "should_log": false, |
| "should_save": true, |
| "should_training_stop": false |
| }, |
| "attributes": {} |
| } |
| }, |
| "total_flos": 3.787708462666875e+17, |
| "train_batch_size": 4, |
| "trial_name": null, |
| "trial_params": null |
| } |