{
  "best_global_step": null,
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.0,
  "eval_steps": 500,
  "global_step": 512,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.01953125,
      "grad_norm": 1.3515021800994873,
      "learning_rate": 1.875e-06,
      "loss": 1.333,
      "step": 5
    },
    {
      "epoch": 0.0390625,
      "grad_norm": 0.9634742736816406,
      "learning_rate": 4.21875e-06,
      "loss": 1.3235,
      "step": 10
    },
    {
      "epoch": 0.05859375,
      "grad_norm": 0.7603744268417358,
      "learning_rate": 6.5625e-06,
      "loss": 1.2374,
      "step": 15
    },
    {
      "epoch": 0.078125,
      "grad_norm": 0.898777186870575,
      "learning_rate": 8.90625e-06,
      "loss": 1.2363,
      "step": 20
    },
    {
      "epoch": 0.09765625,
      "grad_norm": 0.5078862309455872,
      "learning_rate": 1.125e-05,
      "loss": 1.2713,
      "step": 25
    },
    {
      "epoch": 0.1171875,
      "grad_norm": 0.6618434190750122,
      "learning_rate": 1.359375e-05,
      "loss": 1.2592,
      "step": 30
    },
    {
      "epoch": 0.13671875,
      "grad_norm": 0.4789685904979706,
      "learning_rate": 1.59375e-05,
      "loss": 1.1962,
      "step": 35
    },
    {
      "epoch": 0.15625,
      "grad_norm": 0.4443208873271942,
      "learning_rate": 1.828125e-05,
      "loss": 1.2079,
      "step": 40
    },
    {
      "epoch": 0.17578125,
      "grad_norm": 0.42774105072021484,
      "learning_rate": 2.0625e-05,
      "loss": 1.1485,
      "step": 45
    },
    {
      "epoch": 0.1953125,
      "grad_norm": 0.4273070991039276,
      "learning_rate": 2.296875e-05,
      "loss": 1.1809,
      "step": 50
    },
    {
      "epoch": 0.21484375,
      "grad_norm": 0.4687409996986389,
      "learning_rate": 2.5312500000000002e-05,
      "loss": 1.1016,
      "step": 55
    },
    {
      "epoch": 0.234375,
      "grad_norm": 0.4467330873012543,
      "learning_rate": 2.765625e-05,
      "loss": 1.1695,
      "step": 60
    },
    {
      "epoch": 0.25390625,
      "grad_norm": 0.6110517978668213,
      "learning_rate": 3e-05,
      "loss": 1.1325,
      "step": 65
    },
    {
      "epoch": 0.2734375,
      "grad_norm": 0.75448077917099,
      "learning_rate": 2.9998748508718575e-05,
      "loss": 1.093,
      "step": 70
    },
    {
      "epoch": 0.29296875,
      "grad_norm": 0.6111339330673218,
      "learning_rate": 2.9994994243705013e-05,
      "loss": 1.1134,
      "step": 75
    },
    {
      "epoch": 0.3125,
      "grad_norm": 0.5295779705047607,
      "learning_rate": 2.9988737831416642e-05,
      "loss": 1.0628,
      "step": 80
    },
    {
      "epoch": 0.33203125,
      "grad_norm": 0.538719654083252,
      "learning_rate": 2.9979980315832853e-05,
      "loss": 1.0402,
      "step": 85
    },
    {
      "epoch": 0.3515625,
      "grad_norm": 0.554957389831543,
      "learning_rate": 2.9968723158280906e-05,
      "loss": 1.0623,
      "step": 90
    },
    {
      "epoch": 0.37109375,
      "grad_norm": 0.5269491076469421,
      "learning_rate": 2.995496823719206e-05,
      "loss": 1.0775,
      "step": 95
    },
    {
      "epoch": 0.390625,
      "grad_norm": 0.5827288031578064,
      "learning_rate": 2.9938717847788167e-05,
      "loss": 0.9854,
      "step": 100
    },
    {
      "epoch": 0.41015625,
      "grad_norm": 0.5621783137321472,
      "learning_rate": 2.9919974701698638e-05,
      "loss": 0.9554,
      "step": 105
    },
    {
      "epoch": 0.4296875,
      "grad_norm": 0.7156975269317627,
      "learning_rate": 2.989874192650801e-05,
      "loss": 0.9722,
      "step": 110
    },
    {
      "epoch": 0.44921875,
      "grad_norm": 0.5912815928459167,
      "learning_rate": 2.9875023065234003e-05,
      "loss": 0.9747,
      "step": 115
    },
    {
      "epoch": 0.46875,
      "grad_norm": 0.686764657497406,
      "learning_rate": 2.984882207573638e-05,
      "loss": 1.0,
      "step": 120
    },
    {
      "epoch": 0.48828125,
      "grad_norm": 0.7672634720802307,
      "learning_rate": 2.982014333005645e-05,
      "loss": 0.9985,
      "step": 125
    },
    {
      "epoch": 0.5078125,
      "grad_norm": 0.7488982677459717,
      "learning_rate": 2.9788991613687575e-05,
      "loss": 0.9438,
      "step": 130
    },
    {
      "epoch": 0.52734375,
      "grad_norm": 0.7190539240837097,
      "learning_rate": 2.9755372124776616e-05,
      "loss": 0.9301,
      "step": 135
    },
    {
      "epoch": 0.546875,
      "grad_norm": 0.6693652868270874,
      "learning_rate": 2.971929047325654e-05,
      "loss": 0.8901,
      "step": 140
    },
    {
      "epoch": 0.56640625,
      "grad_norm": 0.7850137948989868,
      "learning_rate": 2.968075267991032e-05,
      "loss": 0.8936,
      "step": 145
    },
    {
      "epoch": 0.5859375,
      "grad_norm": 0.9584252834320068,
      "learning_rate": 2.963976517536627e-05,
      "loss": 0.8915,
      "step": 150
    },
    {
      "epoch": 0.60546875,
      "grad_norm": 0.8579109907150269,
      "learning_rate": 2.9596334799025003e-05,
      "loss": 0.8889,
      "step": 155
    },
    {
      "epoch": 0.625,
      "grad_norm": 0.8218862414360046,
      "learning_rate": 2.9550468797918162e-05,
      "loss": 0.855,
      "step": 160
    },
    {
      "epoch": 0.64453125,
      "grad_norm": 0.7801545858383179,
      "learning_rate": 2.950217482549915e-05,
      "loss": 0.8265,
      "step": 165
    },
    {
      "epoch": 0.6640625,
      "grad_norm": 0.803713858127594,
      "learning_rate": 2.9451460940366025e-05,
      "loss": 0.8222,
      "step": 170
    },
    {
      "epoch": 0.68359375,
      "grad_norm": 0.8898375034332275,
      "learning_rate": 2.9398335604916797e-05,
      "loss": 0.7957,
      "step": 175
    },
    {
      "epoch": 0.703125,
      "grad_norm": 1.0227468013763428,
      "learning_rate": 2.9342807683937352e-05,
      "loss": 0.8001,
      "step": 180
    },
    {
      "epoch": 0.72265625,
      "grad_norm": 0.8922404050827026,
      "learning_rate": 2.928488644312222e-05,
      "loss": 0.7994,
      "step": 185
    },
    {
      "epoch": 0.7421875,
      "grad_norm": 0.9021320939064026,
      "learning_rate": 2.9224581547528453e-05,
      "loss": 0.7911,
      "step": 190
    },
    {
      "epoch": 0.76171875,
      "grad_norm": 0.8039846420288086,
      "learning_rate": 2.916190305996286e-05,
      "loss": 0.779,
      "step": 195
    },
    {
      "epoch": 0.78125,
      "grad_norm": 0.977204442024231,
      "learning_rate": 2.909686143930287e-05,
      "loss": 0.7827,
      "step": 200
    },
    {
      "epoch": 0.80078125,
      "grad_norm": 0.8399918675422668,
      "learning_rate": 2.902946753875131e-05,
      "loss": 0.7524,
      "step": 205
    },
    {
      "epoch": 0.8203125,
      "grad_norm": 0.8906209468841553,
      "learning_rate": 2.895973260402537e-05,
      "loss": 0.7638,
      "step": 210
    },
    {
      "epoch": 0.83984375,
      "grad_norm": 1.2285107374191284,
      "learning_rate": 2.8887668271480098e-05,
      "loss": 0.7679,
      "step": 215
    },
    {
      "epoch": 0.859375,
      "grad_norm": 0.9521386027336121,
      "learning_rate": 2.8813286566166674e-05,
      "loss": 0.7347,
      "step": 220
    },
    {
      "epoch": 0.87890625,
      "grad_norm": 1.1255961656570435,
      "learning_rate": 2.873659989982586e-05,
      "loss": 0.7486,
      "step": 225
    },
    {
      "epoch": 0.8984375,
      "grad_norm": 0.9320322871208191,
      "learning_rate": 2.8657621068816903e-05,
      "loss": 0.7073,
      "step": 230
    },
    {
      "epoch": 0.91796875,
      "grad_norm": 1.0787020921707153,
      "learning_rate": 2.857636325198225e-05,
      "loss": 0.6956,
      "step": 235
    },
    {
      "epoch": 0.9375,
      "grad_norm": 0.9191898703575134,
      "learning_rate": 2.849284000844849e-05,
      "loss": 0.6815,
      "step": 240
    },
    {
      "epoch": 0.95703125,
      "grad_norm": 0.947468638420105,
      "learning_rate": 2.8407065275363756e-05,
      "loss": 0.6692,
      "step": 245
    },
    {
      "epoch": 0.9765625,
      "grad_norm": 0.8919875621795654,
      "learning_rate": 2.8319053365572135e-05,
      "loss": 0.688,
      "step": 250
    },
    {
      "epoch": 0.99609375,
      "grad_norm": 0.9745859503746033,
      "learning_rate": 2.8228818965225325e-05,
      "loss": 0.6103,
      "step": 255
    },
    {
      "epoch": 1.015625,
      "grad_norm": 1.2867947816848755,
      "learning_rate": 2.8136377131332043e-05,
      "loss": 0.6321,
      "step": 260
    },
    {
      "epoch": 1.03515625,
      "grad_norm": 1.055025577545166,
      "learning_rate": 2.8041743289245503e-05,
      "loss": 0.5343,
      "step": 265
    },
    {
      "epoch": 1.0546875,
      "grad_norm": 1.0192275047302246,
      "learning_rate": 2.7944933230089484e-05,
      "loss": 0.5867,
      "step": 270
    },
    {
      "epoch": 1.07421875,
      "grad_norm": 1.1829568147659302,
      "learning_rate": 2.784596310812331e-05,
      "loss": 0.5331,
      "step": 275
    },
    {
      "epoch": 1.09375,
      "grad_norm": 1.0421009063720703,
      "learning_rate": 2.774484943804629e-05,
      "loss": 0.5378,
      "step": 280
    },
    {
      "epoch": 1.11328125,
      "grad_norm": 1.039535641670227,
      "learning_rate": 2.764160909224196e-05,
      "loss": 0.5446,
      "step": 285
    },
    {
      "epoch": 1.1328125,
      "grad_norm": 1.2662440538406372,
      "learning_rate": 2.7536259297962674e-05,
      "loss": 0.4932,
      "step": 290
    },
    {
      "epoch": 1.15234375,
      "grad_norm": 1.0458778142929077,
      "learning_rate": 2.7428817634454973e-05,
      "loss": 0.5315,
      "step": 295
    },
    {
      "epoch": 1.171875,
      "grad_norm": 1.0862466096878052,
      "learning_rate": 2.7319302030026207e-05,
      "loss": 0.5626,
      "step": 300
    },
    {
      "epoch": 1.19140625,
      "grad_norm": 1.191480278968811,
      "learning_rate": 2.7207730759052925e-05,
      "loss": 0.6235,
      "step": 305
    },
    {
      "epoch": 1.2109375,
      "grad_norm": 1.3713099956512451,
      "learning_rate": 2.7094122438931513e-05,
      "loss": 0.4837,
      "step": 310
    },
    {
      "epoch": 1.23046875,
      "grad_norm": 1.104356288909912,
      "learning_rate": 2.697849602697159e-05,
      "loss": 0.5028,
      "step": 315
    },
    {
      "epoch": 1.25,
      "grad_norm": 1.2873727083206177,
      "learning_rate": 2.6860870817232682e-05,
      "loss": 0.5134,
      "step": 320
    },
    {
      "epoch": 1.26953125,
      "grad_norm": 1.0831677913665771,
      "learning_rate": 2.6741266437304718e-05,
      "loss": 0.475,
      "step": 325
    },
    {
      "epoch": 1.2890625,
      "grad_norm": 1.1841942071914673,
      "learning_rate": 2.661970284503286e-05,
      "loss": 0.5094,
      "step": 330
    },
    {
      "epoch": 1.30859375,
      "grad_norm": 1.1621878147125244,
      "learning_rate": 2.6496200325187223e-05,
      "loss": 0.5235,
      "step": 335
    },
    {
      "epoch": 1.328125,
      "grad_norm": 1.1662148237228394,
      "learning_rate": 2.6370779486078047e-05,
      "loss": 0.5086,
      "step": 340
    },
    {
      "epoch": 1.34765625,
      "grad_norm": 1.1327015161514282,
      "learning_rate": 2.6243461256116892e-05,
      "loss": 0.5031,
      "step": 345
    },
    {
      "epoch": 1.3671875,
      "grad_norm": 1.211604118347168,
      "learning_rate": 2.611426688032439e-05,
      "loss": 0.4841,
      "step": 350
    },
    {
      "epoch": 1.38671875,
      "grad_norm": 1.0225025415420532,
      "learning_rate": 2.598321791678519e-05,
      "loss": 0.4889,
      "step": 355
    },
    {
      "epoch": 1.40625,
      "grad_norm": 1.1432801485061646,
      "learning_rate": 2.5850336233050677e-05,
      "loss": 0.4618,
      "step": 360
    },
    {
      "epoch": 1.42578125,
      "grad_norm": 1.3656413555145264,
      "learning_rate": 2.5715644002489998e-05,
      "loss": 0.4594,
      "step": 365
    },
    {
      "epoch": 1.4453125,
      "grad_norm": 1.383625864982605,
      "learning_rate": 2.557916370059012e-05,
      "loss": 0.5009,
      "step": 370
    },
    {
      "epoch": 1.46484375,
      "grad_norm": 1.2634763717651367,
      "learning_rate": 2.544091810120543e-05,
      "loss": 0.5046,
      "step": 375
    },
    {
      "epoch": 1.484375,
      "grad_norm": 1.156620979309082,
      "learning_rate": 2.530093027275757e-05,
      "loss": 0.4219,
      "step": 380
    },
    {
      "epoch": 1.50390625,
      "grad_norm": 1.0806770324707031,
      "learning_rate": 2.5159223574386117e-05,
      "loss": 0.4786,
      "step": 385
    },
    {
      "epoch": 1.5234375,
      "grad_norm": 1.1003456115722656,
      "learning_rate": 2.501582165205074e-05,
      "loss": 0.4232,
      "step": 390
    },
    {
      "epoch": 1.54296875,
      "grad_norm": 1.1384750604629517,
      "learning_rate": 2.4870748434585514e-05,
      "loss": 0.499,
      "step": 395
    },
    {
      "epoch": 1.5625,
      "grad_norm": 1.0580660104751587,
      "learning_rate": 2.4724028129706012e-05,
      "loss": 0.4066,
      "step": 400
    },
    {
      "epoch": 1.58203125,
      "grad_norm": 1.2567460536956787,
      "learning_rate": 2.4575685219969884e-05,
      "loss": 0.4367,
      "step": 405
    },
    {
      "epoch": 1.6015625,
      "grad_norm": 1.1812260150909424,
      "learning_rate": 2.442574445869156e-05,
      "loss": 0.4303,
      "step": 410
    },
    {
      "epoch": 1.62109375,
      "grad_norm": 1.2039210796356201,
      "learning_rate": 2.4274230865811763e-05,
      "loss": 0.4385,
      "step": 415
    },
    {
      "epoch": 1.640625,
      "grad_norm": 1.2550849914550781,
      "learning_rate": 2.4121169723722566e-05,
      "loss": 0.3703,
      "step": 420
    },
    {
      "epoch": 1.66015625,
      "grad_norm": 1.1235885620117188,
      "learning_rate": 2.396658657304861e-05,
      "loss": 0.4242,
      "step": 425
    },
    {
      "epoch": 1.6796875,
      "grad_norm": 1.2263010740280151,
      "learning_rate": 2.381050720838528e-05,
      "loss": 0.3919,
      "step": 430
    },
    {
      "epoch": 1.69921875,
      "grad_norm": 1.1357097625732422,
      "learning_rate": 2.3652957673994448e-05,
      "loss": 0.4487,
      "step": 435
    },
    {
      "epoch": 1.71875,
      "grad_norm": 1.271737813949585,
      "learning_rate": 2.3493964259458603e-05,
      "loss": 0.3659,
      "step": 440
    },
    {
      "epoch": 1.73828125,
      "grad_norm": 1.257322072982788,
      "learning_rate": 2.3333553495294033e-05,
      "loss": 0.3847,
      "step": 445
    },
    {
      "epoch": 1.7578125,
      "grad_norm": 1.1676826477050781,
      "learning_rate": 2.317175214852377e-05,
      "loss": 0.3531,
      "step": 450
    },
    {
      "epoch": 1.77734375,
      "grad_norm": 1.032481074333191,
      "learning_rate": 2.3008587218211127e-05,
      "loss": 0.3849,
      "step": 455
    },
    {
      "epoch": 1.796875,
      "grad_norm": 1.195465087890625,
      "learning_rate": 2.284408593095446e-05,
      "loss": 0.4086,
      "step": 460
    },
    {
      "epoch": 1.81640625,
      "grad_norm": 1.3418141603469849,
      "learning_rate": 2.2678275736344014e-05,
      "loss": 0.3851,
      "step": 465
    },
    {
      "epoch": 1.8359375,
      "grad_norm": 1.1920610666275024,
      "learning_rate": 2.251118430238151e-05,
      "loss": 0.3678,
      "step": 470
    },
    {
      "epoch": 1.85546875,
      "grad_norm": 1.2250503301620483,
      "learning_rate": 2.2342839510863323e-05,
      "loss": 0.367,
      "step": 475
    },
    {
      "epoch": 1.875,
      "grad_norm": 1.1807481050491333,
      "learning_rate": 2.2173269452727965e-05,
      "loss": 0.3344,
      "step": 480
    },
    {
      "epoch": 1.89453125,
      "grad_norm": 1.2924655675888062,
      "learning_rate": 2.200250242336868e-05,
      "loss": 0.338,
      "step": 485
    },
    {
      "epoch": 1.9140625,
      "grad_norm": 1.1538184881210327,
      "learning_rate": 2.183056691791193e-05,
      "loss": 0.3683,
      "step": 490
    },
    {
      "epoch": 1.93359375,
      "grad_norm": 1.335293173789978,
      "learning_rate": 2.1657491626462514e-05,
      "loss": 0.3779,
      "step": 495
    },
    {
      "epoch": 1.953125,
      "grad_norm": 1.2173998355865479,
      "learning_rate": 2.1483305429316208e-05,
      "loss": 0.3728,
      "step": 500
    },
    {
      "epoch": 1.97265625,
      "grad_norm": 1.1745744943618774,
      "learning_rate": 2.1308037392140613e-05,
      "loss": 0.3268,
      "step": 505
    },
    {
      "epoch": 1.9921875,
      "grad_norm": 1.1856087446212769,
      "learning_rate": 2.113171676112513e-05,
      "loss": 0.3501,
      "step": 510
    }
  ],
  "logging_steps": 5,
  "max_steps": 1280,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 5,
  "save_steps": 2000,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 7.521082542632468e+17,
  "train_batch_size": 2,
  "trial_name": null,
  "trial_params": null
}