{
  "best_global_step": 4098,
  "best_metric": 0.6449487554904831,
  "best_model_checkpoint": "./saved_models/vulberta/checkpoint-4098",
  "epoch": 5.0,
  "eval_steps": 500,
  "global_step": 6830,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.036603221083455345,
      "grad_norm": 3.2610507011413574,
      "learning_rate": 1.9856515373352855e-05,
      "loss": 0.7004,
      "step": 50
    },
    {
      "epoch": 0.07320644216691069,
      "grad_norm": 4.554418563842773,
      "learning_rate": 1.9710102489019037e-05,
      "loss": 0.7002,
      "step": 100
    },
    {
      "epoch": 0.10980966325036604,
      "grad_norm": 2.98534893989563,
      "learning_rate": 1.9563689604685214e-05,
      "loss": 0.6961,
      "step": 150
    },
    {
      "epoch": 0.14641288433382138,
      "grad_norm": 2.6510939598083496,
      "learning_rate": 1.9417276720351393e-05,
      "loss": 0.6968,
      "step": 200
    },
    {
      "epoch": 0.18301610541727673,
      "grad_norm": 3.6250808238983154,
      "learning_rate": 1.927086383601757e-05,
      "loss": 0.7027,
      "step": 250
    },
    {
      "epoch": 0.21961932650073207,
      "grad_norm": 6.485755443572998,
      "learning_rate": 1.9124450951683752e-05,
      "loss": 0.6881,
      "step": 300
    },
    {
      "epoch": 0.2562225475841874,
      "grad_norm": 3.9667303562164307,
      "learning_rate": 1.897803806734993e-05,
      "loss": 0.6745,
      "step": 350
    },
    {
      "epoch": 0.29282576866764276,
      "grad_norm": 5.07423210144043,
      "learning_rate": 1.8831625183016108e-05,
      "loss": 0.6963,
      "step": 400
    },
    {
      "epoch": 0.3294289897510981,
      "grad_norm": 3.7114145755767822,
      "learning_rate": 1.8685212298682284e-05,
      "loss": 0.6868,
      "step": 450
    },
    {
      "epoch": 0.36603221083455345,
      "grad_norm": 8.060341835021973,
      "learning_rate": 1.8538799414348463e-05,
      "loss": 0.6865,
      "step": 500
    },
    {
      "epoch": 0.40263543191800877,
      "grad_norm": 5.241364002227783,
      "learning_rate": 1.8392386530014643e-05,
      "loss": 0.6757,
      "step": 550
    },
    {
      "epoch": 0.43923865300146414,
      "grad_norm": 3.611410140991211,
      "learning_rate": 1.8245973645680823e-05,
      "loss": 0.6616,
      "step": 600
    },
    {
      "epoch": 0.47584187408491946,
      "grad_norm": 5.06477689743042,
      "learning_rate": 1.8099560761347e-05,
      "loss": 0.6775,
      "step": 650
    },
    {
      "epoch": 0.5124450951683748,
      "grad_norm": 9.890421867370605,
      "learning_rate": 1.7953147877013178e-05,
      "loss": 0.6631,
      "step": 700
    },
    {
      "epoch": 0.5490483162518301,
      "grad_norm": 3.756012201309204,
      "learning_rate": 1.7806734992679358e-05,
      "loss": 0.672,
      "step": 750
    },
    {
      "epoch": 0.5856515373352855,
      "grad_norm": 2.940450429916382,
      "learning_rate": 1.7660322108345537e-05,
      "loss": 0.6591,
      "step": 800
    },
    {
      "epoch": 0.6222547584187409,
      "grad_norm": 3.1005771160125732,
      "learning_rate": 1.7513909224011713e-05,
      "loss": 0.6593,
      "step": 850
    },
    {
      "epoch": 0.6588579795021962,
      "grad_norm": 7.628331661224365,
      "learning_rate": 1.7367496339677893e-05,
      "loss": 0.6677,
      "step": 900
    },
    {
      "epoch": 0.6954612005856515,
      "grad_norm": 7.699522018432617,
      "learning_rate": 1.7221083455344072e-05,
      "loss": 0.6621,
      "step": 950
    },
    {
      "epoch": 0.7320644216691069,
      "grad_norm": 5.101274490356445,
      "learning_rate": 1.707467057101025e-05,
      "loss": 0.6666,
      "step": 1000
    },
    {
      "epoch": 0.7686676427525623,
      "grad_norm": 7.820975303649902,
      "learning_rate": 1.6928257686676428e-05,
      "loss": 0.6646,
      "step": 1050
    },
    {
      "epoch": 0.8052708638360175,
      "grad_norm": 5.459151268005371,
      "learning_rate": 1.6781844802342608e-05,
      "loss": 0.665,
      "step": 1100
    },
    {
      "epoch": 0.8418740849194729,
      "grad_norm": 2.7326955795288086,
      "learning_rate": 1.6635431918008787e-05,
      "loss": 0.6623,
      "step": 1150
    },
    {
      "epoch": 0.8784773060029283,
      "grad_norm": 4.056868076324463,
      "learning_rate": 1.6489019033674963e-05,
      "loss": 0.6718,
      "step": 1200
    },
    {
      "epoch": 0.9150805270863837,
      "grad_norm": 3.1695716381073,
      "learning_rate": 1.6342606149341143e-05,
      "loss": 0.6469,
      "step": 1250
    },
    {
      "epoch": 0.9516837481698389,
      "grad_norm": 3.2866313457489014,
      "learning_rate": 1.6196193265007322e-05,
      "loss": 0.646,
      "step": 1300
    },
    {
      "epoch": 0.9882869692532943,
      "grad_norm": 7.2812957763671875,
      "learning_rate": 1.6049780380673502e-05,
      "loss": 0.6381,
      "step": 1350
    },
    {
      "epoch": 1.0,
      "eval_accuracy": 0.6248169838945827,
      "eval_loss": 0.6279681324958801,
      "eval_runtime": 13.6534,
      "eval_samples_per_second": 200.097,
      "eval_steps_per_second": 6.299,
      "step": 1366
    },
    {
      "epoch": 1.0248901903367496,
      "grad_norm": 9.606693267822266,
      "learning_rate": 1.5903367496339678e-05,
      "loss": 0.6152,
      "step": 1400
    },
    {
      "epoch": 1.061493411420205,
      "grad_norm": 7.122623920440674,
      "learning_rate": 1.5756954612005857e-05,
      "loss": 0.6157,
      "step": 1450
    },
    {
      "epoch": 1.0980966325036603,
      "grad_norm": 7.1912384033203125,
      "learning_rate": 1.5610541727672037e-05,
      "loss": 0.5858,
      "step": 1500
    },
    {
      "epoch": 1.1346998535871156,
      "grad_norm": 8.011345863342285,
      "learning_rate": 1.5464128843338216e-05,
      "loss": 0.5917,
      "step": 1550
    },
    {
      "epoch": 1.171303074670571,
      "grad_norm": 3.9715421199798584,
      "learning_rate": 1.5317715959004393e-05,
      "loss": 0.6215,
      "step": 1600
    },
    {
      "epoch": 1.2079062957540263,
      "grad_norm": 4.4239501953125,
      "learning_rate": 1.5171303074670572e-05,
      "loss": 0.5701,
      "step": 1650
    },
    {
      "epoch": 1.2445095168374818,
      "grad_norm": 6.152805328369141,
      "learning_rate": 1.5024890190336752e-05,
      "loss": 0.6044,
      "step": 1700
    },
    {
      "epoch": 1.281112737920937,
      "grad_norm": 8.131645202636719,
      "learning_rate": 1.487847730600293e-05,
      "loss": 0.5967,
      "step": 1750
    },
    {
      "epoch": 1.3177159590043923,
      "grad_norm": 5.887744903564453,
      "learning_rate": 1.4732064421669107e-05,
      "loss": 0.5764,
      "step": 1800
    },
    {
      "epoch": 1.3543191800878478,
      "grad_norm": 4.500638484954834,
      "learning_rate": 1.4585651537335285e-05,
      "loss": 0.5966,
      "step": 1850
    },
    {
      "epoch": 1.390922401171303,
      "grad_norm": 4.338794231414795,
      "learning_rate": 1.4439238653001466e-05,
      "loss": 0.6064,
      "step": 1900
    },
    {
      "epoch": 1.4275256222547585,
      "grad_norm": 7.292316436767578,
      "learning_rate": 1.4292825768667644e-05,
      "loss": 0.6048,
      "step": 1950
    },
    {
      "epoch": 1.4641288433382138,
      "grad_norm": 6.632826328277588,
      "learning_rate": 1.4146412884333822e-05,
      "loss": 0.5798,
      "step": 2000
    },
    {
      "epoch": 1.500732064421669,
      "grad_norm": 9.965185165405273,
      "learning_rate": 1.4e-05,
      "loss": 0.5452,
      "step": 2050
    },
    {
      "epoch": 1.5373352855051245,
      "grad_norm": 5.015806674957275,
      "learning_rate": 1.3853587115666181e-05,
      "loss": 0.5961,
      "step": 2100
    },
    {
      "epoch": 1.5739385065885798,
      "grad_norm": 3.3543472290039062,
      "learning_rate": 1.3707174231332359e-05,
      "loss": 0.5949,
      "step": 2150
    },
    {
      "epoch": 1.610541727672035,
      "grad_norm": 5.284890651702881,
      "learning_rate": 1.3560761346998537e-05,
      "loss": 0.5742,
      "step": 2200
    },
    {
      "epoch": 1.6471449487554906,
      "grad_norm": 4.332915782928467,
      "learning_rate": 1.3414348462664714e-05,
      "loss": 0.6036,
      "step": 2250
    },
    {
      "epoch": 1.6837481698389458,
      "grad_norm": 6.890816688537598,
      "learning_rate": 1.3267935578330896e-05,
      "loss": 0.5673,
      "step": 2300
    },
    {
      "epoch": 1.720351390922401,
      "grad_norm": 6.644792079925537,
      "learning_rate": 1.3121522693997074e-05,
      "loss": 0.5624,
      "step": 2350
    },
    {
      "epoch": 1.7569546120058566,
      "grad_norm": 4.097620487213135,
      "learning_rate": 1.2975109809663251e-05,
      "loss": 0.5753,
      "step": 2400
    },
    {
      "epoch": 1.7935578330893118,
      "grad_norm": 6.0914459228515625,
      "learning_rate": 1.282869692532943e-05,
      "loss": 0.5962,
      "step": 2450
    },
    {
      "epoch": 1.830161054172767,
      "grad_norm": 6.992921352386475,
      "learning_rate": 1.2682284040995609e-05,
      "loss": 0.5672,
      "step": 2500
    },
    {
      "epoch": 1.8667642752562226,
      "grad_norm": 5.129520416259766,
      "learning_rate": 1.2535871156661788e-05,
      "loss": 0.5925,
      "step": 2550
    },
    {
      "epoch": 1.903367496339678,
      "grad_norm": 5.3862223625183105,
      "learning_rate": 1.2389458272327966e-05,
      "loss": 0.5897,
      "step": 2600
    },
    {
      "epoch": 1.939970717423133,
      "grad_norm": 4.44102668762207,
      "learning_rate": 1.2243045387994144e-05,
      "loss": 0.5699,
      "step": 2650
    },
    {
      "epoch": 1.9765739385065886,
      "grad_norm": 3.638970375061035,
      "learning_rate": 1.2096632503660323e-05,
      "loss": 0.5709,
      "step": 2700
    },
    {
      "epoch": 2.0,
      "eval_accuracy": 0.6292093704245973,
      "eval_loss": 0.6692330837249756,
      "eval_runtime": 13.6268,
      "eval_samples_per_second": 200.488,
      "eval_steps_per_second": 6.311,
      "step": 2732
    },
    {
      "epoch": 2.013177159590044,
      "grad_norm": 4.104243278503418,
      "learning_rate": 1.1950219619326501e-05,
      "loss": 0.537,
      "step": 2750
    },
    {
      "epoch": 2.049780380673499,
      "grad_norm": 16.372787475585938,
      "learning_rate": 1.180380673499268e-05,
      "loss": 0.4823,
      "step": 2800
    },
    {
      "epoch": 2.0863836017569546,
      "grad_norm": 6.0528244972229,
      "learning_rate": 1.1657393850658859e-05,
      "loss": 0.4871,
      "step": 2850
    },
    {
      "epoch": 2.12298682284041,
      "grad_norm": 11.29574203491211,
      "learning_rate": 1.1510980966325038e-05,
      "loss": 0.4714,
      "step": 2900
    },
    {
      "epoch": 2.159590043923865,
      "grad_norm": 10.56286334991455,
      "learning_rate": 1.1364568081991216e-05,
      "loss": 0.4669,
      "step": 2950
    },
    {
      "epoch": 2.1961932650073206,
      "grad_norm": 12.307333946228027,
      "learning_rate": 1.1218155197657394e-05,
      "loss": 0.4742,
      "step": 3000
    },
    {
      "epoch": 2.232796486090776,
      "grad_norm": 4.0419440269470215,
      "learning_rate": 1.1071742313323573e-05,
      "loss": 0.4621,
      "step": 3050
    },
    {
      "epoch": 2.269399707174231,
      "grad_norm": 14.435094833374023,
      "learning_rate": 1.0925329428989753e-05,
      "loss": 0.4789,
      "step": 3100
    },
    {
      "epoch": 2.3060029282576866,
      "grad_norm": 4.455873966217041,
      "learning_rate": 1.077891654465593e-05,
      "loss": 0.4672,
      "step": 3150
    },
    {
      "epoch": 2.342606149341142,
      "grad_norm": 9.073851585388184,
      "learning_rate": 1.0632503660322108e-05,
      "loss": 0.4827,
      "step": 3200
    },
    {
      "epoch": 2.379209370424597,
      "grad_norm": 6.983617305755615,
      "learning_rate": 1.0486090775988286e-05,
      "loss": 0.4619,
      "step": 3250
    },
    {
      "epoch": 2.4158125915080526,
      "grad_norm": 7.893983840942383,
      "learning_rate": 1.0339677891654467e-05,
      "loss": 0.4513,
      "step": 3300
    },
    {
      "epoch": 2.452415812591508,
      "grad_norm": 8.749136924743652,
      "learning_rate": 1.0193265007320645e-05,
      "loss": 0.4947,
      "step": 3350
    },
    {
      "epoch": 2.4890190336749636,
      "grad_norm": 4.401139736175537,
      "learning_rate": 1.0046852122986823e-05,
      "loss": 0.4712,
      "step": 3400
    },
    {
      "epoch": 2.5256222547584186,
      "grad_norm": 6.115893363952637,
      "learning_rate": 9.900439238653003e-06,
      "loss": 0.4624,
      "step": 3450
    },
    {
      "epoch": 2.562225475841874,
      "grad_norm": 3.8851876258850098,
      "learning_rate": 9.75402635431918e-06,
      "loss": 0.4955,
      "step": 3500
    },
    {
      "epoch": 2.5988286969253296,
      "grad_norm": 8.24189281463623,
      "learning_rate": 9.60761346998536e-06,
      "loss": 0.4573,
      "step": 3550
    },
    {
      "epoch": 2.6354319180087846,
      "grad_norm": 4.605769157409668,
      "learning_rate": 9.461200585651538e-06,
      "loss": 0.5172,
      "step": 3600
    },
    {
      "epoch": 2.67203513909224,
      "grad_norm": 9.174911499023438,
      "learning_rate": 9.314787701317717e-06,
      "loss": 0.455,
      "step": 3650
    },
    {
      "epoch": 2.7086383601756956,
      "grad_norm": 7.105350017547607,
      "learning_rate": 9.168374816983895e-06,
      "loss": 0.4792,
      "step": 3700
    },
    {
      "epoch": 2.745241581259151,
      "grad_norm": 9.204080581665039,
      "learning_rate": 9.021961932650075e-06,
      "loss": 0.5004,
      "step": 3750
    },
    {
      "epoch": 2.781844802342606,
      "grad_norm": 23.993898391723633,
      "learning_rate": 8.875549048316252e-06,
      "loss": 0.49,
      "step": 3800
    },
    {
      "epoch": 2.8184480234260616,
      "grad_norm": 7.301971912384033,
      "learning_rate": 8.729136163982432e-06,
      "loss": 0.5001,
      "step": 3850
    },
    {
      "epoch": 2.855051244509517,
      "grad_norm": 5.969315052032471,
      "learning_rate": 8.58272327964861e-06,
      "loss": 0.4463,
      "step": 3900
    },
    {
      "epoch": 2.891654465592972,
      "grad_norm": 5.500088691711426,
      "learning_rate": 8.436310395314788e-06,
      "loss": 0.4566,
      "step": 3950
    },
    {
      "epoch": 2.9282576866764276,
      "grad_norm": 4.6784586906433105,
      "learning_rate": 8.289897510980967e-06,
      "loss": 0.458,
      "step": 4000
    },
    {
      "epoch": 2.964860907759883,
      "grad_norm": 7.5789103507995605,
      "learning_rate": 8.143484626647145e-06,
      "loss": 0.499,
      "step": 4050
    },
    {
      "epoch": 3.0,
      "eval_accuracy": 0.6449487554904831,
      "eval_loss": 0.6640557050704956,
      "eval_runtime": 13.6186,
      "eval_samples_per_second": 200.607,
      "eval_steps_per_second": 6.315,
      "step": 4098
    },
    {
      "epoch": 3.001464128843338,
      "grad_norm": 9.293344497680664,
      "learning_rate": 7.997071742313325e-06,
      "loss": 0.5113,
      "step": 4100
    },
    {
      "epoch": 3.0380673499267936,
      "grad_norm": 13.914603233337402,
      "learning_rate": 7.850658857979502e-06,
      "loss": 0.3652,
      "step": 4150
    },
    {
      "epoch": 3.074670571010249,
      "grad_norm": 10.234390258789062,
      "learning_rate": 7.70424597364568e-06,
      "loss": 0.3838,
      "step": 4200
    },
    {
      "epoch": 3.111273792093704,
      "grad_norm": 14.120603561401367,
      "learning_rate": 7.55783308931186e-06,
      "loss": 0.347,
      "step": 4250
    },
    {
      "epoch": 3.1478770131771596,
      "grad_norm": 12.265131950378418,
      "learning_rate": 7.411420204978038e-06,
      "loss": 0.3749,
      "step": 4300
    },
    {
      "epoch": 3.184480234260615,
      "grad_norm": 9.216547966003418,
      "learning_rate": 7.265007320644217e-06,
      "loss": 0.3554,
      "step": 4350
    },
    {
      "epoch": 3.22108345534407,
      "grad_norm": 11.449105262756348,
      "learning_rate": 7.118594436310396e-06,
      "loss": 0.344,
      "step": 4400
    },
    {
      "epoch": 3.2576866764275256,
      "grad_norm": 12.415879249572754,
      "learning_rate": 6.972181551976574e-06,
      "loss": 0.3718,
      "step": 4450
    },
    {
      "epoch": 3.294289897510981,
      "grad_norm": 11.875557899475098,
      "learning_rate": 6.825768667642752e-06,
      "loss": 0.3526,
      "step": 4500
    },
    {
      "epoch": 3.330893118594436,
      "grad_norm": 5.397584438323975,
      "learning_rate": 6.679355783308932e-06,
      "loss": 0.3522,
      "step": 4550
    },
    {
      "epoch": 3.3674963396778916,
      "grad_norm": 6.454545974731445,
      "learning_rate": 6.5329428989751095e-06,
      "loss": 0.3884,
      "step": 4600
    },
    {
      "epoch": 3.404099560761347,
      "grad_norm": 6.373364448547363,
      "learning_rate": 6.386530014641289e-06,
      "loss": 0.3908,
      "step": 4650
    },
    {
      "epoch": 3.440702781844802,
      "grad_norm": 16.992902755737305,
      "learning_rate": 6.240117130307467e-06,
      "loss": 0.3711,
      "step": 4700
    },
    {
      "epoch": 3.4773060029282576,
      "grad_norm": 12.786251068115234,
      "learning_rate": 6.093704245973646e-06,
      "loss": 0.3872,
      "step": 4750
    },
    {
      "epoch": 3.513909224011713,
      "grad_norm": 7.947234153747559,
      "learning_rate": 5.947291361639825e-06,
      "loss": 0.3498,
      "step": 4800
    },
    {
      "epoch": 3.550512445095168,
      "grad_norm": 13.394478797912598,
      "learning_rate": 5.800878477306004e-06,
      "loss": 0.3621,
      "step": 4850
    },
    {
      "epoch": 3.5871156661786237,
      "grad_norm": 13.295003890991211,
      "learning_rate": 5.6544655929721824e-06,
      "loss": 0.385,
      "step": 4900
    },
    {
      "epoch": 3.623718887262079,
      "grad_norm": 8.606812477111816,
      "learning_rate": 5.50805270863836e-06,
      "loss": 0.3755,
      "step": 4950
    },
    {
      "epoch": 3.660322108345534,
      "grad_norm": 7.550472736358643,
      "learning_rate": 5.36163982430454e-06,
      "loss": 0.372,
      "step": 5000
    },
    {
      "epoch": 3.6969253294289897,
      "grad_norm": 9.188961029052734,
      "learning_rate": 5.215226939970718e-06,
      "loss": 0.3511,
      "step": 5050
    },
    {
      "epoch": 3.733528550512445,
      "grad_norm": 7.2935638427734375,
      "learning_rate": 5.068814055636897e-06,
      "loss": 0.3673,
      "step": 5100
    },
    {
      "epoch": 3.7701317715959,
      "grad_norm": 5.323498249053955,
      "learning_rate": 4.922401171303075e-06,
      "loss": 0.3851,
      "step": 5150
    },
    {
      "epoch": 3.8067349926793557,
      "grad_norm": 6.138152599334717,
      "learning_rate": 4.775988286969254e-06,
      "loss": 0.3634,
      "step": 5200
    },
    {
      "epoch": 3.843338213762811,
      "grad_norm": 6.376275062561035,
      "learning_rate": 4.629575402635432e-06,
      "loss": 0.3826,
      "step": 5250
    },
    {
      "epoch": 3.8799414348462666,
      "grad_norm": 9.393123626708984,
      "learning_rate": 4.483162518301611e-06,
      "loss": 0.3765,
      "step": 5300
    },
    {
      "epoch": 3.9165446559297217,
      "grad_norm": 7.3140950202941895,
      "learning_rate": 4.33674963396779e-06,
      "loss": 0.3723,
      "step": 5350
    },
    {
      "epoch": 3.953147877013177,
      "grad_norm": 6.270499229431152,
      "learning_rate": 4.190336749633968e-06,
      "loss": 0.372,
      "step": 5400
    },
    {
      "epoch": 3.9897510980966326,
      "grad_norm": 10.044820785522461,
      "learning_rate": 4.043923865300147e-06,
      "loss": 0.3798,
      "step": 5450
    },
    {
      "epoch": 4.0,
      "eval_accuracy": 0.6328696925329429,
      "eval_loss": 0.8756816387176514,
      "eval_runtime": 13.6217,
      "eval_samples_per_second": 200.562,
      "eval_steps_per_second": 6.313,
      "step": 5464
    },
    {
      "epoch": 4.026354319180088,
      "grad_norm": 10.713644027709961,
      "learning_rate": 3.897510980966326e-06,
      "loss": 0.3022,
      "step": 5500
    },
    {
      "epoch": 4.062957540263543,
      "grad_norm": 11.245952606201172,
      "learning_rate": 3.751098096632504e-06,
      "loss": 0.29,
      "step": 5550
    },
    {
      "epoch": 4.099560761346998,
      "grad_norm": 9.67605209350586,
      "learning_rate": 3.6046852122986826e-06,
      "loss": 0.2848,
      "step": 5600
    },
    {
      "epoch": 4.136163982430454,
      "grad_norm": 14.651419639587402,
      "learning_rate": 3.4582723279648612e-06,
      "loss": 0.2558,
      "step": 5650
    },
    {
      "epoch": 4.172767203513909,
      "grad_norm": 14.767287254333496,
      "learning_rate": 3.31185944363104e-06,
      "loss": 0.2863,
      "step": 5700
    },
    {
      "epoch": 4.209370424597364,
      "grad_norm": 14.15087604522705,
      "learning_rate": 3.165446559297218e-06,
      "loss": 0.2609,
      "step": 5750
    },
    {
      "epoch": 4.24597364568082,
      "grad_norm": 16.17600440979004,
      "learning_rate": 3.019033674963397e-06,
      "loss": 0.2924,
      "step": 5800
    },
    {
      "epoch": 4.282576866764275,
      "grad_norm": 21.407875061035156,
      "learning_rate": 2.8726207906295755e-06,
      "loss": 0.2886,
      "step": 5850
    },
    {
      "epoch": 4.31918008784773,
      "grad_norm": 16.692087173461914,
      "learning_rate": 2.726207906295754e-06,
      "loss": 0.284,
      "step": 5900
    },
    {
      "epoch": 4.355783308931186,
      "grad_norm": 7.582740783691406,
      "learning_rate": 2.579795021961933e-06,
      "loss": 0.2575,
      "step": 5950
    },
    {
      "epoch": 4.392386530014641,
      "grad_norm": 15.145219802856445,
      "learning_rate": 2.4333821376281115e-06,
      "loss": 0.2686,
      "step": 6000
    },
    {
      "epoch": 4.428989751098096,
      "grad_norm": 13.306376457214355,
      "learning_rate": 2.2869692532942898e-06,
      "loss": 0.3013,
      "step": 6050
    },
    {
      "epoch": 4.465592972181552,
      "grad_norm": 6.897510051727295,
      "learning_rate": 2.1405563689604685e-06,
      "loss": 0.2736,
      "step": 6100
    },
    {
      "epoch": 4.502196193265007,
      "grad_norm": 10.133888244628906,
      "learning_rate": 1.994143484626647e-06,
      "loss": 0.2548,
      "step": 6150
    },
    {
      "epoch": 4.538799414348462,
      "grad_norm": 21.59294319152832,
      "learning_rate": 1.8477306002928258e-06,
      "loss": 0.2898,
      "step": 6200
    },
    {
      "epoch": 4.575402635431918,
      "grad_norm": 10.04138469696045,
      "learning_rate": 1.7013177159590045e-06,
      "loss": 0.2729,
      "step": 6250
    },
    {
      "epoch": 4.612005856515373,
      "grad_norm": 9.01938533782959,
      "learning_rate": 1.554904831625183e-06,
      "loss": 0.284,
      "step": 6300
    },
    {
      "epoch": 4.648609077598829,
      "grad_norm": 23.59950065612793,
      "learning_rate": 1.4084919472913616e-06,
      "loss": 0.2727,
      "step": 6350
    },
    {
      "epoch": 4.685212298682284,
      "grad_norm": 10.790794372558594,
      "learning_rate": 1.2620790629575403e-06,
      "loss": 0.2561,
      "step": 6400
    },
    {
      "epoch": 4.721815519765739,
      "grad_norm": 11.493463516235352,
      "learning_rate": 1.115666178623719e-06,
      "loss": 0.3014,
      "step": 6450
    },
    {
      "epoch": 4.758418740849194,
      "grad_norm": 11.314969062805176,
      "learning_rate": 9.692532942898976e-07,
      "loss": 0.2901,
      "step": 6500
    },
    {
      "epoch": 4.79502196193265,
      "grad_norm": 10.80167007446289,
      "learning_rate": 8.228404099560762e-07,
      "loss": 0.3001,
      "step": 6550
    },
    {
      "epoch": 4.831625183016105,
      "grad_norm": 11.179972648620605,
      "learning_rate": 6.764275256222548e-07,
      "loss": 0.2845,
      "step": 6600
    },
    {
      "epoch": 4.868228404099561,
      "grad_norm": 19.67405891418457,
      "learning_rate": 5.300146412884334e-07,
      "loss": 0.2801,
      "step": 6650
    },
    {
      "epoch": 4.904831625183016,
      "grad_norm": 12.151304244995117,
      "learning_rate": 3.8360175695461205e-07,
      "loss": 0.2561,
      "step": 6700
    },
    {
      "epoch": 4.941434846266471,
      "grad_norm": 5.352505207061768,
      "learning_rate": 2.3718887262079066e-07,
      "loss": 0.2813,
      "step": 6750
    },
    {
      "epoch": 4.978038067349927,
      "grad_norm": 8.123278617858887,
      "learning_rate": 9.077598828696926e-08,
      "loss": 0.3164,
      "step": 6800
    },
    {
      "epoch": 5.0,
      "eval_accuracy": 0.6273792093704246,
      "eval_loss": 1.0964716672897339,
      "eval_runtime": 13.6672,
      "eval_samples_per_second": 199.895,
      "eval_steps_per_second": 6.292,
      "step": 6830
    }
  ],
  "logging_steps": 50,
  "max_steps": 6830,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 5,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 1.43750725095936e+16,
  "train_batch_size": 16,
  "trial_name": null,
  "trial_params": null
}