{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.9887005649717513,
  "eval_steps": 500,
  "global_step": 154,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0129136400322841,
      "grad_norm": 26.062711090700205,
      "learning_rate": 1.25e-07,
      "loss": 0.3676,
      "step": 1
    },
    {
      "epoch": 0.0258272800645682,
      "grad_norm": 25.492701005475226,
      "learning_rate": 2.5e-07,
      "loss": 0.3636,
      "step": 2
    },
    {
      "epoch": 0.0387409200968523,
      "grad_norm": 28.061770353754802,
      "learning_rate": 3.75e-07,
      "loss": 0.3788,
      "step": 3
    },
    {
      "epoch": 0.0516545601291364,
      "grad_norm": 28.99654186845467,
      "learning_rate": 5e-07,
      "loss": 0.395,
      "step": 4
    },
    {
      "epoch": 0.0645682001614205,
      "grad_norm": 20.935596355684492,
      "learning_rate": 6.249999999999999e-07,
      "loss": 0.3741,
      "step": 5
    },
    {
      "epoch": 0.0774818401937046,
      "grad_norm": 26.035750645230728,
      "learning_rate": 7.5e-07,
      "loss": 0.3701,
      "step": 6
    },
    {
      "epoch": 0.0903954802259887,
      "grad_norm": 36.159284168135784,
      "learning_rate": 8.75e-07,
      "loss": 0.3642,
      "step": 7
    },
    {
      "epoch": 0.1033091202582728,
      "grad_norm": 32.251355945752145,
      "learning_rate": 1e-06,
      "loss": 0.3473,
      "step": 8
    },
    {
      "epoch": 0.1162227602905569,
      "grad_norm": 35.98055219357963,
      "learning_rate": 1.125e-06,
      "loss": 0.3728,
      "step": 9
    },
    {
      "epoch": 0.129136400322841,
      "grad_norm": 40.267311648036056,
      "learning_rate": 1.2499999999999999e-06,
      "loss": 0.3472,
      "step": 10
    },
    {
      "epoch": 0.1420500403551251,
      "grad_norm": 54.431376922416256,
      "learning_rate": 1.375e-06,
      "loss": 0.3941,
      "step": 11
    },
    {
      "epoch": 0.1549636803874092,
      "grad_norm": 52.787609103487405,
      "learning_rate": 1.5e-06,
      "loss": 0.3694,
      "step": 12
    },
    {
      "epoch": 0.1678773204196933,
      "grad_norm": 51.470919986790086,
      "learning_rate": 1.625e-06,
      "loss": 0.3587,
      "step": 13
    },
    {
      "epoch": 0.1807909604519774,
      "grad_norm": 44.49429708878717,
      "learning_rate": 1.75e-06,
      "loss": 0.3369,
      "step": 14
    },
    {
      "epoch": 0.1937046004842615,
      "grad_norm": 48.61799720707049,
      "learning_rate": 1.8749999999999998e-06,
      "loss": 0.3811,
      "step": 15
    },
    {
      "epoch": 0.2066182405165456,
      "grad_norm": 41.649896181802205,
      "learning_rate": 2e-06,
      "loss": 0.3911,
      "step": 16
    },
    {
      "epoch": 0.2195318805488297,
      "grad_norm": 42.44898868701815,
      "learning_rate": 1.999740884841349e-06,
      "loss": 0.387,
      "step": 17
    },
    {
      "epoch": 0.2324455205811138,
      "grad_norm": 41.183443431747065,
      "learning_rate": 1.9989636736467275e-06,
      "loss": 0.3946,
      "step": 18
    },
    {
      "epoch": 0.2453591606133979,
      "grad_norm": 34.50686543223266,
      "learning_rate": 1.9976687691905393e-06,
      "loss": 0.4088,
      "step": 19
    },
    {
      "epoch": 0.258272800645682,
      "grad_norm": 27.279515728605386,
      "learning_rate": 1.995856842531531e-06,
      "loss": 0.3724,
      "step": 20
    },
    {
      "epoch": 0.2711864406779661,
      "grad_norm": 30.83455319215996,
      "learning_rate": 1.993528832665031e-06,
      "loss": 0.3866,
      "step": 21
    },
    {
      "epoch": 0.2841000807102502,
      "grad_norm": 26.247716082750618,
      "learning_rate": 1.9906859460363304e-06,
      "loss": 0.3521,
      "step": 22
    },
    {
      "epoch": 0.2970137207425343,
      "grad_norm": 27.831795874032476,
      "learning_rate": 1.9873296559154695e-06,
      "loss": 0.4159,
      "step": 23
    },
    {
      "epoch": 0.3099273607748184,
      "grad_norm": 28.5584628971039,
      "learning_rate": 1.983461701633742e-06,
      "loss": 0.3767,
      "step": 24
    },
    {
      "epoch": 0.3228410008071025,
      "grad_norm": 21.378018038217974,
      "learning_rate": 1.979084087682323e-06,
      "loss": 0.3304,
      "step": 25
    },
    {
      "epoch": 0.3357546408393866,
      "grad_norm": 23.044867887515178,
      "learning_rate": 1.9741990826734792e-06,
      "loss": 0.3705,
      "step": 26
    },
    {
      "epoch": 0.3486682808716707,
      "grad_norm": 26.947778108928976,
      "learning_rate": 1.968809218164906e-06,
      "loss": 0.404,
      "step": 27
    },
    {
      "epoch": 0.3615819209039548,
      "grad_norm": 22.039479721325353,
      "learning_rate": 1.9629172873477994e-06,
      "loss": 0.3365,
      "step": 28
    },
    {
      "epoch": 0.3744955609362389,
      "grad_norm": 21.274934357850633,
      "learning_rate": 1.956526343599335e-06,
      "loss": 0.3306,
      "step": 29
    },
    {
      "epoch": 0.387409200968523,
      "grad_norm": 23.40835330420104,
      "learning_rate": 1.949639698900319e-06,
      "loss": 0.3246,
      "step": 30
    },
    {
      "epoch": 0.4003228410008071,
      "grad_norm": 26.03684733839572,
      "learning_rate": 1.9422609221188204e-06,
      "loss": 0.3121,
      "step": 31
    },
    {
      "epoch": 0.4132364810330912,
      "grad_norm": 22.817733184810667,
      "learning_rate": 1.9343938371606708e-06,
      "loss": 0.363,
      "step": 32
    },
    {
      "epoch": 0.4261501210653753,
      "grad_norm": 27.888538264779257,
      "learning_rate": 1.926042520987805e-06,
      "loss": 0.3599,
      "step": 33
    },
    {
      "epoch": 0.4390637610976594,
      "grad_norm": 23.664544295074048,
      "learning_rate": 1.9172113015054528e-06,
      "loss": 0.3259,
      "step": 34
    },
    {
      "epoch": 0.4519774011299435,
      "grad_norm": 26.814661596965937,
      "learning_rate": 1.907904755319289e-06,
      "loss": 0.3622,
      "step": 35
    },
    {
      "epoch": 0.4648910411622276,
      "grad_norm": 23.047494151516773,
      "learning_rate": 1.898127705363696e-06,
      "loss": 0.3595,
      "step": 36
    },
    {
      "epoch": 0.4778046811945117,
      "grad_norm": 23.769391103002327,
      "learning_rate": 1.887885218402375e-06,
      "loss": 0.3418,
      "step": 37
    },
    {
      "epoch": 0.4907183212267958,
      "grad_norm": 21.448479890321668,
      "learning_rate": 1.8771826024025943e-06,
      "loss": 0.3112,
      "step": 38
    },
    {
      "epoch": 0.5036319612590799,
      "grad_norm": 21.618299294862208,
      "learning_rate": 1.8660254037844386e-06,
      "loss": 0.2935,
      "step": 39
    },
    {
      "epoch": 0.516545601291364,
      "grad_norm": 23.83881914235937,
      "learning_rate": 1.8544194045464886e-06,
      "loss": 0.3487,
      "step": 40
    },
    {
      "epoch": 0.5294592413236481,
      "grad_norm": 22.34191137757626,
      "learning_rate": 1.8423706192694115e-06,
      "loss": 0.3337,
      "step": 41
    },
    {
      "epoch": 0.5423728813559322,
      "grad_norm": 22.680526606663665,
      "learning_rate": 1.8298852919990251e-06,
      "loss": 0.3326,
      "step": 42
    },
    {
      "epoch": 0.5552865213882163,
      "grad_norm": 21.047157564860804,
      "learning_rate": 1.8169698930104419e-06,
      "loss": 0.3262,
      "step": 43
    },
    {
      "epoch": 0.5682001614205004,
      "grad_norm": 23.210220151582366,
      "learning_rate": 1.8036311154549781e-06,
      "loss": 0.3531,
      "step": 44
    },
    {
      "epoch": 0.5811138014527845,
      "grad_norm": 23.059400786558072,
      "learning_rate": 1.7898758718915585e-06,
      "loss": 0.3042,
      "step": 45
    },
    {
      "epoch": 0.5940274414850686,
      "grad_norm": 20.985554392897924,
      "learning_rate": 1.7757112907044198e-06,
      "loss": 0.3211,
      "step": 46
    },
    {
      "epoch": 0.6069410815173527,
      "grad_norm": 24.381494037046473,
      "learning_rate": 1.7611447124089646e-06,
      "loss": 0.3501,
      "step": 47
    },
    {
      "epoch": 0.6198547215496368,
      "grad_norm": 21.163889567259087,
      "learning_rate": 1.7461836858476855e-06,
      "loss": 0.3407,
      "step": 48
    },
    {
      "epoch": 0.632768361581921,
      "grad_norm": 17.673857406454214,
      "learning_rate": 1.7308359642781241e-06,
      "loss": 0.2912,
      "step": 49
    },
    {
      "epoch": 0.645682001614205,
      "grad_norm": 20.851302876331236,
      "learning_rate": 1.7151095013548993e-06,
      "loss": 0.311,
      "step": 50
    },
    {
      "epoch": 0.6585956416464891,
      "grad_norm": 20.07386513124459,
      "learning_rate": 1.699012447007882e-06,
      "loss": 0.2891,
      "step": 51
    },
    {
      "epoch": 0.6715092816787732,
      "grad_norm": 16.654008916412547,
      "learning_rate": 1.682553143218654e-06,
      "loss": 0.2724,
      "step": 52
    },
    {
      "epoch": 0.6844229217110573,
      "grad_norm": 20.770464397707823,
      "learning_rate": 1.6657401196974403e-06,
      "loss": 0.332,
      "step": 53
    },
    {
      "epoch": 0.6973365617433414,
      "grad_norm": 22.094352414249297,
      "learning_rate": 1.648582089462756e-06,
      "loss": 0.344,
      "step": 54
    },
    {
      "epoch": 0.7102502017756255,
      "grad_norm": 20.494531642603356,
      "learning_rate": 1.6310879443260529e-06,
      "loss": 0.3041,
      "step": 55
    },
    {
      "epoch": 0.7231638418079096,
      "grad_norm": 19.8177079163147,
      "learning_rate": 1.6132667502837162e-06,
      "loss": 0.3162,
      "step": 56
    },
    {
      "epoch": 0.7360774818401937,
      "grad_norm": 19.049318629916456,
      "learning_rate": 1.5951277428187897e-06,
      "loss": 0.3094,
      "step": 57
    },
    {
      "epoch": 0.7489911218724778,
      "grad_norm": 21.597536610054288,
      "learning_rate": 1.5766803221148673e-06,
      "loss": 0.3325,
      "step": 58
    },
    {
      "epoch": 0.7619047619047619,
      "grad_norm": 20.70892358284607,
      "learning_rate": 1.5579340481846335e-06,
      "loss": 0.3397,
      "step": 59
    },
    {
      "epoch": 0.774818401937046,
      "grad_norm": 20.308915238405316,
      "learning_rate": 1.5388986359155755e-06,
      "loss": 0.3265,
      "step": 60
    },
    {
      "epoch": 0.7877320419693301,
      "grad_norm": 22.627261985685145,
      "learning_rate": 1.5195839500354335e-06,
      "loss": 0.329,
      "step": 61
    },
    {
      "epoch": 0.8006456820016142,
      "grad_norm": 19.565397419582293,
      "learning_rate": 1.5e-06,
      "loss": 0.3167,
      "step": 62
    },
    {
      "epoch": 0.8135593220338984,
      "grad_norm": 18.907828874624407,
      "learning_rate": 1.4801569348059155e-06,
      "loss": 0.3146,
      "step": 63
    },
    {
      "epoch": 0.8264729620661824,
      "grad_norm": 22.644125199961817,
      "learning_rate": 1.460065037731152e-06,
      "loss": 0.3072,
      "step": 64
    },
    {
      "epoch": 0.8393866020984665,
      "grad_norm": 20.786645375646636,
      "learning_rate": 1.4397347210059057e-06,
      "loss": 0.3159,
      "step": 65
    },
    {
      "epoch": 0.8523002421307506,
      "grad_norm": 18.57141588916772,
      "learning_rate": 1.419176520416664e-06,
      "loss": 0.3103,
      "step": 66
    },
    {
      "epoch": 0.8652138821630347,
      "grad_norm": 17.225037473257153,
      "learning_rate": 1.3984010898462415e-06,
      "loss": 0.3027,
      "step": 67
    },
    {
      "epoch": 0.8781275221953188,
      "grad_norm": 18.21584058115977,
      "learning_rate": 1.3774191957526142e-06,
      "loss": 0.2895,
      "step": 68
    },
    {
      "epoch": 0.8910411622276029,
      "grad_norm": 20.509880420628917,
      "learning_rate": 1.3562417115894168e-06,
      "loss": 0.3098,
      "step": 69
    },
    {
      "epoch": 0.903954802259887,
      "grad_norm": 17.29318336230985,
      "learning_rate": 1.334879612170986e-06,
      "loss": 0.2954,
      "step": 70
    },
    {
      "epoch": 0.9168684422921711,
      "grad_norm": 18.377551002595997,
      "learning_rate": 1.3133439679848822e-06,
      "loss": 0.301,
      "step": 71
    },
    {
      "epoch": 0.9297820823244553,
      "grad_norm": 26.11617966128903,
      "learning_rate": 1.2916459394548249e-06,
      "loss": 0.3493,
      "step": 72
    },
    {
      "epoch": 0.9426957223567393,
      "grad_norm": 19.11873065402881,
      "learning_rate": 1.2697967711570242e-06,
      "loss": 0.3055,
      "step": 73
    },
    {
      "epoch": 0.9556093623890234,
      "grad_norm": 20.15187293059899,
      "learning_rate": 1.2478077859929e-06,
      "loss": 0.3267,
      "step": 74
    },
    {
      "epoch": 0.9685230024213075,
      "grad_norm": 17.356504787996283,
      "learning_rate": 1.2256903793212105e-06,
      "loss": 0.3,
      "step": 75
    },
    {
      "epoch": 0.9814366424535916,
      "grad_norm": 20.116822076601576,
      "learning_rate": 1.203456013052634e-06,
      "loss": 0.3139,
      "step": 76
    },
    {
      "epoch": 0.9943502824858758,
      "grad_norm": 20.23077529953701,
      "learning_rate": 1.1811162097098558e-06,
      "loss": 0.2941,
      "step": 77
    },
    {
      "epoch": 1.0072639225181599,
      "grad_norm": 14.472243001169707,
      "learning_rate": 1.1586825464562514e-06,
      "loss": 0.217,
      "step": 78
    },
    {
      "epoch": 1.020177562550444,
      "grad_norm": 11.1760098722801,
      "learning_rate": 1.1361666490962467e-06,
      "loss": 0.1391,
      "step": 79
    },
    {
      "epoch": 1.033091202582728,
      "grad_norm": 12.929875937980091,
      "learning_rate": 1.1135801860504748e-06,
      "loss": 0.144,
      "step": 80
    },
    {
      "epoch": 1.0460048426150121,
      "grad_norm": 14.401479453367932,
      "learning_rate": 1.0909348623088471e-06,
      "loss": 0.1525,
      "step": 81
    },
    {
      "epoch": 1.0589184826472962,
      "grad_norm": 11.466941187008539,
      "learning_rate": 1.068242413364671e-06,
      "loss": 0.1492,
      "step": 82
    },
    {
      "epoch": 1.0718321226795804,
      "grad_norm": 13.844595316123245,
      "learning_rate": 1.0455145991329637e-06,
      "loss": 0.1317,
      "step": 83
    },
    {
      "epoch": 1.0847457627118644,
      "grad_norm": 14.390882441536627,
      "learning_rate": 1.0227631978561055e-06,
      "loss": 0.1574,
      "step": 84
    },
    {
      "epoch": 1.0976594027441484,
      "grad_norm": 13.517598225715695,
      "learning_rate": 1e-06,
      "loss": 0.1409,
      "step": 85
    },
    {
      "epoch": 1.1105730427764327,
      "grad_norm": 13.993115366941224,
      "learning_rate": 9.772368021438942e-07,
      "loss": 0.1464,
      "step": 86
    },
    {
      "epoch": 1.1234866828087167,
      "grad_norm": 12.2423524228492,
      "learning_rate": 9.544854008670366e-07,
      "loss": 0.1207,
      "step": 87
    },
    {
      "epoch": 1.136400322841001,
      "grad_norm": 14.122285998417135,
      "learning_rate": 9.317575866353291e-07,
      "loss": 0.1251,
      "step": 88
    },
    {
      "epoch": 1.149313962873285,
      "grad_norm": 15.560999029473845,
      "learning_rate": 9.09065137691153e-07,
      "loss": 0.1209,
      "step": 89
    },
    {
      "epoch": 1.162227602905569,
      "grad_norm": 13.507106399175507,
      "learning_rate": 8.86419813949525e-07,
      "loss": 0.1091,
      "step": 90
    },
    {
      "epoch": 1.1751412429378532,
      "grad_norm": 13.891730510521498,
      "learning_rate": 8.638333509037535e-07,
      "loss": 0.1256,
      "step": 91
    },
    {
      "epoch": 1.1880548829701372,
      "grad_norm": 14.992783015617869,
      "learning_rate": 8.413174535437487e-07,
      "loss": 0.137,
      "step": 92
    },
    {
      "epoch": 1.2009685230024214,
      "grad_norm": 15.910575644376518,
      "learning_rate": 8.188837902901441e-07,
      "loss": 0.1347,
      "step": 93
    },
    {
      "epoch": 1.2138821630347054,
      "grad_norm": 19.326658299865034,
      "learning_rate": 7.965439869473663e-07,
      "loss": 0.1473,
      "step": 94
    },
    {
      "epoch": 1.2267958030669894,
      "grad_norm": 21.139159288452323,
      "learning_rate": 7.743096206787893e-07,
      "loss": 0.1449,
      "step": 95
    },
    {
      "epoch": 1.2397094430992737,
      "grad_norm": 19.57794361709339,
      "learning_rate": 7.521922140071003e-07,
      "loss": 0.1429,
      "step": 96
    },
    {
      "epoch": 1.2526230831315577,
      "grad_norm": 18.770894293602584,
      "learning_rate": 7.302032288429756e-07,
      "loss": 0.1533,
      "step": 97
    },
    {
      "epoch": 1.2655367231638417,
      "grad_norm": 19.148357746011182,
      "learning_rate": 7.083540605451749e-07,
      "loss": 0.164,
      "step": 98
    },
    {
      "epoch": 1.278450363196126,
      "grad_norm": 15.077187564406879,
      "learning_rate": 6.866560320151178e-07,
      "loss": 0.1404,
      "step": 99
    },
    {
      "epoch": 1.29136400322841,
      "grad_norm": 16.058568106787366,
      "learning_rate": 6.651203878290138e-07,
      "loss": 0.1327,
      "step": 100
    },
    {
      "epoch": 1.3042776432606942,
      "grad_norm": 14.029168964991912,
      "learning_rate": 6.437582884105834e-07,
      "loss": 0.136,
      "step": 101
    },
    {
      "epoch": 1.3171912832929782,
      "grad_norm": 15.10100507251639,
      "learning_rate": 6.225808042473857e-07,
      "loss": 0.1149,
      "step": 102
    },
    {
      "epoch": 1.3301049233252624,
      "grad_norm": 11.718976894577146,
      "learning_rate": 6.015989101537586e-07,
      "loss": 0.1207,
      "step": 103
    },
    {
      "epoch": 1.3430185633575464,
      "grad_norm": 12.627764504381856,
      "learning_rate": 5.808234795833362e-07,
      "loss": 0.1081,
      "step": 104
    },
    {
      "epoch": 1.3559322033898304,
      "grad_norm": 15.041188760891716,
      "learning_rate": 5.602652789940941e-07,
      "loss": 0.1262,
      "step": 105
    },
    {
      "epoch": 1.3688458434221147,
      "grad_norm": 21.09158607094679,
      "learning_rate": 5.399349622688478e-07,
      "loss": 0.1314,
      "step": 106
    },
    {
      "epoch": 1.3817594834543987,
      "grad_norm": 20.9326453419062,
      "learning_rate": 5.198430651940845e-07,
      "loss": 0.1346,
      "step": 107
    },
    {
      "epoch": 1.3946731234866827,
      "grad_norm": 12.206782639953575,
      "learning_rate": 5.000000000000002e-07,
      "loss": 0.1157,
      "step": 108
    },
    {
      "epoch": 1.407586763518967,
      "grad_norm": 13.75893208112109,
      "learning_rate": 4.804160499645667e-07,
      "loss": 0.1384,
      "step": 109
    },
    {
      "epoch": 1.420500403551251,
      "grad_norm": 13.597298102282235,
      "learning_rate": 4.6110136408442445e-07,
      "loss": 0.1137,
      "step": 110
    },
    {
      "epoch": 1.4334140435835352,
      "grad_norm": 13.538763536743675,
      "learning_rate": 4.4206595181536664e-07,
      "loss": 0.1203,
      "step": 111
    },
    {
      "epoch": 1.4463276836158192,
      "grad_norm": 12.774053600703025,
      "learning_rate": 4.233196778851329e-07,
      "loss": 0.1122,
      "step": 112
    },
    {
      "epoch": 1.4592413236481034,
      "grad_norm": 12.870680344192039,
      "learning_rate": 4.0487225718121045e-07,
      "loss": 0.1228,
      "step": 113
    },
    {
      "epoch": 1.4721549636803875,
      "grad_norm": 15.02538486601625,
      "learning_rate": 3.867332497162835e-07,
      "loss": 0.1615,
      "step": 114
    },
    {
      "epoch": 1.4850686037126715,
      "grad_norm": 12.30281015942022,
      "learning_rate": 3.6891205567394746e-07,
      "loss": 0.099,
      "step": 115
    },
    {
      "epoch": 1.4979822437449557,
      "grad_norm": 10.419235242267765,
      "learning_rate": 3.5141791053724404e-07,
      "loss": 0.1096,
      "step": 116
    },
    {
      "epoch": 1.5108958837772397,
      "grad_norm": 11.16767240028713,
      "learning_rate": 3.3425988030255945e-07,
      "loss": 0.1199,
      "step": 117
    },
    {
      "epoch": 1.5238095238095237,
      "grad_norm": 13.158987202921047,
      "learning_rate": 3.174468567813461e-07,
      "loss": 0.1324,
      "step": 118
    },
    {
      "epoch": 1.536723163841808,
      "grad_norm": 11.844158983050455,
      "learning_rate": 3.009875529921181e-07,
      "loss": 0.1106,
      "step": 119
    },
    {
      "epoch": 1.549636803874092,
      "grad_norm": 14.439077228213756,
      "learning_rate": 2.848904986451005e-07,
      "loss": 0.117,
      "step": 120
    },
    {
      "epoch": 1.562550443906376,
      "grad_norm": 12.407600386041583,
      "learning_rate": 2.6916403572187587e-07,
      "loss": 0.1159,
      "step": 121
    },
    {
      "epoch": 1.5754640839386602,
      "grad_norm": 13.151590889043437,
      "learning_rate": 2.538163141523145e-07,
      "loss": 0.1236,
      "step": 122
    },
    {
      "epoch": 1.5883777239709445,
      "grad_norm": 12.342415661391085,
      "learning_rate": 2.3885528759103536e-07,
      "loss": 0.1191,
      "step": 123
    },
    {
      "epoch": 1.6012913640032282,
      "grad_norm": 25.46717666484534,
      "learning_rate": 2.2428870929558007e-07,
      "loss": 0.1464,
      "step": 124
    },
    {
      "epoch": 1.6142050040355125,
      "grad_norm": 13.835165114851305,
      "learning_rate": 2.101241281084416e-07,
      "loss": 0.114,
      "step": 125
    },
    {
      "epoch": 1.6271186440677967,
      "grad_norm": 12.278805533417628,
      "learning_rate": 1.9636888454502177e-07,
      "loss": 0.1095,
      "step": 126
    },
    {
      "epoch": 1.6400322841000807,
      "grad_norm": 9.656229159282553,
      "learning_rate": 1.83030106989558e-07,
      "loss": 0.1048,
      "step": 127
    },
    {
      "epoch": 1.6529459241323647,
      "grad_norm": 10.723649324385244,
      "learning_rate": 1.7011470800097495e-07,
      "loss": 0.1144,
      "step": 128
    },
    {
      "epoch": 1.665859564164649,
      "grad_norm": 11.18317780807273,
      "learning_rate": 1.576293807305885e-07,
      "loss": 0.1124,
      "step": 129
    },
    {
      "epoch": 1.678773204196933,
      "grad_norm": 19.22525610881903,
      "learning_rate": 1.4558059545351142e-07,
      "loss": 0.1185,
      "step": 130
    },
    {
      "epoch": 1.691686844229217,
      "grad_norm": 13.840544386244854,
      "learning_rate": 1.3397459621556128e-07,
      "loss": 0.1172,
      "step": 131
    },
    {
      "epoch": 1.7046004842615012,
      "grad_norm": 9.863106547796383,
      "learning_rate": 1.2281739759740574e-07,
      "loss": 0.1123,
      "step": 132
    },
    {
      "epoch": 1.7175141242937855,
      "grad_norm": 12.249146236722565,
      "learning_rate": 1.1211478159762478e-07,
      "loss": 0.1026,
      "step": 133
    },
    {
      "epoch": 1.7304277643260693,
      "grad_norm": 19.53823199805843,
      "learning_rate": 1.0187229463630398e-07,
      "loss": 0.1245,
      "step": 134
    },
    {
      "epoch": 1.7433414043583535,
      "grad_norm": 13.813268314685585,
      "learning_rate": 9.209524468071095e-08,
      "loss": 0.1259,
      "step": 135
    },
    {
      "epoch": 1.7562550443906377,
      "grad_norm": 13.002603579106614,
      "learning_rate": 8.278869849454717e-08,
      "loss": 0.126,
      "step": 136
    },
    {
      "epoch": 1.7691686844229217,
      "grad_norm": 11.790731140563368,
      "learning_rate": 7.395747901219473e-08,
      "loss": 0.1163,
      "step": 137
    },
    {
      "epoch": 1.7820823244552058,
      "grad_norm": 15.611451697022517,
      "learning_rate": 6.560616283932896e-08,
      "loss": 0.1226,
      "step": 138
    },
    {
      "epoch": 1.79499596448749,
      "grad_norm": 24.410099015402505,
      "learning_rate": 5.77390778811796e-08,
      "loss": 0.1155,
      "step": 139
    },
    {
      "epoch": 1.807909604519774,
      "grad_norm": 9.829584476780473,
      "learning_rate": 5.036030109968081e-08,
      "loss": 0.0972,
      "step": 140
    },
    {
      "epoch": 1.820823244552058,
      "grad_norm": 14.542859998120877,
      "learning_rate": 4.347365640066525e-08,
      "loss": 0.1126,
      "step": 141
    },
    {
      "epoch": 1.8337368845843423,
      "grad_norm": 20.231191724041285,
      "learning_rate": 3.708271265220087e-08,
      "loss": 0.1175,
      "step": 142
    },
    {
      "epoch": 1.8466505246166263,
      "grad_norm": 12.407632368224395,
      "learning_rate": 3.119078183509372e-08,
      "loss": 0.1149,
      "step": 143
    },
    {
      "epoch": 1.8595641646489103,
      "grad_norm": 14.44661290494769,
      "learning_rate": 2.580091732652101e-08,
      "loss": 0.1181,
      "step": 144
    },
    {
      "epoch": 1.8724778046811945,
      "grad_norm": 18.79024412476877,
      "learning_rate": 2.0915912317677088e-08,
      "loss": 0.1475,
      "step": 145
    },
    {
      "epoch": 1.8853914447134787,
      "grad_norm": 15.376557680187412,
      "learning_rate": 1.6538298366257974e-08,
      "loss": 0.1255,
      "step": 146
    },
    {
      "epoch": 1.8983050847457628,
      "grad_norm": 15.33944835023639,
      "learning_rate": 1.2670344084530382e-08,
      "loss": 0.1034,
      "step": 147
    },
    {
      "epoch": 1.9112187247780468,
      "grad_norm": 13.112291504434255,
      "learning_rate": 9.314053963669244e-09,
      "loss": 0.1086,
      "step": 148
    },
    {
      "epoch": 1.924132364810331,
      "grad_norm": 12.045688670027003,
      "learning_rate": 6.471167334968886e-09,
      "loss": 0.1096,
      "step": 149
    },
    {
      "epoch": 1.937046004842615,
      "grad_norm": 15.127302877916637,
      "learning_rate": 4.143157468468716e-09,
      "loss": 0.1126,
      "step": 150
    },
    {
      "epoch": 1.949959644874899,
      "grad_norm": 13.774563816455347,
      "learning_rate": 2.331230809460738e-09,
      "loss": 0.1245,
      "step": 151
    },
    {
      "epoch": 1.9628732849071833,
      "grad_norm": 32.82358453731887,
      "learning_rate": 1.036326353272443e-09,
      "loss": 0.1181,
      "step": 152
    },
    {
      "epoch": 1.9757869249394673,
      "grad_norm": 14.92150992963698,
      "learning_rate": 2.5911515865084663e-10,
      "loss": 0.1143,
      "step": 153
    },
    {
      "epoch": 1.9887005649717513,
      "grad_norm": 13.25981887405618,
      "learning_rate": 0.0,
      "loss": 0.1276,
      "step": 154
    },
    {
      "epoch": 1.9887005649717513,
      "step": 154,
      "total_flos": 72055202119680.0,
      "train_loss": 0.2331618887747263,
      "train_runtime": 3106.2852,
      "train_samples_per_second": 12.759,
      "train_steps_per_second": 0.05
    }
  ],
  "logging_steps": 1,
  "max_steps": 154,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 2,
  "save_steps": 100,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 72055202119680.0,
  "train_batch_size": 2,
  "trial_name": null,
  "trial_params": null
}