| { |
| "best_global_step": 1734, |
| "best_metric": 0.8220858895705522, |
| "best_model_checkpoint": "RALL_RGBCROP_Aug16F-1DO1/checkpoint-1734", |
| "epoch": 9.083477758521086, |
| "eval_steps": 500, |
| "global_step": 2890, |
| "is_hyper_param_search": false, |
| "is_local_process_zero": true, |
| "is_world_process_zero": true, |
| "log_history": [ |
| { |
| "epoch": 0.0028885037550548816, |
| "grad_norm": 8.37844467163086, |
| "learning_rate": 1.2968299711815564e-07, |
| "loss": 0.7077, |
| "step": 10 |
| }, |
| { |
| "epoch": 0.005777007510109763, |
| "grad_norm": 12.892661094665527, |
| "learning_rate": 2.7377521613832854e-07, |
| "loss": 0.7416, |
| "step": 20 |
| }, |
| { |
| "epoch": 0.008665511265164644, |
| "grad_norm": 8.258193969726562, |
| "learning_rate": 4.1786743515850145e-07, |
| "loss": 0.7147, |
| "step": 30 |
| }, |
| { |
| "epoch": 0.011554015020219527, |
| "grad_norm": 8.39278507232666, |
| "learning_rate": 5.619596541786745e-07, |
| "loss": 0.6718, |
| "step": 40 |
| }, |
| { |
| "epoch": 0.014442518775274409, |
| "grad_norm": 7.893959045410156, |
| "learning_rate": 7.060518731988474e-07, |
| "loss": 0.7027, |
| "step": 50 |
| }, |
| { |
| "epoch": 0.01733102253032929, |
| "grad_norm": 8.608739852905273, |
| "learning_rate": 8.501440922190203e-07, |
| "loss": 0.7158, |
| "step": 60 |
| }, |
| { |
| "epoch": 0.020219526285384173, |
| "grad_norm": 9.674214363098145, |
| "learning_rate": 9.94236311239193e-07, |
| "loss": 0.7614, |
| "step": 70 |
| }, |
| { |
| "epoch": 0.023108030040439053, |
| "grad_norm": 7.737273693084717, |
| "learning_rate": 1.138328530259366e-06, |
| "loss": 0.6871, |
| "step": 80 |
| }, |
| { |
| "epoch": 0.025996533795493933, |
| "grad_norm": 8.532608032226562, |
| "learning_rate": 1.282420749279539e-06, |
| "loss": 0.7358, |
| "step": 90 |
| }, |
| { |
| "epoch": 0.028885037550548817, |
| "grad_norm": 7.309403419494629, |
| "learning_rate": 1.426512968299712e-06, |
| "loss": 0.6698, |
| "step": 100 |
| }, |
| { |
| "epoch": 0.0317735413056037, |
| "grad_norm": 8.677237510681152, |
| "learning_rate": 1.5706051873198849e-06, |
| "loss": 0.6867, |
| "step": 110 |
| }, |
| { |
| "epoch": 0.03466204506065858, |
| "grad_norm": 7.54386568069458, |
| "learning_rate": 1.7146974063400579e-06, |
| "loss": 0.7063, |
| "step": 120 |
| }, |
| { |
| "epoch": 0.03755054881571346, |
| "grad_norm": 8.105358123779297, |
| "learning_rate": 1.8587896253602309e-06, |
| "loss": 0.648, |
| "step": 130 |
| }, |
| { |
| "epoch": 0.040439052570768345, |
| "grad_norm": 7.151630401611328, |
| "learning_rate": 2.0028818443804035e-06, |
| "loss": 0.6525, |
| "step": 140 |
| }, |
| { |
| "epoch": 0.043327556325823226, |
| "grad_norm": 8.37738037109375, |
| "learning_rate": 2.1469740634005763e-06, |
| "loss": 0.6615, |
| "step": 150 |
| }, |
| { |
| "epoch": 0.046216060080878106, |
| "grad_norm": 8.140033721923828, |
| "learning_rate": 2.2910662824207495e-06, |
| "loss": 0.6703, |
| "step": 160 |
| }, |
| { |
| "epoch": 0.049104563835932986, |
| "grad_norm": 7.80758810043335, |
| "learning_rate": 2.4351585014409223e-06, |
| "loss": 0.6309, |
| "step": 170 |
| }, |
| { |
| "epoch": 0.05199306759098787, |
| "grad_norm": 6.859084129333496, |
| "learning_rate": 2.579250720461095e-06, |
| "loss": 0.64, |
| "step": 180 |
| }, |
| { |
| "epoch": 0.05488157134604275, |
| "grad_norm": 8.606156349182129, |
| "learning_rate": 2.7233429394812683e-06, |
| "loss": 0.593, |
| "step": 190 |
| }, |
| { |
| "epoch": 0.057770075101097634, |
| "grad_norm": 6.8152313232421875, |
| "learning_rate": 2.867435158501441e-06, |
| "loss": 0.6037, |
| "step": 200 |
| }, |
| { |
| "epoch": 0.060658578856152515, |
| "grad_norm": 6.530623912811279, |
| "learning_rate": 3.0115273775216143e-06, |
| "loss": 0.6197, |
| "step": 210 |
| }, |
| { |
| "epoch": 0.0635470826112074, |
| "grad_norm": 7.118127822875977, |
| "learning_rate": 3.1556195965417867e-06, |
| "loss": 0.6244, |
| "step": 220 |
| }, |
| { |
| "epoch": 0.06643558636626228, |
| "grad_norm": 6.540687084197998, |
| "learning_rate": 3.29971181556196e-06, |
| "loss": 0.6235, |
| "step": 230 |
| }, |
| { |
| "epoch": 0.06932409012131716, |
| "grad_norm": 6.988425254821777, |
| "learning_rate": 3.4438040345821327e-06, |
| "loss": 0.5487, |
| "step": 240 |
| }, |
| { |
| "epoch": 0.07221259387637204, |
| "grad_norm": 6.784511089324951, |
| "learning_rate": 3.587896253602306e-06, |
| "loss": 0.5492, |
| "step": 250 |
| }, |
| { |
| "epoch": 0.07510109763142692, |
| "grad_norm": 6.616488933563232, |
| "learning_rate": 3.7319884726224787e-06, |
| "loss": 0.5659, |
| "step": 260 |
| }, |
| { |
| "epoch": 0.0779896013864818, |
| "grad_norm": 7.42545747756958, |
| "learning_rate": 3.876080691642652e-06, |
| "loss": 0.5038, |
| "step": 270 |
| }, |
| { |
| "epoch": 0.08087810514153669, |
| "grad_norm": 6.44716215133667, |
| "learning_rate": 4.020172910662825e-06, |
| "loss": 0.5263, |
| "step": 280 |
| }, |
| { |
| "epoch": 0.08347775852108608, |
| "eval_accuracy": 0.6134969325153374, |
| "eval_loss": 0.6351147294044495, |
| "eval_runtime": 75.0993, |
| "eval_samples_per_second": 6.511, |
| "eval_steps_per_second": 0.826, |
| "step": 289 |
| }, |
| { |
| "epoch": 1.0002888503755054, |
| "grad_norm": 8.028291702270508, |
| "learning_rate": 4.1642651296829975e-06, |
| "loss": 0.5092, |
| "step": 290 |
| }, |
| { |
| "epoch": 1.0031773541305604, |
| "grad_norm": 8.235032081604004, |
| "learning_rate": 4.30835734870317e-06, |
| "loss": 0.4796, |
| "step": 300 |
| }, |
| { |
| "epoch": 1.0060658578856152, |
| "grad_norm": 7.71317720413208, |
| "learning_rate": 4.452449567723343e-06, |
| "loss": 0.4452, |
| "step": 310 |
| }, |
| { |
| "epoch": 1.0089543616406702, |
| "grad_norm": 5.7118144035339355, |
| "learning_rate": 4.596541786743517e-06, |
| "loss": 0.4561, |
| "step": 320 |
| }, |
| { |
| "epoch": 1.011842865395725, |
| "grad_norm": 7.083165168762207, |
| "learning_rate": 4.740634005763689e-06, |
| "loss": 0.4049, |
| "step": 330 |
| }, |
| { |
| "epoch": 1.01473136915078, |
| "grad_norm": 4.181227207183838, |
| "learning_rate": 4.884726224783862e-06, |
| "loss": 0.4103, |
| "step": 340 |
| }, |
| { |
| "epoch": 1.0176198729058348, |
| "grad_norm": 9.941950798034668, |
| "learning_rate": 4.996789727126807e-06, |
| "loss": 0.3803, |
| "step": 350 |
| }, |
| { |
| "epoch": 1.0205083766608896, |
| "grad_norm": 8.401833534240723, |
| "learning_rate": 4.980738362760835e-06, |
| "loss": 0.3703, |
| "step": 360 |
| }, |
| { |
| "epoch": 1.0233968804159446, |
| "grad_norm": 5.167896270751953, |
| "learning_rate": 4.9646869983948645e-06, |
| "loss": 0.352, |
| "step": 370 |
| }, |
| { |
| "epoch": 1.0262853841709993, |
| "grad_norm": 11.792040824890137, |
| "learning_rate": 4.948635634028893e-06, |
| "loss": 0.378, |
| "step": 380 |
| }, |
| { |
| "epoch": 1.0291738879260544, |
| "grad_norm": 7.333316326141357, |
| "learning_rate": 4.9325842696629215e-06, |
| "loss": 0.3654, |
| "step": 390 |
| }, |
| { |
| "epoch": 1.0320623916811091, |
| "grad_norm": 5.1884942054748535, |
| "learning_rate": 4.916532905296951e-06, |
| "loss": 0.3294, |
| "step": 400 |
| }, |
| { |
| "epoch": 1.0349508954361641, |
| "grad_norm": 11.97611141204834, |
| "learning_rate": 4.90048154093098e-06, |
| "loss": 0.3134, |
| "step": 410 |
| }, |
| { |
| "epoch": 1.037839399191219, |
| "grad_norm": 10.419317245483398, |
| "learning_rate": 4.884430176565008e-06, |
| "loss": 0.3299, |
| "step": 420 |
| }, |
| { |
| "epoch": 1.040727902946274, |
| "grad_norm": 8.307182312011719, |
| "learning_rate": 4.868378812199037e-06, |
| "loss": 0.3303, |
| "step": 430 |
| }, |
| { |
| "epoch": 1.0436164067013287, |
| "grad_norm": 4.807034015655518, |
| "learning_rate": 4.8523274478330665e-06, |
| "loss": 0.2441, |
| "step": 440 |
| }, |
| { |
| "epoch": 1.0465049104563835, |
| "grad_norm": 8.766178131103516, |
| "learning_rate": 4.836276083467095e-06, |
| "loss": 0.3258, |
| "step": 450 |
| }, |
| { |
| "epoch": 1.0493934142114385, |
| "grad_norm": 3.609104633331299, |
| "learning_rate": 4.820224719101124e-06, |
| "loss": 0.2477, |
| "step": 460 |
| }, |
| { |
| "epoch": 1.0522819179664933, |
| "grad_norm": 8.949795722961426, |
| "learning_rate": 4.804173354735153e-06, |
| "loss": 0.2654, |
| "step": 470 |
| }, |
| { |
| "epoch": 1.0551704217215483, |
| "grad_norm": 6.772108554840088, |
| "learning_rate": 4.788121990369181e-06, |
| "loss": 0.2769, |
| "step": 480 |
| }, |
| { |
| "epoch": 1.058058925476603, |
| "grad_norm": 5.552646636962891, |
| "learning_rate": 4.772070626003211e-06, |
| "loss": 0.3607, |
| "step": 490 |
| }, |
| { |
| "epoch": 1.060947429231658, |
| "grad_norm": 7.810203552246094, |
| "learning_rate": 4.75601926163724e-06, |
| "loss": 0.263, |
| "step": 500 |
| }, |
| { |
| "epoch": 1.0638359329867129, |
| "grad_norm": 9.738797187805176, |
| "learning_rate": 4.7399678972712685e-06, |
| "loss": 0.2365, |
| "step": 510 |
| }, |
| { |
| "epoch": 1.0667244367417679, |
| "grad_norm": 14.024474143981934, |
| "learning_rate": 4.723916532905297e-06, |
| "loss": 0.3409, |
| "step": 520 |
| }, |
| { |
| "epoch": 1.0696129404968227, |
| "grad_norm": 6.747750282287598, |
| "learning_rate": 4.707865168539326e-06, |
| "loss": 0.1955, |
| "step": 530 |
| }, |
| { |
| "epoch": 1.0725014442518774, |
| "grad_norm": 6.048046588897705, |
| "learning_rate": 4.691813804173355e-06, |
| "loss": 0.2507, |
| "step": 540 |
| }, |
| { |
| "epoch": 1.0753899480069324, |
| "grad_norm": 6.181787967681885, |
| "learning_rate": 4.675762439807384e-06, |
| "loss": 0.3123, |
| "step": 550 |
| }, |
| { |
| "epoch": 1.0782784517619872, |
| "grad_norm": 6.10444974899292, |
| "learning_rate": 4.659711075441413e-06, |
| "loss": 0.2076, |
| "step": 560 |
| }, |
| { |
| "epoch": 1.0811669555170422, |
| "grad_norm": 9.113558769226074, |
| "learning_rate": 4.643659711075442e-06, |
| "loss": 0.2545, |
| "step": 570 |
| }, |
| { |
| "epoch": 1.0834777585210862, |
| "eval_accuracy": 0.7811860940695297, |
| "eval_loss": 0.4964900314807892, |
| "eval_runtime": 66.949, |
| "eval_samples_per_second": 7.304, |
| "eval_steps_per_second": 0.926, |
| "step": 578 |
| }, |
| { |
| "epoch": 2.000577700751011, |
| "grad_norm": 5.2409467697143555, |
| "learning_rate": 4.6276083467094705e-06, |
| "loss": 0.1855, |
| "step": 580 |
| }, |
| { |
| "epoch": 2.003466204506066, |
| "grad_norm": 6.332943439483643, |
| "learning_rate": 4.6115569823435e-06, |
| "loss": 0.1953, |
| "step": 590 |
| }, |
| { |
| "epoch": 2.006354708261121, |
| "grad_norm": 22.365009307861328, |
| "learning_rate": 4.595505617977528e-06, |
| "loss": 0.3354, |
| "step": 600 |
| }, |
| { |
| "epoch": 2.009243212016176, |
| "grad_norm": 8.605449676513672, |
| "learning_rate": 4.579454253611557e-06, |
| "loss": 0.2047, |
| "step": 610 |
| }, |
| { |
| "epoch": 2.0121317157712304, |
| "grad_norm": 7.922995090484619, |
| "learning_rate": 4.563402889245586e-06, |
| "loss": 0.2502, |
| "step": 620 |
| }, |
| { |
| "epoch": 2.0150202195262854, |
| "grad_norm": 10.771293640136719, |
| "learning_rate": 4.5473515248796155e-06, |
| "loss": 0.1701, |
| "step": 630 |
| }, |
| { |
| "epoch": 2.0179087232813404, |
| "grad_norm": 20.358705520629883, |
| "learning_rate": 4.531300160513644e-06, |
| "loss": 0.1273, |
| "step": 640 |
| }, |
| { |
| "epoch": 2.020797227036395, |
| "grad_norm": 2.4659533500671387, |
| "learning_rate": 4.515248796147673e-06, |
| "loss": 0.1772, |
| "step": 650 |
| }, |
| { |
| "epoch": 2.02368573079145, |
| "grad_norm": 10.494536399841309, |
| "learning_rate": 4.499197431781702e-06, |
| "loss": 0.2002, |
| "step": 660 |
| }, |
| { |
| "epoch": 2.026574234546505, |
| "grad_norm": 16.61016082763672, |
| "learning_rate": 4.48314606741573e-06, |
| "loss": 0.188, |
| "step": 670 |
| }, |
| { |
| "epoch": 2.02946273830156, |
| "grad_norm": 10.22573471069336, |
| "learning_rate": 4.46709470304976e-06, |
| "loss": 0.1466, |
| "step": 680 |
| }, |
| { |
| "epoch": 2.0323512420566145, |
| "grad_norm": 10.276712417602539, |
| "learning_rate": 4.451043338683789e-06, |
| "loss": 0.1377, |
| "step": 690 |
| }, |
| { |
| "epoch": 2.0352397458116696, |
| "grad_norm": 7.458189487457275, |
| "learning_rate": 4.4349919743178176e-06, |
| "loss": 0.168, |
| "step": 700 |
| }, |
| { |
| "epoch": 2.0381282495667246, |
| "grad_norm": 9.402140617370605, |
| "learning_rate": 4.418940609951846e-06, |
| "loss": 0.1435, |
| "step": 710 |
| }, |
| { |
| "epoch": 2.041016753321779, |
| "grad_norm": 4.765238285064697, |
| "learning_rate": 4.402889245585875e-06, |
| "loss": 0.1237, |
| "step": 720 |
| }, |
| { |
| "epoch": 2.043905257076834, |
| "grad_norm": 1.2375091314315796, |
| "learning_rate": 4.386837881219904e-06, |
| "loss": 0.2354, |
| "step": 730 |
| }, |
| { |
| "epoch": 2.046793760831889, |
| "grad_norm": 12.870935440063477, |
| "learning_rate": 4.370786516853933e-06, |
| "loss": 0.2488, |
| "step": 740 |
| }, |
| { |
| "epoch": 2.049682264586944, |
| "grad_norm": 6.429897785186768, |
| "learning_rate": 4.354735152487962e-06, |
| "loss": 0.1568, |
| "step": 750 |
| }, |
| { |
| "epoch": 2.0525707683419987, |
| "grad_norm": 14.20434284210205, |
| "learning_rate": 4.33868378812199e-06, |
| "loss": 0.1464, |
| "step": 760 |
| }, |
| { |
| "epoch": 2.0554592720970537, |
| "grad_norm": 4.782289028167725, |
| "learning_rate": 4.32263242375602e-06, |
| "loss": 0.194, |
| "step": 770 |
| }, |
| { |
| "epoch": 2.0583477758521087, |
| "grad_norm": 17.103336334228516, |
| "learning_rate": 4.306581059390049e-06, |
| "loss": 0.1696, |
| "step": 780 |
| }, |
| { |
| "epoch": 2.0612362796071633, |
| "grad_norm": 0.7173800468444824, |
| "learning_rate": 4.290529695024077e-06, |
| "loss": 0.1829, |
| "step": 790 |
| }, |
| { |
| "epoch": 2.0641247833622183, |
| "grad_norm": 2.673967123031616, |
| "learning_rate": 4.274478330658106e-06, |
| "loss": 0.1354, |
| "step": 800 |
| }, |
| { |
| "epoch": 2.0670132871172733, |
| "grad_norm": 13.312447547912598, |
| "learning_rate": 4.258426966292135e-06, |
| "loss": 0.1753, |
| "step": 810 |
| }, |
| { |
| "epoch": 2.0699017908723283, |
| "grad_norm": 14.13827896118164, |
| "learning_rate": 4.242375601926164e-06, |
| "loss": 0.1376, |
| "step": 820 |
| }, |
| { |
| "epoch": 2.072790294627383, |
| "grad_norm": 6.9245219230651855, |
| "learning_rate": 4.226324237560193e-06, |
| "loss": 0.1123, |
| "step": 830 |
| }, |
| { |
| "epoch": 2.075678798382438, |
| "grad_norm": 17.101953506469727, |
| "learning_rate": 4.210272873194222e-06, |
| "loss": 0.1688, |
| "step": 840 |
| }, |
| { |
| "epoch": 2.078567302137493, |
| "grad_norm": 3.961395502090454, |
| "learning_rate": 4.194221508828251e-06, |
| "loss": 0.0972, |
| "step": 850 |
| }, |
| { |
| "epoch": 2.081455805892548, |
| "grad_norm": 6.498648166656494, |
| "learning_rate": 4.1781701444622794e-06, |
| "loss": 0.1261, |
| "step": 860 |
| }, |
| { |
| "epoch": 2.083477758521086, |
| "eval_accuracy": 0.8098159509202454, |
| "eval_loss": 0.5260858535766602, |
| "eval_runtime": 65.8351, |
| "eval_samples_per_second": 7.428, |
| "eval_steps_per_second": 0.942, |
| "step": 867 |
| }, |
| { |
| "epoch": 3.0008665511265167, |
| "grad_norm": 1.190217137336731, |
| "learning_rate": 4.162118780096309e-06, |
| "loss": 0.1403, |
| "step": 870 |
| }, |
| { |
| "epoch": 3.0037550548815712, |
| "grad_norm": 2.1841437816619873, |
| "learning_rate": 4.146067415730337e-06, |
| "loss": 0.1253, |
| "step": 880 |
| }, |
| { |
| "epoch": 3.0066435586366262, |
| "grad_norm": 29.44947624206543, |
| "learning_rate": 4.130016051364366e-06, |
| "loss": 0.0811, |
| "step": 890 |
| }, |
| { |
| "epoch": 3.0095320623916813, |
| "grad_norm": 16.02284049987793, |
| "learning_rate": 4.113964686998395e-06, |
| "loss": 0.1242, |
| "step": 900 |
| }, |
| { |
| "epoch": 3.012420566146736, |
| "grad_norm": 10.445579528808594, |
| "learning_rate": 4.0979133226324245e-06, |
| "loss": 0.1388, |
| "step": 910 |
| }, |
| { |
| "epoch": 3.015309069901791, |
| "grad_norm": 2.3682429790496826, |
| "learning_rate": 4.081861958266453e-06, |
| "loss": 0.1039, |
| "step": 920 |
| }, |
| { |
| "epoch": 3.018197573656846, |
| "grad_norm": 0.7850814461708069, |
| "learning_rate": 4.0658105939004815e-06, |
| "loss": 0.1165, |
| "step": 930 |
| }, |
| { |
| "epoch": 3.021086077411901, |
| "grad_norm": 31.352468490600586, |
| "learning_rate": 4.049759229534511e-06, |
| "loss": 0.1494, |
| "step": 940 |
| }, |
| { |
| "epoch": 3.0239745811669554, |
| "grad_norm": 0.1588502824306488, |
| "learning_rate": 4.033707865168539e-06, |
| "loss": 0.0996, |
| "step": 950 |
| }, |
| { |
| "epoch": 3.0268630849220104, |
| "grad_norm": 3.636672019958496, |
| "learning_rate": 4.017656500802569e-06, |
| "loss": 0.1087, |
| "step": 960 |
| }, |
| { |
| "epoch": 3.0297515886770654, |
| "grad_norm": 24.48581886291504, |
| "learning_rate": 4.001605136436598e-06, |
| "loss": 0.1439, |
| "step": 970 |
| }, |
| { |
| "epoch": 3.03264009243212, |
| "grad_norm": 0.9635569453239441, |
| "learning_rate": 3.9855537720706265e-06, |
| "loss": 0.068, |
| "step": 980 |
| }, |
| { |
| "epoch": 3.035528596187175, |
| "grad_norm": 1.4727041721343994, |
| "learning_rate": 3.969502407704655e-06, |
| "loss": 0.0867, |
| "step": 990 |
| }, |
| { |
| "epoch": 3.03841709994223, |
| "grad_norm": 0.2378067672252655, |
| "learning_rate": 3.953451043338684e-06, |
| "loss": 0.0891, |
| "step": 1000 |
| }, |
| { |
| "epoch": 3.041305603697285, |
| "grad_norm": 0.4924512505531311, |
| "learning_rate": 3.937399678972713e-06, |
| "loss": 0.0693, |
| "step": 1010 |
| }, |
| { |
| "epoch": 3.0441941074523395, |
| "grad_norm": 2.5564048290252686, |
| "learning_rate": 3.921348314606742e-06, |
| "loss": 0.1311, |
| "step": 1020 |
| }, |
| { |
| "epoch": 3.0470826112073945, |
| "grad_norm": 2.72170352935791, |
| "learning_rate": 3.905296950240771e-06, |
| "loss": 0.0777, |
| "step": 1030 |
| }, |
| { |
| "epoch": 3.0499711149624495, |
| "grad_norm": 0.4120216965675354, |
| "learning_rate": 3.8892455858748e-06, |
| "loss": 0.1778, |
| "step": 1040 |
| }, |
| { |
| "epoch": 3.0528596187175046, |
| "grad_norm": 16.4429931640625, |
| "learning_rate": 3.8731942215088285e-06, |
| "loss": 0.0344, |
| "step": 1050 |
| }, |
| { |
| "epoch": 3.055748122472559, |
| "grad_norm": 21.77942657470703, |
| "learning_rate": 3.857142857142858e-06, |
| "loss": 0.1641, |
| "step": 1060 |
| }, |
| { |
| "epoch": 3.058636626227614, |
| "grad_norm": 4.657069683074951, |
| "learning_rate": 3.841091492776886e-06, |
| "loss": 0.0319, |
| "step": 1070 |
| }, |
| { |
| "epoch": 3.061525129982669, |
| "grad_norm": 0.2789720892906189, |
| "learning_rate": 3.825040128410915e-06, |
| "loss": 0.061, |
| "step": 1080 |
| }, |
| { |
| "epoch": 3.0644136337377237, |
| "grad_norm": 0.1231907308101654, |
| "learning_rate": 3.808988764044944e-06, |
| "loss": 0.1156, |
| "step": 1090 |
| }, |
| { |
| "epoch": 3.0673021374927787, |
| "grad_norm": 17.38383674621582, |
| "learning_rate": 3.792937399678973e-06, |
| "loss": 0.1351, |
| "step": 1100 |
| }, |
| { |
| "epoch": 3.0701906412478337, |
| "grad_norm": 2.08525013923645, |
| "learning_rate": 3.776886035313002e-06, |
| "loss": 0.0755, |
| "step": 1110 |
| }, |
| { |
| "epoch": 3.0730791450028887, |
| "grad_norm": 20.763957977294922, |
| "learning_rate": 3.7608346709470305e-06, |
| "loss": 0.0934, |
| "step": 1120 |
| }, |
| { |
| "epoch": 3.0759676487579433, |
| "grad_norm": 19.472963333129883, |
| "learning_rate": 3.7447833065810594e-06, |
| "loss": 0.139, |
| "step": 1130 |
| }, |
| { |
| "epoch": 3.0788561525129983, |
| "grad_norm": 2.2785959243774414, |
| "learning_rate": 3.7287319422150888e-06, |
| "loss": 0.0703, |
| "step": 1140 |
| }, |
| { |
| "epoch": 3.0817446562680533, |
| "grad_norm": 0.3075811564922333, |
| "learning_rate": 3.7126805778491177e-06, |
| "loss": 0.0753, |
| "step": 1150 |
| }, |
| { |
| "epoch": 3.083477758521086, |
| "eval_accuracy": 0.8159509202453987, |
| "eval_loss": 0.625999391078949, |
| "eval_runtime": 66.8149, |
| "eval_samples_per_second": 7.319, |
| "eval_steps_per_second": 0.928, |
| "step": 1156 |
| }, |
| { |
| "epoch": 4.001155401502022, |
| "grad_norm": 2.79459285736084, |
| "learning_rate": 3.6966292134831466e-06, |
| "loss": 0.0859, |
| "step": 1160 |
| }, |
| { |
| "epoch": 4.004043905257077, |
| "grad_norm": 9.143715858459473, |
| "learning_rate": 3.680577849117175e-06, |
| "loss": 0.0741, |
| "step": 1170 |
| }, |
| { |
| "epoch": 4.006932409012132, |
| "grad_norm": 7.800527095794678, |
| "learning_rate": 3.664526484751204e-06, |
| "loss": 0.1363, |
| "step": 1180 |
| }, |
| { |
| "epoch": 4.009820912767187, |
| "grad_norm": 28.958932876586914, |
| "learning_rate": 3.648475120385233e-06, |
| "loss": 0.0505, |
| "step": 1190 |
| }, |
| { |
| "epoch": 4.012709416522242, |
| "grad_norm": 0.5397215485572815, |
| "learning_rate": 3.6324237560192623e-06, |
| "loss": 0.0502, |
| "step": 1200 |
| }, |
| { |
| "epoch": 4.015597920277297, |
| "grad_norm": 14.25724983215332, |
| "learning_rate": 3.6163723916532904e-06, |
| "loss": 0.0464, |
| "step": 1210 |
| }, |
| { |
| "epoch": 4.018486424032352, |
| "grad_norm": 0.7435076832771301, |
| "learning_rate": 3.6003210272873197e-06, |
| "loss": 0.0605, |
| "step": 1220 |
| }, |
| { |
| "epoch": 4.021374927787406, |
| "grad_norm": 0.11585772037506104, |
| "learning_rate": 3.5842696629213486e-06, |
| "loss": 0.0408, |
| "step": 1230 |
| }, |
| { |
| "epoch": 4.024263431542461, |
| "grad_norm": 2.1825027465820312, |
| "learning_rate": 3.5682182985553776e-06, |
| "loss": 0.0584, |
| "step": 1240 |
| }, |
| { |
| "epoch": 4.027151935297516, |
| "grad_norm": 0.7946633100509644, |
| "learning_rate": 3.5521669341894065e-06, |
| "loss": 0.0361, |
| "step": 1250 |
| }, |
| { |
| "epoch": 4.030040439052571, |
| "grad_norm": 0.4123324155807495, |
| "learning_rate": 3.536115569823435e-06, |
| "loss": 0.1155, |
| "step": 1260 |
| }, |
| { |
| "epoch": 4.032928942807626, |
| "grad_norm": 0.17728902399539948, |
| "learning_rate": 3.520064205457464e-06, |
| "loss": 0.0038, |
| "step": 1270 |
| }, |
| { |
| "epoch": 4.035817446562681, |
| "grad_norm": 2.5627269744873047, |
| "learning_rate": 3.5040128410914932e-06, |
| "loss": 0.0489, |
| "step": 1280 |
| }, |
| { |
| "epoch": 4.038705950317736, |
| "grad_norm": 2.115309238433838, |
| "learning_rate": 3.487961476725522e-06, |
| "loss": 0.0572, |
| "step": 1290 |
| }, |
| { |
| "epoch": 4.04159445407279, |
| "grad_norm": 0.32022103667259216, |
| "learning_rate": 3.471910112359551e-06, |
| "loss": 0.067, |
| "step": 1300 |
| }, |
| { |
| "epoch": 4.044482957827845, |
| "grad_norm": 51.26248550415039, |
| "learning_rate": 3.4558587479935796e-06, |
| "loss": 0.0505, |
| "step": 1310 |
| }, |
| { |
| "epoch": 4.0473714615829, |
| "grad_norm": 3.7673208713531494, |
| "learning_rate": 3.4398073836276085e-06, |
| "loss": 0.0859, |
| "step": 1320 |
| }, |
| { |
| "epoch": 4.050259965337955, |
| "grad_norm": 47.00321960449219, |
| "learning_rate": 3.4237560192616374e-06, |
| "loss": 0.0532, |
| "step": 1330 |
| }, |
| { |
| "epoch": 4.05314846909301, |
| "grad_norm": 2.6400699615478516, |
| "learning_rate": 3.4077046548956668e-06, |
| "loss": 0.0388, |
| "step": 1340 |
| }, |
| { |
| "epoch": 4.056036972848065, |
| "grad_norm": 0.06903611868619919, |
| "learning_rate": 3.391653290529695e-06, |
| "loss": 0.0154, |
| "step": 1350 |
| }, |
| { |
| "epoch": 4.05892547660312, |
| "grad_norm": 0.4001617431640625, |
| "learning_rate": 3.375601926163724e-06, |
| "loss": 0.0601, |
| "step": 1360 |
| }, |
| { |
| "epoch": 4.061813980358174, |
| "grad_norm": 0.8745481967926025, |
| "learning_rate": 3.359550561797753e-06, |
| "loss": 0.0035, |
| "step": 1370 |
| }, |
| { |
| "epoch": 4.064702484113229, |
| "grad_norm": 0.3285149931907654, |
| "learning_rate": 3.343499197431782e-06, |
| "loss": 0.0655, |
| "step": 1380 |
| }, |
| { |
| "epoch": 4.067590987868284, |
| "grad_norm": 1.2616970539093018, |
| "learning_rate": 3.327447833065811e-06, |
| "loss": 0.0559, |
| "step": 1390 |
| }, |
| { |
| "epoch": 4.070479491623339, |
| "grad_norm": 0.5105425715446472, |
| "learning_rate": 3.3113964686998394e-06, |
| "loss": 0.0033, |
| "step": 1400 |
| }, |
| { |
| "epoch": 4.073367995378394, |
| "grad_norm": 0.07815102487802505, |
| "learning_rate": 3.2953451043338684e-06, |
| "loss": 0.0535, |
| "step": 1410 |
| }, |
| { |
| "epoch": 4.076256499133449, |
| "grad_norm": 50.4415397644043, |
| "learning_rate": 3.2792937399678977e-06, |
| "loss": 0.0305, |
| "step": 1420 |
| }, |
| { |
| "epoch": 4.079145002888504, |
| "grad_norm": 0.04866603761911392, |
| "learning_rate": 3.2632423756019266e-06, |
| "loss": 0.0752, |
| "step": 1430 |
| }, |
| { |
| "epoch": 4.082033506643558, |
| "grad_norm": 0.6638075113296509, |
| "learning_rate": 3.2471910112359555e-06, |
| "loss": 0.0246, |
| "step": 1440 |
| }, |
| { |
| "epoch": 4.083477758521086, |
| "eval_accuracy": 0.8159509202453987, |
| "eval_loss": 0.7590932250022888, |
| "eval_runtime": 67.4418, |
| "eval_samples_per_second": 7.251, |
| "eval_steps_per_second": 0.919, |
| "step": 1445 |
| }, |
| { |
| "epoch": 5.001444251877527, |
| "grad_norm": 0.060185324400663376, |
| "learning_rate": 3.231139646869984e-06, |
| "loss": 0.002, |
| "step": 1450 |
| }, |
| { |
| "epoch": 5.004332755632582, |
| "grad_norm": 0.07039889693260193, |
| "learning_rate": 3.215088282504013e-06, |
| "loss": 0.0285, |
| "step": 1460 |
| }, |
| { |
| "epoch": 5.007221259387637, |
| "grad_norm": 0.03269031643867493, |
| "learning_rate": 3.199036918138042e-06, |
| "loss": 0.0065, |
| "step": 1470 |
| }, |
| { |
| "epoch": 5.010109763142692, |
| "grad_norm": 0.32333606481552124, |
| "learning_rate": 3.1829855537720712e-06, |
| "loss": 0.0105, |
| "step": 1480 |
| }, |
| { |
| "epoch": 5.012998266897747, |
| "grad_norm": 0.03409011289477348, |
| "learning_rate": 3.1669341894060997e-06, |
| "loss": 0.0347, |
| "step": 1490 |
| }, |
| { |
| "epoch": 5.015886770652802, |
| "grad_norm": 0.19116906821727753, |
| "learning_rate": 3.1508828250401286e-06, |
| "loss": 0.0034, |
| "step": 1500 |
| }, |
| { |
| "epoch": 5.018775274407857, |
| "grad_norm": 0.25904765725135803, |
| "learning_rate": 3.1348314606741576e-06, |
| "loss": 0.0234, |
| "step": 1510 |
| }, |
| { |
| "epoch": 5.021663778162911, |
| "grad_norm": 0.3230428695678711, |
| "learning_rate": 3.1187800963081865e-06, |
| "loss": 0.0316, |
| "step": 1520 |
| }, |
| { |
| "epoch": 5.024552281917966, |
| "grad_norm": 0.10437498986721039, |
| "learning_rate": 3.1027287319422154e-06, |
| "loss": 0.004, |
| "step": 1530 |
| }, |
| { |
| "epoch": 5.027440785673021, |
| "grad_norm": 0.46833401918411255, |
| "learning_rate": 3.086677367576244e-06, |
| "loss": 0.0075, |
| "step": 1540 |
| }, |
| { |
| "epoch": 5.030329289428076, |
| "grad_norm": 0.21706847846508026, |
| "learning_rate": 3.0706260032102732e-06, |
| "loss": 0.012, |
| "step": 1550 |
| }, |
| { |
| "epoch": 5.033217793183131, |
| "grad_norm": 3.5541529655456543, |
| "learning_rate": 3.054574638844302e-06, |
| "loss": 0.0654, |
| "step": 1560 |
| }, |
| { |
| "epoch": 5.036106296938186, |
| "grad_norm": 35.335044860839844, |
| "learning_rate": 3.038523274478331e-06, |
| "loss": 0.0402, |
| "step": 1570 |
| }, |
| { |
| "epoch": 5.038994800693241, |
| "grad_norm": 1.4521031379699707, |
| "learning_rate": 3.02247191011236e-06, |
| "loss": 0.0033, |
| "step": 1580 |
| }, |
| { |
| "epoch": 5.041883304448295, |
| "grad_norm": 4.004300594329834, |
| "learning_rate": 3.0064205457463885e-06, |
| "loss": 0.0455, |
| "step": 1590 |
| }, |
| { |
| "epoch": 5.04477180820335, |
| "grad_norm": 2.2921578884124756, |
| "learning_rate": 2.9903691813804174e-06, |
| "loss": 0.0058, |
| "step": 1600 |
| }, |
| { |
| "epoch": 5.047660311958405, |
| "grad_norm": 0.06124192476272583, |
| "learning_rate": 2.9743178170144463e-06, |
| "loss": 0.0029, |
| "step": 1610 |
| }, |
| { |
| "epoch": 5.05054881571346, |
| "grad_norm": 0.20763950049877167, |
| "learning_rate": 2.9582664526484757e-06, |
| "loss": 0.0135, |
| "step": 1620 |
| }, |
| { |
| "epoch": 5.053437319468515, |
| "grad_norm": 0.05570823326706886, |
| "learning_rate": 2.942215088282504e-06, |
| "loss": 0.0141, |
| "step": 1630 |
| }, |
| { |
| "epoch": 5.05632582322357, |
| "grad_norm": 11.078546524047852, |
| "learning_rate": 2.926163723916533e-06, |
| "loss": 0.0354, |
| "step": 1640 |
| }, |
| { |
| "epoch": 5.059214326978625, |
| "grad_norm": 0.05520700663328171, |
| "learning_rate": 2.910112359550562e-06, |
| "loss": 0.0205, |
| "step": 1650 |
| }, |
| { |
| "epoch": 5.06210283073368, |
| "grad_norm": 1.370224118232727, |
| "learning_rate": 2.894060995184591e-06, |
| "loss": 0.0302, |
| "step": 1660 |
| }, |
| { |
| "epoch": 5.0649913344887345, |
| "grad_norm": 29.20268440246582, |
| "learning_rate": 2.87800963081862e-06, |
| "loss": 0.0856, |
| "step": 1670 |
| }, |
| { |
| "epoch": 5.0678798382437895, |
| "grad_norm": 0.0997309535741806, |
| "learning_rate": 2.8619582664526484e-06, |
| "loss": 0.0017, |
| "step": 1680 |
| }, |
| { |
| "epoch": 5.0707683419988445, |
| "grad_norm": 0.5986093878746033, |
| "learning_rate": 2.8459069020866777e-06, |
| "loss": 0.0045, |
| "step": 1690 |
| }, |
| { |
| "epoch": 5.0736568457538995, |
| "grad_norm": 1.4065715074539185, |
| "learning_rate": 2.8298555377207066e-06, |
| "loss": 0.0227, |
| "step": 1700 |
| }, |
| { |
| "epoch": 5.0765453495089545, |
| "grad_norm": 29.846811294555664, |
| "learning_rate": 2.8138041733547356e-06, |
| "loss": 0.127, |
| "step": 1710 |
| }, |
| { |
| "epoch": 5.0794338532640095, |
| "grad_norm": 0.12639851868152618, |
| "learning_rate": 2.797752808988764e-06, |
| "loss": 0.0016, |
| "step": 1720 |
| }, |
| { |
| "epoch": 5.0823223570190645, |
| "grad_norm": 16.550195693969727, |
| "learning_rate": 2.781701444622793e-06, |
| "loss": 0.0587, |
| "step": 1730 |
| }, |
| { |
| "epoch": 5.083477758521086, |
| "eval_accuracy": 0.8220858895705522, |
| "eval_loss": 0.8467618227005005, |
| "eval_runtime": 69.3503, |
| "eval_samples_per_second": 7.051, |
| "eval_steps_per_second": 0.894, |
| "step": 1734 |
| }, |
| { |
| "epoch": 6.001733102253033, |
| "grad_norm": 0.0677463710308075, |
| "learning_rate": 2.765650080256822e-06, |
| "loss": 0.0073, |
| "step": 1740 |
| }, |
| { |
| "epoch": 6.0046216060080875, |
| "grad_norm": 0.08975090831518173, |
| "learning_rate": 2.7495987158908512e-06, |
| "loss": 0.0022, |
| "step": 1750 |
| }, |
| { |
| "epoch": 6.0075101097631425, |
| "grad_norm": 0.31082966923713684, |
| "learning_rate": 2.73354735152488e-06, |
| "loss": 0.0042, |
| "step": 1760 |
| }, |
| { |
| "epoch": 6.0103986135181975, |
| "grad_norm": 0.11679187417030334, |
| "learning_rate": 2.7174959871589087e-06, |
| "loss": 0.0487, |
| "step": 1770 |
| }, |
| { |
| "epoch": 6.0132871172732525, |
| "grad_norm": 30.055280685424805, |
| "learning_rate": 2.7014446227929376e-06, |
| "loss": 0.0139, |
| "step": 1780 |
| }, |
| { |
| "epoch": 6.0161756210283075, |
| "grad_norm": 0.5157354474067688, |
| "learning_rate": 2.6853932584269665e-06, |
| "loss": 0.0094, |
| "step": 1790 |
| }, |
| { |
| "epoch": 6.0190641247833625, |
| "grad_norm": 0.036292631179094315, |
| "learning_rate": 2.6693418940609954e-06, |
| "loss": 0.0204, |
| "step": 1800 |
| }, |
| { |
| "epoch": 6.0219526285384175, |
| "grad_norm": 0.013917963020503521, |
| "learning_rate": 2.6532905296950243e-06, |
| "loss": 0.0053, |
| "step": 1810 |
| }, |
| { |
| "epoch": 6.024841132293472, |
| "grad_norm": 0.21816915273666382, |
| "learning_rate": 2.637239165329053e-06, |
| "loss": 0.0093, |
| "step": 1820 |
| }, |
| { |
| "epoch": 6.027729636048527, |
| "grad_norm": 0.193815678358078, |
| "learning_rate": 2.621187800963082e-06, |
| "loss": 0.0076, |
| "step": 1830 |
| }, |
| { |
| "epoch": 6.030618139803582, |
| "grad_norm": 0.46772897243499756, |
| "learning_rate": 2.605136436597111e-06, |
| "loss": 0.0059, |
| "step": 1840 |
| }, |
| { |
| "epoch": 6.033506643558637, |
| "grad_norm": 0.014806525781750679, |
| "learning_rate": 2.58908507223114e-06, |
| "loss": 0.0081, |
| "step": 1850 |
| }, |
| { |
| "epoch": 6.036395147313692, |
| "grad_norm": 0.5249377489089966, |
| "learning_rate": 2.5730337078651685e-06, |
| "loss": 0.0084, |
| "step": 1860 |
| }, |
| { |
| "epoch": 6.039283651068747, |
| "grad_norm": 0.05226698890328407, |
| "learning_rate": 2.5569823434991974e-06, |
| "loss": 0.0016, |
| "step": 1870 |
| }, |
| { |
| "epoch": 6.042172154823802, |
| "grad_norm": 0.028807491064071655, |
| "learning_rate": 2.5409309791332264e-06, |
| "loss": 0.0026, |
| "step": 1880 |
| }, |
| { |
| "epoch": 6.045060658578856, |
| "grad_norm": 0.28243187069892883, |
| "learning_rate": 2.5248796147672557e-06, |
| "loss": 0.001, |
| "step": 1890 |
| }, |
| { |
| "epoch": 6.047949162333911, |
| "grad_norm": 0.03083970583975315, |
| "learning_rate": 2.5088282504012846e-06, |
| "loss": 0.0015, |
| "step": 1900 |
| }, |
| { |
| "epoch": 6.050837666088966, |
| "grad_norm": 0.11073260754346848, |
| "learning_rate": 2.492776886035313e-06, |
| "loss": 0.0146, |
| "step": 1910 |
| }, |
| { |
| "epoch": 6.053726169844021, |
| "grad_norm": 0.07196179777383804, |
| "learning_rate": 2.476725521669342e-06, |
| "loss": 0.0171, |
| "step": 1920 |
| }, |
| { |
| "epoch": 6.056614673599076, |
| "grad_norm": 0.09667622298002243, |
| "learning_rate": 2.460674157303371e-06, |
| "loss": 0.0023, |
| "step": 1930 |
| }, |
| { |
| "epoch": 6.059503177354131, |
| "grad_norm": 1.5665602684020996, |
| "learning_rate": 2.4446227929374e-06, |
| "loss": 0.0011, |
| "step": 1940 |
| }, |
| { |
| "epoch": 6.062391681109186, |
| "grad_norm": 0.15951007604599, |
| "learning_rate": 2.428571428571429e-06, |
| "loss": 0.0126, |
| "step": 1950 |
| }, |
| { |
| "epoch": 6.06528018486424, |
| "grad_norm": 0.16798914968967438, |
| "learning_rate": 2.4125200642054577e-06, |
| "loss": 0.0042, |
| "step": 1960 |
| }, |
| { |
| "epoch": 6.068168688619295, |
| "grad_norm": 0.17065782845020294, |
| "learning_rate": 2.3964686998394866e-06, |
| "loss": 0.1316, |
| "step": 1970 |
| }, |
| { |
| "epoch": 6.07105719237435, |
| "grad_norm": 1.0885783433914185, |
| "learning_rate": 2.3804173354735156e-06, |
| "loss": 0.0021, |
| "step": 1980 |
| }, |
| { |
| "epoch": 6.073945696129405, |
| "grad_norm": 0.4040501117706299, |
| "learning_rate": 2.364365971107544e-06, |
| "loss": 0.0013, |
| "step": 1990 |
| }, |
| { |
| "epoch": 6.07683419988446, |
| "grad_norm": 14.820023536682129, |
| "learning_rate": 2.3483146067415734e-06, |
| "loss": 0.0077, |
| "step": 2000 |
| }, |
| { |
| "epoch": 6.079722703639515, |
| "grad_norm": 0.065485380589962, |
| "learning_rate": 2.3322632423756023e-06, |
| "loss": 0.0047, |
| "step": 2010 |
| }, |
| { |
| "epoch": 6.08261120739457, |
| "grad_norm": 2.0591225624084473, |
| "learning_rate": 2.316211878009631e-06, |
| "loss": 0.0351, |
| "step": 2020 |
| }, |
| { |
| "epoch": 6.083477758521086, |
| "eval_accuracy": 0.8179959100204499, |
| "eval_loss": 0.8952175974845886, |
| "eval_runtime": 67.365, |
| "eval_samples_per_second": 7.259, |
| "eval_steps_per_second": 0.92, |
| "step": 2023 |
| }, |
| { |
| "epoch": 7.002021952628539, |
| "grad_norm": 0.09516696631908417, |
| "learning_rate": 2.30016051364366e-06, |
| "loss": 0.0007, |
| "step": 2030 |
| }, |
| { |
| "epoch": 7.004910456383593, |
| "grad_norm": 0.14757688343524933, |
| "learning_rate": 2.2841091492776887e-06, |
| "loss": 0.0006, |
| "step": 2040 |
| }, |
| { |
| "epoch": 7.007798960138648, |
| "grad_norm": 0.19937986135482788, |
| "learning_rate": 2.2680577849117176e-06, |
| "loss": 0.0008, |
| "step": 2050 |
| }, |
| { |
| "epoch": 7.010687463893703, |
| "grad_norm": 4.664211750030518, |
| "learning_rate": 2.2520064205457465e-06, |
| "loss": 0.0018, |
| "step": 2060 |
| }, |
| { |
| "epoch": 7.013575967648758, |
| "grad_norm": 0.44122084975242615, |
| "learning_rate": 2.2359550561797754e-06, |
| "loss": 0.0025, |
| "step": 2070 |
| }, |
| { |
| "epoch": 7.016464471403813, |
| "grad_norm": 0.4168035387992859, |
| "learning_rate": 2.2199036918138043e-06, |
| "loss": 0.0012, |
| "step": 2080 |
| }, |
| { |
| "epoch": 7.019352975158868, |
| "grad_norm": 0.30642348527908325, |
| "learning_rate": 2.2038523274478333e-06, |
| "loss": 0.0033, |
| "step": 2090 |
| }, |
| { |
| "epoch": 7.022241478913923, |
| "grad_norm": 0.1506723016500473, |
| "learning_rate": 2.187800963081862e-06, |
| "loss": 0.0007, |
| "step": 2100 |
| }, |
| { |
| "epoch": 7.025129982668977, |
| "grad_norm": 0.050181757658720016, |
| "learning_rate": 2.171749598715891e-06, |
| "loss": 0.0011, |
| "step": 2110 |
| }, |
| { |
| "epoch": 7.028018486424032, |
| "grad_norm": 0.3687441349029541, |
| "learning_rate": 2.15569823434992e-06, |
| "loss": 0.0013, |
| "step": 2120 |
| }, |
| { |
| "epoch": 7.030906990179087, |
| "grad_norm": 1.2629284858703613, |
| "learning_rate": 2.1396468699839485e-06, |
| "loss": 0.0031, |
| "step": 2130 |
| }, |
| { |
| "epoch": 7.033795493934142, |
| "grad_norm": 0.010518478229641914, |
| "learning_rate": 2.123595505617978e-06, |
| "loss": 0.0133, |
| "step": 2140 |
| }, |
| { |
| "epoch": 7.036683997689197, |
| "grad_norm": 0.22406050562858582, |
| "learning_rate": 2.1075441412520064e-06, |
| "loss": 0.0164, |
| "step": 2150 |
| }, |
| { |
| "epoch": 7.039572501444252, |
| "grad_norm": 0.6182830333709717, |
| "learning_rate": 2.0914927768860353e-06, |
| "loss": 0.0039, |
| "step": 2160 |
| }, |
| { |
| "epoch": 7.042461005199307, |
| "grad_norm": 0.028110405430197716, |
| "learning_rate": 2.0754414125200646e-06, |
| "loss": 0.0314, |
| "step": 2170 |
| }, |
| { |
| "epoch": 7.045349508954362, |
| "grad_norm": 0.15830457210540771, |
| "learning_rate": 2.059390048154093e-06, |
| "loss": 0.0021, |
| "step": 2180 |
| }, |
| { |
| "epoch": 7.048238012709416, |
| "grad_norm": 0.026566576212644577, |
| "learning_rate": 2.043338683788122e-06, |
| "loss": 0.0079, |
| "step": 2190 |
| }, |
| { |
| "epoch": 7.051126516464471, |
| "grad_norm": 0.16128750145435333, |
| "learning_rate": 2.027287319422151e-06, |
| "loss": 0.0036, |
| "step": 2200 |
| }, |
| { |
| "epoch": 7.054015020219526, |
| "grad_norm": 0.1151314377784729, |
| "learning_rate": 2.01123595505618e-06, |
| "loss": 0.0009, |
| "step": 2210 |
| }, |
| { |
| "epoch": 7.056903523974581, |
| "grad_norm": 5.008311748504639, |
| "learning_rate": 1.995184590690209e-06, |
| "loss": 0.0182, |
| "step": 2220 |
| }, |
| { |
| "epoch": 7.059792027729636, |
| "grad_norm": 0.17845681309700012, |
| "learning_rate": 1.9791332263242377e-06, |
| "loss": 0.0007, |
| "step": 2230 |
| }, |
| { |
| "epoch": 7.062680531484691, |
| "grad_norm": 0.620922327041626, |
| "learning_rate": 1.9630818619582666e-06, |
| "loss": 0.0013, |
| "step": 2240 |
| }, |
| { |
| "epoch": 7.065569035239746, |
| "grad_norm": 0.22967398166656494, |
| "learning_rate": 1.9470304975922956e-06, |
| "loss": 0.0008, |
| "step": 2250 |
| }, |
| { |
| "epoch": 7.0684575389948, |
| "grad_norm": 0.4523940682411194, |
| "learning_rate": 1.9309791332263245e-06, |
| "loss": 0.0008, |
| "step": 2260 |
| }, |
| { |
| "epoch": 7.071346042749855, |
| "grad_norm": 70.14838409423828, |
| "learning_rate": 1.9149277688603534e-06, |
| "loss": 0.0123, |
| "step": 2270 |
| }, |
| { |
| "epoch": 7.07423454650491, |
| "grad_norm": 0.06539999693632126, |
| "learning_rate": 1.8988764044943821e-06, |
| "loss": 0.0014, |
| "step": 2280 |
| }, |
| { |
| "epoch": 7.077123050259965, |
| "grad_norm": 0.6128162145614624, |
| "learning_rate": 1.882825040128411e-06, |
| "loss": 0.0924, |
| "step": 2290 |
| }, |
| { |
| "epoch": 7.08001155401502, |
| "grad_norm": 0.013330874964594841, |
| "learning_rate": 1.86677367576244e-06, |
| "loss": 0.0005, |
| "step": 2300 |
| }, |
| { |
| "epoch": 7.082900057770075, |
| "grad_norm": 0.8353597521781921, |
| "learning_rate": 1.8507223113964689e-06, |
| "loss": 0.001, |
| "step": 2310 |
| }, |
| { |
| "epoch": 7.083477758521086, |
| "eval_accuracy": 0.8098159509202454, |
| "eval_loss": 1.0094096660614014, |
| "eval_runtime": 67.7303, |
| "eval_samples_per_second": 7.22, |
| "eval_steps_per_second": 0.915, |
| "step": 2312 |
| }, |
| { |
| "epoch": 8.002310803004043, |
| "grad_norm": 3.3091068267822266, |
| "learning_rate": 1.8346709470304978e-06, |
| "loss": 0.0028, |
| "step": 2320 |
| }, |
| { |
| "epoch": 8.0051993067591, |
| "grad_norm": 0.01653185486793518, |
| "learning_rate": 1.8186195826645267e-06, |
| "loss": 0.0006, |
| "step": 2330 |
| }, |
| { |
| "epoch": 8.008087810514153, |
| "grad_norm": 0.07168268412351608, |
| "learning_rate": 1.8025682182985554e-06, |
| "loss": 0.0015, |
| "step": 2340 |
| }, |
| { |
| "epoch": 8.01097631426921, |
| "grad_norm": 8.671859741210938, |
| "learning_rate": 1.7865168539325846e-06, |
| "loss": 0.0018, |
| "step": 2350 |
| }, |
| { |
| "epoch": 8.013864818024263, |
| "grad_norm": 0.32845407724380493, |
| "learning_rate": 1.7704654895666133e-06, |
| "loss": 0.0011, |
| "step": 2360 |
| }, |
| { |
| "epoch": 8.016753321779317, |
| "grad_norm": 0.029094768688082695, |
| "learning_rate": 1.7544141252006422e-06, |
| "loss": 0.0016, |
| "step": 2370 |
| }, |
| { |
| "epoch": 8.019641825534373, |
| "grad_norm": 0.28631359338760376, |
| "learning_rate": 1.738362760834671e-06, |
| "loss": 0.0007, |
| "step": 2380 |
| }, |
| { |
| "epoch": 8.022530329289427, |
| "grad_norm": 0.07883863896131516, |
| "learning_rate": 1.7223113964687e-06, |
| "loss": 0.0008, |
| "step": 2390 |
| }, |
| { |
| "epoch": 8.025418833044483, |
| "grad_norm": 0.1313171684741974, |
| "learning_rate": 1.706260032102729e-06, |
| "loss": 0.0005, |
| "step": 2400 |
| }, |
| { |
| "epoch": 8.028307336799537, |
| "grad_norm": 0.00614338181912899, |
| "learning_rate": 1.6902086677367576e-06, |
| "loss": 0.0029, |
| "step": 2410 |
| }, |
| { |
| "epoch": 8.031195840554593, |
| "grad_norm": 0.008385803550481796, |
| "learning_rate": 1.6741573033707868e-06, |
| "loss": 0.0011, |
| "step": 2420 |
| }, |
| { |
| "epoch": 8.034084344309647, |
| "grad_norm": 0.07132828235626221, |
| "learning_rate": 1.6581059390048155e-06, |
| "loss": 0.0004, |
| "step": 2430 |
| }, |
| { |
| "epoch": 8.036972848064703, |
| "grad_norm": 0.018882159143686295, |
| "learning_rate": 1.6420545746388444e-06, |
| "loss": 0.0003, |
| "step": 2440 |
| }, |
| { |
| "epoch": 8.039861351819757, |
| "grad_norm": 0.9956786036491394, |
| "learning_rate": 1.6260032102728735e-06, |
| "loss": 0.0004, |
| "step": 2450 |
| }, |
| { |
| "epoch": 8.042749855574812, |
| "grad_norm": 0.03203749656677246, |
| "learning_rate": 1.6099518459069023e-06, |
| "loss": 0.0004, |
| "step": 2460 |
| }, |
| { |
| "epoch": 8.045638359329867, |
| "grad_norm": 0.016354788094758987, |
| "learning_rate": 1.5939004815409312e-06, |
| "loss": 0.0009, |
| "step": 2470 |
| }, |
| { |
| "epoch": 8.048526863084922, |
| "grad_norm": 0.06263572722673416, |
| "learning_rate": 1.5778491171749599e-06, |
| "loss": 0.0004, |
| "step": 2480 |
| }, |
| { |
| "epoch": 8.051415366839977, |
| "grad_norm": 0.39288368821144104, |
| "learning_rate": 1.561797752808989e-06, |
| "loss": 0.0009, |
| "step": 2490 |
| }, |
| { |
| "epoch": 8.054303870595032, |
| "grad_norm": 0.02906760200858116, |
| "learning_rate": 1.5457463884430177e-06, |
| "loss": 0.0006, |
| "step": 2500 |
| }, |
| { |
| "epoch": 8.057192374350088, |
| "grad_norm": 0.012857715599238873, |
| "learning_rate": 1.5296950240770466e-06, |
| "loss": 0.0005, |
| "step": 2510 |
| }, |
| { |
| "epoch": 8.060080878105142, |
| "grad_norm": 0.01696927100419998, |
| "learning_rate": 1.5136436597110758e-06, |
| "loss": 0.0002, |
| "step": 2520 |
| }, |
| { |
| "epoch": 8.062969381860196, |
| "grad_norm": 0.011458744294941425, |
| "learning_rate": 1.4975922953451045e-06, |
| "loss": 0.0006, |
| "step": 2530 |
| }, |
| { |
| "epoch": 8.065857885615252, |
| "grad_norm": 0.10132727771997452, |
| "learning_rate": 1.4815409309791334e-06, |
| "loss": 0.0007, |
| "step": 2540 |
| }, |
| { |
| "epoch": 8.068746389370306, |
| "grad_norm": 0.029605470597743988, |
| "learning_rate": 1.4654895666131621e-06, |
| "loss": 0.0105, |
| "step": 2550 |
| }, |
| { |
| "epoch": 8.071634893125362, |
| "grad_norm": 0.03353351354598999, |
| "learning_rate": 1.4494382022471912e-06, |
| "loss": 0.0005, |
| "step": 2560 |
| }, |
| { |
| "epoch": 8.074523396880416, |
| "grad_norm": 0.01684616506099701, |
| "learning_rate": 1.43338683788122e-06, |
| "loss": 0.0011, |
| "step": 2570 |
| }, |
| { |
| "epoch": 8.077411900635472, |
| "grad_norm": 0.46243542432785034, |
| "learning_rate": 1.4173354735152489e-06, |
| "loss": 0.0007, |
| "step": 2580 |
| }, |
| { |
| "epoch": 8.080300404390526, |
| "grad_norm": 0.10783965140581131, |
| "learning_rate": 1.401284109149278e-06, |
| "loss": 0.0021, |
| "step": 2590 |
| }, |
| { |
| "epoch": 8.08318890814558, |
| "grad_norm": 10.902896881103516, |
| "learning_rate": 1.3852327447833067e-06, |
| "loss": 0.0038, |
| "step": 2600 |
| }, |
| { |
| "epoch": 8.083477758521086, |
| "eval_accuracy": 0.820040899795501, |
| "eval_loss": 1.0583363771438599, |
| "eval_runtime": 70.0171, |
| "eval_samples_per_second": 6.984, |
| "eval_steps_per_second": 0.885, |
| "step": 2601 |
| }, |
| { |
| "epoch": 9.00259965337955, |
| "grad_norm": 0.00959661416709423, |
| "learning_rate": 1.3691813804173356e-06, |
| "loss": 0.0003, |
| "step": 2610 |
| }, |
| { |
| "epoch": 9.005488157134604, |
| "grad_norm": 0.0043175676837563515, |
| "learning_rate": 1.3531300160513643e-06, |
| "loss": 0.0003, |
| "step": 2620 |
| }, |
| { |
| "epoch": 9.00837666088966, |
| "grad_norm": 0.010923999361693859, |
| "learning_rate": 1.3370786516853935e-06, |
| "loss": 0.0003, |
| "step": 2630 |
| }, |
| { |
| "epoch": 9.011265164644714, |
| "grad_norm": 0.016267606988549232, |
| "learning_rate": 1.3210272873194222e-06, |
| "loss": 0.002, |
| "step": 2640 |
| }, |
| { |
| "epoch": 9.01415366839977, |
| "grad_norm": 0.011338594369590282, |
| "learning_rate": 1.304975922953451e-06, |
| "loss": 0.0004, |
| "step": 2650 |
| }, |
| { |
| "epoch": 9.017042172154824, |
| "grad_norm": 2.419212579727173, |
| "learning_rate": 1.2889245585874802e-06, |
| "loss": 0.0007, |
| "step": 2660 |
| }, |
| { |
| "epoch": 9.019930675909878, |
| "grad_norm": 0.05167938023805618, |
| "learning_rate": 1.272873194221509e-06, |
| "loss": 0.0004, |
| "step": 2670 |
| }, |
| { |
| "epoch": 9.022819179664934, |
| "grad_norm": 0.010392725467681885, |
| "learning_rate": 1.2568218298555379e-06, |
| "loss": 0.0004, |
| "step": 2680 |
| }, |
| { |
| "epoch": 9.025707683419988, |
| "grad_norm": 0.09611614793539047, |
| "learning_rate": 1.2407704654895668e-06, |
| "loss": 0.0031, |
| "step": 2690 |
| }, |
| { |
| "epoch": 9.028596187175044, |
| "grad_norm": 0.0379074327647686, |
| "learning_rate": 1.2247191011235957e-06, |
| "loss": 0.0003, |
| "step": 2700 |
| }, |
| { |
| "epoch": 9.031484690930098, |
| "grad_norm": 0.2393835335969925, |
| "learning_rate": 1.2086677367576246e-06, |
| "loss": 0.0028, |
| "step": 2710 |
| }, |
| { |
| "epoch": 9.034373194685154, |
| "grad_norm": 0.19386854767799377, |
| "learning_rate": 1.1926163723916533e-06, |
| "loss": 0.0109, |
| "step": 2720 |
| }, |
| { |
| "epoch": 9.037261698440208, |
| "grad_norm": 0.014546582475304604, |
| "learning_rate": 1.1765650080256823e-06, |
| "loss": 0.0004, |
| "step": 2730 |
| }, |
| { |
| "epoch": 9.040150202195262, |
| "grad_norm": 0.018764443695545197, |
| "learning_rate": 1.1605136436597112e-06, |
| "loss": 0.0006, |
| "step": 2740 |
| }, |
| { |
| "epoch": 9.043038705950318, |
| "grad_norm": 0.0548914410173893, |
| "learning_rate": 1.14446227929374e-06, |
| "loss": 0.0051, |
| "step": 2750 |
| }, |
| { |
| "epoch": 9.045927209705372, |
| "grad_norm": 0.774179220199585, |
| "learning_rate": 1.1284109149277688e-06, |
| "loss": 0.0005, |
| "step": 2760 |
| }, |
| { |
| "epoch": 9.048815713460428, |
| "grad_norm": 1.2299340963363647, |
| "learning_rate": 1.112359550561798e-06, |
| "loss": 0.0027, |
| "step": 2770 |
| }, |
| { |
| "epoch": 9.051704217215482, |
| "grad_norm": 2.421196699142456, |
| "learning_rate": 1.0963081861958269e-06, |
| "loss": 0.0006, |
| "step": 2780 |
| }, |
| { |
| "epoch": 9.054592720970538, |
| "grad_norm": 0.036972079426050186, |
| "learning_rate": 1.0802568218298556e-06, |
| "loss": 0.0004, |
| "step": 2790 |
| }, |
| { |
| "epoch": 9.057481224725592, |
| "grad_norm": 1.629666805267334, |
| "learning_rate": 1.0642054574638845e-06, |
| "loss": 0.0026, |
| "step": 2800 |
| }, |
| { |
| "epoch": 9.060369728480646, |
| "grad_norm": 6.719384670257568, |
| "learning_rate": 1.0481540930979134e-06, |
| "loss": 0.0012, |
| "step": 2810 |
| }, |
| { |
| "epoch": 9.063258232235702, |
| "grad_norm": 0.04605178162455559, |
| "learning_rate": 1.0321027287319423e-06, |
| "loss": 0.0003, |
| "step": 2820 |
| }, |
| { |
| "epoch": 9.066146735990756, |
| "grad_norm": 0.9861611723899841, |
| "learning_rate": 1.016051364365971e-06, |
| "loss": 0.0008, |
| "step": 2830 |
| }, |
| { |
| "epoch": 9.069035239745812, |
| "grad_norm": 0.0451810359954834, |
| "learning_rate": 1.0000000000000002e-06, |
| "loss": 0.001, |
| "step": 2840 |
| }, |
| { |
| "epoch": 9.071923743500866, |
| "grad_norm": 0.007434166967868805, |
| "learning_rate": 9.83948635634029e-07, |
| "loss": 0.0003, |
| "step": 2850 |
| }, |
| { |
| "epoch": 9.074812247255922, |
| "grad_norm": 0.00690662395209074, |
| "learning_rate": 9.678972712680578e-07, |
| "loss": 0.0006, |
| "step": 2860 |
| }, |
| { |
| "epoch": 9.077700751010976, |
| "grad_norm": 0.007874921895563602, |
| "learning_rate": 9.518459069020867e-07, |
| "loss": 0.0005, |
| "step": 2870 |
| }, |
| { |
| "epoch": 9.080589254766032, |
| "grad_norm": 0.12472189962863922, |
| "learning_rate": 9.357945425361156e-07, |
| "loss": 0.0015, |
| "step": 2880 |
| }, |
| { |
| "epoch": 9.083477758521086, |
| "grad_norm": 0.38941165804862976, |
| "learning_rate": 9.197431781701445e-07, |
| "loss": 0.0011, |
| "step": 2890 |
| }, |
| { |
| "epoch": 9.083477758521086, |
| "eval_accuracy": 0.8118609406952966, |
| "eval_loss": 1.0632034540176392, |
| "eval_runtime": 74.4675, |
| "eval_samples_per_second": 6.567, |
| "eval_steps_per_second": 0.833, |
| "step": 2890 |
| }, |
| { |
| "epoch": 9.083477758521086, |
| "step": 2890, |
| "total_flos": 2.8784092016797286e+19, |
| "train_loss": 0.13445161678191841, |
| "train_runtime": 7468.4263, |
| "train_samples_per_second": 3.708, |
| "train_steps_per_second": 0.464 |
| }, |
| { |
| "epoch": 9.083477758521086, |
| "eval_accuracy": 0.8172690763052208, |
| "eval_loss": 0.6521589159965515, |
| "eval_runtime": 71.7298, |
| "eval_samples_per_second": 6.943, |
| "eval_steps_per_second": 0.878, |
| "step": 2890 |
| }, |
| { |
| "epoch": 9.083477758521086, |
| "eval_accuracy": 0.8172690763052208, |
| "eval_loss": 0.6521589756011963, |
| "eval_runtime": 69.8312, |
| "eval_samples_per_second": 7.131, |
| "eval_steps_per_second": 0.902, |
| "step": 2890 |
| }, |
| { |
| "epoch": 9.083477758521086, |
| "eval_accuracy": 0.8172690763052208, |
| "eval_loss": 0.6521589159965515, |
| "eval_runtime": 67.9033, |
| "eval_samples_per_second": 7.334, |
| "eval_steps_per_second": 0.928, |
| "step": 2890 |
| }, |
| { |
| "epoch": 9.083477758521086, |
| "eval_accuracy": 0.8172690763052208, |
| "eval_loss": 0.6521589756011963, |
| "eval_runtime": 71.0552, |
| "eval_samples_per_second": 7.009, |
| "eval_steps_per_second": 0.887, |
| "step": 2890 |
| } |
| ], |
| "logging_steps": 10, |
| "max_steps": 3462, |
| "num_input_tokens_seen": 0, |
| "num_train_epochs": 9223372036854775807, |
| "save_steps": 500, |
| "stateful_callbacks": { |
| "EarlyStoppingCallback": { |
| "args": { |
| "early_stopping_patience": 4, |
| "early_stopping_threshold": 0.0 |
| }, |
| "attributes": { |
| "early_stopping_patience_counter": 4 |
| } |
| }, |
| "TrainerControl": { |
| "args": { |
| "should_epoch_stop": false, |
| "should_evaluate": false, |
| "should_log": false, |
| "should_save": true, |
| "should_training_stop": true |
| }, |
| "attributes": {} |
| } |
| }, |
| "total_flos": 2.8784092016797286e+19, |
| "train_batch_size": 8, |
| "trial_name": null, |
| "trial_params": null |
| } |