{ "best_global_step": 16224, "best_metric": 0.019662603735923767, "best_model_checkpoint": "./models/autocrop-tekst/checkpoint-16224", "epoch": 50.0, "eval_steps": 500, "global_step": 31200, "is_hyper_param_search": false, "is_local_process_zero": true, "is_world_process_zero": true, "log_history": [ { "epoch": 0.016025641025641024, "grad_norm": 2.8978374004364014, "learning_rate": 1.7307692307692308e-07, "loss": 0.6631190299987793, "step": 10 }, { "epoch": 0.03205128205128205, "grad_norm": 2.461387872695923, "learning_rate": 3.653846153846154e-07, "loss": 0.664227819442749, "step": 20 }, { "epoch": 0.04807692307692308, "grad_norm": 2.3286049365997314, "learning_rate": 5.576923076923077e-07, "loss": 0.6693105220794677, "step": 30 }, { "epoch": 0.0641025641025641, "grad_norm": 2.987257957458496, "learning_rate": 7.5e-07, "loss": 0.6588366985321045, "step": 40 }, { "epoch": 0.08012820512820513, "grad_norm": 2.8601245880126953, "learning_rate": 9.423076923076924e-07, "loss": 0.6513937950134278, "step": 50 }, { "epoch": 0.09615384615384616, "grad_norm": 3.325812816619873, "learning_rate": 1.1346153846153845e-06, "loss": 0.6468723773956299, "step": 60 }, { "epoch": 0.11217948717948718, "grad_norm": 3.0886006355285645, "learning_rate": 1.326923076923077e-06, "loss": 0.6379693984985352, "step": 70 }, { "epoch": 0.1282051282051282, "grad_norm": 4.030473232269287, "learning_rate": 1.5192307692307694e-06, "loss": 0.631058645248413, "step": 80 }, { "epoch": 0.14423076923076922, "grad_norm": 3.0825562477111816, "learning_rate": 1.7115384615384615e-06, "loss": 0.6204285621643066, "step": 90 }, { "epoch": 0.16025641025641027, "grad_norm": 2.882533550262451, "learning_rate": 1.9038461538461538e-06, "loss": 0.6076338768005372, "step": 100 }, { "epoch": 0.1762820512820513, "grad_norm": 2.8292901515960693, "learning_rate": 2.096153846153846e-06, "loss": 0.5971291542053223, "step": 110 }, { "epoch": 0.19230769230769232, "grad_norm": 2.8647992610931396, "learning_rate": 
2.2884615384615387e-06, "loss": 0.5831421852111817, "step": 120 }, { "epoch": 0.20833333333333334, "grad_norm": 2.1939799785614014, "learning_rate": 2.480769230769231e-06, "loss": 0.5634413719177246, "step": 130 }, { "epoch": 0.22435897435897437, "grad_norm": 2.08270001411438, "learning_rate": 2.6730769230769234e-06, "loss": 0.5565694808959961, "step": 140 }, { "epoch": 0.2403846153846154, "grad_norm": 2.2558093070983887, "learning_rate": 2.865384615384615e-06, "loss": 0.5334548950195312, "step": 150 }, { "epoch": 0.2564102564102564, "grad_norm": 1.9057304859161377, "learning_rate": 3.0576923076923077e-06, "loss": 0.5233359813690186, "step": 160 }, { "epoch": 0.2724358974358974, "grad_norm": 2.0299901962280273, "learning_rate": 3.2500000000000002e-06, "loss": 0.5021195411682129, "step": 170 }, { "epoch": 0.28846153846153844, "grad_norm": 1.978830099105835, "learning_rate": 3.4423076923076924e-06, "loss": 0.4856695652008057, "step": 180 }, { "epoch": 0.30448717948717946, "grad_norm": 1.660170555114746, "learning_rate": 3.634615384615385e-06, "loss": 0.46488447189331056, "step": 190 }, { "epoch": 0.32051282051282054, "grad_norm": 1.8923237323760986, "learning_rate": 3.826923076923077e-06, "loss": 0.4534611701965332, "step": 200 }, { "epoch": 0.33653846153846156, "grad_norm": 1.5200432538986206, "learning_rate": 4.01923076923077e-06, "loss": 0.42194423675537107, "step": 210 }, { "epoch": 0.3525641025641026, "grad_norm": 1.738019347190857, "learning_rate": 4.211538461538461e-06, "loss": 0.4001463413238525, "step": 220 }, { "epoch": 0.3685897435897436, "grad_norm": 1.4980989694595337, "learning_rate": 4.403846153846154e-06, "loss": 0.4010288715362549, "step": 230 }, { "epoch": 0.38461538461538464, "grad_norm": 1.6915544271469116, "learning_rate": 4.596153846153846e-06, "loss": 0.38144919872283933, "step": 240 }, { "epoch": 0.40064102564102566, "grad_norm": 1.5964322090148926, "learning_rate": 4.788461538461538e-06, "loss": 0.36180050373077394, "step": 250 }, { "epoch": 
0.4166666666666667, "grad_norm": 1.5442605018615723, "learning_rate": 4.980769230769231e-06, "loss": 0.34835476875305177, "step": 260 }, { "epoch": 0.4326923076923077, "grad_norm": 1.6271041631698608, "learning_rate": 5.173076923076923e-06, "loss": 0.33919360637664797, "step": 270 }, { "epoch": 0.44871794871794873, "grad_norm": 1.8765562772750854, "learning_rate": 5.365384615384616e-06, "loss": 0.3171669006347656, "step": 280 }, { "epoch": 0.46474358974358976, "grad_norm": 1.3951225280761719, "learning_rate": 5.557692307692308e-06, "loss": 0.29842212200164797, "step": 290 }, { "epoch": 0.4807692307692308, "grad_norm": 1.870288610458374, "learning_rate": 5.750000000000001e-06, "loss": 0.29542884826660154, "step": 300 }, { "epoch": 0.4967948717948718, "grad_norm": 3.037306308746338, "learning_rate": 5.9423076923076926e-06, "loss": 0.29122865200042725, "step": 310 }, { "epoch": 0.5128205128205128, "grad_norm": 1.6633657217025757, "learning_rate": 6.134615384615384e-06, "loss": 0.25931763648986816, "step": 320 }, { "epoch": 0.5288461538461539, "grad_norm": 2.102506160736084, "learning_rate": 6.326923076923077e-06, "loss": 0.27236285209655764, "step": 330 }, { "epoch": 0.5448717948717948, "grad_norm": 2.3356759548187256, "learning_rate": 6.519230769230769e-06, "loss": 0.24678642749786378, "step": 340 }, { "epoch": 0.5608974358974359, "grad_norm": 2.030473232269287, "learning_rate": 6.711538461538462e-06, "loss": 0.2475229263305664, "step": 350 }, { "epoch": 0.5769230769230769, "grad_norm": 1.3857992887496948, "learning_rate": 6.903846153846154e-06, "loss": 0.23202922344207763, "step": 360 }, { "epoch": 0.592948717948718, "grad_norm": 1.5786619186401367, "learning_rate": 7.096153846153846e-06, "loss": 0.21291444301605225, "step": 370 }, { "epoch": 0.6089743589743589, "grad_norm": 1.9310903549194336, "learning_rate": 7.288461538461539e-06, "loss": 0.21734194755554198, "step": 380 }, { "epoch": 0.625, "grad_norm": 2.047337055206299, "learning_rate": 7.480769230769231e-06, 
"loss": 0.2049659013748169, "step": 390 }, { "epoch": 0.6410256410256411, "grad_norm": 1.4146894216537476, "learning_rate": 7.673076923076923e-06, "loss": 0.2001959800720215, "step": 400 }, { "epoch": 0.657051282051282, "grad_norm": 1.571545124053955, "learning_rate": 7.865384615384616e-06, "loss": 0.20457913875579833, "step": 410 }, { "epoch": 0.6730769230769231, "grad_norm": 1.779143214225769, "learning_rate": 8.057692307692308e-06, "loss": 0.17855006456375122, "step": 420 }, { "epoch": 0.6891025641025641, "grad_norm": 1.659570336341858, "learning_rate": 8.25e-06, "loss": 0.18677111864089965, "step": 430 }, { "epoch": 0.7051282051282052, "grad_norm": 1.814180850982666, "learning_rate": 8.442307692307693e-06, "loss": 0.1817182183265686, "step": 440 }, { "epoch": 0.7211538461538461, "grad_norm": 1.700537919998169, "learning_rate": 8.634615384615384e-06, "loss": 0.16286907196044922, "step": 450 }, { "epoch": 0.7371794871794872, "grad_norm": 1.8104839324951172, "learning_rate": 8.826923076923077e-06, "loss": 0.16935815811157226, "step": 460 }, { "epoch": 0.7532051282051282, "grad_norm": 1.0565820932388306, "learning_rate": 9.019230769230769e-06, "loss": 0.15165486335754394, "step": 470 }, { "epoch": 0.7692307692307693, "grad_norm": 1.0826140642166138, "learning_rate": 9.211538461538462e-06, "loss": 0.14564144611358643, "step": 480 }, { "epoch": 0.7852564102564102, "grad_norm": 1.972421407699585, "learning_rate": 9.403846153846154e-06, "loss": 0.1535298466682434, "step": 490 }, { "epoch": 0.8012820512820513, "grad_norm": 1.3333393335342407, "learning_rate": 9.596153846153845e-06, "loss": 0.1533737897872925, "step": 500 }, { "epoch": 0.8173076923076923, "grad_norm": 1.3685951232910156, "learning_rate": 9.78846153846154e-06, "loss": 0.15220363140106202, "step": 510 }, { "epoch": 0.8333333333333334, "grad_norm": 1.045799732208252, "learning_rate": 9.98076923076923e-06, "loss": 0.14262043237686156, "step": 520 }, { "epoch": 0.8493589743589743, "grad_norm": 
2.0969302654266357, "learning_rate": 1.0173076923076924e-05, "loss": 0.1315517783164978, "step": 530 }, { "epoch": 0.8653846153846154, "grad_norm": 2.7112653255462646, "learning_rate": 1.0365384615384615e-05, "loss": 0.12437996864318848, "step": 540 }, { "epoch": 0.8814102564102564, "grad_norm": 2.098452568054199, "learning_rate": 1.055769230769231e-05, "loss": 0.13736732006073, "step": 550 }, { "epoch": 0.8974358974358975, "grad_norm": 1.0644536018371582, "learning_rate": 1.075e-05, "loss": 0.11754384040832519, "step": 560 }, { "epoch": 0.9134615384615384, "grad_norm": 1.5819801092147827, "learning_rate": 1.0942307692307691e-05, "loss": 0.12328445911407471, "step": 570 }, { "epoch": 0.9294871794871795, "grad_norm": 1.3748680353164673, "learning_rate": 1.1134615384615386e-05, "loss": 0.1165321946144104, "step": 580 }, { "epoch": 0.9455128205128205, "grad_norm": 0.7080390453338623, "learning_rate": 1.1326923076923076e-05, "loss": 0.11513717174530029, "step": 590 }, { "epoch": 0.9615384615384616, "grad_norm": 2.556220054626465, "learning_rate": 1.151923076923077e-05, "loss": 0.11560723781585694, "step": 600 }, { "epoch": 0.9775641025641025, "grad_norm": 3.159904956817627, "learning_rate": 1.1711538461538461e-05, "loss": 0.1105891227722168, "step": 610 }, { "epoch": 0.9935897435897436, "grad_norm": 1.6174134016036987, "learning_rate": 1.1903846153846154e-05, "loss": 0.11173322200775146, "step": 620 }, { "epoch": 1.0, "eval_accuracy_background": NaN, "eval_accuracy_crop": 0.9809820602372269, "eval_iou_background": 0.0, "eval_iou_crop": 0.9809820602372269, "eval_loss": 0.08471288532018661, "eval_mean_accuracy": 0.9809820602372269, "eval_mean_iou": 0.49049103011861345, "eval_overall_accuracy": 0.9809820602372269, "eval_runtime": 39.6883, "eval_samples_per_second": 22.198, "eval_steps_per_second": 2.797, "step": 624 }, { "epoch": 1.0096153846153846, "grad_norm": 1.0499769449234009, "learning_rate": 1.2096153846153847e-05, "loss": 0.10642787218093872, "step": 630 }, { 
"epoch": 1.0256410256410255, "grad_norm": 0.8296972513198853, "learning_rate": 1.2288461538461539e-05, "loss": 0.11537472009658814, "step": 640 }, { "epoch": 1.0416666666666667, "grad_norm": 1.169792890548706, "learning_rate": 1.2480769230769232e-05, "loss": 0.10270591974258422, "step": 650 }, { "epoch": 1.0576923076923077, "grad_norm": 1.745046615600586, "learning_rate": 1.2673076923076923e-05, "loss": 0.10901927947998047, "step": 660 }, { "epoch": 1.0737179487179487, "grad_norm": 2.268970251083374, "learning_rate": 1.2865384615384615e-05, "loss": 0.09987906217575074, "step": 670 }, { "epoch": 1.0897435897435896, "grad_norm": 0.710591733455658, "learning_rate": 1.3057692307692308e-05, "loss": 0.10316517353057861, "step": 680 }, { "epoch": 1.1057692307692308, "grad_norm": 0.6230706572532654, "learning_rate": 1.325e-05, "loss": 0.09216501712799072, "step": 690 }, { "epoch": 1.1217948717948718, "grad_norm": 0.9660974740982056, "learning_rate": 1.3442307692307693e-05, "loss": 0.10565031766891479, "step": 700 }, { "epoch": 1.1378205128205128, "grad_norm": 1.8511900901794434, "learning_rate": 1.3634615384615385e-05, "loss": 0.11355563402175903, "step": 710 }, { "epoch": 1.1538461538461537, "grad_norm": 2.2117269039154053, "learning_rate": 1.3826923076923076e-05, "loss": 0.10050290822982788, "step": 720 }, { "epoch": 1.169871794871795, "grad_norm": 1.5981507301330566, "learning_rate": 1.401923076923077e-05, "loss": 0.09200314879417419, "step": 730 }, { "epoch": 1.185897435897436, "grad_norm": 0.723307728767395, "learning_rate": 1.4211538461538461e-05, "loss": 0.09783825278282166, "step": 740 }, { "epoch": 1.2019230769230769, "grad_norm": 1.2051719427108765, "learning_rate": 1.4403846153846156e-05, "loss": 0.09543119668960572, "step": 750 }, { "epoch": 1.217948717948718, "grad_norm": 1.069825530052185, "learning_rate": 1.4596153846153846e-05, "loss": 0.08868036270141602, "step": 760 }, { "epoch": 1.233974358974359, "grad_norm": 1.354432225227356, "learning_rate": 
1.4788461538461539e-05, "loss": 0.09343335628509522, "step": 770 }, { "epoch": 1.25, "grad_norm": 1.478181004524231, "learning_rate": 1.4980769230769231e-05, "loss": 0.09314903020858764, "step": 780 }, { "epoch": 1.266025641025641, "grad_norm": 0.9095227122306824, "learning_rate": 1.5173076923076922e-05, "loss": 0.09059506058692932, "step": 790 }, { "epoch": 1.282051282051282, "grad_norm": 0.8066712617874146, "learning_rate": 1.5365384615384615e-05, "loss": 0.087891685962677, "step": 800 }, { "epoch": 1.2980769230769231, "grad_norm": 1.9738563299179077, "learning_rate": 1.555769230769231e-05, "loss": 0.09054545760154724, "step": 810 }, { "epoch": 1.314102564102564, "grad_norm": 0.6593788266181946, "learning_rate": 1.575e-05, "loss": 0.08902927637100219, "step": 820 }, { "epoch": 1.330128205128205, "grad_norm": 1.341935634613037, "learning_rate": 1.5942307692307693e-05, "loss": 0.08920291662216187, "step": 830 }, { "epoch": 1.3461538461538463, "grad_norm": 0.5621249079704285, "learning_rate": 1.6134615384615385e-05, "loss": 0.08985798954963684, "step": 840 }, { "epoch": 1.3621794871794872, "grad_norm": 4.16618537902832, "learning_rate": 1.6326923076923078e-05, "loss": 0.08564502000808716, "step": 850 }, { "epoch": 1.3782051282051282, "grad_norm": 0.4976300895214081, "learning_rate": 1.651923076923077e-05, "loss": 0.07843945026397706, "step": 860 }, { "epoch": 1.3942307692307692, "grad_norm": 1.3510425090789795, "learning_rate": 1.6711538461538463e-05, "loss": 0.08893550038337708, "step": 870 }, { "epoch": 1.4102564102564101, "grad_norm": 1.2808810472488403, "learning_rate": 1.6903846153846155e-05, "loss": 0.0793556809425354, "step": 880 }, { "epoch": 1.4262820512820513, "grad_norm": 0.952301561832428, "learning_rate": 1.7096153846153844e-05, "loss": 0.07932060360908508, "step": 890 }, { "epoch": 1.4423076923076923, "grad_norm": 0.9076259732246399, "learning_rate": 1.7288461538461537e-05, "loss": 0.07979171872138976, "step": 900 }, { "epoch": 1.4583333333333333, 
"grad_norm": 1.6222256422042847, "learning_rate": 1.7480769230769233e-05, "loss": 0.0878231704235077, "step": 910 }, { "epoch": 1.4743589743589745, "grad_norm": 0.5938989520072937, "learning_rate": 1.7673076923076925e-05, "loss": 0.0748097062110901, "step": 920 }, { "epoch": 1.4903846153846154, "grad_norm": 1.5903033018112183, "learning_rate": 1.7865384615384615e-05, "loss": 0.0928604006767273, "step": 930 }, { "epoch": 1.5064102564102564, "grad_norm": 1.0538687705993652, "learning_rate": 1.8057692307692307e-05, "loss": 0.07924981117248535, "step": 940 }, { "epoch": 1.5224358974358974, "grad_norm": 1.5909301042556763, "learning_rate": 1.825e-05, "loss": 0.07935247421264649, "step": 950 }, { "epoch": 1.5384615384615383, "grad_norm": 1.4847450256347656, "learning_rate": 1.8442307692307692e-05, "loss": 0.07561319470405578, "step": 960 }, { "epoch": 1.5544871794871795, "grad_norm": 0.9455337524414062, "learning_rate": 1.8634615384615385e-05, "loss": 0.06679589748382568, "step": 970 }, { "epoch": 1.5705128205128205, "grad_norm": 2.9431333541870117, "learning_rate": 1.8826923076923077e-05, "loss": 0.07731801867485047, "step": 980 }, { "epoch": 1.5865384615384617, "grad_norm": 1.2514718770980835, "learning_rate": 1.901923076923077e-05, "loss": 0.0651570737361908, "step": 990 }, { "epoch": 1.6025641025641026, "grad_norm": 1.2942439317703247, "learning_rate": 1.921153846153846e-05, "loss": 0.06809894442558288, "step": 1000 }, { "epoch": 1.6185897435897436, "grad_norm": 3.1999220848083496, "learning_rate": 1.9403846153846155e-05, "loss": 0.06920793652534485, "step": 1010 }, { "epoch": 1.6346153846153846, "grad_norm": 1.061483383178711, "learning_rate": 1.9596153846153848e-05, "loss": 0.07084127068519593, "step": 1020 }, { "epoch": 1.6506410256410255, "grad_norm": 1.6536297798156738, "learning_rate": 1.978846153846154e-05, "loss": 0.07267069816589355, "step": 1030 }, { "epoch": 1.6666666666666665, "grad_norm": 0.517036497592926, "learning_rate": 1.998076923076923e-05, "loss": 
0.06605628728866578, "step": 1040 }, { "epoch": 1.6826923076923077, "grad_norm": 0.8198621869087219, "learning_rate": 2.0173076923076925e-05, "loss": 0.06464830040931702, "step": 1050 }, { "epoch": 1.6987179487179487, "grad_norm": 0.43641558289527893, "learning_rate": 2.0365384615384618e-05, "loss": 0.06300255060195922, "step": 1060 }, { "epoch": 1.7147435897435899, "grad_norm": 0.8637346625328064, "learning_rate": 2.0557692307692307e-05, "loss": 0.06466352939605713, "step": 1070 }, { "epoch": 1.7307692307692308, "grad_norm": 1.3485493659973145, "learning_rate": 2.075e-05, "loss": 0.06510435938835143, "step": 1080 }, { "epoch": 1.7467948717948718, "grad_norm": 0.5048149824142456, "learning_rate": 2.0942307692307692e-05, "loss": 0.0568791389465332, "step": 1090 }, { "epoch": 1.7628205128205128, "grad_norm": 0.7393937110900879, "learning_rate": 2.1134615384615388e-05, "loss": 0.06128841042518616, "step": 1100 }, { "epoch": 1.7788461538461537, "grad_norm": 0.49092328548431396, "learning_rate": 2.1326923076923077e-05, "loss": 0.06877773404121398, "step": 1110 }, { "epoch": 1.7948717948717947, "grad_norm": 1.5559990406036377, "learning_rate": 2.151923076923077e-05, "loss": 0.05716264247894287, "step": 1120 }, { "epoch": 1.810897435897436, "grad_norm": 0.9030606746673584, "learning_rate": 2.1711538461538462e-05, "loss": 0.059932160377502444, "step": 1130 }, { "epoch": 1.8269230769230769, "grad_norm": 0.9892504811286926, "learning_rate": 2.1903846153846155e-05, "loss": 0.05768003463745117, "step": 1140 }, { "epoch": 1.842948717948718, "grad_norm": 0.5394744873046875, "learning_rate": 2.2096153846153847e-05, "loss": 0.05758763551712036, "step": 1150 }, { "epoch": 1.858974358974359, "grad_norm": 1.6875523328781128, "learning_rate": 2.228846153846154e-05, "loss": 0.05770478844642639, "step": 1160 }, { "epoch": 1.875, "grad_norm": 0.7153938412666321, "learning_rate": 2.2480769230769233e-05, "loss": 0.05816774368286133, "step": 1170 }, { "epoch": 1.891025641025641, 
"grad_norm": 0.3587498366832733, "learning_rate": 2.267307692307692e-05, "loss": 0.05168953537940979, "step": 1180 }, { "epoch": 1.907051282051282, "grad_norm": 0.6020457148551941, "learning_rate": 2.2865384615384614e-05, "loss": 0.05018225908279419, "step": 1190 }, { "epoch": 1.9230769230769231, "grad_norm": 0.6615403890609741, "learning_rate": 2.305769230769231e-05, "loss": 0.06284399628639221, "step": 1200 }, { "epoch": 1.939102564102564, "grad_norm": 0.9490018486976624, "learning_rate": 2.3250000000000003e-05, "loss": 0.06275268793106079, "step": 1210 }, { "epoch": 1.9551282051282053, "grad_norm": 0.4771631956100464, "learning_rate": 2.3442307692307692e-05, "loss": 0.05917642712593078, "step": 1220 }, { "epoch": 1.9711538461538463, "grad_norm": 0.719717800617218, "learning_rate": 2.3634615384615384e-05, "loss": 0.05038931369781494, "step": 1230 }, { "epoch": 1.9871794871794872, "grad_norm": 0.5429818630218506, "learning_rate": 2.3826923076923077e-05, "loss": 0.053183364868164065, "step": 1240 }, { "epoch": 2.0, "eval_accuracy_background": NaN, "eval_accuracy_crop": 0.9879663422742617, "eval_iou_background": 0.0, "eval_iou_crop": 0.9879663422742617, "eval_loss": 0.04441356658935547, "eval_mean_accuracy": 0.9879663422742617, "eval_mean_iou": 0.49398317113713086, "eval_overall_accuracy": 0.9879663422742617, "eval_runtime": 34.1215, "eval_samples_per_second": 25.82, "eval_steps_per_second": 3.253, "step": 1248 }, { "epoch": 2.003205128205128, "grad_norm": 0.3801051676273346, "learning_rate": 2.401923076923077e-05, "loss": 0.04833372235298157, "step": 1250 }, { "epoch": 2.019230769230769, "grad_norm": 0.999081015586853, "learning_rate": 2.4211538461538462e-05, "loss": 0.052205616235733034, "step": 1260 }, { "epoch": 2.03525641025641, "grad_norm": 0.741291344165802, "learning_rate": 2.4403846153846155e-05, "loss": 0.04968209862709046, "step": 1270 }, { "epoch": 2.051282051282051, "grad_norm": 1.649759292602539, "learning_rate": 2.4596153846153847e-05, "loss": 
0.05742647647857666, "step": 1280 }, { "epoch": 2.0673076923076925, "grad_norm": 1.6699559688568115, "learning_rate": 2.4788461538461536e-05, "loss": 0.052060216665267944, "step": 1290 }, { "epoch": 2.0833333333333335, "grad_norm": 0.5854372382164001, "learning_rate": 2.4980769230769232e-05, "loss": 0.0502300500869751, "step": 1300 }, { "epoch": 2.0993589743589745, "grad_norm": 0.7020288705825806, "learning_rate": 2.5173076923076925e-05, "loss": 0.051952189207077025, "step": 1310 }, { "epoch": 2.1153846153846154, "grad_norm": 1.1406855583190918, "learning_rate": 2.5365384615384617e-05, "loss": 0.06145268678665161, "step": 1320 }, { "epoch": 2.1314102564102564, "grad_norm": 2.751540184020996, "learning_rate": 2.5557692307692307e-05, "loss": 0.04650202393531799, "step": 1330 }, { "epoch": 2.1474358974358974, "grad_norm": 1.1292656660079956, "learning_rate": 2.575e-05, "loss": 0.04999934732913971, "step": 1340 }, { "epoch": 2.1634615384615383, "grad_norm": 0.7582893967628479, "learning_rate": 2.5942307692307695e-05, "loss": 0.04774584174156189, "step": 1350 }, { "epoch": 2.1794871794871793, "grad_norm": 0.7890088558197021, "learning_rate": 2.6134615384615384e-05, "loss": 0.04984915852546692, "step": 1360 }, { "epoch": 2.1955128205128207, "grad_norm": 0.48213985562324524, "learning_rate": 2.6326923076923077e-05, "loss": 0.05540837645530701, "step": 1370 }, { "epoch": 2.2115384615384617, "grad_norm": 0.8197171688079834, "learning_rate": 2.651923076923077e-05, "loss": 0.0571667492389679, "step": 1380 }, { "epoch": 2.2275641025641026, "grad_norm": 3.909357786178589, "learning_rate": 2.6711538461538462e-05, "loss": 0.05169597864151001, "step": 1390 }, { "epoch": 2.2435897435897436, "grad_norm": 1.2111477851867676, "learning_rate": 2.6903846153846154e-05, "loss": 0.05419460535049438, "step": 1400 }, { "epoch": 2.2596153846153846, "grad_norm": 1.0997021198272705, "learning_rate": 2.7096153846153847e-05, "loss": 0.04321539998054504, "step": 1410 }, { "epoch": 
2.2756410256410255, "grad_norm": 0.5814623236656189, "learning_rate": 2.728846153846154e-05, "loss": 0.04394578337669373, "step": 1420 }, { "epoch": 2.2916666666666665, "grad_norm": 0.7090608477592468, "learning_rate": 2.7480769230769232e-05, "loss": 0.04326327443122864, "step": 1430 }, { "epoch": 2.3076923076923075, "grad_norm": 0.9651350975036621, "learning_rate": 2.7673076923076925e-05, "loss": 0.05761231780052185, "step": 1440 }, { "epoch": 2.323717948717949, "grad_norm": 0.7218719124794006, "learning_rate": 2.7865384615384617e-05, "loss": 0.04768041372299194, "step": 1450 }, { "epoch": 2.33974358974359, "grad_norm": 0.5020609498023987, "learning_rate": 2.805769230769231e-05, "loss": 0.047953438758850095, "step": 1460 }, { "epoch": 2.355769230769231, "grad_norm": 0.6746088862419128, "learning_rate": 2.825e-05, "loss": 0.04293334782123566, "step": 1470 }, { "epoch": 2.371794871794872, "grad_norm": 1.3429710865020752, "learning_rate": 2.844230769230769e-05, "loss": 0.052300912141799924, "step": 1480 }, { "epoch": 2.3878205128205128, "grad_norm": 0.4659610390663147, "learning_rate": 2.8634615384615387e-05, "loss": 0.04146589040756225, "step": 1490 }, { "epoch": 2.4038461538461537, "grad_norm": 0.8068901300430298, "learning_rate": 2.882692307692308e-05, "loss": 0.047113779187202456, "step": 1500 }, { "epoch": 2.4198717948717947, "grad_norm": 1.8203867673873901, "learning_rate": 2.901923076923077e-05, "loss": 0.0420144259929657, "step": 1510 }, { "epoch": 2.435897435897436, "grad_norm": 0.682911217212677, "learning_rate": 2.921153846153846e-05, "loss": 0.04502320885658264, "step": 1520 }, { "epoch": 2.451923076923077, "grad_norm": 0.8348860740661621, "learning_rate": 2.9403846153846154e-05, "loss": 0.04804910719394684, "step": 1530 }, { "epoch": 2.467948717948718, "grad_norm": 1.1765103340148926, "learning_rate": 2.9596153846153847e-05, "loss": 0.049623751640319826, "step": 1540 }, { "epoch": 2.483974358974359, "grad_norm": 0.8034332394599915, "learning_rate": 
2.978846153846154e-05, "loss": 0.042610961198806765, "step": 1550 }, { "epoch": 2.5, "grad_norm": 0.7089917063713074, "learning_rate": 2.9980769230769232e-05, "loss": 0.044666612148284913, "step": 1560 }, { "epoch": 2.516025641025641, "grad_norm": 2.6224801540374756, "learning_rate": 3.0173076923076924e-05, "loss": 0.04345373809337616, "step": 1570 }, { "epoch": 2.532051282051282, "grad_norm": 0.30182814598083496, "learning_rate": 3.0365384615384614e-05, "loss": 0.04136187434196472, "step": 1580 }, { "epoch": 2.5480769230769234, "grad_norm": 0.9174556136131287, "learning_rate": 3.0557692307692306e-05, "loss": 0.04562776982784271, "step": 1590 }, { "epoch": 2.564102564102564, "grad_norm": 0.6227515339851379, "learning_rate": 3.0749999999999995e-05, "loss": 0.04386436939239502, "step": 1600 }, { "epoch": 2.5801282051282053, "grad_norm": 0.8149673342704773, "learning_rate": 3.09423076923077e-05, "loss": 0.04567871391773224, "step": 1610 }, { "epoch": 2.5961538461538463, "grad_norm": 0.7843014001846313, "learning_rate": 3.113461538461539e-05, "loss": 0.04344689249992371, "step": 1620 }, { "epoch": 2.6121794871794872, "grad_norm": 1.0364046096801758, "learning_rate": 3.1326923076923076e-05, "loss": 0.046479719877243045, "step": 1630 }, { "epoch": 2.628205128205128, "grad_norm": 1.0321872234344482, "learning_rate": 3.151923076923077e-05, "loss": 0.039962741732597354, "step": 1640 }, { "epoch": 2.644230769230769, "grad_norm": 0.6547836065292358, "learning_rate": 3.171153846153846e-05, "loss": 0.0429239958524704, "step": 1650 }, { "epoch": 2.66025641025641, "grad_norm": 0.8532689213752747, "learning_rate": 3.190384615384616e-05, "loss": 0.04485795497894287, "step": 1660 }, { "epoch": 2.676282051282051, "grad_norm": 0.5231749415397644, "learning_rate": 3.2096153846153847e-05, "loss": 0.04166106581687927, "step": 1670 }, { "epoch": 2.6923076923076925, "grad_norm": 0.6179865002632141, "learning_rate": 3.2288461538461536e-05, "loss": 0.04188653826713562, "step": 1680 }, { 
"epoch": 2.7083333333333335, "grad_norm": 1.372294306755066, "learning_rate": 3.248076923076923e-05, "loss": 0.04062500596046448, "step": 1690 }, { "epoch": 2.7243589743589745, "grad_norm": 0.7334085702896118, "learning_rate": 3.267307692307692e-05, "loss": 0.0503538191318512, "step": 1700 }, { "epoch": 2.7403846153846154, "grad_norm": 0.4090029299259186, "learning_rate": 3.286538461538462e-05, "loss": 0.04095695614814758, "step": 1710 }, { "epoch": 2.7564102564102564, "grad_norm": 1.0092709064483643, "learning_rate": 3.305769230769231e-05, "loss": 0.03987523913383484, "step": 1720 }, { "epoch": 2.7724358974358974, "grad_norm": 0.7223461866378784, "learning_rate": 3.325e-05, "loss": 0.04037346243858338, "step": 1730 }, { "epoch": 2.7884615384615383, "grad_norm": 0.7112054824829102, "learning_rate": 3.344230769230769e-05, "loss": 0.04351586401462555, "step": 1740 }, { "epoch": 2.8044871794871797, "grad_norm": 1.2751537561416626, "learning_rate": 3.363461538461539e-05, "loss": 0.0415432870388031, "step": 1750 }, { "epoch": 2.8205128205128203, "grad_norm": 0.9211739897727966, "learning_rate": 3.3826923076923076e-05, "loss": 0.03843856155872345, "step": 1760 }, { "epoch": 2.8365384615384617, "grad_norm": 1.2814154624938965, "learning_rate": 3.401923076923077e-05, "loss": 0.04045993089675903, "step": 1770 }, { "epoch": 2.8525641025641026, "grad_norm": 0.610087513923645, "learning_rate": 3.421153846153846e-05, "loss": 0.03903607726097107, "step": 1780 }, { "epoch": 2.8685897435897436, "grad_norm": 0.6725602746009827, "learning_rate": 3.440384615384615e-05, "loss": 0.03775264322757721, "step": 1790 }, { "epoch": 2.8846153846153846, "grad_norm": 0.5684686899185181, "learning_rate": 3.4596153846153846e-05, "loss": 0.042115017771720886, "step": 1800 }, { "epoch": 2.9006410256410255, "grad_norm": 0.5078580975532532, "learning_rate": 3.478846153846154e-05, "loss": 0.03766477108001709, "step": 1810 }, { "epoch": 2.9166666666666665, "grad_norm": 0.633446216583252, 
"learning_rate": 3.498076923076923e-05, "loss": 0.03915166556835174, "step": 1820 }, { "epoch": 2.9326923076923075, "grad_norm": 0.7618836760520935, "learning_rate": 3.517307692307693e-05, "loss": 0.04485049843788147, "step": 1830 }, { "epoch": 2.948717948717949, "grad_norm": 0.26274845004081726, "learning_rate": 3.5365384615384617e-05, "loss": 0.039657619595527646, "step": 1840 }, { "epoch": 2.96474358974359, "grad_norm": 0.4159216284751892, "learning_rate": 3.5557692307692306e-05, "loss": 0.0393680214881897, "step": 1850 }, { "epoch": 2.980769230769231, "grad_norm": 0.3515976071357727, "learning_rate": 3.575e-05, "loss": 0.037731483578681946, "step": 1860 }, { "epoch": 2.996794871794872, "grad_norm": 0.5800319314002991, "learning_rate": 3.594230769230769e-05, "loss": 0.03818998634815216, "step": 1870 }, { "epoch": 3.0, "eval_accuracy_background": NaN, "eval_accuracy_crop": 0.9924461572242762, "eval_iou_background": 0.0, "eval_iou_crop": 0.9924461572242762, "eval_loss": 0.03301709517836571, "eval_mean_accuracy": 0.9924461572242762, "eval_mean_iou": 0.4962230786121381, "eval_overall_accuracy": 0.9924461572242762, "eval_runtime": 35.9522, "eval_samples_per_second": 24.505, "eval_steps_per_second": 3.087, "step": 1872 }, { "epoch": 3.0128205128205128, "grad_norm": 0.43613722920417786, "learning_rate": 3.613461538461539e-05, "loss": 0.03763360381126404, "step": 1880 }, { "epoch": 3.0288461538461537, "grad_norm": 0.5865011215209961, "learning_rate": 3.6326923076923076e-05, "loss": 0.036468866467475894, "step": 1890 }, { "epoch": 3.0448717948717947, "grad_norm": 0.2768128514289856, "learning_rate": 3.6519230769230765e-05, "loss": 0.03468484282493591, "step": 1900 }, { "epoch": 3.0608974358974357, "grad_norm": 2.719532012939453, "learning_rate": 3.671153846153847e-05, "loss": 0.038494184613227844, "step": 1910 }, { "epoch": 3.076923076923077, "grad_norm": 0.3716694712638855, "learning_rate": 3.690384615384616e-05, "loss": 0.03583415746688843, "step": 1920 }, { "epoch": 
3.092948717948718, "grad_norm": 1.104018211364746, "learning_rate": 3.7096153846153846e-05, "loss": 0.038289874792099, "step": 1930 }, { "epoch": 3.108974358974359, "grad_norm": 0.9561704993247986, "learning_rate": 3.728846153846154e-05, "loss": 0.03667833209037781, "step": 1940 }, { "epoch": 3.125, "grad_norm": 2.7098262310028076, "learning_rate": 3.748076923076923e-05, "loss": 0.03901275098323822, "step": 1950 }, { "epoch": 3.141025641025641, "grad_norm": 0.45409610867500305, "learning_rate": 3.767307692307692e-05, "loss": 0.037158113718032834, "step": 1960 }, { "epoch": 3.157051282051282, "grad_norm": 1.101320505142212, "learning_rate": 3.7865384615384616e-05, "loss": 0.0380224883556366, "step": 1970 }, { "epoch": 3.173076923076923, "grad_norm": 0.5158281922340393, "learning_rate": 3.8057692307692305e-05, "loss": 0.03490853309631348, "step": 1980 }, { "epoch": 3.189102564102564, "grad_norm": 0.5684204697608948, "learning_rate": 3.825e-05, "loss": 0.047729453444480895, "step": 1990 }, { "epoch": 3.2051282051282053, "grad_norm": 0.4268355965614319, "learning_rate": 3.84423076923077e-05, "loss": 0.038329523801803586, "step": 2000 }, { "epoch": 3.2211538461538463, "grad_norm": 1.2342427968978882, "learning_rate": 3.8634615384615386e-05, "loss": 0.0396031528711319, "step": 2010 }, { "epoch": 3.2371794871794872, "grad_norm": 0.26372313499450684, "learning_rate": 3.882692307692308e-05, "loss": 0.03752124309539795, "step": 2020 }, { "epoch": 3.253205128205128, "grad_norm": 0.42242977023124695, "learning_rate": 3.901923076923077e-05, "loss": 0.03689063787460327, "step": 2030 }, { "epoch": 3.269230769230769, "grad_norm": 0.6541191935539246, "learning_rate": 3.921153846153846e-05, "loss": 0.04157097339630127, "step": 2040 }, { "epoch": 3.28525641025641, "grad_norm": 0.3303698003292084, "learning_rate": 3.940384615384616e-05, "loss": 0.03653435409069061, "step": 2050 }, { "epoch": 3.301282051282051, "grad_norm": 0.4294886291027069, "learning_rate": 3.9596153846153846e-05, 
"loss": 0.033668556809425355, "step": 2060 }, { "epoch": 3.3173076923076925, "grad_norm": 0.4154902696609497, "learning_rate": 3.9788461538461535e-05, "loss": 0.038721701502799986, "step": 2070 }, { "epoch": 3.3333333333333335, "grad_norm": 0.3537912666797638, "learning_rate": 3.998076923076923e-05, "loss": 0.036333557963371274, "step": 2080 }, { "epoch": 3.3493589743589745, "grad_norm": 0.30793169140815735, "learning_rate": 4.017307692307692e-05, "loss": 0.0327135294675827, "step": 2090 }, { "epoch": 3.3653846153846154, "grad_norm": 0.6450328230857849, "learning_rate": 4.036538461538462e-05, "loss": 0.03750131130218506, "step": 2100 }, { "epoch": 3.3814102564102564, "grad_norm": 0.32865336537361145, "learning_rate": 4.055769230769231e-05, "loss": 0.03355443179607391, "step": 2110 }, { "epoch": 3.3974358974358974, "grad_norm": 0.33461058139801025, "learning_rate": 4.075e-05, "loss": 0.036764442920684814, "step": 2120 }, { "epoch": 3.4134615384615383, "grad_norm": 1.1683859825134277, "learning_rate": 4.09423076923077e-05, "loss": 0.036345633864402774, "step": 2130 }, { "epoch": 3.4294871794871793, "grad_norm": 0.6285243034362793, "learning_rate": 4.1134615384615386e-05, "loss": 0.03398682475090027, "step": 2140 }, { "epoch": 3.4455128205128207, "grad_norm": 0.4027184844017029, "learning_rate": 4.1326923076923075e-05, "loss": 0.032721468806266786, "step": 2150 }, { "epoch": 3.4615384615384617, "grad_norm": 0.44112998247146606, "learning_rate": 4.151923076923077e-05, "loss": 0.03799274861812592, "step": 2160 }, { "epoch": 3.4775641025641026, "grad_norm": 0.397806316614151, "learning_rate": 4.171153846153846e-05, "loss": 0.03467671573162079, "step": 2170 }, { "epoch": 3.4935897435897436, "grad_norm": 0.6073699593544006, "learning_rate": 4.190384615384615e-05, "loss": 0.03473643660545349, "step": 2180 }, { "epoch": 3.5096153846153846, "grad_norm": 0.797701895236969, "learning_rate": 4.2096153846153846e-05, "loss": 0.0362335205078125, "step": 2190 }, { "epoch": 
3.5256410256410255, "grad_norm": 0.6728187203407288, "learning_rate": 4.228846153846154e-05, "loss": 0.03503105640411377, "step": 2200 }, { "epoch": 3.5416666666666665, "grad_norm": 9.786542892456055, "learning_rate": 4.248076923076923e-05, "loss": 0.0479371964931488, "step": 2210 }, { "epoch": 3.5576923076923075, "grad_norm": 1.458256483078003, "learning_rate": 4.267307692307693e-05, "loss": 0.04110998511314392, "step": 2220 }, { "epoch": 3.573717948717949, "grad_norm": 0.4202897846698761, "learning_rate": 4.2865384615384616e-05, "loss": 0.035566508769989014, "step": 2230 }, { "epoch": 3.58974358974359, "grad_norm": 0.5583341717720032, "learning_rate": 4.305769230769231e-05, "loss": 0.06406933069229126, "step": 2240 }, { "epoch": 3.605769230769231, "grad_norm": 0.4719959795475006, "learning_rate": 4.325e-05, "loss": 0.03586368560791016, "step": 2250 }, { "epoch": 3.621794871794872, "grad_norm": 1.0195426940917969, "learning_rate": 4.344230769230769e-05, "loss": 0.03238672912120819, "step": 2260 }, { "epoch": 3.6378205128205128, "grad_norm": 0.6202515363693237, "learning_rate": 4.3634615384615386e-05, "loss": 0.036961537599563596, "step": 2270 }, { "epoch": 3.6538461538461537, "grad_norm": 0.83589106798172, "learning_rate": 4.3826923076923075e-05, "loss": 0.03749557733535767, "step": 2280 }, { "epoch": 3.6698717948717947, "grad_norm": 0.6019588112831116, "learning_rate": 4.4019230769230764e-05, "loss": 0.03304694294929504, "step": 2290 }, { "epoch": 3.685897435897436, "grad_norm": 1.3841171264648438, "learning_rate": 4.421153846153847e-05, "loss": 0.034581425786018374, "step": 2300 }, { "epoch": 3.7019230769230766, "grad_norm": 0.3997619152069092, "learning_rate": 4.4403846153846156e-05, "loss": 0.030369657278060912, "step": 2310 }, { "epoch": 3.717948717948718, "grad_norm": 0.3999145030975342, "learning_rate": 4.4596153846153845e-05, "loss": 0.03592500686645508, "step": 2320 }, { "epoch": 3.733974358974359, "grad_norm": 0.48669877648353577, "learning_rate": 
4.478846153846154e-05, "loss": 0.03342157006263733, "step": 2330 }, { "epoch": 3.75, "grad_norm": 0.533778727054596, "learning_rate": 4.498076923076923e-05, "loss": 0.031413665413856505, "step": 2340 }, { "epoch": 3.766025641025641, "grad_norm": 1.3593326807022095, "learning_rate": 4.5173076923076926e-05, "loss": 0.03351903259754181, "step": 2350 }, { "epoch": 3.782051282051282, "grad_norm": 0.258972704410553, "learning_rate": 4.5365384615384616e-05, "loss": 0.0356216311454773, "step": 2360 }, { "epoch": 3.7980769230769234, "grad_norm": 0.27762991189956665, "learning_rate": 4.5557692307692305e-05, "loss": 0.03472884297370911, "step": 2370 }, { "epoch": 3.814102564102564, "grad_norm": 0.4767250418663025, "learning_rate": 4.575e-05, "loss": 0.035300204157829286, "step": 2380 }, { "epoch": 3.8301282051282053, "grad_norm": 0.7791353464126587, "learning_rate": 4.59423076923077e-05, "loss": 0.03240425288677216, "step": 2390 }, { "epoch": 3.8461538461538463, "grad_norm": 0.3735044300556183, "learning_rate": 4.6134615384615386e-05, "loss": 0.033401432633399966, "step": 2400 }, { "epoch": 3.8621794871794872, "grad_norm": 0.8107817769050598, "learning_rate": 4.632692307692308e-05, "loss": 0.048764553666114804, "step": 2410 }, { "epoch": 3.878205128205128, "grad_norm": 0.2958928644657135, "learning_rate": 4.651923076923077e-05, "loss": 0.03636920154094696, "step": 2420 }, { "epoch": 3.894230769230769, "grad_norm": 0.64930659532547, "learning_rate": 4.671153846153846e-05, "loss": 0.03425193727016449, "step": 2430 }, { "epoch": 3.91025641025641, "grad_norm": 0.3162537217140198, "learning_rate": 4.6903846153846156e-05, "loss": 0.034277725219726565, "step": 2440 }, { "epoch": 3.926282051282051, "grad_norm": 0.8449147343635559, "learning_rate": 4.7096153846153845e-05, "loss": 0.03559814095497131, "step": 2450 }, { "epoch": 3.9423076923076925, "grad_norm": 0.8141484260559082, "learning_rate": 4.728846153846154e-05, "loss": 0.03653644323348999, "step": 2460 }, { "epoch": 
3.9583333333333335, "grad_norm": 0.48328983783721924, "learning_rate": 4.748076923076923e-05, "loss": 0.031796663999557495, "step": 2470 }, { "epoch": 3.9743589743589745, "grad_norm": 0.5168148279190063, "learning_rate": 4.767307692307692e-05, "loss": 0.03125152885913849, "step": 2480 }, { "epoch": 3.9903846153846154, "grad_norm": 0.32664403319358826, "learning_rate": 4.786538461538462e-05, "loss": 0.030197840929031373, "step": 2490 }, { "epoch": 4.0, "eval_accuracy_background": NaN, "eval_accuracy_crop": 0.9908097927900117, "eval_iou_background": 0.0, "eval_iou_crop": 0.9908097927900117, "eval_loss": 0.029161356389522552, "eval_mean_accuracy": 0.9908097927900117, "eval_mean_iou": 0.49540489639500584, "eval_overall_accuracy": 0.9908097927900117, "eval_runtime": 36.5704, "eval_samples_per_second": 24.09, "eval_steps_per_second": 3.035, "step": 2496 }, { "epoch": 4.006410256410256, "grad_norm": 0.4701683521270752, "learning_rate": 4.805769230769231e-05, "loss": 0.033029836416244504, "step": 2500 }, { "epoch": 4.022435897435898, "grad_norm": 0.38779956102371216, "learning_rate": 4.825e-05, "loss": 0.03376379311084747, "step": 2510 }, { "epoch": 4.038461538461538, "grad_norm": 0.36794722080230713, "learning_rate": 4.8442307692307696e-05, "loss": 0.032618916034698485, "step": 2520 }, { "epoch": 4.05448717948718, "grad_norm": 0.2915388345718384, "learning_rate": 4.8634615384615386e-05, "loss": 0.031600722670555116, "step": 2530 }, { "epoch": 4.07051282051282, "grad_norm": 0.31456631422042847, "learning_rate": 4.8826923076923075e-05, "loss": 0.030255401134490968, "step": 2540 }, { "epoch": 4.086538461538462, "grad_norm": 2.5629823207855225, "learning_rate": 4.901923076923077e-05, "loss": 0.03927111625671387, "step": 2550 }, { "epoch": 4.102564102564102, "grad_norm": 0.384067565202713, "learning_rate": 4.921153846153846e-05, "loss": 0.03392416536808014, "step": 2560 }, { "epoch": 4.118589743589744, "grad_norm": 0.2489372342824936, "learning_rate": 4.9403846153846156e-05, 
"loss": 0.05496013164520264, "step": 2570 }, { "epoch": 4.134615384615385, "grad_norm": 0.4958206117153168, "learning_rate": 4.9596153846153845e-05, "loss": 0.032106789946556094, "step": 2580 }, { "epoch": 4.1506410256410255, "grad_norm": 1.3620619773864746, "learning_rate": 4.978846153846154e-05, "loss": 0.03159627020359039, "step": 2590 }, { "epoch": 4.166666666666667, "grad_norm": 0.46580415964126587, "learning_rate": 4.998076923076924e-05, "loss": 0.030227524042129517, "step": 2600 }, { "epoch": 4.1826923076923075, "grad_norm": 0.5562746524810791, "learning_rate": 5.0173076923076926e-05, "loss": 0.04093174338340759, "step": 2610 }, { "epoch": 4.198717948717949, "grad_norm": 0.6005678772926331, "learning_rate": 5.0365384615384615e-05, "loss": 0.03624850213527679, "step": 2620 }, { "epoch": 4.214743589743589, "grad_norm": 0.7517910599708557, "learning_rate": 5.055769230769231e-05, "loss": 0.03295823037624359, "step": 2630 }, { "epoch": 4.230769230769231, "grad_norm": 0.44093698263168335, "learning_rate": 5.075e-05, "loss": 0.033479434251785276, "step": 2640 }, { "epoch": 4.246794871794872, "grad_norm": 0.29875823855400085, "learning_rate": 5.094230769230769e-05, "loss": 0.0330804318189621, "step": 2650 }, { "epoch": 4.262820512820513, "grad_norm": 0.42978528141975403, "learning_rate": 5.1134615384615385e-05, "loss": 0.03436744213104248, "step": 2660 }, { "epoch": 4.278846153846154, "grad_norm": 0.6091134548187256, "learning_rate": 5.1326923076923075e-05, "loss": 0.03513849377632141, "step": 2670 }, { "epoch": 4.294871794871795, "grad_norm": 0.2555076479911804, "learning_rate": 5.151923076923077e-05, "loss": 0.03522044718265534, "step": 2680 }, { "epoch": 4.310897435897436, "grad_norm": 0.36062192916870117, "learning_rate": 5.1711538461538466e-05, "loss": 0.03363135159015655, "step": 2690 }, { "epoch": 4.326923076923077, "grad_norm": 0.3527829349040985, "learning_rate": 5.1903846153846156e-05, "loss": 0.03223028481006622, "step": 2700 }, { "epoch": 
4.342948717948718, "grad_norm": 0.35655656456947327, "learning_rate": 5.209615384615385e-05, "loss": 0.033691766858100894, "step": 2710 }, { "epoch": 4.358974358974359, "grad_norm": 0.6284633278846741, "learning_rate": 5.228846153846154e-05, "loss": 0.03341765403747558, "step": 2720 }, { "epoch": 4.375, "grad_norm": 0.3222949504852295, "learning_rate": 5.248076923076923e-05, "loss": 0.03094193637371063, "step": 2730 }, { "epoch": 4.391025641025641, "grad_norm": 0.29965752363204956, "learning_rate": 5.2673076923076926e-05, "loss": 0.029050517082214355, "step": 2740 }, { "epoch": 4.407051282051282, "grad_norm": 0.5102453231811523, "learning_rate": 5.2865384615384615e-05, "loss": 0.030456104874610902, "step": 2750 }, { "epoch": 4.423076923076923, "grad_norm": 0.5323256850242615, "learning_rate": 5.3057692307692304e-05, "loss": 0.03457393050193787, "step": 2760 }, { "epoch": 4.439102564102564, "grad_norm": 0.4030460715293884, "learning_rate": 5.325e-05, "loss": 0.029291981458663942, "step": 2770 }, { "epoch": 4.455128205128205, "grad_norm": 0.5449998378753662, "learning_rate": 5.3442307692307696e-05, "loss": 0.03303671181201935, "step": 2780 }, { "epoch": 4.471153846153846, "grad_norm": 0.6861281394958496, "learning_rate": 5.363461538461539e-05, "loss": 0.03397686183452606, "step": 2790 }, { "epoch": 4.487179487179487, "grad_norm": 0.29446086287498474, "learning_rate": 5.382692307692308e-05, "loss": 0.03131229281425476, "step": 2800 }, { "epoch": 4.503205128205128, "grad_norm": 0.4345117211341858, "learning_rate": 5.401923076923077e-05, "loss": 0.030658084154129028, "step": 2810 }, { "epoch": 4.519230769230769, "grad_norm": 0.4236448407173157, "learning_rate": 5.4211538461538466e-05, "loss": 0.04231734573841095, "step": 2820 }, { "epoch": 4.535256410256411, "grad_norm": 0.46679022908210754, "learning_rate": 5.4403846153846155e-05, "loss": 0.035623973608016966, "step": 2830 }, { "epoch": 4.551282051282051, "grad_norm": 0.21705259382724762, "learning_rate": 
5.4596153846153845e-05, "loss": 0.03210398554801941, "step": 2840 }, { "epoch": 4.5673076923076925, "grad_norm": 0.8593072295188904, "learning_rate": 5.478846153846154e-05, "loss": 0.03237392902374268, "step": 2850 }, { "epoch": 4.583333333333333, "grad_norm": 0.41345465183258057, "learning_rate": 5.498076923076923e-05, "loss": 0.03224247694015503, "step": 2860 }, { "epoch": 4.5993589743589745, "grad_norm": 0.3806169033050537, "learning_rate": 5.517307692307692e-05, "loss": 0.0312039852142334, "step": 2870 }, { "epoch": 4.615384615384615, "grad_norm": 0.5141472816467285, "learning_rate": 5.536538461538462e-05, "loss": 0.03132537007331848, "step": 2880 }, { "epoch": 4.631410256410256, "grad_norm": 0.30475080013275146, "learning_rate": 5.555769230769231e-05, "loss": 0.030490216612815858, "step": 2890 }, { "epoch": 4.647435897435898, "grad_norm": 0.2685583531856537, "learning_rate": 5.575e-05, "loss": 0.028474408388137817, "step": 2900 }, { "epoch": 4.663461538461538, "grad_norm": 19.334264755249023, "learning_rate": 5.5942307692307696e-05, "loss": 0.05019236207008362, "step": 2910 }, { "epoch": 4.67948717948718, "grad_norm": 0.3654690086841583, "learning_rate": 5.6134615384615385e-05, "loss": 0.028515240550041197, "step": 2920 }, { "epoch": 4.69551282051282, "grad_norm": 0.5019323825836182, "learning_rate": 5.632692307692308e-05, "loss": 0.029542788863182068, "step": 2930 }, { "epoch": 4.711538461538462, "grad_norm": 1.6446436643600464, "learning_rate": 5.651923076923077e-05, "loss": 0.03642590343952179, "step": 2940 }, { "epoch": 4.727564102564102, "grad_norm": 0.2757451832294464, "learning_rate": 5.671153846153846e-05, "loss": 0.028682196140289308, "step": 2950 }, { "epoch": 4.743589743589744, "grad_norm": 0.45390161871910095, "learning_rate": 5.6903846153846155e-05, "loss": 0.0309420108795166, "step": 2960 }, { "epoch": 4.759615384615385, "grad_norm": 0.3506055176258087, "learning_rate": 5.7096153846153844e-05, "loss": 0.03033100664615631, "step": 2970 }, { 
"epoch": 4.7756410256410255, "grad_norm": 0.39780393242836, "learning_rate": 5.728846153846154e-05, "loss": 0.03323881924152374, "step": 2980 }, { "epoch": 4.791666666666667, "grad_norm": 0.9860163331031799, "learning_rate": 5.7480769230769236e-05, "loss": 0.03152881264686584, "step": 2990 }, { "epoch": 4.8076923076923075, "grad_norm": 0.28229448199272156, "learning_rate": 5.7673076923076925e-05, "loss": 0.03159460425376892, "step": 3000 }, { "epoch": 4.823717948717949, "grad_norm": 0.5562426447868347, "learning_rate": 5.7865384615384615e-05, "loss": 0.03207846283912659, "step": 3010 }, { "epoch": 4.839743589743589, "grad_norm": 0.19844141602516174, "learning_rate": 5.805769230769231e-05, "loss": 0.028690153360366823, "step": 3020 }, { "epoch": 4.855769230769231, "grad_norm": 0.3980916142463684, "learning_rate": 5.825e-05, "loss": 0.03149664700031281, "step": 3030 }, { "epoch": 4.871794871794872, "grad_norm": 0.4366171956062317, "learning_rate": 5.8442307692307696e-05, "loss": 0.0330941379070282, "step": 3040 }, { "epoch": 4.887820512820513, "grad_norm": 0.25535130500793457, "learning_rate": 5.8634615384615385e-05, "loss": 0.035258516669273376, "step": 3050 }, { "epoch": 4.903846153846154, "grad_norm": 0.352820485830307, "learning_rate": 5.8826923076923074e-05, "loss": 0.030943405628204346, "step": 3060 }, { "epoch": 4.919871794871795, "grad_norm": 0.29804009199142456, "learning_rate": 5.901923076923077e-05, "loss": 0.03198661804199219, "step": 3070 }, { "epoch": 4.935897435897436, "grad_norm": 0.667884111404419, "learning_rate": 5.9211538461538466e-05, "loss": 0.03524442315101624, "step": 3080 }, { "epoch": 4.951923076923077, "grad_norm": 0.17836526036262512, "learning_rate": 5.9403846153846155e-05, "loss": 0.028557318449020385, "step": 3090 }, { "epoch": 4.967948717948718, "grad_norm": 0.4316538870334625, "learning_rate": 5.959615384615385e-05, "loss": 0.030312934517860414, "step": 3100 }, { "epoch": 4.983974358974359, "grad_norm": 0.7286023497581482, 
"learning_rate": 5.978846153846154e-05, "loss": 0.02906084954738617, "step": 3110 }, { "epoch": 5.0, "grad_norm": 0.5286591053009033, "learning_rate": 5.998076923076923e-05, "loss": 0.029400908946990968, "step": 3120 }, { "epoch": 5.0, "eval_accuracy_background": NaN, "eval_accuracy_crop": 0.9891994133694426, "eval_iou_background": 0.0, "eval_iou_crop": 0.9891994133694426, "eval_loss": 0.02786017581820488, "eval_mean_accuracy": 0.9891994133694426, "eval_mean_iou": 0.4945997066847213, "eval_overall_accuracy": 0.9891994133694426, "eval_runtime": 35.3049, "eval_samples_per_second": 24.954, "eval_steps_per_second": 3.144, "step": 3120 }, { "epoch": 5.016025641025641, "grad_norm": 0.4712012708187103, "learning_rate": 5.9999984791661175e-05, "loss": 0.03203360140323639, "step": 3130 }, { "epoch": 5.032051282051282, "grad_norm": 0.3930153548717499, "learning_rate": 5.9999932219645505e-05, "loss": 0.03228476047515869, "step": 3140 }, { "epoch": 5.048076923076923, "grad_norm": 0.29151612520217896, "learning_rate": 5.999984209626152e-05, "loss": 0.0566655158996582, "step": 3150 }, { "epoch": 5.064102564102564, "grad_norm": 0.3100217878818512, "learning_rate": 5.999971442162203e-05, "loss": 0.03302051723003387, "step": 3160 }, { "epoch": 5.080128205128205, "grad_norm": 0.4373692274093628, "learning_rate": 5.999954919588685e-05, "loss": 0.03587725162506104, "step": 3170 }, { "epoch": 5.096153846153846, "grad_norm": 0.2678043246269226, "learning_rate": 5.999934641926279e-05, "loss": 0.028709268569946288, "step": 3180 }, { "epoch": 5.112179487179487, "grad_norm": 0.3017260730266571, "learning_rate": 5.999910609200367e-05, "loss": 0.031138476729393006, "step": 3190 }, { "epoch": 5.128205128205128, "grad_norm": 0.6580696105957031, "learning_rate": 5.9998828214410306e-05, "loss": 0.032147011160850524, "step": 3200 }, { "epoch": 5.144230769230769, "grad_norm": 0.34743523597717285, "learning_rate": 5.999851278683053e-05, "loss": 0.031038272380828857, "step": 3210 }, { "epoch": 
5.160256410256411, "grad_norm": 0.4254085123538971, "learning_rate": 5.999815980965916e-05, "loss": 0.03199135661125183, "step": 3220 }, { "epoch": 5.176282051282051, "grad_norm": 0.3759840130805969, "learning_rate": 5.9997769283338035e-05, "loss": 0.027693945169448852, "step": 3230 }, { "epoch": 5.1923076923076925, "grad_norm": 0.25214704871177673, "learning_rate": 5.9997341208355964e-05, "loss": 0.05045799612998962, "step": 3240 }, { "epoch": 5.208333333333333, "grad_norm": 0.7008034586906433, "learning_rate": 5.9996875585248795e-05, "loss": 0.031758451461791994, "step": 3250 }, { "epoch": 5.2243589743589745, "grad_norm": 0.4632321894168854, "learning_rate": 5.999637241459934e-05, "loss": 0.03645231127738953, "step": 3260 }, { "epoch": 5.240384615384615, "grad_norm": 0.2149258404970169, "learning_rate": 5.9995831697037436e-05, "loss": 0.03137375116348266, "step": 3270 }, { "epoch": 5.256410256410256, "grad_norm": 0.4480931758880615, "learning_rate": 5.999525343323989e-05, "loss": 0.029821127653121948, "step": 3280 }, { "epoch": 5.272435897435898, "grad_norm": 0.3848905861377716, "learning_rate": 5.999463762393055e-05, "loss": 0.02956036925315857, "step": 3290 }, { "epoch": 5.288461538461538, "grad_norm": 0.2984158396720886, "learning_rate": 5.999398426988021e-05, "loss": 0.03008987307548523, "step": 3300 }, { "epoch": 5.30448717948718, "grad_norm": 0.23806294798851013, "learning_rate": 5.99932933719067e-05, "loss": 0.030220389366149902, "step": 3310 }, { "epoch": 5.32051282051282, "grad_norm": 0.3457549810409546, "learning_rate": 5.999256493087482e-05, "loss": 0.030119630694389343, "step": 3320 }, { "epoch": 5.336538461538462, "grad_norm": 0.3283259868621826, "learning_rate": 5.999179894769637e-05, "loss": 0.028622913360595702, "step": 3330 }, { "epoch": 5.352564102564102, "grad_norm": 0.342649906873703, "learning_rate": 5.999099542333014e-05, "loss": 0.028369322419166565, "step": 3340 }, { "epoch": 5.368589743589744, "grad_norm": 0.2868306338787079, 
"learning_rate": 5.999015435878192e-05, "loss": 0.03099144697189331, "step": 3350 }, { "epoch": 5.384615384615385, "grad_norm": 0.3940703868865967, "learning_rate": 5.998927575510448e-05, "loss": 0.02828863263130188, "step": 3360 }, { "epoch": 5.4006410256410255, "grad_norm": 0.4995294511318207, "learning_rate": 5.998835961339758e-05, "loss": 0.029009059071540833, "step": 3370 }, { "epoch": 5.416666666666667, "grad_norm": 0.30469760298728943, "learning_rate": 5.998740593480798e-05, "loss": 0.05138106346130371, "step": 3380 }, { "epoch": 5.4326923076923075, "grad_norm": 0.47182685136795044, "learning_rate": 5.9986414720529394e-05, "loss": 0.02914462685585022, "step": 3390 }, { "epoch": 5.448717948717949, "grad_norm": 0.3122923970222473, "learning_rate": 5.9985385971802556e-05, "loss": 0.027702325582504274, "step": 3400 }, { "epoch": 5.464743589743589, "grad_norm": 0.5003992915153503, "learning_rate": 5.998431968991516e-05, "loss": 0.027012330293655396, "step": 3410 }, { "epoch": 5.480769230769231, "grad_norm": 0.3766479790210724, "learning_rate": 5.998321587620188e-05, "loss": 0.02591935694217682, "step": 3420 }, { "epoch": 5.496794871794872, "grad_norm": 0.2981034815311432, "learning_rate": 5.998207453204439e-05, "loss": 0.028376665711402894, "step": 3430 }, { "epoch": 5.512820512820513, "grad_norm": 0.20478005707263947, "learning_rate": 5.998089565887133e-05, "loss": 0.027712041139602663, "step": 3440 }, { "epoch": 5.528846153846154, "grad_norm": 0.32157033681869507, "learning_rate": 5.997967925815829e-05, "loss": 0.02865525186061859, "step": 3450 }, { "epoch": 5.544871794871795, "grad_norm": 0.3024623990058899, "learning_rate": 5.9978425331427875e-05, "loss": 0.03268029391765594, "step": 3460 }, { "epoch": 5.560897435897436, "grad_norm": 0.6732526421546936, "learning_rate": 5.9977133880249645e-05, "loss": 0.027815333008766173, "step": 3470 }, { "epoch": 5.576923076923077, "grad_norm": 0.3776678740978241, "learning_rate": 5.9975804906240124e-05, "loss": 
0.03261052072048187, "step": 3480 }, { "epoch": 5.592948717948718, "grad_norm": 0.24207228422164917, "learning_rate": 5.9974438411062816e-05, "loss": 0.0347442090511322, "step": 3490 }, { "epoch": 5.608974358974359, "grad_norm": 0.36283189058303833, "learning_rate": 5.997303439642817e-05, "loss": 0.030132776498794554, "step": 3500 }, { "epoch": 5.625, "grad_norm": 0.45948708057403564, "learning_rate": 5.9971592864093626e-05, "loss": 0.030280423164367676, "step": 3510 }, { "epoch": 5.641025641025641, "grad_norm": 0.42580944299697876, "learning_rate": 5.997011381586356e-05, "loss": 0.028938519954681396, "step": 3520 }, { "epoch": 5.657051282051282, "grad_norm": 0.3398068845272064, "learning_rate": 5.9968597253589324e-05, "loss": 0.02973540723323822, "step": 3530 }, { "epoch": 5.673076923076923, "grad_norm": 0.21661438047885895, "learning_rate": 5.996704317916923e-05, "loss": 0.02853931486606598, "step": 3540 }, { "epoch": 5.689102564102564, "grad_norm": 0.3464878797531128, "learning_rate": 5.996545159454852e-05, "loss": 0.029515787959098816, "step": 3550 }, { "epoch": 5.705128205128205, "grad_norm": 0.48963528871536255, "learning_rate": 5.9963822501719424e-05, "loss": 0.03200762569904327, "step": 3560 }, { "epoch": 5.721153846153846, "grad_norm": 0.4491106867790222, "learning_rate": 5.9962155902721085e-05, "loss": 0.029964005947113036, "step": 3570 }, { "epoch": 5.737179487179487, "grad_norm": 0.46488893032073975, "learning_rate": 5.996045179963961e-05, "loss": 0.02839279770851135, "step": 3580 }, { "epoch": 5.753205128205128, "grad_norm": 0.49089840054512024, "learning_rate": 5.9958710194608076e-05, "loss": 0.02786598205566406, "step": 3590 }, { "epoch": 5.769230769230769, "grad_norm": 0.2985965609550476, "learning_rate": 5.9956931089806445e-05, "loss": 0.02749096155166626, "step": 3600 }, { "epoch": 5.785256410256411, "grad_norm": 0.4470583498477936, "learning_rate": 5.9955114487461664e-05, "loss": 0.027011263370513915, "step": 3610 }, { "epoch": 5.801282051282051, 
"grad_norm": 0.24581515789031982, "learning_rate": 5.99532603898476e-05, "loss": 0.02680719792842865, "step": 3620 }, { "epoch": 5.8173076923076925, "grad_norm": 0.22460907697677612, "learning_rate": 5.995136879928506e-05, "loss": 0.026813629269599914, "step": 3630 }, { "epoch": 5.833333333333333, "grad_norm": 0.20517075061798096, "learning_rate": 5.9949439718141765e-05, "loss": 0.02823880612850189, "step": 3640 }, { "epoch": 5.8493589743589745, "grad_norm": 0.35850244760513306, "learning_rate": 5.9947473148832384e-05, "loss": 0.02737196385860443, "step": 3650 }, { "epoch": 5.865384615384615, "grad_norm": 0.43910694122314453, "learning_rate": 5.994546909381849e-05, "loss": 0.030575907230377196, "step": 3660 }, { "epoch": 5.881410256410256, "grad_norm": 0.3412756621837616, "learning_rate": 5.9943427555608605e-05, "loss": 0.028356361389160156, "step": 3670 }, { "epoch": 5.897435897435898, "grad_norm": 0.317654013633728, "learning_rate": 5.994134853675815e-05, "loss": 0.029208123683929443, "step": 3680 }, { "epoch": 5.913461538461538, "grad_norm": 0.7053897380828857, "learning_rate": 5.993923203986945e-05, "loss": 0.028420820832252502, "step": 3690 }, { "epoch": 5.92948717948718, "grad_norm": 0.3130151331424713, "learning_rate": 5.993707806759177e-05, "loss": 0.029576507210731507, "step": 3700 }, { "epoch": 5.94551282051282, "grad_norm": 0.2924903631210327, "learning_rate": 5.9934886622621274e-05, "loss": 0.02915334403514862, "step": 3710 }, { "epoch": 5.961538461538462, "grad_norm": 0.32971587777137756, "learning_rate": 5.9932657707701013e-05, "loss": 0.0252811461687088, "step": 3720 }, { "epoch": 5.977564102564102, "grad_norm": 0.33782604336738586, "learning_rate": 5.993039132562096e-05, "loss": 0.02670964002609253, "step": 3730 }, { "epoch": 5.993589743589744, "grad_norm": 0.20271991193294525, "learning_rate": 5.9928087479217974e-05, "loss": 0.02841241955757141, "step": 3740 }, { "epoch": 6.0, "eval_accuracy_background": NaN, "eval_accuracy_crop": 
0.9924938022955456, "eval_iou_background": 0.0, "eval_iou_crop": 0.9924938022955456, "eval_loss": 0.025498226284980774, "eval_mean_accuracy": 0.9924938022955456, "eval_mean_iou": 0.4962469011477728, "eval_overall_accuracy": 0.9924938022955456, "eval_runtime": 36.4427, "eval_samples_per_second": 24.175, "eval_steps_per_second": 3.046, "step": 3744 }, { "epoch": 6.009615384615385, "grad_norm": 0.35521063208580017, "learning_rate": 5.9925746171375826e-05, "loss": 0.03379350304603577, "step": 3750 }, { "epoch": 6.0256410256410255, "grad_norm": 0.2281581610441208, "learning_rate": 5.9923367405025154e-05, "loss": 0.02864885926246643, "step": 3760 }, { "epoch": 6.041666666666667, "grad_norm": 0.22736285626888275, "learning_rate": 5.99209511831435e-05, "loss": 0.02885844111442566, "step": 3770 }, { "epoch": 6.0576923076923075, "grad_norm": 0.43342486023902893, "learning_rate": 5.9918497508755294e-05, "loss": 0.027281907200813294, "step": 3780 }, { "epoch": 6.073717948717949, "grad_norm": 0.6969672441482544, "learning_rate": 5.991600638493183e-05, "loss": 0.028609487414360046, "step": 3790 }, { "epoch": 6.089743589743589, "grad_norm": 0.28597491979599, "learning_rate": 5.991347781479129e-05, "loss": 0.030037641525268555, "step": 3800 }, { "epoch": 6.105769230769231, "grad_norm": 0.3428185284137726, "learning_rate": 5.9910911801498705e-05, "loss": 0.03565230965614319, "step": 3810 }, { "epoch": 6.121794871794871, "grad_norm": 0.2684386074542999, "learning_rate": 5.990830834826601e-05, "loss": 0.03419669270515442, "step": 3820 }, { "epoch": 6.137820512820513, "grad_norm": 0.3748745620250702, "learning_rate": 5.990566745835198e-05, "loss": 0.027441394329071046, "step": 3830 }, { "epoch": 6.153846153846154, "grad_norm": 0.395906537771225, "learning_rate": 5.990298913506227e-05, "loss": 0.027528080344200134, "step": 3840 }, { "epoch": 6.169871794871795, "grad_norm": 0.353055477142334, "learning_rate": 5.9900273381749366e-05, "loss": 0.02825213074684143, "step": 3850 }, { 
"epoch": 6.185897435897436, "grad_norm": 0.23140257596969604, "learning_rate": 5.989752020181262e-05, "loss": 0.028987017273902894, "step": 3860 }, { "epoch": 6.201923076923077, "grad_norm": 0.8484231233596802, "learning_rate": 5.9894729598698215e-05, "loss": 0.03034222424030304, "step": 3870 }, { "epoch": 6.217948717948718, "grad_norm": 0.35438966751098633, "learning_rate": 5.9891901575899216e-05, "loss": 0.02684345841407776, "step": 3880 }, { "epoch": 6.233974358974359, "grad_norm": 0.25667691230773926, "learning_rate": 5.988903613695549e-05, "loss": 0.028166913986206056, "step": 3890 }, { "epoch": 6.25, "grad_norm": 0.34851160645484924, "learning_rate": 5.988613328545375e-05, "loss": 0.03118346929550171, "step": 3900 }, { "epoch": 6.266025641025641, "grad_norm": 0.4388611912727356, "learning_rate": 5.988319302502753e-05, "loss": 0.029732418060302735, "step": 3910 }, { "epoch": 6.282051282051282, "grad_norm": 0.4745960533618927, "learning_rate": 5.9880215359357215e-05, "loss": 0.03135273456573486, "step": 3920 }, { "epoch": 6.298076923076923, "grad_norm": 0.25516101717948914, "learning_rate": 5.987720029216999e-05, "loss": 0.026095840334892272, "step": 3930 }, { "epoch": 6.314102564102564, "grad_norm": 0.2828456461429596, "learning_rate": 5.987414782723985e-05, "loss": 0.026912707090377807, "step": 3940 }, { "epoch": 6.330128205128205, "grad_norm": 0.5022420287132263, "learning_rate": 5.987105796838763e-05, "loss": 0.029746425151824952, "step": 3950 }, { "epoch": 6.346153846153846, "grad_norm": 0.2610377073287964, "learning_rate": 5.986793071948093e-05, "loss": 0.02759089767932892, "step": 3960 }, { "epoch": 6.362179487179487, "grad_norm": 0.3048124313354492, "learning_rate": 5.9864766084434186e-05, "loss": 0.06129605770111084, "step": 3970 }, { "epoch": 6.378205128205128, "grad_norm": 0.24958468973636627, "learning_rate": 5.9861564067208627e-05, "loss": 0.025263795256614686, "step": 3980 }, { "epoch": 6.394230769230769, "grad_norm": 0.15380585193634033, 
"learning_rate": 5.985832467181225e-05, "loss": 0.026971763372421263, "step": 3990 }, { "epoch": 6.410256410256411, "grad_norm": 0.31600692868232727, "learning_rate": 5.985504790229987e-05, "loss": 0.040531277656555176, "step": 4000 }, { "epoch": 6.426282051282051, "grad_norm": 0.24213142693042755, "learning_rate": 5.985173376277306e-05, "loss": 0.0310145765542984, "step": 4010 }, { "epoch": 6.4423076923076925, "grad_norm": 0.28655481338500977, "learning_rate": 5.984838225738018e-05, "loss": 0.027170032262802124, "step": 4020 }, { "epoch": 6.458333333333333, "grad_norm": 0.6405965089797974, "learning_rate": 5.9844993390316375e-05, "loss": 0.030834856629371642, "step": 4030 }, { "epoch": 6.4743589743589745, "grad_norm": 0.4619565010070801, "learning_rate": 5.984156716582351e-05, "loss": 0.028239899873733522, "step": 4040 }, { "epoch": 6.490384615384615, "grad_norm": 0.26495423913002014, "learning_rate": 5.9838103588190264e-05, "loss": 0.0297000914812088, "step": 4050 }, { "epoch": 6.506410256410256, "grad_norm": 0.32301002740859985, "learning_rate": 5.983460266175205e-05, "loss": 0.029486265778541566, "step": 4060 }, { "epoch": 6.522435897435898, "grad_norm": 0.301472932100296, "learning_rate": 5.983106439089102e-05, "loss": 0.03017793595790863, "step": 4070 }, { "epoch": 6.538461538461538, "grad_norm": 0.25434401631355286, "learning_rate": 5.982748878003609e-05, "loss": 0.030321276187896727, "step": 4080 }, { "epoch": 6.55448717948718, "grad_norm": 0.23599784076213837, "learning_rate": 5.9823875833662905e-05, "loss": 0.026626965403556822, "step": 4090 }, { "epoch": 6.57051282051282, "grad_norm": 0.30789339542388916, "learning_rate": 5.982022555629384e-05, "loss": 0.026676270365715026, "step": 4100 }, { "epoch": 6.586538461538462, "grad_norm": 0.19604656100273132, "learning_rate": 5.9816537952497995e-05, "loss": 0.030538952350616454, "step": 4110 }, { "epoch": 6.602564102564102, "grad_norm": 0.3010660409927368, "learning_rate": 5.981281302689122e-05, "loss": 
0.02867995798587799, "step": 4120 }, { "epoch": 6.618589743589744, "grad_norm": 0.24353009462356567, "learning_rate": 5.980905078413605e-05, "loss": 0.026708436012268067, "step": 4130 }, { "epoch": 6.634615384615385, "grad_norm": 0.2013232409954071, "learning_rate": 5.980525122894173e-05, "loss": 0.024335134029388427, "step": 4140 }, { "epoch": 6.6506410256410255, "grad_norm": 0.6465891599655151, "learning_rate": 5.980141436606424e-05, "loss": 0.02697601020336151, "step": 4150 }, { "epoch": 6.666666666666667, "grad_norm": 0.3785439729690552, "learning_rate": 5.979754020030622e-05, "loss": 0.02764364778995514, "step": 4160 }, { "epoch": 6.6826923076923075, "grad_norm": 0.5132764577865601, "learning_rate": 5.979362873651704e-05, "loss": 0.02624908685684204, "step": 4170 }, { "epoch": 6.698717948717949, "grad_norm": 0.22271735966205597, "learning_rate": 5.978967997959274e-05, "loss": 0.026820868253707886, "step": 4180 }, { "epoch": 6.714743589743589, "grad_norm": 0.36531567573547363, "learning_rate": 5.978569393447602e-05, "loss": 0.027459657192230223, "step": 4190 }, { "epoch": 6.730769230769231, "grad_norm": 0.28964874148368835, "learning_rate": 5.978167060615628e-05, "loss": 0.026858335733413695, "step": 4200 }, { "epoch": 6.746794871794872, "grad_norm": 0.3076871335506439, "learning_rate": 5.9777609999669585e-05, "loss": 0.024161748588085175, "step": 4210 }, { "epoch": 6.762820512820513, "grad_norm": 0.27764636278152466, "learning_rate": 5.977351212009865e-05, "loss": 0.02381557673215866, "step": 4220 }, { "epoch": 6.778846153846154, "grad_norm": 0.30063924193382263, "learning_rate": 5.9769376972572864e-05, "loss": 0.026758816838264466, "step": 4230 }, { "epoch": 6.794871794871795, "grad_norm": 0.23905225098133087, "learning_rate": 5.976520456226825e-05, "loss": 0.030412819981575013, "step": 4240 }, { "epoch": 6.810897435897436, "grad_norm": 1.1192800998687744, "learning_rate": 5.9760994894407476e-05, "loss": 0.028217226266860962, "step": 4250 }, { "epoch": 
6.826923076923077, "grad_norm": 0.19219279289245605, "learning_rate": 5.9756747974259836e-05, "loss": 0.035127955675125125, "step": 4260 }, { "epoch": 6.842948717948718, "grad_norm": 0.4265337884426117, "learning_rate": 5.975246380714127e-05, "loss": 0.026281890273094178, "step": 4270 }, { "epoch": 6.858974358974359, "grad_norm": 0.4006291925907135, "learning_rate": 5.9748142398414354e-05, "loss": 0.027091342210769653, "step": 4280 }, { "epoch": 6.875, "grad_norm": 0.3059137761592865, "learning_rate": 5.974378375348824e-05, "loss": 0.027083492279052733, "step": 4290 }, { "epoch": 6.891025641025641, "grad_norm": 0.282601922750473, "learning_rate": 5.9739387877818716e-05, "loss": 0.028448578715324403, "step": 4300 }, { "epoch": 6.907051282051282, "grad_norm": 0.2310851365327835, "learning_rate": 5.973495477690815e-05, "loss": 0.028999334573745726, "step": 4310 }, { "epoch": 6.923076923076923, "grad_norm": 0.17641475796699524, "learning_rate": 5.9730484456305556e-05, "loss": 0.027257826924324036, "step": 4320 }, { "epoch": 6.939102564102564, "grad_norm": 0.4271336495876312, "learning_rate": 5.972597692160647e-05, "loss": 0.02969861328601837, "step": 4330 }, { "epoch": 6.955128205128205, "grad_norm": 0.43056854605674744, "learning_rate": 5.972143217845306e-05, "loss": 0.027163013815879822, "step": 4340 }, { "epoch": 6.971153846153846, "grad_norm": 0.24755986034870148, "learning_rate": 5.971685023253404e-05, "loss": 0.028352290391921997, "step": 4350 }, { "epoch": 6.987179487179487, "grad_norm": 0.28961682319641113, "learning_rate": 5.9712231089584704e-05, "loss": 0.025639042258262634, "step": 4360 }, { "epoch": 7.0, "eval_accuracy_background": NaN, "eval_accuracy_crop": 0.990612086557654, "eval_iou_background": 0.0, "eval_iou_crop": 0.990612086557654, "eval_loss": 0.02488904632627964, "eval_mean_accuracy": 0.990612086557654, "eval_mean_iou": 0.495306043278827, "eval_overall_accuracy": 0.990612086557654, "eval_runtime": 36.5608, "eval_samples_per_second": 24.097, 
"eval_steps_per_second": 3.036, "step": 4368 }, { "epoch": 7.003205128205129, "grad_norm": 0.31655487418174744, "learning_rate": 5.9707574755386896e-05, "loss": 0.026726734638214112, "step": 4370 }, { "epoch": 7.019230769230769, "grad_norm": 0.5717376470565796, "learning_rate": 5.9702881235769034e-05, "loss": 0.036894702911376955, "step": 4380 }, { "epoch": 7.035256410256411, "grad_norm": 0.1887998878955841, "learning_rate": 5.969815053660606e-05, "loss": 0.02414863556623459, "step": 4390 }, { "epoch": 7.051282051282051, "grad_norm": 0.6055033802986145, "learning_rate": 5.9693382663819454e-05, "loss": 0.027620476484298707, "step": 4400 }, { "epoch": 7.0673076923076925, "grad_norm": 0.2627042829990387, "learning_rate": 5.9688577623377245e-05, "loss": 0.02802366316318512, "step": 4410 }, { "epoch": 7.083333333333333, "grad_norm": 0.49772578477859497, "learning_rate": 5.968373542129397e-05, "loss": 0.027903571724891663, "step": 4420 }, { "epoch": 7.0993589743589745, "grad_norm": 0.2438051700592041, "learning_rate": 5.96788560636307e-05, "loss": 0.026354223489761353, "step": 4430 }, { "epoch": 7.115384615384615, "grad_norm": 0.47156792879104614, "learning_rate": 5.967393955649498e-05, "loss": 0.02697218656539917, "step": 4440 }, { "epoch": 7.131410256410256, "grad_norm": 0.21062052249908447, "learning_rate": 5.966898590604089e-05, "loss": 0.027059781551361083, "step": 4450 }, { "epoch": 7.147435897435898, "grad_norm": 0.26942670345306396, "learning_rate": 5.966399511846897e-05, "loss": 0.029561585187911986, "step": 4460 }, { "epoch": 7.163461538461538, "grad_norm": 0.2541596293449402, "learning_rate": 5.96589672000263e-05, "loss": 0.023916731774806976, "step": 4470 }, { "epoch": 7.17948717948718, "grad_norm": 0.3527907729148865, "learning_rate": 5.965390215700636e-05, "loss": 0.02870793640613556, "step": 4480 }, { "epoch": 7.19551282051282, "grad_norm": 0.3171769380569458, "learning_rate": 5.9648799995749166e-05, "loss": 0.026806265115737915, "step": 4490 }, { "epoch": 
7.211538461538462, "grad_norm": 0.17610853910446167, "learning_rate": 5.964366072264117e-05, "loss": 0.028920659422874452, "step": 4500 }, { "epoch": 7.227564102564102, "grad_norm": 0.32638680934906006, "learning_rate": 5.963848434411527e-05, "loss": 0.03080236613750458, "step": 4510 }, { "epoch": 7.243589743589744, "grad_norm": 0.19158519804477692, "learning_rate": 5.963327086665082e-05, "loss": 0.0273240327835083, "step": 4520 }, { "epoch": 7.259615384615385, "grad_norm": 0.27186790108680725, "learning_rate": 5.962802029677361e-05, "loss": 0.02634768784046173, "step": 4530 }, { "epoch": 7.2756410256410255, "grad_norm": 0.25831446051597595, "learning_rate": 5.9622732641055854e-05, "loss": 0.02951570153236389, "step": 4540 }, { "epoch": 7.291666666666667, "grad_norm": 0.22244733572006226, "learning_rate": 5.961740790611619e-05, "loss": 0.026485139131546022, "step": 4550 }, { "epoch": 7.3076923076923075, "grad_norm": 0.41897594928741455, "learning_rate": 5.961204609861967e-05, "loss": 0.02646316885948181, "step": 4560 }, { "epoch": 7.323717948717949, "grad_norm": 0.2055785357952118, "learning_rate": 5.9606647225277765e-05, "loss": 0.02559012770652771, "step": 4570 }, { "epoch": 7.339743589743589, "grad_norm": 0.20391452312469482, "learning_rate": 5.9601211292848304e-05, "loss": 0.026893404126167298, "step": 4580 }, { "epoch": 7.355769230769231, "grad_norm": 0.20527765154838562, "learning_rate": 5.9595738308135536e-05, "loss": 0.02545493245124817, "step": 4590 }, { "epoch": 7.371794871794872, "grad_norm": 0.23612186312675476, "learning_rate": 5.959022827799007e-05, "loss": 0.025466611981391905, "step": 4600 }, { "epoch": 7.387820512820513, "grad_norm": 0.3031468689441681, "learning_rate": 5.95846812093089e-05, "loss": 0.026872754096984863, "step": 4610 }, { "epoch": 7.403846153846154, "grad_norm": 0.48252514004707336, "learning_rate": 5.9579097109035385e-05, "loss": 0.024242226779460908, "step": 4620 }, { "epoch": 7.419871794871795, "grad_norm": 0.3921005129814148, 
"learning_rate": 5.957347598415922e-05, "loss": 0.049407017230987546, "step": 4630 }, { "epoch": 7.435897435897436, "grad_norm": 0.1986272633075714, "learning_rate": 5.956781784171644e-05, "loss": 0.028812357783317567, "step": 4640 }, { "epoch": 7.451923076923077, "grad_norm": 0.3371562957763672, "learning_rate": 5.956212268878945e-05, "loss": 0.025447696447372437, "step": 4650 }, { "epoch": 7.467948717948718, "grad_norm": 0.3505859375, "learning_rate": 5.9556390532506944e-05, "loss": 0.026783427596092223, "step": 4660 }, { "epoch": 7.483974358974359, "grad_norm": 0.28020942211151123, "learning_rate": 5.955062138004395e-05, "loss": 0.029182153940200805, "step": 4670 }, { "epoch": 7.5, "grad_norm": 0.369118869304657, "learning_rate": 5.9544815238621806e-05, "loss": 0.025740531086921693, "step": 4680 }, { "epoch": 7.516025641025641, "grad_norm": 0.408699095249176, "learning_rate": 5.9538972115508145e-05, "loss": 0.027531349658966066, "step": 4690 }, { "epoch": 7.532051282051282, "grad_norm": 0.34730830788612366, "learning_rate": 5.95330920180169e-05, "loss": 0.029159682989120483, "step": 4700 }, { "epoch": 7.548076923076923, "grad_norm": 0.2350701093673706, "learning_rate": 5.952717495350826e-05, "loss": 0.02840782105922699, "step": 4710 }, { "epoch": 7.564102564102564, "grad_norm": 0.2738591730594635, "learning_rate": 5.9521220929388724e-05, "loss": 0.02403363287448883, "step": 4720 }, { "epoch": 7.580128205128205, "grad_norm": 0.3458068370819092, "learning_rate": 5.9515229953111025e-05, "loss": 0.02593674957752228, "step": 4730 }, { "epoch": 7.596153846153846, "grad_norm": 0.33115094900131226, "learning_rate": 5.950920203217416e-05, "loss": 0.03101572096347809, "step": 4740 }, { "epoch": 7.612179487179487, "grad_norm": 0.2688583433628082, "learning_rate": 5.950313717412336e-05, "loss": 0.0252701997756958, "step": 4750 }, { "epoch": 7.628205128205128, "grad_norm": 0.3633456230163574, "learning_rate": 5.949703538655011e-05, "loss": 0.026082852482795717, "step": 4760 
}, { "epoch": 7.644230769230769, "grad_norm": 0.1952267736196518, "learning_rate": 5.9490896677092116e-05, "loss": 0.02533124089241028, "step": 4770 }, { "epoch": 7.660256410256411, "grad_norm": 0.2797028720378876, "learning_rate": 5.948472105343327e-05, "loss": 0.028578019142150878, "step": 4780 }, { "epoch": 7.676282051282051, "grad_norm": 0.2753610610961914, "learning_rate": 5.947850852330372e-05, "loss": 0.024726209044456483, "step": 4790 }, { "epoch": 7.6923076923076925, "grad_norm": 0.23466865718364716, "learning_rate": 5.9472259094479774e-05, "loss": 0.025954902172088623, "step": 4800 }, { "epoch": 7.708333333333333, "grad_norm": 0.2796700596809387, "learning_rate": 5.9465972774783916e-05, "loss": 0.024080921709537507, "step": 4810 }, { "epoch": 7.7243589743589745, "grad_norm": 0.3478195369243622, "learning_rate": 5.9459649572084856e-05, "loss": 0.027611604332923888, "step": 4820 }, { "epoch": 7.740384615384615, "grad_norm": 0.19298334419727325, "learning_rate": 5.945328949429743e-05, "loss": 0.025401183962821962, "step": 4830 }, { "epoch": 7.756410256410256, "grad_norm": 0.12390543520450592, "learning_rate": 5.944689254938264e-05, "loss": 0.026052325963974, "step": 4840 }, { "epoch": 7.772435897435898, "grad_norm": 0.2677946984767914, "learning_rate": 5.944045874534765e-05, "loss": 0.02656397521495819, "step": 4850 }, { "epoch": 7.788461538461538, "grad_norm": 0.2527971565723419, "learning_rate": 5.943398809024575e-05, "loss": 0.029243665933609008, "step": 4860 }, { "epoch": 7.80448717948718, "grad_norm": 0.26553288102149963, "learning_rate": 5.942748059217634e-05, "loss": 0.029876759648323058, "step": 4870 }, { "epoch": 7.82051282051282, "grad_norm": 0.3368934094905853, "learning_rate": 5.942093625928497e-05, "loss": 0.026757529377937316, "step": 4880 }, { "epoch": 7.836538461538462, "grad_norm": 0.27183642983436584, "learning_rate": 5.941435509976326e-05, "loss": 0.024983112514019013, "step": 4890 }, { "epoch": 7.852564102564102, "grad_norm": 
0.37313205003738403, "learning_rate": 5.940773712184897e-05, "loss": 0.03283327519893646, "step": 4900 }, { "epoch": 7.868589743589744, "grad_norm": 1.7391911745071411, "learning_rate": 5.940108233382591e-05, "loss": 0.025131082534790038, "step": 4910 }, { "epoch": 7.884615384615385, "grad_norm": 0.23781350255012512, "learning_rate": 5.939439074402399e-05, "loss": 0.02521338164806366, "step": 4920 }, { "epoch": 7.9006410256410255, "grad_norm": 0.352660596370697, "learning_rate": 5.938766236081916e-05, "loss": 0.028029456734657288, "step": 4930 }, { "epoch": 7.916666666666667, "grad_norm": 0.31867527961730957, "learning_rate": 5.9380897192633444e-05, "loss": 0.03154619932174683, "step": 4940 }, { "epoch": 7.9326923076923075, "grad_norm": 0.24382413923740387, "learning_rate": 5.937409524793491e-05, "loss": 0.02668660581111908, "step": 4950 }, { "epoch": 7.948717948717949, "grad_norm": 0.25781384110450745, "learning_rate": 5.936725653523765e-05, "loss": 0.023203477263450623, "step": 4960 }, { "epoch": 7.964743589743589, "grad_norm": 0.24787800014019012, "learning_rate": 5.936038106310178e-05, "loss": 0.023366345465183257, "step": 4970 }, { "epoch": 7.980769230769231, "grad_norm": 0.13882328569889069, "learning_rate": 5.935346884013345e-05, "loss": 0.0248184472322464, "step": 4980 }, { "epoch": 7.996794871794872, "grad_norm": 0.31156691908836365, "learning_rate": 5.9346519874984774e-05, "loss": 0.029391083121299743, "step": 4990 }, { "epoch": 8.0, "eval_accuracy_background": NaN, "eval_accuracy_crop": 0.989805767984434, "eval_iou_background": 0.0, "eval_iou_crop": 0.989805767984434, "eval_loss": 0.024186350405216217, "eval_mean_accuracy": 0.989805767984434, "eval_mean_iou": 0.494902883992217, "eval_overall_accuracy": 0.989805767984434, "eval_runtime": 36.6943, "eval_samples_per_second": 24.009, "eval_steps_per_second": 3.025, "step": 4992 }, { "epoch": 8.012820512820513, "grad_norm": 0.23470188677310944, "learning_rate": 5.933953417635388e-05, "loss": 
0.025024530291557313, "step": 5000 }, { "epoch": 8.028846153846153, "grad_norm": 0.410441130399704, "learning_rate": 5.9332511752984885e-05, "loss": 0.02640640139579773, "step": 5010 }, { "epoch": 8.044871794871796, "grad_norm": 0.2965950667858124, "learning_rate": 5.932545261366786e-05, "loss": 0.024568969011306764, "step": 5020 }, { "epoch": 8.060897435897436, "grad_norm": 0.23427285254001617, "learning_rate": 5.9318356767238816e-05, "loss": 0.02536221146583557, "step": 5030 }, { "epoch": 8.076923076923077, "grad_norm": 0.3246198892593384, "learning_rate": 5.931122422257975e-05, "loss": 0.024823522567749022, "step": 5040 }, { "epoch": 8.092948717948717, "grad_norm": 0.3132481276988983, "learning_rate": 5.9304054988618564e-05, "loss": 0.026506271958351136, "step": 5050 }, { "epoch": 8.10897435897436, "grad_norm": 0.26836803555488586, "learning_rate": 5.929684907432911e-05, "loss": 0.024844226241111756, "step": 5060 }, { "epoch": 8.125, "grad_norm": 0.540566623210907, "learning_rate": 5.928960648873114e-05, "loss": 0.02647271752357483, "step": 5070 }, { "epoch": 8.14102564102564, "grad_norm": 0.2447020709514618, "learning_rate": 5.9282327240890296e-05, "loss": 0.023787596821784975, "step": 5080 }, { "epoch": 8.157051282051283, "grad_norm": 0.26237228512763977, "learning_rate": 5.9275011339918124e-05, "loss": 0.025165608525276183, "step": 5090 }, { "epoch": 8.173076923076923, "grad_norm": 0.6064642667770386, "learning_rate": 5.9267658794972066e-05, "loss": 0.02659878134727478, "step": 5100 }, { "epoch": 8.189102564102564, "grad_norm": 0.3193972110748291, "learning_rate": 5.926026961525538e-05, "loss": 0.024824470281600952, "step": 5110 }, { "epoch": 8.205128205128204, "grad_norm": 0.1891361027956009, "learning_rate": 5.925284381001724e-05, "loss": 0.02645259499549866, "step": 5120 }, { "epoch": 8.221153846153847, "grad_norm": 0.16451995074748993, "learning_rate": 5.9245381388552636e-05, "loss": 0.023166729509830473, "step": 5130 }, { "epoch": 8.237179487179487, 
"grad_norm": 0.4023226201534271, "learning_rate": 5.923788236020239e-05, "loss": 0.02582002878189087, "step": 5140 }, { "epoch": 8.253205128205128, "grad_norm": 0.49970585107803345, "learning_rate": 5.9230346734353144e-05, "loss": 0.029212477803230285, "step": 5150 }, { "epoch": 8.26923076923077, "grad_norm": 0.30196496844291687, "learning_rate": 5.9222774520437365e-05, "loss": 0.026226744055747986, "step": 5160 }, { "epoch": 8.28525641025641, "grad_norm": 0.5048876404762268, "learning_rate": 5.92151657279333e-05, "loss": 0.02555043399333954, "step": 5170 }, { "epoch": 8.301282051282051, "grad_norm": 0.47400155663490295, "learning_rate": 5.9207520366364985e-05, "loss": 0.025614252686500548, "step": 5180 }, { "epoch": 8.317307692307692, "grad_norm": 0.236494243144989, "learning_rate": 5.919983844530224e-05, "loss": 0.025862956047058107, "step": 5190 }, { "epoch": 8.333333333333334, "grad_norm": 0.36385971307754517, "learning_rate": 5.9192119974360645e-05, "loss": 0.026592081785202025, "step": 5200 }, { "epoch": 8.349358974358974, "grad_norm": 0.3385522663593292, "learning_rate": 5.918436496320153e-05, "loss": 0.027330946922302247, "step": 5210 }, { "epoch": 8.365384615384615, "grad_norm": 0.3240174651145935, "learning_rate": 5.917657342153195e-05, "loss": 0.02598976790904999, "step": 5220 }, { "epoch": 8.381410256410255, "grad_norm": 0.34294334053993225, "learning_rate": 5.91687453591047e-05, "loss": 0.026080363988876344, "step": 5230 }, { "epoch": 8.397435897435898, "grad_norm": 0.8963406085968018, "learning_rate": 5.916088078571828e-05, "loss": 0.028058531880378722, "step": 5240 }, { "epoch": 8.413461538461538, "grad_norm": 0.27025267481803894, "learning_rate": 5.9152979711216896e-05, "loss": 0.025007644295692445, "step": 5250 }, { "epoch": 8.429487179487179, "grad_norm": 0.657490611076355, "learning_rate": 5.914504214549044e-05, "loss": 0.025927042961120604, "step": 5260 }, { "epoch": 8.445512820512821, "grad_norm": 0.42213174700737, "learning_rate": 
5.9137068098474505e-05, "loss": 0.02605021297931671, "step": 5270 }, { "epoch": 8.461538461538462, "grad_norm": 0.4589182436466217, "learning_rate": 5.91290575801503e-05, "loss": 0.02539997100830078, "step": 5280 }, { "epoch": 8.477564102564102, "grad_norm": 0.17644040286540985, "learning_rate": 5.912101060054472e-05, "loss": 0.026419514417648317, "step": 5290 }, { "epoch": 8.493589743589745, "grad_norm": 0.381026029586792, "learning_rate": 5.911292716973029e-05, "loss": 0.024948535859584807, "step": 5300 }, { "epoch": 8.509615384615385, "grad_norm": 0.5539507865905762, "learning_rate": 5.910480729782516e-05, "loss": 0.024710914492607115, "step": 5310 }, { "epoch": 8.525641025641026, "grad_norm": 0.21454286575317383, "learning_rate": 5.9096650994993106e-05, "loss": 0.027959099411964415, "step": 5320 }, { "epoch": 8.541666666666666, "grad_norm": 0.28659141063690186, "learning_rate": 5.9088458271443485e-05, "loss": 0.024021945893764496, "step": 5330 }, { "epoch": 8.557692307692308, "grad_norm": 0.18293660879135132, "learning_rate": 5.908022913743127e-05, "loss": 0.026880115270614624, "step": 5340 }, { "epoch": 8.573717948717949, "grad_norm": 0.17317962646484375, "learning_rate": 5.9071963603256965e-05, "loss": 0.024883601069450378, "step": 5350 }, { "epoch": 8.58974358974359, "grad_norm": 0.25566357374191284, "learning_rate": 5.906366167926668e-05, "loss": 0.027375391125679015, "step": 5360 }, { "epoch": 8.60576923076923, "grad_norm": 0.26826876401901245, "learning_rate": 5.905532337585206e-05, "loss": 0.025356402993202208, "step": 5370 }, { "epoch": 8.621794871794872, "grad_norm": 12.571466445922852, "learning_rate": 5.9046948703450295e-05, "loss": 0.04102444648742676, "step": 5380 }, { "epoch": 8.637820512820513, "grad_norm": 0.4006149172782898, "learning_rate": 5.903853767254406e-05, "loss": 0.02569517195224762, "step": 5390 }, { "epoch": 8.653846153846153, "grad_norm": 0.5445266962051392, "learning_rate": 5.903009029366159e-05, "loss": 0.028008899092674254, 
"step": 5400 }, { "epoch": 8.669871794871796, "grad_norm": 0.21055243909358978, "learning_rate": 5.9021606577376594e-05, "loss": 0.025531038641929626, "step": 5410 }, { "epoch": 8.685897435897436, "grad_norm": 0.18889878690242767, "learning_rate": 5.901308653430827e-05, "loss": 0.02987518608570099, "step": 5420 }, { "epoch": 8.701923076923077, "grad_norm": 0.1826149821281433, "learning_rate": 5.900453017512128e-05, "loss": 0.02465471774339676, "step": 5430 }, { "epoch": 8.717948717948717, "grad_norm": 0.37693342566490173, "learning_rate": 5.899593751052574e-05, "loss": 0.02535373270511627, "step": 5440 }, { "epoch": 8.73397435897436, "grad_norm": 0.2092982977628708, "learning_rate": 5.8987308551277244e-05, "loss": 0.02472548484802246, "step": 5450 }, { "epoch": 8.75, "grad_norm": 0.5622506141662598, "learning_rate": 5.8978643308176765e-05, "loss": 0.02310827523469925, "step": 5460 }, { "epoch": 8.76602564102564, "grad_norm": 0.24578887224197388, "learning_rate": 5.8969941792070726e-05, "loss": 0.023329593241214752, "step": 5470 }, { "epoch": 8.782051282051283, "grad_norm": 0.27403467893600464, "learning_rate": 5.896120401385095e-05, "loss": 0.030481630563735963, "step": 5480 }, { "epoch": 8.798076923076923, "grad_norm": 0.23275627195835114, "learning_rate": 5.895242998445464e-05, "loss": 0.026118898391723634, "step": 5490 }, { "epoch": 8.814102564102564, "grad_norm": 0.575597882270813, "learning_rate": 5.894361971486439e-05, "loss": 0.028643453121185304, "step": 5500 }, { "epoch": 8.830128205128204, "grad_norm": 0.24149474501609802, "learning_rate": 5.893477321610814e-05, "loss": 0.02397463619709015, "step": 5510 }, { "epoch": 8.846153846153847, "grad_norm": 0.2661912143230438, "learning_rate": 5.8925890499259194e-05, "loss": 0.02391762286424637, "step": 5520 }, { "epoch": 8.862179487179487, "grad_norm": 0.22941364347934723, "learning_rate": 5.891697157543617e-05, "loss": 0.026679101586341857, "step": 5530 }, { "epoch": 8.878205128205128, "grad_norm": 
0.22872291505336761, "learning_rate": 5.890801645580303e-05, "loss": 0.024844254553318023, "step": 5540 }, { "epoch": 8.89423076923077, "grad_norm": 0.20917394757270813, "learning_rate": 5.889902515156903e-05, "loss": 0.024294450879096985, "step": 5550 }, { "epoch": 8.91025641025641, "grad_norm": 0.20533224940299988, "learning_rate": 5.888999767398872e-05, "loss": 0.02404298186302185, "step": 5560 }, { "epoch": 8.926282051282051, "grad_norm": 0.3118135333061218, "learning_rate": 5.888093403436193e-05, "loss": 0.034863132238388064, "step": 5570 }, { "epoch": 8.942307692307692, "grad_norm": 0.23392067849636078, "learning_rate": 5.887183424403376e-05, "loss": 0.0248977929353714, "step": 5580 }, { "epoch": 8.958333333333334, "grad_norm": 0.2115570455789566, "learning_rate": 5.886269831439454e-05, "loss": 0.027330225706100462, "step": 5590 }, { "epoch": 8.974358974358974, "grad_norm": 0.23556120693683624, "learning_rate": 5.885352625687988e-05, "loss": 0.023689250648021697, "step": 5600 }, { "epoch": 8.990384615384615, "grad_norm": 0.4116177558898926, "learning_rate": 5.8844318082970554e-05, "loss": 0.02616216242313385, "step": 5610 }, { "epoch": 9.0, "eval_accuracy_background": NaN, "eval_accuracy_crop": 0.992690972318009, "eval_iou_background": 0.0, "eval_iou_crop": 0.992690972318009, "eval_loss": 0.023758457973599434, "eval_mean_accuracy": 0.992690972318009, "eval_mean_iou": 0.4963454861590045, "eval_overall_accuracy": 0.992690972318009, "eval_runtime": 33.9925, "eval_samples_per_second": 25.917, "eval_steps_per_second": 3.265, "step": 5616 }, { "epoch": 9.006410256410257, "grad_norm": 0.3154357075691223, "learning_rate": 5.8835073804192584e-05, "loss": 0.02569846510887146, "step": 5620 }, { "epoch": 9.022435897435898, "grad_norm": 0.5667648911476135, "learning_rate": 5.8825793432117174e-05, "loss": 0.02536289095878601, "step": 5630 }, { "epoch": 9.038461538461538, "grad_norm": 0.34432125091552734, "learning_rate": 5.88164769783607e-05, "loss": 0.02407982051372528, 
"step": 5640 }, { "epoch": 9.054487179487179, "grad_norm": 0.21661609411239624, "learning_rate": 5.8807124454584714e-05, "loss": 0.023879535496234894, "step": 5650 }, { "epoch": 9.070512820512821, "grad_norm": 0.18095619976520538, "learning_rate": 5.8797735872495906e-05, "loss": 0.023735204339027406, "step": 5660 }, { "epoch": 9.086538461538462, "grad_norm": 0.16361525654792786, "learning_rate": 5.878831124384611e-05, "loss": 0.02385086417198181, "step": 5670 }, { "epoch": 9.102564102564102, "grad_norm": 0.22889883816242218, "learning_rate": 5.8778850580432285e-05, "loss": 0.02518101632595062, "step": 5680 }, { "epoch": 9.118589743589743, "grad_norm": 0.20400215685367584, "learning_rate": 5.8769353894096474e-05, "loss": 0.025505965948104857, "step": 5690 }, { "epoch": 9.134615384615385, "grad_norm": 0.32870182394981384, "learning_rate": 5.875982119672582e-05, "loss": 0.026022297143936158, "step": 5700 }, { "epoch": 9.150641025641026, "grad_norm": 0.2946268320083618, "learning_rate": 5.875025250025255e-05, "loss": 0.025380977988243104, "step": 5710 }, { "epoch": 9.166666666666666, "grad_norm": 0.19881872832775116, "learning_rate": 5.8740647816653955e-05, "loss": 0.027132874727249144, "step": 5720 }, { "epoch": 9.182692307692308, "grad_norm": 0.27938851714134216, "learning_rate": 5.873100715795234e-05, "loss": 0.029893067479133607, "step": 5730 }, { "epoch": 9.198717948717949, "grad_norm": 0.5062406063079834, "learning_rate": 5.8721330536215087e-05, "loss": 0.029204866290092467, "step": 5740 }, { "epoch": 9.21474358974359, "grad_norm": 0.23313701152801514, "learning_rate": 5.8711617963554555e-05, "loss": 0.025771155953407288, "step": 5750 }, { "epoch": 9.23076923076923, "grad_norm": 0.25921663641929626, "learning_rate": 5.8701869452128125e-05, "loss": 0.027237460017204285, "step": 5760 }, { "epoch": 9.246794871794872, "grad_norm": 0.2337011843919754, "learning_rate": 5.869208501413815e-05, "loss": 0.025128167867660523, "step": 5770 }, { "epoch": 9.262820512820513, 
"grad_norm": 0.19931243360042572, "learning_rate": 5.868226466183196e-05, "loss": 0.023750042915344237, "step": 5780 }, { "epoch": 9.278846153846153, "grad_norm": 0.7150691151618958, "learning_rate": 5.8672408407501845e-05, "loss": 0.028421562910079957, "step": 5790 }, { "epoch": 9.294871794871796, "grad_norm": 0.28985661268234253, "learning_rate": 5.866251626348502e-05, "loss": 0.025363904237747193, "step": 5800 }, { "epoch": 9.310897435897436, "grad_norm": 0.2512775957584381, "learning_rate": 5.865258824216364e-05, "loss": 0.045425206422805786, "step": 5810 }, { "epoch": 9.326923076923077, "grad_norm": 0.3033793270587921, "learning_rate": 5.864262435596474e-05, "loss": 0.024039334058761595, "step": 5820 }, { "epoch": 9.342948717948717, "grad_norm": 0.6957650184631348, "learning_rate": 5.863262461736029e-05, "loss": 0.024567613005638124, "step": 5830 }, { "epoch": 9.35897435897436, "grad_norm": 0.39600321650505066, "learning_rate": 5.86225890388671e-05, "loss": 0.022677524387836455, "step": 5840 }, { "epoch": 9.375, "grad_norm": 0.6653656363487244, "learning_rate": 5.861251763304686e-05, "loss": 0.025595828890800476, "step": 5850 }, { "epoch": 9.39102564102564, "grad_norm": 0.33803999423980713, "learning_rate": 5.8602410412506096e-05, "loss": 0.029203328490257262, "step": 5860 }, { "epoch": 9.407051282051283, "grad_norm": 0.43624773621559143, "learning_rate": 5.859226738989617e-05, "loss": 0.023597723245620726, "step": 5870 }, { "epoch": 9.423076923076923, "grad_norm": 0.26174575090408325, "learning_rate": 5.858208857791327e-05, "loss": 0.024391451478004457, "step": 5880 }, { "epoch": 9.439102564102564, "grad_norm": 0.3311861753463745, "learning_rate": 5.857187398929835e-05, "loss": 0.025226971507072447, "step": 5890 }, { "epoch": 9.455128205128204, "grad_norm": 0.23434340953826904, "learning_rate": 5.8561623636837176e-05, "loss": 0.026625922322273253, "step": 5900 }, { "epoch": 9.471153846153847, "grad_norm": 0.4138757586479187, "learning_rate": 
5.8551337533360266e-05, "loss": 0.025074005126953125, "step": 5910 }, { "epoch": 9.487179487179487, "grad_norm": 0.2533166706562042, "learning_rate": 5.854101569174289e-05, "loss": 0.02433285564184189, "step": 5920 }, { "epoch": 9.503205128205128, "grad_norm": 0.3655529022216797, "learning_rate": 5.853065812490505e-05, "loss": 0.029999709129333495, "step": 5930 }, { "epoch": 9.51923076923077, "grad_norm": 0.18512403964996338, "learning_rate": 5.8520264845811476e-05, "loss": 0.02451878786087036, "step": 5940 }, { "epoch": 9.53525641025641, "grad_norm": 0.21025902032852173, "learning_rate": 5.8509835867471584e-05, "loss": 0.023945842683315278, "step": 5950 }, { "epoch": 9.551282051282051, "grad_norm": 0.23505742847919464, "learning_rate": 5.849937120293949e-05, "loss": 0.024394667148590087, "step": 5960 }, { "epoch": 9.567307692307692, "grad_norm": 0.2023450881242752, "learning_rate": 5.848887086531398e-05, "loss": 0.024341902136802672, "step": 5970 }, { "epoch": 9.583333333333334, "grad_norm": 0.35227471590042114, "learning_rate": 5.847833486773847e-05, "loss": 0.026183578372001647, "step": 5980 }, { "epoch": 9.599358974358974, "grad_norm": 0.36325305700302124, "learning_rate": 5.846776322340103e-05, "loss": 0.0263815313577652, "step": 5990 }, { "epoch": 9.615384615384615, "grad_norm": 0.22818878293037415, "learning_rate": 5.845715594553436e-05, "loss": 0.024744199216365816, "step": 6000 }, { "epoch": 9.631410256410255, "grad_norm": 0.21164748072624207, "learning_rate": 5.844651304741573e-05, "loss": 0.02455538362264633, "step": 6010 }, { "epoch": 9.647435897435898, "grad_norm": 0.5422484278678894, "learning_rate": 5.8435834542367034e-05, "loss": 0.026634138822555543, "step": 6020 }, { "epoch": 9.663461538461538, "grad_norm": 0.250291645526886, "learning_rate": 5.842512044375471e-05, "loss": 0.024287526309490205, "step": 6030 }, { "epoch": 9.679487179487179, "grad_norm": 0.1704210489988327, "learning_rate": 5.8414370764989755e-05, "loss": 0.023805882036685943, 
"step": 6040 }, { "epoch": 9.695512820512821, "grad_norm": 0.5161727070808411, "learning_rate": 5.8403585519527716e-05, "loss": 0.024588654935359954, "step": 6050 }, { "epoch": 9.711538461538462, "grad_norm": 0.5272760391235352, "learning_rate": 5.839276472086862e-05, "loss": 0.02483978420495987, "step": 6060 }, { "epoch": 9.727564102564102, "grad_norm": 0.10853446274995804, "learning_rate": 5.838190838255706e-05, "loss": 0.023239608108997344, "step": 6070 }, { "epoch": 9.743589743589745, "grad_norm": 0.19157157838344574, "learning_rate": 5.8371016518182066e-05, "loss": 0.024081920087337495, "step": 6080 }, { "epoch": 9.759615384615385, "grad_norm": 0.20798924565315247, "learning_rate": 5.836008914137713e-05, "loss": 0.02249673902988434, "step": 6090 }, { "epoch": 9.775641025641026, "grad_norm": 0.2681480646133423, "learning_rate": 5.834912626582024e-05, "loss": 0.022824807465076445, "step": 6100 }, { "epoch": 9.791666666666666, "grad_norm": 0.3176594078540802, "learning_rate": 5.833812790523378e-05, "loss": 0.022766375541687013, "step": 6110 }, { "epoch": 9.807692307692308, "grad_norm": 0.19717781245708466, "learning_rate": 5.8327094073384566e-05, "loss": 0.024657608568668367, "step": 6120 }, { "epoch": 9.823717948717949, "grad_norm": 0.271308034658432, "learning_rate": 5.831602478408381e-05, "loss": 0.023273898661136626, "step": 6130 }, { "epoch": 9.83974358974359, "grad_norm": 0.21575945615768433, "learning_rate": 5.8304920051187115e-05, "loss": 0.02429761290550232, "step": 6140 }, { "epoch": 9.85576923076923, "grad_norm": 0.26173242926597595, "learning_rate": 5.829377988859444e-05, "loss": 0.0221561923623085, "step": 6150 }, { "epoch": 9.871794871794872, "grad_norm": 0.31573083996772766, "learning_rate": 5.828260431025009e-05, "loss": 0.02833632230758667, "step": 6160 }, { "epoch": 9.887820512820513, "grad_norm": 0.33075302839279175, "learning_rate": 5.8271393330142706e-05, "loss": 0.026998895406723022, "step": 6170 }, { "epoch": 9.903846153846153, "grad_norm": 
0.26673537492752075, "learning_rate": 5.8260146962305255e-05, "loss": 0.022507129609584807, "step": 6180 }, { "epoch": 9.919871794871796, "grad_norm": 0.26745378971099854, "learning_rate": 5.824886522081497e-05, "loss": 0.026754948496818542, "step": 6190 }, { "epoch": 9.935897435897436, "grad_norm": 0.3427461087703705, "learning_rate": 5.823754811979338e-05, "loss": 0.024654503166675567, "step": 6200 }, { "epoch": 9.951923076923077, "grad_norm": 0.314081609249115, "learning_rate": 5.822619567340628e-05, "loss": 0.02442961633205414, "step": 6210 }, { "epoch": 9.967948717948717, "grad_norm": 0.20269328355789185, "learning_rate": 5.821480789586369e-05, "loss": 0.026284974813461304, "step": 6220 }, { "epoch": 9.98397435897436, "grad_norm": 0.4023321568965912, "learning_rate": 5.820338480141985e-05, "loss": 0.024913176894187927, "step": 6230 }, { "epoch": 10.0, "grad_norm": 0.252915620803833, "learning_rate": 5.8191926404373263e-05, "loss": 0.0262202650308609, "step": 6240 }, { "epoch": 10.0, "eval_accuracy_background": NaN, "eval_accuracy_crop": 0.9917749635444298, "eval_iou_background": 0.0, "eval_iou_crop": 0.9917749635444298, "eval_loss": 0.023006461560726166, "eval_mean_accuracy": 0.9917749635444298, "eval_mean_iou": 0.4958874817722149, "eval_overall_accuracy": 0.9917749635444298, "eval_runtime": 37.4568, "eval_samples_per_second": 23.52, "eval_steps_per_second": 2.963, "step": 6240 }, { "epoch": 10.01602564102564, "grad_norm": 0.24697117507457733, "learning_rate": 5.818043271906653e-05, "loss": 0.025729253888130188, "step": 6250 }, { "epoch": 10.032051282051283, "grad_norm": 0.19837559759616852, "learning_rate": 5.816890375988649e-05, "loss": 0.025465086102485657, "step": 6260 }, { "epoch": 10.048076923076923, "grad_norm": 0.1660050004720688, "learning_rate": 5.815733954126412e-05, "loss": 0.020888029038906096, "step": 6270 }, { "epoch": 10.064102564102564, "grad_norm": 0.2628149688243866, "learning_rate": 5.814574007767453e-05, "loss": 0.028293904662132264, 
"step": 6280 }, { "epoch": 10.080128205128204, "grad_norm": 0.34460216760635376, "learning_rate": 5.8134105383636926e-05, "loss": 0.023266653716564178, "step": 6290 }, { "epoch": 10.096153846153847, "grad_norm": 0.17874225974082947, "learning_rate": 5.812243547371464e-05, "loss": 0.02275252640247345, "step": 6300 }, { "epoch": 10.112179487179487, "grad_norm": 0.27956679463386536, "learning_rate": 5.8110730362515086e-05, "loss": 0.024269570410251618, "step": 6310 }, { "epoch": 10.128205128205128, "grad_norm": 0.2500707507133484, "learning_rate": 5.809899006468971e-05, "loss": 0.024498288333415986, "step": 6320 }, { "epoch": 10.14423076923077, "grad_norm": 0.39112788438796997, "learning_rate": 5.808721459493403e-05, "loss": 0.03028434216976166, "step": 6330 }, { "epoch": 10.16025641025641, "grad_norm": 0.39575716853141785, "learning_rate": 5.8075403967987576e-05, "loss": 0.024161356687545776, "step": 6340 }, { "epoch": 10.176282051282051, "grad_norm": 0.35391491651535034, "learning_rate": 5.8063558198633884e-05, "loss": 0.024981695413589477, "step": 6350 }, { "epoch": 10.192307692307692, "grad_norm": 0.22334441542625427, "learning_rate": 5.8051677301700494e-05, "loss": 0.026174888014793396, "step": 6360 }, { "epoch": 10.208333333333334, "grad_norm": 0.32163724303245544, "learning_rate": 5.80397612920589e-05, "loss": 0.02611781358718872, "step": 6370 }, { "epoch": 10.224358974358974, "grad_norm": 0.2528025805950165, "learning_rate": 5.802781018462455e-05, "loss": 0.02261197417974472, "step": 6380 }, { "epoch": 10.240384615384615, "grad_norm": 0.1928073912858963, "learning_rate": 5.801582399435682e-05, "loss": 0.024229426681995393, "step": 6390 }, { "epoch": 10.256410256410255, "grad_norm": 0.4639890193939209, "learning_rate": 5.8003802736259016e-05, "loss": 0.028667190670967103, "step": 6400 }, { "epoch": 10.272435897435898, "grad_norm": 0.1966828852891922, "learning_rate": 5.7991746425378315e-05, "loss": 0.02509651184082031, "step": 6410 }, { "epoch": 
10.288461538461538, "grad_norm": 0.8196776509284973, "learning_rate": 5.79796550768058e-05, "loss": 0.024657915532588958, "step": 6420 }, { "epoch": 10.304487179487179, "grad_norm": 0.17653292417526245, "learning_rate": 5.7967528705676376e-05, "loss": 0.026325783133506774, "step": 6430 }, { "epoch": 10.320512820512821, "grad_norm": 0.3170497715473175, "learning_rate": 5.795536732716882e-05, "loss": 0.025801682472229005, "step": 6440 }, { "epoch": 10.336538461538462, "grad_norm": 0.21487988531589508, "learning_rate": 5.7943170956505704e-05, "loss": 0.0252302885055542, "step": 6450 }, { "epoch": 10.352564102564102, "grad_norm": 0.22413036227226257, "learning_rate": 5.793093960895342e-05, "loss": 0.02291768342256546, "step": 6460 }, { "epoch": 10.368589743589745, "grad_norm": 0.16918323934078217, "learning_rate": 5.791867329982213e-05, "loss": 0.024555404484272004, "step": 6470 }, { "epoch": 10.384615384615385, "grad_norm": 15.663335800170898, "learning_rate": 5.7906372044465735e-05, "loss": 0.02946358621120453, "step": 6480 }, { "epoch": 10.400641025641026, "grad_norm": 0.25425177812576294, "learning_rate": 5.789403585828193e-05, "loss": 0.02648257613182068, "step": 6490 }, { "epoch": 10.416666666666666, "grad_norm": 0.6991823315620422, "learning_rate": 5.788166475671209e-05, "loss": 0.02491062581539154, "step": 6500 }, { "epoch": 10.432692307692308, "grad_norm": 0.3954724073410034, "learning_rate": 5.786925875524132e-05, "loss": 0.024881213903427124, "step": 6510 }, { "epoch": 10.448717948717949, "grad_norm": 0.4266594648361206, "learning_rate": 5.785681786939839e-05, "loss": 0.025385937094688414, "step": 6520 }, { "epoch": 10.46474358974359, "grad_norm": 0.19374321401119232, "learning_rate": 5.784434211475575e-05, "loss": 0.02372932732105255, "step": 6530 }, { "epoch": 10.48076923076923, "grad_norm": 0.19281005859375, "learning_rate": 5.783183150692949e-05, "loss": 0.04718855917453766, "step": 6540 }, { "epoch": 10.496794871794872, "grad_norm": 0.33889949321746826, 
"learning_rate": 5.781928606157933e-05, "loss": 0.02586294412612915, "step": 6550 }, { "epoch": 10.512820512820513, "grad_norm": 0.20998618006706238, "learning_rate": 5.780670579440859e-05, "loss": 0.024337585270404815, "step": 6560 }, { "epoch": 10.528846153846153, "grad_norm": 0.18389397859573364, "learning_rate": 5.779409072116417e-05, "loss": 0.03183537423610687, "step": 6570 }, { "epoch": 10.544871794871796, "grad_norm": 0.2511543035507202, "learning_rate": 5.7781440857636565e-05, "loss": 0.024523010849952696, "step": 6580 }, { "epoch": 10.560897435897436, "grad_norm": 0.4140518009662628, "learning_rate": 5.7768756219659795e-05, "loss": 0.026750874519348145, "step": 6590 }, { "epoch": 10.576923076923077, "grad_norm": 0.3706105649471283, "learning_rate": 5.77560368231114e-05, "loss": 0.02547418177127838, "step": 6600 }, { "epoch": 10.592948717948717, "grad_norm": 0.2744928300380707, "learning_rate": 5.7743282683912464e-05, "loss": 0.025032749772071837, "step": 6610 }, { "epoch": 10.60897435897436, "grad_norm": 0.3092079758644104, "learning_rate": 5.773049381802752e-05, "loss": 0.023941215872764588, "step": 6620 }, { "epoch": 10.625, "grad_norm": 0.17212699353694916, "learning_rate": 5.771767024146458e-05, "loss": 0.023408274352550506, "step": 6630 }, { "epoch": 10.64102564102564, "grad_norm": 0.42189452052116394, "learning_rate": 5.770481197027512e-05, "loss": 0.02556930184364319, "step": 6640 }, { "epoch": 10.657051282051283, "grad_norm": 0.22571487724781036, "learning_rate": 5.769191902055404e-05, "loss": 0.023086170852184295, "step": 6650 }, { "epoch": 10.673076923076923, "grad_norm": 0.22101342678070068, "learning_rate": 5.7678991408439616e-05, "loss": 0.022582973539829253, "step": 6660 }, { "epoch": 10.689102564102564, "grad_norm": 0.1457151621580124, "learning_rate": 5.7666029150113557e-05, "loss": 0.022576460242271425, "step": 6670 }, { "epoch": 10.705128205128204, "grad_norm": 0.3234744071960449, "learning_rate": 5.76530322618009e-05, "loss": 
0.02436981350183487, "step": 6680 }, { "epoch": 10.721153846153847, "grad_norm": 0.3178976774215698, "learning_rate": 5.7640000759770065e-05, "loss": 0.02454821616411209, "step": 6690 }, { "epoch": 10.737179487179487, "grad_norm": 0.30942973494529724, "learning_rate": 5.7626934660332766e-05, "loss": 0.024010573327541352, "step": 6700 }, { "epoch": 10.753205128205128, "grad_norm": 0.22055171430110931, "learning_rate": 5.7613833979844045e-05, "loss": 0.023141926527023314, "step": 6710 }, { "epoch": 10.76923076923077, "grad_norm": 0.18640023469924927, "learning_rate": 5.76006987347022e-05, "loss": 0.027003088593482973, "step": 6720 }, { "epoch": 10.78525641025641, "grad_norm": 0.19083808362483978, "learning_rate": 5.7587528941348847e-05, "loss": 0.023353934288024902, "step": 6730 }, { "epoch": 10.801282051282051, "grad_norm": 0.2689061164855957, "learning_rate": 5.757432461626881e-05, "loss": 0.02515825629234314, "step": 6740 }, { "epoch": 10.817307692307692, "grad_norm": 0.1078193411231041, "learning_rate": 5.756108577599012e-05, "loss": 0.022435113787651062, "step": 6750 }, { "epoch": 10.833333333333334, "grad_norm": 0.20451974868774414, "learning_rate": 5.7547812437084055e-05, "loss": 0.02182963490486145, "step": 6760 }, { "epoch": 10.849358974358974, "grad_norm": 0.18874545395374298, "learning_rate": 5.753450461616504e-05, "loss": 0.02445448637008667, "step": 6770 }, { "epoch": 10.865384615384615, "grad_norm": 0.3782961070537567, "learning_rate": 5.752116232989069e-05, "loss": 0.02304581105709076, "step": 6780 }, { "epoch": 10.881410256410255, "grad_norm": 0.31984710693359375, "learning_rate": 5.7507785594961727e-05, "loss": 0.0260525107383728, "step": 6790 }, { "epoch": 10.897435897435898, "grad_norm": 0.19468647241592407, "learning_rate": 5.7494374428122034e-05, "loss": 0.023088333010673524, "step": 6800 }, { "epoch": 10.913461538461538, "grad_norm": 0.22237202525138855, "learning_rate": 5.748092884615855e-05, "loss": 0.02316211611032486, "step": 6810 }, { 
"epoch": 10.929487179487179, "grad_norm": 0.22920674085617065, "learning_rate": 5.746744886590133e-05, "loss": 0.02307436168193817, "step": 6820 }, { "epoch": 10.945512820512821, "grad_norm": 0.17744478583335876, "learning_rate": 5.745393450422346e-05, "loss": 0.02044380009174347, "step": 6830 }, { "epoch": 10.961538461538462, "grad_norm": 0.19726891815662384, "learning_rate": 5.7440385778041076e-05, "loss": 0.02040384113788605, "step": 6840 }, { "epoch": 10.977564102564102, "grad_norm": 0.24899417161941528, "learning_rate": 5.742680270431331e-05, "loss": 0.022724665701389313, "step": 6850 }, { "epoch": 10.993589743589745, "grad_norm": 0.26402953267097473, "learning_rate": 5.741318530004232e-05, "loss": 0.030551698803901673, "step": 6860 }, { "epoch": 11.0, "eval_accuracy_background": NaN, "eval_accuracy_crop": 0.9930281918984019, "eval_iou_background": 0.0, "eval_iou_crop": 0.9930281918984019, "eval_loss": 0.022466003894805908, "eval_mean_accuracy": 0.9930281918984019, "eval_mean_iou": 0.49651409594920093, "eval_overall_accuracy": 0.9930281918984019, "eval_runtime": 34.6702, "eval_samples_per_second": 25.411, "eval_steps_per_second": 3.202, "step": 6864 }, { "epoch": 11.009615384615385, "grad_norm": 0.22378244996070862, "learning_rate": 5.7399533582273205e-05, "loss": 0.021225209534168243, "step": 6870 }, { "epoch": 11.025641025641026, "grad_norm": 0.2709796130657196, "learning_rate": 5.7385847568094024e-05, "loss": 0.02372860014438629, "step": 6880 }, { "epoch": 11.041666666666666, "grad_norm": 0.2245536744594574, "learning_rate": 5.7372127274635776e-05, "loss": 0.023971401154994965, "step": 6890 }, { "epoch": 11.057692307692308, "grad_norm": 0.16957949101924896, "learning_rate": 5.735837271907235e-05, "loss": 0.023832978308200838, "step": 6900 }, { "epoch": 11.073717948717949, "grad_norm": 0.34410718083381653, "learning_rate": 5.7344583918620553e-05, "loss": 0.022485899925231933, "step": 6910 }, { "epoch": 11.08974358974359, "grad_norm": 0.1762443333864212, 
"learning_rate": 5.733076089054e-05, "loss": 0.02139884829521179, "step": 6920 }, { "epoch": 11.10576923076923, "grad_norm": 0.501804530620575, "learning_rate": 5.7316903652133216e-05, "loss": 0.022057648003101348, "step": 6930 }, { "epoch": 11.121794871794872, "grad_norm": 0.2498379796743393, "learning_rate": 5.7303012220745496e-05, "loss": 0.024819315969944, "step": 6940 }, { "epoch": 11.137820512820513, "grad_norm": 0.14824801683425903, "learning_rate": 5.728908661376496e-05, "loss": 0.021654021739959717, "step": 6950 }, { "epoch": 11.153846153846153, "grad_norm": 0.21479669213294983, "learning_rate": 5.7275126848622504e-05, "loss": 0.022424818575382234, "step": 6960 }, { "epoch": 11.169871794871796, "grad_norm": 0.2845214903354645, "learning_rate": 5.7261132942791764e-05, "loss": 0.024652427434921263, "step": 6970 }, { "epoch": 11.185897435897436, "grad_norm": 0.11548485606908798, "learning_rate": 5.724710491378914e-05, "loss": 0.025227442383766174, "step": 6980 }, { "epoch": 11.201923076923077, "grad_norm": 0.7020145654678345, "learning_rate": 5.723304277917373e-05, "loss": 0.024792400002479554, "step": 6990 }, { "epoch": 11.217948717948717, "grad_norm": 0.1832510530948639, "learning_rate": 5.7218946556547295e-05, "loss": 0.024586071074008942, "step": 7000 }, { "epoch": 11.23397435897436, "grad_norm": 0.17615912854671478, "learning_rate": 5.7204816263554324e-05, "loss": 0.023746407032012938, "step": 7010 }, { "epoch": 11.25, "grad_norm": 0.37862882018089294, "learning_rate": 5.719065191788189e-05, "loss": 0.023206613957881927, "step": 7020 }, { "epoch": 11.26602564102564, "grad_norm": 0.3130001127719879, "learning_rate": 5.7176453537259726e-05, "loss": 0.021793121099472047, "step": 7030 }, { "epoch": 11.282051282051283, "grad_norm": 0.256571501493454, "learning_rate": 5.716222113946017e-05, "loss": 0.023728683590888977, "step": 7040 }, { "epoch": 11.298076923076923, "grad_norm": 0.35454633831977844, "learning_rate": 5.714795474229812e-05, "loss": 
0.022981354594230653, "step": 7050 }, { "epoch": 11.314102564102564, "grad_norm": 0.18379302322864532, "learning_rate": 5.713365436363105e-05, "loss": 0.030341777205467223, "step": 7060 }, { "epoch": 11.330128205128204, "grad_norm": 0.5258625149726868, "learning_rate": 5.711932002135894e-05, "loss": 0.02374261021614075, "step": 7070 }, { "epoch": 11.346153846153847, "grad_norm": 1.599994421005249, "learning_rate": 5.710495173342432e-05, "loss": 0.026943716406822204, "step": 7080 }, { "epoch": 11.362179487179487, "grad_norm": 0.18188756704330444, "learning_rate": 5.7090549517812185e-05, "loss": 0.026072314381599425, "step": 7090 }, { "epoch": 11.378205128205128, "grad_norm": 0.202920064330101, "learning_rate": 5.707611339255001e-05, "loss": 0.02180603742599487, "step": 7100 }, { "epoch": 11.39423076923077, "grad_norm": 0.21981100738048553, "learning_rate": 5.7061643375707705e-05, "loss": 0.0228914275765419, "step": 7110 }, { "epoch": 11.41025641025641, "grad_norm": 0.2820292115211487, "learning_rate": 5.704713948539763e-05, "loss": 0.02497408539056778, "step": 7120 }, { "epoch": 11.426282051282051, "grad_norm": 0.3420892655849457, "learning_rate": 5.7032601739774496e-05, "loss": 0.024737267196178435, "step": 7130 }, { "epoch": 11.442307692307692, "grad_norm": 0.9363774061203003, "learning_rate": 5.701803015703544e-05, "loss": 0.026595598459243773, "step": 7140 }, { "epoch": 11.458333333333334, "grad_norm": 0.31774789094924927, "learning_rate": 5.7003424755419906e-05, "loss": 0.02493547648191452, "step": 7150 }, { "epoch": 11.474358974358974, "grad_norm": 0.2628777027130127, "learning_rate": 5.6988785553209725e-05, "loss": 0.023538801074028014, "step": 7160 }, { "epoch": 11.490384615384615, "grad_norm": 0.288833349943161, "learning_rate": 5.697411256872899e-05, "loss": 0.02390698790550232, "step": 7170 }, { "epoch": 11.506410256410255, "grad_norm": 0.36247196793556213, "learning_rate": 5.695940582034411e-05, "loss": 0.02228284180164337, "step": 7180 }, { "epoch": 
11.522435897435898, "grad_norm": 0.3087059259414673, "learning_rate": 5.6944665326463725e-05, "loss": 0.025547000765800475, "step": 7190 }, { "epoch": 11.538461538461538, "grad_norm": 0.18214291334152222, "learning_rate": 5.692989110553876e-05, "loss": 0.023215417563915253, "step": 7200 }, { "epoch": 11.554487179487179, "grad_norm": 0.28081730008125305, "learning_rate": 5.69150831760623e-05, "loss": 0.028072601556777953, "step": 7210 }, { "epoch": 11.570512820512821, "grad_norm": 0.3493572473526001, "learning_rate": 5.6900241556569676e-05, "loss": 0.02590978741645813, "step": 7220 }, { "epoch": 11.586538461538462, "grad_norm": 0.5134046077728271, "learning_rate": 5.688536626563836e-05, "loss": 0.023683522641658784, "step": 7230 }, { "epoch": 11.602564102564102, "grad_norm": 0.29912832379341125, "learning_rate": 5.6870457321887976e-05, "loss": 0.02623124122619629, "step": 7240 }, { "epoch": 11.618589743589745, "grad_norm": 0.6473428010940552, "learning_rate": 5.6855514743980286e-05, "loss": 0.02255244106054306, "step": 7250 }, { "epoch": 11.634615384615385, "grad_norm": 0.3506573438644409, "learning_rate": 5.684053855061913e-05, "loss": 0.023007482290267944, "step": 7260 }, { "epoch": 11.650641025641026, "grad_norm": 0.24525442719459534, "learning_rate": 5.6825528760550426e-05, "loss": 0.02430581897497177, "step": 7270 }, { "epoch": 11.666666666666666, "grad_norm": 0.16928890347480774, "learning_rate": 5.6810485392562176e-05, "loss": 0.023710472881793974, "step": 7280 }, { "epoch": 11.682692307692308, "grad_norm": 0.31393665075302124, "learning_rate": 5.6795408465484385e-05, "loss": 0.022671084105968475, "step": 7290 }, { "epoch": 11.698717948717949, "grad_norm": 0.28465670347213745, "learning_rate": 5.6780297998189054e-05, "loss": 0.026067394018173217, "step": 7300 }, { "epoch": 11.71474358974359, "grad_norm": 0.7650846242904663, "learning_rate": 5.6765154009590206e-05, "loss": 0.023998431861400604, "step": 7310 }, { "epoch": 11.73076923076923, "grad_norm": 
0.21619459986686707, "learning_rate": 5.6749976518643794e-05, "loss": 0.024586448073387147, "step": 7320 }, { "epoch": 11.746794871794872, "grad_norm": 0.1897728443145752, "learning_rate": 5.67347655443477e-05, "loss": 0.02236407846212387, "step": 7330 }, { "epoch": 11.762820512820513, "grad_norm": 0.29398226737976074, "learning_rate": 5.671952110574174e-05, "loss": 0.02224285900592804, "step": 7340 }, { "epoch": 11.778846153846153, "grad_norm": 0.22011564671993256, "learning_rate": 5.6704243221907605e-05, "loss": 0.03061325252056122, "step": 7350 }, { "epoch": 11.794871794871796, "grad_norm": 0.28054869174957275, "learning_rate": 5.668893191196886e-05, "loss": 0.023408325016498567, "step": 7360 }, { "epoch": 11.810897435897436, "grad_norm": 0.5754128694534302, "learning_rate": 5.66735871950909e-05, "loss": 0.02518223226070404, "step": 7370 }, { "epoch": 11.826923076923077, "grad_norm": 0.45335614681243896, "learning_rate": 5.6658209090480924e-05, "loss": 0.021876871585845947, "step": 7380 }, { "epoch": 11.842948717948717, "grad_norm": 0.34884557127952576, "learning_rate": 5.664279761738796e-05, "loss": 0.028133094310760498, "step": 7390 }, { "epoch": 11.85897435897436, "grad_norm": 0.2080397754907608, "learning_rate": 5.6627352795102775e-05, "loss": 0.025630709528923035, "step": 7400 }, { "epoch": 11.875, "grad_norm": 0.143293097615242, "learning_rate": 5.6611874642957883e-05, "loss": 0.022189928591251372, "step": 7410 }, { "epoch": 11.89102564102564, "grad_norm": 0.12497073411941528, "learning_rate": 5.659636318032751e-05, "loss": 0.023402199149131775, "step": 7420 }, { "epoch": 11.907051282051283, "grad_norm": 0.1870635747909546, "learning_rate": 5.6580818426627613e-05, "loss": 0.02330264449119568, "step": 7430 }, { "epoch": 11.923076923076923, "grad_norm": 0.25336262583732605, "learning_rate": 5.6565240401315775e-05, "loss": 0.022573448717594147, "step": 7440 }, { "epoch": 11.939102564102564, "grad_norm": 0.18821917474269867, "learning_rate": 
5.654962912389126e-05, "loss": 0.022656744718551634, "step": 7450 }, { "epoch": 11.955128205128204, "grad_norm": 0.2991713583469391, "learning_rate": 5.653398461389493e-05, "loss": 0.020939917862415315, "step": 7460 }, { "epoch": 11.971153846153847, "grad_norm": 0.12268902361392975, "learning_rate": 5.651830689090924e-05, "loss": 0.021028047800064086, "step": 7470 }, { "epoch": 11.987179487179487, "grad_norm": 0.2567523717880249, "learning_rate": 5.6502595974558274e-05, "loss": 0.02234974503517151, "step": 7480 }, { "epoch": 12.0, "eval_accuracy_background": NaN, "eval_accuracy_crop": 0.992129391229195, "eval_iou_background": 0.0, "eval_iou_crop": 0.992129391229195, "eval_loss": 0.022144688293337822, "eval_mean_accuracy": 0.992129391229195, "eval_mean_iou": 0.4960646956145975, "eval_overall_accuracy": 0.992129391229195, "eval_runtime": 36.8808, "eval_samples_per_second": 23.888, "eval_steps_per_second": 3.01, "step": 7488 }, { "epoch": 12.003205128205128, "grad_norm": 0.2754271328449249, "learning_rate": 5.648685188450758e-05, "loss": 0.024292236566543578, "step": 7490 }, { "epoch": 12.01923076923077, "grad_norm": 0.33935320377349854, "learning_rate": 5.64710746404643e-05, "loss": 0.023631545901298522, "step": 7500 }, { "epoch": 12.03525641025641, "grad_norm": 0.3824402987957001, "learning_rate": 5.645526426217704e-05, "loss": 0.022230637073516846, "step": 7510 }, { "epoch": 12.051282051282051, "grad_norm": 0.25040367245674133, "learning_rate": 5.643942076943589e-05, "loss": 0.025506353378295897, "step": 7520 }, { "epoch": 12.067307692307692, "grad_norm": 0.27498236298561096, "learning_rate": 5.642354418207239e-05, "loss": 0.02268039286136627, "step": 7530 }, { "epoch": 12.083333333333334, "grad_norm": 0.34720364212989807, "learning_rate": 5.64076345199595e-05, "loss": 0.022122476994991303, "step": 7540 }, { "epoch": 12.099358974358974, "grad_norm": 0.3569972515106201, "learning_rate": 5.639169180301159e-05, "loss": 0.0204841285943985, "step": 7550 }, { "epoch": 
12.115384615384615, "grad_norm": 0.21471069753170013, "learning_rate": 5.637571605118439e-05, "loss": 0.0212025985121727, "step": 7560 }, { "epoch": 12.131410256410257, "grad_norm": 0.29891932010650635, "learning_rate": 5.635970728447501e-05, "loss": 0.02276373505592346, "step": 7570 }, { "epoch": 12.147435897435898, "grad_norm": 0.13842028379440308, "learning_rate": 5.634366552292184e-05, "loss": 0.02245960831642151, "step": 7580 }, { "epoch": 12.163461538461538, "grad_norm": 0.2797607183456421, "learning_rate": 5.632759078660461e-05, "loss": 0.023540152609348296, "step": 7590 }, { "epoch": 12.179487179487179, "grad_norm": 0.17318904399871826, "learning_rate": 5.6311483095644316e-05, "loss": 0.022879859805107115, "step": 7600 }, { "epoch": 12.195512820512821, "grad_norm": 0.18579624593257904, "learning_rate": 5.629534247020318e-05, "loss": 0.023134465515613555, "step": 7610 }, { "epoch": 12.211538461538462, "grad_norm": 0.18860526382923126, "learning_rate": 5.627916893048468e-05, "loss": 0.023387345671653747, "step": 7620 }, { "epoch": 12.227564102564102, "grad_norm": 0.21387527883052826, "learning_rate": 5.6262962496733476e-05, "loss": 0.022887122631073, "step": 7630 }, { "epoch": 12.243589743589743, "grad_norm": 0.2140635848045349, "learning_rate": 5.624672318923541e-05, "loss": 0.04748299419879913, "step": 7640 }, { "epoch": 12.259615384615385, "grad_norm": 0.5667896866798401, "learning_rate": 5.623045102831746e-05, "loss": 0.02458832263946533, "step": 7650 }, { "epoch": 12.275641025641026, "grad_norm": 0.1424189805984497, "learning_rate": 5.621414603434774e-05, "loss": 0.023077699542045593, "step": 7660 }, { "epoch": 12.291666666666666, "grad_norm": 0.16174477338790894, "learning_rate": 5.619780822773547e-05, "loss": 0.022830621898174287, "step": 7670 }, { "epoch": 12.307692307692308, "grad_norm": 0.20990987122058868, "learning_rate": 5.61814376289309e-05, "loss": 0.021859164535999297, "step": 7680 }, { "epoch": 12.323717948717949, "grad_norm": 
0.22336669266223907, "learning_rate": 5.616503425842538e-05, "loss": 0.02351408451795578, "step": 7690 }, { "epoch": 12.33974358974359, "grad_norm": 0.2330232560634613, "learning_rate": 5.614859813675125e-05, "loss": 0.022451052069664003, "step": 7700 }, { "epoch": 12.35576923076923, "grad_norm": 0.26374495029449463, "learning_rate": 5.613212928448185e-05, "loss": 0.02116663455963135, "step": 7710 }, { "epoch": 12.371794871794872, "grad_norm": 0.2599586248397827, "learning_rate": 5.6115627722231495e-05, "loss": 0.025863853096961976, "step": 7720 }, { "epoch": 12.387820512820513, "grad_norm": 0.14227363467216492, "learning_rate": 5.609909347065544e-05, "loss": 0.02222270667552948, "step": 7730 }, { "epoch": 12.403846153846153, "grad_norm": 0.13823480904102325, "learning_rate": 5.608252655044985e-05, "loss": 0.0206469863653183, "step": 7740 }, { "epoch": 12.419871794871796, "grad_norm": 0.301943838596344, "learning_rate": 5.6065926982351786e-05, "loss": 0.02126401662826538, "step": 7750 }, { "epoch": 12.435897435897436, "grad_norm": 0.5790300965309143, "learning_rate": 5.6049294787139196e-05, "loss": 0.026076066493988036, "step": 7760 }, { "epoch": 12.451923076923077, "grad_norm": 0.1461036205291748, "learning_rate": 5.603262998563084e-05, "loss": 0.022848835587501524, "step": 7770 }, { "epoch": 12.467948717948717, "grad_norm": 0.2628397047519684, "learning_rate": 5.6015932598686297e-05, "loss": 0.023337842524051668, "step": 7780 }, { "epoch": 12.48397435897436, "grad_norm": 0.9180279970169067, "learning_rate": 5.599920264720595e-05, "loss": 0.023993854224681855, "step": 7790 }, { "epoch": 12.5, "grad_norm": 0.4699363112449646, "learning_rate": 5.598244015213092e-05, "loss": 0.028840336203575134, "step": 7800 }, { "epoch": 12.51602564102564, "grad_norm": 0.19016028940677643, "learning_rate": 5.5965645134443077e-05, "loss": 0.02419106662273407, "step": 7810 }, { "epoch": 12.532051282051283, "grad_norm": 0.2766498029232025, "learning_rate": 5.594881761516501e-05, 
"loss": 0.02264803946018219, "step": 7820 }, { "epoch": 12.548076923076923, "grad_norm": 0.2910785675048828, "learning_rate": 5.593195761535997e-05, "loss": 0.02255287766456604, "step": 7830 }, { "epoch": 12.564102564102564, "grad_norm": 0.3750319182872772, "learning_rate": 5.591506515613187e-05, "loss": 0.02334919720888138, "step": 7840 }, { "epoch": 12.580128205128204, "grad_norm": 0.30327731370925903, "learning_rate": 5.589814025862526e-05, "loss": 0.024473896622657774, "step": 7850 }, { "epoch": 12.596153846153847, "grad_norm": 0.29774945974349976, "learning_rate": 5.5881182944025294e-05, "loss": 0.03147016167640686, "step": 7860 }, { "epoch": 12.612179487179487, "grad_norm": 0.26766225695610046, "learning_rate": 5.58641932335577e-05, "loss": 0.023734019696712495, "step": 7870 }, { "epoch": 12.628205128205128, "grad_norm": 0.20040622353553772, "learning_rate": 5.584717114848874e-05, "loss": 0.02214931696653366, "step": 7880 }, { "epoch": 12.64423076923077, "grad_norm": 0.38767150044441223, "learning_rate": 5.583011671012524e-05, "loss": 0.02312331348657608, "step": 7890 }, { "epoch": 12.66025641025641, "grad_norm": 0.39054760336875916, "learning_rate": 5.581302993981447e-05, "loss": 0.024567997455596922, "step": 7900 }, { "epoch": 12.676282051282051, "grad_norm": 0.36378318071365356, "learning_rate": 5.579591085894422e-05, "loss": 0.021623079478740693, "step": 7910 }, { "epoch": 12.692307692307692, "grad_norm": 0.2785581648349762, "learning_rate": 5.5778759488942684e-05, "loss": 0.02442721575498581, "step": 7920 }, { "epoch": 12.708333333333334, "grad_norm": 0.22777560353279114, "learning_rate": 5.576157585127851e-05, "loss": 0.02141665369272232, "step": 7930 }, { "epoch": 12.724358974358974, "grad_norm": 0.2549479603767395, "learning_rate": 5.574435996746071e-05, "loss": 0.02086998075246811, "step": 7940 }, { "epoch": 12.740384615384615, "grad_norm": 0.3436262607574463, "learning_rate": 5.572711185903866e-05, "loss": 0.022706334292888642, "step": 7950 }, { 
"epoch": 12.756410256410255, "grad_norm": 0.27255165576934814, "learning_rate": 5.570983154760208e-05, "loss": 0.022074708342552186, "step": 7960 }, { "epoch": 12.772435897435898, "grad_norm": 0.341164231300354, "learning_rate": 5.5692519054780996e-05, "loss": 0.024935758113861083, "step": 7970 }, { "epoch": 12.788461538461538, "grad_norm": 0.22485563158988953, "learning_rate": 5.567517440224573e-05, "loss": 0.021913135051727296, "step": 7980 }, { "epoch": 12.804487179487179, "grad_norm": 0.3586118221282959, "learning_rate": 5.5657797611706824e-05, "loss": 0.023654387891292573, "step": 7990 }, { "epoch": 12.820512820512821, "grad_norm": 0.2203715294599533, "learning_rate": 5.564038870491509e-05, "loss": 0.02048935443162918, "step": 8000 }, { "epoch": 12.836538461538462, "grad_norm": 0.29431021213531494, "learning_rate": 5.5622947703661495e-05, "loss": 0.023632150888442994, "step": 8010 }, { "epoch": 12.852564102564102, "grad_norm": 0.14088395237922668, "learning_rate": 5.560547462977724e-05, "loss": 0.02262326180934906, "step": 8020 }, { "epoch": 12.868589743589745, "grad_norm": 0.2292560487985611, "learning_rate": 5.558796950513359e-05, "loss": 0.02130344957113266, "step": 8030 }, { "epoch": 12.884615384615385, "grad_norm": 0.20232462882995605, "learning_rate": 5.5570432351642015e-05, "loss": 0.024494317173957825, "step": 8040 }, { "epoch": 12.900641025641026, "grad_norm": 0.187949076294899, "learning_rate": 5.555286319125402e-05, "loss": 0.022162510454654692, "step": 8050 }, { "epoch": 12.916666666666666, "grad_norm": 0.21632133424282074, "learning_rate": 5.553526204596118e-05, "loss": 0.022854498028755187, "step": 8060 }, { "epoch": 12.932692307692308, "grad_norm": 0.24667996168136597, "learning_rate": 5.551762893779511e-05, "loss": 0.021657364070415498, "step": 8070 }, { "epoch": 12.948717948717949, "grad_norm": 0.15518800914287567, "learning_rate": 5.5499963888827456e-05, "loss": 0.0220686599612236, "step": 8080 }, { "epoch": 12.96474358974359, "grad_norm": 
0.3573208153247833, "learning_rate": 5.548226692116983e-05, "loss": 0.02114778608083725, "step": 8090 }, { "epoch": 12.98076923076923, "grad_norm": 0.31435659527778625, "learning_rate": 5.546453805697377e-05, "loss": 0.021702900528907776, "step": 8100 }, { "epoch": 12.996794871794872, "grad_norm": 0.27735352516174316, "learning_rate": 5.544677731843078e-05, "loss": 0.023104913532733917, "step": 8110 }, { "epoch": 13.0, "eval_accuracy_background": NaN, "eval_accuracy_crop": 0.9926046848573804, "eval_iou_background": 0.0, "eval_iou_crop": 0.9926046848573804, "eval_loss": 0.021475115790963173, "eval_mean_accuracy": 0.9926046848573804, "eval_mean_iou": 0.4963023424286902, "eval_overall_accuracy": 0.9926046848573804, "eval_runtime": 33.4179, "eval_samples_per_second": 26.363, "eval_steps_per_second": 3.322, "step": 8112 }, { "epoch": 13.012820512820513, "grad_norm": 0.23034705221652985, "learning_rate": 5.5428984727772243e-05, "loss": 0.02131539136171341, "step": 8120 }, { "epoch": 13.028846153846153, "grad_norm": 0.21078073978424072, "learning_rate": 5.5411160307269414e-05, "loss": 0.02437637895345688, "step": 8130 }, { "epoch": 13.044871794871796, "grad_norm": 0.3393682539463043, "learning_rate": 5.5393304079233374e-05, "loss": 0.019506585597991944, "step": 8140 }, { "epoch": 13.060897435897436, "grad_norm": 0.16190539300441742, "learning_rate": 5.537541606601505e-05, "loss": 0.019984322786331176, "step": 8150 }, { "epoch": 13.076923076923077, "grad_norm": 0.20051442086696625, "learning_rate": 5.5357496290005144e-05, "loss": 0.021681563556194307, "step": 8160 }, { "epoch": 13.092948717948717, "grad_norm": 0.22765038907527924, "learning_rate": 5.53395447736341e-05, "loss": 0.02261367589235306, "step": 8170 }, { "epoch": 13.10897435897436, "grad_norm": 0.3194507658481598, "learning_rate": 5.532156153937209e-05, "loss": 0.02101070582866669, "step": 8180 }, { "epoch": 13.125, "grad_norm": 0.21458806097507477, "learning_rate": 5.5303546609729026e-05, "loss": 
0.02482333928346634, "step": 8190 }, { "epoch": 13.14102564102564, "grad_norm": 0.20597901940345764, "learning_rate": 5.528550000725445e-05, "loss": 0.023645535111427307, "step": 8200 }, { "epoch": 13.157051282051283, "grad_norm": 0.26751774549484253, "learning_rate": 5.526742175453757e-05, "loss": 0.02206695228815079, "step": 8210 }, { "epoch": 13.173076923076923, "grad_norm": 0.25179943442344666, "learning_rate": 5.5249311874207204e-05, "loss": 0.021835704147815705, "step": 8220 }, { "epoch": 13.189102564102564, "grad_norm": 0.2836173474788666, "learning_rate": 5.523117038893178e-05, "loss": 0.024457092583179473, "step": 8230 }, { "epoch": 13.205128205128204, "grad_norm": 0.28686586022377014, "learning_rate": 5.521299732141924e-05, "loss": 0.02112758159637451, "step": 8240 }, { "epoch": 13.221153846153847, "grad_norm": 0.2765688896179199, "learning_rate": 5.519479269441712e-05, "loss": 0.023118673264980315, "step": 8250 }, { "epoch": 13.237179487179487, "grad_norm": 0.25421929359436035, "learning_rate": 5.5176556530712406e-05, "loss": 0.02271728664636612, "step": 8260 }, { "epoch": 13.253205128205128, "grad_norm": 0.13908618688583374, "learning_rate": 5.515828885313159e-05, "loss": 0.02316998243331909, "step": 8270 }, { "epoch": 13.26923076923077, "grad_norm": 0.20099356770515442, "learning_rate": 5.513998968454059e-05, "loss": 0.02332880049943924, "step": 8280 }, { "epoch": 13.28525641025641, "grad_norm": 0.172284796833992, "learning_rate": 5.512165904784475e-05, "loss": 0.02156834751367569, "step": 8290 }, { "epoch": 13.301282051282051, "grad_norm": 0.20619553327560425, "learning_rate": 5.510329696598881e-05, "loss": 0.022567439079284667, "step": 8300 }, { "epoch": 13.317307692307692, "grad_norm": 0.5400692224502563, "learning_rate": 5.5084903461956875e-05, "loss": 0.02533109188079834, "step": 8310 }, { "epoch": 13.333333333333334, "grad_norm": 0.21811255812644958, "learning_rate": 5.506647855877236e-05, "loss": 0.02330648750066757, "step": 8320 }, { "epoch": 
13.349358974358974, "grad_norm": 0.3186666965484619, "learning_rate": 5.5048022279498e-05, "loss": 0.02296411395072937, "step": 8330 }, { "epoch": 13.365384615384615, "grad_norm": 0.1704690307378769, "learning_rate": 5.50295346472358e-05, "loss": 0.023494060337543487, "step": 8340 }, { "epoch": 13.381410256410255, "grad_norm": 0.3070222735404968, "learning_rate": 5.5011015685127e-05, "loss": 0.021751950681209564, "step": 8350 }, { "epoch": 13.397435897435898, "grad_norm": 0.25724726915359497, "learning_rate": 5.499246541635206e-05, "loss": 0.022814163565635683, "step": 8360 }, { "epoch": 13.413461538461538, "grad_norm": 0.2161691039800644, "learning_rate": 5.4973883864130645e-05, "loss": 0.021112658083438873, "step": 8370 }, { "epoch": 13.429487179487179, "grad_norm": 0.44564881920814514, "learning_rate": 5.495527105172157e-05, "loss": 0.023457449674606324, "step": 8380 }, { "epoch": 13.445512820512821, "grad_norm": 0.1718994379043579, "learning_rate": 5.4936627002422764e-05, "loss": 0.02579537034034729, "step": 8390 }, { "epoch": 13.461538461538462, "grad_norm": 0.28355881571769714, "learning_rate": 5.491795173957126e-05, "loss": 0.024869288504123687, "step": 8400 }, { "epoch": 13.477564102564102, "grad_norm": 0.16209062933921814, "learning_rate": 5.489924528654317e-05, "loss": 0.02463427037000656, "step": 8410 }, { "epoch": 13.493589743589745, "grad_norm": 0.2889157831668854, "learning_rate": 5.4880507666753656e-05, "loss": 0.024469995498657228, "step": 8420 }, { "epoch": 13.509615384615385, "grad_norm": 0.2265191674232483, "learning_rate": 5.486173890365686e-05, "loss": 0.021540819108486174, "step": 8430 }, { "epoch": 13.525641025641026, "grad_norm": 0.26309970021247864, "learning_rate": 5.484293902074593e-05, "loss": 0.02379208356142044, "step": 8440 }, { "epoch": 13.541666666666666, "grad_norm": 0.37589240074157715, "learning_rate": 5.482410804155298e-05, "loss": 0.02405584156513214, "step": 8450 }, { "epoch": 13.557692307692308, "grad_norm": 
0.20189827680587769, "learning_rate": 5.4805245989649014e-05, "loss": 0.021486283838748933, "step": 8460 }, { "epoch": 13.573717948717949, "grad_norm": 0.2146540731191635, "learning_rate": 5.478635288864396e-05, "loss": 0.03736424148082733, "step": 8470 }, { "epoch": 13.58974358974359, "grad_norm": 0.4862072765827179, "learning_rate": 5.476742876218658e-05, "loss": 0.026689136028289796, "step": 8480 }, { "epoch": 13.60576923076923, "grad_norm": 0.27162256836891174, "learning_rate": 5.474847363396451e-05, "loss": 0.023124994337558748, "step": 8490 }, { "epoch": 13.621794871794872, "grad_norm": 0.27008146047592163, "learning_rate": 5.472948752770416e-05, "loss": 0.021969565749168397, "step": 8500 }, { "epoch": 13.637820512820513, "grad_norm": 0.48926037549972534, "learning_rate": 5.4710470467170724e-05, "loss": 0.02505304217338562, "step": 8510 }, { "epoch": 13.653846153846153, "grad_norm": 0.8623170852661133, "learning_rate": 5.4691422476168146e-05, "loss": 0.027776148915290833, "step": 8520 }, { "epoch": 13.669871794871796, "grad_norm": 0.28727611899375916, "learning_rate": 5.467234357853909e-05, "loss": 0.02173895239830017, "step": 8530 }, { "epoch": 13.685897435897436, "grad_norm": 0.34776461124420166, "learning_rate": 5.465323379816489e-05, "loss": 0.022074127197265626, "step": 8540 }, { "epoch": 13.701923076923077, "grad_norm": 0.6116006970405579, "learning_rate": 5.4634093158965546e-05, "loss": 0.023446404933929445, "step": 8550 }, { "epoch": 13.717948717948717, "grad_norm": 0.16603350639343262, "learning_rate": 5.46149216848997e-05, "loss": 0.02595679461956024, "step": 8560 }, { "epoch": 13.73397435897436, "grad_norm": 0.4729422628879547, "learning_rate": 5.459571939996456e-05, "loss": 0.02234387695789337, "step": 8570 }, { "epoch": 13.75, "grad_norm": 0.28798699378967285, "learning_rate": 5.4576486328195936e-05, "loss": 0.020632430911064148, "step": 8580 }, { "epoch": 13.76602564102564, "grad_norm": 0.25495076179504395, "learning_rate": 
5.455722249366812e-05, "loss": 0.0207436203956604, "step": 8590 }, { "epoch": 13.782051282051283, "grad_norm": 0.12117909640073776, "learning_rate": 5.4537927920493986e-05, "loss": 0.023870520293712616, "step": 8600 }, { "epoch": 13.798076923076923, "grad_norm": 0.25958535075187683, "learning_rate": 5.451860263282482e-05, "loss": 0.0229059174656868, "step": 8610 }, { "epoch": 13.814102564102564, "grad_norm": 0.32768890261650085, "learning_rate": 5.4499246654850374e-05, "loss": 0.023335784673690796, "step": 8620 }, { "epoch": 13.830128205128204, "grad_norm": 0.24102947115898132, "learning_rate": 5.447986001079882e-05, "loss": 0.019953523576259614, "step": 8630 }, { "epoch": 13.846153846153847, "grad_norm": 0.2998334467411041, "learning_rate": 5.446044272493672e-05, "loss": 0.024226313829421996, "step": 8640 }, { "epoch": 13.862179487179487, "grad_norm": 0.3962441682815552, "learning_rate": 5.4440994821568975e-05, "loss": 0.02433561682701111, "step": 8650 }, { "epoch": 13.878205128205128, "grad_norm": 0.30701038241386414, "learning_rate": 5.442151632503883e-05, "loss": 0.02258110195398331, "step": 8660 }, { "epoch": 13.89423076923077, "grad_norm": 0.27910587191581726, "learning_rate": 5.440200725972781e-05, "loss": 0.02172628790140152, "step": 8670 }, { "epoch": 13.91025641025641, "grad_norm": 0.15440510213375092, "learning_rate": 5.43824676500557e-05, "loss": 0.020690152049064638, "step": 8680 }, { "epoch": 13.926282051282051, "grad_norm": 0.24600139260292053, "learning_rate": 5.436289752048053e-05, "loss": 0.02171063721179962, "step": 8690 }, { "epoch": 13.942307692307692, "grad_norm": 0.25497904419898987, "learning_rate": 5.434329689549853e-05, "loss": 0.02583411931991577, "step": 8700 }, { "epoch": 13.958333333333334, "grad_norm": 0.19038687646389008, "learning_rate": 5.4323665799644084e-05, "loss": 0.022081659734249116, "step": 8710 }, { "epoch": 13.974358974358974, "grad_norm": 0.46222251653671265, "learning_rate": 5.430400425748975e-05, "loss": 
0.021066164970397948, "step": 8720 }, { "epoch": 13.990384615384615, "grad_norm": 0.1801607310771942, "learning_rate": 5.428431229364617e-05, "loss": 0.020818568766117096, "step": 8730 }, { "epoch": 14.0, "eval_accuracy_background": null, "eval_accuracy_crop": 0.9930547413435651, "eval_iou_background": 0.0, "eval_iou_crop": 0.9930547413435651, "eval_loss": 0.021474849432706833, "eval_mean_accuracy": 0.9930547413435651, "eval_mean_iou": 0.49652737067178254, "eval_overall_accuracy": 0.9930547413435651, "eval_runtime": 37.8302, "eval_samples_per_second": 23.288, "eval_steps_per_second": 2.934, "step": 8736 }, { "epoch": 14.006410256410257, "grad_norm": 0.2677779793739319, "learning_rate": 5.4264589932762065e-05, "loss": 0.02139935493469238, "step": 8740 }, { "epoch": 14.022435897435898, "grad_norm": 0.38585638999938965, "learning_rate": 5.424483719952422e-05, "loss": 0.023751690983772278, "step": 8750 }, { "epoch": 14.038461538461538, "grad_norm": 0.14943043887615204, "learning_rate": 5.422505411865744e-05, "loss": 0.021584996581077577, "step": 8760 }, { "epoch": 14.054487179487179, "grad_norm": 0.27196910977363586, "learning_rate": 5.420524071492448e-05, "loss": 0.0219250351190567, "step": 8770 }, { "epoch": 14.070512820512821, "grad_norm": 0.20667684078216553, "learning_rate": 5.41853970131261e-05, "loss": 0.022500680387020112, "step": 8780 }, { "epoch": 14.086538461538462, "grad_norm": 0.21915121376514435, "learning_rate": 5.416552303810095e-05, "loss": 0.024306899309158324, "step": 8790 }, { "epoch": 14.102564102564102, "grad_norm": 0.7036845684051514, "learning_rate": 5.414561881472558e-05, "loss": 0.022705744206905364, "step": 8800 }, { "epoch": 14.118589743589743, "grad_norm": 0.14661794900894165, "learning_rate": 5.412568436791442e-05, "loss": 0.023541896045207976, "step": 8810 }, { "epoch": 14.134615384615385, "grad_norm": 0.15046636760234833, "learning_rate": 5.410571972261972e-05, "loss": 0.020134034752845763, "step": 8820 }, { "epoch": 14.150641025641026, 
"grad_norm": 0.19744189083576202, "learning_rate": 5.4085724903831516e-05, "loss": 0.020490117371082306, "step": 8830 }, { "epoch": 14.166666666666666, "grad_norm": 0.19106897711753845, "learning_rate": 5.406569993657762e-05, "loss": 0.02090860456228256, "step": 8840 }, { "epoch": 14.182692307692308, "grad_norm": 0.15247757732868195, "learning_rate": 5.40456448459236e-05, "loss": 0.021018952131271362, "step": 8850 }, { "epoch": 14.198717948717949, "grad_norm": 0.23020872473716736, "learning_rate": 5.4025559656972714e-05, "loss": 0.021595413982868194, "step": 8860 }, { "epoch": 14.21474358974359, "grad_norm": 0.23429949581623077, "learning_rate": 5.400544439486589e-05, "loss": 0.0218031108379364, "step": 8870 }, { "epoch": 14.23076923076923, "grad_norm": 0.2262924611568451, "learning_rate": 5.398529908478171e-05, "loss": 0.022186924517154694, "step": 8880 }, { "epoch": 14.246794871794872, "grad_norm": 0.16878293454647064, "learning_rate": 5.396512375193637e-05, "loss": 0.02016131430864334, "step": 8890 }, { "epoch": 14.262820512820513, "grad_norm": 0.2716978192329407, "learning_rate": 5.3944918421583614e-05, "loss": 0.02049858868122101, "step": 8900 }, { "epoch": 14.278846153846153, "grad_norm": 0.17568959295749664, "learning_rate": 5.3924683119014786e-05, "loss": 0.020586100220680238, "step": 8910 }, { "epoch": 14.294871794871796, "grad_norm": 0.37607234716415405, "learning_rate": 5.390441786955871e-05, "loss": 0.02222006618976593, "step": 8920 }, { "epoch": 14.310897435897436, "grad_norm": 0.19428583979606628, "learning_rate": 5.3884122698581704e-05, "loss": 0.02375749349594116, "step": 8930 }, { "epoch": 14.326923076923077, "grad_norm": 0.27487778663635254, "learning_rate": 5.386379763148754e-05, "loss": 0.02201664596796036, "step": 8940 }, { "epoch": 14.342948717948717, "grad_norm": 0.39120060205459595, "learning_rate": 5.384344269371741e-05, "loss": 0.0217859148979187, "step": 8950 }, { "epoch": 14.35897435897436, "grad_norm": 0.3595455288887024, 
"learning_rate": 5.3823057910749895e-05, "loss": 0.02015067934989929, "step": 8960 }, { "epoch": 14.375, "grad_norm": 0.2360479086637497, "learning_rate": 5.380264330810095e-05, "loss": 0.022201623022556304, "step": 8970 }, { "epoch": 14.39102564102564, "grad_norm": 0.3942137062549591, "learning_rate": 5.3782198911323835e-05, "loss": 0.022780312597751616, "step": 8980 }, { "epoch": 14.407051282051283, "grad_norm": 0.2150200605392456, "learning_rate": 5.376172474600909e-05, "loss": 0.021103236079216003, "step": 8990 }, { "epoch": 14.423076923076923, "grad_norm": 0.2682059705257416, "learning_rate": 5.374122083778457e-05, "loss": 0.02236236035823822, "step": 9000 }, { "epoch": 14.439102564102564, "grad_norm": 0.1575576364994049, "learning_rate": 5.3720687212315307e-05, "loss": 0.021749910712242127, "step": 9010 }, { "epoch": 14.455128205128204, "grad_norm": 0.2518702447414398, "learning_rate": 5.3700123895303546e-05, "loss": 0.023289932310581206, "step": 9020 }, { "epoch": 14.471153846153847, "grad_norm": 0.1433064192533493, "learning_rate": 5.3679530912488715e-05, "loss": 0.022335848212242125, "step": 9030 }, { "epoch": 14.487179487179487, "grad_norm": 0.40885496139526367, "learning_rate": 5.365890828964735e-05, "loss": 0.026110303401947022, "step": 9040 }, { "epoch": 14.503205128205128, "grad_norm": 0.15134675800800323, "learning_rate": 5.363825605259311e-05, "loss": 0.021898823976516723, "step": 9050 }, { "epoch": 14.51923076923077, "grad_norm": 0.6495422720909119, "learning_rate": 5.3617574227176705e-05, "loss": 0.02424045205116272, "step": 9060 }, { "epoch": 14.53525641025641, "grad_norm": 0.255376935005188, "learning_rate": 5.3596862839285894e-05, "loss": 0.024052375555038454, "step": 9070 }, { "epoch": 14.551282051282051, "grad_norm": 0.3539707064628601, "learning_rate": 5.357612191484544e-05, "loss": 0.022662152349948884, "step": 9080 }, { "epoch": 14.567307692307692, "grad_norm": 0.3168881833553314, "learning_rate": 5.355535147981704e-05, "loss": 
0.022143669426441193, "step": 9090 }, { "epoch": 14.583333333333334, "grad_norm": 0.2659461796283722, "learning_rate": 5.35345515601994e-05, "loss": 0.023403067886829377, "step": 9100 }, { "epoch": 14.599358974358974, "grad_norm": 0.2095952033996582, "learning_rate": 5.351372218202807e-05, "loss": 0.024598583579063416, "step": 9110 }, { "epoch": 14.615384615384615, "grad_norm": 0.15591131150722504, "learning_rate": 5.349286337137551e-05, "loss": 0.021713897585868835, "step": 9120 }, { "epoch": 14.631410256410255, "grad_norm": 0.3565748333930969, "learning_rate": 5.3471975154350996e-05, "loss": 0.02306187301874161, "step": 9130 }, { "epoch": 14.647435897435898, "grad_norm": 0.21228060126304626, "learning_rate": 5.3451057557100626e-05, "loss": 0.024527044594287874, "step": 9140 }, { "epoch": 14.663461538461538, "grad_norm": 0.5870145559310913, "learning_rate": 5.343011060580729e-05, "loss": 0.02364142835140228, "step": 9150 }, { "epoch": 14.679487179487179, "grad_norm": 0.24947845935821533, "learning_rate": 5.340913432669058e-05, "loss": 0.021476911008358003, "step": 9160 }, { "epoch": 14.695512820512821, "grad_norm": 0.14991122484207153, "learning_rate": 5.338812874600685e-05, "loss": 0.01937911957502365, "step": 9170 }, { "epoch": 14.711538461538462, "grad_norm": 0.20699752867221832, "learning_rate": 5.3367093890049075e-05, "loss": 0.02318907678127289, "step": 9180 }, { "epoch": 14.727564102564102, "grad_norm": 0.17618581652641296, "learning_rate": 5.334602978514691e-05, "loss": 0.022146952152252198, "step": 9190 }, { "epoch": 14.743589743589745, "grad_norm": 0.2999691367149353, "learning_rate": 5.332493645766661e-05, "loss": 0.023603332042694092, "step": 9200 }, { "epoch": 14.759615384615385, "grad_norm": 0.12214530259370804, "learning_rate": 5.330381393401103e-05, "loss": 0.01993042230606079, "step": 9210 }, { "epoch": 14.775641025641026, "grad_norm": 0.23731796443462372, "learning_rate": 5.328266224061953e-05, "loss": 0.025231325626373292, "step": 9220 }, { 
"epoch": 14.791666666666666, "grad_norm": 0.2261228710412979, "learning_rate": 5.326148140396802e-05, "loss": 0.02081107348203659, "step": 9230 }, { "epoch": 14.807692307692308, "grad_norm": 0.31297096610069275, "learning_rate": 5.324027145056886e-05, "loss": 0.0231286883354187, "step": 9240 }, { "epoch": 14.823717948717949, "grad_norm": 0.21363691985607147, "learning_rate": 5.321903240697088e-05, "loss": 0.02230333387851715, "step": 9250 }, { "epoch": 14.83974358974359, "grad_norm": 0.44839873909950256, "learning_rate": 5.3197764299759304e-05, "loss": 0.023168602585792543, "step": 9260 }, { "epoch": 14.85576923076923, "grad_norm": 0.16739770770072937, "learning_rate": 5.3176467155555756e-05, "loss": 0.022597478330135347, "step": 9270 }, { "epoch": 14.871794871794872, "grad_norm": 0.3092353045940399, "learning_rate": 5.315514100101817e-05, "loss": 0.024456489086151122, "step": 9280 }, { "epoch": 14.887820512820513, "grad_norm": 0.16906966269016266, "learning_rate": 5.3133785862840834e-05, "loss": 0.021961784362792967, "step": 9290 }, { "epoch": 14.903846153846153, "grad_norm": 0.22468745708465576, "learning_rate": 5.3112401767754295e-05, "loss": 0.024518422782421112, "step": 9300 }, { "epoch": 14.919871794871796, "grad_norm": 0.20197826623916626, "learning_rate": 5.3090988742525335e-05, "loss": 0.02128194123506546, "step": 9310 }, { "epoch": 14.935897435897436, "grad_norm": 0.1813511699438095, "learning_rate": 5.306954681395698e-05, "loss": 0.02294713258743286, "step": 9320 }, { "epoch": 14.951923076923077, "grad_norm": 0.19128936529159546, "learning_rate": 5.30480760088884e-05, "loss": 0.02065814584493637, "step": 9330 }, { "epoch": 14.967948717948717, "grad_norm": 0.17111608386039734, "learning_rate": 5.302657635419494e-05, "loss": 0.021186552941799164, "step": 9340 }, { "epoch": 14.98397435897436, "grad_norm": 0.18109674751758575, "learning_rate": 5.3005047876788036e-05, "loss": 0.021745046973228453, "step": 9350 }, { "epoch": 15.0, "grad_norm": 
0.33837464451789856, "learning_rate": 5.298349060361519e-05, "loss": 0.020325188338756562, "step": 9360 }, { "epoch": 15.0, "eval_accuracy_background": null, "eval_accuracy_crop": 0.9932818191784245, "eval_iou_background": 0.0, "eval_iou_crop": 0.9932818191784245, "eval_loss": 0.021521028131246567, "eval_mean_accuracy": 0.9932818191784245, "eval_mean_iou": 0.49664090958921225, "eval_overall_accuracy": 0.9932818191784245, "eval_runtime": 33.7503, "eval_samples_per_second": 26.103, "eval_steps_per_second": 3.289, "step": 9360 }, { "epoch": 15.01602564102564, "grad_norm": 0.3929958939552307, "learning_rate": 5.296190456166001e-05, "loss": 0.023122748732566832, "step": 9370 }, { "epoch": 15.032051282051283, "grad_norm": 0.2573523223400116, "learning_rate": 5.294028977794203e-05, "loss": 0.0241893470287323, "step": 9380 }, { "epoch": 15.048076923076923, "grad_norm": 0.1997421532869339, "learning_rate": 5.291864627951682e-05, "loss": 0.0207598477602005, "step": 9390 }, { "epoch": 15.064102564102564, "grad_norm": 0.39551663398742676, "learning_rate": 5.289697409347587e-05, "loss": 0.023863795399665832, "step": 9400 }, { "epoch": 15.080128205128204, "grad_norm": 0.25667789578437805, "learning_rate": 5.2875273246946585e-05, "loss": 0.022325071692466735, "step": 9410 }, { "epoch": 15.096153846153847, "grad_norm": 0.16201859712600708, "learning_rate": 5.285354376709223e-05, "loss": 0.0229871466755867, "step": 9420 }, { "epoch": 15.112179487179487, "grad_norm": 0.27989649772644043, "learning_rate": 5.283178568111194e-05, "loss": 0.021293818950653076, "step": 9430 }, { "epoch": 15.128205128205128, "grad_norm": 0.17262032628059387, "learning_rate": 5.280999901624064e-05, "loss": 0.02089075744152069, "step": 9440 }, { "epoch": 15.14423076923077, "grad_norm": 0.1780463308095932, "learning_rate": 5.278818379974902e-05, "loss": 0.021265433728694917, "step": 9450 }, { "epoch": 15.16025641025641, "grad_norm": 0.3536536395549774, "learning_rate": 5.276634005894351e-05, "loss": 
0.021082133054733276, "step": 9460 }, { "epoch": 15.176282051282051, "grad_norm": 0.2297477126121521, "learning_rate": 5.2744467821166254e-05, "loss": 0.02253623902797699, "step": 9470 }, { "epoch": 15.192307692307692, "grad_norm": 0.18978744745254517, "learning_rate": 5.2722567113795074e-05, "loss": 0.02288285344839096, "step": 9480 }, { "epoch": 15.208333333333334, "grad_norm": 0.21709710359573364, "learning_rate": 5.27006379642434e-05, "loss": 0.023053860664367674, "step": 9490 }, { "epoch": 15.224358974358974, "grad_norm": 0.22628439962863922, "learning_rate": 5.26786803999603e-05, "loss": 0.0208284392952919, "step": 9500 }, { "epoch": 15.240384615384615, "grad_norm": 0.15141208469867706, "learning_rate": 5.265669444843036e-05, "loss": 0.02306237518787384, "step": 9510 }, { "epoch": 15.256410256410255, "grad_norm": 0.3083103597164154, "learning_rate": 5.263468013717375e-05, "loss": 0.023408107459545135, "step": 9520 }, { "epoch": 15.272435897435898, "grad_norm": 0.16412577033042908, "learning_rate": 5.2612637493746116e-05, "loss": 0.02096937596797943, "step": 9530 }, { "epoch": 15.288461538461538, "grad_norm": 0.2762000560760498, "learning_rate": 5.2590566545738555e-05, "loss": 0.022352443635463716, "step": 9540 }, { "epoch": 15.304487179487179, "grad_norm": 0.2111821472644806, "learning_rate": 5.256846732077762e-05, "loss": 0.023894698917865755, "step": 9550 }, { "epoch": 15.320512820512821, "grad_norm": 0.17088143527507782, "learning_rate": 5.254633984652523e-05, "loss": 0.021378855407238006, "step": 9560 }, { "epoch": 15.336538461538462, "grad_norm": 0.32388344407081604, "learning_rate": 5.252418415067869e-05, "loss": 0.02264660894870758, "step": 9570 }, { "epoch": 15.352564102564102, "grad_norm": 0.38567298650741577, "learning_rate": 5.2502000260970614e-05, "loss": 0.02282543182373047, "step": 9580 }, { "epoch": 15.368589743589745, "grad_norm": 0.21276728808879852, "learning_rate": 5.247978820516891e-05, "loss": 0.02443356513977051, "step": 9590 }, { 
"epoch": 15.384615384615385, "grad_norm": 0.2563990354537964, "learning_rate": 5.245754801107676e-05, "loss": 0.019535960257053377, "step": 9600 }, { "epoch": 15.400641025641026, "grad_norm": 0.41692954301834106, "learning_rate": 5.243527970653252e-05, "loss": 0.02194916009902954, "step": 9610 }, { "epoch": 15.416666666666666, "grad_norm": 0.3463697135448456, "learning_rate": 5.241298331940979e-05, "loss": 0.020662128925323486, "step": 9620 }, { "epoch": 15.432692307692308, "grad_norm": 0.44112342596054077, "learning_rate": 5.2390658877617286e-05, "loss": 0.023887792229652406, "step": 9630 }, { "epoch": 15.448717948717949, "grad_norm": 0.24657490849494934, "learning_rate": 5.236830640909884e-05, "loss": 0.021003559231758118, "step": 9640 }, { "epoch": 15.46474358974359, "grad_norm": 0.15210402011871338, "learning_rate": 5.2345925941833395e-05, "loss": 0.022946125268936156, "step": 9650 }, { "epoch": 15.48076923076923, "grad_norm": 0.3580251634120941, "learning_rate": 5.2323517503834904e-05, "loss": 0.02259061336517334, "step": 9660 }, { "epoch": 15.496794871794872, "grad_norm": 0.18118873238563538, "learning_rate": 5.2301081123152344e-05, "loss": 0.019962020218372345, "step": 9670 }, { "epoch": 15.512820512820513, "grad_norm": 0.2210327535867691, "learning_rate": 5.227861682786969e-05, "loss": 0.02092159539461136, "step": 9680 }, { "epoch": 15.528846153846153, "grad_norm": 0.20245303213596344, "learning_rate": 5.2256124646105824e-05, "loss": 0.021818499267101287, "step": 9690 }, { "epoch": 15.544871794871796, "grad_norm": 0.2320573627948761, "learning_rate": 5.223360460601456e-05, "loss": 0.020765697956085204, "step": 9700 }, { "epoch": 15.560897435897436, "grad_norm": 0.2541865110397339, "learning_rate": 5.221105673578456e-05, "loss": 0.02099507302045822, "step": 9710 }, { "epoch": 15.576923076923077, "grad_norm": 0.13668733835220337, "learning_rate": 5.218848106363933e-05, "loss": 0.02639251947402954, "step": 9720 }, { "epoch": 15.592948717948717, "grad_norm": 
0.3141450881958008, "learning_rate": 5.216587761783721e-05, "loss": 0.024885575473308563, "step": 9730 }, { "epoch": 15.60897435897436, "grad_norm": 0.3172592520713806, "learning_rate": 5.214324642667124e-05, "loss": 0.02093893736600876, "step": 9740 }, { "epoch": 15.625, "grad_norm": 0.2286350131034851, "learning_rate": 5.2120587518469255e-05, "loss": 0.018888485431671143, "step": 9750 }, { "epoch": 15.64102564102564, "grad_norm": 0.2247171252965927, "learning_rate": 5.209790092159372e-05, "loss": 0.022053344547748564, "step": 9760 }, { "epoch": 15.657051282051283, "grad_norm": 0.20958079397678375, "learning_rate": 5.207518666444181e-05, "loss": 0.01985106021165848, "step": 9770 }, { "epoch": 15.673076923076923, "grad_norm": 0.34183749556541443, "learning_rate": 5.2052444775445304e-05, "loss": 0.02248275876045227, "step": 9780 }, { "epoch": 15.689102564102564, "grad_norm": 0.33114683628082275, "learning_rate": 5.202967528307057e-05, "loss": 0.021262222528457643, "step": 9790 }, { "epoch": 15.705128205128204, "grad_norm": 0.275050550699234, "learning_rate": 5.200687821581851e-05, "loss": 0.02058982998132706, "step": 9800 }, { "epoch": 15.721153846153847, "grad_norm": 0.20836953818798065, "learning_rate": 5.1984053602224576e-05, "loss": 0.02133229672908783, "step": 9810 }, { "epoch": 15.737179487179487, "grad_norm": 0.23136621713638306, "learning_rate": 5.196120147085868e-05, "loss": 0.02231139540672302, "step": 9820 }, { "epoch": 15.753205128205128, "grad_norm": 0.328595370054245, "learning_rate": 5.193832185032516e-05, "loss": 0.02051288038492203, "step": 9830 }, { "epoch": 15.76923076923077, "grad_norm": 0.21010561287403107, "learning_rate": 5.19154147692628e-05, "loss": 0.02157445102930069, "step": 9840 }, { "epoch": 15.78525641025641, "grad_norm": 0.20776112377643585, "learning_rate": 5.1892480256344733e-05, "loss": 0.02124907076358795, "step": 9850 }, { "epoch": 15.801282051282051, "grad_norm": 0.15031340718269348, "learning_rate": 5.186951834027844e-05, 
"loss": 0.022567817568778993, "step": 9860 }, { "epoch": 15.817307692307692, "grad_norm": 0.12306714057922363, "learning_rate": 5.1846529049805705e-05, "loss": 0.025868281722068787, "step": 9870 }, { "epoch": 15.833333333333334, "grad_norm": 0.21539463102817535, "learning_rate": 5.182351241370255e-05, "loss": 0.021506568789482115, "step": 9880 }, { "epoch": 15.849358974358974, "grad_norm": 0.23063485324382782, "learning_rate": 5.180046846077926e-05, "loss": 0.020231394469738005, "step": 9890 }, { "epoch": 15.865384615384615, "grad_norm": 0.385949969291687, "learning_rate": 5.17773972198803e-05, "loss": 0.021951334178447725, "step": 9900 }, { "epoch": 15.881410256410255, "grad_norm": 0.16245131194591522, "learning_rate": 5.1754298719884284e-05, "loss": 0.021964195370674133, "step": 9910 }, { "epoch": 15.897435897435898, "grad_norm": 0.3329795300960541, "learning_rate": 5.173117298970396e-05, "loss": 0.0215861514210701, "step": 9920 }, { "epoch": 15.913461538461538, "grad_norm": 0.3044317960739136, "learning_rate": 5.170802005828615e-05, "loss": 0.021915099024772643, "step": 9930 }, { "epoch": 15.929487179487179, "grad_norm": 0.213579460978508, "learning_rate": 5.168483995461172e-05, "loss": 0.02080874443054199, "step": 9940 }, { "epoch": 15.945512820512821, "grad_norm": 0.2924198806285858, "learning_rate": 5.166163270769557e-05, "loss": 0.019066746532917022, "step": 9950 }, { "epoch": 15.961538461538462, "grad_norm": 0.17431524395942688, "learning_rate": 5.1638398346586554e-05, "loss": 0.020550887286663055, "step": 9960 }, { "epoch": 15.977564102564102, "grad_norm": 0.2748472988605499, "learning_rate": 5.161513690036747e-05, "loss": 0.019901855289936064, "step": 9970 }, { "epoch": 15.993589743589745, "grad_norm": 0.14452838897705078, "learning_rate": 5.1591848398155034e-05, "loss": 0.02222808003425598, "step": 9980 }, { "epoch": 16.0, "eval_accuracy_background": null, "eval_accuracy_crop": 0.9939969538480334, "eval_iou_background": 0.0, "eval_iou_crop": 
0.9939969538480334, "eval_loss": 0.021094676107168198, "eval_mean_accuracy": 0.9939969538480334, "eval_mean_iou": 0.4969984769240167, "eval_overall_accuracy": 0.9939969538480334, "eval_runtime": 36.6417, "eval_samples_per_second": 24.044, "eval_steps_per_second": 3.029, "step": 9984 }, { "epoch": 16.009615384615383, "grad_norm": 0.31287750601768494, "learning_rate": 5.15685328690998e-05, "loss": 0.02322327047586441, "step": 9990 }, { "epoch": 16.025641025641026, "grad_norm": 0.2336347997188568, "learning_rate": 5.154519034238619e-05, "loss": 0.021912723779678345, "step": 10000 }, { "epoch": 16.041666666666668, "grad_norm": 0.3953631520271301, "learning_rate": 5.1521820847232366e-05, "loss": 0.02113026976585388, "step": 10010 }, { "epoch": 16.057692307692307, "grad_norm": 0.24499382078647614, "learning_rate": 5.1498424412890303e-05, "loss": 0.020731407403945922, "step": 10020 }, { "epoch": 16.07371794871795, "grad_norm": 0.5267351865768433, "learning_rate": 5.1475001068645665e-05, "loss": 0.022403474152088165, "step": 10030 }, { "epoch": 16.08974358974359, "grad_norm": 0.42104798555374146, "learning_rate": 5.1451550843817804e-05, "loss": 0.022274932265281676, "step": 10040 }, { "epoch": 16.10576923076923, "grad_norm": 0.2278832644224167, "learning_rate": 5.142807376775972e-05, "loss": 0.02154717594385147, "step": 10050 }, { "epoch": 16.121794871794872, "grad_norm": 0.24364672601222992, "learning_rate": 5.1404569869858026e-05, "loss": 0.02109465003013611, "step": 10060 }, { "epoch": 16.137820512820515, "grad_norm": 0.3396551012992859, "learning_rate": 5.13810391795329e-05, "loss": 0.02117479294538498, "step": 10070 }, { "epoch": 16.153846153846153, "grad_norm": 0.28980815410614014, "learning_rate": 5.1357481726238065e-05, "loss": 0.021567390859127046, "step": 10080 }, { "epoch": 16.169871794871796, "grad_norm": 0.49749696254730225, "learning_rate": 5.133389753946075e-05, "loss": 0.024009746313095093, "step": 10090 }, { "epoch": 16.185897435897434, "grad_norm": 
0.12388508766889572, "learning_rate": 5.1310286648721634e-05, "loss": 0.021593642234802247, "step": 10100 }, { "epoch": 16.201923076923077, "grad_norm": 0.2578061819076538, "learning_rate": 5.1286649083574815e-05, "loss": 0.02214239239692688, "step": 10110 }, { "epoch": 16.21794871794872, "grad_norm": 0.3693560063838959, "learning_rate": 5.1262984873607796e-05, "loss": 0.021353939175605775, "step": 10120 }, { "epoch": 16.233974358974358, "grad_norm": 0.34106096625328064, "learning_rate": 5.1239294048441434e-05, "loss": 0.02293875515460968, "step": 10130 }, { "epoch": 16.25, "grad_norm": 0.37254226207733154, "learning_rate": 5.1215576637729885e-05, "loss": 0.021828916668891907, "step": 10140 }, { "epoch": 16.266025641025642, "grad_norm": 0.2316504418849945, "learning_rate": 5.1191832671160604e-05, "loss": 0.020688219368457793, "step": 10150 }, { "epoch": 16.28205128205128, "grad_norm": 0.3739328980445862, "learning_rate": 5.116806217845424e-05, "loss": 0.022233347594738006, "step": 10160 }, { "epoch": 16.298076923076923, "grad_norm": 0.22457540035247803, "learning_rate": 5.1144265189364714e-05, "loss": 0.02264664024114609, "step": 10170 }, { "epoch": 16.314102564102566, "grad_norm": 0.11777233332395554, "learning_rate": 5.112044173367905e-05, "loss": 0.02348949760198593, "step": 10180 }, { "epoch": 16.330128205128204, "grad_norm": 0.28684675693511963, "learning_rate": 5.1096591841217446e-05, "loss": 0.024352307617664336, "step": 10190 }, { "epoch": 16.346153846153847, "grad_norm": 0.25637301802635193, "learning_rate": 5.1072715541833164e-05, "loss": 0.022885122895240785, "step": 10200 }, { "epoch": 16.362179487179485, "grad_norm": 0.3261851370334625, "learning_rate": 5.104881286541253e-05, "loss": 0.019211958348751067, "step": 10210 }, { "epoch": 16.378205128205128, "grad_norm": 0.28763964772224426, "learning_rate": 5.1024883841874876e-05, "loss": 0.020585161447525025, "step": 10220 }, { "epoch": 16.39423076923077, "grad_norm": 0.28652098774909973, "learning_rate": 
5.100092850117254e-05, "loss": 0.0214982733130455, "step": 10230 }, { "epoch": 16.41025641025641, "grad_norm": 0.3184214234352112, "learning_rate": 5.0976946873290774e-05, "loss": 0.022056803107261658, "step": 10240 }, { "epoch": 16.42628205128205, "grad_norm": 0.19923456013202667, "learning_rate": 5.095293898824775e-05, "loss": 0.02309572547674179, "step": 10250 }, { "epoch": 16.442307692307693, "grad_norm": 0.17553454637527466, "learning_rate": 5.092890487609449e-05, "loss": 0.022355802357196808, "step": 10260 }, { "epoch": 16.458333333333332, "grad_norm": 0.3647463321685791, "learning_rate": 5.0904844566914864e-05, "loss": 0.02329743802547455, "step": 10270 }, { "epoch": 16.474358974358974, "grad_norm": 0.2296924591064453, "learning_rate": 5.088075809082551e-05, "loss": 0.020481960475444795, "step": 10280 }, { "epoch": 16.490384615384617, "grad_norm": 0.3846636414527893, "learning_rate": 5.0856645477975854e-05, "loss": 0.021489247679710388, "step": 10290 }, { "epoch": 16.506410256410255, "grad_norm": 0.17987386882305145, "learning_rate": 5.0832506758548004e-05, "loss": 0.02162156403064728, "step": 10300 }, { "epoch": 16.522435897435898, "grad_norm": 0.18617387115955353, "learning_rate": 5.080834196275677e-05, "loss": 0.02460050880908966, "step": 10310 }, { "epoch": 16.53846153846154, "grad_norm": 0.247709259390831, "learning_rate": 5.0784151120849586e-05, "loss": 0.023134765028953553, "step": 10320 }, { "epoch": 16.55448717948718, "grad_norm": 0.2421119511127472, "learning_rate": 5.0759934263106494e-05, "loss": 0.021051812171936034, "step": 10330 }, { "epoch": 16.57051282051282, "grad_norm": 0.21635238826274872, "learning_rate": 5.073569141984009e-05, "loss": 0.021864362061023712, "step": 10340 }, { "epoch": 16.58653846153846, "grad_norm": 0.2561285197734833, "learning_rate": 5.0711422621395524e-05, "loss": 0.022728589177131654, "step": 10350 }, { "epoch": 16.602564102564102, "grad_norm": null, "learning_rate": 5.0687127898150415e-05, "loss": 
0.02334311753511429, "step": 10360 }, { "epoch": 16.618589743589745, "grad_norm": 0.19200755655765533, "learning_rate": 5.0662807280514826e-05, "loss": 0.019696906208992004, "step": 10370 }, { "epoch": 16.634615384615383, "grad_norm": 0.18561463057994843, "learning_rate": 5.063846079893126e-05, "loss": 0.021609894931316376, "step": 10380 }, { "epoch": 16.650641025641026, "grad_norm": 0.41465455293655396, "learning_rate": 5.061408848387455e-05, "loss": 0.021024763584136963, "step": 10390 }, { "epoch": 16.666666666666668, "grad_norm": 0.22879117727279663, "learning_rate": 5.058969036585191e-05, "loss": 0.02340119183063507, "step": 10400 }, { "epoch": 16.682692307692307, "grad_norm": 0.25557494163513184, "learning_rate": 5.056526647540283e-05, "loss": 0.02138064056634903, "step": 10410 }, { "epoch": 16.69871794871795, "grad_norm": 0.26591381430625916, "learning_rate": 5.054081684309905e-05, "loss": 0.020580066740512847, "step": 10420 }, { "epoch": 16.71474358974359, "grad_norm": 0.39383041858673096, "learning_rate": 5.0516341499544553e-05, "loss": 0.022141283750534056, "step": 10430 }, { "epoch": 16.73076923076923, "grad_norm": 0.12815749645233154, "learning_rate": 5.04918404753755e-05, "loss": 0.021316011250019074, "step": 10440 }, { "epoch": 16.746794871794872, "grad_norm": 0.2029629349708557, "learning_rate": 5.0467313801260194e-05, "loss": 0.020795230567455292, "step": 10450 }, { "epoch": 16.76282051282051, "grad_norm": 0.21873608231544495, "learning_rate": 5.044276150789904e-05, "loss": 0.01867365390062332, "step": 10460 }, { "epoch": 16.778846153846153, "grad_norm": 0.24850179255008698, "learning_rate": 5.041818362602451e-05, "loss": 0.02276477813720703, "step": 10470 }, { "epoch": 16.794871794871796, "grad_norm": 0.2151305377483368, "learning_rate": 5.039358018640111e-05, "loss": 0.020194216072559355, "step": 10480 }, { "epoch": 16.810897435897434, "grad_norm": 0.29090625047683716, "learning_rate": 5.036895121982535e-05, "loss": 0.022840428352355956, "step": 
10490 }, { "epoch": 16.826923076923077, "grad_norm": 0.484971821308136, "learning_rate": 5.034429675712567e-05, "loss": 0.020849169790744783, "step": 10500 }, { "epoch": 16.84294871794872, "grad_norm": 0.191527858376503, "learning_rate": 5.0319616829162425e-05, "loss": 0.02012902796268463, "step": 10510 }, { "epoch": 16.858974358974358, "grad_norm": 0.2650490999221802, "learning_rate": 5.0294911466827866e-05, "loss": 0.024513523280620574, "step": 10520 }, { "epoch": 16.875, "grad_norm": 0.18225103616714478, "learning_rate": 5.027018070104606e-05, "loss": 0.020731301605701448, "step": 10530 }, { "epoch": 16.891025641025642, "grad_norm": 0.1801590919494629, "learning_rate": 5.024542456277289e-05, "loss": 0.021165245771408082, "step": 10540 }, { "epoch": 16.90705128205128, "grad_norm": 0.12754392623901367, "learning_rate": 5.022064308299599e-05, "loss": 0.021043872833251952, "step": 10550 }, { "epoch": 16.923076923076923, "grad_norm": 0.2560364603996277, "learning_rate": 5.0195836292734685e-05, "loss": 0.0206840842962265, "step": 10560 }, { "epoch": 16.939102564102566, "grad_norm": 0.2144910842180252, "learning_rate": 5.017100422304004e-05, "loss": 0.02304968386888504, "step": 10570 }, { "epoch": 16.955128205128204, "grad_norm": 0.33453020453453064, "learning_rate": 5.0146146904994724e-05, "loss": 0.019574624300003052, "step": 10580 }, { "epoch": 16.971153846153847, "grad_norm": 0.20941559970378876, "learning_rate": 5.0121264369713014e-05, "loss": 0.019577297568321227, "step": 10590 }, { "epoch": 16.98717948717949, "grad_norm": 0.4028733968734741, "learning_rate": 5.009635664834075e-05, "loss": 0.022981104254722596, "step": 10600 }, { "epoch": 17.0, "eval_accuracy_background": null, "eval_accuracy_crop": 0.9934732602214905, "eval_iou_background": 0.0, "eval_iou_crop": 0.9934732602214905, "eval_loss": 0.021085940301418304, "eval_mean_accuracy": 0.9934732602214905, "eval_mean_iou": 0.49673663011074526, "eval_overall_accuracy": 0.9934732602214905, "eval_runtime": 37.3069, 
"eval_samples_per_second": 23.615, "eval_steps_per_second": 2.975, "step": 10608 }, { "epoch": 17.003205128205128, "grad_norm": 0.2146024852991104, "learning_rate": 5.0071423772055325e-05, "loss": 0.023441970348358154, "step": 10610 }, { "epoch": 17.01923076923077, "grad_norm": 0.24669313430786133, "learning_rate": 5.0046465772065565e-05, "loss": 0.022963345050811768, "step": 10620 }, { "epoch": 17.03525641025641, "grad_norm": 0.24953821301460266, "learning_rate": 5.0021482679611796e-05, "loss": 0.020926159620285035, "step": 10630 }, { "epoch": 17.05128205128205, "grad_norm": 0.38920584321022034, "learning_rate": 4.999647452596572e-05, "loss": 0.02207798808813095, "step": 10640 }, { "epoch": 17.067307692307693, "grad_norm": 0.14880864322185516, "learning_rate": 4.9971441342430436e-05, "loss": 0.01930317431688309, "step": 10650 }, { "epoch": 17.083333333333332, "grad_norm": 0.29861998558044434, "learning_rate": 4.994638316034035e-05, "loss": 0.02167111337184906, "step": 10660 }, { "epoch": 17.099358974358974, "grad_norm": 0.15245598554611206, "learning_rate": 4.9921300011061165e-05, "loss": 0.020920625329017638, "step": 10670 }, { "epoch": 17.115384615384617, "grad_norm": 0.3823002874851227, "learning_rate": 4.989619192598984e-05, "loss": 0.02173103839159012, "step": 10680 }, { "epoch": 17.131410256410255, "grad_norm": 0.12307695299386978, "learning_rate": 4.987105893655455e-05, "loss": 0.01931413561105728, "step": 10690 }, { "epoch": 17.147435897435898, "grad_norm": 0.23604857921600342, "learning_rate": 4.984590107421462e-05, "loss": 0.020107203722000123, "step": 10700 }, { "epoch": 17.16346153846154, "grad_norm": 0.23872308433055878, "learning_rate": 4.982071837046056e-05, "loss": 0.020995864272117616, "step": 10710 }, { "epoch": 17.17948717948718, "grad_norm": 0.32323965430259705, "learning_rate": 4.97955108568139e-05, "loss": 0.025125986337661742, "step": 10720 }, { "epoch": 17.19551282051282, "grad_norm": 0.1432463377714157, "learning_rate": 
4.9770278564827285e-05, "loss": 0.021775200963020325, "step": 10730 }, { "epoch": 17.21153846153846, "grad_norm": 0.11923806369304657, "learning_rate": 4.974502152608436e-05, "loss": 0.019797271490097045, "step": 10740 }, { "epoch": 17.227564102564102, "grad_norm": 0.17541979253292084, "learning_rate": 4.9719739772199746e-05, "loss": 0.020078325271606447, "step": 10750 }, { "epoch": 17.243589743589745, "grad_norm": 0.13056030869483948, "learning_rate": 4.969443333481897e-05, "loss": 0.022518113255500793, "step": 10760 }, { "epoch": 17.259615384615383, "grad_norm": 0.20280463993549347, "learning_rate": 4.9669102245618515e-05, "loss": 0.020786809921264648, "step": 10770 }, { "epoch": 17.275641025641026, "grad_norm": 0.29219797253608704, "learning_rate": 4.9643746536305666e-05, "loss": 0.022814035415649414, "step": 10780 }, { "epoch": 17.291666666666668, "grad_norm": 0.14200791716575623, "learning_rate": 4.961836623861855e-05, "loss": 0.020292554795742036, "step": 10790 }, { "epoch": 17.307692307692307, "grad_norm": 0.3141058683395386, "learning_rate": 4.9592961384326086e-05, "loss": 0.020652759075164794, "step": 10800 }, { "epoch": 17.32371794871795, "grad_norm": 0.1741625964641571, "learning_rate": 4.9567532005227884e-05, "loss": 0.021090376377105712, "step": 10810 }, { "epoch": 17.33974358974359, "grad_norm": 0.5568787455558777, "learning_rate": 4.9542078133154305e-05, "loss": 0.021536168456077576, "step": 10820 }, { "epoch": 17.35576923076923, "grad_norm": 0.15661457180976868, "learning_rate": 4.9516599799966345e-05, "loss": 0.02182292640209198, "step": 10830 }, { "epoch": 17.371794871794872, "grad_norm": 0.28149616718292236, "learning_rate": 4.94910970375556e-05, "loss": 0.02041949927806854, "step": 10840 }, { "epoch": 17.387820512820515, "grad_norm": 0.24496006965637207, "learning_rate": 4.946556987784429e-05, "loss": 0.01996983289718628, "step": 10850 }, { "epoch": 17.403846153846153, "grad_norm": 0.2575707733631134, "learning_rate": 4.9440018352785126e-05, 
"loss": 0.022086894512176512, "step": 10860 }, { "epoch": 17.419871794871796, "grad_norm": 0.23366278409957886, "learning_rate": 4.941444249436134e-05, "loss": 0.02037210613489151, "step": 10870 }, { "epoch": 17.435897435897434, "grad_norm": 0.1793389618396759, "learning_rate": 4.9388842334586626e-05, "loss": 0.02084510922431946, "step": 10880 }, { "epoch": 17.451923076923077, "grad_norm": 0.24147175252437592, "learning_rate": 4.936321790550509e-05, "loss": 0.023345901072025298, "step": 10890 }, { "epoch": 17.46794871794872, "grad_norm": 0.21671581268310547, "learning_rate": 4.933756923919122e-05, "loss": 0.021605323255062103, "step": 10900 }, { "epoch": 17.483974358974358, "grad_norm": 0.1815437376499176, "learning_rate": 4.9311896367749824e-05, "loss": 0.021249689161777496, "step": 10910 }, { "epoch": 17.5, "grad_norm": 0.2214028686285019, "learning_rate": 4.928619932331603e-05, "loss": 0.0219644770026207, "step": 10920 }, { "epoch": 17.516025641025642, "grad_norm": 0.1985999345779419, "learning_rate": 4.926047813805523e-05, "loss": 0.02242199182510376, "step": 10930 }, { "epoch": 17.53205128205128, "grad_norm": 0.1635807603597641, "learning_rate": 4.9234732844163016e-05, "loss": 0.019621089100837708, "step": 10940 }, { "epoch": 17.548076923076923, "grad_norm": 0.17596086859703064, "learning_rate": 4.920896347386515e-05, "loss": 0.01990005373954773, "step": 10950 }, { "epoch": 17.564102564102566, "grad_norm": 0.30706778168678284, "learning_rate": 4.9183170059417543e-05, "loss": 0.02136881500482559, "step": 10960 }, { "epoch": 17.580128205128204, "grad_norm": 0.34161821007728577, "learning_rate": 4.915735263310621e-05, "loss": 0.022919312119483948, "step": 10970 }, { "epoch": 17.596153846153847, "grad_norm": 0.3637082874774933, "learning_rate": 4.913151122724721e-05, "loss": 0.022712038457393648, "step": 10980 }, { "epoch": 17.61217948717949, "grad_norm": 0.3159894347190857, "learning_rate": 4.9105645874186624e-05, "loss": 0.020432326197624206, "step": 10990 }, { 
"epoch": 17.628205128205128, "grad_norm": 0.19917723536491394, "learning_rate": 4.9079756606300495e-05, "loss": 0.02101266086101532, "step": 11000 }, { "epoch": 17.64423076923077, "grad_norm": 0.15398260951042175, "learning_rate": 4.905384345599482e-05, "loss": 0.019868570566177367, "step": 11010 }, { "epoch": 17.66025641025641, "grad_norm": 0.20017024874687195, "learning_rate": 4.902790645570548e-05, "loss": 0.01980249136686325, "step": 11020 }, { "epoch": 17.67628205128205, "grad_norm": 0.27411341667175293, "learning_rate": 4.900194563789821e-05, "loss": 0.020335982739925384, "step": 11030 }, { "epoch": 17.692307692307693, "grad_norm": 0.30393895506858826, "learning_rate": 4.8975961035068546e-05, "loss": 0.019514264166355134, "step": 11040 }, { "epoch": 17.708333333333332, "grad_norm": 0.3436993360519409, "learning_rate": 4.8949952679741825e-05, "loss": 0.019093385338783263, "step": 11050 }, { "epoch": 17.724358974358974, "grad_norm": 0.2846207618713379, "learning_rate": 4.89239206044731e-05, "loss": 0.021279846131801606, "step": 11060 }, { "epoch": 17.740384615384617, "grad_norm": 0.4129798710346222, "learning_rate": 4.8897864841847086e-05, "loss": 0.022613275051116943, "step": 11070 }, { "epoch": 17.756410256410255, "grad_norm": 2.3654696941375732, "learning_rate": 4.88717854244782e-05, "loss": 0.0220718115568161, "step": 11080 }, { "epoch": 17.772435897435898, "grad_norm": 0.3553473949432373, "learning_rate": 4.884568238501044e-05, "loss": 0.022169274091720582, "step": 11090 }, { "epoch": 17.78846153846154, "grad_norm": 0.29762351512908936, "learning_rate": 4.881955575611736e-05, "loss": 0.01977149546146393, "step": 11100 }, { "epoch": 17.80448717948718, "grad_norm": 0.25376608967781067, "learning_rate": 4.879340557050208e-05, "loss": 0.02317061126232147, "step": 11110 }, { "epoch": 17.82051282051282, "grad_norm": 0.24640458822250366, "learning_rate": 4.876723186089716e-05, "loss": 0.020172584056854247, "step": 11120 }, { "epoch": 17.83653846153846, 
"grad_norm": 0.265215128660202, "learning_rate": 4.874103466006464e-05, "loss": 0.018286675214767456, "step": 11130 }, { "epoch": 17.852564102564102, "grad_norm": 0.2136283665895462, "learning_rate": 4.871481400079595e-05, "loss": 0.023842886090278625, "step": 11140 }, { "epoch": 17.868589743589745, "grad_norm": 0.13030441105365753, "learning_rate": 4.868856991591187e-05, "loss": 0.020331880450248717, "step": 11150 }, { "epoch": 17.884615384615383, "grad_norm": 0.24367603659629822, "learning_rate": 4.866230243826254e-05, "loss": 0.022084075212478637, "step": 11160 }, { "epoch": 17.900641025641026, "grad_norm": 0.15698878467082977, "learning_rate": 4.863601160072735e-05, "loss": 0.019901525974273682, "step": 11170 }, { "epoch": 17.916666666666668, "grad_norm": 0.19495761394500732, "learning_rate": 4.8609697436214936e-05, "loss": 0.02130167782306671, "step": 11180 }, { "epoch": 17.932692307692307, "grad_norm": 0.2175854593515396, "learning_rate": 4.858335997766314e-05, "loss": 0.020918312668800353, "step": 11190 }, { "epoch": 17.94871794871795, "grad_norm": 0.2164468914270401, "learning_rate": 4.855699925803895e-05, "loss": 0.018570704758167265, "step": 11200 }, { "epoch": 17.96474358974359, "grad_norm": 0.29006364941596985, "learning_rate": 4.8530615310338486e-05, "loss": 0.0207971915602684, "step": 11210 }, { "epoch": 17.98076923076923, "grad_norm": 0.1651107370853424, "learning_rate": 4.850420816758692e-05, "loss": 0.019589176774024962, "step": 11220 }, { "epoch": 17.996794871794872, "grad_norm": 0.21724991500377655, "learning_rate": 4.847777786283848e-05, "loss": 0.022823604941368102, "step": 11230 }, { "epoch": 18.0, "eval_accuracy_background": NaN, "eval_accuracy_crop": 0.9950425843081758, "eval_iou_background": 0.0, "eval_iou_crop": 0.9950425843081758, "eval_loss": 0.02110610529780388, "eval_mean_accuracy": 0.9950425843081758, "eval_mean_iou": 0.4975212921540879, "eval_overall_accuracy": 0.9950425843081758, "eval_runtime": 35.539, "eval_samples_per_second": 
24.79, "eval_steps_per_second": 3.123, "step": 11232 }, { "epoch": 18.012820512820515, "grad_norm": 0.35942479968070984, "learning_rate": 4.845132442917639e-05, "loss": 0.02182151675224304, "step": 11240 }, { "epoch": 18.028846153846153, "grad_norm": 0.16669394075870514, "learning_rate": 4.84248478997128e-05, "loss": 0.023868051171302796, "step": 11250 }, { "epoch": 18.044871794871796, "grad_norm": 0.18419021368026733, "learning_rate": 4.839834830758877e-05, "loss": 0.020730572938919067, "step": 11260 }, { "epoch": 18.060897435897434, "grad_norm": 0.25199443101882935, "learning_rate": 4.837182568597426e-05, "loss": 0.022674773633480073, "step": 11270 }, { "epoch": 18.076923076923077, "grad_norm": 0.2542992830276489, "learning_rate": 4.834528006806802e-05, "loss": 0.02176496833562851, "step": 11280 }, { "epoch": 18.09294871794872, "grad_norm": 0.23062624037265778, "learning_rate": 4.831871148709761e-05, "loss": 0.02042345404624939, "step": 11290 }, { "epoch": 18.108974358974358, "grad_norm": 0.24492371082305908, "learning_rate": 4.829211997631933e-05, "loss": 0.020384383201599122, "step": 11300 }, { "epoch": 18.125, "grad_norm": 0.2059970200061798, "learning_rate": 4.826550556901817e-05, "loss": 0.02093479335308075, "step": 11310 }, { "epoch": 18.141025641025642, "grad_norm": 0.2103549689054489, "learning_rate": 4.8238868298507776e-05, "loss": 0.020391348004341125, "step": 11320 }, { "epoch": 18.15705128205128, "grad_norm": 0.3284936547279358, "learning_rate": 4.821220819813043e-05, "loss": 0.019772741198539733, "step": 11330 }, { "epoch": 18.173076923076923, "grad_norm": 0.31566521525382996, "learning_rate": 4.818552530125699e-05, "loss": 0.02036762237548828, "step": 11340 }, { "epoch": 18.189102564102566, "grad_norm": 0.2621341347694397, "learning_rate": 4.815881964128683e-05, "loss": 0.02077915072441101, "step": 11350 }, { "epoch": 18.205128205128204, "grad_norm": 0.22479915618896484, "learning_rate": 4.813209125164783e-05, "loss": 0.021999283134937285, "step": 
11360 }, { "epoch": 18.221153846153847, "grad_norm": 0.2157152146100998, "learning_rate": 4.810534016579633e-05, "loss": 0.01981757879257202, "step": 11370 }, { "epoch": 18.237179487179485, "grad_norm": 0.2325216680765152, "learning_rate": 4.807856641721705e-05, "loss": 0.02163119465112686, "step": 11380 }, { "epoch": 18.253205128205128, "grad_norm": 0.21789273619651794, "learning_rate": 4.8051770039423106e-05, "loss": 0.01932143568992615, "step": 11390 }, { "epoch": 18.26923076923077, "grad_norm": 0.26940080523490906, "learning_rate": 4.802495106595592e-05, "loss": 0.01925051063299179, "step": 11400 }, { "epoch": 18.28525641025641, "grad_norm": 0.10561416298151016, "learning_rate": 4.7998109530385226e-05, "loss": 0.02480897307395935, "step": 11410 }, { "epoch": 18.30128205128205, "grad_norm": 0.16710583865642548, "learning_rate": 4.797124546630896e-05, "loss": 0.021019226312637328, "step": 11420 }, { "epoch": 18.317307692307693, "grad_norm": 0.23021924495697021, "learning_rate": 4.794435890735329e-05, "loss": 0.019504769146442412, "step": 11430 }, { "epoch": 18.333333333333332, "grad_norm": 0.2658737003803253, "learning_rate": 4.7917449887172514e-05, "loss": 0.01928415447473526, "step": 11440 }, { "epoch": 18.349358974358974, "grad_norm": 0.19552147388458252, "learning_rate": 4.789051843944907e-05, "loss": 0.021296054124832153, "step": 11450 }, { "epoch": 18.365384615384617, "grad_norm": 0.20547446608543396, "learning_rate": 4.786356459789345e-05, "loss": 0.020159099996089936, "step": 11460 }, { "epoch": 18.381410256410255, "grad_norm": 0.524530291557312, "learning_rate": 4.7836588396244204e-05, "loss": 0.02050747275352478, "step": 11470 }, { "epoch": 18.397435897435898, "grad_norm": 0.17480440437793732, "learning_rate": 4.780958986826782e-05, "loss": 0.019351546466350556, "step": 11480 }, { "epoch": 18.41346153846154, "grad_norm": 0.13177591562271118, "learning_rate": 4.778256904775878e-05, "loss": 0.017939174175262453, "step": 11490 }, { "epoch": 
18.42948717948718, "grad_norm": 0.49214622378349304, "learning_rate": 4.7755525968539467e-05, "loss": 0.022727851569652558, "step": 11500 }, { "epoch": 18.44551282051282, "grad_norm": 0.25091981887817383, "learning_rate": 4.772846066446009e-05, "loss": 0.021760401129722596, "step": 11510 }, { "epoch": 18.46153846153846, "grad_norm": 0.19878697395324707, "learning_rate": 4.7701373169398705e-05, "loss": 0.02367849349975586, "step": 11520 }, { "epoch": 18.477564102564102, "grad_norm": 0.2601863145828247, "learning_rate": 4.7674263517261144e-05, "loss": 0.020757919549942015, "step": 11530 }, { "epoch": 18.493589743589745, "grad_norm": 0.48353394865989685, "learning_rate": 4.764713174198095e-05, "loss": 0.021673800051212312, "step": 11540 }, { "epoch": 18.509615384615383, "grad_norm": 0.24939925968647003, "learning_rate": 4.761997787751941e-05, "loss": 0.020893728733062743, "step": 11550 }, { "epoch": 18.525641025641026, "grad_norm": 0.19379585981369019, "learning_rate": 4.7592801957865405e-05, "loss": 0.021668604016304015, "step": 11560 }, { "epoch": 18.541666666666668, "grad_norm": 0.2517005503177643, "learning_rate": 4.7565604017035433e-05, "loss": 0.01973954290151596, "step": 11570 }, { "epoch": 18.557692307692307, "grad_norm": 0.17275165021419525, "learning_rate": 4.7538384089073587e-05, "loss": 0.020095641911029815, "step": 11580 }, { "epoch": 18.57371794871795, "grad_norm": 0.2658402919769287, "learning_rate": 4.751114220805145e-05, "loss": 0.0235325887799263, "step": 11590 }, { "epoch": 18.58974358974359, "grad_norm": 0.3461624085903168, "learning_rate": 4.748387840806811e-05, "loss": 0.024012660980224608, "step": 11600 }, { "epoch": 18.60576923076923, "grad_norm": 0.1159447655081749, "learning_rate": 4.745659272325007e-05, "loss": 0.01992557495832443, "step": 11610 }, { "epoch": 18.621794871794872, "grad_norm": 0.15944412350654602, "learning_rate": 4.742928518775122e-05, "loss": 0.02112775146961212, "step": 11620 }, { "epoch": 18.63782051282051, "grad_norm": 
0.2626737058162689, "learning_rate": 4.7401955835752835e-05, "loss": 0.02134781926870346, "step": 11630 }, { "epoch": 18.653846153846153, "grad_norm": 0.29534825682640076, "learning_rate": 4.737460470146347e-05, "loss": 0.023002251982688904, "step": 11640 }, { "epoch": 18.669871794871796, "grad_norm": 0.14739343523979187, "learning_rate": 4.734723181911895e-05, "loss": 0.02167621999979019, "step": 11650 }, { "epoch": 18.685897435897434, "grad_norm": 0.28390660881996155, "learning_rate": 4.7319837222982326e-05, "loss": 0.02035772204399109, "step": 11660 }, { "epoch": 18.701923076923077, "grad_norm": 0.16853176057338715, "learning_rate": 4.7292420947343837e-05, "loss": 0.019861750304698944, "step": 11670 }, { "epoch": 18.71794871794872, "grad_norm": 0.3529895544052124, "learning_rate": 4.726498302652083e-05, "loss": 0.02405810058116913, "step": 11680 }, { "epoch": 18.733974358974358, "grad_norm": 0.1473383754491806, "learning_rate": 4.723752349485778e-05, "loss": 0.019948232173919677, "step": 11690 }, { "epoch": 18.75, "grad_norm": 0.20917631685733795, "learning_rate": 4.7210042386726206e-05, "loss": 0.01868666708469391, "step": 11700 }, { "epoch": 18.766025641025642, "grad_norm": 0.26766589283943176, "learning_rate": 4.71825397365246e-05, "loss": 0.021715882420539855, "step": 11710 }, { "epoch": 18.78205128205128, "grad_norm": 0.19145695865154266, "learning_rate": 4.715501557867847e-05, "loss": 0.020045606791973113, "step": 11720 }, { "epoch": 18.798076923076923, "grad_norm": 0.3369481563568115, "learning_rate": 4.71274699476402e-05, "loss": 0.0203432634472847, "step": 11730 }, { "epoch": 18.814102564102566, "grad_norm": 0.30022937059402466, "learning_rate": 4.7099902877889095e-05, "loss": 0.02318585067987442, "step": 11740 }, { "epoch": 18.830128205128204, "grad_norm": 0.3915163278579712, "learning_rate": 4.707231440393125e-05, "loss": 0.0208686038851738, "step": 11750 }, { "epoch": 18.846153846153847, "grad_norm": 0.21271912753582, "learning_rate": 
4.704470456029959e-05, "loss": 0.020089086890220643, "step": 11760 }, { "epoch": 18.86217948717949, "grad_norm": 0.17751820385456085, "learning_rate": 4.7017073381553764e-05, "loss": 0.01878455877304077, "step": 11770 }, { "epoch": 18.878205128205128, "grad_norm": 0.2990981638431549, "learning_rate": 4.698942090228014e-05, "loss": 0.02127808630466461, "step": 11780 }, { "epoch": 18.89423076923077, "grad_norm": 0.2674667239189148, "learning_rate": 4.696174715709174e-05, "loss": 0.019745050370693205, "step": 11790 }, { "epoch": 18.91025641025641, "grad_norm": 0.2213476151227951, "learning_rate": 4.6934052180628215e-05, "loss": 0.020797358453273775, "step": 11800 }, { "epoch": 18.92628205128205, "grad_norm": 0.2768981456756592, "learning_rate": 4.6906336007555785e-05, "loss": 0.021738220751285554, "step": 11810 }, { "epoch": 18.942307692307693, "grad_norm": 0.24092870950698853, "learning_rate": 4.6878598672567196e-05, "loss": 0.019201993942260742, "step": 11820 }, { "epoch": 18.958333333333332, "grad_norm": 0.2033987194299698, "learning_rate": 4.685084021038169e-05, "loss": 0.020323386788368224, "step": 11830 }, { "epoch": 18.974358974358974, "grad_norm": 0.34852075576782227, "learning_rate": 4.682306065574496e-05, "loss": 0.020510002970695496, "step": 11840 }, { "epoch": 18.990384615384617, "grad_norm": 0.30719828605651855, "learning_rate": 4.679526004342909e-05, "loss": 0.02114461064338684, "step": 11850 }, { "epoch": 19.0, "eval_accuracy_background": NaN, "eval_accuracy_crop": 0.9936230180006649, "eval_iou_background": 0.0, "eval_iou_crop": 0.9936230180006649, "eval_loss": 0.021593568846583366, "eval_mean_accuracy": 0.9936230180006649, "eval_mean_iou": 0.49681150900033244, "eval_overall_accuracy": 0.9936230180006649, "eval_runtime": 34.3499, "eval_samples_per_second": 25.648, "eval_steps_per_second": 3.231, "step": 11856 }, { "epoch": 19.006410256410255, "grad_norm": 0.21591101586818695, "learning_rate": 4.676743840823253e-05, "loss": 0.020903219282627106, "step": 
11860 }, { "epoch": 19.022435897435898, "grad_norm": 0.20247787237167358, "learning_rate": 4.673959578498004e-05, "loss": 0.02262943834066391, "step": 11870 }, { "epoch": 19.03846153846154, "grad_norm": 0.2346166968345642, "learning_rate": 4.671173220852265e-05, "loss": 0.020059294998645782, "step": 11880 }, { "epoch": 19.05448717948718, "grad_norm": 1.1608781814575195, "learning_rate": 4.668384771373761e-05, "loss": 0.022148275375366212, "step": 11890 }, { "epoch": 19.07051282051282, "grad_norm": 0.15627089142799377, "learning_rate": 4.665594233552838e-05, "loss": 0.020119787752628328, "step": 11900 }, { "epoch": 19.08653846153846, "grad_norm": 0.11588118970394135, "learning_rate": 4.6628016108824535e-05, "loss": 0.019988732039928438, "step": 11910 }, { "epoch": 19.102564102564102, "grad_norm": 0.15503299236297607, "learning_rate": 4.660006906858176e-05, "loss": 0.019550320506095887, "step": 11920 }, { "epoch": 19.118589743589745, "grad_norm": 0.22050820291042328, "learning_rate": 4.657210124978179e-05, "loss": 0.022143733501434327, "step": 11930 }, { "epoch": 19.134615384615383, "grad_norm": 0.20957647264003754, "learning_rate": 4.654411268743234e-05, "loss": 0.022113549709320068, "step": 11940 }, { "epoch": 19.150641025641026, "grad_norm": 0.3014703691005707, "learning_rate": 4.651610341656715e-05, "loss": 0.020066747069358827, "step": 11950 }, { "epoch": 19.166666666666668, "grad_norm": 0.3152623474597931, "learning_rate": 4.648807347224584e-05, "loss": 0.018629759550094604, "step": 11960 }, { "epoch": 19.182692307692307, "grad_norm": 0.1889275163412094, "learning_rate": 4.646002288955389e-05, "loss": 0.020815901458263397, "step": 11970 }, { "epoch": 19.19871794871795, "grad_norm": 0.2342025190591812, "learning_rate": 4.643195170360267e-05, "loss": 0.026229670643806456, "step": 11980 }, { "epoch": 19.21474358974359, "grad_norm": 0.2938031852245331, "learning_rate": 4.640385994952929e-05, "loss": 0.02080608904361725, "step": 11990 }, { "epoch": 
19.23076923076923, "grad_norm": 0.3468257784843445, "learning_rate": 4.637574766249663e-05, "loss": 0.01990574300289154, "step": 12000 }, { "epoch": 19.246794871794872, "grad_norm": 0.21163998544216156, "learning_rate": 4.634761487769326e-05, "loss": 0.021202640235424043, "step": 12010 }, { "epoch": 19.262820512820515, "grad_norm": 0.2649792432785034, "learning_rate": 4.631946163033341e-05, "loss": 0.02075115293264389, "step": 12020 }, { "epoch": 19.278846153846153, "grad_norm": 0.22733163833618164, "learning_rate": 4.629128795565693e-05, "loss": 0.020252877473831178, "step": 12030 }, { "epoch": 19.294871794871796, "grad_norm": 0.2575659453868866, "learning_rate": 4.626309388892923e-05, "loss": 0.01833537220954895, "step": 12040 }, { "epoch": 19.310897435897434, "grad_norm": 0.2449290156364441, "learning_rate": 4.623487946544125e-05, "loss": 0.02119026631116867, "step": 12050 }, { "epoch": 19.326923076923077, "grad_norm": 0.24784059822559357, "learning_rate": 4.620664472050942e-05, "loss": 0.021312797069549562, "step": 12060 }, { "epoch": 19.34294871794872, "grad_norm": 0.28371208906173706, "learning_rate": 4.617838968947558e-05, "loss": 0.019728806614875794, "step": 12070 }, { "epoch": 19.358974358974358, "grad_norm": 0.19671297073364258, "learning_rate": 4.6150114407706985e-05, "loss": 0.018853692710399626, "step": 12080 }, { "epoch": 19.375, "grad_norm": 0.3938924968242645, "learning_rate": 4.6121818910596236e-05, "loss": 0.019833658635616303, "step": 12090 }, { "epoch": 19.391025641025642, "grad_norm": 0.14277730882167816, "learning_rate": 4.609350323356124e-05, "loss": 0.02033713459968567, "step": 12100 }, { "epoch": 19.40705128205128, "grad_norm": 0.2770462930202484, "learning_rate": 4.6065167412045144e-05, "loss": 0.02059420645236969, "step": 12110 }, { "epoch": 19.423076923076923, "grad_norm": 0.34451714158058167, "learning_rate": 4.603681148151634e-05, "loss": 0.01981956511735916, "step": 12120 }, { "epoch": 19.439102564102566, "grad_norm": 
0.3264870345592499, "learning_rate": 4.6008435477468346e-05, "loss": 0.019943416118621826, "step": 12130 }, { "epoch": 19.455128205128204, "grad_norm": 0.21726687252521515, "learning_rate": 4.5980039435419865e-05, "loss": 0.01973196119070053, "step": 12140 }, { "epoch": 19.471153846153847, "grad_norm": 0.15614013373851776, "learning_rate": 4.595162339091465e-05, "loss": 0.019850926101207735, "step": 12150 }, { "epoch": 19.487179487179485, "grad_norm": 0.18427351117134094, "learning_rate": 4.592318737952146e-05, "loss": 0.02109350115060806, "step": 12160 }, { "epoch": 19.503205128205128, "grad_norm": 0.11672289669513702, "learning_rate": 4.589473143683412e-05, "loss": 0.01791137009859085, "step": 12170 }, { "epoch": 19.51923076923077, "grad_norm": 0.1896849125623703, "learning_rate": 4.586625559847134e-05, "loss": 0.02043941617012024, "step": 12180 }, { "epoch": 19.53525641025641, "grad_norm": 0.27887460589408875, "learning_rate": 4.5837759900076764e-05, "loss": 0.021102418005466462, "step": 12190 }, { "epoch": 19.55128205128205, "grad_norm": 0.23531289398670197, "learning_rate": 4.580924437731889e-05, "loss": 0.02202337682247162, "step": 12200 }, { "epoch": 19.567307692307693, "grad_norm": 0.3214361071586609, "learning_rate": 4.578070906589103e-05, "loss": 0.020742881298065185, "step": 12210 }, { "epoch": 19.583333333333332, "grad_norm": 0.28248119354248047, "learning_rate": 4.575215400151125e-05, "loss": 0.024901629984378816, "step": 12220 }, { "epoch": 19.599358974358974, "grad_norm": 0.18560875952243805, "learning_rate": 4.572357921992238e-05, "loss": 0.024428682029247285, "step": 12230 }, { "epoch": 19.615384615384617, "grad_norm": 0.16295818984508514, "learning_rate": 4.569498475689189e-05, "loss": 0.018625980615615843, "step": 12240 }, { "epoch": 19.631410256410255, "grad_norm": 0.32757240533828735, "learning_rate": 4.566637064821189e-05, "loss": 0.01947982907295227, "step": 12250 }, { "epoch": 19.647435897435898, "grad_norm": 0.16526077687740326, 
"learning_rate": 4.563773692969912e-05, "loss": 0.020589616894721986, "step": 12260 }, { "epoch": 19.66346153846154, "grad_norm": 0.14954017102718353, "learning_rate": 4.560908363719481e-05, "loss": 0.022007776796817778, "step": 12270 }, { "epoch": 19.67948717948718, "grad_norm": 0.5219020843505859, "learning_rate": 4.5580410806564744e-05, "loss": 0.021977654099464415, "step": 12280 }, { "epoch": 19.69551282051282, "grad_norm": 0.2263311743736267, "learning_rate": 4.555171847369912e-05, "loss": 0.07453706264495849, "step": 12290 }, { "epoch": 19.71153846153846, "grad_norm": 0.42330285906791687, "learning_rate": 4.552300667451257e-05, "loss": 0.02022330164909363, "step": 12300 }, { "epoch": 19.727564102564102, "grad_norm": 0.3329327702522278, "learning_rate": 4.5494275444944084e-05, "loss": 0.02050307095050812, "step": 12310 }, { "epoch": 19.743589743589745, "grad_norm": 0.16214507818222046, "learning_rate": 4.546552482095698e-05, "loss": 0.020488479733467103, "step": 12320 }, { "epoch": 19.759615384615383, "grad_norm": 0.24149228632450104, "learning_rate": 4.543675483853885e-05, "loss": 0.019318124651908873, "step": 12330 }, { "epoch": 19.775641025641026, "grad_norm": 0.230012908577919, "learning_rate": 4.5407965533701515e-05, "loss": 0.021290904283523558, "step": 12340 }, { "epoch": 19.791666666666668, "grad_norm": 0.46470877528190613, "learning_rate": 4.537915694248098e-05, "loss": 0.020048537850379945, "step": 12350 }, { "epoch": 19.807692307692307, "grad_norm": 0.2553752362728119, "learning_rate": 4.5350329100937384e-05, "loss": 0.021188224852085113, "step": 12360 }, { "epoch": 19.82371794871795, "grad_norm": 0.26893356442451477, "learning_rate": 4.5321482045155e-05, "loss": 0.018612588942050933, "step": 12370 }, { "epoch": 19.83974358974359, "grad_norm": 0.2323511391878128, "learning_rate": 4.5292615811242094e-05, "loss": 0.018605923652648924, "step": 12380 }, { "epoch": 19.85576923076923, "grad_norm": 0.2298988252878189, "learning_rate": 
4.526373043533097e-05, "loss": 0.02025187313556671, "step": 12390 }, { "epoch": 19.871794871794872, "grad_norm": 0.20505259931087494, "learning_rate": 4.523482595357791e-05, "loss": 0.019995367527008055, "step": 12400 }, { "epoch": 19.88782051282051, "grad_norm": 0.22686871886253357, "learning_rate": 4.520590240216308e-05, "loss": 0.02082737982273102, "step": 12410 }, { "epoch": 19.903846153846153, "grad_norm": 0.17272251844406128, "learning_rate": 4.517695981729052e-05, "loss": 0.018464893102645874, "step": 12420 }, { "epoch": 19.919871794871796, "grad_norm": 0.20901983976364136, "learning_rate": 4.51479982351881e-05, "loss": 0.018945954740047455, "step": 12430 }, { "epoch": 19.935897435897434, "grad_norm": 0.2518330514431, "learning_rate": 4.511901769210748e-05, "loss": 0.019997033476829528, "step": 12440 }, { "epoch": 19.951923076923077, "grad_norm": 0.1768152266740799, "learning_rate": 4.509001822432404e-05, "loss": 0.017991378903388977, "step": 12450 }, { "epoch": 19.96794871794872, "grad_norm": 0.17320601642131805, "learning_rate": 4.5060999868136846e-05, "loss": 0.018997488915920256, "step": 12460 }, { "epoch": 19.983974358974358, "grad_norm": 0.17927853763103485, "learning_rate": 4.5031962659868625e-05, "loss": 0.019426614046096802, "step": 12470 }, { "epoch": 20.0, "grad_norm": 0.3162682354450226, "learning_rate": 4.500290663586567e-05, "loss": 0.020051108300685884, "step": 12480 }, { "epoch": 20.0, "eval_accuracy_background": NaN, "eval_accuracy_crop": 0.9945086815274605, "eval_iou_background": 0.0, "eval_iou_crop": 0.9945086815274605, "eval_loss": 0.021114204078912735, "eval_mean_accuracy": 0.9945086815274605, "eval_mean_iou": 0.49725434076373026, "eval_overall_accuracy": 0.9945086815274605, "eval_runtime": 37.3713, "eval_samples_per_second": 23.574, "eval_steps_per_second": 2.97, "step": 12480 }, { "epoch": 20.016025641025642, "grad_norm": 0.2012556493282318, "learning_rate": 4.4973831832497866e-05, "loss": 0.02055087387561798, "step": 12490 }, { 
"epoch": 20.03205128205128, "grad_norm": 0.15045031905174255, "learning_rate": 4.494473828615856e-05, "loss": 0.01915910392999649, "step": 12500 }, { "epoch": 20.048076923076923, "grad_norm": 0.2423938661813736, "learning_rate": 4.491562603326461e-05, "loss": 0.020449966192245483, "step": 12510 }, { "epoch": 20.064102564102566, "grad_norm": 0.2847105860710144, "learning_rate": 4.488649511025626e-05, "loss": 0.020989495515823364, "step": 12520 }, { "epoch": 20.080128205128204, "grad_norm": 0.13459670543670654, "learning_rate": 4.485734555359709e-05, "loss": 0.019011346995830535, "step": 12530 }, { "epoch": 20.096153846153847, "grad_norm": 0.13958367705345154, "learning_rate": 4.4828177399774075e-05, "loss": 0.017745546996593475, "step": 12540 }, { "epoch": 20.112179487179485, "grad_norm": 0.5769830942153931, "learning_rate": 4.4798990685297436e-05, "loss": 0.02156258821487427, "step": 12550 }, { "epoch": 20.128205128205128, "grad_norm": 0.23155926167964935, "learning_rate": 4.4769785446700604e-05, "loss": 0.02066882699728012, "step": 12560 }, { "epoch": 20.14423076923077, "grad_norm": 0.19524680078029633, "learning_rate": 4.4740561720540235e-05, "loss": 0.020382192730903626, "step": 12570 }, { "epoch": 20.16025641025641, "grad_norm": 0.1671256422996521, "learning_rate": 4.471131954339609e-05, "loss": 0.020238041877746582, "step": 12580 }, { "epoch": 20.17628205128205, "grad_norm": 0.2802197337150574, "learning_rate": 4.4682058951871064e-05, "loss": 0.02127375602722168, "step": 12590 }, { "epoch": 20.192307692307693, "grad_norm": 0.29078349471092224, "learning_rate": 4.465277998259107e-05, "loss": 0.02279140055179596, "step": 12600 }, { "epoch": 20.208333333333332, "grad_norm": 0.13353906571865082, "learning_rate": 4.462348267220504e-05, "loss": 0.02085784077644348, "step": 12610 }, { "epoch": 20.224358974358974, "grad_norm": 0.251830518245697, "learning_rate": 4.459416705738485e-05, "loss": 0.019313177466392516, "step": 12620 }, { "epoch": 20.240384615384617, 
"grad_norm": 0.1515018790960312, "learning_rate": 4.456483317482532e-05, "loss": 0.019486954808235167, "step": 12630 }, { "epoch": 20.256410256410255, "grad_norm": 0.2656257748603821, "learning_rate": 4.453548106124408e-05, "loss": 0.019967877864837648, "step": 12640 }, { "epoch": 20.272435897435898, "grad_norm": 0.2882594168186188, "learning_rate": 4.450611075338165e-05, "loss": 0.021988412737846373, "step": 12650 }, { "epoch": 20.28846153846154, "grad_norm": 0.23343148827552795, "learning_rate": 4.4476722288001266e-05, "loss": 0.02160133570432663, "step": 12660 }, { "epoch": 20.30448717948718, "grad_norm": 0.18844176828861237, "learning_rate": 4.444731570188891e-05, "loss": 0.01947326064109802, "step": 12670 }, { "epoch": 20.32051282051282, "grad_norm": 0.22692114114761353, "learning_rate": 4.441789103185327e-05, "loss": 0.020046834647655488, "step": 12680 }, { "epoch": 20.33653846153846, "grad_norm": 0.25728389620780945, "learning_rate": 4.438844831472565e-05, "loss": 0.019745376706123353, "step": 12690 }, { "epoch": 20.352564102564102, "grad_norm": 0.22363756597042084, "learning_rate": 4.435898758735992e-05, "loss": 0.018606433272361757, "step": 12700 }, { "epoch": 20.368589743589745, "grad_norm": 0.27292177081108093, "learning_rate": 4.432950888663253e-05, "loss": 0.02225317656993866, "step": 12710 }, { "epoch": 20.384615384615383, "grad_norm": 0.19328796863555908, "learning_rate": 4.430001224944242e-05, "loss": 0.019052274525165558, "step": 12720 }, { "epoch": 20.400641025641026, "grad_norm": 0.21141675114631653, "learning_rate": 4.427049771271097e-05, "loss": 0.01939515322446823, "step": 12730 }, { "epoch": 20.416666666666668, "grad_norm": 0.1359196901321411, "learning_rate": 4.424096531338198e-05, "loss": 0.019548816978931426, "step": 12740 }, { "epoch": 20.432692307692307, "grad_norm": 0.12817251682281494, "learning_rate": 4.421141508842158e-05, "loss": 0.019995583593845366, "step": 12750 }, { "epoch": 20.44871794871795, "grad_norm": 0.35380059480667114, 
"learning_rate": 4.418184707481826e-05, "loss": 0.020164337754249573, "step": 12760 }, { "epoch": 20.46474358974359, "grad_norm": 0.1468985229730606, "learning_rate": 4.415226130958273e-05, "loss": 0.02081005722284317, "step": 12770 }, { "epoch": 20.48076923076923, "grad_norm": 0.2163703888654709, "learning_rate": 4.4122657829747945e-05, "loss": 0.02169875204563141, "step": 12780 }, { "epoch": 20.496794871794872, "grad_norm": 0.1020355224609375, "learning_rate": 4.409303667236904e-05, "loss": 0.01980442404747009, "step": 12790 }, { "epoch": 20.51282051282051, "grad_norm": 0.24418091773986816, "learning_rate": 4.406339787452324e-05, "loss": 0.02183721661567688, "step": 12800 }, { "epoch": 20.528846153846153, "grad_norm": 0.14093652367591858, "learning_rate": 4.403374147330989e-05, "loss": 0.020429199934005736, "step": 12810 }, { "epoch": 20.544871794871796, "grad_norm": 0.4223717451095581, "learning_rate": 4.4004067505850364e-05, "loss": 0.023128336668014525, "step": 12820 }, { "epoch": 20.560897435897434, "grad_norm": 0.2516572177410126, "learning_rate": 4.3974376009288e-05, "loss": 0.020622837543487548, "step": 12830 }, { "epoch": 20.576923076923077, "grad_norm": 0.1995868682861328, "learning_rate": 4.3944667020788095e-05, "loss": 0.021128560602664947, "step": 12840 }, { "epoch": 20.59294871794872, "grad_norm": 0.2043762505054474, "learning_rate": 4.3914940577537846e-05, "loss": 0.02272690087556839, "step": 12850 }, { "epoch": 20.608974358974358, "grad_norm": 0.13064444065093994, "learning_rate": 4.388519671674629e-05, "loss": 0.018802683055400848, "step": 12860 }, { "epoch": 20.625, "grad_norm": 0.24233034253120422, "learning_rate": 4.3855435475644266e-05, "loss": 0.01757769286632538, "step": 12870 }, { "epoch": 20.641025641025642, "grad_norm": 0.32942742109298706, "learning_rate": 4.382565689148436e-05, "loss": 0.022016283869743348, "step": 12880 }, { "epoch": 20.65705128205128, "grad_norm": 0.12203536927700043, "learning_rate": 4.379586100154089e-05, "loss": 
0.01966577023267746, "step": 12890 }, { "epoch": 20.673076923076923, "grad_norm": 0.20064234733581543, "learning_rate": 4.3766047843109805e-05, "loss": 0.018832990527153017, "step": 12900 }, { "epoch": 20.689102564102566, "grad_norm": 0.19822809100151062, "learning_rate": 4.37362174535087e-05, "loss": 0.018135108053684235, "step": 12910 }, { "epoch": 20.705128205128204, "grad_norm": 0.2294996678829193, "learning_rate": 4.370636987007672e-05, "loss": 0.018735188245773315, "step": 12920 }, { "epoch": 20.721153846153847, "grad_norm": 0.2110728770494461, "learning_rate": 4.3676505130174545e-05, "loss": 0.020275780558586122, "step": 12930 }, { "epoch": 20.73717948717949, "grad_norm": 0.20592190325260162, "learning_rate": 4.36466232711843e-05, "loss": 0.01842232495546341, "step": 12940 }, { "epoch": 20.753205128205128, "grad_norm": 0.179015651345253, "learning_rate": 4.361672433050957e-05, "loss": 0.017498084902763368, "step": 12950 }, { "epoch": 20.76923076923077, "grad_norm": 0.24063749611377716, "learning_rate": 4.3586808345575324e-05, "loss": 0.02128181904554367, "step": 12960 }, { "epoch": 20.78525641025641, "grad_norm": 0.187387615442276, "learning_rate": 4.3556875353827835e-05, "loss": 0.018914663791656496, "step": 12970 }, { "epoch": 20.80128205128205, "grad_norm": 0.5925999879837036, "learning_rate": 4.352692539273467e-05, "loss": 0.021723976731300353, "step": 12980 }, { "epoch": 20.817307692307693, "grad_norm": 0.2220763862133026, "learning_rate": 4.349695849978468e-05, "loss": 0.020808309316635132, "step": 12990 }, { "epoch": 20.833333333333332, "grad_norm": 0.3333486020565033, "learning_rate": 4.346697471248786e-05, "loss": 0.019057418406009673, "step": 13000 }, { "epoch": 20.849358974358974, "grad_norm": 0.2940726578235626, "learning_rate": 4.3436974068375366e-05, "loss": 0.020002813637256624, "step": 13010 }, { "epoch": 20.865384615384617, "grad_norm": 0.17128261923789978, "learning_rate": 4.340695660499946e-05, "loss": 0.01909821629524231, "step": 13020 }, 
{ "epoch": 20.881410256410255, "grad_norm": 0.43673065304756165, "learning_rate": 4.337692235993346e-05, "loss": 0.021749889850616454, "step": 13030 }, { "epoch": 20.897435897435898, "grad_norm": 0.3127361238002777, "learning_rate": 4.334687137077168e-05, "loss": 0.01879466772079468, "step": 13040 }, { "epoch": 20.91346153846154, "grad_norm": 0.21073558926582336, "learning_rate": 4.3316803675129406e-05, "loss": 0.020410376787185668, "step": 13050 }, { "epoch": 20.92948717948718, "grad_norm": 0.14328700304031372, "learning_rate": 4.32867193106428e-05, "loss": 0.020307719707489014, "step": 13060 }, { "epoch": 20.94551282051282, "grad_norm": 0.2107803076505661, "learning_rate": 4.325661831496895e-05, "loss": 0.01889844238758087, "step": 13070 }, { "epoch": 20.96153846153846, "grad_norm": 0.2648489773273468, "learning_rate": 4.32265007257857e-05, "loss": 0.018178674578666686, "step": 13080 }, { "epoch": 20.977564102564102, "grad_norm": 0.273515909910202, "learning_rate": 4.319636658079171e-05, "loss": 0.02199030965566635, "step": 13090 }, { "epoch": 20.993589743589745, "grad_norm": 0.22841162979602814, "learning_rate": 4.316621591770633e-05, "loss": 0.019809776544570924, "step": 13100 }, { "epoch": 21.0, "eval_accuracy_background": NaN, "eval_accuracy_crop": 0.9938175634168034, "eval_iou_background": 0.0, "eval_iou_crop": 0.9938175634168034, "eval_loss": 0.020385313779115677, "eval_mean_accuracy": 0.9938175634168034, "eval_mean_iou": 0.4969087817084017, "eval_overall_accuracy": 0.9938175634168034, "eval_runtime": 33.6256, "eval_samples_per_second": 26.2, "eval_steps_per_second": 3.301, "step": 13104 }, { "epoch": 21.009615384615383, "grad_norm": 0.267903596162796, "learning_rate": 4.313604877426961e-05, "loss": 0.01990855634212494, "step": 13110 }, { "epoch": 21.025641025641026, "grad_norm": 0.171073779463768, "learning_rate": 4.3105865188242204e-05, "loss": 0.020437569916248323, "step": 13120 }, { "epoch": 21.041666666666668, "grad_norm": 0.2771551012992859, 
"learning_rate": 4.3075665197405395e-05, "loss": 0.01863721013069153, "step": 13130 }, { "epoch": 21.057692307692307, "grad_norm": 0.23258356750011444, "learning_rate": 4.3045448839560935e-05, "loss": 0.021201352775096893, "step": 13140 }, { "epoch": 21.07371794871795, "grad_norm": 0.2202278971672058, "learning_rate": 4.3015216152531116e-05, "loss": 0.018389877676963807, "step": 13150 }, { "epoch": 21.08974358974359, "grad_norm": 0.20170554518699646, "learning_rate": 4.298496717415863e-05, "loss": 0.018905219435691834, "step": 13160 }, { "epoch": 21.10576923076923, "grad_norm": 0.10718771815299988, "learning_rate": 4.2954701942306596e-05, "loss": 0.01891837567090988, "step": 13170 }, { "epoch": 21.121794871794872, "grad_norm": 0.2549036145210266, "learning_rate": 4.292442049485846e-05, "loss": 0.020578470826148988, "step": 13180 }, { "epoch": 21.137820512820515, "grad_norm": 0.19412223994731903, "learning_rate": 4.289412286971797e-05, "loss": 0.0190022811293602, "step": 13190 }, { "epoch": 21.153846153846153, "grad_norm": 0.2564097046852112, "learning_rate": 4.28638091048091e-05, "loss": 0.01962098330259323, "step": 13200 }, { "epoch": 21.169871794871796, "grad_norm": 0.25402212142944336, "learning_rate": 4.2833479238076064e-05, "loss": 0.019814261794090272, "step": 13210 }, { "epoch": 21.185897435897434, "grad_norm": 0.26771944761276245, "learning_rate": 4.280313330748322e-05, "loss": 0.020073577761650085, "step": 13220 }, { "epoch": 21.201923076923077, "grad_norm": 0.16304251551628113, "learning_rate": 4.277277135101502e-05, "loss": 0.01943199634552002, "step": 13230 }, { "epoch": 21.21794871794872, "grad_norm": 0.1637098789215088, "learning_rate": 4.274239340667599e-05, "loss": 0.01922507882118225, "step": 13240 }, { "epoch": 21.233974358974358, "grad_norm": 0.3198486566543579, "learning_rate": 4.271199951249065e-05, "loss": 0.01947050541639328, "step": 13250 }, { "epoch": 21.25, "grad_norm": 0.1413649320602417, "learning_rate": 4.268158970650349e-05, "loss": 
0.020983971655368805, "step": 13260 }, { "epoch": 21.266025641025642, "grad_norm": 0.2323025017976761, "learning_rate": 4.265116402677895e-05, "loss": 0.01975177079439163, "step": 13270 }, { "epoch": 21.28205128205128, "grad_norm": 0.1868942677974701, "learning_rate": 4.2620722511401295e-05, "loss": 0.020826463401317597, "step": 13280 }, { "epoch": 21.298076923076923, "grad_norm": 0.18894775211811066, "learning_rate": 4.2590265198474635e-05, "loss": 0.021212702989578246, "step": 13290 }, { "epoch": 21.314102564102566, "grad_norm": 0.16890569031238556, "learning_rate": 4.2559792126122843e-05, "loss": 0.01984250396490097, "step": 13300 }, { "epoch": 21.330128205128204, "grad_norm": 0.20023581385612488, "learning_rate": 4.252930333248953e-05, "loss": 0.022342468798160552, "step": 13310 }, { "epoch": 21.346153846153847, "grad_norm": 0.15978285670280457, "learning_rate": 4.249879885573798e-05, "loss": 0.01947198063135147, "step": 13320 }, { "epoch": 21.362179487179485, "grad_norm": 0.2920166254043579, "learning_rate": 4.2468278734051106e-05, "loss": 0.01984085440635681, "step": 13330 }, { "epoch": 21.378205128205128, "grad_norm": 0.323455810546875, "learning_rate": 4.24377430056314e-05, "loss": 0.019038306176662446, "step": 13340 }, { "epoch": 21.39423076923077, "grad_norm": 0.40585651993751526, "learning_rate": 4.24071917087009e-05, "loss": 0.019759251177310942, "step": 13350 }, { "epoch": 21.41025641025641, "grad_norm": 0.24629510939121246, "learning_rate": 4.2376624881501126e-05, "loss": 0.018901845812797545, "step": 13360 }, { "epoch": 21.42628205128205, "grad_norm": 0.1664123833179474, "learning_rate": 4.2346042562293036e-05, "loss": 0.019947101175785065, "step": 13370 }, { "epoch": 21.442307692307693, "grad_norm": 0.2547743022441864, "learning_rate": 4.231544478935698e-05, "loss": 0.020412224531173705, "step": 13380 }, { "epoch": 21.458333333333332, "grad_norm": 0.18273372948169708, "learning_rate": 4.2284831600992655e-05, "loss": 0.020429974794387816, "step": 
13390 }, { "epoch": 21.474358974358974, "grad_norm": 0.2642642855644226, "learning_rate": 4.225420303551904e-05, "loss": 0.020530101656913758, "step": 13400 }, { "epoch": 21.490384615384617, "grad_norm": 0.15917940437793732, "learning_rate": 4.2223559131274394e-05, "loss": 0.01909780204296112, "step": 13410 }, { "epoch": 21.506410256410255, "grad_norm": 0.5813198685646057, "learning_rate": 4.219289992661614e-05, "loss": 0.01838366538286209, "step": 13420 }, { "epoch": 21.522435897435898, "grad_norm": 0.17671896517276764, "learning_rate": 4.2162225459920875e-05, "loss": 0.027678591012954713, "step": 13430 }, { "epoch": 21.53846153846154, "grad_norm": 0.2283134162425995, "learning_rate": 4.2131535769584284e-05, "loss": 0.019569410383701323, "step": 13440 }, { "epoch": 21.55448717948718, "grad_norm": 0.2609897255897522, "learning_rate": 4.210083089402112e-05, "loss": 0.02038143128156662, "step": 13450 }, { "epoch": 21.57051282051282, "grad_norm": 0.19937387108802795, "learning_rate": 4.207011087166514e-05, "loss": 0.020251205563545226, "step": 13460 }, { "epoch": 21.58653846153846, "grad_norm": 0.21187132596969604, "learning_rate": 4.203937574096906e-05, "loss": 0.020161247253417967, "step": 13470 }, { "epoch": 21.602564102564102, "grad_norm": 0.3071881830692291, "learning_rate": 4.2008625540404504e-05, "loss": 0.02076636552810669, "step": 13480 }, { "epoch": 21.618589743589745, "grad_norm": 0.22974824905395508, "learning_rate": 4.197786030846197e-05, "loss": 0.01885763257741928, "step": 13490 }, { "epoch": 21.634615384615383, "grad_norm": 0.28531330823898315, "learning_rate": 4.194708008365077e-05, "loss": 0.019565841555595397, "step": 13500 }, { "epoch": 21.650641025641026, "grad_norm": 0.1616925150156021, "learning_rate": 4.191628490449895e-05, "loss": 0.019697439670562745, "step": 13510 }, { "epoch": 21.666666666666668, "grad_norm": 0.30430811643600464, "learning_rate": 4.188547480955332e-05, "loss": 0.019237801432609558, "step": 13520 }, { "epoch": 
21.682692307692307, "grad_norm": 0.33268818259239197, "learning_rate": 4.1854649837379345e-05, "loss": 0.01831871420145035, "step": 13530 }, { "epoch": 21.69871794871795, "grad_norm": 0.14555086195468903, "learning_rate": 4.182381002656109e-05, "loss": 0.018807320296764372, "step": 13540 }, { "epoch": 21.71474358974359, "grad_norm": 0.1720515638589859, "learning_rate": 4.179295541570123e-05, "loss": 0.02340574413537979, "step": 13550 }, { "epoch": 21.73076923076923, "grad_norm": 0.20911291241645813, "learning_rate": 4.176208604342095e-05, "loss": 0.018665726482868194, "step": 13560 }, { "epoch": 21.746794871794872, "grad_norm": 0.28284937143325806, "learning_rate": 4.1731201948359874e-05, "loss": 0.018356963992118835, "step": 13570 }, { "epoch": 21.76282051282051, "grad_norm": 0.1911778599023819, "learning_rate": 4.1700303169176135e-05, "loss": 0.019200792908668517, "step": 13580 }, { "epoch": 21.778846153846153, "grad_norm": 0.2843223810195923, "learning_rate": 4.166938974454618e-05, "loss": 0.021648438274860383, "step": 13590 }, { "epoch": 21.794871794871796, "grad_norm": 0.2930411994457245, "learning_rate": 4.163846171316482e-05, "loss": 0.020430997014045715, "step": 13600 }, { "epoch": 21.810897435897434, "grad_norm": 0.22492867708206177, "learning_rate": 4.160751911374512e-05, "loss": 0.020312802493572236, "step": 13610 }, { "epoch": 21.826923076923077, "grad_norm": 0.27271509170532227, "learning_rate": 4.1576561985018413e-05, "loss": 0.01956941783428192, "step": 13620 }, { "epoch": 21.84294871794872, "grad_norm": 0.18191371858119965, "learning_rate": 4.1545590365734214e-05, "loss": 0.019826699793338776, "step": 13630 }, { "epoch": 21.858974358974358, "grad_norm": 0.18989358842372894, "learning_rate": 4.151460429466015e-05, "loss": 0.022326257824897767, "step": 13640 }, { "epoch": 21.875, "grad_norm": 0.1311485916376114, "learning_rate": 4.148360381058195e-05, "loss": 0.018539603054523467, "step": 13650 }, { "epoch": 21.891025641025642, "grad_norm": 
0.172775998711586, "learning_rate": 4.1452588952303405e-05, "loss": 0.01976647675037384, "step": 13660 }, { "epoch": 21.90705128205128, "grad_norm": 0.25626641511917114, "learning_rate": 4.142155975864628e-05, "loss": 0.018487341701984406, "step": 13670 }, { "epoch": 21.923076923076923, "grad_norm": 0.43738728761672974, "learning_rate": 4.139051626845027e-05, "loss": 0.01908624768257141, "step": 13680 }, { "epoch": 21.939102564102566, "grad_norm": 0.15852341055870056, "learning_rate": 4.135945852057299e-05, "loss": 0.020242705941200256, "step": 13690 }, { "epoch": 21.955128205128204, "grad_norm": 0.15667416155338287, "learning_rate": 4.1328386553889905e-05, "loss": 0.01994757205247879, "step": 13700 }, { "epoch": 21.971153846153847, "grad_norm": 0.19941070675849915, "learning_rate": 4.129730040729423e-05, "loss": 0.01906275451183319, "step": 13710 }, { "epoch": 21.98717948717949, "grad_norm": 0.1894705593585968, "learning_rate": 4.1266200119697e-05, "loss": 0.020430848002433777, "step": 13720 }, { "epoch": 22.0, "eval_accuracy_background": NaN, "eval_accuracy_crop": 0.9933875583585097, "eval_iou_background": 0.0, "eval_iou_crop": 0.9933875583585097, "eval_loss": 0.020621322095394135, "eval_mean_accuracy": 0.9933875583585097, "eval_mean_iou": 0.49669377917925484, "eval_overall_accuracy": 0.9933875583585097, "eval_runtime": 36.1001, "eval_samples_per_second": 24.404, "eval_steps_per_second": 3.075, "step": 13728 }, { "epoch": 22.003205128205128, "grad_norm": 0.1603962630033493, "learning_rate": 4.1235085730026906e-05, "loss": 0.01928238570690155, "step": 13730 }, { "epoch": 22.01923076923077, "grad_norm": 0.2184651792049408, "learning_rate": 4.120395727723029e-05, "loss": 0.021621887385845185, "step": 13740 }, { "epoch": 22.03525641025641, "grad_norm": 0.2627745270729065, "learning_rate": 4.1172814800271116e-05, "loss": 0.019656090438365935, "step": 13750 }, { "epoch": 22.05128205128205, "grad_norm": 0.22442424297332764, "learning_rate": 4.114165833813088e-05, 
"loss": 0.020627415180206297, "step": 13760 }, { "epoch": 22.067307692307693, "grad_norm": 0.17564725875854492, "learning_rate": 4.1110487929808615e-05, "loss": 0.018934442102909087, "step": 13770 }, { "epoch": 22.083333333333332, "grad_norm": 0.18704740703105927, "learning_rate": 4.1079303614320775e-05, "loss": 0.01866065263748169, "step": 13780 }, { "epoch": 22.099358974358974, "grad_norm": 0.2043849378824234, "learning_rate": 4.104810543070124e-05, "loss": 0.018551397323608398, "step": 13790 }, { "epoch": 22.115384615384617, "grad_norm": 0.20424875617027283, "learning_rate": 4.101689341800126e-05, "loss": 0.01971668004989624, "step": 13800 }, { "epoch": 22.131410256410255, "grad_norm": 0.3443831205368042, "learning_rate": 4.0985667615289374e-05, "loss": 0.017584788799285888, "step": 13810 }, { "epoch": 22.147435897435898, "grad_norm": 0.18154911696910858, "learning_rate": 4.09544280616514e-05, "loss": 0.01976456493139267, "step": 13820 }, { "epoch": 22.16346153846154, "grad_norm": 0.299681693315506, "learning_rate": 4.092317479619035e-05, "loss": 0.019168442487716673, "step": 13830 }, { "epoch": 22.17948717948718, "grad_norm": 0.24057920277118683, "learning_rate": 4.0891907858026405e-05, "loss": 0.0204695999622345, "step": 13840 }, { "epoch": 22.19551282051282, "grad_norm": 0.16988718509674072, "learning_rate": 4.086062728629686e-05, "loss": 0.020178531110286713, "step": 13850 }, { "epoch": 22.21153846153846, "grad_norm": 0.16011229157447815, "learning_rate": 4.0829333120156105e-05, "loss": 0.019027228653430938, "step": 13860 }, { "epoch": 22.227564102564102, "grad_norm": 0.19061389565467834, "learning_rate": 4.07980253987755e-05, "loss": 0.017354580760002136, "step": 13870 }, { "epoch": 22.243589743589745, "grad_norm": 0.3095988631248474, "learning_rate": 4.0766704161343386e-05, "loss": 0.021051810681819917, "step": 13880 }, { "epoch": 22.259615384615383, "grad_norm": 0.20773190259933472, "learning_rate": 4.073536944706505e-05, "loss": 0.018614771962165832, 
"step": 13890 }, { "epoch": 22.275641025641026, "grad_norm": 0.21319279074668884, "learning_rate": 4.0704021295162616e-05, "loss": 0.021441462635993957, "step": 13900 }, { "epoch": 22.291666666666668, "grad_norm": 0.1618400663137436, "learning_rate": 4.0672659744875037e-05, "loss": 0.018567459285259248, "step": 13910 }, { "epoch": 22.307692307692307, "grad_norm": 0.1933199018239975, "learning_rate": 4.064128483545805e-05, "loss": 0.020343704521656035, "step": 13920 }, { "epoch": 22.32371794871795, "grad_norm": 0.21862180531024933, "learning_rate": 4.0609896606184083e-05, "loss": 0.018384261429309844, "step": 13930 }, { "epoch": 22.33974358974359, "grad_norm": 0.11272407323122025, "learning_rate": 4.057849509634227e-05, "loss": 0.019320540130138397, "step": 13940 }, { "epoch": 22.35576923076923, "grad_norm": 0.22985297441482544, "learning_rate": 4.0547080345238366e-05, "loss": 0.019348961114883424, "step": 13950 }, { "epoch": 22.371794871794872, "grad_norm": 0.18103434145450592, "learning_rate": 4.051565239219467e-05, "loss": 0.02075628787279129, "step": 13960 }, { "epoch": 22.387820512820515, "grad_norm": 0.36614349484443665, "learning_rate": 4.0484211276550034e-05, "loss": 0.02073187679052353, "step": 13970 }, { "epoch": 22.403846153846153, "grad_norm": 0.2361747920513153, "learning_rate": 4.045275703765978e-05, "loss": 0.018040987849235534, "step": 13980 }, { "epoch": 22.419871794871796, "grad_norm": 0.3148266077041626, "learning_rate": 4.042128971489564e-05, "loss": 0.018202365934848787, "step": 13990 }, { "epoch": 22.435897435897434, "grad_norm": 0.29524365067481995, "learning_rate": 4.0389809347645756e-05, "loss": 0.022849945724010466, "step": 14000 }, { "epoch": 22.451923076923077, "grad_norm": 0.18111714720726013, "learning_rate": 4.035831597531457e-05, "loss": 0.020996542274951936, "step": 14010 }, { "epoch": 22.46794871794872, "grad_norm": 0.22106577455997467, "learning_rate": 4.032680963732281e-05, "loss": 0.020024509727954866, "step": 14020 }, { "epoch": 
22.483974358974358, "grad_norm": 0.1745808720588684, "learning_rate": 4.0295290373107435e-05, "loss": 0.02106913775205612, "step": 14030 }, { "epoch": 22.5, "grad_norm": 0.1478717029094696, "learning_rate": 4.02637582221216e-05, "loss": 0.02122243046760559, "step": 14040 }, { "epoch": 22.516025641025642, "grad_norm": 0.1929275542497635, "learning_rate": 4.0232213223834555e-05, "loss": 0.021897953748703004, "step": 14050 }, { "epoch": 22.53205128205128, "grad_norm": 0.17659080028533936, "learning_rate": 4.020065541773166e-05, "loss": 0.022033543884754182, "step": 14060 }, { "epoch": 22.548076923076923, "grad_norm": 0.13583602011203766, "learning_rate": 4.016908484331428e-05, "loss": 0.018372702598571777, "step": 14070 }, { "epoch": 22.564102564102566, "grad_norm": 0.34897923469543457, "learning_rate": 4.0137501540099814e-05, "loss": 0.019510743021965028, "step": 14080 }, { "epoch": 22.580128205128204, "grad_norm": 0.3422403335571289, "learning_rate": 4.010590554762154e-05, "loss": 0.01910087317228317, "step": 14090 }, { "epoch": 22.596153846153847, "grad_norm": 0.2893635630607605, "learning_rate": 4.0074296905428624e-05, "loss": 0.020684905350208282, "step": 14100 }, { "epoch": 22.61217948717949, "grad_norm": 0.1275644600391388, "learning_rate": 4.0042675653086106e-05, "loss": 0.0178190678358078, "step": 14110 }, { "epoch": 22.628205128205128, "grad_norm": 0.1868293285369873, "learning_rate": 4.0011041830174775e-05, "loss": 0.01834036558866501, "step": 14120 }, { "epoch": 22.64423076923077, "grad_norm": 0.49527284502983093, "learning_rate": 3.997939547629117e-05, "loss": 0.020070874691009523, "step": 14130 }, { "epoch": 22.66025641025641, "grad_norm": 0.26819923520088196, "learning_rate": 3.99477366310475e-05, "loss": 0.019629020988941193, "step": 14140 }, { "epoch": 22.67628205128205, "grad_norm": 0.14916187524795532, "learning_rate": 3.991606533407164e-05, "loss": 0.01822882443666458, "step": 14150 }, { "epoch": 22.692307692307693, "grad_norm": 
0.16910937428474426, "learning_rate": 3.9884381625007e-05, "loss": 0.017585840821266175, "step": 14160 }, { "epoch": 22.708333333333332, "grad_norm": 0.20301951467990875, "learning_rate": 3.985268554351258e-05, "loss": 0.01928362101316452, "step": 14170 }, { "epoch": 22.724358974358974, "grad_norm": 0.22909867763519287, "learning_rate": 3.982097712926283e-05, "loss": 0.01990673243999481, "step": 14180 }, { "epoch": 22.740384615384617, "grad_norm": 0.2270403355360031, "learning_rate": 3.9789256421947675e-05, "loss": 0.017601516842842103, "step": 14190 }, { "epoch": 22.756410256410255, "grad_norm": 0.1270757019519806, "learning_rate": 3.975752346127238e-05, "loss": 0.017760446667671202, "step": 14200 }, { "epoch": 22.772435897435898, "grad_norm": 0.2982480227947235, "learning_rate": 3.9725778286957576e-05, "loss": 0.01906912475824356, "step": 14210 }, { "epoch": 22.78846153846154, "grad_norm": 0.17086666822433472, "learning_rate": 3.969402093873919e-05, "loss": 0.01819700449705124, "step": 14220 }, { "epoch": 22.80448717948718, "grad_norm": 0.39151689410209656, "learning_rate": 3.9662251456368356e-05, "loss": 0.02173793166875839, "step": 14230 }, { "epoch": 22.82051282051282, "grad_norm": 0.18633385002613068, "learning_rate": 3.963046987961142e-05, "loss": 0.019954121112823485, "step": 14240 }, { "epoch": 22.83653846153846, "grad_norm": 0.46892809867858887, "learning_rate": 3.9598676248249865e-05, "loss": 0.0196262463927269, "step": 14250 }, { "epoch": 22.852564102564102, "grad_norm": 0.23675550520420074, "learning_rate": 3.956687060208027e-05, "loss": 0.019771480560302736, "step": 14260 }, { "epoch": 22.868589743589745, "grad_norm": 0.90940260887146, "learning_rate": 3.9535052980914225e-05, "loss": 0.01900480091571808, "step": 14270 }, { "epoch": 22.884615384615383, "grad_norm": 0.24766959249973297, "learning_rate": 3.9503223424578336e-05, "loss": 0.018568164110183714, "step": 14280 }, { "epoch": 22.900641025641026, "grad_norm": 0.2644439935684204, "learning_rate": 
3.947138197291414e-05, "loss": 0.017356520891189574, "step": 14290 }, { "epoch": 22.916666666666668, "grad_norm": 0.2220337688922882, "learning_rate": 3.943952866577806e-05, "loss": 0.01936940848827362, "step": 14300 }, { "epoch": 22.932692307692307, "grad_norm": 0.4620276689529419, "learning_rate": 3.9407663543041365e-05, "loss": 0.018952606618404387, "step": 14310 }, { "epoch": 22.94871794871795, "grad_norm": 0.19378624856472015, "learning_rate": 3.937578664459009e-05, "loss": 0.018753939867019655, "step": 14320 }, { "epoch": 22.96474358974359, "grad_norm": 0.2014065831899643, "learning_rate": 3.934389801032507e-05, "loss": 0.016693435609340668, "step": 14330 }, { "epoch": 22.98076923076923, "grad_norm": 0.16641122102737427, "learning_rate": 3.931199768016177e-05, "loss": 0.019278217852115632, "step": 14340 }, { "epoch": 22.996794871794872, "grad_norm": 0.29802483320236206, "learning_rate": 3.9280085694030305e-05, "loss": 0.01923818439245224, "step": 14350 }, { "epoch": 23.0, "eval_accuracy_background": NaN, "eval_accuracy_crop": 0.993094801866987, "eval_iou_background": 0.0, "eval_iou_crop": 0.993094801866987, "eval_loss": 0.020068027079105377, "eval_mean_accuracy": 0.993094801866987, "eval_mean_iou": 0.4965474009334935, "eval_overall_accuracy": 0.993094801866987, "eval_runtime": 36.1283, "eval_samples_per_second": 24.385, "eval_steps_per_second": 3.072, "step": 14352 }, { "epoch": 23.012820512820515, "grad_norm": 0.2995382249355316, "learning_rate": 3.92481620918754e-05, "loss": 0.019657950103282928, "step": 14360 }, { "epoch": 23.028846153846153, "grad_norm": 0.2757332921028137, "learning_rate": 3.92162269136563e-05, "loss": 0.018961216509342193, "step": 14370 }, { "epoch": 23.044871794871796, "grad_norm": 0.3039601147174835, "learning_rate": 3.918428019934677e-05, "loss": 0.019834233820438384, "step": 14380 }, { "epoch": 23.060897435897434, "grad_norm": 0.194780632853508, "learning_rate": 3.915232198893498e-05, "loss": 0.01919860243797302, "step": 14390 }, { 
"epoch": 23.076923076923077, "grad_norm": 0.22554276883602142, "learning_rate": 3.912035232242351e-05, "loss": 0.019492052495479584, "step": 14400 }, { "epoch": 23.09294871794872, "grad_norm": 0.15972156822681427, "learning_rate": 3.908837123982927e-05, "loss": 0.017753221094608307, "step": 14410 }, { "epoch": 23.108974358974358, "grad_norm": 0.5075754523277283, "learning_rate": 3.9056378781183475e-05, "loss": 0.018415045738220216, "step": 14420 }, { "epoch": 23.125, "grad_norm": 0.27189725637435913, "learning_rate": 3.902437498653155e-05, "loss": 0.019403353333473206, "step": 14430 }, { "epoch": 23.141025641025642, "grad_norm": 0.1467963010072708, "learning_rate": 3.8992359895933146e-05, "loss": 0.017160786688327788, "step": 14440 }, { "epoch": 23.15705128205128, "grad_norm": 0.26625967025756836, "learning_rate": 3.8960333549462023e-05, "loss": 0.019889238476753234, "step": 14450 }, { "epoch": 23.173076923076923, "grad_norm": 0.24969248473644257, "learning_rate": 3.892829598720605e-05, "loss": 0.02231374979019165, "step": 14460 }, { "epoch": 23.189102564102566, "grad_norm": 0.31410863995552063, "learning_rate": 3.889624724926713e-05, "loss": 0.018841855227947235, "step": 14470 }, { "epoch": 23.205128205128204, "grad_norm": 0.22860413789749146, "learning_rate": 3.8864187375761146e-05, "loss": 0.01967746913433075, "step": 14480 }, { "epoch": 23.221153846153847, "grad_norm": 0.27477866411209106, "learning_rate": 3.8832116406817925e-05, "loss": 0.019077427685260773, "step": 14490 }, { "epoch": 23.237179487179485, "grad_norm": 0.255140483379364, "learning_rate": 3.8800034382581195e-05, "loss": 0.01866084635257721, "step": 14500 }, { "epoch": 23.253205128205128, "grad_norm": 0.22838640213012695, "learning_rate": 3.876794134320849e-05, "loss": 0.018421326577663422, "step": 14510 }, { "epoch": 23.26923076923077, "grad_norm": 0.20836971700191498, "learning_rate": 3.8735837328871164e-05, "loss": 0.02053740918636322, "step": 14520 }, { "epoch": 23.28525641025641, 
"grad_norm": 0.2750045955181122, "learning_rate": 3.870372237975431e-05, "loss": 0.020710256695747376, "step": 14530 }, { "epoch": 23.30128205128205, "grad_norm": 0.19870932400226593, "learning_rate": 3.867159653605668e-05, "loss": 0.019052860140800477, "step": 14540 }, { "epoch": 23.317307692307693, "grad_norm": 0.190328910946846, "learning_rate": 3.863945983799066e-05, "loss": 0.018334974348545075, "step": 14550 }, { "epoch": 23.333333333333332, "grad_norm": 0.291903555393219, "learning_rate": 3.860731232578227e-05, "loss": 0.019267000257968903, "step": 14560 }, { "epoch": 23.349358974358974, "grad_norm": 0.27889561653137207, "learning_rate": 3.8575154039671036e-05, "loss": 0.017804637551307678, "step": 14570 }, { "epoch": 23.365384615384617, "grad_norm": 0.22380104660987854, "learning_rate": 3.854298501990995e-05, "loss": 0.01788998395204544, "step": 14580 }, { "epoch": 23.381410256410255, "grad_norm": 0.12400069087743759, "learning_rate": 3.8510805306765475e-05, "loss": 0.01820443421602249, "step": 14590 }, { "epoch": 23.397435897435898, "grad_norm": 0.15995730459690094, "learning_rate": 3.847861494051744e-05, "loss": 0.01945657134056091, "step": 14600 }, { "epoch": 23.41346153846154, "grad_norm": 0.26496338844299316, "learning_rate": 3.844641396145901e-05, "loss": 0.018195393681526183, "step": 14610 }, { "epoch": 23.42948717948718, "grad_norm": 0.20483070611953735, "learning_rate": 3.841420240989664e-05, "loss": 0.020259855687618254, "step": 14620 }, { "epoch": 23.44551282051282, "grad_norm": 0.2666868269443512, "learning_rate": 3.838198032615001e-05, "loss": 0.019172832369804382, "step": 14630 }, { "epoch": 23.46153846153846, "grad_norm": 0.21362754702568054, "learning_rate": 3.8349747750552e-05, "loss": 0.020908764004707335, "step": 14640 }, { "epoch": 23.477564102564102, "grad_norm": 0.4054538309574127, "learning_rate": 3.831750472344861e-05, "loss": 0.018270331621170043, "step": 14650 }, { "epoch": 23.493589743589745, "grad_norm": 0.16634970903396606, 
"learning_rate": 3.828525128519891e-05, "loss": 0.020080511271953583, "step": 14660 }, { "epoch": 23.509615384615383, "grad_norm": 0.23914925754070282, "learning_rate": 3.8252987476175034e-05, "loss": 0.019137150049209593, "step": 14670 }, { "epoch": 23.525641025641026, "grad_norm": 0.27957239747047424, "learning_rate": 3.8220713336762065e-05, "loss": 0.022109146416187286, "step": 14680 }, { "epoch": 23.541666666666668, "grad_norm": 0.30716174840927124, "learning_rate": 3.8188428907358036e-05, "loss": 0.019192996621131896, "step": 14690 }, { "epoch": 23.557692307692307, "grad_norm": 0.1956031769514084, "learning_rate": 3.815613422837385e-05, "loss": 0.019721391797065734, "step": 14700 }, { "epoch": 23.57371794871795, "grad_norm": 0.17833100259304047, "learning_rate": 3.812382934023327e-05, "loss": 0.019487805664539337, "step": 14710 }, { "epoch": 23.58974358974359, "grad_norm": 0.21493379771709442, "learning_rate": 3.8091514283372776e-05, "loss": 0.023573583364486693, "step": 14720 }, { "epoch": 23.60576923076923, "grad_norm": 0.2709469199180603, "learning_rate": 3.805918909824163e-05, "loss": 0.01987534314393997, "step": 14730 }, { "epoch": 23.621794871794872, "grad_norm": 0.2443452924489975, "learning_rate": 3.802685382530176e-05, "loss": 0.020044268667697908, "step": 14740 }, { "epoch": 23.63782051282051, "grad_norm": 0.4415625333786011, "learning_rate": 3.79945085050277e-05, "loss": 0.02128506749868393, "step": 14750 }, { "epoch": 23.653846153846153, "grad_norm": 0.32751327753067017, "learning_rate": 3.7962153177906604e-05, "loss": 0.020609836280345916, "step": 14760 }, { "epoch": 23.669871794871796, "grad_norm": 0.13016824424266815, "learning_rate": 3.792978788443808e-05, "loss": 0.017289696633815764, "step": 14770 }, { "epoch": 23.685897435897434, "grad_norm": 0.1616329401731491, "learning_rate": 3.7897412665134296e-05, "loss": 0.017331838607788086, "step": 14780 }, { "epoch": 23.701923076923077, "grad_norm": 0.13377420604228973, "learning_rate": 
3.786502756051978e-05, "loss": 0.0188603475689888, "step": 14790 }, { "epoch": 23.71794871794872, "grad_norm": 0.2854633629322052, "learning_rate": 3.783263261113147e-05, "loss": 0.02164616584777832, "step": 14800 }, { "epoch": 23.733974358974358, "grad_norm": 0.3314487338066101, "learning_rate": 3.7800227857518603e-05, "loss": 0.019945281744003295, "step": 14810 }, { "epoch": 23.75, "grad_norm": 0.2290264219045639, "learning_rate": 3.77678133402427e-05, "loss": 0.020112329721450807, "step": 14820 }, { "epoch": 23.766025641025642, "grad_norm": 0.13024720549583435, "learning_rate": 3.77353890998775e-05, "loss": 0.019747275114059448, "step": 14830 }, { "epoch": 23.78205128205128, "grad_norm": 0.1878332644701004, "learning_rate": 3.7702955177008925e-05, "loss": 0.020870378613471983, "step": 14840 }, { "epoch": 23.798076923076923, "grad_norm": 0.2033655345439911, "learning_rate": 3.767051161223501e-05, "loss": 0.01920759230852127, "step": 14850 }, { "epoch": 23.814102564102566, "grad_norm": 0.21703305840492249, "learning_rate": 3.763805844616583e-05, "loss": 0.02061176300048828, "step": 14860 }, { "epoch": 23.830128205128204, "grad_norm": 0.18274231255054474, "learning_rate": 3.760559571942352e-05, "loss": 0.01780225932598114, "step": 14870 }, { "epoch": 23.846153846153847, "grad_norm": 0.14526008069515228, "learning_rate": 3.757312347264217e-05, "loss": 0.018617327511310577, "step": 14880 }, { "epoch": 23.86217948717949, "grad_norm": 0.28804004192352295, "learning_rate": 3.754064174646778e-05, "loss": 0.01892688572406769, "step": 14890 }, { "epoch": 23.878205128205128, "grad_norm": 0.2636895775794983, "learning_rate": 3.75081505815582e-05, "loss": 0.017559628188610076, "step": 14900 }, { "epoch": 23.89423076923077, "grad_norm": 0.3134286105632782, "learning_rate": 3.7475650018583125e-05, "loss": 0.017634546756744383, "step": 14910 }, { "epoch": 23.91025641025641, "grad_norm": 0.14592482149600983, "learning_rate": 3.744314009822401e-05, "loss": 0.01645429730415344, 
"step": 14920 }, { "epoch": 23.92628205128205, "grad_norm": 0.31718966364860535, "learning_rate": 3.7410620861174e-05, "loss": 0.019749510288238525, "step": 14930 }, { "epoch": 23.942307692307693, "grad_norm": 0.15980708599090576, "learning_rate": 3.737809234813791e-05, "loss": 0.018121883273124695, "step": 14940 }, { "epoch": 23.958333333333332, "grad_norm": 0.2179875671863556, "learning_rate": 3.7345554599832193e-05, "loss": 0.017582346498966218, "step": 14950 }, { "epoch": 23.974358974358974, "grad_norm": 0.35942304134368896, "learning_rate": 3.731300765698482e-05, "loss": 0.017594431340694428, "step": 14960 }, { "epoch": 23.990384615384617, "grad_norm": 0.31801238656044006, "learning_rate": 3.7280451560335296e-05, "loss": 0.020060564577579498, "step": 14970 }, { "epoch": 24.0, "eval_accuracy_background": NaN, "eval_accuracy_crop": 0.9946048959265277, "eval_iou_background": 0.0, "eval_iou_crop": 0.9946048959265277, "eval_loss": 0.02021464891731739, "eval_mean_accuracy": 0.9946048959265277, "eval_mean_iou": 0.49730244796326384, "eval_overall_accuracy": 0.9946048959265277, "eval_runtime": 35.6985, "eval_samples_per_second": 24.679, "eval_steps_per_second": 3.109, "step": 14976 }, { "epoch": 24.006410256410255, "grad_norm": 0.14806737005710602, "learning_rate": 3.724788635063459e-05, "loss": 0.01889844089746475, "step": 14980 }, { "epoch": 24.022435897435898, "grad_norm": 0.2304924726486206, "learning_rate": 3.721531206864505e-05, "loss": 0.020623423159122467, "step": 14990 }, { "epoch": 24.03846153846154, "grad_norm": 0.30916061997413635, "learning_rate": 3.7182728755140405e-05, "loss": 0.018977370858192445, "step": 15000 }, { "epoch": 24.05448717948718, "grad_norm": 0.21097788214683533, "learning_rate": 3.715013645090568e-05, "loss": 0.018667595088481904, "step": 15010 }, { "epoch": 24.07051282051282, "grad_norm": 0.24656760692596436, "learning_rate": 3.711753519673716e-05, "loss": 0.018757225573062898, "step": 15020 }, { "epoch": 24.08653846153846, "grad_norm": 
0.19745862483978271, "learning_rate": 3.708492503344232e-05, "loss": 0.019077688455581665, "step": 15030 }, { "epoch": 24.102564102564102, "grad_norm": 0.179739847779274, "learning_rate": 3.70523060018398e-05, "loss": 0.01716800034046173, "step": 15040 }, { "epoch": 24.118589743589745, "grad_norm": 0.16006341576576233, "learning_rate": 3.701967814275931e-05, "loss": 0.0177186518907547, "step": 15050 }, { "epoch": 24.134615384615383, "grad_norm": 0.0978117287158966, "learning_rate": 3.698704149704167e-05, "loss": 0.016495250165462494, "step": 15060 }, { "epoch": 24.150641025641026, "grad_norm": 0.11445751041173935, "learning_rate": 3.6954396105538654e-05, "loss": 0.01653723120689392, "step": 15070 }, { "epoch": 24.166666666666668, "grad_norm": 0.23772530257701874, "learning_rate": 3.692174200911298e-05, "loss": 0.017874538898468018, "step": 15080 }, { "epoch": 24.182692307692307, "grad_norm": 0.20969147980213165, "learning_rate": 3.688907924863828e-05, "loss": 0.01952790915966034, "step": 15090 }, { "epoch": 24.19871794871795, "grad_norm": 0.21133127808570862, "learning_rate": 3.685640786499902e-05, "loss": 0.02221774160861969, "step": 15100 }, { "epoch": 24.21474358974359, "grad_norm": 0.22195099294185638, "learning_rate": 3.682372789909046e-05, "loss": 0.01861831247806549, "step": 15110 }, { "epoch": 24.23076923076923, "grad_norm": 0.6207820773124695, "learning_rate": 3.679103939181863e-05, "loss": 0.020012585818767546, "step": 15120 }, { "epoch": 24.246794871794872, "grad_norm": 0.16924293339252472, "learning_rate": 3.675834238410021e-05, "loss": 0.018750929832458497, "step": 15130 }, { "epoch": 24.262820512820515, "grad_norm": 0.4020729959011078, "learning_rate": 3.672563691686253e-05, "loss": 0.019597281515598298, "step": 15140 }, { "epoch": 24.278846153846153, "grad_norm": 0.35258713364601135, "learning_rate": 3.669292303104353e-05, "loss": 0.022041317820549012, "step": 15150 }, { "epoch": 24.294871794871796, "grad_norm": 0.252005934715271, "learning_rate": 
3.6660200767591684e-05, "loss": 0.018588359653949737, "step": 15160 }, { "epoch": 24.310897435897434, "grad_norm": 0.19117672741413116, "learning_rate": 3.662747016746593e-05, "loss": 0.020121031999588014, "step": 15170 }, { "epoch": 24.326923076923077, "grad_norm": 0.27162230014801025, "learning_rate": 3.659473127163566e-05, "loss": 0.019743911921977997, "step": 15180 }, { "epoch": 24.34294871794872, "grad_norm": 0.1920691579580307, "learning_rate": 3.656198412108063e-05, "loss": 0.01989102065563202, "step": 15190 }, { "epoch": 24.358974358974358, "grad_norm": 0.6647878885269165, "learning_rate": 3.652922875679097e-05, "loss": 0.02017594575881958, "step": 15200 }, { "epoch": 24.375, "grad_norm": 0.3601755201816559, "learning_rate": 3.6496465219767045e-05, "loss": 0.018940435349941255, "step": 15210 }, { "epoch": 24.391025641025642, "grad_norm": 0.3904325067996979, "learning_rate": 3.646369355101947e-05, "loss": 0.018550331890583038, "step": 15220 }, { "epoch": 24.40705128205128, "grad_norm": 0.15530681610107422, "learning_rate": 3.6430913791569046e-05, "loss": 0.01841254234313965, "step": 15230 }, { "epoch": 24.423076923076923, "grad_norm": 0.3251691460609436, "learning_rate": 3.639812598244669e-05, "loss": 0.02285102605819702, "step": 15240 }, { "epoch": 24.439102564102566, "grad_norm": 0.19934895634651184, "learning_rate": 3.636533016469339e-05, "loss": 0.019243350625038146, "step": 15250 }, { "epoch": 24.455128205128204, "grad_norm": 0.2537548542022705, "learning_rate": 3.6332526379360174e-05, "loss": 0.019117164611816406, "step": 15260 }, { "epoch": 24.471153846153847, "grad_norm": 0.43666207790374756, "learning_rate": 3.629971466750804e-05, "loss": 0.01749418079853058, "step": 15270 }, { "epoch": 24.487179487179485, "grad_norm": 0.1722550243139267, "learning_rate": 3.626689507020789e-05, "loss": 0.01986824721097946, "step": 15280 }, { "epoch": 24.503205128205128, "grad_norm": 0.13659906387329102, "learning_rate": 3.623406762854052e-05, "loss": 
0.01803697943687439, "step": 15290 }, { "epoch": 24.51923076923077, "grad_norm": 0.26960673928260803, "learning_rate": 3.6201232383596545e-05, "loss": 0.019209080934524538, "step": 15300 }, { "epoch": 24.53525641025641, "grad_norm": 0.18678730726242065, "learning_rate": 3.616838937647631e-05, "loss": 0.018848252296447755, "step": 15310 }, { "epoch": 24.55128205128205, "grad_norm": 0.20644746720790863, "learning_rate": 3.6135538648289924e-05, "loss": 0.020153538882732393, "step": 15320 }, { "epoch": 24.567307692307693, "grad_norm": 0.2550983428955078, "learning_rate": 3.610268024015713e-05, "loss": 0.018184922635555267, "step": 15330 }, { "epoch": 24.583333333333332, "grad_norm": 0.212595596909523, "learning_rate": 3.60698141932073e-05, "loss": 0.021800492703914643, "step": 15340 }, { "epoch": 24.599358974358974, "grad_norm": 0.21287645399570465, "learning_rate": 3.603694054857935e-05, "loss": 0.0211460143327713, "step": 15350 }, { "epoch": 24.615384615384617, "grad_norm": 0.263022243976593, "learning_rate": 3.600405934742173e-05, "loss": 0.01911228895187378, "step": 15360 }, { "epoch": 24.631410256410255, "grad_norm": 0.3004712164402008, "learning_rate": 3.5971170630892335e-05, "loss": 0.018035025894641878, "step": 15370 }, { "epoch": 24.647435897435898, "grad_norm": 0.26106998324394226, "learning_rate": 3.5938274440158465e-05, "loss": 0.0195771187543869, "step": 15380 }, { "epoch": 24.66346153846154, "grad_norm": 0.2818475663661957, "learning_rate": 3.590537081639678e-05, "loss": 0.022684855759143828, "step": 15390 }, { "epoch": 24.67948717948718, "grad_norm": 0.2091861367225647, "learning_rate": 3.587245980079324e-05, "loss": 0.017623940110206605, "step": 15400 }, { "epoch": 24.69551282051282, "grad_norm": 0.15995186567306519, "learning_rate": 3.583954143454306e-05, "loss": 0.016950365900993348, "step": 15410 }, { "epoch": 24.71153846153846, "grad_norm": 0.2445443719625473, "learning_rate": 3.5806615758850656e-05, "loss": 0.018632400035858154, "step": 15420 }, { 
"epoch": 24.727564102564102, "grad_norm": 0.16126108169555664, "learning_rate": 3.577368281492961e-05, "loss": 0.02036944478750229, "step": 15430 }, { "epoch": 24.743589743589745, "grad_norm": 0.1252046525478363, "learning_rate": 3.574074264400256e-05, "loss": 0.017820917069911957, "step": 15440 }, { "epoch": 24.759615384615383, "grad_norm": 0.24030688405036926, "learning_rate": 3.570779528730123e-05, "loss": 0.017967931926250458, "step": 15450 }, { "epoch": 24.775641025641026, "grad_norm": 0.23811683058738708, "learning_rate": 3.5674840786066316e-05, "loss": 0.01841566860675812, "step": 15460 }, { "epoch": 24.791666666666668, "grad_norm": 0.12228704243898392, "learning_rate": 3.564187918154748e-05, "loss": 0.01655772626399994, "step": 15470 }, { "epoch": 24.807692307692307, "grad_norm": 0.33826643228530884, "learning_rate": 3.560891051500325e-05, "loss": 0.022262121737003326, "step": 15480 }, { "epoch": 24.82371794871795, "grad_norm": 0.15486231446266174, "learning_rate": 3.5575934827701005e-05, "loss": 0.01853054016828537, "step": 15490 }, { "epoch": 24.83974358974359, "grad_norm": 0.14539837837219238, "learning_rate": 3.55429521609169e-05, "loss": 0.01733175814151764, "step": 15500 }, { "epoch": 24.85576923076923, "grad_norm": 0.11349481344223022, "learning_rate": 3.550996255593585e-05, "loss": 0.01947209984064102, "step": 15510 }, { "epoch": 24.871794871794872, "grad_norm": 0.11592088639736176, "learning_rate": 3.547696605405144e-05, "loss": 0.019976770877838133, "step": 15520 }, { "epoch": 24.88782051282051, "grad_norm": 0.1366250067949295, "learning_rate": 3.54439626965659e-05, "loss": 0.01982332468032837, "step": 15530 }, { "epoch": 24.903846153846153, "grad_norm": 0.26332783699035645, "learning_rate": 3.541095252479001e-05, "loss": 0.019325284659862517, "step": 15540 }, { "epoch": 24.919871794871796, "grad_norm": 0.28937360644340515, "learning_rate": 3.537793558004312e-05, "loss": 0.018403801321983337, "step": 15550 }, { "epoch": 24.935897435897434, 
"grad_norm": 0.24987934529781342, "learning_rate": 3.534491190365301e-05, "loss": 0.01824631690979004, "step": 15560 }, { "epoch": 24.951923076923077, "grad_norm": 0.2229534387588501, "learning_rate": 3.531188153695596e-05, "loss": 0.017522181570529937, "step": 15570 }, { "epoch": 24.96794871794872, "grad_norm": 0.19021247327327728, "learning_rate": 3.527884452129654e-05, "loss": 0.017430993914604186, "step": 15580 }, { "epoch": 24.983974358974358, "grad_norm": 0.24284464120864868, "learning_rate": 3.5245800898027686e-05, "loss": 0.018448805809020995, "step": 15590 }, { "epoch": 25.0, "grad_norm": 0.2781374454498291, "learning_rate": 3.521275070851062e-05, "loss": 0.01919686496257782, "step": 15600 }, { "epoch": 25.0, "eval_accuracy_background": NaN, "eval_accuracy_crop": 0.9942711052673012, "eval_iou_background": 0.0, "eval_iou_crop": 0.9942711052673012, "eval_loss": 0.02018306963145733, "eval_mean_accuracy": 0.9942711052673012, "eval_mean_iou": 0.4971355526336506, "eval_overall_accuracy": 0.9942711052673012, "eval_runtime": 36.4437, "eval_samples_per_second": 24.174, "eval_steps_per_second": 3.046, "step": 15600 }, { "epoch": 25.016025641025642, "grad_norm": 0.2534312605857849, "learning_rate": 3.517969399411475e-05, "loss": 0.023579318821430207, "step": 15610 }, { "epoch": 25.03205128205128, "grad_norm": 0.11608075350522995, "learning_rate": 3.514663079621767e-05, "loss": 0.018984632194042207, "step": 15620 }, { "epoch": 25.048076923076923, "grad_norm": 0.2902360260486603, "learning_rate": 3.5113561156205075e-05, "loss": 0.020681132376194, "step": 15630 }, { "epoch": 25.064102564102566, "grad_norm": 0.19576431810855865, "learning_rate": 3.508048511547073e-05, "loss": 0.01776811182498932, "step": 15640 }, { "epoch": 25.080128205128204, "grad_norm": 0.24080900847911835, "learning_rate": 3.5047402715416446e-05, "loss": 0.0185427263379097, "step": 15650 }, { "epoch": 25.096153846153847, "grad_norm": 0.2039513885974884, "learning_rate": 3.5014313997451935e-05, 
"loss": 0.01776785999536514, "step": 15660 }, { "epoch": 25.112179487179485, "grad_norm": 0.1810627430677414, "learning_rate": 3.4981219002994846e-05, "loss": 0.01846725046634674, "step": 15670 }, { "epoch": 25.128205128205128, "grad_norm": 0.1586228758096695, "learning_rate": 3.494811777347068e-05, "loss": 0.017729972302913666, "step": 15680 }, { "epoch": 25.14423076923077, "grad_norm": 0.22409388422966003, "learning_rate": 3.491501035031277e-05, "loss": 0.01797963082790375, "step": 15690 }, { "epoch": 25.16025641025641, "grad_norm": 0.2836964428424835, "learning_rate": 3.488189677496216e-05, "loss": 0.01871919333934784, "step": 15700 }, { "epoch": 25.17628205128205, "grad_norm": 0.3083605468273163, "learning_rate": 3.484877708886761e-05, "loss": 0.029295462369918823, "step": 15710 }, { "epoch": 25.192307692307693, "grad_norm": 0.16620995104312897, "learning_rate": 3.481565133348554e-05, "loss": 0.018886157870292665, "step": 15720 }, { "epoch": 25.208333333333332, "grad_norm": 0.1645193248987198, "learning_rate": 3.478251955027994e-05, "loss": 0.01761297434568405, "step": 15730 }, { "epoch": 25.224358974358974, "grad_norm": 0.2028815895318985, "learning_rate": 3.474938178072239e-05, "loss": 0.016765668988227844, "step": 15740 }, { "epoch": 25.240384615384617, "grad_norm": 0.16608351469039917, "learning_rate": 3.4716238066291924e-05, "loss": 0.017553749680519103, "step": 15750 }, { "epoch": 25.256410256410255, "grad_norm": 0.21050165593624115, "learning_rate": 3.468308844847503e-05, "loss": 0.01723504364490509, "step": 15760 }, { "epoch": 25.272435897435898, "grad_norm": 0.18383078277111053, "learning_rate": 3.4649932968765584e-05, "loss": 0.018886186182498932, "step": 15770 }, { "epoch": 25.28846153846154, "grad_norm": 0.1993681639432907, "learning_rate": 3.46167716686648e-05, "loss": 0.018038110435009004, "step": 15780 }, { "epoch": 25.30448717948718, "grad_norm": 0.23899400234222412, "learning_rate": 3.4583604589681186e-05, "loss": 0.019668106734752656, "step": 
15790 }, { "epoch": 25.32051282051282, "grad_norm": 0.1922382265329361, "learning_rate": 3.4550431773330475e-05, "loss": 0.02063709795475006, "step": 15800 }, { "epoch": 25.33653846153846, "grad_norm": 0.2628643214702606, "learning_rate": 3.451725326113557e-05, "loss": 0.01967736631631851, "step": 15810 }, { "epoch": 25.352564102564102, "grad_norm": 0.1701427698135376, "learning_rate": 3.448406909462652e-05, "loss": 0.017310415208339692, "step": 15820 }, { "epoch": 25.368589743589745, "grad_norm": 0.1576080322265625, "learning_rate": 3.445087931534046e-05, "loss": 0.01777251958847046, "step": 15830 }, { "epoch": 25.384615384615383, "grad_norm": 0.16868241131305695, "learning_rate": 3.4417683964821526e-05, "loss": 0.016445520520210265, "step": 15840 }, { "epoch": 25.400641025641026, "grad_norm": 0.16120140254497528, "learning_rate": 3.4384483084620844e-05, "loss": 0.01896362453699112, "step": 15850 }, { "epoch": 25.416666666666668, "grad_norm": 0.18768443167209625, "learning_rate": 3.435127671629646e-05, "loss": 0.019315105676651, "step": 15860 }, { "epoch": 25.432692307692307, "grad_norm": 0.23443439602851868, "learning_rate": 3.4318064901413276e-05, "loss": 0.020024727284908294, "step": 15870 }, { "epoch": 25.44871794871795, "grad_norm": 0.2565954327583313, "learning_rate": 3.4284847681543034e-05, "loss": 0.019018030166625975, "step": 15880 }, { "epoch": 25.46474358974359, "grad_norm": 0.23827262222766876, "learning_rate": 3.425162509826423e-05, "loss": 0.017810632288455964, "step": 15890 }, { "epoch": 25.48076923076923, "grad_norm": 0.327517569065094, "learning_rate": 3.421839719316206e-05, "loss": 0.018421182036399843, "step": 15900 }, { "epoch": 25.496794871794872, "grad_norm": 0.24092501401901245, "learning_rate": 3.4185164007828394e-05, "loss": 0.01900428980588913, "step": 15910 }, { "epoch": 25.51282051282051, "grad_norm": 0.1851179599761963, "learning_rate": 3.415192558386174e-05, "loss": 0.01998588740825653, "step": 15920 }, { "epoch": 25.528846153846153, 
"grad_norm": 0.1723431944847107, "learning_rate": 3.411868196286711e-05, "loss": 0.020996475219726564, "step": 15930 }, { "epoch": 25.544871794871796, "grad_norm": 0.3640829920768738, "learning_rate": 3.4085433186456056e-05, "loss": 0.018327440321445464, "step": 15940 }, { "epoch": 25.560897435897434, "grad_norm": 0.31720489263534546, "learning_rate": 3.405217929624656e-05, "loss": 0.020024177432060242, "step": 15950 }, { "epoch": 25.576923076923077, "grad_norm": 0.4017912447452545, "learning_rate": 3.4018920333863046e-05, "loss": 0.021117068827152252, "step": 15960 }, { "epoch": 25.59294871794872, "grad_norm": 0.2376621961593628, "learning_rate": 3.398565634093624e-05, "loss": 0.022608572244644166, "step": 15970 }, { "epoch": 25.608974358974358, "grad_norm": 0.16885222494602203, "learning_rate": 3.395238735910318e-05, "loss": 0.018388617038726806, "step": 15980 }, { "epoch": 25.625, "grad_norm": 0.41290855407714844, "learning_rate": 3.3919113430007166e-05, "loss": 0.01941043436527252, "step": 15990 }, { "epoch": 25.641025641025642, "grad_norm": 0.2023768275976181, "learning_rate": 3.388583459529767e-05, "loss": 0.01954016238451004, "step": 16000 }, { "epoch": 25.65705128205128, "grad_norm": 0.20842140913009644, "learning_rate": 3.385255089663031e-05, "loss": 0.017747169733047484, "step": 16010 }, { "epoch": 25.673076923076923, "grad_norm": 0.25603970885276794, "learning_rate": 3.3819262375666816e-05, "loss": 0.018988585472106932, "step": 16020 }, { "epoch": 25.689102564102566, "grad_norm": 0.1510162204504013, "learning_rate": 3.3785969074074895e-05, "loss": 0.01683361828327179, "step": 16030 }, { "epoch": 25.705128205128204, "grad_norm": 0.15902109444141388, "learning_rate": 3.37526710335283e-05, "loss": 0.018270856142044066, "step": 16040 }, { "epoch": 25.721153846153847, "grad_norm": 0.2558702528476715, "learning_rate": 3.37193682957067e-05, "loss": 0.019322881102561952, "step": 16050 }, { "epoch": 25.73717948717949, "grad_norm": 0.1546119749546051, 
"learning_rate": 3.368606090229561e-05, "loss": 0.018722456693649293, "step": 16060 }, { "epoch": 25.753205128205128, "grad_norm": 0.3033224046230316, "learning_rate": 3.365274889498642e-05, "loss": 0.017412158846855163, "step": 16070 }, { "epoch": 25.76923076923077, "grad_norm": 0.1870008111000061, "learning_rate": 3.3619432315476255e-05, "loss": 0.018237538635730743, "step": 16080 }, { "epoch": 25.78525641025641, "grad_norm": 0.24494177103042603, "learning_rate": 3.3586111205467984e-05, "loss": 0.018224699795246123, "step": 16090 }, { "epoch": 25.80128205128205, "grad_norm": 0.19451157748699188, "learning_rate": 3.355278560667015e-05, "loss": 0.018821801245212554, "step": 16100 }, { "epoch": 25.817307692307693, "grad_norm": 0.2671957015991211, "learning_rate": 3.351945556079692e-05, "loss": 0.018619978427886964, "step": 16110 }, { "epoch": 25.833333333333332, "grad_norm": 0.26804009079933167, "learning_rate": 3.348612110956799e-05, "loss": 0.01882866322994232, "step": 16120 }, { "epoch": 25.849358974358974, "grad_norm": 0.15642251074314117, "learning_rate": 3.345278229470861e-05, "loss": 0.017413212358951567, "step": 16130 }, { "epoch": 25.865384615384617, "grad_norm": 0.18739289045333862, "learning_rate": 3.341943915794948e-05, "loss": 0.016757962107658387, "step": 16140 }, { "epoch": 25.881410256410255, "grad_norm": 0.2895760238170624, "learning_rate": 3.3386091741026704e-05, "loss": 0.01972751021385193, "step": 16150 }, { "epoch": 25.897435897435898, "grad_norm": 0.3311401605606079, "learning_rate": 3.3352740085681746e-05, "loss": 0.01809525340795517, "step": 16160 }, { "epoch": 25.91346153846154, "grad_norm": 0.2375466376543045, "learning_rate": 3.3319384233661385e-05, "loss": 0.017514093220233916, "step": 16170 }, { "epoch": 25.92948717948718, "grad_norm": 0.21448341012001038, "learning_rate": 3.328602422671762e-05, "loss": 0.018193159997463227, "step": 16180 }, { "epoch": 25.94551282051282, "grad_norm": 0.19433900713920593, "learning_rate": 
3.3252660106607714e-05, "loss": 0.01869487762451172, "step": 16190 }, { "epoch": 25.96153846153846, "grad_norm": 0.2398453950881958, "learning_rate": 3.3219291915094014e-05, "loss": 0.019144269824028014, "step": 16200 }, { "epoch": 25.977564102564102, "grad_norm": 0.1650911569595337, "learning_rate": 3.318591969394399e-05, "loss": 0.018170061707496642, "step": 16210 }, { "epoch": 25.993589743589745, "grad_norm": 0.27485013008117676, "learning_rate": 3.3152543484930166e-05, "loss": 0.018947161734104156, "step": 16220 }, { "epoch": 26.0, "eval_accuracy_background": NaN, "eval_accuracy_crop": 0.9939011345509936, "eval_iou_background": 0.0, "eval_iou_crop": 0.9939011345509936, "eval_loss": 0.019662603735923767, "eval_mean_accuracy": 0.9939011345509936, "eval_mean_iou": 0.4969505672754968, "eval_overall_accuracy": 0.9939011345509936, "eval_runtime": 33.4436, "eval_samples_per_second": 26.343, "eval_steps_per_second": 3.319, "step": 16224 }, { "epoch": 26.009615384615383, "grad_norm": 0.28156745433807373, "learning_rate": 3.311916332983004e-05, "loss": 0.018706916272640227, "step": 16230 }, { "epoch": 26.025641025641026, "grad_norm": 0.21754974126815796, "learning_rate": 3.3085779270426067e-05, "loss": 0.020540393888950348, "step": 16240 }, { "epoch": 26.041666666666668, "grad_norm": 0.24967679381370544, "learning_rate": 3.3052391348505564e-05, "loss": 0.017649886012077332, "step": 16250 }, { "epoch": 26.057692307692307, "grad_norm": 0.14813612401485443, "learning_rate": 3.30189996058607e-05, "loss": 0.01851062774658203, "step": 16260 }, { "epoch": 26.07371794871795, "grad_norm": 0.22831366956233978, "learning_rate": 3.298560408428842e-05, "loss": 0.01758343130350113, "step": 16270 }, { "epoch": 26.08974358974359, "grad_norm": 0.2073722630739212, "learning_rate": 3.295220482559043e-05, "loss": 0.01869661211967468, "step": 16280 }, { "epoch": 26.10576923076923, "grad_norm": 0.1740737408399582, "learning_rate": 3.291880187157305e-05, "loss": 0.01821916401386261, "step": 
16290 }, { "epoch": 26.121794871794872, "grad_norm": 0.13525204360485077, "learning_rate": 3.288539526404728e-05, "loss": 0.017048841714859007, "step": 16300 }, { "epoch": 26.137820512820515, "grad_norm": 0.22050967812538147, "learning_rate": 3.285198504482868e-05, "loss": 0.01890978068113327, "step": 16310 }, { "epoch": 26.153846153846153, "grad_norm": 0.20469602942466736, "learning_rate": 3.281857125573731e-05, "loss": 0.018314753472805024, "step": 16320 }, { "epoch": 26.169871794871796, "grad_norm": 0.209336519241333, "learning_rate": 3.2785153938597735e-05, "loss": 0.018661458790302277, "step": 16330 }, { "epoch": 26.185897435897434, "grad_norm": 0.21085456013679504, "learning_rate": 3.2751733135238904e-05, "loss": 0.018338483572006226, "step": 16340 }, { "epoch": 26.201923076923077, "grad_norm": 0.2541113495826721, "learning_rate": 3.271830888749412e-05, "loss": 0.020007582008838655, "step": 16350 }, { "epoch": 26.21794871794872, "grad_norm": 0.21312560141086578, "learning_rate": 3.2684881237201064e-05, "loss": 0.018663078546524048, "step": 16360 }, { "epoch": 26.233974358974358, "grad_norm": 0.23721227049827576, "learning_rate": 3.26514502262016e-05, "loss": 0.018551528453826904, "step": 16370 }, { "epoch": 26.25, "grad_norm": 0.1930939108133316, "learning_rate": 3.2618015896341845e-05, "loss": 0.01799684911966324, "step": 16380 }, { "epoch": 26.266025641025642, "grad_norm": 0.2829596996307373, "learning_rate": 3.258457828947204e-05, "loss": 0.017873871326446533, "step": 16390 }, { "epoch": 26.28205128205128, "grad_norm": 0.2640783488750458, "learning_rate": 3.2551137447446546e-05, "loss": 0.018322183191776274, "step": 16400 }, { "epoch": 26.298076923076923, "grad_norm": 0.289958119392395, "learning_rate": 3.2517693412123775e-05, "loss": 0.016870632767677307, "step": 16410 }, { "epoch": 26.314102564102566, "grad_norm": 0.28756409883499146, "learning_rate": 3.248424622536613e-05, "loss": 0.01998782008886337, "step": 16420 }, { "epoch": 26.330128205128204, 
"grad_norm": 0.1766269952058792, "learning_rate": 3.245079592903996e-05, "loss": 0.01792311817407608, "step": 16430 }, { "epoch": 26.346153846153847, "grad_norm": 0.2023896872997284, "learning_rate": 3.241734256501551e-05, "loss": 0.01765352636575699, "step": 16440 }, { "epoch": 26.362179487179485, "grad_norm": 0.214618980884552, "learning_rate": 3.2383886175166856e-05, "loss": 0.02051657289266586, "step": 16450 }, { "epoch": 26.378205128205128, "grad_norm": 0.24873432517051697, "learning_rate": 3.235042680137186e-05, "loss": 0.01841997504234314, "step": 16460 }, { "epoch": 26.39423076923077, "grad_norm": 0.20050106942653656, "learning_rate": 3.231696448551214e-05, "loss": 0.01614238917827606, "step": 16470 }, { "epoch": 26.41025641025641, "grad_norm": 0.24351546168327332, "learning_rate": 3.228349926947298e-05, "loss": 0.017914411425590516, "step": 16480 }, { "epoch": 26.42628205128205, "grad_norm": 0.16567222774028778, "learning_rate": 3.2250031195143275e-05, "loss": 0.02165583074092865, "step": 16490 }, { "epoch": 26.442307692307693, "grad_norm": 0.4272846579551697, "learning_rate": 3.221656030441555e-05, "loss": 0.018365731835365294, "step": 16500 }, { "epoch": 26.458333333333332, "grad_norm": 0.18071973323822021, "learning_rate": 3.218308663918581e-05, "loss": 0.01957012712955475, "step": 16510 }, { "epoch": 26.474358974358974, "grad_norm": 0.36537274718284607, "learning_rate": 3.214961024135354e-05, "loss": 0.017680436372756958, "step": 16520 }, { "epoch": 26.490384615384617, "grad_norm": 0.17489488422870636, "learning_rate": 3.2116131152821665e-05, "loss": 0.01833242177963257, "step": 16530 }, { "epoch": 26.506410256410255, "grad_norm": 0.16909535229206085, "learning_rate": 3.208264941549646e-05, "loss": 0.016797228157520293, "step": 16540 }, { "epoch": 26.522435897435898, "grad_norm": 0.4891413748264313, "learning_rate": 3.204916507128752e-05, "loss": 0.020542511343955995, "step": 16550 }, { "epoch": 26.53846153846154, "grad_norm": 0.1969546675682068, 
"learning_rate": 3.201567816210771e-05, "loss": 0.017972977459430696, "step": 16560 }, { "epoch": 26.55448717948718, "grad_norm": 0.24672983586788177, "learning_rate": 3.19821887298731e-05, "loss": 0.01821368783712387, "step": 16570 }, { "epoch": 26.57051282051282, "grad_norm": 0.28681719303131104, "learning_rate": 3.194869681650291e-05, "loss": 0.019713152945041657, "step": 16580 }, { "epoch": 26.58653846153846, "grad_norm": 0.25685712695121765, "learning_rate": 3.1915202463919486e-05, "loss": 0.02017442286014557, "step": 16590 }, { "epoch": 26.602564102564102, "grad_norm": 0.23373758792877197, "learning_rate": 3.18817057140482e-05, "loss": 0.018385741114616393, "step": 16600 }, { "epoch": 26.618589743589745, "grad_norm": 0.25146484375, "learning_rate": 3.184820660881746e-05, "loss": 0.018222063779830933, "step": 16610 }, { "epoch": 26.634615384615383, "grad_norm": 0.16380375623703003, "learning_rate": 3.1814705190158584e-05, "loss": 0.017400239408016206, "step": 16620 }, { "epoch": 26.650641025641026, "grad_norm": 0.26131364703178406, "learning_rate": 3.17812015000058e-05, "loss": 0.018551069498062133, "step": 16630 }, { "epoch": 26.666666666666668, "grad_norm": 0.19616062939167023, "learning_rate": 3.1747695580296194e-05, "loss": 0.01916258633136749, "step": 16640 }, { "epoch": 26.682692307692307, "grad_norm": 0.3181162178516388, "learning_rate": 3.1714187472969645e-05, "loss": 0.02002188265323639, "step": 16650 }, { "epoch": 26.69871794871795, "grad_norm": 0.16730257868766785, "learning_rate": 3.168067721996874e-05, "loss": 0.017548109591007232, "step": 16660 }, { "epoch": 26.71474358974359, "grad_norm": 0.2955823838710785, "learning_rate": 3.164716486323877e-05, "loss": 0.018543094396591187, "step": 16670 }, { "epoch": 26.73076923076923, "grad_norm": 0.3715968430042267, "learning_rate": 3.161365044472765e-05, "loss": 0.017452259361743928, "step": 16680 }, { "epoch": 26.746794871794872, "grad_norm": 0.22866123914718628, "learning_rate": 3.158013400638591e-05, 
"loss": 0.01685999482870102, "step": 16690 }, { "epoch": 26.76282051282051, "grad_norm": 0.28637996315956116, "learning_rate": 3.1546615590166555e-05, "loss": 0.015730167925357818, "step": 16700 }, { "epoch": 26.778846153846153, "grad_norm": 0.32194411754608154, "learning_rate": 3.15130952380251e-05, "loss": 0.01947355717420578, "step": 16710 }, { "epoch": 26.794871794871796, "grad_norm": 0.24484354257583618, "learning_rate": 3.1479572991919484e-05, "loss": 0.018607422709465027, "step": 16720 }, { "epoch": 26.810897435897434, "grad_norm": 0.18518950045108795, "learning_rate": 3.144604889381001e-05, "loss": 0.0212660014629364, "step": 16730 }, { "epoch": 26.826923076923077, "grad_norm": 0.20427629351615906, "learning_rate": 3.141252298565929e-05, "loss": 0.017027208209037782, "step": 16740 }, { "epoch": 26.84294871794872, "grad_norm": 0.15825313329696655, "learning_rate": 3.137899530943221e-05, "loss": 0.01743612587451935, "step": 16750 }, { "epoch": 26.858974358974358, "grad_norm": 0.2255682796239853, "learning_rate": 3.1345465907095875e-05, "loss": 0.018269504606723785, "step": 16760 }, { "epoch": 26.875, "grad_norm": 0.18175263702869415, "learning_rate": 3.131193482061953e-05, "loss": 0.01860482394695282, "step": 16770 }, { "epoch": 26.891025641025642, "grad_norm": 0.12016500532627106, "learning_rate": 3.127840209197457e-05, "loss": 0.016855481266975402, "step": 16780 }, { "epoch": 26.90705128205128, "grad_norm": 0.25410565733909607, "learning_rate": 3.1244867763134394e-05, "loss": 0.017106544971466065, "step": 16790 }, { "epoch": 26.923076923076923, "grad_norm": 0.21366466581821442, "learning_rate": 3.121133187607443e-05, "loss": 0.016080071032047272, "step": 16800 }, { "epoch": 26.939102564102566, "grad_norm": 0.24663671851158142, "learning_rate": 3.117779447277206e-05, "loss": 0.02069047689437866, "step": 16810 }, { "epoch": 26.955128205128204, "grad_norm": 0.18783999979496002, "learning_rate": 3.114425559520656e-05, "loss": 0.018526795506477355, "step": 16820 
}, { "epoch": 26.971153846153847, "grad_norm": 0.28773772716522217, "learning_rate": 3.1110715285359045e-05, "loss": 0.016332374513149263, "step": 16830 }, { "epoch": 26.98717948717949, "grad_norm": 0.2657904326915741, "learning_rate": 3.107717358521244e-05, "loss": 0.018433491885662078, "step": 16840 }, { "epoch": 27.0, "eval_accuracy_background": NaN, "eval_accuracy_crop": 0.9940095759467305, "eval_iou_background": 0.0, "eval_iou_crop": 0.9940095759467305, "eval_loss": 0.020025471225380898, "eval_mean_accuracy": 0.9940095759467305, "eval_mean_iou": 0.4970047879733652, "eval_overall_accuracy": 0.9940095759467305, "eval_runtime": 37.1358, "eval_samples_per_second": 23.724, "eval_steps_per_second": 2.989, "step": 16848 }, { "epoch": 27.003205128205128, "grad_norm": 0.22491911053657532, "learning_rate": 3.104363053675137e-05, "loss": 0.01717831790447235, "step": 16850 }, { "epoch": 27.01923076923077, "grad_norm": 0.21216291189193726, "learning_rate": 3.1010086181962204e-05, "loss": 0.02005733549594879, "step": 16860 }, { "epoch": 27.03525641025641, "grad_norm": 0.15962018072605133, "learning_rate": 3.09765405628329e-05, "loss": 0.01626526713371277, "step": 16870 }, { "epoch": 27.05128205128205, "grad_norm": 0.30611470341682434, "learning_rate": 3.094299372135304e-05, "loss": 0.017679233849048615, "step": 16880 }, { "epoch": 27.067307692307693, "grad_norm": 0.1625073403120041, "learning_rate": 3.090944569951368e-05, "loss": 0.017544980347156524, "step": 16890 }, { "epoch": 27.083333333333332, "grad_norm": 0.15509426593780518, "learning_rate": 3.08758965393074e-05, "loss": 0.019670356810092927, "step": 16900 }, { "epoch": 27.099358974358974, "grad_norm": 0.17430143058300018, "learning_rate": 3.084234628272819e-05, "loss": 0.01620752215385437, "step": 16910 }, { "epoch": 27.115384615384617, "grad_norm": 0.5051385164260864, "learning_rate": 3.080879497177142e-05, "loss": 0.016746410727500917, "step": 16920 }, { "epoch": 27.131410256410255, "grad_norm": 
0.2546785771846771, "learning_rate": 3.0775242648433765e-05, "loss": 0.017816346883773804, "step": 16930 }, { "epoch": 27.147435897435898, "grad_norm": 0.21099740266799927, "learning_rate": 3.0741689354713174e-05, "loss": 0.018470735847949983, "step": 16940 }, { "epoch": 27.16346153846154, "grad_norm": 0.4103090167045593, "learning_rate": 3.070813513260881e-05, "loss": 0.022471068799495696, "step": 16950 }, { "epoch": 27.17948717948718, "grad_norm": 0.7989904880523682, "learning_rate": 3.0674580024121016e-05, "loss": 0.022801533341407776, "step": 16960 }, { "epoch": 27.19551282051282, "grad_norm": 0.18571600317955017, "learning_rate": 3.064102407125121e-05, "loss": 0.017614728212356566, "step": 16970 }, { "epoch": 27.21153846153846, "grad_norm": 0.20276586711406708, "learning_rate": 3.0607467316001895e-05, "loss": 0.01894904226064682, "step": 16980 }, { "epoch": 27.227564102564102, "grad_norm": 0.16137148439884186, "learning_rate": 3.0573909800376557e-05, "loss": 0.018451642990112305, "step": 16990 }, { "epoch": 27.243589743589745, "grad_norm": 0.255106657743454, "learning_rate": 3.0540351566379655e-05, "loss": 0.016291111707687378, "step": 17000 }, { "epoch": 27.259615384615383, "grad_norm": 0.2143152952194214, "learning_rate": 3.0506792656016544e-05, "loss": 0.018117463588714598, "step": 17010 }, { "epoch": 27.275641025641026, "grad_norm": 0.28897568583488464, "learning_rate": 3.0473233111293396e-05, "loss": 0.018759986758232115, "step": 17020 }, { "epoch": 27.291666666666668, "grad_norm": 0.19300523400306702, "learning_rate": 3.0439672974217224e-05, "loss": 0.017782628536224365, "step": 17030 }, { "epoch": 27.307692307692307, "grad_norm": 0.37745583057403564, "learning_rate": 3.040611228679575e-05, "loss": 0.017521949112415315, "step": 17040 }, { "epoch": 27.32371794871795, "grad_norm": 0.34770938754081726, "learning_rate": 3.037255109103739e-05, "loss": 0.018411900103092193, "step": 17050 }, { "epoch": 27.33974358974359, "grad_norm": 0.3388618528842926, 
"learning_rate": 3.03389894289512e-05, "loss": 0.017944170534610747, "step": 17060 }, { "epoch": 27.35576923076923, "grad_norm": 0.4783923923969269, "learning_rate": 3.0305427342546822e-05, "loss": 0.017406323552131654, "step": 17070 }, { "epoch": 27.371794871794872, "grad_norm": 0.23210877180099487, "learning_rate": 3.027186487383442e-05, "loss": 0.018186374008655547, "step": 17080 }, { "epoch": 27.387820512820515, "grad_norm": 0.20951145887374878, "learning_rate": 3.0238302064824655e-05, "loss": 0.017148332297801973, "step": 17090 }, { "epoch": 27.403846153846153, "grad_norm": 0.25934478640556335, "learning_rate": 3.0204738957528598e-05, "loss": 0.018351730704307557, "step": 17100 }, { "epoch": 27.419871794871796, "grad_norm": 0.33074355125427246, "learning_rate": 3.017117559395769e-05, "loss": 0.018674251437187196, "step": 17110 }, { "epoch": 27.435897435897434, "grad_norm": 0.20370543003082275, "learning_rate": 3.0137612016123713e-05, "loss": 0.01660604923963547, "step": 17120 }, { "epoch": 27.451923076923077, "grad_norm": 0.1768864542245865, "learning_rate": 3.0104048266038683e-05, "loss": 0.017890799045562743, "step": 17130 }, { "epoch": 27.46794871794872, "grad_norm": 0.24105100333690643, "learning_rate": 3.0070484385714886e-05, "loss": 0.017366032302379607, "step": 17140 }, { "epoch": 27.483974358974358, "grad_norm": 0.23368774354457855, "learning_rate": 3.0036920417164707e-05, "loss": 0.019970948994159698, "step": 17150 }, { "epoch": 27.5, "grad_norm": 0.23477400839328766, "learning_rate": 3.000335640240068e-05, "loss": 0.01815476417541504, "step": 17160 }, { "epoch": 27.516025641025642, "grad_norm": 0.17763248085975647, "learning_rate": 2.99697923834354e-05, "loss": 0.020208539068698884, "step": 17170 }, { "epoch": 27.53205128205128, "grad_norm": 0.3844905495643616, "learning_rate": 2.993622840228145e-05, "loss": 0.021753746271133422, "step": 17180 }, { "epoch": 27.548076923076923, "grad_norm": 0.14724838733673096, "learning_rate": 2.990266450095137e-05, 
"loss": 0.01823679804801941, "step": 17190 }, { "epoch": 27.564102564102566, "grad_norm": 0.3028460144996643, "learning_rate": 2.9869100721457607e-05, "loss": 0.01850302964448929, "step": 17200 }, { "epoch": 27.580128205128204, "grad_norm": 0.2119598090648651, "learning_rate": 2.983553710581244e-05, "loss": 0.018655870854854584, "step": 17210 }, { "epoch": 27.596153846153847, "grad_norm": 0.28319722414016724, "learning_rate": 2.9801973696027976e-05, "loss": 0.021528062224388123, "step": 17220 }, { "epoch": 27.61217948717949, "grad_norm": 0.23997808992862701, "learning_rate": 2.9768410534116014e-05, "loss": 0.019083406031131744, "step": 17230 }, { "epoch": 27.628205128205128, "grad_norm": 0.13642142713069916, "learning_rate": 2.973484766208809e-05, "loss": 0.016877740621566772, "step": 17240 }, { "epoch": 27.64423076923077, "grad_norm": 0.24707046151161194, "learning_rate": 2.9701285121955362e-05, "loss": 0.018179827928543092, "step": 17250 }, { "epoch": 27.66025641025641, "grad_norm": 0.22569923102855682, "learning_rate": 2.9667722955728546e-05, "loss": 0.01901213824748993, "step": 17260 }, { "epoch": 27.67628205128205, "grad_norm": 0.15771102905273438, "learning_rate": 2.9634161205417933e-05, "loss": 0.01683995723724365, "step": 17270 }, { "epoch": 27.692307692307693, "grad_norm": 0.20012004673480988, "learning_rate": 2.9600599913033263e-05, "loss": 0.01727980226278305, "step": 17280 }, { "epoch": 27.708333333333332, "grad_norm": 0.1934272050857544, "learning_rate": 2.9567039120583715e-05, "loss": 0.02061603367328644, "step": 17290 }, { "epoch": 27.724358974358974, "grad_norm": 0.25886762142181396, "learning_rate": 2.9533478870077835e-05, "loss": 0.020030640065670013, "step": 17300 }, { "epoch": 27.740384615384617, "grad_norm": 0.18063512444496155, "learning_rate": 2.94999192035235e-05, "loss": 0.01979861408472061, "step": 17310 }, { "epoch": 27.756410256410255, "grad_norm": 0.20458431541919708, "learning_rate": 2.9466360162927858e-05, "loss": 0.01699683666229248, 
"step": 17320 }, { "epoch": 27.772435897435898, "grad_norm": 0.37644749879837036, "learning_rate": 2.943280179029726e-05, "loss": 0.018569239974021913, "step": 17330 }, { "epoch": 27.78846153846154, "grad_norm": 0.17035484313964844, "learning_rate": 2.939924412763722e-05, "loss": 0.017614758014678954, "step": 17340 }, { "epoch": 27.80448717948718, "grad_norm": 0.31473007798194885, "learning_rate": 2.93656872169524e-05, "loss": 0.021767276525497436, "step": 17350 }, { "epoch": 27.82051282051282, "grad_norm": 0.21593964099884033, "learning_rate": 2.933213110024645e-05, "loss": 0.016777320206165312, "step": 17360 }, { "epoch": 27.83653846153846, "grad_norm": 0.18493157625198364, "learning_rate": 2.9298575819522102e-05, "loss": 0.01756521761417389, "step": 17370 }, { "epoch": 27.852564102564102, "grad_norm": 0.3156664967536926, "learning_rate": 2.9265021416781e-05, "loss": 0.019868212938308715, "step": 17380 }, { "epoch": 27.868589743589745, "grad_norm": 0.21435518562793732, "learning_rate": 2.923146793402368e-05, "loss": 0.017556165158748627, "step": 17390 }, { "epoch": 27.884615384615383, "grad_norm": 0.299157977104187, "learning_rate": 2.9197915413249558e-05, "loss": 0.019117991626262664, "step": 17400 }, { "epoch": 27.900641025641026, "grad_norm": 0.19042299687862396, "learning_rate": 2.916436389645684e-05, "loss": 0.017305639386177064, "step": 17410 }, { "epoch": 27.916666666666668, "grad_norm": 0.21018587052822113, "learning_rate": 2.913081342564245e-05, "loss": 0.01826658546924591, "step": 17420 }, { "epoch": 27.932692307692307, "grad_norm": 0.2453763633966446, "learning_rate": 2.9097264042802016e-05, "loss": 0.01674496680498123, "step": 17430 }, { "epoch": 27.94871794871795, "grad_norm": 0.22610914707183838, "learning_rate": 2.906371578992983e-05, "loss": 0.017735135555267335, "step": 17440 }, { "epoch": 27.96474358974359, "grad_norm": 0.13290579617023468, "learning_rate": 2.9030168709018718e-05, "loss": 0.016057151556015014, "step": 17450 }, { "epoch": 
27.98076923076923, "grad_norm": 0.19744063913822174, "learning_rate": 2.899662284206009e-05, "loss": 0.017448589205741882, "step": 17460 }, { "epoch": 27.996794871794872, "grad_norm": 0.17253318428993225, "learning_rate": 2.8963078231043815e-05, "loss": 0.017911747097969055, "step": 17470 }, { "epoch": 28.0, "eval_accuracy_background": NaN, "eval_accuracy_crop": 0.9948379355687651, "eval_iou_background": 0.0, "eval_iou_crop": 0.9948379355687651, "eval_loss": 0.02010897733271122, "eval_mean_accuracy": 0.9948379355687651, "eval_mean_iou": 0.49741896778438255, "eval_overall_accuracy": 0.9948379355687651, "eval_runtime": 34.0001, "eval_samples_per_second": 25.912, "eval_steps_per_second": 3.265, "step": 17472 }, { "epoch": 28.012820512820515, "grad_norm": 0.27936768531799316, "learning_rate": 2.8929534917958168e-05, "loss": 0.01766881048679352, "step": 17480 }, { "epoch": 28.028846153846153, "grad_norm": 0.20586459338665009, "learning_rate": 2.889599294478983e-05, "loss": 0.019741615653038024, "step": 17490 }, { "epoch": 28.044871794871796, "grad_norm": 0.21455815434455872, "learning_rate": 2.8862452353523816e-05, "loss": 0.016980856657028198, "step": 17500 }, { "epoch": 28.060897435897434, "grad_norm": 0.19373030960559845, "learning_rate": 2.8828913186143368e-05, "loss": 0.01823013126850128, "step": 17510 }, { "epoch": 28.076923076923077, "grad_norm": 0.17310801148414612, "learning_rate": 2.8795375484629966e-05, "loss": 0.018989986181259154, "step": 17520 }, { "epoch": 28.09294871794872, "grad_norm": 0.21625863015651703, "learning_rate": 2.8761839290963278e-05, "loss": 0.0191901758313179, "step": 17530 }, { "epoch": 28.108974358974358, "grad_norm": 0.2613190710544586, "learning_rate": 2.872830464712104e-05, "loss": 0.017746126651763915, "step": 17540 }, { "epoch": 28.125, "grad_norm": 0.20149090886116028, "learning_rate": 2.869477159507909e-05, "loss": 0.018127377331256866, "step": 17550 }, { "epoch": 28.141025641025642, "grad_norm": 0.16633479297161102, 
"learning_rate": 2.8661240176811262e-05, "loss": 0.01772025227546692, "step": 17560 }, { "epoch": 28.15705128205128, "grad_norm": 0.2722982168197632, "learning_rate": 2.862771043428931e-05, "loss": 0.01718814969062805, "step": 17570 }, { "epoch": 28.173076923076923, "grad_norm": 0.3296166658401489, "learning_rate": 2.8594182409482936e-05, "loss": 0.018961362540721893, "step": 17580 }, { "epoch": 28.189102564102566, "grad_norm": 0.32239073514938354, "learning_rate": 2.856065614435969e-05, "loss": 0.019756153225898743, "step": 17590 }, { "epoch": 28.205128205128204, "grad_norm": 0.2225051373243332, "learning_rate": 2.8527131680884878e-05, "loss": 0.019307398796081544, "step": 17600 }, { "epoch": 28.221153846153847, "grad_norm": 0.20585821568965912, "learning_rate": 2.8493609061021587e-05, "loss": 0.016493897140026092, "step": 17610 }, { "epoch": 28.237179487179485, "grad_norm": 0.18889182806015015, "learning_rate": 2.846008832673059e-05, "loss": 0.016264083981513976, "step": 17620 }, { "epoch": 28.253205128205128, "grad_norm": 0.285885214805603, "learning_rate": 2.8426569519970306e-05, "loss": 0.017493009567260742, "step": 17630 }, { "epoch": 28.26923076923077, "grad_norm": 0.2939261198043823, "learning_rate": 2.8393052682696714e-05, "loss": 0.020128269493579865, "step": 17640 }, { "epoch": 28.28525641025641, "grad_norm": 0.13385264575481415, "learning_rate": 2.8359537856863347e-05, "loss": 0.018474194407463073, "step": 17650 }, { "epoch": 28.30128205128205, "grad_norm": 0.26000240445137024, "learning_rate": 2.8326025084421234e-05, "loss": 0.018103136122226714, "step": 17660 }, { "epoch": 28.317307692307693, "grad_norm": 0.20691624283790588, "learning_rate": 2.82925144073188e-05, "loss": 0.017706459760665892, "step": 17670 }, { "epoch": 28.333333333333332, "grad_norm": 0.24784941971302032, "learning_rate": 2.825900586750188e-05, "loss": 0.017793026566505433, "step": 17680 }, { "epoch": 28.349358974358974, "grad_norm": 0.26193833351135254, "learning_rate": 
2.822549950691362e-05, "loss": 0.017840775847434997, "step": 17690 }, { "epoch": 28.365384615384617, "grad_norm": 0.26920002698898315, "learning_rate": 2.8191995367494416e-05, "loss": 0.018862427771091463, "step": 17700 }, { "epoch": 28.381410256410255, "grad_norm": 0.26656651496887207, "learning_rate": 2.8158493491181927e-05, "loss": 0.017476293444633483, "step": 17710 }, { "epoch": 28.397435897435898, "grad_norm": 0.25451958179473877, "learning_rate": 2.8124993919910957e-05, "loss": 0.01856676787137985, "step": 17720 }, { "epoch": 28.41346153846154, "grad_norm": 0.14890992641448975, "learning_rate": 2.8091496695613417e-05, "loss": 0.017985031008720398, "step": 17730 }, { "epoch": 28.42948717948718, "grad_norm": 0.151540607213974, "learning_rate": 2.8058001860218283e-05, "loss": 0.018825435638427736, "step": 17740 }, { "epoch": 28.44551282051282, "grad_norm": 0.3255636692047119, "learning_rate": 2.8024509455651573e-05, "loss": 0.01856929510831833, "step": 17750 }, { "epoch": 28.46153846153846, "grad_norm": 0.20543378591537476, "learning_rate": 2.79910195238362e-05, "loss": 0.020461590588092805, "step": 17760 }, { "epoch": 28.477564102564102, "grad_norm": 0.18936409056186676, "learning_rate": 2.7957532106692045e-05, "loss": 0.017279839515686034, "step": 17770 }, { "epoch": 28.493589743589745, "grad_norm": 0.1524096429347992, "learning_rate": 2.7924047246135805e-05, "loss": 0.016739290952682496, "step": 17780 }, { "epoch": 28.509615384615383, "grad_norm": 0.19720175862312317, "learning_rate": 2.7890564984080976e-05, "loss": 0.017428672313690184, "step": 17790 }, { "epoch": 28.525641025641026, "grad_norm": 0.3380344808101654, "learning_rate": 2.7857085362437823e-05, "loss": 0.0204587385058403, "step": 17800 }, { "epoch": 28.541666666666668, "grad_norm": 0.284423828125, "learning_rate": 2.7823608423113297e-05, "loss": 0.01793450564146042, "step": 17810 }, { "epoch": 28.557692307692307, "grad_norm": 0.2275932878255844, "learning_rate": 2.7790134208010983e-05, "loss": 
0.018686480820178986, "step": 17820 }, { "epoch": 28.57371794871795, "grad_norm": 0.18360333144664764, "learning_rate": 2.775666275903105e-05, "loss": 0.019939629733562468, "step": 17830 }, { "epoch": 28.58974358974359, "grad_norm": 0.21367096900939941, "learning_rate": 2.7723194118070243e-05, "loss": 0.022554419934749603, "step": 17840 }, { "epoch": 28.60576923076923, "grad_norm": 0.17508439719676971, "learning_rate": 2.7689728327021735e-05, "loss": 0.018482331931591035, "step": 17850 }, { "epoch": 28.621794871794872, "grad_norm": 0.15296900272369385, "learning_rate": 2.7656265427775185e-05, "loss": 0.019375187158584595, "step": 17860 }, { "epoch": 28.63782051282051, "grad_norm": 0.7016382217407227, "learning_rate": 2.7622805462216606e-05, "loss": 0.019990754127502442, "step": 17870 }, { "epoch": 28.653846153846153, "grad_norm": 0.2018764466047287, "learning_rate": 2.758934847222833e-05, "loss": 0.017743118107318878, "step": 17880 }, { "epoch": 28.669871794871796, "grad_norm": 0.2358604520559311, "learning_rate": 2.7555894499688983e-05, "loss": 0.01779780387878418, "step": 17890 }, { "epoch": 28.685897435897434, "grad_norm": 0.19054515659809113, "learning_rate": 2.7522443586473412e-05, "loss": 0.016434299945831298, "step": 17900 }, { "epoch": 28.701923076923077, "grad_norm": 0.2194616198539734, "learning_rate": 2.748899577445264e-05, "loss": 0.016623528301715852, "step": 17910 }, { "epoch": 28.71794871794872, "grad_norm": 0.6502315402030945, "learning_rate": 2.745555110549377e-05, "loss": 0.01862228810787201, "step": 17920 }, { "epoch": 28.733974358974358, "grad_norm": 0.19788013398647308, "learning_rate": 2.7422109621460017e-05, "loss": 0.01864151954650879, "step": 17930 }, { "epoch": 28.75, "grad_norm": 0.16962723433971405, "learning_rate": 2.7388671364210603e-05, "loss": 0.01620447337627411, "step": 17940 }, { "epoch": 28.766025641025642, "grad_norm": 0.19881170988082886, "learning_rate": 2.735523637560068e-05, "loss": 0.014928872883319854, "step": 17950 }, { 
"epoch": 28.78205128205128, "grad_norm": 0.1807941198348999, "learning_rate": 2.7321804697481333e-05, "loss": 0.016647966206073762, "step": 17960 }, { "epoch": 28.798076923076923, "grad_norm": 0.19772876799106598, "learning_rate": 2.728837637169951e-05, "loss": 0.018970248103141785, "step": 17970 }, { "epoch": 28.814102564102566, "grad_norm": 0.16825899481773376, "learning_rate": 2.725495144009793e-05, "loss": 0.01681509017944336, "step": 17980 }, { "epoch": 28.830128205128204, "grad_norm": 0.17510464787483215, "learning_rate": 2.7221529944515102e-05, "loss": 0.01798986494541168, "step": 17990 }, { "epoch": 28.846153846153847, "grad_norm": 0.1876489371061325, "learning_rate": 2.718811192678522e-05, "loss": 0.01862945854663849, "step": 18000 }, { "epoch": 28.86217948717949, "grad_norm": 0.2060648500919342, "learning_rate": 2.71546974287381e-05, "loss": 0.017548441886901855, "step": 18010 }, { "epoch": 28.878205128205128, "grad_norm": 0.14439640939235687, "learning_rate": 2.712128649219919e-05, "loss": 0.018567575514316557, "step": 18020 }, { "epoch": 28.89423076923077, "grad_norm": 0.2289525866508484, "learning_rate": 2.708787915898947e-05, "loss": 0.016804379224777222, "step": 18030 }, { "epoch": 28.91025641025641, "grad_norm": 0.1965460628271103, "learning_rate": 2.7054475470925394e-05, "loss": 0.016354453563690186, "step": 18040 }, { "epoch": 28.92628205128205, "grad_norm": 0.2171105146408081, "learning_rate": 2.7021075469818855e-05, "loss": 0.01831267923116684, "step": 18050 }, { "epoch": 28.942307692307693, "grad_norm": 0.20869363844394684, "learning_rate": 2.698767919747716e-05, "loss": 0.017015531659126282, "step": 18060 }, { "epoch": 28.958333333333332, "grad_norm": 0.19030840694904327, "learning_rate": 2.69542866957029e-05, "loss": 0.016517703235149384, "step": 18070 }, { "epoch": 28.974358974358974, "grad_norm": 0.1783386617898941, "learning_rate": 2.6920898006294e-05, "loss": 0.016188053786754607, "step": 18080 }, { "epoch": 28.990384615384617, 
"grad_norm": 0.366711288690567, "learning_rate": 2.6887513171043574e-05, "loss": 0.019191914796829225, "step": 18090 }, { "epoch": 29.0, "eval_accuracy_background": NaN, "eval_accuracy_crop": 0.9941440235223428, "eval_iou_background": 0.0, "eval_iou_crop": 0.9941440235223428, "eval_loss": 0.01976783573627472, "eval_mean_accuracy": 0.9941440235223428, "eval_mean_iou": 0.4970720117611714, "eval_overall_accuracy": 0.9941440235223428, "eval_runtime": 36.6276, "eval_samples_per_second": 24.053, "eval_steps_per_second": 3.031, "step": 18096 }, { "epoch": 29.006410256410255, "grad_norm": 0.22476401925086975, "learning_rate": 2.6854132231739925e-05, "loss": 0.01691286265850067, "step": 18100 }, { "epoch": 29.022435897435898, "grad_norm": 0.3154968321323395, "learning_rate": 2.6820755230166475e-05, "loss": 0.01910841464996338, "step": 18110 }, { "epoch": 29.03846153846154, "grad_norm": 0.16399899125099182, "learning_rate": 2.6787382208101742e-05, "loss": 0.016472840309143068, "step": 18120 }, { "epoch": 29.05448717948718, "grad_norm": 0.16821284592151642, "learning_rate": 2.6754013207319212e-05, "loss": 0.016911974549293517, "step": 18130 }, { "epoch": 29.07051282051282, "grad_norm": 0.20380234718322754, "learning_rate": 2.672064826958739e-05, "loss": 0.018092145025730134, "step": 18140 }, { "epoch": 29.08653846153846, "grad_norm": 0.24280428886413574, "learning_rate": 2.668728743666967e-05, "loss": 0.01902472972869873, "step": 18150 }, { "epoch": 29.102564102564102, "grad_norm": 0.10191089659929276, "learning_rate": 2.6653930750324293e-05, "loss": 0.015859505534172057, "step": 18160 }, { "epoch": 29.118589743589745, "grad_norm": 0.22622473537921906, "learning_rate": 2.6620578252304342e-05, "loss": 0.017280778288841246, "step": 18170 }, { "epoch": 29.134615384615383, "grad_norm": 0.1407061517238617, "learning_rate": 2.6587229984357643e-05, "loss": 0.016220976412296296, "step": 18180 }, { "epoch": 29.150641025641026, "grad_norm": 0.21767543256282806, "learning_rate": 
2.655388598822672e-05, "loss": 0.01778598129749298, "step": 18190 }, { "epoch": 29.166666666666668, "grad_norm": 0.3054591715335846, "learning_rate": 2.6520546305648756e-05, "loss": 0.017548085749149324, "step": 18200 }, { "epoch": 29.182692307692307, "grad_norm": 0.28636229038238525, "learning_rate": 2.6487210978355543e-05, "loss": 0.01895843893289566, "step": 18210 }, { "epoch": 29.19871794871795, "grad_norm": 0.35368236899375916, "learning_rate": 2.6453880048073412e-05, "loss": 0.019381949305534364, "step": 18220 }, { "epoch": 29.21474358974359, "grad_norm": 0.1874903291463852, "learning_rate": 2.642055355652319e-05, "loss": 0.01632430702447891, "step": 18230 }, { "epoch": 29.23076923076923, "grad_norm": 0.18311826884746552, "learning_rate": 2.6387231545420143e-05, "loss": 0.017733582854270936, "step": 18240 }, { "epoch": 29.246794871794872, "grad_norm": 0.20273156464099884, "learning_rate": 2.6353914056473957e-05, "loss": 0.018067078292369844, "step": 18250 }, { "epoch": 29.262820512820515, "grad_norm": 0.1497785747051239, "learning_rate": 2.6320601131388606e-05, "loss": 0.018010175228118895, "step": 18260 }, { "epoch": 29.278846153846153, "grad_norm": 0.3002806305885315, "learning_rate": 2.6287292811862404e-05, "loss": 0.02011309564113617, "step": 18270 }, { "epoch": 29.294871794871796, "grad_norm": 0.19714578986167908, "learning_rate": 2.625398913958788e-05, "loss": 0.017515546083450316, "step": 18280 }, { "epoch": 29.310897435897434, "grad_norm": 0.16085141897201538, "learning_rate": 2.6220690156251714e-05, "loss": 0.017966341972351075, "step": 18290 }, { "epoch": 29.326923076923077, "grad_norm": 0.29824718832969666, "learning_rate": 2.6187395903534774e-05, "loss": 0.017573973536491393, "step": 18300 }, { "epoch": 29.34294871794872, "grad_norm": 0.22585076093673706, "learning_rate": 2.6154106423111982e-05, "loss": 0.017754948139190672, "step": 18310 }, { "epoch": 29.358974358974358, "grad_norm": 0.21738284826278687, "learning_rate": 2.612082175665227e-05, 
"loss": 0.016331906616687774, "step": 18320 }, { "epoch": 29.375, "grad_norm": 0.1846943348646164, "learning_rate": 2.608754194581855e-05, "loss": 0.016172941029071807, "step": 18330 }, { "epoch": 29.391025641025642, "grad_norm": 0.33803972601890564, "learning_rate": 2.605426703226769e-05, "loss": 0.018246188759803772, "step": 18340 }, { "epoch": 29.40705128205128, "grad_norm": 0.408325731754303, "learning_rate": 2.6020997057650376e-05, "loss": 0.017355483770370484, "step": 18350 }, { "epoch": 29.423076923076923, "grad_norm": 0.3037184178829193, "learning_rate": 2.5987732063611154e-05, "loss": 0.019724389910697936, "step": 18360 }, { "epoch": 29.439102564102566, "grad_norm": 0.25419944524765015, "learning_rate": 2.595447209178832e-05, "loss": 0.017869453132152557, "step": 18370 }, { "epoch": 29.455128205128204, "grad_norm": 0.14279505610466003, "learning_rate": 2.5921217183813863e-05, "loss": 0.016752585768699646, "step": 18380 }, { "epoch": 29.471153846153847, "grad_norm": 0.27937549352645874, "learning_rate": 2.588796738131347e-05, "loss": 0.016731806099414825, "step": 18390 }, { "epoch": 29.487179487179485, "grad_norm": 0.23020735383033752, "learning_rate": 2.585472272590643e-05, "loss": 0.018210668861865998, "step": 18400 }, { "epoch": 29.503205128205128, "grad_norm": 0.21507574617862701, "learning_rate": 2.5821483259205567e-05, "loss": 0.016591228544712067, "step": 18410 }, { "epoch": 29.51923076923077, "grad_norm": 0.2839590311050415, "learning_rate": 2.5788249022817217e-05, "loss": 0.02282397300004959, "step": 18420 }, { "epoch": 29.53525641025641, "grad_norm": 0.3781571090221405, "learning_rate": 2.5755020058341198e-05, "loss": 0.01892540752887726, "step": 18430 }, { "epoch": 29.55128205128205, "grad_norm": 0.21698908507823944, "learning_rate": 2.5721796407370678e-05, "loss": 0.017815405130386354, "step": 18440 }, { "epoch": 29.567307692307693, "grad_norm": 0.18309113383293152, "learning_rate": 2.5688578111492226e-05, "loss": 0.01754446029663086, "step": 
18450 }, { "epoch": 29.583333333333332, "grad_norm": 0.33517950773239136, "learning_rate": 2.5655365212285687e-05, "loss": 0.019788458943367004, "step": 18460 }, { "epoch": 29.599358974358974, "grad_norm": 0.2527366876602173, "learning_rate": 2.562215775132412e-05, "loss": 0.018558236956596374, "step": 18470 }, { "epoch": 29.615384615384617, "grad_norm": 0.16812407970428467, "learning_rate": 2.558895577017383e-05, "loss": 0.01707214266061783, "step": 18480 }, { "epoch": 29.631410256410255, "grad_norm": 0.22566336393356323, "learning_rate": 2.5555759310394236e-05, "loss": 0.017504777014255523, "step": 18490 }, { "epoch": 29.647435897435898, "grad_norm": 0.22501593828201294, "learning_rate": 2.552256841353785e-05, "loss": 0.018394570052623748, "step": 18500 }, { "epoch": 29.66346153846154, "grad_norm": 0.2793596088886261, "learning_rate": 2.5489383121150198e-05, "loss": 0.017787255346775055, "step": 18510 }, { "epoch": 29.67948717948718, "grad_norm": 0.25451284646987915, "learning_rate": 2.545620347476983e-05, "loss": 0.017816829681396484, "step": 18520 }, { "epoch": 29.69551282051282, "grad_norm": 0.182916522026062, "learning_rate": 2.5423029515928222e-05, "loss": 0.015044975280761718, "step": 18530 }, { "epoch": 29.71153846153846, "grad_norm": 0.28038299083709717, "learning_rate": 2.53898612861497e-05, "loss": 0.018266837298870086, "step": 18540 }, { "epoch": 29.727564102564102, "grad_norm": 0.20070935785770416, "learning_rate": 2.535669882695143e-05, "loss": 0.017307488620281218, "step": 18550 }, { "epoch": 29.743589743589745, "grad_norm": 0.16599182784557343, "learning_rate": 2.5323542179843398e-05, "loss": 0.016507013142108916, "step": 18560 }, { "epoch": 29.759615384615383, "grad_norm": 0.31842130422592163, "learning_rate": 2.529039138632824e-05, "loss": 0.01756383329629898, "step": 18570 }, { "epoch": 29.775641025641026, "grad_norm": 0.21160408854484558, "learning_rate": 2.5257246487901334e-05, "loss": 0.017595043778419493, "step": 18580 }, { "epoch": 
29.791666666666668, "grad_norm": 0.3422801196575165, "learning_rate": 2.5224107526050642e-05, "loss": 0.01520456224679947, "step": 18590 }, { "epoch": 29.807692307692307, "grad_norm": 0.25898370146751404, "learning_rate": 2.5190974542256692e-05, "loss": 0.01841132342815399, "step": 18600 }, { "epoch": 29.82371794871795, "grad_norm": 0.1891406774520874, "learning_rate": 2.5157847577992546e-05, "loss": 0.01658058613538742, "step": 18610 }, { "epoch": 29.83974358974359, "grad_norm": 0.23026420176029205, "learning_rate": 2.5124726674723743e-05, "loss": 0.01813756823539734, "step": 18620 }, { "epoch": 29.85576923076923, "grad_norm": 0.2280377894639969, "learning_rate": 2.5091611873908197e-05, "loss": 0.017634110152721406, "step": 18630 }, { "epoch": 29.871794871794872, "grad_norm": 0.15516796708106995, "learning_rate": 2.5058503216996203e-05, "loss": 0.017671449482440947, "step": 18640 }, { "epoch": 29.88782051282051, "grad_norm": 0.22381436824798584, "learning_rate": 2.5025400745430386e-05, "loss": 0.020836320519447327, "step": 18650 }, { "epoch": 29.903846153846153, "grad_norm": 0.34390851855278015, "learning_rate": 2.499230450064559e-05, "loss": 0.016714659333229066, "step": 18660 }, { "epoch": 29.919871794871796, "grad_norm": 0.1868579089641571, "learning_rate": 2.4959214524068895e-05, "loss": 0.01652257442474365, "step": 18670 }, { "epoch": 29.935897435897434, "grad_norm": 0.33658018708229065, "learning_rate": 2.492613085711953e-05, "loss": 0.019786456227302553, "step": 18680 }, { "epoch": 29.951923076923077, "grad_norm": 0.3263466954231262, "learning_rate": 2.4893053541208803e-05, "loss": 0.01672910451889038, "step": 18690 }, { "epoch": 29.96794871794872, "grad_norm": 0.1933736503124237, "learning_rate": 2.4859982617740095e-05, "loss": 0.016717056930065154, "step": 18700 }, { "epoch": 29.983974358974358, "grad_norm": 0.19713625311851501, "learning_rate": 2.4826918128108804e-05, "loss": 0.015745478868484496, "step": 18710 }, { "epoch": 30.0, "grad_norm": 
0.3783319890499115, "learning_rate": 2.4793860113702224e-05, "loss": 0.01757550835609436, "step": 18720 }, { "epoch": 30.0, "eval_accuracy_background": NaN, "eval_accuracy_crop": 0.9938032339129165, "eval_iou_background": 0.0, "eval_iou_crop": 0.9938032339129165, "eval_loss": 0.019861619919538498, "eval_mean_accuracy": 0.9938032339129165, "eval_mean_iou": 0.49690161695645824, "eval_overall_accuracy": 0.9938032339129165, "eval_runtime": 34.6402, "eval_samples_per_second": 25.433, "eval_steps_per_second": 3.204, "step": 18720 }, { "epoch": 30.016025641025642, "grad_norm": 0.16413821280002594, "learning_rate": 2.4760808615899574e-05, "loss": 0.01777813136577606, "step": 18730 }, { "epoch": 30.03205128205128, "grad_norm": 0.31454184651374817, "learning_rate": 2.4727763676071933e-05, "loss": 0.017831768095493316, "step": 18740 }, { "epoch": 30.048076923076923, "grad_norm": 0.17286618053913116, "learning_rate": 2.469472533558212e-05, "loss": 0.01538146287202835, "step": 18750 }, { "epoch": 30.064102564102566, "grad_norm": 0.2103702574968338, "learning_rate": 2.4661693635784752e-05, "loss": 0.01683357059955597, "step": 18760 }, { "epoch": 30.080128205128204, "grad_norm": 0.26402515172958374, "learning_rate": 2.4628668618026087e-05, "loss": 0.01848662793636322, "step": 18770 }, { "epoch": 30.096153846153847, "grad_norm": 0.18548455834388733, "learning_rate": 2.459565032364406e-05, "loss": 0.01583154797554016, "step": 18780 }, { "epoch": 30.112179487179485, "grad_norm": 0.3564921021461487, "learning_rate": 2.4562638793968147e-05, "loss": 0.018435961008071898, "step": 18790 }, { "epoch": 30.128205128205128, "grad_norm": 0.25918951630592346, "learning_rate": 2.4529634070319397e-05, "loss": 0.01644742488861084, "step": 18800 }, { "epoch": 30.14423076923077, "grad_norm": 0.12722241878509521, "learning_rate": 2.4496636194010315e-05, "loss": 0.015885476768016816, "step": 18810 }, { "epoch": 30.16025641025641, "grad_norm": 0.32097119092941284, "learning_rate": 
2.446364520634483e-05, "loss": 0.017591801285743714, "step": 18820 }, { "epoch": 30.17628205128205, "grad_norm": 0.3098345696926117, "learning_rate": 2.4430661148618263e-05, "loss": 0.01817426085472107, "step": 18830 }, { "epoch": 30.192307692307693, "grad_norm": 0.10627975314855576, "learning_rate": 2.4397684062117274e-05, "loss": 0.01692270040512085, "step": 18840 }, { "epoch": 30.208333333333332, "grad_norm": 0.30470719933509827, "learning_rate": 2.436471398811977e-05, "loss": 0.023598943650722504, "step": 18850 }, { "epoch": 30.224358974358974, "grad_norm": 0.18903091549873352, "learning_rate": 2.433175096789488e-05, "loss": 0.018566161394119263, "step": 18860 }, { "epoch": 30.240384615384617, "grad_norm": 0.24132509529590607, "learning_rate": 2.4298795042702934e-05, "loss": 0.01988525241613388, "step": 18870 }, { "epoch": 30.256410256410255, "grad_norm": 0.21019354462623596, "learning_rate": 2.426584625379534e-05, "loss": 0.015944890677928925, "step": 18880 }, { "epoch": 30.272435897435898, "grad_norm": 0.1862611621618271, "learning_rate": 2.4232904642414613e-05, "loss": 0.019066677987575532, "step": 18890 }, { "epoch": 30.28846153846154, "grad_norm": 0.14646784961223602, "learning_rate": 2.4199970249794265e-05, "loss": 0.017621082067489625, "step": 18900 }, { "epoch": 30.30448717948718, "grad_norm": 0.16661445796489716, "learning_rate": 2.416704311715875e-05, "loss": 0.018165433406829835, "step": 18910 }, { "epoch": 30.32051282051282, "grad_norm": 0.1440495401620865, "learning_rate": 2.4134123285723477e-05, "loss": 0.017569950222969054, "step": 18920 }, { "epoch": 30.33653846153846, "grad_norm": 0.1688620001077652, "learning_rate": 2.4101210796694704e-05, "loss": 0.01924261450767517, "step": 18930 }, { "epoch": 30.352564102564102, "grad_norm": 0.23648303747177124, "learning_rate": 2.406830569126947e-05, "loss": 0.016629470884799956, "step": 18940 }, { "epoch": 30.368589743589745, "grad_norm": 0.18785063922405243, "learning_rate": 2.4035408010635602e-05, 
"loss": 0.016591790318489074, "step": 18950 }, { "epoch": 30.384615384615383, "grad_norm": 0.27757275104522705, "learning_rate": 2.4002517795971635e-05, "loss": 0.015801700949668884, "step": 18960 }, { "epoch": 30.400641025641026, "grad_norm": 0.23185785114765167, "learning_rate": 2.396963508844672e-05, "loss": 0.017946965992450714, "step": 18970 }, { "epoch": 30.416666666666668, "grad_norm": 0.13707129657268524, "learning_rate": 2.393675992922066e-05, "loss": 0.017562000453472136, "step": 18980 }, { "epoch": 30.432692307692307, "grad_norm": 0.2708001732826233, "learning_rate": 2.390389235944379e-05, "loss": 0.017391282320022582, "step": 18990 }, { "epoch": 30.44871794871795, "grad_norm": 0.3154091238975525, "learning_rate": 2.3871032420256935e-05, "loss": 0.017524364590644836, "step": 19000 }, { "epoch": 30.46474358974359, "grad_norm": 0.11177410930395126, "learning_rate": 2.383818015279137e-05, "loss": 0.016034242510795594, "step": 19010 }, { "epoch": 30.48076923076923, "grad_norm": 0.3845656216144562, "learning_rate": 2.38053355981688e-05, "loss": 0.01779812276363373, "step": 19020 }, { "epoch": 30.496794871794872, "grad_norm": 0.17460379004478455, "learning_rate": 2.3772498797501216e-05, "loss": 0.016889259219169617, "step": 19030 }, { "epoch": 30.51282051282051, "grad_norm": 0.3242394030094147, "learning_rate": 2.3739669791890964e-05, "loss": 0.018510475754737854, "step": 19040 }, { "epoch": 30.528846153846153, "grad_norm": 0.20849762856960297, "learning_rate": 2.3706848622430604e-05, "loss": 0.021446534991264345, "step": 19050 }, { "epoch": 30.544871794871796, "grad_norm": 0.21807478368282318, "learning_rate": 2.367403533020287e-05, "loss": 0.01776108145713806, "step": 19060 }, { "epoch": 30.560897435897434, "grad_norm": 0.25212904810905457, "learning_rate": 2.3641229956280664e-05, "loss": 0.016906160116195678, "step": 19070 }, { "epoch": 30.576923076923077, "grad_norm": 0.281026691198349, "learning_rate": 2.360843254172699e-05, "loss": 0.01904033124446869, 
"step": 19080 }, { "epoch": 30.59294871794872, "grad_norm": 0.23205946385860443, "learning_rate": 2.3575643127594844e-05, "loss": 0.019401495158672333, "step": 19090 }, { "epoch": 30.608974358974358, "grad_norm": 0.27174898982048035, "learning_rate": 2.354286175492723e-05, "loss": 0.017321227490901946, "step": 19100 }, { "epoch": 30.625, "grad_norm": 0.20251794159412384, "learning_rate": 2.35100884647571e-05, "loss": 0.016313399374485015, "step": 19110 }, { "epoch": 30.641025641025642, "grad_norm": 0.20957690477371216, "learning_rate": 2.3477323298107272e-05, "loss": 0.01830974668264389, "step": 19120 }, { "epoch": 30.65705128205128, "grad_norm": 0.24208217859268188, "learning_rate": 2.3444566295990394e-05, "loss": 0.018986716866493225, "step": 19130 }, { "epoch": 30.673076923076923, "grad_norm": 0.21137221157550812, "learning_rate": 2.34118174994089e-05, "loss": 0.01549697071313858, "step": 19140 }, { "epoch": 30.689102564102566, "grad_norm": 0.1292686015367508, "learning_rate": 2.337907694935497e-05, "loss": 0.01448969691991806, "step": 19150 }, { "epoch": 30.705128205128204, "grad_norm": 0.5369989275932312, "learning_rate": 2.3346344686810425e-05, "loss": 0.017180973291397096, "step": 19160 }, { "epoch": 30.721153846153847, "grad_norm": 0.15116402506828308, "learning_rate": 2.3313620752746743e-05, "loss": 0.01746930181980133, "step": 19170 }, { "epoch": 30.73717948717949, "grad_norm": 0.18977545201778412, "learning_rate": 2.328090518812497e-05, "loss": 0.017718806862831116, "step": 19180 }, { "epoch": 30.753205128205128, "grad_norm": 0.24602241814136505, "learning_rate": 2.3248198033895656e-05, "loss": 0.01417199969291687, "step": 19190 }, { "epoch": 30.76923076923077, "grad_norm": 0.17947567999362946, "learning_rate": 2.321549933099885e-05, "loss": 0.01706766039133072, "step": 19200 }, { "epoch": 30.78525641025641, "grad_norm": 0.264165461063385, "learning_rate": 2.3182809120364025e-05, "loss": 0.018426090478897095, "step": 19210 }, { "epoch": 
30.80128205128205, "grad_norm": 0.2544117569923401, "learning_rate": 2.3150127442909995e-05, "loss": 0.01882348358631134, "step": 19220 }, { "epoch": 30.817307692307693, "grad_norm": 0.3076484203338623, "learning_rate": 2.3117454339544905e-05, "loss": 0.020687735080718993, "step": 19230 }, { "epoch": 30.833333333333332, "grad_norm": 0.11014673858880997, "learning_rate": 2.3084789851166193e-05, "loss": 0.017028237879276275, "step": 19240 }, { "epoch": 30.849358974358974, "grad_norm": 0.18839921057224274, "learning_rate": 2.305213401866047e-05, "loss": 0.01736733615398407, "step": 19250 }, { "epoch": 30.865384615384617, "grad_norm": 0.23541928827762604, "learning_rate": 2.301948688290355e-05, "loss": 0.018210001289844513, "step": 19260 }, { "epoch": 30.881410256410255, "grad_norm": 0.3167148530483246, "learning_rate": 2.2986848484760347e-05, "loss": 0.01704998463392258, "step": 19270 }, { "epoch": 30.897435897435898, "grad_norm": 0.16180235147476196, "learning_rate": 2.295421886508482e-05, "loss": 0.016264955699443816, "step": 19280 }, { "epoch": 30.91346153846154, "grad_norm": 0.15794259309768677, "learning_rate": 2.2921598064719967e-05, "loss": 0.015913558006286622, "step": 19290 }, { "epoch": 30.92948717948718, "grad_norm": 0.23684170842170715, "learning_rate": 2.288898612449775e-05, "loss": 0.016857190430164336, "step": 19300 }, { "epoch": 30.94551282051282, "grad_norm": 0.2018110305070877, "learning_rate": 2.2856383085239006e-05, "loss": 0.016819515824317934, "step": 19310 }, { "epoch": 30.96153846153846, "grad_norm": 0.1599140614271164, "learning_rate": 2.2823788987753455e-05, "loss": 0.01580142229795456, "step": 19320 }, { "epoch": 30.977564102564102, "grad_norm": 0.24981378018856049, "learning_rate": 2.279120387283964e-05, "loss": 0.015910467505455016, "step": 19330 }, { "epoch": 30.993589743589745, "grad_norm": 0.28735724091529846, "learning_rate": 2.2758627781284816e-05, "loss": 0.017541563510894774, "step": 19340 }, { "epoch": 31.0, 
"eval_accuracy_background": NaN, "eval_accuracy_crop": 0.9944844391959217, "eval_iou_background": 0.0, "eval_iou_crop": 0.9944844391959217, "eval_loss": 0.019985564053058624, "eval_mean_accuracy": 0.9944844391959217, "eval_mean_iou": 0.49724221959796083, "eval_overall_accuracy": 0.9944844391959217, "eval_runtime": 36.7889, "eval_samples_per_second": 23.947, "eval_steps_per_second": 3.017, "step": 19344 }, { "epoch": 31.009615384615383, "grad_norm": 0.2439119815826416, "learning_rate": 2.2726060753864988e-05, "loss": 0.018191331624984743, "step": 19350 }, { "epoch": 31.025641025641026, "grad_norm": 0.3016776442527771, "learning_rate": 2.2693502831344777e-05, "loss": 0.020268651843070983, "step": 19360 }, { "epoch": 31.041666666666668, "grad_norm": 0.32880714535713196, "learning_rate": 2.2660954054477455e-05, "loss": 0.017281071841716768, "step": 19370 }, { "epoch": 31.057692307692307, "grad_norm": 0.2541023790836334, "learning_rate": 2.262841446400479e-05, "loss": 0.019116970896720886, "step": 19380 }, { "epoch": 31.07371794871795, "grad_norm": 0.18661518394947052, "learning_rate": 2.2595884100657097e-05, "loss": 0.019770850241184235, "step": 19390 }, { "epoch": 31.08974358974359, "grad_norm": 0.20742931962013245, "learning_rate": 2.2563363005153122e-05, "loss": 0.017932724952697755, "step": 19400 }, { "epoch": 31.10576923076923, "grad_norm": 0.17395730316638947, "learning_rate": 2.253085121819999e-05, "loss": 0.016612696647644042, "step": 19410 }, { "epoch": 31.121794871794872, "grad_norm": 0.18982364237308502, "learning_rate": 2.2498348780493213e-05, "loss": 0.017651978135108947, "step": 19420 }, { "epoch": 31.137820512820515, "grad_norm": 0.21119636297225952, "learning_rate": 2.2465855732716593e-05, "loss": 0.01741878390312195, "step": 19430 }, { "epoch": 31.153846153846153, "grad_norm": 0.14208346605300903, "learning_rate": 2.2433372115542156e-05, "loss": 0.016079866886138917, "step": 19440 }, { "epoch": 31.169871794871796, "grad_norm": 0.25211822986602783, 
"learning_rate": 2.2400897969630124e-05, "loss": 0.018734976649284363, "step": 19450 }, { "epoch": 31.185897435897434, "grad_norm": 0.2365114986896515, "learning_rate": 2.236843333562891e-05, "loss": 0.018830107152462007, "step": 19460 }, { "epoch": 31.201923076923077, "grad_norm": 0.17614121735095978, "learning_rate": 2.2335978254174947e-05, "loss": 0.0184120312333107, "step": 19470 }, { "epoch": 31.21794871794872, "grad_norm": 0.20924115180969238, "learning_rate": 2.2303532765892776e-05, "loss": 0.016105358302593232, "step": 19480 }, { "epoch": 31.233974358974358, "grad_norm": 0.24466224014759064, "learning_rate": 2.2271096911394915e-05, "loss": 0.017399719357490538, "step": 19490 }, { "epoch": 31.25, "grad_norm": 0.25696849822998047, "learning_rate": 2.2238670731281777e-05, "loss": 0.019315233826637267, "step": 19500 }, { "epoch": 31.266025641025642, "grad_norm": 0.18336695432662964, "learning_rate": 2.220625426614172e-05, "loss": 0.019180014729499817, "step": 19510 }, { "epoch": 31.28205128205128, "grad_norm": 0.16215188801288605, "learning_rate": 2.217384755655094e-05, "loss": 0.017604315280914308, "step": 19520 }, { "epoch": 31.298076923076923, "grad_norm": 0.20540721714496613, "learning_rate": 2.2141450643073385e-05, "loss": 0.016566553711891176, "step": 19530 }, { "epoch": 31.314102564102566, "grad_norm": 0.1780283898115158, "learning_rate": 2.2109063566260757e-05, "loss": 0.017858338356018067, "step": 19540 }, { "epoch": 31.330128205128204, "grad_norm": 0.18861666321754456, "learning_rate": 2.207668636665247e-05, "loss": 0.017510268092155456, "step": 19550 }, { "epoch": 31.346153846153847, "grad_norm": 0.19381392002105713, "learning_rate": 2.2044319084775527e-05, "loss": 0.018178185820579527, "step": 19560 }, { "epoch": 31.362179487179485, "grad_norm": 0.38980987668037415, "learning_rate": 2.2011961761144564e-05, "loss": 0.01890987753868103, "step": 19570 }, { "epoch": 31.378205128205128, "grad_norm": 0.19346356391906738, "learning_rate": 
2.1979614436261738e-05, "loss": 0.016417838633060455, "step": 19580 }, { "epoch": 31.39423076923077, "grad_norm": 0.12977997958660126, "learning_rate": 2.1947277150616657e-05, "loss": 0.01629534810781479, "step": 19590 }, { "epoch": 31.41025641025641, "grad_norm": 0.1743568629026413, "learning_rate": 2.1914949944686403e-05, "loss": 0.016549454629421236, "step": 19600 }, { "epoch": 31.42628205128205, "grad_norm": 0.2148018628358841, "learning_rate": 2.1882632858935444e-05, "loss": 0.01895429939031601, "step": 19610 }, { "epoch": 31.442307692307693, "grad_norm": 0.18487632274627686, "learning_rate": 2.1850325933815543e-05, "loss": 0.01637043058872223, "step": 19620 }, { "epoch": 31.458333333333332, "grad_norm": 0.13393473625183105, "learning_rate": 2.1818029209765767e-05, "loss": 0.016339100897312164, "step": 19630 }, { "epoch": 31.474358974358974, "grad_norm": 0.20370787382125854, "learning_rate": 2.178574272721243e-05, "loss": 0.018028752505779268, "step": 19640 }, { "epoch": 31.490384615384617, "grad_norm": 0.25137999653816223, "learning_rate": 2.1753466526568994e-05, "loss": 0.01807292252779007, "step": 19650 }, { "epoch": 31.506410256410255, "grad_norm": 0.16452151536941528, "learning_rate": 2.172120064823607e-05, "loss": 0.016776645183563234, "step": 19660 }, { "epoch": 31.522435897435898, "grad_norm": 0.24991628527641296, "learning_rate": 2.1688945132601353e-05, "loss": 0.01746867746114731, "step": 19670 }, { "epoch": 31.53846153846154, "grad_norm": 0.1564711034297943, "learning_rate": 2.165670002003956e-05, "loss": 0.017263816297054292, "step": 19680 }, { "epoch": 31.55448717948718, "grad_norm": 0.3326375484466553, "learning_rate": 2.1624465350912364e-05, "loss": 0.01697736531496048, "step": 19690 }, { "epoch": 31.57051282051282, "grad_norm": 0.20660139620304108, "learning_rate": 2.1592241165568414e-05, "loss": 0.018276007473468782, "step": 19700 }, { "epoch": 31.58653846153846, "grad_norm": 0.40372759103775024, "learning_rate": 2.15600275043432e-05, "loss": 
0.02003702074289322, "step": 19710 }, { "epoch": 31.602564102564102, "grad_norm": 0.15206032991409302, "learning_rate": 2.152782440755904e-05, "loss": 0.017818894982337952, "step": 19720 }, { "epoch": 31.618589743589745, "grad_norm": 0.13178607821464539, "learning_rate": 2.1495631915525037e-05, "loss": 0.016516712307929993, "step": 19730 }, { "epoch": 31.634615384615383, "grad_norm": 0.21214501559734344, "learning_rate": 2.146345006853704e-05, "loss": 0.01590079963207245, "step": 19740 }, { "epoch": 31.650641025641026, "grad_norm": 0.32824045419692993, "learning_rate": 2.143127890687754e-05, "loss": 0.019777245819568634, "step": 19750 }, { "epoch": 31.666666666666668, "grad_norm": 0.234486922621727, "learning_rate": 2.139911847081565e-05, "loss": 0.017515866458415984, "step": 19760 }, { "epoch": 31.682692307692307, "grad_norm": 0.22934257984161377, "learning_rate": 2.13669688006071e-05, "loss": 0.016662122309207918, "step": 19770 }, { "epoch": 31.69871794871795, "grad_norm": 0.1262832134962082, "learning_rate": 2.13348299364941e-05, "loss": 0.015176795423030853, "step": 19780 }, { "epoch": 31.71474358974359, "grad_norm": 0.2548614740371704, "learning_rate": 2.1302701918705366e-05, "loss": 0.016658619046211243, "step": 19790 }, { "epoch": 31.73076923076923, "grad_norm": 0.22975604236125946, "learning_rate": 2.127058478745601e-05, "loss": 0.016251307725906373, "step": 19800 }, { "epoch": 31.746794871794872, "grad_norm": 0.1711985468864441, "learning_rate": 2.1238478582947536e-05, "loss": 0.016480182111263276, "step": 19810 }, { "epoch": 31.76282051282051, "grad_norm": 0.18895778059959412, "learning_rate": 2.1206383345367758e-05, "loss": 0.015181083977222443, "step": 19820 }, { "epoch": 31.778846153846153, "grad_norm": 0.291815847158432, "learning_rate": 2.117429911489079e-05, "loss": 0.016021300852298737, "step": 19830 }, { "epoch": 31.794871794871796, "grad_norm": 0.22896820306777954, "learning_rate": 2.1142225931676927e-05, "loss": 0.017041173577308655, "step": 
19840 }, { "epoch": 31.810897435897434, "grad_norm": 0.302873432636261, "learning_rate": 2.111016383587266e-05, "loss": 0.019928278028964998, "step": 19850 }, { "epoch": 31.826923076923077, "grad_norm": 0.164812833070755, "learning_rate": 2.107811286761061e-05, "loss": 0.015497127175331115, "step": 19860 }, { "epoch": 31.84294871794872, "grad_norm": 0.16975809633731842, "learning_rate": 2.1046073067009445e-05, "loss": 0.016188402473926545, "step": 19870 }, { "epoch": 31.858974358974358, "grad_norm": 0.36495378613471985, "learning_rate": 2.1014044474173875e-05, "loss": 0.017252467572689056, "step": 19880 }, { "epoch": 31.875, "grad_norm": 0.20239555835723877, "learning_rate": 2.0982027129194575e-05, "loss": 0.01678476929664612, "step": 19890 }, { "epoch": 31.891025641025642, "grad_norm": 0.1358657330274582, "learning_rate": 2.0950021072148134e-05, "loss": 0.015673230588436126, "step": 19900 }, { "epoch": 31.90705128205128, "grad_norm": 0.2856846749782562, "learning_rate": 2.091802634309701e-05, "loss": 0.016705143451690673, "step": 19910 }, { "epoch": 31.923076923076923, "grad_norm": 0.2075328826904297, "learning_rate": 2.088604298208952e-05, "loss": 0.016370122134685517, "step": 19920 }, { "epoch": 31.939102564102566, "grad_norm": 0.35649049282073975, "learning_rate": 2.0854071029159677e-05, "loss": 0.017739146947860718, "step": 19930 }, { "epoch": 31.955128205128204, "grad_norm": 0.13242048025131226, "learning_rate": 2.0822110524327288e-05, "loss": 0.014195217192173004, "step": 19940 }, { "epoch": 31.971153846153847, "grad_norm": 0.24350504577159882, "learning_rate": 2.079016150759778e-05, "loss": 0.015973867475986482, "step": 19950 }, { "epoch": 31.98717948717949, "grad_norm": 0.213619664311409, "learning_rate": 2.0758224018962245e-05, "loss": 0.015781714022159575, "step": 19960 }, { "epoch": 32.0, "eval_accuracy_background": NaN, "eval_accuracy_crop": 0.9944120790817592, "eval_iou_background": 0.0, "eval_iou_crop": 0.9944120790817592, "eval_loss": 
0.0200140792876482, "eval_mean_accuracy": 0.9944120790817592, "eval_mean_iou": 0.4972060395408796, "eval_overall_accuracy": 0.9944120790817592, "eval_runtime": 33.8312, "eval_samples_per_second": 26.041, "eval_steps_per_second": 3.281, "step": 19968 }, { "epoch": 32.00320512820513, "grad_norm": 0.20292647182941437, "learning_rate": 2.0726298098397293e-05, "loss": 0.01762781888246536, "step": 19970 }, { "epoch": 32.01923076923077, "grad_norm": 0.2187168300151825, "learning_rate": 2.0694383785865096e-05, "loss": 0.018177005648612975, "step": 19980 }, { "epoch": 32.03525641025641, "grad_norm": 0.1956453174352646, "learning_rate": 2.0662481121313287e-05, "loss": 0.015864048898220063, "step": 19990 }, { "epoch": 32.05128205128205, "grad_norm": 0.198273703455925, "learning_rate": 2.0630590144674892e-05, "loss": 0.01622176021337509, "step": 20000 }, { "epoch": 32.06730769230769, "grad_norm": 0.31272777915000916, "learning_rate": 2.0598710895868334e-05, "loss": 0.01678209602832794, "step": 20010 }, { "epoch": 32.083333333333336, "grad_norm": 0.1479337066411972, "learning_rate": 2.0566843414797376e-05, "loss": 0.016315804421901704, "step": 20020 }, { "epoch": 32.09935897435897, "grad_norm": 0.27872732281684875, "learning_rate": 2.0534987741350998e-05, "loss": 0.015953147411346437, "step": 20030 }, { "epoch": 32.11538461538461, "grad_norm": 0.3238850235939026, "learning_rate": 2.050314391540343e-05, "loss": 0.018918916583061218, "step": 20040 }, { "epoch": 32.131410256410255, "grad_norm": 0.15245018899440765, "learning_rate": 2.0471311976814087e-05, "loss": 0.017076580226421355, "step": 20050 }, { "epoch": 32.1474358974359, "grad_norm": 0.24578693509101868, "learning_rate": 2.043949196542746e-05, "loss": 0.017965242266654968, "step": 20060 }, { "epoch": 32.16346153846154, "grad_norm": 0.25664666295051575, "learning_rate": 2.0407683921073162e-05, "loss": 0.015073423087596894, "step": 20070 }, { "epoch": 32.17948717948718, "grad_norm": 0.3077508509159088, "learning_rate": 
2.0375887883565804e-05, "loss": 0.017870493233203888, "step": 20080 }, { "epoch": 32.19551282051282, "grad_norm": 0.2546335458755493, "learning_rate": 2.034410389270494e-05, "loss": 0.01735943853855133, "step": 20090 }, { "epoch": 32.21153846153846, "grad_norm": 0.21156133711338043, "learning_rate": 2.0312331988275088e-05, "loss": 0.016164271533489226, "step": 20100 }, { "epoch": 32.2275641025641, "grad_norm": 0.3001927435398102, "learning_rate": 2.0280572210045635e-05, "loss": 0.018456773459911348, "step": 20110 }, { "epoch": 32.243589743589745, "grad_norm": 0.31825509667396545, "learning_rate": 2.0248824597770755e-05, "loss": 0.016630133986473082, "step": 20120 }, { "epoch": 32.25961538461539, "grad_norm": 0.2378247082233429, "learning_rate": 2.0217089191189414e-05, "loss": 0.01976967453956604, "step": 20130 }, { "epoch": 32.27564102564103, "grad_norm": 0.18928292393684387, "learning_rate": 2.0185366030025318e-05, "loss": 0.017893411219120026, "step": 20140 }, { "epoch": 32.291666666666664, "grad_norm": 0.2184019684791565, "learning_rate": 2.0153655153986802e-05, "loss": 0.01799514442682266, "step": 20150 }, { "epoch": 32.30769230769231, "grad_norm": 0.21833495795726776, "learning_rate": 2.012195660276686e-05, "loss": 0.017339520156383514, "step": 20160 }, { "epoch": 32.32371794871795, "grad_norm": 0.15069442987442017, "learning_rate": 2.0090270416043062e-05, "loss": 0.0172134205698967, "step": 20170 }, { "epoch": 32.33974358974359, "grad_norm": 0.2600660026073456, "learning_rate": 2.0058596633477466e-05, "loss": 0.016678699851036073, "step": 20180 }, { "epoch": 32.35576923076923, "grad_norm": 0.1994108259677887, "learning_rate": 2.0026935294716622e-05, "loss": 0.015475691854953766, "step": 20190 }, { "epoch": 32.37179487179487, "grad_norm": 0.21630628407001495, "learning_rate": 1.9995286439391534e-05, "loss": 0.017725011706352232, "step": 20200 }, { "epoch": 32.38782051282051, "grad_norm": 0.19832247495651245, "learning_rate": 1.9963650107117524e-05, "loss": 
0.01869271993637085, "step": 20210 }, { "epoch": 32.40384615384615, "grad_norm": 0.30244359374046326, "learning_rate": 1.993202633749427e-05, "loss": 0.016116544604301453, "step": 20220 }, { "epoch": 32.419871794871796, "grad_norm": 0.18958574533462524, "learning_rate": 1.9900415170105742e-05, "loss": 0.017023716866970063, "step": 20230 }, { "epoch": 32.43589743589744, "grad_norm": 0.17420433461666107, "learning_rate": 1.986881664452009e-05, "loss": 0.016739176213741304, "step": 20240 }, { "epoch": 32.45192307692308, "grad_norm": 0.2467166930437088, "learning_rate": 1.983723080028968e-05, "loss": 0.018593187630176543, "step": 20250 }, { "epoch": 32.467948717948715, "grad_norm": 0.28554871678352356, "learning_rate": 1.980565767695099e-05, "loss": 0.016727207601070403, "step": 20260 }, { "epoch": 32.48397435897436, "grad_norm": 0.1542108952999115, "learning_rate": 1.977409731402458e-05, "loss": 0.017496013641357423, "step": 20270 }, { "epoch": 32.5, "grad_norm": 0.25290215015411377, "learning_rate": 1.9742549751015017e-05, "loss": 0.017381764948368073, "step": 20280 }, { "epoch": 32.51602564102564, "grad_norm": 0.29386764764785767, "learning_rate": 1.9711015027410876e-05, "loss": 0.01782107949256897, "step": 20290 }, { "epoch": 32.532051282051285, "grad_norm": 0.16702982783317566, "learning_rate": 1.9679493182684652e-05, "loss": 0.019069728255271912, "step": 20300 }, { "epoch": 32.54807692307692, "grad_norm": 0.18099455535411835, "learning_rate": 1.964798425629269e-05, "loss": 0.017506472766399384, "step": 20310 }, { "epoch": 32.56410256410256, "grad_norm": 0.24803365767002106, "learning_rate": 1.9616488287675206e-05, "loss": 0.017393815517425536, "step": 20320 }, { "epoch": 32.580128205128204, "grad_norm": 0.3099249005317688, "learning_rate": 1.958500531625619e-05, "loss": 0.017757055163383485, "step": 20330 }, { "epoch": 32.59615384615385, "grad_norm": 0.26295241713523865, "learning_rate": 1.9553535381443338e-05, "loss": 0.019146613776683807, "step": 20340 }, { 
"epoch": 32.61217948717949, "grad_norm": 0.20898409187793732, "learning_rate": 1.9522078522628033e-05, "loss": 0.015631602704524995, "step": 20350 }, { "epoch": 32.62820512820513, "grad_norm": 0.26636362075805664, "learning_rate": 1.949063477918533e-05, "loss": 0.016522231698036193, "step": 20360 }, { "epoch": 32.64423076923077, "grad_norm": 0.1526382863521576, "learning_rate": 1.9459204190473802e-05, "loss": 0.01739153265953064, "step": 20370 }, { "epoch": 32.66025641025641, "grad_norm": 0.1401011198759079, "learning_rate": 1.942778679583562e-05, "loss": 0.019062598049640656, "step": 20380 }, { "epoch": 32.67628205128205, "grad_norm": 0.1234615296125412, "learning_rate": 1.9396382634596407e-05, "loss": 0.01573781669139862, "step": 20390 }, { "epoch": 32.69230769230769, "grad_norm": 0.1545998752117157, "learning_rate": 1.936499174606521e-05, "loss": 0.0160080686211586, "step": 20400 }, { "epoch": 32.708333333333336, "grad_norm": 0.2190633863210678, "learning_rate": 1.9333614169534487e-05, "loss": 0.017323097586631774, "step": 20410 }, { "epoch": 32.72435897435897, "grad_norm": 0.17037837207317352, "learning_rate": 1.9302249944280038e-05, "loss": 0.01716713011264801, "step": 20420 }, { "epoch": 32.74038461538461, "grad_norm": 0.1786699891090393, "learning_rate": 1.927089910956092e-05, "loss": 0.016597647964954377, "step": 20430 }, { "epoch": 32.756410256410255, "grad_norm": 0.17182430624961853, "learning_rate": 1.9239561704619446e-05, "loss": 0.01536448448896408, "step": 20440 }, { "epoch": 32.7724358974359, "grad_norm": 0.2537788450717926, "learning_rate": 1.920823776868114e-05, "loss": 0.016394978761672972, "step": 20450 }, { "epoch": 32.78846153846154, "grad_norm": 0.16524158418178558, "learning_rate": 1.917692734095462e-05, "loss": 0.015741994976997374, "step": 20460 }, { "epoch": 32.80448717948718, "grad_norm": 0.1434193253517151, "learning_rate": 1.9145630460631635e-05, "loss": 0.01593421697616577, "step": 20470 }, { "epoch": 32.82051282051282, "grad_norm": 
0.17073401808738708, "learning_rate": 1.9114347166886964e-05, "loss": 0.01650923937559128, "step": 20480 }, { "epoch": 32.83653846153846, "grad_norm": 0.3167841136455536, "learning_rate": 1.9083077498878368e-05, "loss": 0.01636764109134674, "step": 20490 }, { "epoch": 32.8525641025641, "grad_norm": 0.2121230661869049, "learning_rate": 1.9051821495746568e-05, "loss": 0.017221438884735107, "step": 20500 }, { "epoch": 32.868589743589745, "grad_norm": 0.21488206088542938, "learning_rate": 1.9020579196615186e-05, "loss": 0.019027261435985564, "step": 20510 }, { "epoch": 32.88461538461539, "grad_norm": 0.29599708318710327, "learning_rate": 1.898935064059067e-05, "loss": 0.017841733992099762, "step": 20520 }, { "epoch": 32.90064102564103, "grad_norm": 0.29984208941459656, "learning_rate": 1.8958135866762265e-05, "loss": 0.014774924516677857, "step": 20530 }, { "epoch": 32.916666666666664, "grad_norm": 0.12954168021678925, "learning_rate": 1.8926934914201985e-05, "loss": 0.017967401444911955, "step": 20540 }, { "epoch": 32.93269230769231, "grad_norm": 0.30942749977111816, "learning_rate": 1.889574782196455e-05, "loss": 0.017368033528327942, "step": 20550 }, { "epoch": 32.94871794871795, "grad_norm": 0.2094551920890808, "learning_rate": 1.8864574629087292e-05, "loss": 0.016224345564842223, "step": 20560 }, { "epoch": 32.96474358974359, "grad_norm": 0.14891894161701202, "learning_rate": 1.8833415374590166e-05, "loss": 0.016691766679286957, "step": 20570 }, { "epoch": 32.98076923076923, "grad_norm": 0.18832015991210938, "learning_rate": 1.88022700974757e-05, "loss": 0.0156965970993042, "step": 20580 }, { "epoch": 32.99679487179487, "grad_norm": 0.23059141635894775, "learning_rate": 1.877113883672889e-05, "loss": 0.01726524829864502, "step": 20590 }, { "epoch": 33.0, "eval_accuracy_background": NaN, "eval_accuracy_crop": 0.9942788803107692, "eval_iou_background": 0.0, "eval_iou_crop": 0.9942788803107692, "eval_loss": 0.020376013591885567, "eval_mean_accuracy": 
0.9942788803107692, "eval_mean_iou": 0.4971394401553846, "eval_overall_accuracy": 0.9942788803107692, "eval_runtime": 37.9451, "eval_samples_per_second": 23.218, "eval_steps_per_second": 2.925, "step": 20592 }, { "epoch": 33.01282051282051, "grad_norm": 0.39327189326286316, "learning_rate": 1.8740021631317218e-05, "loss": 0.017206089198589326, "step": 20600 }, { "epoch": 33.02884615384615, "grad_norm": 0.1914307326078415, "learning_rate": 1.870891852019056e-05, "loss": 0.016519653797149658, "step": 20610 }, { "epoch": 33.044871794871796, "grad_norm": 0.2121661901473999, "learning_rate": 1.867782954228113e-05, "loss": 0.015963272750377656, "step": 20620 }, { "epoch": 33.06089743589744, "grad_norm": 0.30400747060775757, "learning_rate": 1.864675473650349e-05, "loss": 0.016937200725078583, "step": 20630 }, { "epoch": 33.07692307692308, "grad_norm": 0.3372012972831726, "learning_rate": 1.8615694141754452e-05, "loss": 0.016758477687835692, "step": 20640 }, { "epoch": 33.092948717948715, "grad_norm": 0.24343878030776978, "learning_rate": 1.8584647796913012e-05, "loss": 0.016826589405536652, "step": 20650 }, { "epoch": 33.10897435897436, "grad_norm": 0.15249626338481903, "learning_rate": 1.8553615740840348e-05, "loss": 0.015092889964580535, "step": 20660 }, { "epoch": 33.125, "grad_norm": 0.19110490381717682, "learning_rate": 1.8522598012379775e-05, "loss": 0.01849246323108673, "step": 20670 }, { "epoch": 33.14102564102564, "grad_norm": 0.2535833716392517, "learning_rate": 1.8491594650356628e-05, "loss": 0.015939351916313172, "step": 20680 }, { "epoch": 33.157051282051285, "grad_norm": 0.22929275035858154, "learning_rate": 1.84606056935783e-05, "loss": 0.017489184439182282, "step": 20690 }, { "epoch": 33.17307692307692, "grad_norm": 0.18164564669132233, "learning_rate": 1.842963118083413e-05, "loss": 0.017232929170131684, "step": 20700 }, { "epoch": 33.18910256410256, "grad_norm": 0.27839672565460205, "learning_rate": 1.8398671150895387e-05, "loss": 0.01881408542394638, 
"step": 20710 }, { "epoch": 33.205128205128204, "grad_norm": 0.19318817555904388, "learning_rate": 1.8367725642515194e-05, "loss": 0.018310660123825075, "step": 20720 }, { "epoch": 33.22115384615385, "grad_norm": 0.2456924468278885, "learning_rate": 1.8336794694428538e-05, "loss": 0.016548536717891693, "step": 20730 }, { "epoch": 33.23717948717949, "grad_norm": 0.21222732961177826, "learning_rate": 1.8305878345352128e-05, "loss": 0.01763034164905548, "step": 20740 }, { "epoch": 33.25320512820513, "grad_norm": 0.2785607576370239, "learning_rate": 1.827497663398444e-05, "loss": 0.017290203273296355, "step": 20750 }, { "epoch": 33.26923076923077, "grad_norm": 0.21620532870292664, "learning_rate": 1.8244089599005614e-05, "loss": 0.016354984045028685, "step": 20760 }, { "epoch": 33.28525641025641, "grad_norm": 0.20393012464046478, "learning_rate": 1.8213217279077404e-05, "loss": 0.017573037743568422, "step": 20770 }, { "epoch": 33.30128205128205, "grad_norm": 0.36304399371147156, "learning_rate": 1.8182359712843167e-05, "loss": 0.017528465390205382, "step": 20780 }, { "epoch": 33.31730769230769, "grad_norm": 0.22492188215255737, "learning_rate": 1.815151693892779e-05, "loss": 0.017989248037338257, "step": 20790 }, { "epoch": 33.333333333333336, "grad_norm": 0.2187756448984146, "learning_rate": 1.812068899593764e-05, "loss": 0.01499476730823517, "step": 20800 }, { "epoch": 33.34935897435897, "grad_norm": 0.24590715765953064, "learning_rate": 1.80898759224605e-05, "loss": 0.015721192955970763, "step": 20810 }, { "epoch": 33.36538461538461, "grad_norm": 0.24541331827640533, "learning_rate": 1.805907775706558e-05, "loss": 0.016486722230911254, "step": 20820 }, { "epoch": 33.381410256410255, "grad_norm": 0.1651931256055832, "learning_rate": 1.8028294538303406e-05, "loss": 0.013715134561061859, "step": 20830 }, { "epoch": 33.3974358974359, "grad_norm": 0.19833572208881378, "learning_rate": 1.7997526304705794e-05, "loss": 0.017861519753932954, "step": 20840 }, { "epoch": 
33.41346153846154, "grad_norm": 0.3858487010002136, "learning_rate": 1.7966773094785797e-05, "loss": 0.01793777197599411, "step": 20850 }, { "epoch": 33.42948717948718, "grad_norm": 0.18505503237247467, "learning_rate": 1.7936034947037694e-05, "loss": 0.017797833681106566, "step": 20860 }, { "epoch": 33.44551282051282, "grad_norm": 0.22491593658924103, "learning_rate": 1.7905311899936866e-05, "loss": 0.016132043302059175, "step": 20870 }, { "epoch": 33.46153846153846, "grad_norm": 0.31241080164909363, "learning_rate": 1.7874603991939832e-05, "loss": 0.01723512411117554, "step": 20880 }, { "epoch": 33.4775641025641, "grad_norm": 0.22338682413101196, "learning_rate": 1.7843911261484143e-05, "loss": 0.01857716590166092, "step": 20890 }, { "epoch": 33.493589743589745, "grad_norm": 0.17251259088516235, "learning_rate": 1.7813233746988342e-05, "loss": 0.01721866726875305, "step": 20900 }, { "epoch": 33.50961538461539, "grad_norm": 0.29601749777793884, "learning_rate": 1.778257148685194e-05, "loss": 0.015932697057724, "step": 20910 }, { "epoch": 33.52564102564103, "grad_norm": 0.2978237271308899, "learning_rate": 1.775192451945537e-05, "loss": 0.02037028968334198, "step": 20920 }, { "epoch": 33.541666666666664, "grad_norm": 0.30537930130958557, "learning_rate": 1.772129288315988e-05, "loss": 0.017719516158103944, "step": 20930 }, { "epoch": 33.55769230769231, "grad_norm": 0.22285668551921844, "learning_rate": 1.7690676616307557e-05, "loss": 0.016088514029979704, "step": 20940 }, { "epoch": 33.57371794871795, "grad_norm": 0.2706559896469116, "learning_rate": 1.7660075757221257e-05, "loss": 0.01752999275922775, "step": 20950 }, { "epoch": 33.58974358974359, "grad_norm": 0.26175978779792786, "learning_rate": 1.762949034420452e-05, "loss": 0.01744309365749359, "step": 20960 }, { "epoch": 33.60576923076923, "grad_norm": 0.2228078991174698, "learning_rate": 1.7598920415541584e-05, "loss": 0.014882448315620422, "step": 20970 }, { "epoch": 33.62179487179487, "grad_norm": 
0.32473987340927124, "learning_rate": 1.7568366009497296e-05, "loss": 0.015201118588447571, "step": 20980 }, { "epoch": 33.63782051282051, "grad_norm": 0.304934561252594, "learning_rate": 1.753782716431704e-05, "loss": 0.01800795942544937, "step": 20990 }, { "epoch": 33.65384615384615, "grad_norm": 0.31276214122772217, "learning_rate": 1.750730391822677e-05, "loss": 0.01764012724161148, "step": 21000 }, { "epoch": 33.669871794871796, "grad_norm": 0.29347148537635803, "learning_rate": 1.74767963094329e-05, "loss": 0.015694963932037353, "step": 21010 }, { "epoch": 33.68589743589744, "grad_norm": 0.23848111927509308, "learning_rate": 1.7446304376122243e-05, "loss": 0.015342199802398681, "step": 21020 }, { "epoch": 33.70192307692308, "grad_norm": 0.2893546521663666, "learning_rate": 1.741582815646203e-05, "loss": 0.016236743330955504, "step": 21030 }, { "epoch": 33.717948717948715, "grad_norm": 0.16735322773456573, "learning_rate": 1.7385367688599806e-05, "loss": 0.01636255383491516, "step": 21040 }, { "epoch": 33.73397435897436, "grad_norm": 0.1988530308008194, "learning_rate": 1.735492301066338e-05, "loss": 0.01668114960193634, "step": 21050 }, { "epoch": 33.75, "grad_norm": 0.23962755501270294, "learning_rate": 1.7324494160760836e-05, "loss": 0.015705478191375733, "step": 21060 }, { "epoch": 33.76602564102564, "grad_norm": 0.21621818840503693, "learning_rate": 1.7294081176980425e-05, "loss": 0.015569266676902772, "step": 21070 }, { "epoch": 33.782051282051285, "grad_norm": 0.24654187262058258, "learning_rate": 1.726368409739053e-05, "loss": 0.01715252101421356, "step": 21080 }, { "epoch": 33.79807692307692, "grad_norm": 0.1876109540462494, "learning_rate": 1.7233302960039623e-05, "loss": 0.01747075617313385, "step": 21090 }, { "epoch": 33.81410256410256, "grad_norm": 0.23322805762290955, "learning_rate": 1.7202937802956262e-05, "loss": 0.016252437233924867, "step": 21100 }, { "epoch": 33.830128205128204, "grad_norm": 0.23609565198421478, "learning_rate": 
1.717258866414894e-05, "loss": 0.01643247902393341, "step": 21110 }, { "epoch": 33.84615384615385, "grad_norm": 0.3684849143028259, "learning_rate": 1.714225558160615e-05, "loss": 0.017096474766731262, "step": 21120 }, { "epoch": 33.86217948717949, "grad_norm": 0.2654605209827423, "learning_rate": 1.7111938593296264e-05, "loss": 0.017919202148914338, "step": 21130 }, { "epoch": 33.87820512820513, "grad_norm": 0.20781879127025604, "learning_rate": 1.7081637737167533e-05, "loss": 0.01715526878833771, "step": 21140 }, { "epoch": 33.89423076923077, "grad_norm": 0.13161198794841766, "learning_rate": 1.705135305114797e-05, "loss": 0.015387746691703796, "step": 21150 }, { "epoch": 33.91025641025641, "grad_norm": 0.17452581226825714, "learning_rate": 1.7021084573145398e-05, "loss": 0.016045419871807097, "step": 21160 }, { "epoch": 33.92628205128205, "grad_norm": 0.23587223887443542, "learning_rate": 1.69908323410473e-05, "loss": 0.016072101891040802, "step": 21170 }, { "epoch": 33.94230769230769, "grad_norm": 0.22696083784103394, "learning_rate": 1.696059639272088e-05, "loss": 0.01690460592508316, "step": 21180 }, { "epoch": 33.958333333333336, "grad_norm": 0.1225031316280365, "learning_rate": 1.6930376766012908e-05, "loss": 0.014914783835411071, "step": 21190 }, { "epoch": 33.97435897435897, "grad_norm": 0.21893461048603058, "learning_rate": 1.690017349874977e-05, "loss": 0.015275843441486359, "step": 21200 }, { "epoch": 33.99038461538461, "grad_norm": 0.21739615499973297, "learning_rate": 1.6869986628737328e-05, "loss": 0.017197136580944062, "step": 21210 }, { "epoch": 34.0, "eval_accuracy_background": NaN, "eval_accuracy_crop": 0.9943248461984221, "eval_iou_background": 0.0, "eval_iou_crop": 0.9943248461984221, "eval_loss": 0.020052911713719368, "eval_mean_accuracy": 0.9943248461984221, "eval_mean_iou": 0.49716242309921105, "eval_overall_accuracy": 0.9943248461984221, "eval_runtime": 34.797, "eval_samples_per_second": 25.318, "eval_steps_per_second": 3.19, "step": 21216 
}, { "epoch": 34.006410256410255, "grad_norm": 0.20955397188663483, "learning_rate": 1.6839816193760956e-05, "loss": 0.016813498735427857, "step": 21220 }, { "epoch": 34.0224358974359, "grad_norm": 0.16414804756641388, "learning_rate": 1.680966223158545e-05, "loss": 0.018084326386451723, "step": 21230 }, { "epoch": 34.03846153846154, "grad_norm": 0.15389882028102875, "learning_rate": 1.677952477995496e-05, "loss": 0.014949892461299897, "step": 21240 }, { "epoch": 34.05448717948718, "grad_norm": 0.41440197825431824, "learning_rate": 1.6749403876593016e-05, "loss": 0.0161476194858551, "step": 21250 }, { "epoch": 34.07051282051282, "grad_norm": 0.16516150534152985, "learning_rate": 1.671929955920238e-05, "loss": 0.015723276138305663, "step": 21260 }, { "epoch": 34.08653846153846, "grad_norm": 0.23862521350383759, "learning_rate": 1.6689211865465112e-05, "loss": 0.018700094521045686, "step": 21270 }, { "epoch": 34.1025641025641, "grad_norm": 0.21786873042583466, "learning_rate": 1.6659140833042396e-05, "loss": 0.01598741263151169, "step": 21280 }, { "epoch": 34.118589743589745, "grad_norm": 0.25507768988609314, "learning_rate": 1.662908649957463e-05, "loss": 0.018819132447242738, "step": 21290 }, { "epoch": 34.13461538461539, "grad_norm": 0.300803542137146, "learning_rate": 1.659904890268125e-05, "loss": 0.015170007944107056, "step": 21300 }, { "epoch": 34.15064102564103, "grad_norm": 0.29243379831314087, "learning_rate": 1.6569028079960784e-05, "loss": 0.016631196439266204, "step": 21310 }, { "epoch": 34.166666666666664, "grad_norm": 0.19141027331352234, "learning_rate": 1.6539024068990763e-05, "loss": 0.016062526404857634, "step": 21320 }, { "epoch": 34.18269230769231, "grad_norm": 0.20939382910728455, "learning_rate": 1.650903690732763e-05, "loss": 0.017645548284053802, "step": 21330 }, { "epoch": 34.19871794871795, "grad_norm": 0.18657660484313965, "learning_rate": 1.647906663250679e-05, "loss": 0.01685621738433838, "step": 21340 }, { "epoch": 34.21474358974359, 
"grad_norm": 0.1822206825017929, "learning_rate": 1.6449113282042487e-05, "loss": 0.015916763246059416, "step": 21350 }, { "epoch": 34.23076923076923, "grad_norm": 0.15757238864898682, "learning_rate": 1.6419176893427782e-05, "loss": 0.01677289605140686, "step": 21360 }, { "epoch": 34.24679487179487, "grad_norm": 0.284332811832428, "learning_rate": 1.6389257504134493e-05, "loss": 0.019593562185764312, "step": 21370 }, { "epoch": 34.26282051282051, "grad_norm": 0.18422994017601013, "learning_rate": 1.635935515161319e-05, "loss": 0.01710815876722336, "step": 21380 }, { "epoch": 34.27884615384615, "grad_norm": 0.5568610429763794, "learning_rate": 1.6329469873293075e-05, "loss": 0.01849326938390732, "step": 21390 }, { "epoch": 34.294871794871796, "grad_norm": 0.31829866766929626, "learning_rate": 1.6299601706582018e-05, "loss": 0.017198029160499572, "step": 21400 }, { "epoch": 34.31089743589744, "grad_norm": 0.20451748371124268, "learning_rate": 1.626975068886645e-05, "loss": 0.01799066215753555, "step": 21410 }, { "epoch": 34.32692307692308, "grad_norm": 0.20002973079681396, "learning_rate": 1.6239916857511355e-05, "loss": 0.01884330064058304, "step": 21420 }, { "epoch": 34.342948717948715, "grad_norm": 0.30173996090888977, "learning_rate": 1.621010024986017e-05, "loss": 0.017067836225032808, "step": 21430 }, { "epoch": 34.35897435897436, "grad_norm": 0.10792756825685501, "learning_rate": 1.618030090323481e-05, "loss": 0.015255683660507202, "step": 21440 }, { "epoch": 34.375, "grad_norm": 0.12689737975597382, "learning_rate": 1.6150518854935565e-05, "loss": 0.01729035973548889, "step": 21450 }, { "epoch": 34.39102564102564, "grad_norm": 0.2511466443538666, "learning_rate": 1.6120754142241054e-05, "loss": 0.017050476372241975, "step": 21460 }, { "epoch": 34.407051282051285, "grad_norm": 0.18853789567947388, "learning_rate": 1.6091006802408235e-05, "loss": 0.017363327741622924, "step": 21470 }, { "epoch": 34.42307692307692, "grad_norm": 0.3251549303531647, 
"learning_rate": 1.606127687267231e-05, "loss": 0.018454910814762117, "step": 21480 }, { "epoch": 34.43910256410256, "grad_norm": 0.2640705406665802, "learning_rate": 1.6031564390246658e-05, "loss": 0.016056951880455018, "step": 21490 }, { "epoch": 34.455128205128204, "grad_norm": 0.18613886833190918, "learning_rate": 1.6001869392322858e-05, "loss": 0.01786269247531891, "step": 21500 }, { "epoch": 34.47115384615385, "grad_norm": 0.20375767350196838, "learning_rate": 1.5972191916070596e-05, "loss": 0.016657865047454833, "step": 21510 }, { "epoch": 34.48717948717949, "grad_norm": 0.24429640173912048, "learning_rate": 1.5942531998637594e-05, "loss": 0.01770131289958954, "step": 21520 }, { "epoch": 34.50320512820513, "grad_norm": 0.2473338544368744, "learning_rate": 1.5912889677149642e-05, "loss": 0.01627267897129059, "step": 21530 }, { "epoch": 34.51923076923077, "grad_norm": 0.2190127968788147, "learning_rate": 1.5883264988710472e-05, "loss": 0.017829541862010957, "step": 21540 }, { "epoch": 34.53525641025641, "grad_norm": 0.19037863612174988, "learning_rate": 1.5853657970401737e-05, "loss": 0.015989118814468385, "step": 21550 }, { "epoch": 34.55128205128205, "grad_norm": 0.21903391182422638, "learning_rate": 1.5824068659283008e-05, "loss": 0.01725687235593796, "step": 21560 }, { "epoch": 34.56730769230769, "grad_norm": 0.2204754799604416, "learning_rate": 1.579449709239168e-05, "loss": 0.017172558605670928, "step": 21570 }, { "epoch": 34.583333333333336, "grad_norm": 0.2683911621570587, "learning_rate": 1.576494330674291e-05, "loss": 0.017743289470672607, "step": 21580 }, { "epoch": 34.59935897435897, "grad_norm": 0.33655408024787903, "learning_rate": 1.573540733932962e-05, "loss": 0.01954861730337143, "step": 21590 }, { "epoch": 34.61538461538461, "grad_norm": 0.21347151696681976, "learning_rate": 1.5705889227122447e-05, "loss": 0.016614097356796264, "step": 21600 }, { "epoch": 34.631410256410255, "grad_norm": 0.15411275625228882, "learning_rate": 
1.5676389007069636e-05, "loss": 0.016484692692756653, "step": 21610 }, { "epoch": 34.6474358974359, "grad_norm": 0.176540344953537, "learning_rate": 1.5646906716097077e-05, "loss": 0.01782839298248291, "step": 21620 }, { "epoch": 34.66346153846154, "grad_norm": 0.196277916431427, "learning_rate": 1.5617442391108185e-05, "loss": 0.016824160516262055, "step": 21630 }, { "epoch": 34.67948717948718, "grad_norm": 0.17190760374069214, "learning_rate": 1.5587996068983898e-05, "loss": 0.017154139280319215, "step": 21640 }, { "epoch": 34.69551282051282, "grad_norm": 0.22652341425418854, "learning_rate": 1.5558567786582625e-05, "loss": 0.01551772803068161, "step": 21650 }, { "epoch": 34.71153846153846, "grad_norm": 0.18472275137901306, "learning_rate": 1.5529157580740208e-05, "loss": 0.016417087614536287, "step": 21660 }, { "epoch": 34.7275641025641, "grad_norm": 0.1436266005039215, "learning_rate": 1.5499765488269823e-05, "loss": 0.015257637202739715, "step": 21670 }, { "epoch": 34.743589743589745, "grad_norm": 0.17458178102970123, "learning_rate": 1.5470391545962e-05, "loss": 0.016225042939186095, "step": 21680 }, { "epoch": 34.75961538461539, "grad_norm": 0.2286684513092041, "learning_rate": 1.5441035790584567e-05, "loss": 0.015088406205177308, "step": 21690 }, { "epoch": 34.77564102564103, "grad_norm": 0.20617453753948212, "learning_rate": 1.5411698258882544e-05, "loss": 0.019415798783302306, "step": 21700 }, { "epoch": 34.791666666666664, "grad_norm": 0.27824974060058594, "learning_rate": 1.538237898757818e-05, "loss": 0.016725006699562072, "step": 21710 }, { "epoch": 34.80769230769231, "grad_norm": 0.3207818269729614, "learning_rate": 1.5353078013370825e-05, "loss": 0.017934221029281616, "step": 21720 }, { "epoch": 34.82371794871795, "grad_norm": 0.279192715883255, "learning_rate": 1.532379537293698e-05, "loss": 0.01687939018011093, "step": 21730 }, { "epoch": 34.83974358974359, "grad_norm": 0.20006349682807922, "learning_rate": 1.529453110293014e-05, "loss": 
0.017583459615707397, "step": 21740 }, { "epoch": 34.85576923076923, "grad_norm": 0.18787308037281036, "learning_rate": 1.5265285239980853e-05, "loss": 0.016055051982402802, "step": 21750 }, { "epoch": 34.87179487179487, "grad_norm": 0.17467689514160156, "learning_rate": 1.5236057820696611e-05, "loss": 0.014874547719955444, "step": 21760 }, { "epoch": 34.88782051282051, "grad_norm": 0.1657082885503769, "learning_rate": 1.5206848881661795e-05, "loss": 0.01701228618621826, "step": 21770 }, { "epoch": 34.90384615384615, "grad_norm": 0.21422915160655975, "learning_rate": 1.517765845943769e-05, "loss": 0.013626405596733093, "step": 21780 }, { "epoch": 34.919871794871796, "grad_norm": 0.17761960625648499, "learning_rate": 1.5148486590562397e-05, "loss": 0.015357902646064759, "step": 21790 }, { "epoch": 34.93589743589744, "grad_norm": 0.22630220651626587, "learning_rate": 1.5119333311550774e-05, "loss": 0.017622944712638856, "step": 21800 }, { "epoch": 34.95192307692308, "grad_norm": 0.2030019313097, "learning_rate": 1.5090198658894412e-05, "loss": 0.015071728825569152, "step": 21810 }, { "epoch": 34.967948717948715, "grad_norm": 0.20724034309387207, "learning_rate": 1.5061082669061616e-05, "loss": 0.015122558176517486, "step": 21820 }, { "epoch": 34.98397435897436, "grad_norm": 0.21548005938529968, "learning_rate": 1.5031985378497286e-05, "loss": 0.015170219540596008, "step": 21830 }, { "epoch": 35.0, "grad_norm": 0.38227030634880066, "learning_rate": 1.5002906823622952e-05, "loss": 0.01641150414943695, "step": 21840 }, { "epoch": 35.0, "eval_accuracy_background": NaN, "eval_accuracy_crop": 0.9944768052603207, "eval_iou_background": 0.0, "eval_iou_crop": 0.9944768052603207, "eval_loss": 0.020126385614275932, "eval_mean_accuracy": 0.9944768052603207, "eval_mean_iou": 0.49723840263016034, "eval_overall_accuracy": 0.9944768052603207, "eval_runtime": 38.2142, "eval_samples_per_second": 23.054, "eval_steps_per_second": 2.905, "step": 21840 }, { "epoch": 35.01602564102564, 
"grad_norm": 0.21358564496040344, "learning_rate": 1.4973847040836694e-05, "loss": 0.01789025217294693, "step": 21850 }, { "epoch": 35.032051282051285, "grad_norm": 0.17884580790996552, "learning_rate": 1.494480606651306e-05, "loss": 0.01608734130859375, "step": 21860 }, { "epoch": 35.04807692307692, "grad_norm": 0.21326984465122223, "learning_rate": 1.4915783937003082e-05, "loss": 0.015660709142684935, "step": 21870 }, { "epoch": 35.06410256410256, "grad_norm": 0.2864038646221161, "learning_rate": 1.4886780688634215e-05, "loss": 0.017929084599018097, "step": 21880 }, { "epoch": 35.080128205128204, "grad_norm": 0.2441897988319397, "learning_rate": 1.4857796357710244e-05, "loss": 0.016878053545951843, "step": 21890 }, { "epoch": 35.09615384615385, "grad_norm": 0.13917766511440277, "learning_rate": 1.482883098051129e-05, "loss": 0.015508748590946198, "step": 21900 }, { "epoch": 35.11217948717949, "grad_norm": 0.24030865728855133, "learning_rate": 1.4799884593293774e-05, "loss": 0.016100330650806426, "step": 21910 }, { "epoch": 35.12820512820513, "grad_norm": 0.1820264607667923, "learning_rate": 1.4770957232290306e-05, "loss": 0.0162878543138504, "step": 21920 }, { "epoch": 35.14423076923077, "grad_norm": 0.32589879631996155, "learning_rate": 1.4742048933709703e-05, "loss": 0.015093128383159637, "step": 21930 }, { "epoch": 35.16025641025641, "grad_norm": 0.19097191095352173, "learning_rate": 1.471315973373694e-05, "loss": 0.015476828813552857, "step": 21940 }, { "epoch": 35.17628205128205, "grad_norm": 0.2574111223220825, "learning_rate": 1.4684289668533035e-05, "loss": 0.015994681417942046, "step": 21950 }, { "epoch": 35.19230769230769, "grad_norm": 0.23750333487987518, "learning_rate": 1.46554387742351e-05, "loss": 0.016456691920757292, "step": 21960 }, { "epoch": 35.208333333333336, "grad_norm": 0.17710988223552704, "learning_rate": 1.4626607086956247e-05, "loss": 0.018455283343791963, "step": 21970 }, { "epoch": 35.22435897435897, "grad_norm": 0.20170064270496368, 
"learning_rate": 1.459779464278552e-05, "loss": 0.015077553689479828, "step": 21980 }, { "epoch": 35.24038461538461, "grad_norm": 0.19065245985984802, "learning_rate": 1.4569001477787887e-05, "loss": 0.015032243728637696, "step": 21990 }, { "epoch": 35.256410256410255, "grad_norm": 0.21032994985580444, "learning_rate": 1.454022762800419e-05, "loss": 0.016578561067581175, "step": 22000 }, { "epoch": 35.2724358974359, "grad_norm": 0.16680917143821716, "learning_rate": 1.4511473129451104e-05, "loss": 0.01732310503721237, "step": 22010 }, { "epoch": 35.28846153846154, "grad_norm": 0.23096022009849548, "learning_rate": 1.448273801812105e-05, "loss": 0.01660955846309662, "step": 22020 }, { "epoch": 35.30448717948718, "grad_norm": 0.2508348226547241, "learning_rate": 1.4454022329982211e-05, "loss": 0.018918997049331664, "step": 22030 }, { "epoch": 35.32051282051282, "grad_norm": 0.24053210020065308, "learning_rate": 1.4425326100978457e-05, "loss": 0.01606069952249527, "step": 22040 }, { "epoch": 35.33653846153846, "grad_norm": 0.2144107073545456, "learning_rate": 1.4396649367029276e-05, "loss": 0.0184576615691185, "step": 22050 }, { "epoch": 35.3525641025641, "grad_norm": 0.14575442671775818, "learning_rate": 1.436799216402978e-05, "loss": 0.016260060667991637, "step": 22060 }, { "epoch": 35.368589743589745, "grad_norm": 1.1794764995574951, "learning_rate": 1.4339354527850607e-05, "loss": 0.020083679258823393, "step": 22070 }, { "epoch": 35.38461538461539, "grad_norm": 0.13893654942512512, "learning_rate": 1.4310736494337942e-05, "loss": 0.015297307074069977, "step": 22080 }, { "epoch": 35.40064102564103, "grad_norm": 0.18111540377140045, "learning_rate": 1.4282138099313387e-05, "loss": 0.01572381556034088, "step": 22090 }, { "epoch": 35.416666666666664, "grad_norm": 0.42808642983436584, "learning_rate": 1.4253559378574003e-05, "loss": 0.01678737848997116, "step": 22100 }, { "epoch": 35.43269230769231, "grad_norm": 0.13981445133686066, "learning_rate": 
1.4225000367892193e-05, "loss": 0.017235587537288665, "step": 22110 }, { "epoch": 35.44871794871795, "grad_norm": 0.20941753685474396, "learning_rate": 1.4196461103015702e-05, "loss": 0.017411012947559357, "step": 22120 }, { "epoch": 35.46474358974359, "grad_norm": 0.6376186609268188, "learning_rate": 1.416794161966758e-05, "loss": 0.016298440098762513, "step": 22130 }, { "epoch": 35.48076923076923, "grad_norm": 0.23102906346321106, "learning_rate": 1.4139441953546066e-05, "loss": 0.01776314973831177, "step": 22140 }, { "epoch": 35.49679487179487, "grad_norm": 0.2962859272956848, "learning_rate": 1.411096214032465e-05, "loss": 0.01731310933828354, "step": 22150 }, { "epoch": 35.51282051282051, "grad_norm": 0.23769651353359222, "learning_rate": 1.4082502215651917e-05, "loss": 0.016284912824630737, "step": 22160 }, { "epoch": 35.52884615384615, "grad_norm": 0.18228153884410858, "learning_rate": 1.4054062215151613e-05, "loss": 0.020034399628639222, "step": 22170 }, { "epoch": 35.544871794871796, "grad_norm": 0.18201936781406403, "learning_rate": 1.4025642174422486e-05, "loss": 0.017537105083465575, "step": 22180 }, { "epoch": 35.56089743589744, "grad_norm": 0.4543226361274719, "learning_rate": 1.3997242129038356e-05, "loss": 0.015438666939735413, "step": 22190 }, { "epoch": 35.57692307692308, "grad_norm": 0.24460890889167786, "learning_rate": 1.396886211454797e-05, "loss": 0.018091557919979094, "step": 22200 }, { "epoch": 35.592948717948715, "grad_norm": 0.18424424529075623, "learning_rate": 1.3940502166475032e-05, "loss": 0.018727067112922668, "step": 22210 }, { "epoch": 35.60897435897436, "grad_norm": 0.26234355568885803, "learning_rate": 1.3912162320318123e-05, "loss": 0.016178201138973235, "step": 22220 }, { "epoch": 35.625, "grad_norm": 0.2422022521495819, "learning_rate": 1.388384261155064e-05, "loss": 0.015156194567680359, "step": 22230 }, { "epoch": 35.64102564102564, "grad_norm": 0.1734664887189865, "learning_rate": 1.3855543075620797e-05, "loss": 
0.017206396162509918, "step": 22240 }, { "epoch": 35.657051282051285, "grad_norm": 0.2907467484474182, "learning_rate": 1.3827263747951562e-05, "loss": 0.016376833617687225, "step": 22250 }, { "epoch": 35.67307692307692, "grad_norm": 0.24350029230117798, "learning_rate": 1.379900466394059e-05, "loss": 0.015387177467346191, "step": 22260 }, { "epoch": 35.68910256410256, "grad_norm": 0.3772830069065094, "learning_rate": 1.3770765858960186e-05, "loss": 0.016205161809921265, "step": 22270 }, { "epoch": 35.705128205128204, "grad_norm": 0.2506941556930542, "learning_rate": 1.3742547368357301e-05, "loss": 0.015187104046344758, "step": 22280 }, { "epoch": 35.72115384615385, "grad_norm": 0.19765767455101013, "learning_rate": 1.3714349227453465e-05, "loss": 0.01931019574403763, "step": 22290 }, { "epoch": 35.73717948717949, "grad_norm": 0.19929717481136322, "learning_rate": 1.3686171471544685e-05, "loss": 0.016018737852573395, "step": 22300 }, { "epoch": 35.75320512820513, "grad_norm": 0.1984601467847824, "learning_rate": 1.3658014135901497e-05, "loss": 0.014734065532684327, "step": 22310 }, { "epoch": 35.76923076923077, "grad_norm": 0.40620043873786926, "learning_rate": 1.362987725576888e-05, "loss": 0.015730082988739014, "step": 22320 }, { "epoch": 35.78525641025641, "grad_norm": 0.3521387279033661, "learning_rate": 1.3601760866366162e-05, "loss": 0.01754833906888962, "step": 22330 }, { "epoch": 35.80128205128205, "grad_norm": 0.24290741980075836, "learning_rate": 1.3573665002887077e-05, "loss": 0.017669287323951722, "step": 22340 }, { "epoch": 35.81730769230769, "grad_norm": 0.17810413241386414, "learning_rate": 1.3545589700499635e-05, "loss": 0.020247091352939606, "step": 22350 }, { "epoch": 35.833333333333336, "grad_norm": 0.14838717877864838, "learning_rate": 1.35175349943461e-05, "loss": 0.01527022272348404, "step": 22360 }, { "epoch": 35.84935897435897, "grad_norm": 0.2912243902683258, "learning_rate": 1.3489500919542986e-05, "loss": 0.016757158935070036, "step": 
22370 }, { "epoch": 35.86538461538461, "grad_norm": 0.21893537044525146, "learning_rate": 1.3461487511180978e-05, "loss": 0.015293167531490326, "step": 22380 }, { "epoch": 35.881410256410255, "grad_norm": 1.0193276405334473, "learning_rate": 1.3433494804324856e-05, "loss": 0.016315315663814545, "step": 22390 }, { "epoch": 35.8974358974359, "grad_norm": 0.17110298573970795, "learning_rate": 1.340552283401353e-05, "loss": 0.01678704172372818, "step": 22400 }, { "epoch": 35.91346153846154, "grad_norm": 0.16251139342784882, "learning_rate": 1.3377571635259946e-05, "loss": 0.01620495766401291, "step": 22410 }, { "epoch": 35.92948717948718, "grad_norm": 0.23209409415721893, "learning_rate": 1.3349641243051024e-05, "loss": 0.01586238145828247, "step": 22420 }, { "epoch": 35.94551282051282, "grad_norm": 0.18744707107543945, "learning_rate": 1.3321731692347677e-05, "loss": 0.015024115145206452, "step": 22430 }, { "epoch": 35.96153846153846, "grad_norm": 0.2283950001001358, "learning_rate": 1.3293843018084694e-05, "loss": 0.017005932331085206, "step": 22440 }, { "epoch": 35.9775641025641, "grad_norm": 0.1587640643119812, "learning_rate": 1.3265975255170752e-05, "loss": 0.01497320681810379, "step": 22450 }, { "epoch": 35.993589743589745, "grad_norm": 0.24272678792476654, "learning_rate": 1.323812843848835e-05, "loss": 0.015645579993724824, "step": 22460 }, { "epoch": 36.0, "eval_accuracy_background": NaN, "eval_accuracy_crop": 0.9943568706288223, "eval_iou_background": 0.0, "eval_iou_crop": 0.9943568706288223, "eval_loss": 0.01989772543311119, "eval_mean_accuracy": 0.9943568706288223, "eval_mean_iou": 0.49717843531441114, "eval_overall_accuracy": 0.9943568706288223, "eval_runtime": 33.2251, "eval_samples_per_second": 26.516, "eval_steps_per_second": 3.341, "step": 22464 }, { "epoch": 36.00961538461539, "grad_norm": 0.288159042596817, "learning_rate": 1.3210302602893786e-05, "loss": 0.018663883209228516, "step": 22470 }, { "epoch": 36.02564102564103, "grad_norm": 
0.23569166660308838, "learning_rate": 1.3182497783217056e-05, "loss": 0.018452876806259157, "step": 22480 }, { "epoch": 36.041666666666664, "grad_norm": 0.21642519533634186, "learning_rate": 1.3154714014261883e-05, "loss": 0.015343722701072694, "step": 22490 }, { "epoch": 36.05769230769231, "grad_norm": 0.22760255634784698, "learning_rate": 1.3126951330805642e-05, "loss": 0.01572542041540146, "step": 22500 }, { "epoch": 36.07371794871795, "grad_norm": 0.3025057911872864, "learning_rate": 1.3099209767599281e-05, "loss": 0.0153671994805336, "step": 22510 }, { "epoch": 36.08974358974359, "grad_norm": 0.15354813635349274, "learning_rate": 1.3071489359367355e-05, "loss": 0.01692756712436676, "step": 22520 }, { "epoch": 36.10576923076923, "grad_norm": 0.17194056510925293, "learning_rate": 1.3043790140807906e-05, "loss": 0.014068475365638733, "step": 22530 }, { "epoch": 36.12179487179487, "grad_norm": 0.15622089803218842, "learning_rate": 1.3016112146592455e-05, "loss": 0.016722860932350158, "step": 22540 }, { "epoch": 36.13782051282051, "grad_norm": 0.1638362854719162, "learning_rate": 1.2988455411365977e-05, "loss": 0.015230931341648102, "step": 22550 }, { "epoch": 36.15384615384615, "grad_norm": 0.13722914457321167, "learning_rate": 1.2960819969746834e-05, "loss": 0.01575602889060974, "step": 22560 }, { "epoch": 36.169871794871796, "grad_norm": 0.19152559340000153, "learning_rate": 1.2933205856326707e-05, "loss": 0.016750311851501463, "step": 22570 }, { "epoch": 36.18589743589744, "grad_norm": 0.22722868621349335, "learning_rate": 1.2905613105670604e-05, "loss": 0.017107746005058287, "step": 22580 }, { "epoch": 36.20192307692308, "grad_norm": 0.20090383291244507, "learning_rate": 1.2878041752316795e-05, "loss": 0.01707886755466461, "step": 22590 }, { "epoch": 36.217948717948715, "grad_norm": 0.30684250593185425, "learning_rate": 1.2850491830776763e-05, "loss": 0.015686306357383727, "step": 22600 }, { "epoch": 36.23397435897436, "grad_norm": 0.38281959295272827, 
"learning_rate": 1.2822963375535157e-05, "loss": 0.016925340890884398, "step": 22610 }, { "epoch": 36.25, "grad_norm": 0.13256315886974335, "learning_rate": 1.2795456421049745e-05, "loss": 0.015876471996307373, "step": 22620 }, { "epoch": 36.26602564102564, "grad_norm": 0.20564967393875122, "learning_rate": 1.2767971001751421e-05, "loss": 0.017224545776844024, "step": 22630 }, { "epoch": 36.282051282051285, "grad_norm": 0.2498830109834671, "learning_rate": 1.274050715204408e-05, "loss": 0.017621617019176482, "step": 22640 }, { "epoch": 36.29807692307692, "grad_norm": 0.20384597778320312, "learning_rate": 1.271306490630464e-05, "loss": 0.017306581139564514, "step": 22650 }, { "epoch": 36.31410256410256, "grad_norm": 0.2650028467178345, "learning_rate": 1.2685644298882995e-05, "loss": 0.018349188566207885, "step": 22660 }, { "epoch": 36.330128205128204, "grad_norm": 0.26796209812164307, "learning_rate": 1.2658245364101908e-05, "loss": 0.016682200133800507, "step": 22670 }, { "epoch": 36.34615384615385, "grad_norm": 0.21453388035297394, "learning_rate": 1.2630868136257046e-05, "loss": 0.015657338500022887, "step": 22680 }, { "epoch": 36.36217948717949, "grad_norm": 0.21216073632240295, "learning_rate": 1.2603512649616918e-05, "loss": 0.0167051762342453, "step": 22690 }, { "epoch": 36.37820512820513, "grad_norm": 0.23570826649665833, "learning_rate": 1.2576178938422783e-05, "loss": 0.016896094381809234, "step": 22700 }, { "epoch": 36.39423076923077, "grad_norm": 0.19886046648025513, "learning_rate": 1.2548867036888653e-05, "loss": 0.01972999721765518, "step": 22710 }, { "epoch": 36.41025641025641, "grad_norm": 0.15892775356769562, "learning_rate": 1.2521576979201263e-05, "loss": 0.015883344411849975, "step": 22720 }, { "epoch": 36.42628205128205, "grad_norm": 0.19700266420841217, "learning_rate": 1.2494308799519978e-05, "loss": 0.016928695142269135, "step": 22730 }, { "epoch": 36.44230769230769, "grad_norm": 0.264184832572937, "learning_rate": 1.2467062531976793e-05, 
"loss": 0.016545480489730834, "step": 22740 }, { "epoch": 36.458333333333336, "grad_norm": 0.16336825489997864, "learning_rate": 1.2439838210676287e-05, "loss": 0.017241653800010682, "step": 22750 }, { "epoch": 36.47435897435897, "grad_norm": 0.23629607260227203, "learning_rate": 1.2412635869695533e-05, "loss": 0.017029844224452972, "step": 22760 }, { "epoch": 36.49038461538461, "grad_norm": 0.3340592086315155, "learning_rate": 1.2385455543084123e-05, "loss": 0.01720907390117645, "step": 22770 }, { "epoch": 36.506410256410255, "grad_norm": 0.19274036586284637, "learning_rate": 1.2358297264864092e-05, "loss": 0.016783547401428223, "step": 22780 }, { "epoch": 36.5224358974359, "grad_norm": 0.18603284657001495, "learning_rate": 1.2331161069029858e-05, "loss": 0.01721578687429428, "step": 22790 }, { "epoch": 36.53846153846154, "grad_norm": 0.11972931027412415, "learning_rate": 1.2304046989548192e-05, "loss": 0.014783450961112976, "step": 22800 }, { "epoch": 36.55448717948718, "grad_norm": 0.3555784225463867, "learning_rate": 1.2276955060358226e-05, "loss": 0.017993715405464173, "step": 22810 }, { "epoch": 36.57051282051282, "grad_norm": 0.34433314204216003, "learning_rate": 1.224988531537131e-05, "loss": 0.016906990110874175, "step": 22820 }, { "epoch": 36.58653846153846, "grad_norm": 0.5027971863746643, "learning_rate": 1.2222837788471063e-05, "loss": 0.018943968415260314, "step": 22830 }, { "epoch": 36.6025641025641, "grad_norm": 0.20395967364311218, "learning_rate": 1.2195812513513296e-05, "loss": 0.01724863946437836, "step": 22840 }, { "epoch": 36.618589743589745, "grad_norm": 0.28834840655326843, "learning_rate": 1.2168809524325933e-05, "loss": 0.015643900632858275, "step": 22850 }, { "epoch": 36.63461538461539, "grad_norm": 0.21641777455806732, "learning_rate": 1.214182885470903e-05, "loss": 0.017647816240787505, "step": 22860 }, { "epoch": 36.65064102564103, "grad_norm": 0.3445920944213867, "learning_rate": 1.2114870538434714e-05, "loss": 0.016782249510288238, 
"step": 22870 }, { "epoch": 36.666666666666664, "grad_norm": 0.13885021209716797, "learning_rate": 1.2087934609247105e-05, "loss": 0.015873390436172485, "step": 22880 }, { "epoch": 36.68269230769231, "grad_norm": 0.23944304883480072, "learning_rate": 1.2061021100862299e-05, "loss": 0.017741519212722778, "step": 22890 }, { "epoch": 36.69871794871795, "grad_norm": 0.3721672594547272, "learning_rate": 1.2034130046968354e-05, "loss": 0.016412384808063507, "step": 22900 }, { "epoch": 36.71474358974359, "grad_norm": 0.18133358657360077, "learning_rate": 1.2007261481225219e-05, "loss": 0.01654135584831238, "step": 22910 }, { "epoch": 36.73076923076923, "grad_norm": 0.3101484179496765, "learning_rate": 1.1980415437264663e-05, "loss": 0.016016936302185057, "step": 22920 }, { "epoch": 36.74679487179487, "grad_norm": 0.22311484813690186, "learning_rate": 1.1953591948690295e-05, "loss": 0.015279117226600646, "step": 22930 }, { "epoch": 36.76282051282051, "grad_norm": 0.24442212283611298, "learning_rate": 1.1926791049077488e-05, "loss": 0.014929187297821046, "step": 22940 }, { "epoch": 36.77884615384615, "grad_norm": 0.3146619498729706, "learning_rate": 1.1900012771973318e-05, "loss": 0.01882108747959137, "step": 22950 }, { "epoch": 36.794871794871796, "grad_norm": 0.1457722783088684, "learning_rate": 1.1873257150896574e-05, "loss": 0.0153776615858078, "step": 22960 }, { "epoch": 36.81089743589744, "grad_norm": 0.2382783740758896, "learning_rate": 1.1846524219337653e-05, "loss": 0.017199514806270598, "step": 22970 }, { "epoch": 36.82692307692308, "grad_norm": 0.24974709749221802, "learning_rate": 1.1819814010758592e-05, "loss": 0.014994804561138154, "step": 22980 }, { "epoch": 36.842948717948715, "grad_norm": 0.20457038283348083, "learning_rate": 1.1793126558592936e-05, "loss": 0.016099771857261656, "step": 22990 }, { "epoch": 36.85897435897436, "grad_norm": 0.23483367264270782, "learning_rate": 1.1766461896245799e-05, "loss": 0.01868714243173599, "step": 23000 }, { "epoch": 
36.875, "grad_norm": 0.6059851050376892, "learning_rate": 1.1739820057093717e-05, "loss": 0.017937764525413513, "step": 23010 }, { "epoch": 36.89102564102564, "grad_norm": 0.1811147779226303, "learning_rate": 1.1713201074484695e-05, "loss": 0.0166801318526268, "step": 23020 }, { "epoch": 36.907051282051285, "grad_norm": 0.21145541965961456, "learning_rate": 1.1686604981738125e-05, "loss": 0.013983434438705445, "step": 23030 }, { "epoch": 36.92307692307692, "grad_norm": 0.214041605591774, "learning_rate": 1.1660031812144714e-05, "loss": 0.01572011113166809, "step": 23040 }, { "epoch": 36.93910256410256, "grad_norm": 0.22706560790538788, "learning_rate": 1.1633481598966517e-05, "loss": 0.017827320098876952, "step": 23050 }, { "epoch": 36.955128205128204, "grad_norm": 0.3339959979057312, "learning_rate": 1.1606954375436844e-05, "loss": 0.01578756719827652, "step": 23060 }, { "epoch": 36.97115384615385, "grad_norm": 0.14821887016296387, "learning_rate": 1.1580450174760207e-05, "loss": 0.013439583778381347, "step": 23070 }, { "epoch": 36.98717948717949, "grad_norm": 0.25462669134140015, "learning_rate": 1.155396903011231e-05, "loss": 0.016404566168785096, "step": 23080 }, { "epoch": 37.0, "eval_accuracy_background": NaN, "eval_accuracy_crop": 0.994671632892193, "eval_iou_background": 0.0, "eval_iou_crop": 0.994671632892193, "eval_loss": 0.020206322893500328, "eval_mean_accuracy": 0.994671632892193, "eval_mean_iou": 0.4973358164460965, "eval_overall_accuracy": 0.994671632892193, "eval_runtime": 37.3005, "eval_samples_per_second": 23.619, "eval_steps_per_second": 2.976, "step": 23088 }, { "epoch": 37.00320512820513, "grad_norm": 0.21158915758132935, "learning_rate": 1.152751097464002e-05, "loss": 0.015553076565265656, "step": 23090 }, { "epoch": 37.01923076923077, "grad_norm": 0.25237855315208435, "learning_rate": 1.1501076041461263e-05, "loss": 0.01741306185722351, "step": 23100 }, { "epoch": 37.03525641025641, "grad_norm": 0.2519232928752899, "learning_rate": 
1.1474664263665058e-05, "loss": 0.01529698520898819, "step": 23110 }, { "epoch": 37.05128205128205, "grad_norm": 0.21226823329925537, "learning_rate": 1.1448275674311433e-05, "loss": 0.015720130503177644, "step": 23120 }, { "epoch": 37.06730769230769, "grad_norm": 0.23775885999202728, "learning_rate": 1.1421910306431366e-05, "loss": 0.016531336307525634, "step": 23130 }, { "epoch": 37.083333333333336, "grad_norm": 0.26207420229911804, "learning_rate": 1.1395568193026793e-05, "loss": 0.01683205962181091, "step": 23140 }, { "epoch": 37.09935897435897, "grad_norm": 0.1489706188440323, "learning_rate": 1.1369249367070547e-05, "loss": 0.01453751027584076, "step": 23150 }, { "epoch": 37.11538461538461, "grad_norm": 0.3232400119304657, "learning_rate": 1.1342953861506292e-05, "loss": 0.015127748250961304, "step": 23160 }, { "epoch": 37.131410256410255, "grad_norm": 0.14400829374790192, "learning_rate": 1.1316681709248497e-05, "loss": 0.016363504528999328, "step": 23170 }, { "epoch": 37.1474358974359, "grad_norm": 0.20948931574821472, "learning_rate": 1.1290432943182424e-05, "loss": 0.01620877981185913, "step": 23180 }, { "epoch": 37.16346153846154, "grad_norm": 0.20353281497955322, "learning_rate": 1.1264207596164054e-05, "loss": 0.01643351763486862, "step": 23190 }, { "epoch": 37.17948717948718, "grad_norm": 0.274657666683197, "learning_rate": 1.1238005701020041e-05, "loss": 0.01700735241174698, "step": 23200 }, { "epoch": 37.19551282051282, "grad_norm": 0.18592192232608795, "learning_rate": 1.1211827290547692e-05, "loss": 0.0167900487780571, "step": 23210 }, { "epoch": 37.21153846153846, "grad_norm": 0.22565753757953644, "learning_rate": 1.1185672397514934e-05, "loss": 0.017685797810554505, "step": 23220 }, { "epoch": 37.2275641025641, "grad_norm": 0.2227252721786499, "learning_rate": 1.1159541054660221e-05, "loss": 0.016418448090553282, "step": 23230 }, { "epoch": 37.243589743589745, "grad_norm": 0.1974440962076187, "learning_rate": 1.1133433294692572e-05, "loss": 
0.016411659121513367, "step": 23240 }, { "epoch": 37.25961538461539, "grad_norm": 0.23630648851394653, "learning_rate": 1.1107349150291451e-05, "loss": 0.015778197348117827, "step": 23250 }, { "epoch": 37.27564102564103, "grad_norm": 0.24195736646652222, "learning_rate": 1.1081288654106766e-05, "loss": 0.018958286941051485, "step": 23260 }, { "epoch": 37.291666666666664, "grad_norm": 0.26252642273902893, "learning_rate": 1.1055251838758845e-05, "loss": 0.014841924607753753, "step": 23270 }, { "epoch": 37.30769230769231, "grad_norm": 0.31109318137168884, "learning_rate": 1.1029238736838372e-05, "loss": 0.015234443545341491, "step": 23280 }, { "epoch": 37.32371794871795, "grad_norm": 0.1700698584318161, "learning_rate": 1.100324938090632e-05, "loss": 0.01698564887046814, "step": 23290 }, { "epoch": 37.33974358974359, "grad_norm": 0.3437885642051697, "learning_rate": 1.0977283803493969e-05, "loss": 0.017111869156360628, "step": 23300 }, { "epoch": 37.35576923076923, "grad_norm": 0.1766347438097, "learning_rate": 1.0951342037102834e-05, "loss": 0.015108849108219146, "step": 23310 }, { "epoch": 37.37179487179487, "grad_norm": 0.12043841928243637, "learning_rate": 1.0925424114204595e-05, "loss": 0.016464079916477203, "step": 23320 }, { "epoch": 37.38782051282051, "grad_norm": 0.17048747837543488, "learning_rate": 1.089953006724113e-05, "loss": 0.015369604527950286, "step": 23330 }, { "epoch": 37.40384615384615, "grad_norm": 0.23106801509857178, "learning_rate": 1.0873659928624396e-05, "loss": 0.016500791907310484, "step": 23340 }, { "epoch": 37.419871794871796, "grad_norm": 0.21294309198856354, "learning_rate": 1.0847813730736425e-05, "loss": 0.01694266200065613, "step": 23350 }, { "epoch": 37.43589743589744, "grad_norm": 0.32963627576828003, "learning_rate": 1.0821991505929308e-05, "loss": 0.01594506800174713, "step": 23360 }, { "epoch": 37.45192307692308, "grad_norm": 0.1813533902168274, "learning_rate": 1.0796193286525123e-05, "loss": 0.016716423630714416, "step": 
23370 }, { "epoch": 37.467948717948715, "grad_norm": 0.26174938678741455, "learning_rate": 1.0770419104815865e-05, "loss": 0.015045812726020813, "step": 23380 }, { "epoch": 37.48397435897436, "grad_norm": 0.2079877108335495, "learning_rate": 1.0744668993063485e-05, "loss": 0.018359565734863283, "step": 23390 }, { "epoch": 37.5, "grad_norm": 0.20566006004810333, "learning_rate": 1.071894298349979e-05, "loss": 0.015936543047428132, "step": 23400 }, { "epoch": 37.51602564102564, "grad_norm": 0.28617823123931885, "learning_rate": 1.0693241108326403e-05, "loss": 0.017071568965911867, "step": 23410 }, { "epoch": 37.532051282051285, "grad_norm": 0.26072004437446594, "learning_rate": 1.066756339971476e-05, "loss": 0.01661945730447769, "step": 23420 }, { "epoch": 37.54807692307692, "grad_norm": 0.3663628101348877, "learning_rate": 1.0641909889806031e-05, "loss": 0.018573790788650513, "step": 23430 }, { "epoch": 37.56410256410256, "grad_norm": 0.17218117415905, "learning_rate": 1.0616280610711091e-05, "loss": 0.016396109759807587, "step": 23440 }, { "epoch": 37.580128205128204, "grad_norm": 0.22288602590560913, "learning_rate": 1.0590675594510505e-05, "loss": 0.01738508343696594, "step": 23450 }, { "epoch": 37.59615384615385, "grad_norm": 0.37467584013938904, "learning_rate": 1.0565094873254465e-05, "loss": 0.018944214284420013, "step": 23460 }, { "epoch": 37.61217948717949, "grad_norm": 0.2776801586151123, "learning_rate": 1.0539538478962747e-05, "loss": 0.01660025417804718, "step": 23470 }, { "epoch": 37.62820512820513, "grad_norm": 0.20448212325572968, "learning_rate": 1.0514006443624662e-05, "loss": 0.015200547873973846, "step": 23480 }, { "epoch": 37.64423076923077, "grad_norm": 0.1657698005437851, "learning_rate": 1.048849879919905e-05, "loss": 0.016621682047843932, "step": 23490 }, { "epoch": 37.66025641025641, "grad_norm": 0.26464536786079407, "learning_rate": 1.0463015577614229e-05, "loss": 0.015735004842281342, "step": 23500 }, { "epoch": 37.67628205128205, 
"grad_norm": 0.2032068520784378, "learning_rate": 1.043755681076792e-05, "loss": 0.015402042865753173, "step": 23510 }, { "epoch": 37.69230769230769, "grad_norm": 0.17969150841236115, "learning_rate": 1.0412122530527233e-05, "loss": 0.017205047607421874, "step": 23520 }, { "epoch": 37.708333333333336, "grad_norm": 0.19498120248317719, "learning_rate": 1.038671276872867e-05, "loss": 0.01565683037042618, "step": 23530 }, { "epoch": 37.72435897435897, "grad_norm": 0.19549378752708435, "learning_rate": 1.0361327557177987e-05, "loss": 0.015888868272304534, "step": 23540 }, { "epoch": 37.74038461538461, "grad_norm": 0.2367735356092453, "learning_rate": 1.0335966927650242e-05, "loss": 0.014090916514396668, "step": 23550 }, { "epoch": 37.756410256410255, "grad_norm": 0.23632864654064178, "learning_rate": 1.031063091188974e-05, "loss": 0.01592555195093155, "step": 23560 }, { "epoch": 37.7724358974359, "grad_norm": 0.310810923576355, "learning_rate": 1.0285319541609926e-05, "loss": 0.01689329594373703, "step": 23570 }, { "epoch": 37.78846153846154, "grad_norm": 0.2423119992017746, "learning_rate": 1.0260032848493435e-05, "loss": 0.01609664112329483, "step": 23580 }, { "epoch": 37.80448717948718, "grad_norm": 0.6026171445846558, "learning_rate": 1.0234770864192017e-05, "loss": 0.018726027011871337, "step": 23590 }, { "epoch": 37.82051282051282, "grad_norm": 0.19461028277873993, "learning_rate": 1.0209533620326467e-05, "loss": 0.016687697172164916, "step": 23600 }, { "epoch": 37.83653846153846, "grad_norm": 0.22026409208774567, "learning_rate": 1.0184321148486611e-05, "loss": 0.016354964673519136, "step": 23610 }, { "epoch": 37.8525641025641, "grad_norm": 0.21099981665611267, "learning_rate": 1.0159133480231309e-05, "loss": 0.016322591900825502, "step": 23620 }, { "epoch": 37.868589743589745, "grad_norm": 0.23654359579086304, "learning_rate": 1.0133970647088319e-05, "loss": 0.015580883622169495, "step": 23630 }, { "epoch": 37.88461538461539, "grad_norm": 0.26577523350715637, 
"learning_rate": 1.010883268055435e-05, "loss": 0.015951704978942872, "step": 23640 }, { "epoch": 37.90064102564103, "grad_norm": 0.2249913364648819, "learning_rate": 1.0083719612094985e-05, "loss": 0.015364047884941102, "step": 23650 }, { "epoch": 37.916666666666664, "grad_norm": 0.18984052538871765, "learning_rate": 1.0058631473144612e-05, "loss": 0.017053684592247008, "step": 23660 }, { "epoch": 37.93269230769231, "grad_norm": 0.2318568080663681, "learning_rate": 1.0033568295106447e-05, "loss": 0.015068621933460235, "step": 23670 }, { "epoch": 37.94871794871795, "grad_norm": 0.15936224162578583, "learning_rate": 1.0008530109352451e-05, "loss": 0.01654555946588516, "step": 23680 }, { "epoch": 37.96474358974359, "grad_norm": 0.22086742520332336, "learning_rate": 9.983516947223295e-06, "loss": 0.015361197292804718, "step": 23690 }, { "epoch": 37.98076923076923, "grad_norm": 0.22516322135925293, "learning_rate": 9.958528840028318e-06, "loss": 0.014470289647579192, "step": 23700 }, { "epoch": 37.99679487179487, "grad_norm": 0.2869127690792084, "learning_rate": 9.933565819045535e-06, "loss": 0.015866735577583314, "step": 23710 }, { "epoch": 38.0, "eval_accuracy_background": NaN, "eval_accuracy_crop": 0.994711799246515, "eval_iou_background": 0.0, "eval_iou_crop": 0.994711799246515, "eval_loss": 0.020105723291635513, "eval_mean_accuracy": 0.994711799246515, "eval_mean_iou": 0.4973558996232575, "eval_overall_accuracy": 0.994711799246515, "eval_runtime": 37.8447, "eval_samples_per_second": 23.279, "eval_steps_per_second": 2.933, "step": 23712 }, { "epoch": 38.01282051282051, "grad_norm": 0.33901864290237427, "learning_rate": 9.908627915521517e-06, "loss": 0.018547910451889037, "step": 23720 }, { "epoch": 38.02884615384615, "grad_norm": 0.16018831729888916, "learning_rate": 9.883715160671417e-06, "loss": 0.01707872897386551, "step": 23730 }, { "epoch": 38.044871794871796, "grad_norm": 0.20760636031627655, "learning_rate": 9.858827585678927e-06, "loss": 
0.014723710715770721, "step": 23740 }, { "epoch": 38.06089743589744, "grad_norm": 0.13612684607505798, "learning_rate": 9.833965221696173e-06, "loss": 0.01674003005027771, "step": 23750 }, { "epoch": 38.07692307692308, "grad_norm": 0.25980329513549805, "learning_rate": 9.809128099843762e-06, "loss": 0.01604759246110916, "step": 23760 }, { "epoch": 38.092948717948715, "grad_norm": 0.19050900638103485, "learning_rate": 9.784316251210708e-06, "loss": 0.01640096753835678, "step": 23770 }, { "epoch": 38.10897435897436, "grad_norm": 0.14670228958129883, "learning_rate": 9.759529706854358e-06, "loss": 0.01692793667316437, "step": 23780 }, { "epoch": 38.125, "grad_norm": 0.35709166526794434, "learning_rate": 9.734768497800417e-06, "loss": 0.01609407812356949, "step": 23790 }, { "epoch": 38.14102564102564, "grad_norm": 0.14939945936203003, "learning_rate": 9.71003265504285e-06, "loss": 0.01561930924654007, "step": 23800 }, { "epoch": 38.157051282051285, "grad_norm": 0.15570472180843353, "learning_rate": 9.685322209543902e-06, "loss": 0.01589246392250061, "step": 23810 }, { "epoch": 38.17307692307692, "grad_norm": 1.414204478263855, "learning_rate": 9.660637192233995e-06, "loss": 0.017615300416946412, "step": 23820 }, { "epoch": 38.18910256410256, "grad_norm": 0.35402265191078186, "learning_rate": 9.635977634011746e-06, "loss": 0.017381852865219115, "step": 23830 }, { "epoch": 38.205128205128204, "grad_norm": 0.25513955950737, "learning_rate": 9.611343565743905e-06, "loss": 0.01766020655632019, "step": 23840 }, { "epoch": 38.22115384615385, "grad_norm": 0.3296538293361664, "learning_rate": 9.586735018265286e-06, "loss": 0.015471471846103669, "step": 23850 }, { "epoch": 38.23717948717949, "grad_norm": 0.21262164413928986, "learning_rate": 9.562152022378805e-06, "loss": 0.01721178591251373, "step": 23860 }, { "epoch": 38.25320512820513, "grad_norm": 0.2243012934923172, "learning_rate": 9.537594608855346e-06, "loss": 0.016124792397022247, "step": 23870 }, { "epoch": 
38.26923076923077, "grad_norm": 0.2297602891921997, "learning_rate": 9.513062808433812e-06, "loss": 0.016854944825172424, "step": 23880 }, { "epoch": 38.28525641025641, "grad_norm": 0.17969532310962677, "learning_rate": 9.488556651821015e-06, "loss": 0.01468597948551178, "step": 23890 }, { "epoch": 38.30128205128205, "grad_norm": 0.14572586119174957, "learning_rate": 9.464076169691695e-06, "loss": 0.016614058613777162, "step": 23900 }, { "epoch": 38.31730769230769, "grad_norm": 0.26776814460754395, "learning_rate": 9.439621392688424e-06, "loss": 0.018319609761238097, "step": 23910 }, { "epoch": 38.333333333333336, "grad_norm": 0.2173573523759842, "learning_rate": 9.415192351421622e-06, "loss": 0.01664339154958725, "step": 23920 }, { "epoch": 38.34935897435897, "grad_norm": 0.15949775278568268, "learning_rate": 9.390789076469508e-06, "loss": 0.015475131571292877, "step": 23930 }, { "epoch": 38.36538461538461, "grad_norm": 0.2620789110660553, "learning_rate": 9.366411598378002e-06, "loss": 0.016509468853473663, "step": 23940 }, { "epoch": 38.381410256410255, "grad_norm": 0.1472664624452591, "learning_rate": 9.342059947660772e-06, "loss": 0.014990060031414032, "step": 23950 }, { "epoch": 38.3974358974359, "grad_norm": 0.18245157599449158, "learning_rate": 9.317734154799161e-06, "loss": 0.014974790811538696, "step": 23960 }, { "epoch": 38.41346153846154, "grad_norm": 0.17518733441829681, "learning_rate": 9.293434250242118e-06, "loss": 0.014383029937744141, "step": 23970 }, { "epoch": 38.42948717948718, "grad_norm": 0.2786952257156372, "learning_rate": 9.269160264406191e-06, "loss": 0.01810127794742584, "step": 23980 }, { "epoch": 38.44551282051282, "grad_norm": 0.36375606060028076, "learning_rate": 9.244912227675515e-06, "loss": 0.01558787077665329, "step": 23990 }, { "epoch": 38.46153846153846, "grad_norm": 0.3051488399505615, "learning_rate": 9.220690170401706e-06, "loss": 0.016568799316883088, "step": 24000 }, { "epoch": 38.4775641025641, "grad_norm": 
0.3144865930080414, "learning_rate": 9.196494122903887e-06, "loss": 0.016344723105430604, "step": 24010 }, { "epoch": 38.493589743589745, "grad_norm": 0.2198195904493332, "learning_rate": 9.172324115468627e-06, "loss": 0.01742914617061615, "step": 24020 }, { "epoch": 38.50961538461539, "grad_norm": 0.14419761300086975, "learning_rate": 9.148180178349868e-06, "loss": 0.014622488617897033, "step": 24030 }, { "epoch": 38.52564102564103, "grad_norm": 0.4606773853302002, "learning_rate": 9.124062341768952e-06, "loss": 0.017567554116249086, "step": 24040 }, { "epoch": 38.541666666666664, "grad_norm": 0.22927908599376678, "learning_rate": 9.099970635914548e-06, "loss": 0.016272497177124024, "step": 24050 }, { "epoch": 38.55769230769231, "grad_norm": 0.24040409922599792, "learning_rate": 9.075905090942596e-06, "loss": 0.016256177425384523, "step": 24060 }, { "epoch": 38.57371794871795, "grad_norm": 0.21078439056873322, "learning_rate": 9.051865736976301e-06, "loss": 0.016719946265220643, "step": 24070 }, { "epoch": 38.58974358974359, "grad_norm": 0.2633036971092224, "learning_rate": 9.027852604106088e-06, "loss": 0.02072600871324539, "step": 24080 }, { "epoch": 38.60576923076923, "grad_norm": 0.18229462206363678, "learning_rate": 9.003865722389567e-06, "loss": 0.015358808636665344, "step": 24090 }, { "epoch": 38.62179487179487, "grad_norm": 0.19882303476333618, "learning_rate": 8.979905121851467e-06, "loss": 0.016199222207069396, "step": 24100 }, { "epoch": 38.63782051282051, "grad_norm": 0.45651525259017944, "learning_rate": 8.955970832483638e-06, "loss": 0.017452967166900635, "step": 24110 }, { "epoch": 38.65384615384615, "grad_norm": 0.2524290084838867, "learning_rate": 8.932062884245003e-06, "loss": 0.016453859210014344, "step": 24120 }, { "epoch": 38.669871794871796, "grad_norm": 0.2278241664171219, "learning_rate": 8.908181307061483e-06, "loss": 0.015465860068798066, "step": 24130 }, { "epoch": 38.68589743589744, "grad_norm": 0.2560878098011017, "learning_rate": 
8.884326130826026e-06, "loss": 0.015456955134868621, "step": 24140 }, { "epoch": 38.70192307692308, "grad_norm": 0.3257920742034912, "learning_rate": 8.860497385398508e-06, "loss": 0.015827256441116332, "step": 24150 }, { "epoch": 38.717948717948715, "grad_norm": 0.20929136872291565, "learning_rate": 8.836695100605724e-06, "loss": 0.016620129346847534, "step": 24160 }, { "epoch": 38.73397435897436, "grad_norm": 0.13535356521606445, "learning_rate": 8.812919306241357e-06, "loss": 0.014112985134124756, "step": 24170 }, { "epoch": 38.75, "grad_norm": 0.16185446083545685, "learning_rate": 8.789170032065942e-06, "loss": 0.015326277911663055, "step": 24180 }, { "epoch": 38.76602564102564, "grad_norm": 0.1711299568414688, "learning_rate": 8.765447307806778e-06, "loss": 0.015358074009418488, "step": 24190 }, { "epoch": 38.782051282051285, "grad_norm": 0.2369769811630249, "learning_rate": 8.741751163157977e-06, "loss": 0.01606872081756592, "step": 24200 }, { "epoch": 38.79807692307692, "grad_norm": 0.15555396676063538, "learning_rate": 8.718081627780365e-06, "loss": 0.01599678099155426, "step": 24210 }, { "epoch": 38.81410256410256, "grad_norm": 0.15274319052696228, "learning_rate": 8.694438731301438e-06, "loss": 0.01639477014541626, "step": 24220 }, { "epoch": 38.830128205128204, "grad_norm": 0.2684861719608307, "learning_rate": 8.670822503315383e-06, "loss": 0.015719787776470186, "step": 24230 }, { "epoch": 38.84615384615385, "grad_norm": 0.1806175410747528, "learning_rate": 8.647232973382985e-06, "loss": 0.015344932675361633, "step": 24240 }, { "epoch": 38.86217948717949, "grad_norm": 0.24044187366962433, "learning_rate": 8.623670171031605e-06, "loss": 0.016125327348709105, "step": 24250 }, { "epoch": 38.87820512820513, "grad_norm": 0.12867572903633118, "learning_rate": 8.600134125755167e-06, "loss": 0.014567478001117707, "step": 24260 }, { "epoch": 38.89423076923077, "grad_norm": 0.21303340792655945, "learning_rate": 8.576624867014103e-06, "loss": 0.015585868060588837, 
"step": 24270 }, { "epoch": 38.91025641025641, "grad_norm": 0.28452131152153015, "learning_rate": 8.553142424235285e-06, "loss": 0.021391259133815767, "step": 24280 }, { "epoch": 38.92628205128205, "grad_norm": 0.23524612188339233, "learning_rate": 8.52968682681206e-06, "loss": 0.015364663302898407, "step": 24290 }, { "epoch": 38.94230769230769, "grad_norm": 0.11293990910053253, "learning_rate": 8.506258104104156e-06, "loss": 0.017296475172042847, "step": 24300 }, { "epoch": 38.958333333333336, "grad_norm": 0.16653001308441162, "learning_rate": 8.482856285437641e-06, "loss": 0.015031147003173827, "step": 24310 }, { "epoch": 38.97435897435897, "grad_norm": 0.16830308735370636, "learning_rate": 8.459481400104944e-06, "loss": 0.015499937534332275, "step": 24320 }, { "epoch": 38.99038461538461, "grad_norm": 0.2528221607208252, "learning_rate": 8.436133477364744e-06, "loss": 0.01602058857679367, "step": 24330 }, { "epoch": 39.0, "eval_accuracy_background": NaN, "eval_accuracy_crop": 0.993977198746663, "eval_iou_background": 0.0, "eval_iou_crop": 0.993977198746663, "eval_loss": 0.01995796337723732, "eval_mean_accuracy": 0.993977198746663, "eval_mean_iou": 0.4969885993733315, "eval_overall_accuracy": 0.993977198746663, "eval_runtime": 36.9028, "eval_samples_per_second": 23.874, "eval_steps_per_second": 3.008, "step": 24336 }, { "epoch": 39.006410256410255, "grad_norm": 0.17803779244422913, "learning_rate": 8.41281254644201e-06, "loss": 0.015400844812393188, "step": 24340 }, { "epoch": 39.0224358974359, "grad_norm": 0.22943975031375885, "learning_rate": 8.38951863652788e-06, "loss": 0.015846404433250427, "step": 24350 }, { "epoch": 39.03846153846154, "grad_norm": 0.16049297153949738, "learning_rate": 8.366251776779703e-06, "loss": 0.016045811772346496, "step": 24360 }, { "epoch": 39.05448717948718, "grad_norm": 0.24429626762866974, "learning_rate": 8.34301199632097e-06, "loss": 0.01571701467037201, "step": 24370 }, { "epoch": 39.07051282051282, "grad_norm": 
0.1993391364812851, "learning_rate": 8.31979932424125e-06, "loss": 0.015247522294521332, "step": 24380 }, { "epoch": 39.08653846153846, "grad_norm": 0.27727046608924866, "learning_rate": 8.296613789596195e-06, "loss": 0.01646144688129425, "step": 24390 }, { "epoch": 39.1025641025641, "grad_norm": 0.18288573622703552, "learning_rate": 8.273455421407507e-06, "loss": 0.015640558302402498, "step": 24400 }, { "epoch": 39.118589743589745, "grad_norm": 0.2001703977584839, "learning_rate": 8.250324248662851e-06, "loss": 0.016720101237297058, "step": 24410 }, { "epoch": 39.13461538461539, "grad_norm": 0.19897131621837616, "learning_rate": 8.22722030031586e-06, "loss": 0.0157785564661026, "step": 24420 }, { "epoch": 39.15064102564103, "grad_norm": 0.24104949831962585, "learning_rate": 8.204143605286108e-06, "loss": 0.0160867303609848, "step": 24430 }, { "epoch": 39.166666666666664, "grad_norm": 0.24458633363246918, "learning_rate": 8.18109419245903e-06, "loss": 0.016290827095508574, "step": 24440 }, { "epoch": 39.18269230769231, "grad_norm": 0.1716788113117218, "learning_rate": 8.158072090685932e-06, "loss": 0.016715715825557708, "step": 24450 }, { "epoch": 39.19871794871795, "grad_norm": 0.22832617163658142, "learning_rate": 8.135077328783933e-06, "loss": 0.01692715138196945, "step": 24460 }, { "epoch": 39.21474358974359, "grad_norm": 0.18675878643989563, "learning_rate": 8.112109935535911e-06, "loss": 0.01532553881406784, "step": 24470 }, { "epoch": 39.23076923076923, "grad_norm": 0.12506017088890076, "learning_rate": 8.089169939690502e-06, "loss": 0.015082789957523346, "step": 24480 }, { "epoch": 39.24679487179487, "grad_norm": 0.21302586793899536, "learning_rate": 8.066257369962057e-06, "loss": 0.01701800376176834, "step": 24490 }, { "epoch": 39.26282051282051, "grad_norm": 0.24796637892723083, "learning_rate": 8.043372255030575e-06, "loss": 0.0160111665725708, "step": 24500 }, { "epoch": 39.27884615384615, "grad_norm": 0.2887095808982849, "learning_rate": 
8.020514623541694e-06, "loss": 0.017407669126987456, "step": 24510 }, { "epoch": 39.294871794871796, "grad_norm": 0.3488383889198303, "learning_rate": 7.997684504106676e-06, "loss": 0.01571747213602066, "step": 24520 }, { "epoch": 39.31089743589744, "grad_norm": 0.226148322224617, "learning_rate": 7.974881925302302e-06, "loss": 0.015099994838237762, "step": 24530 }, { "epoch": 39.32692307692308, "grad_norm": 0.3096556067466736, "learning_rate": 7.952106915670916e-06, "loss": 0.016435901820659637, "step": 24540 }, { "epoch": 39.342948717948715, "grad_norm": 0.2816888391971588, "learning_rate": 7.929359503720353e-06, "loss": 0.015460148453712463, "step": 24550 }, { "epoch": 39.35897435897436, "grad_norm": 0.35477447509765625, "learning_rate": 7.906639717923872e-06, "loss": 0.016706505417823793, "step": 24560 }, { "epoch": 39.375, "grad_norm": 0.24022874236106873, "learning_rate": 7.883947586720184e-06, "loss": 0.016275283694267274, "step": 24570 }, { "epoch": 39.39102564102564, "grad_norm": 0.26919639110565186, "learning_rate": 7.861283138513379e-06, "loss": 0.015610189735889434, "step": 24580 }, { "epoch": 39.407051282051285, "grad_norm": 0.1808389574289322, "learning_rate": 7.838646401672877e-06, "loss": 0.015449799597263336, "step": 24590 }, { "epoch": 39.42307692307692, "grad_norm": 0.4874921441078186, "learning_rate": 7.816037404533419e-06, "loss": 0.016526190936565398, "step": 24600 }, { "epoch": 39.43910256410256, "grad_norm": 0.19372950494289398, "learning_rate": 7.793456175395047e-06, "loss": 0.015258733928203583, "step": 24610 }, { "epoch": 39.455128205128204, "grad_norm": 0.25843602418899536, "learning_rate": 7.770902742523004e-06, "loss": 0.01753896176815033, "step": 24620 }, { "epoch": 39.47115384615385, "grad_norm": 0.3352802097797394, "learning_rate": 7.748377134147773e-06, "loss": 0.015356187522411347, "step": 24630 }, { "epoch": 39.48717948717949, "grad_norm": 0.3038973808288574, "learning_rate": 7.725879378465005e-06, "loss": 0.016666561365127563, 
"step": 24640 }, { "epoch": 39.50320512820513, "grad_norm": 0.3017987608909607, "learning_rate": 7.70340950363548e-06, "loss": 0.017762826383113862, "step": 24650 }, { "epoch": 39.51923076923077, "grad_norm": 0.33316072821617126, "learning_rate": 7.680967537785063e-06, "loss": 0.017564620077610015, "step": 24660 }, { "epoch": 39.53525641025641, "grad_norm": 0.15575845539569855, "learning_rate": 7.658553509004727e-06, "loss": 0.016127300262451173, "step": 24670 }, { "epoch": 39.55128205128205, "grad_norm": 0.22450171411037445, "learning_rate": 7.636167445350428e-06, "loss": 0.01595790982246399, "step": 24680 }, { "epoch": 39.56730769230769, "grad_norm": 0.2731344401836395, "learning_rate": 7.6138093748431546e-06, "loss": 0.01668483167886734, "step": 24690 }, { "epoch": 39.583333333333336, "grad_norm": 0.3419455587863922, "learning_rate": 7.59147932546883e-06, "loss": 0.017664189636707305, "step": 24700 }, { "epoch": 39.59935897435897, "grad_norm": 0.2861364185810089, "learning_rate": 7.569177325178327e-06, "loss": 0.016958671808242797, "step": 24710 }, { "epoch": 39.61538461538461, "grad_norm": 0.22354727983474731, "learning_rate": 7.546903401887379e-06, "loss": 0.014354288578033447, "step": 24720 }, { "epoch": 39.631410256410255, "grad_norm": 0.19608914852142334, "learning_rate": 7.524657583476605e-06, "loss": 0.0166490837931633, "step": 24730 }, { "epoch": 39.6474358974359, "grad_norm": 0.2198379784822464, "learning_rate": 7.502439897791437e-06, "loss": 0.015417565405368806, "step": 24740 }, { "epoch": 39.66346153846154, "grad_norm": 0.20546835660934448, "learning_rate": 7.480250372642068e-06, "loss": 0.016092032194137573, "step": 24750 }, { "epoch": 39.67948717948718, "grad_norm": 0.15944629907608032, "learning_rate": 7.458089035803488e-06, "loss": 0.016166119277477263, "step": 24760 }, { "epoch": 39.69551282051282, "grad_norm": 0.2077895849943161, "learning_rate": 7.4359559150153515e-06, "loss": 0.016647087037563325, "step": 24770 }, { "epoch": 
39.71153846153846, "grad_norm": 0.2985486090183258, "learning_rate": 7.413851037982039e-06, "loss": 0.0149301677942276, "step": 24780 }, { "epoch": 39.7275641025641, "grad_norm": 0.2112479954957962, "learning_rate": 7.391774432372546e-06, "loss": 0.01450248658657074, "step": 24790 }, { "epoch": 39.743589743589745, "grad_norm": 0.17922662198543549, "learning_rate": 7.369726125820506e-06, "loss": 0.01495046615600586, "step": 24800 }, { "epoch": 39.75961538461539, "grad_norm": 0.47651559114456177, "learning_rate": 7.347706145924102e-06, "loss": 0.015616017580032348, "step": 24810 }, { "epoch": 39.77564102564103, "grad_norm": 0.2250736951828003, "learning_rate": 7.325714520246083e-06, "loss": 0.015163148939609527, "step": 24820 }, { "epoch": 39.791666666666664, "grad_norm": 0.30563393235206604, "learning_rate": 7.303751276313708e-06, "loss": 0.017797407507896424, "step": 24830 }, { "epoch": 39.80769230769231, "grad_norm": 0.28298625349998474, "learning_rate": 7.281816441618683e-06, "loss": 0.01727517545223236, "step": 24840 }, { "epoch": 39.82371794871795, "grad_norm": 0.159531369805336, "learning_rate": 7.259910043617184e-06, "loss": 0.014825603365898133, "step": 24850 }, { "epoch": 39.83974358974359, "grad_norm": 0.23511718213558197, "learning_rate": 7.238032109729784e-06, "loss": 0.01609167754650116, "step": 24860 }, { "epoch": 39.85576923076923, "grad_norm": 0.18503232300281525, "learning_rate": 7.216182667341421e-06, "loss": 0.015320885181427001, "step": 24870 }, { "epoch": 39.87179487179487, "grad_norm": 0.2012065351009369, "learning_rate": 7.1943617438013575e-06, "loss": 0.015608593821525574, "step": 24880 }, { "epoch": 39.88782051282051, "grad_norm": 0.21703606843948364, "learning_rate": 7.172569366423192e-06, "loss": 0.016441847383975982, "step": 24890 }, { "epoch": 39.90384615384615, "grad_norm": 0.23235177993774414, "learning_rate": 7.150805562484757e-06, "loss": 0.015067581832408906, "step": 24900 }, { "epoch": 39.919871794871796, "grad_norm": 
0.17375889420509338, "learning_rate": 7.129070359228141e-06, "loss": 0.015520639717578888, "step": 24910 }, { "epoch": 39.93589743589744, "grad_norm": 0.25928303599357605, "learning_rate": 7.1073637838596295e-06, "loss": 0.016400909423828124, "step": 24920 }, { "epoch": 39.95192307692308, "grad_norm": 0.2684354782104492, "learning_rate": 7.085685863549677e-06, "loss": 0.014837732911109925, "step": 24930 }, { "epoch": 39.967948717948715, "grad_norm": 0.1675349771976471, "learning_rate": 7.064036625432845e-06, "loss": 0.01386062353849411, "step": 24940 }, { "epoch": 39.98397435897436, "grad_norm": 0.1556652933359146, "learning_rate": 7.04241609660783e-06, "loss": 0.015193133056163788, "step": 24950 }, { "epoch": 40.0, "grad_norm": 0.1886562556028366, "learning_rate": 7.020824304137371e-06, "loss": 0.016913583874702452, "step": 24960 }, { "epoch": 40.0, "eval_accuracy_background": NaN, "eval_accuracy_crop": 0.99422067331566, "eval_iou_background": 0.0, "eval_iou_crop": 0.99422067331566, "eval_loss": 0.020114142447710037, "eval_mean_accuracy": 0.99422067331566, "eval_mean_iou": 0.49711033665783, "eval_overall_accuracy": 0.99422067331566, "eval_runtime": 36.914, "eval_samples_per_second": 23.866, "eval_steps_per_second": 3.007, "step": 24960 }, { "epoch": 40.01602564102564, "grad_norm": 0.2767278850078583, "learning_rate": 6.999261275048225e-06, "loss": 0.01817336827516556, "step": 24970 }, { "epoch": 40.032051282051285, "grad_norm": 0.2951214611530304, "learning_rate": 6.977727036331173e-06, "loss": 0.01619821637868881, "step": 24980 }, { "epoch": 40.04807692307692, "grad_norm": 0.1292918175458908, "learning_rate": 6.956221614940956e-06, "loss": 0.014623300731182098, "step": 24990 }, { "epoch": 40.06410256410256, "grad_norm": 0.20540687441825867, "learning_rate": 6.9347450377962165e-06, "loss": 0.01665116548538208, "step": 25000 }, { "epoch": 40.080128205128204, "grad_norm": 0.2571375072002411, "learning_rate": 6.913297331779519e-06, "loss": 0.017682230472564696, 
"step": 25010 }, { "epoch": 40.09615384615385, "grad_norm": 0.15255051851272583, "learning_rate": 6.891878523737288e-06, "loss": 0.015555602312088013, "step": 25020 }, { "epoch": 40.11217948717949, "grad_norm": 0.23749525845050812, "learning_rate": 6.870488640479759e-06, "loss": 0.014760158956050873, "step": 25030 }, { "epoch": 40.12820512820513, "grad_norm": 0.21181119978427887, "learning_rate": 6.8491277087809825e-06, "loss": 0.01597866863012314, "step": 25040 }, { "epoch": 40.14423076923077, "grad_norm": 0.20752058923244476, "learning_rate": 6.827795755378748e-06, "loss": 0.015467330813407898, "step": 25050 }, { "epoch": 40.16025641025641, "grad_norm": 0.20832841098308563, "learning_rate": 6.806492806974585e-06, "loss": 0.014457957446575164, "step": 25060 }, { "epoch": 40.17628205128205, "grad_norm": 0.3054477572441101, "learning_rate": 6.785218890233718e-06, "loss": 0.017588084936141966, "step": 25070 }, { "epoch": 40.19230769230769, "grad_norm": 0.17937707901000977, "learning_rate": 6.76397403178504e-06, "loss": 0.018201759457588194, "step": 25080 }, { "epoch": 40.208333333333336, "grad_norm": 0.29307428002357483, "learning_rate": 6.742758258221043e-06, "loss": 0.018449212610721587, "step": 25090 }, { "epoch": 40.22435897435897, "grad_norm": 0.24432845413684845, "learning_rate": 6.72157159609784e-06, "loss": 0.014859245717525482, "step": 25100 }, { "epoch": 40.24038461538461, "grad_norm": 0.22483396530151367, "learning_rate": 6.700414071935105e-06, "loss": 0.015055778622627258, "step": 25110 }, { "epoch": 40.256410256410255, "grad_norm": 0.2526216506958008, "learning_rate": 6.679285712216013e-06, "loss": 0.018913255631923677, "step": 25120 }, { "epoch": 40.2724358974359, "grad_norm": 0.22699980437755585, "learning_rate": 6.65818654338727e-06, "loss": 0.01627463400363922, "step": 25130 }, { "epoch": 40.28846153846154, "grad_norm": 0.22891508042812347, "learning_rate": 6.637116591859016e-06, "loss": 0.014021383225917816, "step": 25140 }, { "epoch": 
40.30448717948718, "grad_norm": 0.4274570345878601, "learning_rate": 6.616075884004814e-06, "loss": 0.015786799788475036, "step": 25150 }, { "epoch": 40.32051282051282, "grad_norm": 0.23200179636478424, "learning_rate": 6.595064446161651e-06, "loss": 0.01862145811319351, "step": 25160 }, { "epoch": 40.33653846153846, "grad_norm": 0.21189109981060028, "learning_rate": 6.5740823046298635e-06, "loss": 0.016546203196048735, "step": 25170 }, { "epoch": 40.3525641025641, "grad_norm": 0.1477491557598114, "learning_rate": 6.553129485673102e-06, "loss": 0.01560390293598175, "step": 25180 }, { "epoch": 40.368589743589745, "grad_norm": 0.2399207502603531, "learning_rate": 6.532206015518335e-06, "loss": 0.016629183292388917, "step": 25190 }, { "epoch": 40.38461538461539, "grad_norm": 0.23641736805438995, "learning_rate": 6.5113119203557935e-06, "loss": 0.014262259006500244, "step": 25200 }, { "epoch": 40.40064102564103, "grad_norm": 0.2298787534236908, "learning_rate": 6.490447226338914e-06, "loss": 0.016633336246013642, "step": 25210 }, { "epoch": 40.416666666666664, "grad_norm": 0.23965148627758026, "learning_rate": 6.469611959584367e-06, "loss": 0.015581512451171875, "step": 25220 }, { "epoch": 40.43269230769231, "grad_norm": 0.2782652676105499, "learning_rate": 6.448806146171956e-06, "loss": 0.015923717617988588, "step": 25230 }, { "epoch": 40.44871794871795, "grad_norm": 0.26331111788749695, "learning_rate": 6.428029812144645e-06, "loss": 0.01515209823846817, "step": 25240 }, { "epoch": 40.46474358974359, "grad_norm": 0.14023475348949432, "learning_rate": 6.40728298350847e-06, "loss": 0.01516883671283722, "step": 25250 }, { "epoch": 40.48076923076923, "grad_norm": 0.2951227128505707, "learning_rate": 6.386565686232559e-06, "loss": 0.017284339666366576, "step": 25260 }, { "epoch": 40.49679487179487, "grad_norm": 0.16548790037631989, "learning_rate": 6.365877946249076e-06, "loss": 0.01613745987415314, "step": 25270 }, { "epoch": 40.51282051282051, "grad_norm": 
0.1807432919740677, "learning_rate": 6.345219789453155e-06, "loss": 0.015473423898220063, "step": 25280 }, { "epoch": 40.52884615384615, "grad_norm": 0.2848694324493408, "learning_rate": 6.324591241702936e-06, "loss": 0.018060161173343657, "step": 25290 }, { "epoch": 40.544871794871796, "grad_norm": 0.3384859561920166, "learning_rate": 6.303992328819491e-06, "loss": 0.01675328016281128, "step": 25300 }, { "epoch": 40.56089743589744, "grad_norm": 0.22389455139636993, "learning_rate": 6.283423076586781e-06, "loss": 0.016042980551719665, "step": 25310 }, { "epoch": 40.57692307692308, "grad_norm": 0.17604903876781464, "learning_rate": 6.2628835107516455e-06, "loss": 0.016725756227970123, "step": 25320 }, { "epoch": 40.592948717948715, "grad_norm": 0.25768691301345825, "learning_rate": 6.242373657023781e-06, "loss": 0.019709506630897523, "step": 25330 }, { "epoch": 40.60897435897436, "grad_norm": 0.1502496600151062, "learning_rate": 6.221893541075665e-06, "loss": 0.01530470848083496, "step": 25340 }, { "epoch": 40.625, "grad_norm": 0.1721327155828476, "learning_rate": 6.201443188542578e-06, "loss": 0.014876532554626464, "step": 25350 }, { "epoch": 40.64102564102564, "grad_norm": 0.2560622990131378, "learning_rate": 6.181022625022545e-06, "loss": 0.018010224401950835, "step": 25360 }, { "epoch": 40.657051282051285, "grad_norm": 0.2247401624917984, "learning_rate": 6.1606318760762736e-06, "loss": 0.015419256687164307, "step": 25370 }, { "epoch": 40.67307692307692, "grad_norm": 0.30187827348709106, "learning_rate": 6.140270967227186e-06, "loss": 0.016022133827209472, "step": 25380 }, { "epoch": 40.68910256410256, "grad_norm": 0.34004607796669006, "learning_rate": 6.1199399239613415e-06, "loss": 0.014920261502265931, "step": 25390 }, { "epoch": 40.705128205128204, "grad_norm": 0.2171996682882309, "learning_rate": 6.0996387717274075e-06, "loss": 0.014161355793476105, "step": 25400 }, { "epoch": 40.72115384615385, "grad_norm": 0.20698174834251404, "learning_rate": 
6.079367535936635e-06, "loss": 0.014669777452945709, "step": 25410 }, { "epoch": 40.73717948717949, "grad_norm": 0.25983357429504395, "learning_rate": 6.059126241962855e-06, "loss": 0.01593896597623825, "step": 25420 }, { "epoch": 40.75320512820513, "grad_norm": 0.2570424973964691, "learning_rate": 6.038914915142382e-06, "loss": 0.01410575807094574, "step": 25430 }, { "epoch": 40.76923076923077, "grad_norm": 0.26547595858573914, "learning_rate": 6.0187335807740466e-06, "loss": 0.015393352508544922, "step": 25440 }, { "epoch": 40.78525641025641, "grad_norm": 0.2659681439399719, "learning_rate": 5.998582264119139e-06, "loss": 0.016424615681171418, "step": 25450 }, { "epoch": 40.80128205128205, "grad_norm": 0.34854865074157715, "learning_rate": 5.978460990401354e-06, "loss": 0.01758902072906494, "step": 25460 }, { "epoch": 40.81730769230769, "grad_norm": 0.17777316272258759, "learning_rate": 5.9583697848067956e-06, "loss": 0.015313540399074555, "step": 25470 }, { "epoch": 40.833333333333336, "grad_norm": 0.18771472573280334, "learning_rate": 5.938308672483938e-06, "loss": 0.01502901166677475, "step": 25480 }, { "epoch": 40.84935897435897, "grad_norm": 0.3147774338722229, "learning_rate": 5.918277678543577e-06, "loss": 0.01601252555847168, "step": 25490 }, { "epoch": 40.86538461538461, "grad_norm": 0.23806339502334595, "learning_rate": 5.898276828058796e-06, "loss": 0.014863020181655884, "step": 25500 }, { "epoch": 40.881410256410255, "grad_norm": 0.3651522696018219, "learning_rate": 5.878306146064971e-06, "loss": 0.016445618867874146, "step": 25510 }, { "epoch": 40.8974358974359, "grad_norm": 0.1553424745798111, "learning_rate": 5.8583656575597185e-06, "loss": 0.016130393743515013, "step": 25520 }, { "epoch": 40.91346153846154, "grad_norm": 0.28873416781425476, "learning_rate": 5.838455387502827e-06, "loss": 0.015049414336681366, "step": 25530 }, { "epoch": 40.92948717948718, "grad_norm": 0.15633444488048553, "learning_rate": 5.8185753608162925e-06, "loss": 
0.014455890655517578, "step": 25540 }, { "epoch": 40.94551282051282, "grad_norm": 0.14832356572151184, "learning_rate": 5.798725602384255e-06, "loss": 0.013777816295623779, "step": 25550 }, { "epoch": 40.96153846153846, "grad_norm": 0.20623108744621277, "learning_rate": 5.778906137052938e-06, "loss": 0.015763524174690246, "step": 25560 }, { "epoch": 40.9775641025641, "grad_norm": 0.2389468252658844, "learning_rate": 5.759116989630681e-06, "loss": 0.014373081922531127, "step": 25570 }, { "epoch": 40.993589743589745, "grad_norm": 0.29480746388435364, "learning_rate": 5.739358184887848e-06, "loss": 0.01522452086210251, "step": 25580 }, { "epoch": 41.0, "eval_accuracy_background": NaN, "eval_accuracy_crop": 0.9940996521535862, "eval_iou_background": 0.0, "eval_iou_crop": 0.9940996521535862, "eval_loss": 0.020095132291316986, "eval_mean_accuracy": 0.9940996521535862, "eval_mean_iou": 0.4970498260767931, "eval_overall_accuracy": 0.9940996521535862, "eval_runtime": 37.2653, "eval_samples_per_second": 23.641, "eval_steps_per_second": 2.979, "step": 25584 }, { "epoch": 41.00961538461539, "grad_norm": 0.3987937867641449, "learning_rate": 5.719629747556845e-06, "loss": 0.016790422797203063, "step": 25590 }, { "epoch": 41.02564102564103, "grad_norm": 0.19474653899669647, "learning_rate": 5.699931702332041e-06, "loss": 0.017181749641895293, "step": 25600 }, { "epoch": 41.041666666666664, "grad_norm": 0.2711731791496277, "learning_rate": 5.6802640738697886e-06, "loss": 0.014211881160736083, "step": 25610 }, { "epoch": 41.05769230769231, "grad_norm": 0.19230277836322784, "learning_rate": 5.660626886788346e-06, "loss": 0.014677666127681732, "step": 25620 }, { "epoch": 41.07371794871795, "grad_norm": 0.19400446116924286, "learning_rate": 5.641020165667877e-06, "loss": 0.015897363424301147, "step": 25630 }, { "epoch": 41.08974358974359, "grad_norm": 0.22516271471977234, "learning_rate": 5.621443935050425e-06, "loss": 0.01558980643749237, "step": 25640 }, { "epoch": 
41.10576923076923, "grad_norm": 0.3416449725627899, "learning_rate": 5.601898219439836e-06, "loss": 0.015873518586158753, "step": 25650 }, { "epoch": 41.12179487179487, "grad_norm": 0.19939953088760376, "learning_rate": 5.582383043301795e-06, "loss": 0.015979786217212678, "step": 25660 }, { "epoch": 41.13782051282051, "grad_norm": 0.250853568315506, "learning_rate": 5.562898431063722e-06, "loss": 0.014754825830459594, "step": 25670 }, { "epoch": 41.15384615384615, "grad_norm": 0.2654423713684082, "learning_rate": 5.54344440711482e-06, "loss": 0.015471953153610229, "step": 25680 }, { "epoch": 41.169871794871796, "grad_norm": 0.14206144213676453, "learning_rate": 5.524020995805969e-06, "loss": 0.015542498230934143, "step": 25690 }, { "epoch": 41.18589743589744, "grad_norm": 0.18759794533252716, "learning_rate": 5.504628221449758e-06, "loss": 0.015560188889503479, "step": 25700 }, { "epoch": 41.20192307692308, "grad_norm": 0.3173023760318756, "learning_rate": 5.485266108320405e-06, "loss": 0.01945880651473999, "step": 25710 }, { "epoch": 41.217948717948715, "grad_norm": 0.2542124092578888, "learning_rate": 5.4659346806537655e-06, "loss": 0.014651864767074585, "step": 25720 }, { "epoch": 41.23397435897436, "grad_norm": 0.27335309982299805, "learning_rate": 5.446633962647285e-06, "loss": 0.016305601596832274, "step": 25730 }, { "epoch": 41.25, "grad_norm": 0.2038680613040924, "learning_rate": 5.427363978459954e-06, "loss": 0.017959526181221007, "step": 25740 }, { "epoch": 41.26602564102564, "grad_norm": 0.2484351098537445, "learning_rate": 5.4081247522123045e-06, "loss": 0.01636822074651718, "step": 25750 }, { "epoch": 41.282051282051285, "grad_norm": 0.3129584789276123, "learning_rate": 5.388916307986377e-06, "loss": 0.017649772763252258, "step": 25760 }, { "epoch": 41.29807692307692, "grad_norm": 0.1396629959344864, "learning_rate": 5.3697386698256615e-06, "loss": 0.014575979113578797, "step": 25770 }, { "epoch": 41.31410256410256, "grad_norm": 0.22827285528182983, 
"learning_rate": 5.3505918617350925e-06, "loss": 0.017386044561862945, "step": 25780 }, { "epoch": 41.330128205128204, "grad_norm": 0.2325846254825592, "learning_rate": 5.331475907681026e-06, "loss": 0.017429980635643005, "step": 25790 }, { "epoch": 41.34615384615385, "grad_norm": 0.23950380086898804, "learning_rate": 5.312390831591198e-06, "loss": 0.015714964270591734, "step": 25800 }, { "epoch": 41.36217948717949, "grad_norm": 0.27715402841567993, "learning_rate": 5.2933366573546706e-06, "loss": 0.0166951447725296, "step": 25810 }, { "epoch": 41.37820512820513, "grad_norm": 0.1530309021472931, "learning_rate": 5.274313408821847e-06, "loss": 0.015098965167999268, "step": 25820 }, { "epoch": 41.39423076923077, "grad_norm": 0.22261077165603638, "learning_rate": 5.255321109804428e-06, "loss": 0.015312607586383819, "step": 25830 }, { "epoch": 41.41025641025641, "grad_norm": 0.32534727454185486, "learning_rate": 5.236359784075342e-06, "loss": 0.015554714202880859, "step": 25840 }, { "epoch": 41.42628205128205, "grad_norm": 0.19333140552043915, "learning_rate": 5.217429455368782e-06, "loss": 0.017826384305953978, "step": 25850 }, { "epoch": 41.44230769230769, "grad_norm": 0.15747572481632233, "learning_rate": 5.198530147380119e-06, "loss": 0.015524250268936158, "step": 25860 }, { "epoch": 41.458333333333336, "grad_norm": 0.17645283043384552, "learning_rate": 5.179661883765895e-06, "loss": 0.015526919066905976, "step": 25870 }, { "epoch": 41.47435897435897, "grad_norm": 0.36010026931762695, "learning_rate": 5.16082468814381e-06, "loss": 0.014788316190242767, "step": 25880 }, { "epoch": 41.49038461538461, "grad_norm": 0.19072842597961426, "learning_rate": 5.142018584092671e-06, "loss": 0.015993580222129822, "step": 25890 }, { "epoch": 41.506410256410255, "grad_norm": 0.4782772660255432, "learning_rate": 5.123243595152352e-06, "loss": 0.01729568690061569, "step": 25900 }, { "epoch": 41.5224358974359, "grad_norm": 0.3561180830001831, "learning_rate": 5.10449974482379e-06, 
"loss": 0.01687629222869873, "step": 25910 }, { "epoch": 41.53846153846154, "grad_norm": 0.2846118211746216, "learning_rate": 5.085787056568958e-06, "loss": 0.014310000836849213, "step": 25920 }, { "epoch": 41.55448717948718, "grad_norm": 0.25853434205055237, "learning_rate": 5.067105553810797e-06, "loss": 0.01574263572692871, "step": 25930 }, { "epoch": 41.57051282051282, "grad_norm": 0.2583836615085602, "learning_rate": 5.048455259933239e-06, "loss": 0.0166735514998436, "step": 25940 }, { "epoch": 41.58653846153846, "grad_norm": 0.327605664730072, "learning_rate": 5.02983619828113e-06, "loss": 0.017606456577777863, "step": 25950 }, { "epoch": 41.6025641025641, "grad_norm": 0.22916877269744873, "learning_rate": 5.0112483921602245e-06, "loss": 0.017084847390651702, "step": 25960 }, { "epoch": 41.618589743589745, "grad_norm": 0.26464664936065674, "learning_rate": 4.992691864837168e-06, "loss": 0.01525554358959198, "step": 25970 }, { "epoch": 41.63461538461539, "grad_norm": 0.23156094551086426, "learning_rate": 4.974166639539456e-06, "loss": 0.016177119314670564, "step": 25980 }, { "epoch": 41.65064102564103, "grad_norm": 0.6411205530166626, "learning_rate": 4.95567273945537e-06, "loss": 0.018624112010002136, "step": 25990 }, { "epoch": 41.666666666666664, "grad_norm": 0.15057504177093506, "learning_rate": 4.937210187734021e-06, "loss": 0.016141235828399658, "step": 26000 }, { "epoch": 41.68269230769231, "grad_norm": 0.19816768169403076, "learning_rate": 4.918779007485267e-06, "loss": 0.015264487266540528, "step": 26010 }, { "epoch": 41.69871794871795, "grad_norm": 0.6768742203712463, "learning_rate": 4.900379221779675e-06, "loss": 0.015703105926513673, "step": 26020 }, { "epoch": 41.71474358974359, "grad_norm": 0.25814998149871826, "learning_rate": 4.882010853648558e-06, "loss": 0.015486818552017213, "step": 26030 }, { "epoch": 41.73076923076923, "grad_norm": 0.8268464207649231, "learning_rate": 4.863673926083869e-06, "loss": 0.016236080229282378, "step": 26040 }, { 
"epoch": 41.74679487179487, "grad_norm": 0.4920724928379059, "learning_rate": 4.84536846203821e-06, "loss": 0.015169557929039002, "step": 26050 }, { "epoch": 41.76282051282051, "grad_norm": 0.2606591284275055, "learning_rate": 4.827094484424812e-06, "loss": 0.015283823013305664, "step": 26060 }, { "epoch": 41.77884615384615, "grad_norm": 0.13277415931224823, "learning_rate": 4.808852016117502e-06, "loss": 0.01575264483690262, "step": 26070 }, { "epoch": 41.794871794871796, "grad_norm": 0.2759374678134918, "learning_rate": 4.790641079950631e-06, "loss": 0.015719589591026307, "step": 26080 }, { "epoch": 41.81089743589744, "grad_norm": 0.15149956941604614, "learning_rate": 4.772461698719114e-06, "loss": 0.01773640960454941, "step": 26090 }, { "epoch": 41.82692307692308, "grad_norm": 0.171827033162117, "learning_rate": 4.754313895178356e-06, "loss": 0.013866463303565979, "step": 26100 }, { "epoch": 41.842948717948715, "grad_norm": 0.22337575256824493, "learning_rate": 4.736197692044247e-06, "loss": 0.015522250533103943, "step": 26110 }, { "epoch": 41.85897435897436, "grad_norm": 0.23868010938167572, "learning_rate": 4.718113111993102e-06, "loss": 0.016120049357414245, "step": 26120 }, { "epoch": 41.875, "grad_norm": 0.20721125602722168, "learning_rate": 4.7000601776616555e-06, "loss": 0.013855832815170287, "step": 26130 }, { "epoch": 41.89102564102564, "grad_norm": 0.16768178343772888, "learning_rate": 4.682038911647055e-06, "loss": 0.015320493280887604, "step": 26140 }, { "epoch": 41.907051282051285, "grad_norm": 0.12928467988967896, "learning_rate": 4.664049336506776e-06, "loss": 0.014598225057125092, "step": 26150 }, { "epoch": 41.92307692307692, "grad_norm": 0.1440954953432083, "learning_rate": 4.646091474758649e-06, "loss": 0.016231146454811097, "step": 26160 }, { "epoch": 41.93910256410256, "grad_norm": 0.24078907072544098, "learning_rate": 4.628165348880804e-06, "loss": 0.014495283365249634, "step": 26170 }, { "epoch": 41.955128205128204, "grad_norm": 
0.22821982204914093, "learning_rate": 4.610270981311635e-06, "loss": 0.015969564020633698, "step": 26180 }, { "epoch": 41.97115384615385, "grad_norm": 0.23779672384262085, "learning_rate": 4.592408394449795e-06, "loss": 0.01441107839345932, "step": 26190 }, { "epoch": 41.98717948717949, "grad_norm": 0.3836655616760254, "learning_rate": 4.574577610654166e-06, "loss": 0.015869605541229247, "step": 26200 }, { "epoch": 42.0, "eval_accuracy_background": NaN, "eval_accuracy_crop": 0.994351797801006, "eval_iou_background": 0.0, "eval_iou_crop": 0.994351797801006, "eval_loss": 0.02014019899070263, "eval_mean_accuracy": 0.994351797801006, "eval_mean_iou": 0.497175898900503, "eval_overall_accuracy": 0.994351797801006, "eval_runtime": 36.6017, "eval_samples_per_second": 24.07, "eval_steps_per_second": 3.033, "step": 26208 }, { "epoch": 42.00320512820513, "grad_norm": 0.29316309094429016, "learning_rate": 4.556778652243794e-06, "loss": 0.01479412317276001, "step": 26210 }, { "epoch": 42.01923076923077, "grad_norm": 0.16531236469745636, "learning_rate": 4.5390115414979075e-06, "loss": 0.016060255467891693, "step": 26220 }, { "epoch": 42.03525641025641, "grad_norm": 0.19547884166240692, "learning_rate": 4.52127630065587e-06, "loss": 0.015150436758995056, "step": 26230 }, { "epoch": 42.05128205128205, "grad_norm": 0.16640129685401917, "learning_rate": 4.503572951917148e-06, "loss": 0.014872702956199645, "step": 26240 }, { "epoch": 42.06730769230769, "grad_norm": 0.2892209589481354, "learning_rate": 4.485901517441287e-06, "loss": 0.015216463804244995, "step": 26250 }, { "epoch": 42.083333333333336, "grad_norm": 0.21113869547843933, "learning_rate": 4.468262019347903e-06, "loss": 0.016292546689510346, "step": 26260 }, { "epoch": 42.09935897435897, "grad_norm": 0.1838555932044983, "learning_rate": 4.450654479716605e-06, "loss": 0.015022656321525574, "step": 26270 }, { "epoch": 42.11538461538461, "grad_norm": 0.3587568700313568, "learning_rate": 4.433078920587024e-06, "loss": 
0.015241105854511262, "step": 26280 }, { "epoch": 42.131410256410255, "grad_norm": 0.21483244001865387, "learning_rate": 4.415535363958758e-06, "loss": 0.014974254369735717, "step": 26290 }, { "epoch": 42.1474358974359, "grad_norm": 0.1855795532464981, "learning_rate": 4.398023831791339e-06, "loss": 0.015840443968772887, "step": 26300 }, { "epoch": 42.16346153846154, "grad_norm": 0.3836417496204376, "learning_rate": 4.380544346004206e-06, "loss": 0.016102108359336852, "step": 26310 }, { "epoch": 42.17948717948718, "grad_norm": 0.5235776305198669, "learning_rate": 4.363096928476711e-06, "loss": 0.015670591592788698, "step": 26320 }, { "epoch": 42.19551282051282, "grad_norm": 0.18320339918136597, "learning_rate": 4.345681601048034e-06, "loss": 0.016145786643028258, "step": 26330 }, { "epoch": 42.21153846153846, "grad_norm": 0.28371089696884155, "learning_rate": 4.328298385517211e-06, "loss": 0.016843417286872865, "step": 26340 }, { "epoch": 42.2275641025641, "grad_norm": 0.228502556681633, "learning_rate": 4.310947303643083e-06, "loss": 0.0176925390958786, "step": 26350 }, { "epoch": 42.243589743589745, "grad_norm": 0.18563459813594818, "learning_rate": 4.293628377144249e-06, "loss": 0.015420678257942199, "step": 26360 }, { "epoch": 42.25961538461539, "grad_norm": 0.1388738751411438, "learning_rate": 4.276341627699079e-06, "loss": 0.014767493307590484, "step": 26370 }, { "epoch": 42.27564102564103, "grad_norm": 0.26905345916748047, "learning_rate": 4.259087076945658e-06, "loss": 0.017391054332256316, "step": 26380 }, { "epoch": 42.291666666666664, "grad_norm": 0.3260944187641144, "learning_rate": 4.2418647464817676e-06, "loss": 0.015436826646327973, "step": 26390 }, { "epoch": 42.30769230769231, "grad_norm": 0.2720956802368164, "learning_rate": 4.224674657864849e-06, "loss": 0.015343558788299561, "step": 26400 }, { "epoch": 42.32371794871795, "grad_norm": 0.3105943500995636, "learning_rate": 4.207516832612004e-06, "loss": 0.01600683182477951, "step": 26410 }, { 
"epoch": 42.33974358974359, "grad_norm": 0.1694859266281128, "learning_rate": 4.19039129219995e-06, "loss": 0.015880517661571503, "step": 26420 }, { "epoch": 42.35576923076923, "grad_norm": 0.23244249820709229, "learning_rate": 4.173298058064966e-06, "loss": 0.01499905288219452, "step": 26430 }, { "epoch": 42.37179487179487, "grad_norm": 0.13790948688983917, "learning_rate": 4.156237151602924e-06, "loss": 0.01563873440027237, "step": 26440 }, { "epoch": 42.38782051282051, "grad_norm": 0.21401254832744598, "learning_rate": 4.139208594169222e-06, "loss": 0.013216523826122284, "step": 26450 }, { "epoch": 42.40384615384615, "grad_norm": 0.34675493836402893, "learning_rate": 4.1222124070787486e-06, "loss": 0.01638118177652359, "step": 26460 }, { "epoch": 42.419871794871796, "grad_norm": 0.3985803425312042, "learning_rate": 4.1052486116059025e-06, "loss": 0.015673717856407164, "step": 26470 }, { "epoch": 42.43589743589744, "grad_norm": 0.2814825773239136, "learning_rate": 4.08831722898451e-06, "loss": 0.016618813574314117, "step": 26480 }, { "epoch": 42.45192307692308, "grad_norm": 0.30511173605918884, "learning_rate": 4.071418280407851e-06, "loss": 0.016613090038299562, "step": 26490 }, { "epoch": 42.467948717948715, "grad_norm": 0.14859738945960999, "learning_rate": 4.054551787028579e-06, "loss": 0.015386627614498138, "step": 26500 }, { "epoch": 42.48397435897436, "grad_norm": 0.16382090747356415, "learning_rate": 4.037717769958757e-06, "loss": 0.015242880582809449, "step": 26510 }, { "epoch": 42.5, "grad_norm": 0.19191813468933105, "learning_rate": 4.020916250269759e-06, "loss": 0.015674485266208647, "step": 26520 }, { "epoch": 42.51602564102564, "grad_norm": 0.2717702388763428, "learning_rate": 4.004147248992315e-06, "loss": 0.015406608581542969, "step": 26530 }, { "epoch": 42.532051282051285, "grad_norm": 0.16631656885147095, "learning_rate": 3.987410787116437e-06, "loss": 0.017826557159423828, "step": 26540 }, { "epoch": 42.54807692307692, "grad_norm": 
0.14068414270877838, "learning_rate": 3.970706885591398e-06, "loss": 0.015842844545841218, "step": 26550 }, { "epoch": 42.56410256410256, "grad_norm": 0.4580654203891754, "learning_rate": 3.954035565325729e-06, "loss": 0.0158625990152359, "step": 26560 }, { "epoch": 42.580128205128204, "grad_norm": 0.19596777856349945, "learning_rate": 3.937396847187178e-06, "loss": 0.017040717601776122, "step": 26570 }, { "epoch": 42.59615384615385, "grad_norm": 0.20717613399028778, "learning_rate": 3.9207907520026785e-06, "loss": 0.01783076226711273, "step": 26580 }, { "epoch": 42.61217948717949, "grad_norm": 0.31024429202079773, "learning_rate": 3.90421730055832e-06, "loss": 0.01567111015319824, "step": 26590 }, { "epoch": 42.62820512820513, "grad_norm": 0.2499951869249344, "learning_rate": 3.887676513599355e-06, "loss": 0.01636154353618622, "step": 26600 }, { "epoch": 42.64423076923077, "grad_norm": 0.2433861494064331, "learning_rate": 3.871168411830125e-06, "loss": 0.01593855917453766, "step": 26610 }, { "epoch": 42.66025641025641, "grad_norm": 0.2491329163312912, "learning_rate": 3.854693015914077e-06, "loss": 0.015329115092754364, "step": 26620 }, { "epoch": 42.67628205128205, "grad_norm": 0.14969664812088013, "learning_rate": 3.838250346473719e-06, "loss": 0.014433610439300536, "step": 26630 }, { "epoch": 42.69230769230769, "grad_norm": 0.38936251401901245, "learning_rate": 3.821840424090572e-06, "loss": 0.016875535249710083, "step": 26640 }, { "epoch": 42.708333333333336, "grad_norm": 0.1981746256351471, "learning_rate": 3.805463269305198e-06, "loss": 0.015121924877166747, "step": 26650 }, { "epoch": 42.72435897435897, "grad_norm": 0.1704418808221817, "learning_rate": 3.789118902617128e-06, "loss": 0.014542128145694732, "step": 26660 }, { "epoch": 42.74038461538461, "grad_norm": 0.1894160807132721, "learning_rate": 3.7728073444848498e-06, "loss": 0.0160506010055542, "step": 26670 }, { "epoch": 42.756410256410255, "grad_norm": 0.1900782287120819, "learning_rate": 
3.7565286153257817e-06, "loss": 0.014742588996887207, "step": 26680 }, { "epoch": 42.7724358974359, "grad_norm": 0.3177216053009033, "learning_rate": 3.7402827355162604e-06, "loss": 0.018726998567581178, "step": 26690 }, { "epoch": 42.78846153846154, "grad_norm": 0.12900514900684357, "learning_rate": 3.7240697253915035e-06, "loss": 0.016341936588287354, "step": 26700 }, { "epoch": 42.80448717948718, "grad_norm": 0.32256948947906494, "learning_rate": 3.70788960524557e-06, "loss": 0.016415780782699584, "step": 26710 }, { "epoch": 42.82051282051282, "grad_norm": 0.21882539987564087, "learning_rate": 3.691742395331367e-06, "loss": 0.01733297109603882, "step": 26720 }, { "epoch": 42.83653846153846, "grad_norm": 0.18651629984378815, "learning_rate": 3.6756281158606054e-06, "loss": 0.01441863775253296, "step": 26730 }, { "epoch": 42.8525641025641, "grad_norm": 0.47701722383499146, "learning_rate": 3.659546787003758e-06, "loss": 0.016977226734161376, "step": 26740 }, { "epoch": 42.868589743589745, "grad_norm": 0.16446605324745178, "learning_rate": 3.6434984288900865e-06, "loss": 0.014697203040122985, "step": 26750 }, { "epoch": 42.88461538461539, "grad_norm": 0.2559817135334015, "learning_rate": 3.6274830616075516e-06, "loss": 0.017075923085212708, "step": 26760 }, { "epoch": 42.90064102564103, "grad_norm": 0.24415402114391327, "learning_rate": 3.6115007052028284e-06, "loss": 0.014367210865020751, "step": 26770 }, { "epoch": 42.916666666666664, "grad_norm": 0.20452040433883667, "learning_rate": 3.5955513796812735e-06, "loss": 0.014483383297920227, "step": 26780 }, { "epoch": 42.93269230769231, "grad_norm": 0.2807683050632477, "learning_rate": 3.579635105006911e-06, "loss": 0.016357490420341493, "step": 26790 }, { "epoch": 42.94871794871795, "grad_norm": 0.2562841773033142, "learning_rate": 3.5637519011023643e-06, "loss": 0.014637213945388795, "step": 26800 }, { "epoch": 42.96474358974359, "grad_norm": 0.14608466625213623, "learning_rate": 3.547901787848894e-06, "loss": 
0.013836972415447235, "step": 26810 }, { "epoch": 42.98076923076923, "grad_norm": 0.16006146371364594, "learning_rate": 3.5320847850863226e-06, "loss": 0.014361758530139924, "step": 26820 }, { "epoch": 42.99679487179487, "grad_norm": 0.47250473499298096, "learning_rate": 3.516300912613026e-06, "loss": 0.01773979663848877, "step": 26830 }, { "epoch": 43.0, "eval_accuracy_background": NaN, "eval_accuracy_crop": 0.994088920900306, "eval_iou_background": 0.0, "eval_iou_crop": 0.994088920900306, "eval_loss": 0.020025523379445076, "eval_mean_accuracy": 0.994088920900306, "eval_mean_iou": 0.497044460450153, "eval_overall_accuracy": 0.994088920900306, "eval_runtime": 36.7941, "eval_samples_per_second": 23.944, "eval_steps_per_second": 3.017, "step": 26832 }, { "epoch": 43.01282051282051, "grad_norm": 0.29088303446769714, "learning_rate": 3.500550190185925e-06, "loss": 0.01717466115951538, "step": 26840 }, { "epoch": 43.02884615384615, "grad_norm": 0.1873633861541748, "learning_rate": 3.484832637520434e-06, "loss": 0.016117362678050993, "step": 26850 }, { "epoch": 43.044871794871796, "grad_norm": 0.2045789510011673, "learning_rate": 3.46914827429044e-06, "loss": 0.013911810517311097, "step": 26860 }, { "epoch": 43.06089743589744, "grad_norm": 0.34062501788139343, "learning_rate": 3.453497120128305e-06, "loss": 0.016339190304279327, "step": 26870 }, { "epoch": 43.07692307692308, "grad_norm": 0.42955282330513, "learning_rate": 3.4378791946248243e-06, "loss": 0.01733371913433075, "step": 26880 }, { "epoch": 43.092948717948715, "grad_norm": 0.20695674419403076, "learning_rate": 3.4222945173291775e-06, "loss": 0.015658491849899293, "step": 26890 }, { "epoch": 43.10897435897436, "grad_norm": 0.1519077867269516, "learning_rate": 3.4067431077489462e-06, "loss": 0.014752121269702911, "step": 26900 }, { "epoch": 43.125, "grad_norm": 0.25421059131622314, "learning_rate": 3.39122498535007e-06, "loss": 0.015768979489803315, "step": 26910 }, { "epoch": 43.14102564102564, "grad_norm": 
0.18975526094436646, "learning_rate": 3.3757401695568035e-06, "loss": 0.014000184834003448, "step": 26920 }, { "epoch": 43.157051282051285, "grad_norm": 0.2322075515985489, "learning_rate": 3.360288679751743e-06, "loss": 0.016468195617198943, "step": 26930 }, { "epoch": 43.17307692307692, "grad_norm": 0.1704903393983841, "learning_rate": 3.344870535275737e-06, "loss": 0.015251073241233825, "step": 26940 }, { "epoch": 43.18910256410256, "grad_norm": 0.2622019350528717, "learning_rate": 3.329485755427911e-06, "loss": 0.01728222370147705, "step": 26950 }, { "epoch": 43.205128205128204, "grad_norm": 0.20400291681289673, "learning_rate": 3.314134359465628e-06, "loss": 0.017922963201999664, "step": 26960 }, { "epoch": 43.22115384615385, "grad_norm": 0.2157556712627411, "learning_rate": 3.298816366604467e-06, "loss": 0.014815334975719453, "step": 26970 }, { "epoch": 43.23717948717949, "grad_norm": 0.2109391689300537, "learning_rate": 3.2835317960181897e-06, "loss": 0.01620374619960785, "step": 26980 }, { "epoch": 43.25320512820513, "grad_norm": 0.1645727902650833, "learning_rate": 3.2682806668387164e-06, "loss": 0.015756240487098692, "step": 26990 }, { "epoch": 43.26923076923077, "grad_norm": 0.36243924498558044, "learning_rate": 3.2530629981561235e-06, "loss": 0.01778024137020111, "step": 27000 }, { "epoch": 43.28525641025641, "grad_norm": 0.19168062508106232, "learning_rate": 3.237878809018603e-06, "loss": 0.017213815450668336, "step": 27010 }, { "epoch": 43.30128205128205, "grad_norm": 0.16529713571071625, "learning_rate": 3.222728118432422e-06, "loss": 0.016842158138751985, "step": 27020 }, { "epoch": 43.31730769230769, "grad_norm": 0.1824539601802826, "learning_rate": 3.2076109453619272e-06, "loss": 0.015598250925540924, "step": 27030 }, { "epoch": 43.333333333333336, "grad_norm": 0.295734167098999, "learning_rate": 3.1925273087295227e-06, "loss": 0.016020989418029784, "step": 27040 }, { "epoch": 43.34935897435897, "grad_norm": 0.21684059500694275, "learning_rate": 
3.177477227415615e-06, "loss": 0.014618444442749023, "step": 27050 }, { "epoch": 43.36538461538461, "grad_norm": 0.2538378834724426, "learning_rate": 3.162460720258622e-06, "loss": 0.0158008947968483, "step": 27060 }, { "epoch": 43.381410256410255, "grad_norm": 0.6507816314697266, "learning_rate": 3.1474778060549382e-06, "loss": 0.014226746559143067, "step": 27070 }, { "epoch": 43.3974358974359, "grad_norm": 0.16738712787628174, "learning_rate": 3.1325285035588946e-06, "loss": 0.014349660277366639, "step": 27080 }, { "epoch": 43.41346153846154, "grad_norm": 0.20467263460159302, "learning_rate": 3.11761283148276e-06, "loss": 0.015114837884902954, "step": 27090 }, { "epoch": 43.42948717948718, "grad_norm": 0.2085937112569809, "learning_rate": 3.1027308084967165e-06, "loss": 0.017206262052059173, "step": 27100 }, { "epoch": 43.44551282051282, "grad_norm": 0.21267284452915192, "learning_rate": 3.0878824532288097e-06, "loss": 0.015570643544197082, "step": 27110 }, { "epoch": 43.46153846153846, "grad_norm": 0.1843212991952896, "learning_rate": 3.073067784264946e-06, "loss": 0.014846120774745942, "step": 27120 }, { "epoch": 43.4775641025641, "grad_norm": 0.17467311024665833, "learning_rate": 3.058286820148878e-06, "loss": 0.01577175408601761, "step": 27130 }, { "epoch": 43.493589743589745, "grad_norm": 0.1674850881099701, "learning_rate": 3.043539579382153e-06, "loss": 0.018162702023983002, "step": 27140 }, { "epoch": 43.50961538461539, "grad_norm": 0.2373264729976654, "learning_rate": 3.028826080424122e-06, "loss": 0.015372383594512939, "step": 27150 }, { "epoch": 43.52564102564103, "grad_norm": 0.19924038648605347, "learning_rate": 3.014146341691896e-06, "loss": 0.016307435929775238, "step": 27160 }, { "epoch": 43.541666666666664, "grad_norm": 0.1887895166873932, "learning_rate": 2.999500381560316e-06, "loss": 0.01485365629196167, "step": 27170 }, { "epoch": 43.55769230769231, "grad_norm": 0.1540125608444214, "learning_rate": 2.984888218361962e-06, "loss": 
0.015328684449195861, "step": 27180 }, { "epoch": 43.57371794871795, "grad_norm": 0.2239919900894165, "learning_rate": 2.9703098703870955e-06, "loss": 0.01701032221317291, "step": 27190 }, { "epoch": 43.58974358974359, "grad_norm": 0.17102038860321045, "learning_rate": 2.9557653558836573e-06, "loss": 0.017765198647975922, "step": 27200 }, { "epoch": 43.60576923076923, "grad_norm": 0.35426074266433716, "learning_rate": 2.9412546930572272e-06, "loss": 0.016049553453922272, "step": 27210 }, { "epoch": 43.62179487179487, "grad_norm": 0.27008602023124695, "learning_rate": 2.926777900071029e-06, "loss": 0.015111444890499115, "step": 27220 }, { "epoch": 43.63782051282051, "grad_norm": 0.2623751163482666, "learning_rate": 2.9123349950458787e-06, "loss": 0.016740386188030244, "step": 27230 }, { "epoch": 43.65384615384615, "grad_norm": 0.2563033998012543, "learning_rate": 2.8979259960601814e-06, "loss": 0.016247636079788207, "step": 27240 }, { "epoch": 43.669871794871796, "grad_norm": 0.1995753049850464, "learning_rate": 2.8835509211499023e-06, "loss": 0.014546093344688416, "step": 27250 }, { "epoch": 43.68589743589744, "grad_norm": 0.33641499280929565, "learning_rate": 2.8692097883085287e-06, "loss": 0.016625453531742097, "step": 27260 }, { "epoch": 43.70192307692308, "grad_norm": 0.21244969964027405, "learning_rate": 2.8549026154870815e-06, "loss": 0.01481824517250061, "step": 27270 }, { "epoch": 43.717948717948715, "grad_norm": 0.14407239854335785, "learning_rate": 2.8406294205940686e-06, "loss": 0.01736980527639389, "step": 27280 }, { "epoch": 43.73397435897436, "grad_norm": 0.2475077360868454, "learning_rate": 2.826390221495455e-06, "loss": 0.014761492609977722, "step": 27290 }, { "epoch": 43.75, "grad_norm": 0.4000876843929291, "learning_rate": 2.8121850360146705e-06, "loss": 0.014825972914695739, "step": 27300 }, { "epoch": 43.76602564102564, "grad_norm": 0.18984384834766388, "learning_rate": 2.7980138819325485e-06, "loss": 0.014206968247890472, "step": 27310 }, { 
"epoch": 43.782051282051285, "grad_norm": 0.19604291021823883, "learning_rate": 2.7838767769873496e-06, "loss": 0.015287370979785919, "step": 27320 }, { "epoch": 43.79807692307692, "grad_norm": 0.19393356144428253, "learning_rate": 2.769773738874689e-06, "loss": 0.016640615463256837, "step": 27330 }, { "epoch": 43.81410256410256, "grad_norm": 0.21643687784671783, "learning_rate": 2.7557047852475594e-06, "loss": 0.018344175815582276, "step": 27340 }, { "epoch": 43.830128205128204, "grad_norm": 0.13708262145519257, "learning_rate": 2.7416699337162867e-06, "loss": 0.01578976958990097, "step": 27350 }, { "epoch": 43.84615384615385, "grad_norm": 0.14703939855098724, "learning_rate": 2.7276692018484985e-06, "loss": 0.014624001085758209, "step": 27360 }, { "epoch": 43.86217948717949, "grad_norm": 0.2415064126253128, "learning_rate": 2.7137026071691264e-06, "loss": 0.016246536374092103, "step": 27370 }, { "epoch": 43.87820512820513, "grad_norm": 0.24955536425113678, "learning_rate": 2.6997701671603656e-06, "loss": 0.016141320765018462, "step": 27380 }, { "epoch": 43.89423076923077, "grad_norm": 0.17620126903057098, "learning_rate": 2.685871899261667e-06, "loss": 0.01502893716096878, "step": 27390 }, { "epoch": 43.91025641025641, "grad_norm": 0.2734832465648651, "learning_rate": 2.672007820869694e-06, "loss": 0.014052066206932067, "step": 27400 }, { "epoch": 43.92628205128205, "grad_norm": 0.2465400993824005, "learning_rate": 2.658177949338332e-06, "loss": 0.016524510085582735, "step": 27410 }, { "epoch": 43.94230769230769, "grad_norm": 0.19635044038295746, "learning_rate": 2.6443823019786294e-06, "loss": 0.01548108011484146, "step": 27420 }, { "epoch": 43.958333333333336, "grad_norm": 0.24371761083602905, "learning_rate": 2.630620896058814e-06, "loss": 0.015163727104663849, "step": 27430 }, { "epoch": 43.97435897435897, "grad_norm": 0.4667902886867523, "learning_rate": 2.61689374880425e-06, "loss": 0.015231020748615265, "step": 27440 }, { "epoch": 43.99038461538461, 
"grad_norm": 0.12969057261943817, "learning_rate": 2.6032008773974025e-06, "loss": 0.014242395758628845, "step": 27450 }, { "epoch": 44.0, "eval_accuracy_background": NaN, "eval_accuracy_crop": 0.9941604908104137, "eval_iou_background": 0.0, "eval_iou_crop": 0.9941604908104137, "eval_loss": 0.020124657079577446, "eval_mean_accuracy": 0.9941604908104137, "eval_mean_iou": 0.49708024540520684, "eval_overall_accuracy": 0.9941604908104137, "eval_runtime": 34.7455, "eval_samples_per_second": 25.356, "eval_steps_per_second": 3.195, "step": 27456 }, { "epoch": 44.006410256410255, "grad_norm": 0.30255669355392456, "learning_rate": 2.589542298977855e-06, "loss": 0.015404768288135529, "step": 27460 }, { "epoch": 44.0224358974359, "grad_norm": 0.3007180988788605, "learning_rate": 2.575918030642255e-06, "loss": 0.0179648295044899, "step": 27470 }, { "epoch": 44.03846153846154, "grad_norm": 0.1760716438293457, "learning_rate": 2.562328089444307e-06, "loss": 0.014116986095905304, "step": 27480 }, { "epoch": 44.05448717948718, "grad_norm": 0.24967922270298004, "learning_rate": 2.548772492394734e-06, "loss": 0.014366871118545533, "step": 27490 }, { "epoch": 44.07051282051282, "grad_norm": 0.3034117817878723, "learning_rate": 2.5352512564612974e-06, "loss": 0.01600838303565979, "step": 27500 }, { "epoch": 44.08653846153846, "grad_norm": 0.12008709460496902, "learning_rate": 2.521764398568718e-06, "loss": 0.0159003883600235, "step": 27510 }, { "epoch": 44.1025641025641, "grad_norm": 0.2040448933839798, "learning_rate": 2.508311935598705e-06, "loss": 0.014509931206703186, "step": 27520 }, { "epoch": 44.118589743589745, "grad_norm": 0.2661570906639099, "learning_rate": 2.494893884389919e-06, "loss": 0.015130941569805146, "step": 27530 }, { "epoch": 44.13461538461539, "grad_norm": 0.15182486176490784, "learning_rate": 2.4815102617379225e-06, "loss": 0.013831028342247009, "step": 27540 }, { "epoch": 44.15064102564103, "grad_norm": 0.1749591827392578, "learning_rate": 
2.468161084395204e-06, "loss": 0.013595481216907502, "step": 27550 }, { "epoch": 44.166666666666664, "grad_norm": 0.3487001359462738, "learning_rate": 2.4548463690711374e-06, "loss": 0.01625477969646454, "step": 27560 }, { "epoch": 44.18269230769231, "grad_norm": 0.13901624083518982, "learning_rate": 2.4415661324319483e-06, "loss": 0.016819104552268982, "step": 27570 }, { "epoch": 44.19871794871795, "grad_norm": 0.30229440331459045, "learning_rate": 2.4283203911007025e-06, "loss": 0.015779705345630647, "step": 27580 }, { "epoch": 44.21474358974359, "grad_norm": 0.25994357466697693, "learning_rate": 2.415109161657297e-06, "loss": 0.015110963582992553, "step": 27590 }, { "epoch": 44.23076923076923, "grad_norm": 0.30086851119995117, "learning_rate": 2.4019324606384386e-06, "loss": 0.01726055294275284, "step": 27600 }, { "epoch": 44.24679487179487, "grad_norm": 0.2616719901561737, "learning_rate": 2.3887903045375892e-06, "loss": 0.015254242718219757, "step": 27610 }, { "epoch": 44.26282051282051, "grad_norm": 0.21804143488407135, "learning_rate": 2.3756827098049903e-06, "loss": 0.015104980766773224, "step": 27620 }, { "epoch": 44.27884615384615, "grad_norm": 0.17123019695281982, "learning_rate": 2.3626096928476217e-06, "loss": 0.016506747901439668, "step": 27630 }, { "epoch": 44.294871794871796, "grad_norm": 0.14019793272018433, "learning_rate": 2.3495712700291626e-06, "loss": 0.015451580286026001, "step": 27640 }, { "epoch": 44.31089743589744, "grad_norm": 0.19523628056049347, "learning_rate": 2.336567457670016e-06, "loss": 0.014861449599266052, "step": 27650 }, { "epoch": 44.32692307692308, "grad_norm": 0.3144244849681854, "learning_rate": 2.3235982720472472e-06, "loss": 0.0171344593167305, "step": 27660 }, { "epoch": 44.342948717948715, "grad_norm": 0.26892611384391785, "learning_rate": 2.3106637293945744e-06, "loss": 0.015473204851150512, "step": 27670 }, { "epoch": 44.35897435897436, "grad_norm": 0.2885870635509491, "learning_rate": 2.297763845902363e-06, "loss": 
0.015359418094158172, "step": 27680 }, { "epoch": 44.375, "grad_norm": 0.25746890902519226, "learning_rate": 2.284898637717604e-06, "loss": 0.017470090091228484, "step": 27690 }, { "epoch": 44.39102564102564, "grad_norm": 0.28341439366340637, "learning_rate": 2.2720681209438554e-06, "loss": 0.0152324378490448, "step": 27700 }, { "epoch": 44.407051282051285, "grad_norm": 0.14297837018966675, "learning_rate": 2.2592723116412808e-06, "loss": 0.014833158254623413, "step": 27710 }, { "epoch": 44.42307692307692, "grad_norm": 0.4007345736026764, "learning_rate": 2.2465112258265843e-06, "loss": 0.016784362494945526, "step": 27720 }, { "epoch": 44.43910256410256, "grad_norm": 0.3117080628871918, "learning_rate": 2.2337848794730086e-06, "loss": 0.014937087893486023, "step": 27730 }, { "epoch": 44.455128205128204, "grad_norm": 0.1790848821401596, "learning_rate": 2.2210932885103187e-06, "loss": 0.01584649384021759, "step": 27740 }, { "epoch": 44.47115384615385, "grad_norm": 0.21932725608348846, "learning_rate": 2.208436468824766e-06, "loss": 0.015516798198223113, "step": 27750 }, { "epoch": 44.48717948717949, "grad_norm": 0.28099390864372253, "learning_rate": 2.1958144362590782e-06, "loss": 0.014791370928287506, "step": 27760 }, { "epoch": 44.50320512820513, "grad_norm": 0.2671107351779938, "learning_rate": 2.1832272066124505e-06, "loss": 0.015020786225795746, "step": 27770 }, { "epoch": 44.51923076923077, "grad_norm": 0.18826693296432495, "learning_rate": 2.1706747956405127e-06, "loss": 0.015770190954208375, "step": 27780 }, { "epoch": 44.53525641025641, "grad_norm": 0.19428688287734985, "learning_rate": 2.1581572190552967e-06, "loss": 0.015303173661231994, "step": 27790 }, { "epoch": 44.55128205128205, "grad_norm": 0.4277273714542389, "learning_rate": 2.145674492525246e-06, "loss": 0.016667276620864868, "step": 27800 }, { "epoch": 44.56730769230769, "grad_norm": 0.7560595273971558, "learning_rate": 2.1332266316751847e-06, "loss": 0.015411508083343507, "step": 27810 }, { 
"epoch": 44.583333333333336, "grad_norm": 0.29418060183525085, "learning_rate": 2.1208136520862797e-06, "loss": 0.0169709712266922, "step": 27820 }, { "epoch": 44.59935897435897, "grad_norm": 0.19701217114925385, "learning_rate": 2.108435569296052e-06, "loss": 0.01795113682746887, "step": 27830 }, { "epoch": 44.61538461538461, "grad_norm": 0.14509624242782593, "learning_rate": 2.0960923987983284e-06, "loss": 0.014287503063678741, "step": 27840 }, { "epoch": 44.631410256410255, "grad_norm": 0.36489754915237427, "learning_rate": 2.0837841560432457e-06, "loss": 0.015942329168319704, "step": 27850 }, { "epoch": 44.6474358974359, "grad_norm": 0.11930820345878601, "learning_rate": 2.0715108564372142e-06, "loss": 0.015546457469463348, "step": 27860 }, { "epoch": 44.66346153846154, "grad_norm": 0.2540605962276459, "learning_rate": 2.059272515342908e-06, "loss": 0.016875529289245607, "step": 27870 }, { "epoch": 44.67948717948718, "grad_norm": 0.16444745659828186, "learning_rate": 2.0470691480792536e-06, "loss": 0.014741550385951995, "step": 27880 }, { "epoch": 44.69551282051282, "grad_norm": 0.2876399755477905, "learning_rate": 2.0349007699213753e-06, "loss": 0.015293492376804352, "step": 27890 }, { "epoch": 44.71153846153846, "grad_norm": 0.2536555230617523, "learning_rate": 2.0227673961006256e-06, "loss": 0.015358929336071015, "step": 27900 }, { "epoch": 44.7275641025641, "grad_norm": 0.19557474553585052, "learning_rate": 2.010669041804529e-06, "loss": 0.016900573670864106, "step": 27910 }, { "epoch": 44.743589743589745, "grad_norm": 0.1628609001636505, "learning_rate": 1.998605722176783e-06, "loss": 0.014129322767257691, "step": 27920 }, { "epoch": 44.75961538461539, "grad_norm": 0.31376421451568604, "learning_rate": 1.9865774523172144e-06, "loss": 0.014543657004833222, "step": 27930 }, { "epoch": 44.77564102564103, "grad_norm": 0.2780262529850006, "learning_rate": 1.9745842472818064e-06, "loss": 0.016083812713623045, "step": 27940 }, { "epoch": 44.791666666666664, 
"grad_norm": 0.27902719378471375, "learning_rate": 1.9626261220826215e-06, "loss": 0.01627094894647598, "step": 27950 }, { "epoch": 44.80769230769231, "grad_norm": 0.14558209478855133, "learning_rate": 1.950703091687829e-06, "loss": 0.016811907291412354, "step": 27960 }, { "epoch": 44.82371794871795, "grad_norm": 0.1528220772743225, "learning_rate": 1.938815171021674e-06, "loss": 0.015494759380817413, "step": 27970 }, { "epoch": 44.83974358974359, "grad_norm": 0.17824630439281464, "learning_rate": 1.926962374964434e-06, "loss": 0.014403627812862396, "step": 27980 }, { "epoch": 44.85576923076923, "grad_norm": 0.19053614139556885, "learning_rate": 1.9151447183524406e-06, "loss": 0.01513499766588211, "step": 27990 }, { "epoch": 44.87179487179487, "grad_norm": 0.16789144277572632, "learning_rate": 1.9033622159780384e-06, "loss": 0.01565001755952835, "step": 28000 }, { "epoch": 44.88782051282051, "grad_norm": 0.5544441938400269, "learning_rate": 1.891614882589553e-06, "loss": 0.017771418392658233, "step": 28010 }, { "epoch": 44.90384615384615, "grad_norm": 0.16271017491817474, "learning_rate": 1.879902732891302e-06, "loss": 0.014310617744922639, "step": 28020 }, { "epoch": 44.919871794871796, "grad_norm": 0.15753960609436035, "learning_rate": 1.8682257815435656e-06, "loss": 0.01696074903011322, "step": 28030 }, { "epoch": 44.93589743589744, "grad_norm": 0.2823743224143982, "learning_rate": 1.8565840431625481e-06, "loss": 0.015506458282470704, "step": 28040 }, { "epoch": 44.95192307692308, "grad_norm": 0.23352116346359253, "learning_rate": 1.8449775323203999e-06, "loss": 0.016029059886932373, "step": 28050 }, { "epoch": 44.967948717948715, "grad_norm": 0.17740987241268158, "learning_rate": 1.8334062635451666e-06, "loss": 0.013458451628684998, "step": 28060 }, { "epoch": 44.98397435897436, "grad_norm": 0.2244955152273178, "learning_rate": 1.8218702513207719e-06, "loss": 0.014353370666503907, "step": 28070 }, { "epoch": 45.0, "grad_norm": 0.4715847671031952, 
"learning_rate": 1.810369510087022e-06, "loss": 0.017550067603588106, "step": 28080 }, { "epoch": 45.0, "eval_accuracy_background": NaN, "eval_accuracy_crop": 0.9943540625822703, "eval_iou_background": 0.0, "eval_iou_crop": 0.9943540625822703, "eval_loss": 0.02013225294649601, "eval_mean_accuracy": 0.9943540625822703, "eval_mean_iou": 0.49717703129113516, "eval_overall_accuracy": 0.9943540625822703, "eval_runtime": 36.6879, "eval_samples_per_second": 24.013, "eval_steps_per_second": 3.026, "step": 28080 }, { "epoch": 45.01602564102564, "grad_norm": 0.1651143878698349, "learning_rate": 1.7989040542395718e-06, "loss": 0.018633659183979034, "step": 28090 }, { "epoch": 45.032051282051285, "grad_norm": 0.19748444855213165, "learning_rate": 1.787473898129901e-06, "loss": 0.015196537971496582, "step": 28100 }, { "epoch": 45.04807692307692, "grad_norm": 0.33418723940849304, "learning_rate": 1.7760790560653017e-06, "loss": 0.015192590653896332, "step": 28110 }, { "epoch": 45.06410256410256, "grad_norm": 0.14408601820468903, "learning_rate": 1.7647195423088847e-06, "loss": 0.015492323040962219, "step": 28120 }, { "epoch": 45.080128205128204, "grad_norm": 0.29509687423706055, "learning_rate": 1.7533953710795125e-06, "loss": 0.017135265469551086, "step": 28130 }, { "epoch": 45.09615384615385, "grad_norm": 0.26235827803611755, "learning_rate": 1.7421065565518268e-06, "loss": 0.015517695248126984, "step": 28140 }, { "epoch": 45.11217948717949, "grad_norm": 0.28314176201820374, "learning_rate": 1.7308531128562078e-06, "loss": 0.015604059398174285, "step": 28150 }, { "epoch": 45.12820512820513, "grad_norm": 0.11849040538072586, "learning_rate": 1.7196350540787608e-06, "loss": 0.014853383600711822, "step": 28160 }, { "epoch": 45.14423076923077, "grad_norm": 0.23854577541351318, "learning_rate": 1.7084523942612973e-06, "loss": 0.014524076879024506, "step": 28170 }, { "epoch": 45.16025641025641, "grad_norm": 0.1700575202703476, "learning_rate": 1.6973051474013235e-06, "loss": 
0.014762865006923675, "step": 28180 }, { "epoch": 45.17628205128205, "grad_norm": 0.49644744396209717, "learning_rate": 1.6861933274520148e-06, "loss": 0.017141200602054596, "step": 28190 }, { "epoch": 45.19230769230769, "grad_norm": 0.19865137338638306, "learning_rate": 1.6751169483222085e-06, "loss": 0.016650579869747162, "step": 28200 }, { "epoch": 45.208333333333336, "grad_norm": 0.18995289504528046, "learning_rate": 1.6640760238763674e-06, "loss": 0.014912016689777374, "step": 28210 }, { "epoch": 45.22435897435897, "grad_norm": 0.2153007537126541, "learning_rate": 1.6530705679345903e-06, "loss": 0.015772540867328644, "step": 28220 }, { "epoch": 45.24038461538461, "grad_norm": 0.24620386958122253, "learning_rate": 1.642100594272571e-06, "loss": 0.01578496992588043, "step": 28230 }, { "epoch": 45.256410256410255, "grad_norm": 0.24804896116256714, "learning_rate": 1.6311661166215852e-06, "loss": 0.01627521514892578, "step": 28240 }, { "epoch": 45.2724358974359, "grad_norm": 0.2947867512702942, "learning_rate": 1.6202671486684983e-06, "loss": 0.016264370083808898, "step": 28250 }, { "epoch": 45.28846153846154, "grad_norm": 0.3211228847503662, "learning_rate": 1.6094037040556974e-06, "loss": 0.01533527672290802, "step": 28260 }, { "epoch": 45.30448717948718, "grad_norm": 0.11963362246751785, "learning_rate": 1.5985757963811321e-06, "loss": 0.01475638747215271, "step": 28270 }, { "epoch": 45.32051282051282, "grad_norm": 0.18524231016635895, "learning_rate": 1.5877834391982504e-06, "loss": 0.01482924073934555, "step": 28280 }, { "epoch": 45.33653846153846, "grad_norm": 0.28835541009902954, "learning_rate": 1.5770266460160131e-06, "loss": 0.016960692405700684, "step": 28290 }, { "epoch": 45.3525641025641, "grad_norm": 0.1376773864030838, "learning_rate": 1.5663054302988567e-06, "loss": 0.012472666800022125, "step": 28300 }, { "epoch": 45.368589743589745, "grad_norm": 0.14204904437065125, "learning_rate": 1.5556198054666926e-06, "loss": 0.015999537706375123, "step": 
28310 }, { "epoch": 45.38461538461539, "grad_norm": 0.2678837776184082, "learning_rate": 1.5449697848948752e-06, "loss": 0.014210084080696106, "step": 28320 }, { "epoch": 45.40064102564103, "grad_norm": 0.1973719596862793, "learning_rate": 1.5343553819141975e-06, "loss": 0.014268584549427032, "step": 28330 }, { "epoch": 45.416666666666664, "grad_norm": 0.2594611346721649, "learning_rate": 1.5237766098108751e-06, "loss": 0.014816278219223022, "step": 28340 }, { "epoch": 45.43269230769231, "grad_norm": 0.279196172952652, "learning_rate": 1.5132334818265025e-06, "loss": 0.016316065192222597, "step": 28350 }, { "epoch": 45.44871794871795, "grad_norm": 0.43597644567489624, "learning_rate": 1.502726011158083e-06, "loss": 0.01635250747203827, "step": 28360 }, { "epoch": 45.46474358974359, "grad_norm": 0.18337690830230713, "learning_rate": 1.4922542109579795e-06, "loss": 0.016265441477298737, "step": 28370 }, { "epoch": 45.48076923076923, "grad_norm": 0.20995689928531647, "learning_rate": 1.4818180943338933e-06, "loss": 0.016318605840206148, "step": 28380 }, { "epoch": 45.49679487179487, "grad_norm": 0.2006613314151764, "learning_rate": 1.4714176743488683e-06, "loss": 0.015098708868026733, "step": 28390 }, { "epoch": 45.51282051282051, "grad_norm": 0.2999541759490967, "learning_rate": 1.4610529640212778e-06, "loss": 0.017057086527347564, "step": 28400 }, { "epoch": 45.52884615384615, "grad_norm": 0.20985443890094757, "learning_rate": 1.4507239763247737e-06, "loss": 0.017215265333652495, "step": 28410 }, { "epoch": 45.544871794871796, "grad_norm": 0.20283368229866028, "learning_rate": 1.4404307241883141e-06, "loss": 0.016273701190948488, "step": 28420 }, { "epoch": 45.56089743589744, "grad_norm": 0.19095061719417572, "learning_rate": 1.4301732204961126e-06, "loss": 0.015474717319011688, "step": 28430 }, { "epoch": 45.57692307692308, "grad_norm": 0.22321997582912445, "learning_rate": 1.4199514780876522e-06, "loss": 0.01592065840959549, "step": 28440 }, { "epoch": 
45.592948717948715, "grad_norm": 0.1768302619457245, "learning_rate": 1.4097655097576311e-06, "loss": 0.01777350455522537, "step": 28450 }, { "epoch": 45.60897435897436, "grad_norm": 0.23306363821029663, "learning_rate": 1.399615328255991e-06, "loss": 0.014549048244953155, "step": 28460 }, { "epoch": 45.625, "grad_norm": 0.2044477015733719, "learning_rate": 1.3895009462878549e-06, "loss": 0.014472572505474091, "step": 28470 }, { "epoch": 45.64102564102564, "grad_norm": 0.20422783493995667, "learning_rate": 1.3794223765135528e-06, "loss": 0.016355401277542113, "step": 28480 }, { "epoch": 45.657051282051285, "grad_norm": 0.226772740483284, "learning_rate": 1.3693796315485795e-06, "loss": 0.017167499661445616, "step": 28490 }, { "epoch": 45.67307692307692, "grad_norm": 0.2985594868659973, "learning_rate": 1.3593727239636e-06, "loss": 0.014691081643104554, "step": 28500 }, { "epoch": 45.68910256410256, "grad_norm": 0.19260768592357635, "learning_rate": 1.3494016662844011e-06, "loss": 0.013783803582191468, "step": 28510 }, { "epoch": 45.705128205128204, "grad_norm": 0.143308624625206, "learning_rate": 1.3394664709919124e-06, "loss": 0.014209818840026856, "step": 28520 }, { "epoch": 45.72115384615385, "grad_norm": 0.19426290690898895, "learning_rate": 1.3295671505221662e-06, "loss": 0.01644579917192459, "step": 28530 }, { "epoch": 45.73717948717949, "grad_norm": 0.2254563271999359, "learning_rate": 1.3197037172662907e-06, "loss": 0.01647055149078369, "step": 28540 }, { "epoch": 45.75320512820513, "grad_norm": 0.16074714064598083, "learning_rate": 1.3098761835704998e-06, "loss": 0.013935215771198273, "step": 28550 }, { "epoch": 45.76923076923077, "grad_norm": 0.1878928691148758, "learning_rate": 1.3000845617360603e-06, "loss": 0.013784874975681306, "step": 28560 }, { "epoch": 45.78525641025641, "grad_norm": 0.17561303079128265, "learning_rate": 1.2903288640192912e-06, "loss": 0.017161037027835845, "step": 28570 }, { "epoch": 45.80128205128205, "grad_norm": 
0.18273845314979553, "learning_rate": 1.280609102631548e-06, "loss": 0.01593673825263977, "step": 28580 }, { "epoch": 45.81730769230769, "grad_norm": 0.17031314969062805, "learning_rate": 1.2709252897392087e-06, "loss": 0.019837839901447295, "step": 28590 }, { "epoch": 45.833333333333336, "grad_norm": 0.20939388871192932, "learning_rate": 1.2612774374636371e-06, "loss": 0.013969935476779938, "step": 28600 }, { "epoch": 45.84935897435897, "grad_norm": 0.24031426012516022, "learning_rate": 1.2516655578812064e-06, "loss": 0.014740148186683654, "step": 28610 }, { "epoch": 45.86538461538461, "grad_norm": 0.24765194952487946, "learning_rate": 1.2420896630232492e-06, "loss": 0.016033956408500673, "step": 28620 }, { "epoch": 45.881410256410255, "grad_norm": 0.19048111140727997, "learning_rate": 1.2325497648760541e-06, "loss": 0.014833833277225494, "step": 28630 }, { "epoch": 45.8974358974359, "grad_norm": 0.18617895245552063, "learning_rate": 1.2230458753808627e-06, "loss": 0.014240382611751557, "step": 28640 }, { "epoch": 45.91346153846154, "grad_norm": 0.31206122040748596, "learning_rate": 1.2135780064338286e-06, "loss": 0.01533261239528656, "step": 28650 }, { "epoch": 45.92948717948718, "grad_norm": 0.20606957376003265, "learning_rate": 1.204146169886029e-06, "loss": 0.014057578146457672, "step": 28660 }, { "epoch": 45.94551282051282, "grad_norm": 0.23616547882556915, "learning_rate": 1.1947503775434331e-06, "loss": 0.014241991937160492, "step": 28670 }, { "epoch": 45.96153846153846, "grad_norm": 0.1639358252286911, "learning_rate": 1.185390641166907e-06, "loss": 0.014378885924816131, "step": 28680 }, { "epoch": 45.9775641025641, "grad_norm": 0.15627682209014893, "learning_rate": 1.1760669724721617e-06, "loss": 0.014659261703491211, "step": 28690 }, { "epoch": 45.993589743589745, "grad_norm": 0.26203301548957825, "learning_rate": 1.166779383129779e-06, "loss": 0.015890493988990784, "step": 28700 }, { "epoch": 46.0, "eval_accuracy_background": NaN, "eval_accuracy_crop": 
0.9945594450825891, "eval_iou_background": 0.0, "eval_iou_crop": 0.9945594450825891, "eval_loss": 0.020292576402425766, "eval_mean_accuracy": 0.9945594450825891, "eval_mean_iou": 0.49727972254129454, "eval_overall_accuracy": 0.9945594450825891, "eval_runtime": 36.7287, "eval_samples_per_second": 23.987, "eval_steps_per_second": 3.022, "step": 28704 }, { "epoch": 46.00961538461539, "grad_norm": 0.3840465545654297, "learning_rate": 1.1575278847651827e-06, "loss": 0.01647926717996597, "step": 28710 }, { "epoch": 46.02564102564103, "grad_norm": 0.18426565825939178, "learning_rate": 1.1483124889585993e-06, "loss": 0.017899718880653382, "step": 28720 }, { "epoch": 46.041666666666664, "grad_norm": 0.20105373859405518, "learning_rate": 1.1391332072450889e-06, "loss": 0.01438898742198944, "step": 28730 }, { "epoch": 46.05769230769231, "grad_norm": 0.24992328882217407, "learning_rate": 1.12999005111449e-06, "loss": 0.01659611314535141, "step": 28740 }, { "epoch": 46.07371794871795, "grad_norm": 0.3238469660282135, "learning_rate": 1.120883032011435e-06, "loss": 0.01489597260951996, "step": 28750 }, { "epoch": 46.08974358974359, "grad_norm": 0.19242458045482635, "learning_rate": 1.1118121613353116e-06, "loss": 0.016142360866069794, "step": 28760 }, { "epoch": 46.10576923076923, "grad_norm": 0.18957214057445526, "learning_rate": 1.102777450440271e-06, "loss": 0.014619649946689605, "step": 28770 }, { "epoch": 46.12179487179487, "grad_norm": 0.23426936566829681, "learning_rate": 1.093778910635197e-06, "loss": 0.01602981686592102, "step": 28780 }, { "epoch": 46.13782051282051, "grad_norm": 0.12782423198223114, "learning_rate": 1.0848165531836928e-06, "loss": 0.01440301388502121, "step": 28790 }, { "epoch": 46.15384615384615, "grad_norm": 0.15051813423633575, "learning_rate": 1.0758903893040818e-06, "loss": 0.014021952450275422, "step": 28800 }, { "epoch": 46.169871794871796, "grad_norm": 0.20854970812797546, "learning_rate": 1.0670004301693759e-06, "loss": 0.014505341649055481, 
"step": 28810 }, { "epoch": 46.18589743589744, "grad_norm": 0.267604798078537, "learning_rate": 1.0581466869072742e-06, "loss": 0.01678110808134079, "step": 28820 }, { "epoch": 46.20192307692308, "grad_norm": 0.2318347543478012, "learning_rate": 1.0493291706001351e-06, "loss": 0.016690635681152345, "step": 28830 }, { "epoch": 46.217948717948715, "grad_norm": 0.2543177008628845, "learning_rate": 1.040547892284983e-06, "loss": 0.01519085317850113, "step": 28840 }, { "epoch": 46.23397435897436, "grad_norm": 0.24292704463005066, "learning_rate": 1.0318028629534692e-06, "loss": 0.015352633595466614, "step": 28850 }, { "epoch": 46.25, "grad_norm": 0.21064502000808716, "learning_rate": 1.023094093551884e-06, "loss": 0.015822599828243255, "step": 28860 }, { "epoch": 46.26602564102564, "grad_norm": 0.23230800032615662, "learning_rate": 1.014421594981131e-06, "loss": 0.015927149355411528, "step": 28870 }, { "epoch": 46.282051282051285, "grad_norm": 0.2421070784330368, "learning_rate": 1.0057853780966963e-06, "loss": 0.017552420496940613, "step": 28880 }, { "epoch": 46.29807692307692, "grad_norm": 0.624501645565033, "learning_rate": 9.9718545370867e-07, "loss": 0.014928498864173889, "step": 28890 }, { "epoch": 46.31410256410256, "grad_norm": 0.19537627696990967, "learning_rate": 9.886218325817077e-07, "loss": 0.017242853343486787, "step": 28900 }, { "epoch": 46.330128205128204, "grad_norm": 0.22607381641864777, "learning_rate": 9.800945254350214e-07, "loss": 0.01496628373861313, "step": 28910 }, { "epoch": 46.34615384615385, "grad_norm": 0.13908298313617706, "learning_rate": 9.716035429423664e-07, "loss": 0.014618669450283051, "step": 28920 }, { "epoch": 46.36217948717949, "grad_norm": 0.18439020216464996, "learning_rate": 9.631488957320412e-07, "loss": 0.015577107667922974, "step": 28930 }, { "epoch": 46.37820512820513, "grad_norm": 0.1568029224872589, "learning_rate": 9.5473059438685e-07, "loss": 0.01452416330575943, "step": 28940 }, { "epoch": 46.39423076923077, 
"grad_norm": 0.24759726226329803, "learning_rate": 9.463486494441109e-07, "loss": 0.01573837101459503, "step": 28950 }, { "epoch": 46.41025641025641, "grad_norm": 0.17013372480869293, "learning_rate": 9.380030713956312e-07, "loss": 0.014234659075737, "step": 28960 }, { "epoch": 46.42628205128205, "grad_norm": 0.30214694142341614, "learning_rate": 9.296938706876912e-07, "loss": 0.01595083475112915, "step": 28970 }, { "epoch": 46.44230769230769, "grad_norm": 0.1471533179283142, "learning_rate": 9.21421057721048e-07, "loss": 0.01590404808521271, "step": 28980 }, { "epoch": 46.458333333333336, "grad_norm": 0.7183281779289246, "learning_rate": 9.131846428509116e-07, "loss": 0.018260614573955537, "step": 28990 }, { "epoch": 46.47435897435897, "grad_norm": 0.35162976384162903, "learning_rate": 9.049846363869185e-07, "loss": 0.015864627063274385, "step": 29000 }, { "epoch": 46.49038461538461, "grad_norm": 0.15817900002002716, "learning_rate": 8.968210485931416e-07, "loss": 0.016380338370800017, "step": 29010 }, { "epoch": 46.506410256410255, "grad_norm": 0.12012694776058197, "learning_rate": 8.88693889688067e-07, "loss": 0.01590409129858017, "step": 29020 }, { "epoch": 46.5224358974359, "grad_norm": 0.3282858729362488, "learning_rate": 8.806031698445871e-07, "loss": 0.01775611788034439, "step": 29030 }, { "epoch": 46.53846153846154, "grad_norm": 0.1852198839187622, "learning_rate": 8.725488991899744e-07, "loss": 0.015829768776893616, "step": 29040 }, { "epoch": 46.55448717948718, "grad_norm": 0.20046696066856384, "learning_rate": 8.64531087805881e-07, "loss": 0.015343976020812989, "step": 29050 }, { "epoch": 46.57051282051282, "grad_norm": 0.2418874055147171, "learning_rate": 8.565497457283255e-07, "loss": 0.017346106469631195, "step": 29060 }, { "epoch": 46.58653846153846, "grad_norm": 0.4987257421016693, "learning_rate": 8.4860488294767e-07, "loss": 0.019559434056282042, "step": 29070 }, { "epoch": 46.6025641025641, "grad_norm": 0.1215410903096199, "learning_rate": 
8.406965094086228e-07, "loss": 0.016896621882915498, "step": 29080 }, { "epoch": 46.618589743589745, "grad_norm": 0.33348989486694336, "learning_rate": 8.32824635010212e-07, "loss": 0.015085728466510772, "step": 29090 }, { "epoch": 46.63461538461539, "grad_norm": 0.2648869454860687, "learning_rate": 8.249892696057859e-07, "loss": 0.015950463712215424, "step": 29100 }, { "epoch": 46.65064102564103, "grad_norm": 0.19538435339927673, "learning_rate": 8.171904230029825e-07, "loss": 0.01668214350938797, "step": 29110 }, { "epoch": 46.666666666666664, "grad_norm": 0.18072417378425598, "learning_rate": 8.094281049637464e-07, "loss": 0.016334332525730133, "step": 29120 }, { "epoch": 46.68269230769231, "grad_norm": 0.1829649657011032, "learning_rate": 8.017023252042754e-07, "loss": 0.016166871786117552, "step": 29130 }, { "epoch": 46.69871794871795, "grad_norm": 0.4030074179172516, "learning_rate": 7.940130933950507e-07, "loss": 0.013730643689632416, "step": 29140 }, { "epoch": 46.71474358974359, "grad_norm": 0.17484812438488007, "learning_rate": 7.863604191608064e-07, "loss": 0.01635509878396988, "step": 29150 }, { "epoch": 46.73076923076923, "grad_norm": 0.18135464191436768, "learning_rate": 7.787443120805005e-07, "loss": 0.015697748959064485, "step": 29160 }, { "epoch": 46.74679487179487, "grad_norm": 0.13859571516513824, "learning_rate": 7.711647816873368e-07, "loss": 0.015978921949863435, "step": 29170 }, { "epoch": 46.76282051282051, "grad_norm": 0.2009444385766983, "learning_rate": 7.636218374687199e-07, "loss": 0.015128642320632935, "step": 29180 }, { "epoch": 46.77884615384615, "grad_norm": 0.2652130126953125, "learning_rate": 7.561154888662769e-07, "loss": 0.01556456983089447, "step": 29190 }, { "epoch": 46.794871794871796, "grad_norm": 0.2845443785190582, "learning_rate": 7.486457452758089e-07, "loss": 0.01592068076133728, "step": 29200 }, { "epoch": 46.81089743589744, "grad_norm": 0.3579454720020294, "learning_rate": 7.412126160473132e-07, "loss": 
0.017798911035060882, "step": 29210 }, { "epoch": 46.82692307692308, "grad_norm": 0.3009389340877533, "learning_rate": 7.338161104849439e-07, "loss": 0.014792460203170776, "step": 29220 }, { "epoch": 46.842948717948715, "grad_norm": 0.1783992201089859, "learning_rate": 7.264562378470218e-07, "loss": 0.014789550006389618, "step": 29230 }, { "epoch": 46.85897435897436, "grad_norm": 0.25910669565200806, "learning_rate": 7.191330073460112e-07, "loss": 0.015309041738510132, "step": 29240 }, { "epoch": 46.875, "grad_norm": 0.18250508606433868, "learning_rate": 7.118464281485093e-07, "loss": 0.01649814248085022, "step": 29250 }, { "epoch": 46.89102564102564, "grad_norm": 0.24331484735012054, "learning_rate": 7.045965093752404e-07, "loss": 0.014434045553207398, "step": 29260 }, { "epoch": 46.907051282051285, "grad_norm": 0.2377888709306717, "learning_rate": 6.973832601010355e-07, "loss": 0.016079428791999816, "step": 29270 }, { "epoch": 46.92307692307692, "grad_norm": 0.15704555809497833, "learning_rate": 6.902066893548287e-07, "loss": 0.014078518748283387, "step": 29280 }, { "epoch": 46.93910256410256, "grad_norm": 0.23384375870227814, "learning_rate": 6.830668061196343e-07, "loss": 0.014607027173042297, "step": 29290 }, { "epoch": 46.955128205128204, "grad_norm": 0.18686407804489136, "learning_rate": 6.759636193325602e-07, "loss": 0.014813660085201264, "step": 29300 }, { "epoch": 46.97115384615385, "grad_norm": 0.26398709416389465, "learning_rate": 6.688971378847741e-07, "loss": 0.01318851113319397, "step": 29310 }, { "epoch": 46.98717948717949, "grad_norm": 0.18248681724071503, "learning_rate": 6.618673706214906e-07, "loss": 0.0148567795753479, "step": 29320 }, { "epoch": 47.0, "eval_accuracy_background": NaN, "eval_accuracy_crop": 0.9944227821134659, "eval_iou_background": 0.0, "eval_iou_crop": 0.9944227821134659, "eval_loss": 0.020170465111732483, "eval_mean_accuracy": 0.9944227821134659, "eval_mean_iou": 0.49721139105673295, "eval_overall_accuracy": 
0.9944227821134659, "eval_runtime": 36.4476, "eval_samples_per_second": 24.172, "eval_steps_per_second": 3.045, "step": 29328 }, { "epoch": 47.00320512820513, "grad_norm": 0.19211849570274353, "learning_rate": 6.548743263419776e-07, "loss": 0.014146706461906433, "step": 29330 }, { "epoch": 47.01923076923077, "grad_norm": 0.2360827475786209, "learning_rate": 6.4791801379954e-07, "loss": 0.01773252189159393, "step": 29340 }, { "epoch": 47.03525641025641, "grad_norm": 0.2603590488433838, "learning_rate": 6.409984417014925e-07, "loss": 0.01506693959236145, "step": 29350 }, { "epoch": 47.05128205128205, "grad_norm": 0.18242602050304413, "learning_rate": 6.341156187091701e-07, "loss": 0.013457678258419037, "step": 29360 }, { "epoch": 47.06730769230769, "grad_norm": 0.2075904905796051, "learning_rate": 6.272695534379081e-07, "loss": 0.01512986421585083, "step": 29370 }, { "epoch": 47.083333333333336, "grad_norm": 0.22988946735858917, "learning_rate": 6.204602544570281e-07, "loss": 0.016033825278282166, "step": 29380 }, { "epoch": 47.09935897435897, "grad_norm": 0.3159559369087219, "learning_rate": 6.136877302898292e-07, "loss": 0.013354584574699402, "step": 29390 }, { "epoch": 47.11538461538461, "grad_norm": 0.2574511468410492, "learning_rate": 6.0695198941359e-07, "loss": 0.01559361070394516, "step": 29400 }, { "epoch": 47.131410256410255, "grad_norm": 0.1401338130235672, "learning_rate": 6.00253040259533e-07, "loss": 0.013778312504291535, "step": 29410 }, { "epoch": 47.1474358974359, "grad_norm": 0.1621578484773636, "learning_rate": 5.935908912128341e-07, "loss": 0.014947706460952758, "step": 29420 }, { "epoch": 47.16346153846154, "grad_norm": 0.2939842939376831, "learning_rate": 5.869655506126126e-07, "loss": 0.01636326462030411, "step": 29430 }, { "epoch": 47.17948717948718, "grad_norm": 0.19054542481899261, "learning_rate": 5.803770267518982e-07, "loss": 0.01627662926912308, "step": 29440 }, { "epoch": 47.19551282051282, "grad_norm": 0.2699260413646698, 
"learning_rate": 5.738253278776539e-07, "loss": 0.016452524065971374, "step": 29450 }, { "epoch": 47.21153846153846, "grad_norm": 0.2037985920906067, "learning_rate": 5.673104621907333e-07, "loss": 0.015268644690513611, "step": 29460 }, { "epoch": 47.2275641025641, "grad_norm": 0.30037999153137207, "learning_rate": 5.608324378458963e-07, "loss": 0.01586823761463165, "step": 29470 }, { "epoch": 47.243589743589745, "grad_norm": 0.2645445168018341, "learning_rate": 5.5439126295178e-07, "loss": 0.015708522498607637, "step": 29480 }, { "epoch": 47.25961538461539, "grad_norm": 0.3870357573032379, "learning_rate": 5.479869455709052e-07, "loss": 0.016060520708560944, "step": 29490 }, { "epoch": 47.27564102564103, "grad_norm": 0.19194801151752472, "learning_rate": 5.416194937196495e-07, "loss": 0.01743350476026535, "step": 29500 }, { "epoch": 47.291666666666664, "grad_norm": 0.30442389845848083, "learning_rate": 5.352889153682505e-07, "loss": 0.01601257026195526, "step": 29510 }, { "epoch": 47.30769230769231, "grad_norm": 0.25080567598342896, "learning_rate": 5.289952184407865e-07, "loss": 0.01648728996515274, "step": 29520 }, { "epoch": 47.32371794871795, "grad_norm": 0.2596561908721924, "learning_rate": 5.227384108151756e-07, "loss": 0.017070797085762025, "step": 29530 }, { "epoch": 47.33974358974359, "grad_norm": 0.29917219281196594, "learning_rate": 5.165185003231565e-07, "loss": 0.015356317162513733, "step": 29540 }, { "epoch": 47.35576923076923, "grad_norm": 0.2286434918642044, "learning_rate": 5.10335494750288e-07, "loss": 0.014167396724224091, "step": 29550 }, { "epoch": 47.37179487179487, "grad_norm": 0.19285114109516144, "learning_rate": 5.041894018359261e-07, "loss": 0.017361018061637878, "step": 29560 }, { "epoch": 47.38782051282051, "grad_norm": 0.14305496215820312, "learning_rate": 4.980802292732334e-07, "loss": 0.015142789483070374, "step": 29570 }, { "epoch": 47.40384615384615, "grad_norm": 0.282820463180542, "learning_rate": 4.920079847091496e-07, "loss": 
0.015746691823005678, "step": 29580 }, { "epoch": 47.419871794871796, "grad_norm": 0.2337058037519455, "learning_rate": 4.859726757443982e-07, "loss": 0.015282636880874634, "step": 29590 }, { "epoch": 47.43589743589744, "grad_norm": 0.32512274384498596, "learning_rate": 4.799743099334631e-07, "loss": 0.015692408382892608, "step": 29600 }, { "epoch": 47.45192307692308, "grad_norm": 0.13401460647583008, "learning_rate": 4.740128947845912e-07, "loss": 0.016146978735923766, "step": 29610 }, { "epoch": 47.467948717948715, "grad_norm": 0.26592469215393066, "learning_rate": 4.680884377597805e-07, "loss": 0.015301698446273803, "step": 29620 }, { "epoch": 47.48397435897436, "grad_norm": 0.1883552521467209, "learning_rate": 4.6220094627475895e-07, "loss": 0.015270110964775086, "step": 29630 }, { "epoch": 47.5, "grad_norm": 0.19824181497097015, "learning_rate": 4.563504276989849e-07, "loss": 0.014729148149490357, "step": 29640 }, { "epoch": 47.51602564102564, "grad_norm": 0.21780923008918762, "learning_rate": 4.5053688935564717e-07, "loss": 0.015432906150817872, "step": 29650 }, { "epoch": 47.532051282051285, "grad_norm": 0.4414511024951935, "learning_rate": 4.4476033852163145e-07, "loss": 0.015779447555541993, "step": 29660 }, { "epoch": 47.54807692307692, "grad_norm": 0.1364261358976364, "learning_rate": 4.3902078242754385e-07, "loss": 0.01659596264362335, "step": 29670 }, { "epoch": 47.56410256410256, "grad_norm": 0.35218995809555054, "learning_rate": 4.333182282576675e-07, "loss": 0.016677287220954896, "step": 29680 }, { "epoch": 47.580128205128204, "grad_norm": 0.29971766471862793, "learning_rate": 4.276526831499794e-07, "loss": 0.01663128137588501, "step": 29690 }, { "epoch": 47.59615384615385, "grad_norm": 0.2820044457912445, "learning_rate": 4.2202415419612005e-07, "loss": 0.02061349302530289, "step": 29700 }, { "epoch": 47.61217948717949, "grad_norm": 0.31131041049957275, "learning_rate": 4.164326484414138e-07, "loss": 0.013606531918048859, "step": 29710 }, { 
"epoch": 47.62820512820513, "grad_norm": 0.2105891853570938, "learning_rate": 4.1087817288482875e-07, "loss": 0.0146299347281456, "step": 29720 }, { "epoch": 47.64423076923077, "grad_norm": 0.3198206424713135, "learning_rate": 4.0536073447898335e-07, "loss": 0.01710765212774277, "step": 29730 }, { "epoch": 47.66025641025641, "grad_norm": 0.32681313157081604, "learning_rate": 3.998803401301432e-07, "loss": 0.01708281636238098, "step": 29740 }, { "epoch": 47.67628205128205, "grad_norm": 0.16148649156093597, "learning_rate": 3.944369966981942e-07, "loss": 0.013828080892562867, "step": 29750 }, { "epoch": 47.69230769230769, "grad_norm": 0.14714373648166656, "learning_rate": 3.8903071099665954e-07, "loss": 0.014756929874420167, "step": 29760 }, { "epoch": 47.708333333333336, "grad_norm": 0.1887401044368744, "learning_rate": 3.836614897926627e-07, "loss": 0.015036147832870484, "step": 29770 }, { "epoch": 47.72435897435897, "grad_norm": 0.12260463833808899, "learning_rate": 3.7832933980694093e-07, "loss": 0.014743056893348695, "step": 29780 }, { "epoch": 47.74038461538461, "grad_norm": 0.1461956650018692, "learning_rate": 3.730342677138288e-07, "loss": 0.01465459018945694, "step": 29790 }, { "epoch": 47.756410256410255, "grad_norm": 0.1801733672618866, "learning_rate": 3.6777628014124765e-07, "loss": 0.015738433599472045, "step": 29800 }, { "epoch": 47.7724358974359, "grad_norm": 0.4738054871559143, "learning_rate": 3.6255538367069627e-07, "loss": 0.015090972185134888, "step": 29810 }, { "epoch": 47.78846153846154, "grad_norm": 0.19924671947956085, "learning_rate": 3.573715848372505e-07, "loss": 0.013384111225605011, "step": 29820 }, { "epoch": 47.80448717948718, "grad_norm": 0.21563731133937836, "learning_rate": 3.522248901295566e-07, "loss": 0.01708632558584213, "step": 29830 }, { "epoch": 47.82051282051282, "grad_norm": 0.1177823469042778, "learning_rate": 3.4711530598979804e-07, "loss": 0.014025388658046723, "step": 29840 }, { "epoch": 47.83653846153846, "grad_norm": 
0.21340911090373993, "learning_rate": 3.4204283881372865e-07, "loss": 0.015341711044311524, "step": 29850 }, { "epoch": 47.8525641025641, "grad_norm": 0.25823020935058594, "learning_rate": 3.3700749495062967e-07, "loss": 0.0161429226398468, "step": 29860 }, { "epoch": 47.868589743589745, "grad_norm": 0.17127935588359833, "learning_rate": 3.320092807033126e-07, "loss": 0.015382185578346252, "step": 29870 }, { "epoch": 47.88461538461539, "grad_norm": 0.3297446072101593, "learning_rate": 3.270482023281196e-07, "loss": 0.014325524866580962, "step": 29880 }, { "epoch": 47.90064102564103, "grad_norm": 0.29137560725212097, "learning_rate": 3.221242660349133e-07, "loss": 0.01332940012216568, "step": 29890 }, { "epoch": 47.916666666666664, "grad_norm": 0.32856279611587524, "learning_rate": 3.172374779870568e-07, "loss": 0.014823773503303527, "step": 29900 }, { "epoch": 47.93269230769231, "grad_norm": 0.17741520702838898, "learning_rate": 3.1238784430141055e-07, "loss": 0.015712600946426392, "step": 29910 }, { "epoch": 47.94871794871795, "grad_norm": 0.2017545849084854, "learning_rate": 3.0757537104833535e-07, "loss": 0.014169232547283172, "step": 29920 }, { "epoch": 47.96474358974359, "grad_norm": 0.42925751209259033, "learning_rate": 3.0280006425168595e-07, "loss": 0.014644335210323333, "step": 29930 }, { "epoch": 47.98076923076923, "grad_norm": 0.22347226738929749, "learning_rate": 2.9806192988877433e-07, "loss": 0.015193971991539, "step": 29940 }, { "epoch": 47.99679487179487, "grad_norm": 0.40303295850753784, "learning_rate": 2.9336097389040306e-07, "loss": 0.01591367870569229, "step": 29950 }, { "epoch": 48.0, "eval_accuracy_background": NaN, "eval_accuracy_crop": 0.9945665710298691, "eval_iou_background": 0.0, "eval_iou_crop": 0.9945665710298691, "eval_loss": 0.020120423287153244, "eval_mean_accuracy": 0.9945665710298691, "eval_mean_iou": 0.49728328551493456, "eval_overall_accuracy": 0.9945665710298691, "eval_runtime": 37.1194, "eval_samples_per_second": 23.734, 
"eval_steps_per_second": 2.99, "step": 29952 }, { "epoch": 48.01282051282051, "grad_norm": 0.3217231333255768, "learning_rate": 2.8869720214082516e-07, "loss": 0.016921477019786836, "step": 29960 }, { "epoch": 48.02884615384615, "grad_norm": 0.244198739528656, "learning_rate": 2.8407062047775766e-07, "loss": 0.016343486309051514, "step": 29970 }, { "epoch": 48.044871794871796, "grad_norm": 0.164947971701622, "learning_rate": 2.7948123469236144e-07, "loss": 0.014403104782104492, "step": 29980 }, { "epoch": 48.06089743589744, "grad_norm": 0.26034456491470337, "learning_rate": 2.7492905052923787e-07, "loss": 0.014593911170959473, "step": 29990 }, { "epoch": 48.07692307692308, "grad_norm": 0.24076637625694275, "learning_rate": 2.704140736864258e-07, "loss": 0.01490584909915924, "step": 30000 }, { "epoch": 48.092948717948715, "grad_norm": 0.6028624773025513, "learning_rate": 2.659363098153911e-07, "loss": 0.01517893821001053, "step": 30010 }, { "epoch": 48.10897435897436, "grad_norm": 0.2520623505115509, "learning_rate": 2.61495764521017e-07, "loss": 0.014839889109134674, "step": 30020 }, { "epoch": 48.125, "grad_norm": 0.2981051504611969, "learning_rate": 2.57092443361604e-07, "loss": 0.015197667479515075, "step": 30030 }, { "epoch": 48.14102564102564, "grad_norm": 0.43795815110206604, "learning_rate": 2.5272635184884984e-07, "loss": 0.015172593295574188, "step": 30040 }, { "epoch": 48.157051282051285, "grad_norm": 0.20417974889278412, "learning_rate": 2.4839749544786295e-07, "loss": 0.015543046593666076, "step": 30050 }, { "epoch": 48.17307692307692, "grad_norm": 0.18061110377311707, "learning_rate": 2.441058795771356e-07, "loss": 0.017139905691146852, "step": 30060 }, { "epoch": 48.18910256410256, "grad_norm": 0.23065555095672607, "learning_rate": 2.3985150960855076e-07, "loss": 0.016555704176425934, "step": 30070 }, { "epoch": 48.205128205128204, "grad_norm": 0.3085910379886627, "learning_rate": 2.3563439086736527e-07, "loss": 0.01564658135175705, "step": 30080 }, { 
"epoch": 48.22115384615385, "grad_norm": 0.23773734271526337, "learning_rate": 2.3145452863221005e-07, "loss": 0.014543934166431427, "step": 30090 }, { "epoch": 48.23717948717949, "grad_norm": 0.22569432854652405, "learning_rate": 2.2731192813507994e-07, "loss": 0.015152448415756225, "step": 30100 }, { "epoch": 48.25320512820513, "grad_norm": 0.14688453078269958, "learning_rate": 2.2320659456133708e-07, "loss": 0.014234429597854615, "step": 30110 }, { "epoch": 48.26923076923077, "grad_norm": 0.24380047619342804, "learning_rate": 2.1913853304968424e-07, "loss": 0.01611334681510925, "step": 30120 }, { "epoch": 48.28525641025641, "grad_norm": 0.14260771870613098, "learning_rate": 2.1510774869217487e-07, "loss": 0.015012171864509583, "step": 30130 }, { "epoch": 48.30128205128205, "grad_norm": 0.2861193120479584, "learning_rate": 2.111142465342064e-07, "loss": 0.016360846161842347, "step": 30140 }, { "epoch": 48.31730769230769, "grad_norm": 0.2750746011734009, "learning_rate": 2.0715803157450363e-07, "loss": 0.01570931077003479, "step": 30150 }, { "epoch": 48.333333333333336, "grad_norm": 0.2039780616760254, "learning_rate": 2.0323910876511865e-07, "loss": 0.014663182199001312, "step": 30160 }, { "epoch": 48.34935897435897, "grad_norm": 0.22376634180545807, "learning_rate": 1.9935748301143088e-07, "loss": 0.015046155452728272, "step": 30170 }, { "epoch": 48.36538461538461, "grad_norm": 0.2677769958972931, "learning_rate": 1.9551315917213042e-07, "loss": 0.016514752805233002, "step": 30180 }, { "epoch": 48.381410256410255, "grad_norm": 0.22665229439735413, "learning_rate": 1.917061420592081e-07, "loss": 0.01419323831796646, "step": 30190 }, { "epoch": 48.3974358974359, "grad_norm": 0.1681465357542038, "learning_rate": 1.8793643643796875e-07, "loss": 0.01583888232707977, "step": 30200 }, { "epoch": 48.41346153846154, "grad_norm": 0.21254311501979828, "learning_rate": 1.8420404702701454e-07, "loss": 0.014522895216941833, "step": 30210 }, { "epoch": 48.42948717948718, 
"grad_norm": 0.24662892520427704, "learning_rate": 1.8050897849822834e-07, "loss": 0.015381748974323272, "step": 30220 }, { "epoch": 48.44551282051282, "grad_norm": 0.3101390302181244, "learning_rate": 1.768512354767804e-07, "loss": 0.01487893909215927, "step": 30230 }, { "epoch": 48.46153846153846, "grad_norm": 0.22097496688365936, "learning_rate": 1.7323082254113164e-07, "loss": 0.016166384518146514, "step": 30240 }, { "epoch": 48.4775641025641, "grad_norm": 0.3343227505683899, "learning_rate": 1.6964774422300044e-07, "loss": 0.01674746870994568, "step": 30250 }, { "epoch": 48.493589743589745, "grad_norm": 0.19467172026634216, "learning_rate": 1.6610200500738582e-07, "loss": 0.01600864976644516, "step": 30260 }, { "epoch": 48.50961538461539, "grad_norm": 0.2762576639652252, "learning_rate": 1.6259360933253753e-07, "loss": 0.015086030960083008, "step": 30270 }, { "epoch": 48.52564102564103, "grad_norm": 0.24769337475299835, "learning_rate": 1.5912256158996942e-07, "loss": 0.015443995594978333, "step": 30280 }, { "epoch": 48.541666666666664, "grad_norm": 0.1559792309999466, "learning_rate": 1.55688866124446e-07, "loss": 0.015271198749542237, "step": 30290 }, { "epoch": 48.55769230769231, "grad_norm": 0.1803930699825287, "learning_rate": 1.5229252723397258e-07, "loss": 0.015923991799354553, "step": 30300 }, { "epoch": 48.57371794871795, "grad_norm": 0.3541063964366913, "learning_rate": 1.4893354916980184e-07, "loss": 0.016122613847255707, "step": 30310 }, { "epoch": 48.58974358974359, "grad_norm": 0.4706469476222992, "learning_rate": 1.456119361364139e-07, "loss": 0.01912512183189392, "step": 30320 }, { "epoch": 48.60576923076923, "grad_norm": 0.12142220139503479, "learning_rate": 1.4232769229152288e-07, "loss": 0.014127688109874725, "step": 30330 }, { "epoch": 48.62179487179487, "grad_norm": 0.1580854058265686, "learning_rate": 1.3908082174606706e-07, "loss": 0.014183083176612854, "step": 30340 }, { "epoch": 48.63782051282051, "grad_norm": 0.26331380009651184, 
"learning_rate": 1.3587132856420548e-07, "loss": 0.0163043275475502, "step": 30350 }, { "epoch": 48.65384615384615, "grad_norm": 0.20253534615039825, "learning_rate": 1.3269921676330454e-07, "loss": 0.015480676293373108, "step": 30360 }, { "epoch": 48.669871794871796, "grad_norm": 0.2272229939699173, "learning_rate": 1.2956449031394812e-07, "loss": 0.014747309684753417, "step": 30370 }, { "epoch": 48.68589743589744, "grad_norm": 0.20176143944263458, "learning_rate": 1.2646715313991752e-07, "loss": 0.015023146569728852, "step": 30380 }, { "epoch": 48.70192307692308, "grad_norm": 0.21797366440296173, "learning_rate": 1.2340720911819813e-07, "loss": 0.013974227011203766, "step": 30390 }, { "epoch": 48.717948717948715, "grad_norm": 0.22505918145179749, "learning_rate": 1.2038466207896948e-07, "loss": 0.016614991426467895, "step": 30400 }, { "epoch": 48.73397435897436, "grad_norm": 0.19929960370063782, "learning_rate": 1.1739951580559848e-07, "loss": 0.014316211640834808, "step": 30410 }, { "epoch": 48.75, "grad_norm": 0.21867802739143372, "learning_rate": 1.1445177403463625e-07, "loss": 0.013949672877788543, "step": 30420 }, { "epoch": 48.76602564102564, "grad_norm": 0.23756097257137299, "learning_rate": 1.1154144045581793e-07, "loss": 0.015197613835334777, "step": 30430 }, { "epoch": 48.782051282051285, "grad_norm": 0.22117167711257935, "learning_rate": 1.0866851871205285e-07, "loss": 0.016008210182189942, "step": 30440 }, { "epoch": 48.79807692307692, "grad_norm": 0.3231344223022461, "learning_rate": 1.0583301239941779e-07, "loss": 0.01568482965230942, "step": 30450 }, { "epoch": 48.81410256410256, "grad_norm": 0.17670945823192596, "learning_rate": 1.0303492506715694e-07, "loss": 0.016737291216850282, "step": 30460 }, { "epoch": 48.830128205128204, "grad_norm": 0.1884097009897232, "learning_rate": 1.0027426021768538e-07, "loss": 0.01551150381565094, "step": 30470 }, { "epoch": 48.84615384615385, "grad_norm": 0.2625444829463959, "learning_rate": 9.755102130656224e-08, 
"loss": 0.016746190190315247, "step": 30480 }, { "epoch": 48.86217948717949, "grad_norm": 0.1864202916622162, "learning_rate": 9.486521174251084e-08, "loss": 0.015225236117839814, "step": 30490 }, { "epoch": 48.87820512820513, "grad_norm": 0.2155330926179886, "learning_rate": 9.22168348873953e-08, "loss": 0.015401580929756164, "step": 30500 }, { "epoch": 48.89423076923077, "grad_norm": 0.1633170247077942, "learning_rate": 8.960589405623387e-08, "loss": 0.014916175603866577, "step": 30510 }, { "epoch": 48.91025641025641, "grad_norm": 0.12435637414455414, "learning_rate": 8.703239251718232e-08, "loss": 0.013650523126125335, "step": 30520 }, { "epoch": 48.92628205128205, "grad_norm": 0.30057162046432495, "learning_rate": 8.449633349152386e-08, "loss": 0.01748047173023224, "step": 30530 }, { "epoch": 48.94230769230769, "grad_norm": 0.2039416879415512, "learning_rate": 8.199772015368589e-08, "loss": 0.015227577090263367, "step": 30540 }, { "epoch": 48.958333333333336, "grad_norm": 0.21043160557746887, "learning_rate": 7.953655563121997e-08, "loss": 0.014290571212768555, "step": 30550 }, { "epoch": 48.97435897435897, "grad_norm": 0.2100180834531784, "learning_rate": 7.711284300480515e-08, "loss": 0.014251884818077088, "step": 30560 }, { "epoch": 48.99038461538461, "grad_norm": 0.21715593338012695, "learning_rate": 7.472658530823462e-08, "loss": 0.014218038320541382, "step": 30570 }, { "epoch": 49.0, "eval_accuracy_background": null, "eval_accuracy_crop": 0.9944413095763941, "eval_iou_background": 0.0, "eval_iou_crop": 0.9944413095763941, "eval_loss": 0.02020249515771866, "eval_mean_accuracy": 0.9944413095763941, "eval_mean_iou": 0.49722065478819705, "eval_overall_accuracy": 0.9944413095763941, "eval_runtime": 35.2908, "eval_samples_per_second": 24.964, "eval_steps_per_second": 3.145, "step": 30576 }, { "epoch": 49.006410256410255, "grad_norm": 0.14238475263118744, "learning_rate": 7.23777855284291e-08, "loss": 0.01677718460559845, "step": 30580 }, { "epoch": 
49.0224358974359, "grad_norm": 0.16817408800125122, "learning_rate": 7.006644660541017e-08, "loss": 0.01699436902999878, "step": 30590 }, { "epoch": 49.03846153846154, "grad_norm": 0.17423322796821594, "learning_rate": 6.77925714323202e-08, "loss": 0.014678655564785004, "step": 30600 }, { "epoch": 49.05448717948718, "grad_norm": 0.1805097758769989, "learning_rate": 6.555616285540578e-08, "loss": 0.014561456441879273, "step": 30610 }, { "epoch": 49.07051282051282, "grad_norm": 0.2081202119588852, "learning_rate": 6.335722367400432e-08, "loss": 0.01384183019399643, "step": 30620 }, { "epoch": 49.08653846153846, "grad_norm": 0.15623706579208374, "learning_rate": 6.119575664056742e-08, "loss": 0.015648487210273742, "step": 30630 }, { "epoch": 49.1025641025641, "grad_norm": 0.2110489159822464, "learning_rate": 5.907176446063756e-08, "loss": 0.014263100922107697, "step": 30640 }, { "epoch": 49.118589743589745, "grad_norm": 0.28423362970352173, "learning_rate": 5.698524979284137e-08, "loss": 0.015709523856639863, "step": 30650 }, { "epoch": 49.13461538461539, "grad_norm": 0.14654381573200226, "learning_rate": 5.493621524890635e-08, "loss": 0.013169506192207336, "step": 30660 }, { "epoch": 49.15064102564103, "grad_norm": 0.14621169865131378, "learning_rate": 5.292466339363755e-08, "loss": 0.013864442706108093, "step": 30670 }, { "epoch": 49.166666666666664, "grad_norm": 0.25173547863960266, "learning_rate": 5.095059674492419e-08, "loss": 0.014880277216434479, "step": 30680 }, { "epoch": 49.18269230769231, "grad_norm": 0.18250636756420135, "learning_rate": 4.90140177737397e-08, "loss": 0.01771833300590515, "step": 30690 }, { "epoch": 49.19871794871795, "grad_norm": 0.3039625585079193, "learning_rate": 4.711492890412505e-08, "loss": 0.016953884065151213, "step": 30700 }, { "epoch": 49.21474358974359, "grad_norm": 0.19342657923698425, "learning_rate": 4.525333251319874e-08, "loss": 0.015878045558929445, "step": 30710 }, { "epoch": 49.23076923076923, "grad_norm": 
0.18217705190181732, "learning_rate": 4.3429230931150147e-08, "loss": 0.01639633923768997, "step": 30720 }, { "epoch": 49.24679487179487, "grad_norm": 0.18027818202972412, "learning_rate": 4.1642626441236176e-08, "loss": 0.015499140322208404, "step": 30730 }, { "epoch": 49.26282051282051, "grad_norm": 0.2628711462020874, "learning_rate": 3.9893521279771306e-08, "loss": 0.0162249356508255, "step": 30740 }, { "epoch": 49.27884615384615, "grad_norm": 0.243768110871315, "learning_rate": 3.8181917636144204e-08, "loss": 0.015266633033752442, "step": 30750 }, { "epoch": 49.294871794871796, "grad_norm": 0.1773437112569809, "learning_rate": 3.6507817652791097e-08, "loss": 0.01466292142868042, "step": 30760 }, { "epoch": 49.31089743589744, "grad_norm": 0.2148677408695221, "learning_rate": 3.4871223425205765e-08, "loss": 0.015212836861610412, "step": 30770 }, { "epoch": 49.32692307692308, "grad_norm": 0.18120285868644714, "learning_rate": 3.327213700194287e-08, "loss": 0.01541500985622406, "step": 30780 }, { "epoch": 49.342948717948715, "grad_norm": 0.18982188403606415, "learning_rate": 3.171056038459797e-08, "loss": 0.014369365572929383, "step": 30790 }, { "epoch": 49.35897435897436, "grad_norm": 0.25578516721725464, "learning_rate": 3.018649552782748e-08, "loss": 0.013744239509105683, "step": 30800 }, { "epoch": 49.375, "grad_norm": 0.19422441720962524, "learning_rate": 2.8699944339318772e-08, "loss": 0.01523786187171936, "step": 30810 }, { "epoch": 49.39102564102564, "grad_norm": 0.16022297739982605, "learning_rate": 2.725090867981339e-08, "loss": 0.013491976261138915, "step": 30820 }, { "epoch": 49.407051282051285, "grad_norm": 0.25612369179725647, "learning_rate": 2.5839390363093794e-08, "loss": 0.015447477996349334, "step": 30830 }, { "epoch": 49.42307692307692, "grad_norm": 0.281077116727829, "learning_rate": 2.4465391155973348e-08, "loss": 0.015871243178844453, "step": 30840 }, { "epoch": 49.43910256410256, "grad_norm": 0.28401511907577515, "learning_rate": 
2.3128912778312972e-08, "loss": 0.014910534024238586, "step": 30850 }, { "epoch": 49.455128205128204, "grad_norm": 0.20785029232501984, "learning_rate": 2.1829956902997827e-08, "loss": 0.015724971890449524, "step": 30860 }, { "epoch": 49.47115384615385, "grad_norm": 0.21415190398693085, "learning_rate": 2.0568525155953977e-08, "loss": 0.015615622699260711, "step": 30870 }, { "epoch": 49.48717948717949, "grad_norm": 0.2392239272594452, "learning_rate": 1.9344619116135053e-08, "loss": 0.014797545969486237, "step": 30880 }, { "epoch": 49.50320512820513, "grad_norm": 0.19695653021335602, "learning_rate": 1.8158240315522268e-08, "loss": 0.014986464381217956, "step": 30890 }, { "epoch": 49.51923076923077, "grad_norm": 0.1637471318244934, "learning_rate": 1.700939023912107e-08, "loss": 0.01669236570596695, "step": 30900 }, { "epoch": 49.53525641025641, "grad_norm": 0.18263565003871918, "learning_rate": 1.5898070324967817e-08, "loss": 0.015411651134490967, "step": 30910 }, { "epoch": 49.55128205128205, "grad_norm": 0.18167492747306824, "learning_rate": 1.4824281964116448e-08, "loss": 0.01622420400381088, "step": 30920 }, { "epoch": 49.56730769230769, "grad_norm": 0.22437846660614014, "learning_rate": 1.3788026500645146e-08, "loss": 0.01716858595609665, "step": 30930 }, { "epoch": 49.583333333333336, "grad_norm": 0.3032570779323578, "learning_rate": 1.2789305231649673e-08, "loss": 0.017374490201473237, "step": 30940 }, { "epoch": 49.59935897435897, "grad_norm": 0.22446005046367645, "learning_rate": 1.1828119407246706e-08, "loss": 0.01592111587524414, "step": 30950 }, { "epoch": 49.61538461538461, "grad_norm": 0.16224347054958344, "learning_rate": 1.0904470230563846e-08, "loss": 0.015438228845596313, "step": 30960 }, { "epoch": 49.631410256410255, "grad_norm": 0.25943079590797424, "learning_rate": 1.0018358857746269e-08, "loss": 0.01580757200717926, "step": 30970 }, { "epoch": 49.6474358974359, "grad_norm": 0.2159086912870407, "learning_rate": 9.169786397956737e-09, "loss": 
0.015580564737319946, "step": 30980 }, { "epoch": 49.66346153846154, "grad_norm": 0.11406087875366211, "learning_rate": 8.358753913365602e-09, "loss": 0.016402420401573182, "step": 30990 }, { "epoch": 49.67948717948718, "grad_norm": 0.1852705031633377, "learning_rate": 7.585262419150807e-09, "loss": 0.013904960453510284, "step": 31000 }, { "epoch": 49.69551282051282, "grad_norm": 0.23676978051662445, "learning_rate": 6.849312883507874e-09, "loss": 0.014888903498649598, "step": 31010 }, { "epoch": 49.71153846153846, "grad_norm": 0.26518747210502625, "learning_rate": 6.150906227633257e-09, "loss": 0.01523749977350235, "step": 31020 }, { "epoch": 49.7275641025641, "grad_norm": 0.2656959891319275, "learning_rate": 5.490043325734329e-09, "loss": 0.015963858366012572, "step": 31030 }, { "epoch": 49.743589743589745, "grad_norm": 0.2506769597530365, "learning_rate": 4.86672500501939e-09, "loss": 0.014689253270626068, "step": 31040 }, { "epoch": 49.75961538461539, "grad_norm": 0.1506599485874176, "learning_rate": 4.280952045710995e-09, "loss": 0.013626089692115784, "step": 31050 }, { "epoch": 49.77564102564103, "grad_norm": 0.20847705006599426, "learning_rate": 3.732725181029295e-09, "loss": 0.017631037533283232, "step": 31060 }, { "epoch": 49.791666666666664, "grad_norm": 0.20817498862743378, "learning_rate": 3.22204509719537e-09, "loss": 0.017304442822933197, "step": 31070 }, { "epoch": 49.80769230769231, "grad_norm": 0.1805124431848526, "learning_rate": 2.748912433434558e-09, "loss": 0.016351301968097687, "step": 31080 }, { "epoch": 49.82371794871795, "grad_norm": 0.26457956433296204, "learning_rate": 2.31332778197646e-09, "loss": 0.015876618027687073, "step": 31090 }, { "epoch": 49.83974358974359, "grad_norm": 0.24689678847789764, "learning_rate": 1.915291688048271e-09, "loss": 0.01468927413225174, "step": 31100 }, { "epoch": 49.85576923076923, "grad_norm": 0.3630339503288269, "learning_rate": 1.5548046498781165e-09, "loss": 0.01462317705154419, "step": 31110 }, { 
"epoch": 49.87179487179487, "grad_norm": 0.2253216654062271, "learning_rate": 1.2318671186950514e-09, "loss": 0.01607371270656586, "step": 31120 }, { "epoch": 49.88782051282051, "grad_norm": 0.22526100277900696, "learning_rate": 9.464794987190662e-10, "loss": 0.015546643733978271, "step": 31130 }, { "epoch": 49.90384615384615, "grad_norm": 0.319961816072464, "learning_rate": 6.986421471777416e-10, "loss": 0.014966805279254914, "step": 31140 }, { "epoch": 49.919871794871796, "grad_norm": 0.18173271417617798, "learning_rate": 4.883553742962566e-10, "loss": 0.014906413853168488, "step": 31150 }, { "epoch": 49.93589743589744, "grad_norm": 0.17242680490016937, "learning_rate": 3.15619443287396e-10, "loss": 0.015132717788219452, "step": 31160 }, { "epoch": 49.95192307692308, "grad_norm": 0.27377742528915405, "learning_rate": 1.8043457037153487e-10, "loss": 0.014666974544525146, "step": 31170 }, { "epoch": 49.967948717948715, "grad_norm": 0.16656553745269775, "learning_rate": 8.28009247599848e-11, "loss": 0.013900679349899293, "step": 31180 }, { "epoch": 49.98397435897436, "grad_norm": 0.32068225741386414, "learning_rate": 2.2718628661655416e-11, "loss": 0.015854406356811523, "step": 31190 }, { "epoch": 50.0, "grad_norm": 0.23049330711364746, "learning_rate": 1.8775728638509294e-13, "loss": 0.01387498527765274, "step": 31200 }, { "epoch": 50.0, "eval_accuracy_background": null, "eval_accuracy_crop": 0.9944053129595398, "eval_iou_background": 0.0, "eval_iou_crop": 0.9944053129595398, "eval_loss": 0.020112894475460052, "eval_mean_accuracy": 0.9944053129595398, "eval_mean_iou": 0.4972026564797699, "eval_overall_accuracy": 0.9944053129595398, "eval_runtime": 37.2964, "eval_samples_per_second": 23.622, "eval_steps_per_second": 2.976, "step": 31200 }, { "epoch": 50.0, "step": 31200, "total_flos": 4.3705957092950016e+18, "train_loss": 0.02810766745263185, "train_runtime": 4914.0636, "train_samples_per_second": 50.742, "train_steps_per_second": 6.349 } ], "logging_steps": 10, 
"max_steps": 31200, "num_input_tokens_seen": 0, "num_train_epochs": 50, "save_steps": 500, "stateful_callbacks": { "TrainerControl": { "args": { "should_epoch_stop": false, "should_evaluate": false, "should_log": false, "should_save": true, "should_training_stop": true }, "attributes": {} } }, "total_flos": 4.3705957092950016e+18, "train_batch_size": 8, "trial_name": null, "trial_params": null }