{
  "best_global_step": 1600,
  "best_metric": 0.01271997,
  "best_model_checkpoint": "/home/serusr01/new/output/v48-20251225-153152/checkpoint-1600",
  "epoch": 1.0,
  "eval_steps": 100,
  "global_step": 2318,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "acc": 0.5208333333333334,
      "epoch": 0.0004314063848144953,
      "grad_norm": 144.0,
      "learning_rate": 5.999997244730563e-06,
      "loss": 1.0161079168319702,
      "step": 1
    },
    {
      "acc": 0.8512820512820513,
      "epoch": 0.0021570319240724763,
      "grad_norm": 12.3125,
      "learning_rate": 5.9999311185171385e-06,
      "loss": 0.40382882952690125,
      "step": 5
    },
    {
      "acc": 0.8622047244094488,
      "epoch": 0.004314063848144953,
      "grad_norm": 18.75,
      "learning_rate": 5.999724477231659e-06,
      "loss": 0.35493576526641846,
      "step": 10
    },
    {
      "acc": 0.8986083499005965,
      "epoch": 0.0064710957722174285,
      "grad_norm": 17.0,
      "learning_rate": 5.9993800856327355e-06,
      "loss": 0.29610111713409426,
      "step": 15
    },
    {
      "acc": 0.8972868217054264,
      "epoch": 0.008628127696289905,
      "grad_norm": 9.125,
      "learning_rate": 5.998897959535169e-06,
      "loss": 0.302515435218811,
      "step": 20
    },
    {
      "acc": 0.876953125,
      "epoch": 0.010785159620362382,
      "grad_norm": 14.9375,
      "learning_rate": 5.998278121078668e-06,
      "loss": 0.3312162160873413,
      "step": 25
    },
    {
      "acc": 0.8975903614457831,
      "epoch": 0.012942191544434857,
      "grad_norm": 13.375,
      "learning_rate": 5.997520598726825e-06,
      "loss": 0.30696473121643064,
      "step": 30
    },
    {
      "acc": 0.8742632612966601,
      "epoch": 0.015099223468507334,
      "grad_norm": 13.25,
      "learning_rate": 5.996625427265816e-06,
      "loss": 0.36511917114257814,
      "step": 35
    },
    {
      "acc": 0.8631790744466801,
      "epoch": 0.01725625539257981,
      "grad_norm": 9.3125,
      "learning_rate": 5.995592647802802e-06,
      "loss": 0.338372015953064,
      "step": 40
    },
    {
      "acc": 0.8964803312629399,
      "epoch": 0.019413287316652286,
      "grad_norm": 6.0,
      "learning_rate": 5.9944223077640325e-06,
      "loss": 0.32863070964813235,
      "step": 45
    },
    {
      "acc": 0.8957915831663327,
      "epoch": 0.021570319240724764,
      "grad_norm": 7.96875,
      "learning_rate": 5.993114460892682e-06,
      "loss": 0.27804901599884035,
      "step": 50
    },
    {
      "acc": 0.8773006134969326,
      "epoch": 0.02372735116479724,
      "grad_norm": 8.9375,
      "learning_rate": 5.9916691672463725e-06,
      "loss": 0.31299686431884766,
      "step": 55
    },
    {
      "acc": 0.9154929577464789,
      "epoch": 0.025884383088869714,
      "grad_norm": 9.1875,
      "learning_rate": 5.990086493194415e-06,
      "loss": 0.2637639045715332,
      "step": 60
    },
    {
      "acc": 0.8927125506072875,
      "epoch": 0.028041415012942193,
      "grad_norm": 9.5,
      "learning_rate": 5.988366511414766e-06,
      "loss": 0.30198609828948975,
      "step": 65
    },
    {
      "acc": 0.9156118143459916,
      "epoch": 0.030198446937014668,
      "grad_norm": 9.0625,
      "learning_rate": 5.986509300890691e-06,
      "loss": 0.24096782207489015,
      "step": 70
    },
    {
      "acc": 0.8798449612403101,
      "epoch": 0.032355478861087146,
      "grad_norm": 7.4375,
      "learning_rate": 5.984514946907133e-06,
      "loss": 0.2953530550003052,
      "step": 75
    },
    {
      "acc": 0.8818737270875764,
      "epoch": 0.03451251078515962,
      "grad_norm": 15.9375,
      "learning_rate": 5.982383541046798e-06,
      "loss": 0.3139867067337036,
      "step": 80
    },
    {
      "acc": 0.9300567107750473,
      "epoch": 0.036669542709232096,
      "grad_norm": 6.5,
      "learning_rate": 5.98011518118595e-06,
      "loss": 0.20404579639434814,
      "step": 85
    },
    {
      "acc": 0.9036885245901639,
      "epoch": 0.03882657463330457,
      "grad_norm": 9.75,
      "learning_rate": 5.977709971489917e-06,
      "loss": 0.2777871131896973,
      "step": 90
    },
    {
      "acc": 0.8767676767676768,
      "epoch": 0.040983606557377046,
      "grad_norm": 12.3125,
      "learning_rate": 5.975168022408304e-06,
      "loss": 0.31117370128631594,
      "step": 95
    },
    {
      "acc": 0.9202334630350194,
      "epoch": 0.04314063848144953,
      "grad_norm": 5.9375,
      "learning_rate": 5.972489450669929e-06,
      "loss": 0.23061840534210204,
      "step": 100
    },
    {
      "epoch": 0.04314063848144953,
      "eval_acc": 0.9116072164099185,
      "eval_loss": 0.031446851789951324,
      "eval_mrr": 0.8872438524590165,
      "eval_ndcg": 0.9145527118078347,
      "eval_runtime": 92.9513,
      "eval_samples_per_second": 21.0,
      "eval_steps_per_second": 10.5,
      "step": 100
    },
    {
      "acc": 0.9188118811881189,
      "epoch": 0.045297670405522,
      "grad_norm": 10.6875,
      "learning_rate": 5.969674379277452e-06,
      "loss": 0.253974175453186,
      "step": 105
    },
    {
      "acc": 0.916030534351145,
      "epoch": 0.04745470232959448,
      "grad_norm": 8.5,
      "learning_rate": 5.9667229375017345e-06,
      "loss": 0.21623244285583496,
      "step": 110
    },
    {
      "acc": 0.9288461538461539,
      "epoch": 0.04961173425366695,
      "grad_norm": 9.4375,
      "learning_rate": 5.9636352608759e-06,
      "loss": 0.2198498487472534,
      "step": 115
    },
    {
      "acc": 0.8913480885311871,
      "epoch": 0.05176876617773943,
      "grad_norm": 10.1875,
      "learning_rate": 5.960411491189113e-06,
      "loss": 0.3081125497817993,
      "step": 120
    },
    {
      "acc": 0.8951612903225806,
      "epoch": 0.05392579810181191,
      "grad_norm": 10.125,
      "learning_rate": 5.957051776480063e-06,
      "loss": 0.2795823335647583,
      "step": 125
    },
    {
      "acc": 0.91,
      "epoch": 0.056082830025884385,
      "grad_norm": 10.375,
      "learning_rate": 5.953556271030172e-06,
      "loss": 0.24584596157073973,
      "step": 130
    },
    {
      "acc": 0.9195402298850575,
      "epoch": 0.05823986194995686,
      "grad_norm": 10.0,
      "learning_rate": 5.949925135356506e-06,
      "loss": 0.20374622344970703,
      "step": 135
    },
    {
      "acc": 0.9190283400809717,
      "epoch": 0.060396893874029335,
      "grad_norm": 13.25,
      "learning_rate": 5.946158536204403e-06,
      "loss": 0.2638064384460449,
      "step": 140
    },
    {
      "acc": 0.9001956947162426,
      "epoch": 0.06255392579810182,
      "grad_norm": 7.21875,
      "learning_rate": 5.942256646539821e-06,
      "loss": 0.2462130069732666,
      "step": 145
    },
    {
      "acc": 0.9201520912547528,
      "epoch": 0.06471095772217429,
      "grad_norm": 7.21875,
      "learning_rate": 5.9382196455413914e-06,
      "loss": 0.25420570373535156,
      "step": 150
    },
    {
      "acc": 0.9288702928870293,
      "epoch": 0.06686798964624677,
      "grad_norm": 10.1875,
      "learning_rate": 5.9340477185921895e-06,
      "loss": 0.2146221160888672,
      "step": 155
    },
    {
      "acc": 0.904397705544933,
      "epoch": 0.06902502157031924,
      "grad_norm": 27.875,
      "learning_rate": 5.9297410572712245e-06,
      "loss": 0.2619572877883911,
      "step": 160
    },
    {
      "acc": 0.903041825095057,
      "epoch": 0.07118205349439172,
      "grad_norm": 8.1875,
      "learning_rate": 5.925299859344643e-06,
      "loss": 0.2647380352020264,
      "step": 165
    },
    {
      "acc": 0.9238095238095239,
      "epoch": 0.07333908541846419,
      "grad_norm": 9.75,
      "learning_rate": 5.9207243287566444e-06,
      "loss": 0.2059628963470459,
      "step": 170
    },
    {
      "acc": 0.9080234833659491,
      "epoch": 0.07549611734253667,
      "grad_norm": 19.0,
      "learning_rate": 5.916014675620117e-06,
      "loss": 0.22357699871063233,
      "step": 175
    },
    {
      "acc": 0.9107883817427386,
      "epoch": 0.07765314926660914,
      "grad_norm": 12.125,
      "learning_rate": 5.911171116206986e-06,
      "loss": 0.23458666801452638,
      "step": 180
    },
    {
      "acc": 0.9092664092664092,
      "epoch": 0.07981018119068162,
      "grad_norm": 10.3125,
      "learning_rate": 5.9061938729382915e-06,
      "loss": 0.2300126552581787,
      "step": 185
    },
    {
      "acc": 0.924901185770751,
      "epoch": 0.08196721311475409,
      "grad_norm": 12.25,
      "learning_rate": 5.901083174373961e-06,
      "loss": 0.21468648910522461,
      "step": 190
    },
    {
      "acc": 0.9180327868852459,
      "epoch": 0.08412424503882658,
      "grad_norm": 9.375,
      "learning_rate": 5.895839255202328e-06,
      "loss": 0.2179854154586792,
      "step": 195
    },
    {
      "acc": 0.935871743486974,
      "epoch": 0.08628127696289906,
      "grad_norm": 11.9375,
      "learning_rate": 5.8904623562293435e-06,
      "loss": 0.22542862892150878,
      "step": 200
    },
    {
      "epoch": 0.08628127696289906,
      "eval_acc": 0.9252821484471538,
      "eval_loss": 0.02712932601571083,
      "eval_mrr": 0.9001439305230289,
      "eval_ndcg": 0.9243608674894062,
      "eval_runtime": 85.9276,
      "eval_samples_per_second": 22.717,
      "eval_steps_per_second": 11.358,
      "step": 200
    },
    {
      "acc": 0.9358178053830227,
      "epoch": 0.08843830888697153,
      "grad_norm": 5.96875,
      "learning_rate": 5.884952724367524e-06,
      "loss": 0.20223827362060548,
      "step": 205
    },
    {
      "acc": 0.9375,
      "epoch": 0.090595340811044,
      "grad_norm": 7.1875,
      "learning_rate": 5.879310612624611e-06,
      "loss": 0.19913212060928345,
      "step": 210
    },
    {
      "acc": 0.9217557251908397,
      "epoch": 0.09275237273511648,
      "grad_norm": 18.5,
      "learning_rate": 5.873536280091955e-06,
      "loss": 0.21275293827056885,
      "step": 215
    },
    {
      "acc": 0.9144050104384134,
      "epoch": 0.09490940465918896,
      "grad_norm": 14.375,
      "learning_rate": 5.867629991932611e-06,
      "loss": 0.23634729385375977,
      "step": 220
    },
    {
      "acc": 0.9239766081871345,
      "epoch": 0.09706643658326143,
      "grad_norm": 13.125,
      "learning_rate": 5.861592019369172e-06,
      "loss": 0.2160120725631714,
      "step": 225
    },
    {
      "acc": 0.8967611336032388,
      "epoch": 0.0992234685073339,
      "grad_norm": 12.0625,
      "learning_rate": 5.855422639671309e-06,
      "loss": 0.25560975074768066,
      "step": 230
    },
    {
      "acc": 0.9405940594059405,
      "epoch": 0.10138050043140638,
      "grad_norm": 11.3125,
      "learning_rate": 5.849122136143034e-06,
      "loss": 0.20292723178863525,
      "step": 235
    },
    {
      "acc": 0.9289827255278311,
      "epoch": 0.10353753235547886,
      "grad_norm": 16.125,
      "learning_rate": 5.842690798109697e-06,
      "loss": 0.19165314435958863,
      "step": 240
    },
    {
      "acc": 0.9137931034482759,
      "epoch": 0.10569456427955133,
      "grad_norm": 14.375,
      "learning_rate": 5.8361289209047004e-06,
      "loss": 0.22526559829711915,
      "step": 245
    },
    {
      "acc": 0.9123505976095617,
      "epoch": 0.10785159620362382,
      "grad_norm": 11.375,
      "learning_rate": 5.829436805855932e-06,
      "loss": 0.1998949646949768,
      "step": 250
    },
    {
      "acc": 0.9298245614035088,
      "epoch": 0.1100086281276963,
      "grad_norm": 9.3125,
      "learning_rate": 5.82261476027193e-06,
      "loss": 0.211327862739563,
      "step": 255
    },
    {
      "acc": 0.9357429718875502,
      "epoch": 0.11216566005176877,
      "grad_norm": 23.375,
      "learning_rate": 5.8156630974277715e-06,
      "loss": 0.21703736782073973,
      "step": 260
    },
    {
      "acc": 0.9092783505154639,
      "epoch": 0.11432269197584125,
      "grad_norm": 17.875,
      "learning_rate": 5.808582136550686e-06,
      "loss": 0.2739748954772949,
      "step": 265
    },
    {
      "acc": 0.9468302658486708,
      "epoch": 0.11647972389991372,
      "grad_norm": 9.9375,
      "learning_rate": 5.8013722028053985e-06,
      "loss": 0.14410748481750488,
      "step": 270
    },
    {
      "acc": 0.9310344827586207,
      "epoch": 0.1186367558239862,
      "grad_norm": 8.5,
      "learning_rate": 5.794033627279193e-06,
      "loss": 0.1934072494506836,
      "step": 275
    },
    {
      "acc": 0.9155470249520153,
      "epoch": 0.12079378774805867,
      "grad_norm": 11.4375,
      "learning_rate": 5.786566746966714e-06,
      "loss": 0.20481424331665038,
      "step": 280
    },
    {
      "acc": 0.9216494845360824,
      "epoch": 0.12295081967213115,
      "grad_norm": 12.5,
      "learning_rate": 5.778971904754485e-06,
      "loss": 0.22041404247283936,
      "step": 285
    },
    {
      "acc": 0.917864476386037,
      "epoch": 0.12510785159620363,
      "grad_norm": 19.375,
      "learning_rate": 5.771249449405169e-06,
      "loss": 0.20896604061126708,
      "step": 290
    },
    {
      "acc": 0.950592885375494,
      "epoch": 0.1272648835202761,
      "grad_norm": 16.875,
      "learning_rate": 5.763399735541551e-06,
      "loss": 0.1629479169845581,
      "step": 295
    },
    {
      "acc": 0.9405737704918032,
      "epoch": 0.12942191544434858,
      "grad_norm": 13.25,
      "learning_rate": 5.755423123630251e-06,
      "loss": 0.19808459281921387,
      "step": 300
    },
    {
      "epoch": 0.12942191544434858,
      "eval_acc": 0.9337671966389324,
      "eval_loss": 0.024512404575943947,
      "eval_mrr": 0.9131787909836067,
      "eval_ndcg": 0.9342680843294187,
      "eval_runtime": 79.6383,
      "eval_samples_per_second": 24.511,
      "eval_steps_per_second": 12.255,
      "step": 300
    },
    {
      "acc": 0.9308943089430894,
      "epoch": 0.13157894736842105,
      "grad_norm": 14.125,
      "learning_rate": 5.747319979965173e-06,
      "loss": 0.20357720851898192,
      "step": 305
    },
    {
      "acc": 0.9433962264150944,
      "epoch": 0.13373597929249353,
      "grad_norm": 14.6875,
      "learning_rate": 5.739090676650683e-06,
      "loss": 0.15039076805114746,
      "step": 310
    },
    {
      "acc": 0.9392712550607287,
      "epoch": 0.135893011216566,
      "grad_norm": 8.625,
      "learning_rate": 5.730735591584529e-06,
      "loss": 0.16305069923400878,
      "step": 315
    },
    {
      "acc": 0.9192307692307692,
      "epoch": 0.13805004314063848,
      "grad_norm": 12.75,
      "learning_rate": 5.722255108440474e-06,
      "loss": 0.2407283067703247,
      "step": 320
    },
    {
      "acc": 0.9298597194388778,
      "epoch": 0.14020707506471095,
      "grad_norm": 12.125,
      "learning_rate": 5.713649616650687e-06,
      "loss": 0.2122425317764282,
      "step": 325
    },
    {
      "acc": 0.9352226720647774,
      "epoch": 0.14236410698878343,
      "grad_norm": 11.125,
      "learning_rate": 5.7049195113878585e-06,
      "loss": 0.19069076776504518,
      "step": 330
    },
    {
      "acc": 0.9189765458422174,
      "epoch": 0.14452113891285592,
      "grad_norm": 17.75,
      "learning_rate": 5.696065193547054e-06,
      "loss": 0.22681286334991455,
      "step": 335
    },
    {
      "acc": 0.943579766536965,
      "epoch": 0.14667817083692838,
      "grad_norm": 14.8125,
      "learning_rate": 5.6870870697273e-06,
      "loss": 0.1799636721611023,
      "step": 340
    },
    {
      "acc": 0.9392712550607287,
      "epoch": 0.14883520276100087,
      "grad_norm": 8.0625,
      "learning_rate": 5.677985552212919e-06,
      "loss": 0.21220760345458983,
      "step": 345
    },
    {
      "acc": 0.9386138613861386,
      "epoch": 0.15099223468507333,
      "grad_norm": 15.0625,
      "learning_rate": 5.668761058954594e-06,
      "loss": 0.19215874671936034,
      "step": 350
    },
    {
      "acc": 0.9487704918032787,
      "epoch": 0.15314926660914582,
      "grad_norm": 9.75,
      "learning_rate": 5.659414013550172e-06,
      "loss": 0.17874677181243898,
      "step": 355
    },
    {
      "acc": 0.9449901768172888,
      "epoch": 0.15530629853321828,
      "grad_norm": 15.1875,
      "learning_rate": 5.649944845225219e-06,
      "loss": 0.17652757167816163,
      "step": 360
    },
    {
      "acc": 0.9281314168377823,
      "epoch": 0.15746333045729077,
      "grad_norm": 28.25,
      "learning_rate": 5.6403539888133056e-06,
      "loss": 0.2142080545425415,
      "step": 365
    },
    {
      "acc": 0.950920245398773,
      "epoch": 0.15962036238136323,
      "grad_norm": 12.0625,
      "learning_rate": 5.63064188473604e-06,
      "loss": 0.17437468767166137,
      "step": 370
    },
    {
      "acc": 0.9422680412371134,
      "epoch": 0.16177739430543572,
      "grad_norm": 19.75,
      "learning_rate": 5.6208089789828435e-06,
      "loss": 0.18122278451919555,
      "step": 375
    },
    {
      "acc": 0.9439071566731141,
      "epoch": 0.16393442622950818,
      "grad_norm": 13.8125,
      "learning_rate": 5.610855723090466e-06,
      "loss": 0.18091570138931273,
      "step": 380
    },
    {
      "acc": 0.923728813559322,
      "epoch": 0.16609145815358067,
      "grad_norm": 29.875,
      "learning_rate": 5.600782574122259e-06,
      "loss": 0.24442174434661865,
      "step": 385
    },
    {
      "acc": 0.9310344827586207,
      "epoch": 0.16824849007765316,
      "grad_norm": 16.75,
      "learning_rate": 5.590589994647182e-06,
      "loss": 0.20708949565887452,
      "step": 390
    },
    {
      "acc": 0.9395161290322581,
      "epoch": 0.17040552200172562,
      "grad_norm": 8.0625,
      "learning_rate": 5.58027845271856e-06,
      "loss": 0.21145825386047362,
      "step": 395
    },
    {
      "acc": 0.9618473895582329,
      "epoch": 0.1725625539257981,
      "grad_norm": 8.5,
      "learning_rate": 5.569848421852592e-06,
      "loss": 0.12078192234039306,
      "step": 400
    },
    {
      "epoch": 0.1725625539257981,
      "eval_acc": 0.9410989373094983,
      "eval_loss": 0.02222530171275139,
      "eval_mrr": 0.9228056693989071,
      "eval_ndcg": 0.9416691570915218,
      "eval_runtime": 87.651,
      "eval_samples_per_second": 22.27,
      "eval_steps_per_second": 11.135,
      "step": 400
    },
    {
      "acc": 0.951310861423221,
      "epoch": 0.17471958584987057,
      "grad_norm": 9.25,
      "learning_rate": 5.559300381006607e-06,
      "loss": 0.1467392086982727,
      "step": 405
    },
    {
      "acc": 0.9362139917695473,
      "epoch": 0.17687661777394306,
      "grad_norm": 9.5625,
      "learning_rate": 5.548634814557066e-06,
      "loss": 0.16467268466949464,
      "step": 410
    },
    {
      "acc": 0.9474671669793621,
      "epoch": 0.17903364969801552,
      "grad_norm": 21.0,
      "learning_rate": 5.537852212277326e-06,
      "loss": 0.16810253858566285,
      "step": 415
    },
    {
      "acc": 0.9329388560157791,
      "epoch": 0.181190681622088,
      "grad_norm": 21.75,
      "learning_rate": 5.5269530693151425e-06,
      "loss": 0.15131351947784424,
      "step": 420
    },
    {
      "acc": 0.9265536723163842,
      "epoch": 0.18334771354616047,
      "grad_norm": 26.25,
      "learning_rate": 5.5159378861699356e-06,
      "loss": 0.2086423635482788,
      "step": 425
    },
    {
      "acc": 0.9393939393939394,
      "epoch": 0.18550474547023296,
      "grad_norm": 9.875,
      "learning_rate": 5.504807168669804e-06,
      "loss": 0.19425034523010254,
      "step": 430
    },
    {
      "acc": 0.9387351778656127,
      "epoch": 0.18766177739430542,
      "grad_norm": 40.0,
      "learning_rate": 5.4935614279482984e-06,
      "loss": 0.1988288640975952,
      "step": 435
    },
    {
      "acc": 0.9350912778904665,
      "epoch": 0.1898188093183779,
      "grad_norm": 29.125,
      "learning_rate": 5.482201180420952e-06,
      "loss": 0.1814996600151062,
      "step": 440
    },
    {
      "acc": 0.9528688524590164,
      "epoch": 0.1919758412424504,
      "grad_norm": 17.25,
      "learning_rate": 5.4707269477615584e-06,
      "loss": 0.1311618208885193,
      "step": 445
    },
    {
      "acc": 0.9548872180451128,
      "epoch": 0.19413287316652286,
      "grad_norm": 21.375,
      "learning_rate": 5.4591392568782275e-06,
      "loss": 0.15495063066482545,
      "step": 450
    },
    {
      "acc": 0.9633204633204633,
      "epoch": 0.19628990509059535,
      "grad_norm": 14.8125,
      "learning_rate": 5.447438639889178e-06,
      "loss": 0.12518519163131714,
      "step": 455
    },
    {
      "acc": 0.9411764705882353,
      "epoch": 0.1984469370146678,
      "grad_norm": 17.875,
      "learning_rate": 5.435625634098311e-06,
      "loss": 0.17223405838012695,
      "step": 460
    },
    {
      "acc": 0.9486166007905138,
      "epoch": 0.2006039689387403,
      "grad_norm": 19.0,
      "learning_rate": 5.423700781970527e-06,
      "loss": 0.14980016946792601,
      "step": 465
    },
    {
      "acc": 0.9354838709677419,
      "epoch": 0.20276100086281276,
      "grad_norm": 23.0,
      "learning_rate": 5.411664631106827e-06,
      "loss": 0.1699580192565918,
      "step": 470
    },
    {
      "acc": 0.9409368635437881,
      "epoch": 0.20491803278688525,
      "grad_norm": 11.6875,
      "learning_rate": 5.399517734219159e-06,
      "loss": 0.2162698268890381,
      "step": 475
    },
    {
      "acc": 0.9535353535353536,
      "epoch": 0.2070750647109577,
      "grad_norm": 16.625,
      "learning_rate": 5.387260649105032e-06,
      "loss": 0.15655677318572997,
      "step": 480
    },
    {
      "acc": 0.9541984732824428,
      "epoch": 0.2092320966350302,
      "grad_norm": 14.4375,
      "learning_rate": 5.374893938621913e-06,
      "loss": 0.15797934532165528,
      "step": 485
    },
    {
      "acc": 0.9343629343629344,
      "epoch": 0.21138912855910266,
      "grad_norm": 6.59375,
      "learning_rate": 5.362418170661375e-06,
      "loss": 0.2039581060409546,
      "step": 490
    },
    {
      "acc": 0.9559386973180076,
      "epoch": 0.21354616048317515,
      "grad_norm": 16.625,
      "learning_rate": 5.3498339181230125e-06,
      "loss": 0.1275785207748413,
      "step": 495
    },
    {
      "acc": 0.9521988527724665,
      "epoch": 0.21570319240724764,
      "grad_norm": 8.1875,
      "learning_rate": 5.3371417588881456e-06,
      "loss": 0.16547932624816894,
      "step": 500
    },
    {
      "epoch": 0.21570319240724764,
      "eval_acc": 0.9440645852211879,
      "eval_loss": 0.0200980044901371,
      "eval_mrr": 0.9341749609679937,
      "eval_ndcg": 0.9502093836313069,
      "eval_runtime": 89.2845,
      "eval_samples_per_second": 21.863,
      "eval_steps_per_second": 10.931,
      "step": 500
    },
    {
      "acc": 0.9288617886178862,
      "epoch": 0.2178602243313201,
      "grad_norm": 17.0,
      "learning_rate": 5.324342275793272e-06,
      "loss": 0.22141244411468505,
      "step": 505
    },
    {
      "acc": 0.9502982107355865,
      "epoch": 0.2200172562553926,
      "grad_norm": 11.0,
      "learning_rate": 5.3114360566033085e-06,
      "loss": 0.13880524635314942,
      "step": 510
    },
    {
      "acc": 0.9284253578732107,
      "epoch": 0.22217428817946505,
      "grad_norm": 10.125,
      "learning_rate": 5.298423693984598e-06,
      "loss": 0.22597496509552,
      "step": 515
    },
    {
      "acc": 0.9553398058252427,
      "epoch": 0.22433132010353754,
      "grad_norm": 20.75,
      "learning_rate": 5.285305785477699e-06,
      "loss": 0.16823456287384034,
      "step": 520
    },
    {
      "acc": 0.9405940594059405,
      "epoch": 0.22648835202761,
      "grad_norm": 9.875,
      "learning_rate": 5.272082933469936e-06,
      "loss": 0.18091850280761718,
      "step": 525
    },
    {
      "acc": 0.9634888438133874,
      "epoch": 0.2286453839516825,
      "grad_norm": 9.6875,
      "learning_rate": 5.258755745167744e-06,
      "loss": 0.13490262031555175,
      "step": 530
    },
    {
      "acc": 0.9320754716981132,
      "epoch": 0.23080241587575495,
      "grad_norm": 18.625,
      "learning_rate": 5.245324832568787e-06,
      "loss": 0.19664840698242186,
      "step": 535
    },
    {
      "acc": 0.9470588235294117,
      "epoch": 0.23295944779982744,
      "grad_norm": 10.3125,
      "learning_rate": 5.2317908124338475e-06,
      "loss": 0.14916815757751464,
      "step": 540
    },
    {
      "acc": 0.9350912778904665,
      "epoch": 0.2351164797238999,
      "grad_norm": 12.0625,
      "learning_rate": 5.21815430625851e-06,
      "loss": 0.1888782262802124,
      "step": 545
    },
    {
      "acc": 0.9365079365079365,
      "epoch": 0.2372735116479724,
      "grad_norm": 14.6875,
      "learning_rate": 5.204415940244618e-06,
      "loss": 0.18109892606735228,
      "step": 550
    },
    {
      "acc": 0.9479768786127167,
      "epoch": 0.23943054357204488,
      "grad_norm": 35.0,
      "learning_rate": 5.1905763452715215e-06,
      "loss": 0.150074303150177,
      "step": 555
    },
    {
      "acc": 0.9408284023668639,
      "epoch": 0.24158757549611734,
      "grad_norm": 10.0625,
      "learning_rate": 5.176636156867102e-06,
      "loss": 0.13501374721527098,
      "step": 560
    },
    {
      "acc": 0.9490196078431372,
      "epoch": 0.24374460742018983,
      "grad_norm": 11.5625,
      "learning_rate": 5.162596015178593e-06,
      "loss": 0.14429715871810914,
      "step": 565
    },
    {
      "acc": 0.961038961038961,
      "epoch": 0.2459016393442623,
      "grad_norm": 4.53125,
      "learning_rate": 5.14845656494318e-06,
      "loss": 0.10108070373535157,
      "step": 570
    },
    {
      "acc": 0.9427402862985685,
      "epoch": 0.24805867126833478,
      "grad_norm": 13.5,
      "learning_rate": 5.134218455458396e-06,
      "loss": 0.16440529823303224,
      "step": 575
    },
    {
      "acc": 0.9512670565302144,
      "epoch": 0.25021570319240727,
      "grad_norm": 23.0,
      "learning_rate": 5.119882340552303e-06,
      "loss": 0.15792548656463623,
      "step": 580
    },
    {
      "acc": 0.9310344827586207,
      "epoch": 0.25237273511647973,
      "grad_norm": 20.375,
      "learning_rate": 5.105448878553472e-06,
      "loss": 0.17942277193069459,
      "step": 585
    },
    {
      "acc": 0.9586614173228346,
      "epoch": 0.2545297670405522,
      "grad_norm": 6.21875,
      "learning_rate": 5.0909187322607434e-06,
      "loss": 0.1267695426940918,
      "step": 590
    },
    {
      "acc": 0.9421157684630739,
      "epoch": 0.25668679896462465,
      "grad_norm": 10.8125,
      "learning_rate": 5.076292568912801e-06,
      "loss": 0.17552590370178223,
      "step": 595
    },
    {
      "acc": 0.9418837675350702,
      "epoch": 0.25884383088869717,
      "grad_norm": 15.5625,
      "learning_rate": 5.061571060157525e-06,
      "loss": 0.1582822322845459,
      "step": 600
    },
    {
      "epoch": 0.25884383088869717,
      "eval_acc": 0.9523848751956504,
      "eval_loss": 0.01805831491947174,
      "eval_mrr": 0.9341017759562842,
      "eval_ndcg": 0.9502097307474537,
      "eval_runtime": 80.1164,
      "eval_samples_per_second": 24.365,
      "eval_steps_per_second": 12.182,
      "step": 600
    },
    {
      "acc": 0.9516806722689075,
      "epoch": 0.26100086281276963,
      "grad_norm": 9.5625,
      "learning_rate": 5.04675488202115e-06,
      "loss": 0.1585369348526001,
      "step": 605
    },
    {
      "acc": 0.967479674796748,
      "epoch": 0.2631578947368421,
      "grad_norm": 14.375,
      "learning_rate": 5.031844714877224e-06,
      "loss": 0.14685235023498536,
      "step": 610
    },
    {
      "acc": 0.9669902912621359,
      "epoch": 0.2653149266609146,
      "grad_norm": 11.875,
      "learning_rate": 5.016841243415359e-06,
      "loss": 0.09848676323890686,
      "step": 615
    },
    {
      "acc": 0.9692622950819673,
      "epoch": 0.26747195858498707,
      "grad_norm": 11.875,
      "learning_rate": 5.001745156609801e-06,
      "loss": 0.09488856792449951,
      "step": 620
    },
    {
      "acc": 0.962,
      "epoch": 0.26962899050905953,
      "grad_norm": 8.625,
      "learning_rate": 4.9865571476877775e-06,
      "loss": 0.10182394981384277,
      "step": 625
    },
    {
      "acc": 0.9538152610441767,
      "epoch": 0.271786022433132,
      "grad_norm": 13.0,
      "learning_rate": 4.9712779140976725e-06,
      "loss": 0.14965349435806274,
      "step": 630
    },
    {
      "acc": 0.9563567362428842,
      "epoch": 0.2739430543572045,
      "grad_norm": 15.5625,
      "learning_rate": 4.9559081574769965e-06,
      "loss": 0.14634023904800414,
      "step": 635
    },
    {
      "acc": 0.9402390438247012,
      "epoch": 0.27610008628127697,
      "grad_norm": 19.875,
      "learning_rate": 4.9404485836201695e-06,
      "loss": 0.15552909374237062,
      "step": 640
    },
    {
      "acc": 0.9487666034155597,
      "epoch": 0.27825711820534943,
      "grad_norm": 38.25,
      "learning_rate": 4.924899902446105e-06,
      "loss": 0.16740797758102416,
      "step": 645
    },
    {
      "acc": 0.9620253164556962,
      "epoch": 0.2804141501294219,
      "grad_norm": 8.9375,
      "learning_rate": 4.909262827965613e-06,
      "loss": 0.13096400499343872,
      "step": 650
    },
    {
      "acc": 0.9373737373737374,
      "epoch": 0.2825711820534944,
      "grad_norm": 23.375,
      "learning_rate": 4.893538078248613e-06,
      "loss": 0.19330756664276122,
      "step": 655
    },
    {
      "acc": 0.9327902240325866,
      "epoch": 0.28472821397756687,
      "grad_norm": 31.25,
      "learning_rate": 4.877726375391156e-06,
      "loss": 0.17176212072372438,
      "step": 660
    },
    {
      "acc": 0.9481037924151696,
      "epoch": 0.28688524590163933,
      "grad_norm": 17.125,
      "learning_rate": 4.861828445482269e-06,
      "loss": 0.15666646957397462,
      "step": 665
    },
    {
      "acc": 0.9557344064386318,
      "epoch": 0.28904227782571185,
      "grad_norm": 15.875,
      "learning_rate": 4.8458450185706095e-06,
      "loss": 0.12006416320800781,
      "step": 670
    },
    {
      "acc": 0.9530612244897959,
      "epoch": 0.2911993097497843,
      "grad_norm": 13.375,
      "learning_rate": 4.829776828630942e-06,
      "loss": 0.12216727733612061,
      "step": 675
    },
    {
      "acc": 0.9454545454545454,
      "epoch": 0.29335634167385677,
      "grad_norm": 12.8125,
      "learning_rate": 4.813624613530434e-06,
      "loss": 0.1579872488975525,
      "step": 680
    },
    {
      "acc": 0.94831013916501,
      "epoch": 0.29551337359792923,
      "grad_norm": 10.75,
      "learning_rate": 4.79738911499477e-06,
      "loss": 0.14849933385848998,
      "step": 685
    },
    {
      "acc": 0.958904109589041,
      "epoch": 0.29767040552200175,
      "grad_norm": 9.125,
      "learning_rate": 4.781071078574092e-06,
      "loss": 0.12549154758453368,
      "step": 690
    },
    {
      "acc": 0.943359375,
      "epoch": 0.2998274374460742,
      "grad_norm": 19.25,
      "learning_rate": 4.764671253608765e-06,
      "loss": 0.11965863704681397,
      "step": 695
    },
    {
      "acc": 0.9474747474747475,
      "epoch": 0.30198446937014667,
      "grad_norm": 24.5,
      "learning_rate": 4.748190393194964e-06,
      "loss": 0.15644127130508423,
      "step": 700
    },
    {
      "epoch": 0.30198446937014667,
      "eval_acc": 0.9549386275640498,
      "eval_loss": 0.017001556232571602,
      "eval_mrr": 0.9429937548790007,
      "eval_ndcg": 0.9569733975457528,
      "eval_runtime": 87.9286,
      "eval_samples_per_second": 22.2,
      "eval_steps_per_second": 11.1,
      "step": 700
    },
    {
      "acc": 0.9433962264150944,
      "epoch": 0.30414150129421913,
      "grad_norm": 14.875,
      "learning_rate": 4.731629254150091e-06,
      "loss": 0.1656881332397461,
      "step": 705
    },
    {
      "acc": 0.9484536082474226,
      "epoch": 0.30629853321829165,
      "grad_norm": 16.875,
      "learning_rate": 4.714988596978023e-06,
      "loss": 0.16551544666290283,
      "step": 710
    },
    {
      "acc": 0.9601593625498008,
      "epoch": 0.3084555651423641,
      "grad_norm": 13.0625,
      "learning_rate": 4.698269185834188e-06,
      "loss": 0.13289109468460084,
      "step": 715
    },
    {
      "acc": 0.9621928166351607,
      "epoch": 0.31061259706643657,
      "grad_norm": 6.625,
      "learning_rate": 4.681471788490473e-06,
      "loss": 0.13107165098190307,
      "step": 720
    },
    {
      "acc": 0.9474708171206225,
      "epoch": 0.3127696289905091,
      "grad_norm": 17.125,
      "learning_rate": 4.664597176299972e-06,
      "loss": 0.15164241790771485,
      "step": 725
    },
    {
      "acc": 0.9386138613861386,
      "epoch": 0.31492666091458155,
      "grad_norm": 13.25,
      "learning_rate": 4.647646124161557e-06,
      "loss": 0.14998191595077515,
      "step": 730
    },
    {
      "acc": 0.9588014981273408,
      "epoch": 0.317083692838654,
      "grad_norm": 17.0,
      "learning_rate": 4.6306194104843005e-06,
      "loss": 0.1452179431915283,
      "step": 735
    },
    {
      "acc": 0.9697542533081286,
      "epoch": 0.31924072476272647,
      "grad_norm": 21.75,
      "learning_rate": 4.613517817151725e-06,
| "loss": 0.0863348662853241, | |
| "step": 740 | |
| }, | |
| { | |
| "acc": 0.9271653543307087, | |
| "epoch": 0.321397756686799, | |
| "grad_norm": 22.25, | |
| "learning_rate": 4.596342129485904e-06, | |
| "loss": 0.2219313383102417, | |
| "step": 745 | |
| }, | |
| { | |
| "acc": 0.948559670781893, | |
| "epoch": 0.32355478861087145, | |
| "grad_norm": 17.25, | |
| "learning_rate": 4.579093136211394e-06, | |
| "loss": 0.14628000259399415, | |
| "step": 750 | |
| }, | |
| { | |
| "acc": 0.9550321199143469, | |
| "epoch": 0.3257118205349439, | |
| "grad_norm": 19.375, | |
| "learning_rate": 4.561771629419018e-06, | |
| "loss": 0.14832402467727662, | |
| "step": 755 | |
| }, | |
| { | |
| "acc": 0.9540918163672655, | |
| "epoch": 0.32786885245901637, | |
| "grad_norm": 24.875, | |
| "learning_rate": 4.544378404529493e-06, | |
| "loss": 0.16552494764328002, | |
| "step": 760 | |
| }, | |
| { | |
| "acc": 0.9468302658486708, | |
| "epoch": 0.3300258843830889, | |
| "grad_norm": 11.75, | |
| "learning_rate": 4.526914260256897e-06, | |
| "loss": 0.18186469078063966, | |
| "step": 765 | |
| }, | |
| { | |
| "acc": 0.9342915811088296, | |
| "epoch": 0.33218291630716135, | |
| "grad_norm": 29.0, | |
| "learning_rate": 4.509379998572003e-06, | |
| "loss": 0.189409601688385, | |
| "step": 770 | |
| }, | |
| { | |
| "acc": 0.9652509652509652, | |
| "epoch": 0.3343399482312338, | |
| "grad_norm": 8.125, | |
| "learning_rate": 4.491776424665441e-06, | |
| "loss": 0.11067872047424317, | |
| "step": 775 | |
| }, | |
| { | |
| "acc": 0.9634408602150538, | |
| "epoch": 0.3364969801553063, | |
| "grad_norm": 19.125, | |
| "learning_rate": 4.474104346910724e-06, | |
| "loss": 0.12605880498886107, | |
| "step": 780 | |
| }, | |
| { | |
| "acc": 0.9620493358633776, | |
| "epoch": 0.3386540120793788, | |
| "grad_norm": 16.625, | |
| "learning_rate": 4.4563645768271375e-06, | |
| "loss": 0.1279581904411316, | |
| "step": 785 | |
| }, | |
| { | |
| "acc": 0.9608610567514677, | |
| "epoch": 0.34081104400345125, | |
| "grad_norm": 26.125, | |
| "learning_rate": 4.438557929042457e-06, | |
| "loss": 0.13559631109237671, | |
| "step": 790 | |
| }, | |
| { | |
| "acc": 0.963265306122449, | |
| "epoch": 0.3429680759275237, | |
| "grad_norm": 16.75, | |
| "learning_rate": 4.4206852212555544e-06, | |
| "loss": 0.11860382556915283, | |
| "step": 795 | |
| }, | |
| { | |
| "acc": 0.9424184261036468, | |
| "epoch": 0.3451251078515962, | |
| "grad_norm": 27.25, | |
| "learning_rate": 4.402747274198838e-06, | |
| "loss": 0.1454222559928894, | |
| "step": 800 | |
| }, | |
| { | |
| "epoch": 0.3451251078515962, | |
| "eval_acc": 0.9581514128017135, | |
| "eval_loss": 0.01614278368651867, | |
| "eval_mrr": 0.9435499609679938, | |
| "eval_ndcg": 0.9573436789903933, | |
| "eval_runtime": 106.3602, | |
| "eval_samples_per_second": 18.353, | |
| "eval_steps_per_second": 9.176, | |
| "step": 800 | |
| }, | |
| { | |
| "acc": 0.9533468559837728, | |
| "epoch": 0.3472821397756687, | |
| "grad_norm": 23.125, | |
| "learning_rate": 4.384744911600571e-06, | |
| "loss": 0.15178160667419432, | |
| "step": 805 | |
| }, | |
| { | |
| "acc": 0.9653767820773931, | |
| "epoch": 0.34943917169974115, | |
| "grad_norm": 16.625, | |
| "learning_rate": 4.36667896014704e-06, | |
| "loss": 0.12023615837097168, | |
| "step": 810 | |
| }, | |
| { | |
| "acc": 0.9449715370018975, | |
| "epoch": 0.3515962036238136, | |
| "grad_norm": 15.0625, | |
| "learning_rate": 4.3485502494445945e-06, | |
| "loss": 0.1382935166358948, | |
| "step": 815 | |
| }, | |
| { | |
| "acc": 0.9700598802395209, | |
| "epoch": 0.3537532355478861, | |
| "grad_norm": 4.75, | |
| "learning_rate": 4.330359611981552e-06, | |
| "loss": 0.09947049617767334, | |
| "step": 820 | |
| }, | |
| { | |
| "acc": 0.9504132231404959, | |
| "epoch": 0.3559102674719586, | |
| "grad_norm": 11.4375, | |
| "learning_rate": 4.312107883089969e-06, | |
| "loss": 0.15309990644454957, | |
| "step": 825 | |
| }, | |
| { | |
| "acc": 0.9606625258799172, | |
| "epoch": 0.35806729939603105, | |
| "grad_norm": 13.375, | |
| "learning_rate": 4.293795900907278e-06, | |
| "loss": 0.1290311336517334, | |
| "step": 830 | |
| }, | |
| { | |
| "acc": 0.9569471624266145, | |
| "epoch": 0.36022433132010356, | |
| "grad_norm": 13.875, | |
| "learning_rate": 4.275424506337804e-06, | |
| "loss": 0.1386420726776123, | |
| "step": 835 | |
| }, | |
| { | |
| "acc": 0.9567779960707269, | |
| "epoch": 0.362381363244176, | |
| "grad_norm": 15.0, | |
| "learning_rate": 4.256994543014147e-06, | |
| "loss": 0.13683913946151732, | |
| "step": 840 | |
| }, | |
| { | |
| "acc": 0.978515625, | |
| "epoch": 0.3645383951682485, | |
| "grad_norm": 10.875, | |
| "learning_rate": 4.2385068572584416e-06, | |
| "loss": 0.07557401657104493, | |
| "step": 845 | |
| }, | |
| { | |
| "acc": 0.9506903353057199, | |
| "epoch": 0.36669542709232095, | |
| "grad_norm": 21.875, | |
| "learning_rate": 4.2199622980434965e-06, | |
| "loss": 0.15320024490356446, | |
| "step": 850 | |
| }, | |
| { | |
| "acc": 0.9474708171206225, | |
| "epoch": 0.36885245901639346, | |
| "grad_norm": 30.875, | |
| "learning_rate": 4.2013617169537995e-06, | |
| "loss": 0.15146974325180054, | |
| "step": 855 | |
| }, | |
| { | |
| "acc": 0.936127744510978, | |
| "epoch": 0.3710094909404659, | |
| "grad_norm": 13.9375, | |
| "learning_rate": 4.182705968146426e-06, | |
| "loss": 0.18005824089050293, | |
| "step": 860 | |
| }, | |
| { | |
| "acc": 0.9661354581673307, | |
| "epoch": 0.3731665228645384, | |
| "grad_norm": 11.0625, | |
| "learning_rate": 4.163995908311802e-06, | |
| "loss": 0.10441750288009644, | |
| "step": 865 | |
| }, | |
| { | |
| "acc": 0.9671179883945842, | |
| "epoch": 0.37532355478861085, | |
| "grad_norm": 11.625, | |
| "learning_rate": 4.145232396634372e-06, | |
| "loss": 0.11835912466049195, | |
| "step": 870 | |
| }, | |
| { | |
| "acc": 0.9686888454011742, | |
| "epoch": 0.37748058671268336, | |
| "grad_norm": 8.1875, | |
| "learning_rate": 4.126416294753141e-06, | |
| "loss": 0.08334695696830749, | |
| "step": 875 | |
| }, | |
| { | |
| "acc": 0.9533898305084746, | |
| "epoch": 0.3796376186367558, | |
| "grad_norm": 15.0625, | |
| "learning_rate": 4.1075484667221095e-06, | |
| "loss": 0.1570521354675293, | |
| "step": 880 | |
| }, | |
| { | |
| "acc": 0.9628865979381444, | |
| "epoch": 0.3817946505608283, | |
| "grad_norm": 11.875, | |
| "learning_rate": 4.088629778970591e-06, | |
| "loss": 0.10125092267990113, | |
| "step": 885 | |
| }, | |
| { | |
| "acc": 0.9517102615694165, | |
| "epoch": 0.3839516824849008, | |
| "grad_norm": 10.3125, | |
| "learning_rate": 4.06966110026343e-06, | |
| "loss": 0.15835769176483155, | |
| "step": 890 | |
| }, | |
| { | |
| "acc": 0.9445506692160612, | |
| "epoch": 0.38610871440897326, | |
| "grad_norm": 51.0, | |
| "learning_rate": 4.050643301661107e-06, | |
| "loss": 0.17581632137298583, | |
| "step": 895 | |
| }, | |
| { | |
| "acc": 0.9544554455445544, | |
| "epoch": 0.3882657463330457, | |
| "grad_norm": 17.75, | |
| "learning_rate": 4.0315772564797325e-06, | |
| "loss": 0.16853252649307252, | |
| "step": 900 | |
| }, | |
| { | |
| "epoch": 0.3882657463330457, | |
| "eval_acc": 0.9614465771480353, | |
| "eval_loss": 0.015336191281676292, | |
| "eval_mrr": 0.9436902322404371, | |
| "eval_ndcg": 0.9575083647474146, | |
| "eval_runtime": 79.2201, | |
| "eval_samples_per_second": 24.64, | |
| "eval_steps_per_second": 12.32, | |
| "step": 900 | |
| }, | |
| { | |
| "acc": 0.9625984251968503, | |
| "epoch": 0.3904227782571182, | |
| "grad_norm": 9.5625, | |
| "learning_rate": 4.012463840250949e-06, | |
| "loss": 0.11725049018859864, | |
| "step": 905 | |
| }, | |
| { | |
| "acc": 0.9578313253012049, | |
| "epoch": 0.3925798101811907, | |
| "grad_norm": 26.125, | |
| "learning_rate": 3.993303930681726e-06, | |
| "loss": 0.11894035339355469, | |
| "step": 910 | |
| }, | |
| { | |
| "acc": 0.952, | |
| "epoch": 0.39473684210526316, | |
| "grad_norm": 26.375, | |
| "learning_rate": 3.974098407614051e-06, | |
| "loss": 0.14351377487182618, | |
| "step": 915 | |
| }, | |
| { | |
| "acc": 0.9588477366255144, | |
| "epoch": 0.3968938740293356, | |
| "grad_norm": 33.75, | |
| "learning_rate": 3.954848152984529e-06, | |
| "loss": 0.12398861646652222, | |
| "step": 920 | |
| }, | |
| { | |
| "acc": 0.9679358717434869, | |
| "epoch": 0.3990509059534081, | |
| "grad_norm": 10.4375, | |
| "learning_rate": 3.935554050783885e-06, | |
| "loss": 0.10695126056671142, | |
| "step": 925 | |
| }, | |
| { | |
| "acc": 0.9646365422396856, | |
| "epoch": 0.4012079378774806, | |
| "grad_norm": 9.5, | |
| "learning_rate": 3.916216987016363e-06, | |
| "loss": 0.11947808265686036, | |
| "step": 930 | |
| }, | |
| { | |
| "acc": 0.9588235294117647, | |
| "epoch": 0.40336496980155306, | |
| "grad_norm": 19.375, | |
| "learning_rate": 3.8968378496590485e-06, | |
| "loss": 0.12318050861358643, | |
| "step": 935 | |
| }, | |
| { | |
| "acc": 0.9699812382739212, | |
| "epoch": 0.4055220017256255, | |
| "grad_norm": 9.5625, | |
| "learning_rate": 3.877417528621087e-06, | |
| "loss": 0.08819143176078796, | |
| "step": 940 | |
| }, | |
| { | |
| "acc": 0.9529411764705882, | |
| "epoch": 0.40767903364969804, | |
| "grad_norm": 25.0, | |
| "learning_rate": 3.8579569157028195e-06, | |
| "loss": 0.1318346619606018, | |
| "step": 945 | |
| }, | |
| { | |
| "acc": 0.9395085066162571, | |
| "epoch": 0.4098360655737705, | |
| "grad_norm": 14.875, | |
| "learning_rate": 3.838456904554829e-06, | |
| "loss": 0.14923615455627443, | |
| "step": 950 | |
| }, | |
| { | |
| "acc": 0.9635974304068522, | |
| "epoch": 0.41199309749784296, | |
| "grad_norm": 11.25, | |
| "learning_rate": 3.8189183906369035e-06, | |
| "loss": 0.10555909872055054, | |
| "step": 955 | |
| }, | |
| { | |
| "acc": 0.9612244897959183, | |
| "epoch": 0.4141501294219154, | |
| "grad_norm": 27.0, | |
| "learning_rate": 3.799342271176918e-06, | |
| "loss": 0.11741663217544555, | |
| "step": 960 | |
| }, | |
| { | |
| "acc": 0.9591397849462365, | |
| "epoch": 0.41630716134598794, | |
| "grad_norm": 11.875, | |
| "learning_rate": 3.7797294451296307e-06, | |
| "loss": 0.1104978084564209, | |
| "step": 965 | |
| }, | |
| { | |
| "acc": 0.9538461538461539, | |
| "epoch": 0.4184641932700604, | |
| "grad_norm": 32.0, | |
| "learning_rate": 3.7600808131354033e-06, | |
| "loss": 0.12546448707580565, | |
| "step": 970 | |
| }, | |
| { | |
| "acc": 0.9642147117296223, | |
| "epoch": 0.42062122519413286, | |
| "grad_norm": 17.375, | |
| "learning_rate": 3.740397277478841e-06, | |
| "loss": 0.15062413215637208, | |
| "step": 975 | |
| }, | |
| { | |
| "acc": 0.9447731755424064, | |
| "epoch": 0.4227782571182053, | |
| "grad_norm": 20.25, | |
| "learning_rate": 3.720679742047358e-06, | |
| "loss": 0.19598883390426636, | |
| "step": 980 | |
| }, | |
| { | |
| "acc": 0.9592668024439919, | |
| "epoch": 0.42493528904227784, | |
| "grad_norm": 8.4375, | |
| "learning_rate": 3.700929112289674e-06, | |
| "loss": 0.11623775959014893, | |
| "step": 985 | |
| }, | |
| { | |
| "acc": 0.9693486590038314, | |
| "epoch": 0.4270923209663503, | |
| "grad_norm": 17.625, | |
| "learning_rate": 3.681146295174234e-06, | |
| "loss": 0.1038408637046814, | |
| "step": 990 | |
| }, | |
| { | |
| "acc": 0.9513184584178499, | |
| "epoch": 0.42924935289042276, | |
| "grad_norm": 6.40625, | |
| "learning_rate": 3.6613321991475553e-06, | |
| "loss": 0.16103934049606322, | |
| "step": 995 | |
| }, | |
| { | |
| "acc": 0.9710982658959537, | |
| "epoch": 0.4314063848144953, | |
| "grad_norm": 35.5, | |
| "learning_rate": 3.6414877340925163e-06, | |
| "loss": 0.1310266375541687, | |
| "step": 1000 | |
| }, | |
| { | |
| "epoch": 0.4314063848144953, | |
| "eval_acc": 0.9601285114095065, | |
| "eval_loss": 0.01538186427205801, | |
| "eval_mrr": 0.9503037177985949, | |
| "eval_ndcg": 0.9625558802620603, | |
| "eval_runtime": 84.007, | |
| "eval_samples_per_second": 23.236, | |
| "eval_steps_per_second": 11.618, | |
| "step": 1000 | |
| }, | |
| { | |
| "acc": 0.9592668024439919, | |
| "epoch": 0.43356341673856774, | |
| "grad_norm": 14.5625, | |
| "learning_rate": 3.6216138112865695e-06, | |
| "loss": 0.11078298091888428, | |
| "step": 1005 | |
| }, | |
| { | |
| "acc": 0.9509433962264151, | |
| "epoch": 0.4357204486626402, | |
| "grad_norm": 31.75, | |
| "learning_rate": 3.601711343359897e-06, | |
| "loss": 0.12666620016098024, | |
| "step": 1010 | |
| }, | |
| { | |
| "acc": 0.958984375, | |
| "epoch": 0.43787748058671266, | |
| "grad_norm": 7.84375, | |
| "learning_rate": 3.5817812442535008e-06, | |
| "loss": 0.10231037139892578, | |
| "step": 1015 | |
| }, | |
| { | |
| "acc": 0.9519038076152304, | |
| "epoch": 0.4400345125107852, | |
| "grad_norm": 34.75, | |
| "learning_rate": 3.561824429177234e-06, | |
| "loss": 0.14617985486984253, | |
| "step": 1020 | |
| }, | |
| { | |
| "acc": 0.9489603024574669, | |
| "epoch": 0.44219154443485764, | |
| "grad_norm": 24.75, | |
| "learning_rate": 3.541841814567774e-06, | |
| "loss": 0.15405564308166503, | |
| "step": 1025 | |
| }, | |
| { | |
| "acc": 0.9458413926499033, | |
| "epoch": 0.4443485763589301, | |
| "grad_norm": 25.875, | |
| "learning_rate": 3.521834318046539e-06, | |
| "loss": 0.1635340690612793, | |
| "step": 1030 | |
| }, | |
| { | |
| "acc": 0.964509394572025, | |
| "epoch": 0.44650560828300256, | |
| "grad_norm": 10.9375, | |
| "learning_rate": 3.5018028583775472e-06, | |
| "loss": 0.10313594341278076, | |
| "step": 1035 | |
| }, | |
| { | |
| "acc": 0.9542857142857143, | |
| "epoch": 0.4486626402070751, | |
| "grad_norm": 23.375, | |
| "learning_rate": 3.481748355425229e-06, | |
| "loss": 0.13693466186523437, | |
| "step": 1040 | |
| }, | |
| { | |
| "acc": 0.9563567362428842, | |
| "epoch": 0.45081967213114754, | |
| "grad_norm": 22.875, | |
| "learning_rate": 3.4616717301121857e-06, | |
| "loss": 0.123587965965271, | |
| "step": 1045 | |
| }, | |
| { | |
| "acc": 0.9590163934426229, | |
| "epoch": 0.45297670405522, | |
| "grad_norm": 12.5, | |
| "learning_rate": 3.441573904376899e-06, | |
| "loss": 0.13192965984344482, | |
| "step": 1050 | |
| }, | |
| { | |
| "acc": 0.9732824427480916, | |
| "epoch": 0.4551337359792925, | |
| "grad_norm": 20.875, | |
| "learning_rate": 3.4214558011313937e-06, | |
| "loss": 0.09249483346939087, | |
| "step": 1055 | |
| }, | |
| { | |
| "acc": 0.9580838323353293, | |
| "epoch": 0.457290767903365, | |
| "grad_norm": 26.75, | |
| "learning_rate": 3.4013183442188606e-06, | |
| "loss": 0.11342880725860596, | |
| "step": 1060 | |
| }, | |
| { | |
| "acc": 0.9686274509803922, | |
| "epoch": 0.45944779982743744, | |
| "grad_norm": 22.375, | |
| "learning_rate": 3.381162458371229e-06, | |
| "loss": 0.08873859643936158, | |
| "step": 1065 | |
| }, | |
| { | |
| "acc": 0.969758064516129, | |
| "epoch": 0.4616048317515099, | |
| "grad_norm": 39.0, | |
| "learning_rate": 3.3609890691667005e-06, | |
| "loss": 0.0837608277797699, | |
| "step": 1070 | |
| }, | |
| { | |
| "acc": 0.962671905697446, | |
| "epoch": 0.4637618636755824, | |
| "grad_norm": 16.375, | |
| "learning_rate": 3.340799102987251e-06, | |
| "loss": 0.11619801521301269, | |
| "step": 1075 | |
| }, | |
| { | |
| "acc": 0.9606003752345216, | |
| "epoch": 0.4659188955996549, | |
| "grad_norm": 15.0625, | |
| "learning_rate": 3.320593486976088e-06, | |
| "loss": 0.10996119976043701, | |
| "step": 1080 | |
| }, | |
| { | |
| "acc": 0.955193482688391, | |
| "epoch": 0.46807592752372734, | |
| "grad_norm": 24.5, | |
| "learning_rate": 3.300373148995072e-06, | |
| "loss": 0.12069922685623169, | |
| "step": 1085 | |
| }, | |
| { | |
| "acc": 0.9691991786447639, | |
| "epoch": 0.4702329594477998, | |
| "grad_norm": 7.90625, | |
| "learning_rate": 3.280139017582113e-06, | |
| "loss": 0.14299554824829103, | |
| "step": 1090 | |
| }, | |
| { | |
| "acc": 0.9743589743589743, | |
| "epoch": 0.4723899913718723, | |
| "grad_norm": 10.875, | |
| "learning_rate": 3.2598920219085285e-06, | |
| "loss": 0.08163526654243469, | |
| "step": 1095 | |
| }, | |
| { | |
| "acc": 0.9682242990654205, | |
| "epoch": 0.4745470232959448, | |
| "grad_norm": 9.25, | |
| "learning_rate": 3.2396330917363754e-06, | |
| "loss": 0.108160400390625, | |
| "step": 1100 | |
| }, | |
| { | |
| "epoch": 0.4745470232959448, | |
| "eval_acc": 0.964494604168383, | |
| "eval_loss": 0.014143780805170536, | |
| "eval_mrr": 0.9522601971116316, | |
| "eval_ndcg": 0.963948349172127, | |
| "eval_runtime": 88.1934, | |
| "eval_samples_per_second": 22.133, | |
| "eval_steps_per_second": 11.067, | |
| "step": 1100 | |
| }, | |
| { | |
| "acc": 0.9725490196078431, | |
| "epoch": 0.47670405522001724, | |
| "grad_norm": 11.875, | |
| "learning_rate": 3.219363157375755e-06, | |
| "loss": 0.08103461265563965, | |
| "step": 1105 | |
| }, | |
| { | |
| "acc": 0.97678916827853, | |
| "epoch": 0.47886108714408976, | |
| "grad_norm": 26.5, | |
| "learning_rate": 3.1990831496420897e-06, | |
| "loss": 0.08858168721199036, | |
| "step": 1110 | |
| }, | |
| { | |
| "acc": 0.9602385685884692, | |
| "epoch": 0.4810181190681622, | |
| "grad_norm": 15.6875, | |
| "learning_rate": 3.1787939998133853e-06, | |
| "loss": 0.11988803148269653, | |
| "step": 1115 | |
| }, | |
| { | |
| "acc": 0.9718875502008032, | |
| "epoch": 0.4831751509922347, | |
| "grad_norm": 19.0, | |
| "learning_rate": 3.158496639587459e-06, | |
| "loss": 0.09709318280220032, | |
| "step": 1120 | |
| }, | |
| { | |
| "acc": 0.948559670781893, | |
| "epoch": 0.48533218291630714, | |
| "grad_norm": 17.75, | |
| "learning_rate": 3.1381920010391566e-06, | |
| "loss": 0.1310401439666748, | |
| "step": 1125 | |
| }, | |
| { | |
| "acc": 0.9658444022770398, | |
| "epoch": 0.48748921484037966, | |
| "grad_norm": 27.875, | |
| "learning_rate": 3.1178810165775532e-06, | |
| "loss": 0.11905833482742309, | |
| "step": 1130 | |
| }, | |
| { | |
| "acc": 0.967680608365019, | |
| "epoch": 0.4896462467644521, | |
| "grad_norm": 14.9375, | |
| "learning_rate": 3.0975646189031345e-06, | |
| "loss": 0.09188791513442993, | |
| "step": 1135 | |
| }, | |
| { | |
| "acc": 0.9701195219123506, | |
| "epoch": 0.4918032786885246, | |
| "grad_norm": 14.3125, | |
| "learning_rate": 3.0772437409649664e-06, | |
| "loss": 0.09571850299835205, | |
| "step": 1140 | |
| }, | |
| { | |
| "acc": 0.9606741573033708, | |
| "epoch": 0.49396031061259704, | |
| "grad_norm": 16.875, | |
| "learning_rate": 3.056919315917851e-06, | |
| "loss": 0.1439572811126709, | |
| "step": 1145 | |
| }, | |
| { | |
| "acc": 0.9613152804642167, | |
| "epoch": 0.49611734253666956, | |
| "grad_norm": 23.625, | |
| "learning_rate": 3.0365922770794798e-06, | |
| "loss": 0.11582168340682983, | |
| "step": 1150 | |
| }, | |
| { | |
| "acc": 0.980276134122288, | |
| "epoch": 0.498274374460742, | |
| "grad_norm": 18.0, | |
| "learning_rate": 3.016263557887571e-06, | |
| "loss": 0.0658139169216156, | |
| "step": 1155 | |
| }, | |
| { | |
| "acc": 0.9581673306772909, | |
| "epoch": 0.5004314063848145, | |
| "grad_norm": 9.1875, | |
| "learning_rate": 2.995934091857007e-06, | |
| "loss": 0.11524436473846436, | |
| "step": 1160 | |
| }, | |
| { | |
| "acc": 0.9642105263157895, | |
| "epoch": 0.502588438308887, | |
| "grad_norm": 28.125, | |
| "learning_rate": 2.975604812536964e-06, | |
| "loss": 0.10523393154144287, | |
| "step": 1165 | |
| }, | |
| { | |
| "acc": 0.9673076923076923, | |
| "epoch": 0.5047454702329595, | |
| "grad_norm": 14.75, | |
| "learning_rate": 2.9552766534680456e-06, | |
| "loss": 0.10329176187515259, | |
| "step": 1170 | |
| }, | |
| { | |
| "acc": 0.9471544715447154, | |
| "epoch": 0.5069025021570319, | |
| "grad_norm": 22.5, | |
| "learning_rate": 2.9349505481394128e-06, | |
| "loss": 0.15873119831085206, | |
| "step": 1175 | |
| }, | |
| { | |
| "acc": 0.9680638722554891, | |
| "epoch": 0.5090595340811044, | |
| "grad_norm": 14.25, | |
| "learning_rate": 2.914627429945915e-06, | |
| "loss": 0.10048485994338989, | |
| "step": 1180 | |
| }, | |
| { | |
| "acc": 0.9631067961165048, | |
| "epoch": 0.5112165660051768, | |
| "grad_norm": 38.25, | |
| "learning_rate": 2.894308232145232e-06, | |
| "loss": 0.10857952833175659, | |
| "step": 1185 | |
| }, | |
| { | |
| "acc": 0.9545454545454546, | |
| "epoch": 0.5133735979292493, | |
| "grad_norm": 23.125, | |
| "learning_rate": 2.8739938878150124e-06, | |
| "loss": 0.11859817504882812, | |
| "step": 1190 | |
| }, | |
| { | |
| "acc": 0.9759036144578314, | |
| "epoch": 0.5155306298533219, | |
| "grad_norm": 14.8125, | |
| "learning_rate": 2.8536853298100302e-06, | |
| "loss": 0.06508604288101197, | |
| "step": 1195 | |
| }, | |
| { | |
| "acc": 0.9620253164556962, | |
| "epoch": 0.5176876617773943, | |
| "grad_norm": 10.6875, | |
| "learning_rate": 2.833383490719347e-06, | |
| "loss": 0.13556112051010133, | |
| "step": 1200 | |
| }, | |
| { | |
| "epoch": 0.5176876617773943, | |
| "eval_acc": 0.964824120603015, | |
| "eval_loss": 0.013948180712759495, | |
| "eval_mrr": 0.9515539617486339, | |
| "eval_ndcg": 0.9634276661273489, | |
| "eval_runtime": 79.2148, | |
| "eval_samples_per_second": 24.642, | |
| "eval_steps_per_second": 12.321, | |
| "step": 1200 | |
| }, | |
| { | |
| "acc": 0.948559670781893, | |
| "epoch": 0.5198446937014668, | |
| "grad_norm": 13.125, | |
| "learning_rate": 2.8130893028234826e-06, | |
| "loss": 0.16991710662841797, | |
| "step": 1205 | |
| }, | |
| { | |
| "acc": 0.9775967413441955, | |
| "epoch": 0.5220017256255393, | |
| "grad_norm": 16.5, | |
| "learning_rate": 2.7928036980516074e-06, | |
| "loss": 0.0850608766078949, | |
| "step": 1210 | |
| }, | |
| { | |
| "acc": 0.9673704414587332, | |
| "epoch": 0.5241587575496117, | |
| "grad_norm": 7.8125, | |
| "learning_rate": 2.7725276079387467e-06, | |
| "loss": 0.10886862277984619, | |
| "step": 1215 | |
| }, | |
| { | |
| "acc": 0.9599198396793587, | |
| "epoch": 0.5263157894736842, | |
| "grad_norm": 15.4375, | |
| "learning_rate": 2.752261963583003e-06, | |
| "loss": 0.10846264362335205, | |
| "step": 1220 | |
| }, | |
| { | |
| "acc": 0.9634888438133874, | |
| "epoch": 0.5284728213977566, | |
| "grad_norm": 44.75, | |
| "learning_rate": 2.7320076956028e-06, | |
| "loss": 0.137969446182251, | |
| "step": 1225 | |
| }, | |
| { | |
| "acc": 0.9713114754098361, | |
| "epoch": 0.5306298533218292, | |
| "grad_norm": 14.4375, | |
| "learning_rate": 2.711765734094143e-06, | |
| "loss": 0.11962342262268066, | |
| "step": 1230 | |
| }, | |
| { | |
| "acc": 0.9619238476953907, | |
| "epoch": 0.5327868852459017, | |
| "grad_norm": 27.75, | |
| "learning_rate": 2.6915370085879188e-06, | |
| "loss": 0.11013026237487793, | |
| "step": 1235 | |
| }, | |
| { | |
| "acc": 0.955193482688391, | |
| "epoch": 0.5349439171699741, | |
| "grad_norm": 34.0, | |
| "learning_rate": 2.6713224480071984e-06, | |
| "loss": 0.142191481590271, | |
| "step": 1240 | |
| }, | |
| { | |
| "acc": 0.9458874458874459, | |
| "epoch": 0.5371009490940466, | |
| "grad_norm": 9.0, | |
| "learning_rate": 2.651122980624588e-06, | |
| "loss": 0.16410914659500123, | |
| "step": 1245 | |
| }, | |
| { | |
| "acc": 0.9612403100775194, | |
| "epoch": 0.5392579810181191, | |
| "grad_norm": 18.875, | |
| "learning_rate": 2.630939534019599e-06, | |
| "loss": 0.11810561418533325, | |
| "step": 1250 | |
| }, | |
| { | |
| "acc": 0.9647058823529412, | |
| "epoch": 0.5414150129421915, | |
| "grad_norm": 24.25, | |
| "learning_rate": 2.6107730350360508e-06, | |
| "loss": 0.09320048093795777, | |
| "step": 1255 | |
| }, | |
| { | |
| "acc": 0.9604743083003953, | |
| "epoch": 0.543572044866264, | |
| "grad_norm": 12.5, | |
| "learning_rate": 2.5906244097395137e-06, | |
| "loss": 0.13027727603912354, | |
| "step": 1260 | |
| }, | |
| { | |
| "acc": 0.9547244094488189, | |
| "epoch": 0.5457290767903364, | |
| "grad_norm": 20.5, | |
| "learning_rate": 2.570494583374779e-06, | |
| "loss": 0.144273042678833, | |
| "step": 1265 | |
| }, | |
| { | |
| "acc": 0.9634615384615385, | |
| "epoch": 0.547886108714409, | |
| "grad_norm": 9.3125, | |
| "learning_rate": 2.5503844803233732e-06, | |
| "loss": 0.11550074815750122, | |
| "step": 1270 | |
| }, | |
| { | |
| "acc": 0.96, | |
| "epoch": 0.5500431406384815, | |
| "grad_norm": 18.875, | |
| "learning_rate": 2.53029502406111e-06, | |
| "loss": 0.13190865516662598, | |
| "step": 1275 | |
| }, | |
| { | |
| "acc": 0.9737903225806451, | |
| "epoch": 0.5522001725625539, | |
| "grad_norm": 11.5, | |
| "learning_rate": 2.510227137115681e-06, | |
| "loss": 0.07091407179832458, | |
| "step": 1280 | |
| }, | |
| { | |
| "acc": 0.952191235059761, | |
| "epoch": 0.5543572044866264, | |
| "grad_norm": 26.5, | |
| "learning_rate": 2.490181741024292e-06, | |
| "loss": 0.14382394552230834, | |
| "step": 1285 | |
| }, | |
| { | |
| "acc": 0.9730848861283644, | |
| "epoch": 0.5565142364106989, | |
| "grad_norm": 49.0, | |
| "learning_rate": 2.4701597562913486e-06, | |
| "loss": 0.09190328121185302, | |
| "step": 1290 | |
| }, | |
| { | |
| "acc": 0.9789674952198852, | |
| "epoch": 0.5586712683347713, | |
| "grad_norm": 9.0, | |
| "learning_rate": 2.4501621023461826e-06, | |
| "loss": 0.07015281319618225, | |
| "step": 1295 | |
| }, | |
| { | |
| "acc": 0.9668615984405458, | |
| "epoch": 0.5608283002588438, | |
| "grad_norm": 35.5, | |
| "learning_rate": 2.4301896975008326e-06, | |
| "loss": 0.14583213329315187, | |
| "step": 1300 | |
| }, | |
| { | |
| "epoch": 0.5608283002588438, | |
| "eval_acc": 0.9630941593211961, | |
| "eval_loss": 0.013961615972220898, | |
| "eval_mrr": 0.9501536885245901, | |
| "eval_ndcg": 0.962423945763765, | |
| "eval_runtime": 79.8407, | |
| "eval_samples_per_second": 24.449, | |
| "eval_steps_per_second": 12.224, | |
| "step": 1300 | |
| }, | |
| { | |
| "acc": 0.9674329501915708, | |
| "epoch": 0.5629853321829164, | |
| "grad_norm": 18.75, | |
| "learning_rate": 2.410243458907872e-06, | |
| "loss": 0.11050969362258911, | |
| "step": 1305 | |
| }, | |
| { | |
| "acc": 0.950381679389313, | |
| "epoch": 0.5651423641069888, | |
| "grad_norm": 18.25, | |
| "learning_rate": 2.390324302518298e-06, | |
| "loss": 0.1483738660812378, | |
| "step": 1310 | |
| }, | |
| { | |
| "acc": 0.9652351738241309, | |
| "epoch": 0.5672993960310613, | |
| "grad_norm": 15.6875, | |
| "learning_rate": 2.370433143039462e-06, | |
| "loss": 0.12723606824874878, | |
| "step": 1315 | |
| }, | |
| { | |
| "acc": 0.9678638941398866, | |
| "epoch": 0.5694564279551337, | |
| "grad_norm": 14.25, | |
| "learning_rate": 2.3505708938930706e-06, | |
| "loss": 0.10463025569915771, | |
| "step": 1320 | |
| }, | |
| { | |
| "acc": 0.9717171717171718, | |
| "epoch": 0.5716134598792062, | |
| "grad_norm": 22.5, | |
| "learning_rate": 2.3307384671732414e-06, | |
| "loss": 0.09857140183448791, | |
| "step": 1325 | |
| }, | |
| { | |
| "acc": 0.9667359667359667, | |
| "epoch": 0.5737704918032787, | |
| "grad_norm": 14.1875, | |
| "learning_rate": 2.310936773604614e-06, | |
| "loss": 0.11535818576812744, | |
| "step": 1330 | |
| }, | |
| { | |
| "acc": 0.9669421487603306, | |
| "epoch": 0.5759275237273511, | |
| "grad_norm": 8.4375, | |
| "learning_rate": 2.291166722500533e-06, | |
| "loss": 0.10772225856781006, | |
| "step": 1335 | |
| }, | |
| { | |
| "acc": 0.9666666666666667, | |
| "epoch": 0.5780845556514237, | |
| "grad_norm": 11.375, | |
| "learning_rate": 2.2714292217212883e-06, | |
| "loss": 0.10940985679626465, | |
| "step": 1340 | |
| }, | |
| { | |
| "acc": 0.9622266401590457, | |
| "epoch": 0.5802415875754962, | |
| "grad_norm": 17.125, | |
| "learning_rate": 2.2517251776324297e-06, | |
| "loss": 0.106083083152771, | |
| "step": 1345 | |
| }, | |
| { | |
| "acc": 0.9638095238095238, | |
| "epoch": 0.5823986194995686, | |
| "grad_norm": 18.375, | |
| "learning_rate": 2.232055495063139e-06, | |
| "loss": 0.1073039174079895, | |
| "step": 1350 | |
| }, | |
| { | |
| "acc": 0.9717741935483871, | |
| "epoch": 0.5845556514236411, | |
| "grad_norm": 15.875, | |
| "learning_rate": 2.212421077264685e-06, | |
| "loss": 0.08703945875167847, | |
| "step": 1355 | |
| }, | |
| { | |
| "acc": 0.9564356435643564, | |
| "epoch": 0.5867126833477135, | |
| "grad_norm": 55.5, | |
| "learning_rate": 2.192822825868944e-06, | |
| "loss": 0.16233315467834472, | |
| "step": 1360 | |
| }, | |
| { | |
| "acc": 0.9764705882352941, | |
| "epoch": 0.588869715271786, | |
| "grad_norm": 20.125, | |
| "learning_rate": 2.1732616408469933e-06, | |
| "loss": 0.08231468200683593, | |
| "step": 1365 | |
| }, | |
| { | |
| "acc": 0.9704724409448819, | |
| "epoch": 0.5910267471958585, | |
| "grad_norm": 16.625, | |
| "learning_rate": 2.1537384204677878e-06, | |
| "loss": 0.10110975503921509, | |
| "step": 1370 | |
| }, | |
| { | |
| "acc": 0.9421157684630739, | |
| "epoch": 0.5931837791199309, | |
| "grad_norm": 20.625, | |
| "learning_rate": 2.1342540612569065e-06, | |
| "loss": 0.1582808256149292, | |
| "step": 1375 | |
| }, | |
| { | |
| "acc": 0.9518304431599229, | |
| "epoch": 0.5953408110440035, | |
| "grad_norm": 24.125, | |
| "learning_rate": 2.1148094579553858e-06, | |
| "loss": 0.13189674615859986, | |
| "step": 1380 | |
| }, | |
| { | |
| "acc": 0.9643564356435643, | |
| "epoch": 0.597497842968076, | |
| "grad_norm": 14.5, | |
| "learning_rate": 2.0954055034786334e-06, | |
| "loss": 0.11206245422363281, | |
| "step": 1385 | |
| }, | |
| { | |
| "acc": 0.9660678642714571, | |
| "epoch": 0.5996548748921484, | |
| "grad_norm": 23.375, | |
| "learning_rate": 2.07604308887542e-06, | |
| "loss": 0.10885739326477051, | |
| "step": 1390 | |
| }, | |
| { | |
| "acc": 0.9610136452241715, | |
| "epoch": 0.6018119068162209, | |
| "grad_norm": 29.125, | |
| "learning_rate": 2.0567231032869656e-06, | |
| "loss": 0.11518199443817138, | |
| "step": 1395 | |
| }, | |
| { | |
| "acc": 0.9661733615221987, | |
| "epoch": 0.6039689387402933, | |
| "grad_norm": 49.0, | |
| "learning_rate": 2.0374464339061075e-06, | |
| "loss": 0.1330866575241089, | |
| "step": 1400 | |
| }, | |
| { | |
| "epoch": 0.6039689387402933, | |
| "eval_acc": 0.9657302907982536, | |
| "eval_loss": 0.013689501211047173, | |
| "eval_mrr": 0.9512752488290397, | |
| "eval_ndcg": 0.9632696768901375, | |
| "eval_runtime": 79.2378, | |
| "eval_samples_per_second": 24.635, | |
| "eval_steps_per_second": 12.317, | |
| "step": 1400 | |
| }, | |
| { | |
| "acc": 0.9659574468085106, | |
| "epoch": 0.6061259706643658, | |
| "grad_norm": 18.0, | |
| "learning_rate": 2.0182139659365603e-06, | |
| "loss": 0.13202260732650756, | |
| "step": 1405 | |
| }, | |
| { | |
| "acc": 0.9670103092783505, | |
| "epoch": 0.6082830025884383, | |
| "grad_norm": 15.1875, | |
| "learning_rate": 1.9990265825522662e-06, | |
| "loss": 0.13020153045654298, | |
| "step": 1410 | |
| }, | |
| { | |
| "acc": 0.9691991786447639, | |
| "epoch": 0.6104400345125108, | |
| "grad_norm": 11.3125, | |
| "learning_rate": 1.9798851648568368e-06, | |
| "loss": 0.10563652515411377, | |
| "step": 1415 | |
| }, | |
| { | |
| "acc": 0.9745098039215686, | |
| "epoch": 0.6125970664365833, | |
| "grad_norm": 16.875, | |
| "learning_rate": 1.960790591843097e-06, | |
| "loss": 0.10446355342864991, | |
| "step": 1420 | |
| }, | |
| { | |
| "acc": 0.967479674796748, | |
| "epoch": 0.6147540983606558, | |
| "grad_norm": 16.125, | |
| "learning_rate": 1.9417437403527154e-06, | |
| "loss": 0.09148574471473694, | |
| "step": 1425 | |
| }, | |
| { | |
| "acc": 0.9619238476953907, | |
| "epoch": 0.6169111302847282, | |
| "grad_norm": 24.25, | |
| "learning_rate": 1.9227454850359425e-06, | |
| "loss": 0.10498310327529907, | |
| "step": 1430 | |
| }, | |
| { | |
| "acc": 0.9658444022770398, | |
| "epoch": 0.6190681622088007, | |
| "grad_norm": 17.375, | |
| "learning_rate": 1.903796698311443e-06, | |
| "loss": 0.09558966159820556, | |
| "step": 1435 | |
| }, | |
| { | |
| "acc": 0.9481327800829875, | |
| "epoch": 0.6212251941328731, | |
| "grad_norm": 19.5, | |
| "learning_rate": 1.8848982503262351e-06, | |
| "loss": 0.16058825254440307, | |
| "step": 1440 | |
| }, | |
| { | |
| "acc": 0.9735349716446124, | |
| "epoch": 0.6233822260569456, | |
| "grad_norm": 21.0, | |
| "learning_rate": 1.8660510089157334e-06, | |
| "loss": 0.10411704778671264, | |
| "step": 1445 | |
| }, | |
| { | |
| "acc": 0.9736842105263158, | |
| "epoch": 0.6255392579810182, | |
| "grad_norm": 77.0, | |
| "learning_rate": 1.8472558395638938e-06, | |
| "loss": 0.09424603581428528, | |
| "step": 1450 | |
| }, | |
| { | |
| "acc": 0.9693251533742331, | |
| "epoch": 0.6276962899050906, | |
| "grad_norm": 9.4375, | |
| "learning_rate": 1.8285136053634757e-06, | |
| "loss": 0.12103959321975707, | |
| "step": 1455 | |
| }, | |
| { | |
| "acc": 0.9722222222222222, | |
| "epoch": 0.6298533218291631, | |
| "grad_norm": 15.625, | |
| "learning_rate": 1.8098251669764012e-06, | |
| "loss": 0.07807242274284362, | |
| "step": 1460 | |
| }, | |
| { | |
| "acc": 0.9619238476953907, | |
| "epoch": 0.6320103537532356, | |
| "grad_norm": 12.5625, | |
| "learning_rate": 1.7911913825942357e-06, | |
| "loss": 0.1451573133468628, | |
| "step": 1465 | |
| }, | |
| { | |
| "acc": 0.9725738396624473, | |
| "epoch": 0.634167385677308, | |
| "grad_norm": 9.0, | |
| "learning_rate": 1.7726131078987784e-06, | |
| "loss": 0.10025861263275146, | |
| "step": 1470 | |
| }, | |
| { | |
| "acc": 0.9704724409448819, | |
| "epoch": 0.6363244176013805, | |
| "grad_norm": 14.0625, | |
| "learning_rate": 1.7540911960227694e-06, | |
| "loss": 0.08995563387870789, | |
| "step": 1475 | |
| }, | |
| { | |
| "acc": 0.9669724770642202, | |
| "epoch": 0.6384814495254529, | |
| "grad_norm": 40.5, | |
| "learning_rate": 1.7356264975107124e-06, | |
| "loss": 0.10488073825836182, | |
| "step": 1480 | |
| }, | |
| { | |
| "acc": 0.9637404580152672, | |
| "epoch": 0.6406384814495254, | |
| "grad_norm": 45.25, | |
| "learning_rate": 1.7172198602798172e-06, | |
| "loss": 0.1231122612953186, | |
| "step": 1485 | |
| }, | |
| { | |
| "acc": 0.9597701149425287, | |
| "epoch": 0.642795513373598, | |
| "grad_norm": 13.4375, | |
| "learning_rate": 1.6988721295810601e-06, | |
| "loss": 0.13030797243118286, | |
| "step": 1490 | |
| }, | |
| { | |
| "acc": 0.9408163265306122, | |
| "epoch": 0.6449525452976704, | |
| "grad_norm": 12.9375, | |
| "learning_rate": 1.680584147960375e-06, | |
| "loss": 0.15982915163040162, | |
| "step": 1495 | |
| }, | |
| { | |
| "acc": 0.958984375, | |
| "epoch": 0.6471095772217429, | |
| "grad_norm": 44.0, | |
| "learning_rate": 1.6623567552199572e-06, | |
| "loss": 0.14942604303359985, | |
| "step": 1500 | |
| }, | |
| { | |
| "epoch": 0.6471095772217429, | |
| "eval_acc": 0.966554081884834, | |
| "eval_loss": 0.013300775550305843, | |
| "eval_mrr": 0.955933474824356, | |
| "eval_ndcg": 0.9667304856555736, | |
| "eval_runtime": 151.3502, | |
| "eval_samples_per_second": 12.897, | |
| "eval_steps_per_second": 6.449, | |
| "step": 1500 | |
| }, | |
| { | |
| "acc": 0.9682242990654205, | |
| "epoch": 0.6492666091458154, | |
| "grad_norm": 21.0, | |
| "learning_rate": 1.644190788379699e-06, | |
| "loss": 0.10443732738494874, | |
| "step": 1505 | |
| }, | |
| { | |
| "acc": 0.9662921348314607, | |
| "epoch": 0.6514236410698878, | |
| "grad_norm": 19.75, | |
| "learning_rate": 1.6260870816387567e-06, | |
| "loss": 0.1014707326889038, | |
| "step": 1510 | |
| }, | |
| { | |
| "acc": 0.9601518026565465, | |
| "epoch": 0.6535806729939603, | |
| "grad_norm": 19.625, | |
| "learning_rate": 1.6080464663372418e-06, | |
| "loss": 0.13788410425186157, | |
| "step": 1515 | |
| }, | |
| { | |
| "acc": 0.9372623574144486, | |
| "epoch": 0.6557377049180327, | |
| "grad_norm": 16.875, | |
| "learning_rate": 1.5900697709180413e-06, | |
| "loss": 0.15338197946548462, | |
| "step": 1520 | |
| }, | |
| { | |
| "acc": 0.9784735812133072, | |
| "epoch": 0.6578947368421053, | |
| "grad_norm": 17.0, | |
| "learning_rate": 1.5721578208887794e-06, | |
| "loss": 0.07349593639373779, | |
| "step": 1525 | |
| }, | |
| { | |
| "acc": 0.9643564356435643, | |
| "epoch": 0.6600517687661778, | |
| "grad_norm": 25.875, | |
| "learning_rate": 1.5543114387839121e-06, | |
| "loss": 0.12870337963104247, | |
| "step": 1530 | |
| }, | |
| { | |
| "acc": 0.9527720739219713, | |
| "epoch": 0.6622088006902502, | |
| "grad_norm": 44.5, | |
| "learning_rate": 1.5365314441269463e-06, | |
| "loss": 0.12610948085784912, | |
| "step": 1535 | |
| }, | |
| { | |
| "acc": 0.9777365491651205, | |
| "epoch": 0.6643658326143227, | |
| "grad_norm": 17.375, | |
| "learning_rate": 1.5188186533928136e-06, | |
| "loss": 0.07375502586364746, | |
| "step": 1540 | |
| }, | |
| { | |
| "acc": 0.9482758620689655, | |
| "epoch": 0.6665228645383952, | |
| "grad_norm": 21.25, | |
| "learning_rate": 1.501173879970375e-06, | |
| "loss": 0.14943064451217652, | |
| "step": 1545 | |
| }, | |
| { | |
| "acc": 0.9661654135338346, | |
| "epoch": 0.6686798964624676, | |
| "grad_norm": 2.265625, | |
| "learning_rate": 1.4835979341250695e-06, | |
| "loss": 0.08926547765731811, | |
| "step": 1550 | |
| }, | |
| { | |
| "acc": 0.9563409563409564, | |
| "epoch": 0.6708369283865401, | |
| "grad_norm": 38.0, | |
| "learning_rate": 1.4660916229617048e-06, | |
| "loss": 0.1205858826637268, | |
| "step": 1555 | |
| }, | |
| { | |
| "acc": 0.9730290456431535, | |
| "epoch": 0.6729939603106126, | |
| "grad_norm": 8.1875, | |
| "learning_rate": 1.448655750387395e-06, | |
| "loss": 0.09265510439872741, | |
| "step": 1560 | |
| }, | |
| { | |
| "acc": 0.969939879759519, | |
| "epoch": 0.6751509922346851, | |
| "grad_norm": 18.375, | |
| "learning_rate": 1.431291117074648e-06, | |
| "loss": 0.08199839591979981, | |
| "step": 1565 | |
| }, | |
| { | |
| "acc": 0.9661654135338346, | |
| "epoch": 0.6773080241587576, | |
| "grad_norm": 12.9375, | |
| "learning_rate": 1.4139985204245914e-06, | |
| "loss": 0.11682524681091308, | |
| "step": 1570 | |
| }, | |
| { | |
| "acc": 0.9781746031746031, | |
| "epoch": 0.67946505608283, | |
| "grad_norm": 5.8125, | |
| "learning_rate": 1.3967787545303558e-06, | |
| "loss": 0.06525716781616211, | |
| "step": 1575 | |
| }, | |
| { | |
| "acc": 0.9621052631578947, | |
| "epoch": 0.6816220880069025, | |
| "grad_norm": 9.1875, | |
| "learning_rate": 1.3796326101406157e-06, | |
| "loss": 0.1133004069328308, | |
| "step": 1580 | |
| }, | |
| { | |
| "acc": 0.977319587628866, | |
| "epoch": 0.683779119930975, | |
| "grad_norm": 21.375, | |
| "learning_rate": 1.362560874623272e-06, | |
| "loss": 0.09791160225868226, | |
| "step": 1585 | |
| }, | |
| { | |
| "acc": 0.9688715953307393, | |
| "epoch": 0.6859361518550474, | |
| "grad_norm": 11.3125, | |
| "learning_rate": 1.3455643319292957e-06, | |
| "loss": 0.09991573691368102, | |
| "step": 1590 | |
| }, | |
| { | |
| "acc": 0.946611909650924, | |
| "epoch": 0.6880931837791199, | |
| "grad_norm": 18.125, | |
| "learning_rate": 1.3286437625567298e-06, | |
| "loss": 0.149417245388031, | |
| "step": 1595 | |
| }, | |
| { | |
| "acc": 0.9684418145956607, | |
| "epoch": 0.6902502157031924, | |
| "grad_norm": 16.25, | |
| "learning_rate": 1.3117999435148463e-06, | |
| "loss": 0.10367587804794312, | |
| "step": 1600 | |
| }, | |
| { | |
| "epoch": 0.6902502157031924, | |
| "eval_acc": 0.9691902133618914, | |
| "eval_loss": 0.012719967402517796, | |
| "eval_mrr": 0.9549814597970335, | |
| "eval_ndcg": 0.9660630214705206, | |
| "eval_runtime": 79.2818, | |
| "eval_samples_per_second": 24.621, | |
| "eval_steps_per_second": 12.311, | |
| "step": 1600 | |
| }, | |
| { | |
| "acc": 0.9466950959488273, | |
| "epoch": 0.6924072476272649, | |
| "grad_norm": 40.0, | |
| "learning_rate": 1.2950336482884685e-06, | |
| "loss": 0.1607167363166809, | |
| "step": 1605 | |
| }, | |
| { | |
| "acc": 0.9825581395348837, | |
| "epoch": 0.6945642795513374, | |
| "grad_norm": 17.25, | |
| "learning_rate": 1.2783456468024478e-06, | |
| "loss": 0.05497826337814331, | |
| "step": 1610 | |
| }, | |
| { | |
| "acc": 0.9757575757575757, | |
| "epoch": 0.6967213114754098, | |
| "grad_norm": 6.1875, | |
| "learning_rate": 1.261736705386309e-06, | |
| "loss": 0.07923665642738342, | |
| "step": 1615 | |
| }, | |
| { | |
| "acc": 0.9713114754098361, | |
| "epoch": 0.6988783433994823, | |
| "grad_norm": 18.625, | |
| "learning_rate": 1.2452075867390614e-06, | |
| "loss": 0.1029202938079834, | |
| "step": 1620 | |
| }, | |
| { | |
| "acc": 0.9633401221995926, | |
| "epoch": 0.7010353753235548, | |
| "grad_norm": 26.25, | |
| "learning_rate": 1.2287590498941737e-06, | |
| "loss": 0.09318680763244629, | |
| "step": 1625 | |
| }, | |
| { | |
| "acc": 0.9791666666666666, | |
| "epoch": 0.7031924072476272, | |
| "grad_norm": 15.125, | |
| "learning_rate": 1.212391850184718e-06, | |
| "loss": 0.06999911665916443, | |
| "step": 1630 | |
| }, | |
| { | |
| "acc": 0.9701789264413518, | |
| "epoch": 0.7053494391716998, | |
| "grad_norm": 5.875, | |
| "learning_rate": 1.1961067392086857e-06, | |
| "loss": 0.0950109601020813, | |
| "step": 1635 | |
| }, | |
| { | |
| "acc": 0.9804305283757339, | |
| "epoch": 0.7075064710957722, | |
| "grad_norm": 16.25, | |
| "learning_rate": 1.1799044647944712e-06, | |
| "loss": 0.07231690287590027, | |
| "step": 1640 | |
| }, | |
| { | |
| "acc": 0.9720558882235529, | |
| "epoch": 0.7096635030198447, | |
| "grad_norm": 7.78125, | |
| "learning_rate": 1.1637857709665332e-06, | |
| "loss": 0.09113675355911255, | |
| "step": 1645 | |
| }, | |
| { | |
| "acc": 0.9636015325670498, | |
| "epoch": 0.7118205349439172, | |
| "grad_norm": 13.5625, | |
| "learning_rate": 1.147751397911227e-06, | |
| "loss": 0.10403518676757813, | |
| "step": 1650 | |
| }, | |
| { | |
| "acc": 0.9708171206225681, | |
| "epoch": 0.7139775668679896, | |
| "grad_norm": 7.53125, | |
| "learning_rate": 1.1318020819428137e-06, | |
| "loss": 0.09691992402076721, | |
| "step": 1655 | |
| }, | |
| { | |
| "acc": 0.9642105263157895, | |
| "epoch": 0.7161345987920621, | |
| "grad_norm": 9.6875, | |
| "learning_rate": 1.1159385554696505e-06, | |
| "loss": 0.10156847238540649, | |
| "step": 1660 | |
| }, | |
| { | |
| "acc": 0.9689922480620154, | |
| "epoch": 0.7182916307161346, | |
| "grad_norm": 17.625, | |
| "learning_rate": 1.1001615469605548e-06, | |
| "loss": 0.11043925285339355, | |
| "step": 1665 | |
| }, | |
| { | |
| "acc": 0.9627450980392157, | |
| "epoch": 0.7204486626402071, | |
| "grad_norm": 19.75, | |
| "learning_rate": 1.0844717809113533e-06, | |
| "loss": 0.10573551654815674, | |
| "step": 1670 | |
| }, | |
| { | |
| "acc": 0.9693251533742331, | |
| "epoch": 0.7226056945642796, | |
| "grad_norm": 9.0, | |
| "learning_rate": 1.0688699778116152e-06, | |
| "loss": 0.10334875583648681, | |
| "step": 1675 | |
| }, | |
| { | |
| "acc": 0.9562624254473161, | |
| "epoch": 0.724762726488352, | |
| "grad_norm": 25.25, | |
| "learning_rate": 1.053356854111562e-06, | |
| "loss": 0.12973486185073851, | |
| "step": 1680 | |
| }, | |
| { | |
| "acc": 0.9851380042462845, | |
| "epoch": 0.7269197584124245, | |
| "grad_norm": 10.75, | |
| "learning_rate": 1.0379331221891693e-06, | |
| "loss": 0.060935062170028684, | |
| "step": 1685 | |
| }, | |
| { | |
| "acc": 0.9683794466403162, | |
| "epoch": 0.729076790336497, | |
| "grad_norm": 19.5, | |
| "learning_rate": 1.0225994903174548e-06, | |
| "loss": 0.10458909273147583, | |
| "step": 1690 | |
| }, | |
| { | |
| "acc": 0.9680638722554891, | |
| "epoch": 0.7312338222605694, | |
| "grad_norm": 11.75, | |
| "learning_rate": 1.0073566626319522e-06, | |
| "loss": 0.10490895509719848, | |
| "step": 1695 | |
| }, | |
| { | |
| "acc": 0.9766536964980544, | |
| "epoch": 0.7333908541846419, | |
| "grad_norm": 14.4375, | |
| "learning_rate": 9.922053390983784e-07, | |
| "loss": 0.07817023992538452, | |
| "step": 1700 | |
| }, | |
| { | |
| "epoch": 0.7333908541846419, | |
| "eval_acc": 0.9688606969272593, | |
| "eval_loss": 0.012777810916304588, | |
| "eval_mrr": 0.9589456479313037, | |
| "eval_ndcg": 0.9690763069755148, | |
| "eval_runtime": 78.9281, | |
| "eval_samples_per_second": 24.731, | |
| "eval_steps_per_second": 12.366, | |
| "step": 1700 | |
| }, | |
| { | |
| "acc": 0.9959514170040485, | |
| "epoch": 0.7355478861087144, | |
| "grad_norm": 8.5625, | |
| "learning_rate": 9.771462154804868e-07, | |
| "loss": 0.03793552815914154, | |
| "step": 1705 | |
| }, | |
| { | |
| "acc": 0.9590643274853801, | |
| "epoch": 0.7377049180327869, | |
| "grad_norm": 12.25, | |
| "learning_rate": 9.621799833081207e-07, | |
| "loss": 0.10620832443237305, | |
| "step": 1710 | |
| }, | |
| { | |
| "acc": 0.9735234215885947, | |
| "epoch": 0.7398619499568594, | |
| "grad_norm": 32.75, | |
| "learning_rate": 9.473073298454589e-07, | |
| "loss": 0.10178903341293336, | |
| "step": 1715 | |
| }, | |
| { | |
| "acc": 0.9735234215885947, | |
| "epoch": 0.7420189818809318, | |
| "grad_norm": 19.875, | |
| "learning_rate": 9.325289380594497e-07, | |
| "loss": 0.10075312852859497, | |
| "step": 1720 | |
| }, | |
| { | |
| "acc": 0.972, | |
| "epoch": 0.7441760138050043, | |
| "grad_norm": 65.0, | |
| "learning_rate": 9.178454865884533e-07, | |
| "loss": 0.13943676948547362, | |
| "step": 1725 | |
| }, | |
| { | |
| "acc": 0.9418837675350702, | |
| "epoch": 0.7463330457290768, | |
| "grad_norm": 19.25, | |
| "learning_rate": 9.03257649711078e-07, | |
| "loss": 0.19055825471878052, | |
| "step": 1730 | |
| }, | |
| { | |
| "acc": 0.9684418145956607, | |
| "epoch": 0.7484900776531492, | |
| "grad_norm": 21.625, | |
| "learning_rate": 8.887660973152132e-07, | |
| "loss": 0.0985870122909546, | |
| "step": 1735 | |
| }, | |
| { | |
| "acc": 0.9464285714285714, | |
| "epoch": 0.7506471095772217, | |
| "grad_norm": 17.125, | |
| "learning_rate": 8.743714948672717e-07, | |
| "loss": 0.1542567014694214, | |
| "step": 1740 | |
| }, | |
| { | |
| "acc": 0.9735772357723578, | |
| "epoch": 0.7528041415012943, | |
| "grad_norm": 4.3125, | |
| "learning_rate": 8.600745033816266e-07, | |
| "loss": 0.08904030919075012, | |
| "step": 1745 | |
| }, | |
| { | |
| "acc": 0.9654471544715447, | |
| "epoch": 0.7549611734253667, | |
| "grad_norm": 19.75, | |
| "learning_rate": 8.458757793902626e-07, | |
| "loss": 0.12530235052108765, | |
| "step": 1750 | |
| }, | |
| { | |
| "acc": 0.9553398058252427, | |
| "epoch": 0.7571182053494392, | |
| "grad_norm": 16.25, | |
| "learning_rate": 8.317759749126208e-07, | |
| "loss": 0.11530464887619019, | |
| "step": 1755 | |
| }, | |
| { | |
| "acc": 0.9684418145956607, | |
| "epoch": 0.7592752372735116, | |
| "grad_norm": 15.375, | |
| "learning_rate": 8.17775737425663e-07, | |
| "loss": 0.09011921286582947, | |
| "step": 1760 | |
| }, | |
| { | |
| "acc": 0.959349593495935, | |
| "epoch": 0.7614322691975841, | |
| "grad_norm": 19.0, | |
| "learning_rate": 8.038757098341323e-07, | |
| "loss": 0.1243793249130249, | |
| "step": 1765 | |
| }, | |
| { | |
| "acc": 0.9701492537313433, | |
| "epoch": 0.7635893011216566, | |
| "grad_norm": 23.75, | |
| "learning_rate": 7.900765304410384e-07, | |
| "loss": 0.10980342626571656, | |
| "step": 1770 | |
| }, | |
| { | |
| "acc": 0.972, | |
| "epoch": 0.765746333045729, | |
| "grad_norm": 12.1875, | |
| "learning_rate": 7.763788329183404e-07, | |
| "loss": 0.11545180082321167, | |
| "step": 1775 | |
| }, | |
| { | |
| "acc": 0.9709864603481625, | |
| "epoch": 0.7679033649698016, | |
| "grad_norm": 54.0, | |
| "learning_rate": 7.627832462778483e-07, | |
| "loss": 0.08927801847457886, | |
| "step": 1780 | |
| }, | |
| { | |
| "acc": 0.9572649572649573, | |
| "epoch": 0.7700603968938741, | |
| "grad_norm": 34.0, | |
| "learning_rate": 7.492903948423435e-07, | |
| "loss": 0.11710215806961059, | |
| "step": 1785 | |
| }, | |
| { | |
| "acc": 0.980276134122288, | |
| "epoch": 0.7722174288179465, | |
| "grad_norm": 17.5, | |
| "learning_rate": 7.359008982169011e-07, | |
| "loss": 0.06560088992118836, | |
| "step": 1790 | |
| }, | |
| { | |
| "acc": 0.9651162790697675, | |
| "epoch": 0.774374460742019, | |
| "grad_norm": 18.75, | |
| "learning_rate": 7.226153712604432e-07, | |
| "loss": 0.11870806217193604, | |
| "step": 1795 | |
| }, | |
| { | |
| "acc": 0.967479674796748, | |
| "epoch": 0.7765314926660914, | |
| "grad_norm": 7.78125, | |
| "learning_rate": 7.094344240575007e-07, | |
| "loss": 0.10688618421554566, | |
| "step": 1800 | |
| }, | |
| { | |
| "epoch": 0.7765314926660914, | |
| "eval_acc": 0.9681192849493369, | |
| "eval_loss": 0.012869248166680336, | |
| "eval_mrr": 0.9557627097970336, | |
| "eval_ndcg": 0.9666167539180269, | |
| "eval_runtime": 104.0332, | |
| "eval_samples_per_second": 18.763, | |
| "eval_steps_per_second": 9.382, | |
| "step": 1800 | |
| }, | |
| { | |
| "acc": 0.9689922480620154, | |
| "epoch": 0.7786885245901639, | |
| "grad_norm": 25.875, | |
| "learning_rate": 6.963586618902001e-07, | |
| "loss": 0.10927298069000244, | |
| "step": 1805 | |
| }, | |
| { | |
| "acc": 0.9309664694280079, | |
| "epoch": 0.7808455565142364, | |
| "grad_norm": 42.25, | |
| "learning_rate": 6.83388685210466e-07, | |
| "loss": 0.19510157108306886, | |
| "step": 1810 | |
| }, | |
| { | |
| "acc": 0.9536290322580645, | |
| "epoch": 0.7830025884383088, | |
| "grad_norm": 20.5, | |
| "learning_rate": 6.705250896124497e-07, | |
| "loss": 0.14289662837982178, | |
| "step": 1815 | |
| }, | |
| { | |
| "acc": 0.9706498951781971, | |
| "epoch": 0.7851596203623814, | |
| "grad_norm": 11.4375, | |
| "learning_rate": 6.577684658051778e-07, | |
| "loss": 0.08139024376869201, | |
| "step": 1820 | |
| }, | |
| { | |
| "acc": 0.9577464788732394, | |
| "epoch": 0.7873166522864539, | |
| "grad_norm": 15.5, | |
| "learning_rate": 6.45119399585426e-07, | |
| "loss": 0.13177180290222168, | |
| "step": 1825 | |
| }, | |
| { | |
| "acc": 0.9609375, | |
| "epoch": 0.7894736842105263, | |
| "grad_norm": 12.9375, | |
| "learning_rate": 6.325784718108197e-07, | |
| "loss": 0.12202094793319702, | |
| "step": 1830 | |
| }, | |
| { | |
| "acc": 0.9866156787762906, | |
| "epoch": 0.7916307161345988, | |
| "grad_norm": 3.3125, | |
| "learning_rate": 6.201462583731599e-07, | |
| "loss": 0.06428595781326293, | |
| "step": 1835 | |
| }, | |
| { | |
| "acc": 0.959915611814346, | |
| "epoch": 0.7937877480586712, | |
| "grad_norm": 42.25, | |
| "learning_rate": 6.078233301719783e-07, | |
| "loss": 0.15991504192352296, | |
| "step": 1840 | |
| }, | |
| { | |
| "acc": 0.9642857142857143, | |
| "epoch": 0.7959447799827437, | |
| "grad_norm": 18.125, | |
| "learning_rate": 5.956102530883194e-07, | |
| "loss": 0.09823997020721435, | |
| "step": 1845 | |
| }, | |
| { | |
| "acc": 0.9693877551020408, | |
| "epoch": 0.7981018119068162, | |
| "grad_norm": 18.875, | |
| "learning_rate": 5.83507587958757e-07, | |
| "loss": 0.09222990274429321, | |
| "step": 1850 | |
| }, | |
| { | |
| "acc": 0.9811320754716981, | |
| "epoch": 0.8002588438308887, | |
| "grad_norm": 14.5, | |
| "learning_rate": 5.715158905496371e-07, | |
| "loss": 0.07574660181999207, | |
| "step": 1855 | |
| }, | |
| { | |
| "acc": 0.9813278008298755, | |
| "epoch": 0.8024158757549612, | |
| "grad_norm": 21.375, | |
| "learning_rate": 5.596357115315609e-07, | |
| "loss": 0.06097576022148132, | |
| "step": 1860 | |
| }, | |
| { | |
| "acc": 0.9708333333333333, | |
| "epoch": 0.8045729076790337, | |
| "grad_norm": 12.875, | |
| "learning_rate": 5.478675964540932e-07, | |
| "loss": 0.09323927164077758, | |
| "step": 1865 | |
| }, | |
| { | |
| "acc": 0.959830866807611, | |
| "epoch": 0.8067299396031061, | |
| "grad_norm": 15.375, | |
| "learning_rate": 5.362120857207107e-07, | |
| "loss": 0.1452409267425537, | |
| "step": 1870 | |
| }, | |
| { | |
| "acc": 0.9698795180722891, | |
| "epoch": 0.8088869715271786, | |
| "grad_norm": 21.375, | |
| "learning_rate": 5.246697145639902e-07, | |
| "loss": 0.09878731966018676, | |
| "step": 1875 | |
| }, | |
| { | |
| "acc": 0.95703125, | |
| "epoch": 0.811044003451251, | |
| "grad_norm": 19.75, | |
| "learning_rate": 5.132410130210248e-07, | |
| "loss": 0.11701784133911133, | |
| "step": 1880 | |
| }, | |
| { | |
| "acc": 0.9629629629629629, | |
| "epoch": 0.8132010353753235, | |
| "grad_norm": 45.0, | |
| "learning_rate": 5.019265059090872e-07, | |
| "loss": 0.10859339237213135, | |
| "step": 1885 | |
| }, | |
| { | |
| "acc": 0.9723865877712031, | |
| "epoch": 0.8153580672993961, | |
| "grad_norm": 11.375, | |
| "learning_rate": 4.907267128015301e-07, | |
| "loss": 0.12401903867721557, | |
| "step": 1890 | |
| }, | |
| { | |
| "acc": 0.9619771863117871, | |
| "epoch": 0.8175150992234685, | |
| "grad_norm": 22.25, | |
| "learning_rate": 4.796421480039241e-07, | |
| "loss": 0.15579386949539184, | |
| "step": 1895 | |
| }, | |
| { | |
| "acc": 0.9611650485436893, | |
| "epoch": 0.819672131147541, | |
| "grad_norm": 36.75, | |
| "learning_rate": 4.686733205304431e-07, | |
| "loss": 0.1097487449645996, | |
| "step": 1900 | |
| }, | |
| { | |
| "epoch": 0.819672131147541, | |
| "eval_acc": 0.9676250102973886, | |
| "eval_loss": 0.013201375491917133, | |
| "eval_mrr": 0.9557108704137391, | |
| "eval_ndcg": 0.966545438225074, | |
| "eval_runtime": 79.4758, | |
| "eval_samples_per_second": 24.561, | |
| "eval_steps_per_second": 12.28, | |
| "step": 1900 | |
| }, | |
| { | |
| "acc": 0.9821073558648111, | |
| "epoch": 0.8218291630716135, | |
| "grad_norm": 10.3125, | |
| "learning_rate": 4.5782073408048806e-07, | |
| "loss": 0.06990676522254943, | |
| "step": 1905 | |
| }, | |
| { | |
| "acc": 0.9524752475247524, | |
| "epoch": 0.8239861949956859, | |
| "grad_norm": 31.25, | |
| "learning_rate": 4.470848870155572e-07, | |
| "loss": 0.13890234231948853, | |
| "step": 1910 | |
| }, | |
| { | |
| "acc": 0.9555984555984556, | |
| "epoch": 0.8261432269197584, | |
| "grad_norm": 18.0, | |
| "learning_rate": 4.3646627233636093e-07, | |
| "loss": 0.14572898149490357, | |
| "step": 1915 | |
| }, | |
| { | |
| "acc": 0.9524714828897338, | |
| "epoch": 0.8283002588438308, | |
| "grad_norm": 37.25, | |
| "learning_rate": 4.259653776601826e-07, | |
| "loss": 0.15419791936874389, | |
| "step": 1920 | |
| }, | |
| { | |
| "acc": 0.9623015873015873, | |
| "epoch": 0.8304572907679033, | |
| "grad_norm": 29.375, | |
| "learning_rate": 4.155826851984866e-07, | |
| "loss": 0.11790182590484619, | |
| "step": 1925 | |
| }, | |
| { | |
| "acc": 0.9747081712062257, | |
| "epoch": 0.8326143226919759, | |
| "grad_norm": 30.625, | |
| "learning_rate": 4.0531867173477464e-07, | |
| "loss": 0.07656689286231995, | |
| "step": 1930 | |
| }, | |
| { | |
| "acc": 0.9800796812749004, | |
| "epoch": 0.8347713546160483, | |
| "grad_norm": 24.875, | |
| "learning_rate": 3.951738086026925e-07, | |
| "loss": 0.08629719018936158, | |
| "step": 1935 | |
| }, | |
| { | |
| "acc": 0.9488752556237219, | |
| "epoch": 0.8369283865401208, | |
| "grad_norm": 15.9375, | |
| "learning_rate": 3.8514856166438363e-07, | |
| "loss": 0.10850644111633301, | |
| "step": 1940 | |
| }, | |
| { | |
| "acc": 0.9452332657200812, | |
| "epoch": 0.8390854184641933, | |
| "grad_norm": 8.375, | |
| "learning_rate": 3.752433912890991e-07, | |
| "loss": 0.2388686418533325, | |
| "step": 1945 | |
| }, | |
| { | |
| "acc": 0.9788461538461538, | |
| "epoch": 0.8412424503882657, | |
| "grad_norm": 11.4375, | |
| "learning_rate": 3.654587523320528e-07, | |
| "loss": 0.07610541582107544, | |
| "step": 1950 | |
| }, | |
| { | |
| "acc": 0.9603174603174603, | |
| "epoch": 0.8433994823123382, | |
| "grad_norm": 11.625, | |
| "learning_rate": 3.557950941135394e-07, | |
| "loss": 0.13932143449783324, | |
| "step": 1955 | |
| }, | |
| { | |
| "acc": 0.9722772277227723, | |
| "epoch": 0.8455565142364107, | |
| "grad_norm": 14.875, | |
| "learning_rate": 3.462528603982974e-07, | |
| "loss": 0.07739153504371643, | |
| "step": 1960 | |
| }, | |
| { | |
| "acc": 0.9672131147540983, | |
| "epoch": 0.8477135461604832, | |
| "grad_norm": 19.0, | |
| "learning_rate": 3.3683248937513147e-07, | |
| "loss": 0.10050212144851685, | |
| "step": 1965 | |
| }, | |
| { | |
| "acc": 0.9665271966527197, | |
| "epoch": 0.8498705780845557, | |
| "grad_norm": 20.0, | |
| "learning_rate": 3.275344136367935e-07, | |
| "loss": 0.11014137268066407, | |
| "step": 1970 | |
| }, | |
| { | |
| "acc": 0.9486652977412731, | |
| "epoch": 0.8520276100086281, | |
| "grad_norm": 14.125, | |
| "learning_rate": 3.183590601601121e-07, | |
| "loss": 0.12993695735931396, | |
| "step": 1975 | |
| }, | |
| { | |
| "acc": 0.9609053497942387, | |
| "epoch": 0.8541846419327006, | |
| "grad_norm": 10.875, | |
| "learning_rate": 3.0930685028638974e-07, | |
| "loss": 0.12024986743927002, | |
| "step": 1980 | |
| }, | |
| { | |
| "acc": 0.9685534591194969, | |
| "epoch": 0.8563416738567731, | |
| "grad_norm": 34.75, | |
| "learning_rate": 3.003781997020524e-07, | |
| "loss": 0.12062886953353882, | |
| "step": 1985 | |
| }, | |
| { | |
| "acc": 0.9519038076152304, | |
| "epoch": 0.8584987057808455, | |
| "grad_norm": 31.25, | |
| "learning_rate": 2.9157351841956137e-07, | |
| "loss": 0.1291287899017334, | |
| "step": 1990 | |
| }, | |
| { | |
| "acc": 0.964, | |
| "epoch": 0.860655737704918, | |
| "grad_norm": 43.75, | |
| "learning_rate": 2.828932107585858e-07, | |
| "loss": 0.09483218193054199, | |
| "step": 1995 | |
| }, | |
| { | |
| "acc": 0.974, | |
| "epoch": 0.8628127696289906, | |
| "grad_norm": 27.125, | |
| "learning_rate": 2.743376753274339e-07, | |
| "loss": 0.10480066537857055, | |
| "step": 2000 | |
| }, | |
| { | |
| "epoch": 0.8628127696289906, | |
| "eval_acc": 0.9682016640579949, | |
| "eval_loss": 0.012947115115821362, | |
| "eval_mrr": 0.9556596409055426, | |
| "eval_ndcg": 0.9665805610309751, | |
| "eval_runtime": 79.6091, | |
| "eval_samples_per_second": 24.52, | |
| "eval_steps_per_second": 12.26, | |
| "step": 2000 | |
| }, | |
| { | |
| "acc": 0.9676113360323887, | |
| "epoch": 0.864969801553063, | |
| "grad_norm": 37.75, | |
| "learning_rate": 2.659073050047511e-07, | |
| "loss": 0.10173193216323853, | |
| "step": 2005 | |
| }, | |
| { | |
| "acc": 0.9330628803245437, | |
| "epoch": 0.8671268334771355, | |
| "grad_norm": 43.5, | |
| "learning_rate": 2.576024869214767e-07, | |
| "loss": 0.18904460668563844, | |
| "step": 2010 | |
| }, | |
| { | |
| "acc": 0.9647058823529412, | |
| "epoch": 0.869283865401208, | |
| "grad_norm": 25.75, | |
| "learning_rate": 2.4942360244306616e-07, | |
| "loss": 0.10903534889221192, | |
| "step": 2015 | |
| }, | |
| { | |
| "acc": 0.950920245398773, | |
| "epoch": 0.8714408973252804, | |
| "grad_norm": 15.5625, | |
| "learning_rate": 2.41371027151981e-07, | |
| "loss": 0.1522771120071411, | |
| "step": 2020 | |
| }, | |
| { | |
| "acc": 0.9818913480885312, | |
| "epoch": 0.8735979292493529, | |
| "grad_norm": 13.1875, | |
| "learning_rate": 2.3344513083043905e-07, | |
| "loss": 0.04949050545692444, | |
| "step": 2025 | |
| }, | |
| { | |
| "acc": 0.984251968503937, | |
| "epoch": 0.8757549611734253, | |
| "grad_norm": 9.5, | |
| "learning_rate": 2.2564627744343414e-07, | |
| "loss": 0.08176502585411072, | |
| "step": 2030 | |
| }, | |
| { | |
| "acc": 0.9659318637274549, | |
| "epoch": 0.8779119930974978, | |
| "grad_norm": 29.0, | |
| "learning_rate": 2.1797482512202438e-07, | |
| "loss": 0.10490905046463013, | |
| "step": 2035 | |
| }, | |
| { | |
| "acc": 0.9746588693957114, | |
| "epoch": 0.8800690250215704, | |
| "grad_norm": 17.875, | |
| "learning_rate": 2.1043112614688276e-07, | |
| "loss": 0.09062132239341736, | |
| "step": 2040 | |
| }, | |
| { | |
| "acc": 0.9331941544885177, | |
| "epoch": 0.8822260569456428, | |
| "grad_norm": 57.0, | |
| "learning_rate": 2.0301552693212566e-07, | |
| "loss": 0.14883551597595215, | |
| "step": 2045 | |
| }, | |
| { | |
| "acc": 0.9767441860465116, | |
| "epoch": 0.8843830888697153, | |
| "grad_norm": 15.375, | |
| "learning_rate": 1.9572836800939897e-07, | |
| "loss": 0.08334760069847107, | |
| "step": 2050 | |
| }, | |
| { | |
| "acc": 0.9729166666666667, | |
| "epoch": 0.8865401207937877, | |
| "grad_norm": 8.1875, | |
| "learning_rate": 1.885699840122439e-07, | |
| "loss": 0.08837355971336365, | |
| "step": 2055 | |
| }, | |
| { | |
| "acc": 0.9670103092783505, | |
| "epoch": 0.8886971527178602, | |
| "grad_norm": 15.0625, | |
| "learning_rate": 1.8154070366073094e-07, | |
| "loss": 0.10173630714416504, | |
| "step": 2060 | |
| }, | |
| { | |
| "acc": 0.9784735812133072, | |
| "epoch": 0.8908541846419327, | |
| "grad_norm": 14.5625, | |
| "learning_rate": 1.746408497463623e-07, | |
| "loss": 0.0801218330860138, | |
| "step": 2065 | |
| }, | |
| { | |
| "acc": 0.9614604462474645, | |
| "epoch": 0.8930112165660051, | |
| "grad_norm": 23.25, | |
| "learning_rate": 1.6787073911725103e-07, | |
| "loss": 0.11090768575668335, | |
| "step": 2070 | |
| }, | |
| { | |
| "acc": 0.9774127310061602, | |
| "epoch": 0.8951682484900777, | |
| "grad_norm": 16.25, | |
| "learning_rate": 1.6123068266356977e-07, | |
| "loss": 0.0828168511390686, | |
| "step": 2075 | |
| }, | |
| { | |
| "acc": 0.9747081712062257, | |
| "epoch": 0.8973252804141502, | |
| "grad_norm": 9.75, | |
| "learning_rate": 1.5472098530327506e-07, | |
| "loss": 0.07558534741401672, | |
| "step": 2080 | |
| }, | |
| { | |
| "acc": 0.9762376237623762, | |
| "epoch": 0.8994823123382226, | |
| "grad_norm": 25.375, | |
| "learning_rate": 1.4834194596810525e-07, | |
| "loss": 0.1000247836112976, | |
| "step": 2085 | |
| }, | |
| { | |
| "acc": 0.9703557312252964, | |
| "epoch": 0.9016393442622951, | |
| "grad_norm": 30.5, | |
| "learning_rate": 1.420938575898525e-07, | |
| "loss": 0.09505946040153504, | |
| "step": 2090 | |
| }, | |
| { | |
| "acc": 0.975, | |
| "epoch": 0.9037963761863675, | |
| "grad_norm": 25.875, | |
| "learning_rate": 1.3597700708691185e-07, | |
| "loss": 0.06200051307678223, | |
| "step": 2095 | |
| }, | |
| { | |
| "acc": 0.9657258064516129, | |
| "epoch": 0.90595340811044, | |
| "grad_norm": 23.625, | |
| "learning_rate": 1.299916753511058e-07, | |
| "loss": 0.11226943731307984, | |
| "step": 2100 | |
| }, | |
| { | |
| "epoch": 0.90595340811044, | |
| "eval_acc": 0.9686959387099432, | |
| "eval_loss": 0.013119321316480637, | |
| "eval_mrr": 0.9532567330210773, | |
| "eval_ndcg": 0.9646967453417445, | |
| "eval_runtime": 80.4978, | |
| "eval_samples_per_second": 24.249, | |
| "eval_steps_per_second": 12.125, | |
| "step": 2100 | |
| }, | |
| { | |
| "acc": 0.9623762376237623, | |
| "epoch": 0.9081104400345125, | |
| "grad_norm": 25.25, | |
| "learning_rate": 1.2413813723478384e-07, | |
| "loss": 0.1284480571746826, | |
| "step": 2105 | |
| }, | |
| { | |
| "acc": 0.974155069582505, | |
| "epoch": 0.910267471958585, | |
| "grad_norm": 20.375, | |
| "learning_rate": 1.1841666153820308e-07, | |
| "loss": 0.10379769802093505, | |
| "step": 2110 | |
| }, | |
| { | |
| "acc": 0.9459459459459459, | |
| "epoch": 0.9124245038826575, | |
| "grad_norm": 44.0, | |
| "learning_rate": 1.1282751099718425e-07, | |
| "loss": 0.11733911037445069, | |
| "step": 2115 | |
| }, | |
| { | |
| "acc": 0.9717171717171718, | |
| "epoch": 0.91458153580673, | |
| "grad_norm": 26.125, | |
| "learning_rate": 1.0737094227104516e-07, | |
| "loss": 0.09395996928215027, | |
| "step": 2120 | |
| }, | |
| { | |
| "acc": 0.9621513944223108, | |
| "epoch": 0.9167385677308024, | |
| "grad_norm": 8.75, | |
| "learning_rate": 1.0204720593081606e-07, | |
| "loss": 0.12494601011276245, | |
| "step": 2125 | |
| }, | |
| { | |
| "acc": 0.9595375722543352, | |
| "epoch": 0.9188955996548749, | |
| "grad_norm": 11.25, | |
| "learning_rate": 9.685654644773256e-08, | |
| "loss": 0.11165077686309814, | |
| "step": 2130 | |
| }, | |
| { | |
| "acc": 0.96875, | |
| "epoch": 0.9210526315789473, | |
| "grad_norm": 22.375, | |
| "learning_rate": 9.179920218200888e-08, | |
| "loss": 0.09169023633003234, | |
| "step": 2135 | |
| }, | |
| { | |
| "acc": 0.9607843137254902, | |
| "epoch": 0.9232096635030198, | |
| "grad_norm": 12.875, | |
| "learning_rate": 8.687540537189397e-08, | |
| "loss": 0.09511052966117858, | |
| "step": 2140 | |
| }, | |
| { | |
| "acc": 0.9585798816568047, | |
| "epoch": 0.9253666954270923, | |
| "grad_norm": 12.0, | |
| "learning_rate": 8.208538212300442e-08, | |
| "loss": 0.13230088949203492, | |
| "step": 2145 | |
| }, | |
| { | |
| "acc": 0.9676767676767677, | |
| "epoch": 0.9275237273511648, | |
| "grad_norm": 21.5, | |
| "learning_rate": 7.742935239794335e-08, | |
| "loss": 0.09337798357009888, | |
| "step": 2150 | |
| }, | |
| { | |
| "acc": 0.9746588693957114, | |
| "epoch": 0.9296807592752373, | |
| "grad_norm": 22.875, | |
| "learning_rate": 7.290753000619854e-08, | |
| "loss": 0.08780012726783752, | |
| "step": 2155 | |
| }, | |
| { | |
| "acc": 0.9766081871345029, | |
| "epoch": 0.9318377911993098, | |
| "grad_norm": 9.75, | |
| "learning_rate": 6.852012259432461e-08, | |
| "loss": 0.08427742719650269, | |
| "step": 2160 | |
| }, | |
| { | |
| "acc": 0.9631067961165048, | |
| "epoch": 0.9339948231233822, | |
| "grad_norm": 19.375, | |
| "learning_rate": 6.426733163640697e-08, | |
| "loss": 0.11798138618469238, | |
| "step": 2165 | |
| }, | |
| { | |
| "acc": 0.9481037924151696, | |
| "epoch": 0.9361518550474547, | |
| "grad_norm": 23.875, | |
| "learning_rate": 6.014935242481057e-08, | |
| "loss": 0.15075309276580812, | |
| "step": 2170 | |
| }, | |
| { | |
| "acc": 0.9627450980392157, | |
| "epoch": 0.9383088869715271, | |
| "grad_norm": 46.5, | |
| "learning_rate": 5.6166374061211724e-08, | |
| "loss": 0.11194332838058471, | |
| "step": 2175 | |
| }, | |
| { | |
| "acc": 0.9747368421052631, | |
| "epoch": 0.9404659188955996, | |
| "grad_norm": 21.375, | |
| "learning_rate": 5.2318579447914406e-08, | |
| "loss": 0.10739229917526245, | |
| "step": 2180 | |
| }, | |
| { | |
| "acc": 0.9782178217821782, | |
| "epoch": 0.9426229508196722, | |
| "grad_norm": 8.9375, | |
| "learning_rate": 4.8606145279450955e-08, | |
| "loss": 0.07264227271080018, | |
| "step": 2185 | |
| }, | |
| { | |
| "acc": 0.9506172839506173, | |
| "epoch": 0.9447799827437446, | |
| "grad_norm": 12.125, | |
| "learning_rate": 4.50292420344689e-08, | |
| "loss": 0.13095579147338868, | |
| "step": 2190 | |
| }, | |
| { | |
| "acc": 0.9532520325203252, | |
| "epoch": 0.9469370146678171, | |
| "grad_norm": 17.125, | |
| "learning_rate": 4.158803396790056e-08, | |
| "loss": 0.12362562417984009, | |
| "step": 2195 | |
| }, | |
| { | |
| "acc": 0.9645669291338582, | |
| "epoch": 0.9490940465918896, | |
| "grad_norm": 12.0, | |
| "learning_rate": 3.8282679103422756e-08, | |
| "loss": 0.11409047842025757, | |
| "step": 2200 | |
| }, | |
| { | |
| "epoch": 0.9490940465918896, | |
| "eval_acc": 0.9687783178186012, | |
| "eval_loss": 0.01316050160676241, | |
| "eval_mrr": 0.9534153005464481, | |
| "eval_ndcg": 0.9648771549963384, | |
| "eval_runtime": 79.4962, | |
| "eval_samples_per_second": 24.555, | |
| "eval_steps_per_second": 12.277, | |
| "step": 2200 | |
| }, | |
| { | |
| "acc": 0.9576271186440678, | |
| "epoch": 0.951251078515962, | |
| "grad_norm": 11.8125, | |
| "learning_rate": 3.5113329226198256e-08, | |
| "loss": 0.15816335678100585, | |
| "step": 2205 | |
| }, | |
| { | |
| "acc": 0.970954356846473, | |
| "epoch": 0.9534081104400345, | |
| "grad_norm": 14.0, | |
| "learning_rate": 3.20801298759067e-08, | |
| "loss": 0.09749370217323303, | |
| "step": 2210 | |
| }, | |
| { | |
| "acc": 0.9624505928853755, | |
| "epoch": 0.955565142364107, | |
| "grad_norm": 8.75, | |
| "learning_rate": 2.9183220340060936e-08, | |
| "loss": 0.12024798393249511, | |
| "step": 2215 | |
| }, | |
| { | |
| "acc": 0.9827213822894169, | |
| "epoch": 0.9577221742881795, | |
| "grad_norm": 8.4375, | |
| "learning_rate": 2.642273364760983e-08, | |
| "loss": 0.07396645545959472, | |
| "step": 2220 | |
| }, | |
| { | |
| "acc": 0.9569672131147541, | |
| "epoch": 0.959879206212252, | |
| "grad_norm": 35.75, | |
| "learning_rate": 2.3798796562832126e-08, | |
| "loss": 0.12084379196166992, | |
| "step": 2225 | |
| }, | |
| { | |
| "acc": 0.9570552147239264, | |
| "epoch": 0.9620362381363244, | |
| "grad_norm": 26.875, | |
| "learning_rate": 2.1311529579512766e-08, | |
| "loss": 0.1314695119857788, | |
| "step": 2230 | |
| }, | |
| { | |
| "acc": 0.9333333333333333, | |
| "epoch": 0.9641932700603969, | |
| "grad_norm": 25.75, | |
| "learning_rate": 1.8961046915409674e-08, | |
| "loss": 0.17206907272338867, | |
| "step": 2235 | |
| }, | |
| { | |
| "acc": 0.955193482688391, | |
| "epoch": 0.9663503019844694, | |
| "grad_norm": 12.625, | |
| "learning_rate": 1.674745650701126e-08, | |
| "loss": 0.14425760507583618, | |
| "step": 2240 | |
| }, | |
| { | |
| "acc": 0.9656488549618321, | |
| "epoch": 0.9685073339085418, | |
| "grad_norm": 18.125, | |
| "learning_rate": 1.4670860004576735e-08, | |
| "loss": 0.10901893377304077, | |
| "step": 2245 | |
| }, | |
| { | |
| "acc": 0.9776422764227642, | |
| "epoch": 0.9706643658326143, | |
| "grad_norm": 19.125, | |
| "learning_rate": 1.2731352767470838e-08, | |
| "loss": 0.09050199985504151, | |
| "step": 2250 | |
| }, | |
| { | |
| "acc": 0.9449715370018975, | |
| "epoch": 0.9728213977566867, | |
| "grad_norm": 25.625, | |
| "learning_rate": 1.0929023859783005e-08, | |
| "loss": 0.15088040828704835, | |
| "step": 2255 | |
| }, | |
| { | |
| "acc": 0.9625984251968503, | |
| "epoch": 0.9749784296807593, | |
| "grad_norm": 23.25, | |
| "learning_rate": 9.26395604623831e-09, | |
| "loss": 0.09685000777244568, | |
| "step": 2260 | |
| }, | |
| { | |
| "acc": 0.9695817490494296, | |
| "epoch": 0.9771354616048318, | |
| "grad_norm": 14.4375, | |
| "learning_rate": 7.736225788396833e-09, | |
| "loss": 0.09270592331886292, | |
| "step": 2265 | |
| }, | |
| { | |
| "acc": 0.9692622950819673, | |
| "epoch": 0.9792924935289042, | |
| "grad_norm": 8.75, | |
| "learning_rate": 6.345903241142481e-09, | |
| "loss": 0.09486258029937744, | |
| "step": 2270 | |
| }, | |
| { | |
| "acc": 0.9613034623217923, | |
| "epoch": 0.9814495254529767, | |
| "grad_norm": 7.375, | |
| "learning_rate": 5.0930522494612165e-09, | |
| "loss": 0.12018944025039673, | |
| "step": 2275 | |
| }, | |
| { | |
| "acc": 0.9679358717434869, | |
| "epoch": 0.9836065573770492, | |
| "grad_norm": 41.25, | |
| "learning_rate": 3.977730345508413e-09, | |
| "loss": 0.11004929542541504, | |
| "step": 2280 | |
| }, | |
| { | |
| "acc": 0.9537223340040242, | |
| "epoch": 0.9857635893011216, | |
| "grad_norm": 14.1875, | |
| "learning_rate": 2.9999887459692954e-09, | |
| "loss": 0.1255163073539734, | |
| "step": 2285 | |
| }, | |
| { | |
| "acc": 0.9727095516569201, | |
| "epoch": 0.9879206212251941, | |
| "grad_norm": 29.25, | |
| "learning_rate": 2.1598723497041616e-09, | |
| "loss": 0.09740299582481385, | |
| "step": 2290 | |
| }, | |
| { | |
| "acc": 0.978, | |
| "epoch": 0.9900776531492667, | |
| "grad_norm": 17.875, | |
| "learning_rate": 1.457419735688359e-09, | |
| "loss": 0.08217963576316833, | |
| "step": 2295 | |
| }, | |
| { | |
| "acc": 0.9664031620553359, | |
| "epoch": 0.9922346850733391, | |
| "grad_norm": 21.75, | |
| "learning_rate": 8.926631612410364e-10, | |
| "loss": 0.106975257396698, | |
| "step": 2300 | |
| }, | |
| { | |
| "epoch": 0.9922346850733391, | |
| "eval_acc": 0.9682016640579949, | |
| "eval_loss": 0.012847819365561008, | |
| "eval_mrr": 0.9552498048399688, | |
| "eval_ndcg": 0.9662674769692897, | |
| "eval_runtime": 79.8344, | |
| "eval_samples_per_second": 24.451, | |
| "eval_steps_per_second": 12.225, | |
| "step": 2300 | |
| }, | |
| { | |
| "acc": 0.944558521560575, | |
| "epoch": 0.9943917169974116, | |
| "grad_norm": 23.5, | |
| "learning_rate": 4.6562856054166436e-10, | |
| "loss": 0.17086119651794435, | |
| "step": 2305 | |
| }, | |
| { | |
| "acc": 0.9473684210526315, | |
| "epoch": 0.996548748921484, | |
| "grad_norm": 22.25, | |
| "learning_rate": 1.763355434416525e-10, | |
| "loss": 0.13650988340377807, | |
| "step": 2310 | |
| }, | |
| { | |
| "acc": 0.9756592292089249, | |
| "epoch": 0.9987057808455565, | |
| "grad_norm": 6.40625, | |
| "learning_rate": 2.479739456207053e-11, | |
| "loss": 0.08215545415878296, | |
| "step": 2315 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "eval_acc": 0.9688606969272593, | |
| "eval_loss": 0.0128474785014987, | |
| "eval_mrr": 0.954596018735363, | |
| "eval_ndcg": 0.9658596079307621, | |
| "eval_runtime": 78.5065, | |
| "eval_samples_per_second": 24.864, | |
| "eval_steps_per_second": 12.432, | |
| "step": 2318 | |
| } | |
| ], | |
| "logging_steps": 5, | |
| "max_steps": 2318, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 1, | |
| "save_steps": 400, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": true | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 6.864146106092093e+17, | |
| "train_batch_size": 2, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |