{
  "best_metric": 0.846644903673702,
  "best_model_checkpoint": "result/my-unsup-simcse-bert-large-uncased-d0.2-rs192-std0.1-t0.05",
  "epoch": 1.0,
  "global_step": 15626,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.01,
      "eval_avg_sts": 0.6284471674961587,
      "eval_sickr_spearman": 0.6147007873963284,
      "eval_stsb_spearman": 0.642193547595989,
      "step": 125
    },
    {
      "epoch": 0.02,
      "eval_avg_sts": 0.6752562982834547,
      "eval_sickr_spearman": 0.6594757163746471,
      "eval_stsb_spearman": 0.6910368801922622,
      "step": 250
    },
    {
      "epoch": 0.02,
      "eval_avg_sts": 0.6885266805855659,
      "eval_sickr_spearman": 0.6739856239324923,
      "eval_stsb_spearman": 0.7030677372386395,
      "step": 375
    },
    {
      "epoch": 0.03,
      "learning_rate": 9.680020478689364e-06,
      "loss": 0.0393,
      "step": 500
    },
    {
      "epoch": 0.03,
      "eval_avg_sts": 0.7136869238086576,
      "eval_sickr_spearman": 0.6992179945753817,
      "eval_stsb_spearman": 0.7281558530419336,
      "step": 500
    },
    {
      "epoch": 0.04,
      "eval_avg_sts": 0.7414985057899455,
      "eval_sickr_spearman": 0.7279799307074509,
      "eval_stsb_spearman": 0.7550170808724401,
      "step": 625
    },
    {
      "epoch": 0.05,
      "eval_avg_sts": 0.7607871015229407,
      "eval_sickr_spearman": 0.7483565010882515,
      "eval_stsb_spearman": 0.77321770195763,
      "step": 750
    },
    {
      "epoch": 0.06,
      "eval_avg_sts": 0.7720875460016747,
      "eval_sickr_spearman": 0.755885952611893,
      "eval_stsb_spearman": 0.7882891393914564,
      "step": 875
    },
    {
      "epoch": 0.06,
      "learning_rate": 9.36004095737873e-06,
      "loss": 0.0016,
      "step": 1000
    },
    {
      "epoch": 0.06,
      "eval_avg_sts": 0.7690732051021041,
      "eval_sickr_spearman": 0.7558774030758371,
      "eval_stsb_spearman": 0.7822690071283711,
      "step": 1000
    },
    {
      "epoch": 0.07,
      "eval_avg_sts": 0.7706230123500749,
      "eval_sickr_spearman": 0.7564492133384577,
      "eval_stsb_spearman": 0.7847968113616921,
      "step": 1125
    },
    {
      "epoch": 0.08,
      "eval_avg_sts": 0.7759234749063804,
      "eval_sickr_spearman": 0.7603653331320113,
      "eval_stsb_spearman": 0.7914816166807493,
      "step": 1250
    },
    {
      "epoch": 0.09,
      "eval_avg_sts": 0.7778780797756797,
      "eval_sickr_spearman": 0.7626101627199244,
      "eval_stsb_spearman": 0.7931459968314349,
      "step": 1375
    },
    {
      "epoch": 0.1,
      "learning_rate": 9.040061436068092e-06,
      "loss": 0.0008,
      "step": 1500
    },
    {
      "epoch": 0.1,
      "eval_avg_sts": 0.7864881429536921,
      "eval_sickr_spearman": 0.7687771640201724,
      "eval_stsb_spearman": 0.8041991218872119,
      "step": 1500
    },
    {
      "epoch": 0.1,
      "eval_avg_sts": 0.7853051845863528,
      "eval_sickr_spearman": 0.7664657152945645,
      "eval_stsb_spearman": 0.8041446538781412,
      "step": 1625
    },
    {
      "epoch": 0.11,
      "eval_avg_sts": 0.7794344010748193,
      "eval_sickr_spearman": 0.7616587626626382,
      "eval_stsb_spearman": 0.7972100394870003,
      "step": 1750
    },
    {
      "epoch": 0.12,
      "eval_avg_sts": 0.7788742107713875,
      "eval_sickr_spearman": 0.759037225146146,
      "eval_stsb_spearman": 0.798711196396629,
      "step": 1875
    },
    {
      "epoch": 0.13,
      "learning_rate": 8.720081914757458e-06,
      "loss": 0.0006,
      "step": 2000
    },
    {
      "epoch": 0.13,
      "eval_avg_sts": 0.7770697347799143,
      "eval_sickr_spearman": 0.7594302636492141,
      "eval_stsb_spearman": 0.7947092059106146,
      "step": 2000
    },
    {
      "epoch": 0.14,
      "eval_avg_sts": 0.782438364238089,
      "eval_sickr_spearman": 0.7615730751776726,
      "eval_stsb_spearman": 0.8033036532985053,
      "step": 2125
    },
    {
      "epoch": 0.14,
      "eval_avg_sts": 0.7915329396525467,
      "eval_sickr_spearman": 0.767920913574835,
      "eval_stsb_spearman": 0.8151449657302583,
      "step": 2250
    },
    {
      "epoch": 0.15,
      "eval_avg_sts": 0.7871408433644984,
      "eval_sickr_spearman": 0.7656207521580654,
      "eval_stsb_spearman": 0.8086609345709314,
      "step": 2375
    },
    {
      "epoch": 0.16,
      "learning_rate": 8.400102393446819e-06,
      "loss": 0.0004,
      "step": 2500
    },
    {
      "epoch": 0.16,
      "eval_avg_sts": 0.7961056571455418,
      "eval_sickr_spearman": 0.7728497691422113,
      "eval_stsb_spearman": 0.8193615451488724,
      "step": 2500
    },
    {
      "epoch": 0.17,
      "eval_avg_sts": 0.797889284518303,
      "eval_sickr_spearman": 0.7737841181584869,
      "eval_stsb_spearman": 0.821994450878119,
      "step": 2625
    },
    {
      "epoch": 0.18,
      "eval_avg_sts": 0.8076834297763748,
      "eval_sickr_spearman": 0.7795960735569043,
      "eval_stsb_spearman": 0.8357707859958452,
      "step": 2750
    },
    {
      "epoch": 0.18,
      "eval_avg_sts": 0.8094598315742048,
      "eval_sickr_spearman": 0.7826569995893524,
      "eval_stsb_spearman": 0.8362626635590572,
      "step": 2875
    },
    {
      "epoch": 0.19,
      "learning_rate": 8.080122872136184e-06,
      "loss": 0.0004,
      "step": 3000
    },
    {
      "epoch": 0.19,
      "eval_avg_sts": 0.8073096095705694,
      "eval_sickr_spearman": 0.7816718336677553,
      "eval_stsb_spearman": 0.8329473854733834,
      "step": 3000
    },
    {
      "epoch": 0.2,
      "eval_avg_sts": 0.8011518883834527,
      "eval_sickr_spearman": 0.7773814075006944,
      "eval_stsb_spearman": 0.824922369266211,
      "step": 3125
    },
    {
      "epoch": 0.21,
      "eval_avg_sts": 0.8042032043961311,
      "eval_sickr_spearman": 0.7785283902030365,
      "eval_stsb_spearman": 0.8298780185892256,
      "step": 3250
    },
    {
      "epoch": 0.22,
      "eval_avg_sts": 0.7998679494915892,
      "eval_sickr_spearman": 0.7764289507591766,
      "eval_stsb_spearman": 0.8233069482240019,
      "step": 3375
    },
    {
      "epoch": 0.22,
      "learning_rate": 7.760143350825547e-06,
      "loss": 0.0007,
      "step": 3500
    },
    {
      "epoch": 0.22,
      "eval_avg_sts": 0.8063843789314207,
      "eval_sickr_spearman": 0.7847530768561116,
      "eval_stsb_spearman": 0.8280156810067297,
      "step": 3500
    },
    {
      "epoch": 0.23,
      "eval_avg_sts": 0.8012493451189945,
      "eval_sickr_spearman": 0.780463995559891,
      "eval_stsb_spearman": 0.822034694678098,
      "step": 3625
    },
    {
      "epoch": 0.24,
      "eval_avg_sts": 0.7878526976938728,
      "eval_sickr_spearman": 0.7717850156855313,
      "eval_stsb_spearman": 0.8039203797022143,
      "step": 3750
    },
    {
      "epoch": 0.25,
      "eval_avg_sts": 0.789256789304419,
      "eval_sickr_spearman": 0.7713525436481825,
      "eval_stsb_spearman": 0.8071610349606555,
      "step": 3875
    },
    {
      "epoch": 0.26,
      "learning_rate": 7.440163829514912e-06,
      "loss": 0.0005,
      "step": 4000
    },
    {
      "epoch": 0.26,
      "eval_avg_sts": 0.7949974539902241,
      "eval_sickr_spearman": 0.7751448872933301,
      "eval_stsb_spearman": 0.8148500206871182,
      "step": 4000
    },
    {
      "epoch": 0.26,
      "eval_avg_sts": 0.7983176661936285,
      "eval_sickr_spearman": 0.7787147989077179,
      "eval_stsb_spearman": 0.8179205334795392,
      "step": 4125
    },
    {
      "epoch": 0.27,
      "eval_avg_sts": 0.8011537162173987,
      "eval_sickr_spearman": 0.7797869011229179,
      "eval_stsb_spearman": 0.8225205313118794,
      "step": 4250
    },
    {
      "epoch": 0.28,
      "eval_avg_sts": 0.8017265968939464,
      "eval_sickr_spearman": 0.7797175922435428,
      "eval_stsb_spearman": 0.8237356015443501,
      "step": 4375
    },
    {
      "epoch": 0.29,
      "learning_rate": 7.120184308204276e-06,
      "loss": 0.0006,
      "step": 4500
    },
    {
      "epoch": 0.29,
      "eval_avg_sts": 0.7954208465844541,
      "eval_sickr_spearman": 0.7741096729640344,
      "eval_stsb_spearman": 0.8167320202048738,
      "step": 4500
    },
    {
      "epoch": 0.3,
      "eval_avg_sts": 0.7920347843147073,
      "eval_sickr_spearman": 0.7712112361477516,
      "eval_stsb_spearman": 0.8128583324816632,
      "step": 4625
    },
    {
      "epoch": 0.3,
      "eval_avg_sts": 0.7892462337813722,
      "eval_sickr_spearman": 0.7675803250345374,
      "eval_stsb_spearman": 0.8109121425282069,
      "step": 4750
    },
    {
      "epoch": 0.31,
      "eval_avg_sts": 0.7906076744998956,
      "eval_sickr_spearman": 0.7698981083276131,
      "eval_stsb_spearman": 0.811317240672178,
      "step": 4875
    },
    {
      "epoch": 0.32,
      "learning_rate": 6.800204786893639e-06,
      "loss": 0.0003,
      "step": 5000
    },
    {
      "epoch": 0.32,
      "eval_avg_sts": 0.7936889300507597,
      "eval_sickr_spearman": 0.771515849393072,
      "eval_stsb_spearman": 0.8158620107084473,
      "step": 5000
    },
    {
      "epoch": 0.33,
      "eval_avg_sts": 0.795947295315311,
      "eval_sickr_spearman": 0.7723795926902337,
      "eval_stsb_spearman": 0.8195149979403883,
      "step": 5125
    },
    {
      "epoch": 0.34,
      "eval_avg_sts": 0.7853385790441166,
      "eval_sickr_spearman": 0.7590891467668007,
      "eval_stsb_spearman": 0.8115880113214325,
      "step": 5250
    },
    {
      "epoch": 0.34,
      "eval_avg_sts": 0.7705051643120862,
      "eval_sickr_spearman": 0.7443019076292519,
      "eval_stsb_spearman": 0.7967084209949203,
      "step": 5375
    },
    {
      "epoch": 0.35,
      "learning_rate": 6.480225265583003e-06,
      "loss": 0.0005,
      "step": 5500
    },
    {
      "epoch": 0.35,
      "eval_avg_sts": 0.7907964258469622,
      "eval_sickr_spearman": 0.7614572241610039,
      "eval_stsb_spearman": 0.8201356275329206,
      "step": 5500
    },
    {
      "epoch": 0.36,
      "eval_avg_sts": 0.7921176828014587,
      "eval_sickr_spearman": 0.7642644018845545,
      "eval_stsb_spearman": 0.819970963718363,
      "step": 5625
    },
    {
      "epoch": 0.37,
      "eval_avg_sts": 0.7968607448341978,
      "eval_sickr_spearman": 0.7683329723940726,
      "eval_stsb_spearman": 0.825388517274323,
      "step": 5750
    },
    {
      "epoch": 0.38,
      "eval_avg_sts": 0.7976863770446964,
      "eval_sickr_spearman": 0.7699123310715607,
      "eval_stsb_spearman": 0.8254604230178321,
      "step": 5875
    },
    {
      "epoch": 0.38,
      "learning_rate": 6.1602457442723675e-06,
      "loss": 0.0002,
      "step": 6000
    },
    {
      "epoch": 0.38,
      "eval_avg_sts": 0.7994668096008009,
      "eval_sickr_spearman": 0.771331217839144,
      "eval_stsb_spearman": 0.8276024013624578,
      "step": 6000
    },
    {
      "epoch": 0.39,
      "eval_avg_sts": 0.794729526056908,
      "eval_sickr_spearman": 0.7672111099577826,
      "eval_stsb_spearman": 0.8222479421560336,
      "step": 6125
    },
    {
      "epoch": 0.4,
      "eval_avg_sts": 0.795235400500955,
      "eval_sickr_spearman": 0.7679053034668677,
      "eval_stsb_spearman": 0.8225654975350422,
      "step": 6250
    },
    {
      "epoch": 0.41,
      "eval_avg_sts": 0.7944326246735636,
      "eval_sickr_spearman": 0.7672546261356856,
      "eval_stsb_spearman": 0.8216106232114415,
      "step": 6375
    },
    {
      "epoch": 0.42,
      "learning_rate": 5.840266222961732e-06,
      "loss": 0.0003,
      "step": 6500
    },
    {
      "epoch": 0.42,
      "eval_avg_sts": 0.7986839621065949,
      "eval_sickr_spearman": 0.7724915051565845,
      "eval_stsb_spearman": 0.8248764190566051,
      "step": 6500
    },
    {
      "epoch": 0.42,
      "eval_avg_sts": 0.7975713516762131,
      "eval_sickr_spearman": 0.7717222390359516,
      "eval_stsb_spearman": 0.8234204643164746,
      "step": 6625
    },
    {
      "epoch": 0.43,
      "eval_avg_sts": 0.797016157457973,
      "eval_sickr_spearman": 0.7719195988317609,
      "eval_stsb_spearman": 0.8221127160841852,
      "step": 6750
    },
    {
      "epoch": 0.44,
      "eval_avg_sts": 0.7981065290191448,
      "eval_sickr_spearman": 0.7723574983835721,
      "eval_stsb_spearman": 0.8238555596547175,
      "step": 6875
    },
    {
      "epoch": 0.45,
      "learning_rate": 5.520286701651095e-06,
      "loss": 0.0002,
      "step": 7000
    },
    {
      "epoch": 0.45,
      "eval_avg_sts": 0.794247421496165,
      "eval_sickr_spearman": 0.7691365327211322,
      "eval_stsb_spearman": 0.8193583102711979,
      "step": 7000
    },
    {
      "epoch": 0.46,
      "eval_avg_sts": 0.7948016455050979,
      "eval_sickr_spearman": 0.7697465277093963,
      "eval_stsb_spearman": 0.8198567633007994,
      "step": 7125
    },
    {
      "epoch": 0.46,
      "eval_avg_sts": 0.8016506160778312,
      "eval_sickr_spearman": 0.7711050874135733,
      "eval_stsb_spearman": 0.8321961447420889,
      "step": 7250
    },
    {
      "epoch": 0.47,
      "eval_avg_sts": 0.8006808507853271,
      "eval_sickr_spearman": 0.771960281174679,
      "eval_stsb_spearman": 0.8294014203959752,
      "step": 7375
    },
    {
      "epoch": 0.48,
      "learning_rate": 5.200307180340458e-06,
      "loss": 0.0002,
      "step": 7500
    },
    {
      "epoch": 0.48,
      "eval_avg_sts": 0.8018251757415639,
      "eval_sickr_spearman": 0.7727575494274502,
      "eval_stsb_spearman": 0.8308928020556775,
      "step": 7500
    },
    {
      "epoch": 0.49,
      "eval_avg_sts": 0.803004866092661,
      "eval_sickr_spearman": 0.7736833008765683,
      "eval_stsb_spearman": 0.8323264313087536,
      "step": 7625
    },
    {
      "epoch": 0.5,
      "eval_avg_sts": 0.8038993525909374,
      "eval_sickr_spearman": 0.7753128520550592,
      "eval_stsb_spearman": 0.8324858531268156,
      "step": 7750
    },
    {
      "epoch": 0.5,
      "eval_avg_sts": 0.8041681113226734,
      "eval_sickr_spearman": 0.7751615060544277,
      "eval_stsb_spearman": 0.8331747165909191,
      "step": 7875
    },
    {
      "epoch": 0.51,
      "learning_rate": 4.8803276590298225e-06,
      "loss": 0.0002,
      "step": 8000
    },
    {
      "epoch": 0.51,
      "eval_avg_sts": 0.8018348905135586,
      "eval_sickr_spearman": 0.7739083746179073,
      "eval_stsb_spearman": 0.8297614064092099,
      "step": 8000
    },
    {
      "epoch": 0.52,
      "eval_avg_sts": 0.8018628636791422,
      "eval_sickr_spearman": 0.773909095084429,
      "eval_stsb_spearman": 0.8298166322738554,
      "step": 8125
    },
    {
      "epoch": 0.53,
      "eval_avg_sts": 0.8007180571037122,
      "eval_sickr_spearman": 0.7734595239749681,
      "eval_stsb_spearman": 0.8279765902324565,
      "step": 8250
    },
    {
      "epoch": 0.54,
      "eval_avg_sts": 0.8013781529559565,
      "eval_sickr_spearman": 0.7741623150512106,
      "eval_stsb_spearman": 0.8285939908607025,
      "step": 8375
    },
    {
      "epoch": 0.54,
      "learning_rate": 4.560348137719187e-06,
      "loss": 0.0001,
      "step": 8500
    },
    {
      "epoch": 0.54,
      "eval_avg_sts": 0.8011023693716508,
      "eval_sickr_spearman": 0.773219416498879,
      "eval_stsb_spearman": 0.8289853222444226,
      "step": 8500
    },
    {
      "epoch": 0.55,
      "eval_avg_sts": 0.8010778307928474,
      "eval_sickr_spearman": 0.7739564057193454,
      "eval_stsb_spearman": 0.8281992558663496,
      "step": 8625
    },
    {
      "epoch": 0.56,
      "eval_avg_sts": 0.7930314341293583,
      "eval_sickr_spearman": 0.768819671544945,
      "eval_stsb_spearman": 0.8172431967137714,
      "step": 8750
    },
    {
      "epoch": 0.57,
      "eval_avg_sts": 0.8008054784067702,
      "eval_sickr_spearman": 0.77372042891798,
      "eval_stsb_spearman": 0.8278905278955604,
      "step": 8875
    },
    {
      "epoch": 0.58,
      "learning_rate": 4.24036861640855e-06,
      "loss": 0.0002,
      "step": 9000
    },
    {
      "epoch": 0.58,
      "eval_avg_sts": 0.8000783181562559,
      "eval_sickr_spearman": 0.7738420436668213,
      "eval_stsb_spearman": 0.8263145926456905,
      "step": 9000
    },
    {
      "epoch": 0.58,
      "eval_avg_sts": 0.7973449571834637,
      "eval_sickr_spearman": 0.7723996696906348,
      "eval_stsb_spearman": 0.8222902446762925,
      "step": 9125
    },
    {
      "epoch": 0.59,
      "eval_avg_sts": 0.7988496681005242,
      "eval_sickr_spearman": 0.7730466966581075,
      "eval_stsb_spearman": 0.8246526395429409,
      "step": 9250
    },
    {
      "epoch": 0.6,
      "eval_avg_sts": 0.7993298924498625,
      "eval_sickr_spearman": 0.7727984719258754,
      "eval_stsb_spearman": 0.8258613129738497,
      "step": 9375
    },
    {
      "epoch": 0.61,
      "learning_rate": 3.920389095097914e-06,
      "loss": 0.0003,
      "step": 9500
    },
    {
      "epoch": 0.61,
      "eval_avg_sts": 0.7965436121884861,
      "eval_sickr_spearman": 0.7714783851339504,
      "eval_stsb_spearman": 0.8216088392430219,
      "step": 9500
    },
    {
      "epoch": 0.62,
      "eval_avg_sts": 0.7965626735690534,
      "eval_sickr_spearman": 0.7711964425685087,
      "eval_stsb_spearman": 0.821928904569598,
      "step": 9625
    },
    {
      "epoch": 0.62,
      "eval_avg_sts": 0.8151815992402154,
      "eval_sickr_spearman": 0.7837182948067288,
      "eval_stsb_spearman": 0.846644903673702,
      "step": 9750
    },
    {
      "epoch": 0.63,
      "eval_avg_sts": 0.8133407145565232,
      "eval_sickr_spearman": 0.781603293286003,
      "eval_stsb_spearman": 0.8450781358270435,
      "step": 9875
    },
    {
      "epoch": 0.64,
      "learning_rate": 3.600409573787278e-06,
      "loss": 0.0003,
      "step": 10000
    },
    {
      "epoch": 0.64,
      "eval_avg_sts": 0.8124932154038879,
      "eval_sickr_spearman": 0.7823626649997396,
      "eval_stsb_spearman": 0.8426237658080361,
      "step": 10000
    },
    {
      "epoch": 0.65,
      "eval_avg_sts": 0.8115787916906023,
      "eval_sickr_spearman": 0.7813677967956519,
      "eval_stsb_spearman": 0.8417897865855526,
      "step": 10125
    },
    {
      "epoch": 0.66,
      "eval_avg_sts": 0.8119336290772945,
      "eval_sickr_spearman": 0.7792161955756303,
      "eval_stsb_spearman": 0.8446510625789586,
      "step": 10250
    },
    {
      "epoch": 0.66,
      "eval_avg_sts": 0.8115907034881888,
      "eval_sickr_spearman": 0.78014233127356,
      "eval_stsb_spearman": 0.8430390757028176,
      "step": 10375
    },
    {
      "epoch": 0.67,
      "learning_rate": 3.280430052476642e-06,
      "loss": 0.0002,
      "step": 10500
    },
    {
      "epoch": 0.67,
      "eval_avg_sts": 0.8104476252807031,
      "eval_sickr_spearman": 0.7828967228166299,
      "eval_stsb_spearman": 0.8379985277447765,
      "step": 10500
    },
    {
      "epoch": 0.68,
      "eval_avg_sts": 0.8102305253603619,
      "eval_sickr_spearman": 0.7820164087894723,
      "eval_stsb_spearman": 0.8384446419312516,
      "step": 10625
    },
    {
      "epoch": 0.69,
      "eval_avg_sts": 0.8097882331288244,
      "eval_sickr_spearman": 0.7812608315327494,
      "eval_stsb_spearman": 0.8383156347248993,
      "step": 10750
    },
    {
      "epoch": 0.7,
      "eval_avg_sts": 0.8093084729110784,
      "eval_sickr_spearman": 0.7823192929151409,
      "eval_stsb_spearman": 0.836297652907016,
      "step": 10875
    },
    {
      "epoch": 0.7,
      "learning_rate": 2.960450531166006e-06,
      "loss": 0.0001,
      "step": 11000
    },
    {
      "epoch": 0.7,
      "eval_avg_sts": 0.8099056196812371,
      "eval_sickr_spearman": 0.7821833648980712,
      "eval_stsb_spearman": 0.837627874464403,
      "step": 11000
    },
    {
      "epoch": 0.71,
      "eval_avg_sts": 0.8088954804536845,
      "eval_sickr_spearman": 0.781711171139833,
      "eval_stsb_spearman": 0.8360797897675362,
      "step": 11125
    },
    {
      "epoch": 0.72,
      "eval_avg_sts": 0.8078360242726743,
      "eval_sickr_spearman": 0.7812626086835025,
      "eval_stsb_spearman": 0.8344094398618461,
      "step": 11250
    },
    {
      "epoch": 0.73,
      "eval_avg_sts": 0.8064158612496863,
      "eval_sickr_spearman": 0.7790308435551806,
      "eval_stsb_spearman": 0.8338008789441921,
      "step": 11375
    },
    {
      "epoch": 0.74,
      "learning_rate": 2.640471009855369e-06,
      "loss": 0.0002,
      "step": 11500
    },
    {
      "epoch": 0.74,
      "eval_avg_sts": 0.8062047487025639,
      "eval_sickr_spearman": 0.7787426089154506,
      "eval_stsb_spearman": 0.8336668884896772,
      "step": 11500
    },
    {
      "epoch": 0.74,
      "eval_avg_sts": 0.8045069953131643,
      "eval_sickr_spearman": 0.778478774075251,
      "eval_stsb_spearman": 0.8305352165510776,
      "step": 11625
    },
    {
      "epoch": 0.75,
      "eval_avg_sts": 0.803837740393073,
      "eval_sickr_spearman": 0.7792353599851041,
      "eval_stsb_spearman": 0.828440120801042,
      "step": 11750
    },
    {
      "epoch": 0.76,
      "eval_avg_sts": 0.8057568273780229,
      "eval_sickr_spearman": 0.7798796011486935,
      "eval_stsb_spearman": 0.8316340536073524,
      "step": 11875
    },
    {
      "epoch": 0.77,
      "learning_rate": 2.3204914885447333e-06,
      "loss": 0.0003,
      "step": 12000
    },
    {
      "epoch": 0.77,
      "eval_avg_sts": 0.8082419639074342,
      "eval_sickr_spearman": 0.7811977186654596,
      "eval_stsb_spearman": 0.8352862091494088,
      "step": 12000
    },
    {
      "epoch": 0.78,
      "eval_avg_sts": 0.8068636148311418,
      "eval_sickr_spearman": 0.780421343941814,
      "eval_stsb_spearman": 0.8333058857204696,
      "step": 12125
    },
    {
      "epoch": 0.78,
      "eval_avg_sts": 0.8061658395612952,
      "eval_sickr_spearman": 0.7795610108528546,
      "eval_stsb_spearman": 0.8327706682697357,
      "step": 12250
    },
    {
      "epoch": 0.79,
      "eval_avg_sts": 0.8079201453234057,
      "eval_sickr_spearman": 0.7817473865903174,
      "eval_stsb_spearman": 0.8340929040564942,
      "step": 12375
    },
    {
      "epoch": 0.8,
      "learning_rate": 2.000511967234097e-06,
      "loss": 0.0004,
      "step": 12500
    },
    {
      "epoch": 0.8,
      "eval_avg_sts": 0.8067038392243449,
      "eval_sickr_spearman": 0.7809990620299117,
      "eval_stsb_spearman": 0.8324086164187781,
      "step": 12500
    },
    {
      "epoch": 0.81,
      "eval_avg_sts": 0.8065569387711908,
      "eval_sickr_spearman": 0.7809066021596432,
      "eval_stsb_spearman": 0.8322072753827385,
      "step": 12625
    },
    {
      "epoch": 0.82,
      "eval_avg_sts": 0.8076585976169773,
      "eval_sickr_spearman": 0.7825363454625398,
      "eval_stsb_spearman": 0.8327808497714149,
      "step": 12750
    },
    {
      "epoch": 0.82,
      "eval_avg_sts": 0.807540679281871,
      "eval_sickr_spearman": 0.7822868719216702,
      "eval_stsb_spearman": 0.8327944866420718,
      "step": 12875
    },
    {
      "epoch": 0.83,
      "learning_rate": 1.6805324459234608e-06,
      "loss": 0.0002,
      "step": 13000
    },
    {
      "epoch": 0.83,
      "eval_avg_sts": 0.8088561109375074,
      "eval_sickr_spearman": 0.782293644306973,
      "eval_stsb_spearman": 0.8354185775680419,
      "step": 13000
    },
    {
      "epoch": 0.84,
      "eval_avg_sts": 0.8091518230287003,
      "eval_sickr_spearman": 0.782567277491866,
      "eval_stsb_spearman": 0.8357363685655345,
      "step": 13125
    },
    {
      "epoch": 0.85,
      "eval_avg_sts": 0.8080911332689825,
      "eval_sickr_spearman": 0.781686291029288,
      "eval_stsb_spearman": 0.834495975508677,
      "step": 13250
    },
    {
      "epoch": 0.86,
      "eval_avg_sts": 0.808319352838154,
      "eval_sickr_spearman": 0.7820563226347674,
      "eval_stsb_spearman": 0.8345823830415405,
      "step": 13375
    },
    {
      "epoch": 0.86,
      "learning_rate": 1.3605529246128248e-06,
      "loss": 0.0002,
      "step": 13500
    },
    {
      "epoch": 0.86,
      "eval_avg_sts": 0.8078707451193119,
      "eval_sickr_spearman": 0.7816650612824524,
      "eval_stsb_spearman": 0.8340764289561713,
      "step": 13500
    },
    {
      "epoch": 0.87,
      "eval_avg_sts": 0.8078035266499075,
      "eval_sickr_spearman": 0.7814631385320067,
      "eval_stsb_spearman": 0.8341439147678084,
      "step": 13625
    },
    {
      "epoch": 0.88,
      "eval_avg_sts": 0.8081068218462715,
      "eval_sickr_spearman": 0.7814149153061628,
      "eval_stsb_spearman": 0.83479872838638,
      "step": 13750
    },
    {
      "epoch": 0.89,
      "eval_avg_sts": 0.8078873563713755,
      "eval_sickr_spearman": 0.7812975753253495,
      "eval_stsb_spearman": 0.8344771374174016,
      "step": 13875
    },
    {
      "epoch": 0.9,
      "learning_rate": 1.0405734033021888e-06,
      "loss": 0.0001,
      "step": 14000
    },
    {
      "epoch": 0.9,
      "eval_avg_sts": 0.8076536250964819,
      "eval_sickr_spearman": 0.7811343176115614,
      "eval_stsb_spearman": 0.8341729325814025,
      "step": 14000
    },
    {
      "epoch": 0.9,
      "eval_avg_sts": 0.808264708696997,
      "eval_sickr_spearman": 0.781519959325008,
      "eval_stsb_spearman": 0.8350094580689861,
      "step": 14125
    },
    {
      "epoch": 0.91,
      "eval_avg_sts": 0.8081160617292164,
      "eval_sickr_spearman": 0.7813509859101486,
      "eval_stsb_spearman": 0.8348811375482841,
      "step": 14250
    },
    {
      "epoch": 0.92,
      "eval_avg_sts": 0.8079808418885974,
      "eval_sickr_spearman": 0.7813589110418859,
      "eval_stsb_spearman": 0.8346027727353089,
      "step": 14375
    },
    {
      "epoch": 0.93,
      "learning_rate": 7.205938819915525e-07,
      "loss": 0.0001,
      "step": 14500
    },
    {
      "epoch": 0.93,
      "eval_avg_sts": 0.807857763516069,
      "eval_sickr_spearman": 0.7812119839025867,
      "eval_stsb_spearman": 0.8345035431295512,
      "step": 14500
    },
    {
      "epoch": 0.94,
      "eval_avg_sts": 0.8078723757771976,
      "eval_sickr_spearman": 0.7811225980228105,
      "eval_stsb_spearman": 0.8346221535315848,
      "step": 14625
    },
    {
      "epoch": 0.94,
      "eval_avg_sts": 0.8076256348401094,
      "eval_sickr_spearman": 0.7807085218973124,
      "eval_stsb_spearman": 0.8345427477829063,
      "step": 14750
    },
    {
      "epoch": 0.95,
      "eval_avg_sts": 0.807561552002372,
      "eval_sickr_spearman": 0.7807236516942655,
      "eval_stsb_spearman": 0.8343994523104786,
      "step": 14875
    },
    {
      "epoch": 0.96,
      "learning_rate": 4.0061436068091647e-07,
      "loss": 0.0002,
      "step": 15000
    },
    {
      "epoch": 0.96,
      "eval_avg_sts": 0.8071056385688217,
      "eval_sickr_spearman": 0.7804617861292249,
      "eval_stsb_spearman": 0.8337494910084184,
      "step": 15000
    },
    {
      "epoch": 0.97,
      "eval_avg_sts": 0.8072176202500672,
      "eval_sickr_spearman": 0.7806607789824831,
      "eval_stsb_spearman": 0.8337744615176512,
      "step": 15125
    },
    {
      "epoch": 0.98,
      "eval_avg_sts": 0.8069637618748426,
      "eval_sickr_spearman": 0.7803758104576507,
      "eval_stsb_spearman": 0.8335517132920346,
      "step": 15250
    },
    {
      "epoch": 0.98,
      "eval_avg_sts": 0.8071120078313754,
      "eval_sickr_spearman": 0.7805422382241337,
      "eval_stsb_spearman": 0.8336817774386172,
      "step": 15375
    },
    {
      "epoch": 0.99,
      "learning_rate": 8.06348393702803e-08,
      "loss": 0.0002,
      "step": 15500
    },
    {
      "epoch": 0.99,
      "eval_avg_sts": 0.8071405000997522,
      "eval_sickr_spearman": 0.7806282619268093,
      "eval_stsb_spearman": 0.8336527382726953,
      "step": 15500
    },
    {
      "epoch": 1.0,
      "eval_avg_sts": 0.8071312391718575,
      "eval_sickr_spearman": 0.7806449767501098,
      "eval_stsb_spearman": 0.8336175015936051,
      "step": 15625
    },
    {
      "epoch": 1.0,
      "step": 15626,
      "train_runtime": 9225.5566,
      "train_samples_per_second": 1.694
    }
  ],
  "max_steps": 15626,
  "num_train_epochs": 1,
  "total_flos": 129097571734978560,
  "trial_name": null,
  "trial_params": null
}