radoslavralev committed on
Commit 9734b02 · verified · 1 Parent(s): c2004c7

Training in progress, step 5000

1_Pooling/config.json CHANGED
@@ -1,7 +1,7 @@
1
  {
2
- "word_embedding_dimension": 384,
3
- "pooling_mode_cls_token": false,
4
- "pooling_mode_mean_tokens": true,
5
  "pooling_mode_max_tokens": false,
6
  "pooling_mode_mean_sqrt_len_tokens": false,
7
  "pooling_mode_weightedmean_tokens": false,
 
1
  {
2
+ "word_embedding_dimension": 512,
3
+ "pooling_mode_cls_token": true,
4
+ "pooling_mode_mean_tokens": false,
5
  "pooling_mode_max_tokens": false,
6
  "pooling_mode_mean_sqrt_len_tokens": false,
7
  "pooling_mode_weightedmean_tokens": false,
Information-Retrieval_evaluation_BeIR-touche2020-subset-test_results.csv CHANGED
@@ -1,2 +1,3 @@
1
  epoch,steps,cosine-Accuracy@1,cosine-Accuracy@3,cosine-Accuracy@5,cosine-Accuracy@10,cosine-Precision@1,cosine-Recall@1,cosine-Precision@3,cosine-Recall@3,cosine-Precision@5,cosine-Recall@5,cosine-Precision@10,cosine-Recall@10,cosine-MRR@10,cosine-NDCG@10,cosine-MAP@100
2
  -1,-1,0.7142857142857143,0.8979591836734694,0.9387755102040817,1.0,0.7142857142857143,0.015925713751801022,0.6870748299319728,0.045760276621368244,0.6326530612244898,0.06999790748190285,0.553061224489796,0.12237196517849,0.8126660187884677,0.590781089071972,0.25216721309824497
 
 
1
  epoch,steps,cosine-Accuracy@1,cosine-Accuracy@3,cosine-Accuracy@5,cosine-Accuracy@10,cosine-Precision@1,cosine-Recall@1,cosine-Precision@3,cosine-Recall@3,cosine-Precision@5,cosine-Recall@5,cosine-Precision@10,cosine-Recall@10,cosine-MRR@10,cosine-NDCG@10,cosine-MAP@100
2
  -1,-1,0.7142857142857143,0.8979591836734694,0.9387755102040817,1.0,0.7142857142857143,0.015925713751801022,0.6870748299319728,0.045760276621368244,0.6326530612244898,0.06999790748190285,0.553061224489796,0.12237196517849,0.8126660187884677,0.590781089071972,0.25216721309824497
3
+ -1,-1,0.7346938775510204,0.8775510204081632,0.9591836734693877,0.9795918367346939,0.7346938775510204,0.01624309573792563,0.6734693877551019,0.04452074460941084,0.6367346938775511,0.07034165898860735,0.5510204081632654,0.12170213074025006,0.8187074829931973,0.5930193473989395,0.2518399254976325
Information-Retrieval_evaluation_NanoArguAna_results.csv CHANGED
@@ -1,3 +1,4 @@
1
  epoch,steps,cosine-Accuracy@1,cosine-Accuracy@3,cosine-Accuracy@5,cosine-Accuracy@10,cosine-Precision@1,cosine-Recall@1,cosine-Precision@3,cosine-Recall@3,cosine-Precision@5,cosine-Recall@5,cosine-Precision@10,cosine-Recall@10,cosine-MRR@10,cosine-NDCG@10,cosine-MAP@100
2
  -1,-1,0.2,0.56,0.74,0.76,0.2,0.2,0.18666666666666668,0.56,0.14800000000000002,0.74,0.07600000000000001,0.76,0.40222222222222215,0.49058314613975507,0.4109932426184554
3
  -1,-1,0.2,0.56,0.74,0.76,0.2,0.2,0.18666666666666668,0.56,0.14800000000000002,0.74,0.07600000000000001,0.76,0.40222222222222215,0.49058314613975507,0.4109932426184554
 
 
1
  epoch,steps,cosine-Accuracy@1,cosine-Accuracy@3,cosine-Accuracy@5,cosine-Accuracy@10,cosine-Precision@1,cosine-Recall@1,cosine-Precision@3,cosine-Recall@3,cosine-Precision@5,cosine-Recall@5,cosine-Precision@10,cosine-Recall@10,cosine-MRR@10,cosine-NDCG@10,cosine-MAP@100
2
  -1,-1,0.2,0.56,0.74,0.76,0.2,0.2,0.18666666666666668,0.56,0.14800000000000002,0.74,0.07600000000000001,0.76,0.40222222222222215,0.49058314613975507,0.4109932426184554
3
  -1,-1,0.2,0.56,0.74,0.76,0.2,0.2,0.18666666666666668,0.56,0.14800000000000002,0.74,0.07600000000000001,0.76,0.40222222222222215,0.49058314613975507,0.4109932426184554
4
+ -1,-1,0.22,0.5,0.66,0.8,0.22,0.22,0.16666666666666663,0.5,0.132,0.66,0.08,0.8,0.3926269841269841,0.48983349748002636,0.40347638549721887
Information-Retrieval_evaluation_NanoClimateFEVER_results.csv CHANGED
@@ -1,3 +1,4 @@
1
  epoch,steps,cosine-Accuracy@1,cosine-Accuracy@3,cosine-Accuracy@5,cosine-Accuracy@10,cosine-Precision@1,cosine-Recall@1,cosine-Precision@3,cosine-Recall@3,cosine-Precision@5,cosine-Recall@5,cosine-Precision@10,cosine-Recall@10,cosine-MRR@10,cosine-NDCG@10,cosine-MAP@100
2
  -1,-1,0.14,0.32,0.48,0.62,0.14,0.05833333333333333,0.12,0.155,0.10400000000000002,0.22066666666666668,0.07400000000000001,0.30733333333333335,0.27213492063492056,0.215125793679731,0.15431110143807805
3
  -1,-1,0.14,0.32,0.48,0.62,0.14,0.05833333333333333,0.12,0.155,0.10400000000000002,0.22066666666666668,0.07400000000000001,0.30733333333333335,0.27213492063492056,0.215125793679731,0.15431110143807805
 
 
1
  epoch,steps,cosine-Accuracy@1,cosine-Accuracy@3,cosine-Accuracy@5,cosine-Accuracy@10,cosine-Precision@1,cosine-Recall@1,cosine-Precision@3,cosine-Recall@3,cosine-Precision@5,cosine-Recall@5,cosine-Precision@10,cosine-Recall@10,cosine-MRR@10,cosine-NDCG@10,cosine-MAP@100
2
  -1,-1,0.14,0.32,0.48,0.62,0.14,0.05833333333333333,0.12,0.155,0.10400000000000002,0.22066666666666668,0.07400000000000001,0.30733333333333335,0.27213492063492056,0.215125793679731,0.15431110143807805
3
  -1,-1,0.14,0.32,0.48,0.62,0.14,0.05833333333333333,0.12,0.155,0.10400000000000002,0.22066666666666668,0.07400000000000001,0.30733333333333335,0.27213492063492056,0.215125793679731,0.15431110143807805
4
+ -1,-1,0.12,0.28,0.46,0.66,0.12,0.03833333333333333,0.09999999999999998,0.135,0.1,0.23633333333333334,0.07600000000000001,0.32466666666666666,0.2561746031746031,0.20845131843009276,0.14245832634750027
Information-Retrieval_evaluation_NanoDBPedia_results.csv CHANGED
@@ -1,3 +1,4 @@
1
  epoch,steps,cosine-Accuracy@1,cosine-Accuracy@3,cosine-Accuracy@5,cosine-Accuracy@10,cosine-Precision@1,cosine-Recall@1,cosine-Precision@3,cosine-Recall@3,cosine-Precision@5,cosine-Recall@5,cosine-Precision@10,cosine-Recall@10,cosine-MRR@10,cosine-NDCG@10,cosine-MAP@100
2
  -1,-1,0.62,0.84,0.88,0.88,0.62,0.05039842070870112,0.4933333333333333,0.13002690694209756,0.44,0.18830365543570443,0.37199999999999994,0.2679047211992138,0.7323333333333334,0.46809379506385207,0.33243413363446367
3
  -1,-1,0.62,0.84,0.88,0.88,0.62,0.05039842070870112,0.4933333333333333,0.13002690694209756,0.44,0.18830365543570443,0.37199999999999994,0.2679047211992138,0.7323333333333334,0.46809379506385207,0.33243413363446367
 
 
1
  epoch,steps,cosine-Accuracy@1,cosine-Accuracy@3,cosine-Accuracy@5,cosine-Accuracy@10,cosine-Precision@1,cosine-Recall@1,cosine-Precision@3,cosine-Recall@3,cosine-Precision@5,cosine-Recall@5,cosine-Precision@10,cosine-Recall@10,cosine-MRR@10,cosine-NDCG@10,cosine-MAP@100
2
  -1,-1,0.62,0.84,0.88,0.88,0.62,0.05039842070870112,0.4933333333333333,0.13002690694209756,0.44,0.18830365543570443,0.37199999999999994,0.2679047211992138,0.7323333333333334,0.46809379506385207,0.33243413363446367
3
  -1,-1,0.62,0.84,0.88,0.88,0.62,0.05039842070870112,0.4933333333333333,0.13002690694209756,0.44,0.18830365543570443,0.37199999999999994,0.2679047211992138,0.7323333333333334,0.46809379506385207,0.33243413363446367
4
+ -1,-1,0.66,0.76,0.86,0.88,0.66,0.08255861663979128,0.4733333333333334,0.1250415886989628,0.44800000000000006,0.17452224602058436,0.34800000000000003,0.25437495331636306,0.7288333333333334,0.4636652485066659,0.32939375725902226
Information-Retrieval_evaluation_NanoFEVER_results.csv CHANGED
@@ -1,3 +1,4 @@
1
  epoch,steps,cosine-Accuracy@1,cosine-Accuracy@3,cosine-Accuracy@5,cosine-Accuracy@10,cosine-Precision@1,cosine-Recall@1,cosine-Precision@3,cosine-Recall@3,cosine-Precision@5,cosine-Recall@5,cosine-Precision@10,cosine-Recall@10,cosine-MRR@10,cosine-NDCG@10,cosine-MAP@100
2
  -1,-1,0.62,0.88,0.94,0.96,0.62,0.5966666666666667,0.30666666666666664,0.8433333333333333,0.19599999999999995,0.8933333333333333,0.09999999999999998,0.9133333333333333,0.753,0.7821095700854137,0.7330432132878941
3
  -1,-1,0.62,0.88,0.94,0.96,0.62,0.5966666666666667,0.30666666666666664,0.8433333333333333,0.19599999999999995,0.8933333333333333,0.09999999999999998,0.9133333333333333,0.753,0.7821095700854137,0.7330432132878941
 
 
1
  epoch,steps,cosine-Accuracy@1,cosine-Accuracy@3,cosine-Accuracy@5,cosine-Accuracy@10,cosine-Precision@1,cosine-Recall@1,cosine-Precision@3,cosine-Recall@3,cosine-Precision@5,cosine-Recall@5,cosine-Precision@10,cosine-Recall@10,cosine-MRR@10,cosine-NDCG@10,cosine-MAP@100
2
  -1,-1,0.62,0.88,0.94,0.96,0.62,0.5966666666666667,0.30666666666666664,0.8433333333333333,0.19599999999999995,0.8933333333333333,0.09999999999999998,0.9133333333333333,0.753,0.7821095700854137,0.7330432132878941
3
  -1,-1,0.62,0.88,0.94,0.96,0.62,0.5966666666666667,0.30666666666666664,0.8433333333333333,0.19599999999999995,0.8933333333333333,0.09999999999999998,0.9133333333333333,0.753,0.7821095700854137,0.7330432132878941
4
+ -1,-1,0.7,0.86,0.9,0.98,0.7,0.6466666666666666,0.2866666666666667,0.8066666666666665,0.184,0.8566666666666666,0.09999999999999998,0.9266666666666667,0.784190476190476,0.7923317127841635,0.7390417679680837
Information-Retrieval_evaluation_NanoFiQA2018_results.csv CHANGED
@@ -1,3 +1,4 @@
1
  epoch,steps,cosine-Accuracy@1,cosine-Accuracy@3,cosine-Accuracy@5,cosine-Accuracy@10,cosine-Precision@1,cosine-Recall@1,cosine-Precision@3,cosine-Recall@3,cosine-Precision@5,cosine-Recall@5,cosine-Precision@10,cosine-Recall@10,cosine-MRR@10,cosine-NDCG@10,cosine-MAP@100
2
  -1,-1,0.22,0.38,0.54,0.6,0.22,0.11752380952380952,0.15999999999999998,0.21912698412698414,0.14400000000000002,0.34296031746031747,0.088,0.3807380952380952,0.33804761904761904,0.2959832185054632,0.24139316426365195
3
  -1,-1,0.22,0.38,0.54,0.6,0.22,0.11752380952380952,0.15999999999999998,0.21912698412698414,0.14400000000000002,0.34296031746031747,0.088,0.3807380952380952,0.33804761904761904,0.2959832185054632,0.24139316426365195
 
 
1
  epoch,steps,cosine-Accuracy@1,cosine-Accuracy@3,cosine-Accuracy@5,cosine-Accuracy@10,cosine-Precision@1,cosine-Recall@1,cosine-Precision@3,cosine-Recall@3,cosine-Precision@5,cosine-Recall@5,cosine-Precision@10,cosine-Recall@10,cosine-MRR@10,cosine-NDCG@10,cosine-MAP@100
2
  -1,-1,0.22,0.38,0.54,0.6,0.22,0.11752380952380952,0.15999999999999998,0.21912698412698414,0.14400000000000002,0.34296031746031747,0.088,0.3807380952380952,0.33804761904761904,0.2959832185054632,0.24139316426365195
3
  -1,-1,0.22,0.38,0.54,0.6,0.22,0.11752380952380952,0.15999999999999998,0.21912698412698414,0.14400000000000002,0.34296031746031747,0.088,0.3807380952380952,0.33804761904761904,0.2959832185054632,0.24139316426365195
4
+ -1,-1,0.28,0.38,0.52,0.6,0.28,0.12974603174603175,0.16666666666666663,0.20312698412698413,0.14400000000000002,0.2956269841269841,0.08999999999999998,0.3792936507936508,0.3612698412698412,0.29381486022787706,0.24011197414286956
Information-Retrieval_evaluation_NanoHotpotQA_results.csv CHANGED
@@ -1,3 +1,4 @@
1
  epoch,steps,cosine-Accuracy@1,cosine-Accuracy@3,cosine-Accuracy@5,cosine-Accuracy@10,cosine-Precision@1,cosine-Recall@1,cosine-Precision@3,cosine-Recall@3,cosine-Precision@5,cosine-Recall@5,cosine-Precision@10,cosine-Recall@10,cosine-MRR@10,cosine-NDCG@10,cosine-MAP@100
2
  -1,-1,0.6,0.74,0.78,0.86,0.6,0.3,0.32666666666666666,0.49,0.21599999999999994,0.54,0.12599999999999997,0.63,0.6806666666666666,0.5588160498147219,0.47611256957303766
3
  -1,-1,0.6,0.74,0.78,0.86,0.6,0.3,0.32666666666666666,0.49,0.21599999999999994,0.54,0.12599999999999997,0.63,0.6806666666666666,0.5588160498147219,0.47611256957303766
 
 
1
  epoch,steps,cosine-Accuracy@1,cosine-Accuracy@3,cosine-Accuracy@5,cosine-Accuracy@10,cosine-Precision@1,cosine-Recall@1,cosine-Precision@3,cosine-Recall@3,cosine-Precision@5,cosine-Recall@5,cosine-Precision@10,cosine-Recall@10,cosine-MRR@10,cosine-NDCG@10,cosine-MAP@100
2
  -1,-1,0.6,0.74,0.78,0.86,0.6,0.3,0.32666666666666666,0.49,0.21599999999999994,0.54,0.12599999999999997,0.63,0.6806666666666666,0.5588160498147219,0.47611256957303766
3
  -1,-1,0.6,0.74,0.78,0.86,0.6,0.3,0.32666666666666666,0.49,0.21599999999999994,0.54,0.12599999999999997,0.63,0.6806666666666666,0.5588160498147219,0.47611256957303766
4
+ -1,-1,0.64,0.74,0.78,0.84,0.64,0.32,0.3,0.45,0.19599999999999998,0.49,0.11199999999999999,0.56,0.704079365079365,0.5341052902954041,0.46245563144445145
Information-Retrieval_evaluation_NanoMSMARCO_results.csv CHANGED
@@ -1,3 +1,4 @@
1
  epoch,steps,cosine-Accuracy@1,cosine-Accuracy@3,cosine-Accuracy@5,cosine-Accuracy@10,cosine-Precision@1,cosine-Recall@1,cosine-Precision@3,cosine-Recall@3,cosine-Precision@5,cosine-Recall@5,cosine-Precision@10,cosine-Recall@10,cosine-MRR@10,cosine-NDCG@10,cosine-MAP@100
2
  -1,-1,0.22,0.5,0.62,0.74,0.22,0.22,0.16666666666666663,0.5,0.124,0.62,0.07400000000000001,0.74,0.39240476190476187,0.47667177266958005,0.406991563991564
3
  -1,-1,0.22,0.5,0.62,0.74,0.22,0.22,0.16666666666666663,0.5,0.124,0.62,0.07400000000000001,0.74,0.39240476190476187,0.47667177266958005,0.406991563991564
 
 
1
  epoch,steps,cosine-Accuracy@1,cosine-Accuracy@3,cosine-Accuracy@5,cosine-Accuracy@10,cosine-Precision@1,cosine-Recall@1,cosine-Precision@3,cosine-Recall@3,cosine-Precision@5,cosine-Recall@5,cosine-Precision@10,cosine-Recall@10,cosine-MRR@10,cosine-NDCG@10,cosine-MAP@100
2
  -1,-1,0.22,0.5,0.62,0.74,0.22,0.22,0.16666666666666663,0.5,0.124,0.62,0.07400000000000001,0.74,0.39240476190476187,0.47667177266958005,0.406991563991564
3
  -1,-1,0.22,0.5,0.62,0.74,0.22,0.22,0.16666666666666663,0.5,0.124,0.62,0.07400000000000001,0.74,0.39240476190476187,0.47667177266958005,0.406991563991564
4
+ -1,-1,0.3,0.56,0.66,0.8,0.3,0.3,0.18666666666666665,0.56,0.132,0.66,0.08,0.8,0.4522142857142857,0.5355647548788993,0.4614012040294735
Information-Retrieval_evaluation_NanoNFCorpus_results.csv CHANGED
@@ -1,3 +1,4 @@
1
  epoch,steps,cosine-Accuracy@1,cosine-Accuracy@3,cosine-Accuracy@5,cosine-Accuracy@10,cosine-Precision@1,cosine-Recall@1,cosine-Precision@3,cosine-Recall@3,cosine-Precision@5,cosine-Recall@5,cosine-Precision@10,cosine-Recall@10,cosine-MRR@10,cosine-NDCG@10,cosine-MAP@100
2
  -1,-1,0.34,0.5,0.58,0.6,0.34,0.01200107748257525,0.3133333333333333,0.042785241025884206,0.292,0.08113445148474485,0.25,0.10803989338634405,0.4345555555555555,0.2802878906182637,0.10597358556555182
3
  -1,-1,0.34,0.5,0.58,0.6,0.34,0.01200107748257525,0.3133333333333333,0.042785241025884206,0.292,0.08113445148474485,0.25,0.10803989338634405,0.4345555555555555,0.2802878906182637,0.10597358556555182
 
 
1
  epoch,steps,cosine-Accuracy@1,cosine-Accuracy@3,cosine-Accuracy@5,cosine-Accuracy@10,cosine-Precision@1,cosine-Recall@1,cosine-Precision@3,cosine-Recall@3,cosine-Precision@5,cosine-Recall@5,cosine-Precision@10,cosine-Recall@10,cosine-MRR@10,cosine-NDCG@10,cosine-MAP@100
2
  -1,-1,0.34,0.5,0.58,0.6,0.34,0.01200107748257525,0.3133333333333333,0.042785241025884206,0.292,0.08113445148474485,0.25,0.10803989338634405,0.4345555555555555,0.2802878906182637,0.10597358556555182
3
  -1,-1,0.34,0.5,0.58,0.6,0.34,0.01200107748257525,0.3133333333333333,0.042785241025884206,0.292,0.08113445148474485,0.25,0.10803989338634405,0.4345555555555555,0.2802878906182637,0.10597358556555182
4
+ -1,-1,0.36,0.54,0.58,0.66,0.36,0.013678895813410474,0.34,0.05640197371329614,0.308,0.07379464684205841,0.242,0.09966914120321839,0.45522222222222225,0.2773671235823583,0.10548993594921903
Information-Retrieval_evaluation_NanoNQ_results.csv CHANGED
@@ -1,3 +1,4 @@
1
  epoch,steps,cosine-Accuracy@1,cosine-Accuracy@3,cosine-Accuracy@5,cosine-Accuracy@10,cosine-Precision@1,cosine-Recall@1,cosine-Precision@3,cosine-Recall@3,cosine-Precision@5,cosine-Recall@5,cosine-Precision@10,cosine-Recall@10,cosine-MRR@10,cosine-NDCG@10,cosine-MAP@100
2
  -1,-1,0.28,0.46,0.56,0.64,0.28,0.27,0.15999999999999998,0.45,0.11600000000000002,0.54,0.066,0.61,0.39785714285714285,0.4442430372694745,0.39869586832265574
3
  -1,-1,0.28,0.46,0.56,0.64,0.28,0.27,0.15999999999999998,0.45,0.11600000000000002,0.54,0.066,0.61,0.39785714285714285,0.4442430372694745,0.39869586832265574
 
 
1
  epoch,steps,cosine-Accuracy@1,cosine-Accuracy@3,cosine-Accuracy@5,cosine-Accuracy@10,cosine-Precision@1,cosine-Recall@1,cosine-Precision@3,cosine-Recall@3,cosine-Precision@5,cosine-Recall@5,cosine-Precision@10,cosine-Recall@10,cosine-MRR@10,cosine-NDCG@10,cosine-MAP@100
2
  -1,-1,0.28,0.46,0.56,0.64,0.28,0.27,0.15999999999999998,0.45,0.11600000000000002,0.54,0.066,0.61,0.39785714285714285,0.4442430372694745,0.39869586832265574
3
  -1,-1,0.28,0.46,0.56,0.64,0.28,0.27,0.15999999999999998,0.45,0.11600000000000002,0.54,0.066,0.61,0.39785714285714285,0.4442430372694745,0.39869586832265574
4
+ -1,-1,0.3,0.54,0.62,0.7,0.3,0.29,0.18,0.51,0.132,0.6,0.07400000000000001,0.68,0.43224603174603166,0.48789548101925573,0.4304090341200182
Information-Retrieval_evaluation_NanoQuoraRetrieval_results.csv CHANGED
@@ -1,3 +1,4 @@
1
  epoch,steps,cosine-Accuracy@1,cosine-Accuracy@3,cosine-Accuracy@5,cosine-Accuracy@10,cosine-Precision@1,cosine-Recall@1,cosine-Precision@3,cosine-Recall@3,cosine-Precision@5,cosine-Recall@5,cosine-Precision@10,cosine-Recall@10,cosine-MRR@10,cosine-NDCG@10,cosine-MAP@100
2
  -1,-1,0.96,1.0,1.0,1.0,0.96,0.8373333333333334,0.4133333333333333,0.9653333333333333,0.264,0.986,0.13999999999999999,1.0,0.9733333333333334,0.9736013358388067,0.958547619047619
3
  -1,-1,0.96,1.0,1.0,1.0,0.96,0.8373333333333334,0.4133333333333333,0.9653333333333333,0.264,0.986,0.13999999999999999,1.0,0.9733333333333334,0.9736013358388067,0.958547619047619
 
 
1
  epoch,steps,cosine-Accuracy@1,cosine-Accuracy@3,cosine-Accuracy@5,cosine-Accuracy@10,cosine-Precision@1,cosine-Recall@1,cosine-Precision@3,cosine-Recall@3,cosine-Precision@5,cosine-Recall@5,cosine-Precision@10,cosine-Recall@10,cosine-MRR@10,cosine-NDCG@10,cosine-MAP@100
2
  -1,-1,0.96,1.0,1.0,1.0,0.96,0.8373333333333334,0.4133333333333333,0.9653333333333333,0.264,0.986,0.13999999999999999,1.0,0.9733333333333334,0.9736013358388067,0.958547619047619
3
  -1,-1,0.96,1.0,1.0,1.0,0.96,0.8373333333333334,0.4133333333333333,0.9653333333333333,0.264,0.986,0.13999999999999999,1.0,0.9733333333333334,0.9736013358388067,0.958547619047619
4
+ -1,-1,0.96,0.98,1.0,1.0,0.96,0.8473333333333334,0.4133333333333333,0.9520000000000001,0.264,0.986,0.13999999999999999,1.0,0.975,0.9790267083021519,0.9683809523809523
Information-Retrieval_evaluation_NanoSCIDOCS_results.csv CHANGED
@@ -1,3 +1,4 @@
1
  epoch,steps,cosine-Accuracy@1,cosine-Accuracy@3,cosine-Accuracy@5,cosine-Accuracy@10,cosine-Precision@1,cosine-Recall@1,cosine-Precision@3,cosine-Recall@3,cosine-Precision@5,cosine-Recall@5,cosine-Precision@10,cosine-Recall@10,cosine-MRR@10,cosine-NDCG@10,cosine-MAP@100
2
  -1,-1,0.46,0.64,0.78,0.82,0.46,0.09766666666666668,0.3533333333333333,0.21966666666666665,0.3,0.30966666666666665,0.18799999999999997,0.38666666666666655,0.5706666666666667,0.3818424009361081,0.30532272577213904
3
  -1,-1,0.46,0.64,0.78,0.82,0.46,0.09766666666666668,0.3533333333333333,0.21966666666666665,0.3,0.30966666666666665,0.18799999999999997,0.38666666666666655,0.5706666666666667,0.3818424009361081,0.30532272577213904
 
 
1
  epoch,steps,cosine-Accuracy@1,cosine-Accuracy@3,cosine-Accuracy@5,cosine-Accuracy@10,cosine-Precision@1,cosine-Recall@1,cosine-Precision@3,cosine-Recall@3,cosine-Precision@5,cosine-Recall@5,cosine-Precision@10,cosine-Recall@10,cosine-MRR@10,cosine-NDCG@10,cosine-MAP@100
2
  -1,-1,0.46,0.64,0.78,0.82,0.46,0.09766666666666668,0.3533333333333333,0.21966666666666665,0.3,0.30966666666666665,0.18799999999999997,0.38666666666666655,0.5706666666666667,0.3818424009361081,0.30532272577213904
3
  -1,-1,0.46,0.64,0.78,0.82,0.46,0.09766666666666668,0.3533333333333333,0.21966666666666665,0.3,0.30966666666666665,0.18799999999999997,0.38666666666666655,0.5706666666666667,0.3818424009361081,0.30532272577213904
4
+ -1,-1,0.46,0.66,0.76,0.82,0.46,0.09766666666666668,0.34,0.21166666666666667,0.27599999999999997,0.2846666666666666,0.19399999999999998,0.3986666666666666,0.5684444444444444,0.38054185058113466,0.29482712989551213
Information-Retrieval_evaluation_NanoSciFact_results.csv CHANGED
@@ -1,3 +1,4 @@
1
  epoch,steps,cosine-Accuracy@1,cosine-Accuracy@3,cosine-Accuracy@5,cosine-Accuracy@10,cosine-Precision@1,cosine-Recall@1,cosine-Precision@3,cosine-Recall@3,cosine-Precision@5,cosine-Recall@5,cosine-Precision@10,cosine-Recall@10,cosine-MRR@10,cosine-NDCG@10,cosine-MAP@100
2
  -1,-1,0.48,0.6,0.64,0.8,0.48,0.435,0.22666666666666668,0.585,0.148,0.63,0.09,0.79,0.5592777777777777,0.6050538780432089,0.5513100730514523
3
  -1,-1,0.48,0.6,0.64,0.8,0.48,0.435,0.22666666666666668,0.585,0.148,0.63,0.09,0.79,0.5592777777777777,0.6050538780432089,0.5513100730514523
 
 
1
  epoch,steps,cosine-Accuracy@1,cosine-Accuracy@3,cosine-Accuracy@5,cosine-Accuracy@10,cosine-Precision@1,cosine-Recall@1,cosine-Precision@3,cosine-Recall@3,cosine-Precision@5,cosine-Recall@5,cosine-Precision@10,cosine-Recall@10,cosine-MRR@10,cosine-NDCG@10,cosine-MAP@100
2
  -1,-1,0.48,0.6,0.64,0.8,0.48,0.435,0.22666666666666668,0.585,0.148,0.63,0.09,0.79,0.5592777777777777,0.6050538780432089,0.5513100730514523
3
  -1,-1,0.48,0.6,0.64,0.8,0.48,0.435,0.22666666666666668,0.585,0.148,0.63,0.09,0.79,0.5592777777777777,0.6050538780432089,0.5513100730514523
4
+ -1,-1,0.42,0.64,0.72,0.78,0.42,0.375,0.2333333333333333,0.615,0.16,0.7,0.088,0.77,0.5500238095238096,0.5949205162858369,0.5379955882174889
Information-Retrieval_evaluation_NanoTouche2020_results.csv CHANGED
@@ -1,3 +1,4 @@
1
  epoch,steps,cosine-Accuracy@1,cosine-Accuracy@3,cosine-Accuracy@5,cosine-Accuracy@10,cosine-Precision@1,cosine-Recall@1,cosine-Precision@3,cosine-Recall@3,cosine-Precision@5,cosine-Recall@5,cosine-Precision@10,cosine-Recall@10,cosine-MRR@10,cosine-NDCG@10,cosine-MAP@100
2
  -1,-1,0.4489795918367347,0.7142857142857143,0.8367346938775511,0.9795918367346939,0.4489795918367347,0.03145284890764548,0.3877551020408163,0.08052290820807267,0.37959183673469393,0.12752705262749714,0.3285714285714286,0.21259838452857663,0.6169663103336572,0.36562572315623365,0.2636080363851069
3
  -1,-1,0.4489795918367347,0.7142857142857143,0.8367346938775511,0.9795918367346939,0.4489795918367347,0.03145284890764548,0.3877551020408163,0.08052290820807267,0.37959183673469393,0.12752705262749714,0.3285714285714286,0.21259838452857663,0.6169663103336572,0.36562572315623365,0.2636080363851069
 
 
1
  epoch,steps,cosine-Accuracy@1,cosine-Accuracy@3,cosine-Accuracy@5,cosine-Accuracy@10,cosine-Precision@1,cosine-Recall@1,cosine-Precision@3,cosine-Recall@3,cosine-Precision@5,cosine-Recall@5,cosine-Precision@10,cosine-Recall@10,cosine-MRR@10,cosine-NDCG@10,cosine-MAP@100
2
  -1,-1,0.4489795918367347,0.7142857142857143,0.8367346938775511,0.9795918367346939,0.4489795918367347,0.03145284890764548,0.3877551020408163,0.08052290820807267,0.37959183673469393,0.12752705262749714,0.3285714285714286,0.21259838452857663,0.6169663103336572,0.36562572315623365,0.2636080363851069
3
  -1,-1,0.4489795918367347,0.7142857142857143,0.8367346938775511,0.9795918367346939,0.4489795918367347,0.03145284890764548,0.3877551020408163,0.08052290820807267,0.37959183673469393,0.12752705262749714,0.3285714285714286,0.21259838452857663,0.6169663103336572,0.36562572315623365,0.2636080363851069
4
+ -1,-1,0.4897959183673469,0.8367346938775511,0.8775510204081632,0.9591836734693877,0.4897959183673469,0.0391261657646358,0.46258503401360546,0.09935212519786965,0.4081632653061225,0.14360455232476052,0.35306122448979593,0.23229499710875565,0.6578798185941044,0.39999686542243473,0.28337269656997294
NanoBEIR_evaluation_mean_results.csv CHANGED
@@ -1,3 +1,4 @@
1
  epoch,steps,cosine-Accuracy@1,cosine-Accuracy@3,cosine-Accuracy@5,cosine-Accuracy@10,cosine-Precision@1,cosine-Recall@1,cosine-Precision@3,cosine-Recall@3,cosine-Precision@5,cosine-Recall@5,cosine-Precision@10,cosine-Recall@10,cosine-MRR@10,cosine-NDCG@10,cosine-MAP@100
2
  -1,-1,0.42992150706436427,0.6257142857142857,0.7212872841444271,0.7891993720565149,0.42992150706436427,0.24818278127867163,0.27803244374672936,0.4031381056643362,0.22089167974882265,0.47843016489807155,0.15173626373626373,0.5466626482835049,0.5479589469487428,0.4875413547554317,0.4106720689962823
3
  -1,-1,0.42992150706436427,0.6257142857142857,0.7212872841444271,0.7891993720565149,0.42992150706436427,0.24818278127867163,0.27803244374672936,0.4031381056643362,0.22089167974882265,0.47843016489807155,0.15173626373626373,0.5466626482835049,0.5479589469487428,0.4875413547554317,0.4106720689962823
 
 
1
  epoch,steps,cosine-Accuracy@1,cosine-Accuracy@3,cosine-Accuracy@5,cosine-Accuracy@10,cosine-Precision@1,cosine-Recall@1,cosine-Precision@3,cosine-Recall@3,cosine-Precision@5,cosine-Recall@5,cosine-Precision@10,cosine-Recall@10,cosine-MRR@10,cosine-NDCG@10,cosine-MAP@100
2
  -1,-1,0.42992150706436427,0.6257142857142857,0.7212872841444271,0.7891993720565149,0.42992150706436427,0.24818278127867163,0.27803244374672936,0.4031381056643362,0.22089167974882265,0.47843016489807155,0.15173626373626373,0.5466626482835049,0.5479589469487428,0.4875413547554317,0.4106720689962823
3
  -1,-1,0.42992150706436427,0.6257142857142857,0.7212872841444271,0.7891993720565149,0.42992150706436427,0.24818278127867163,0.27803244374672936,0.4031381056643362,0.22089167974882265,0.47843016489807155,0.15173626373626373,0.5466626482835049,0.5479589469487428,0.4875413547554317,0.4106720689962823
4
+ -1,-1,0.4545996860282574,0.6366718995290425,0.7228885400313971,0.8060910518053375,0.4545996860282574,0.2615469007664515,0.2807116692830978,0.40186584654388047,0.2218587127158556,0.4739396227677734,0.1520816326530612,0.5558179032632299,0.5629388627245769,0.49519347906125394,0.41529341414013715
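Each results CSV above appends one row per evaluation run, and the trailing `+` row is this checkpoint. A hedged sketch for comparing the two newest runs (plain pandas; the file name is taken from the section above):

```python
# Hedged sketch: diff the newest evaluation row against the previous one.
import pandas as pd

df = pd.read_csv("NanoBEIR_evaluation_mean_results.csv")
prev, curr = df.iloc[-2], df.iloc[-1]
for col in ("cosine-NDCG@10", "cosine-MRR@10", "cosine-MAP@100"):
    print(f"{col}: {prev[col]:.4f} -> {curr[col]:.4f} ({curr[col] - prev[col]:+.4f})")
```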
README.md CHANGED
@@ -5,232 +5,51 @@ tags:
5
  - feature-extraction
6
  - dense
7
  - generated_from_trainer
8
- - dataset_size:359997
9
  - loss:MultipleNegativesRankingLoss
10
- base_model: sentence-transformers/all-MiniLM-L12-v2
11
  widget:
12
- - source_sentence: When do you use Ms. or Mrs.? Is one for a married woman and one
13
- for one that's not married? Which one is for what?
14
  sentences:
15
- - When do you use Ms. or Mrs.? Is one for a married woman and one for one that's
16
- not married? Which one is for what?
17
- - Nations that do/does otherwise? Which one do I use?
18
- - Why don't bikes have a gear indicator?
19
- - source_sentence: Which ointment is applied to the face of UFC fighters at the commencement
20
- of a bout? What does it do?
21
  sentences:
22
- - How can I save a Snapchat video that others posted?
23
- - Which ointment is applied to the face of UFC fighters at the commencement of a
24
- bout? What does it do?
25
- - How do I get the body of a UFC Fighter?
26
- - source_sentence: Do you love the life you live?
27
  sentences:
28
- - Can I do shoulder and triceps workout on same day? What other combinations like
29
- this can I do?
30
- - Do you love the life you're living?
31
- - Where can you find an online TI-84 calculator?
32
- - source_sentence: Ordered food on Swiggy 3 days ago.After accepting my money, said
33
- no more on Menu! When if ever will I atleast get refund in cr card a/c?
34
  sentences:
35
- - Is getting to the Tel Aviv airport to catch a 5:30 AM flight very expensive?
36
- - How do I die and make it look like an accident?
37
- - Ordered food on Swiggy 3 days ago.After accepting my money, said no more on Menu!
38
- When if ever will I atleast get refund in cr card a/c?
39
- - source_sentence: How do you earn money on Quora?
40
  sentences:
41
- - What is a cheap healthy diet I can keep the same and eat every day?
42
- - What are some things new employees should know going into their first day at Maximus?
43
- - What is the best way to make money on Quora?
44
  pipeline_tag: sentence-similarity
45
  library_name: sentence-transformers
46
- metrics:
47
- - cosine_accuracy@1
48
- - cosine_accuracy@3
49
- - cosine_accuracy@5
50
- - cosine_accuracy@10
51
- - cosine_precision@1
52
- - cosine_precision@3
53
- - cosine_precision@5
54
- - cosine_precision@10
55
- - cosine_recall@1
56
- - cosine_recall@3
57
- - cosine_recall@5
58
- - cosine_recall@10
59
- - cosine_ndcg@10
60
- - cosine_mrr@10
61
- - cosine_map@100
62
- model-index:
63
- - name: SentenceTransformer based on sentence-transformers/all-MiniLM-L12-v2
64
- results:
65
- - task:
66
- type: information-retrieval
67
- name: Information Retrieval
68
- dataset:
69
- name: NanoMSMARCO
70
- type: NanoMSMARCO
71
- metrics:
72
- - type: cosine_accuracy@1
73
- value: 0.3
74
- name: Cosine Accuracy@1
75
- - type: cosine_accuracy@3
76
- value: 0.56
77
- name: Cosine Accuracy@3
78
- - type: cosine_accuracy@5
79
- value: 0.66
80
- name: Cosine Accuracy@5
81
- - type: cosine_accuracy@10
82
- value: 0.8
83
- name: Cosine Accuracy@10
84
- - type: cosine_precision@1
85
- value: 0.3
86
- name: Cosine Precision@1
87
- - type: cosine_precision@3
88
- value: 0.18666666666666665
89
- name: Cosine Precision@3
90
- - type: cosine_precision@5
91
- value: 0.132
92
- name: Cosine Precision@5
93
- - type: cosine_precision@10
94
- value: 0.08
95
- name: Cosine Precision@10
96
- - type: cosine_recall@1
97
- value: 0.3
98
- name: Cosine Recall@1
99
- - type: cosine_recall@3
100
- value: 0.56
101
- name: Cosine Recall@3
102
- - type: cosine_recall@5
103
- value: 0.66
104
- name: Cosine Recall@5
105
- - type: cosine_recall@10
106
- value: 0.8
107
- name: Cosine Recall@10
108
- - type: cosine_ndcg@10
109
- value: 0.5355647548788993
110
- name: Cosine Ndcg@10
111
- - type: cosine_mrr@10
112
- value: 0.4522142857142857
113
- name: Cosine Mrr@10
114
- - type: cosine_map@100
115
- value: 0.4614012040294735
116
- name: Cosine Map@100
117
- - task:
118
- type: information-retrieval
119
- name: Information Retrieval
120
- dataset:
121
- name: NanoNQ
122
- type: NanoNQ
123
- metrics:
124
- - type: cosine_accuracy@1
125
- value: 0.3
126
- name: Cosine Accuracy@1
127
- - type: cosine_accuracy@3
128
- value: 0.54
129
- name: Cosine Accuracy@3
130
- - type: cosine_accuracy@5
131
- value: 0.62
132
- name: Cosine Accuracy@5
133
- - type: cosine_accuracy@10
134
- value: 0.7
135
- name: Cosine Accuracy@10
136
- - type: cosine_precision@1
137
- value: 0.3
138
- name: Cosine Precision@1
139
- - type: cosine_precision@3
140
- value: 0.18
141
- name: Cosine Precision@3
142
- - type: cosine_precision@5
143
- value: 0.132
144
- name: Cosine Precision@5
145
- - type: cosine_precision@10
146
- value: 0.07400000000000001
147
- name: Cosine Precision@10
148
- - type: cosine_recall@1
149
- value: 0.29
150
- name: Cosine Recall@1
151
- - type: cosine_recall@3
152
- value: 0.51
153
- name: Cosine Recall@3
154
- - type: cosine_recall@5
155
- value: 0.6
156
- name: Cosine Recall@5
157
- - type: cosine_recall@10
158
- value: 0.68
159
- name: Cosine Recall@10
160
- - type: cosine_ndcg@10
161
- value: 0.48789548101925573
162
- name: Cosine Ndcg@10
163
- - type: cosine_mrr@10
164
- value: 0.43224603174603166
165
- name: Cosine Mrr@10
166
- - type: cosine_map@100
167
- value: 0.4304090341200182
168
- name: Cosine Map@100
169
- - task:
170
- type: nano-beir
171
- name: Nano BEIR
172
- dataset:
173
- name: NanoBEIR mean
174
- type: NanoBEIR_mean
175
- metrics:
176
- - type: cosine_accuracy@1
177
- value: 0.3
178
- name: Cosine Accuracy@1
179
- - type: cosine_accuracy@3
180
- value: 0.55
181
- name: Cosine Accuracy@3
182
- - type: cosine_accuracy@5
183
- value: 0.64
184
- name: Cosine Accuracy@5
185
- - type: cosine_accuracy@10
186
- value: 0.75
187
- name: Cosine Accuracy@10
188
- - type: cosine_precision@1
189
- value: 0.3
190
- name: Cosine Precision@1
191
- - type: cosine_precision@3
192
- value: 0.18333333333333332
193
- name: Cosine Precision@3
194
- - type: cosine_precision@5
195
- value: 0.132
196
- name: Cosine Precision@5
197
- - type: cosine_precision@10
198
- value: 0.07700000000000001
199
- name: Cosine Precision@10
200
- - type: cosine_recall@1
201
- value: 0.295
202
- name: Cosine Recall@1
203
- - type: cosine_recall@3
204
- value: 0.535
205
- name: Cosine Recall@3
206
- - type: cosine_recall@5
207
- value: 0.63
208
- name: Cosine Recall@5
209
- - type: cosine_recall@10
210
- value: 0.74
211
- name: Cosine Recall@10
212
- - type: cosine_ndcg@10
213
- value: 0.5117301179490775
214
- name: Cosine Ndcg@10
215
- - type: cosine_mrr@10
216
- value: 0.4422301587301587
217
- name: Cosine Mrr@10
218
- - type: cosine_map@100
219
- value: 0.44590511907474584
220
- name: Cosine Map@100
221
  ---
222
 
223
- # SentenceTransformer based on sentence-transformers/all-MiniLM-L12-v2
224
 
225
- This is a [sentence-transformers](https://www.SBERT.net) model finetuned from [sentence-transformers/all-MiniLM-L12-v2](https://huggingface.co/sentence-transformers/all-MiniLM-L12-v2). It maps sentences & paragraphs to a 384-dimensional dense vector space and can be used for semantic textual similarity, semantic search, paraphrase mining, text classification, clustering, and more.
226
 
227
  ## Model Details
228
 
229
  ### Model Description
230
  - **Model Type:** Sentence Transformer
231
- - **Base model:** [sentence-transformers/all-MiniLM-L12-v2](https://huggingface.co/sentence-transformers/all-MiniLM-L12-v2) <!-- at revision 936af83a2ecce5fe87a09109ff5cbcefe073173a -->
232
  - **Maximum Sequence Length:** 128 tokens
233
- - **Output Dimensionality:** 384 dimensions
234
  - **Similarity Function:** Cosine Similarity
235
  <!-- - **Training Dataset:** Unknown -->
236
  <!-- - **Language:** Unknown -->
@@ -247,8 +66,7 @@ This is a [sentence-transformers](https://www.SBERT.net) model finetuned from [s
247
  ```
248
  SentenceTransformer(
249
  (0): Transformer({'max_seq_length': 128, 'do_lower_case': False, 'architecture': 'BertModel'})
250
- (1): Pooling({'word_embedding_dimension': 384, 'pooling_mode_cls_token': False, 'pooling_mode_mean_tokens': True, 'pooling_mode_max_tokens': False, 'pooling_mode_mean_sqrt_len_tokens': False, 'pooling_mode_weightedmean_tokens': False, 'pooling_mode_lasttoken': False, 'include_prompt': True})
251
- (2): Normalize()
252
  )
253
  ```
254
 
@@ -267,23 +85,23 @@ Then you can load this model and run inference.
267
  from sentence_transformers import SentenceTransformer
268
 
269
  # Download from the 🤗 Hub
270
- model = SentenceTransformer("redis/model-a-baseline")
271
  # Run inference
272
  sentences = [
273
- 'How do you earn money on Quora?',
274
- 'What is the best way to make money on Quora?',
275
- 'What are some things new employees should know going into their first day at Maximus?',
276
  ]
277
  embeddings = model.encode(sentences)
278
  print(embeddings.shape)
279
- # [3, 384]
280
 
281
  # Get the similarity scores for the embeddings
282
  similarities = model.similarity(embeddings, embeddings)
283
  print(similarities)
284
- # tensor([[ 1.0000, 0.9933, -0.0070],
285
- # [ 0.9933, 1.0000, 0.0008],
286
- # [-0.0070, 0.0008, 0.9999]])
287
  ```
288
 
289
  <!--
@@ -310,65 +128,6 @@ You can finetune this model on your own dataset.
310
  *List how the model may foreseeably be misused and address what users ought not to do with the model.*
311
  -->
312
 
313
- ## Evaluation
314
-
315
- ### Metrics
316
-
317
- #### Information Retrieval
318
-
319
- * Datasets: `NanoMSMARCO` and `NanoNQ`
320
- * Evaluated with [<code>InformationRetrievalEvaluator</code>](https://sbert.net/docs/package_reference/sentence_transformer/evaluation.html#sentence_transformers.evaluation.InformationRetrievalEvaluator)
321
-
322
- | Metric | NanoMSMARCO | NanoNQ |
323
- |:--------------------|:------------|:-----------|
324
- | cosine_accuracy@1 | 0.3 | 0.3 |
325
- | cosine_accuracy@3 | 0.56 | 0.54 |
326
- | cosine_accuracy@5 | 0.66 | 0.62 |
327
- | cosine_accuracy@10 | 0.8 | 0.7 |
328
- | cosine_precision@1 | 0.3 | 0.3 |
329
- | cosine_precision@3 | 0.1867 | 0.18 |
330
- | cosine_precision@5 | 0.132 | 0.132 |
331
- | cosine_precision@10 | 0.08 | 0.074 |
332
- | cosine_recall@1 | 0.3 | 0.29 |
333
- | cosine_recall@3 | 0.56 | 0.51 |
334
- | cosine_recall@5 | 0.66 | 0.6 |
335
- | cosine_recall@10 | 0.8 | 0.68 |
336
- | **cosine_ndcg@10** | **0.5356** | **0.4879** |
337
- | cosine_mrr@10 | 0.4522 | 0.4322 |
338
- | cosine_map@100 | 0.4614 | 0.4304 |
339
-
340
- #### Nano BEIR
341
-
342
- * Dataset: `NanoBEIR_mean`
343
- * Evaluated with [<code>NanoBEIREvaluator</code>](https://sbert.net/docs/package_reference/sentence_transformer/evaluation.html#sentence_transformers.evaluation.NanoBEIREvaluator) with these parameters:
344
- ```json
345
- {
346
- "dataset_names": [
347
- "msmarco",
348
- "nq"
349
- ],
350
- "dataset_id": "lightonai/NanoBEIR-en"
351
- }
352
- ```
353
-
354
- | Metric | Value |
355
- |:--------------------|:-----------|
356
- | cosine_accuracy@1 | 0.3 |
357
- | cosine_accuracy@3 | 0.55 |
358
- | cosine_accuracy@5 | 0.64 |
359
- | cosine_accuracy@10 | 0.75 |
360
- | cosine_precision@1 | 0.3 |
361
- | cosine_precision@3 | 0.1833 |
362
- | cosine_precision@5 | 0.132 |
363
- | cosine_precision@10 | 0.077 |
364
- | cosine_recall@1 | 0.295 |
365
- | cosine_recall@3 | 0.535 |
366
- | cosine_recall@5 | 0.63 |
367
- | cosine_recall@10 | 0.74 |
368
- | **cosine_ndcg@10** | **0.5117** |
369
- | cosine_mrr@10 | 0.4422 |
370
- | cosine_map@100 | 0.4459 |
371
-
372
  <!--
373
  ## Bias, Risks and Limitations
374
 
@@ -387,49 +146,23 @@ You can finetune this model on your own dataset.
387
 
388
  #### Unnamed Dataset
389
 
390
- * Size: 359,997 training samples
391
- * Columns: <code>anchor</code>, <code>positive</code>, and <code>negative</code>
392
- * Approximate statistics based on the first 1000 samples:
393
- | | anchor | positive | negative |
394
- |:--------|:----------------------------------------------------------------------------------|:----------------------------------------------------------------------------------|:-----------------------------------------------------------------------------------|
395
- | type | string | string | string |
396
- | details | <ul><li>min: 4 tokens</li><li>mean: 15.46 tokens</li><li>max: 49 tokens</li></ul> | <ul><li>min: 4 tokens</li><li>mean: 15.52 tokens</li><li>max: 49 tokens</li></ul> | <ul><li>min: 5 tokens</li><li>mean: 16.99 tokens</li><li>max: 128 tokens</li></ul> |
397
- * Samples:
398
- | anchor | positive | negative |
399
- |:--------------------------------------------------------------------------------|:-----------------------------------------------------------------------------------------------------|:-------------------------------------------------------------------------------------------------------------------------|
400
- | <code>Shall I upgrade my iPhone 5s to iOS 10 final version?</code> | <code>Should I upgrade an iPhone 5s to iOS 10?</code> | <code>Whether extension of CA-articleship is to be served at same firm/company?</code> |
401
- | <code>Is Donald Trump really going to be the president of United States?</code> | <code>Do you think Donald Trump could conceivably be the next President of the United States?</code> | <code>Since solid carbon dioxide is dry ice and incredibly cold, why doesn't it have an effect on global warming?</code> |
402
- | <code>What are real tips to improve work life balance?</code> | <code>What are the best ways to create a work life balance?</code> | <code>How do you open a briefcase combination lock without the combination?</code> |
403
- * Loss: [<code>MultipleNegativesRankingLoss</code>](https://sbert.net/docs/package_reference/sentence_transformer/losses.html#multiplenegativesrankingloss) with these parameters:
404
- ```json
405
- {
406
- "scale": 7.0,
407
- "similarity_fct": "cos_sim",
408
- "gather_across_devices": false
409
- }
410
- ```
411
-
412
- ### Evaluation Dataset
413
-
414
- #### Unnamed Dataset
415
-
416
- * Size: 40,000 evaluation samples
417
- * Columns: <code>anchor</code>, <code>positive</code>, and <code>negative</code>
418
  * Approximate statistics based on the first 1000 samples:
419
- | | anchor | positive | negative |
420
- |:--------|:----------------------------------------------------------------------------------|:----------------------------------------------------------------------------------|:----------------------------------------------------------------------------------|
421
- | type | string | string | string |
422
- | details | <ul><li>min: 6 tokens</li><li>mean: 15.71 tokens</li><li>max: 65 tokens</li></ul> | <ul><li>min: 6 tokens</li><li>mean: 15.79 tokens</li><li>max: 65 tokens</li></ul> | <ul><li>min: 6 tokens</li><li>mean: 16.97 tokens</li><li>max: 78 tokens</li></ul> |
423
  * Samples:
424
- | anchor | positive | negative |
425
- |:------------------------------------------------------------------------------------------------------------------|:------------------------------------------------------------------------------------------------------------------|:------------------------------------------------------------------------------------------------------------------------------------------|
426
- | <code>Why were feathered dinosaur fossils only found in the last 20 years?</code> | <code>Why were feathered dinosaur fossils only found in the last 20 years?</code> | <code>Why are only few people aware that many dinosaurs had feathers?</code> |
427
- | <code>If FOX News is the conservative news station, which cable news network is for liberals/progressives?</code> | <code>If FOX News is the conservative news station, which cable news network is for liberals/progressives?</code> | <code>How much did Fox News and conservative leaning media networks stoke the anger that contributed to Donald Trump's popularity?</code> |
428
- | <code>How can guys last longer during sex?</code> | <code>How do I last longer in sex?</code> | <code>What is a permanent solution for rough and puffy hair?</code> |
429
  * Loss: [<code>MultipleNegativesRankingLoss</code>](https://sbert.net/docs/package_reference/sentence_transformer/losses.html#multiplenegativesrankingloss) with these parameters:
430
  ```json
431
  {
432
- "scale": 7.0,
433
  "similarity_fct": "cos_sim",
434
  "gather_across_devices": false
435
  }
@@ -438,49 +171,36 @@ You can finetune this model on your own dataset.
438
  ### Training Hyperparameters
439
  #### Non-Default Hyperparameters
440
 
441
- - `eval_strategy`: steps
442
- - `per_device_train_batch_size`: 128
443
- - `per_device_eval_batch_size`: 128
444
- - `learning_rate`: 2e-05
445
- - `weight_decay`: 0.0001
446
- - `max_steps`: 5000
447
- - `warmup_ratio`: 0.1
448
  - `fp16`: True
449
- - `dataloader_drop_last`: True
450
- - `dataloader_num_workers`: 1
451
- - `dataloader_prefetch_factor`: 1
452
- - `load_best_model_at_end`: True
453
- - `optim`: adamw_torch
454
- - `ddp_find_unused_parameters`: False
455
- - `push_to_hub`: True
456
- - `hub_model_id`: redis/model-a-baseline
457
- - `eval_on_start`: True
458
 
459
  #### All Hyperparameters
460
  <details><summary>Click to expand</summary>
461
 
462
  - `overwrite_output_dir`: False
463
  - `do_predict`: False
464
- - `eval_strategy`: steps
465
  - `prediction_loss_only`: True
466
- - `per_device_train_batch_size`: 128
467
- - `per_device_eval_batch_size`: 128
468
  - `per_gpu_train_batch_size`: None
469
  - `per_gpu_eval_batch_size`: None
470
  - `gradient_accumulation_steps`: 1
471
  - `eval_accumulation_steps`: None
472
  - `torch_empty_cache_steps`: None
473
- - `learning_rate`: 2e-05
474
- - `weight_decay`: 0.0001
475
  - `adam_beta1`: 0.9
476
  - `adam_beta2`: 0.999
477
  - `adam_epsilon`: 1e-08
478
- - `max_grad_norm`: 1.0
479
- - `num_train_epochs`: 3.0
480
- - `max_steps`: 5000
481
  - `lr_scheduler_type`: linear
482
  - `lr_scheduler_kwargs`: {}
483
- - `warmup_ratio`: 0.1
484
  - `warmup_steps`: 0
485
  - `log_level`: passive
486
  - `log_level_replica`: warning
@@ -508,14 +228,14 @@ You can finetune this model on your own dataset.
508
  - `tpu_num_cores`: None
509
  - `tpu_metrics_debug`: False
510
  - `debug`: []
511
- - `dataloader_drop_last`: True
512
- - `dataloader_num_workers`: 1
513
- - `dataloader_prefetch_factor`: 1
514
  - `past_index`: -1
515
  - `disable_tqdm`: False
516
  - `remove_unused_columns`: True
517
  - `label_names`: None
518
- - `load_best_model_at_end`: True
519
  - `ignore_data_skip`: False
520
  - `fsdp`: []
521
  - `fsdp_min_num_params`: 0
@@ -525,23 +245,23 @@ You can finetune this model on your own dataset.
525
  - `parallelism_config`: None
526
  - `deepspeed`: None
527
  - `label_smoothing_factor`: 0.0
528
- - `optim`: adamw_torch
529
  - `optim_args`: None
530
  - `adafactor`: False
531
  - `group_by_length`: False
532
  - `length_column_name`: length
533
  - `project`: huggingface
534
  - `trackio_space_id`: trackio
535
- - `ddp_find_unused_parameters`: False
536
  - `ddp_bucket_cap_mb`: None
537
  - `ddp_broadcast_buffers`: False
538
  - `dataloader_pin_memory`: True
539
  - `dataloader_persistent_workers`: False
540
  - `skip_memory_metrics`: True
541
  - `use_legacy_prediction_loop`: False
542
- - `push_to_hub`: True
543
  - `resume_from_checkpoint`: None
544
- - `hub_model_id`: redis/model-a-baseline
545
  - `hub_strategy`: every_save
546
  - `hub_private_repo`: None
547
  - `hub_always_push`: False
@@ -568,43 +288,31 @@ You can finetune this model on your own dataset.
568
  - `neftune_noise_alpha`: None
569
  - `optim_target_modules`: None
570
  - `batch_eval_metrics`: False
571
- - `eval_on_start`: True
572
  - `use_liger_kernel`: False
573
  - `liger_kernel_config`: None
574
  - `eval_use_gather_object`: False
575
  - `average_tokens_across_devices`: True
576
  - `prompts`: None
577
  - `batch_sampler`: batch_sampler
578
- - `multi_dataset_batch_sampler`: proportional
579
  - `router_mapping`: {}
580
  - `learning_rate_mapping`: {}
581
 
582
  </details>
583
 
584
  ### Training Logs
585
- | Epoch | Step | Training Loss | Validation Loss | NanoMSMARCO_cosine_ndcg@10 | NanoNQ_cosine_ndcg@10 | NanoBEIR_mean_cosine_ndcg@10 |
586
- |:------:|:----:|:-------------:|:---------------:|:--------------------------:|:---------------------:|:----------------------------:|
587
- | 0 | 0 | - | 0.6041 | 0.5887 | 0.5786 | 0.5836 |
588
- | 0.0889 | 250 | 0.6046 | 0.4244 | 0.5682 | 0.5422 | 0.5552 |
589
- | 0.1778 | 500 | 0.526 | 0.4117 | 0.6079 | 0.5506 | 0.5793 |
590
- | 0.2667 | 750 | 0.5052 | 0.4022 | 0.5708 | 0.5352 | 0.5530 |
591
- | 0.3556 | 1000 | 0.4909 | 0.3970 | 0.5908 | 0.5296 | 0.5602 |
592
- | 0.4445 | 1250 | 0.4802 | 0.3937 | 0.5650 | 0.4936 | 0.5293 |
593
- | 0.5334 | 1500 | 0.4791 | 0.3902 | 0.5622 | 0.5014 | 0.5318 |
594
- | 0.6223 | 1750 | 0.4735 | 0.3875 | 0.5640 | 0.5174 | 0.5407 |
595
- | 0.7112 | 2000 | 0.4671 | 0.3853 | 0.5550 | 0.5179 | 0.5364 |
596
- | 0.8001 | 2250 | 0.4647 | 0.3835 | 0.5552 | 0.5172 | 0.5362 |
597
- | 0.8890 | 2500 | 0.4623 | 0.3818 | 0.5488 | 0.5079 | 0.5284 |
598
- | 0.9780 | 2750 | 0.4599 | 0.3805 | 0.5406 | 0.4908 | 0.5157 |
599
- | 1.0669 | 3000 | 0.4518 | 0.3785 | 0.5353 | 0.4900 | 0.5126 |
600
- | 1.1558 | 3250 | 0.4504 | 0.3783 | 0.5397 | 0.4879 | 0.5138 |
601
- | 1.2447 | 3500 | 0.4476 | 0.3771 | 0.5425 | 0.4907 | 0.5166 |
602
- | 1.3336 | 3750 | 0.4466 | 0.3769 | 0.5357 | 0.4954 | 0.5155 |
603
- | 1.4225 | 4000 | 0.4436 | 0.3759 | 0.5345 | 0.4882 | 0.5113 |
604
- | 1.5114 | 4250 | 0.4435 | 0.3754 | 0.5374 | 0.4928 | 0.5151 |
605
- | 1.6003 | 4500 | 0.4437 | 0.3754 | 0.5386 | 0.4991 | 0.5188 |
606
- | 1.6892 | 4750 | 0.4424 | 0.3750 | 0.5348 | 0.4891 | 0.5119 |
607
- | 1.7781 | 5000 | 0.4442 | 0.3748 | 0.5356 | 0.4879 | 0.5117 |
608
 
609
 
610
  ### Framework Versions
@@ -613,7 +321,7 @@ You can finetune this model on your own dataset.
613
  - Transformers: 4.57.3
614
  - PyTorch: 2.9.1+cu128
615
  - Accelerate: 1.12.0
616
- - Datasets: 2.21.0
617
  - Tokenizers: 0.22.1
618
 
619
  ## Citation
 
5
  - feature-extraction
6
  - dense
7
  - generated_from_trainer
8
+ - dataset_size:100000
9
  - loss:MultipleNegativesRankingLoss
10
+ base_model: prajjwal1/bert-small
11
  widget:
12
+ - source_sentence: How do I polish my English skills?
 
13
  sentences:
14
+ - How can we polish English skills?
15
+ - Why should I move to Israel as a Jew?
16
+ - What are vitamins responsible for?
17
+ - source_sentence: Can I use the Kozuka Gothic Pro font as a font-face on my web site?
18
  sentences:
19
+ - Can I use the Kozuka Gothic Pro font as a font-face on my web site?
20
+ - Why are Google, Facebook, YouTube and other social networking sites banned in
21
+ China?
22
+ - What font is used in Bloomberg Terminal?
23
+ - source_sentence: Is Quora the best Q&A site?
24
  sentences:
25
+ - What was the best Quora question ever?
26
+ - Is Quora the best inquiry site?
27
+ - Where do I buy Oway hair products online?
28
+ - source_sentence: How can I customize my walking speed on Google Maps?
29
  sentences:
30
+ - How do I bring back Google maps icon in my home screen?
31
+ - How many pages are there in all the Harry Potter books combined?
32
+ - How can I customize my walking speed on Google Maps?
33
+ - source_sentence: DId something exist before the Big Bang?
 
34
  sentences:
35
+ - How can I improve my memory problem?
36
+ - Where can I buy Fairy Tail Manga?
37
+ - Is there a scientific name for what existed before the Big Bang?
38
  pipeline_tag: sentence-similarity
39
  library_name: sentence-transformers
40
  ---
41
 
42
+ # SentenceTransformer based on prajjwal1/bert-small
43
 
44
+ This is a [sentence-transformers](https://www.SBERT.net) model finetuned from [prajjwal1/bert-small](https://huggingface.co/prajjwal1/bert-small). It maps sentences & paragraphs to a 512-dimensional dense vector space and can be used for semantic textual similarity, semantic search, paraphrase mining, text classification, clustering, and more.
45
 
46
  ## Model Details
47
 
48
  ### Model Description
49
  - **Model Type:** Sentence Transformer
50
+ - **Base model:** [prajjwal1/bert-small](https://huggingface.co/prajjwal1/bert-small) <!-- at revision 0ec5f86f27c1a77d704439db5e01c307ea11b9d4 -->
51
  - **Maximum Sequence Length:** 128 tokens
52
+ - **Output Dimensionality:** 512 dimensions
53
  - **Similarity Function:** Cosine Similarity
54
  <!-- - **Training Dataset:** Unknown -->
55
  <!-- - **Language:** Unknown -->
 
66
  ```
67
  SentenceTransformer(
68
  (0): Transformer({'max_seq_length': 128, 'do_lower_case': False, 'architecture': 'BertModel'})
69
+ (1): Pooling({'word_embedding_dimension': 512, 'pooling_mode_cls_token': True, 'pooling_mode_mean_tokens': False, 'pooling_mode_max_tokens': False, 'pooling_mode_mean_sqrt_len_tokens': False, 'pooling_mode_weightedmean_tokens': False, 'pooling_mode_lasttoken': False, 'include_prompt': True})
70
  )
71
  ```
72
 
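Note the previous architecture ended in a `Normalize()` module, which is gone in this revision, so embeddings are no longer unit-length by construction. `model.similarity` defaults to cosine either way, but dot-product pipelines would need to normalize explicitly; a hedged sketch using `encode`'s built-in flag:

```python
# Hedged sketch: this revision has no Normalize() module, so request
# unit-length embeddings explicitly when downstream code expects them.
from sentence_transformers import SentenceTransformer

model = SentenceTransformer("sentence_transformers_model_id")  # placeholder id from the card
embeddings = model.encode(["example sentence"], normalize_embeddings=True)
```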
 
85
  from sentence_transformers import SentenceTransformer
86
 
87
  # Download from the 🤗 Hub
88
+ model = SentenceTransformer("sentence_transformers_model_id")
89
  # Run inference
90
  sentences = [
91
+ 'DId something exist before the Big Bang?',
92
+ 'Is there a scientific name for what existed before the Big Bang?',
93
+ 'Where can I buy Fairy Tail Manga?',
94
  ]
95
  embeddings = model.encode(sentences)
96
  print(embeddings.shape)
97
+ # [3, 512]
98
 
99
  # Get the similarity scores for the embeddings
100
  similarities = model.similarity(embeddings, embeddings)
101
  print(similarities)
102
+ # tensor([[ 1.0000, 0.7596, -0.0398],
103
+ # [ 0.7596, 1.0000, -0.0308],
104
+ # [-0.0398, -0.0308, 1.0000]])
105
  ```
106
 
107
  <!--
 
128
  *List how the model may foreseeably be misused and address what users ought not to do with the model.*
129
  -->
130
131
  <!--
132
  ## Bias, Risks and Limitations
133
 
 
146
 
147
  #### Unnamed Dataset
148
 
149
+ * Size: 100,000 training samples
150
+ * Columns: <code>sentence_0</code>, <code>sentence_1</code>, and <code>sentence_2</code>
151
  * Approximate statistics based on the first 1000 samples:
152
+ | | sentence_0 | sentence_1 | sentence_2 |
153
+ |:--------|:----------------------------------------------------------------------------------|:---------------------------------------------------------------------------------|:-----------------------------------------------------------------------------------|
154
+ | type | string | string | string |
155
+ | details | <ul><li>min: 3 tokens</li><li>mean: 15.53 tokens</li><li>max: 59 tokens</li></ul> | <ul><li>min: 3 tokens</li><li>mean: 15.5 tokens</li><li>max: 59 tokens</li></ul> | <ul><li>min: 6 tokens</li><li>mean: 16.87 tokens</li><li>max: 128 tokens</li></ul> |
156
  * Samples:
157
+ | sentence_0 | sentence_1 | sentence_2 |
158
+ |:----------------------------------------------------------------------------------------|:----------------------------------------------------------------------------------------|:-----------------------------------------------------------------------|
159
+ | <code>Is there visitor entry facility in Jaipur airport. How much is the ticket?</code> | <code>Is there visitor entry facility in Jaipur airport. How much is the ticket?</code> | <code>How much is the airport tax in bogota?</code> |
160
+ | <code>Which concept is more important: good planning or hard work?</code> | <code>Which concept is more important: good planning or hard work?</code> | <code>What is important in life: luck or hard work?</code> |
161
+ | <code>What is the most efficient way to make money?</code> | <code>How can I make my money make money?</code> | <code>What can one learn about Quantum Mechanics in 10 minutes?</code> |
162
  * Loss: [<code>MultipleNegativesRankingLoss</code>](https://sbert.net/docs/package_reference/sentence_transformer/losses.html#multiplenegativesrankingloss) with these parameters:
163
  ```json
164
  {
165
+ "scale": 20.0,
166
  "similarity_fct": "cos_sim",
167
  "gather_across_devices": false
168
  }
 
171
  ### Training Hyperparameters
172
  #### Non-Default Hyperparameters
173
 
174
+ - `per_device_train_batch_size`: 64
175
+ - `per_device_eval_batch_size`: 64
 
176
  - `fp16`: True
177
+ - `multi_dataset_batch_sampler`: round_robin
178
 
179
  #### All Hyperparameters
180
  <details><summary>Click to expand</summary>
181
 
182
  - `overwrite_output_dir`: False
183
  - `do_predict`: False
184
+ - `eval_strategy`: no
185
  - `prediction_loss_only`: True
186
+ - `per_device_train_batch_size`: 64
187
+ - `per_device_eval_batch_size`: 64
188
  - `per_gpu_train_batch_size`: None
189
  - `per_gpu_eval_batch_size`: None
190
  - `gradient_accumulation_steps`: 1
191
  - `eval_accumulation_steps`: None
192
  - `torch_empty_cache_steps`: None
193
+ - `learning_rate`: 5e-05
194
+ - `weight_decay`: 0.0
195
  - `adam_beta1`: 0.9
196
  - `adam_beta2`: 0.999
197
  - `adam_epsilon`: 1e-08
198
+ - `max_grad_norm`: 1
199
+ - `num_train_epochs`: 3
200
+ - `max_steps`: -1
201
  - `lr_scheduler_type`: linear
202
  - `lr_scheduler_kwargs`: {}
203
+ - `warmup_ratio`: 0.0
204
  - `warmup_steps`: 0
205
  - `log_level`: passive
206
  - `log_level_replica`: warning
 
228
  - `tpu_num_cores`: None
229
  - `tpu_metrics_debug`: False
230
  - `debug`: []
231
+ - `dataloader_drop_last`: False
232
+ - `dataloader_num_workers`: 0
233
+ - `dataloader_prefetch_factor`: None
234
  - `past_index`: -1
235
  - `disable_tqdm`: False
236
  - `remove_unused_columns`: True
237
  - `label_names`: None
238
+ - `load_best_model_at_end`: False
239
  - `ignore_data_skip`: False
240
  - `fsdp`: []
241
  - `fsdp_min_num_params`: 0
 
245
  - `parallelism_config`: None
246
  - `deepspeed`: None
247
  - `label_smoothing_factor`: 0.0
248
+ - `optim`: adamw_torch_fused
249
  - `optim_args`: None
250
  - `adafactor`: False
251
  - `group_by_length`: False
252
  - `length_column_name`: length
253
  - `project`: huggingface
254
  - `trackio_space_id`: trackio
255
+ - `ddp_find_unused_parameters`: None
256
  - `ddp_bucket_cap_mb`: None
257
  - `ddp_broadcast_buffers`: False
258
  - `dataloader_pin_memory`: True
259
  - `dataloader_persistent_workers`: False
260
  - `skip_memory_metrics`: True
261
  - `use_legacy_prediction_loop`: False
262
+ - `push_to_hub`: False
263
  - `resume_from_checkpoint`: None
264
+ - `hub_model_id`: None
265
  - `hub_strategy`: every_save
266
  - `hub_private_repo`: None
267
  - `hub_always_push`: False
 
288
  - `neftune_noise_alpha`: None
289
  - `optim_target_modules`: None
290
  - `batch_eval_metrics`: False
291
+ - `eval_on_start`: False
292
  - `use_liger_kernel`: False
293
  - `liger_kernel_config`: None
294
  - `eval_use_gather_object`: False
295
  - `average_tokens_across_devices`: True
296
  - `prompts`: None
297
  - `batch_sampler`: batch_sampler
298
+ - `multi_dataset_batch_sampler`: round_robin
299
  - `router_mapping`: {}
300
  - `learning_rate_mapping`: {}
301
 
302
  </details>
303
 
304
  ### Training Logs
305
+ | Epoch | Step | Training Loss |
306
+ |:------:|:----:|:-------------:|
307
+ | 0.3199 | 500 | 0.2284 |
308
+ | 0.6398 | 1000 | 0.0571 |
309
+ | 0.9597 | 1500 | 0.0486 |
310
+ | 1.2796 | 2000 | 0.0378 |
311
+ | 1.5995 | 2500 | 0.0367 |
312
+ | 1.9194 | 3000 | 0.0338 |
313
+ | 2.2393 | 3500 | 0.0327 |
314
+ | 2.5592 | 4000 | 0.0285 |
315
+ | 2.8791 | 4500 | 0.0285 |
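For a quick look at convergence, the table can be plotted directly; the loss drops sharply in the first epoch and then plateaus (a sketch, assuming matplotlib is available):

```python
import matplotlib.pyplot as plt

# Values copied from the Training Logs table above.
steps = [500, 1000, 1500, 2000, 2500, 3000, 3500, 4000, 4500]
loss = [0.2284, 0.0571, 0.0486, 0.0378, 0.0367, 0.0338, 0.0327, 0.0285, 0.0285]

plt.plot(steps, loss, marker="o")
plt.xlabel("Step")
plt.ylabel("Training loss")
plt.title("MultipleNegativesRankingLoss, ~3 epochs")
plt.show()
```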
316
 
317
 
318
  ### Framework Versions
 
321
  - Transformers: 4.57.3
322
  - PyTorch: 2.9.1+cu128
323
  - Accelerate: 1.12.0
324
+ - Datasets: 4.4.2
325
  - Tokenizers: 0.22.1
326
 
327
  ## Citation
config.json CHANGED
@@ -5,7 +5,6 @@
5
  "attention_probs_dropout_prob": 0.1,
6
  "classifier_dropout": null,
7
  "dtype": "float32",
8
- "gradient_checkpointing": false,
9
  "hidden_act": "gelu",
10
  "hidden_dropout_prob": 0.1,
11
  "hidden_size": 384,
 
5
  "attention_probs_dropout_prob": 0.1,
6
  "classifier_dropout": null,
7
  "dtype": "float32",
 
8
  "hidden_act": "gelu",
9
  "hidden_dropout_prob": 0.1,
10
  "hidden_size": 384,
config_sentence_transformers.json CHANGED
@@ -1,10 +1,10 @@
1
  {
 
2
  "__version__": {
3
  "sentence_transformers": "5.2.0",
4
  "transformers": "4.57.3",
5
  "pytorch": "2.9.1+cu128"
6
  },
7
- "model_type": "SentenceTransformer",
8
  "prompts": {
9
  "query": "",
10
  "document": ""
 
1
  {
2
+ "model_type": "SentenceTransformer",
3
  "__version__": {
4
  "sentence_transformers": "5.2.0",
5
  "transformers": "4.57.3",
6
  "pytorch": "2.9.1+cu128"
7
  },
 
8
  "prompts": {
9
  "query": "",
10
  "document": ""
eval/Information-Retrieval_evaluation_NanoMSMARCO_results.csv CHANGED
@@ -62,3 +62,24 @@ epoch,steps,cosine-Accuracy@1,cosine-Accuracy@3,cosine-Accuracy@5,cosine-Accurac
62
  1.600284495021337,4500,0.3,0.56,0.66,0.8,0.3,0.3,0.18666666666666665,0.56,0.132,0.66,0.08,0.8,0.45607142857142846,0.5386023478916117,0.46551784920060785
63
  1.689189189189189,4750,0.3,0.56,0.66,0.8,0.3,0.3,0.18666666666666665,0.56,0.132,0.66,0.08,0.8,0.4514603174603174,0.5348185801809706,0.46079536150124384
64
  1.7780938833570412,5000,0.3,0.56,0.66,0.8,0.3,0.3,0.18666666666666665,0.56,0.132,0.66,0.08,0.8,0.4522142857142857,0.5355647548788993,0.4614012040294735
62
  1.600284495021337,4500,0.3,0.56,0.66,0.8,0.3,0.3,0.18666666666666665,0.56,0.132,0.66,0.08,0.8,0.45607142857142846,0.5386023478916117,0.46551784920060785
63
  1.689189189189189,4750,0.3,0.56,0.66,0.8,0.3,0.3,0.18666666666666665,0.56,0.132,0.66,0.08,0.8,0.4514603174603174,0.5348185801809706,0.46079536150124384
64
  1.7780938833570412,5000,0.3,0.56,0.66,0.8,0.3,0.3,0.18666666666666665,0.56,0.132,0.66,0.08,0.8,0.4522142857142857,0.5355647548788993,0.4614012040294735
65
+ 0,0,0.44,0.66,0.72,0.8,0.44,0.44,0.22,0.66,0.14400000000000002,0.72,0.08,0.8,0.5702460317460318,0.6259279298239366,0.582094782035277
66
+ 0.08890469416785206,250,0.34,0.64,0.68,0.74,0.34,0.34,0.21333333333333332,0.64,0.136,0.68,0.07400000000000001,0.74,0.4901904761904761,0.5518217997585461,0.5028933975941648
67
+ 0.17780938833570412,500,0.4,0.64,0.7,0.76,0.4,0.4,0.21333333333333335,0.64,0.14,0.7,0.07600000000000001,0.76,0.5331666666666667,0.5890890591305245,0.5445426099548939
68
+ 0.26671408250355616,750,0.34,0.6,0.72,0.76,0.34,0.34,0.19999999999999996,0.6,0.14400000000000002,0.72,0.07600000000000001,0.76,0.49199999999999994,0.5578563077041967,0.5039374750624472
69
+ 0.35561877667140823,1000,0.36,0.58,0.68,0.78,0.36,0.36,0.19333333333333333,0.58,0.136,0.68,0.07800000000000001,0.78,0.49224603174603176,0.5610805034589428,0.5030704016084822
70
+ 0.4445234708392603,1250,0.3,0.6,0.68,0.78,0.3,0.3,0.19999999999999996,0.6,0.136,0.68,0.07800000000000001,0.78,0.4645476190476191,0.5409688242651494,0.47528103315885334
71
+ 0.5334281650071123,1500,0.3,0.6,0.68,0.8,0.3,0.3,0.19999999999999996,0.6,0.136,0.68,0.08,0.8,0.47022222222222226,0.5497081271943999,0.4789771850855242
72
+ 0.6223328591749644,1750,0.3,0.6,0.68,0.76,0.3,0.3,0.19999999999999996,0.6,0.136,0.68,0.07600000000000001,0.76,0.4595555555555556,0.532908343998826,0.4714941392922588
73
+ 0.7112375533428165,2000,0.28,0.6,0.7,0.76,0.28,0.28,0.19999999999999996,0.6,0.14,0.7,0.07600000000000001,0.76,0.45466666666666666,0.5298619903730956,0.4669250036513194
74
+ 0.8001422475106685,2250,0.3,0.6,0.7,0.76,0.3,0.3,0.2,0.6,0.14,0.7,0.07600000000000001,0.76,0.46835714285714275,0.5402193595040476,0.48060903793426674
75
+ 0.8890469416785206,2500,0.28,0.62,0.68,0.76,0.28,0.28,0.20666666666666664,0.62,0.136,0.68,0.07600000000000001,0.76,0.4574999999999999,0.5323268680323201,0.46963514478938045
76
+ 0.9779516358463727,2750,0.28,0.64,0.7,0.76,0.28,0.28,0.21333333333333332,0.64,0.14,0.7,0.07600000000000001,0.76,0.4491904761904761,0.5259513581732703,0.4613480824826055
77
+ 1.0668563300142249,3000,0.28,0.62,0.72,0.76,0.28,0.28,0.20666666666666664,0.62,0.14400000000000002,0.72,0.07600000000000001,0.76,0.4425238095238095,0.5208170866111872,0.4550919833004972
78
+ 1.1557610241820768,3250,0.28,0.6,0.72,0.76,0.28,0.28,0.2,0.6,0.14400000000000002,0.72,0.07600000000000001,0.76,0.44371428571428567,0.5215917357685905,0.45619204861352564
79
+ 1.2446657183499288,3500,0.26,0.6,0.7,0.76,0.26,0.26,0.2,0.6,0.14,0.7,0.07600000000000001,0.76,0.42971428571428566,0.5109788233660602,0.44246522295786994
80
+ 1.333570412517781,3750,0.26,0.58,0.7,0.76,0.26,0.26,0.19333333333333333,0.58,0.14,0.7,0.07600000000000001,0.76,0.4313809523809524,0.5122109495989572,0.4436650404829164
81
+ 1.422475106685633,4000,0.26,0.56,0.7,0.76,0.26,0.26,0.18666666666666668,0.56,0.14,0.7,0.07600000000000001,0.76,0.42971428571428566,0.510824480760425,0.4419819268211145
82
+ 1.5113798008534851,4250,0.26,0.58,0.7,0.76,0.26,0.26,0.19333333333333333,0.58,0.14,0.7,0.07600000000000001,0.76,0.428047619047619,0.5095923545275279,0.44066726246555254
83
+ 1.600284495021337,4500,0.26,0.56,0.7,0.76,0.26,0.26,0.18666666666666668,0.56,0.14,0.7,0.07600000000000001,0.76,0.4263809523809524,0.5082058856889958,0.4389900767590395
84
+ 1.689189189189189,4750,0.26,0.56,0.7,0.78,0.26,0.26,0.18666666666666668,0.56,0.14,0.7,0.078,0.78,0.4317142857142857,0.5166057772867828,0.4425420850161611
85
+ 1.7780938833570412,5000,0.26,0.56,0.7,0.78,0.26,0.26,0.18666666666666668,0.56,0.14,0.7,0.078,0.78,0.4317142857142857,0.5166057772867828,0.4425420850161611
eval/Information-Retrieval_evaluation_NanoNQ_results.csv CHANGED
@@ -62,3 +62,24 @@ epoch,steps,cosine-Accuracy@1,cosine-Accuracy@3,cosine-Accuracy@5,cosine-Accurac
62
  1.600284495021337,4500,0.32,0.54,0.62,0.7,0.32,0.31,0.18,0.51,0.132,0.6,0.07400000000000001,0.68,0.44685714285714284,0.4990606536584677,0.44488210342400414
63
  1.689189189189189,4750,0.3,0.54,0.62,0.7,0.3,0.29,0.18,0.51,0.132,0.6,0.07400000000000001,0.68,0.4335238095238095,0.48906065365846774,0.43155164353196623
64
  1.7780938833570412,5000,0.3,0.54,0.62,0.7,0.3,0.29,0.18,0.51,0.132,0.6,0.07400000000000001,0.68,0.43224603174603166,0.48789548101925573,0.4304090341200182
62
  1.600284495021337,4500,0.32,0.54,0.62,0.7,0.32,0.31,0.18,0.51,0.132,0.6,0.07400000000000001,0.68,0.44685714285714284,0.4990606536584677,0.44488210342400414
63
  1.689189189189189,4750,0.3,0.54,0.62,0.7,0.3,0.29,0.18,0.51,0.132,0.6,0.07400000000000001,0.68,0.4335238095238095,0.48906065365846774,0.43155164353196623
64
  1.7780938833570412,5000,0.3,0.54,0.62,0.7,0.3,0.29,0.18,0.51,0.132,0.6,0.07400000000000001,0.68,0.43224603174603166,0.48789548101925573,0.4304090341200182
65
+ 0,0,0.58,0.68,0.74,0.78,0.58,0.55,0.22666666666666666,0.64,0.15600000000000003,0.7,0.086,0.75,0.6453888888888889,0.6582829486426273,0.6272793246081896
66
+ 0.08890469416785206,250,0.38,0.66,0.7,0.76,0.38,0.35,0.22,0.62,0.14400000000000002,0.67,0.08,0.72,0.523,0.5569696638651971,0.499903651936299
67
+ 0.17780938833570412,500,0.4,0.58,0.68,0.72,0.4,0.37,0.2,0.56,0.14,0.66,0.07400000000000001,0.68,0.5123333333333333,0.5411787059288392,0.4972754670524756
68
+ 0.26671408250355616,750,0.38,0.58,0.64,0.72,0.38,0.35,0.2,0.56,0.132,0.62,0.07600000000000001,0.7,0.4976031746031746,0.5371528639423688,0.48630137314468846
69
+ 0.35561877667140823,1000,0.38,0.58,0.64,0.7,0.38,0.35,0.2,0.56,0.132,0.61,0.07400000000000001,0.68,0.4921666666666667,0.5283767078645479,0.48312556422745045
70
+ 0.4445234708392603,1250,0.38,0.52,0.64,0.7,0.38,0.35,0.18,0.5,0.136,0.62,0.07400000000000001,0.68,0.4825238095238095,0.519596224961587,0.47207282351313246
71
+ 0.5334281650071123,1500,0.4,0.52,0.62,0.7,0.4,0.37,0.18,0.5,0.132,0.6,0.07400000000000001,0.68,0.49205555555555563,0.5281390811577225,0.48417056351095694
72
+ 0.6223328591749644,1750,0.38,0.56,0.62,0.7,0.38,0.35,0.19333333333333333,0.54,0.132,0.6,0.07400000000000001,0.68,0.48888888888888893,0.5258717709672651,0.48055319920579437
73
+ 0.7112375533428165,2000,0.42,0.6,0.62,0.7,0.42,0.38,0.20666666666666664,0.57,0.128,0.59,0.07400000000000001,0.68,0.5155555555555555,0.5412454206192028,0.4993498464462553
74
+ 0.8001422475106685,2250,0.38,0.58,0.62,0.7,0.38,0.35,0.19333333333333333,0.54,0.128,0.59,0.07400000000000001,0.68,0.4938888888888889,0.5249963447842245,0.47683980373124074
75
+ 0.8890469416785206,2500,0.4,0.56,0.62,0.7,0.4,0.37,0.19333333333333333,0.53,0.128,0.59,0.07400000000000001,0.68,0.502357142857143,0.5316357137578318,0.4859429987300997
76
+ 0.9779516358463727,2750,0.36,0.58,0.62,0.68,0.36,0.33,0.2,0.55,0.128,0.59,0.07200000000000001,0.66,0.48066666666666663,0.5120675950658177,0.4676537576967769
77
+ 1.0668563300142249,3000,0.4,0.56,0.62,0.68,0.4,0.36,0.19333333333333333,0.53,0.128,0.59,0.07200000000000001,0.66,0.5000000000000001,0.5234648938832518,0.4821727053198498
78
+ 1.1557610241820768,3250,0.34,0.54,0.62,0.68,0.34,0.32,0.18666666666666665,0.51,0.128,0.59,0.07200000000000001,0.66,0.46372222222222226,0.501861476984271,0.4557671925483281
79
+ 1.2446657183499288,3500,0.34,0.56,0.62,0.68,0.34,0.32,0.18666666666666665,0.52,0.128,0.59,0.07200000000000001,0.66,0.46602380952380945,0.5030439030999874,0.45651412149314324
80
+ 1.333570412517781,3750,0.34,0.56,0.62,0.68,0.34,0.32,0.19333333333333333,0.53,0.128,0.59,0.07200000000000001,0.66,0.4626904761904762,0.5007757968407431,0.4535704563335965
81
+ 1.422475106685633,4000,0.34,0.56,0.62,0.68,0.34,0.32,0.19333333333333333,0.53,0.128,0.59,0.07200000000000001,0.66,0.46077777777777773,0.4987875433113901,0.4515167848333299
82
+ 1.5113798008534851,4250,0.32,0.56,0.62,0.68,0.32,0.3,0.18666666666666665,0.52,0.128,0.59,0.07200000000000001,0.66,0.45474603174603173,0.4936739625315223,0.44331149664382424
83
+ 1.600284495021337,4500,0.34,0.56,0.62,0.68,0.34,0.31,0.19333333333333333,0.53,0.128,0.59,0.07200000000000001,0.66,0.46341269841269844,0.4981843146804683,0.44850926901873406
84
+ 1.689189189189189,4750,0.34,0.56,0.62,0.68,0.34,0.31,0.19333333333333333,0.53,0.128,0.59,0.07200000000000001,0.66,0.46341269841269844,0.4981843146804683,0.4485243115129495
85
+ 1.7780938833570412,5000,0.34,0.56,0.62,0.68,0.34,0.31,0.19333333333333333,0.53,0.128,0.59,0.07200000000000001,0.66,0.46341269841269844,0.4981843146804683,0.4486372793651192
eval/NanoBEIR_evaluation_mean_results.csv CHANGED
@@ -62,3 +62,24 @@ epoch,steps,cosine-Accuracy@1,cosine-Accuracy@3,cosine-Accuracy@5,cosine-Accurac
62
  1.600284495021337,4500,0.31,0.55,0.64,0.75,0.31,0.305,0.18333333333333332,0.535,0.132,0.63,0.07700000000000001,0.74,0.45146428571428565,0.5188315007750397,0.455199976312306
63
  1.689189189189189,4750,0.3,0.55,0.64,0.75,0.3,0.295,0.18333333333333332,0.535,0.132,0.63,0.07700000000000001,0.74,0.44249206349206344,0.5119396169197192,0.44617350251660504
64
  1.7780938833570412,5000,0.3,0.55,0.64,0.75,0.3,0.295,0.18333333333333332,0.535,0.132,0.63,0.07700000000000001,0.74,0.4422301587301587,0.5117301179490775,0.44590511907474584
62
  1.600284495021337,4500,0.31,0.55,0.64,0.75,0.31,0.305,0.18333333333333332,0.535,0.132,0.63,0.07700000000000001,0.74,0.45146428571428565,0.5188315007750397,0.455199976312306
63
  1.689189189189189,4750,0.3,0.55,0.64,0.75,0.3,0.295,0.18333333333333332,0.535,0.132,0.63,0.07700000000000001,0.74,0.44249206349206344,0.5119396169197192,0.44617350251660504
64
  1.7780938833570412,5000,0.3,0.55,0.64,0.75,0.3,0.295,0.18333333333333332,0.535,0.132,0.63,0.07700000000000001,0.74,0.4422301587301587,0.5117301179490775,0.44590511907474584
65
+ 0,0,0.51,0.67,0.73,0.79,0.51,0.495,0.22333333333333333,0.65,0.15000000000000002,0.71,0.08299999999999999,0.775,0.6078174603174604,0.6421054392332819,0.6046870533217332
66
+ 0.08890469416785206,250,0.36,0.65,0.69,0.75,0.36,0.345,0.21666666666666667,0.63,0.14,0.675,0.07700000000000001,0.73,0.5065952380952381,0.5543957318118715,0.501398524765232
67
+ 0.17780938833570412,500,0.4,0.61,0.69,0.74,0.4,0.385,0.20666666666666667,0.6000000000000001,0.14,0.6799999999999999,0.07500000000000001,0.72,0.52275,0.5651338825296819,0.5209090385036848
68
+ 0.26671408250355616,750,0.36,0.59,0.6799999999999999,0.74,0.36,0.345,0.19999999999999998,0.5800000000000001,0.138,0.6699999999999999,0.07600000000000001,0.73,0.4948015873015873,0.5475045858232828,0.49511942410356785
69
+ 0.35561877667140823,1000,0.37,0.58,0.66,0.74,0.37,0.355,0.19666666666666666,0.5700000000000001,0.134,0.645,0.07600000000000001,0.73,0.4922063492063492,0.5447286056617453,0.4930979829179663
70
+ 0.4445234708392603,1250,0.33999999999999997,0.56,0.66,0.74,0.33999999999999997,0.32499999999999996,0.18999999999999997,0.55,0.136,0.65,0.07600000000000001,0.73,0.4735357142857143,0.5302825246133682,0.4736769283359929
71
+ 0.5334281650071123,1500,0.35,0.56,0.65,0.75,0.35,0.33499999999999996,0.18999999999999997,0.55,0.134,0.64,0.07700000000000001,0.74,0.48113888888888895,0.5389236041760612,0.4815738742982406
72
+ 0.6223328591749644,1750,0.33999999999999997,0.5800000000000001,0.65,0.73,0.33999999999999997,0.32499999999999996,0.19666666666666666,0.5700000000000001,0.134,0.64,0.07500000000000001,0.72,0.47422222222222227,0.5293900574830456,0.4760236692490266
73
+ 0.7112375533428165,2000,0.35,0.6,0.6599999999999999,0.73,0.35,0.33,0.2033333333333333,0.585,0.134,0.645,0.07500000000000001,0.72,0.4851111111111111,0.5355537054961492,0.4831374250487873
74
+ 0.8001422475106685,2250,0.33999999999999997,0.59,0.6599999999999999,0.73,0.33999999999999997,0.32499999999999996,0.19666666666666666,0.5700000000000001,0.134,0.645,0.07500000000000001,0.72,0.4811230158730158,0.5326078521441361,0.47872442083275374
75
+ 0.8890469416785206,2500,0.34,0.5900000000000001,0.65,0.73,0.34,0.325,0.19999999999999998,0.575,0.132,0.635,0.07500000000000001,0.72,0.4799285714285714,0.531981290895076,0.47778907175974006
76
+ 0.9779516358463727,2750,0.32,0.61,0.6599999999999999,0.72,0.32,0.30500000000000005,0.20666666666666667,0.595,0.134,0.645,0.07400000000000001,0.71,0.46492857142857136,0.519009476619544,0.46450092008969124
77
+ 1.0668563300142249,3000,0.34,0.5900000000000001,0.6699999999999999,0.72,0.34,0.32,0.19999999999999998,0.575,0.136,0.655,0.07400000000000001,0.71,0.47126190476190477,0.5221409902472195,0.46863234431017353
78
+ 1.1557610241820768,3250,0.31000000000000005,0.5700000000000001,0.6699999999999999,0.72,0.31000000000000005,0.30000000000000004,0.19333333333333333,0.5549999999999999,0.136,0.655,0.07400000000000001,0.71,0.45371825396825394,0.5117266063764307,0.4559796205809269
79
+ 1.2446657183499288,3500,0.30000000000000004,0.5800000000000001,0.6599999999999999,0.72,0.30000000000000004,0.29000000000000004,0.19333333333333333,0.56,0.134,0.645,0.07400000000000001,0.71,0.44786904761904756,0.5070113632330238,0.4494896722255066
80
+ 1.333570412517781,3750,0.30000000000000004,0.5700000000000001,0.6599999999999999,0.72,0.30000000000000004,0.29000000000000004,0.19333333333333333,0.5549999999999999,0.134,0.645,0.07400000000000001,0.71,0.4470357142857143,0.5064933732198502,0.4486177484082564
81
+ 1.422475106685633,4000,0.30000000000000004,0.56,0.6599999999999999,0.72,0.30000000000000004,0.29000000000000004,0.19,0.545,0.134,0.645,0.07400000000000001,0.71,0.4452460317460317,0.5048060120359075,0.4467493558272222
82
+ 1.5113798008534851,4250,0.29000000000000004,0.5700000000000001,0.6599999999999999,0.72,0.29000000000000004,0.28,0.19,0.55,0.134,0.645,0.07400000000000001,0.71,0.44139682539682534,0.5016331585295251,0.4419893795546884
83
+ 1.600284495021337,4500,0.30000000000000004,0.56,0.6599999999999999,0.72,0.30000000000000004,0.28500000000000003,0.19,0.545,0.134,0.645,0.07400000000000001,0.71,0.4448968253968254,0.5031951001847321,0.4437496728888868
84
+ 1.689189189189189,4750,0.30000000000000004,0.56,0.6599999999999999,0.73,0.30000000000000004,0.28500000000000003,0.19,0.545,0.134,0.645,0.07500000000000001,0.72,0.4475634920634921,0.5073950459836256,0.4455331982645553
85
+ 1.7780938833570412,5000,0.30000000000000004,0.56,0.6599999999999999,0.73,0.30000000000000004,0.28500000000000003,0.19,0.545,0.134,0.645,0.07500000000000001,0.72,0.4475634920634921,0.5073950459836256,0.44558968219064016
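All of these evaluator CSVs share the header shown in the hunks above (epoch, steps, then cosine Accuracy/Precision/Recall@k, MRR@10, NDCG@10, MAP@100). A minimal sketch for pulling the NDCG@10 trajectory out of one of them, assuming pandas is available:

```python
import pandas as pd

# Rows appended at step intervals; the file name matches this diff.
df = pd.read_csv("eval/NanoBEIR_evaluation_mean_results.csv")
print(df[["epoch", "steps", "cosine-NDCG@10"]].tail())
```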
final_metrics.json CHANGED
@@ -1,16 +1,231 @@
1
  {
2
- "val_cosine_accuracy@1": 0.828675,
3
- "val_cosine_accuracy@3": 0.90055,
4
- "val_cosine_accuracy@5": 0.926875,
5
- "val_cosine_precision@1": 0.828675,
6
- "val_cosine_precision@3": 0.3001833333333333,
7
- "val_cosine_precision@5": 0.185375,
8
- "val_cosine_recall@1": 0.828675,
9
- "val_cosine_recall@3": 0.90055,
10
- "val_cosine_recall@5": 0.926875,
11
- "val_cosine_ndcg@10": 0.8917140181282133,
12
- "val_cosine_mrr@1": 0.828675,
13
- "val_cosine_mrr@5": 0.8668858333333296,
14
- "val_cosine_mrr@10": 0.870941557539677,
15
- "val_cosine_map@100": 0.873184159266725
16
  }
 
1
  {
2
+ "nano_beir": {
3
+ "NanoClimateFEVER_cosine_accuracy@1": 0.12,
4
+ "NanoClimateFEVER_cosine_accuracy@3": 0.28,
5
+ "NanoClimateFEVER_cosine_accuracy@5": 0.46,
6
+ "NanoClimateFEVER_cosine_accuracy@10": 0.66,
7
+ "NanoClimateFEVER_cosine_precision@1": 0.12,
8
+ "NanoClimateFEVER_cosine_precision@3": 0.09999999999999998,
9
+ "NanoClimateFEVER_cosine_precision@5": 0.1,
10
+ "NanoClimateFEVER_cosine_precision@10": 0.07600000000000001,
11
+ "NanoClimateFEVER_cosine_recall@1": 0.03833333333333333,
12
+ "NanoClimateFEVER_cosine_recall@3": 0.135,
13
+ "NanoClimateFEVER_cosine_recall@5": 0.23633333333333334,
14
+ "NanoClimateFEVER_cosine_recall@10": 0.32466666666666666,
15
+ "NanoClimateFEVER_cosine_ndcg@10": 0.20845131843009276,
16
+ "NanoClimateFEVER_cosine_mrr@10": 0.2561746031746031,
17
+ "NanoClimateFEVER_cosine_map@100": 0.14245832634750027,
18
+ "NanoDBPedia_cosine_accuracy@1": 0.66,
19
+ "NanoDBPedia_cosine_accuracy@3": 0.76,
20
+ "NanoDBPedia_cosine_accuracy@5": 0.86,
21
+ "NanoDBPedia_cosine_accuracy@10": 0.88,
22
+ "NanoDBPedia_cosine_precision@1": 0.66,
23
+ "NanoDBPedia_cosine_precision@3": 0.4733333333333334,
24
+ "NanoDBPedia_cosine_precision@5": 0.44800000000000006,
25
+ "NanoDBPedia_cosine_precision@10": 0.34800000000000003,
26
+ "NanoDBPedia_cosine_recall@1": 0.08255861663979128,
27
+ "NanoDBPedia_cosine_recall@3": 0.1250415886989628,
28
+ "NanoDBPedia_cosine_recall@5": 0.17452224602058436,
29
+ "NanoDBPedia_cosine_recall@10": 0.25437495331636306,
30
+ "NanoDBPedia_cosine_ndcg@10": 0.4636652485066659,
31
+ "NanoDBPedia_cosine_mrr@10": 0.7288333333333334,
32
+ "NanoDBPedia_cosine_map@100": 0.32939375725902226,
33
+ "NanoFEVER_cosine_accuracy@1": 0.7,
34
+ "NanoFEVER_cosine_accuracy@3": 0.86,
35
+ "NanoFEVER_cosine_accuracy@5": 0.9,
36
+ "NanoFEVER_cosine_accuracy@10": 0.98,
37
+ "NanoFEVER_cosine_precision@1": 0.7,
38
+ "NanoFEVER_cosine_precision@3": 0.2866666666666667,
39
+ "NanoFEVER_cosine_precision@5": 0.184,
40
+ "NanoFEVER_cosine_precision@10": 0.09999999999999998,
41
+ "NanoFEVER_cosine_recall@1": 0.6466666666666666,
42
+ "NanoFEVER_cosine_recall@3": 0.8066666666666665,
43
+ "NanoFEVER_cosine_recall@5": 0.8566666666666666,
44
+ "NanoFEVER_cosine_recall@10": 0.9266666666666667,
45
+ "NanoFEVER_cosine_ndcg@10": 0.7923317127841635,
46
+ "NanoFEVER_cosine_mrr@10": 0.784190476190476,
47
+ "NanoFEVER_cosine_map@100": 0.7390417679680837,
48
+ "NanoFiQA2018_cosine_accuracy@1": 0.28,
49
+ "NanoFiQA2018_cosine_accuracy@3": 0.38,
50
+ "NanoFiQA2018_cosine_accuracy@5": 0.52,
51
+ "NanoFiQA2018_cosine_accuracy@10": 0.6,
52
+ "NanoFiQA2018_cosine_precision@1": 0.28,
53
+ "NanoFiQA2018_cosine_precision@3": 0.16666666666666663,
54
+ "NanoFiQA2018_cosine_precision@5": 0.14400000000000002,
55
+ "NanoFiQA2018_cosine_precision@10": 0.08999999999999998,
56
+ "NanoFiQA2018_cosine_recall@1": 0.12974603174603175,
57
+ "NanoFiQA2018_cosine_recall@3": 0.20312698412698413,
58
+ "NanoFiQA2018_cosine_recall@5": 0.2956269841269841,
59
+ "NanoFiQA2018_cosine_recall@10": 0.3792936507936508,
60
+ "NanoFiQA2018_cosine_ndcg@10": 0.29381486022787706,
61
+ "NanoFiQA2018_cosine_mrr@10": 0.3612698412698412,
62
+ "NanoFiQA2018_cosine_map@100": 0.24011197414286956,
63
+ "NanoHotpotQA_cosine_accuracy@1": 0.64,
64
+ "NanoHotpotQA_cosine_accuracy@3": 0.74,
65
+ "NanoHotpotQA_cosine_accuracy@5": 0.78,
66
+ "NanoHotpotQA_cosine_accuracy@10": 0.84,
67
+ "NanoHotpotQA_cosine_precision@1": 0.64,
68
+ "NanoHotpotQA_cosine_precision@3": 0.3,
69
+ "NanoHotpotQA_cosine_precision@5": 0.19599999999999998,
70
+ "NanoHotpotQA_cosine_precision@10": 0.11199999999999999,
71
+ "NanoHotpotQA_cosine_recall@1": 0.32,
72
+ "NanoHotpotQA_cosine_recall@3": 0.45,
73
+ "NanoHotpotQA_cosine_recall@5": 0.49,
74
+ "NanoHotpotQA_cosine_recall@10": 0.56,
75
+ "NanoHotpotQA_cosine_ndcg@10": 0.5341052902954041,
76
+ "NanoHotpotQA_cosine_mrr@10": 0.704079365079365,
77
+ "NanoHotpotQA_cosine_map@100": 0.46245563144445145,
78
+ "NanoMSMARCO_cosine_accuracy@1": 0.3,
79
+ "NanoMSMARCO_cosine_accuracy@3": 0.56,
80
+ "NanoMSMARCO_cosine_accuracy@5": 0.66,
81
+ "NanoMSMARCO_cosine_accuracy@10": 0.8,
82
+ "NanoMSMARCO_cosine_precision@1": 0.3,
83
+ "NanoMSMARCO_cosine_precision@3": 0.18666666666666665,
84
+ "NanoMSMARCO_cosine_precision@5": 0.132,
85
+ "NanoMSMARCO_cosine_precision@10": 0.08,
86
+ "NanoMSMARCO_cosine_recall@1": 0.3,
87
+ "NanoMSMARCO_cosine_recall@3": 0.56,
88
+ "NanoMSMARCO_cosine_recall@5": 0.66,
89
+ "NanoMSMARCO_cosine_recall@10": 0.8,
90
+ "NanoMSMARCO_cosine_ndcg@10": 0.5355647548788993,
91
+ "NanoMSMARCO_cosine_mrr@10": 0.4522142857142857,
92
+ "NanoMSMARCO_cosine_map@100": 0.4614012040294735,
93
+ "NanoNFCorpus_cosine_accuracy@1": 0.36,
94
+ "NanoNFCorpus_cosine_accuracy@3": 0.54,
95
+ "NanoNFCorpus_cosine_accuracy@5": 0.58,
96
+ "NanoNFCorpus_cosine_accuracy@10": 0.66,
97
+ "NanoNFCorpus_cosine_precision@1": 0.36,
98
+ "NanoNFCorpus_cosine_precision@3": 0.34,
99
+ "NanoNFCorpus_cosine_precision@5": 0.308,
100
+ "NanoNFCorpus_cosine_precision@10": 0.242,
101
+ "NanoNFCorpus_cosine_recall@1": 0.013678895813410474,
102
+ "NanoNFCorpus_cosine_recall@3": 0.05640197371329614,
103
+ "NanoNFCorpus_cosine_recall@5": 0.07379464684205841,
104
+ "NanoNFCorpus_cosine_recall@10": 0.09966914120321839,
105
+ "NanoNFCorpus_cosine_ndcg@10": 0.2773671235823583,
106
+ "NanoNFCorpus_cosine_mrr@10": 0.45522222222222225,
107
+ "NanoNFCorpus_cosine_map@100": 0.10548993594921903,
108
+ "NanoNQ_cosine_accuracy@1": 0.3,
109
+ "NanoNQ_cosine_accuracy@3": 0.54,
110
+ "NanoNQ_cosine_accuracy@5": 0.62,
111
+ "NanoNQ_cosine_accuracy@10": 0.7,
112
+ "NanoNQ_cosine_precision@1": 0.3,
113
+ "NanoNQ_cosine_precision@3": 0.18,
114
+ "NanoNQ_cosine_precision@5": 0.132,
115
+ "NanoNQ_cosine_precision@10": 0.07400000000000001,
116
+ "NanoNQ_cosine_recall@1": 0.29,
117
+ "NanoNQ_cosine_recall@3": 0.51,
118
+ "NanoNQ_cosine_recall@5": 0.6,
119
+ "NanoNQ_cosine_recall@10": 0.68,
120
+ "NanoNQ_cosine_ndcg@10": 0.48789548101925573,
121
+ "NanoNQ_cosine_mrr@10": 0.43224603174603166,
122
+ "NanoNQ_cosine_map@100": 0.4304090341200182,
123
+ "NanoQuoraRetrieval_cosine_accuracy@1": 0.96,
124
+ "NanoQuoraRetrieval_cosine_accuracy@3": 0.98,
125
+ "NanoQuoraRetrieval_cosine_accuracy@5": 1.0,
126
+ "NanoQuoraRetrieval_cosine_accuracy@10": 1.0,
127
+ "NanoQuoraRetrieval_cosine_precision@1": 0.96,
128
+ "NanoQuoraRetrieval_cosine_precision@3": 0.4133333333333333,
129
+ "NanoQuoraRetrieval_cosine_precision@5": 0.264,
130
+ "NanoQuoraRetrieval_cosine_precision@10": 0.13999999999999999,
131
+ "NanoQuoraRetrieval_cosine_recall@1": 0.8473333333333334,
132
+ "NanoQuoraRetrieval_cosine_recall@3": 0.9520000000000001,
133
+ "NanoQuoraRetrieval_cosine_recall@5": 0.986,
134
+ "NanoQuoraRetrieval_cosine_recall@10": 1.0,
135
+ "NanoQuoraRetrieval_cosine_ndcg@10": 0.9790267083021519,
136
+ "NanoQuoraRetrieval_cosine_mrr@10": 0.975,
137
+ "NanoQuoraRetrieval_cosine_map@100": 0.9683809523809523,
138
+ "NanoSCIDOCS_cosine_accuracy@1": 0.46,
139
+ "NanoSCIDOCS_cosine_accuracy@3": 0.66,
140
+ "NanoSCIDOCS_cosine_accuracy@5": 0.76,
141
+ "NanoSCIDOCS_cosine_accuracy@10": 0.82,
142
+ "NanoSCIDOCS_cosine_precision@1": 0.46,
143
+ "NanoSCIDOCS_cosine_precision@3": 0.34,
144
+ "NanoSCIDOCS_cosine_precision@5": 0.27599999999999997,
145
+ "NanoSCIDOCS_cosine_precision@10": 0.19399999999999998,
146
+ "NanoSCIDOCS_cosine_recall@1": 0.09766666666666668,
147
+ "NanoSCIDOCS_cosine_recall@3": 0.21166666666666667,
148
+ "NanoSCIDOCS_cosine_recall@5": 0.2846666666666666,
149
+ "NanoSCIDOCS_cosine_recall@10": 0.3986666666666666,
150
+ "NanoSCIDOCS_cosine_ndcg@10": 0.38054185058113466,
151
+ "NanoSCIDOCS_cosine_mrr@10": 0.5684444444444444,
152
+ "NanoSCIDOCS_cosine_map@100": 0.29482712989551213,
153
+ "NanoArguAna_cosine_accuracy@1": 0.22,
154
+ "NanoArguAna_cosine_accuracy@3": 0.5,
155
+ "NanoArguAna_cosine_accuracy@5": 0.66,
156
+ "NanoArguAna_cosine_accuracy@10": 0.8,
157
+ "NanoArguAna_cosine_precision@1": 0.22,
158
+ "NanoArguAna_cosine_precision@3": 0.16666666666666663,
159
+ "NanoArguAna_cosine_precision@5": 0.132,
160
+ "NanoArguAna_cosine_precision@10": 0.08,
161
+ "NanoArguAna_cosine_recall@1": 0.22,
162
+ "NanoArguAna_cosine_recall@3": 0.5,
163
+ "NanoArguAna_cosine_recall@5": 0.66,
164
+ "NanoArguAna_cosine_recall@10": 0.8,
165
+ "NanoArguAna_cosine_ndcg@10": 0.48983349748002636,
166
+ "NanoArguAna_cosine_mrr@10": 0.3926269841269841,
167
+ "NanoArguAna_cosine_map@100": 0.40347638549721887,
168
+ "NanoSciFact_cosine_accuracy@1": 0.42,
169
+ "NanoSciFact_cosine_accuracy@3": 0.64,
170
+ "NanoSciFact_cosine_accuracy@5": 0.72,
171
+ "NanoSciFact_cosine_accuracy@10": 0.78,
172
+ "NanoSciFact_cosine_precision@1": 0.42,
173
+ "NanoSciFact_cosine_precision@3": 0.2333333333333333,
174
+ "NanoSciFact_cosine_precision@5": 0.16,
175
+ "NanoSciFact_cosine_precision@10": 0.088,
176
+ "NanoSciFact_cosine_recall@1": 0.375,
177
+ "NanoSciFact_cosine_recall@3": 0.615,
178
+ "NanoSciFact_cosine_recall@5": 0.7,
179
+ "NanoSciFact_cosine_recall@10": 0.77,
180
+ "NanoSciFact_cosine_ndcg@10": 0.5949205162858369,
181
+ "NanoSciFact_cosine_mrr@10": 0.5500238095238096,
182
+ "NanoSciFact_cosine_map@100": 0.5379955882174889,
183
+ "NanoTouche2020_cosine_accuracy@1": 0.4897959183673469,
184
+ "NanoTouche2020_cosine_accuracy@3": 0.8367346938775511,
185
+ "NanoTouche2020_cosine_accuracy@5": 0.8775510204081632,
186
+ "NanoTouche2020_cosine_accuracy@10": 0.9591836734693877,
187
+ "NanoTouche2020_cosine_precision@1": 0.4897959183673469,
188
+ "NanoTouche2020_cosine_precision@3": 0.46258503401360546,
189
+ "NanoTouche2020_cosine_precision@5": 0.4081632653061225,
190
+ "NanoTouche2020_cosine_precision@10": 0.35306122448979593,
191
+ "NanoTouche2020_cosine_recall@1": 0.0391261657646358,
192
+ "NanoTouche2020_cosine_recall@3": 0.09935212519786965,
193
+ "NanoTouche2020_cosine_recall@5": 0.14360455232476052,
194
+ "NanoTouche2020_cosine_recall@10": 0.23229499710875565,
195
+ "NanoTouche2020_cosine_ndcg@10": 0.39999686542243473,
196
+ "NanoTouche2020_cosine_mrr@10": 0.6578798185941044,
197
+ "NanoTouche2020_cosine_map@100": 0.28337269656997294,
198
+ "NanoBEIR_mean_cosine_accuracy@1": 0.4545996860282574,
199
+ "NanoBEIR_mean_cosine_accuracy@3": 0.6366718995290425,
200
+ "NanoBEIR_mean_cosine_accuracy@5": 0.7228885400313971,
201
+ "NanoBEIR_mean_cosine_accuracy@10": 0.8060910518053375,
202
+ "NanoBEIR_mean_cosine_precision@1": 0.4545996860282574,
203
+ "NanoBEIR_mean_cosine_precision@3": 0.2807116692830978,
204
+ "NanoBEIR_mean_cosine_precision@5": 0.2218587127158556,
205
+ "NanoBEIR_mean_cosine_precision@10": 0.1520816326530612,
206
+ "NanoBEIR_mean_cosine_recall@1": 0.2615469007664515,
207
+ "NanoBEIR_mean_cosine_recall@3": 0.40186584654388047,
208
+ "NanoBEIR_mean_cosine_recall@5": 0.4739396227677734,
209
+ "NanoBEIR_mean_cosine_recall@10": 0.5558179032632299,
210
+ "NanoBEIR_mean_cosine_ndcg@10": 0.49519347906125394,
211
+ "NanoBEIR_mean_cosine_mrr@10": 0.5629388627245769,
212
+ "NanoBEIR_mean_cosine_map@100": 0.41529341414013715
213
+ },
214
+ "beir_touche2020": {
215
+ "BeIR-touche2020-subset-test_cosine_accuracy@1": 0.7346938775510204,
216
+ "BeIR-touche2020-subset-test_cosine_accuracy@3": 0.8775510204081632,
217
+ "BeIR-touche2020-subset-test_cosine_accuracy@5": 0.9591836734693877,
218
+ "BeIR-touche2020-subset-test_cosine_accuracy@10": 0.9795918367346939,
219
+ "BeIR-touche2020-subset-test_cosine_precision@1": 0.7346938775510204,
220
+ "BeIR-touche2020-subset-test_cosine_precision@3": 0.6734693877551019,
221
+ "BeIR-touche2020-subset-test_cosine_precision@5": 0.6367346938775511,
222
+ "BeIR-touche2020-subset-test_cosine_precision@10": 0.5510204081632654,
223
+ "BeIR-touche2020-subset-test_cosine_recall@1": 0.01624309573792563,
224
+ "BeIR-touche2020-subset-test_cosine_recall@3": 0.04452074460941084,
225
+ "BeIR-touche2020-subset-test_cosine_recall@5": 0.07034165898860735,
226
+ "BeIR-touche2020-subset-test_cosine_recall@10": 0.12170213074025006,
227
+ "BeIR-touche2020-subset-test_cosine_ndcg@10": 0.5930193473989395,
228
+ "BeIR-touche2020-subset-test_cosine_mrr@10": 0.8187074829931973,
229
+ "BeIR-touche2020-subset-test_cosine_map@100": 0.2518399254976325
230
+ }
231
  }
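The `NanoBEIR_mean_*` entries in this block are plain arithmetic means of the corresponding per-dataset values. A minimal sketch that recomputes one of them from `final_metrics.json` (the file name and key layout are as shown in this diff):

```python
import json

with open("final_metrics.json") as f:
    metrics = json.load(f)["nano_beir"]

# Dataset prefixes such as "NanoMSMARCO", excluding the precomputed means.
datasets = sorted({k.split("_cosine_")[0] for k in metrics
                   if not k.startswith("NanoBEIR_mean")})

vals = [metrics[f"{d}_cosine_ndcg@10"] for d in datasets]
print(sum(vals) / len(vals))                    # ~0.4952, over 13 datasets
print(metrics["NanoBEIR_mean_cosine_ndcg@10"])  # 0.49519347906125394
```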
model.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:ae38eeb892fe2e3392c100342b99b38af1c80a62f836226261339f39b8066ae9
3
  size 133462128
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:e8047d77ced88713240245a68d96f88c650644e287418c20b245e56ea70742aa
3
  size 133462128
modules.json CHANGED
@@ -10,11 +10,5 @@
10
  "name": "1",
11
  "path": "1_Pooling",
12
  "type": "sentence_transformers.models.Pooling"
13
- },
14
- {
15
- "idx": 2,
16
- "name": "2",
17
- "path": "2_Normalize",
18
- "type": "sentence_transformers.models.Normalize"
19
  }
20
  ]
 
10
  "name": "1",
11
  "path": "1_Pooling",
12
  "type": "sentence_transformers.models.Pooling"
13
  }
14
  ]
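With the `2_Normalize` module dropped from the pipeline, `encode` no longer L2-normalizes its outputs on its own. Cosine similarity is unaffected (it normalizes internally), but callers that rely on dot products over unit vectors can request normalization explicitly; a sketch, with the checkpoint path as a placeholder:

```python
from sentence_transformers import SentenceTransformer

model = SentenceTransformer("path/to/this/checkpoint")  # placeholder path

# normalize_embeddings=True restores unit-length vectors at encode time.
emb = model.encode(["example sentence"], normalize_embeddings=True)
```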
tokenizer_config.json CHANGED
@@ -41,14 +41,14 @@
41
  "special": true
42
  }
43
  },
44
- "clean_up_tokenization_spaces": false,
45
  "cls_token": "[CLS]",
46
  "do_basic_tokenize": true,
47
  "do_lower_case": true,
48
  "extra_special_tokens": {},
49
  "mask_token": "[MASK]",
50
  "max_length": 128,
51
- "model_max_length": 128,
52
  "never_split": null,
53
  "pad_to_multiple_of": null,
54
  "pad_token": "[PAD]",
 
41
  "special": true
42
  }
43
  },
44
+ "clean_up_tokenization_spaces": true,
45
  "cls_token": "[CLS]",
46
  "do_basic_tokenize": true,
47
  "do_lower_case": true,
48
  "extra_special_tokens": {},
49
  "mask_token": "[MASK]",
50
  "max_length": 128,
51
+ "model_max_length": 512,
52
  "never_split": null,
53
  "pad_to_multiple_of": null,
54
  "pad_token": "[PAD]",