Update README.md
Browse files
README.md
CHANGED
|
@@ -79,6 +79,37 @@ print(sentence_embeddings)
|
|
For an automated evaluation of this model, see the *Sentence Embeddings Benchmark*: [https://seb.sbert.net](https://seb.sbert.net?model_name={MODEL_NAME})
| Model | id_raw_acc | vn_raw_acc | br_raw_acc | th_raw_acc | my_raw_acc | ph_raw_acc | sg_raw_acc | avg |
| --- | --- | --- | --- | --- | --- | --- | --- | --- |
| thtang_ALL_679283 | 0.7237258030526538 | 0.6179635458604787 | 0.569428189266009 | 0.6527431841130933 | 0.6970707704501172 | 0.6920863309352518 | 0.6943807865065542 | 0.6639140871691654 |
| thtang_ALL_660924 | 0.7262546286540025 | 0.6174145377351585 | 0.5722287242935964 | 0.6543588017502524 | 0.6976652026022918 | 0.6906474820143885 | 0.69227243560363 | 0.6644059732361887 |
| sentence-transformers_sentence-t5-xxl | 0.509829304271909 | 0.1838445208989093 | 0.3636515632837318 | 0.1691013126893301 | 0.5924507116673822 | 0.6481671805412813 | 0.637546979558163 | 0.4435130818443867 |
| sentence-transformers_gtr-t5-xxl | 0.5993015624529608 | 0.2482248737281311 | 0.4078749373014546 | 0.1722652305621003 | 0.5840626135200291 | 0.6400137033230558 | 0.6157301310844258 | 0.4667818645674511 |
| sentence-transformers_LaBSE | 0.5029653490682482 | 0.3281604567747602 | 0.3315499080421334 | 0.3979131605520027 | 0.5494534526600838 | 0.5371017471736896 | 0.5506462553854615 | 0.4568271899509114 |
| sentence-transformers_all-MiniLM-L6-v2 | 0.5080230002709456 | 0.2575946124002635 | 0.270397926768099 | 0.1580949175361831 | 0.5462831478484859 | 0.6006851661527921 | 0.5968466403886699 | 0.4197036301950627 |
| sentence-transformers_all-mpnet-base-v2 | 0.4696992503838396 | 0.2315350267183954 | 0.2474502591539876 | 0.1631100639515314 | 0.5266338628182688 | 0.5907159986296677 | 0.5775048125401046 | 0.400949896313685 |
| sentence-transformers_all-MiniLM-L12-v2 | 0.4897793298612156 | 0.2404655588902715 | 0.2573566293261996 | 0.1640525075732076 | 0.5451273075525908 | 0.6038026721479959 | 0.5889632413603447 | 0.4127924638159751 |
| sentence-transformers_paraphrase-MiniLM-L6-v2 | 0.4492278050395881 | 0.2358538906375814 | 0.2611603410800869 | 0.1422753281723325 | 0.5184108847131865 | 0.5714285714285714 | 0.5602713355944633 | 0.3912325938094015 |
| sentence-transformers_paraphrase-mpnet-base-v2 | 0.4600355239786856 | 0.2044506258692628 | 0.2691857548904865 | 0.1475260854930999 | 0.5289455434100591 | 0.5870503597122302 | 0.5819965166376386 | 0.3970272014273518 |
| sentence-transformers_paraphrase-multilingual-MiniLM-L12-v2 | 0.4488364391727127 | 0.2832149915818754 | 0.2945159672295602 | 0.3639851901716593 | 0.5396783461576566 | 0.5686879068174032 | 0.5613713447612063 | 0.4371843122702963 |
| sentence-transformers_paraphrase-multilingual-mpnet-base-v2 | 0.4902610109281392 | 0.3257814215650391 | 0.3281641865908711 | 0.3843487041400202 | 0.5530200455731317 | 0.5735525865022267 | 0.573379778164818 | 0.4612153904948922 |
| sentence-transformers_all-distilroberta-v1 | 0.4674112653159527 | 0.2234097064636556 | 0.2405952181909379 | 0.1759340289464826 | 0.5149433638255011 | 0.5754025351147654 | 0.5644880374003116 | 0.3945977364653724 |
| sentence-transformers_distiluse-base-multilingual-cased-v2 | 0.4351386338320739 | 0.2385989312641827 | 0.2841080086941983 | 0.2690339952877819 | 0.5313562960272118 | 0.5353545734840699 | 0.5437711980933174 | 0.4053373766689765 |
| sentence-transformers_clip-ViT-B-32-multilingual-v1 | 0.4444712045037179 | 0.2734426469511749 | 0.2800117037284735 | 0.2825311342982161 | 0.5029556487566461 | 0.5404590613223706 | 0.5338711155926299 | 0.4082489307361756 |
| intfloat_e5-large-v2 | 0.5510431405605564 | 0.2806163531220262 | 0.359471660257482 | 0.1715920565466173 | 0.57164558634127 | 0.6121274409044193 | 0.608396736639472 | 0.4506989963388347 |
| intfloat_e5-small-v2 | 0.5141343288075383 | 0.2682087694897884 | 0.3303795351947834 | 0.1630427465499831 | 0.549717644727717 | 0.586639260020555 | 0.5867632230268586 | 0.4284122154024605 |
| intfloat_e5-large | 0.5544751181623867 | 0.2853744235414684 | 0.3668700886139441 | 0.1815213732749916 | 0.5777550279052871 | 0.6291538198013018 | 0.6182968191401595 | 0.4590638100627913 |
| intfloat_e5-small | 0.5131107565403258 | 0.2736256496596149 | 0.320514964052834 | 0.1665769101312689 | 0.5514679171757868 | 0.6038711887632751 | 0.5906132551104593 | 0.4313972344905092 |
| intfloat_multilingual-e5-large | 0.5299394888159678 | 0.4199546153283068 | 0.339157331549908 | 0.476910131268933 | 0.5582048149004326 | 0.5775608084960603 | 0.581629846915391 | 0.4976224338964285 |
| intfloat_multilingual-e5-base | 0.5206370232110065 | 0.4321425957104165 | 0.3417488714261829 | 0.4740828004039044 | 0.5527558535054985 | 0.5737581363480644 | 0.5744797873315611 | 0.4956578668480906 |
| intfloat_multilingual-e5-small | 0.4949574013306439 | 0.426762316082278 | 0.3095636181240595 | 0.474217435207001 | 0.5443677553581454 | 0.5643713600548133 | 0.5703547529562747 | 0.4835135198733166 |
| BAAI_bge-large-en-v1.5 | 0.4980582231989643 | 0.2554717809823585 | 0.3068048821267347 | 0.1741164591046785 | 0.5688715696311218 | 0.6287427201096266 | 0.6171968099734164 | 0.4356089207324145 |
| BAAI_bge-base-en-v1.5 | 0.5172953608092242 | 0.2430275968084327 | 0.3150810901187093 | 0.1753281723325479 | 0.562134671906476 | 0.6236724905789653 | 0.6025300210835091 | 0.4341527719482663 |
| BAAI_bge-small-en-v1.5 | 0.5137429629406629 | 0.2515555230217407 | 0.29986624310316 | 0.1612924941097273 | 0.5616723357881179 | 0.6168550873586844 | 0.6101384178201485 | 0.4307318663060345 |
| thenlper_gte-large | 0.5510130354938736 | 0.2816411682892907 | 0.3396171208827955 | 0.1873106698081454 | 0.5949605363098973 | 0.6518670777663583 | 0.6351636263635531 | 0.4630818907019877 |
| thenlper_gte-base | 0.5545955384291176 | 0.2787863260376254 | 0.3276625982277211 | 0.1720296196566812 | 0.580892308708431 | 0.6367934224049332 | 0.6203135026125217 | 0.4530104737252901 |
| llmrails_ember-v1 | 0.5085347864045519 | 0.2476392650611229 | 0.3102324026082594 | 0.1719959609559071 | 0.5761698754994882 | 0.6305926687221651 | 0.6204051700430837 | 0.4379385898992255 |
| infgrad_stella-base-en-v2 | 0.524159316012885 | 0.2623526828197057 | 0.3060524995820097 | 0.1880848199259508 | 0.5683762095043097 | 0.6302843439534087 | 0.6167384728206068 | 0.4422926206598395 |
## Training

The model was trained with the parameters: