radoslavralev committed (verified)
Commit 8d27fbe · 1 Parent(s): 69d1432

Add new SentenceTransformer model

Files changed (2):
  1. README.md +91 -91
  2. config_sentence_transformers.json +1 -1
README.md CHANGED

@@ -7,7 +7,7 @@ tags:
 - generated_from_trainer
 - dataset_size:111470
 - loss:MultipleNegativesRankingLoss
-base_model: thenlper/gte-small
+base_model: sentence-transformers/all-MiniLM-L6-v2
 widget:
 - source_sentence: why are some rocks radioactive
   sentences:
@@ -106,7 +106,7 @@ metrics:
 - cosine_mrr@10
 - cosine_map@100
 model-index:
-- name: SentenceTransformer based on thenlper/gte-small
+- name: SentenceTransformer based on sentence-transformers/all-MiniLM-L6-v2
   results:
   - task:
       type: information-retrieval
@@ -116,7 +116,7 @@ model-index:
       type: NanoMSMARCO
     metrics:
     - type: cosine_accuracy@1
-      value: 0.3
+      value: 0.32
       name: Cosine Accuracy@1
     - type: cosine_accuracy@3
       value: 0.54
@@ -125,10 +125,10 @@ model-index:
       value: 0.62
       name: Cosine Accuracy@5
     - type: cosine_accuracy@10
-      value: 0.76
+      value: 0.74
       name: Cosine Accuracy@10
     - type: cosine_precision@1
-      value: 0.3
+      value: 0.32
       name: Cosine Precision@1
     - type: cosine_precision@3
       value: 0.18
@@ -137,10 +137,10 @@ model-index:
       value: 0.124
       name: Cosine Precision@5
     - type: cosine_precision@10
-      value: 0.07600000000000001
+      value: 0.07400000000000001
       name: Cosine Precision@10
     - type: cosine_recall@1
-      value: 0.3
+      value: 0.32
       name: Cosine Recall@1
     - type: cosine_recall@3
       value: 0.54
@@ -149,16 +149,16 @@ model-index:
       value: 0.62
       name: Cosine Recall@5
     - type: cosine_recall@10
-      value: 0.76
+      value: 0.74
       name: Cosine Recall@10
     - type: cosine_ndcg@10
-      value: 0.5241190384704345
+      value: 0.5189661789845982
       name: Cosine Ndcg@10
     - type: cosine_mrr@10
-      value: 0.4492698412698413
+      value: 0.4499126984126983
       name: Cosine Mrr@10
     - type: cosine_map@100
-      value: 0.45777964902887497
+      value: 0.4616522119994847
       name: Cosine Map@100
   - task:
       type: information-retrieval
@@ -168,49 +168,49 @@ model-index:
       type: NanoNQ
     metrics:
     - type: cosine_accuracy@1
-      value: 0.38
+      value: 0.34
       name: Cosine Accuracy@1
     - type: cosine_accuracy@3
-      value: 0.52
+      value: 0.54
       name: Cosine Accuracy@3
     - type: cosine_accuracy@5
-      value: 0.54
+      value: 0.58
       name: Cosine Accuracy@5
     - type: cosine_accuracy@10
-      value: 0.68
+      value: 0.66
       name: Cosine Accuracy@10
     - type: cosine_precision@1
-      value: 0.38
+      value: 0.34
       name: Cosine Precision@1
     - type: cosine_precision@3
-      value: 0.1733333333333333
+      value: 0.19333333333333333
       name: Cosine Precision@3
     - type: cosine_precision@5
-      value: 0.11200000000000002
+      value: 0.12400000000000003
       name: Cosine Precision@5
     - type: cosine_precision@10
       value: 0.07200000000000001
       name: Cosine Precision@10
     - type: cosine_recall@1
-      value: 0.35
+      value: 0.31
       name: Cosine Recall@1
     - type: cosine_recall@3
-      value: 0.49
+      value: 0.52
       name: Cosine Recall@3
     - type: cosine_recall@5
-      value: 0.52
+      value: 0.56
       name: Cosine Recall@5
     - type: cosine_recall@10
-      value: 0.66
+      value: 0.65
       name: Cosine Recall@10
     - type: cosine_ndcg@10
-      value: 0.5017561161582912
+      value: 0.495936841468424
       name: Cosine Ndcg@10
     - type: cosine_mrr@10
-      value: 0.46857142857142864
+      value: 0.455388888888889
       name: Cosine Mrr@10
     - type: cosine_map@100
-      value: 0.4585943213547632
+      value: 0.45517920150439706
       name: Cosine Map@100
   - task:
       type: nano-beir
@@ -220,61 +220,61 @@ model-index:
       type: NanoBEIR_mean
     metrics:
     - type: cosine_accuracy@1
-      value: 0.33999999999999997
+      value: 0.33
       name: Cosine Accuracy@1
     - type: cosine_accuracy@3
-      value: 0.53
+      value: 0.54
       name: Cosine Accuracy@3
     - type: cosine_accuracy@5
-      value: 0.5800000000000001
+      value: 0.6
      name: Cosine Accuracy@5
     - type: cosine_accuracy@10
-      value: 0.72
+      value: 0.7
       name: Cosine Accuracy@10
     - type: cosine_precision@1
-      value: 0.33999999999999997
+      value: 0.33
       name: Cosine Precision@1
     - type: cosine_precision@3
-      value: 0.17666666666666664
+      value: 0.18666666666666665
       name: Cosine Precision@3
     - type: cosine_precision@5
-      value: 0.11800000000000001
+      value: 0.12400000000000001
       name: Cosine Precision@5
     - type: cosine_precision@10
-      value: 0.07400000000000001
+      value: 0.07300000000000001
       name: Cosine Precision@10
     - type: cosine_recall@1
-      value: 0.32499999999999996
+      value: 0.315
       name: Cosine Recall@1
     - type: cosine_recall@3
-      value: 0.515
+      value: 0.53
       name: Cosine Recall@3
     - type: cosine_recall@5
-      value: 0.5700000000000001
+      value: 0.5900000000000001
       name: Cosine Recall@5
     - type: cosine_recall@10
-      value: 0.71
+      value: 0.6950000000000001
       name: Cosine Recall@10
     - type: cosine_ndcg@10
-      value: 0.5129375773143628
+      value: 0.5074515102265111
       name: Cosine Ndcg@10
     - type: cosine_mrr@10
-      value: 0.45892063492063495
+      value: 0.45265079365079364
       name: Cosine Mrr@10
     - type: cosine_map@100
-      value: 0.4581869851918191
+      value: 0.4584157067519409
       name: Cosine Map@100
 ---
 
-# SentenceTransformer based on thenlper/gte-small
+# SentenceTransformer based on sentence-transformers/all-MiniLM-L6-v2
 
-This is a [sentence-transformers](https://www.SBERT.net) model finetuned from [thenlper/gte-small](https://huggingface.co/thenlper/gte-small). It maps sentences & paragraphs to a 384-dimensional dense vector space and can be used for semantic textual similarity, semantic search, paraphrase mining, text classification, clustering, and more.
+This is a [sentence-transformers](https://www.SBERT.net) model finetuned from [sentence-transformers/all-MiniLM-L6-v2](https://huggingface.co/sentence-transformers/all-MiniLM-L6-v2). It maps sentences & paragraphs to a 384-dimensional dense vector space and can be used for semantic textual similarity, semantic search, paraphrase mining, text classification, clustering, and more.
 
 ## Model Details
 
 ### Model Description
 - **Model Type:** Sentence Transformer
-- **Base model:** [thenlper/gte-small](https://huggingface.co/thenlper/gte-small) <!-- at revision 17e1f347d17fe144873b1201da91788898c639cd -->
+- **Base model:** [sentence-transformers/all-MiniLM-L6-v2](https://huggingface.co/sentence-transformers/all-MiniLM-L6-v2) <!-- at revision c9745ed1d9f207416be6d2e6f8de32d1f16199bf -->
 - **Maximum Sequence Length:** 128 tokens
 - **Output Dimensionality:** 384 dimensions
 - **Similarity Function:** Cosine Similarity
@@ -327,9 +327,9 @@ print(embeddings.shape)
 # Get the similarity scores for the embeddings
 similarities = model.similarity(embeddings, embeddings)
 print(similarities)
-# tensor([[1.0000, 1.0000, 0.9779],
-#         [1.0000, 1.0000, 0.9779],
-#         [0.9779, 0.9779, 1.0000]])
+# tensor([[1.0000, 1.0000, 0.9705],
+#         [1.0000, 1.0000, 0.9705],
+#         [0.9705, 0.9705, 1.0000]])
 ```
 
 <!--
@@ -367,21 +367,21 @@ You can finetune this model on your own dataset.
 
 | Metric              | NanoMSMARCO | NanoNQ     |
 |:--------------------|:------------|:-----------|
-| cosine_accuracy@1   | 0.3         | 0.38       |
-| cosine_accuracy@3   | 0.54        | 0.52       |
-| cosine_accuracy@5   | 0.62        | 0.54       |
-| cosine_accuracy@10  | 0.76        | 0.68       |
-| cosine_precision@1  | 0.3         | 0.38       |
-| cosine_precision@3  | 0.18        | 0.1733     |
-| cosine_precision@5  | 0.124       | 0.112      |
-| cosine_precision@10 | 0.076       | 0.072      |
-| cosine_recall@1     | 0.3         | 0.35       |
-| cosine_recall@3     | 0.54        | 0.49       |
-| cosine_recall@5     | 0.62        | 0.52       |
-| cosine_recall@10    | 0.76        | 0.66       |
-| **cosine_ndcg@10**  | **0.5241**  | **0.5018** |
-| cosine_mrr@10       | 0.4493      | 0.4686     |
-| cosine_map@100      | 0.4578      | 0.4586     |
+| cosine_accuracy@1   | 0.32        | 0.34       |
+| cosine_accuracy@3   | 0.54        | 0.54       |
+| cosine_accuracy@5   | 0.62        | 0.58       |
+| cosine_accuracy@10  | 0.74        | 0.66       |
+| cosine_precision@1  | 0.32        | 0.34       |
+| cosine_precision@3  | 0.18        | 0.1933     |
+| cosine_precision@5  | 0.124       | 0.124      |
+| cosine_precision@10 | 0.074       | 0.072      |
+| cosine_recall@1     | 0.32        | 0.31       |
+| cosine_recall@3     | 0.54        | 0.52       |
+| cosine_recall@5     | 0.62        | 0.56       |
+| cosine_recall@10    | 0.74        | 0.65       |
+| **cosine_ndcg@10**  | **0.519**   | **0.4959** |
+| cosine_mrr@10       | 0.4499      | 0.4554     |
+| cosine_map@100      | 0.4617      | 0.4552     |
 
 #### Nano BEIR
 
@@ -399,21 +399,21 @@ You can finetune this model on your own dataset.
 
 | Metric              | Value      |
 |:--------------------|:-----------|
-| cosine_accuracy@1   | 0.34       |
-| cosine_accuracy@3   | 0.53       |
-| cosine_accuracy@5   | 0.58       |
-| cosine_accuracy@10  | 0.72       |
-| cosine_precision@1  | 0.34       |
-| cosine_precision@3  | 0.1767     |
-| cosine_precision@5  | 0.118      |
-| cosine_precision@10 | 0.074      |
-| cosine_recall@1     | 0.325      |
-| cosine_recall@3     | 0.515      |
-| cosine_recall@5     | 0.57       |
-| cosine_recall@10    | 0.71       |
-| **cosine_ndcg@10**  | **0.5129** |
-| cosine_mrr@10       | 0.4589     |
-| cosine_map@100      | 0.4582     |
+| cosine_accuracy@1   | 0.33       |
+| cosine_accuracy@3   | 0.54       |
+| cosine_accuracy@5   | 0.6        |
+| cosine_accuracy@10  | 0.7        |
+| cosine_precision@1  | 0.33       |
+| cosine_precision@3  | 0.1867     |
+| cosine_precision@5  | 0.124      |
+| cosine_precision@10 | 0.073      |
+| cosine_recall@1     | 0.315      |
+| cosine_recall@3     | 0.53       |
+| cosine_recall@5     | 0.59       |
+| cosine_recall@10    | 0.695      |
+| **cosine_ndcg@10**  | **0.5075** |
+| cosine_mrr@10       | 0.4527     |
+| cosine_map@100      | 0.4584     |
 
 <!--
 ## Bias, Risks and Limitations
@@ -487,7 +487,7 @@ You can finetune this model on your own dataset.
 - `eval_strategy`: steps
 - `per_device_train_batch_size`: 128
 - `per_device_eval_batch_size`: 128
-- `learning_rate`: 1e-06
+- `learning_rate`: 1e-05
 - `weight_decay`: 0.001
 - `max_steps`: 3000
 - `warmup_ratio`: 0.1
@@ -516,7 +516,7 @@ You can finetune this model on your own dataset.
 - `gradient_accumulation_steps`: 1
 - `eval_accumulation_steps`: None
 - `torch_empty_cache_steps`: None
-- `learning_rate`: 1e-06
+- `learning_rate`: 1e-05
 - `weight_decay`: 0.001
 - `adam_beta1`: 0.9
 - `adam_beta2`: 0.999
@@ -630,19 +630,19 @@ You can finetune this model on your own dataset.
 ### Training Logs
 | Epoch  | Step | Training Loss | Validation Loss | NanoMSMARCO_cosine_ndcg@10 | NanoNQ_cosine_ndcg@10 | NanoBEIR_mean_cosine_ndcg@10 |
 |:------:|:----:|:-------------:|:---------------:|:--------------------------:|:---------------------:|:----------------------------:|
-| 0      | 0    | -             | 4.0678          | 0.6259                     | 0.6583                | 0.6421                       |
-| 0.2874 | 250  | 4.2246        | 3.8520          | 0.6117                     | 0.6465                | 0.6291                       |
-| 0.5747 | 500  | 3.8138        | 3.1367          | 0.6062                     | 0.6457                | 0.6260                       |
-| 0.8621 | 750  | 2.9174        | 1.8442          | 0.5837                     | 0.5594                | 0.5715                       |
-| 1.1494 | 1000 | 1.8256        | 1.2096          | 0.5462                     | 0.4989                | 0.5226                       |
-| 1.4368 | 1250 | 1.4465        | 1.0779          | 0.5347                     | 0.4650                | 0.4998                       |
-| 1.7241 | 1500 | 1.3307        | 1.0331          | 0.5358                     | 0.4801                | 0.5079                       |
-| 2.0115 | 1750 | 1.2785        | 1.0094          | 0.5359                     | 0.4848                | 0.5104                       |
-| 2.2989 | 2000 | 1.249         | 0.9957          | 0.5282                     | 0.4860                | 0.5071                       |
-| 2.5862 | 2250 | 1.228         | 0.9865          | 0.5245                     | 0.4939                | 0.5092                       |
-| 2.8736 | 2500 | 1.2043        | 0.9809          | 0.5235                     | 0.5018                | 0.5126                       |
-| 3.1609 | 2750 | 1.208         | 0.9771          | 0.5261                     | 0.5018                | 0.5139                       |
-| 3.4483 | 3000 | 1.2008        | 0.9762          | 0.5241                     | 0.5018                | 0.5129                       |
+| 0      | 0    | -             | 1.1445          | 0.5540                     | 0.5931                | 0.5735                       |
+| 0.2874 | 250  | 1.1834        | 0.9135          | 0.4900                     | 0.5098                | 0.4999                       |
+| 0.5747 | 500  | 1.0424        | 0.8651          | 0.4979                     | 0.4970                | 0.4974                       |
+| 0.8621 | 750  | 1.0133        | 0.8524          | 0.5118                     | 0.4987                | 0.5052                       |
+| 1.1494 | 1000 | 0.9885        | 0.8441          | 0.5232                     | 0.5085                | 0.5159                       |
+| 1.4368 | 1250 | 0.9788        | 0.8383          | 0.5204                     | 0.5039                | 0.5122                       |
+| 1.7241 | 1500 | 0.9689        | 0.8339          | 0.5184                     | 0.5220                | 0.5202                       |
+| 2.0115 | 1750 | 0.9641        | 0.8292          | 0.5192                     | 0.4993                | 0.5093                       |
+| 2.2989 | 2000 | 0.9523        | 0.8269          | 0.5173                     | 0.4896                | 0.5034                       |
+| 2.5862 | 2250 | 0.9492        | 0.8248          | 0.5181                     | 0.5132                | 0.5157                       |
+| 2.8736 | 2500 | 0.936         | 0.8241          | 0.5187                     | 0.5082                | 0.5134                       |
+| 3.1609 | 2750 | 0.9427        | 0.8228          | 0.5193                     | 0.4957                | 0.5075                       |
+| 3.4483 | 3000 | 0.9363        | 0.8225          | 0.5190                     | 0.4959                | 0.5075                       |
 
 
 ### Framework Versions
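The README's usage snippet is only partially visible in the hunks above. For orientation, a minimal, self-contained sketch of the same `sentence-transformers` pattern follows; the Hub repo id and the two extra sentences are illustrative placeholders, not taken from the card.

```python
from sentence_transformers import SentenceTransformer

# Placeholder repo id -- substitute the actual Hub id of this checkpoint.
model = SentenceTransformer("radoslavralev/<repo-id>")

sentences = [
    "why are some rocks radioactive",  # widget query from the model card
    "Some rocks contain unstable isotopes that decay and emit radiation.",  # illustrative
    "The weather is sunny today.",  # illustrative
]
embeddings = model.encode(sentences)
print(embeddings.shape)  # (3, 384): the card lists 384-dimensional output

# Cosine similarity between every pair of embeddings (diagonal is 1.0)
similarities = model.similarity(embeddings, embeddings)
print(similarities)
```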
 
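The two `learning_rate` hunks record the same change (1e-06 to 1e-05), once in the summarized and once in the full hyperparameter listing. As a sketch of how the listed values map onto the sentence-transformers v3+ training API (an assumption; only values visible in this diff are filled in, and `output_dir` is a placeholder):

```python
from sentence_transformers import SentenceTransformerTrainingArguments

args = SentenceTransformerTrainingArguments(
    output_dir="output",              # placeholder path
    eval_strategy="steps",
    per_device_train_batch_size=128,
    per_device_eval_batch_size=128,
    learning_rate=1e-5,               # raised from 1e-6 in this commit
    weight_decay=0.001,
    max_steps=3000,
    warmup_ratio=0.1,
)
```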
config_sentence_transformers.json CHANGED

@@ -1,10 +1,10 @@
 {
-  "model_type": "SentenceTransformer",
   "__version__": {
     "sentence_transformers": "5.2.0",
     "transformers": "4.57.3",
     "pytorch": "2.9.1+cu128"
   },
+  "model_type": "SentenceTransformer",
   "prompts": {
     "query": "",
     "document": ""