Upload folder using huggingface_hub
Browse files- 1_Pooling/config.json +10 -0
- README.md +62 -0
- checkpoint-24522/1_Pooling/config.json +10 -0
- checkpoint-24522/README.md +1440 -0
- checkpoint-24522/config.json +26 -0
- checkpoint-24522/config_sentence_transformers.json +10 -0
- checkpoint-24522/model.safetensors +3 -0
- checkpoint-24522/modules.json +20 -0
- checkpoint-24522/optimizer.pt +3 -0
- checkpoint-24522/rng_state.pth +3 -0
- checkpoint-24522/scheduler.pt +3 -0
- checkpoint-24522/sentence_bert_config.json +4 -0
- checkpoint-24522/special_tokens_map.json +37 -0
- checkpoint-24522/tokenizer.json +0 -0
- checkpoint-24522/tokenizer_config.json +65 -0
- checkpoint-24522/trainer_state.json +0 -0
- checkpoint-24522/training_args.bin +3 -0
- checkpoint-24522/vocab.txt +0 -0
- config.json +26 -0
- config_sentence_transformers.json +10 -0
- model.safetensors +3 -0
- modules.json +20 -0
- runs/Dec26_10-25-51_r-clairedhx-autotrain-advanced-phe-transcript-dpu1ifo-47f8f-5cy/events.out.tfevents.1735208755.r-clairedhx-autotrain-advanced-phe-transcript-dpu1ifo-47f8f-5cy.251.0 +2 -2
- runs/Dec26_10-25-51_r-clairedhx-autotrain-advanced-phe-transcript-dpu1ifo-47f8f-5cy/events.out.tfevents.1735269700.r-clairedhx-autotrain-advanced-phe-transcript-dpu1ifo-47f8f-5cy.251.1 +3 -0
- sentence_bert_config.json +4 -0
- special_tokens_map.json +37 -0
- tokenizer.json +0 -0
- tokenizer_config.json +65 -0
- training_args.bin +3 -0
- training_params.json +33 -0
- vocab.txt +0 -0
1_Pooling/config.json
ADDED
|
@@ -0,0 +1,10 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"word_embedding_dimension": 384,
|
| 3 |
+
"pooling_mode_cls_token": false,
|
| 4 |
+
"pooling_mode_mean_tokens": true,
|
| 5 |
+
"pooling_mode_max_tokens": false,
|
| 6 |
+
"pooling_mode_mean_sqrt_len_tokens": false,
|
| 7 |
+
"pooling_mode_weightedmean_tokens": false,
|
| 8 |
+
"pooling_mode_lasttoken": false,
|
| 9 |
+
"include_prompt": true
|
| 10 |
+
}
|
README.md
ADDED
|
@@ -0,0 +1,62 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
|
| 2 |
+
---
|
| 3 |
+
library_name: sentence-transformers
|
| 4 |
+
tags:
|
| 5 |
+
- sentence-transformers
|
| 6 |
+
- sentence-similarity
|
| 7 |
+
- feature-extraction
|
| 8 |
+
- autotrain
|
| 9 |
+
base_model: sentence-transformers/multi-qa-MiniLM-L6-cos-v1
|
| 10 |
+
widget:
|
| 11 |
+
- source_sentence: 'search_query: i love autotrain'
|
| 12 |
+
sentences:
|
| 13 |
+
- 'search_query: huggingface auto train'
|
| 14 |
+
- 'search_query: hugging face auto train'
|
| 15 |
+
- 'search_query: i love autotrain'
|
| 16 |
+
pipeline_tag: sentence-similarity
|
| 17 |
+
---
|
| 18 |
+
|
| 19 |
+
# Model Trained Using AutoTrain
|
| 20 |
+
|
| 21 |
+
- Problem type: Sentence Transformers
|
| 22 |
+
|
| 23 |
+
## Validation Metrics
|
| 24 |
+
loss: 0.12438971549272537
|
| 25 |
+
|
| 26 |
+
runtime: 1178.6866
|
| 27 |
+
|
| 28 |
+
samples_per_second: 13.869
|
| 29 |
+
|
| 30 |
+
steps_per_second: 0.867
|
| 31 |
+
|
| 32 |
+
: 3.0
|
| 33 |
+
|
| 34 |
+
## Usage
|
| 35 |
+
|
| 36 |
+
### Direct Usage (Sentence Transformers)
|
| 37 |
+
|
| 38 |
+
First install the Sentence Transformers library:
|
| 39 |
+
|
| 40 |
+
```bash
|
| 41 |
+
pip install -U sentence-transformers
|
| 42 |
+
```
|
| 43 |
+
|
| 44 |
+
Then you can load this model and run inference.
|
| 45 |
+
```python
|
| 46 |
+
from sentence_transformers import SentenceTransformer
|
| 47 |
+
|
| 48 |
+
# Download from the Hugging Face Hub
|
| 49 |
+
model = SentenceTransformer("sentence_transformers_model_id")
|
| 50 |
+
# Run inference
|
| 51 |
+
sentences = [
|
| 52 |
+
'search_query: autotrain',
|
| 53 |
+
'search_query: auto train',
|
| 54 |
+
'search_query: i love autotrain',
|
| 55 |
+
]
|
| 56 |
+
embeddings = model.encode(sentences)
|
| 57 |
+
print(embeddings.shape)
|
| 58 |
+
|
| 59 |
+
# Get the similarity scores for the embeddings
|
| 60 |
+
similarities = model.similarity(embeddings, embeddings)
|
| 61 |
+
print(similarities.shape)
|
| 62 |
+
```
|
checkpoint-24522/1_Pooling/config.json
ADDED
|
@@ -0,0 +1,10 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"word_embedding_dimension": 384,
|
| 3 |
+
"pooling_mode_cls_token": false,
|
| 4 |
+
"pooling_mode_mean_tokens": true,
|
| 5 |
+
"pooling_mode_max_tokens": false,
|
| 6 |
+
"pooling_mode_mean_sqrt_len_tokens": false,
|
| 7 |
+
"pooling_mode_weightedmean_tokens": false,
|
| 8 |
+
"pooling_mode_lasttoken": false,
|
| 9 |
+
"include_prompt": true
|
| 10 |
+
}
|
checkpoint-24522/README.md
ADDED
|
@@ -0,0 +1,1440 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
---
|
| 2 |
+
tags:
|
| 3 |
+
- sentence-transformers
|
| 4 |
+
- sentence-similarity
|
| 5 |
+
- feature-extraction
|
| 6 |
+
- generated_from_trainer
|
| 7 |
+
- dataset_size:65385
|
| 8 |
+
- loss:MultipleNegativesRankingLoss
|
| 9 |
+
base_model: sentence-transformers/multi-qa-MiniLM-L6-cos-v1
|
| 10 |
+
widget:
|
| 11 |
+
- source_sentence: Quelles sont les considérations qui must être prises en compte
|
| 12 |
+
avant de prescrire des traitements médicaux supplémentaires après l'échec des
|
| 13 |
+
traitements conventionnels et des agents modificateurs du traitement de la maladie
|
| 14 |
+
auto-immune (anti-TNF) ?
|
| 15 |
+
sentences:
|
| 16 |
+
- 'Résumé de l''intervention : Syndrome main-pied lié aux fluoropyrimidines orales
|
| 17 |
+
dans le traitement du cancer du côlon..Intervention : Pour la deuxième question
|
| 18 |
+
concernant le syndrome main-pied, c’est effectivement un effet indésirable bien
|
| 19 |
+
connu des fluoropyrimidines. Il faut tout de même bien dire qu’il concerne surtout
|
| 20 |
+
la forme orale des fluoropyrimidines. J’ai repris dans une étude pivotale sur
|
| 21 |
+
des protocoles très habituels dans le cancer du côlon, le FOLFOX et le XELOX,
|
| 22 |
+
une incidence des syndromes main-pied d’à peu près 10 % pour le 5-FU, mais seulement
|
| 23 |
+
de 1 % de grade III, alors qu’il est de 30 % pour la capécitabine, dont 6 % de
|
| 24 |
+
grade III. Il est beaucoup plus important pour la forme orale.'
|
| 25 |
+
- 'Résumé de l''intervention : Positionnement des traitements médicaux après les
|
| 26 |
+
traitements conventionnels et anti-TNF..Intervention : C’était la position qui
|
| 27 |
+
avait été prise initialement, à savoir de les positionner après les autres traitements.
|
| 28 |
+
Si on cite les traitements, j’oublie les traitements conventionnels qui sont,
|
| 29 |
+
disons, les traitements de première ligne. On a ensuite les anti-TNF. On a mis
|
| 30 |
+
« à au moins un anti-TNF », c’était déjà le cas.'
|
| 31 |
+
- 'Résumé de l''intervention : Poursuite d''un traitement médical face à un patient
|
| 32 |
+
en situation critique..Intervention : Je ne suis pas d’accord. Devant ton malade,
|
| 33 |
+
tu dois quand même prendre une décision de savoir si tu le traites d’emblée ou
|
| 34 |
+
pas. Non ?'
|
| 35 |
+
- source_sentence: Quels sont les arguments mis en avant pour justifier l'absence
|
| 36 |
+
de données robustes de morbimortalité dans le dossier sur la CMHo ?
|
| 37 |
+
sentences:
|
| 38 |
+
- 'Résumé de l''intervention : Immunité éventuelle dans le contexte de l''hémophilie..Intervention
|
| 39 |
+
: J’avais juste une dernière petite question sur l’éventuelle immunisation. Nous
|
| 40 |
+
n’avons aucune donnée d’un patient qui se serait immunisé, comme dans l’hémophilie.'
|
| 41 |
+
- 'Résumé de l''intervention : Analyse des données de morbimortalité pour le dossier
|
| 42 |
+
sur la CMHo..Intervention : Je serai plus rapide. Je veux d’abord vous féliciter
|
| 43 |
+
pour cette présentation claire et, pour moi, convaincante. Néanmoins, je voudrais
|
| 44 |
+
revenir sur une limite actuelle du dossier qui a été soulignée par le chef de
|
| 45 |
+
projet au départ, qui est l’absence de données robustes de morbimortalité. Certes,
|
| 46 |
+
la CMHo est une maladie invalidante, mais c’est aussi une maladie grave.'
|
| 47 |
+
- 'Résumé de l''intervention : Étude du dossier TEZSPIRE dans le droit commun et
|
| 48 |
+
pour l''accès précoce..Intervention : Bonjour, Madame Barnig. Merci de nous rejoindre
|
| 49 |
+
pour ce dossier. Nous allons étudier le dossier de TEZSPIRE, à la fois dans le
|
| 50 |
+
droit commun et pour l’accès précoce. Il va nous être présenté par notre chef
|
| 51 |
+
de projet. Ensuite, nous vous passerons la parole, puis à notre expert interne,
|
| 52 |
+
le Docteur Kouzan. Ensuite, nous aurons deux contributions d’associations de patients.'
|
| 53 |
+
- source_sentence: Quels sont les facteurs qui influent sur la nature d'un marqueur
|
| 54 |
+
de surrogate dans des conditions médicales différentes ?
|
| 55 |
+
sentences:
|
| 56 |
+
- 'Résumé de l''intervention : La différence entre la cholangite sclérosante et
|
| 57 |
+
l''audc (autoimmune uveitis and demyelinating disease of the central nervous system)
|
| 58 |
+
en tant que marqueur de surrogate..Intervention : Dans la cholangite sclérosante,
|
| 59 |
+
le fait de normaliser ne change rien, mais ici avec l’AUDC, cela change. Comment
|
| 60 |
+
expliquer le fait que dans un cas c’est un surrogate marker et dans l’autre, pas
|
| 61 |
+
?'
|
| 62 |
+
- 'Résumé de l''intervention : Adoption de processus pour améliorer la prise de
|
| 63 |
+
décision dans l''entreprise..Intervention : L’adoptons-nous sur table ?'
|
| 64 |
+
- 'Résumé de l''intervention : Transmission de souches vaccinales à partir des personnes
|
| 65 |
+
vaccinées par voie nasale..Intervention : Bonjour. Le fait que le nouveau vaccin
|
| 66 |
+
contient des virus vivants et qu’il est administré par voie nasale, faut-il prévoir
|
| 67 |
+
un risque de transmission d’une souche vaccinale à une personne de l’entourage
|
| 68 |
+
non vaccinée. Ce risque existe-t-il ?'
|
| 69 |
+
- source_sentence: Quels sont les éléments supplémentaires que le professionnel de
|
| 70 |
+
santé souhaite connaître pour prendre une décision éclairée après avoir pris connaissance
|
| 71 |
+
d'un cas ?
|
| 72 |
+
sentences:
|
| 73 |
+
- 'Résumé de l''intervention : Étude sur les limites de la compréhension d''une
|
| 74 |
+
maladie rare et étonnante..Intervention : Je crois qu’il y a deux limites restantes.
|
| 75 |
+
D’une part, le recul est encore relativement faible même si l’étude d’extension
|
| 76 |
+
nous donne des données à 2 ans. D’autre part, il y a l’absence de données de morbimortalité.
|
| 77 |
+
Ceci dit, c’est une maladie extrêmement curieuse et étonnante dans son histoire
|
| 78 |
+
naturelle parce que la complication la plus grave, la mort subite, survient chez
|
| 79 |
+
l’adolescent ou l’adulte jeune. Elle est rare au-delà.'
|
| 80 |
+
- 'Résumé de l''intervention : Appel à l''experte pour obtenir des informations
|
| 81 |
+
complémentaires après une commission..Intervention : Ou alors, je peux essayer
|
| 82 |
+
d’appeler l’experte et de lui poser uniquement cette question au téléphone. Elle
|
| 83 |
+
a déjà fait la DPI, donc je peux l’appeler et lui dire que suite à la commission,
|
| 84 |
+
nous avons besoin de cette information complémentaire. Ce serait peut-être moins
|
| 85 |
+
lourd et plus facile pour elle que de faire un rapport, vu que cela a été un peu
|
| 86 |
+
long de l’avoir. Cela pourrait être une façon de l’avoir éventuellement pour la
|
| 87 |
+
prochaine commission. Je lui demande si elle l’a déjà utilisé et ce qu’elle en
|
| 88 |
+
pense.'
|
| 89 |
+
- 'Résumé de l''intervention : La commission de transparence examine les impacts
|
| 90 |
+
sanitaires et environnementaux des installations nucléaires sur la population..Intervention
|
| 91 |
+
: Commission de Transparence MANTADIX – Radiation'
|
| 92 |
+
- source_sentence: Quels sont les mécanismes mis à jour par la Haute Autorité de Santé
|
| 93 |
+
pour enhancer l'examen et la communication au sein des équipes de vente ?
|
| 94 |
+
sentences:
|
| 95 |
+
- 'Résumé de l''intervention : Amélioration des processus d''analyse et de reporting
|
| 96 |
+
pour les équipes de vente..Intervention : Michel Clanet, une question.'
|
| 97 |
+
- 'Résumé de l''intervention : Suivi médian de trois ans et demi des patients..Intervention
|
| 98 |
+
: Effectivement, c’était un suivi médian de trois ans et demi. C’est ce qu’on
|
| 99 |
+
a demandé de faire au biostatisticien en fonction des commentaires qu’on a reçus.
|
| 100 |
+
Le suivi médian de trois ans et demi fait qu’il y avait des patients qui ont actuellement
|
| 101 |
+
un suivi de six ans.'
|
| 102 |
+
- 'Résumé de l''intervention : Importance d''un élément dans un document stratégique
|
| 103 |
+
(RCP)..Intervention : C’est un élément majeur que tu dis là, Étienne. Je suis
|
| 104 |
+
surpris que cela ne figure pas dans le RCP.'
|
| 105 |
+
pipeline_tag: sentence-similarity
|
| 106 |
+
library_name: sentence-transformers
|
| 107 |
+
---
|
| 108 |
+
|
| 109 |
+
# SentenceTransformer based on sentence-transformers/multi-qa-MiniLM-L6-cos-v1
|
| 110 |
+
|
| 111 |
+
This is a [sentence-transformers](https://www.SBERT.net) model finetuned from [sentence-transformers/multi-qa-MiniLM-L6-cos-v1](https://huggingface.co/sentence-transformers/multi-qa-MiniLM-L6-cos-v1). It maps sentences & paragraphs to a 384-dimensional dense vector space and can be used for semantic textual similarity, semantic search, paraphrase mining, text classification, clustering, and more.
|
| 112 |
+
|
| 113 |
+
## Model Details
|
| 114 |
+
|
| 115 |
+
### Model Description
|
| 116 |
+
- **Model Type:** Sentence Transformer
|
| 117 |
+
- **Base model:** [sentence-transformers/multi-qa-MiniLM-L6-cos-v1](https://huggingface.co/sentence-transformers/multi-qa-MiniLM-L6-cos-v1) <!-- at revision b207367332321f8e44f96e224ef15bc607f4dbf0 -->
|
| 118 |
+
- **Maximum Sequence Length:** 512 tokens
|
| 119 |
+
- **Output Dimensionality:** 384 dimensions
|
| 120 |
+
- **Similarity Function:** Cosine Similarity
|
| 121 |
+
<!-- - **Training Dataset:** Unknown -->
|
| 122 |
+
<!-- - **Language:** Unknown -->
|
| 123 |
+
<!-- - **License:** Unknown -->
|
| 124 |
+
|
| 125 |
+
### Model Sources
|
| 126 |
+
|
| 127 |
+
- **Documentation:** [Sentence Transformers Documentation](https://sbert.net)
|
| 128 |
+
- **Repository:** [Sentence Transformers on GitHub](https://github.com/UKPLab/sentence-transformers)
|
| 129 |
+
- **Hugging Face:** [Sentence Transformers on Hugging Face](https://huggingface.co/models?library=sentence-transformers)
|
| 130 |
+
|
| 131 |
+
### Full Model Architecture
|
| 132 |
+
|
| 133 |
+
```
|
| 134 |
+
SentenceTransformer(
|
| 135 |
+
(0): Transformer({'max_seq_length': 512, 'do_lower_case': False}) with Transformer model: BertModel
|
| 136 |
+
(1): Pooling({'word_embedding_dimension': 384, 'pooling_mode_cls_token': False, 'pooling_mode_mean_tokens': True, 'pooling_mode_max_tokens': False, 'pooling_mode_mean_sqrt_len_tokens': False, 'pooling_mode_weightedmean_tokens': False, 'pooling_mode_lasttoken': False, 'include_prompt': True})
|
| 137 |
+
(2): Normalize()
|
| 138 |
+
)
|
| 139 |
+
```
|
| 140 |
+
|
| 141 |
+
## Usage
|
| 142 |
+
|
| 143 |
+
### Direct Usage (Sentence Transformers)
|
| 144 |
+
|
| 145 |
+
First install the Sentence Transformers library:
|
| 146 |
+
|
| 147 |
+
```bash
|
| 148 |
+
pip install -U sentence-transformers
|
| 149 |
+
```
|
| 150 |
+
|
| 151 |
+
Then you can load this model and run inference.
|
| 152 |
+
```python
|
| 153 |
+
from sentence_transformers import SentenceTransformer
|
| 154 |
+
|
| 155 |
+
# Download from the 🤗 Hub
|
| 156 |
+
model = SentenceTransformer("sentence_transformers_model_id")  # replace with the actual Hub model ID, e.g. "username/model-name"
|
| 157 |
+
# Run inference
|
| 158 |
+
sentences = [
|
| 159 |
+
"Quels sont les mécanismes mis à jour par la Haute Autorité de Santé pour enhancer l'examen et la communication au sein des équipes de vente ?",
|
| 160 |
+
"Résumé de l'intervention : Amélioration des processus d'analyse et de reporting pour les équipes de vente..Intervention : Michel Clanet, une question.",
|
| 161 |
+
"Résumé de l'intervention : Suivi médian de trois ans et demi des patients..Intervention : Effectivement, c’était un suivi médian de trois ans et demi. C’est ce qu’on a demandé de faire au biostatisticien en fonction des commentaires qu’on a reçus. Le suivi médian de trois ans et demi fait qu’il y avait des patients qui ont actuellement un suivi de six ans.",
|
| 162 |
+
]
|
| 163 |
+
embeddings = model.encode(sentences)
|
| 164 |
+
print(embeddings.shape)
|
| 165 |
+
# [3, 384]
|
| 166 |
+
|
| 167 |
+
# Get the similarity scores for the embeddings
|
| 168 |
+
similarities = model.similarity(embeddings, embeddings)
|
| 169 |
+
print(similarities.shape)
|
| 170 |
+
# [3, 3]
|
| 171 |
+
```
|
| 172 |
+
|
| 173 |
+
<!--
|
| 174 |
+
### Direct Usage (Transformers)
|
| 175 |
+
|
| 176 |
+
<details><summary>Click to see the direct usage in Transformers</summary>
|
| 177 |
+
|
| 178 |
+
</details>
|
| 179 |
+
-->
|
| 180 |
+
|
| 181 |
+
<!--
|
| 182 |
+
### Downstream Usage (Sentence Transformers)
|
| 183 |
+
|
| 184 |
+
You can finetune this model on your own dataset.
|
| 185 |
+
|
| 186 |
+
<details><summary>Click to expand</summary>
|
| 187 |
+
|
| 188 |
+
</details>
|
| 189 |
+
-->
|
| 190 |
+
|
| 191 |
+
<!--
|
| 192 |
+
### Out-of-Scope Use
|
| 193 |
+
|
| 194 |
+
*List how the model may foreseeably be misused and address what users ought not to do with the model.*
|
| 195 |
+
-->
|
| 196 |
+
|
| 197 |
+
<!--
|
| 198 |
+
## Bias, Risks and Limitations
|
| 199 |
+
|
| 200 |
+
*What are the known or foreseeable issues stemming from this model? You could also flag here known failure cases or weaknesses of the model.*
|
| 201 |
+
-->
|
| 202 |
+
|
| 203 |
+
<!--
|
| 204 |
+
### Recommendations
|
| 205 |
+
|
| 206 |
+
*What are recommendations with respect to the foreseeable issues? For example, filtering explicit content.*
|
| 207 |
+
-->
|
| 208 |
+
|
| 209 |
+
## Training Details
|
| 210 |
+
|
| 211 |
+
### Training Dataset
|
| 212 |
+
|
| 213 |
+
#### Unnamed Dataset
|
| 214 |
+
|
| 215 |
+
|
| 216 |
+
* Size: 65,385 training samples
|
| 217 |
+
* Columns: <code>query</code> and <code>answer</code>
|
| 218 |
+
* Approximate statistics based on the first 1000 samples:
|
| 219 |
+
| | query | answer |
|
| 220 |
+
|:--------|:----------------------------------------------------------------------------------|:-------------------------------------------------------------------------------------|
|
| 221 |
+
| type | string | string |
|
| 222 |
+
| details | <ul><li>min: 6 tokens</li><li>mean: 39.67 tokens</li><li>max: 85 tokens</li></ul> | <ul><li>min: 15 tokens</li><li>mean: 125.37 tokens</li><li>max: 512 tokens</li></ul> |
|
| 223 |
+
* Samples:
|
| 224 |
+
| query | answer |
|
| 225 |
+
|:----------------------------------------------------------------------------------------------------------------------------------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
|
| 226 |
+
| <code>Quel est le rôle des patients dans les soins médicaux, selon la Commission de Transparence de la Haute Autorité de Santé ?</code> | <code>Résumé de l'intervention : Importance des patients dans les soins médicaux..Intervention : Ils sont importants quand même, sous réserve de l’effectif de patients, bien sûr.</code> |
|
| 227 |
+
| <code>Quel est le rôle que les médecins généralistes jouent dans le système de santé, selon l'avis des experts ?</code> | <code>Résumé de l'intervention : Rôle des médecins généralistes dans le système de santé..Intervention : Et les médecins généralistes. </code> |
|
| 228 |
+
| <code>Quelle élection a-t-elle été approuvée par un résultat de 17 votes ?</code> | <code>Résumé de l'intervention : Résultats d'un vote..Intervention : Résultats du vote : 17</code> |
|
| 229 |
+
* Loss: [<code>MultipleNegativesRankingLoss</code>](https://sbert.net/docs/package_reference/sentence_transformer/losses.html#multiplenegativesrankingloss) with these parameters:
|
| 230 |
+
```json
|
| 231 |
+
{
|
| 232 |
+
"scale": 20.0,
|
| 233 |
+
"similarity_fct": "cos_sim"
|
| 234 |
+
}
|
| 235 |
+
```
|
| 236 |
+
|
| 237 |
+
### Evaluation Dataset
|
| 238 |
+
|
| 239 |
+
#### Unnamed Dataset
|
| 240 |
+
|
| 241 |
+
|
| 242 |
+
* Size: 16,347 evaluation samples
|
| 243 |
+
* Columns: <code>query</code> and <code>answer</code>
|
| 244 |
+
* Approximate statistics based on the first 1000 samples:
|
| 245 |
+
| | query | answer |
|
| 246 |
+
|:--------|:-----------------------------------------------------------------------------------|:------------------------------------------------------------------------------------|
|
| 247 |
+
| type | string | string |
|
| 248 |
+
| details | <ul><li>min: 11 tokens</li><li>mean: 40.91 tokens</li><li>max: 90 tokens</li></ul> | <ul><li>min: 22 tokens</li><li>mean: 128.9 tokens</li><li>max: 456 tokens</li></ul> |
|
| 249 |
+
* Samples:
|
| 250 |
+
| query | answer |
|
| 251 |
+
|:------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|:---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
|
| 252 |
+
| <code>Quels sont les arguments avancés en faveur de l'utilisation de la médiane de rémission durable (MRD) comme critère intermédiaire pour les autorisations de traitement médicamenteuse précoce ?</code> | <code>Résumé de l'intervention : Utilisation de la médiane de rémission durable (MRD) comme critère intermédiaire pour les autorisations de traitement médicamenteuse précoce..Intervention : Cette évaluation de la possibilité d’utiliser la MRD comme surrogate est également faite au niveau réglementaire pour les AMM. Cela permettrait, si nous pouvons obtenir plus rapidement ces données sur la MRD, de donner des AMM plus précoces. Maintenant, jusqu’à présent, le guideline qui est en train d’être développé par l’EMA va plus dans le sens de l’utiliser comme un critère intermédiaire et non pas un surrogate et de confirmer les AMM précoces par les données de survie globale. L’obtention des données de survie globale reste la règle, mais nous pouvons avoir des AMM plus précoces sur la base de la MRD. C’est en réflexion à l’EMA et il n’y a aucune étude correctement menée d’un point de vue méthodologique pour valider le fait que la MRD soit un surrogate, pour l’OS ou la PFS. Il y a énormément de publi...</code> |
|
| 253 |
+
| <code>Quels sont les éléments-clés de la stratégie thérapeutique améliorée pour les patients atteints d'un cancer pulmonaire avec des mutations spécifiques dans les exons 19 et 21 ?</code> | <code>Résumé de l'intervention : Amélioration de la stratégie thérapeutique pour les patients atteints d'un cancer pulmonaire avec des mutations spécifiques dans les exons 19 et 21..Intervention : En accord avec vos discussions, nous rajouterons dans la stratégie thérapeutique que les données sont uniquement dans la mutation activatrice exon 19 et exon 21. Par ailleurs, nous préciserons que tout ce qui concerne les mutations 790M, il faut se référer à l’avis spécifique de TAGRISSO pour que ce soit très clair. Nous vous soumettrons le libellé le 18 pour relecture.</code> |
|
| 254 |
+
| <code>Quels sont les résultats attendus du programme TSOLUDOSE ?</code> | <code>Résumé de l'intervention : Présentation du projet TSOLUDOSE et plan de réunion..Intervention : On démarre avec TSOLUDOSE. Bonjour Madame Stoupa. Merci de nous avoir rejoints. Nous allons voir le dossier TSOLUDOSE qui va nous être présenté par la chef de projet. Ensuite, on vous passera la parole, puis il y aura un temps d’échange.</code> |
|
| 255 |
+
* Loss: [<code>MultipleNegativesRankingLoss</code>](https://sbert.net/docs/package_reference/sentence_transformer/losses.html#multiplenegativesrankingloss) with these parameters:
|
| 256 |
+
```json
|
| 257 |
+
{
|
| 258 |
+
"scale": 20.0,
|
| 259 |
+
"similarity_fct": "cos_sim"
|
| 260 |
+
}
|
| 261 |
+
```
|
| 262 |
+
|
| 263 |
+
### Training Hyperparameters
|
| 264 |
+
#### Non-Default Hyperparameters
|
| 265 |
+
|
| 266 |
+
- `eval_strategy`: epoch
|
| 267 |
+
- `per_device_eval_batch_size`: 16
|
| 268 |
+
- `learning_rate`: 3e-05
|
| 269 |
+
- `warmup_ratio`: 0.1
|
| 270 |
+
- `fp16`: True
|
| 271 |
+
- `load_best_model_at_end`: True
|
| 272 |
+
- `ddp_find_unused_parameters`: False
|
| 273 |
+
|
| 274 |
+
#### All Hyperparameters
|
| 275 |
+
<details><summary>Click to expand</summary>
|
| 276 |
+
|
| 277 |
+
- `overwrite_output_dir`: False
|
| 278 |
+
- `do_predict`: False
|
| 279 |
+
- `eval_strategy`: epoch
|
| 280 |
+
- `prediction_loss_only`: True
|
| 281 |
+
- `per_device_train_batch_size`: 8
|
| 282 |
+
- `per_device_eval_batch_size`: 16
|
| 283 |
+
- `per_gpu_train_batch_size`: None
|
| 284 |
+
- `per_gpu_eval_batch_size`: None
|
| 285 |
+
- `gradient_accumulation_steps`: 1
|
| 286 |
+
- `eval_accumulation_steps`: None
|
| 287 |
+
- `torch_empty_cache_steps`: None
|
| 288 |
+
- `learning_rate`: 3e-05
|
| 289 |
+
- `weight_decay`: 0.0
|
| 290 |
+
- `adam_beta1`: 0.9
|
| 291 |
+
- `adam_beta2`: 0.999
|
| 292 |
+
- `adam_epsilon`: 1e-08
|
| 293 |
+
- `max_grad_norm`: 1.0
|
| 294 |
+
- `num_train_epochs`: 3
|
| 295 |
+
- `max_steps`: -1
|
| 296 |
+
- `lr_scheduler_type`: linear
|
| 297 |
+
- `lr_scheduler_kwargs`: {}
|
| 298 |
+
- `warmup_ratio`: 0.1
|
| 299 |
+
- `warmup_steps`: 0
|
| 300 |
+
- `log_level`: passive
|
| 301 |
+
- `log_level_replica`: warning
|
| 302 |
+
- `log_on_each_node`: True
|
| 303 |
+
- `logging_nan_inf_filter`: True
|
| 304 |
+
- `save_safetensors`: True
|
| 305 |
+
- `save_on_each_node`: False
|
| 306 |
+
- `save_only_model`: False
|
| 307 |
+
- `restore_callback_states_from_checkpoint`: False
|
| 308 |
+
- `no_cuda`: False
|
| 309 |
+
- `use_cpu`: False
|
| 310 |
+
- `use_mps_device`: False
|
| 311 |
+
- `seed`: 42
|
| 312 |
+
- `data_seed`: None
|
| 313 |
+
- `jit_mode_eval`: False
|
| 314 |
+
- `use_ipex`: False
|
| 315 |
+
- `bf16`: False
|
| 316 |
+
- `fp16`: True
|
| 317 |
+
- `fp16_opt_level`: O1
|
| 318 |
+
- `half_precision_backend`: auto
|
| 319 |
+
- `bf16_full_eval`: False
|
| 320 |
+
- `fp16_full_eval`: False
|
| 321 |
+
- `tf32`: None
|
| 322 |
+
- `local_rank`: 0
|
| 323 |
+
- `ddp_backend`: None
|
| 324 |
+
- `tpu_num_cores`: None
|
| 325 |
+
- `tpu_metrics_debug`: False
|
| 326 |
+
- `debug`: []
|
| 327 |
+
- `dataloader_drop_last`: False
|
| 328 |
+
- `dataloader_num_workers`: 0
|
| 329 |
+
- `dataloader_prefetch_factor`: None
|
| 330 |
+
- `past_index`: -1
|
| 331 |
+
- `disable_tqdm`: False
|
| 332 |
+
- `remove_unused_columns`: True
|
| 333 |
+
- `label_names`: None
|
| 334 |
+
- `load_best_model_at_end`: True
|
| 335 |
+
- `ignore_data_skip`: False
|
| 336 |
+
- `fsdp`: []
|
| 337 |
+
- `fsdp_min_num_params`: 0
|
| 338 |
+
- `fsdp_config`: {'min_num_params': 0, 'xla': False, 'xla_fsdp_v2': False, 'xla_fsdp_grad_ckpt': False}
|
| 339 |
+
- `fsdp_transformer_layer_cls_to_wrap`: None
|
| 340 |
+
- `accelerator_config`: {'split_batches': False, 'dispatch_batches': None, 'even_batches': True, 'use_seedable_sampler': True, 'non_blocking': False, 'gradient_accumulation_kwargs': None}
|
| 341 |
+
- `deepspeed`: None
|
| 342 |
+
- `label_smoothing_factor`: 0.0
|
| 343 |
+
- `optim`: adamw_torch
|
| 344 |
+
- `optim_args`: None
|
| 345 |
+
- `adafactor`: False
|
| 346 |
+
- `group_by_length`: False
|
| 347 |
+
- `length_column_name`: length
|
| 348 |
+
- `ddp_find_unused_parameters`: False
|
| 349 |
+
- `ddp_bucket_cap_mb`: None
|
| 350 |
+
- `ddp_broadcast_buffers`: False
|
| 351 |
+
- `dataloader_pin_memory`: True
|
| 352 |
+
- `dataloader_persistent_workers`: False
|
| 353 |
+
- `skip_memory_metrics`: True
|
| 354 |
+
- `use_legacy_prediction_loop`: False
|
| 355 |
+
- `push_to_hub`: False
|
| 356 |
+
- `resume_from_checkpoint`: None
|
| 357 |
+
- `hub_model_id`: None
|
| 358 |
+
- `hub_strategy`: every_save
|
| 359 |
+
- `hub_private_repo`: None
|
| 360 |
+
- `hub_always_push`: False
|
| 361 |
+
- `gradient_checkpointing`: False
|
| 362 |
+
- `gradient_checkpointing_kwargs`: None
|
| 363 |
+
- `include_inputs_for_metrics`: False
|
| 364 |
+
- `include_for_metrics`: []
|
| 365 |
+
- `eval_do_concat_batches`: True
|
| 366 |
+
- `fp16_backend`: auto
|
| 367 |
+
- `push_to_hub_model_id`: None
|
| 368 |
+
- `push_to_hub_organization`: None
|
| 369 |
+
- `mp_parameters`:
|
| 370 |
+
- `auto_find_batch_size`: False
|
| 371 |
+
- `full_determinism`: False
|
| 372 |
+
- `torchdynamo`: None
|
| 373 |
+
- `ray_scope`: last
|
| 374 |
+
- `ddp_timeout`: 1800
|
| 375 |
+
- `torch_compile`: False
|
| 376 |
+
- `torch_compile_backend`: None
|
| 377 |
+
- `torch_compile_mode`: None
|
| 378 |
+
- `dispatch_batches`: None
|
| 379 |
+
- `split_batches`: None
|
| 380 |
+
- `include_tokens_per_second`: False
|
| 381 |
+
- `include_num_input_tokens_seen`: False
|
| 382 |
+
- `neftune_noise_alpha`: None
|
| 383 |
+
- `optim_target_modules`: None
|
| 384 |
+
- `batch_eval_metrics`: False
|
| 385 |
+
- `eval_on_start`: False
|
| 386 |
+
- `use_liger_kernel`: False
|
| 387 |
+
- `eval_use_gather_object`: False
|
| 388 |
+
- `average_tokens_across_devices`: False
|
| 389 |
+
- `prompts`: None
|
| 390 |
+
- `batch_sampler`: batch_sampler
|
| 391 |
+
- `multi_dataset_batch_sampler`: proportional
|
| 392 |
+
|
| 393 |
+
</details>
|
| 394 |
+
|
| 395 |
+
### Training Logs
|
| 396 |
+
<details><summary>Click to expand</summary>
|
| 397 |
+
|
| 398 |
+
| Epoch | Step | Training Loss | Validation Loss |
|
| 399 |
+
|:------:|:-----:|:-------------:|:---------------:|
|
| 400 |
+
| 0.0031 | 25 | 0.4634 | - |
|
| 401 |
+
| 0.0061 | 50 | 0.6623 | - |
|
| 402 |
+
| 0.0092 | 75 | 0.5179 | - |
|
| 403 |
+
| 0.0122 | 100 | 0.5123 | - |
|
| 404 |
+
| 0.0153 | 125 | 0.5838 | - |
|
| 405 |
+
| 0.0184 | 150 | 0.3665 | - |
|
| 406 |
+
| 0.0214 | 175 | 0.4906 | - |
|
| 407 |
+
| 0.0245 | 200 | 0.3837 | - |
|
| 408 |
+
| 0.0275 | 225 | 0.4081 | - |
|
| 409 |
+
| 0.0306 | 250 | 0.4475 | - |
|
| 410 |
+
| 0.0336 | 275 | 0.2888 | - |
|
| 411 |
+
| 0.0367 | 300 | 0.3451 | - |
|
| 412 |
+
| 0.0398 | 325 | 0.3237 | - |
|
| 413 |
+
| 0.0428 | 350 | 0.3226 | - |
|
| 414 |
+
| 0.0459 | 375 | 0.2329 | - |
|
| 415 |
+
| 0.0489 | 400 | 0.3523 | - |
|
| 416 |
+
| 0.0520 | 425 | 0.2611 | - |
|
| 417 |
+
| 0.0551 | 450 | 0.3416 | - |
|
| 418 |
+
| 0.0581 | 475 | 0.3139 | - |
|
| 419 |
+
| 0.0612 | 500 | 0.3361 | - |
|
| 420 |
+
| 0.0642 | 525 | 0.2595 | - |
|
| 421 |
+
| 0.0673 | 550 | 0.3333 | - |
|
| 422 |
+
| 0.0703 | 575 | 0.2823 | - |
|
| 423 |
+
| 0.0734 | 600 | 0.2506 | - |
|
| 424 |
+
| 0.0765 | 625 | 0.2207 | - |
|
| 425 |
+
| 0.0795 | 650 | 0.2047 | - |
|
| 426 |
+
| 0.0826 | 675 | 0.3488 | - |
|
| 427 |
+
| 0.0856 | 700 | 0.1989 | - |
|
| 428 |
+
| 0.0887 | 725 | 0.3214 | - |
|
| 429 |
+
| 0.0918 | 750 | 0.2545 | - |
|
| 430 |
+
| 0.0948 | 775 | 0.2292 | - |
|
| 431 |
+
| 0.0979 | 800 | 0.262 | - |
|
| 432 |
+
| 0.1009 | 825 | 0.2708 | - |
|
| 433 |
+
| 0.1040 | 850 | 0.1642 | - |
|
| 434 |
+
| 0.1070 | 875 | 0.217 | - |
|
| 435 |
+
| 0.1101 | 900 | 0.3412 | - |
|
| 436 |
+
| 0.1132 | 925 | 0.1924 | - |
|
| 437 |
+
| 0.1162 | 950 | 0.2345 | - |
|
| 438 |
+
| 0.1193 | 975 | 0.2333 | - |
|
| 439 |
+
| 0.1223 | 1000 | 0.2043 | - |
|
| 440 |
+
| 0.1254 | 1025 | 0.3976 | - |
|
| 441 |
+
| 0.1285 | 1050 | 0.153 | - |
|
| 442 |
+
| 0.1315 | 1075 | 0.2229 | - |
|
| 443 |
+
| 0.1346 | 1100 | 0.2586 | - |
|
| 444 |
+
| 0.1376 | 1125 | 0.1891 | - |
|
| 445 |
+
| 0.1407 | 1150 | 0.2397 | - |
|
| 446 |
+
| 0.1437 | 1175 | 0.1781 | - |
|
| 447 |
+
| 0.1468 | 1200 | 0.1496 | - |
|
| 448 |
+
| 0.1499 | 1225 | 0.1242 | - |
|
| 449 |
+
| 0.1529 | 1250 | 0.2476 | - |
|
| 450 |
+
| 0.1560 | 1275 | 0.3055 | - |
|
| 451 |
+
| 0.1590 | 1300 | 0.2766 | - |
|
| 452 |
+
| 0.1621 | 1325 | 0.2105 | - |
|
| 453 |
+
| 0.1652 | 1350 | 0.1651 | - |
|
| 454 |
+
| 0.1682 | 1375 | 0.1884 | - |
|
| 455 |
+
| 0.1713 | 1400 | 0.1208 | - |
|
| 456 |
+
| 0.1743 | 1425 | 0.2336 | - |
|
| 457 |
+
| 0.1774 | 1450 | 0.2143 | - |
|
| 458 |
+
| 0.1805 | 1475 | 0.2266 | - |
|
| 459 |
+
| 0.1835 | 1500 | 0.2439 | - |
|
| 460 |
+
| 0.1866 | 1525 | 0.2025 | - |
|
| 461 |
+
| 0.1896 | 1550 | 0.1633 | - |
|
| 462 |
+
| 0.1927 | 1575 | 0.2456 | - |
|
| 463 |
+
| 0.1957 | 1600 | 0.2107 | - |
|
| 464 |
+
| 0.1988 | 1625 | 0.1787 | - |
|
| 465 |
+
| 0.2019 | 1650 | 0.2183 | - |
|
| 466 |
+
| 0.2049 | 1675 | 0.1278 | - |
|
| 467 |
+
| 0.2080 | 1700 | 0.2737 | - |
|
| 468 |
+
| 0.2110 | 1725 | 0.1527 | - |
|
| 469 |
+
| 0.2141 | 1750 | 0.1925 | - |
|
| 470 |
+
| 0.2172 | 1775 | 0.209 | - |
|
| 471 |
+
| 0.2202 | 1800 | 0.1842 | - |
|
| 472 |
+
| 0.2233 | 1825 | 0.1556 | - |
|
| 473 |
+
| 0.2263 | 1850 | 0.252 | - |
|
| 474 |
+
| 0.2294 | 1875 | 0.1634 | - |
|
| 475 |
+
| 0.2324 | 1900 | 0.2165 | - |
|
| 476 |
+
| 0.2355 | 1925 | 0.2361 | - |
|
| 477 |
+
| 0.2386 | 1950 | 0.1322 | - |
|
| 478 |
+
| 0.2416 | 1975 | 0.1764 | - |
|
| 479 |
+
| 0.2447 | 2000 | 0.2004 | - |
|
| 480 |
+
| 0.2477 | 2025 | 0.1567 | - |
|
| 481 |
+
| 0.2508 | 2050 | 0.2997 | - |
|
| 482 |
+
| 0.2539 | 2075 | 0.1592 | - |
|
| 483 |
+
| 0.2569 | 2100 | 0.169 | - |
|
| 484 |
+
| 0.2600 | 2125 | 0.1631 | - |
|
| 485 |
+
| 0.2630 | 2150 | 0.2428 | - |
|
| 486 |
+
| 0.2661 | 2175 | 0.1245 | - |
|
| 487 |
+
| 0.2691 | 2200 | 0.1736 | - |
|
| 488 |
+
| 0.2722 | 2225 | 0.238 | - |
|
| 489 |
+
| 0.2753 | 2250 | 0.2143 | - |
|
| 490 |
+
| 0.2783 | 2275 | 0.2033 | - |
|
| 491 |
+
| 0.2814 | 2300 | 0.1806 | - |
|
| 492 |
+
| 0.2844 | 2325 | 0.1792 | - |
|
| 493 |
+
| 0.2875 | 2350 | 0.2122 | - |
|
| 494 |
+
| 0.2906 | 2375 | 0.0998 | - |
|
| 495 |
+
| 0.2936 | 2400 | 0.167 | - |
|
| 496 |
+
| 0.2967 | 2425 | 0.2307 | - |
|
| 497 |
+
| 0.2997 | 2450 | 0.2054 | - |
|
| 498 |
+
| 0.3028 | 2475 | 0.2324 | - |
|
| 499 |
+
| 0.3058 | 2500 | 0.1587 | - |
|
| 500 |
+
| 0.3089 | 2525 | 0.1573 | - |
|
| 501 |
+
| 0.3120 | 2550 | 0.164 | - |
|
| 502 |
+
| 0.3150 | 2575 | 0.2213 | - |
|
| 503 |
+
| 0.3181 | 2600 | 0.2469 | - |
|
| 504 |
+
| 0.3211 | 2625 | 0.1845 | - |
|
| 505 |
+
| 0.3242 | 2650 | 0.2174 | - |
|
| 506 |
+
| 0.3273 | 2675 | 0.1759 | - |
|
| 507 |
+
| 0.3303 | 2700 | 0.159 | - |
|
| 508 |
+
| 0.3334 | 2725 | 0.2561 | - |
|
| 509 |
+
| 0.3364 | 2750 | 0.1521 | - |
|
| 510 |
+
| 0.3395 | 2775 | 0.2799 | - |
|
| 511 |
+
| 0.3425 | 2800 | 0.1329 | - |
|
| 512 |
+
| 0.3456 | 2825 | 0.2202 | - |
|
| 513 |
+
| 0.3487 | 2850 | 0.2064 | - |
|
| 514 |
+
| 0.3517 | 2875 | 0.2542 | - |
|
| 515 |
+
| 0.3548 | 2900 | 0.1496 | - |
|
| 516 |
+
| 0.3578 | 2925 | 0.2602 | - |
|
| 517 |
+
| 0.3609 | 2950 | 0.1359 | - |
|
| 518 |
+
| 0.3640 | 2975 | 0.2042 | - |
|
| 519 |
+
| 0.3670 | 3000 | 0.2078 | - |
|
| 520 |
+
| 0.3701 | 3025 | 0.2054 | - |
|
| 521 |
+
| 0.3731 | 3050 | 0.1562 | - |
|
| 522 |
+
| 0.3762 | 3075 | 0.1402 | - |
|
| 523 |
+
| 0.3793 | 3100 | 0.1389 | - |
|
| 524 |
+
| 0.3823 | 3125 | 0.2483 | - |
|
| 525 |
+
| 0.3854 | 3150 | 0.2094 | - |
|
| 526 |
+
| 0.3884 | 3175 | 0.1625 | - |
|
| 527 |
+
| 0.3915 | 3200 | 0.1478 | - |
|
| 528 |
+
| 0.3945 | 3225 | 0.1862 | - |
|
| 529 |
+
| 0.3976 | 3250 | 0.1577 | - |
|
| 530 |
+
| 0.4007 | 3275 | 0.1787 | - |
|
| 531 |
+
| 0.4037 | 3300 | 0.2874 | - |
|
| 532 |
+
| 0.4068 | 3325 | 0.183 | - |
|
| 533 |
+
| 0.4098 | 3350 | 0.2002 | - |
|
| 534 |
+
| 0.4129 | 3375 | 0.1167 | - |
|
| 535 |
+
| 0.4160 | 3400 | 0.1524 | - |
|
| 536 |
+
| 0.4190 | 3425 | 0.1635 | - |
|
| 537 |
+
| 0.4221 | 3450 | 0.1452 | - |
|
| 538 |
+
| 0.4251 | 3475 | 0.2315 | - |
|
| 539 |
+
| 0.4282 | 3500 | 0.2945 | - |
|
| 540 |
+
| 0.4312 | 3525 | 0.2186 | - |
|
| 541 |
+
| 0.4343 | 3550 | 0.1973 | - |
|
| 542 |
+
| 0.4374 | 3575 | 0.1809 | - |
|
| 543 |
+
| 0.4404 | 3600 | 0.1561 | - |
|
| 544 |
+
| 0.4435 | 3625 | 0.2043 | - |
|
| 545 |
+
| 0.4465 | 3650 | 0.1558 | - |
|
| 546 |
+
| 0.4496 | 3675 | 0.1111 | - |
|
| 547 |
+
| 0.4527 | 3700 | 0.1182 | - |
|
| 548 |
+
| 0.4557 | 3725 | 0.1715 | - |
|
| 549 |
+
| 0.4588 | 3750 | 0.1547 | - |
|
| 550 |
+
| 0.4618 | 3775 | 0.1352 | - |
|
| 551 |
+
| 0.4649 | 3800 | 0.1227 | - |
|
| 552 |
+
| 0.4679 | 3825 | 0.1636 | - |
|
| 553 |
+
| 0.4710 | 3850 | 0.1404 | - |
|
| 554 |
+
| 0.4741 | 3875 | 0.1296 | - |
|
| 555 |
+
| 0.4771 | 3900 | 0.0998 | - |
|
| 556 |
+
| 0.4802 | 3925 | 0.1905 | - |
|
| 557 |
+
| 0.4832 | 3950 | 0.1034 | - |
|
| 558 |
+
| 0.4863 | 3975 | 0.138 | - |
|
| 559 |
+
| 0.4894 | 4000 | 0.1816 | - |
|
| 560 |
+
| 0.4924 | 4025 | 0.1009 | - |
|
| 561 |
+
| 0.4955 | 4050 | 0.1532 | - |
|
| 562 |
+
| 0.4985 | 4075 | 0.2212 | - |
|
| 563 |
+
| 0.5016 | 4100 | 0.1275 | - |
|
| 564 |
+
| 0.5046 | 4125 | 0.1396 | - |
|
| 565 |
+
| 0.5077 | 4150 | 0.2306 | - |
|
| 566 |
+
| 0.5108 | 4175 | 0.1919 | - |
|
| 567 |
+
| 0.5138 | 4200 | 0.1542 | - |
|
| 568 |
+
| 0.5169 | 4225 | 0.0823 | - |
|
| 569 |
+
| 0.5199 | 4250 | 0.1708 | - |
|
| 570 |
+
| 0.5230 | 4275 | 0.0686 | - |
|
| 571 |
+
| 0.5261 | 4300 | 0.1135 | - |
|
| 572 |
+
| 0.5291 | 4325 | 0.1168 | - |
|
| 573 |
+
| 0.5322 | 4350 | 0.1454 | - |
|
| 574 |
+
| 0.5352 | 4375 | 0.2107 | - |
|
| 575 |
+
| 0.5383 | 4400 | 0.1899 | - |
|
| 576 |
+
| 0.5414 | 4425 | 0.1785 | - |
|
| 577 |
+
| 0.5444 | 4450 | 0.1686 | - |
|
| 578 |
+
| 0.5475 | 4475 | 0.1685 | - |
|
| 579 |
+
| 0.5505 | 4500 | 0.069 | - |
|
| 580 |
+
| 0.5536 | 4525 | 0.1539 | - |
|
| 581 |
+
| 0.5566 | 4550 | 0.2239 | - |
|
| 582 |
+
| 0.5597 | 4575 | 0.1803 | - |
|
| 583 |
+
| 0.5628 | 4600 | 0.151 | - |
|
| 584 |
+
| 0.5658 | 4625 | 0.1476 | - |
|
| 585 |
+
| 0.5689 | 4650 | 0.1146 | - |
|
| 586 |
+
| 0.5719 | 4675 | 0.0769 | - |
|
| 587 |
+
| 0.5750 | 4700 | 0.2031 | - |
|
| 588 |
+
| 0.5781 | 4725 | 0.2583 | - |
|
| 589 |
+
| 0.5811 | 4750 | 0.1913 | - |
|
| 590 |
+
| 0.5842 | 4775 | 0.0624 | - |
|
| 591 |
+
| 0.5872 | 4800 | 0.146 | - |
|
| 592 |
+
| 0.5903 | 4825 | 0.1735 | - |
|
| 593 |
+
| 0.5933 | 4850 | 0.2367 | - |
|
| 594 |
+
| 0.5964 | 4875 | 0.0404 | - |
|
| 595 |
+
| 0.5995 | 4900 | 0.1317 | - |
|
| 596 |
+
| 0.6025 | 4925 | 0.1505 | - |
|
| 597 |
+
| 0.6056 | 4950 | 0.1177 | - |
|
| 598 |
+
| 0.6086 | 4975 | 0.1686 | - |
|
| 599 |
+
| 0.6117 | 5000 | 0.1565 | - |
|
| 600 |
+
| 0.6148 | 5025 | 0.1782 | - |
|
| 601 |
+
| 0.6178 | 5050 | 0.1167 | - |
|
| 602 |
+
| 0.6209 | 5075 | 0.093 | - |
|
| 603 |
+
| 0.6239 | 5100 | 0.1211 | - |
|
| 604 |
+
| 0.6270 | 5125 | 0.1342 | - |
|
| 605 |
+
| 0.6300 | 5150 | 0.0902 | - |
|
| 606 |
+
| 0.6331 | 5175 | 0.1725 | - |
|
| 607 |
+
| 0.6362 | 5200 | 0.1993 | - |
|
| 608 |
+
| 0.6392 | 5225 | 0.1347 | - |
|
| 609 |
+
| 0.6423 | 5250 | 0.1646 | - |
|
| 610 |
+
| 0.6453 | 5275 | 0.2363 | - |
|
| 611 |
+
| 0.6484 | 5300 | 0.1263 | - |
|
| 612 |
+
| 0.6515 | 5325 | 0.1422 | - |
|
| 613 |
+
| 0.6545 | 5350 | 0.105 | - |
|
| 614 |
+
| 0.6576 | 5375 | 0.1369 | - |
|
| 615 |
+
| 0.6606 | 5400 | 0.117 | - |
|
| 616 |
+
| 0.6637 | 5425 | 0.0791 | - |
|
| 617 |
+
| 0.6667 | 5450 | 0.1199 | - |
|
| 618 |
+
| 0.6698 | 5475 | 0.1223 | - |
|
| 619 |
+
| 0.6729 | 5500 | 0.1639 | - |
|
| 620 |
+
| 0.6759 | 5525 | 0.2 | - |
|
| 621 |
+
| 0.6790 | 5550 | 0.22 | - |
|
| 622 |
+
| 0.6820 | 5575 | 0.1644 | - |
|
| 623 |
+
| 0.6851 | 5600 | 0.1913 | - |
|
| 624 |
+
| 0.6882 | 5625 | 0.1272 | - |
|
| 625 |
+
| 0.6912 | 5650 | 0.0729 | - |
|
| 626 |
+
| 0.6943 | 5675 | 0.1621 | - |
|
| 627 |
+
| 0.6973 | 5700 | 0.1247 | - |
|
| 628 |
+
| 0.7004 | 5725 | 0.1516 | - |
|
| 629 |
+
| 0.7034 | 5750 | 0.1615 | - |
|
| 630 |
+
| 0.7065 | 5775 | 0.0847 | - |
|
| 631 |
+
| 0.7096 | 5800 | 0.1375 | - |
|
| 632 |
+
| 0.7126 | 5825 | 0.1516 | - |
|
| 633 |
+
| 0.7157 | 5850 | 0.0965 | - |
|
| 634 |
+
| 0.7187 | 5875 | 0.1871 | - |
|
| 635 |
+
| 0.7218 | 5900 | 0.1718 | - |
|
| 636 |
+
| 0.7249 | 5925 | 0.1618 | - |
|
| 637 |
+
| 0.7279 | 5950 | 0.1375 | - |
|
| 638 |
+
| 0.7310 | 5975 | 0.128 | - |
|
| 639 |
+
| 0.7340 | 6000 | 0.0923 | - |
|
| 640 |
+
| 0.7371 | 6025 | 0.1471 | - |
|
| 641 |
+
| 0.7402 | 6050 | 0.1342 | - |
|
| 642 |
+
| 0.7432 | 6075 | 0.1736 | - |
|
| 643 |
+
| 0.7463 | 6100 | 0.1238 | - |
|
| 644 |
+
| 0.7493 | 6125 | 0.1637 | - |
|
| 645 |
+
| 0.7524 | 6150 | 0.1393 | - |
|
| 646 |
+
| 0.7554 | 6175 | 0.1169 | - |
|
| 647 |
+
| 0.7585 | 6200 | 0.077 | - |
|
| 648 |
+
| 0.7616 | 6225 | 0.1255 | - |
|
| 649 |
+
| 0.7646 | 6250 | 0.2669 | - |
|
| 650 |
+
| 0.7677 | 6275 | 0.1876 | - |
|
| 651 |
+
| 0.7707 | 6300 | 0.244 | - |
|
| 652 |
+
| 0.7738 | 6325 | 0.1419 | - |
|
| 653 |
+
| 0.7769 | 6350 | 0.1104 | - |
|
| 654 |
+
| 0.7799 | 6375 | 0.2065 | - |
|
| 655 |
+
| 0.7830 | 6400 | 0.193 | - |
|
| 656 |
+
| 0.7860 | 6425 | 0.1584 | - |
|
| 657 |
+
| 0.7891 | 6450 | 0.1375 | - |
|
| 658 |
+
| 0.7921 | 6475 | 0.1323 | - |
|
| 659 |
+
| 0.7952 | 6500 | 0.1611 | - |
|
| 660 |
+
| 0.7983 | 6525 | 0.1505 | - |
|
| 661 |
+
| 0.8013 | 6550 | 0.1574 | - |
|
| 662 |
+
| 0.8044 | 6575 | 0.1829 | - |
|
| 663 |
+
| 0.8074 | 6600 | 0.202 | - |
|
| 664 |
+
| 0.8105 | 6625 | 0.0985 | - |
|
| 665 |
+
| 0.8136 | 6650 | 0.0793 | - |
|
| 666 |
+
| 0.8166 | 6675 | 0.0722 | - |
|
| 667 |
+
| 0.8197 | 6700 | 0.1207 | - |
|
| 668 |
+
| 0.8227 | 6725 | 0.1363 | - |
|
| 669 |
+
| 0.8258 | 6750 | 0.0958 | - |
|
| 670 |
+
| 0.8288 | 6775 | 0.1127 | - |
|
| 671 |
+
| 0.8319 | 6800 | 0.1096 | - |
|
| 672 |
+
| 0.8350 | 6825 | 0.1482 | - |
|
| 673 |
+
| 0.8380 | 6850 | 0.113 | - |
|
| 674 |
+
| 0.8411 | 6875 | 0.0796 | - |
|
| 675 |
+
| 0.8441 | 6900 | 0.1477 | - |
|
| 676 |
+
| 0.8472 | 6925 | 0.1802 | - |
|
| 677 |
+
| 0.8503 | 6950 | 0.1102 | - |
|
| 678 |
+
| 0.8533 | 6975 | 0.1849 | - |
|
| 679 |
+
| 0.8564 | 7000 | 0.2046 | - |
|
| 680 |
+
| 0.8594 | 7025 | 0.1423 | - |
|
| 681 |
+
| 0.8625 | 7050 | 0.1597 | - |
|
| 682 |
+
| 0.8655 | 7075 | 0.0725 | - |
|
| 683 |
+
| 0.8686 | 7100 | 0.1276 | - |
|
| 684 |
+
| 0.8717 | 7125 | 0.1398 | - |
|
| 685 |
+
| 0.8747 | 7150 | 0.1849 | - |
|
| 686 |
+
| 0.8778 | 7175 | 0.1851 | - |
|
| 687 |
+
| 0.8808 | 7200 | 0.1774 | - |
|
| 688 |
+
| 0.8839 | 7225 | 0.1711 | - |
|
| 689 |
+
| 0.8870 | 7250 | 0.2344 | - |
|
| 690 |
+
| 0.8900 | 7275 | 0.0976 | - |
|
| 691 |
+
| 0.8931 | 7300 | 0.087 | - |
|
| 692 |
+
| 0.8961 | 7325 | 0.1068 | - |
|
| 693 |
+
| 0.8992 | 7350 | 0.1098 | - |
|
| 694 |
+
| 0.9023 | 7375 | 0.2501 | - |
|
| 695 |
+
| 0.9053 | 7400 | 0.088 | - |
|
| 696 |
+
| 0.9084 | 7425 | 0.1312 | - |
|
| 697 |
+
| 0.9114 | 7450 | 0.161 | - |
|
| 698 |
+
| 0.9145 | 7475 | 0.1018 | - |
|
| 699 |
+
| 0.9175 | 7500 | 0.162 | - |
|
| 700 |
+
| 0.9206 | 7525 | 0.07 | - |
|
| 701 |
+
| 0.9237 | 7550 | 0.1093 | - |
|
| 702 |
+
| 0.9267 | 7575 | 0.0929 | - |
|
| 703 |
+
| 0.9298 | 7600 | 0.0705 | - |
|
| 704 |
+
| 0.9328 | 7625 | 0.0587 | - |
|
| 705 |
+
| 0.9359 | 7650 | 0.0791 | - |
|
| 706 |
+
| 0.9390 | 7675 | 0.129 | - |
|
| 707 |
+
| 0.9420 | 7700 | 0.0551 | - |
|
| 708 |
+
| 0.9451 | 7725 | 0.0804 | - |
|
| 709 |
+
| 0.9481 | 7750 | 0.1837 | - |
|
| 710 |
+
| 0.9512 | 7775 | 0.0925 | - |
|
| 711 |
+
| 0.9542 | 7800 | 0.1046 | - |
|
| 712 |
+
| 0.9573 | 7825 | 0.0974 | - |
|
| 713 |
+
| 0.9604 | 7850 | 0.1121 | - |
|
| 714 |
+
| 0.9634 | 7875 | 0.1107 | - |
|
| 715 |
+
| 0.9665 | 7900 | 0.0976 | - |
|
| 716 |
+
| 0.9695 | 7925 | 0.176 | - |
|
| 717 |
+
| 0.9726 | 7950 | 0.175 | - |
|
| 718 |
+
| 0.9757 | 7975 | 0.1344 | - |
|
| 719 |
+
| 0.9787 | 8000 | 0.1202 | - |
|
| 720 |
+
| 0.9818 | 8025 | 0.1596 | - |
|
| 721 |
+
| 0.9848 | 8050 | 0.0497 | - |
|
| 722 |
+
| 0.9879 | 8075 | 0.0747 | - |
|
| 723 |
+
| 0.9909 | 8100 | 0.0899 | - |
|
| 724 |
+
| 0.9940 | 8125 | 0.1125 | - |
|
| 725 |
+
| 0.9971 | 8150 | 0.0722 | - |
|
| 726 |
+
| 1.0 | 8174 | - | 0.1978 |
|
| 727 |
+
| 1.0001 | 8175 | 0.2191 | - |
|
| 728 |
+
| 1.0032 | 8200 | 0.1448 | - |
|
| 729 |
+
| 1.0062 | 8225 | 0.0707 | - |
|
| 730 |
+
| 1.0093 | 8250 | 0.0966 | - |
|
| 731 |
+
| 1.0124 | 8275 | 0.076 | - |
|
| 732 |
+
| 1.0154 | 8300 | 0.0869 | - |
|
| 733 |
+
| 1.0185 | 8325 | 0.1216 | - |
|
| 734 |
+
| 1.0215 | 8350 | 0.0646 | - |
|
| 735 |
+
| 1.0246 | 8375 | 0.0421 | - |
|
| 736 |
+
| 1.0276 | 8400 | 0.1311 | - |
|
| 737 |
+
| 1.0307 | 8425 | 0.0735 | - |
|
| 738 |
+
| 1.0338 | 8450 | 0.0598 | - |
|
| 739 |
+
| 1.0368 | 8475 | 0.1033 | - |
|
| 740 |
+
| 1.0399 | 8500 | 0.0892 | - |
|
| 741 |
+
| 1.0429 | 8525 | 0.0882 | - |
|
| 742 |
+
| 1.0460 | 8550 | 0.0569 | - |
|
| 743 |
+
| 1.0491 | 8575 | 0.103 | - |
|
| 744 |
+
| 1.0521 | 8600 | 0.0994 | - |
|
| 745 |
+
| 1.0552 | 8625 | 0.0929 | - |
|
| 746 |
+
| 1.0582 | 8650 | 0.065 | - |
|
| 747 |
+
| 1.0613 | 8675 | 0.08 | - |
|
| 748 |
+
| 1.0644 | 8700 | 0.073 | - |
|
| 749 |
+
| 1.0674 | 8725 | 0.0834 | - |
|
| 750 |
+
| 1.0705 | 8750 | 0.0464 | - |
|
| 751 |
+
| 1.0735 | 8775 | 0.0794 | - |
|
| 752 |
+
| 1.0766 | 8800 | 0.0807 | - |
|
| 753 |
+
| 1.0796 | 8825 | 0.0741 | - |
|
| 754 |
+
| 1.0827 | 8850 | 0.1589 | - |
|
| 755 |
+
| 1.0858 | 8875 | 0.1023 | - |
|
| 756 |
+
| 1.0888 | 8900 | 0.1037 | - |
|
| 757 |
+
| 1.0919 | 8925 | 0.0755 | - |
|
| 758 |
+
| 1.0949 | 8950 | 0.0719 | - |
|
| 759 |
+
| 1.0980 | 8975 | 0.0658 | - |
|
| 760 |
+
| 1.1011 | 9000 | 0.1037 | - |
|
| 761 |
+
| 1.1041 | 9025 | 0.091 | - |
|
| 762 |
+
| 1.1072 | 9050 | 0.1357 | - |
|
| 763 |
+
| 1.1102 | 9075 | 0.0799 | - |
|
| 764 |
+
| 1.1133 | 9100 | 0.0722 | - |
|
| 765 |
+
| 1.1163 | 9125 | 0.0493 | - |
|
| 766 |
+
| 1.1194 | 9150 | 0.0886 | - |
|
| 767 |
+
| 1.1225 | 9175 | 0.0493 | - |
|
| 768 |
+
| 1.1255 | 9200 | 0.1204 | - |
|
| 769 |
+
| 1.1286 | 9225 | 0.1312 | - |
|
| 770 |
+
| 1.1316 | 9250 | 0.1059 | - |
|
| 771 |
+
| 1.1347 | 9275 | 0.1395 | - |
|
| 772 |
+
| 1.1378 | 9300 | 0.0929 | - |
|
| 773 |
+
| 1.1408 | 9325 | 0.0951 | - |
|
| 774 |
+
| 1.1439 | 9350 | 0.0958 | - |
|
| 775 |
+
| 1.1469 | 9375 | 0.0517 | - |
|
| 776 |
+
| 1.1500 | 9400 | 0.0569 | - |
|
| 777 |
+
| 1.1530 | 9425 | 0.0697 | - |
|
| 778 |
+
| 1.1561 | 9450 | 0.0761 | - |
|
| 779 |
+
| 1.1592 | 9475 | 0.0828 | - |
|
| 780 |
+
| 1.1622 | 9500 | 0.0619 | - |
|
| 781 |
+
| 1.1653 | 9525 | 0.0331 | - |
|
| 782 |
+
| 1.1683 | 9550 | 0.0782 | - |
|
| 783 |
+
| 1.1714 | 9575 | 0.0747 | - |
|
| 784 |
+
| 1.1745 | 9600 | 0.0141 | - |
|
| 785 |
+
| 1.1775 | 9625 | 0.1185 | - |
|
| 786 |
+
| 1.1806 | 9650 | 0.0369 | - |
|
| 787 |
+
| 1.1836 | 9675 | 0.0346 | - |
|
| 788 |
+
| 1.1867 | 9700 | 0.0695 | - |
|
| 789 |
+
| 1.1897 | 9725 | 0.0962 | - |
|
| 790 |
+
| 1.1928 | 9750 | 0.0359 | - |
|
| 791 |
+
| 1.1959 | 9775 | 0.1082 | - |
|
| 792 |
+
| 1.1989 | 9800 | 0.0727 | - |
|
| 793 |
+
| 1.2020 | 9825 | 0.0778 | - |
|
| 794 |
+
| 1.2050 | 9850 | 0.029 | - |
|
| 795 |
+
| 1.2081 | 9875 | 0.0927 | - |
|
| 796 |
+
| 1.2112 | 9900 | 0.0901 | - |
|
| 797 |
+
| 1.2142 | 9925 | 0.0984 | - |
|
| 798 |
+
| 1.2173 | 9950 | 0.1247 | - |
|
| 799 |
+
| 1.2203 | 9975 | 0.0413 | - |
|
| 800 |
+
| 1.2234 | 10000 | 0.1201 | - |
|
| 801 |
+
| 1.2264 | 10025 | 0.1157 | - |
|
| 802 |
+
| 1.2295 | 10050 | 0.0765 | - |
|
| 803 |
+
| 1.2326 | 10075 | 0.0472 | - |
|
| 804 |
+
| 1.2356 | 10100 | 0.0703 | - |
|
| 805 |
+
| 1.2387 | 10125 | 0.0758 | - |
|
| 806 |
+
| 1.2417 | 10150 | 0.091 | - |
|
| 807 |
+
| 1.2448 | 10175 | 0.061 | - |
|
| 808 |
+
| 1.2479 | 10200 | 0.1161 | - |
|
| 809 |
+
| 1.2509 | 10225 | 0.1283 | - |
|
| 810 |
+
| 1.2540 | 10250 | 0.0534 | - |
|
| 811 |
+
| 1.2570 | 10275 | 0.0592 | - |
|
| 812 |
+
| 1.2601 | 10300 | 0.0552 | - |
|
| 813 |
+
| 1.2632 | 10325 | 0.0639 | - |
|
| 814 |
+
| 1.2662 | 10350 | 0.0258 | - |
|
| 815 |
+
| 1.2693 | 10375 | 0.1358 | - |
|
| 816 |
+
| 1.2723 | 10400 | 0.0721 | - |
|
| 817 |
+
| 1.2754 | 10425 | 0.1038 | - |
|
| 818 |
+
| 1.2784 | 10450 | 0.0853 | - |
|
| 819 |
+
| 1.2815 | 10475 | 0.1488 | - |
|
| 820 |
+
| 1.2846 | 10500 | 0.0714 | - |
|
| 821 |
+
| 1.2876 | 10525 | 0.0903 | - |
|
| 822 |
+
| 1.2907 | 10550 | 0.0651 | - |
|
| 823 |
+
| 1.2937 | 10575 | 0.1198 | - |
|
| 824 |
+
| 1.2968 | 10600 | 0.1924 | - |
|
| 825 |
+
| 1.2999 | 10625 | 0.0729 | - |
|
| 826 |
+
| 1.3029 | 10650 | 0.0764 | - |
|
| 827 |
+
| 1.3060 | 10675 | 0.0615 | - |
|
| 828 |
+
| 1.3090 | 10700 | 0.0896 | - |
|
| 829 |
+
| 1.3121 | 10725 | 0.1147 | - |
|
| 830 |
+
| 1.3151 | 10750 | 0.0783 | - |
|
| 831 |
+
| 1.3182 | 10775 | 0.0898 | - |
|
| 832 |
+
| 1.3213 | 10800 | 0.0626 | - |
|
| 833 |
+
| 1.3243 | 10825 | 0.1563 | - |
|
| 834 |
+
| 1.3274 | 10850 | 0.1237 | - |
|
| 835 |
+
| 1.3304 | 10875 | 0.0816 | - |
|
| 836 |
+
| 1.3335 | 10900 | 0.0749 | - |
|
| 837 |
+
| 1.3366 | 10925 | 0.0724 | - |
|
| 838 |
+
| 1.3396 | 10950 | 0.0582 | - |
|
| 839 |
+
| 1.3427 | 10975 | 0.0695 | - |
|
| 840 |
+
| 1.3457 | 11000 | 0.1045 | - |
|
| 841 |
+
| 1.3488 | 11025 | 0.0875 | - |
|
| 842 |
+
| 1.3518 | 11050 | 0.049 | - |
|
| 843 |
+
| 1.3549 | 11075 | 0.0615 | - |
|
| 844 |
+
| 1.3580 | 11100 | 0.0801 | - |
|
| 845 |
+
| 1.3610 | 11125 | 0.1718 | - |
|
| 846 |
+
| 1.3641 | 11150 | 0.0813 | - |
|
| 847 |
+
| 1.3671 | 11175 | 0.0657 | - |
|
| 848 |
+
| 1.3702 | 11200 | 0.1482 | - |
|
| 849 |
+
| 1.3733 | 11225 | 0.0574 | - |
|
| 850 |
+
| 1.3763 | 11250 | 0.0277 | - |
|
| 851 |
+
| 1.3794 | 11275 | 0.0878 | - |
|
| 852 |
+
| 1.3824 | 11300 | 0.0735 | - |
|
| 853 |
+
| 1.3855 | 11325 | 0.1031 | - |
|
| 854 |
+
| 1.3885 | 11350 | 0.0332 | - |
|
| 855 |
+
| 1.3916 | 11375 | 0.0332 | - |
|
| 856 |
+
| 1.3947 | 11400 | 0.0327 | - |
|
| 857 |
+
| 1.3977 | 11425 | 0.0957 | - |
|
| 858 |
+
| 1.4008 | 11450 | 0.107 | - |
|
| 859 |
+
| 1.4038 | 11475 | 0.1233 | - |
|
| 860 |
+
| 1.4069 | 11500 | 0.0731 | - |
|
| 861 |
+
| 1.4100 | 11525 | 0.1019 | - |
|
| 862 |
+
| 1.4130 | 11550 | 0.2015 | - |
|
| 863 |
+
| 1.4161 | 11575 | 0.0329 | - |
|
| 864 |
+
| 1.4191 | 11600 | 0.0932 | - |
|
| 865 |
+
| 1.4222 | 11625 | 0.0409 | - |
|
| 866 |
+
| 1.4253 | 11650 | 0.1182 | - |
|
| 867 |
+
| 1.4283 | 11675 | 0.0845 | - |
|
| 868 |
+
| 1.4314 | 11700 | 0.0422 | - |
|
| 869 |
+
| 1.4344 | 11725 | 0.0836 | - |
|
| 870 |
+
| 1.4375 | 11750 | 0.086 | - |
|
| 871 |
+
| 1.4405 | 11775 | 0.0473 | - |
|
| 872 |
+
| 1.4436 | 11800 | 0.0614 | - |
|
| 873 |
+
| 1.4467 | 11825 | 0.1038 | - |
|
| 874 |
+
| 1.4497 | 11850 | 0.0709 | - |
|
| 875 |
+
| 1.4528 | 11875 | 0.0793 | - |
|
| 876 |
+
| 1.4558 | 11900 | 0.1258 | - |
|
| 877 |
+
| 1.4589 | 11925 | 0.0348 | - |
|
| 878 |
+
| 1.4620 | 11950 | 0.0427 | - |
|
| 879 |
+
| 1.4650 | 11975 | 0.0885 | - |
|
| 880 |
+
| 1.4681 | 12000 | 0.0758 | - |
|
| 881 |
+
| 1.4711 | 12025 | 0.15 | - |
|
| 882 |
+
| 1.4742 | 12050 | 0.0575 | - |
|
| 883 |
+
| 1.4772 | 12075 | 0.0878 | - |
|
| 884 |
+
| 1.4803 | 12100 | 0.0951 | - |
|
| 885 |
+
| 1.4834 | 12125 | 0.0334 | - |
|
| 886 |
+
| 1.4864 | 12150 | 0.1157 | - |
|
| 887 |
+
| 1.4895 | 12175 | 0.0891 | - |
|
| 888 |
+
| 1.4925 | 12200 | 0.1109 | - |
|
| 889 |
+
| 1.4956 | 12225 | 0.0411 | - |
|
| 890 |
+
| 1.4987 | 12250 | 0.0973 | - |
|
| 891 |
+
| 1.5017 | 12275 | 0.0762 | - |
|
| 892 |
+
| 1.5048 | 12300 | 0.0885 | - |
|
| 893 |
+
| 1.5078 | 12325 | 0.1157 | - |
|
| 894 |
+
| 1.5109 | 12350 | 0.0911 | - |
|
| 895 |
+
| 1.5139 | 12375 | 0.0961 | - |
|
| 896 |
+
| 1.5170 | 12400 | 0.0331 | - |
|
| 897 |
+
| 1.5201 | 12425 | 0.1411 | - |
|
| 898 |
+
| 1.5231 | 12450 | 0.1192 | - |
|
| 899 |
+
| 1.5262 | 12475 | 0.08 | - |
|
| 900 |
+
| 1.5292 | 12500 | 0.0739 | - |
|
| 901 |
+
| 1.5323 | 12525 | 0.1156 | - |
|
| 902 |
+
| 1.5354 | 12550 | 0.0361 | - |
|
| 903 |
+
| 1.5384 | 12575 | 0.0651 | - |
|
| 904 |
+
| 1.5415 | 12600 | 0.1214 | - |
|
| 905 |
+
| 1.5445 | 12625 | 0.076 | - |
|
| 906 |
+
| 1.5476 | 12650 | 0.0605 | - |
|
| 907 |
+
| 1.5506 | 12675 | 0.0782 | - |
|
| 908 |
+
| 1.5537 | 12700 | 0.0665 | - |
|
| 909 |
+
| 1.5568 | 12725 | 0.0608 | - |
|
| 910 |
+
| 1.5598 | 12750 | 0.0605 | - |
|
| 911 |
+
| 1.5629 | 12775 | 0.0644 | - |
|
| 912 |
+
| 1.5659 | 12800 | 0.1538 | - |
|
| 913 |
+
| 1.5690 | 12825 | 0.0872 | - |
|
| 914 |
+
| 1.5721 | 12850 | 0.0665 | - |
|
| 915 |
+
| 1.5751 | 12875 | 0.1038 | - |
|
| 916 |
+
| 1.5782 | 12900 | 0.0716 | - |
|
| 917 |
+
| 1.5812 | 12925 | 0.0334 | - |
|
| 918 |
+
| 1.5843 | 12950 | 0.0756 | - |
|
| 919 |
+
| 1.5874 | 12975 | 0.0531 | - |
|
| 920 |
+
| 1.5904 | 13000 | 0.122 | - |
|
| 921 |
+
| 1.5935 | 13025 | 0.0695 | - |
|
| 922 |
+
| 1.5965 | 13050 | 0.0456 | - |
|
| 923 |
+
| 1.5996 | 13075 | 0.0527 | - |
|
| 924 |
+
| 1.6026 | 13100 | 0.0155 | - |
|
| 925 |
+
| 1.6057 | 13125 | 0.054 | - |
|
| 926 |
+
| 1.6088 | 13150 | 0.0196 | - |
|
| 927 |
+
| 1.6118 | 13175 | 0.0474 | - |
|
| 928 |
+
| 1.6149 | 13200 | 0.0435 | - |
|
| 929 |
+
| 1.6179 | 13225 | 0.0186 | - |
|
| 930 |
+
| 1.6210 | 13250 | 0.0279 | - |
|
| 931 |
+
| 1.6241 | 13275 | 0.111 | - |
|
| 932 |
+
| 1.6271 | 13300 | 0.0736 | - |
|
| 933 |
+
| 1.6302 | 13325 | 0.069 | - |
|
| 934 |
+
| 1.6332 | 13350 | 0.1208 | - |
|
| 935 |
+
| 1.6363 | 13375 | 0.031 | - |
|
| 936 |
+
| 1.6393 | 13400 | 0.0523 | - |
|
| 937 |
+
| 1.6424 | 13425 | 0.0703 | - |
|
| 938 |
+
| 1.6455 | 13450 | 0.0809 | - |
|
| 939 |
+
| 1.6485 | 13475 | 0.0757 | - |
|
| 940 |
+
| 1.6516 | 13500 | 0.0814 | - |
|
| 941 |
+
| 1.6546 | 13525 | 0.0835 | - |
|
| 942 |
+
| 1.6577 | 13550 | 0.0381 | - |
|
| 943 |
+
| 1.6608 | 13575 | 0.0652 | - |
|
| 944 |
+
| 1.6638 | 13600 | 0.0209 | - |
|
| 945 |
+
| 1.6669 | 13625 | 0.058 | - |
|
| 946 |
+
| 1.6699 | 13650 | 0.103 | - |
|
| 947 |
+
| 1.6730 | 13675 | 0.0967 | - |
|
| 948 |
+
| 1.6760 | 13700 | 0.0886 | - |
|
| 949 |
+
| 1.6791 | 13725 | 0.0756 | - |
|
| 950 |
+
| 1.6822 | 13750 | 0.0843 | - |
|
| 951 |
+
| 1.6852 | 13775 | 0.0683 | - |
|
| 952 |
+
| 1.6883 | 13800 | 0.0297 | - |
|
| 953 |
+
| 1.6913 | 13825 | 0.1176 | - |
|
| 954 |
+
| 1.6944 | 13850 | 0.1119 | - |
|
| 955 |
+
| 1.6975 | 13875 | 0.0698 | - |
|
| 956 |
+
| 1.7005 | 13900 | 0.0564 | - |
|
| 957 |
+
| 1.7036 | 13925 | 0.0454 | - |
|
| 958 |
+
| 1.7066 | 13950 | 0.0747 | - |
|
| 959 |
+
| 1.7097 | 13975 | 0.0686 | - |
|
| 960 |
+
| 1.7127 | 14000 | 0.0899 | - |
|
| 961 |
+
| 1.7158 | 14025 | 0.0858 | - |
|
| 962 |
+
| 1.7189 | 14050 | 0.0526 | - |
|
| 963 |
+
| 1.7219 | 14075 | 0.1271 | - |
|
| 964 |
+
| 1.7250 | 14100 | 0.0589 | - |
|
| 965 |
+
| 1.7280 | 14125 | 0.0455 | - |
|
| 966 |
+
| 1.7311 | 14150 | 0.1218 | - |
|
| 967 |
+
| 1.7342 | 14175 | 0.0454 | - |
|
| 968 |
+
| 1.7372 | 14200 | 0.1021 | - |
|
| 969 |
+
| 1.7403 | 14225 | 0.0975 | - |
|
| 970 |
+
| 1.7433 | 14250 | 0.1124 | - |
|
| 971 |
+
| 1.7464 | 14275 | 0.0203 | - |
|
| 972 |
+
| 1.7494 | 14300 | 0.064 | - |
|
| 973 |
+
| 1.7525 | 14325 | 0.079 | - |
|
| 974 |
+
| 1.7556 | 14350 | 0.0769 | - |
|
| 975 |
+
| 1.7586 | 14375 | 0.0695 | - |
|
| 976 |
+
| 1.7617 | 14400 | 0.0684 | - |
|
| 977 |
+
| 1.7647 | 14425 | 0.0958 | - |
|
| 978 |
+
| 1.7678 | 14450 | 0.0709 | - |
|
| 979 |
+
| 1.7709 | 14475 | 0.0552 | - |
|
| 980 |
+
| 1.7739 | 14500 | 0.0989 | - |
|
| 981 |
+
| 1.7770 | 14525 | 0.038 | - |
|
| 982 |
+
| 1.7800 | 14550 | 0.0601 | - |
|
| 983 |
+
| 1.7831 | 14575 | 0.0931 | - |
|
| 984 |
+
| 1.7862 | 14600 | 0.1052 | - |
|
| 985 |
+
| 1.7892 | 14625 | 0.0767 | - |
|
| 986 |
+
| 1.7923 | 14650 | 0.0295 | - |
|
| 987 |
+
| 1.7953 | 14675 | 0.0838 | - |
|
| 988 |
+
| 1.7984 | 14700 | 0.064 | - |
|
| 989 |
+
| 1.8014 | 14725 | 0.013 | - |
|
| 990 |
+
| 1.8045 | 14750 | 0.0708 | - |
|
| 991 |
+
| 1.8076 | 14775 | 0.0558 | - |
|
| 992 |
+
| 1.8106 | 14800 | 0.0594 | - |
|
| 993 |
+
| 1.8137 | 14825 | 0.054 | - |
|
| 994 |
+
| 1.8167 | 14850 | 0.0715 | - |
|
| 995 |
+
| 1.8198 | 14875 | 0.0825 | - |
|
| 996 |
+
| 1.8229 | 14900 | 0.0737 | - |
|
| 997 |
+
| 1.8259 | 14925 | 0.1511 | - |
|
| 998 |
+
| 1.8290 | 14950 | 0.0757 | - |
|
| 999 |
+
| 1.8320 | 14975 | 0.091 | - |
|
| 1000 |
+
| 1.8351 | 15000 | 0.0635 | - |
|
| 1001 |
+
| 1.8381 | 15025 | 0.0484 | - |
|
| 1002 |
+
| 1.8412 | 15050 | 0.0857 | - |
|
| 1003 |
+
| 1.8443 | 15075 | 0.0737 | - |
|
| 1004 |
+
| 1.8473 | 15100 | 0.0504 | - |
|
| 1005 |
+
| 1.8504 | 15125 | 0.0622 | - |
|
| 1006 |
+
| 1.8534 | 15150 | 0.1256 | - |
|
| 1007 |
+
| 1.8565 | 15175 | 0.0683 | - |
|
| 1008 |
+
| 1.8596 | 15200 | 0.0684 | - |
|
| 1009 |
+
| 1.8626 | 15225 | 0.0984 | - |
|
| 1010 |
+
| 1.8657 | 15250 | 0.0593 | - |
|
| 1011 |
+
| 1.8687 | 15275 | 0.0636 | - |
|
| 1012 |
+
| 1.8718 | 15300 | 0.1062 | - |
|
| 1013 |
+
| 1.8748 | 15325 | 0.1255 | - |
|
| 1014 |
+
| 1.8779 | 15350 | 0.1083 | - |
|
| 1015 |
+
| 1.8810 | 15375 | 0.0307 | - |
|
| 1016 |
+
| 1.8840 | 15400 | 0.1091 | - |
|
| 1017 |
+
| 1.8871 | 15425 | 0.0604 | - |
|
| 1018 |
+
| 1.8901 | 15450 | 0.0717 | - |
|
| 1019 |
+
| 1.8932 | 15475 | 0.0458 | - |
|
| 1020 |
+
| 1.8963 | 15500 | 0.0645 | - |
|
| 1021 |
+
| 1.8993 | 15525 | 0.0968 | - |
|
| 1022 |
+
| 1.9024 | 15550 | 0.0835 | - |
|
| 1023 |
+
| 1.9054 | 15575 | 0.0664 | - |
|
| 1024 |
+
| 1.9085 | 15600 | 0.0647 | - |
|
| 1025 |
+
| 1.9115 | 15625 | 0.1058 | - |
|
| 1026 |
+
| 1.9146 | 15650 | 0.0688 | - |
|
| 1027 |
+
| 1.9177 | 15675 | 0.1614 | - |
|
| 1028 |
+
| 1.9207 | 15700 | 0.0257 | - |
|
| 1029 |
+
| 1.9238 | 15725 | 0.1044 | - |
|
| 1030 |
+
| 1.9268 | 15750 | 0.0805 | - |
|
| 1031 |
+
| 1.9299 | 15775 | 0.1012 | - |
|
| 1032 |
+
| 1.9330 | 15800 | 0.0805 | - |
|
| 1033 |
+
| 1.9360 | 15825 | 0.0878 | - |
|
| 1034 |
+
| 1.9391 | 15850 | 0.034 | - |
|
| 1035 |
+
| 1.9421 | 15875 | 0.0589 | - |
|
| 1036 |
+
| 1.9452 | 15900 | 0.0912 | - |
|
| 1037 |
+
| 1.9483 | 15925 | 0.0635 | - |
|
| 1038 |
+
| 1.9513 | 15950 | 0.0467 | - |
|
| 1039 |
+
| 1.9544 | 15975 | 0.0365 | - |
|
| 1040 |
+
| 1.9574 | 16000 | 0.032 | - |
|
| 1041 |
+
| 1.9605 | 16025 | 0.0729 | - |
|
| 1042 |
+
| 1.9635 | 16050 | 0.0263 | - |
|
| 1043 |
+
| 1.9666 | 16075 | 0.1227 | - |
|
| 1044 |
+
| 1.9697 | 16100 | 0.0317 | - |
|
| 1045 |
+
| 1.9727 | 16125 | 0.0329 | - |
|
| 1046 |
+
| 1.9758 | 16150 | 0.0795 | - |
|
| 1047 |
+
| 1.9788 | 16175 | 0.0805 | - |
|
| 1048 |
+
| 1.9819 | 16200 | 0.0762 | - |
|
| 1049 |
+
| 1.9850 | 16225 | 0.0147 | - |
|
| 1050 |
+
| 1.9880 | 16250 | 0.1105 | - |
|
| 1051 |
+
| 1.9911 | 16275 | 0.0944 | - |
|
| 1052 |
+
| 1.9941 | 16300 | 0.029 | - |
|
| 1053 |
+
| 1.9972 | 16325 | 0.0627 | - |
|
| 1054 |
+
| 2.0 | 16348 | - | 0.1471 |
|
| 1055 |
+
| 2.0002 | 16350 | 0.0878 | - |
|
| 1056 |
+
| 2.0033 | 16375 | 0.0469 | - |
|
| 1057 |
+
| 2.0064 | 16400 | 0.0463 | - |
|
| 1058 |
+
| 2.0094 | 16425 | 0.0414 | - |
|
| 1059 |
+
| 2.0125 | 16450 | 0.0386 | - |
|
| 1060 |
+
| 2.0155 | 16475 | 0.0489 | - |
|
| 1061 |
+
| 2.0186 | 16500 | 0.0437 | - |
|
| 1062 |
+
| 2.0217 | 16525 | 0.0238 | - |
|
| 1063 |
+
| 2.0247 | 16550 | 0.023 | - |
|
| 1064 |
+
| 2.0278 | 16575 | 0.0424 | - |
|
| 1065 |
+
| 2.0308 | 16600 | 0.0457 | - |
|
| 1066 |
+
| 2.0339 | 16625 | 0.0374 | - |
|
| 1067 |
+
| 2.0369 | 16650 | 0.0774 | - |
|
| 1068 |
+
| 2.0400 | 16675 | 0.0363 | - |
|
| 1069 |
+
| 2.0431 | 16700 | 0.0237 | - |
|
| 1070 |
+
| 2.0461 | 16725 | 0.0365 | - |
|
| 1071 |
+
| 2.0492 | 16750 | 0.0211 | - |
|
| 1072 |
+
| 2.0522 | 16775 | 0.0434 | - |
|
| 1073 |
+
| 2.0553 | 16800 | 0.0309 | - |
|
| 1074 |
+
| 2.0584 | 16825 | 0.0325 | - |
|
| 1075 |
+
| 2.0614 | 16850 | 0.056 | - |
|
| 1076 |
+
| 2.0645 | 16875 | 0.0362 | - |
|
| 1077 |
+
| 2.0675 | 16900 | 0.0259 | - |
|
| 1078 |
+
| 2.0706 | 16925 | 0.027 | - |
|
| 1079 |
+
| 2.0736 | 16950 | 0.0544 | - |
|
| 1080 |
+
| 2.0767 | 16975 | 0.0518 | - |
|
| 1081 |
+
| 2.0798 | 17000 | 0.0311 | - |
|
| 1082 |
+
| 2.0828 | 17025 | 0.0345 | - |
|
| 1083 |
+
| 2.0859 | 17050 | 0.0506 | - |
|
| 1084 |
+
| 2.0889 | 17075 | 0.0217 | - |
|
| 1085 |
+
| 2.0920 | 17100 | 0.0492 | - |
|
| 1086 |
+
| 2.0951 | 17125 | 0.0598 | - |
|
| 1087 |
+
| 2.0981 | 17150 | 0.0982 | - |
|
| 1088 |
+
| 2.1012 | 17175 | 0.0464 | - |
|
| 1089 |
+
| 2.1042 | 17200 | 0.0296 | - |
|
| 1090 |
+
| 2.1073 | 17225 | 0.051 | - |
|
| 1091 |
+
| 2.1103 | 17250 | 0.0383 | - |
|
| 1092 |
+
| 2.1134 | 17275 | 0.0133 | - |
|
| 1093 |
+
| 2.1165 | 17300 | 0.0297 | - |
|
| 1094 |
+
| 2.1195 | 17325 | 0.0523 | - |
|
| 1095 |
+
| 2.1226 | 17350 | 0.0701 | - |
|
| 1096 |
+
| 2.1256 | 17375 | 0.0483 | - |
|
| 1097 |
+
| 2.1287 | 17400 | 0.0839 | - |
|
| 1098 |
+
| 2.1318 | 17425 | 0.0265 | - |
|
| 1099 |
+
| 2.1348 | 17450 | 0.0478 | - |
|
| 1100 |
+
| 2.1379 | 17475 | 0.0264 | - |
|
| 1101 |
+
| 2.1409 | 17500 | 0.0561 | - |
|
| 1102 |
+
| 2.1440 | 17525 | 0.0564 | - |
|
| 1103 |
+
| 2.1471 | 17550 | 0.0649 | - |
|
| 1104 |
+
| 2.1501 | 17575 | 0.0355 | - |
|
| 1105 |
+
| 2.1532 | 17600 | 0.0642 | - |
|
| 1106 |
+
| 2.1562 | 17625 | 0.0624 | - |
|
| 1107 |
+
| 2.1593 | 17650 | 0.0252 | - |
|
| 1108 |
+
| 2.1623 | 17675 | 0.0411 | - |
|
| 1109 |
+
| 2.1654 | 17700 | 0.0218 | - |
|
| 1110 |
+
| 2.1685 | 17725 | 0.0236 | - |
|
| 1111 |
+
| 2.1715 | 17750 | 0.0943 | - |
|
| 1112 |
+
| 2.1746 | 17775 | 0.0405 | - |
|
| 1113 |
+
| 2.1776 | 17800 | 0.0532 | - |
|
| 1114 |
+
| 2.1807 | 17825 | 0.0951 | - |
|
| 1115 |
+
| 2.1838 | 17850 | 0.0246 | - |
|
| 1116 |
+
| 2.1868 | 17875 | 0.0319 | - |
|
| 1117 |
+
| 2.1899 | 17900 | 0.0426 | - |
|
| 1118 |
+
| 2.1929 | 17925 | 0.0308 | - |
|
| 1119 |
+
| 2.1960 | 17950 | 0.0529 | - |
|
| 1120 |
+
| 2.1990 | 17975 | 0.046 | - |
|
| 1121 |
+
| 2.2021 | 18000 | 0.0474 | - |
|
| 1122 |
+
| 2.2052 | 18025 | 0.0758 | - |
|
| 1123 |
+
| 2.2082 | 18050 | 0.0318 | - |
|
| 1124 |
+
| 2.2113 | 18075 | 0.044 | - |
|
| 1125 |
+
| 2.2143 | 18100 | 0.0148 | - |
|
| 1126 |
+
| 2.2174 | 18125 | 0.035 | - |
|
| 1127 |
+
| 2.2205 | 18150 | 0.0601 | - |
|
| 1128 |
+
| 2.2235 | 18175 | 0.0613 | - |
|
| 1129 |
+
| 2.2266 | 18200 | 0.0126 | - |
|
| 1130 |
+
| 2.2296 | 18225 | 0.0422 | - |
|
| 1131 |
+
| 2.2327 | 18250 | 0.0544 | - |
|
| 1132 |
+
| 2.2357 | 18275 | 0.0362 | - |
|
| 1133 |
+
| 2.2388 | 18300 | 0.0439 | - |
|
| 1134 |
+
| 2.2419 | 18325 | 0.053 | - |
|
| 1135 |
+
| 2.2449 | 18350 | 0.0403 | - |
|
| 1136 |
+
| 2.2480 | 18375 | 0.0256 | - |
|
| 1137 |
+
| 2.2510 | 18400 | 0.0381 | - |
|
| 1138 |
+
| 2.2541 | 18425 | 0.0342 | - |
|
| 1139 |
+
| 2.2572 | 18450 | 0.0391 | - |
|
| 1140 |
+
| 2.2602 | 18475 | 0.027 | - |
|
| 1141 |
+
| 2.2633 | 18500 | 0.0165 | - |
|
| 1142 |
+
| 2.2663 | 18525 | 0.0333 | - |
|
| 1143 |
+
| 2.2694 | 18550 | 0.0314 | - |
|
| 1144 |
+
| 2.2724 | 18575 | 0.0489 | - |
|
| 1145 |
+
| 2.2755 | 18600 | 0.0423 | - |
|
| 1146 |
+
| 2.2786 | 18625 | 0.0693 | - |
|
| 1147 |
+
| 2.2816 | 18650 | 0.0477 | - |
|
| 1148 |
+
| 2.2847 | 18675 | 0.0509 | - |
|
| 1149 |
+
| 2.2877 | 18700 | 0.0585 | - |
|
| 1150 |
+
| 2.2908 | 18725 | 0.061 | - |
|
| 1151 |
+
| 2.2939 | 18750 | 0.0209 | - |
|
| 1152 |
+
| 2.2969 | 18775 | 0.0242 | - |
|
| 1153 |
+
| 2.3000 | 18800 | 0.0546 | - |
|
| 1154 |
+
| 2.3030 | 18825 | 0.0227 | - |
|
| 1155 |
+
| 2.3061 | 18850 | 0.0685 | - |
|
| 1156 |
+
| 2.3092 | 18875 | 0.0864 | - |
|
| 1157 |
+
| 2.3122 | 18900 | 0.0796 | - |
|
| 1158 |
+
| 2.3153 | 18925 | 0.0056 | - |
|
| 1159 |
+
| 2.3183 | 18950 | 0.0353 | - |
|
| 1160 |
+
| 2.3214 | 18975 | 0.0457 | - |
|
| 1161 |
+
| 2.3244 | 19000 | 0.0261 | - |
|
| 1162 |
+
| 2.3275 | 19025 | 0.0345 | - |
|
| 1163 |
+
| 2.3306 | 19050 | 0.0605 | - |
|
| 1164 |
+
| 2.3336 | 19075 | 0.0364 | - |
|
| 1165 |
+
| 2.3367 | 19100 | 0.0133 | - |
|
| 1166 |
+
| 2.3397 | 19125 | 0.045 | - |
|
| 1167 |
+
| 2.3428 | 19150 | 0.0432 | - |
|
| 1168 |
+
| 2.3459 | 19175 | 0.0633 | - |
|
| 1169 |
+
| 2.3489 | 19200 | 0.0258 | - |
|
| 1170 |
+
| 2.3520 | 19225 | 0.0179 | - |
|
| 1171 |
+
| 2.3550 | 19250 | 0.0261 | - |
|
| 1172 |
+
| 2.3581 | 19275 | 0.0538 | - |
|
| 1173 |
+
| 2.3611 | 19300 | 0.0891 | - |
|
| 1174 |
+
| 2.3642 | 19325 | 0.0171 | - |
|
| 1175 |
+
| 2.3673 | 19350 | 0.0429 | - |
|
| 1176 |
+
| 2.3703 | 19375 | 0.0964 | - |
|
| 1177 |
+
| 2.3734 | 19400 | 0.0521 | - |
|
| 1178 |
+
| 2.3764 | 19425 | 0.01 | - |
|
| 1179 |
+
| 2.3795 | 19450 | 0.0503 | - |
|
| 1180 |
+
| 2.3826 | 19475 | 0.0179 | - |
|
| 1181 |
+
| 2.3856 | 19500 | 0.0393 | - |
|
| 1182 |
+
| 2.3887 | 19525 | 0.0203 | - |
|
| 1183 |
+
| 2.3917 | 19550 | 0.0381 | - |
|
| 1184 |
+
| 2.3948 | 19575 | 0.0524 | - |
|
| 1185 |
+
| 2.3978 | 19600 | 0.027 | - |
|
| 1186 |
+
| 2.4009 | 19625 | 0.0503 | - |
|
| 1187 |
+
| 2.4040 | 19650 | 0.0115 | - |
|
| 1188 |
+
| 2.4070 | 19675 | 0.0633 | - |
|
| 1189 |
+
| 2.4101 | 19700 | 0.0571 | - |
|
| 1190 |
+
| 2.4131 | 19725 | 0.0179 | - |
|
| 1191 |
+
| 2.4162 | 19750 | 0.0346 | - |
|
| 1192 |
+
| 2.4193 | 19775 | 0.069 | - |
|
| 1193 |
+
| 2.4223 | 19800 | 0.0208 | - |
|
| 1194 |
+
| 2.4254 | 19825 | 0.0878 | - |
|
| 1195 |
+
| 2.4284 | 19850 | 0.0261 | - |
|
| 1196 |
+
| 2.4315 | 19875 | 0.0676 | - |
|
| 1197 |
+
| 2.4345 | 19900 | 0.0507 | - |
|
| 1198 |
+
| 2.4376 | 19925 | 0.0395 | - |
|
| 1199 |
+
| 2.4407 | 19950 | 0.0701 | - |
|
| 1200 |
+
| 2.4437 | 19975 | 0.0352 | - |
|
| 1201 |
+
| 2.4468 | 20000 | 0.0651 | - |
|
| 1202 |
+
| 2.4498 | 20025 | 0.0488 | - |
|
| 1203 |
+
| 2.4529 | 20050 | 0.036 | - |
|
| 1204 |
+
| 2.4560 | 20075 | 0.0305 | - |
|
| 1205 |
+
| 2.4590 | 20100 | 0.0282 | - |
|
| 1206 |
+
| 2.4621 | 20125 | 0.0391 | - |
|
| 1207 |
+
| 2.4651 | 20150 | 0.0723 | - |
|
| 1208 |
+
| 2.4682 | 20175 | 0.0659 | - |
|
| 1209 |
+
| 2.4713 | 20200 | 0.0818 | - |
|
| 1210 |
+
| 2.4743 | 20225 | 0.0342 | - |
|
| 1211 |
+
| 2.4774 | 20250 | 0.0465 | - |
|
| 1212 |
+
| 2.4804 | 20275 | 0.041 | - |
|
| 1213 |
+
| 2.4835 | 20300 | 0.0737 | - |
|
| 1214 |
+
| 2.4865 | 20325 | 0.0853 | - |
|
| 1215 |
+
| 2.4896 | 20350 | 0.0333 | - |
|
| 1216 |
+
| 2.4927 | 20375 | 0.0446 | - |
|
| 1217 |
+
| 2.4957 | 20400 | 0.0303 | - |
|
| 1218 |
+
| 2.4988 | 20425 | 0.0449 | - |
|
| 1219 |
+
| 2.5018 | 20450 | 0.0228 | - |
|
| 1220 |
+
| 2.5049 | 20475 | 0.0376 | - |
|
| 1221 |
+
| 2.5080 | 20500 | 0.0618 | - |
|
| 1222 |
+
| 2.5110 | 20525 | 0.074 | - |
|
| 1223 |
+
| 2.5141 | 20550 | 0.0322 | - |
|
| 1224 |
+
| 2.5171 | 20575 | 0.0205 | - |
|
| 1225 |
+
| 2.5202 | 20600 | 0.0578 | - |
|
| 1226 |
+
| 2.5232 | 20625 | 0.0376 | - |
|
| 1227 |
+
| 2.5263 | 20650 | 0.0699 | - |
|
| 1228 |
+
| 2.5294 | 20675 | 0.0862 | - |
|
| 1229 |
+
| 2.5324 | 20700 | 0.0253 | - |
|
| 1230 |
+
| 2.5355 | 20725 | 0.1087 | - |
|
| 1231 |
+
| 2.5385 | 20750 | 0.0528 | - |
|
| 1232 |
+
| 2.5416 | 20775 | 0.0472 | - |
|
| 1233 |
+
| 2.5447 | 20800 | 0.0367 | - |
|
| 1234 |
+
| 2.5477 | 20825 | 0.0244 | - |
|
| 1235 |
+
| 2.5508 | 20850 | 0.0504 | - |
|
| 1236 |
+
| 2.5538 | 20875 | 0.0539 | - |
|
| 1237 |
+
| 2.5569 | 20900 | 0.0719 | - |
|
| 1238 |
+
| 2.5599 | 20925 | 0.0218 | - |
|
| 1239 |
+
| 2.5630 | 20950 | 0.0427 | - |
|
| 1240 |
+
| 2.5661 | 20975 | 0.0347 | - |
|
| 1241 |
+
| 2.5691 | 21000 | 0.0475 | - |
|
| 1242 |
+
| 2.5722 | 21025 | 0.0555 | - |
|
| 1243 |
+
| 2.5752 | 21050 | 0.0171 | - |
|
| 1244 |
+
| 2.5783 | 21075 | 0.0452 | - |
|
| 1245 |
+
| 2.5814 | 21100 | 0.061 | - |
|
| 1246 |
+
| 2.5844 | 21125 | 0.0559 | - |
|
| 1247 |
+
| 2.5875 | 21150 | 0.0398 | - |
|
| 1248 |
+
| 2.5905 | 21175 | 0.0274 | - |
|
| 1249 |
+
| 2.5936 | 21200 | 0.0212 | - |
|
| 1250 |
+
| 2.5966 | 21225 | 0.0525 | - |
|
| 1251 |
+
| 2.5997 | 21250 | 0.0254 | - |
|
| 1252 |
+
| 2.6028 | 21275 | 0.0425 | - |
|
| 1253 |
+
| 2.6058 | 21300 | 0.0207 | - |
|
| 1254 |
+
| 2.6089 | 21325 | 0.0255 | - |
|
| 1255 |
+
| 2.6119 | 21350 | 0.0411 | - |
|
| 1256 |
+
| 2.6150 | 21375 | 0.0117 | - |
|
| 1257 |
+
| 2.6181 | 21400 | 0.0552 | - |
|
| 1258 |
+
| 2.6211 | 21425 | 0.0399 | - |
|
| 1259 |
+
| 2.6242 | 21450 | 0.0526 | - |
|
| 1260 |
+
| 2.6272 | 21475 | 0.1184 | - |
|
| 1261 |
+
| 2.6303 | 21500 | 0.056 | - |
|
| 1262 |
+
| 2.6333 | 21525 | 0.0483 | - |
|
| 1263 |
+
| 2.6364 | 21550 | 0.032 | - |
|
| 1264 |
+
| 2.6395 | 21575 | 0.0097 | - |
|
| 1265 |
+
| 2.6425 | 21600 | 0.0546 | - |
|
| 1266 |
+
| 2.6456 | 21625 | 0.0137 | - |
|
| 1267 |
+
| 2.6486 | 21650 | 0.0107 | - |
|
| 1268 |
+
| 2.6517 | 21675 | 0.0338 | - |
|
| 1269 |
+
| 2.6548 | 21700 | 0.0223 | - |
|
| 1270 |
+
| 2.6578 | 21725 | 0.0257 | - |
|
| 1271 |
+
| 2.6609 | 21750 | 0.075 | - |
|
| 1272 |
+
| 2.6639 | 21775 | 0.036 | - |
|
| 1273 |
+
| 2.6670 | 21800 | 0.0413 | - |
|
| 1274 |
+
| 2.6701 | 21825 | 0.0378 | - |
|
| 1275 |
+
| 2.6731 | 21850 | 0.0425 | - |
|
| 1276 |
+
| 2.6762 | 21875 | 0.0383 | - |
|
| 1277 |
+
| 2.6792 | 21900 | 0.0296 | - |
|
| 1278 |
+
| 2.6823 | 21925 | 0.0416 | - |
|
| 1279 |
+
| 2.6853 | 21950 | 0.0121 | - |
|
| 1280 |
+
| 2.6884 | 21975 | 0.0091 | - |
|
| 1281 |
+
| 2.6915 | 22000 | 0.0388 | - |
|
| 1282 |
+
| 2.6945 | 22025 | 0.0462 | - |
|
| 1283 |
+
| 2.6976 | 22050 | 0.0723 | - |
|
| 1284 |
+
| 2.7006 | 22075 | 0.0389 | - |
|
| 1285 |
+
| 2.7037 | 22100 | 0.0399 | - |
|
| 1286 |
+
| 2.7068 | 22125 | 0.0124 | - |
|
| 1287 |
+
| 2.7098 | 22150 | 0.0259 | - |
|
| 1288 |
+
| 2.7129 | 22175 | 0.0208 | - |
|
| 1289 |
+
| 2.7159 | 22200 | 0.0302 | - |
|
| 1290 |
+
| 2.7190 | 22225 | 0.118 | - |
|
| 1291 |
+
| 2.7220 | 22250 | 0.0355 | - |
|
| 1292 |
+
| 2.7251 | 22275 | 0.0306 | - |
|
| 1293 |
+
| 2.7282 | 22300 | 0.1136 | - |
|
| 1294 |
+
| 2.7312 | 22325 | 0.062 | - |
|
| 1295 |
+
| 2.7343 | 22350 | 0.0578 | - |
|
| 1296 |
+
| 2.7373 | 22375 | 0.0528 | - |
|
| 1297 |
+
| 2.7404 | 22400 | 0.0723 | - |
|
| 1298 |
+
| 2.7435 | 22425 | 0.082 | - |
|
| 1299 |
+
| 2.7465 | 22450 | 0.0428 | - |
|
| 1300 |
+
| 2.7496 | 22475 | 0.0772 | - |
|
| 1301 |
+
| 2.7526 | 22500 | 0.0426 | - |
|
| 1302 |
+
| 2.7557 | 22525 | 0.0298 | - |
|
| 1303 |
+
| 2.7587 | 22550 | 0.0534 | - |
|
| 1304 |
+
| 2.7618 | 22575 | 0.0418 | - |
|
| 1305 |
+
| 2.7649 | 22600 | 0.0244 | - |
|
| 1306 |
+
| 2.7679 | 22625 | 0.0436 | - |
|
| 1307 |
+
| 2.7710 | 22650 | 0.0578 | - |
|
| 1308 |
+
| 2.7740 | 22675 | 0.0707 | - |
|
| 1309 |
+
| 2.7771 | 22700 | 0.062 | - |
|
| 1310 |
+
| 2.7802 | 22725 | 0.0342 | - |
|
| 1311 |
+
| 2.7832 | 22750 | 0.0147 | - |
|
| 1312 |
+
| 2.7863 | 22775 | 0.0392 | - |
|
| 1313 |
+
| 2.7893 | 22800 | 0.0317 | - |
|
| 1314 |
+
| 2.7924 | 22825 | 0.0351 | - |
|
| 1315 |
+
| 2.7954 | 22850 | 0.0409 | - |
|
| 1316 |
+
| 2.7985 | 22875 | 0.0595 | - |
|
| 1317 |
+
| 2.8016 | 22900 | 0.0593 | - |
|
| 1318 |
+
| 2.8046 | 22925 | 0.0071 | - |
|
| 1319 |
+
| 2.8077 | 22950 | 0.0339 | - |
|
| 1320 |
+
| 2.8107 | 22975 | 0.0158 | - |
|
| 1321 |
+
| 2.8138 | 23000 | 0.0302 | - |
|
| 1322 |
+
| 2.8169 | 23025 | 0.0411 | - |
|
| 1323 |
+
| 2.8199 | 23050 | 0.0262 | - |
|
| 1324 |
+
| 2.8230 | 23075 | 0.0319 | - |
|
| 1325 |
+
| 2.8260 | 23100 | 0.0189 | - |
|
| 1326 |
+
| 2.8291 | 23125 | 0.0353 | - |
|
| 1327 |
+
| 2.8322 | 23150 | 0.0382 | - |
|
| 1328 |
+
| 2.8352 | 23175 | 0.0492 | - |
|
| 1329 |
+
| 2.8383 | 23200 | 0.0159 | - |
|
| 1330 |
+
| 2.8413 | 23225 | 0.0606 | - |
|
| 1331 |
+
| 2.8444 | 23250 | 0.0418 | - |
|
| 1332 |
+
| 2.8474 | 23275 | 0.0408 | - |
|
| 1333 |
+
| 2.8505 | 23300 | 0.0424 | - |
|
| 1334 |
+
| 2.8536 | 23325 | 0.0075 | - |
|
| 1335 |
+
| 2.8566 | 23350 | 0.0079 | - |
|
| 1336 |
+
| 2.8597 | 23375 | 0.0811 | - |
|
| 1337 |
+
| 2.8627 | 23400 | 0.0546 | - |
|
| 1338 |
+
| 2.8658 | 23425 | 0.0648 | - |
|
| 1339 |
+
| 2.8689 | 23450 | 0.0682 | - |
|
| 1340 |
+
| 2.8719 | 23475 | 0.0402 | - |
|
| 1341 |
+
| 2.8750 | 23500 | 0.0907 | - |
|
| 1342 |
+
| 2.8780 | 23525 | 0.0324 | - |
|
| 1343 |
+
| 2.8811 | 23550 | 0.0185 | - |
|
| 1344 |
+
| 2.8841 | 23575 | 0.0439 | - |
|
| 1345 |
+
| 2.8872 | 23600 | 0.0348 | - |
|
| 1346 |
+
| 2.8903 | 23625 | 0.0392 | - |
|
| 1347 |
+
| 2.8933 | 23650 | 0.1026 | - |
|
| 1348 |
+
| 2.8964 | 23675 | 0.0606 | - |
|
| 1349 |
+
| 2.8994 | 23700 | 0.0341 | - |
|
| 1350 |
+
| 2.9025 | 23725 | 0.0386 | - |
|
| 1351 |
+
| 2.9056 | 23750 | 0.0575 | - |
|
| 1352 |
+
| 2.9086 | 23775 | 0.0713 | - |
|
| 1353 |
+
| 2.9117 | 23800 | 0.0302 | - |
|
| 1354 |
+
| 2.9147 | 23825 | 0.0253 | - |
|
| 1355 |
+
| 2.9178 | 23850 | 0.0094 | - |
|
| 1356 |
+
| 2.9208 | 23875 | 0.0332 | - |
|
| 1357 |
+
| 2.9239 | 23900 | 0.045 | - |
|
| 1358 |
+
| 2.9270 | 23925 | 0.0126 | - |
|
| 1359 |
+
| 2.9300 | 23950 | 0.0541 | - |
|
| 1360 |
+
| 2.9331 | 23975 | 0.0397 | - |
|
| 1361 |
+
| 2.9361 | 24000 | 0.0574 | - |
|
| 1362 |
+
| 2.9392 | 24025 | 0.0289 | - |
|
| 1363 |
+
| 2.9423 | 24050 | 0.0492 | - |
|
| 1364 |
+
| 2.9453 | 24075 | 0.0457 | - |
|
| 1365 |
+
| 2.9484 | 24100 | 0.0217 | - |
|
| 1366 |
+
| 2.9514 | 24125 | 0.0107 | - |
|
| 1367 |
+
| 2.9545 | 24150 | 0.0422 | - |
|
| 1368 |
+
| 2.9575 | 24175 | 0.0498 | - |
|
| 1369 |
+
| 2.9606 | 24200 | 0.0454 | - |
|
| 1370 |
+
| 2.9637 | 24225 | 0.0339 | - |
|
| 1371 |
+
| 2.9667 | 24250 | 0.0446 | - |
|
| 1372 |
+
| 2.9698 | 24275 | 0.0286 | - |
|
| 1373 |
+
| 2.9728 | 24300 | 0.0506 | - |
|
| 1374 |
+
| 2.9759 | 24325 | 0.0432 | - |
|
| 1375 |
+
| 2.9790 | 24350 | 0.0251 | - |
|
| 1376 |
+
| 2.9820 | 24375 | 0.0099 | - |
|
| 1377 |
+
| 2.9851 | 24400 | 0.0621 | - |
|
| 1378 |
+
| 2.9881 | 24425 | 0.0118 | - |
|
| 1379 |
+
| 2.9912 | 24450 | 0.0259 | - |
|
| 1380 |
+
| 2.9943 | 24475 | 0.079 | - |
|
| 1381 |
+
| 2.9973 | 24500 | 0.0552 | - |
|
| 1382 |
+
| 3.0 | 24522 | - | 0.1244 |
|
| 1383 |
+
|
| 1384 |
+
</details>
|
| 1385 |
+
|
| 1386 |
+
### Framework Versions
|
| 1387 |
+
- Python: 3.10.14
|
| 1388 |
+
- Sentence Transformers: 3.3.1
|
| 1389 |
+
- Transformers: 4.47.1
|
| 1390 |
+
- PyTorch: 2.3.0
|
| 1391 |
+
- Accelerate: 1.2.1
|
| 1392 |
+
- Datasets: 3.2.0
|
| 1393 |
+
- Tokenizers: 0.21.0
|
| 1394 |
+
|
| 1395 |
+
## Citation
|
| 1396 |
+
|
| 1397 |
+
### BibTeX
|
| 1398 |
+
|
| 1399 |
+
#### Sentence Transformers
|
| 1400 |
+
```bibtex
|
| 1401 |
+
@inproceedings{reimers-2019-sentence-bert,
|
| 1402 |
+
title = "Sentence-BERT: Sentence Embeddings using Siamese BERT-Networks",
|
| 1403 |
+
author = "Reimers, Nils and Gurevych, Iryna",
|
| 1404 |
+
booktitle = "Proceedings of the 2019 Conference on Empirical Methods in Natural Language Processing",
|
| 1405 |
+
month = "11",
|
| 1406 |
+
year = "2019",
|
| 1407 |
+
publisher = "Association for Computational Linguistics",
|
| 1408 |
+
url = "https://arxiv.org/abs/1908.10084",
|
| 1409 |
+
}
|
| 1410 |
+
```
|
| 1411 |
+
|
| 1412 |
+
#### MultipleNegativesRankingLoss
|
| 1413 |
+
```bibtex
|
| 1414 |
+
@misc{henderson2017efficient,
|
| 1415 |
+
title={Efficient Natural Language Response Suggestion for Smart Reply},
|
| 1416 |
+
author={Matthew Henderson and Rami Al-Rfou and Brian Strope and Yun-hsuan Sung and Laszlo Lukacs and Ruiqi Guo and Sanjiv Kumar and Balint Miklos and Ray Kurzweil},
|
| 1417 |
+
year={2017},
|
| 1418 |
+
eprint={1705.00652},
|
| 1419 |
+
archivePrefix={arXiv},
|
| 1420 |
+
primaryClass={cs.CL}
|
| 1421 |
+
}
|
| 1422 |
+
```
|
| 1423 |
+
|
| 1424 |
+
<!--
|
| 1425 |
+
## Glossary
|
| 1426 |
+
|
| 1427 |
+
*Clearly define terms in order to be accessible across audiences.*
|
| 1428 |
+
-->
|
| 1429 |
+
|
| 1430 |
+
<!--
|
| 1431 |
+
## Model Card Authors
|
| 1432 |
+
|
| 1433 |
+
*Lists the people who create the model card, providing recognition and accountability for the detailed work that goes into its construction.*
|
| 1434 |
+
-->
|
| 1435 |
+
|
| 1436 |
+
<!--
|
| 1437 |
+
## Model Card Contact
|
| 1438 |
+
|
| 1439 |
+
*Provides a way for people who have updates to the Model Card, suggestions, or questions, to contact the Model Card authors.*
|
| 1440 |
+
-->
|
checkpoint-24522/config.json
ADDED
|
@@ -0,0 +1,26 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"_name_or_path": "sentence-transformers/multi-qa-MiniLM-L6-cos-v1",
|
| 3 |
+
"architectures": [
|
| 4 |
+
"BertModel"
|
| 5 |
+
],
|
| 6 |
+
"attention_probs_dropout_prob": 0.1,
|
| 7 |
+
"classifier_dropout": null,
|
| 8 |
+
"gradient_checkpointing": false,
|
| 9 |
+
"hidden_act": "gelu",
|
| 10 |
+
"hidden_dropout_prob": 0.1,
|
| 11 |
+
"hidden_size": 384,
|
| 12 |
+
"initializer_range": 0.02,
|
| 13 |
+
"intermediate_size": 1536,
|
| 14 |
+
"layer_norm_eps": 1e-12,
|
| 15 |
+
"max_position_embeddings": 512,
|
| 16 |
+
"model_type": "bert",
|
| 17 |
+
"num_attention_heads": 12,
|
| 18 |
+
"num_hidden_layers": 6,
|
| 19 |
+
"pad_token_id": 0,
|
| 20 |
+
"position_embedding_type": "absolute",
|
| 21 |
+
"torch_dtype": "float32",
|
| 22 |
+
"transformers_version": "4.47.1",
|
| 23 |
+
"type_vocab_size": 2,
|
| 24 |
+
"use_cache": true,
|
| 25 |
+
"vocab_size": 30522
|
| 26 |
+
}
|
checkpoint-24522/config_sentence_transformers.json
ADDED
|
@@ -0,0 +1,10 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"__version__": {
|
| 3 |
+
"sentence_transformers": "3.3.1",
|
| 4 |
+
"transformers": "4.47.1",
|
| 5 |
+
"pytorch": "2.3.0"
|
| 6 |
+
},
|
| 7 |
+
"prompts": {},
|
| 8 |
+
"default_prompt_name": null,
|
| 9 |
+
"similarity_fn_name": "cosine"
|
| 10 |
+
}
|
checkpoint-24522/model.safetensors
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:93d0d4ed1f12c6f13fbda20a5ec622675df2235303cc463bebe5e53025562617
|
| 3 |
+
size 90864192
|
checkpoint-24522/modules.json
ADDED
|
@@ -0,0 +1,20 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
[
|
| 2 |
+
{
|
| 3 |
+
"idx": 0,
|
| 4 |
+
"name": "0",
|
| 5 |
+
"path": "",
|
| 6 |
+
"type": "sentence_transformers.models.Transformer"
|
| 7 |
+
},
|
| 8 |
+
{
|
| 9 |
+
"idx": 1,
|
| 10 |
+
"name": "1",
|
| 11 |
+
"path": "1_Pooling",
|
| 12 |
+
"type": "sentence_transformers.models.Pooling"
|
| 13 |
+
},
|
| 14 |
+
{
|
| 15 |
+
"idx": 2,
|
| 16 |
+
"name": "2",
|
| 17 |
+
"path": "2_Normalize",
|
| 18 |
+
"type": "sentence_transformers.models.Normalize"
|
| 19 |
+
}
|
| 20 |
+
]
|
checkpoint-24522/optimizer.pt
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:cb741042095a88a7bf28f27ac2062f95e8b7b5768eca21774bcb19681c0b4173
|
| 3 |
+
size 180604922
|
checkpoint-24522/rng_state.pth
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:ce180fbf290eda3dd7fbb6e7d8fada6a19aae45174fa522943af99104566ca5f
|
| 3 |
+
size 13990
|
checkpoint-24522/scheduler.pt
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:5c2aad5ff0b0241dafec8bf96d3c9262d82b3b9568c93ec00590a92541c352a1
|
| 3 |
+
size 1064
|
checkpoint-24522/sentence_bert_config.json
ADDED
|
@@ -0,0 +1,4 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"max_seq_length": 512,
|
| 3 |
+
"do_lower_case": false
|
| 4 |
+
}
|
checkpoint-24522/special_tokens_map.json
ADDED
|
@@ -0,0 +1,37 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"cls_token": {
|
| 3 |
+
"content": "[CLS]",
|
| 4 |
+
"lstrip": false,
|
| 5 |
+
"normalized": false,
|
| 6 |
+
"rstrip": false,
|
| 7 |
+
"single_word": false
|
| 8 |
+
},
|
| 9 |
+
"mask_token": {
|
| 10 |
+
"content": "[MASK]",
|
| 11 |
+
"lstrip": false,
|
| 12 |
+
"normalized": false,
|
| 13 |
+
"rstrip": false,
|
| 14 |
+
"single_word": false
|
| 15 |
+
},
|
| 16 |
+
"pad_token": {
|
| 17 |
+
"content": "[PAD]",
|
| 18 |
+
"lstrip": false,
|
| 19 |
+
"normalized": false,
|
| 20 |
+
"rstrip": false,
|
| 21 |
+
"single_word": false
|
| 22 |
+
},
|
| 23 |
+
"sep_token": {
|
| 24 |
+
"content": "[SEP]",
|
| 25 |
+
"lstrip": false,
|
| 26 |
+
"normalized": false,
|
| 27 |
+
"rstrip": false,
|
| 28 |
+
"single_word": false
|
| 29 |
+
},
|
| 30 |
+
"unk_token": {
|
| 31 |
+
"content": "[UNK]",
|
| 32 |
+
"lstrip": false,
|
| 33 |
+
"normalized": false,
|
| 34 |
+
"rstrip": false,
|
| 35 |
+
"single_word": false
|
| 36 |
+
}
|
| 37 |
+
}
|
checkpoint-24522/tokenizer.json
ADDED
|
The diff for this file is too large to render.
See raw diff
|
|
|
checkpoint-24522/tokenizer_config.json
ADDED
|
@@ -0,0 +1,65 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"added_tokens_decoder": {
|
| 3 |
+
"0": {
|
| 4 |
+
"content": "[PAD]",
|
| 5 |
+
"lstrip": false,
|
| 6 |
+
"normalized": false,
|
| 7 |
+
"rstrip": false,
|
| 8 |
+
"single_word": false,
|
| 9 |
+
"special": true
|
| 10 |
+
},
|
| 11 |
+
"100": {
|
| 12 |
+
"content": "[UNK]",
|
| 13 |
+
"lstrip": false,
|
| 14 |
+
"normalized": false,
|
| 15 |
+
"rstrip": false,
|
| 16 |
+
"single_word": false,
|
| 17 |
+
"special": true
|
| 18 |
+
},
|
| 19 |
+
"101": {
|
| 20 |
+
"content": "[CLS]",
|
| 21 |
+
"lstrip": false,
|
| 22 |
+
"normalized": false,
|
| 23 |
+
"rstrip": false,
|
| 24 |
+
"single_word": false,
|
| 25 |
+
"special": true
|
| 26 |
+
},
|
| 27 |
+
"102": {
|
| 28 |
+
"content": "[SEP]",
|
| 29 |
+
"lstrip": false,
|
| 30 |
+
"normalized": false,
|
| 31 |
+
"rstrip": false,
|
| 32 |
+
"single_word": false,
|
| 33 |
+
"special": true
|
| 34 |
+
},
|
| 35 |
+
"103": {
|
| 36 |
+
"content": "[MASK]",
|
| 37 |
+
"lstrip": false,
|
| 38 |
+
"normalized": false,
|
| 39 |
+
"rstrip": false,
|
| 40 |
+
"single_word": false,
|
| 41 |
+
"special": true
|
| 42 |
+
}
|
| 43 |
+
},
|
| 44 |
+
"clean_up_tokenization_spaces": false,
|
| 45 |
+
"cls_token": "[CLS]",
|
| 46 |
+
"do_basic_tokenize": true,
|
| 47 |
+
"do_lower_case": true,
|
| 48 |
+
"extra_special_tokens": {},
|
| 49 |
+
"mask_token": "[MASK]",
|
| 50 |
+
"max_length": 250,
|
| 51 |
+
"model_max_length": 512,
|
| 52 |
+
"never_split": null,
|
| 53 |
+
"pad_to_multiple_of": null,
|
| 54 |
+
"pad_token": "[PAD]",
|
| 55 |
+
"pad_token_type_id": 0,
|
| 56 |
+
"padding_side": "right",
|
| 57 |
+
"sep_token": "[SEP]",
|
| 58 |
+
"stride": 0,
|
| 59 |
+
"strip_accents": null,
|
| 60 |
+
"tokenize_chinese_chars": true,
|
| 61 |
+
"tokenizer_class": "BertTokenizer",
|
| 62 |
+
"truncation_side": "right",
|
| 63 |
+
"truncation_strategy": "longest_first",
|
| 64 |
+
"unk_token": "[UNK]"
|
| 65 |
+
}
|
checkpoint-24522/trainer_state.json
ADDED
|
The diff for this file is too large to render.
See raw diff
|
|
|
checkpoint-24522/training_args.bin
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:286361f7b6a7f6523641ed4f4e1fc273fed6aac87919e8decf87c2000151588c
|
| 3 |
+
size 5624
|
checkpoint-24522/vocab.txt
ADDED
|
The diff for this file is too large to render.
See raw diff
|
|
|
config.json
ADDED
|
@@ -0,0 +1,26 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"_name_or_path": "sentence-transformers/multi-qa-MiniLM-L6-cos-v1",
|
| 3 |
+
"architectures": [
|
| 4 |
+
"BertModel"
|
| 5 |
+
],
|
| 6 |
+
"attention_probs_dropout_prob": 0.1,
|
| 7 |
+
"classifier_dropout": null,
|
| 8 |
+
"gradient_checkpointing": false,
|
| 9 |
+
"hidden_act": "gelu",
|
| 10 |
+
"hidden_dropout_prob": 0.1,
|
| 11 |
+
"hidden_size": 384,
|
| 12 |
+
"initializer_range": 0.02,
|
| 13 |
+
"intermediate_size": 1536,
|
| 14 |
+
"layer_norm_eps": 1e-12,
|
| 15 |
+
"max_position_embeddings": 512,
|
| 16 |
+
"model_type": "bert",
|
| 17 |
+
"num_attention_heads": 12,
|
| 18 |
+
"num_hidden_layers": 6,
|
| 19 |
+
"pad_token_id": 0,
|
| 20 |
+
"position_embedding_type": "absolute",
|
| 21 |
+
"torch_dtype": "float32",
|
| 22 |
+
"transformers_version": "4.47.1",
|
| 23 |
+
"type_vocab_size": 2,
|
| 24 |
+
"use_cache": true,
|
| 25 |
+
"vocab_size": 30522
|
| 26 |
+
}
|
config_sentence_transformers.json
ADDED
|
@@ -0,0 +1,10 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"__version__": {
|
| 3 |
+
"sentence_transformers": "3.3.1",
|
| 4 |
+
"transformers": "4.47.1",
|
| 5 |
+
"pytorch": "2.3.0"
|
| 6 |
+
},
|
| 7 |
+
"prompts": {},
|
| 8 |
+
"default_prompt_name": null,
|
| 9 |
+
"similarity_fn_name": "cosine"
|
| 10 |
+
}
|
model.safetensors
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:93d0d4ed1f12c6f13fbda20a5ec622675df2235303cc463bebe5e53025562617
|
| 3 |
+
size 90864192
|
modules.json
ADDED
|
@@ -0,0 +1,20 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
[
|
| 2 |
+
{
|
| 3 |
+
"idx": 0,
|
| 4 |
+
"name": "0",
|
| 5 |
+
"path": "",
|
| 6 |
+
"type": "sentence_transformers.models.Transformer"
|
| 7 |
+
},
|
| 8 |
+
{
|
| 9 |
+
"idx": 1,
|
| 10 |
+
"name": "1",
|
| 11 |
+
"path": "1_Pooling",
|
| 12 |
+
"type": "sentence_transformers.models.Pooling"
|
| 13 |
+
},
|
| 14 |
+
{
|
| 15 |
+
"idx": 2,
|
| 16 |
+
"name": "2",
|
| 17 |
+
"path": "2_Normalize",
|
| 18 |
+
"type": "sentence_transformers.models.Normalize"
|
| 19 |
+
}
|
| 20 |
+
]
|
runs/Dec26_10-25-51_r-clairedhx-autotrain-advanced-phe-transcript-dpu1ifo-47f8f-5cy/events.out.tfevents.1735208755.r-clairedhx-autotrain-advanced-phe-transcript-dpu1ifo-47f8f-5cy.251.0
CHANGED
|
@@ -1,3 +1,3 @@
|
|
| 1 |
version https://git-lfs.github.com/spec/v1
|
| 2 |
-
oid sha256:
|
| 3 |
-
size
|
|
|
|
| 1 |
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:0fd09f1bbfbe7cd5a11fc4aeae0c72227a1f07b95305d390ff895750896840e2
|
| 3 |
+
size 213689
|
runs/Dec26_10-25-51_r-clairedhx-autotrain-advanced-phe-transcript-dpu1ifo-47f8f-5cy/events.out.tfevents.1735269700.r-clairedhx-autotrain-advanced-phe-transcript-dpu1ifo-47f8f-5cy.251.1
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:46727abd8e0f65bae47d0b0fd6ebf54c86ead382cf13bb02625ed43331828f6a
|
| 3 |
+
size 364
|
sentence_bert_config.json
ADDED
|
@@ -0,0 +1,4 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"max_seq_length": 512,
|
| 3 |
+
"do_lower_case": false
|
| 4 |
+
}
|
special_tokens_map.json
ADDED
|
@@ -0,0 +1,37 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"cls_token": {
|
| 3 |
+
"content": "[CLS]",
|
| 4 |
+
"lstrip": false,
|
| 5 |
+
"normalized": false,
|
| 6 |
+
"rstrip": false,
|
| 7 |
+
"single_word": false
|
| 8 |
+
},
|
| 9 |
+
"mask_token": {
|
| 10 |
+
"content": "[MASK]",
|
| 11 |
+
"lstrip": false,
|
| 12 |
+
"normalized": false,
|
| 13 |
+
"rstrip": false,
|
| 14 |
+
"single_word": false
|
| 15 |
+
},
|
| 16 |
+
"pad_token": {
|
| 17 |
+
"content": "[PAD]",
|
| 18 |
+
"lstrip": false,
|
| 19 |
+
"normalized": false,
|
| 20 |
+
"rstrip": false,
|
| 21 |
+
"single_word": false
|
| 22 |
+
},
|
| 23 |
+
"sep_token": {
|
| 24 |
+
"content": "[SEP]",
|
| 25 |
+
"lstrip": false,
|
| 26 |
+
"normalized": false,
|
| 27 |
+
"rstrip": false,
|
| 28 |
+
"single_word": false
|
| 29 |
+
},
|
| 30 |
+
"unk_token": {
|
| 31 |
+
"content": "[UNK]",
|
| 32 |
+
"lstrip": false,
|
| 33 |
+
"normalized": false,
|
| 34 |
+
"rstrip": false,
|
| 35 |
+
"single_word": false
|
| 36 |
+
}
|
| 37 |
+
}
|
tokenizer.json
ADDED
|
The diff for this file is too large to render.
See raw diff
|
|
|
tokenizer_config.json
ADDED
|
@@ -0,0 +1,65 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"added_tokens_decoder": {
|
| 3 |
+
"0": {
|
| 4 |
+
"content": "[PAD]",
|
| 5 |
+
"lstrip": false,
|
| 6 |
+
"normalized": false,
|
| 7 |
+
"rstrip": false,
|
| 8 |
+
"single_word": false,
|
| 9 |
+
"special": true
|
| 10 |
+
},
|
| 11 |
+
"100": {
|
| 12 |
+
"content": "[UNK]",
|
| 13 |
+
"lstrip": false,
|
| 14 |
+
"normalized": false,
|
| 15 |
+
"rstrip": false,
|
| 16 |
+
"single_word": false,
|
| 17 |
+
"special": true
|
| 18 |
+
},
|
| 19 |
+
"101": {
|
| 20 |
+
"content": "[CLS]",
|
| 21 |
+
"lstrip": false,
|
| 22 |
+
"normalized": false,
|
| 23 |
+
"rstrip": false,
|
| 24 |
+
"single_word": false,
|
| 25 |
+
"special": true
|
| 26 |
+
},
|
| 27 |
+
"102": {
|
| 28 |
+
"content": "[SEP]",
|
| 29 |
+
"lstrip": false,
|
| 30 |
+
"normalized": false,
|
| 31 |
+
"rstrip": false,
|
| 32 |
+
"single_word": false,
|
| 33 |
+
"special": true
|
| 34 |
+
},
|
| 35 |
+
"103": {
|
| 36 |
+
"content": "[MASK]",
|
| 37 |
+
"lstrip": false,
|
| 38 |
+
"normalized": false,
|
| 39 |
+
"rstrip": false,
|
| 40 |
+
"single_word": false,
|
| 41 |
+
"special": true
|
| 42 |
+
}
|
| 43 |
+
},
|
| 44 |
+
"clean_up_tokenization_spaces": false,
|
| 45 |
+
"cls_token": "[CLS]",
|
| 46 |
+
"do_basic_tokenize": true,
|
| 47 |
+
"do_lower_case": true,
|
| 48 |
+
"extra_special_tokens": {},
|
| 49 |
+
"mask_token": "[MASK]",
|
| 50 |
+
"max_length": 250,
|
| 51 |
+
"model_max_length": 512,
|
| 52 |
+
"never_split": null,
|
| 53 |
+
"pad_to_multiple_of": null,
|
| 54 |
+
"pad_token": "[PAD]",
|
| 55 |
+
"pad_token_type_id": 0,
|
| 56 |
+
"padding_side": "right",
|
| 57 |
+
"sep_token": "[SEP]",
|
| 58 |
+
"stride": 0,
|
| 59 |
+
"strip_accents": null,
|
| 60 |
+
"tokenize_chinese_chars": true,
|
| 61 |
+
"tokenizer_class": "BertTokenizer",
|
| 62 |
+
"truncation_side": "right",
|
| 63 |
+
"truncation_strategy": "longest_first",
|
| 64 |
+
"unk_token": "[UNK]"
|
| 65 |
+
}
|
training_args.bin
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:286361f7b6a7f6523641ed4f4e1fc273fed6aac87919e8decf87c2000151588c
|
| 3 |
+
size 5624
|
training_params.json
ADDED
|
@@ -0,0 +1,33 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"data_path": "autotrain-transcriptions/autotrain-data",
|
| 3 |
+
"model": "sentence-transformers/multi-qa-MiniLM-L6-cos-v1",
|
| 4 |
+
"lr": 3e-05,
|
| 5 |
+
"epochs": 3,
|
| 6 |
+
"max_seq_length": 128,
|
| 7 |
+
"batch_size": 8,
|
| 8 |
+
"warmup_ratio": 0.1,
|
| 9 |
+
"gradient_accumulation": 1,
|
| 10 |
+
"optimizer": "adamw_torch",
|
| 11 |
+
"scheduler": "linear",
|
| 12 |
+
"weight_decay": 0.0,
|
| 13 |
+
"max_grad_norm": 1.0,
|
| 14 |
+
"seed": 42,
|
| 15 |
+
"train_split": "train",
|
| 16 |
+
"valid_split": "validation",
|
| 17 |
+
"logging_steps": -1,
|
| 18 |
+
"project_name": "autotrain-transcriptions",
|
| 19 |
+
"auto_find_batch_size": false,
|
| 20 |
+
"mixed_precision": "fp16",
|
| 21 |
+
"save_total_limit": 1,
|
| 22 |
+
"push_to_hub": true,
|
| 23 |
+
"eval_strategy": "epoch",
|
| 24 |
+
"username": "clairedhx",
|
| 25 |
+
"log": "tensorboard",
|
| 26 |
+
"early_stopping_patience": 5,
|
| 27 |
+
"early_stopping_threshold": 0.01,
|
| 28 |
+
"trainer": "qa",
|
| 29 |
+
"sentence1_column": "autotrain_sentence1",
|
| 30 |
+
"sentence2_column": "autotrain_sentence2",
|
| 31 |
+
"sentence3_column": "autotrain_sentence3",
|
| 32 |
+
"target_column": "autotrain_target"
|
| 33 |
+
}
|
vocab.txt
ADDED
|
The diff for this file is too large to render.
See raw diff
|
|
|