KhaledReda commited on
Commit
f2ced8f
·
verified ·
1 Parent(s): 5e78bb6

Upload folder using huggingface_hub

Browse files
1_Pooling/config.json ADDED
@@ -0,0 +1,10 @@
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "word_embedding_dimension": 384,
3
+ "pooling_mode_cls_token": false,
4
+ "pooling_mode_mean_tokens": true,
5
+ "pooling_mode_max_tokens": false,
6
+ "pooling_mode_mean_sqrt_len_tokens": false,
7
+ "pooling_mode_weightedmean_tokens": false,
8
+ "pooling_mode_lasttoken": false,
9
+ "include_prompt": true
10
+ }
README.md ADDED
@@ -0,0 +1,1458 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ ---
2
+ language:
3
+ - en
4
+ license: apache-2.0
5
+ tags:
6
+ - sentence-transformers
7
+ - sentence-similarity
8
+ - feature-extraction
9
+ - dense
10
+ - generated_from_trainer
11
+ - dataset_size:12582766
12
+ - loss:CoSENTLoss
13
+ base_model: KhaledReda/all-MiniLM-L6-v23-pair_score
14
+ widget:
15
+ - source_sentence: police pyramid north
16
+ sentences:
17
+ - silver sparkle category fashion footwear shoe heels tags silver heels women heels
18
+ thin straps heels rhinestones heels heels sparkle heels keywords heels sparkle
19
+ heels attrs gender women brand unidentical generic name heels size 37 features
20
+ thin straps sparkle material rhinestones color silver description thin straps
21
+ with rhinestones heels. height 7 cm.
22
+ - colorful printed long cardigan category fashion casual wear outerwear outerwear
23
+ tags cardigan colorful cardigan long cardigan printed cardigan keywords cardigan
24
+ colorful cardigan long cardigan printed cardigan attrs gender women brand u modest
25
+ generic name cardigan features long types of fashion styles casual everyday wear
26
+ color colorful pattern printed
27
+ - courtly black category fashion footwear shoe flats tags flat shoe black shoe black
28
+ flat shoe courtly keywords courtly attrs color black
29
+ - source_sentence: pre play fanny pack
30
+ sentences:
31
+ - streptoquin - antidiarrheal antispasmodic - 20 tablets category pharmacies medicine
32
+ stomach medicine stomach medicine tags antidiarrheal streptoquin antispasmodic
33
+ streptoquin streptoquin streptoquin tablets keywords antidiarrheal streptoquin
34
+ antispasmodic streptoquin streptoquin streptoquin tablets attrs pharmacies form
35
+ tablets
36
+ - sante - gold brownie cherry granola - 300 gr category groceries supermarkets grain
37
+ granola tags sante palm oil free granola wheat free granola brownie granola cherry
38
+ granola gold granola granola sante granola keywords brownie granola cherry granola
39
+ gold granola granola sante granola description palm oil free no added wheat
40
+ - kataflam henedy edition category kids toys and games game card game tags women
41
+ cards game men cards game unisex cards game card games henedy card games kataflam
42
+ kataflam card games kataflam henedy card games keywords card games henedy card
43
+ games kataflam kataflam card games kataflam henedy card games
44
+ - source_sentence: yellow
45
+ sentences:
46
+ - koi category fashion accessories neckwear and scarf scarf tags youkai scarf fabrics
47
+ scarf polyfibre fabric scarf satin scarf chiffon scarf lightweight scarf semi
48
+ transparent scarf soft scarf blue scarf white scarf headscarf koi scarf scarf
49
+ keywords koi scarf scarf attrs gender women brand pavo generic name scarf product
50
+ name koi measurements 200 70 cm features lightweight material polyfibre color
51
+ blue and white pattern abstract description japanese heritage harnessed to be
52
+ a piece of art for youkoi comes in two fabrics chiffon and silk to suit your look
53
+ material polyfibre fabric finish satin and chiffon characteristic light weight
54
+ semi transparent shiny smooth very soft size kindly note that the scarf may vary
55
+ by a couple of centimeters 200 x 70 color blue white style textured abstract border
56
+ care instructions put a cloth on top while ironing on low setting wash under running
57
+ water not in a bucket or full sink. do not soak.hand wash with care exchange policy
58
+ exchange is only offered within 48 hours of receiving a wrong scarf other than
59
+ the one you ordererd or one that has a defect. plea
60
+ - maalox plus lemon 180 ml susp category pharmacies medicine stomach medicine stomach
61
+ medicine tags maalox plus maalox plus lemon keywords maalox plus maalox plus lemon
62
+ attrs units 180 millilitre
63
+ - white in green leaves category fashion accessories neckwear and scarf scarf tags
64
+ crepe scarf chiffon scarf white scarf leaves keywords leaves attrs gender women
65
+ brand souple generic name scarf measurements 200 75 cm material crepe chiffon
66
+ color white green pattern leaves description imported crepe chiffon. size 200
67
+ x 75. colors included white different shades of green.
68
+ - source_sentence: ankh scarab royal ring
69
+ sentences:
70
+ - masterpiece ethereal ring mother of pearl category fashion jewelry ring ring tags
71
+ silver ring men ring ethereal ring masterpiece ring mother pearl ring ring keywords
72
+ ethereal ring masterpiece ring mother pearl ring ring attrs gender men brand nora
73
+ el batran jewellery generic name ring product name ethereal features masterpiece
74
+ material silver mother of pearl color silver
75
+ - omegapress 0.1 mg 30/tab new category health and nutrition dietary supplements
76
+ omega omega tags omegapress keywords omegapress attrs pharmacies form tab units
77
+ 0.0001 gram
78
+ - avene physiolift smooth.night balm 30 m category beauty skincare anti-aging anti-aging
79
+ tags avene night balm avene physiolift avene smoothing night balm physiolift smoothing
80
+ night balm smoothing night balm keywords avene night balm avene physiolift avene
81
+ smoothing night balm physiolift smoothing night balm smoothing night balm attrs
82
+ units 30 m
83
+ - source_sentence: laces boot
84
+ sentences:
85
+ - baked savory tart crust 3.5 cm - 12 pieces only cairo giza category groceries
86
+ specialty foods bakery pastry tags savory tart shells mini quiches shells canapés
87
+ shells pre made tart shells pre cooked tart shells ready to bake baked tart crust
88
+ crust tart savory tart crust tart keywords baked tart crust crust tart savory
89
+ tart crust tart attrs units 12 pieces size numeric 3.5 cm description 12 pieces
90
+ in each pack. each shell has a diameter of 3.5 cm. also known as savory tart shells.
91
+ suitable for savory creations such as mini quiches and canapés. save time with
92
+ these pre made tart shells. just prep your favorite filling and it s ready to
93
+ be served fill them with cheese smoked salmon eggs vegetables cooked beef or chicken.
94
+ they re not frozen. tart crusts are pre cooked and stored at room temperature.
95
+ perishable. available for delivery only to cairo and giza residents.
96
+ - raw african coffee soap category beauty skincare face soap face soap tags shea
97
+ butter soap coconut oil soap antioxidant soap firming skin soap dark spots soap
98
+ coffee soap raw african raw african soap soap ahwa soap kahwa soap kahwah soap
99
+ qahwa soap raw african ahwa soap raw african kahwa soap raw african kahwah soap
100
+ raw african qahwa soap keywords coffee soap raw african raw african soap soap
101
+ ahwa soap kahwa soap kahwah soap qahwa soap raw african ahwa soap raw african
102
+ kahwa soap raw african kahwah soap raw african qahwa soap description our coffee-based
103
+ soap bar gives you a boosting and energizing sensation this soap is rich in antioxidants
104
+ and nutrients that fight age signs firms and tighten the skin and gives you a
105
+ youthful look. it helps in reducing dark spots and acne scars. for all skin types.
106
+ this product is free of harsh chemicals like parabens sulphates or mineral oils.
107
+ we never test our products on animals and we don t deal with suppliers who test
108
+ their products on animals. ingredients shea butter coconut oil olive oil coffee.
109
+ - nursing covers mustard flowers category kids baby care breastfeeding aid breastfeeding
110
+ aid tags breathable nursing cover full coverage nursing cover foldable nursing
111
+ cover pouch nursing cover colorful nursing cover flowers nursing covers mustard
112
+ nursing covers nursing covers keywords flowers nursing covers mustard nursing
113
+ covers nursing covers description breastfeeding is one of the most special yet
114
+ challenging things in motherhood we just wanted to add some more colors to this
115
+ special moment with all its colors product details soft light breathable fabric
116
+ machine washable full coverage comes with its pouch foldable in seconds
117
+ datasets:
118
+ - KhaledReda/pairs_with_scores_v120_tag_true_positives_and_false_negatives_description
119
+ pipeline_tag: sentence-similarity
120
+ library_name: sentence-transformers
121
+ ---
122
+
123
+ # all-MiniLM-L6-v24-pair_score
124
+
125
+ This is a [sentence-transformers](https://www.SBERT.net) model finetuned from [KhaledReda/all-MiniLM-L6-v23-pair_score](https://huggingface.co/KhaledReda/all-MiniLM-L6-v23-pair_score) on the [pairs_with_scores_v120_tag_true_positives_and_false_negatives_description](https://huggingface.co/datasets/KhaledReda/pairs_with_scores_v120_tag_true_positives_and_false_negatives_description) dataset. It maps sentences & paragraphs to a 384-dimensional dense vector space and can be used for semantic textual similarity, semantic search, paraphrase mining, text classification, clustering, and more.
126
+
127
+ ## Model Details
128
+
129
+ ### Model Description
130
+ - **Model Type:** Sentence Transformer
131
+ - **Base model:** [KhaledReda/all-MiniLM-L6-v23-pair_score](https://huggingface.co/KhaledReda/all-MiniLM-L6-v23-pair_score) <!-- at revision ecc0fc98a44e832815c4ccf46162422ded24993e -->
132
+ - **Maximum Sequence Length:** 256 tokens
133
+ - **Output Dimensionality:** 384 dimensions
134
+ - **Similarity Function:** Cosine Similarity
135
+ - **Training Dataset:**
136
+ - [pairs_with_scores_v120_tag_true_positives_and_false_negatives_description](https://huggingface.co/datasets/KhaledReda/pairs_with_scores_v120_tag_true_positives_and_false_negatives_description)
137
+ - **Language:** en
138
+ - **License:** apache-2.0
139
+
140
+ ### Model Sources
141
+
142
+ - **Documentation:** [Sentence Transformers Documentation](https://sbert.net)
143
+ - **Repository:** [Sentence Transformers on GitHub](https://github.com/UKPLab/sentence-transformers)
144
+ - **Hugging Face:** [Sentence Transformers on Hugging Face](https://huggingface.co/models?library=sentence-transformers)
145
+
146
+ ### Full Model Architecture
147
+
148
+ ```
149
+ SentenceTransformer(
150
+ (0): Transformer({'max_seq_length': 256, 'do_lower_case': False, 'architecture': 'BertModel'})
151
+ (1): Pooling({'word_embedding_dimension': 384, 'pooling_mode_cls_token': False, 'pooling_mode_mean_tokens': True, 'pooling_mode_max_tokens': False, 'pooling_mode_mean_sqrt_len_tokens': False, 'pooling_mode_weightedmean_tokens': False, 'pooling_mode_lasttoken': False, 'include_prompt': True})
152
+ (2): Normalize()
153
+ )
154
+ ```
155
+
156
+ ## Usage
157
+
158
+ ### Direct Usage (Sentence Transformers)
159
+
160
+ First install the Sentence Transformers library:
161
+
162
+ ```bash
163
+ pip install -U sentence-transformers
164
+ ```
165
+
166
+ Then you can load this model and run inference.
167
+ ```python
168
+ from sentence_transformers import SentenceTransformer
169
+
170
+ # Download from the 🤗 Hub
171
+ model = SentenceTransformer("KhaledReda/all-MiniLM-L6-v24-pair_score")
172
+ # Run inference
173
+ sentences = [
174
+ 'laces boot',
175
+ 'nursing covers mustard flowers category kids baby care breastfeeding aid breastfeeding aid tags breathable nursing cover full coverage nursing cover foldable nursing cover pouch nursing cover colorful nursing cover flowers nursing covers mustard nursing covers nursing covers keywords flowers nursing covers mustard nursing covers nursing covers description breastfeeding is one of the most special yet challenging things in motherhood we just wanted to add some more colors to this special moment with all its colors product details soft light breathable fabric machine washable full coverage comes with its pouch foldable in seconds',
176
+ 'raw african coffee soap category beauty skincare face soap face soap tags shea butter soap coconut oil soap antioxidant soap firming skin soap dark spots soap coffee soap raw african raw african soap soap ahwa soap kahwa soap kahwah soap qahwa soap raw african ahwa soap raw african kahwa soap raw african kahwah soap raw african qahwa soap keywords coffee soap raw african raw african soap soap ahwa soap kahwa soap kahwah soap qahwa soap raw african ahwa soap raw african kahwa soap raw african kahwah soap raw african qahwa soap description our coffee-based soap bar gives you a boosting and energizing sensation this soap is rich in antioxidants and nutrients that fight age signs firms and tighten the skin and gives you a youthful look. it helps in reducing dark spots and acne scars. for all skin types. this product is free of harsh chemicals like parabens sulphates or mineral oils. we never test our products on animals and we don t deal with suppliers who test their products on animals. ingredients shea butter coconut oil olive oil coffee.',
177
+ ]
178
+ embeddings = model.encode(sentences)
179
+ print(embeddings.shape)
180
+ # [3, 384]
181
+
182
+ # Get the similarity scores for the embeddings
183
+ similarities = model.similarity(embeddings, embeddings)
184
+ print(similarities)
185
+ # tensor([[ 1.0000, -0.0817, -0.0683],
186
+ # [-0.0817, 1.0000, -0.0380],
187
+ # [-0.0683, -0.0380, 1.0000]])
188
+ ```
189
+
190
+ <!--
191
+ ### Direct Usage (Transformers)
192
+
193
+ <details><summary>Click to see the direct usage in Transformers</summary>
194
+
195
+ </details>
196
+ -->
197
+
198
+ <!--
199
+ ### Downstream Usage (Sentence Transformers)
200
+
201
+ You can finetune this model on your own dataset.
202
+
203
+ <details><summary>Click to expand</summary>
204
+
205
+ </details>
206
+ -->
207
+
208
+ <!--
209
+ ### Out-of-Scope Use
210
+
211
+ *List how the model may foreseeably be misused and address what users ought not to do with the model.*
212
+ -->
213
+
214
+ <!--
215
+ ## Bias, Risks and Limitations
216
+
217
+ *What are the known or foreseeable issues stemming from this model? You could also flag here known failure cases or weaknesses of the model.*
218
+ -->
219
+
220
+ <!--
221
+ ### Recommendations
222
+
223
+ *What are recommendations with respect to the foreseeable issues? For example, filtering explicit content.*
224
+ -->
225
+
226
+ ## Training Details
227
+
228
+ ### Training Dataset
229
+
230
+ #### pairs_with_scores_v120_tag_true_positives_and_false_negatives_description
231
+
232
+ * Dataset: [pairs_with_scores_v120_tag_true_positives_and_false_negatives_description](https://huggingface.co/datasets/KhaledReda/pairs_with_scores_v120_tag_true_positives_and_false_negatives_description) at [25785dc](https://huggingface.co/datasets/KhaledReda/pairs_with_scores_v120_tag_true_positives_and_false_negatives_description/tree/25785dc72f76c6af9685abb8377e1ed5ed04e9aa)
233
+ * Size: 12,582,766 training samples
234
+ * Columns: <code>sentence1</code>, <code>sentence2</code>, and <code>score</code>
235
+ * Approximate statistics based on the first 1000 samples:
236
+ | | sentence1 | sentence2 | score |
237
+ |:--------|:---------------------------------------------------------------------------------|:-------------------------------------------------------------------------------------|:--------------------------------------------------------------|
238
+ | type | string | string | float |
239
+ | details | <ul><li>min: 3 tokens</li><li>mean: 5.59 tokens</li><li>max: 22 tokens</li></ul> | <ul><li>min: 11 tokens</li><li>mean: 104.43 tokens</li><li>max: 256 tokens</li></ul> | <ul><li>min: 0.0</li><li>mean: 0.1</li><li>max: 1.0</li></ul> |
240
+ * Samples:
241
+ | sentence1 | sentence2 | score |
242
+ |:--------------------------------------|:---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|:-----------------|
243
+ | <code>bergamot eau de toilette</code> | <code>knitted crop top in beige category fashion casual wear top top tags women top beige top comfort top breathable top fabric top stretch top knitted sets crop top top keywords crop top top attrs gender women brand psych generic name top size s features high-waisted soft breathable cut cropped material knitted color beige occasion beach description elevate your style with our chic knitted set featuring a matching pair of knitted pants and a knitted crop top. designed for comfort and effortless elegance this set is perfect for any occasion. the knitted crop top offers a flattering fit with a touch of stretch while the high-waisted knitted pants provide a sleek silhouette and ultimate comfort. made from soft breathable fabric this set is perfect for both enjoying a day at the beach and stepping out in style. versatile and stylish this knitted set is a must-have addition to your wardrobe. mix and match with your favorite accessories for a look that s uniquely you. model is 177 cm wearing size...</code> | <code>0.0</code> |
244
+ | <code>wide leg pants</code> | <code>titania solingen no/1063 category beauty cosmetics make-up tool tweezers tags solingen tweezers titania titania tweezers tweezers keywords solingen tweezers titania titania tweezers tweezers</code> | <code>0.0</code> |
245
+ | <code>women pumps</code> | <code>neurimax 30/cap 2 ex.new category health and nutrition dietary supplements joint supplement joint supplement tags neurimax neurimax supplement keywords neurimax neurimax supplement attrs pharmacies form cap</code> | <code>0.0</code> |
246
+ * Loss: [<code>CoSENTLoss</code>](https://sbert.net/docs/package_reference/sentence_transformer/losses.html#cosentloss) with these parameters:
247
+ ```json
248
+ {
249
+ "scale": 20.0,
250
+ "similarity_fct": "pairwise_cos_sim"
251
+ }
252
+ ```
253
+
254
+ ### Evaluation Dataset
255
+
256
+ #### pairs_with_scores_v120_tag_true_positives_and_false_negatives_description
257
+
258
+ * Dataset: [pairs_with_scores_v120_tag_true_positives_and_false_negatives_description](https://huggingface.co/datasets/KhaledReda/pairs_with_scores_v120_tag_true_positives_and_false_negatives_description) at [25785dc](https://huggingface.co/datasets/KhaledReda/pairs_with_scores_v120_tag_true_positives_and_false_negatives_description/tree/25785dc72f76c6af9685abb8377e1ed5ed04e9aa)
259
+ * Size: 63,230 evaluation samples
260
+ * Columns: <code>sentence1</code>, <code>sentence2</code>, and <code>score</code>
261
+ * Approximate statistics based on the first 1000 samples:
262
+ | | sentence1 | sentence2 | score |
263
+ |:--------|:---------------------------------------------------------------------------------|:-------------------------------------------------------------------------------------|:--------------------------------------------------------------|
264
+ | type | string | string | float |
265
+ | details | <ul><li>min: 3 tokens</li><li>mean: 5.51 tokens</li><li>max: 20 tokens</li></ul> | <ul><li>min: 13 tokens</li><li>mean: 104.12 tokens</li><li>max: 256 tokens</li></ul> | <ul><li>min: 0.0</li><li>mean: 0.1</li><li>max: 1.0</li></ul> |
266
+ * Samples:
267
+ | sentence1 | sentence2 | score |
268
+ |:-------------------------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|:-----------------|
269
+ | <code>always maxi long</code> | <code>american - buffalo chicken hot category restaurants pizza deli pizza tags cheesy american pizza thick pizza mozzarella pizza cheese pizza hot chicken pizza american pizza buffalo chicken pizza chicken pizza pizza keywords american pizza buffalo chicken pizza chicken pizza pizza description a thick pizza with a generous amount of cheese mozzarella tomato sauce chicken buffalo sauce</code> | <code>0.0</code> |
270
+ | <code>sushi</code> | <code>lemon fluffy set category fashion casual wear outfit outfit tags linen outfit summer outfit women outfit blouse skirt fluffy outfit lemon outfit outfit outfit set keywords fluffy outfit lemon outfit outfit outfit set attrs gender women brand dovera generic name outfit size one size features fluffy outfit style skirt top material linen color lemon season summer description modest comfyand summery set with fluffy skirt and top comes in 3 colors apple green - brown - blue one size. outside materials linen. blouse length 55 cm width 65 cm shoulder 25 cm skirt length 100 cm</code> | <code>0.0</code> |
271
+ | <code>eyefree lid wipes</code> | <code>disposable - 5 ml syringe - latex free - 1 pcs category pharmacies first aid and medical equipment medical accessory medical accessory tags disposable syringe syringe keywords disposable syringe syringe attrs units 1 pcs 5 millilitre</code> | <code>0.0</code> |
272
+ * Loss: [<code>CoSENTLoss</code>](https://sbert.net/docs/package_reference/sentence_transformer/losses.html#cosentloss) with these parameters:
273
+ ```json
274
+ {
275
+ "scale": 20.0,
276
+ "similarity_fct": "pairwise_cos_sim"
277
+ }
278
+ ```
279
+
280
+ ### Training Hyperparameters
281
+ #### Non-Default Hyperparameters
282
+
283
+ - `eval_strategy`: steps
284
+ - `per_device_train_batch_size`: 128
285
+ - `per_device_eval_batch_size`: 128
286
+ - `learning_rate`: 2e-05
287
+ - `num_train_epochs`: 1
288
+ - `warmup_ratio`: 0.1
289
+ - `fp16`: True
290
+
291
+ #### All Hyperparameters
292
+ <details><summary>Click to expand</summary>
293
+
294
+ - `overwrite_output_dir`: False
295
+ - `do_predict`: False
296
+ - `eval_strategy`: steps
297
+ - `prediction_loss_only`: True
298
+ - `per_device_train_batch_size`: 128
299
+ - `per_device_eval_batch_size`: 128
300
+ - `per_gpu_train_batch_size`: None
301
+ - `per_gpu_eval_batch_size`: None
302
+ - `gradient_accumulation_steps`: 1
303
+ - `eval_accumulation_steps`: None
304
+ - `torch_empty_cache_steps`: None
305
+ - `learning_rate`: 2e-05
306
+ - `weight_decay`: 0.0
307
+ - `adam_beta1`: 0.9
308
+ - `adam_beta2`: 0.999
309
+ - `adam_epsilon`: 1e-08
310
+ - `max_grad_norm`: 1.0
311
+ - `num_train_epochs`: 1
312
+ - `max_steps`: -1
313
+ - `lr_scheduler_type`: linear
314
+ - `lr_scheduler_kwargs`: {}
315
+ - `warmup_ratio`: 0.1
316
+ - `warmup_steps`: 0
317
+ - `log_level`: passive
318
+ - `log_level_replica`: warning
319
+ - `log_on_each_node`: True
320
+ - `logging_nan_inf_filter`: True
321
+ - `save_safetensors`: True
322
+ - `save_on_each_node`: False
323
+ - `save_only_model`: False
324
+ - `restore_callback_states_from_checkpoint`: False
325
+ - `no_cuda`: False
326
+ - `use_cpu`: False
327
+ - `use_mps_device`: False
328
+ - `seed`: 42
329
+ - `data_seed`: None
330
+ - `jit_mode_eval`: False
331
+ - `use_ipex`: False
332
+ - `bf16`: False
333
+ - `fp16`: True
334
+ - `fp16_opt_level`: O1
335
+ - `half_precision_backend`: auto
336
+ - `bf16_full_eval`: False
337
+ - `fp16_full_eval`: False
338
+ - `tf32`: None
339
+ - `local_rank`: 0
340
+ - `ddp_backend`: None
341
+ - `tpu_num_cores`: None
342
+ - `tpu_metrics_debug`: False
343
+ - `debug`: []
344
+ - `dataloader_drop_last`: False
345
+ - `dataloader_num_workers`: 0
346
+ - `dataloader_prefetch_factor`: None
347
+ - `past_index`: -1
348
+ - `disable_tqdm`: False
349
+ - `remove_unused_columns`: True
350
+ - `label_names`: None
351
+ - `load_best_model_at_end`: False
352
+ - `ignore_data_skip`: False
353
+ - `fsdp`: []
354
+ - `fsdp_min_num_params`: 0
355
+ - `fsdp_config`: {'min_num_params': 0, 'xla': False, 'xla_fsdp_v2': False, 'xla_fsdp_grad_ckpt': False}
356
+ - `fsdp_transformer_layer_cls_to_wrap`: None
357
+ - `accelerator_config`: {'split_batches': False, 'dispatch_batches': None, 'even_batches': True, 'use_seedable_sampler': True, 'non_blocking': False, 'gradient_accumulation_kwargs': None}
358
+ - `deepspeed`: None
359
+ - `label_smoothing_factor`: 0.0
360
+ - `optim`: adamw_torch
361
+ - `optim_args`: None
362
+ - `adafactor`: False
363
+ - `group_by_length`: False
364
+ - `length_column_name`: length
365
+ - `ddp_find_unused_parameters`: None
366
+ - `ddp_bucket_cap_mb`: None
367
+ - `ddp_broadcast_buffers`: False
368
+ - `dataloader_pin_memory`: True
369
+ - `dataloader_persistent_workers`: False
370
+ - `skip_memory_metrics`: True
371
+ - `use_legacy_prediction_loop`: False
372
+ - `push_to_hub`: False
373
+ - `resume_from_checkpoint`: None
374
+ - `hub_model_id`: None
375
+ - `hub_strategy`: every_save
376
+ - `hub_private_repo`: None
377
+ - `hub_always_push`: False
378
+ - `hub_revision`: None
379
+ - `gradient_checkpointing`: False
380
+ - `gradient_checkpointing_kwargs`: None
381
+ - `include_inputs_for_metrics`: False
382
+ - `include_for_metrics`: []
383
+ - `eval_do_concat_batches`: True
384
+ - `fp16_backend`: auto
385
+ - `push_to_hub_model_id`: None
386
+ - `push_to_hub_organization`: None
387
+ - `mp_parameters`:
388
+ - `auto_find_batch_size`: False
389
+ - `full_determinism`: False
390
+ - `torchdynamo`: None
391
+ - `ray_scope`: last
392
+ - `ddp_timeout`: 1800
393
+ - `torch_compile`: False
394
+ - `torch_compile_backend`: None
395
+ - `torch_compile_mode`: None
396
+ - `include_tokens_per_second`: False
397
+ - `include_num_input_tokens_seen`: False
398
+ - `neftune_noise_alpha`: None
399
+ - `optim_target_modules`: None
400
+ - `batch_eval_metrics`: False
401
+ - `eval_on_start`: False
402
+ - `use_liger_kernel`: False
403
+ - `liger_kernel_config`: None
404
+ - `eval_use_gather_object`: False
405
+ - `average_tokens_across_devices`: False
406
+ - `prompts`: None
407
+ - `batch_sampler`: batch_sampler
408
+ - `multi_dataset_batch_sampler`: proportional
409
+ - `router_mapping`: {}
410
+ - `learning_rate_mapping`: {}
411
+
412
+ </details>
413
+
414
+ ### Training Logs
415
+ <details><summary>Click to expand</summary>
416
+
417
+ | Epoch | Step | Training Loss |
418
+ |:------:|:-----:|:-------------:|
419
+ | 0.0010 | 100 | 5.7173 |
420
+ | 0.0020 | 200 | 5.7311 |
421
+ | 0.0031 | 300 | 5.4355 |
422
+ | 0.0041 | 400 | 5.4133 |
423
+ | 0.0051 | 500 | 5.1707 |
424
+ | 0.0061 | 600 | 5.1954 |
425
+ | 0.0071 | 700 | 4.9123 |
426
+ | 0.0081 | 800 | 4.852 |
427
+ | 0.0092 | 900 | 4.8442 |
428
+ | 0.0102 | 1000 | 4.4788 |
429
+ | 0.0112 | 1100 | 4.5735 |
430
+ | 0.0122 | 1200 | 4.3171 |
431
+ | 0.0132 | 1300 | 4.3714 |
432
+ | 0.0142 | 1400 | 4.2536 |
433
+ | 0.0153 | 1500 | 4.1945 |
434
+ | 0.0163 | 1600 | 4.0586 |
435
+ | 0.0173 | 1700 | 3.8934 |
436
+ | 0.0183 | 1800 | 3.9997 |
437
+ | 0.0193 | 1900 | 3.6822 |
438
+ | 0.0203 | 2000 | 3.6699 |
439
+ | 0.0214 | 2100 | 3.7657 |
440
+ | 0.0224 | 2200 | 3.6693 |
441
+ | 0.0234 | 2300 | 3.52 |
442
+ | 0.0244 | 2400 | 3.5535 |
443
+ | 0.0254 | 2500 | 3.3532 |
444
+ | 0.0264 | 2600 | 3.3329 |
445
+ | 0.0275 | 2700 | 3.3375 |
446
+ | 0.0285 | 2800 | 3.1954 |
447
+ | 0.0295 | 2900 | 3.1045 |
448
+ | 0.0305 | 3000 | 3.2362 |
449
+ | 0.0315 | 3100 | 3.1298 |
450
+ | 0.0326 | 3200 | 2.9862 |
451
+ | 0.0336 | 3300 | 2.9966 |
452
+ | 0.0346 | 3400 | 2.9131 |
453
+ | 0.0356 | 3500 | 3.0058 |
454
+ | 0.0366 | 3600 | 2.5357 |
455
+ | 0.0376 | 3700 | 2.7563 |
456
+ | 0.0387 | 3800 | 2.8085 |
457
+ | 0.0397 | 3900 | 2.6729 |
458
+ | 0.0407 | 4000 | 2.5785 |
459
+ | 0.0417 | 4100 | 2.7879 |
460
+ | 0.0427 | 4200 | 2.502 |
461
+ | 0.0437 | 4300 | 2.3824 |
462
+ | 0.0448 | 4400 | 2.4391 |
463
+ | 0.0458 | 4500 | 2.3122 |
464
+ | 0.0468 | 4600 | 2.2223 |
465
+ | 0.0478 | 4700 | 2.4876 |
466
+ | 0.0488 | 4800 | 2.5127 |
467
+ | 0.0498 | 4900 | 2.3576 |
468
+ | 0.0509 | 5000 | 1.961 |
469
+ | 0.0519 | 5100 | 2.4402 |
470
+ | 0.0529 | 5200 | 2.145 |
471
+ | 0.0539 | 5300 | 2.2863 |
472
+ | 0.0549 | 5400 | 2.2647 |
473
+ | 0.0559 | 5500 | 2.1835 |
474
+ | 0.0570 | 5600 | 2.0451 |
475
+ | 0.0580 | 5700 | 2.0484 |
476
+ | 0.0590 | 5800 | 2.1578 |
477
+ | 0.0600 | 5900 | 2.1455 |
478
+ | 0.0610 | 6000 | 2.0281 |
479
+ | 0.0621 | 6100 | 2.0751 |
480
+ | 0.0631 | 6200 | 1.9221 |
481
+ | 0.0641 | 6300 | 1.8355 |
482
+ | 0.0651 | 6400 | 1.9353 |
483
+ | 0.0661 | 6500 | 1.8617 |
484
+ | 0.0671 | 6600 | 1.8399 |
485
+ | 0.0682 | 6700 | 1.927 |
486
+ | 0.0692 | 6800 | 1.6166 |
487
+ | 0.0702 | 6900 | 2.1288 |
488
+ | 0.0712 | 7000 | 1.7884 |
489
+ | 0.0722 | 7100 | 1.8565 |
490
+ | 0.0732 | 7200 | 1.85 |
491
+ | 0.0743 | 7300 | 1.7127 |
492
+ | 0.0753 | 7400 | 1.7836 |
493
+ | 0.0763 | 7500 | 1.6113 |
494
+ | 0.0773 | 7600 | 1.8484 |
495
+ | 0.0783 | 7700 | 1.8673 |
496
+ | 0.0793 | 7800 | 1.6261 |
497
+ | 0.0804 | 7900 | 1.6207 |
498
+ | 0.0814 | 8000 | 2.0533 |
499
+ | 0.0824 | 8100 | 1.729 |
500
+ | 0.0834 | 8200 | 1.5739 |
501
+ | 0.0844 | 8300 | 1.7526 |
502
+ | 0.0855 | 8400 | 1.7466 |
503
+ | 0.0865 | 8500 | 1.6939 |
504
+ | 0.0875 | 8600 | 1.4806 |
505
+ | 0.0885 | 8700 | 1.6851 |
506
+ | 0.0895 | 8800 | 1.6117 |
507
+ | 0.0905 | 8900 | 1.5053 |
508
+ | 0.0916 | 9000 | 1.6736 |
509
+ | 0.0926 | 9100 | 1.5396 |
510
+ | 0.0936 | 9200 | 1.5309 |
511
+ | 0.0946 | 9300 | 1.5081 |
512
+ | 0.0956 | 9400 | 1.34 |
513
+ | 0.0966 | 9500 | 1.5146 |
514
+ | 0.0977 | 9600 | 1.3838 |
515
+ | 0.0987 | 9700 | 1.559 |
516
+ | 0.0997 | 9800 | 1.5523 |
517
+ | 0.1007 | 9900 | 1.3153 |
518
+ | 0.1017 | 10000 | 1.277 |
519
+ | 0.1027 | 10100 | 1.5285 |
520
+ | 0.1038 | 10200 | 1.3658 |
521
+ | 0.1048 | 10300 | 1.4931 |
522
+ | 0.1058 | 10400 | 1.3631 |
523
+ | 0.1068 | 10500 | 1.3536 |
524
+ | 0.1078 | 10600 | 1.4563 |
525
+ | 0.1088 | 10700 | 1.4296 |
526
+ | 0.1099 | 10800 | 1.4555 |
527
+ | 0.1109 | 10900 | 1.5459 |
528
+ | 0.1119 | 11000 | 1.4178 |
529
+ | 0.1129 | 11100 | 1.4425 |
530
+ | 0.1139 | 11200 | 1.3951 |
531
+ | 0.1150 | 11300 | 1.2531 |
532
+ | 0.1160 | 11400 | 1.4435 |
533
+ | 0.1170 | 11500 | 1.168 |
534
+ | 0.1180 | 11600 | 1.3839 |
535
+ | 0.1190 | 11700 | 1.4541 |
536
+ | 0.1200 | 11800 | 1.2666 |
537
+ | 0.1211 | 11900 | 1.3136 |
538
+ | 0.1221 | 12000 | 1.3001 |
539
+ | 0.1231 | 12100 | 1.1904 |
540
+ | 0.1241 | 12200 | 1.2617 |
541
+ | 0.1251 | 12300 | 1.2397 |
542
+ | 0.1261 | 12400 | 1.5342 |
543
+ | 0.1272 | 12500 | 1.3735 |
544
+ | 0.1282 | 12600 | 1.2123 |
545
+ | 0.1292 | 12700 | 1.28 |
546
+ | 0.1302 | 12800 | 1.3773 |
547
+ | 0.1312 | 12900 | 1.3931 |
548
+ | 0.1322 | 13000 | 1.4614 |
549
+ | 0.1333 | 13100 | 1.3945 |
550
+ | 0.1343 | 13200 | 1.4541 |
551
+ | 0.1353 | 13300 | 1.2571 |
552
+ | 0.1363 | 13400 | 1.1574 |
553
+ | 0.1373 | 13500 | 1.2597 |
554
+ | 0.1383 | 13600 | 1.2595 |
555
+ | 0.1394 | 13700 | 1.218 |
556
+ | 0.1404 | 13800 | 1.262 |
557
+ | 0.1414 | 13900 | 1.0565 |
558
+ | 0.1424 | 14000 | 1.1767 |
559
+ | 0.1434 | 14100 | 1.2089 |
560
+ | 0.1445 | 14200 | 1.211 |
561
+ | 0.1455 | 14300 | 0.8943 |
562
+ | 0.1465 | 14400 | 1.2541 |
563
+ | 0.1475 | 14500 | 1.1358 |
564
+ | 0.1485 | 14600 | 0.9817 |
565
+ | 0.1495 | 14700 | 1.1535 |
566
+ | 0.1506 | 14800 | 1.2066 |
567
+ | 0.1516 | 14900 | 1.2272 |
568
+ | 0.1526 | 15000 | 0.9362 |
569
+ | 0.1536 | 15100 | 1.3058 |
570
+ | 0.1546 | 15200 | 1.2812 |
571
+ | 0.1556 | 15300 | 1.1447 |
572
+ | 0.1567 | 15400 | 1.2213 |
573
+ | 0.1577 | 15500 | 1.1535 |
574
+ | 0.1587 | 15600 | 1.5273 |
575
+ | 0.1597 | 15700 | 1.0432 |
576
+ | 0.1607 | 15800 | 1.3215 |
577
+ | 0.1617 | 15900 | 1.0787 |
578
+ | 0.1628 | 16000 | 1.1641 |
579
+ | 0.1638 | 16100 | 1.0483 |
580
+ | 0.1648 | 16200 | 1.3148 |
581
+ | 0.1658 | 16300 | 1.0111 |
582
+ | 0.1668 | 16400 | 1.1823 |
583
+ | 0.1678 | 16500 | 1.2526 |
584
+ | 0.1689 | 16600 | 0.8983 |
585
+ | 0.1699 | 16700 | 1.1997 |
586
+ | 0.1709 | 16800 | 1.1394 |
587
+ | 0.1719 | 16900 | 1.1923 |
588
+ | 0.1729 | 17000 | 1.1439 |
589
+ | 0.1740 | 17100 | 1.259 |
590
+ | 0.1750 | 17200 | 1.3803 |
591
+ | 0.1760 | 17300 | 1.1672 |
592
+ | 0.1770 | 17400 | 1.149 |
593
+ | 0.1780 | 17500 | 1.0019 |
594
+ | 0.1790 | 17600 | 0.9692 |
595
+ | 0.1801 | 17700 | 1.1611 |
596
+ | 0.1811 | 17800 | 1.111 |
597
+ | 0.1821 | 17900 | 0.9874 |
598
+ | 0.1831 | 18000 | 1.2028 |
599
+ | 0.1841 | 18100 | 0.9416 |
600
+ | 0.1851 | 18200 | 1.1619 |
601
+ | 0.1862 | 18300 | 1.17 |
602
+ | 0.1872 | 18400 | 1.003 |
603
+ | 0.1882 | 18500 | 0.9409 |
604
+ | 0.1892 | 18600 | 0.9224 |
605
+ | 0.1902 | 18700 | 0.9215 |
606
+ | 0.1912 | 18800 | 1.2007 |
607
+ | 0.1923 | 18900 | 1.0021 |
608
+ | 0.1933 | 19000 | 1.0305 |
609
+ | 0.1943 | 19100 | 1.1084 |
610
+ | 0.1953 | 19200 | 0.961 |
611
+ | 0.1963 | 19300 | 0.9769 |
612
+ | 0.1973 | 19400 | 1.218 |
613
+ | 0.1984 | 19500 | 1.043 |
614
+ | 0.1994 | 19600 | 1.0366 |
615
+ | 0.2004 | 19700 | 0.9459 |
616
+ | 0.2014 | 19800 | 1.0557 |
617
+ | 0.2024 | 19900 | 1.0953 |
618
+ | 0.2035 | 20000 | 1.0327 |
619
+ | 0.2045 | 20100 | 1.0284 |
620
+ | 0.2055 | 20200 | 0.9376 |
621
+ | 0.2065 | 20300 | 1.1122 |
622
+ | 0.2075 | 20400 | 0.9807 |
623
+ | 0.2085 | 20500 | 0.9054 |
624
+ | 0.2096 | 20600 | 1.069 |
625
+ | 0.2106 | 20700 | 1.0802 |
626
+ | 0.2116 | 20800 | 0.9857 |
627
+ | 0.2126 | 20900 | 1.1127 |
628
+ | 0.2136 | 21000 | 1.2601 |
629
+ | 0.2146 | 21100 | 0.9709 |
630
+ | 0.2157 | 21200 | 0.9984 |
631
+ | 0.2167 | 21300 | 1.1281 |
632
+ | 0.2177 | 21400 | 0.8692 |
633
+ | 0.2187 | 21500 | 1.1773 |
634
+ | 0.2197 | 21600 | 0.9221 |
635
+ | 0.2207 | 21700 | 0.9007 |
636
+ | 0.2218 | 21800 | 1.0686 |
637
+ | 0.2228 | 21900 | 1.1078 |
638
+ | 0.2238 | 22000 | 0.999 |
639
+ | 0.2248 | 22100 | 0.8577 |
640
+ | 0.2258 | 22200 | 1.0215 |
641
+ | 0.2268 | 22300 | 0.9952 |
642
+ | 0.2279 | 22400 | 0.9597 |
643
+ | 0.2289 | 22500 | 0.79 |
644
+ | 0.2299 | 22600 | 1.1086 |
645
+ | 0.2309 | 22700 | 1.1255 |
646
+ | 0.2319 | 22800 | 1.0515 |
647
+ | 0.2330 | 22900 | 0.9184 |
648
+ | 0.2340 | 23000 | 1.0096 |
649
+ | 0.2350 | 23100 | 1.0243 |
650
+ | 0.2360 | 23200 | 1.0578 |
651
+ | 0.2370 | 23300 | 0.9486 |
652
+ | 0.2380 | 23400 | 1.0553 |
653
+ | 0.2391 | 23500 | 0.9279 |
654
+ | 0.2401 | 23600 | 0.9487 |
655
+ | 0.2411 | 23700 | 1.0134 |
656
+ | 0.2421 | 23800 | 0.7462 |
657
+ | 0.2431 | 23900 | 0.7586 |
658
+ | 0.2441 | 24000 | 0.9968 |
659
+ | 0.2452 | 24100 | 1.1576 |
660
+ | 0.2462 | 24200 | 0.8984 |
661
+ | 0.2472 | 24300 | 1.0449 |
662
+ | 0.2482 | 24400 | 0.886 |
663
+ | 0.2492 | 24500 | 0.9021 |
664
+ | 0.2502 | 24600 | 1.1053 |
665
+ | 0.2513 | 24700 | 0.9241 |
666
+ | 0.2523 | 24800 | 1.0178 |
667
+ | 0.2533 | 24900 | 1.0758 |
668
+ | 0.2543 | 25000 | 0.8807 |
669
+ | 0.2553 | 25100 | 0.9876 |
670
+ | 0.2564 | 25200 | 1.0116 |
671
+ | 0.2574 | 25300 | 0.7735 |
672
+ | 0.2584 | 25400 | 1.0378 |
673
+ | 0.2594 | 25500 | 1.0 |
674
+ | 0.2604 | 25600 | 0.8934 |
675
+ | 0.2614 | 25700 | 0.9769 |
676
+ | 0.2625 | 25800 | 1.2004 |
677
+ | 0.2635 | 25900 | 0.9047 |
678
+ | 0.2645 | 26000 | 0.8331 |
679
+ | 0.2655 | 26100 | 1.0331 |
680
+ | 0.2665 | 26200 | 1.0265 |
681
+ | 0.2675 | 26300 | 0.8131 |
682
+ | 0.2686 | 26400 | 1.0083 |
683
+ | 0.2696 | 26500 | 1.0486 |
684
+ | 0.2706 | 26600 | 0.8721 |
685
+ | 0.2716 | 26700 | 0.9227 |
686
+ | 0.2726 | 26800 | 1.0438 |
687
+ | 0.2736 | 26900 | 0.6701 |
688
+ | 0.2747 | 27000 | 0.8246 |
689
+ | 0.2757 | 27100 | 0.8877 |
690
+ | 0.2767 | 27200 | 0.8974 |
691
+ | 0.2777 | 27300 | 0.9877 |
692
+ | 0.2787 | 27400 | 0.8809 |
693
+ | 0.2797 | 27500 | 0.8058 |
694
+ | 0.2808 | 27600 | 1.0499 |
695
+ | 0.2818 | 27700 | 1.0949 |
696
+ | 0.2828 | 27800 | 1.0794 |
697
+ | 0.2838 | 27900 | 0.7273 |
698
+ | 0.2848 | 28000 | 0.8775 |
699
+ | 0.2859 | 28100 | 0.7947 |
700
+ | 0.2869 | 28200 | 0.9967 |
701
+ | 0.2879 | 28300 | 1.0834 |
702
+ | 0.2889 | 28400 | 0.8397 |
703
+ | 0.2899 | 28500 | 0.9808 |
704
+ | 0.2909 | 28600 | 0.8525 |
705
+ | 0.2920 | 28700 | 0.6795 |
706
+ | 0.2930 | 28800 | 0.8213 |
707
+ | 0.2940 | 28900 | 0.7962 |
708
+ | 0.2950 | 29000 | 0.7181 |
709
+ | 0.2960 | 29100 | 0.7304 |
710
+ | 0.2970 | 29200 | 0.8983 |
711
+ | 0.2981 | 29300 | 0.8157 |
712
+ | 0.2991 | 29400 | 0.9902 |
713
+ | 0.3001 | 29500 | 1.106 |
714
+ | 0.3011 | 29600 | 0.9016 |
715
+ | 0.3021 | 29700 | 0.9756 |
716
+ | 0.3031 | 29800 | 0.9426 |
717
+ | 0.3042 | 29900 | 0.8033 |
718
+ | 0.3052 | 30000 | 0.7583 |
719
+ | 0.3062 | 30100 | 0.8602 |
720
+ | 0.3072 | 30200 | 0.8691 |
721
+ | 0.3082 | 30300 | 1.0453 |
722
+ | 0.3092 | 30400 | 0.9485 |
723
+ | 0.3103 | 30500 | 0.9637 |
724
+ | 0.3113 | 30600 | 0.8028 |
725
+ | 0.3123 | 30700 | 0.9261 |
726
+ | 0.3133 | 30800 | 0.7166 |
727
+ | 0.3143 | 30900 | 0.8809 |
728
+ | 0.3154 | 31000 | 0.8061 |
729
+ | 0.3164 | 31100 | 0.9817 |
730
+ | 0.3174 | 31200 | 0.94 |
731
+ | 0.3184 | 31300 | 0.7935 |
732
+ | 0.3194 | 31400 | 0.8372 |
733
+ | 0.3204 | 31500 | 1.1727 |
734
+ | 0.3215 | 31600 | 0.7606 |
735
+ | 0.3225 | 31700 | 0.9101 |
736
+ | 0.3235 | 31800 | 0.681 |
737
+ | 0.3245 | 31900 | 0.9235 |
738
+ | 0.3255 | 32000 | 0.7649 |
739
+ | 0.3265 | 32100 | 0.7917 |
740
+ | 0.3276 | 32200 | 0.9602 |
741
+ | 0.3286 | 32300 | 0.8561 |
742
+ | 0.3296 | 32400 | 0.7201 |
743
+ | 0.3306 | 32500 | 0.9261 |
744
+ | 0.3316 | 32600 | 0.9769 |
745
+ | 0.3326 | 32700 | 0.7281 |
746
+ | 0.3337 | 32800 | 0.8497 |
747
+ | 0.3347 | 32900 | 0.935 |
748
+ | 0.3357 | 33000 | 0.8837 |
749
+ | 0.3367 | 33100 | 0.6759 |
750
+ | 0.3377 | 33200 | 0.9258 |
751
+ | 0.3387 | 33300 | 0.8128 |
752
+ | 0.3398 | 33400 | 0.8352 |
753
+ | 0.3408 | 33500 | 0.7642 |
754
+ | 0.3418 | 33600 | 0.8117 |
755
+ | 0.3428 | 33700 | 0.8024 |
756
+ | 0.3438 | 33800 | 0.6297 |
757
+ | 0.3449 | 33900 | 0.8447 |
758
+ | 0.3459 | 34000 | 0.9483 |
759
+ | 0.3469 | 34100 | 0.6316 |
760
+ | 0.3479 | 34200 | 0.9778 |
761
+ | 0.3489 | 34300 | 1.2536 |
762
+ | 0.3499 | 34400 | 0.8554 |
763
+ | 0.3510 | 34500 | 0.7636 |
764
+ | 0.3520 | 34600 | 0.9228 |
765
+ | 0.3530 | 34700 | 1.2065 |
766
+ | 0.3540 | 34800 | 0.7422 |
767
+ | 0.3550 | 34900 | 0.836 |
768
+ | 0.3560 | 35000 | 0.7612 |
769
+ | 0.3571 | 35100 | 1.0686 |
770
+ | 0.3581 | 35200 | 0.8227 |
771
+ | 0.3591 | 35300 | 0.8035 |
772
+ | 0.3601 | 35400 | 0.8518 |
773
+ | 0.3611 | 35500 | 0.7877 |
774
+ | 0.3621 | 35600 | 0.977 |
775
+ | 0.3632 | 35700 | 0.7444 |
776
+ | 0.3642 | 35800 | 1.0152 |
777
+ | 0.3652 | 35900 | 0.9753 |
778
+ | 0.3662 | 36000 | 0.7451 |
779
+ | 0.3672 | 36100 | 0.9164 |
780
+ | 0.3682 | 36200 | 0.8737 |
781
+ | 0.3693 | 36300 | 0.7609 |
782
+ | 0.3703 | 36400 | 0.9682 |
783
+ | 0.3713 | 36500 | 0.7839 |
784
+ | 0.3723 | 36600 | 0.7669 |
785
+ | 0.3733 | 36700 | 0.7462 |
786
+ | 0.3744 | 36800 | 0.816 |
787
+ | 0.3754 | 36900 | 0.7701 |
788
+ | 0.3764 | 37000 | 0.9624 |
789
+ | 0.3774 | 37100 | 0.7194 |
790
+ | 0.3784 | 37200 | 0.8559 |
791
+ | 0.3794 | 37300 | 1.0938 |
792
+ | 0.3805 | 37400 | 0.7587 |
793
+ | 0.3815 | 37500 | 0.641 |
794
+ | 0.3825 | 37600 | 0.891 |
795
+ | 0.3835 | 37700 | 0.6906 |
796
+ | 0.3845 | 37800 | 1.0998 |
797
+ | 0.3855 | 37900 | 0.7198 |
798
+ | 0.3866 | 38000 | 0.8502 |
799
+ | 0.3876 | 38100 | 0.8793 |
800
+ | 0.3886 | 38200 | 0.6859 |
801
+ | 0.3896 | 38300 | 1.0219 |
802
+ | 0.3906 | 38400 | 0.7076 |
803
+ | 0.3916 | 38500 | 0.6722 |
804
+ | 0.3927 | 38600 | 0.9803 |
805
+ | 0.3937 | 38700 | 0.7202 |
806
+ | 0.3947 | 38800 | 0.9244 |
807
+ | 0.3957 | 38900 | 0.6677 |
808
+ | 0.3967 | 39000 | 0.7115 |
809
+ | 0.3977 | 39100 | 0.8265 |
810
+ | 0.3988 | 39200 | 0.7452 |
811
+ | 0.3998 | 39300 | 0.9035 |
812
+ | 0.4008 | 39400 | 0.8995 |
813
+ | 0.4018 | 39500 | 0.8057 |
814
+ | 0.4028 | 39600 | 0.5763 |
815
+ | 0.4039 | 39700 | 0.8714 |
816
+ | 0.4049 | 39800 | 0.8986 |
817
+ | 0.4059 | 39900 | 0.9301 |
818
+ | 0.4069 | 40000 | 0.6497 |
819
+ | 0.4079 | 40100 | 0.6254 |
820
+ | 0.4089 | 40200 | 0.6554 |
821
+ | 0.4100 | 40300 | 0.6868 |
822
+ | 0.4110 | 40400 | 0.7385 |
823
+ | 0.4120 | 40500 | 0.7142 |
824
+ | 0.4130 | 40600 | 0.6881 |
825
+ | 0.4140 | 40700 | 0.692 |
826
+ | 0.4150 | 40800 | 0.642 |
827
+ | 0.4161 | 40900 | 0.6089 |
828
+ | 0.4171 | 41000 | 0.8139 |
829
+ | 0.4181 | 41100 | 0.8346 |
830
+ | 0.4191 | 41200 | 0.7895 |
831
+ | 0.4201 | 41300 | 0.7008 |
832
+ | 0.4211 | 41400 | 0.8188 |
833
+ | 0.4222 | 41500 | 0.7435 |
834
+ | 0.4232 | 41600 | 0.791 |
835
+ | 0.4242 | 41700 | 0.6331 |
836
+ | 0.4252 | 41800 | 1.0351 |
837
+ | 0.4262 | 41900 | 0.6224 |
838
+ | 0.4273 | 42000 | 0.8503 |
839
+ | 0.4283 | 42100 | 0.6022 |
840
+ | 0.4293 | 42200 | 0.6865 |
841
+ | 0.4303 | 42300 | 0.7772 |
842
+ | 0.4313 | 42400 | 0.8394 |
843
+ | 0.4323 | 42500 | 0.878 |
844
+ | 0.4334 | 42600 | 0.7826 |
845
+ | 0.4344 | 42700 | 0.7188 |
846
+ | 0.4354 | 42800 | 0.8372 |
847
+ | 0.4364 | 42900 | 0.5603 |
848
+ | 0.4374 | 43000 | 0.8899 |
849
+ | 0.4384 | 43100 | 0.7556 |
850
+ | 0.4395 | 43200 | 0.7705 |
851
+ | 0.4405 | 43300 | 0.6577 |
852
+ | 0.4415 | 43400 | 0.7987 |
853
+ | 0.4425 | 43500 | 0.8235 |
854
+ | 0.4435 | 43600 | 0.7176 |
855
+ | 0.4445 | 43700 | 0.9219 |
856
+ | 0.4456 | 43800 | 0.7193 |
857
+ | 0.4466 | 43900 | 0.8563 |
858
+ | 0.4476 | 44000 | 0.821 |
859
+ | 0.4486 | 44100 | 0.7397 |
860
+ | 0.4496 | 44200 | 0.6185 |
861
+ | 0.4506 | 44300 | 0.8103 |
862
+ | 0.4517 | 44400 | 0.7249 |
863
+ | 0.4527 | 44500 | 0.5748 |
864
+ | 0.4537 | 44600 | 0.502 |
865
+ | 0.4547 | 44700 | 0.6905 |
866
+ | 0.4557 | 44800 | 0.5475 |
867
+ | 0.4568 | 44900 | 0.8287 |
868
+ | 0.4578 | 45000 | 0.8498 |
869
+ | 0.4588 | 45100 | 0.7757 |
870
+ | 0.4598 | 45200 | 0.7711 |
871
+ | 0.4608 | 45300 | 0.602 |
872
+ | 0.4618 | 45400 | 0.7462 |
873
+ | 0.4629 | 45500 | 0.8515 |
874
+ | 0.4639 | 45600 | 0.7722 |
875
+ | 0.4649 | 45700 | 0.8844 |
876
+ | 0.4659 | 45800 | 0.5903 |
877
+ | 0.4669 | 45900 | 0.5556 |
878
+ | 0.4679 | 46000 | 0.7143 |
879
+ | 0.4690 | 46100 | 0.7083 |
880
+ | 0.4700 | 46200 | 0.6673 |
881
+ | 0.4710 | 46300 | 0.7972 |
882
+ | 0.4720 | 46400 | 0.6685 |
883
+ | 0.4730 | 46500 | 0.751 |
884
+ | 0.4740 | 46600 | 0.5364 |
885
+ | 0.4751 | 46700 | 0.7858 |
886
+ | 0.4761 | 46800 | 0.7102 |
887
+ | 0.4771 | 46900 | 0.6758 |
888
+ | 0.4781 | 47000 | 0.8075 |
889
+ | 0.4791 | 47100 | 0.785 |
890
+ | 0.4801 | 47200 | 0.602 |
891
+ | 0.4812 | 47300 | 0.619 |
892
+ | 0.4822 | 47400 | 0.8525 |
893
+ | 0.4832 | 47500 | 0.6255 |
894
+ | 0.4842 | 47600 | 0.7516 |
895
+ | 0.4852 | 47700 | 0.6707 |
896
+ | 0.4863 | 47800 | 0.5144 |
897
+ | 0.4873 | 47900 | 0.7654 |
898
+ | 0.4883 | 48000 | 0.9047 |
899
+ | 0.4893 | 48100 | 0.786 |
900
+ | 0.4903 | 48200 | 0.6384 |
901
+ | 0.4913 | 48300 | 0.6442 |
902
+ | 0.4924 | 48400 | 0.7419 |
903
+ | 0.4934 | 48500 | 0.6694 |
904
+ | 0.4944 | 48600 | 0.7353 |
905
+ | 0.4954 | 48700 | 0.7712 |
906
+ | 0.4964 | 48800 | 0.6879 |
907
+ | 0.4974 | 48900 | 0.5942 |
908
+ | 0.4985 | 49000 | 0.678 |
909
+ | 0.4995 | 49100 | 0.6405 |
910
+ | 0.5005 | 49200 | 0.7724 |
911
+ | 0.5015 | 49300 | 0.8365 |
912
+ | 0.5025 | 49400 | 0.7915 |
913
+ | 0.5035 | 49500 | 0.8199 |
914
+ | 0.5046 | 49600 | 0.8333 |
915
+ | 0.5056 | 49700 | 0.8168 |
916
+ | 0.5066 | 49800 | 0.7845 |
917
+ | 0.5076 | 49900 | 0.8433 |
918
+ | 0.5086 | 50000 | 0.6277 |
919
+ | 0.5096 | 50100 | 0.8093 |
920
+ | 0.5107 | 50200 | 0.574 |
921
+ | 0.5117 | 50300 | 0.6589 |
922
+ | 0.5127 | 50400 | 0.7758 |
923
+ | 0.5137 | 50500 | 0.6896 |
924
+ | 0.5147 | 50600 | 0.6508 |
925
+ | 0.5158 | 50700 | 0.6148 |
926
+ | 0.5168 | 50800 | 0.7687 |
927
+ | 0.5178 | 50900 | 0.6126 |
928
+ | 0.5188 | 51000 | 0.7048 |
929
+ | 0.5198 | 51100 | 0.7072 |
930
+ | 0.5208 | 51200 | 0.5995 |
931
+ | 0.5219 | 51300 | 0.5265 |
932
+ | 0.5229 | 51400 | 0.6596 |
933
+ | 0.5239 | 51500 | 0.6224 |
934
+ | 0.5249 | 51600 | 0.7283 |
935
+ | 0.5259 | 51700 | 0.7278 |
936
+ | 0.5269 | 51800 | 0.6278 |
937
+ | 0.5280 | 51900 | 0.8234 |
938
+ | 0.5290 | 52000 | 0.5623 |
939
+ | 0.5300 | 52100 | 0.6815 |
940
+ | 0.5310 | 52200 | 0.671 |
941
+ | 0.5320 | 52300 | 0.6753 |
942
+ | 0.5330 | 52400 | 0.777 |
943
+ | 0.5341 | 52500 | 0.6418 |
944
+ | 0.5351 | 52600 | 0.8762 |
945
+ | 0.5361 | 52700 | 0.6557 |
946
+ | 0.5371 | 52800 | 0.8074 |
947
+ | 0.5381 | 52900 | 0.6798 |
948
+ | 0.5391 | 53000 | 0.7247 |
949
+ | 0.5402 | 53100 | 0.9169 |
950
+ | 0.5412 | 53200 | 0.5862 |
951
+ | 0.5422 | 53300 | 0.7443 |
952
+ | 0.5432 | 53400 | 0.7391 |
953
+ | 0.5442 | 53500 | 0.6815 |
954
+ | 0.5453 | 53600 | 0.6833 |
955
+ | 0.5463 | 53700 | 0.7782 |
956
+ | 0.5473 | 53800 | 0.7014 |
957
+ | 0.5483 | 53900 | 0.555 |
958
+ | 0.5493 | 54000 | 0.579 |
959
+ | 0.5503 | 54100 | 0.5532 |
960
+ | 0.5514 | 54200 | 0.7326 |
961
+ | 0.5524 | 54300 | 0.7446 |
962
+ | 0.5534 | 54400 | 0.6812 |
963
+ | 0.5544 | 54500 | 0.7733 |
964
+ | 0.5554 | 54600 | 0.8537 |
965
+ | 0.5564 | 54700 | 0.7317 |
966
+ | 0.5575 | 54800 | 0.4924 |
967
+ | 0.5585 | 54900 | 0.7506 |
968
+ | 0.5595 | 55000 | 0.7103 |
969
+ | 0.5605 | 55100 | 0.7394 |
970
+ | 0.5615 | 55200 | 0.7605 |
971
+ | 0.5625 | 55300 | 0.4556 |
972
+ | 0.5636 | 55400 | 0.7929 |
973
+ | 0.5646 | 55500 | 0.6873 |
974
+ | 0.5656 | 55600 | 0.6985 |
975
+ | 0.5666 | 55700 | 0.6687 |
976
+ | 0.5676 | 55800 | 0.5939 |
977
+ | 0.5686 | 55900 | 0.7572 |
978
+ | 0.5697 | 56000 | 0.8489 |
979
+ | 0.5707 | 56100 | 0.6354 |
980
+ | 0.5717 | 56200 | 0.85 |
981
+ | 0.5727 | 56300 | 0.8828 |
982
+ | 0.5737 | 56400 | 0.652 |
983
+ | 0.5748 | 56500 | 0.7322 |
984
+ | 0.5758 | 56600 | 0.6399 |
985
+ | 0.5768 | 56700 | 0.6225 |
986
+ | 0.5778 | 56800 | 0.6981 |
987
+ | 0.5788 | 56900 | 0.6672 |
988
+ | 0.5798 | 57000 | 0.6847 |
989
+ | 0.5809 | 57100 | 0.7851 |
990
+ | 0.5819 | 57200 | 0.8353 |
991
+ | 0.5829 | 57300 | 0.7278 |
992
+ | 0.5839 | 57400 | 0.8386 |
993
+ | 0.5849 | 57500 | 0.5678 |
994
+ | 0.5859 | 57600 | 0.6292 |
995
+ | 0.5870 | 57700 | 0.6984 |
996
+ | 0.5880 | 57800 | 0.6169 |
997
+ | 0.5890 | 57900 | 0.7627 |
998
+ | 0.5900 | 58000 | 0.7501 |
999
+ | 0.5910 | 58100 | 0.7363 |
1000
+ | 0.5920 | 58200 | 0.7827 |
1001
+ | 0.5931 | 58300 | 0.6598 |
1002
+ | 0.5941 | 58400 | 0.6824 |
1003
+ | 0.5951 | 58500 | 0.583 |
1004
+ | 0.5961 | 58600 | 0.5993 |
1005
+ | 0.5971 | 58700 | 0.4432 |
1006
+ | 0.5982 | 58800 | 0.9913 |
1007
+ | 0.5992 | 58900 | 0.7253 |
1008
+ | 0.6002 | 59000 | 0.7429 |
1009
+ | 0.6012 | 59100 | 0.6201 |
1010
+ | 0.6022 | 59200 | 0.6567 |
1011
+ | 0.6032 | 59300 | 0.6578 |
1012
+ | 0.6043 | 59400 | 0.7048 |
1013
+ | 0.6053 | 59500 | 0.8529 |
1014
+ | 0.6063 | 59600 | 0.6652 |
1015
+ | 0.6073 | 59700 | 0.7866 |
1016
+ | 0.6083 | 59800 | 0.4627 |
1017
+ | 0.6093 | 59900 | 0.6565 |
1018
+ | 0.6104 | 60000 | 0.6052 |
1019
+ | 0.6114 | 60100 | 0.5639 |
1020
+ | 0.6124 | 60200 | 0.5185 |
1021
+ | 0.6134 | 60300 | 0.5568 |
1022
+ | 0.6144 | 60400 | 0.5924 |
1023
+ | 0.6154 | 60500 | 0.664 |
1024
+ | 0.6165 | 60600 | 0.6261 |
1025
+ | 0.6175 | 60700 | 0.8437 |
1026
+ | 0.6185 | 60800 | 0.654 |
1027
+ | 0.6195 | 60900 | 0.5362 |
1028
+ | 0.6205 | 61000 | 0.6213 |
1029
+ | 0.6215 | 61100 | 0.7202 |
1030
+ | 0.6226 | 61200 | 0.633 |
1031
+ | 0.6236 | 61300 | 0.8508 |
1032
+ | 0.6246 | 61400 | 0.6462 |
1033
+ | 0.6256 | 61500 | 0.63 |
1034
+ | 0.6266 | 61600 | 0.8234 |
1035
+ | 0.6277 | 61700 | 0.5974 |
1036
+ | 0.6287 | 61800 | 0.7921 |
1037
+ | 0.6297 | 61900 | 0.5961 |
1038
+ | 0.6307 | 62000 | 0.614 |
1039
+ | 0.6317 | 62100 | 0.6615 |
1040
+ | 0.6327 | 62200 | 0.6531 |
1041
+ | 0.6338 | 62300 | 0.4864 |
1042
+ | 0.6348 | 62400 | 0.647 |
1043
+ | 0.6358 | 62500 | 0.6113 |
1044
+ | 0.6368 | 62600 | 0.6921 |
1045
+ | 0.6378 | 62700 | 0.5747 |
1046
+ | 0.6388 | 62800 | 0.7385 |
1047
+ | 0.6399 | 62900 | 0.5917 |
1048
+ | 0.6409 | 63000 | 0.5889 |
1049
+ | 0.6419 | 63100 | 0.6054 |
1050
+ | 0.6429 | 63200 | 0.561 |
1051
+ | 0.6439 | 63300 | 0.5997 |
1052
+ | 0.6449 | 63400 | 0.794 |
1053
+ | 0.6460 | 63500 | 0.7496 |
1054
+ | 0.6470 | 63600 | 0.6024 |
1055
+ | 0.6480 | 63700 | 0.5696 |
1056
+ | 0.6490 | 63800 | 0.5421 |
1057
+ | 0.6500 | 63900 | 0.4456 |
1058
+ | 0.6510 | 64000 | 0.6023 |
1059
+ | 0.6521 | 64100 | 0.4959 |
1060
+ | 0.6531 | 64200 | 0.5642 |
1061
+ | 0.6541 | 64300 | 0.6949 |
1062
+ | 0.6551 | 64400 | 0.6484 |
1063
+ | 0.6561 | 64500 | 0.7129 |
1064
+ | 0.6572 | 64600 | 0.6671 |
1065
+ | 0.6582 | 64700 | 0.4386 |
1066
+ | 0.6592 | 64800 | 0.6304 |
1067
+ | 0.6602 | 64900 | 0.7319 |
1068
+ | 0.6612 | 65000 | 0.5852 |
1069
+ | 0.6622 | 65100 | 0.6596 |
1070
+ | 0.6633 | 65200 | 0.5671 |
1071
+ | 0.6643 | 65300 | 0.738 |
1072
+ | 0.6653 | 65400 | 0.6173 |
1073
+ | 0.6663 | 65500 | 0.6302 |
1074
+ | 0.6673 | 65600 | 0.6919 |
1075
+ | 0.6683 | 65700 | 0.8582 |
1076
+ | 0.6694 | 65800 | 0.7258 |
1077
+ | 0.6704 | 65900 | 0.6371 |
1078
+ | 0.6714 | 66000 | 0.6237 |
1079
+ | 0.6724 | 66100 | 0.5698 |
1080
+ | 0.6734 | 66200 | 0.6232 |
1081
+ | 0.6744 | 66300 | 0.5277 |
1082
+ | 0.6755 | 66400 | 0.7142 |
1083
+ | 0.6765 | 66500 | 0.3874 |
1084
+ | 0.6775 | 66600 | 0.7239 |
1085
+ | 0.6785 | 66700 | 0.649 |
1086
+ | 0.6795 | 66800 | 0.5919 |
1087
+ | 0.6805 | 66900 | 0.611 |
1088
+ | 0.6816 | 67000 | 0.6857 |
1089
+ | 0.6826 | 67100 | 0.7571 |
1090
+ | 0.6836 | 67200 | 0.6295 |
1091
+ | 0.6846 | 67300 | 0.6233 |
1092
+ | 0.6856 | 67400 | 0.4612 |
1093
+ | 0.6867 | 67500 | 0.6029 |
1094
+ | 0.6877 | 67600 | 0.8331 |
1095
+ | 0.6887 | 67700 | 0.5839 |
1096
+ | 0.6897 | 67800 | 0.7239 |
1097
+ | 0.6907 | 67900 | 0.7111 |
1098
+ | 0.6917 | 68000 | 0.4719 |
1099
+ | 0.6928 | 68100 | 0.6431 |
1100
+ | 0.6938 | 68200 | 0.5993 |
1101
+ | 0.6948 | 68300 | 0.5523 |
1102
+ | 0.6958 | 68400 | 0.7109 |
1103
+ | 0.6968 | 68500 | 0.7398 |
1104
+ | 0.6978 | 68600 | 0.5519 |
1105
+ | 0.6989 | 68700 | 0.6474 |
1106
+ | 0.6999 | 68800 | 0.7263 |
1107
+ | 0.7009 | 68900 | 0.5115 |
1108
+ | 0.7019 | 69000 | 0.4325 |
1109
+ | 0.7029 | 69100 | 0.5022 |
1110
+ | 0.7039 | 69200 | 0.5915 |
1111
+ | 0.7050 | 69300 | 0.3593 |
1112
+ | 0.7060 | 69400 | 0.6064 |
1113
+ | 0.7070 | 69500 | 0.9334 |
1114
+ | 0.7080 | 69600 | 0.5801 |
1115
+ | 0.7090 | 69700 | 0.7087 |
1116
+ | 0.7100 | 69800 | 0.5999 |
1117
+ | 0.7111 | 69900 | 0.6629 |
1118
+ | 0.7121 | 70000 | 0.5959 |
1119
+ | 0.7131 | 70100 | 0.7499 |
1120
+ | 0.7141 | 70200 | 0.5318 |
1121
+ | 0.7151 | 70300 | 0.5121 |
1122
+ | 0.7162 | 70400 | 0.9055 |
1123
+ | 0.7172 | 70500 | 0.4307 |
1124
+ | 0.7182 | 70600 | 0.4902 |
1125
+ | 0.7192 | 70700 | 0.5367 |
1126
+ | 0.7202 | 70800 | 0.4899 |
1127
+ | 0.7212 | 70900 | 0.6768 |
1128
+ | 0.7223 | 71000 | 0.7288 |
1129
+ | 0.7233 | 71100 | 0.5998 |
1130
+ | 0.7243 | 71200 | 0.7799 |
1131
+ | 0.7253 | 71300 | 0.5984 |
1132
+ | 0.7263 | 71400 | 0.7752 |
1133
+ | 0.7273 | 71500 | 0.4164 |
1134
+ | 0.7284 | 71600 | 0.71 |
1135
+ | 0.7294 | 71700 | 0.5335 |
1136
+ | 0.7304 | 71800 | 0.5932 |
1137
+ | 0.7314 | 71900 | 0.6342 |
1138
+ | 0.7324 | 72000 | 0.5675 |
1139
+ | 0.7334 | 72100 | 0.7243 |
1140
+ | 0.7345 | 72200 | 0.7112 |
1141
+ | 0.7355 | 72300 | 0.6712 |
1142
+ | 0.7365 | 72400 | 0.6164 |
1143
+ | 0.7375 | 72500 | 0.5798 |
1144
+ | 0.7385 | 72600 | 0.5249 |
1145
+ | 0.7396 | 72700 | 0.4702 |
1146
+ | 0.7406 | 72800 | 0.4924 |
1147
+ | 0.7416 | 72900 | 0.598 |
1148
+ | 0.7426 | 73000 | 0.6151 |
1149
+ | 0.7436 | 73100 | 0.7369 |
1150
+ | 0.7446 | 73200 | 0.5661 |
1151
+ | 0.7457 | 73300 | 0.8368 |
1152
+ | 0.7467 | 73400 | 0.604 |
1153
+ | 0.7477 | 73500 | 0.5657 |
1154
+ | 0.7487 | 73600 | 0.4921 |
1155
+ | 0.7497 | 73700 | 0.5238 |
1156
+ | 0.7507 | 73800 | 0.6692 |
1157
+ | 0.7518 | 73900 | 0.6181 |
1158
+ | 0.7528 | 74000 | 0.6532 |
1159
+ | 0.7538 | 74100 | 0.5932 |
1160
+ | 0.7548 | 74200 | 0.6546 |
1161
+ | 0.7558 | 74300 | 0.7575 |
1162
+ | 0.7568 | 74400 | 0.6888 |
1163
+ | 0.7579 | 74500 | 0.6133 |
1164
+ | 0.7589 | 74600 | 0.6941 |
1165
+ | 0.7599 | 74700 | 0.6219 |
1166
+ | 0.7609 | 74800 | 0.6053 |
1167
+ | 0.7619 | 74900 | 0.5401 |
1168
+ | 0.7629 | 75000 | 0.6957 |
1169
+ | 0.7640 | 75100 | 0.7152 |
1170
+ | 0.7650 | 75200 | 0.5549 |
1171
+ | 0.7660 | 75300 | 0.7595 |
1172
+ | 0.7670 | 75400 | 0.6008 |
1173
+ | 0.7680 | 75500 | 0.6865 |
1174
+ | 0.7691 | 75600 | 0.6998 |
1175
+ | 0.7701 | 75700 | 0.5809 |
1176
+ | 0.7711 | 75800 | 0.6945 |
1177
+ | 0.7721 | 75900 | 0.5277 |
1178
+ | 0.7731 | 76000 | 0.4838 |
1179
+ | 0.7741 | 76100 | 0.6694 |
1180
+ | 0.7752 | 76200 | 0.7267 |
1181
+ | 0.7762 | 76300 | 0.5172 |
1182
+ | 0.7772 | 76400 | 0.6081 |
1183
+ | 0.7782 | 76500 | 0.5904 |
1184
+ | 0.7792 | 76600 | 0.7423 |
1185
+ | 0.7802 | 76700 | 0.5854 |
1186
+ | 0.7813 | 76800 | 0.5187 |
1187
+ | 0.7823 | 76900 | 0.5163 |
1188
+ | 0.7833 | 77000 | 0.59 |
1189
+ | 0.7843 | 77100 | 0.6303 |
1190
+ | 0.7853 | 77200 | 0.7633 |
1191
+ | 0.7863 | 77300 | 0.3922 |
1192
+ | 0.7874 | 77400 | 0.5958 |
1193
+ | 0.7884 | 77500 | 0.5794 |
1194
+ | 0.7894 | 77600 | 0.7614 |
1195
+ | 0.7904 | 77700 | 0.6195 |
1196
+ | 0.7914 | 77800 | 0.6392 |
1197
+ | 0.7924 | 77900 | 0.5152 |
1198
+ | 0.7935 | 78000 | 0.6551 |
1199
+ | 0.7945 | 78100 | 0.6728 |
1200
+ | 0.7955 | 78200 | 0.4994 |
1201
+ | 0.7965 | 78300 | 0.4807 |
1202
+ | 0.7975 | 78400 | 0.5193 |
1203
+ | 0.7986 | 78500 | 0.6285 |
1204
+ | 0.7996 | 78600 | 0.4851 |
1205
+ | 0.8006 | 78700 | 0.5756 |
1206
+ | 0.8016 | 78800 | 0.5533 |
1207
+ | 0.8026 | 78900 | 0.705 |
1208
+ | 0.8036 | 79000 | 0.5025 |
1209
+ | 0.8047 | 79100 | 0.463 |
1210
+ | 0.8057 | 79200 | 0.6687 |
1211
+ | 0.8067 | 79300 | 0.5076 |
1212
+ | 0.8077 | 79400 | 0.6565 |
1213
+ | 0.8087 | 79500 | 0.6617 |
1214
+ | 0.8097 | 79600 | 0.4685 |
1215
+ | 0.8108 | 79700 | 0.6223 |
1216
+ | 0.8118 | 79800 | 0.6922 |
1217
+ | 0.8128 | 79900 | 0.7718 |
1218
+ | 0.8138 | 80000 | 0.5657 |
1219
+ | 0.8148 | 80100 | 0.543 |
1220
+ | 0.8158 | 80200 | 0.7921 |
1221
+ | 0.8169 | 80300 | 0.6572 |
1222
+ | 0.8179 | 80400 | 0.7411 |
1223
+ | 0.8189 | 80500 | 0.5726 |
1224
+ | 0.8199 | 80600 | 0.6093 |
1225
+ | 0.8209 | 80700 | 0.5758 |
1226
+ | 0.8219 | 80800 | 0.518 |
1227
+ | 0.8230 | 80900 | 0.694 |
1228
+ | 0.8240 | 81000 | 0.7515 |
1229
+ | 0.8250 | 81100 | 0.6002 |
1230
+ | 0.8260 | 81200 | 0.4633 |
1231
+ | 0.8270 | 81300 | 0.6218 |
1232
+ | 0.8281 | 81400 | 0.5532 |
1233
+ | 0.8291 | 81500 | 0.4466 |
1234
+ | 0.8301 | 81600 | 0.5202 |
1235
+ | 0.8311 | 81700 | 0.6743 |
1236
+ | 0.8321 | 81800 | 0.5741 |
1237
+ | 0.8331 | 81900 | 0.6996 |
1238
+ | 0.8342 | 82000 | 0.7846 |
1239
+ | 0.8352 | 82100 | 0.6618 |
1240
+ | 0.8362 | 82200 | 0.6033 |
1241
+ | 0.8372 | 82300 | 0.4995 |
1242
+ | 0.8382 | 82400 | 0.5191 |
1243
+ | 0.8392 | 82500 | 0.6053 |
1244
+ | 0.8403 | 82600 | 0.525 |
1245
+ | 0.8413 | 82700 | 0.6632 |
1246
+ | 0.8423 | 82800 | 0.4557 |
1247
+ | 0.8433 | 82900 | 0.4545 |
1248
+ | 0.8443 | 83000 | 0.582 |
1249
+ | 0.8453 | 83100 | 0.4116 |
1250
+ | 0.8464 | 83200 | 0.7503 |
1251
+ | 0.8474 | 83300 | 0.8223 |
1252
+ | 0.8484 | 83400 | 0.6802 |
1253
+ | 0.8494 | 83500 | 0.4549 |
1254
+ | 0.8504 | 83600 | 0.6192 |
1255
+ | 0.8514 | 83700 | 0.5877 |
1256
+ | 0.8525 | 83800 | 0.6831 |
1257
+ | 0.8535 | 83900 | 0.6177 |
1258
+ | 0.8545 | 84000 | 0.5918 |
1259
+ | 0.8555 | 84100 | 0.6674 |
1260
+ | 0.8565 | 84200 | 0.518 |
1261
+ | 0.8576 | 84300 | 0.6378 |
1262
+ | 0.8586 | 84400 | 0.6648 |
1263
+ | 0.8596 | 84500 | 0.6655 |
1264
+ | 0.8606 | 84600 | 0.5005 |
1265
+ | 0.8616 | 84700 | 0.5276 |
1266
+ | 0.8626 | 84800 | 0.6636 |
1267
+ | 0.8637 | 84900 | 0.6573 |
1268
+ | 0.8647 | 85000 | 0.6104 |
1269
+ | 0.8657 | 85100 | 0.606 |
1270
+ | 0.8667 | 85200 | 0.537 |
1271
+ | 0.8677 | 85300 | 0.5331 |
1272
+ | 0.8687 | 85400 | 0.6714 |
1273
+ | 0.8698 | 85500 | 0.5361 |
1274
+ | 0.8708 | 85600 | 0.6583 |
1275
+ | 0.8718 | 85700 | 0.6888 |
1276
+ | 0.8728 | 85800 | 0.5044 |
1277
+ | 0.8738 | 85900 | 0.5655 |
1278
+ | 0.8748 | 86000 | 0.4413 |
1279
+ | 0.8759 | 86100 | 0.5836 |
1280
+ | 0.8769 | 86200 | 0.9184 |
1281
+ | 0.8779 | 86300 | 0.4408 |
1282
+ | 0.8789 | 86400 | 0.4715 |
1283
+ | 0.8799 | 86500 | 0.6001 |
1284
+ | 0.8809 | 86600 | 0.7137 |
1285
+ | 0.8820 | 86700 | 0.4078 |
1286
+ | 0.8830 | 86800 | 0.5395 |
1287
+ | 0.8840 | 86900 | 0.6508 |
1288
+ | 0.8850 | 87000 | 0.5879 |
1289
+ | 0.8860 | 87100 | 0.747 |
1290
+ | 0.8871 | 87200 | 0.4727 |
1291
+ | 0.8881 | 87300 | 0.5537 |
1292
+ | 0.8891 | 87400 | 0.6939 |
1293
+ | 0.8901 | 87500 | 0.612 |
1294
+ | 0.8911 | 87600 | 0.6922 |
1295
+ | 0.8921 | 87700 | 0.5248 |
1296
+ | 0.8932 | 87800 | 0.7751 |
1297
+ | 0.8942 | 87900 | 0.5789 |
1298
+ | 0.8952 | 88000 | 0.548 |
1299
+ | 0.8962 | 88100 | 0.5582 |
1300
+ | 0.8972 | 88200 | 0.5283 |
1301
+ | 0.8982 | 88300 | 0.67 |
1302
+ | 0.8993 | 88400 | 0.4805 |
1303
+ | 0.9003 | 88500 | 0.5471 |
1304
+ | 0.9013 | 88600 | 0.6269 |
1305
+ | 0.9023 | 88700 | 0.5893 |
1306
+ | 0.9033 | 88800 | 0.6513 |
1307
+ | 0.9043 | 88900 | 0.3424 |
1308
+ | 0.9054 | 89000 | 0.521 |
1309
+ | 0.9064 | 89100 | 0.7 |
1310
+ | 0.9074 | 89200 | 0.4389 |
1311
+ | 0.9084 | 89300 | 0.7586 |
1312
+ | 0.9094 | 89400 | 0.6371 |
1313
+ | 0.9105 | 89500 | 0.4141 |
1314
+ | 0.9115 | 89600 | 0.6428 |
1315
+ | 0.9125 | 89700 | 0.5555 |
1316
+ | 0.9135 | 89800 | 0.5973 |
1317
+ | 0.9145 | 89900 | 0.4516 |
1318
+ | 0.9155 | 90000 | 0.5601 |
1319
+ | 0.9166 | 90100 | 0.3904 |
1320
+ | 0.9176 | 90200 | 0.4576 |
1321
+ | 0.9186 | 90300 | 0.6065 |
1322
+ | 0.9196 | 90400 | 0.448 |
1323
+ | 0.9206 | 90500 | 0.5387 |
1324
+ | 0.9216 | 90600 | 0.7406 |
1325
+ | 0.9227 | 90700 | 0.5682 |
1326
+ | 0.9237 | 90800 | 0.6075 |
1327
+ | 0.9247 | 90900 | 0.5166 |
1328
+ | 0.9257 | 91000 | 0.6627 |
1329
+ | 0.9267 | 91100 | 0.6125 |
1330
+ | 0.9277 | 91200 | 0.6151 |
1331
+ | 0.9288 | 91300 | 0.376 |
1332
+ | 0.9298 | 91400 | 0.7488 |
1333
+ | 0.9308 | 91500 | 0.3872 |
1334
+ | 0.9318 | 91600 | 0.622 |
1335
+ | 0.9328 | 91700 | 0.6095 |
1336
+ | 0.9338 | 91800 | 0.4772 |
1337
+ | 0.9349 | 91900 | 0.4708 |
1338
+ | 0.9359 | 92000 | 0.5463 |
1339
+ | 0.9369 | 92100 | 0.7436 |
1340
+ | 0.9379 | 92200 | 0.698 |
1341
+ | 0.9389 | 92300 | 0.3119 |
1342
+ | 0.9400 | 92400 | 0.4237 |
1343
+ | 0.9410 | 92500 | 0.5579 |
1344
+ | 0.9420 | 92600 | 0.6101 |
1345
+ | 0.9430 | 92700 | 0.6106 |
1346
+ | 0.9440 | 92800 | 0.614 |
1347
+ | 0.9450 | 92900 | 0.6228 |
1348
+ | 0.9461 | 93000 | 0.5155 |
1349
+ | 0.9471 | 93100 | 0.6098 |
1350
+ | 0.9481 | 93200 | 0.6685 |
1351
+ | 0.9491 | 93300 | 0.3962 |
1352
+ | 0.9501 | 93400 | 0.5151 |
1353
+ | 0.9511 | 93500 | 0.4819 |
1354
+ | 0.9522 | 93600 | 0.5941 |
1355
+ | 0.9532 | 93700 | 0.5932 |
1356
+ | 0.9542 | 93800 | 0.6307 |
1357
+ | 0.9552 | 93900 | 0.6368 |
1358
+ | 0.9562 | 94000 | 0.6799 |
1359
+ | 0.9572 | 94100 | 0.5089 |
1360
+ | 0.9583 | 94200 | 0.5623 |
1361
+ | 0.9593 | 94300 | 0.4027 |
1362
+ | 0.9603 | 94400 | 0.6181 |
1363
+ | 0.9613 | 94500 | 0.5755 |
1364
+ | 0.9623 | 94600 | 0.5631 |
1365
+ | 0.9633 | 94700 | 0.4376 |
1366
+ | 0.9644 | 94800 | 0.429 |
1367
+ | 0.9654 | 94900 | 0.4997 |
1368
+ | 0.9664 | 95000 | 0.5789 |
1369
+ | 0.9674 | 95100 | 0.5636 |
1370
+ | 0.9684 | 95200 | 0.6638 |
1371
+ | 0.9695 | 95300 | 0.8632 |
1372
+ | 0.9705 | 95400 | 0.5708 |
1373
+ | 0.9715 | 95500 | 0.5817 |
1374
+ | 0.9725 | 95600 | 0.5245 |
1375
+ | 0.9735 | 95700 | 0.5836 |
1376
+ | 0.9745 | 95800 | 0.5696 |
1377
+ | 0.9756 | 95900 | 0.5988 |
1378
+ | 0.9766 | 96000 | 0.5597 |
1379
+ | 0.9776 | 96100 | 0.5968 |
1380
+ | 0.9786 | 96200 | 0.7544 |
1381
+ | 0.9796 | 96300 | 0.6484 |
1382
+ | 0.9806 | 96400 | 0.3758 |
1383
+ | 0.9817 | 96500 | 0.6732 |
1384
+ | 0.9827 | 96600 | 0.5634 |
1385
+ | 0.9837 | 96700 | 0.4491 |
1386
+ | 0.9847 | 96800 | 0.349 |
1387
+ | 0.9857 | 96900 | 0.6564 |
1388
+ | 0.9867 | 97000 | 0.5724 |
1389
+ | 0.9878 | 97100 | 0.6022 |
1390
+ | 0.9888 | 97200 | 0.3853 |
1391
+ | 0.9898 | 97300 | 0.6601 |
1392
+ | 0.9908 | 97400 | 0.6511 |
1393
+ | 0.9918 | 97500 | 0.4784 |
1394
+ | 0.9928 | 97600 | 0.5943 |
1395
+ | 0.9939 | 97700 | 0.8411 |
1396
+ | 0.9949 | 97800 | 0.5165 |
1397
+ | 0.9959 | 97900 | 0.4567 |
1398
+ | 0.9969 | 98000 | 0.492 |
1399
+ | 0.9979 | 98100 | 0.5838 |
1400
+ | 0.9990 | 98200 | 0.5109 |
1401
+ | 1.0000 | 98300 | 0.4494 |
1402
+
1403
+ </details>
1404
+
1405
+ ### Framework Versions
1406
+ - Python: 3.12.3
1407
+ - Sentence Transformers: 5.1.0
1408
+ - Transformers: 4.55.4
1409
+ - PyTorch: 2.6.0+cu124
1410
+ - Accelerate: 1.10.1
1411
+ - Datasets: 4.0.0
1412
+ - Tokenizers: 0.21.4
1413
+
1414
+ ## Citation
1415
+
1416
+ ### BibTeX
1417
+
1418
+ #### Sentence Transformers
1419
+ ```bibtex
1420
+ @inproceedings{reimers-2019-sentence-bert,
1421
+ title = "Sentence-BERT: Sentence Embeddings using Siamese BERT-Networks",
1422
+ author = "Reimers, Nils and Gurevych, Iryna",
1423
+ booktitle = "Proceedings of the 2019 Conference on Empirical Methods in Natural Language Processing",
1424
+ month = "11",
1425
+ year = "2019",
1426
+ publisher = "Association for Computational Linguistics",
1427
+ url = "https://arxiv.org/abs/1908.10084",
1428
+ }
1429
+ ```
1430
+
1431
+ #### CoSENTLoss
1432
+ ```bibtex
1433
+ @online{kexuefm-8847,
1434
+ title={CoSENT: A more efficient sentence vector scheme than Sentence-BERT},
1435
+ author={Su Jianlin},
1436
+ year={2022},
1437
+ month={Jan},
1438
+ url={https://kexue.fm/archives/8847},
1439
+ }
1440
+ ```
1441
+
1442
+ <!--
1443
+ ## Glossary
1444
+
1445
+ *Clearly define terms in order to be accessible across audiences.*
1446
+ -->
1447
+
1448
+ <!--
1449
+ ## Model Card Authors
1450
+
1451
+ *Lists the people who create the model card, providing recognition and accountability for the detailed work that goes into its construction.*
1452
+ -->
1453
+
1454
+ <!--
1455
+ ## Model Card Contact
1456
+
1457
+ *Provides a way for people who have updates to the Model Card, suggestions, or questions, to contact the Model Card authors.*
1458
+ -->
config.json ADDED
@@ -0,0 +1,25 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "architectures": [
3
+ "BertModel"
4
+ ],
5
+ "attention_probs_dropout_prob": 0.1,
6
+ "classifier_dropout": null,
7
+ "gradient_checkpointing": false,
8
+ "hidden_act": "gelu",
9
+ "hidden_dropout_prob": 0.1,
10
+ "hidden_size": 384,
11
+ "initializer_range": 0.02,
12
+ "intermediate_size": 1536,
13
+ "layer_norm_eps": 1e-12,
14
+ "max_position_embeddings": 512,
15
+ "model_type": "bert",
16
+ "num_attention_heads": 12,
17
+ "num_hidden_layers": 6,
18
+ "pad_token_id": 0,
19
+ "position_embedding_type": "absolute",
20
+ "torch_dtype": "float32",
21
+ "transformers_version": "4.55.4",
22
+ "type_vocab_size": 2,
23
+ "use_cache": true,
24
+ "vocab_size": 30522
25
+ }
config_sentence_transformers.json ADDED
@@ -0,0 +1,14 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "__version__": {
3
+ "sentence_transformers": "5.1.0",
4
+ "transformers": "4.55.4",
5
+ "pytorch": "2.6.0+cu124"
6
+ },
7
+ "model_type": "SentenceTransformer",
8
+ "prompts": {
9
+ "query": "",
10
+ "document": ""
11
+ },
12
+ "default_prompt_name": null,
13
+ "similarity_fn_name": "cosine"
14
+ }
model.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:02d05b5b2b940b0e03bafb0ff61c7589a51f2c84c751811834089ead4cc3b960
3
+ size 90864192
modules.json ADDED
@@ -0,0 +1,20 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ [
2
+ {
3
+ "idx": 0,
4
+ "name": "0",
5
+ "path": "",
6
+ "type": "sentence_transformers.models.Transformer"
7
+ },
8
+ {
9
+ "idx": 1,
10
+ "name": "1",
11
+ "path": "1_Pooling",
12
+ "type": "sentence_transformers.models.Pooling"
13
+ },
14
+ {
15
+ "idx": 2,
16
+ "name": "2",
17
+ "path": "2_Normalize",
18
+ "type": "sentence_transformers.models.Normalize"
19
+ }
20
+ ]
sentence_bert_config.json ADDED
@@ -0,0 +1,4 @@
 
 
 
 
 
1
+ {
2
+ "max_seq_length": 256,
3
+ "do_lower_case": false
4
+ }
special_tokens_map.json ADDED
@@ -0,0 +1,37 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "cls_token": {
3
+ "content": "[CLS]",
4
+ "lstrip": false,
5
+ "normalized": false,
6
+ "rstrip": false,
7
+ "single_word": false
8
+ },
9
+ "mask_token": {
10
+ "content": "[MASK]",
11
+ "lstrip": false,
12
+ "normalized": false,
13
+ "rstrip": false,
14
+ "single_word": false
15
+ },
16
+ "pad_token": {
17
+ "content": "[PAD]",
18
+ "lstrip": false,
19
+ "normalized": false,
20
+ "rstrip": false,
21
+ "single_word": false
22
+ },
23
+ "sep_token": {
24
+ "content": "[SEP]",
25
+ "lstrip": false,
26
+ "normalized": false,
27
+ "rstrip": false,
28
+ "single_word": false
29
+ },
30
+ "unk_token": {
31
+ "content": "[UNK]",
32
+ "lstrip": false,
33
+ "normalized": false,
34
+ "rstrip": false,
35
+ "single_word": false
36
+ }
37
+ }
tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
 
tokenizer_config.json ADDED
@@ -0,0 +1,65 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "added_tokens_decoder": {
3
+ "0": {
4
+ "content": "[PAD]",
5
+ "lstrip": false,
6
+ "normalized": false,
7
+ "rstrip": false,
8
+ "single_word": false,
9
+ "special": true
10
+ },
11
+ "100": {
12
+ "content": "[UNK]",
13
+ "lstrip": false,
14
+ "normalized": false,
15
+ "rstrip": false,
16
+ "single_word": false,
17
+ "special": true
18
+ },
19
+ "101": {
20
+ "content": "[CLS]",
21
+ "lstrip": false,
22
+ "normalized": false,
23
+ "rstrip": false,
24
+ "single_word": false,
25
+ "special": true
26
+ },
27
+ "102": {
28
+ "content": "[SEP]",
29
+ "lstrip": false,
30
+ "normalized": false,
31
+ "rstrip": false,
32
+ "single_word": false,
33
+ "special": true
34
+ },
35
+ "103": {
36
+ "content": "[MASK]",
37
+ "lstrip": false,
38
+ "normalized": false,
39
+ "rstrip": false,
40
+ "single_word": false,
41
+ "special": true
42
+ }
43
+ },
44
+ "clean_up_tokenization_spaces": false,
45
+ "cls_token": "[CLS]",
46
+ "do_basic_tokenize": true,
47
+ "do_lower_case": true,
48
+ "extra_special_tokens": {},
49
+ "mask_token": "[MASK]",
50
+ "max_length": 128,
51
+ "model_max_length": 256,
52
+ "never_split": null,
53
+ "pad_to_multiple_of": null,
54
+ "pad_token": "[PAD]",
55
+ "pad_token_type_id": 0,
56
+ "padding_side": "right",
57
+ "sep_token": "[SEP]",
58
+ "stride": 0,
59
+ "strip_accents": null,
60
+ "tokenize_chinese_chars": true,
61
+ "tokenizer_class": "BertTokenizer",
62
+ "truncation_side": "right",
63
+ "truncation_strategy": "longest_first",
64
+ "unk_token": "[UNK]"
65
+ }
vocab.txt ADDED
The diff for this file is too large to render. See raw diff