Training in progress, epoch 1

Files changed:
- eval/triplet_evaluation_results.csv (+2 -7)
- model.safetensors (+1 -1)
- tokenizer.json (+1 -1)
- tokenizer_config.json (+1 -8)
- training_args.bin (+1 -1)
eval/triplet_evaluation_results.csv CHANGED
@@ -1,8 +1,3 @@
 epoch,steps,accuracy_cosine
-0.4231908590774439,1000,0.
-0.8463817181548878,2000,0.
-1.2693446088794926,3000,0.9767588376998901
-1.6921775898520086,4000,0.9795982837677002
-2.1150105708245244,5000,0.9786518216133118
-2.53784355179704,6000,0.9790724515914917
-2.960676532769556,7000,0.979282796382904
+0.4231908590774439,1000,0.955726146697998
+0.8463817181548878,2000,0.9670838117599487
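These accuracy_cosine rows are the kind written out by sentence-transformers' TripletEvaluator during training: one row per evaluation step, where accuracy_cosine is the fraction of triplets whose anchor is closer (by cosine similarity) to the positive than to the negative. A minimal sketch of how such a CSV is produced, assuming sentence-transformers v3; the model id and triplet texts are illustrative placeholders, not taken from this repo:

import os
from sentence_transformers import SentenceTransformer
from sentence_transformers.evaluation import TripletEvaluator

# Placeholder model; in practice this would be the checkpoint being trained.
model = SentenceTransformer("sentence-transformers/all-MiniLM-L6-v2")

# Placeholder triplets (anchor, positive, negative).
evaluator = TripletEvaluator(
    anchors=["how do I reset my password?"],
    positives=["steps to recover a forgotten password"],
    negatives=["tomorrow's weather forecast"],
)

# Each call appends an "epoch,steps,accuracy_cosine" row to
# triplet_evaluation_results.csv under output_path.
os.makedirs("eval", exist_ok=True)
evaluator(model, output_path="eval", epoch=0, steps=1000)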
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:ef37ee5f36232d507045bb9c1c0ac1259ddd6f4aef5331faf8e4cf01d0389b50
 size 90864192
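model.safetensors (and training_args.bin below) are stored as Git LFS pointers: the repository tracks only the blob's sha256 and byte size, so this one-line oid change means the entire 90864192-byte weights file was replaced. A sketch of how such a pointer is derived from a local file; the path is a placeholder:

import hashlib
import os

path = "model.safetensors"  # placeholder local path

# Git LFS identifies a blob by the sha256 of its full contents.
digest = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        digest.update(chunk)

# These three lines are exactly what the pointer file stores.
print("version https://git-lfs.github.com/spec/v1")
print(f"oid sha256:{digest.hexdigest()}")
print(f"size {os.path.getsize(path)}")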
tokenizer.json CHANGED
@@ -2,7 +2,7 @@
   "version": "1.0",
   "truncation": {
     "direction": "Right",
-    "max_length":
+    "max_length": 512,
     "strategy": "LongestFirst",
     "stride": 0
   },
tokenizer_config.json CHANGED
@@ -47,19 +47,12 @@
   "do_lower_case": true,
   "extra_special_tokens": {},
   "mask_token": "[MASK]",
-  "
-  "model_max_length": 256,
+  "model_max_length": 512,
   "never_split": null,
-  "pad_to_multiple_of": null,
   "pad_token": "[PAD]",
-  "pad_token_type_id": 0,
-  "padding_side": "right",
   "sep_token": "[SEP]",
-  "stride": 0,
   "strip_accents": null,
   "tokenize_chinese_chars": true,
   "tokenizer_class": "BertTokenizer",
-  "truncation_side": "right",
-  "truncation_strategy": "longest_first",
   "unk_token": "[UNK]"
 }
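Taken together, the two tokenizer edits move the default truncation length from 256 to 512 tokens; the other dropped keys (pad_to_multiple_of, padding_side, stride, truncation_side, truncation_strategy, pad_token_type_id) are per-call tokenization arguments that this save no longer persists in the config. A quick check after pulling this commit; the repo id is a placeholder:

from transformers import AutoTokenizer

# Placeholder repo id; substitute this model's actual Hub id.
tok = AutoTokenizer.from_pretrained("user/model")

print(tok.model_max_length)  # 512 after this commit

# With truncation enabled, encodings are now capped at 512 tokens
# instead of the previous 256.
ids = tok("long input " * 1000, truncation=True)["input_ids"]
print(len(ids))  # 512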
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:205941f36c3b4c9d679bfe2bc0c478b9f96e84c22521706b4a131ca81a09243f
 size 5752
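training_args.bin is another LFS pointer; the underlying file is the pickled TrainingArguments object that the Hugging Face Trainer saves next to each checkpoint, and its size (5752 bytes) is unchanged even though the contents differ. A sketch for inspecting it locally, assuming the repo was trained with the Trainer API; recent PyTorch defaults to weights_only loading, which has to be disabled to unpickle arbitrary objects, so only do this for files you trust:

import torch

# Unpickles a transformers.TrainingArguments instance
# (transformers must be installed for the class to resolve).
args = torch.load("training_args.bin", weights_only=False)

print(args.num_train_epochs)
print(args.per_device_train_batch_size)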