Fix: set pad_token_id for correct batching/attention masking
Browse files — config.json (+3 −2)
config.json
CHANGED
@@ -26,5 +26,6 @@
   "transformers_version": "4.51.0",
   "use_cache": true,
   "use_sliding_window": false,
-  "vocab_size": 151936
-}
+  "vocab_size": 151936,
+  "pad_token_id": 151643
+}