datnguyentien204 committed on
Commit
fe9d57d
·
verified ·
1 Parent(s): aa4defa

Upload config.json

Browse files
Files changed (1) hide show
  1. config.json +2 -2
config.json CHANGED
@@ -15,7 +15,7 @@
15
  "attention_probs_dropout_prob": 0.0,
16
  "bad_words_ids": null,
17
  "begin_suppress_tokens": null,
18
- "bos_token_id": 64001,
19
  "chunk_size_feed_forward": 0,
20
  "cross_attention_hidden_size": null,
21
  "decoder_start_token_id": null,
@@ -84,7 +84,7 @@
84
  "typical_p": 1.0,
85
  "use_bfloat16": false,
86
  "use_cache": true,
87
- "vocab_size": 64003
88
  },
89
  "torch_dtype": "float32",
90
  "transformers_version": null,
 
15
  "attention_probs_dropout_prob": 0.0,
16
  "bad_words_ids": null,
17
  "begin_suppress_tokens": null,
18
+ "bos_token_id": 119547,
19
  "chunk_size_feed_forward": 0,
20
  "cross_attention_hidden_size": null,
21
  "decoder_start_token_id": null,
 
84
  "typical_p": 1.0,
85
  "use_bfloat16": false,
86
  "use_cache": true,
87
+ "vocab_size": 119549
88
  },
89
  "torch_dtype": "float32",
90
  "transformers_version": null,