YoheiOhto committed on
Commit
4d40010
·
verified ·
1 Parent(s): 80aa750

Update config.json

Browse files
Files changed (1) hide show
  1. config.json +6 -6
config.json CHANGED
@@ -4,18 +4,18 @@
4
  ],
5
  "attention_bias": false,
6
  "attention_dropout": 0.0,
7
- "bos_token_id": 50281,
8
  "classifier_activation": "gelu",
9
  "classifier_bias": false,
10
  "classifier_dropout": 0.0,
11
  "classifier_pooling": "mean",
12
- "cls_token_id": 50281,
13
  "decoder_bias": true,
14
  "deterministic_flash_attn": false,
15
  "embedding_dropout": 0.0,
16
- "eos_token_id": 50282,
17
  "global_attn_every_n_layers": 3,
18
- "global_rope_theta": 160000.0,
19
  "gradient_checkpointing": false,
20
  "hidden_activation": "gelu",
21
  "hidden_size": 768,
@@ -33,10 +33,10 @@
33
  "norm_eps": 1e-05,
34
  "num_attention_heads": 12,
35
  "num_hidden_layers": 22,
36
- "pad_token_id": 50283,
37
  "position_embedding_type": "absolute",
38
  "repad_logits_with_grad": false,
39
- "sep_token_id": 50282,
40
  "sparse_pred_ignore_index": -100,
41
  "sparse_prediction": false,
42
  "torch_dtype": "float32",
 
4
  ],
5
  "attention_bias": false,
6
  "attention_dropout": 0.0,
7
+ "bos_token_id": 101,
8
  "classifier_activation": "gelu",
9
  "classifier_bias": false,
10
  "classifier_dropout": 0.0,
11
  "classifier_pooling": "mean",
12
+ "cls_token_id": 101,
13
  "decoder_bias": true,
14
  "deterministic_flash_attn": false,
15
  "embedding_dropout": 0.0,
16
+ "eos_token_id": 102,
17
  "global_attn_every_n_layers": 3,
18
+ "global_rope_theta": 10000.0,
19
  "gradient_checkpointing": false,
20
  "hidden_activation": "gelu",
21
  "hidden_size": 768,
 
33
  "norm_eps": 1e-05,
34
  "num_attention_heads": 12,
35
  "num_hidden_layers": 22,
36
+ "pad_token_id": 0,
37
  "position_embedding_type": "absolute",
38
  "repad_logits_with_grad": false,
39
+ "sep_token_id": 102,
40
  "sparse_pred_ignore_index": -100,
41
  "sparse_prediction": false,
42
  "torch_dtype": "float32",