JuIm committed
Commit b74bb71 · verified · 1 Parent(s): 4dec132

Update config.json

Files changed (1): config.json +3 -3
config.json CHANGED
@@ -6,9 +6,9 @@
  "attention_bias": false,
  "attention_dropout": 0.0,
  "attn_logit_softcapping": 50.0,
- "bos_token_id": 1,
+ "bos_token_id": 20,
  "cache_implementation": "hybrid",
- "eos_token_id": 2,
+ "eos_token_id": 21,
  "final_logit_softcapping": 30.0,
  "head_dim": 256,
  "hidden_activation": "gelu_pytorch_tanh",
@@ -20,7 +20,7 @@
  "num_attention_heads": 16,
  "num_hidden_layers": 14,
  "num_key_value_heads": 16,
- "pad_token_id": 3,
+ "pad_token_id": 22,
  "query_pre_attn_scalar": 224,
  "rms_norm_eps": 1e-06,
  "rope_theta": 10000.0,
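As a quick local sanity check, a minimal sketch (assuming the config.json from this commit sits in the working directory) that loads the file and asserts the remapped special-token IDs:

```python
import json

# Load the config.json updated by this commit (the local path is an assumption).
with open("config.json") as f:
    config = json.load(f)

# The commit remaps the special-token IDs: bos 1 -> 20, eos 2 -> 21, pad 3 -> 22.
assert config["bos_token_id"] == 20
assert config["eos_token_id"] == 21
assert config["pad_token_id"] == 22
print("Special-token IDs match this commit.")
```

Note that these IDs must stay in sync with the tokenizer's special tokens; if the tokenizer files were not updated in the same way, generation may start or stop on the wrong tokens.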