Update config.json

#1 by iamPi - opened
Files changed (1)
  1. config.json +5 -6
config.json CHANGED
```diff
@@ -4,8 +4,7 @@
   ],
   "attention_bias": false,
   "attention_dropout": 0.0,
-  "bos_token_id": 14,
-  "dtype": "bfloat16",
+  "bos_token_id": 13,
   "eos_token_id": 15,
   "head_dim": 128,
   "hidden_act": "silu",
@@ -56,14 +55,14 @@
   "num_attention_heads": 32,
   "num_hidden_layers": 36,
   "num_key_value_heads": 8,
-  "padded_vocab_size": 16,
   "rms_norm_eps": 1e-06,
   "rope_scaling": null,
   "rope_theta": 5000000,
   "sliding_window": null,
-  "tie_word_embeddings": true,
-  "transformers_version": "4.57.1",
+  "tie_word_embeddings": false,
+  "torch_dtype": "bfloat16",
+  "transformers_version": "4.53.3",
   "use_cache": true,
   "use_sliding_window": false,
   "vocab_size": 16
-}
+}
```
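
The `dtype` → `torch_dtype` rename and the lower `transformers_version` appear to target compatibility with older transformers releases, where the config key is still spelled `torch_dtype`. For a quick sanity check of the merged config, something like the sketch below should work; it assumes the repository is checked out locally (the `./model` path is a placeholder, not the actual repo id):

```python
from transformers import AutoConfig

# Load the updated config.json; "./model" is a placeholder path for
# wherever this repository is checked out locally.
config = AutoConfig.from_pretrained("./model")

# Verify the values this PR changes or restores.
assert config.bos_token_id == 13
assert config.tie_word_embeddings is False
# torch_dtype is parsed into a torch.dtype when torch is installed,
# otherwise it stays a string, so check both representations.
assert str(config.torch_dtype) in ("torch.bfloat16", "bfloat16")
```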