JonusNattapong committed on
Commit
75e868b
·
verified ·
1 Parent(s): bf725a9

Upload config.json with huggingface_hub

Browse files
Files changed (1) hide show
  1. config.json +12 -10
config.json CHANGED
@@ -1,23 +1,25 @@
1
  {
2
- "_name_or_path": "OpensourceThai/Wilai",
3
  "architectures": [
4
- "OpenThaiWilai"
5
  ],
6
- "bos_token_id": 1,
7
- "eos_token_id": 2,
8
- "pad_token_id": 0,
9
  "hidden_size": 768,
10
  "intermediate_size": 3072,
11
- "max_position_embeddings": 512,
12
  "model_type": "OpenThaiWilai",
 
13
  "num_heads": 8,
 
 
14
  "num_layers": 6,
15
- "num_experts": 4,
 
16
  "top_k": 2,
 
 
17
  "vocab_size": 48000,
18
- "torch_dtype": "float32",
19
- "transformers_version": "4.44.0",
20
- "use_cache": true,
21
  "auto_map": {
22
  "AutoConfig": "configuration_openthaiwilai.OpenThaiWilaiConfig",
23
  "AutoModelForCausalLM": "modeling_openthaiwilai.OpenThaiWilaiForCausalLM"
 
1
  {
 
2
  "architectures": [
3
+ "OpenThaiWilaiForCausalLM"
4
  ],
5
+ "bos_token_id": 2,
6
+ "dtype": "float32",
7
+ "eos_token_id": 3,
8
  "hidden_size": 768,
9
  "intermediate_size": 3072,
10
+ "max_position_embeddings": 2048,
11
  "model_type": "OpenThaiWilai",
12
+ "num_experts": 4,
13
  "num_heads": 8,
14
+ "num_hidden_layers": 6,
15
+ "num_key_value_heads": 8,
16
  "num_layers": 6,
17
+ "pad_token_id": 0,
18
+ "rope": true,
19
  "top_k": 2,
20
+ "transformers_version": "4.56.1",
21
+ "use_flashattn": true,
22
  "vocab_size": 48000,
 
 
 
23
  "auto_map": {
24
  "AutoConfig": "configuration_openthaiwilai.OpenThaiWilaiConfig",
25
  "AutoModelForCausalLM": "modeling_openthaiwilai.OpenThaiWilaiForCausalLM"