HarshvardhanCn01 committed on
Commit
e1c2140
·
verified ·
1 Parent(s): 4beed0c

Update mlc-chat-config.json

Browse files
Files changed (1) hide show
  1. mlc-chat-config.json +14 -7
mlc-chat-config.json CHANGED
@@ -8,7 +8,7 @@
8
  "num_attention_heads": 32,
9
  "num_hidden_layers": 16,
10
  "rms_norm_eps": 1e-05,
11
- "vocab_size": 128258,
12
  "tie_word_embeddings": true,
13
  "position_embedding_base": 500000.0,
14
  "rope_scaling": {
@@ -26,7 +26,7 @@
26
  "pipeline_parallel_stages": 1,
27
  "max_batch_size": 128
28
  },
29
- "vocab_size": 128258,
30
  "context_window_size": 131072,
31
  "sliding_window_size": -1,
32
  "prefill_chunk_size": 8192,
@@ -38,9 +38,12 @@
38
  "frequency_penalty": 0.0,
39
  "repetition_penalty": 1.0,
40
  "top_p": 0.9,
41
- "tokenizer_files": ["tokenizer.json", "tokenizer_config.json"],
 
 
 
42
  "tokenizer_info": {
43
- "token_postproc_method": "byte_fallback",
44
  "prepend_space_in_encode": false,
45
  "strip_space_in_decode": false
46
  },
@@ -77,7 +80,11 @@
77
  "function_string": "",
78
  "use_function_calling": false
79
  },
80
- "pad_token_id": 128257,
81
- "bos_token_id": 128256,
82
- "eos_token_id": 128257
 
 
 
 
83
  }
 
8
  "num_attention_heads": 32,
9
  "num_hidden_layers": 16,
10
  "rms_norm_eps": 1e-05,
11
+ "vocab_size": 128256,
12
  "tie_word_embeddings": true,
13
  "position_embedding_base": 500000.0,
14
  "rope_scaling": {
 
26
  "pipeline_parallel_stages": 1,
27
  "max_batch_size": 128
28
  },
29
+ "vocab_size": 128256,
30
  "context_window_size": 131072,
31
  "sliding_window_size": -1,
32
  "prefill_chunk_size": 8192,
 
38
  "frequency_penalty": 0.0,
39
  "repetition_penalty": 1.0,
40
  "top_p": 0.9,
41
+ "tokenizer_files": [
42
+ "tokenizer.json",
43
+ "tokenizer_config.json"
44
+ ],
45
  "tokenizer_info": {
46
+ "token_postproc_method": "byte_level",
47
  "prepend_space_in_encode": false,
48
  "strip_space_in_decode": false
49
  },
 
80
  "function_string": "",
81
  "use_function_calling": false
82
  },
83
+ "pad_token_id": 0,
84
+ "bos_token_id": 128000,
85
+ "eos_token_id": [
86
+ 128001,
87
+ 128008,
88
+ 128009
89
+ ]
90
  }