CKenJa committed on
Commit
254a4c6
·
verified ·
1 Parent(s): d2af198

fix config that was not generated with the qwen2 conv template

Browse files
Files changed (1) hide show
  1. mlc-chat-config.json +82 -80
mlc-chat-config.json CHANGED
@@ -1,81 +1,83 @@
1
- {
2
- "version": "0.1.0",
3
- "model_type": "qwen2",
4
- "quantization": "q4f16_1",
5
- "model_config": {
6
- "hidden_act": "silu",
7
- "hidden_size": 1536,
8
- "intermediate_size": 8960,
9
- "num_attention_heads": 12,
10
- "num_hidden_layers": 28,
11
- "num_key_value_heads": 2,
12
- "rms_norm_eps": 1e-06,
13
- "rope_theta": 1000000.0,
14
- "vocab_size": 151936,
15
- "tie_word_embeddings": true,
16
- "context_window_size": 131072,
17
- "prefill_chunk_size": 8192,
18
- "tensor_parallel_shards": 1,
19
- "head_dim": 128,
20
- "dtype": "float32",
21
- "max_batch_size": 128
22
- },
23
- "vocab_size": 151936,
24
- "context_window_size": 131072,
25
- "sliding_window_size": -1,
26
- "prefill_chunk_size": 8192,
27
- "attention_sink_size": -1,
28
- "tensor_parallel_shards": 1,
29
- "pipeline_parallel_stages": 1,
30
- "active_vocab_size": 151665,
31
- "temperature": 1.0,
32
- "presence_penalty": 0.0,
33
- "frequency_penalty": 0.0,
34
- "repetition_penalty": 1.0,
35
- "top_p": 1.0,
36
- "tokenizer_files": [
37
- "tokenizer.json",
38
- "vocab.json",
39
- "merges.txt",
40
- "added_tokens.json",
41
- "tokenizer_config.json"
42
- ],
43
- "tokenizer_info": {
44
- "token_postproc_method": "byte_level",
45
- "prepend_space_in_encode": false,
46
- "strip_space_in_decode": false
47
- },
48
- "conv_template": {
49
- "name": "chatml",
50
- "system_template": "<|im_start|>system\n{system_message}<|im_end|>\n",
51
- "system_message": "A conversation between a user and an LLM-based AI assistant. The assistant gives helpful and honest answers.",
52
- "system_prefix_token_ids": null,
53
- "add_role_after_system_message": true,
54
- "roles": {
55
- "user": "<|im_start|>user",
56
- "assistant": "<|im_start|>assistant"
57
- },
58
- "role_templates": {
59
- "user": "{user_message}",
60
- "assistant": "{assistant_message}",
61
- "tool": "{tool_message}"
62
- },
63
- "messages": [],
64
- "seps": [
65
- "<|im_end|>\n"
66
- ],
67
- "role_content_sep": "\n",
68
- "role_empty_sep": "\n",
69
- "stop_str": [
70
- "<|im_end|>"
71
- ],
72
- "stop_token_ids": [
73
- 2
74
- ],
75
- "function_string": "",
76
- "use_function_calling": false
77
- },
78
- "pad_token_id": 0,
79
- "bos_token_id": 151644,
80
- "eos_token_id": 151645
 
 
81
  }
 
1
+ {
2
+ "version": "0.1.0",
3
+ "model_type": "qwen2",
4
+ "quantization": "q4f16_1",
5
+ "model_config": {
6
+ "hidden_act": "silu",
7
+ "hidden_size": 1536,
8
+ "intermediate_size": 8960,
9
+ "num_attention_heads": 12,
10
+ "num_hidden_layers": 28,
11
+ "num_key_value_heads": 2,
12
+ "rms_norm_eps": 1e-06,
13
+ "rope_theta": 1000000.0,
14
+ "vocab_size": 151936,
15
+ "tie_word_embeddings": true,
16
+ "context_window_size": 131072,
17
+ "prefill_chunk_size": 8192,
18
+ "tensor_parallel_shards": 1,
19
+ "head_dim": 128,
20
+ "dtype": "float32",
21
+ "max_batch_size": 128
22
+ },
23
+ "vocab_size": 151936,
24
+ "context_window_size": 131072,
25
+ "sliding_window_size": -1,
26
+ "prefill_chunk_size": 8192,
27
+ "attention_sink_size": -1,
28
+ "tensor_parallel_shards": 1,
29
+ "pipeline_parallel_stages": 1,
30
+ "active_vocab_size": 151665,
31
+ "temperature": 1.0,
32
+ "presence_penalty": 0.0,
33
+ "frequency_penalty": 0.0,
34
+ "repetition_penalty": 1.0,
35
+ "top_p": 1.0,
36
+ "tokenizer_files": [
37
+ "tokenizer.json",
38
+ "vocab.json",
39
+ "merges.txt",
40
+ "added_tokens.json",
41
+ "tokenizer_config.json"
42
+ ],
43
+ "tokenizer_info": {
44
+ "token_postproc_method": "byte_level",
45
+ "prepend_space_in_encode": false,
46
+ "strip_space_in_decode": false
47
+ },
48
+ "conv_template": {
49
+ "name": "qwen2",
50
+ "system_template": "<|im_start|>system\n{system_message}<|im_end|>\n",
51
+ "system_message": "You are a helpful assistant.",
52
+ "system_prefix_token_ids": null,
53
+ "add_role_after_system_message": true,
54
+ "roles": {
55
+ "user": "<|im_start|>user",
56
+ "assistant": "<|im_start|>assistant"
57
+ },
58
+ "role_templates": {
59
+ "user": "{user_message}",
60
+ "assistant": "{assistant_message}",
61
+ "tool": "{tool_message}"
62
+ },
63
+ "messages": [],
64
+ "seps": [
65
+ "<|im_end|>\n"
66
+ ],
67
+ "role_content_sep": "\n",
68
+ "role_empty_sep": "\n",
69
+ "stop_str": [
70
+ "<|endoftext|>",
71
+ "<|im_end|>"
72
+ ],
73
+ "stop_token_ids": [
74
+ 151643,
75
+ 151645
76
+ ],
77
+ "function_string": "",
78
+ "use_function_calling": false
79
+ },
80
+ "pad_token_id": 0,
81
+ "bos_token_id": 151644,
82
+ "eos_token_id": 151645
83
  }