Hanzo Dev committed
Commit a03e17d · 1 Parent(s): 23f0fa8

Add model configuration and tokenizer files from Kimi K2 Thinking


- config.json: DeepseekV3 architecture with 384 experts, 8 active per token
- generation_config.json: Generation parameters
- tokenizer_config.json: Tokenizer configuration
- tiktoken.model: Tokenizer model (2.7MB)

Base model: moonshotai/Kimi-K2-Thinking
Architecture: DeepseekV3ForCausalLM (671B total, ~14B active)
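Because config.json and tokenizer_config.json both route auto_map to custom classes (configuration_deepseek, tokenization_kimi), loading these files through transformers requires trust_remote_code. A minimal sketch, assuming the same files are resolvable from the base repo named above (a local checkout of this repo works the same way):

```python
from transformers import AutoConfig, AutoTokenizer

# auto_map points at custom classes, so both loads need trust_remote_code=True.
repo = "moonshotai/Kimi-K2-Thinking"  # base model named above; a local path also works

config = AutoConfig.from_pretrained(repo, trust_remote_code=True)
tokenizer = AutoTokenizer.from_pretrained(repo, trust_remote_code=True)

print(config.model_type, config.architectures)   # kimi_k2 ['DeepseekV3ForCausalLM']
print(tokenizer.bos_token, tokenizer.eos_token)  # [BOS] [EOS]
```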

Files changed (4)
  1. config.json +150 -0
  2. generation_config.json +4 -0
  3. tiktoken.model +3 -0
  4. tokenizer_config.json +180 -0
config.json ADDED
@@ -0,0 +1,150 @@
+ {
+   "_attn_implementation_autoset": false,
+   "_name_or_path": "",
+   "add_cross_attention": false,
+   "architectures": [
+     "DeepseekV3ForCausalLM"
+   ],
+   "attention_bias": false,
+   "attention_dropout": 0.0,
+   "auto_map": {
+     "AutoConfig": "configuration_deepseek.DeepseekV3Config",
+     "AutoModel": "modeling_deepseek.DeepseekV3Model",
+     "AutoModelForCausalLM": "modeling_deepseek.DeepseekV3ForCausalLM"
+   },
+   "aux_loss_alpha": 0.001,
+   "bad_words_ids": null,
+   "begin_suppress_tokens": null,
+   "bos_token_id": 163584,
+   "chunk_size_feed_forward": 0,
+   "cross_attention_hidden_size": null,
+   "decoder_start_token_id": null,
+   "diversity_penalty": 0.0,
+   "do_sample": false,
+   "early_stopping": false,
+   "encoder_no_repeat_ngram_size": 0,
+   "eos_token_id": 163586,
+   "ep_size": 1,
+   "exponential_decay_length_penalty": null,
+   "finetuning_task": null,
+   "first_k_dense_replace": 1,
+   "forced_bos_token_id": null,
+   "forced_eos_token_id": null,
+   "hidden_act": "silu",
+   "hidden_size": 7168,
+   "id2label": {
+     "0": "LABEL_0",
+     "1": "LABEL_1"
+   },
+   "initializer_range": 0.02,
+   "intermediate_size": 18432,
+   "is_decoder": false,
+   "is_encoder_decoder": false,
+   "kv_lora_rank": 512,
+   "label2id": {
+     "LABEL_0": 0,
+     "LABEL_1": 1
+   },
+   "length_penalty": 1.0,
+   "max_length": 20,
+   "max_position_embeddings": 262144,
+   "min_length": 0,
+   "model_type": "kimi_k2",
+   "moe_intermediate_size": 2048,
+   "moe_layer_freq": 1,
+   "n_group": 1,
+   "n_routed_experts": 384,
+   "n_shared_experts": 1,
+   "no_repeat_ngram_size": 0,
+   "norm_topk_prob": true,
+   "num_attention_heads": 64,
+   "num_beam_groups": 1,
+   "num_beams": 1,
+   "num_experts_per_tok": 8,
+   "num_hidden_layers": 61,
+   "num_key_value_heads": 64,
+   "num_nextn_predict_layers": 0,
+   "num_return_sequences": 1,
+   "output_attentions": false,
+   "output_hidden_states": false,
+   "output_scores": false,
+   "pad_token_id": 163839,
+   "prefix": null,
+   "pretraining_tp": 1,
+   "problem_type": null,
+   "pruned_heads": {},
+   "q_lora_rank": 1536,
+   "qk_nope_head_dim": 128,
+   "qk_rope_head_dim": 64,
+   "quantization_config": {
+     "config_groups": {
+       "group_0": {
+         "input_activations": null,
+         "output_activations": null,
+         "targets": [
+           "Linear"
+         ],
+         "weights": {
+           "actorder": null,
+           "block_structure": null,
+           "dynamic": false,
+           "group_size": 32,
+           "num_bits": 4,
+           "observer": "minmax",
+           "observer_kwargs": {},
+           "strategy": "group",
+           "symmetric": true,
+           "type": "int"
+         }
+       }
+     },
+     "format": "pack-quantized",
+     "ignore": [
+       "lm_head",
+       "re:.*self_attn.*",
+       "re:.*shared_experts.*",
+       "re:.*mlp\\.(gate|up|gate_up|down)_proj.*"
+     ],
+     "kv_cache_scheme": null,
+     "quant_method": "compressed-tensors",
+     "quantization_status": "compressed"
+   },
+   "remove_invalid_values": false,
+   "repetition_penalty": 1.0,
+   "return_dict": true,
+   "return_dict_in_generate": false,
+   "rms_norm_eps": 1e-05,
+   "rope_scaling": {
+     "beta_fast": 1.0,
+     "beta_slow": 1.0,
+     "factor": 64.0,
+     "mscale": 1.0,
+     "mscale_all_dim": 1.0,
+     "original_max_position_embeddings": 4096,
+     "type": "yarn"
+   },
+   "rope_theta": 50000.0,
+   "routed_scaling_factor": 2.827,
+   "scoring_func": "sigmoid",
+   "sep_token_id": null,
+   "seq_aux": true,
+   "suppress_tokens": null,
+   "task_specific_params": null,
+   "temperature": 1.0,
+   "tf_legacy_loss": false,
+   "tie_encoder_decoder": false,
+   "tie_word_embeddings": false,
+   "tokenizer_class": null,
+   "top_k": 50,
+   "top_p": 1.0,
+   "topk_group": 1,
+   "topk_method": "noaux_tc",
+   "torch_dtype": "bfloat16",
+   "torchscript": false,
+   "transformers_version": "4.51.3",
+   "typical_p": 1.0,
+   "use_bfloat16": false,
+   "use_cache": true,
+   "v_head_dim": 128,
+   "vocab_size": 163840
+ }
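The MoE geometry and quantization scheme described in the commit message can be read straight from this file. A small sketch using only the standard library and the fields shown in the diff above:

```python
import json

with open("config.json") as f:
    cfg = json.load(f)

# 384 routed experts, 8 selected per token, plus 1 always-on shared expert, over 61 layers.
routed = cfg["n_routed_experts"]       # 384
active = cfg["num_experts_per_tok"]    # 8
shared = cfg["n_shared_experts"]       # 1
layers = cfg["num_hidden_layers"]      # 61
print(f"{routed} routed experts, {active} active per token, {shared} shared, {layers} layers")

# Weight quantization: 4-bit grouped int (group size 32) on routed-expert Linears;
# lm_head, attention, shared experts, and dense MLP projections are in the ignore list.
w = cfg["quantization_config"]["config_groups"]["group_0"]["weights"]
print(w["num_bits"], w["group_size"], w["type"])  # 4 32 int
```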
generation_config.json ADDED
@@ -0,0 +1,4 @@
+ {
+   "max_length": 262144,
+   "eos_token_id": 163586
+ }
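generation_config.json only pins the maximum length (262144, matching max_position_embeddings in config.json) and the end-of-turn token id 163586 (<|im_end|> in tokenizer_config.json). A minimal sketch of reading it with transformers; loading from "." assumes a local checkout containing this file:

```python
from transformers import GenerationConfig

gen = GenerationConfig.from_pretrained(".")  # directory containing generation_config.json
print(gen.max_length)     # 262144
print(gen.eos_token_id)   # 163586 -> "<|im_end|>"
```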
tiktoken.model ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b6c497a7469b33ced9c38afb1ad6e47f03f5e5dc05f15930799210ec050c5103
+ size 2795286
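tiktoken.model is committed as a Git LFS pointer; the actual 2.7 MB vocabulary is fetched by `git lfs pull`. A quick integrity check against the oid and size recorded in the pointer above, assuming the file has already been materialized locally:

```python
import hashlib
import os

path = "tiktoken.model"
digest = hashlib.sha256(open(path, "rb").read()).hexdigest()

assert os.path.getsize(path) == 2795286
assert digest == "b6c497a7469b33ced9c38afb1ad6e47f03f5e5dc05f15930799210ec050c5103"
print("tiktoken.model matches the LFS pointer")
```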
tokenizer_config.json ADDED
@@ -0,0 +1,180 @@
+ {
+   "added_tokens_decoder": {
+     "163584": {
+       "content": "[BOS]",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "163585": {
+       "content": "[EOS]",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "163586": {
+       "content": "<|im_end|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "163587": {
+       "content": "<|im_user|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "163588": {
+       "content": "<|im_assistant|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "163590": {
+       "content": "<|start_header_id|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "163591": {
+       "content": "<|end_header_id|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "163593": {
+       "content": "[EOT]",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "163594": {
+       "content": "<|im_system|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "163595": {
+       "content": "<|tool_calls_section_begin|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "163596": {
+       "content": "<|tool_calls_section_end|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "163597": {
+       "content": "<|tool_call_begin|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "163598": {
+       "content": "<|tool_call_argument_begin|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "163599": {
+       "content": "<|tool_call_end|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "163601": {
+       "content": "<|im_middle|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "163606": {
+       "content": "<think>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "163607": {
+       "content": "</think>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "163838": {
+       "content": "[UNK]",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "163839": {
+       "content": "[PAD]",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     }
+   },
+   "additional_special_tokens": [
+     "<|im_end|>",
+     "<|im_user|>",
+     "<|im_assistant|>",
+     "<|start_header_id|>",
+     "<|end_header_id|>",
+     "[EOT]",
+     "<|im_system|>",
+     "<|im_middle|>"
+   ],
+   "bos_token": "[BOS]",
+   "clean_up_tokenization_spaces": false,
+   "eos_token": "[EOS]",
+   "extra_special_tokens": {},
+   "model_max_length": 1000000000000000019884624838656,
+   "pad_token": "[PAD]",
+   "tokenizer_class": "TikTokenTokenizer",
+   "unk_token": "[UNK]",
+   "auto_map": {
+     "AutoTokenizer": [
+       "tokenization_kimi.TikTokenTokenizer",
+       null
+     ]
+   }
+ }
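The added_tokens_decoder entries above map fixed ids to the chat, tool-call, and thinking markers, and these ids should round-trip through the loaded tokenizer. A short sketch, assuming trust_remote_code for the custom TikTokenTokenizer class and that it follows the standard PreTrainedTokenizer interface:

```python
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("moonshotai/Kimi-K2-Thinking", trust_remote_code=True)

# Ids taken from added_tokens_decoder in this file.
assert tok.convert_tokens_to_ids("[BOS]") == 163584
assert tok.convert_tokens_to_ids("<|im_end|>") == 163586
assert tok.convert_tokens_to_ids("<think>") == 163606
print(tok.pad_token, tok.convert_tokens_to_ids("[PAD]"))  # [PAD] 163839
```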