ABrain-One committed
Commit 4c0bf98 · verified · parent: 18f7d9d

Delete ABrain

ABrain/NNGPT-DeepSeek-Coder-1.3B-Instruct/config.json DELETED
@@ -1,34 +0,0 @@
- {
-   "_name_or_path": "/home/dm/prj/py/nn-gpt/out/upload/Models/ABrain/NNGPT-DeepSeek-Coder-1.3B-Instruct",
-   "architectures": [
-     "LlamaForCausalLM"
-   ],
-   "attention_bias": false,
-   "attention_dropout": 0.0,
-   "bos_token_id": 32013,
-   "eos_token_id": 32021,
-   "head_dim": 128,
-   "hidden_act": "silu",
-   "hidden_size": 2048,
-   "initializer_range": 0.02,
-   "intermediate_size": 5504,
-   "max_position_embeddings": 16384,
-   "mlp_bias": false,
-   "model_type": "llama",
-   "num_attention_heads": 16,
-   "num_hidden_layers": 24,
-   "num_key_value_heads": 16,
-   "pretraining_tp": 1,
-   "rms_norm_eps": 1e-06,
-   "rope_scaling": {
-     "factor": 4.0,
-     "rope_type": "linear",
-     "type": "linear"
-   },
-   "rope_theta": 100000,
-   "tie_word_embeddings": false,
-   "torch_dtype": "float16",
-   "transformers_version": "4.48.3",
-   "use_cache": true,
-   "vocab_size": 32256
- }
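For context, the deleted config describes a standard Llama-architecture checkpoint. A minimal self-consistency check over values transcribed from the diff above, using only the Python standard library (the script is illustrative, not part of the repository):

config = {  # values copied from the deleted config.json
    "hidden_size": 2048,
    "num_attention_heads": 16,
    "head_dim": 128,
    "max_position_embeddings": 16384,
    "rope_scaling": {"factor": 4.0, "type": "linear"},
}

# Per-head width: 2048 / 16 = 128, matching the declared head_dim.
assert config["hidden_size"] // config["num_attention_heads"] == config["head_dim"]

# Linear RoPE scaling by 4.0 suggests a 4096-token pretraining window
# stretched to the declared 16384-token context: 16384 / 4.0 = 4096.
print(config["max_position_embeddings"] / config["rope_scaling"]["factor"])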
ABrain/NNGPT-DeepSeek-Coder-1.3B-Instruct/generation_config.json DELETED
@@ -1,6 +0,0 @@
- {
-   "_from_model_config": true,
-   "bos_token_id": 32013,
-   "eos_token_id": 32021,
-   "transformers_version": "4.48.3"
- }
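The generation config carries only token ids; no sampling parameters were set. A minimal sketch of reconstructing it in code, assuming the transformers library is installed:

from transformers import GenerationConfig

# Ids transcribed from the deleted file; all other fields are library defaults.
gen_cfg = GenerationConfig(bos_token_id=32013, eos_token_id=32021)
print(gen_cfg.eos_token_id)  # 32021, the "<|EOT|>" token (see tokenizer files below)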
ABrain/NNGPT-DeepSeek-Coder-1.3B-Instruct/model-00001-of-00002.safetensors DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:a6fa8663157f2b1ee79d943f48e39ae1cc00f3b36af4914fa5c067a57333778c
- size 4989350312
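Both .safetensors entries are Git LFS pointers: the repository itself stored only a sha256 oid and a byte size, with the actual weights held in LFS storage. A sketch of verifying a downloaded shard against its pointer (shard_path is a hypothetical local path, not a file in this commit):

import hashlib

def lfs_oid(shard_path: str, chunk_size: int = 1 << 20) -> str:
    """Stream a file and return its sha256 hex digest, i.e. the LFS oid."""
    h = hashlib.sha256()
    with open(shard_path, "rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            h.update(chunk)
    return h.hexdigest()

# For the shard above, the expected digest is
# a6fa8663157f2b1ee79d943f48e39ae1cc00f3b36af4914fa5c067a57333778c
# and the expected size is 4,989,350,312 bytes (about 4.65 GiB).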
ABrain/NNGPT-DeepSeek-Coder-1.3B-Instruct/model-00002-of-00002.safetensors DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:cc6b0314faf3b20778b12edad6acc738d5ef8ab351204c60cd061ee807110009
- size 132120704
ABrain/NNGPT-DeepSeek-Coder-1.3B-Instruct/model.safetensors.index.json DELETED
@@ -1,226 +0,0 @@
- {
-   "metadata": {
-     "total_size": 5121445888
-   },
-   "weight_map": {
-     "lm_head.weight": "model-00002-of-00002.safetensors",
-     "model.embed_tokens.weight": "model-00001-of-00002.safetensors",
-     "model.layers.0.input_layernorm.weight": "model-00001-of-00002.safetensors",
-     "model.layers.0.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.0.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.0.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.0.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
-     "model.layers.0.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.0.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.0.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.0.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.1.input_layernorm.weight": "model-00001-of-00002.safetensors",
-     "model.layers.1.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.1.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.1.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.1.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
-     "model.layers.1.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.1.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.1.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.1.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.10.input_layernorm.weight": "model-00001-of-00002.safetensors",
-     "model.layers.10.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.10.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.10.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.10.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
-     "model.layers.10.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.10.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.10.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.10.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.11.input_layernorm.weight": "model-00001-of-00002.safetensors",
-     "model.layers.11.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.11.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.11.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.11.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
-     "model.layers.11.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.11.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.11.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.11.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.12.input_layernorm.weight": "model-00001-of-00002.safetensors",
-     "model.layers.12.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.12.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.12.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.12.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
-     "model.layers.12.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.12.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.12.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.12.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.13.input_layernorm.weight": "model-00001-of-00002.safetensors",
-     "model.layers.13.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.13.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.13.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.13.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
-     "model.layers.13.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.13.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.13.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.13.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.14.input_layernorm.weight": "model-00001-of-00002.safetensors",
-     "model.layers.14.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.14.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.14.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.14.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
-     "model.layers.14.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.14.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.14.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.14.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.15.input_layernorm.weight": "model-00001-of-00002.safetensors",
-     "model.layers.15.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.15.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.15.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.15.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
-     "model.layers.15.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.15.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.15.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.15.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.16.input_layernorm.weight": "model-00001-of-00002.safetensors",
-     "model.layers.16.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.16.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.16.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.16.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
-     "model.layers.16.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.16.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.16.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.16.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.17.input_layernorm.weight": "model-00001-of-00002.safetensors",
-     "model.layers.17.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.17.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.17.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.17.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
-     "model.layers.17.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.17.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.17.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.17.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.18.input_layernorm.weight": "model-00001-of-00002.safetensors",
-     "model.layers.18.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.18.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.18.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.18.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
-     "model.layers.18.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.18.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.18.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.18.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.19.input_layernorm.weight": "model-00001-of-00002.safetensors",
-     "model.layers.19.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.19.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.19.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.19.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
-     "model.layers.19.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.19.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.19.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.19.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.2.input_layernorm.weight": "model-00001-of-00002.safetensors",
-     "model.layers.2.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.2.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.2.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.2.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
-     "model.layers.2.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.2.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.2.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.2.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.20.input_layernorm.weight": "model-00001-of-00002.safetensors",
-     "model.layers.20.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.20.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.20.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.20.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
-     "model.layers.20.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.20.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.20.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.20.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.21.input_layernorm.weight": "model-00001-of-00002.safetensors",
-     "model.layers.21.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.21.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.21.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.21.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
-     "model.layers.21.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.21.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.21.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.21.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.22.input_layernorm.weight": "model-00001-of-00002.safetensors",
-     "model.layers.22.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.22.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.22.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.22.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
-     "model.layers.22.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.22.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.22.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.22.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.23.input_layernorm.weight": "model-00001-of-00002.safetensors",
-     "model.layers.23.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.23.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.23.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.23.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
-     "model.layers.23.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.23.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.23.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.23.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.3.input_layernorm.weight": "model-00001-of-00002.safetensors",
-     "model.layers.3.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.3.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.3.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.3.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
-     "model.layers.3.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.3.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.3.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.3.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.4.input_layernorm.weight": "model-00001-of-00002.safetensors",
-     "model.layers.4.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.4.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.4.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.4.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
-     "model.layers.4.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.4.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.4.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.4.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.5.input_layernorm.weight": "model-00001-of-00002.safetensors",
-     "model.layers.5.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.5.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.5.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.5.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
-     "model.layers.5.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.5.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.5.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.5.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.6.input_layernorm.weight": "model-00001-of-00002.safetensors",
-     "model.layers.6.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.6.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.6.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.6.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
-     "model.layers.6.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.6.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.6.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.6.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.7.input_layernorm.weight": "model-00001-of-00002.safetensors",
-     "model.layers.7.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.7.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.7.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.7.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
-     "model.layers.7.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.7.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.7.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.7.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.8.input_layernorm.weight": "model-00001-of-00002.safetensors",
-     "model.layers.8.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.8.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.8.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.8.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
-     "model.layers.8.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.8.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.8.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.8.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.9.input_layernorm.weight": "model-00001-of-00002.safetensors",
-     "model.layers.9.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.9.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.9.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.9.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
-     "model.layers.9.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.9.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.9.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.9.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
-     "model.norm.weight": "model-00001-of-00002.safetensors"
-   }
- }
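Two details of the index can be checked against the LFS pointers above. First, total_size counts tensor bytes only: the shard files sum to 4,989,350,312 + 132,120,704 = 5,121,471,016 bytes, which is 25,128 bytes more than total_size, the difference being the JSON header at the start of each .safetensors file. Second, the second shard holds only lm_head.weight, and a float16 tensor of shape 32256 × 2048 is 132,120,576 bytes, i.e. the shard's size minus a 128-byte header. A stdlib sketch of the first check against a hypothetical local copy of the index:

import json

# Byte sizes transcribed from the two Git LFS pointers in this commit.
shard_file_sizes = {
    "model-00001-of-00002.safetensors": 4_989_350_312,
    "model-00002-of-00002.safetensors": 132_120_704,
}

with open("model.safetensors.index.json") as f:  # hypothetical local copy
    index = json.load(f)

# total_size excludes per-file safetensors headers, so the files on disk
# are slightly larger than the sum of the tensors they contain.
header_bytes = sum(shard_file_sizes.values()) - index["metadata"]["total_size"]
print(header_bytes)  # 25128 for the sizes recorded in this commit

# Every tensor in the weight map must point at a known shard.
assert set(index["weight_map"].values()) <= set(shard_file_sizes)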
ABrain/NNGPT-DeepSeek-Coder-1.3B-Instruct/special_tokens_map.json DELETED
@@ -1,23 +0,0 @@
- {
-   "bos_token": {
-     "content": "<|begin▁of▁sentence|>",
-     "lstrip": false,
-     "normalized": true,
-     "rstrip": false,
-     "single_word": false
-   },
-   "eos_token": {
-     "content": "<|EOT|>",
-     "lstrip": false,
-     "normalized": true,
-     "rstrip": false,
-     "single_word": false
-   },
-   "pad_token": {
-     "content": "<|EOT|>",
-     "lstrip": false,
-     "normalized": true,
-     "rstrip": false,
-     "single_word": false
-   }
- }
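All three special tokens reuse two strings, with padding mapped onto the EOS token. A small stdlib cross-check, with ids transcribed from the added_tokens_decoder in tokenizer_config.json below (the script itself is illustrative):

# Special entries of added_tokens_decoder, copied from the file below.
added_tokens = {
    32013: "<|begin▁of▁sentence|>",
    32014: "<|end▁of▁sentence|>",
    32021: "<|EOT|>",
}
by_content = {text: idx for idx, text in added_tokens.items()}
assert by_content["<|begin▁of▁sentence|>"] == 32013  # bos_token_id in config.json
assert by_content["<|EOT|>"] == 32021                # eos_token_id; pad reuses it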
ABrain/NNGPT-DeepSeek-Coder-1.3B-Instruct/tokenizer.json DELETED
The diff for this file is too large to render.
 
ABrain/NNGPT-DeepSeek-Coder-1.3B-Instruct/tokenizer_config.json DELETED
@@ -1,195 +0,0 @@
- {
-   "add_bos_token": true,
-   "add_eos_token": false,
-   "add_prefix_space": null,
-   "added_tokens_decoder": {
-     "32000": {
-       "content": "õ",
-       "lstrip": false,
-       "normalized": true,
-       "rstrip": false,
-       "single_word": false,
-       "special": false
-     },
-     "32001": {
-       "content": "÷",
-       "lstrip": false,
-       "normalized": true,
-       "rstrip": false,
-       "single_word": false,
-       "special": false
-     },
-     "32002": {
-       "content": "Á",
-       "lstrip": false,
-       "normalized": true,
-       "rstrip": false,
-       "single_word": false,
-       "special": false
-     },
-     "32003": {
-       "content": "ý",
-       "lstrip": false,
-       "normalized": true,
-       "rstrip": false,
-       "single_word": false,
-       "special": false
-     },
-     "32004": {
-       "content": "À",
-       "lstrip": false,
-       "normalized": true,
-       "rstrip": false,
-       "single_word": false,
-       "special": false
-     },
-     "32005": {
-       "content": "ÿ",
-       "lstrip": false,
-       "normalized": true,
-       "rstrip": false,
-       "single_word": false,
-       "special": false
-     },
-     "32006": {
-       "content": "ø",
-       "lstrip": false,
-       "normalized": true,
-       "rstrip": false,
-       "single_word": false,
-       "special": false
-     },
-     "32007": {
-       "content": "ú",
-       "lstrip": false,
-       "normalized": true,
-       "rstrip": false,
-       "single_word": false,
-       "special": false
-     },
-     "32008": {
-       "content": "þ",
-       "lstrip": false,
-       "normalized": true,
-       "rstrip": false,
-       "single_word": false,
-       "special": false
-     },
-     "32009": {
-       "content": "ü",
-       "lstrip": false,
-       "normalized": true,
-       "rstrip": false,
-       "single_word": false,
-       "special": false
-     },
-     "32010": {
-       "content": "ù",
-       "lstrip": false,
-       "normalized": true,
-       "rstrip": false,
-       "single_word": false,
-       "special": false
-     },
-     "32011": {
-       "content": "ö",
-       "lstrip": false,
-       "normalized": true,
-       "rstrip": false,
-       "single_word": false,
-       "special": false
-     },
-     "32012": {
-       "content": "û",
-       "lstrip": false,
-       "normalized": true,
-       "rstrip": false,
-       "single_word": false,
-       "special": false
-     },
-     "32013": {
-       "content": "<|begin▁of▁sentence|>",
-       "lstrip": false,
-       "normalized": true,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "32014": {
-       "content": "<|end▁of▁sentence|>",
-       "lstrip": false,
-       "normalized": true,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "32015": {
-       "content": "<|fim▁hole|>",
-       "lstrip": false,
-       "normalized": true,
-       "rstrip": false,
-       "single_word": false,
-       "special": false
-     },
-     "32016": {
-       "content": "<|fim▁begin|>",
-       "lstrip": false,
-       "normalized": true,
-       "rstrip": false,
-       "single_word": false,
-       "special": false
-     },
-     "32017": {
-       "content": "<|fim▁end|>",
-       "lstrip": false,
-       "normalized": true,
-       "rstrip": false,
-       "single_word": false,
-       "special": false
-     },
-     "32018": {
-       "content": "<pad>",
-       "lstrip": false,
-       "normalized": true,
-       "rstrip": false,
-       "single_word": false,
-       "special": false
-     },
-     "32019": {
-       "content": "<|User|>",
-       "lstrip": false,
-       "normalized": true,
-       "rstrip": false,
-       "single_word": false,
-       "special": false
-     },
-     "32020": {
-       "content": "<|Assistant|>",
-       "lstrip": false,
-       "normalized": true,
-       "rstrip": false,
-       "single_word": false,
-       "special": false
-     },
-     "32021": {
-       "content": "<|EOT|>",
-       "lstrip": false,
-       "normalized": true,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     }
-   },
-   "bos_token": "<|begin▁of▁sentence|>",
-   "chat_template": "{% if not add_generation_prompt is defined %}\n{% set add_generation_prompt = false %}\n{% endif %}\n{%- set ns = namespace(found=false) -%}\n{%- for message in messages -%}\n {%- if message['role'] == 'system' -%}\n {%- set ns.found = true -%}\n {%- endif -%}\n{%- endfor -%}\n{{bos_token}}{%- if not ns.found -%}\n{{'You are an AI programming assistant, utilizing the Deepseek Coder model, developed by Deepseek Company, and you only answer questions related to computer science. For politically sensitive questions, security and privacy issues, and other non-computer science questions, you will refuse to answer\\n'}}\n{%- endif %}\n{%- for message in messages %}\n {%- if message['role'] == 'system' %}\n{{ message['content'] }}\n {%- else %}\n {%- if message['role'] == 'user' %}\n{{'### Instruction:\\n' + message['content'] + '\\n'}}\n {%- else %}\n{{'### Response:\\n' + message['content'] + '\\n<|EOT|>\\n'}}\n {%- endif %}\n {%- endif %}\n{%- endfor %}\n{% if add_generation_prompt %}\n{{'### Response:'}}\n{% endif %}",
-   "clean_up_tokenization_spaces": false,
-   "eos_token": "<|EOT|>",
-   "extra_special_tokens": {},
-   "legacy": true,
-   "model_max_length": 16384,
-   "pad_token": "<|EOT|>",
-   "sp_model_kwargs": {},
-   "tokenizer_class": "LlamaTokenizer",
-   "unk_token": null,
-   "use_default_system_prompt": false
- }
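The chat_template above is the usual DeepSeek-Coder "### Instruction / ### Response" format. A hedged usage sketch, assuming transformers is installed; since this repository was deleted, the upstream deepseek-ai/deepseek-coder-1.3b-instruct tokenizer, which ships an equivalent template, stands in:

from transformers import AutoTokenizer

# Stand-in repository; assumed to carry the same DeepSeek-Coder chat template.
tok = AutoTokenizer.from_pretrained("deepseek-ai/deepseek-coder-1.3b-instruct")
messages = [{"role": "user", "content": "Write a hello-world program in Python."}]
prompt = tok.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)
print(prompt)  # bos + default system prompt + "### Instruction:\n...\n### Response:"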