RuudFontys committed on
Commit
a87ce38
·
verified ·
1 Parent(s): 44c1fe4

Update tokenizer chat template with Socratic prompt

Browse files
Socratic-Tutor-Qwen2.5_Hf-7.6B-Q8_0.gguf CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:7c83705ffd690e7a3e67b15e62b07e34e851f3d36a9f9c5927ab876fef38f3fa
3
- size 8098524960
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:67204dc2fd69aa83038db5293537b33cdbc83ee18347d829cc0e7d9924e2c068
3
+ size 135
added_tokens.json CHANGED
@@ -1,24 +1,5 @@
1
  {
2
- "</tool_call>": 151658,
3
- "<tool_call>": 151657,
4
- "<|box_end|>": 151649,
5
- "<|box_start|>": 151648,
6
  "<|endoftext|>": 151643,
7
- "<|file_sep|>": 151664,
8
- "<|fim_middle|>": 151660,
9
- "<|fim_pad|>": 151662,
10
- "<|fim_prefix|>": 151659,
11
- "<|fim_suffix|>": 151661,
12
  "<|im_end|>": 151645,
13
- "<|im_start|>": 151644,
14
- "<|image_pad|>": 151655,
15
- "<|object_ref_end|>": 151647,
16
- "<|object_ref_start|>": 151646,
17
- "<|quad_end|>": 151651,
18
- "<|quad_start|>": 151650,
19
- "<|repo_name|>": 151663,
20
- "<|video_pad|>": 151656,
21
- "<|vision_end|>": 151653,
22
- "<|vision_pad|>": 151654,
23
- "<|vision_start|>": 151652
24
  }
 
1
  {
 
 
 
 
2
  "<|endoftext|>": 151643,
 
 
 
 
 
3
  "<|im_end|>": 151645,
4
+ "<|im_start|>": 151644
 
 
 
 
 
 
 
 
 
 
5
  }
chat_template.jinja CHANGED
@@ -1,53 +1 @@
1
- {%- if tools %}
2
- {{- '<|im_start|>system\n' }}
3
- {%- if messages[0]['role'] == 'system' %}
4
- {{- messages[0]['content'] }}
5
- {%- else %}
6
- {{- 'You are Qwen, created by Alibaba Cloud. You are a helpful assistant.' }}
7
- {%- endif %}
8
- {{- "\n\n# Tools\n\nYou may call one or more functions to assist with the user query.\n\nYou are provided with function signatures within <tools></tools> XML tags:\n<tools>" }}
9
- {%- for tool in tools %}
10
- {{- "\n" }}
11
- {{- tool | tojson }}
12
- {%- endfor %}
13
- {{- "\n</tools>\n\nFor each function call, return a json object with function name and arguments within <tool_call></tool_call> XML tags:\n<tool_call>\n{\"name\": <function-name>, \"arguments\": <args-json-object>}\n</tool_call><|im_end|>\n" }}
14
- {%- else %}
15
- {%- if messages[0]['role'] == 'system' %}
16
- {{- '<|im_start|>system\n' + messages[0]['content'] + '<|im_end|>\n' }}
17
- {%- else %}
18
- {{- '<|im_start|>system\nYou are Qwen, created by Alibaba Cloud. You are a helpful assistant.<|im_end|>\n' }}
19
- {%- endif %}
20
- {%- endif %}
21
- {%- for message in messages %}
22
- {%- if (message.role == "user") or (message.role == "system" and not loop.first) or (message.role == "assistant" and not message.tool_calls) %}
23
- {{- '<|im_start|>' + message.role + '\n' + message.content + '<|im_end|>' + '\n' }}
24
- {%- elif message.role == "assistant" %}
25
- {{- '<|im_start|>' + message.role }}
26
- {%- if message.content %}
27
- {{- '\n' + message.content }}
28
- {%- endif %}
29
- {%- for tool_call in message.tool_calls %}
30
- {%- if tool_call.function is defined %}
31
- {%- set tool_call = tool_call.function %}
32
- {%- endif %}
33
- {{- '\n<tool_call>\n{"name": "' }}
34
- {{- tool_call.name }}
35
- {{- '", "arguments": ' }}
36
- {{- tool_call.arguments | tojson }}
37
- {{- '}\n</tool_call>' }}
38
- {%- endfor %}
39
- {{- '<|im_end|>\n' }}
40
- {%- elif message.role == "tool" %}
41
- {%- if (loop.index0 == 0) or (messages[loop.index0 - 1].role != "tool") %} {{- '<|im_start|>user' }}
42
- {%- endif %}
43
- {{- '\n<tool_response>\n' }}
44
- {{- message.content }}
45
- {{- '\n</tool_response>' }}
46
- {%- if loop.last or (messages[loop.index0 + 1].role != "tool") %}
47
- {{- '<|im_end|>\n' }}
48
- {%- endif %}
49
- {%- endif %}
50
- {%- endfor %}
51
- {%- if add_generation_prompt %}
52
- {{- '<|im_start|>assistant\n' }}
53
- {%- endif %}
 
1
+ {% for message in messages %}{% if loop.first and message['role'] != 'system' %}{{ '<|im_start|>system\n' + "You are Socrates, a wise and patient tutor. Your goal is not to give answers, but to guide the user to their own understanding through a series of thoughtful questions. Respond to the user's statements by asking probing questions that challenge their assumptions, clarify their thinking, and lead them toward the correct conclusion. Never provide a direct answer unless explicitly asked." + '<|im_end|>\n' }}{% endif %}{{ '<|im_start|>' + message['role'] + '\n' + message['content'] + '<|im_end|>\n' }}{% endfor %}{% if add_generation_prompt %}{{ '<|im_start|>assistant\n' }}{% endif %}
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
model-00001-of-00002.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:80ed0c8b432f995343ddfaa49b71c261da3d7da0e82673319c86e5987d996b65
3
- size 4999266524
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:a9d6181e1f4819efc3c2bd24962cf11e704f8dbda5824ee84a5946d1cc733f38
3
+ size 135
model-00001-of-00004.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:10e4bca43d1418f640a25a957c1c409f71acec32b9e17384729ef84aec114f8c
3
- size 4877660776
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:a73a3895fb4421f8edf0201284ef3da16f5d60a9eeb8a1bc2f6615216b7ea719
3
+ size 135
model-00002-of-00002.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:ec54d22f8824fd17b25aba5c2184f55597acaf357a705f971944c52098aaf1fd
3
- size 2308120383
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:c8d2b513f7dd9907b80c15e23de7049a8bc7f9ac4e25e80684174d93e7930d8c
3
+ size 135
model-00002-of-00004.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:81a8ede3bcd9e651883367d050af943cbcb31e59f61be191a8fcc043d902e3a3
3
- size 4932751008
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:acb77437208d2a61f1df3e0f9e60303ceb1f2e8a66e25537f8288a748a023adc
3
+ size 135
model-00003-of-00004.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:e95feed944bdd0440bccf80ed31f3430f3b043aec138ce9708fb090bc4268141
3
- size 4330865200
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:3818826e702cfdd0bb13cb48fc4c375609b3709ec8c62695095e1833ed39701f
3
+ size 135
model-00004-of-00004.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:06006972c3be88e8a44fe21cfe2b0472b130780c781a741f8f90f1fe5ba3aae2
3
- size 1089994880
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:36b455f5b40fc6c1b6e47259f40b4940a9364e634e66fccb2fec109a0cc92bdc
3
+ size 135
special_tokens_map.json CHANGED
@@ -1,18 +1,7 @@
1
  {
2
  "additional_special_tokens": [
3
  "<|im_start|>",
4
- "<|im_end|>",
5
- "<|object_ref_start|>",
6
- "<|object_ref_end|>",
7
- "<|box_start|>",
8
- "<|box_end|>",
9
- "<|quad_start|>",
10
- "<|quad_end|>",
11
- "<|vision_start|>",
12
- "<|vision_end|>",
13
- "<|vision_pad|>",
14
- "<|image_pad|>",
15
- "<|video_pad|>"
16
  ],
17
  "eos_token": {
18
  "content": "<|im_end|>",
@@ -22,7 +11,7 @@
22
  "single_word": false
23
  },
24
  "pad_token": {
25
- "content": "<|vision_pad|>",
26
  "lstrip": false,
27
  "normalized": false,
28
  "rstrip": false,
 
1
  {
2
  "additional_special_tokens": [
3
  "<|im_start|>",
4
+ "<|im_end|>"
 
 
 
 
 
 
 
 
 
 
 
5
  ],
6
  "eos_token": {
7
  "content": "<|im_end|>",
 
11
  "single_word": false
12
  },
13
  "pad_token": {
14
+ "content": "<|endoftext|>",
15
  "lstrip": false,
16
  "normalized": false,
17
  "rstrip": false,
tokenizer.json CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:9c5ae00e602b8860cbd784ba82a8aa14e8feecec692e7076590d014d7b7fdafa
3
- size 11421896
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:bcfe42da0a4497e8b2b172c1f9f4ec423a46dc12907f4349c55025f670422ba9
3
+ size 11418266
tokenizer_config.json CHANGED
@@ -1,5 +1,4 @@
1
  {
2
- "add_bos_token": false,
3
  "add_prefix_space": false,
4
  "added_tokens_decoder": {
5
  "151643": {
@@ -25,183 +24,19 @@
25
  "rstrip": false,
26
  "single_word": false,
27
  "special": true
28
- },
29
- "151646": {
30
- "content": "<|object_ref_start|>",
31
- "lstrip": false,
32
- "normalized": false,
33
- "rstrip": false,
34
- "single_word": false,
35
- "special": true
36
- },
37
- "151647": {
38
- "content": "<|object_ref_end|>",
39
- "lstrip": false,
40
- "normalized": false,
41
- "rstrip": false,
42
- "single_word": false,
43
- "special": true
44
- },
45
- "151648": {
46
- "content": "<|box_start|>",
47
- "lstrip": false,
48
- "normalized": false,
49
- "rstrip": false,
50
- "single_word": false,
51
- "special": true
52
- },
53
- "151649": {
54
- "content": "<|box_end|>",
55
- "lstrip": false,
56
- "normalized": false,
57
- "rstrip": false,
58
- "single_word": false,
59
- "special": true
60
- },
61
- "151650": {
62
- "content": "<|quad_start|>",
63
- "lstrip": false,
64
- "normalized": false,
65
- "rstrip": false,
66
- "single_word": false,
67
- "special": true
68
- },
69
- "151651": {
70
- "content": "<|quad_end|>",
71
- "lstrip": false,
72
- "normalized": false,
73
- "rstrip": false,
74
- "single_word": false,
75
- "special": true
76
- },
77
- "151652": {
78
- "content": "<|vision_start|>",
79
- "lstrip": false,
80
- "normalized": false,
81
- "rstrip": false,
82
- "single_word": false,
83
- "special": true
84
- },
85
- "151653": {
86
- "content": "<|vision_end|>",
87
- "lstrip": false,
88
- "normalized": false,
89
- "rstrip": false,
90
- "single_word": false,
91
- "special": true
92
- },
93
- "151654": {
94
- "content": "<|vision_pad|>",
95
- "lstrip": false,
96
- "normalized": false,
97
- "rstrip": false,
98
- "single_word": false,
99
- "special": true
100
- },
101
- "151655": {
102
- "content": "<|image_pad|>",
103
- "lstrip": false,
104
- "normalized": false,
105
- "rstrip": false,
106
- "single_word": false,
107
- "special": true
108
- },
109
- "151656": {
110
- "content": "<|video_pad|>",
111
- "lstrip": false,
112
- "normalized": false,
113
- "rstrip": false,
114
- "single_word": false,
115
- "special": true
116
- },
117
- "151657": {
118
- "content": "<tool_call>",
119
- "lstrip": false,
120
- "normalized": false,
121
- "rstrip": false,
122
- "single_word": false,
123
- "special": false
124
- },
125
- "151658": {
126
- "content": "</tool_call>",
127
- "lstrip": false,
128
- "normalized": false,
129
- "rstrip": false,
130
- "single_word": false,
131
- "special": false
132
- },
133
- "151659": {
134
- "content": "<|fim_prefix|>",
135
- "lstrip": false,
136
- "normalized": false,
137
- "rstrip": false,
138
- "single_word": false,
139
- "special": false
140
- },
141
- "151660": {
142
- "content": "<|fim_middle|>",
143
- "lstrip": false,
144
- "normalized": false,
145
- "rstrip": false,
146
- "single_word": false,
147
- "special": false
148
- },
149
- "151661": {
150
- "content": "<|fim_suffix|>",
151
- "lstrip": false,
152
- "normalized": false,
153
- "rstrip": false,
154
- "single_word": false,
155
- "special": false
156
- },
157
- "151662": {
158
- "content": "<|fim_pad|>",
159
- "lstrip": false,
160
- "normalized": false,
161
- "rstrip": false,
162
- "single_word": false,
163
- "special": false
164
- },
165
- "151663": {
166
- "content": "<|repo_name|>",
167
- "lstrip": false,
168
- "normalized": false,
169
- "rstrip": false,
170
- "single_word": false,
171
- "special": false
172
- },
173
- "151664": {
174
- "content": "<|file_sep|>",
175
- "lstrip": false,
176
- "normalized": false,
177
- "rstrip": false,
178
- "single_word": false,
179
- "special": false
180
  }
181
  },
182
  "additional_special_tokens": [
183
  "<|im_start|>",
184
- "<|im_end|>",
185
- "<|object_ref_start|>",
186
- "<|object_ref_end|>",
187
- "<|box_start|>",
188
- "<|box_end|>",
189
- "<|quad_start|>",
190
- "<|quad_end|>",
191
- "<|vision_start|>",
192
- "<|vision_end|>",
193
- "<|vision_pad|>",
194
- "<|image_pad|>",
195
- "<|video_pad|>"
196
  ],
197
  "bos_token": null,
198
  "clean_up_tokenization_spaces": false,
199
  "eos_token": "<|im_end|>",
200
  "errors": "replace",
201
  "extra_special_tokens": {},
202
- "model_max_length": 32768,
203
- "pad_token": "<|vision_pad|>",
204
- "padding_side": "right",
205
  "split_special_tokens": false,
206
  "tokenizer_class": "Qwen2Tokenizer",
207
  "unk_token": null
 
1
  {
 
2
  "add_prefix_space": false,
3
  "added_tokens_decoder": {
4
  "151643": {
 
24
  "rstrip": false,
25
  "single_word": false,
26
  "special": true
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
27
  }
28
  },
29
  "additional_special_tokens": [
30
  "<|im_start|>",
31
+ "<|im_end|>"
 
 
 
 
 
 
 
 
 
 
 
32
  ],
33
  "bos_token": null,
34
  "clean_up_tokenization_spaces": false,
35
  "eos_token": "<|im_end|>",
36
  "errors": "replace",
37
  "extra_special_tokens": {},
38
+ "model_max_length": 131072,
39
+ "pad_token": "<|endoftext|>",
 
40
  "split_special_tokens": false,
41
  "tokenizer_class": "Qwen2Tokenizer",
42
  "unk_token": null
update_prompt.py ADDED
@@ -0,0 +1,31 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from transformers import AutoTokenizer
2
+
3
+ # Load the tokenizer from the Hub
4
+ model_id = "Qwen/Qwen2-7B-Instruct"
5
+ print(f"Loading tokenizer for '{model_id}' from the Hub...")
6
+ tokenizer = AutoTokenizer.from_pretrained(model_id)
7
+
8
+ # This is the key change: we're targeting the cloned repo for saving
9
+ model_dir = "/workspace/socratic-tutor-qwen2.5"
10
+
11
+ socratic_system_prompt = (
12
+ "You are Socrates, a wise and patient tutor. Your goal is not to give answers, "
13
+ "but to guide the user to their own understanding through a series of thoughtful questions. "
14
+ "Respond to the user's statements by asking probing questions that challenge their assumptions, "
15
+ "clarify their thinking, and lead them toward the correct conclusion. "
16
+ "Never provide a direct answer unless explicitly asked."
17
+ )
18
+
19
+ new_chat_template = (
20
+ "{% for message in messages %}"
21
+ "{% if loop.first and message['role'] != 'system' %}"
22
+ "{{ '<|im_start|>system\\n' + '''" + socratic_system_prompt + "''' + '<|im_end|>\\n' }}"
23
+ "{% endif %}"
24
+ "{{ '<|im_start|>' + message['role'] + '\\n' + message['content'] + '<|im_end|>\\n' }}"
25
+ "{% endfor %}"
26
+ "{% if add_generation_prompt %}{{ '<|im_start|>assistant\\n' }}{% endif %}"
27
+ )
28
+
29
+ tokenizer.chat_template = new_chat_template
30
+ tokenizer.save_pretrained(model_dir)
31
+ print(f"✅ Tokenizer in '{model_dir}' updated with the Socratic prompt.")