(Trained with Unsloth)
Browse files
- config.json +1 -1
- tokenizer_config.json +1 -1
config.json
CHANGED
|
@@ -24,7 +24,7 @@
|
|
| 24 |
"rope_scaling": null,
|
| 25 |
"rope_theta": 250000,
|
| 26 |
"tie_word_embeddings": false,
|
| 27 |
-
"torch_dtype": "
|
| 28 |
"transformers_version": "4.51.3",
|
| 29 |
"unsloth_fixed": true,
|
| 30 |
"unsloth_version": "2025.6.1",
|
|
|
|
| 24 |
"rope_scaling": null,
|
| 25 |
"rope_theta": 250000,
|
| 26 |
"tie_word_embeddings": false,
|
| 27 |
+
"torch_dtype": "bfloat16",
|
| 28 |
"transformers_version": "4.51.3",
|
| 29 |
"unsloth_fixed": true,
|
| 30 |
"unsloth_version": "2025.6.1",
|
tokenizer_config.json
CHANGED
|
@@ -779,7 +779,7 @@
|
|
| 779 |
}
|
| 780 |
},
|
| 781 |
"bos_token": "<|endoftext|>",
|
| 782 |
-
"chat_template": "{%
|
| 783 |
"clean_up_tokenization_spaces": false,
|
| 784 |
"eos_token": "<|im_end|>",
|
| 785 |
"extra_special_tokens": {},
|
|
|
|
| 779 |
}
|
| 780 |
},
|
| 781 |
"bos_token": "<|endoftext|>",
|
| 782 |
+
"chat_template": "{% if messages[0]['role'] == 'system' %}{{ messages[0]['content'] + eos_token }}{% set loop_messages = messages[1:] %}{% else %}{{ 'You are given a problem.\nThink about the problem and provide your working out.\nPlace it between <start_working_out> and <end_working_out>.\nThen, provide your solution between <SOLUTION></SOLUTION>' + eos_token }}{% set loop_messages = messages %}{% endif %}{% for message in loop_messages %}{% if message['role'] == 'user' %}{{ message['content'] }}{% elif message['role'] == 'assistant' %}{{ message['content'] + eos_token }}{% endif %}{% endfor %}{% if add_generation_prompt %}{{ '<start_working_out>' }}{% endif %}",
|
| 783 |
"clean_up_tokenization_spaces": false,
|
| 784 |
"eos_token": "<|im_end|>",
|
| 785 |
"extra_special_tokens": {},
|