Upload model files
Browse files- README.md +127 -0
- added_tokens.json +56 -0
- chat_template.jinja +159 -0
- config.json +614 -0
- generation_config.json +7 -0
- merges.txt +0 -0
README.md
ADDED
|
@@ -0,0 +1,127 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
---
|
| 2 |
+
license: mit
|
| 3 |
+
library_name: transformers
|
| 4 |
+
base_model:
|
| 5 |
+
- deepseek-ai/DeepSeek-V3.2-Exp-Base
|
| 6 |
+
base_model_relation: finetune
|
| 7 |
+
---
|
| 8 |
+
# DeepSeek-V3.2: Efficient Reasoning & Agentic AI
|
| 9 |
+
|
| 10 |
+
<!-- markdownlint-disable first-line-h1 -->
|
| 11 |
+
<!-- markdownlint-disable html -->
|
| 12 |
+
<!-- markdownlint-disable no-duplicate-header -->
|
| 13 |
+
|
| 14 |
+
<div align="center">
|
| 15 |
+
<img src="https://github.com/deepseek-ai/DeepSeek-V2/blob/main/figures/logo.svg?raw=true" width="60%" alt="DeepSeek-V3" />
|
| 16 |
+
</div>
|
| 17 |
+
<hr>
|
| 18 |
+
<div align="center" style="line-height: 1;">
|
| 19 |
+
<a href="https://www.deepseek.com/" target="_blank" style="margin: 2px;">
|
| 20 |
+
<img alt="Homepage" src="https://github.com/deepseek-ai/DeepSeek-V2/blob/main/figures/badge.svg?raw=true" style="display: inline-block; vertical-align: middle;"/>
|
| 21 |
+
</a>
|
| 22 |
+
<a href="https://chat.deepseek.com/" target="_blank" style="margin: 2px;">
|
| 23 |
+
<img alt="Chat" src="https://img.shields.io/badge/🤖%20Chat-DeepSeek%20V3-536af5?color=536af5&logoColor=white" style="display: inline-block; vertical-align: middle;"/>
|
| 24 |
+
</a>
|
| 25 |
+
<a href="https://huggingface.co/deepseek-ai" target="_blank" style="margin: 2px;">
|
| 26 |
+
<img alt="Hugging Face" src="https://img.shields.io/badge/%F0%9F%A4%97%20Hugging%20Face-DeepSeek%20AI-ffc107?color=ffc107&logoColor=white" style="display: inline-block; vertical-align: middle;"/>
|
| 27 |
+
</a>
|
| 28 |
+
</div>
|
| 29 |
+
<div align="center" style="line-height: 1;">
|
| 30 |
+
<a href="https://discord.gg/Tc7c45Zzu5" target="_blank" style="margin: 2px;">
|
| 31 |
+
<img alt="Discord" src="https://img.shields.io/badge/Discord-DeepSeek%20AI-7289da?logo=discord&logoColor=white&color=7289da" style="display: inline-block; vertical-align: middle;"/>
|
| 32 |
+
</a>
|
| 33 |
+
<a href="https://github.com/deepseek-ai/DeepSeek-V2/blob/main/figures/qr.jpeg?raw=true" target="_blank" style="margin: 2px;">
|
| 34 |
+
<img alt="Wechat" src="https://img.shields.io/badge/WeChat-DeepSeek%20AI-brightgreen?logo=wechat&logoColor=white" style="display: inline-block; vertical-align: middle;"/>
|
| 35 |
+
</a>
|
| 36 |
+
<a href="https://twitter.com/deepseek_ai" target="_blank" style="margin: 2px;">
|
| 37 |
+
<img alt="Twitter Follow" src="https://img.shields.io/badge/Twitter-deepseek_ai-white?logo=x&logoColor=white" style="display: inline-block; vertical-align: middle;"/>
|
| 38 |
+
</a>
|
| 39 |
+
</div>
|
| 40 |
+
<div align="center" style="line-height: 1;">
|
| 41 |
+
<a href="LICENSE" style="margin: 2px;">
|
| 42 |
+
<img alt="License" src="https://img.shields.io/badge/License-MIT-f5de53?&color=f5de53" style="display: inline-block; vertical-align: middle;"/>
|
| 43 |
+
</a>
|
| 44 |
+
</div>
|
| 45 |
+
|
| 46 |
+
<p align="center">
|
| 47 |
+
<a href="https://huggingface.co/deepseek-ai/DeepSeek-V3.2/blob/main/assets/paper.pdf"><b>Technical Report</b>👁️</a>
|
| 48 |
+
</p>
|
| 49 |
+
|
| 50 |
+
## Introduction
|
| 51 |
+
|
| 52 |
+
We introduce **DeepSeek-V3.2**, a model that harmonizes high computational efficiency with superior reasoning and agent performance. Our approach is built upon three key technical breakthroughs:
|
| 53 |
+
|
| 54 |
+
1. **DeepSeek Sparse Attention (DSA):** We introduce DSA, an efficient attention mechanism that substantially reduces computational complexity while preserving model performance, specifically optimized for long-context scenarios.
|
| 55 |
+
2. **Scalable Reinforcement Learning Framework:** By implementing a robust RL protocol and scaling post-training compute, *DeepSeek-V3.2* performs comparably to GPT-5. Notably, our high-compute variant, **DeepSeek-V3.2-Speciale**, **surpasses GPT-5** and exhibits reasoning proficiency on par with Gemini-3.0-Pro.
|
| 56 |
+
- *Achievement:* 🥇 **Gold-medal performance** in the 2025 International Mathematical Olympiad (IMO) and International Olympiad in Informatics (IOI).
|
| 57 |
+
3. **Large-Scale Agentic Task Synthesis Pipeline:** To integrate **reasoning into tool-use** scenarios, we developed a novel synthesis pipeline that systematically generates training data at scale. This facilitates scalable agentic post-training, improving compliance and generalization in complex interactive environments.
|
| 58 |
+
|
| 59 |
+
<div align="center">
|
| 60 |
+
<img src="assets/benchmark.png" >
|
| 61 |
+
</div>
|
| 62 |
+
|
| 63 |
+
We have also released the final submissions for IOI 2025, ICPC World Finals, IMO 2025 and CMO 2025, which were selected based on our designed pipeline. These materials are provided for the community to conduct secondary verification. The files can be accessed at `assets/olympiad_cases`.
|
| 64 |
+
|
| 65 |
+
## Chat Template
|
| 66 |
+
|
| 67 |
+
DeepSeek-V3.2 introduces significant updates to its chat template compared to prior versions. The primary changes involve a revised format for tool calling and the introduction of a "thinking with tools" capability.
|
| 68 |
+
|
| 69 |
+
To assist the community in understanding and adapting to this new template, we have provided a dedicated `encoding` folder, which contains Python scripts and test cases demonstrating how to encode messages in OpenAI-compatible format into input strings for the model and how to parse the model's text output.
|
| 70 |
+
|
| 71 |
+
A brief example is illustrated below:
|
| 72 |
+
|
| 73 |
+
```python
|
| 74 |
+
import transformers
|
| 75 |
+
# encoding/encoding_dsv32.py
|
| 76 |
+
from encoding_dsv32 import encode_messages, parse_message_from_completion_text
|
| 77 |
+
|
| 78 |
+
tokenizer = transformers.AutoTokenizer.from_pretrained("deepseek-ai/DeepSeek-V3.2")
|
| 79 |
+
|
| 80 |
+
messages = [
|
| 81 |
+
{"role": "user", "content": "hello"},
|
| 82 |
+
{"role": "assistant", "content": "Hello! I am DeepSeek.", "reasoning_content": "thinking..."},
|
| 83 |
+
{"role": "user", "content": "1+1=?"}
|
| 84 |
+
]
|
| 85 |
+
encode_config = dict(thinking_mode="thinking", drop_thinking=True, add_default_bos_token=True)
|
| 86 |
+
|
| 87 |
+
# messages -> string
|
| 88 |
+
prompt = encode_messages(messages, **encode_config)
|
| 89 |
+
# Output: "<|begin▁of▁sentence|><|User|>hello<|Assistant|></think>Hello! I am DeepSeek.<|end▁of▁sentence|><|User|>1+1=?<|Assistant|><think>"
|
| 90 |
+
|
| 91 |
+
# string -> tokens
|
| 92 |
+
tokens = tokenizer.encode(prompt)
|
| 93 |
+
# Output: [0, 128803, 33310, 128804, 128799, 19923, 3, 342, 1030, 22651, 4374, 1465, 16, 1, 128803, 19, 13, 19, 127252, 128804, 128798]
|
| 94 |
+
```
|
| 95 |
+
|
| 96 |
+
Important Notes:
|
| 97 |
+
|
| 98 |
+
1. This release does not include a Jinja-format chat template. Please refer to the Python code mentioned above.
|
| 99 |
+
2. The output parsing function included in the code is designed to handle well-formatted strings only. It does not attempt to correct or recover from malformed output that the model might occasionally generate. It is not suitable for production use without robust error handling.
|
| 100 |
+
3. A new role named `developer` has been introduced in the chat template. This role is dedicated exclusively to search agent scenarios and is designated for no other tasks. The official API does not accept messages assigned to `developer`.
|
| 101 |
+
|
| 102 |
+
## How to Run Locally
|
| 103 |
+
|
| 104 |
+
The model structure of DeepSeek-V3.2 and DeepSeek-V3.2-Speciale is the same as that of DeepSeek-V3.2-Exp. Please visit the [DeepSeek-V3.2-Exp](https://github.com/deepseek-ai/DeepSeek-V3.2-Exp) repo for more information about running this model locally.
|
| 105 |
+
|
| 106 |
+
Usage Recommendations:
|
| 107 |
+
|
| 108 |
+
1. For local deployment, we recommend setting the sampling parameters to `temperature = 1.0, top_p = 0.95`.
|
| 109 |
+
2. Please note that the DeepSeek-V3.2-Speciale variant is designed exclusively for deep reasoning tasks and does not support the tool-calling functionality.
|
| 110 |
+
|
| 111 |
+
## License
|
| 112 |
+
|
| 113 |
+
This repository and the model weights are licensed under the [MIT License](LICENSE).
|
| 114 |
+
|
| 115 |
+
## Citation
|
| 116 |
+
|
| 117 |
+
```
|
| 118 |
+
@misc{deepseekai2025deepseekv32,
|
| 119 |
+
title={DeepSeek-V3.2: Pushing the Frontier of Open Large Language Models},
|
| 120 |
+
author={DeepSeek-AI},
|
| 121 |
+
year={2025},
|
| 122 |
+
}
|
| 123 |
+
```
|
| 124 |
+
|
| 125 |
+
## Contact
|
| 126 |
+
|
| 127 |
+
If you have any questions, please raise an issue or contact us at [service@deepseek.com](mailto:service@deepseek.com).
|
added_tokens.json
ADDED
|
@@ -0,0 +1,56 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"</minimax:tool_call>": 200053,
|
| 3 |
+
"</think>": 200051,
|
| 4 |
+
"<add_file>": 200036,
|
| 5 |
+
"<code_context>": 200043,
|
| 6 |
+
"<code_interpreter>": 200023,
|
| 7 |
+
"<commit_after>": 200018,
|
| 8 |
+
"<commit_before>": 200016,
|
| 9 |
+
"<commit_message>": 200040,
|
| 10 |
+
"<commit_msg>": 200017,
|
| 11 |
+
"<delete_file>": 200037,
|
| 12 |
+
"<edit_file>": 200039,
|
| 13 |
+
"<empty_output>": 200015,
|
| 14 |
+
"<empty_source_file>": 200041,
|
| 15 |
+
"<file_content>": 200044,
|
| 16 |
+
"<file_sep>": 200049,
|
| 17 |
+
"<filename>": 200006,
|
| 18 |
+
"<filepath>": 200048,
|
| 19 |
+
"<fim_middle>": 200002,
|
| 20 |
+
"<fim_pad>": 200004,
|
| 21 |
+
"<fim_prefix>": 200001,
|
| 22 |
+
"<fim_suffix>": 200003,
|
| 23 |
+
"<function_call>": 200022,
|
| 24 |
+
"<gh_stars>": 200007,
|
| 25 |
+
"<issue_closed>": 200010,
|
| 26 |
+
"<issue_comment>": 200009,
|
| 27 |
+
"<issue_start>": 200008,
|
| 28 |
+
"<jupyter_code>": 200013,
|
| 29 |
+
"<jupyter_error>": 200035,
|
| 30 |
+
"<jupyter_output>": 200014,
|
| 31 |
+
"<jupyter_start>": 200011,
|
| 32 |
+
"<jupyter_text>": 200012,
|
| 33 |
+
"<minimax:tool_call>": 200052,
|
| 34 |
+
"<pr_start>": 200046,
|
| 35 |
+
"<rename_file>": 200038,
|
| 36 |
+
"<repo_struct>": 200042,
|
| 37 |
+
"<reponame>": 200005,
|
| 38 |
+
"<review_comment>": 200047,
|
| 39 |
+
"<source_files>": 200045,
|
| 40 |
+
"<think>": 200050,
|
| 41 |
+
"[e~[": 200020,
|
| 42 |
+
"]!d~[": 200021,
|
| 43 |
+
"]!p~[": 200000,
|
| 44 |
+
"]<]end of image[>[": 200030,
|
| 45 |
+
"]<]end of speech[>[": 200028,
|
| 46 |
+
"]<]end of video[>[": 200032,
|
| 47 |
+
"]<]image[>[": 200025,
|
| 48 |
+
"]<]speech[>[": 200024,
|
| 49 |
+
"]<]start of image[>[": 200029,
|
| 50 |
+
"]<]start of speech[>[": 200027,
|
| 51 |
+
"]<]start of video[>[": 200031,
|
| 52 |
+
"]<]video[>[": 200026,
|
| 53 |
+
"]<]vision pad[>[": 200033,
|
| 54 |
+
"]~!b[": 200034,
|
| 55 |
+
"]~b]": 200019
|
| 56 |
+
}
|
chat_template.jinja
ADDED
|
@@ -0,0 +1,159 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{# ------------- special token variables ------------- #}
|
| 2 |
+
{%- set toolcall_begin_token = '<minimax:tool_call>' -%}
|
| 3 |
+
{%- set toolcall_end_token = '</minimax:tool_call>' -%}
|
| 4 |
+
{#- Tool Rendering Functions ============================================== -#}
|
| 5 |
+
{%- macro render_tool_namespace(namespace_name, tool_list) -%}
|
| 6 |
+
{%- for tool in tool_list -%}
|
| 7 |
+
<tool>{{ tool.function | tojson(ensure_ascii=False) }}</tool>
|
| 8 |
+
{% endfor -%}
|
| 9 |
+
{%- endmacro -%}
|
| 10 |
+
{%- macro visible_text(content) -%}
|
| 11 |
+
{%- if content is string -%}
|
| 12 |
+
{{ content }}
|
| 13 |
+
{%- elif content is iterable and content is not mapping -%}
|
| 14 |
+
{%- for item in content -%}
|
| 15 |
+
{%- if item is mapping and item.type == 'text' -%}
|
| 16 |
+
{{- item.text }}
|
| 17 |
+
{%- elif item is string -%}
|
| 18 |
+
{{- item }}
|
| 19 |
+
{%- endif -%}
|
| 20 |
+
{%- endfor -%}
|
| 21 |
+
{%- else -%}
|
| 22 |
+
{{- content }}
|
| 23 |
+
{%- endif -%}
|
| 24 |
+
{%- endmacro -%}
|
| 25 |
+
{#- System Message Construction ============================================ -#}
|
| 26 |
+
{%- macro build_system_message(system_message) -%}
|
| 27 |
+
{%- if system_message and system_message.content -%}
|
| 28 |
+
{{- visible_text(system_message.content) }}
|
| 29 |
+
{%- else -%}
|
| 30 |
+
{%- if model_identity is not defined -%}
|
| 31 |
+
{%- set model_identity = "You are a helpful assistant." -%}
|
| 32 |
+
{%- endif -%}
|
| 33 |
+
{{- model_identity }}
|
| 34 |
+
{%- endif -%}
|
| 35 |
+
|
| 36 |
+
{#- Handle current_date -#}
|
| 37 |
+
{%- if system_message and system_message.current_date -%}
|
| 38 |
+
{{- '\n' ~ 'Current date: ' + system_message.current_date }}
|
| 39 |
+
{%- endif -%}
|
| 40 |
+
{#- Handle current_location -#}
|
| 41 |
+
{%- if system_message and system_message.current_location -%}
|
| 42 |
+
{{- '\n' ~ 'Current location: ' + system_message.current_location }}
|
| 43 |
+
{%- endif -%}
|
| 44 |
+
{%- endmacro -%}
|
| 45 |
+
{#- Main Template Logic ================================================= -#}
|
| 46 |
+
{#- Extract system message (only first message if it's system) -#}
|
| 47 |
+
{%- set system_message = none -%}
|
| 48 |
+
{%- set conversation_messages = messages -%}
|
| 49 |
+
{%- if messages and messages[0].role == "system" -%}
|
| 50 |
+
{%- set system_message = messages[0] -%}
|
| 51 |
+
{%- set conversation_messages = messages[1:] -%}
|
| 52 |
+
{%- endif -%}
|
| 53 |
+
{#- Get the last user message turn, for interleaved thinking -#}
|
| 54 |
+
{%- set ns = namespace(last_user_index=-1) %}
|
| 55 |
+
{% for m in conversation_messages %}
|
| 56 |
+
{%- if m.role == 'user' %}
|
| 57 |
+
{% set ns.last_user_index = loop.index0 -%}
|
| 58 |
+
{%- endif %}
|
| 59 |
+
{%- endfor %}
|
| 60 |
+
{#- Render system message -#}
|
| 61 |
+
{{- ']~!b[' ~ ']~b]system' ~ '\n' }}
|
| 62 |
+
{{- build_system_message(system_message) }}
|
| 63 |
+
{#- Render tools if available -#}
|
| 64 |
+
{%- if tools -%}
|
| 65 |
+
{{- '\n\n' ~ '# Tools' ~ '\n' ~ 'You may call one or more tools to assist with the user query.\nHere are the tools available in JSONSchema format:' ~ '\n' }}
|
| 66 |
+
{{- '\n' ~ '<tools>' ~ '\n' }}
|
| 67 |
+
{{- render_tool_namespace("functions", tools) }}
|
| 68 |
+
{{- '</tools>' ~ '\n\n' }}
|
| 69 |
+
{{- 'When making tool calls, use XML format to invoke tools and pass parameters:' ~ '\n' }}
|
| 70 |
+
{{- '\n' ~ toolcall_begin_token }}
|
| 71 |
+
<invoke name="tool-name-1">
|
| 72 |
+
<parameter name="param-key-1">param-value-1</parameter>
|
| 73 |
+
<parameter name="param-key-2">param-value-2</parameter>
|
| 74 |
+
...
|
| 75 |
+
</invoke>
|
| 76 |
+
{{- '\n' ~ toolcall_end_token }}
|
| 77 |
+
{%- endif -%}
|
| 78 |
+
{{- '[e~[\n' }}
|
| 79 |
+
|
| 80 |
+
{#- Render messages -#}
|
| 81 |
+
{%- set last_tool_call = namespace(name=none) -%}
|
| 82 |
+
{%- for message in conversation_messages -%}
|
| 83 |
+
{%- if message.role == 'assistant' -%}
|
| 84 |
+
{#- Only render reasoning_content if no user message follows -#}
|
| 85 |
+
{{- ']~b]ai' ~ '\n' }}
|
| 86 |
+
|
| 87 |
+
{%- set reasoning_content = '' %}
|
| 88 |
+
{%- set content = visible_text(message.content) %}
|
| 89 |
+
{%- if message.reasoning_content is string %}
|
| 90 |
+
{%- set reasoning_content = message.reasoning_content %}
|
| 91 |
+
{%- else %}
|
| 92 |
+
{%- if '</think>' in content %}
|
| 93 |
+
{%- set reasoning_content = content.split('</think>')[0].strip('\n').split('<think>')[-1].strip('\n') %}
|
| 94 |
+
{%- set content = content.split('</think>')[-1].strip('\n') %}
|
| 95 |
+
{%- endif %}
|
| 96 |
+
{%- endif %}
|
| 97 |
+
{%- if reasoning_content and loop.index0 > ns.last_user_index -%}
|
| 98 |
+
{{- '<think>' ~ '\n' ~ reasoning_content ~ '\n' ~ '</think>' ~ '\n\n' }}
|
| 99 |
+
{%- endif -%}
|
| 100 |
+
{%- if content -%}
|
| 101 |
+
{{- content }}
|
| 102 |
+
{%- endif -%}
|
| 103 |
+
{%- if message.tool_calls -%}
|
| 104 |
+
{{- '\n' ~ toolcall_begin_token ~ '\n' }}
|
| 105 |
+
|
| 106 |
+
{%- for tool_call in message.tool_calls -%}
|
| 107 |
+
{%- if tool_call.function %}
|
| 108 |
+
{%- set tool_call = tool_call.function %}
|
| 109 |
+
{%- endif %}
|
| 110 |
+
{{- '<invoke name="' + tool_call.name + '">' }}
|
| 111 |
+
{% set _args = tool_call.arguments %}
|
| 112 |
+
{%- for k, v in _args.items() %}
|
| 113 |
+
{{- '<parameter name="' + k + '">' }}
|
| 114 |
+
{{- v | tojson(ensure_ascii=False) if v is not string else v }}
|
| 115 |
+
{{- '</parameter>' }}
|
| 116 |
+
{% endfor %}
|
| 117 |
+
{{- '</invoke>' ~ '\n' }}
|
| 118 |
+
{%- endfor -%}
|
| 119 |
+
|
| 120 |
+
{{- toolcall_end_token}}
|
| 121 |
+
{%- set last_tool_call.name = message.tool_calls[-1].name -%}
|
| 122 |
+
{%- else -%}
|
| 123 |
+
{%- set last_tool_call.name = none -%}
|
| 124 |
+
{%- endif -%}
|
| 125 |
+
{{- '[e~[' ~ '\n' }}
|
| 126 |
+
|
| 127 |
+
{%- elif message.role == 'tool' -%}
|
| 128 |
+
{%- if last_tool_call.name is none -%}
|
| 129 |
+
{{- raise_exception("Message has tool role, but there was no previous assistant message with a tool call!") }}
|
| 130 |
+
{%- endif -%}
|
| 131 |
+
{%- if loop.first or (conversation_messages[loop.index0 - 1].role != 'tool') -%}
|
| 132 |
+
{{- ']~b]tool' }}
|
| 133 |
+
{%- endif -%}
|
| 134 |
+
{%- if message.content is string -%}
|
| 135 |
+
{{- '\n<response>' }}
|
| 136 |
+
{{- message.content }}
|
| 137 |
+
{{- '</response>' }}
|
| 138 |
+
{%- else -%}
|
| 139 |
+
{%- for tr in message.content -%}
|
| 140 |
+
{{- '\n<response>' }}
|
| 141 |
+
{{- tr.output if tr.output is defined else (tr.text if tr.type == 'text' and tr.text is defined else tr) }}
|
| 142 |
+
{{- '\n</response>' }}
|
| 143 |
+
{%- endfor -%}
|
| 144 |
+
{%- endif -%}
|
| 145 |
+
{%- if loop.last or (conversation_messages[loop.index0 + 1].role != 'tool') -%}
|
| 146 |
+
{{- '[e~[\n' -}}
|
| 147 |
+
{%- endif -%}
|
| 148 |
+
|
| 149 |
+
{%- elif message.role == 'user' -%}
|
| 150 |
+
{{- ']~b]user' ~ '\n' }}
|
| 151 |
+
{{- visible_text(message.content) }}
|
| 152 |
+
{{- '[e~[' ~ '\n' }}
|
| 153 |
+
{%- endif -%}
|
| 154 |
+
{%- endfor -%}
|
| 155 |
+
|
| 156 |
+
{#- Generation prompt -#}
|
| 157 |
+
{%- if add_generation_prompt -%}
|
| 158 |
+
{{- ']~b]ai' ~ '\n' ~ '<think>' ~ '\n' }}
|
| 159 |
+
{%- endif -%}
|
config.json
ADDED
|
@@ -0,0 +1,614 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"architectures": [
|
| 3 |
+
"MiniMaxM2ForCausalLM"
|
| 4 |
+
],
|
| 5 |
+
"attention_dropout": 0.0,
|
| 6 |
+
"attn_type_list": [
|
| 7 |
+
1,
|
| 8 |
+
1,
|
| 9 |
+
1,
|
| 10 |
+
1,
|
| 11 |
+
1,
|
| 12 |
+
1,
|
| 13 |
+
1,
|
| 14 |
+
1,
|
| 15 |
+
1,
|
| 16 |
+
1,
|
| 17 |
+
1,
|
| 18 |
+
1,
|
| 19 |
+
1,
|
| 20 |
+
1,
|
| 21 |
+
1,
|
| 22 |
+
1,
|
| 23 |
+
1,
|
| 24 |
+
1,
|
| 25 |
+
1,
|
| 26 |
+
1,
|
| 27 |
+
1,
|
| 28 |
+
1,
|
| 29 |
+
1,
|
| 30 |
+
1,
|
| 31 |
+
1,
|
| 32 |
+
1,
|
| 33 |
+
1,
|
| 34 |
+
1,
|
| 35 |
+
1,
|
| 36 |
+
1,
|
| 37 |
+
1,
|
| 38 |
+
1,
|
| 39 |
+
1,
|
| 40 |
+
1,
|
| 41 |
+
1,
|
| 42 |
+
1,
|
| 43 |
+
1,
|
| 44 |
+
1,
|
| 45 |
+
1,
|
| 46 |
+
1,
|
| 47 |
+
1,
|
| 48 |
+
1,
|
| 49 |
+
1,
|
| 50 |
+
1,
|
| 51 |
+
1,
|
| 52 |
+
1,
|
| 53 |
+
1,
|
| 54 |
+
1,
|
| 55 |
+
1,
|
| 56 |
+
1,
|
| 57 |
+
1,
|
| 58 |
+
1,
|
| 59 |
+
1,
|
| 60 |
+
1,
|
| 61 |
+
1,
|
| 62 |
+
1,
|
| 63 |
+
1,
|
| 64 |
+
1,
|
| 65 |
+
1,
|
| 66 |
+
1,
|
| 67 |
+
1,
|
| 68 |
+
1
|
| 69 |
+
],
|
| 70 |
+
"bos_token_id": null,
|
| 71 |
+
"eos_token_id": null,
|
| 72 |
+
"head_dim": 128,
|
| 73 |
+
"hidden_act": "silu",
|
| 74 |
+
"hidden_size": 3072,
|
| 75 |
+
"initializer_range": 0.02,
|
| 76 |
+
"intermediate_size": 1536,
|
| 77 |
+
"layernorm_full_attention_beta": 1.0,
|
| 78 |
+
"layernorm_linear_attention_beta": 1.0,
|
| 79 |
+
"layernorm_mlp_beta": 1.0,
|
| 80 |
+
"max_position_embeddings": 196608,
|
| 81 |
+
"mlp_intermediate_size": 8192,
|
| 82 |
+
"model_type": "minimax",
|
| 83 |
+
"mtp_transformer_layers": 1,
|
| 84 |
+
"num_attention_heads": 48,
|
| 85 |
+
"num_experts_per_tok": 8,
|
| 86 |
+
"num_hidden_layers": 62,
|
| 87 |
+
"num_key_value_heads": 8,
|
| 88 |
+
"num_local_experts": 256,
|
| 89 |
+
"num_mtp_modules": 3,
|
| 90 |
+
"output_router_logits": false,
|
| 91 |
+
"qk_norm_type": "per_layer",
|
| 92 |
+
"quantization": {
|
| 93 |
+
"group_size": 64,
|
| 94 |
+
"bits": 5,
|
| 95 |
+
"mode": "affine",
|
| 96 |
+
"model.layers.0.block_sparse_moe.gate": {
|
| 97 |
+
"group_size": 64,
|
| 98 |
+
"bits": 8
|
| 99 |
+
},
|
| 100 |
+
"model.layers.1.block_sparse_moe.gate": {
|
| 101 |
+
"group_size": 64,
|
| 102 |
+
"bits": 8
|
| 103 |
+
},
|
| 104 |
+
"model.layers.2.block_sparse_moe.gate": {
|
| 105 |
+
"group_size": 64,
|
| 106 |
+
"bits": 8
|
| 107 |
+
},
|
| 108 |
+
"model.layers.3.block_sparse_moe.gate": {
|
| 109 |
+
"group_size": 64,
|
| 110 |
+
"bits": 8
|
| 111 |
+
},
|
| 112 |
+
"model.layers.4.block_sparse_moe.gate": {
|
| 113 |
+
"group_size": 64,
|
| 114 |
+
"bits": 8
|
| 115 |
+
},
|
| 116 |
+
"model.layers.5.block_sparse_moe.gate": {
|
| 117 |
+
"group_size": 64,
|
| 118 |
+
"bits": 8
|
| 119 |
+
},
|
| 120 |
+
"model.layers.6.block_sparse_moe.gate": {
|
| 121 |
+
"group_size": 64,
|
| 122 |
+
"bits": 8
|
| 123 |
+
},
|
| 124 |
+
"model.layers.7.block_sparse_moe.gate": {
|
| 125 |
+
"group_size": 64,
|
| 126 |
+
"bits": 8
|
| 127 |
+
},
|
| 128 |
+
"model.layers.8.block_sparse_moe.gate": {
|
| 129 |
+
"group_size": 64,
|
| 130 |
+
"bits": 8
|
| 131 |
+
},
|
| 132 |
+
"model.layers.9.block_sparse_moe.gate": {
|
| 133 |
+
"group_size": 64,
|
| 134 |
+
"bits": 8
|
| 135 |
+
},
|
| 136 |
+
"model.layers.10.block_sparse_moe.gate": {
|
| 137 |
+
"group_size": 64,
|
| 138 |
+
"bits": 8
|
| 139 |
+
},
|
| 140 |
+
"model.layers.11.block_sparse_moe.gate": {
|
| 141 |
+
"group_size": 64,
|
| 142 |
+
"bits": 8
|
| 143 |
+
},
|
| 144 |
+
"model.layers.12.block_sparse_moe.gate": {
|
| 145 |
+
"group_size": 64,
|
| 146 |
+
"bits": 8
|
| 147 |
+
},
|
| 148 |
+
"model.layers.13.block_sparse_moe.gate": {
|
| 149 |
+
"group_size": 64,
|
| 150 |
+
"bits": 8
|
| 151 |
+
},
|
| 152 |
+
"model.layers.14.block_sparse_moe.gate": {
|
| 153 |
+
"group_size": 64,
|
| 154 |
+
"bits": 8
|
| 155 |
+
},
|
| 156 |
+
"model.layers.15.block_sparse_moe.gate": {
|
| 157 |
+
"group_size": 64,
|
| 158 |
+
"bits": 8
|
| 159 |
+
},
|
| 160 |
+
"model.layers.16.block_sparse_moe.gate": {
|
| 161 |
+
"group_size": 64,
|
| 162 |
+
"bits": 8
|
| 163 |
+
},
|
| 164 |
+
"model.layers.17.block_sparse_moe.gate": {
|
| 165 |
+
"group_size": 64,
|
| 166 |
+
"bits": 8
|
| 167 |
+
},
|
| 168 |
+
"model.layers.18.block_sparse_moe.gate": {
|
| 169 |
+
"group_size": 64,
|
| 170 |
+
"bits": 8
|
| 171 |
+
},
|
| 172 |
+
"model.layers.19.block_sparse_moe.gate": {
|
| 173 |
+
"group_size": 64,
|
| 174 |
+
"bits": 8
|
| 175 |
+
},
|
| 176 |
+
"model.layers.20.block_sparse_moe.gate": {
|
| 177 |
+
"group_size": 64,
|
| 178 |
+
"bits": 8
|
| 179 |
+
},
|
| 180 |
+
"model.layers.21.block_sparse_moe.gate": {
|
| 181 |
+
"group_size": 64,
|
| 182 |
+
"bits": 8
|
| 183 |
+
},
|
| 184 |
+
"model.layers.22.block_sparse_moe.gate": {
|
| 185 |
+
"group_size": 64,
|
| 186 |
+
"bits": 8
|
| 187 |
+
},
|
| 188 |
+
"model.layers.23.block_sparse_moe.gate": {
|
| 189 |
+
"group_size": 64,
|
| 190 |
+
"bits": 8
|
| 191 |
+
},
|
| 192 |
+
"model.layers.24.block_sparse_moe.gate": {
|
| 193 |
+
"group_size": 64,
|
| 194 |
+
"bits": 8
|
| 195 |
+
},
|
| 196 |
+
"model.layers.25.block_sparse_moe.gate": {
|
| 197 |
+
"group_size": 64,
|
| 198 |
+
"bits": 8
|
| 199 |
+
},
|
| 200 |
+
"model.layers.26.block_sparse_moe.gate": {
|
| 201 |
+
"group_size": 64,
|
| 202 |
+
"bits": 8
|
| 203 |
+
},
|
| 204 |
+
"model.layers.27.block_sparse_moe.gate": {
|
| 205 |
+
"group_size": 64,
|
| 206 |
+
"bits": 8
|
| 207 |
+
},
|
| 208 |
+
"model.layers.28.block_sparse_moe.gate": {
|
| 209 |
+
"group_size": 64,
|
| 210 |
+
"bits": 8
|
| 211 |
+
},
|
| 212 |
+
"model.layers.29.block_sparse_moe.gate": {
|
| 213 |
+
"group_size": 64,
|
| 214 |
+
"bits": 8
|
| 215 |
+
},
|
| 216 |
+
"model.layers.30.block_sparse_moe.gate": {
|
| 217 |
+
"group_size": 64,
|
| 218 |
+
"bits": 8
|
| 219 |
+
},
|
| 220 |
+
"model.layers.31.block_sparse_moe.gate": {
|
| 221 |
+
"group_size": 64,
|
| 222 |
+
"bits": 8
|
| 223 |
+
},
|
| 224 |
+
"model.layers.32.block_sparse_moe.gate": {
|
| 225 |
+
"group_size": 64,
|
| 226 |
+
"bits": 8
|
| 227 |
+
},
|
| 228 |
+
"model.layers.33.block_sparse_moe.gate": {
|
| 229 |
+
"group_size": 64,
|
| 230 |
+
"bits": 8
|
| 231 |
+
},
|
| 232 |
+
"model.layers.34.block_sparse_moe.gate": {
|
| 233 |
+
"group_size": 64,
|
| 234 |
+
"bits": 8
|
| 235 |
+
},
|
| 236 |
+
"model.layers.35.block_sparse_moe.gate": {
|
| 237 |
+
"group_size": 64,
|
| 238 |
+
"bits": 8
|
| 239 |
+
},
|
| 240 |
+
"model.layers.36.block_sparse_moe.gate": {
|
| 241 |
+
"group_size": 64,
|
| 242 |
+
"bits": 8
|
| 243 |
+
},
|
| 244 |
+
"model.layers.37.block_sparse_moe.gate": {
|
| 245 |
+
"group_size": 64,
|
| 246 |
+
"bits": 8
|
| 247 |
+
},
|
| 248 |
+
"model.layers.38.block_sparse_moe.gate": {
|
| 249 |
+
"group_size": 64,
|
| 250 |
+
"bits": 8
|
| 251 |
+
},
|
| 252 |
+
"model.layers.39.block_sparse_moe.gate": {
|
| 253 |
+
"group_size": 64,
|
| 254 |
+
"bits": 8
|
| 255 |
+
},
|
| 256 |
+
"model.layers.40.block_sparse_moe.gate": {
|
| 257 |
+
"group_size": 64,
|
| 258 |
+
"bits": 8
|
| 259 |
+
},
|
| 260 |
+
"model.layers.41.block_sparse_moe.gate": {
|
| 261 |
+
"group_size": 64,
|
| 262 |
+
"bits": 8
|
| 263 |
+
},
|
| 264 |
+
"model.layers.42.block_sparse_moe.gate": {
|
| 265 |
+
"group_size": 64,
|
| 266 |
+
"bits": 8
|
| 267 |
+
},
|
| 268 |
+
"model.layers.43.block_sparse_moe.gate": {
|
| 269 |
+
"group_size": 64,
|
| 270 |
+
"bits": 8
|
| 271 |
+
},
|
| 272 |
+
"model.layers.44.block_sparse_moe.gate": {
|
| 273 |
+
"group_size": 64,
|
| 274 |
+
"bits": 8
|
| 275 |
+
},
|
| 276 |
+
"model.layers.45.block_sparse_moe.gate": {
|
| 277 |
+
"group_size": 64,
|
| 278 |
+
"bits": 8
|
| 279 |
+
},
|
| 280 |
+
"model.layers.46.block_sparse_moe.gate": {
|
| 281 |
+
"group_size": 64,
|
| 282 |
+
"bits": 8
|
| 283 |
+
},
|
| 284 |
+
"model.layers.47.block_sparse_moe.gate": {
|
| 285 |
+
"group_size": 64,
|
| 286 |
+
"bits": 8
|
| 287 |
+
},
|
| 288 |
+
"model.layers.48.block_sparse_moe.gate": {
|
| 289 |
+
"group_size": 64,
|
| 290 |
+
"bits": 8
|
| 291 |
+
},
|
| 292 |
+
"model.layers.49.block_sparse_moe.gate": {
|
| 293 |
+
"group_size": 64,
|
| 294 |
+
"bits": 8
|
| 295 |
+
},
|
| 296 |
+
"model.layers.50.block_sparse_moe.gate": {
|
| 297 |
+
"group_size": 64,
|
| 298 |
+
"bits": 8
|
| 299 |
+
},
|
| 300 |
+
"model.layers.51.block_sparse_moe.gate": {
|
| 301 |
+
"group_size": 64,
|
| 302 |
+
"bits": 8
|
| 303 |
+
},
|
| 304 |
+
"model.layers.52.block_sparse_moe.gate": {
|
| 305 |
+
"group_size": 64,
|
| 306 |
+
"bits": 8
|
| 307 |
+
},
|
| 308 |
+
"model.layers.53.block_sparse_moe.gate": {
|
| 309 |
+
"group_size": 64,
|
| 310 |
+
"bits": 8
|
| 311 |
+
},
|
| 312 |
+
"model.layers.54.block_sparse_moe.gate": {
|
| 313 |
+
"group_size": 64,
|
| 314 |
+
"bits": 8
|
| 315 |
+
},
|
| 316 |
+
"model.layers.55.block_sparse_moe.gate": {
|
| 317 |
+
"group_size": 64,
|
| 318 |
+
"bits": 8
|
| 319 |
+
},
|
| 320 |
+
"model.layers.56.block_sparse_moe.gate": {
|
| 321 |
+
"group_size": 64,
|
| 322 |
+
"bits": 8
|
| 323 |
+
},
|
| 324 |
+
"model.layers.57.block_sparse_moe.gate": {
|
| 325 |
+
"group_size": 64,
|
| 326 |
+
"bits": 8
|
| 327 |
+
},
|
| 328 |
+
"model.layers.58.block_sparse_moe.gate": {
|
| 329 |
+
"group_size": 64,
|
| 330 |
+
"bits": 8
|
| 331 |
+
},
|
| 332 |
+
"model.layers.59.block_sparse_moe.gate": {
|
| 333 |
+
"group_size": 64,
|
| 334 |
+
"bits": 8
|
| 335 |
+
},
|
| 336 |
+
"model.layers.60.block_sparse_moe.gate": {
|
| 337 |
+
"group_size": 64,
|
| 338 |
+
"bits": 8
|
| 339 |
+
},
|
| 340 |
+
"model.layers.61.block_sparse_moe.gate": {
|
| 341 |
+
"group_size": 64,
|
| 342 |
+
"bits": 8
|
| 343 |
+
}
|
| 344 |
+
},
|
| 345 |
+
"quantization_config": {
|
| 346 |
+
"group_size": 64,
|
| 347 |
+
"bits": 5,
|
| 348 |
+
"mode": "affine",
|
| 349 |
+
"model.layers.0.block_sparse_moe.gate": {
|
| 350 |
+
"group_size": 64,
|
| 351 |
+
"bits": 8
|
| 352 |
+
},
|
| 353 |
+
"model.layers.1.block_sparse_moe.gate": {
|
| 354 |
+
"group_size": 64,
|
| 355 |
+
"bits": 8
|
| 356 |
+
},
|
| 357 |
+
"model.layers.2.block_sparse_moe.gate": {
|
| 358 |
+
"group_size": 64,
|
| 359 |
+
"bits": 8
|
| 360 |
+
},
|
| 361 |
+
"model.layers.3.block_sparse_moe.gate": {
|
| 362 |
+
"group_size": 64,
|
| 363 |
+
"bits": 8
|
| 364 |
+
},
|
| 365 |
+
"model.layers.4.block_sparse_moe.gate": {
|
| 366 |
+
"group_size": 64,
|
| 367 |
+
"bits": 8
|
| 368 |
+
},
|
| 369 |
+
"model.layers.5.block_sparse_moe.gate": {
|
| 370 |
+
"group_size": 64,
|
| 371 |
+
"bits": 8
|
| 372 |
+
},
|
| 373 |
+
"model.layers.6.block_sparse_moe.gate": {
|
| 374 |
+
"group_size": 64,
|
| 375 |
+
"bits": 8
|
| 376 |
+
},
|
| 377 |
+
"model.layers.7.block_sparse_moe.gate": {
|
| 378 |
+
"group_size": 64,
|
| 379 |
+
"bits": 8
|
| 380 |
+
},
|
| 381 |
+
"model.layers.8.block_sparse_moe.gate": {
|
| 382 |
+
"group_size": 64,
|
| 383 |
+
"bits": 8
|
| 384 |
+
},
|
| 385 |
+
"model.layers.9.block_sparse_moe.gate": {
|
| 386 |
+
"group_size": 64,
|
| 387 |
+
"bits": 8
|
| 388 |
+
},
|
| 389 |
+
"model.layers.10.block_sparse_moe.gate": {
|
| 390 |
+
"group_size": 64,
|
| 391 |
+
"bits": 8
|
| 392 |
+
},
|
| 393 |
+
"model.layers.11.block_sparse_moe.gate": {
|
| 394 |
+
"group_size": 64,
|
| 395 |
+
"bits": 8
|
| 396 |
+
},
|
| 397 |
+
"model.layers.12.block_sparse_moe.gate": {
|
| 398 |
+
"group_size": 64,
|
| 399 |
+
"bits": 8
|
| 400 |
+
},
|
| 401 |
+
"model.layers.13.block_sparse_moe.gate": {
|
| 402 |
+
"group_size": 64,
|
| 403 |
+
"bits": 8
|
| 404 |
+
},
|
| 405 |
+
"model.layers.14.block_sparse_moe.gate": {
|
| 406 |
+
"group_size": 64,
|
| 407 |
+
"bits": 8
|
| 408 |
+
},
|
| 409 |
+
"model.layers.15.block_sparse_moe.gate": {
|
| 410 |
+
"group_size": 64,
|
| 411 |
+
"bits": 8
|
| 412 |
+
},
|
| 413 |
+
"model.layers.16.block_sparse_moe.gate": {
|
| 414 |
+
"group_size": 64,
|
| 415 |
+
"bits": 8
|
| 416 |
+
},
|
| 417 |
+
"model.layers.17.block_sparse_moe.gate": {
|
| 418 |
+
"group_size": 64,
|
| 419 |
+
"bits": 8
|
| 420 |
+
},
|
| 421 |
+
"model.layers.18.block_sparse_moe.gate": {
|
| 422 |
+
"group_size": 64,
|
| 423 |
+
"bits": 8
|
| 424 |
+
},
|
| 425 |
+
"model.layers.19.block_sparse_moe.gate": {
|
| 426 |
+
"group_size": 64,
|
| 427 |
+
"bits": 8
|
| 428 |
+
},
|
| 429 |
+
"model.layers.20.block_sparse_moe.gate": {
|
| 430 |
+
"group_size": 64,
|
| 431 |
+
"bits": 8
|
| 432 |
+
},
|
| 433 |
+
"model.layers.21.block_sparse_moe.gate": {
|
| 434 |
+
"group_size": 64,
|
| 435 |
+
"bits": 8
|
| 436 |
+
},
|
| 437 |
+
"model.layers.22.block_sparse_moe.gate": {
|
| 438 |
+
"group_size": 64,
|
| 439 |
+
"bits": 8
|
| 440 |
+
},
|
| 441 |
+
"model.layers.23.block_sparse_moe.gate": {
|
| 442 |
+
"group_size": 64,
|
| 443 |
+
"bits": 8
|
| 444 |
+
},
|
| 445 |
+
"model.layers.24.block_sparse_moe.gate": {
|
| 446 |
+
"group_size": 64,
|
| 447 |
+
"bits": 8
|
| 448 |
+
},
|
| 449 |
+
"model.layers.25.block_sparse_moe.gate": {
|
| 450 |
+
"group_size": 64,
|
| 451 |
+
"bits": 8
|
| 452 |
+
},
|
| 453 |
+
"model.layers.26.block_sparse_moe.gate": {
|
| 454 |
+
"group_size": 64,
|
| 455 |
+
"bits": 8
|
| 456 |
+
},
|
| 457 |
+
"model.layers.27.block_sparse_moe.gate": {
|
| 458 |
+
"group_size": 64,
|
| 459 |
+
"bits": 8
|
| 460 |
+
},
|
| 461 |
+
"model.layers.28.block_sparse_moe.gate": {
|
| 462 |
+
"group_size": 64,
|
| 463 |
+
"bits": 8
|
| 464 |
+
},
|
| 465 |
+
"model.layers.29.block_sparse_moe.gate": {
|
| 466 |
+
"group_size": 64,
|
| 467 |
+
"bits": 8
|
| 468 |
+
},
|
| 469 |
+
"model.layers.30.block_sparse_moe.gate": {
|
| 470 |
+
"group_size": 64,
|
| 471 |
+
"bits": 8
|
| 472 |
+
},
|
| 473 |
+
"model.layers.31.block_sparse_moe.gate": {
|
| 474 |
+
"group_size": 64,
|
| 475 |
+
"bits": 8
|
| 476 |
+
},
|
| 477 |
+
"model.layers.32.block_sparse_moe.gate": {
|
| 478 |
+
"group_size": 64,
|
| 479 |
+
"bits": 8
|
| 480 |
+
},
|
| 481 |
+
"model.layers.33.block_sparse_moe.gate": {
|
| 482 |
+
"group_size": 64,
|
| 483 |
+
"bits": 8
|
| 484 |
+
},
|
| 485 |
+
"model.layers.34.block_sparse_moe.gate": {
|
| 486 |
+
"group_size": 64,
|
| 487 |
+
"bits": 8
|
| 488 |
+
},
|
| 489 |
+
"model.layers.35.block_sparse_moe.gate": {
|
| 490 |
+
"group_size": 64,
|
| 491 |
+
"bits": 8
|
| 492 |
+
},
|
| 493 |
+
"model.layers.36.block_sparse_moe.gate": {
|
| 494 |
+
"group_size": 64,
|
| 495 |
+
"bits": 8
|
| 496 |
+
},
|
| 497 |
+
"model.layers.37.block_sparse_moe.gate": {
|
| 498 |
+
"group_size": 64,
|
| 499 |
+
"bits": 8
|
| 500 |
+
},
|
| 501 |
+
"model.layers.38.block_sparse_moe.gate": {
|
| 502 |
+
"group_size": 64,
|
| 503 |
+
"bits": 8
|
| 504 |
+
},
|
| 505 |
+
"model.layers.39.block_sparse_moe.gate": {
|
| 506 |
+
"group_size": 64,
|
| 507 |
+
"bits": 8
|
| 508 |
+
},
|
| 509 |
+
"model.layers.40.block_sparse_moe.gate": {
|
| 510 |
+
"group_size": 64,
|
| 511 |
+
"bits": 8
|
| 512 |
+
},
|
| 513 |
+
"model.layers.41.block_sparse_moe.gate": {
|
| 514 |
+
"group_size": 64,
|
| 515 |
+
"bits": 8
|
| 516 |
+
},
|
| 517 |
+
"model.layers.42.block_sparse_moe.gate": {
|
| 518 |
+
"group_size": 64,
|
| 519 |
+
"bits": 8
|
| 520 |
+
},
|
| 521 |
+
"model.layers.43.block_sparse_moe.gate": {
|
| 522 |
+
"group_size": 64,
|
| 523 |
+
"bits": 8
|
| 524 |
+
},
|
| 525 |
+
"model.layers.44.block_sparse_moe.gate": {
|
| 526 |
+
"group_size": 64,
|
| 527 |
+
"bits": 8
|
| 528 |
+
},
|
| 529 |
+
"model.layers.45.block_sparse_moe.gate": {
|
| 530 |
+
"group_size": 64,
|
| 531 |
+
"bits": 8
|
| 532 |
+
},
|
| 533 |
+
"model.layers.46.block_sparse_moe.gate": {
|
| 534 |
+
"group_size": 64,
|
| 535 |
+
"bits": 8
|
| 536 |
+
},
|
| 537 |
+
"model.layers.47.block_sparse_moe.gate": {
|
| 538 |
+
"group_size": 64,
|
| 539 |
+
"bits": 8
|
| 540 |
+
},
|
| 541 |
+
"model.layers.48.block_sparse_moe.gate": {
|
| 542 |
+
"group_size": 64,
|
| 543 |
+
"bits": 8
|
| 544 |
+
},
|
| 545 |
+
"model.layers.49.block_sparse_moe.gate": {
|
| 546 |
+
"group_size": 64,
|
| 547 |
+
"bits": 8
|
| 548 |
+
},
|
| 549 |
+
"model.layers.50.block_sparse_moe.gate": {
|
| 550 |
+
"group_size": 64,
|
| 551 |
+
"bits": 8
|
| 552 |
+
},
|
| 553 |
+
"model.layers.51.block_sparse_moe.gate": {
|
| 554 |
+
"group_size": 64,
|
| 555 |
+
"bits": 8
|
| 556 |
+
},
|
| 557 |
+
"model.layers.52.block_sparse_moe.gate": {
|
| 558 |
+
"group_size": 64,
|
| 559 |
+
"bits": 8
|
| 560 |
+
},
|
| 561 |
+
"model.layers.53.block_sparse_moe.gate": {
|
| 562 |
+
"group_size": 64,
|
| 563 |
+
"bits": 8
|
| 564 |
+
},
|
| 565 |
+
"model.layers.54.block_sparse_moe.gate": {
|
| 566 |
+
"group_size": 64,
|
| 567 |
+
"bits": 8
|
| 568 |
+
},
|
| 569 |
+
"model.layers.55.block_sparse_moe.gate": {
|
| 570 |
+
"group_size": 64,
|
| 571 |
+
"bits": 8
|
| 572 |
+
},
|
| 573 |
+
"model.layers.56.block_sparse_moe.gate": {
|
| 574 |
+
"group_size": 64,
|
| 575 |
+
"bits": 8
|
| 576 |
+
},
|
| 577 |
+
"model.layers.57.block_sparse_moe.gate": {
|
| 578 |
+
"group_size": 64,
|
| 579 |
+
"bits": 8
|
| 580 |
+
},
|
| 581 |
+
"model.layers.58.block_sparse_moe.gate": {
|
| 582 |
+
"group_size": 64,
|
| 583 |
+
"bits": 8
|
| 584 |
+
},
|
| 585 |
+
"model.layers.59.block_sparse_moe.gate": {
|
| 586 |
+
"group_size": 64,
|
| 587 |
+
"bits": 8
|
| 588 |
+
},
|
| 589 |
+
"model.layers.60.block_sparse_moe.gate": {
|
| 590 |
+
"group_size": 64,
|
| 591 |
+
"bits": 8
|
| 592 |
+
},
|
| 593 |
+
"model.layers.61.block_sparse_moe.gate": {
|
| 594 |
+
"group_size": 64,
|
| 595 |
+
"bits": 8
|
| 596 |
+
}
|
| 597 |
+
},
|
| 598 |
+
"rms_norm_eps": 1e-06,
|
| 599 |
+
"rope_theta": 5000000,
|
| 600 |
+
"rotary_dim": 64,
|
| 601 |
+
"router_aux_loss_coef": 0.001,
|
| 602 |
+
"router_jitter_noise": 0.0,
|
| 603 |
+
"scoring_func": "sigmoid",
|
| 604 |
+
"shared_intermediate_size": 0,
|
| 605 |
+
"shared_moe_mode": "sigmoid",
|
| 606 |
+
"sliding_window": null,
|
| 607 |
+
"tie_word_embeddings": false,
|
| 608 |
+
"transformers_version": "4.46.1",
|
| 609 |
+
"use_cache": true,
|
| 610 |
+
"use_mtp": true,
|
| 611 |
+
"use_qk_norm": true,
|
| 612 |
+
"use_routing_bias": true,
|
| 613 |
+
"vocab_size": 200064
|
| 614 |
+
}
|
generation_config.json
ADDED
|
@@ -0,0 +1,7 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"do_sample": true,
|
| 3 |
+
"temperature": 1.0,
|
| 4 |
+
"top_p": 0.95,
|
| 5 |
+
"top_k": 40,
|
| 6 |
+
"transformers_version": "4.46.1"
|
| 7 |
+
}
|
merges.txt
ADDED
|
The diff for this file is too large to render.
See raw diff
|
|
|