Update README.md
Browse files

README.md — CHANGED

@@ -16,7 +16,20 @@ Check the original model card for information about this model.
Before:

 16 |
 17 | # Running the model with VLLM in Docker
 18 | ```sh
 19 | -
 20 | ```
 21 | This was tested on an RTX Pro 6000 Blackwell cloud instance.
 22 |
After:

 16 |
 17 | # Running the model with VLLM in Docker
 18 | ```sh
 19 | + docker run --rm -ti --gpus all \
 20 | +   -v $(pwd)/Step-Audio-R1:/Step-Audio-R1 \
 21 | +   -p 9999:9999 \
 22 | +   stepfun2025/vllm:step-audio-2-v20250909 \
 23 | +   -- vllm serve /Step-Audio-R1 \
 24 | +   --served-model-name Step-Audio-R1 \
 25 | +   --port 9999 \
 26 | +   --max-model-len 16384 \
 27 | +   --max-num-seqs 32 \
 28 | +   --tensor-parallel-size 4 \
 29 | +   --chat-template '{%- macro render_content(content) -%}{%- if content is string -%}{{- content.replace("<audio_patch>\n", "<audio_patch>") -}}{%- elif content is mapping -%}{{- content['"'"'value'"'"'] if '"'"'value'"'"' in content else content['"'"'text'"'"'] -}}{%- elif content is iterable -%}{%- for item in content -%}{%- if item.type == '"'"'text'"'"' -%}{{- item['"'"'value'"'"'] if '"'"'value'"'"' in item else item['"'"'text'"'"'] -}}{%- elif item.type == '"'"'audio'"'"' -%}<audio_patch>{%- endif -%}{%- endfor -%}{%- endif -%}{%- endmacro -%}{%- if tools -%}{{- '"'"'<|BOT|>system\n'"'"' -}}{%- if messages[0]['"'"'role'"'"'] == '"'"'system'"'"' -%}{{- render_content(messages[0]['"'"'content'"'"']) + '"'"'<|EOT|>'"'"' -}}{%- endif -%}{{- '"'"'<|BOT|>tool_json_schemas\n'"'"' + tools|tojson + '"'"'<|EOT|>'"'"' -}}{%- else -%}{%- if messages[0]['"'"'role'"'"'] == '"'"'system'"'"' -%}{{- '"'"'<|BOT|>system\n'"'"' + render_content(messages[0]['"'"'content'"'"']) + '"'"'<|EOT|>'"'"' -}}{%- endif -%}{%- endif -%}{%- for message in messages -%}{%- if message["role"] == "user" -%}{{- '"'"'<|BOT|>human\n'"'"' + render_content(message["content"]) + '"'"'<|EOT|>'"'"' -}}{%- elif message["role"] == "assistant" -%}{{- '"'"'<|BOT|>assistant\n'"'"' + (render_content(message["content"]) if message["content"] else '"'"''"'"') -}}{%- set is_last_assistant = true -%}{%- for m in messages[loop.index:] -%}{%- if m["role"] == "assistant" -%}{%- set is_last_assistant = false -%}{%- endif -%}{%- endfor -%}{%- if not is_last_assistant -%}{{- '"'"'<|EOT|>'"'"' -}}{%- endif -%}{%- elif message["role"] == "function_output" -%}{%- else -%}{%- if not (loop.first and message["role"] == "system") -%}{{- '"'"'<|BOT|>'"'"' + message["role"] + '"'"'\n'"'"' + render_content(message["content"]) + '"'"'<|EOT|>'"'"' -}}{%- endif -%}{%- endif -%}{%- endfor -%}{%- if add_generation_prompt -%}{{- '"'"'<|BOT|>assistant\n<think>\n'"'"' -}}{%- endif -%}' \
 30 | +   --enable-log-requests \
 31 | +   --interleave-mm-strings \
 32 | +   --trust-remote-code
 33 | ```
 34 | This was tested on an RTX Pro 6000 Blackwell cloud instance.
 35 |