Update README.md
Browse files
README.md
CHANGED
|
@@ -74,7 +74,7 @@ modelscope download --model FlagRelease/MiniMax-M2-FlagOS --local_dir /share/Min
|
|
| 74 |
### Download FlagOS Image
|
| 75 |
|
| 76 |
```bash
|
| 77 |
-
docker pull harbor.baai.ac.cn/flagrelease-public/
|
| 78 |
```
|
| 79 |
|
| 80 |
### Start the inference service
|
|
@@ -85,7 +85,7 @@ docker run --init --detach --net=host --user 0 --ipc=host \
|
|
| 85 |
-v /share:/share --security-opt=seccomp=unconfined \
|
| 86 |
--privileged --ulimit=stack=67108864 --ulimit=memlock=-1 \
|
| 87 |
--shm-size=512G --gpus all -e USE_FLAGGEMS=1 \
|
| 88 |
-
--name qwen3_8b_release harbor.baai.ac.cn/flagrelease-public/
|
| 89 |
```
|
| 90 |
|
| 91 |
### Serve
|
|
|
|
| 74 |
### Download FlagOS Image
|
| 75 |
|
| 76 |
```bash
|
| 77 |
+
docker pull harbor.baai.ac.cn/flagrelease-public/flagrelease_nvidia_minimaxm2
|
| 78 |
```
|
| 79 |
|
| 80 |
### Start the inference service
|
|
|
|
| 85 |
-v /share:/share --security-opt=seccomp=unconfined \
|
| 86 |
--privileged --ulimit=stack=67108864 --ulimit=memlock=-1 \
|
| 87 |
--shm-size=512G --gpus all -e USE_FLAGGEMS=1 \
|
| 88 |
+
--name minimaxm2_release harbor.baai.ac.cn/flagrelease-public/flagrelease_nvidia_minimaxm2 sleep infinity
|
| 89 |
```
|
| 90 |
|
| 91 |
### Serve
|