zazaman committed on
Commit
bb181f0
·
1 Parent(s): 474c1d6

Remove --stop argument (not supported in llama.cpp CLI)

Browse files
Files changed (1) hide show
  1. llm_clients/qwen_translator.py +1 -2
llm_clients/qwen_translator.py CHANGED
@@ -356,8 +356,7 @@ class QwenTranslatorClient(LlmClient):
356
  if self.n_gpu_layers > 0:
357
  cmd.extend(["-ngl", str(self.n_gpu_layers)])
358
 
359
- # Add stop sequences (llama.cpp uses --stop for each stop token)
360
- cmd.extend(["--stop", "<|im_end|>", "--stop", "<|im_start|>"])
361
 
362
  try:
363
  # Run the binary and capture output
 
356
  if self.n_gpu_layers > 0:
357
  cmd.extend(["-ngl", str(self.n_gpu_layers)])
358
 
359
+ # Note: Stop sequences are handled in post-processing since --stop may not be available in all llama.cpp versions
 
360
 
361
  try:
362
  # Run the binary and capture output