fix error: No module named 'vllm.entrypoints.openai.protocol', which is caused by a refactor of the upstream vLLM project

#37
Files changed (1)
  1. qwen3coder_tool_parser_vllm.py +18 -9
qwen3coder_tool_parser_vllm.py CHANGED
@@ -8,16 +8,25 @@ from typing import Any, List, Optional, Union
8
 
9
  import regex as re
10
 
11
- from vllm.entrypoints.openai.protocol import (ChatCompletionRequest,
12
- ChatCompletionToolsParam,
13
- DeltaFunctionCall, DeltaMessage,
14
- DeltaToolCall,
15
- ExtractedToolCallInformation,
16
- FunctionCall, ToolCall)
17
- from vllm.entrypoints.openai.tool_parsers.abstract_tool_parser import (
18
- ToolParser, ToolParserManager)
 
 
 
 
 
 
 
 
19
  from vllm.logger import init_logger
20
- from vllm.transformers_utils.tokenizer import AnyTokenizer
 
21
 
22
  logger = init_logger(__name__)
23
 
 
8
 
9
  import regex as re
10
 
11
+ from vllm.entrypoints.openai.chat_completion.protocol import (
12
+ ChatCompletionRequest,
13
+ ChatCompletionToolsParam,
14
+ )
15
+ from vllm.entrypoints.openai.engine.protocol import (
16
+ DeltaFunctionCall,
17
+ DeltaMessage,
18
+ DeltaToolCall,
19
+ ExtractedToolCallInformation,
20
+ FunctionCall,
21
+ ToolCall,
22
+ )
23
+ from vllm.tool_parsers.abstract_tool_parser import (
24
+ ToolParser,
25
+ ToolParserManager,
26
+ )
27
  from vllm.logger import init_logger
28
+ from vllm.tokenizers import TokenizerLike as AnyTokenizer
29
+
30
 
31
  logger = init_logger(__name__)
32