# OpenAI-compatible chat-completions API schemas (Pydantic models).
from typing import List, Literal, Optional, Dict, Any, Union
from pydantic import BaseModel, Field
# Chat message roles accepted by the OpenAI-compatible API.
Role = Literal["system", "user", "assistant", "tool"]
class Message(BaseModel):
    """A single chat message in an OpenAI-compatible conversation.

    Generalized for multi-turn tool use: an assistant message may carry
    tool calls with no text content, and a ``tool`` role message links
    back to the call it answers via ``tool_call_id``.
    """

    role: Role
    # Optional so assistant messages consisting solely of tool calls
    # (content == null in the OpenAI API) validate; plain-text messages
    # still pass a string exactly as before.
    content: Optional[str] = None
    # Tool calls issued by an assistant message. Kept as raw dicts to
    # avoid a forward reference to models declared later in this file.
    tool_calls: Optional[List[Dict[str, Any]]] = None
    # Required by the OpenAI API when role == "tool": the id of the
    # assistant tool call this message responds to.
    tool_call_id: Optional[str] = None
class Function(BaseModel):
    """Function definition advertised to the model for tool calling."""

    name: str
    description: Optional[str] = None
    # JSON-Schema object describing the function's arguments; defaults
    # to an empty schema (function takes no arguments).
    parameters: Dict[str, Any] = Field(default_factory=dict)
class Tool(BaseModel):
    """Tool wrapper in the OpenAI-compatible request schema."""

    # "function" is the only tool type supported here.
    type: Literal["function"] = "function"
    function: Function
class ResponseFormat(BaseModel):
    """Requested output format: free text or a JSON object."""

    type: Literal["text", "json_object"]
class ChatCompletionRequest(BaseModel):
    """Request body for the OpenAI-compatible chat-completions endpoint."""

    # Optional; the server substitutes the default model from config.
    model: Optional[str] = None
    messages: List[Message]
    temperature: Optional[float] = 0.7
    max_tokens: Optional[int] = None
    stream: Optional[bool] = False
    top_p: Optional[float] = 1.0
    # Tool definitions exposed to the model.
    tools: Optional[List[Tool]] = None
    # Tool choice ("required" added to support forced output_type);
    # a raw dict selects a specific function.
    tool_choice: Optional[Union[Literal["none", "auto", "required"], Dict[str, Any]]] = None
    # Response format for structured outputs; a raw dict is accepted for
    # variants not modeled by ResponseFormat.
    response_format: Optional[Union[ResponseFormat, Dict[str, Any]]] = None
class FunctionCall(BaseModel):
    """The function invocation inside a tool call."""

    name: str
    # Arguments encoded as a JSON string, per the OpenAI API.
    arguments: str
class ToolCall(BaseModel):
    """A tool call emitted in an assistant message."""

    id: str
    # Only function-type tool calls are supported.
    type: Literal["function"] = "function"
    function: FunctionCall
class ChoiceMessage(BaseModel):
    """Assistant message returned inside a completion choice."""

    role: Literal["assistant"]
    # May be None when the assistant responds with tool calls only.
    content: Optional[str] = None
    # Tool calls support.
    tool_calls: Optional[List[ToolCall]] = None
class Choice(BaseModel):
    """One generated completion within a response."""

    index: int
    message: ChoiceMessage
    # e.g. "stop", "length", "tool_calls"; None while streaming —
    # TODO confirm the exact values the server emits.
    finish_reason: Optional[str] = None
class Usage(BaseModel):
    """Token accounting for a completion."""

    prompt_tokens: int
    completion_tokens: int
    total_tokens: int
class ChatCompletionResponse(BaseModel):
    """Top-level response body for a chat completion."""

    id: str
    object: Literal["chat.completion"] = "chat.completion"
    # Unix timestamp (seconds) of creation.
    created: int
    model: str
    choices: List[Choice]
    usage: Optional[Usage] = None