"""Unified internal models for the protocol layer."""
from __future__ import annotations
from typing import Any, Literal
from pydantic import BaseModel, Field
class CanonicalContentBlock(BaseModel):
    """One content block inside a canonical message.

    ``type`` discriminates the block variant; all other fields are
    optional, and only a subset is presumably populated per variant
    (e.g. ``text`` for "text", ``tool_use_id``/``is_error`` for
    "tool_result", ``mime_type``/``data``/``url`` for "image") —
    inferred from field names, confirm against the converters.
    """

    # Variant discriminator for this block.
    type: Literal["text", "thinking", "tool_use", "tool_result", "image"]
    # Textual payload (text / thinking blocks).
    text: str | None = None
    # Identifier of a tool invocation (tool_use blocks).
    id: str | None = None
    # Tool name (tool_use blocks).
    name: str | None = None
    # Parsed tool-call arguments (tool_use blocks).
    input: dict[str, Any] | None = None
    # Back-reference to the originating tool_use id (tool_result blocks).
    tool_use_id: str | None = None
    # Whether the tool result represents an error (tool_result blocks).
    is_error: bool | None = None
    # Image payload: MIME type plus inline data or a URL — presumably
    # mutually exclusive with each other; verify against callers.
    mime_type: str | None = None
    data: str | None = None
    url: str | None = None
class CanonicalMessage(BaseModel):
    """A single chat message: a role plus an ordered list of content blocks."""

    # Message author; "tool" carries tool results back to the model.
    role: Literal["system", "user", "assistant", "tool"]
    # Ordered content blocks; defaults to an empty list.
    content: list[CanonicalContentBlock] = Field(default_factory=list)
class CanonicalToolSpec(BaseModel):
    """Declaration of a tool the model may call."""

    # Tool name (required).
    name: str
    # Human-readable description shown to the model.
    description: str = ""
    # JSON-Schema-style description of the tool's arguments —
    # presumably a JSON Schema object; confirm against producers.
    input_schema: dict[str, Any] = Field(default_factory=dict)
    # Whether the provider should enforce strict schema adherence.
    strict: bool = False
class CanonicalChatRequest(BaseModel):
    """Protocol-agnostic chat completion request.

    Normalized superset of the OpenAI and Anthropic request shapes;
    converters translate to/from the wire formats.
    """

    # Wire protocol the original request arrived in.
    protocol: Literal["openai", "anthropic"]
    # Upstream provider identifier (free-form string).
    provider: str
    # Target model name.
    model: str
    # System prompt as content blocks (kept separate from messages,
    # matching the Anthropic-style top-level "system" field).
    system: list[CanonicalContentBlock] = Field(default_factory=list)
    # Conversation history.
    messages: list[CanonicalMessage] = Field(default_factory=list)
    # Whether the caller requested a streaming response.
    stream: bool = False
    # Sampling / generation parameters; None means "provider default".
    max_tokens: int | None = None
    temperature: float | None = None
    top_p: float | None = None
    stop_sequences: list[str] = Field(default_factory=list)
    # Tools offered to the model for this request.
    tools: list[CanonicalToolSpec] = Field(default_factory=list)
    # Tool-choice directive: a simple mode string or a structured
    # selector, mirroring the differing OpenAI/Anthropic shapes.
    tool_choice: str | dict[str, Any] | None = None
    # Session to resume, if any — semantics defined by the caller.
    resume_session_id: str | None = None
    # Free-form passthrough metadata.
    metadata: dict[str, Any] = Field(default_factory=dict)
class CanonicalStreamEvent(BaseModel):
    """One normalized event in a streaming response.

    ``type`` discriminates the event; the remaining optional fields are
    populated per event type (e.g. ``text`` for deltas, ``usage`` for
    "usage", ``error`` for "error") — inferred from field names,
    confirm against the stream adapters.
    """

    # Event discriminator.
    type: Literal[
        "message_start",
        "text_delta",
        "thinking_delta",
        "tool_call",
        "usage",
        "message_stop",
        "error",
    ]
    # Incremental text for text_delta / thinking_delta events.
    text: str | None = None
    # Tool-call identifier (tool_call events).
    id: str | None = None
    # Tool name (tool_call events).
    name: str | None = None
    # Raw (presumably JSON-encoded) tool arguments as a string —
    # note: string here vs. parsed dict in CanonicalContentBlock.input.
    arguments: str | None = None
    # Why generation stopped (message_stop events).
    stop_reason: str | None = None
    # Session identifier associated with the stream, if any.
    session_id: str | None = None
    # Token-count accounting, e.g. {"input_tokens": ..., "output_tokens": ...}
    # — exact keys not visible here; verify against producers.
    usage: dict[str, int] | None = None
    # Error description (error events).
    error: str | None = None