File size: 4,473 Bytes
b219d99 aed88a2 b219d99 aed88a2 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 |
from typing import List, Optional, Union, Dict, Any, Literal
from pydantic import BaseModel, Field
class ChatMessage(BaseModel):
    # A single message in a chat conversation as sent by the client.
    role: str     # author of the message — presumably "user"/"assistant"/"system"; confirm with callers
    content: str  # the message text
class Logprob(BaseModel):
    # Log-probability information for one generated token.
    token: str
    logprob: float
    # Alternative candidates for this position; absent (None) when not requested.
    top_logprobs: Optional[List[Dict[str, Any]]] = None
class LogprobsContent(BaseModel):
    # Per-token logprobs for a choice, split into normal content and refusal text
    # (mirrors the OpenAI `logprobs` object shape).
    content: Optional[List[Logprob]] = None
    refusal: Optional[List[Logprob]] = None
class FunctionCall(BaseModel):
    # A function invocation emitted by the model.
    name: str
    arguments: str  # raw argument payload — presumably a JSON string; confirm with the producer
class ChatCompletionMessage(BaseModel):
    # A message produced by the model; also used as the `delta` payload in
    # streaming chunks, which is why every field is optional.
    role: Optional[str] = Field(
        default=None, description="The role of the author of this message"
    )
    content: Optional[str] = Field(
        default=None, description="The contents of the message"
    )
    reasoning_content: Optional[str] = Field(
        default=None, description="The reasoning contents of the message"
    )
    tool_calls: Optional[List[Dict[str, Any]]] = Field(
        default=None, description="Tool calls generated by the model"
    )
class SamplerConfig(BaseModel):
    """Sampler configuration used in API requests and model defaults.
    This mirrors the server-side `SamplerConfig` and exposes an optional
    `ALLOW_*` set of fields that can be used to override the model/global
    allow flags per-request (when present)."""

    # Core sampling knobs with server-side defaults.
    max_tokens: Optional[int] = Field(default=512)
    temperature: Optional[float] = Field(default=1.0)
    top_p: Optional[float] = Field(default=0.3)
    presence_penalty: Optional[float] = Field(default=0.5)
    count_penalty: Optional[float] = Field(default=0.5)
    penalty_decay: Optional[float] = Field(default=0.996)
    # default_factory keeps each instance's list independent (no shared mutable default)
    stop: Optional[List[str]] = Field(default_factory=lambda: ["\n\n"])
    stop_tokens: Optional[List[int]] = Field(default_factory=lambda: [0])
    # Per-request overrides of the model/global allow flags; None = no override.
    ALLOW_WEB_SEARCH: Optional[bool] = Field(default=None)
    ALLOW_TOOLS: Optional[bool] = Field(default=None)
    ALLOW_REASONING: Optional[bool] = Field(default=None)
    ALLOW_FILE_TOOL: Optional[bool] = Field(default=None, description="Per-sampler override for allowing file tools (uploads/file_read).")
    # UI flags so a client can show the controls for toggles
    SHOW_WEB_SEARCH_BUTTON: Optional[bool] = Field(default=None, description="Whether the UI should show a web-search toggle for this sampler")
    SHOW_FILE_UPLOAD_BUTTON: Optional[bool] = Field(default=None, description="Whether the UI should show a file upload control for this sampler")
    SHOW_REASONING_TOGGLE: Optional[bool] = Field(default=None, description="Whether the UI should show a reasoning toggle for this sampler")
    # UI style hints. e.g. 'whatsapp' style, compact, or 'expanded'
    UI_STYLE: Optional[str] = Field(default=None, description="UI style hint that clients may use to render controls (example: 'whatsapp' or 'compact')")
class PromptTokensDetails(BaseModel):
    # Breakdown of the prompt token count (mirrors OpenAI's usage details).
    cached_tokens: int
class CompletionTokensDetails(BaseModel):
    # Breakdown of the completion token count (mirrors OpenAI's usage details).
    reasoning_tokens: int
    accepted_prediction_tokens: int
    rejected_prediction_tokens: int
class Usage(BaseModel):
    # Token accounting for a completion, mirroring the OpenAI `usage` object.
    prompt_tokens: int
    completion_tokens: int
    total_tokens: int
    # Optional breakdowns. Pydantic treats `Optional[...]` *without* a default
    # as a required field, so the original forced every caller to pass
    # prompt_tokens_details explicitly — give both details a None default so
    # servers that omit them still validate. (completion_tokens_details was
    # previously dead, commented-out code; adding it as optional is
    # backward compatible.)
    prompt_tokens_details: Optional[PromptTokensDetails] = None
    completion_tokens_details: Optional[CompletionTokensDetails] = None
class ChatCompletionChoice(BaseModel):
    # One candidate completion. `message` is populated for non-streaming
    # responses, `delta` for streaming chunks.
    index: int
    message: Optional[ChatCompletionMessage] = None
    delta: Optional[ChatCompletionMessage] = None
    logprobs: Optional[LogprobsContent] = None
    # Fix: the original used Field(..., ...) (required), contradicting the
    # Optional annotation — streaming chunks legitimately carry no
    # finish_reason until the final chunk, so default to None instead.
    finish_reason: Optional[str] = Field(
        None, description="Reason for stopping: stop, length, content_filter, tool_calls"
    )
class ChatCompletion(BaseModel):
    # Response envelope for a non-streaming chat completion.
    id: str = Field(default=..., description="Unique identifier for the chat completion")
    # Fixed discriminator value per the OpenAI wire format.
    object: Literal["chat.completion"] = "chat.completion"
    created: int = Field(default=..., description="Unix timestamp of creation")
    model: str
    choices: List[ChatCompletionChoice]
    usage: Usage
class ChatCompletionChunk(BaseModel):
    # One server-sent event of a streaming chat completion.
    id: str = Field(..., description="Unique identifier for the chat completion")
    # Fixed discriminator value per the OpenAI wire format.
    object: Literal["chat.completion.chunk"] = "chat.completion.chunk"
    created: int = Field(..., description="Unix timestamp of creation")
    model: str
    choices: List[ChatCompletionChoice]
    # Fix: `Optional[Usage]` with no default is a *required* field in pydantic,
    # but streaming chunks omit usage on all but (optionally) the final chunk —
    # default to None so intermediate chunks validate.
    usage: Optional[Usage] = None
class UploadedFile(BaseModel):
    # Metadata describing a file the server has accepted via upload.
    file_id: str
    filename: str
    size: int                          # presumably bytes — confirm against the upload handler
    mime_type: Optional[str] = None
    path: Optional[str] = None         # server-side storage path, when exposed
    uploaded_at: Optional[int] = None  # presumably a Unix timestamp — confirm
class FileUploadResponse(BaseModel):
    # Envelope returned by the file-upload endpoint.
    success: bool = True
    file: UploadedFile
|