# api_types.py — pydantic request/response models for an OpenAI-compatible
# chat-completions API (part of the Rwkv-xd upload; repository scrape header
# replaced with this comment so the module is importable).
from typing import List, Optional, Union, Dict, Any, Literal
from pydantic import BaseModel, Field
class ChatMessage(BaseModel):
    """A single message in a chat request: an author role plus text content.

    Both fields are required. The original declaration used bare ``Field()``
    calls, which add no metadata and no constraints; they are replaced with
    descriptive fields so the generated JSON schema documents itself.
    """

    role: str = Field(description="Role of the message author (e.g. 'system', 'user', 'assistant')")
    content: str = Field(description="Text content of the message")
class Logprob(BaseModel):
    """Log-probability information for one generated token."""

    token: str      # the token text
    logprob: float  # log probability assigned to `token`
    # Alternative candidate tokens with their logprobs, when requested.
    # NOTE(review): entries are loose dicts rather than nested Logprob models —
    # exact key schema not visible here; confirm against the producer.
    top_logprobs: Optional[List[Dict[str, Any]]] = None
class LogprobsContent(BaseModel):
    """Per-choice logprobs payload (OpenAI-compatible shape)."""

    content: Optional[List[Logprob]] = None  # logprobs for each content token
    refusal: Optional[List[Logprob]] = None  # logprobs for each refusal token, if any
class FunctionCall(BaseModel):
    """A function/tool invocation emitted by the model."""

    name: str       # name of the function to invoke
    arguments: str  # raw argument payload as a string (typically JSON; not parsed here)
class ChatCompletionMessage(BaseModel):
    """Assistant-side message carried inside a completion choice.

    Every field is optional so one model covers both a complete response
    message and an incremental streaming delta.
    """

    role: Optional[str] = Field(
        default=None, description="The role of the author of this message"
    )
    content: Optional[str] = Field(
        default=None, description="The contents of the message"
    )
    reasoning_content: Optional[str] = Field(
        default=None, description="The reasoning contents of the message"
    )
    tool_calls: Optional[List[Dict[str, Any]]] = Field(
        default=None, description="Tool calls generated by the model"
    )
class SamplerConfig(BaseModel):
    """Sampling parameters accepted per-request and used as model defaults.

    This mirrors the server-side ``SamplerConfig``. The upper-case ``ALLOW_*``
    fields, when present, override the model/global allow flags for a single
    request; the ``SHOW_*`` and ``UI_STYLE`` fields are purely hints for
    client UIs.
    """

    # --- core decoding parameters (defaults mirror the server config) ---
    max_tokens: Optional[int] = Field(default=512)
    temperature: Optional[float] = Field(default=1.0)
    top_p: Optional[float] = Field(default=0.3)
    presence_penalty: Optional[float] = Field(default=0.5)
    count_penalty: Optional[float] = Field(default=0.5)
    penalty_decay: Optional[float] = Field(default=0.996)
    stop: Optional[List[str]] = Field(default_factory=lambda: ["\n\n"])
    stop_tokens: Optional[List[int]] = Field(default_factory=lambda: [0])

    # --- per-request overrides of allow flags (None = no override) ---
    ALLOW_WEB_SEARCH: Optional[bool] = Field(default=None)
    ALLOW_TOOLS: Optional[bool] = Field(default=None)
    ALLOW_REASONING: Optional[bool] = Field(default=None)
    ALLOW_FILE_TOOL: Optional[bool] = Field(
        default=None,
        description="Per-sampler override for allowing file tools (uploads/file_read).",
    )

    # --- UI flags so a client knows which toggles to render ---
    SHOW_WEB_SEARCH_BUTTON: Optional[bool] = Field(
        default=None,
        description="Whether the UI should show a web-search toggle for this sampler",
    )
    SHOW_FILE_UPLOAD_BUTTON: Optional[bool] = Field(
        default=None,
        description="Whether the UI should show a file upload control for this sampler",
    )
    SHOW_REASONING_TOGGLE: Optional[bool] = Field(
        default=None,
        description="Whether the UI should show a reasoning toggle for this sampler",
    )

    # --- UI rendering hint, e.g. 'whatsapp', 'compact', or 'expanded' ---
    UI_STYLE: Optional[str] = Field(
        default=None,
        description="UI style hint that clients may use to render controls (example: 'whatsapp' or 'compact')",
    )
class PromptTokensDetails(BaseModel):
    """Breakdown of prompt-token accounting (OpenAI-compatible shape)."""

    cached_tokens: int  # number of prompt tokens reported as served from cache
class CompletionTokensDetails(BaseModel):
    """Breakdown of completion-token accounting (OpenAI-compatible shape).

    NOTE(review): defined for schema parity but not currently referenced —
    the corresponding field on ``Usage`` is commented out.
    """

    reasoning_tokens: int            # tokens attributed to reasoning content
    accepted_prediction_tokens: int  # predicted tokens accepted into the output
    rejected_prediction_tokens: int  # predicted tokens that were rejected
class Usage(BaseModel):
    """Token-usage accounting attached to a completion response."""

    prompt_tokens: int      # tokens consumed by the prompt
    completion_tokens: int  # tokens generated in the completion
    total_tokens: int       # total as reported (prompt + completion)
    # Optional breakdown of prompt-token details. A bare `Optional[...]`
    # annotation is a *required* field in pydantic v2, which would force every
    # caller to pass it explicitly; default to None so callers that do not
    # track cached-token details can still construct Usage.
    prompt_tokens_details: Optional[PromptTokensDetails] = None
    # completion_tokens_details: CompletionTokensDetails
class ChatCompletionChoice(BaseModel):
    """One candidate result within a chat completion or streamed chunk.

    By OpenAI convention ``message`` carries a full response and ``delta``
    carries a streaming increment; only one is normally populated.
    """

    index: int  # position of this choice in the response
    message: Optional[ChatCompletionMessage] = None
    delta: Optional[ChatCompletionMessage] = None
    logprobs: Optional[LogprobsContent] = None
    # Previously declared Field(..., ...) — i.e. required — even though it is
    # null on every streamed chunk until generation finishes and every sibling
    # field defaults to None. Defaulting to None is backward compatible for
    # callers that always supplied it.
    finish_reason: Optional[str] = Field(
        None, description="Reason for stopping: stop, length, content_filter, tool_calls"
    )
class ChatCompletion(BaseModel):
    """A complete (non-streaming) chat completion response."""

    # A Field without a default is still required, so the `...` sentinel in
    # the original declarations was redundant.
    id: str = Field(description="Unique identifier for the chat completion")
    object: Literal["chat.completion"] = "chat.completion"
    created: int = Field(description="Unix timestamp of creation")
    model: str                          # name of the model that produced this
    choices: List[ChatCompletionChoice]  # one or more candidate completions
    usage: Usage                         # token accounting for the request
class ChatCompletionChunk(BaseModel):
    """One server-sent event in a streaming chat completion."""

    id: str = Field(..., description="Unique identifier for the chat completion")
    object: Literal["chat.completion.chunk"] = "chat.completion.chunk"
    created: int = Field(..., description="Unix timestamp of creation")
    model: str
    choices: List[ChatCompletionChoice]
    # A bare `Optional[...]` annotation is a *required* field in pydantic v2.
    # Usage is normally attached only to the final chunk (if at all), so it
    # must default to None or every intermediate chunk would fail validation.
    usage: Optional[Usage] = None
class UploadedFile(BaseModel):
    """Metadata describing a file uploaded to the server."""

    file_id: str                       # server-assigned identifier for the file
    filename: str                      # original filename supplied by the client
    size: int                          # file size (presumably bytes — confirm against the upload handler)
    mime_type: Optional[str] = None    # declared or detected MIME type, if known
    path: Optional[str] = None         # server-side storage path, when exposed
    uploaded_at: Optional[int] = None  # upload time (presumably a Unix timestamp — confirm)
class FileUploadResponse(BaseModel):
    """Response body returned after a successful file upload."""

    success: bool = True  # defaults to True; failure paths presumably use a different response
    file: UploadedFile    # metadata for the stored file