# NOTE: the lines below are page-capture residue (Hugging Face Spaces file
# listing: "Spaces / Sleeping / File size: 3,141 Bytes / f4baae1" plus a
# line-number gutter) and are not part of the module source.
"""
Application data models
"""
from typing import Dict, List, Optional, Any, Union, Literal
from pydantic import BaseModel
class ContentPart(BaseModel):
    """One typed fragment of message content (OpenAI's multi-part format)."""
    # Discriminator for the part kind, e.g. "text".
    type: str
    # Populated when the part carries text; None for non-text parts.
    text: Union[str, None] = None
class Message(BaseModel):
    """A single chat turn exchanged with the model."""
    # Sender role, e.g. "system" / "user" / "assistant" (not validated here).
    role: str
    # Either a plain string or a list of typed parts (new OpenAI format).
    content: Union[str, List[ContentPart], None] = None
    # Model "thinking" text, when the upstream exposes it.
    reasoning_content: Union[str, None] = None
    # Raw tool-call dicts as produced by the model, if any.
    tool_calls: Union[List[Dict[str, Any]], None] = None
class OpenAIRequest(BaseModel):
    """Inbound request body compatible with OpenAI's chat-completions API."""
    model: str
    messages: List[Message]
    # True requests a streamed (SSE-style) response.
    stream: Union[bool, None] = False
    temperature: Union[float, None] = None
    max_tokens: Union[int, None] = None
    # Tool definitions and tool-selection directive, passed through as-is.
    tools: Union[List[Dict[str, Any]], None] = None
    tool_choice: Optional[Any] = None
class ModelItem(BaseModel):
    """Descriptor of a model as embedded in upstream requests."""
    id: str        # model identifier
    name: str      # human-readable display name
    owned_by: str  # owning organisation/vendor
class UpstreamRequest(BaseModel):
    """Request payload forwarded to the upstream chat service."""
    stream: bool
    model: str
    messages: List[Message]
    # Pydantic deep-copies field defaults per instance, so mutable default
    # literals are safe here (unlike plain function defaults).
    params: Dict[str, Any] = {}
    features: Dict[str, Any] = {}
    background_tasks: Union[Dict[str, bool], None] = None
    chat_id: Union[str, None] = None
    id: Union[str, None] = None
    mcp_servers: Union[List[str], None] = None
    model_item: Union[ModelItem, None] = None
    tool_servers: Union[List[str], None] = None
    variables: Union[Dict[str, str], None] = None

    # Disable pydantic's reserved "model_" namespace so the `model` /
    # `model_item` field names don't trigger warnings.
    model_config = {"protected_namespaces": ()}
class Delta(BaseModel):
    """Incremental payload of one streaming chunk (OpenAI delta format)."""
    # Role is typically only present on the first chunk of a stream.
    role: Optional[str] = None
    # Fix: the original default was written as `"" or None`, which always
    # evaluates to None at class-definition time — state the default honestly.
    content: Optional[str] = None
    reasoning_content: Optional[str] = None
    tool_calls: Optional[List[Dict[str, Any]]] = None
class Choice(BaseModel):
    """A single completion choice inside a response."""
    index: int
    # `message` is used by non-streaming responses, `delta` by streaming
    # chunks (presumably mutually exclusive — confirm with the serializer).
    message: Union[Message, None] = None
    delta: Union[Delta, None] = None
    finish_reason: Union[str, None] = None
class Usage(BaseModel):
    """Token accounting for a completion."""
    prompt_tokens: int = 0       # tokens consumed by the input
    completion_tokens: int = 0   # tokens produced in the output
    total_tokens: int = 0        # prompt + completion
class OpenAIResponse(BaseModel):
    """Response body mirroring OpenAI's chat-completion schema."""
    id: str
    # e.g. "chat.completion" or "chat.completion.chunk" — set by the caller.
    object: str
    # Creation time; presumably a Unix timestamp in seconds (verify caller).
    created: int
    model: str
    choices: List[Choice]
    usage: Union[Usage, None] = None
class UpstreamError(BaseModel):
    """Error payload reported by the upstream service."""
    detail: str  # human-readable description
    code: int    # upstream error code
class UpstreamDataInner(BaseModel):
    """Nested wrapper that may carry an upstream error."""
    error: Union[UpstreamError, None] = None
class UpstreamDataData(BaseModel):
    """Content portion of one upstream stream event."""
    delta_content: str = ""  # incremental text appended this event
    edit_content: str = ""   # replacement text, when the upstream edits output
    phase: str = ""          # upstream phase marker (opaque string here)
    done: bool = False       # True on the final event of a stream
    usage: Union[Usage, None] = None
    # Errors can arrive at this level or nested one layer deeper.
    error: Union[UpstreamError, None] = None
    inner: Union[UpstreamDataInner, None] = None
class UpstreamData(BaseModel):
    """Envelope for one event received from the upstream stream."""
    type: str            # event discriminator
    data: UpstreamDataData
    error: Union[UpstreamError, None] = None
class Model(BaseModel):
    """Entry of the /models listing (OpenAI schema)."""
    id: str
    object: str = "model"  # fixed discriminator per the OpenAI list format
    # Creation time; presumably a Unix timestamp in seconds — confirm.
    created: int
    owned_by: str
class ModelsResponse(BaseModel):
    """Top-level /models response (OpenAI list envelope)."""
    object: str = "list"  # fixed discriminator per the OpenAI list format
    data: List[Model]
|