Spaces:
Sleeping
Sleeping
File size: 6,156 Bytes
5868187 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 153 154 155 156 157 158 159 160 161 162 163 164 165 166 167 168 169 170 171 172 173 174 175 176 177 178 179 180 181 182 183 184 185 186 187 188 189 190 191 192 193 194 195 196 197 198 |
from typing import List, Optional, Union, Dict, Any
from pydantic import BaseModel, Field
# Common Models
class Model(BaseModel):
    """A single model entry, shaped like the OpenAI `/v1/models` object."""

    id: str  # model identifier string
    object: str = "model"  # fixed OpenAI object-type tag
    created: Optional[int] = None  # presumably a Unix timestamp — TODO confirm against the producer
    owned_by: Optional[str] = "google"  # owner label; defaults to "google" for this proxy
class ModelList(BaseModel):
    """OpenAI-style list envelope returned by the models endpoint."""

    object: str = "list"  # fixed OpenAI list-type tag
    data: List[Model]  # the available models
# OpenAI Models
class OpenAIChatMessage(BaseModel):
    """One chat message in OpenAI chat-completions format."""

    role: str  # e.g. "system" / "user" / "assistant" — not validated here
    # Content may be plain text, a list of content-part dicts (multimodal), or None.
    content: Union[str, List[Dict[str, Any]], None] = None
    reasoning_content: Optional[str] = None  # extension field for reasoning/thinking text
    name: Optional[str] = None  # optional participant name, per the OpenAI schema
class OpenAIChatCompletionRequest(BaseModel):
    """Inbound OpenAI-compatible chat-completions request.

    Numeric fields carry the OpenAI-documented ranges as pydantic
    constraints; unknown fields are accepted and passed through (see Config).
    """

    model: str
    messages: List[OpenAIChatMessage]
    stream: bool = False  # when True the caller expects SSE chunks
    temperature: Optional[float] = Field(None, ge=0.0, le=2.0)
    top_p: Optional[float] = Field(None, ge=0.0, le=1.0)
    max_tokens: Optional[int] = Field(None, ge=1)
    stop: Optional[Union[str, List[str]]] = None  # single stop string or a list of them
    frequency_penalty: Optional[float] = Field(None, ge=-2.0, le=2.0)
    presence_penalty: Optional[float] = Field(None, ge=-2.0, le=2.0)
    n: Optional[int] = Field(1, ge=1, le=128)  # number of completions requested
    seed: Optional[int] = None
    response_format: Optional[Dict[str, Any]] = None  # e.g. {"type": "json_object"} — passed through as-is
    top_k: Optional[int] = Field(None, ge=1)  # non-OpenAI extension (Gemini-style sampling)
    enable_anti_truncation: Optional[bool] = False  # proxy-specific flag, not part of the OpenAI API

    class Config:
        extra = "allow"  # Allow additional fields not explicitly defined
# Generic chat-completion request model (compatible with OpenAI and other formats)
ChatCompletionRequest = OpenAIChatCompletionRequest
class OpenAIChatCompletionChoice(BaseModel):
    """One choice in a non-streaming chat-completions response."""

    index: int  # position of this choice within the response
    message: OpenAIChatMessage  # the full assistant message
    finish_reason: Optional[str] = None  # e.g. "stop" / "length" — not validated here
    logprobs: Optional[Dict[str, Any]] = None
class OpenAIChatCompletionResponse(BaseModel):
    """Non-streaming OpenAI chat-completions response envelope."""

    id: str
    object: str = "chat.completion"  # fixed OpenAI object-type tag
    created: int  # presumably a Unix timestamp — set by the caller
    model: str
    choices: List[OpenAIChatCompletionChoice]
    usage: Optional[Dict[str, int]] = None  # token counts, e.g. prompt/completion/total
    system_fingerprint: Optional[str] = None
class OpenAIDelta(BaseModel):
    """Incremental message fragment carried by a streaming chunk."""

    role: Optional[str] = None  # present only on the first chunk of a message, per OpenAI convention
    content: Optional[str] = None  # the text delta
    reasoning_content: Optional[str] = None  # extension field mirroring OpenAIChatMessage
class OpenAIChatCompletionStreamChoice(BaseModel):
    """One choice entry inside a streaming chunk."""

    index: int
    delta: OpenAIDelta  # the incremental payload for this choice
    finish_reason: Optional[str] = None  # set on the terminal chunk
    logprobs: Optional[Dict[str, Any]] = None
class OpenAIChatCompletionStreamResponse(BaseModel):
    """Streaming (SSE) chunk envelope for chat completions."""

    id: str
    object: str = "chat.completion.chunk"  # fixed OpenAI chunk-type tag
    created: int  # presumably a Unix timestamp — set by the caller
    model: str
    choices: List[OpenAIChatCompletionStreamChoice]
    system_fingerprint: Optional[str] = None
# Gemini Models
class GeminiPart(BaseModel):
    """One part of a Gemini content turn (text, inline data, or file reference)."""

    text: Optional[str] = None
    inlineData: Optional[Dict[str, Any]] = None  # base64 blob + mime type, per the Gemini API — kept untyped here
    fileData: Optional[Dict[str, Any]] = None  # file-URI reference, kept untyped here
    thought: Optional[bool] = False  # marks "thinking" parts in Gemini responses
class GeminiContent(BaseModel):
    """A single conversation turn in Gemini format."""

    role: str  # e.g. "user" / "model" — not validated here
    parts: List[GeminiPart]
class GeminiSystemInstruction(BaseModel):
    """System instruction payload — parts only, no role, matching the Gemini schema."""

    parts: List[GeminiPart]
class GeminiGenerationConfig(BaseModel):
    """Gemini `generationConfig` object (camelCase field names match the wire format)."""

    temperature: Optional[float] = Field(None, ge=0.0, le=2.0)
    topP: Optional[float] = Field(None, ge=0.0, le=1.0)
    topK: Optional[int] = Field(None, ge=1)
    maxOutputTokens: Optional[int] = Field(None, ge=1)
    stopSequences: Optional[List[str]] = None
    responseMimeType: Optional[str] = None  # e.g. "application/json" for structured output
    responseSchema: Optional[Dict[str, Any]] = None
    candidateCount: Optional[int] = Field(None, ge=1, le=8)
    seed: Optional[int] = None
    frequencyPenalty: Optional[float] = Field(None, ge=-2.0, le=2.0)
    presencePenalty: Optional[float] = Field(None, ge=-2.0, le=2.0)
    thinkingConfig: Optional[Dict[str, Any]] = None  # thinking-budget settings, kept untyped here
class GeminiSafetySetting(BaseModel):
    """One Gemini safety setting: a harm category and its blocking threshold."""

    category: str  # e.g. a HARM_CATEGORY_* string — not validated here
    threshold: str  # e.g. a BLOCK_* string — not validated here
class GeminiRequest(BaseModel):
    """Request body for the Gemini generateContent endpoint."""

    contents: List[GeminiContent]
    systemInstruction: Optional[GeminiSystemInstruction] = None
    generationConfig: Optional[GeminiGenerationConfig] = None
    safetySettings: Optional[List[GeminiSafetySetting]] = None
    tools: Optional[List[Dict[str, Any]]] = None  # tool declarations, kept untyped here
    toolConfig: Optional[Dict[str, Any]] = None
    cachedContent: Optional[str] = None  # name of a cached-content resource
    enable_anti_truncation: Optional[bool] = False  # proxy-specific flag, not part of the Gemini API

    class Config:
        extra = "allow"  # allow pass-through of fields not declared above
class GeminiCandidate(BaseModel):
    """One candidate in a Gemini response."""

    content: GeminiContent  # the generated turn
    finishReason: Optional[str] = None  # e.g. "STOP" — not validated here
    index: int = 0
    safetyRatings: Optional[List[Dict[str, Any]]] = None
    citationMetadata: Optional[Dict[str, Any]] = None
    tokenCount: Optional[int] = None
class GeminiUsageMetadata(BaseModel):
    """Token accounting attached to a Gemini response."""

    promptTokenCount: Optional[int] = None
    candidatesTokenCount: Optional[int] = None
    totalTokenCount: Optional[int] = None
class GeminiResponse(BaseModel):
    """Top-level Gemini generateContent response."""

    candidates: List[GeminiCandidate]
    usageMetadata: Optional[GeminiUsageMetadata] = None
    modelVersion: Optional[str] = None
# Error Models
class APIError(BaseModel):
    """Error payload in OpenAI-style error responses."""

    message: str
    type: str = "api_error"  # error category string
    code: Optional[int] = None  # presumably an HTTP status code — TODO confirm against producers
class ErrorResponse(BaseModel):
    """OpenAI-style error envelope: `{"error": {...}}`."""

    error: APIError
# Control Panel Models
class SystemStatus(BaseModel):
    """Snapshot of service health shown in the control panel."""

    status: str
    timestamp: str  # string timestamp — format defined by the producer, not shown here
    credentials: Dict[str, int]  # credential counters, e.g. totals per state — keys defined by the producer
    config: Dict[str, Any]  # current configuration values
    current_credential: str  # identifier of the credential currently in use
class CredentialInfo(BaseModel):
    """Metadata about one credential file listed in the control panel."""

    filename: str
    project_id: Optional[str] = None  # Google Cloud project id, when known
    status: Dict[str, Any]  # runtime status dict — schema defined by the producer
    size: Optional[int] = None  # file size in bytes — presumably; confirm against the producer
    modified_time: Optional[str] = None  # string timestamp of the last file modification
    error: Optional[str] = None  # error message when the credential could not be read/parsed
class LogEntry(BaseModel):
    """One log line surfaced to the control panel."""

    timestamp: str
    level: str  # e.g. "INFO" / "ERROR" — not validated here
    message: str
    module: Optional[str] = None  # originating module, when available
class ConfigValue(BaseModel):
    """A single configuration key/value pair exposed by the control panel."""

    key: str
    value: Any
    env_locked: bool = False  # True when the value is forced by an environment variable
    description: Optional[str] = None
# Authentication Models
class AuthRequest(BaseModel):
    """Request to start an authentication (OAuth) flow."""

    project_id: Optional[str] = None  # Google Cloud project to bind the credential to
    user_session: Optional[str] = None  # opaque session identifier — semantics defined by the caller
class AuthResponse(BaseModel):
    """Result of an authentication step.

    Depending on the flow stage, this either carries a URL to visit
    (`auth_url` + `state`), the obtained credentials (`credentials` +
    `file_path`), follow-up requirements (`requires_*` flags), or an error.
    """

    success: bool
    auth_url: Optional[str] = None  # URL the user must open to authorize
    state: Optional[str] = None  # OAuth state token paired with auth_url
    error: Optional[str] = None
    credentials: Optional[Dict[str, Any]] = None  # obtained credential payload, kept untyped here
    file_path: Optional[str] = None  # where the credential was saved
    requires_manual_project_id: Optional[bool] = None  # caller must supply a project id
    requires_project_selection: Optional[bool] = None  # caller must pick from available_projects
    available_projects: Optional[List[Dict[str, str]]] = None  # candidate projects to choose from
class CredentialStatus(BaseModel):
    """Runtime health state tracked for a single credential."""

    disabled: bool = False  # True when the credential has been taken out of rotation
    # default_factory instead of a bare `[]` literal: avoids declaring a shared
    # mutable object as the default (pydantic copies it, but the explicit
    # factory is the idiomatic, unambiguous form).
    error_codes: List[int] = Field(default_factory=list)  # error codes observed for this credential
    last_success: Optional[str] = None  # string timestamp of the last successful use — format defined by the producer