"""
Pydantic models for OpenAI API compatibility
"""
from typing import List, Optional, Dict, Any, Union, Literal
from pydantic import BaseModel, Field
class ChatMessage(BaseModel):
    """One message in a chat conversation (OpenAI chat format)."""

    # Only the three core roles are modeled; tool/function roles are not
    # represented in this schema.
    role: Literal["system", "user", "assistant"]
    content: str  # the message text
class ChatCompletionRequest(BaseModel):
    """Request body for POST /v1/chat/completions (OpenAI-compatible).

    Every optional sampling parameter carries the OpenAI default, so a
    client may omit any of them.
    """

    model: str  # identifier of the model to run
    messages: List[ChatMessage]  # conversation history, oldest first
    # Sampling / decoding controls — defaults mirror the OpenAI API.
    temperature: Optional[float] = Field(default=1.0)
    top_p: Optional[float] = Field(default=1.0)
    n: Optional[int] = Field(default=1)  # number of choices to generate
    stream: Optional[bool] = Field(default=False)  # emit incremental chunks when True
    stop: Optional[Union[str, List[str]]] = Field(default=None)  # stop sequence(s)
    max_tokens: Optional[int] = Field(default=None)  # None = server-side limit
    presence_penalty: Optional[float] = Field(default=0.0)
    frequency_penalty: Optional[float] = Field(default=0.0)
    logit_bias: Optional[Dict[str, float]] = Field(default=None)  # token-id -> bias
    user: Optional[str] = Field(default=None)  # opaque end-user identifier
class ChatCompletionChoice(BaseModel):
    """A single generated alternative in a non-streaming response."""

    index: int  # position of this choice within the response's choices list
    message: ChatMessage  # the generated assistant message
    # Why generation ended (in the OpenAI schema: "stop", "length", ...);
    # None while unset.
    finish_reason: Optional[str] = None
class ChatCompletionUsage(BaseModel):
    """Token accounting for one completion request."""

    prompt_tokens: int      # tokens consumed by the input messages
    completion_tokens: int  # tokens produced in the reply
    # Filled in by the server; conventionally prompt + completion,
    # but this model does not enforce that.
    total_tokens: int
class ChatCompletionResponse(BaseModel):
    """Full (non-streaming) response for /v1/chat/completions."""

    id: str  # unique identifier for this completion
    object: str = "chat.completion"  # fixed OpenAI object tag
    created: int  # creation timestamp (Unix seconds in the OpenAI schema)
    model: str  # model that produced the completion
    choices: List[ChatCompletionChoice]  # one entry per requested choice (n)
    usage: Optional[ChatCompletionUsage] = None  # token counts, if reported
class ChatCompletionStreamChoice(BaseModel):
    """One choice inside a streaming chunk."""

    index: int  # which of the n choices this delta belongs to
    # Incremental message fragment (e.g. {"content": "..."}); kept as a
    # free-form dict rather than a typed model.
    delta: Dict[str, Any]
    finish_reason: Optional[str] = None  # set on the final chunk of a choice
class ChatCompletionStreamResponse(BaseModel):
    """One server-sent chunk of a streaming chat completion."""

    id: str  # identifier shared by every chunk of one completion
    object: str = "chat.completion.chunk"  # fixed OpenAI object tag
    created: int  # creation timestamp (Unix seconds in the OpenAI schema)
    model: str  # model producing the stream
    choices: List[ChatCompletionStreamChoice]  # deltas for each choice
class ModelInfo(BaseModel):
    """A single model entry, as listed by GET /v1/models."""

    id: str  # model identifier
    object: str = "model"  # fixed OpenAI object tag
    owned_by: str  # owner/organization of the model
    # Use default_factory instead of a literal `[]` default: this is the
    # documented pydantic idiom for mutable defaults and guarantees each
    # instance gets its own fresh list.
    permission: List[Any] = Field(default_factory=list)
class ModelsResponse(BaseModel):
    """Envelope for GET /v1/models: a list of available models."""

    object: str = "list"  # fixed OpenAI object tag
    data: List[ModelInfo]  # the available models
class ErrorResponse(BaseModel):
    """OpenAI-style error envelope.

    Kept as a free-form dict (typically message/type/code keys in the
    OpenAI schema) rather than a typed sub-model.
    """

    error: Dict[str, Any]