# notion2api_test / models.py
# (history: commit 201f912 "bug fix" by bibibi12345)
import time
import uuid
from pydantic import BaseModel, Field
from typing import List, Optional, Dict, Any, Literal, Union
# --- Models Moved from main.py ---
# Input Models (OpenAI-like)
class ChatMessage(BaseModel):
    """One OpenAI-style chat message, extended with Notion bookkeeping fields."""

    # Random identifier assigned per message.
    id: uuid.UUID = Field(default_factory=uuid.uuid4)
    role: Literal["system", "user", "assistant"]
    # Plain text, or a list of OpenAI-style structured content parts.
    content: Union[str, List[Dict[str, Any]]]
    userId: Optional[str] = None  # populated on user messages
    createdAt: Optional[str] = None  # timestamp string — presumably ISO 8601; confirm against caller
    traceId: Optional[str] = None  # populated on assistant messages
class ChatCompletionRequest(BaseModel):
    """OpenAI-compatible chat-completions request body accepted by the proxy."""

    messages: List[ChatMessage]
    # Model name as presented to OpenAI clients; the proxy maps every request
    # onto a Notion model regardless of this value.
    model: str = "notion-proxy"
    stream: bool = False
    # NOTE(review): other OpenAI params (max_tokens, temperature, ...) are not
    # declared and so not forwarded; space_id/thread_id are taken from
    # environment variables rather than the request.
    # Notion-side model to invoke; callers may override the default.
    notion_model: str = "anthropic-opus-4"
# Notion Models
class NotionTranscriptConfigValue(BaseModel):
    """Value payload for a transcript item of type "config"."""

    type: str = "markdown-chat"
    # Notion model identifier, e.g. "anthropic-opus-4".
    model: str
class NotionTranscriptContextValue(BaseModel):
    """Value payload for a transcript item of type "context".

    Identifies the Notion user/space the conversation runs in.
    """

    userId: str
    spaceId: str
    surface: str = "home_module"
    timezone: str = "America/Los_Angeles"
    userName: str
    spaceName: str
    spaceViewId: str
    currentDatetime: str  # presumably an ISO datetime string — TODO confirm
class NotionTranscriptItem(BaseModel):
    """A single entry in the Notion transcript sent to the upstream API."""

    # Random identifier assigned per transcript item.
    id: uuid.UUID = Field(default_factory=uuid.uuid4)
    type: Literal["config", "user", "markdown-chat", "agent-integration", "context"]
    # Shape depends on `type`: nested string lists, a plain string, or one of
    # the typed value models above.
    value: Optional[Union[List[List[str]], str, NotionTranscriptConfigValue, NotionTranscriptContextValue]] = None
    userId: Optional[str] = None  # set on user items
    createdAt: Optional[str] = None  # timestamp string for the item
    traceId: Optional[str] = None  # set on assistant items
class NotionDebugOverrides(BaseModel):
    """Debug knobs passed through to Notion; defaults disable everything."""

    cachedInferences: Dict = Field(default_factory=dict)
    annotationInferences: Dict = Field(default_factory=dict)
    emitInferences: bool = False
class NotionRequestBody(BaseModel):
    """Top-level request body posted to Notion's inference endpoint."""

    # Fresh trace id per request, serialized as a plain string.
    traceId: str = Field(default_factory=lambda: str(uuid.uuid4()))
    spaceId: str
    transcript: List[NotionTranscriptItem]
    # A new thread is created for every request; no threadId is sent.
    createThread: bool = True
    debugOverrides: NotionDebugOverrides = Field(default_factory=NotionDebugOverrides)
    generateTitle: bool = False
    saveAllThreadOperations: bool = True

    class Config:
        # Serialize UUID fields as strings in the outgoing JSON.
        # NOTE(review): pydantic-v1 style config; v2 deprecates json_encoders.
        json_encoders = {
            uuid.UUID: str
        }
# Output Models (OpenAI SSE)
class ChoiceDelta(BaseModel):
    """Incremental content payload inside a streamed choice."""

    content: Optional[str] = None
class Choice(BaseModel):
    """One choice within a streamed chat-completion chunk."""

    index: int = 0
    delta: ChoiceDelta
    # None while streaming; "stop"/"length" on the terminating chunk.
    finish_reason: Optional[Literal["stop", "length"]] = None
class ChatCompletionChunk(BaseModel):
    """OpenAI SSE chunk emitted back to the client during streaming."""

    # "chatcmpl-<uuid>" id, generated per chunk stream.
    id: str = Field(default_factory=lambda: f"chatcmpl-{uuid.uuid4()}")
    object: str = "chat.completion.chunk"
    # Unix timestamp at model construction time.
    created: int = Field(default_factory=lambda: int(time.time()))
    # Reported model name; could instead reflect the underlying Notion model.
    model: str = "notion-proxy"
    choices: List[Choice]
# --- Models for /v1/models Endpoint ---
class Model(BaseModel):
    """Single entry returned by the /v1/models endpoint."""

    id: str
    object: str = "model"
    # Unix timestamp at model construction time.
    created: int = Field(default_factory=lambda: int(time.time()))
    owned_by: str = "notion"  # could vary by actual model origin if needed
class ModelList(BaseModel):
    """Envelope for the /v1/models response."""

    object: str = "list"
    data: List[Model]