import time
import uuid
from typing import Any, Dict, List, Literal, Optional, Union

from pydantic import BaseModel, Field

# --- Models Moved from main.py ---

# Input Models (OpenAI-like)
class ChatMessage(BaseModel):
    role: Literal["system", "user", "assistant"]
    content: str


class ChatCompletionRequest(BaseModel):
    messages: List[ChatMessage]
    model: str = "notion-proxy"  # Model name can be passed, but we map to Notion's model
    stream: bool = False
    # Add other potential OpenAI params if needed, though they might not map directly
    # max_tokens: Optional[int] = None
    # temperature: Optional[float] = None
    # space_id and thread_id are now handled globally via environment variables
    notion_model: str = "anthropic-opus-4"  # Default Notion model, can be overridden
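

# Illustrative sketch only: how an incoming OpenAI-style payload maps onto
# ChatCompletionRequest. The payload values shown are assumptions for the
# example, not taken from main.py.
def _example_chat_completion_request() -> ChatCompletionRequest:
    return ChatCompletionRequest(
        messages=[ChatMessage(role="user", content="Hello")],
        stream=True,
        notion_model="anthropic-opus-4",
    )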


# Notion Models
class NotionTranscriptConfigValue(BaseModel):
    type: str = "markdown-chat"
    model: str  # e.g., "anthropic-opus-4"


class NotionTranscriptItem(BaseModel):
    type: Literal["config", "user", "markdown-chat"]
    value: Union[List[List[str]], str, NotionTranscriptConfigValue]


class NotionDebugOverrides(BaseModel):
    cachedInferences: Dict = Field(default_factory=dict)
    annotationInferences: Dict = Field(default_factory=dict)
    emitInferences: bool = False


class NotionRequestBody(BaseModel):
    traceId: str = Field(default_factory=lambda: str(uuid.uuid4()))
    spaceId: str
    transcript: List[NotionTranscriptItem]
    # threadId is removed, createThread will be set to true
    createThread: bool = True
    debugOverrides: NotionDebugOverrides = Field(default_factory=NotionDebugOverrides)
    generateTitle: bool = False
    saveAllThreadOperations: bool = True
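

# Illustrative sketch only: assembling a NotionRequestBody. Which Union variant
# each transcript item uses (e.g. List[List[str]] for "user" entries) is an
# assumption here; the actual mapping is performed by the caller.
def _example_notion_request_body(space_id: str) -> NotionRequestBody:
    return NotionRequestBody(
        spaceId=space_id,
        transcript=[
            NotionTranscriptItem(
                type="config",
                value=NotionTranscriptConfigValue(model="anthropic-opus-4"),
            ),
            NotionTranscriptItem(type="user", value=[["Hello"]]),
        ],
    )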


# Output Models (OpenAI SSE)
class ChoiceDelta(BaseModel):
    content: Optional[str] = None


class Choice(BaseModel):
    index: int = 0
    delta: ChoiceDelta
    finish_reason: Optional[Literal["stop", "length"]] = None


class ChatCompletionChunk(BaseModel):
    id: str = Field(default_factory=lambda: f"chatcmpl-{uuid.uuid4()}")
    object: str = "chat.completion.chunk"
    created: int = Field(default_factory=lambda: int(time.time()))
    model: str = "notion-proxy"  # Or could reflect the underlying Notion model
    choices: List[Choice]
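

# Illustrative sketch only: rendering a ChatCompletionChunk as an OpenAI-style
# SSE event. Assumes Pydantic v2 (model_dump_json); on v1 use .json() instead.
# The "data: ...\n\n" framing is the standard SSE wire format.
def _example_sse_line(text: str) -> str:
    chunk = ChatCompletionChunk(choices=[Choice(delta=ChoiceDelta(content=text))])
    return f"data: {chunk.model_dump_json()}\n\n"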


# --- Models for /v1/models Endpoint ---
class Model(BaseModel):
    id: str
    object: str = "model"
    created: int = Field(default_factory=lambda: int(time.time()))
    owned_by: str = "notion"  # Or specify based on actual model origin if needed


class ModelList(BaseModel):
    object: str = "list"
    data: List[Model]
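

# Illustrative sketch only: the kind of payload a /v1/models endpoint could
# build from these models. The model ids listed here are assumptions.
def _example_model_list() -> ModelList:
    return ModelList(data=[Model(id="notion-proxy"), Model(id="anthropic-opus-4")])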