File size: 2,589 Bytes
2ca8db5
 
 
 
 
 
 
 
 
 
4726024
2ca8db5
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
4726024
 
2ca8db5
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
import time
import uuid
from pydantic import BaseModel, Field
from typing import List, Optional, Dict, Any, Literal, Union

# --- Models Moved from main.py ---

# Input Models (OpenAI-like)
class ChatMessage(BaseModel):
    """A single message in an OpenAI-style chat transcript."""
    role: Literal["system", "user", "assistant"]  # author of the message
    content: str  # plain-text message body

class ChatCompletionRequest(BaseModel):
    """OpenAI-style ``/v1/chat/completions`` request body.

    Only ``messages`` is required. ``model`` is accepted for client
    compatibility, but requests are mapped onto Notion's model
    (``notion_model``) instead.
    """
    messages: List[ChatMessage]
    model: str = "notion-proxy" # Model name can be passed, but we map to Notion's model
    stream: bool = False
    # Accepted for OpenAI-client compatibility so standard clients validate
    # cleanly; these do not map directly onto Notion's API and may be ignored
    # by the proxy.
    max_tokens: Optional[int] = None
    temperature: Optional[float] = None
    # space_id and thread_id are now handled globally via environment variables
    notion_model: str = "anthropic-opus-4" # Default Notion model, can be overridden


# Notion Models
class NotionTranscriptConfigValue(BaseModel):
    """Value payload for a transcript item of type ``config``."""
    type: str = "markdown-chat"  # Notion transcript format identifier
    model: str # e.g., "anthropic-opus-4"

class NotionTranscriptItem(BaseModel):
    """One entry in the transcript list sent to Notion.

    The runtime shape of ``value`` depends on ``type`` — presumably a
    ``config`` item carries a NotionTranscriptConfigValue and the text-bearing
    items carry ``str`` or nested string lists; verify against the transcript
    builder in main.py.
    """
    type: Literal["config", "user", "markdown-chat"]
    value: Union[List[List[str]], str, NotionTranscriptConfigValue]

class NotionDebugOverrides(BaseModel):
    """Debug flags forwarded to Notion; the defaults disable all overrides."""
    # NOTE(review): bare Dict means Dict[Any, Any]; if keys are always strings,
    # Dict[str, Any] would be tighter — confirm against Notion's API before
    # changing, since pydantic validates against the annotation.
    cachedInferences: Dict = Field(default_factory=dict)
    annotationInferences: Dict = Field(default_factory=dict)
    emitInferences: bool = False

class NotionRequestBody(BaseModel):
    """Top-level request payload sent to Notion's AI endpoint."""
    traceId: str = Field(default_factory=lambda: str(uuid.uuid4()))  # fresh UUID per request
    spaceId: str  # target Notion workspace id; required, supplied by caller
    transcript: List[NotionTranscriptItem]  # config entry plus conversation turns
    # threadId is removed, createThread will be set to true
    createThread: bool = True
    debugOverrides: NotionDebugOverrides = Field(default_factory=NotionDebugOverrides)
    generateTitle: bool = False
    saveAllThreadOperations: bool = True


# Output Models (OpenAI SSE)
class ChoiceDelta(BaseModel):
    """Incremental content fragment within one streamed SSE choice."""
    content: Optional[str] = None  # None on the final (finish) chunk

class Choice(BaseModel):
    """A single choice entry inside a streamed completion chunk."""
    index: int = 0  # proxy emits a single choice, so index stays 0
    delta: ChoiceDelta
    finish_reason: Optional[Literal["stop", "length"]] = None  # set only on the last chunk

class ChatCompletionChunk(BaseModel):
    """OpenAI-compatible SSE chunk (``chat.completion.chunk`` object)."""
    id: str = Field(default_factory=lambda: f"chatcmpl-{uuid.uuid4()}")  # OpenAI-style id
    object: str = "chat.completion.chunk"
    created: int = Field(default_factory=lambda: int(time.time()))  # Unix timestamp
    model: str = "notion-proxy" # Or could reflect the underlying Notion model
    choices: List[Choice]


# --- Models for /v1/models Endpoint ---

class Model(BaseModel):
    """A single model entry for the OpenAI-compatible ``/v1/models`` response."""
    id: str  # model identifier shown to clients
    object: str = "model"
    created: int = Field(default_factory=lambda: int(time.time()))  # Unix timestamp
    owned_by: str = "notion" # Or specify based on actual model origin if needed

class ModelList(BaseModel):
    """OpenAI-compatible list wrapper returned by ``/v1/models``."""
    object: str = "list"
    data: List[Model]