"""
Pydantic models for orchestration state and ChatUI input
"""
from typing import Optional, Dict, Any, List
from typing_extensions import TypedDict
from pydantic import BaseModel
from langchain_core.documents import Document
class GraphState(TypedDict, total=False):
    """State object passed through the LangGraph workflow.

    Declared with ``total=False`` so every key is optional: each workflow
    node may populate only the fields it produces and read only the fields
    it needs.
    """
    query: str  # User query driving the workflow
    context: str  # Context string used for generation
    raw_documents: List[Document]  # Retrieved documents for generation
    raw_context: List[Document]  # Alias for backward compatibility
    ingestor_context: str  # Context produced by the ingestion step
    result: str  # Final generated answer/result text
    sources: List[Dict[str, str]]  # Source attributions (string key/value maps)
    metadata: Dict[str, Any]  # Arbitrary workflow metadata
    conversation_context: Optional[str]  # Conversation history for multi-turn
    file_content: Optional[bytes]  # Raw bytes of an attached file, if any
    filename: Optional[str]  # Name of the attached file, if any
    file_type: Optional[str]  # File type of the attachment — presumably a MIME type or extension; confirm against caller
    workflow_type: Optional[str]  # 'standard' or 'geojson_direct'
    metadata_filters: Optional[Dict[str, Any]]  # Filters applied during retrieval — NOTE(review): schema not visible here; verify against retriever
class Message(BaseModel):
    """Single message in a conversation history."""
    role: str  # 'user', 'assistant', or 'system'
    content: str  # Message body text
    id: Optional[str] = None  # Optional message identifier — presumably assigned by the client UI; confirm
class ChatUIInput(BaseModel):
    """Input model for text-only ChatUI requests (no file attachments)."""
    messages: Optional[List[Message]] = None  # Conversation history, oldest first — order assumed; confirm against caller
    preprompt: Optional[str] = None  # Optional system/preamble prompt supplied by the UI
class ChatUIFileInput(BaseModel):
    """Input model for ChatUI requests with file attachments.

    NOTE(review): duplicates the ``messages``/``preprompt`` fields of
    ``ChatUIInput`` rather than inheriting; kept as-is because pydantic
    field order (and thus serialization order) would change under
    inheritance.
    """
    files: Optional[List[Dict[str, Any]]] = None  # Attachment descriptors — shape/keys not visible here; verify against the ChatUI payload
    messages: Optional[List[Message]] = None  # Conversation history
    preprompt: Optional[str] = None  # Optional system/preamble prompt