Spaces:
Sleeping
Sleeping
File size: 1,922 Bytes
d6815ad |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 |
"""
Execution state and result models
"""
import uuid
from datetime import datetime, timezone
from enum import Enum
from typing import Any, Optional

from pydantic import BaseModel, Field
class ExecutionStatus(str, Enum):
    """Lifecycle states a workflow execution can be in.

    Subclassing ``str`` lets members compare equal to their plain string
    values and serialize naturally into JSON payloads.
    """
    PENDING = "pending"      # queued, not yet started
    RUNNING = "running"      # currently executing
    COMPLETED = "completed"  # finished without error
    FAILED = "failed"        # finished with an error
    CANCELLED = "cancelled"  # stopped before completion
class NodeResult(BaseModel):
    """Outcome of running one node inside a workflow execution."""
    node_id: str                       # identifier of the node that ran
    status: ExecutionStatus            # state the node ended up in
    output: Any | None = None          # value the node produced, if any
    error: str | None = None           # error description when the node failed
    started_at: str | None = None      # start timestamp string — format set by caller; TODO confirm ISO-8601
    completed_at: str | None = None    # completion timestamp string — same caveat as started_at
    duration_ms: float | None = None   # wall-clock duration in milliseconds
class ExecutionState(BaseModel):
    """Mutable snapshot of a workflow run while it is in flight."""
    # Each run gets a fresh random UUID unless the caller supplies one.
    execution_id: str = Field(default_factory=lambda: str(uuid.uuid4()))
    workflow_id: str                   # workflow this run belongs to
    status: ExecutionStatus = ExecutionStatus.PENDING
    current_node: str | None = None    # node currently executing, if any
    # Per-node outcomes, keyed by node id.
    node_results: dict[str, NodeResult] = Field(default_factory=dict)
    # State shared between nodes during the run.
    variables: dict[str, Any] = Field(default_factory=dict)
    started_at: str | None = None      # run start timestamp string
    completed_at: str | None = None    # run completion timestamp string
class ExecutionResult(BaseModel):
    """Summary produced once a workflow run has finished."""
    execution_id: str                       # id of the run this result describes
    workflow_id: str                        # workflow that was executed
    status: ExecutionStatus                 # terminal status of the run
    node_results: list[NodeResult]          # per-node outcomes, in recorded order
    final_output: Any | None = None         # overall output of the run, if any
    error: str | None = None                # error description when the run failed
    total_duration_ms: float | None = None  # total wall-clock duration in milliseconds
    started_at: str                         # run start timestamp string
    completed_at: str                       # run completion timestamp string
class GuardianAlert(BaseModel):
    """Alert from the Guardian system.

    ``timestamp`` defaults to a timezone-aware ISO-8601 UTC timestamp.
    The previous default used naive local-time ``datetime.now()``, which is
    ambiguous across hosts in different time zones; UTC makes alerts from
    different machines directly comparable. Existing consumers parsing
    ISO-8601 strings remain compatible (the string gains a "+00:00" offset).
    """
    id: str = Field(default_factory=lambda: str(uuid.uuid4()))  # unique alert id
    level: str  # severity: one of "info", "warning", "error"
    message: str  # human-readable description of the condition
    node_id: Optional[str] = None  # node the alert refers to, if any
    suggestion: Optional[str] = None  # optional remediation hint
    # Timezone-aware UTC timestamp; requires `timezone` from datetime.
    timestamp: str = Field(default_factory=lambda: datetime.now(timezone.utc).isoformat())
|