File size: 3,195 Bytes
8753d29
 
 
 
 
 
 
 
 
 
9f7921c
 
8753d29
9f7921c
8753d29
 
9f7921c
8753d29
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
87553a7
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
import logging
from typing import List, Optional

import uvicorn
from fastapi import FastAPI, HTTPException
from fastapi.middleware.cors import CORSMiddleware
from pydantic import BaseModel

from graph import app_graph

# FastAPI application instance; endpoints below are registered against it.
app = FastAPI(title="Clause.ai Backend")

# Browser origins allowed to call this API cross-origin:
# the deployed Vercel frontend and the local dev server.
origins = [
    "https://clause-ai-nbu8.vercel.app", 
    "http://localhost:3000"              
]

# Enable CORS for the whitelisted frontends. Credentials (cookies/auth
# headers) are allowed, and all methods/headers are accepted from the
# listed origins.
app.add_middleware(
    CORSMiddleware,
    allow_origins=origins,        
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)

# --- API MODELS ---
class DraftRequest(BaseModel):
    """Request body for POST /draft: the user's natural-language drafting query."""

    query: str

class AgentResponse(BaseModel):
    """Response body for POST /draft.

    One model covers all outcomes; `status`/`phase` tell the client which
    of the optional fields are populated (see the scenarios in
    `generate_clause`).
    """

    status: str  # "general_response" | "needs_info" | "success" | "error"
    phase: str   # agent phase: "stopped" | "planning" | "drafting" | "unknown"
    message: str
    # Mutable default is safe here: pydantic copies defaults per instance.
    missing_info: List[str] = []
    draft: Optional[str] = None      # final clause text when status == "success"
    reference: Optional[str] = None  # reference clause used for the draft

@app.get("/")
def home():
    """Health-check endpoint: confirms the backend is reachable."""
    payload = dict(status="Clause.ai Brain is Online")
    return payload

def _agent_response(status, phase, message, missing_info=None, draft=None, reference=None):
    """Build the AgentResponse payload dict with consistent defaults.

    Keeps the four endpoint outcomes below from repeating the same
    seven-key dict literal.
    """
    return {
        "status": status,
        "phase": phase,
        "message": message,
        "missing_info": [] if missing_info is None else missing_info,
        "draft": draft,
        "reference": reference,
    }


@app.post("/draft", response_model=AgentResponse)
async def generate_clause(request: DraftRequest):
    """Run the LangGraph agent on the user's query and map its phase to an API response.

    Outcomes (keyed on the agent's reported "phase"):
      - "stopped":  guardrail intercepted a general question/greeting/injection;
                    the agent's reply is in `message`.
      - "planning": triage needs clarification; `message` carries the question
                    and `missing_info` lists the gaps.
      - "drafting" (or any state with a non-empty final draft): success;
                    `draft` and `reference` are populated.
      - anything else: generic error response.

    Raises:
        HTTPException: 500 on any unexpected failure while running the agent.
    """
    try:
        # Fresh, fully-blank agent state for this request.
        initial_state = {
            "query": request.query,
            "messages": [],
            "context": "",
            "reference_clause": "",
            "final_draft": "",
            "phase": "",
            "missing_info": [],
            "clarification_question": ""
        }

        # Run the LangGraph agent synchronously.
        # NOTE(review): invoke() blocks the event loop inside this async
        # endpoint; consider `await app_graph.ainvoke(...)` if supported.
        result = app_graph.invoke(initial_state)

        phase = result.get("phase", "")

        # --- SCENARIO 1: Guardrail stopped (general question/greeting/injection) ---
        if phase == "stopped":
            return _agent_response(
                "general_response", "stopped",
                message=result.get("final_draft", ""),
            )

        # --- SCENARIO 2: Triage needs clarification ---
        if phase == "planning":
            return _agent_response(
                "needs_info", "planning",
                message=result.get("clarification_question", "Please provide more details."),
                missing_info=result.get("missing_info", []),
            )

        # --- SCENARIO 3: Draft completed successfully ---
        if phase == "drafting" or result.get("final_draft"):
            return _agent_response(
                "success", "drafting",
                message="Draft generated successfully.",
                draft=result.get("final_draft", ""),
                reference=result.get("reference_clause", ""),
            )

        # --- FALLBACK: Unknown state ---
        return _agent_response(
            "error", "unknown",
            message="Unable to process your request. Please try again.",
        )

    except HTTPException:
        # Don't re-wrap deliberate HTTP errors as opaque 500s.
        raise
    except Exception as e:
        # Log the full traceback server-side instead of print().
        logging.getLogger(__name__).exception("Agent invocation failed")
        # NOTE(review): str(e) leaks internal error details to the client;
        # consider a generic detail message in production.
        raise HTTPException(status_code=500, detail=str(e))

if __name__ == "__main__":
    # Direct-run entry point: serve on all interfaces, port 8000.
    uvicorn.run(app, host="0.0.0.0", port=8000)