Spaces:
Sleeping
Sleeping
Update agents/graph_builder_langgraph.py
Browse files
agents/graph_builder_langgraph.py
CHANGED
|
@@ -1,86 +1,44 @@
|
|
| 1 |
import os
|
| 2 |
-
import json
|
| 3 |
-
import uuid
|
| 4 |
-
from typing import Dict, Any
|
| 5 |
-
from langgraph.graph import StateGraph, START, END
|
| 6 |
from .document_parser import parse_documents
|
| 7 |
from .requirements_extractor import extract_requirements
|
| 8 |
from .ui_generator import generate_ui_html
|
|
|
|
| 9 |
|
| 10 |
-
|
| 11 |
-
|
| 12 |
-
# ======================================================================
|
| 13 |
-
|
| 14 |
-
class State(Dict[str, Any]):
|
| 15 |
-
pass
|
| 16 |
-
|
| 17 |
-
# ======================================================================
|
| 18 |
-
# π Individual Agent Nodes
|
| 19 |
-
# ======================================================================
|
| 20 |
-
|
| 21 |
-
async def node_parse(state: State, config=None, runtime=None) -> State:
|
| 22 |
-
"""Extract plain text content from uploaded documents."""
|
| 23 |
-
print("π [Document Parser] Extracting text...")
|
| 24 |
files = state.get("files", [])
|
| 25 |
-
|
| 26 |
-
|
| 27 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
| 28 |
|
| 29 |
-
async def node_requirements(state
|
| 30 |
-
"""
|
| 31 |
print("π§© [Requirements Agent] Extracting structured info...")
|
| 32 |
-
text = state["text"]
|
| 33 |
requirements = await extract_requirements(text)
|
| 34 |
return {"requirements": requirements}
|
| 35 |
|
| 36 |
-
|
| 37 |
-
|
| 38 |
-
"
|
| 39 |
-
|
| 40 |
-
|
| 41 |
-
|
| 42 |
-
reference_path = os.path.join("templates", "demo_qms_design.html")
|
| 43 |
-
if not os.path.exists(reference_path):
|
| 44 |
-
raise FileNotFoundError("Reference design (demo_qms_design.html) not found")
|
| 45 |
-
|
| 46 |
-
with open(reference_path, "r", encoding="utf-8") as f:
|
| 47 |
reference_html = f.read()
|
| 48 |
-
|
| 49 |
-
|
| 50 |
-
|
| 51 |
-
|
| 52 |
-
|
| 53 |
-
|
| 54 |
-
|
| 55 |
-
|
| 56 |
-
|
| 57 |
-
|
| 58 |
-
|
| 59 |
-
|
| 60 |
-
|
| 61 |
-
public_url = f"/static/outputs/{session_id}.html"
|
| 62 |
-
|
| 63 |
-
return {
|
| 64 |
-
"html": html,
|
| 65 |
-
"public_url": public_url,
|
| 66 |
-
}
|
| 67 |
-
|
| 68 |
-
# ======================================================================
|
| 69 |
-
# πΈοΈ Graph Definition
|
| 70 |
-
# ======================================================================
|
| 71 |
-
|
| 72 |
-
def build_graph():
|
| 73 |
-
graph = StateGraph(State)
|
| 74 |
-
|
| 75 |
-
graph.add_node("Document Parser Agent", node_parse)
|
| 76 |
-
graph.add_node("Requirements Extraction Agent", node_requirements)
|
| 77 |
-
graph.add_node("UI Generator Agent", node_ui)
|
| 78 |
-
|
| 79 |
-
graph.add_edge(START, "Document Parser Agent")
|
| 80 |
-
graph.add_edge("Document Parser Agent", "Requirements Extraction Agent")
|
| 81 |
-
graph.add_edge("Requirements Extraction Agent", "UI Generator Agent")
|
| 82 |
-
graph.add_edge("UI Generator Agent", END)
|
| 83 |
-
|
| 84 |
-
return graph.compile()
|
| 85 |
-
|
| 86 |
-
graph = build_graph()
|
|
|
|
| 1 |
import os
|
|
|
|
|
|
|
|
|
|
|
|
|
| 2 |
from .document_parser import parse_documents
|
| 3 |
from .requirements_extractor import extract_requirements
|
| 4 |
from .ui_generator import generate_ui_html
|
| 5 |
+
from langgraph.graph import StateGraph
|
| 6 |
|
| 7 |
+
async def node_parse_docs(state, config=None):
    """First agent node: extract raw text from the uploaded documents.

    Reads ``state["files"]`` and returns ``{"text": <parsed text>}`` for
    the downstream requirements node.

    Raises:
        ValueError: when no files were supplied, or parsing produced
            no text at all.
    """
    uploaded = state.get("files", [])
    if not uploaded:
        raise ValueError("No input files provided to document parser.")
    print("π [Document Parser] Extracting text...")
    extracted = await parse_documents(uploaded)
    if not extracted:
        raise ValueError("Document parser returned empty text.")
    return {"text": extracted}
|
| 17 |
|
| 18 |
+
async def node_requirements(state, config=None):
    """Second agent node: derive structured business requirements.

    Consumes the ``"text"`` value produced by the document-parser node and
    returns ``{"requirements": ...}``.
    """
    print("π§© [Requirements Agent] Extracting structured info...")
    # "text" is written by the upstream parse node; a KeyError here means
    # the graph wiring (or the parse node) is broken.
    parsed_text = state["text"]
    extracted = await extract_requirements(parsed_text)
    return {"requirements": extracted}
|
| 24 |
|
| 25 |
+
async def node_ui_generator(state, config=None):
    """Third agent node: generate the final HTML UI.

    Reads ``state["requirements"]``, combines them with the reference
    design template, and returns ``{"html": <generated markup>}``.

    Raises:
        FileNotFoundError: when the reference design template is missing —
            raised explicitly with a clear message instead of surfacing as
            a bare ``open()`` failure.
    """
    print("π¨ [UI Generator] Creating PowerApps-style interface...")
    reqs = state["requirements"]
    reference_html_path = os.path.join("templates", "demo_qms_design.html")
    # Fail fast with an explicit message (this guard existed in the previous
    # version of this module and was lost in the rewrite).
    if not os.path.exists(reference_html_path):
        raise FileNotFoundError("Reference design (demo_qms_design.html) not found")
    with open(reference_html_path, "r", encoding="utf-8") as f:
        reference_html = f.read()
    ui = await generate_ui_html(reqs, reference_html)
    # NOTE(review): assumes generate_ui_html returns a mapping with an
    # "html" key — confirm against agents/ui_generator.py.
    return {"html": ui["html"]}
|
| 34 |
+
|
| 35 |
+
# ----------------------------------------------------------------------
# Build pipeline
# ----------------------------------------------------------------------
# NOTE(review): StateGraph requires a state-schema argument; the previous
# version of this module passed a Dict[str, Any] subclass, and a bare
# ``StateGraph()`` fails at import time. Restore a dict-based schema.
class _PipelineState(dict):
    """Dict-like state flowing through the three agent nodes.

    Keys produced along the pipeline: ``files`` (input), ``text``,
    ``requirements``, ``html``.
    """

graph = StateGraph(_PipelineState)
graph.add_node("parse_docs", node_parse_docs)
graph.add_node("requirements", node_requirements)
graph.add_node("ui_generator", node_ui_generator)

graph.set_entry_point("parse_docs")
graph.add_edge("parse_docs", "requirements")
graph.add_edge("requirements", "ui_generator")
graph.set_finish_point("ui_generator")

# The previous version of this module exported a *compiled* graph; keep a
# compiled, directly-invocable handle for callers that expect one.
compiled_graph = graph.compile()
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|