File size: 3,722 Bytes
2021f39
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
#!/usr/bin/env python3
"""
GraphRAG service: combines vector search + graph expansion to produce context.
Environment:
- VECTOR_BASE: http://host:7010
- GRAPH_BASE: http://host:7011
Endpoints:
- GET /health
- POST /graphrag {query_vector, seed_ids?, top_k?, depth?}
- POST /query     (alias for /graphrag)
"""

import os
from typing import Dict, Any, List

import requests
from fastapi import FastAPI, Request
from fastapi.middleware.cors import CORSMiddleware
from fastapi.responses import JSONResponse
from prometheus_client import Counter, Histogram, make_asgi_app
import uvicorn

# Service configuration — every value is overridable via environment variables.
PORT = int(os.getenv("PORT", "7012"))
# Upstream base URLs; trailing slashes stripped so f-string path joins stay clean.
VECTOR_BASE = os.getenv("VECTOR_BASE", "http://127.0.0.1:7010").rstrip("/")
GRAPH_BASE = os.getenv("GRAPH_BASE", "http://127.0.0.1:7011").rstrip("/")
RANKER_BASE = os.getenv("RANKER_BASE", "http://127.0.0.1:7014").rstrip("/")

app = FastAPI(title="Nova GraphRAG", version="0.1.0")
# Fully permissive CORS. NOTE(review): browsers reject allow_origins=["*"]
# combined with allow_credentials=True per the CORS spec — confirm whether
# credentialed cross-origin requests are actually required here.
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)

# Prometheus instruments, labeled by route; served at /metrics (mounted below).
REQUESTS = Counter("graphrag_requests_total", "GraphRAG requests", ["route"])
LATENCY = Histogram("graphrag_request_latency_seconds", "Latency", ["route"])


@app.get("/health")
def health():
    """Liveness probe: report service status plus the upstream base URLs."""
    REQUESTS.labels(route="health").inc()
    payload = {
        "status": "ok",
        "port": PORT,
        "vector": VECTOR_BASE,
        "graph": GRAPH_BASE,
    }
    return payload


@app.post("/graphrag")
async def graphrag(req: Request) -> JSONResponse:
    """Assemble a GraphRAG context: vector search → graph expansion → rerank.

    Request body (JSON):
    - query_vector: optional embedding; when absent, vector search is skipped.
    - seed_ids: optional list of node ids to expand directly (default []).
    - top_k: number of vector hits / seeds to use (default 5).
    - depth: graph expansion depth forwarded to the graph service (default 1).
    - collection: vector collection name (default "default").

    Returns 200 with {"seed_ids", "vector_top", "neighbors"}. Every upstream
    call is best-effort: a failing vector store, graph service, or ranker
    degrades that stage to an empty/unchanged result instead of failing the
    whole request.
    """
    with LATENCY.labels(route="graphrag").time():
        REQUESTS.labels(route="graphrag").inc()
        body = await req.json()
        qv = body.get("query_vector")
        seeds = body.get("seed_ids", [])
        top_k = int(body.get("top_k", 5))
        depth = int(body.get("depth", 1))

        # Stage 1: vector search (skipped when no query vector supplied).
        vec_res: Dict[str, Any] = {"results": []}
        if qv is not None:
            try:
                vr = requests.post(
                    f"{VECTOR_BASE}/search",
                    json={
                        "collection": body.get("collection", "default"),
                        "query_vector": qv,
                        "top_k": top_k,
                    },
                    timeout=10,
                )
                vec_res = vr.json()
            except Exception:
                # Best-effort: a vector-store outage degrades to no hits.
                pass

        # Stage 2: merge explicit seeds with vector hits, preserving order and
        # deduplicating. Hits without an "id" are skipped — previously this
        # appended None, which was then sent to the graph service as a seed.
        ids = list(seeds)
        for r in vec_res.get("results", []):
            rid = r.get("id")
            if rid is not None and rid not in ids:
                ids.append(rid)

        # Stage 3: expand each of the top seeds via the graph service.
        neighbors: List[Dict[str, Any]] = []
        for sid in ids[:top_k]:
            try:
                nr = requests.post(
                    f"{GRAPH_BASE}/neighbors",
                    json={"id": sid, "depth": depth},
                    timeout=5,
                )
                neighbors.extend(nr.json().get("neighbors", []))
            except Exception:
                # Best-effort: skip seeds the graph service cannot expand.
                continue

        vector_top = vec_res.get("results", [])[:top_k]
        # Stage 4: optional rerank. Any failure (ranker down, malformed reply)
        # leaves the original vector-score ordering intact.
        try:
            rr = requests.post(
                f"{RANKER_BASE}/rerank",
                json={
                    "items": [
                        {
                            "id": r.get("id"),
                            "score": r.get("score", 0.0),
                            # Placeholder signals until real metadata is wired in.
                            "recency": 0.5,
                            "authority": 0.5,
                            "coverage": 0.5,
                        }
                        for r in vector_top
                    ]
                },
                timeout=5,
            )
            if rr.status_code == 200:
                order = [it["id"] for it in rr.json().get("items", [])]
                # Guard against id-less hits: previously r["id"] could raise
                # KeyError here and silently abort the whole rerank.
                id2 = {r["id"]: r for r in vector_top if "id" in r}
                vector_top = [id2[i] for i in order if i in id2]
        except Exception:
            pass

        context = {
            "seed_ids": ids,
            "vector_top": vector_top,
            "neighbors": neighbors,
        }
        return JSONResponse(status_code=200, content=context)


@app.post("/query")
async def query(req: Request) -> JSONResponse:
    """Compatibility alias: delegate /query requests to the /graphrag handler."""
    response = await graphrag(req)
    return response


# Expose Prometheus metrics as a mounted ASGI sub-application under /metrics.
metrics_app = make_asgi_app()
app.mount("/metrics", metrics_app)


if __name__ == "__main__":
    # Script entry point: serve on all interfaces at the configured port.
    uvicorn.run(app, host="0.0.0.0", port=PORT)