feat: Supabase auth + Razorpay billing + fix API double-URL bug
- Fix: api.get/post calls had doubled base URL (axios baseURL + explicit prefix)
- Add web/src/api.ts: single shared axios instance with ngrok header
- Add server/auth.py: Supabase JWT middleware, plan guard (free=2, starter=25, pro/byok=unlimited), BYOK key encryption
- Add server/billing.py: Razorpay order creation, payment verify, webhook handler
- Add server/schema.sql: profiles, builds, payments, plan_limits tables with RLS policies
- Add /health endpoint showing LLM backend status
- Add /profile and /profile/api-key endpoints
- Add /billing/create-order, /billing/verify-payment, /billing/webhook/razorpay
- Fix .env quotes: all values now unquoted (Docker env_file includes quotes literally)
- Fix deploy.yml: validates HF_TOKEN set before push, clear error if missing
- Auth is opt-in: when SUPABASE_URL not set, all endpoints work without token
- .env.example +11 -0
- .github/workflows/deploy.yml +5 -1
- requirements.txt +1 -0
- server/api.py +97 -6
- server/auth.py +271 -0
- server/billing.py +207 -0
- server/schema.sql +133 -0
- web/src/App.tsx +3 -5
- web/src/api.ts +9 -0
- web/src/components/BuildMonitor.tsx +2 -4
- web/src/pages/Dashboard.tsx +4 -6
- web/src/pages/DesignStudio.tsx +5 -7
- web/src/pages/Documentation.tsx +5 -7
- web/src/pages/HumanInLoopBuild.tsx +9 -11
|
@@ -13,6 +13,17 @@ LLM_API_KEY=NA
|
|
| 13 |
|
| 14 |
# ββ Verilog Code-Gen override (optional) ββββββββββββββββββββββββββββββββββββ
|
| 15 |
VERILOG_CODEGEN_ENABLED=false
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 16 |
VERILOG_CODEGEN_MODEL=
|
| 17 |
VERILOG_CODEGEN_BASE_URL=
|
| 18 |
VERILOG_CODEGEN_API_KEY=
|
|
|
|
| 13 |
|
| 14 |
# ββ Verilog Code-Gen override (optional) ββββββββββββββββββββββββββββββββββββ
|
| 15 |
VERILOG_CODEGEN_ENABLED=false
|
| 16 |
+
|
| 17 |
+
# ββ Supabase Auth (leave blank to disable auth β all builds allowed) ββββββββ
|
| 18 |
+
SUPABASE_URL=
|
| 19 |
+
SUPABASE_SERVICE_KEY=
|
| 20 |
+
SUPABASE_JWT_SECRET=
|
| 21 |
+
ENCRYPTION_KEY=change-me-in-production-32chars!
|
| 22 |
+
|
| 23 |
+
# ββ Razorpay Billing (leave blank to disable payments) ββββββββββββββββββββββ
|
| 24 |
+
RAZORPAY_KEY_ID=
|
| 25 |
+
RAZORPAY_KEY_SECRET=
|
| 26 |
+
RAZORPAY_WEBHOOK_SECRET=
|
| 27 |
VERILOG_CODEGEN_MODEL=
|
| 28 |
VERILOG_CODEGEN_BASE_URL=
|
| 29 |
VERILOG_CODEGEN_API_KEY=
|
|
@@ -25,5 +25,9 @@ jobs:
|
|
| 25 |
env:
|
| 26 |
HF_TOKEN: ${{ secrets.HF_TOKEN }}
|
| 27 |
run: |
|
| 28 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
| 29 |
git push hf main --force
|
|
|
|
| 25 |
env:
|
| 26 |
HF_TOKEN: ${{ secrets.HF_TOKEN }}
|
| 27 |
run: |
|
| 28 |
+
if [ -z "$HF_TOKEN" ]; then
|
| 29 |
+
echo "::error::HF_TOKEN secret is not set. Go to repo Settings β Secrets β Actions β New secret"
|
| 30 |
+
exit 1
|
| 31 |
+
fi
|
| 32 |
+
git remote add hf "https://hf_user:${HF_TOKEN}@huggingface.co/spaces/vxkyyy/AgentIC"
|
| 33 |
git push hf main --force
|
|
@@ -10,3 +10,4 @@ streamlit-option-menu
|
|
| 10 |
plotly
|
| 11 |
streamlit-ace
|
| 12 |
gdstk
|
|
|
|
|
|
| 10 |
plotly
|
| 11 |
streamlit-ace
|
| 12 |
gdstk
|
| 13 |
+
httpx
|
|
@@ -12,12 +12,23 @@ import glob
|
|
| 12 |
import threading
|
| 13 |
from typing import Any, Dict, List, Optional
|
| 14 |
|
| 15 |
-
from fastapi import FastAPI, HTTPException, Request
|
| 16 |
from fastapi.middleware.cors import CORSMiddleware
|
| 17 |
from fastapi.responses import StreamingResponse
|
| 18 |
from pydantic import BaseModel
|
| 19 |
|
| 20 |
from server.approval import approval_manager
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 21 |
from server.stage_summary import (
|
| 22 |
build_stage_complete_payload,
|
| 23 |
get_next_stage,
|
|
@@ -34,6 +45,7 @@ if src_path not in sys.path:
|
|
| 34 |
|
| 35 |
# βββ App βββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββ
|
| 36 |
app = FastAPI(title="AgentIC Backend API", version="3.0.0")
|
|
|
|
| 37 |
|
| 38 |
app.add_middleware(
|
| 39 |
CORSMiddleware,
|
|
@@ -78,9 +90,11 @@ STAGE_META: Dict[str, Dict[str, str]] = {
|
|
| 78 |
}
|
| 79 |
|
| 80 |
|
| 81 |
-
def _get_llm():
|
| 82 |
"""Mirrors CLI's get_llm() β tries cloud first, falls back to local.
|
| 83 |
Priority: NVIDIA Nemotron β GLM5 Cloud β VeriReason Local
|
|
|
|
|
|
|
| 84 |
"""
|
| 85 |
from agentic.config import CLOUD_CONFIG, LOCAL_CONFIG
|
| 86 |
from crewai import LLM
|
|
@@ -91,7 +105,7 @@ def _get_llm():
|
|
| 91 |
]
|
| 92 |
|
| 93 |
for name, cfg in configs:
|
| 94 |
-
key = cfg.get("api_key", "")
|
| 95 |
# Skip cloud configs with no valid key
|
| 96 |
if "Cloud" in name and (not key or key.strip() in ("", "mock-key", "NA")):
|
| 97 |
continue
|
|
@@ -300,7 +314,8 @@ def _run_agentic_build(job_id: str, req: BuildRequest):
|
|
| 300 |
_emit_agent_thought(job_id, agent_name, thought_type, message, state)
|
| 301 |
|
| 302 |
# Use smart LLM selection: Cloud first (Nemotron β GLM5) β Local fallback
|
| 303 |
-
|
|
|
|
| 304 |
_emit_event(job_id, "checkpoint", "INIT", f"π€ Compute engine ready", step=1)
|
| 305 |
|
| 306 |
orchestrator = BuildOrchestrator(
|
|
@@ -367,6 +382,13 @@ def _run_agentic_build(job_id: str, req: BuildRequest):
|
|
| 367 |
JOB_STORE[job_id]["result"] = result
|
| 368 |
JOB_STORE[job_id]["status"] = "done" if success else "failed"
|
| 369 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 370 |
final_type = "done" if success else "error"
|
| 371 |
final_msg = "β
Chip build completed successfully!" if success else "β Build failed. See logs for details."
|
| 372 |
_emit_event(job_id, final_type, orchestrator.state.name, final_msg, step=TOTAL_STEPS)
|
|
@@ -380,6 +402,7 @@ def _run_agentic_build(job_id: str, req: BuildRequest):
|
|
| 380 |
JOB_STORE[job_id]["status"] = "failed"
|
| 381 |
JOB_STORE[job_id]["result"] = {"error": str(e), "traceback": err}
|
| 382 |
_emit_event(job_id, "error", "FAIL", f"π₯ Critical error: {str(e)}", step=0)
|
|
|
|
| 383 |
finally:
|
| 384 |
# Cleanup approval gates
|
| 385 |
design_name = JOB_STORE.get(job_id, {}).get("design_name", "")
|
|
@@ -785,6 +808,26 @@ def read_root():
|
|
| 785 |
return {"message": "AgentIC API is online", "version": "3.0.0"}
|
| 786 |
|
| 787 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 788 |
@app.get("/pipeline/schema")
|
| 789 |
def get_pipeline_schema():
|
| 790 |
"""Canonical pipeline schema for frontend timeline rendering."""
|
|
@@ -886,8 +929,15 @@ def get_doc_content(doc_id: str):
|
|
| 886 |
|
| 887 |
|
| 888 |
@app.post("/build")
|
| 889 |
-
def trigger_build(req: BuildRequest):
|
| 890 |
-
"""Start a new chip build. Returns job_id immediately.
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 891 |
# Sanitize design name β Verilog identifiers cannot start with a digit
|
| 892 |
import re as _re
|
| 893 |
design_name = req.design_name.strip().lower()
|
|
@@ -908,10 +958,15 @@ def trigger_build(req: BuildRequest):
|
|
| 908 |
"events": [],
|
| 909 |
"result": {},
|
| 910 |
"created_at": int(time.time()),
|
|
|
|
|
|
|
| 911 |
}
|
| 912 |
|
| 913 |
req.design_name = design_name
|
| 914 |
|
|
|
|
|
|
|
|
|
|
| 915 |
thread = threading.Thread(
|
| 916 |
target=_run_agentic_build,
|
| 917 |
args=(job_id, req),
|
|
@@ -1190,3 +1245,39 @@ def _classify_artifact(filename: str) -> str:
|
|
| 1190 |
'.csv': 'report',
|
| 1191 |
}
|
| 1192 |
return classifications.get(ext, 'other')
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 12 |
import threading
|
| 13 |
from typing import Any, Dict, List, Optional
|
| 14 |
|
| 15 |
+
from fastapi import Depends, FastAPI, HTTPException, Request
|
| 16 |
from fastapi.middleware.cors import CORSMiddleware
|
| 17 |
from fastapi.responses import StreamingResponse
|
| 18 |
from pydantic import BaseModel
|
| 19 |
|
| 20 |
from server.approval import approval_manager
|
| 21 |
+
from server.auth import (
|
| 22 |
+
AUTH_ENABLED,
|
| 23 |
+
check_build_allowed,
|
| 24 |
+
encrypt_api_key,
|
| 25 |
+
get_current_user,
|
| 26 |
+
get_llm_key_for_user,
|
| 27 |
+
record_build_failure,
|
| 28 |
+
record_build_start,
|
| 29 |
+
record_build_success,
|
| 30 |
+
)
|
| 31 |
+
from server.billing import router as billing_router
|
| 32 |
from server.stage_summary import (
|
| 33 |
build_stage_complete_payload,
|
| 34 |
get_next_stage,
|
|
|
|
| 45 |
|
| 46 |
# βββ App βββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββ
|
| 47 |
app = FastAPI(title="AgentIC Backend API", version="3.0.0")
|
| 48 |
+
app.include_router(billing_router)
|
| 49 |
|
| 50 |
app.add_middleware(
|
| 51 |
CORSMiddleware,
|
|
|
|
| 90 |
}
|
| 91 |
|
| 92 |
|
| 93 |
+
def _get_llm(byok_api_key: str = None):
|
| 94 |
"""Mirrors CLI's get_llm() β tries cloud first, falls back to local.
|
| 95 |
Priority: NVIDIA Nemotron β GLM5 Cloud β VeriReason Local
|
| 96 |
+
|
| 97 |
+
If byok_api_key is provided (BYOK plan), it overrides the cloud config key.
|
| 98 |
"""
|
| 99 |
from agentic.config import CLOUD_CONFIG, LOCAL_CONFIG
|
| 100 |
from crewai import LLM
|
|
|
|
| 105 |
]
|
| 106 |
|
| 107 |
for name, cfg in configs:
|
| 108 |
+
key = byok_api_key if (byok_api_key and "Cloud" in name) else cfg.get("api_key", "")
|
| 109 |
# Skip cloud configs with no valid key
|
| 110 |
if "Cloud" in name and (not key or key.strip() in ("", "mock-key", "NA")):
|
| 111 |
continue
|
|
|
|
| 314 |
_emit_agent_thought(job_id, agent_name, thought_type, message, state)
|
| 315 |
|
| 316 |
# Use smart LLM selection: Cloud first (Nemotron β GLM5) β Local fallback
|
| 317 |
+
byok_key = JOB_STORE[job_id].get("byok_key")
|
| 318 |
+
llm, llm_name = _get_llm(byok_api_key=byok_key)
|
| 319 |
_emit_event(job_id, "checkpoint", "INIT", f"π€ Compute engine ready", step=1)
|
| 320 |
|
| 321 |
orchestrator = BuildOrchestrator(
|
|
|
|
| 382 |
JOB_STORE[job_id]["result"] = result
|
| 383 |
JOB_STORE[job_id]["status"] = "done" if success else "failed"
|
| 384 |
|
| 385 |
+
# ββ Record build outcome in Supabase βββββββββββββββββββββββ
|
| 386 |
+
user_profile = JOB_STORE[job_id].get("user_profile")
|
| 387 |
+
if success:
|
| 388 |
+
record_build_success(user_profile, job_id)
|
| 389 |
+
else:
|
| 390 |
+
record_build_failure(job_id)
|
| 391 |
+
|
| 392 |
final_type = "done" if success else "error"
|
| 393 |
final_msg = "β
Chip build completed successfully!" if success else "β Build failed. See logs for details."
|
| 394 |
_emit_event(job_id, final_type, orchestrator.state.name, final_msg, step=TOTAL_STEPS)
|
|
|
|
| 402 |
JOB_STORE[job_id]["status"] = "failed"
|
| 403 |
JOB_STORE[job_id]["result"] = {"error": str(e), "traceback": err}
|
| 404 |
_emit_event(job_id, "error", "FAIL", f"π₯ Critical error: {str(e)}", step=0)
|
| 405 |
+
record_build_failure(job_id)
|
| 406 |
finally:
|
| 407 |
# Cleanup approval gates
|
| 408 |
design_name = JOB_STORE.get(job_id, {}).get("design_name", "")
|
|
|
|
| 808 |
return {"message": "AgentIC API is online", "version": "3.0.0"}
|
| 809 |
|
| 810 |
|
| 811 |
+
@app.get("/health")
def health_check():
    """Health probe — verifies an LLM backend can be selected.

    Returns "ok" when `_get_llm()` succeeds, "degraded" otherwise.
    Never raises: failures are swallowed so container healthchecks get a
    JSON body instead of a 500.
    """
    # FIX: LOCAL_CONFIG was imported but never used in this endpoint.
    from agentic.config import CLOUD_CONFIG

    llm_ok = False
    llm_name = "none"
    try:
        _, llm_name = _get_llm()
        llm_ok = True
    except Exception:
        pass  # reported as "degraded" below rather than surfacing an error
    return {
        "status": "ok" if llm_ok else "degraded",
        "llm_backend": llm_name,
        "llm_ok": llm_ok,
        "cloud_key_set": bool(CLOUD_CONFIG.get("api_key", "").strip()),
        "version": "3.0.0",
    }
|
| 829 |
+
|
| 830 |
+
|
| 831 |
@app.get("/pipeline/schema")
|
| 832 |
def get_pipeline_schema():
|
| 833 |
"""Canonical pipeline schema for frontend timeline rendering."""
|
|
|
|
| 929 |
|
| 930 |
|
| 931 |
@app.post("/build")
|
| 932 |
+
async def trigger_build(req: BuildRequest, profile: dict = Depends(get_current_user)):
|
| 933 |
+
"""Start a new chip build. Returns job_id immediately.
|
| 934 |
+
|
| 935 |
+
When auth is enabled, checks plan quota and uses BYOK key if applicable.
|
| 936 |
+
"""
|
| 937 |
+
# ββ Auth guard: check plan + build count ββ
|
| 938 |
+
check_build_allowed(profile)
|
| 939 |
+
byok_key = get_llm_key_for_user(profile)
|
| 940 |
+
|
| 941 |
# Sanitize design name β Verilog identifiers cannot start with a digit
|
| 942 |
import re as _re
|
| 943 |
design_name = req.design_name.strip().lower()
|
|
|
|
| 958 |
"events": [],
|
| 959 |
"result": {},
|
| 960 |
"created_at": int(time.time()),
|
| 961 |
+
"user_profile": profile,
|
| 962 |
+
"byok_key": byok_key,
|
| 963 |
}
|
| 964 |
|
| 965 |
req.design_name = design_name
|
| 966 |
|
| 967 |
+
# Record build start in Supabase
|
| 968 |
+
record_build_start(profile, job_id, design_name)
|
| 969 |
+
|
| 970 |
thread = threading.Thread(
|
| 971 |
target=_run_agentic_build,
|
| 972 |
args=(job_id, req),
|
|
|
|
| 1245 |
'.csv': 'report',
|
| 1246 |
}
|
| 1247 |
return classifications.get(ext, 'other')
|
| 1248 |
+
|
| 1249 |
+
|
| 1250 |
+
# βββ Auth & Profile Routes ββββββββββββββββββββββββββββββββββββββββββ
|
| 1251 |
+
class SetApiKeyRequest(BaseModel):
|
| 1252 |
+
api_key: str
|
| 1253 |
+
|
| 1254 |
+
|
| 1255 |
+
@app.get("/profile")
|
| 1256 |
+
async def get_profile(profile: dict = Depends(get_current_user)):
|
| 1257 |
+
"""Return the authenticated user's profile (plan, build count, etc.)."""
|
| 1258 |
+
if profile is None:
|
| 1259 |
+
return {"auth_enabled": False}
|
| 1260 |
+
return {
|
| 1261 |
+
"auth_enabled": True,
|
| 1262 |
+
"id": profile["id"],
|
| 1263 |
+
"email": profile.get("email"),
|
| 1264 |
+
"full_name": profile.get("full_name"),
|
| 1265 |
+
"plan": profile.get("plan", "free"),
|
| 1266 |
+
"successful_builds": profile.get("successful_builds", 0),
|
| 1267 |
+
"has_byok_key": bool(profile.get("llm_api_key")),
|
| 1268 |
+
}
|
| 1269 |
+
|
| 1270 |
+
|
| 1271 |
+
@app.post("/profile/api-key")
|
| 1272 |
+
async def set_byok_key(req: SetApiKeyRequest, profile: dict = Depends(get_current_user)):
|
| 1273 |
+
"""Store an encrypted LLM API key for BYOK plan users."""
|
| 1274 |
+
if profile is None:
|
| 1275 |
+
raise HTTPException(status_code=403, detail="Auth not enabled")
|
| 1276 |
+
if profile.get("plan") != "byok":
|
| 1277 |
+
raise HTTPException(status_code=400, detail="Only BYOK plan users can set an API key")
|
| 1278 |
+
|
| 1279 |
+
from server.auth import _supabase_update
|
| 1280 |
+
encrypted = encrypt_api_key(req.api_key)
|
| 1281 |
+
_supabase_update("profiles", f"id=eq.{profile['id']}", {"llm_api_key": encrypted})
|
| 1282 |
+
return {"success": True, "message": "API key stored securely"}
|
| 1283 |
+
|
|
@@ -0,0 +1,271 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
AgentIC Auth β Supabase JWT middleware + plan/build-count guard.
|
| 3 |
+
|
| 4 |
+
Env vars required:
|
| 5 |
+
SUPABASE_URL β e.g. https://xyz.supabase.co
|
| 6 |
+
SUPABASE_SERVICE_KEY β service-role key (server-side only, never expose)
|
| 7 |
+
SUPABASE_JWT_SECRET β JWT secret from Supabase dashboard β Settings β API
|
| 8 |
+
ENCRYPTION_KEY β symmetric key for encrypting BYOK API keys (32+ chars)
|
| 9 |
+
"""
|
| 10 |
+
|
| 11 |
+
import hashlib
|
| 12 |
+
import hmac
|
| 13 |
+
import json
|
| 14 |
+
import os
|
| 15 |
+
import time
|
| 16 |
+
from functools import lru_cache
|
| 17 |
+
from typing import Optional, Tuple
|
| 18 |
+
|
| 19 |
+
import httpx
|
| 20 |
+
from fastapi import Depends, HTTPException, Request
|
| 21 |
+
from fastapi.security import HTTPAuthorizationCredentials, HTTPBearer
|
| 22 |
+
|
| 23 |
+
# βββ Config ββββββββββββββββββββββββββββββββββββββββββββββββββββββββββ
|
| 24 |
+
SUPABASE_URL = os.environ.get("SUPABASE_URL", "")
|
| 25 |
+
SUPABASE_SERVICE_KEY = os.environ.get("SUPABASE_SERVICE_KEY", "")
|
| 26 |
+
SUPABASE_JWT_SECRET = os.environ.get("SUPABASE_JWT_SECRET", "")
|
| 27 |
+
ENCRYPTION_KEY = os.environ.get("ENCRYPTION_KEY", "change-me-in-production-32chars!")
|
| 28 |
+
|
| 29 |
+
AUTH_ENABLED = bool(SUPABASE_URL and SUPABASE_SERVICE_KEY and SUPABASE_JWT_SECRET)
|
| 30 |
+
|
| 31 |
+
# Plan limits: max successful builds allowed (None = unlimited)
|
| 32 |
+
PLAN_LIMITS = {
|
| 33 |
+
"free": 2,
|
| 34 |
+
"starter": 25,
|
| 35 |
+
"pro": None, # unlimited
|
| 36 |
+
"byok": None, # unlimited, uses own key
|
| 37 |
+
}
|
| 38 |
+
|
| 39 |
+
_bearer = HTTPBearer(auto_error=False)
|
| 40 |
+
|
| 41 |
+
|
| 42 |
+
# βββ JWT Decode (no pyjwt dependency β use Supabase /auth/v1/user) ββ
|
| 43 |
+
def _decode_supabase_jwt(token: str) -> dict:
    """Validate a Supabase access token by delegating to GET /auth/v1/user.

    Supabase verifies the JWT signature server-side and returns the user
    object; any non-200 answer is surfaced to the caller as a 401.
    """
    auth_headers = {
        "Authorization": f"Bearer {token}",
        "apikey": SUPABASE_SERVICE_KEY,
    }
    resp = httpx.get(f"{SUPABASE_URL}/auth/v1/user", headers=auth_headers, timeout=10)
    if resp.status_code != 200:
        raise HTTPException(status_code=401, detail="Invalid or expired token")
    return resp.json()
|
| 60 |
+
|
| 61 |
+
|
| 62 |
+
# βββ Supabase DB helpers (use service-role key) βββββββββββββββββββββ
|
| 63 |
+
def _supabase_rpc(fn_name: str, params: dict) -> dict:
    """Invoke a Postgres function exposed via Supabase's /rpc endpoint."""
    service_headers = {
        "apikey": SUPABASE_SERVICE_KEY,
        "Authorization": f"Bearer {SUPABASE_SERVICE_KEY}",
        "Content-Type": "application/json",
    }
    resp = httpx.post(
        f"{SUPABASE_URL}/rest/v1/rpc/{fn_name}",
        headers=service_headers,
        json=params,
        timeout=10,
    )
    resp.raise_for_status()
    # Some RPCs return an empty body; normalize that to an empty dict.
    return resp.json() if resp.text else {}
|
| 77 |
+
|
| 78 |
+
|
| 79 |
+
def _supabase_query(table: str, select: str = "*", filters: str = "") -> list:
    """Simple REST query against Supabase PostgREST.

    Args:
        table:   Table name under /rest/v1/.
        select:  PostgREST select expression (default: all columns).
        filters: Extra query-string filters, e.g. "id=eq.<uuid>".

    Returns:
        List of row dicts (PostgREST responds with a JSON array).

    Raises:
        httpx.HTTPStatusError: on any non-2xx response.
    """
    url = f"{SUPABASE_URL}/rest/v1/{table}?select={select}"
    if filters:
        url += f"&{filters}"
    resp = httpx.get(
        url,
        headers={
            "apikey": SUPABASE_SERVICE_KEY,
            # FIX: the header value previously ended with a stray comma
            # *inside* the f-string ("Bearer <key>,"), producing an invalid
            # bearer token and a guaranteed auth failure on every query.
            "Authorization": f"Bearer {SUPABASE_SERVICE_KEY}",
        },
        timeout=10,
    )
    resp.raise_for_status()
    return resp.json()
|
| 94 |
+
|
| 95 |
+
|
| 96 |
+
def _supabase_insert(table: str, data: dict) -> dict:
    """Insert one row via PostgREST; return the created row, or {} if none."""
    resp = httpx.post(
        f"{SUPABASE_URL}/rest/v1/{table}",
        headers={
            "apikey": SUPABASE_SERVICE_KEY,
            "Authorization": f"Bearer {SUPABASE_SERVICE_KEY}",
            "Content-Type": "application/json",
            # Ask PostgREST to echo the inserted row back in the response.
            "Prefer": "return=representation",
        },
        json=data,
        timeout=10,
    )
    resp.raise_for_status()
    created = resp.json()
    return created[0] if created else {}
|
| 111 |
+
|
| 112 |
+
|
| 113 |
+
def _supabase_update(table: str, filters: str, data: dict) -> dict:
    """PATCH rows matching `filters`; return the first updated row, or {}."""
    resp = httpx.patch(
        f"{SUPABASE_URL}/rest/v1/{table}?{filters}",
        headers={
            "apikey": SUPABASE_SERVICE_KEY,
            "Authorization": f"Bearer {SUPABASE_SERVICE_KEY}",
            "Content-Type": "application/json",
            # Have PostgREST return the updated representation.
            "Prefer": "return=representation",
        },
        json=data,
        timeout=10,
    )
    resp.raise_for_status()
    updated = resp.json()
    return updated[0] if updated else {}
|
| 128 |
+
|
| 129 |
+
|
| 130 |
+
# βββ BYOK Encryption ββββββββββββββββββββββββββββββββββββββββββββββββ
|
| 131 |
+
def encrypt_api_key(plaintext: str) -> str:
    """XOR-based encryption with HMAC integrity check. Not AES-grade,
    but avoids requiring `cryptography` in Docker. Good enough for
    API keys at rest that are already scoped to a single user.

    Output format: "<urlsafe-b64(ciphertext)>.<hex HMAC-SHA256 of ciphertext>".
    """
    import base64

    key_bytes = hashlib.sha256(ENCRYPTION_KEY.encode()).digest()
    data = plaintext.encode()
    # FIX: size the repeated keystream by the *byte* length of the encoded
    # text. The old code used len(plaintext) (character count); for non-ASCII
    # input the UTF-8 encoding is longer, so zip() silently truncated the
    # ciphertext and the key could never be recovered intact. Output for
    # ASCII input is unchanged, so existing stored keys still decrypt.
    keystream = key_bytes * (len(data) // len(key_bytes) + 1)
    ct = bytes(a ^ b for a, b in zip(data, keystream))
    mac = hmac.new(key_bytes, ct, hashlib.sha256).hexdigest()
    return base64.urlsafe_b64encode(ct).decode() + "." + mac
|
| 140 |
+
|
| 141 |
+
|
| 142 |
+
def decrypt_api_key(ciphertext: str) -> str:
    """Reverse of `encrypt_api_key`: verify the HMAC tag, then XOR-decode.

    Raises ValueError when the stored value is malformed or the integrity
    check fails.
    """
    import base64

    pieces = ciphertext.split(".", 1)
    if len(pieces) != 2:
        raise ValueError("Malformed encrypted key")
    ct = base64.urlsafe_b64decode(pieces[0])
    mac = pieces[1]
    key_bytes = hashlib.sha256(ENCRYPTION_KEY.encode()).digest()
    expected_mac = hmac.new(key_bytes, ct, hashlib.sha256).hexdigest()
    if not hmac.compare_digest(mac, expected_mac):
        raise ValueError("Integrity check failed β key may have been tampered with")
    keystream = key_bytes * ((len(ct) // 32) + 1)
    return bytes(c ^ k for c, k in zip(ct, keystream)).decode()
|
| 155 |
+
|
| 156 |
+
|
| 157 |
+
# βββ FastAPI Dependency: get current user ββββββββββββββββββββββββββββ
|
| 158 |
+
async def get_current_user(
|
| 159 |
+
request: Request,
|
| 160 |
+
credentials: Optional[HTTPAuthorizationCredentials] = Depends(_bearer),
|
| 161 |
+
) -> Optional[dict]:
|
| 162 |
+
"""Extract and validate the Supabase JWT from the Authorization header.
|
| 163 |
+
|
| 164 |
+
Returns the user profile dict or None if auth is disabled.
|
| 165 |
+
When auth is enabled but no valid token is provided, raises 401.
|
| 166 |
+
"""
|
| 167 |
+
if not AUTH_ENABLED:
|
| 168 |
+
return None # Auth not configured β allow anonymous access
|
| 169 |
+
|
| 170 |
+
if not credentials:
|
| 171 |
+
raise HTTPException(status_code=401, detail="Missing Authorization header")
|
| 172 |
+
|
| 173 |
+
token = credentials.credentials
|
| 174 |
+
user = _decode_supabase_jwt(token)
|
| 175 |
+
uid = user.get("id")
|
| 176 |
+
if not uid:
|
| 177 |
+
raise HTTPException(status_code=401, detail="Invalid user")
|
| 178 |
+
|
| 179 |
+
# Fetch profile from DB
|
| 180 |
+
profiles = _supabase_query("profiles", filters=f"id=eq.{uid}")
|
| 181 |
+
if not profiles:
|
| 182 |
+
raise HTTPException(status_code=404, detail="Profile not found. Sign up first.")
|
| 183 |
+
|
| 184 |
+
return profiles[0]
|
| 185 |
+
|
| 186 |
+
|
| 187 |
+
# βββ Build Guard: check plan + build count βββββββββββββββββββββββββββ
|
| 188 |
+
def check_build_allowed(profile: Optional[dict]) -> None:
    """Gate for /build: raise HTTP 402 once a user's plan quota is spent.

    No-op when auth is disabled (profile is None) or the plan has no limit
    (PLAN_LIMITS value of None).
    """
    if profile is None:
        return  # auth disabled - no restrictions

    plan = profile.get("plan", "free")
    used = profile.get("successful_builds", 0)
    quota = PLAN_LIMITS.get(plan)

    if quota is None or used < quota:
        return  # unlimited plan, or quota not yet exhausted

    raise HTTPException(
        status_code=402,
        detail={
            "error": "build_limit_reached",
            "plan": plan,
            "used": used,
            "limit": quota,
            "message": f"You've used all {quota} builds on the {plan} plan. Upgrade to continue building chips.",
            "upgrade_url": "/pricing",
        },
    )
|
| 212 |
+
|
| 213 |
+
|
| 214 |
+
def get_llm_key_for_user(profile: Optional[dict]) -> Optional[str]:
    """Return the decrypted BYOK LLM key for 'byok'-plan users, else None.

    None means the server should fall back to its globally configured cloud
    key (auth disabled, or user on a non-BYOK plan).
    """
    if profile is None or profile.get("plan") != "byok":
        return None

    stored = profile.get("llm_api_key")
    if not stored:
        raise HTTPException(
            status_code=400,
            detail="BYOK plan requires an API key. Set it in your profile settings.",
        )

    try:
        return decrypt_api_key(stored)
    except ValueError:
        raise HTTPException(status_code=500, detail="Failed to decrypt stored API key")
|
| 236 |
+
|
| 237 |
+
|
| 238 |
+
def record_build_start(profile: Optional[dict], job_id: str, design_name: str) -> None:
    """Persist a new 'queued' row in the builds table (no-op when auth is off)."""
    if profile is None or not AUTH_ENABLED:
        return
    row = {
        "user_id": profile["id"],
        "job_id": job_id,
        "design_name": design_name,
        "status": "queued",
    }
    _supabase_insert("builds", row)
|
| 248 |
+
|
| 249 |
+
|
| 250 |
+
def record_build_success(profile: Optional[dict], job_id: str) -> None:
    """Close out a successful build and bump the user's success counter."""
    if profile is None or not AUTH_ENABLED:
        return
    # Mark the build row done, then increment the per-user counter via the
    # increment_successful_builds RPC.
    _supabase_update(
        "builds",
        f"job_id=eq.{job_id}",
        {"status": "done", "finished_at": "now()"},
    )
    _supabase_rpc("increment_successful_builds", {"uid": profile["id"]})
|
| 262 |
+
|
| 263 |
+
|
| 264 |
+
def record_build_failure(job_id: str) -> None:
    """Mark the build row as failed (no-op when auth is off)."""
    if not AUTH_ENABLED:
        return
    _supabase_update(
        "builds",
        f"job_id=eq.{job_id}",
        {"status": "failed", "finished_at": "now()"},
    )
|
|
@@ -0,0 +1,207 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
AgentIC Billing β Razorpay webhook handler + order creation.
|
| 3 |
+
|
| 4 |
+
Env vars required:
|
| 5 |
+
RAZORPAY_KEY_ID β Razorpay API key id
|
| 6 |
+
RAZORPAY_KEY_SECRET β Razorpay API key secret
|
| 7 |
+
RAZORPAY_WEBHOOK_SECRET β Webhook secret from Razorpay dashboard
|
| 8 |
+
"""
|
| 9 |
+
|
| 10 |
+
import hashlib
|
| 11 |
+
import hmac
|
| 12 |
+
import json
|
| 13 |
+
import os
|
| 14 |
+
from typing import Optional
|
| 15 |
+
|
| 16 |
+
import httpx
|
| 17 |
+
from fastapi import APIRouter, HTTPException, Request
|
| 18 |
+
from pydantic import BaseModel
|
| 19 |
+
|
| 20 |
+
from server.auth import (
|
| 21 |
+
AUTH_ENABLED,
|
| 22 |
+
_supabase_insert,
|
| 23 |
+
_supabase_query,
|
| 24 |
+
_supabase_update,
|
| 25 |
+
)
|
| 26 |
+
|
| 27 |
+
router = APIRouter(prefix="/billing", tags=["billing"])
|
| 28 |
+
|
| 29 |
+
RAZORPAY_KEY_ID = os.environ.get("RAZORPAY_KEY_ID", "")
|
| 30 |
+
RAZORPAY_KEY_SECRET = os.environ.get("RAZORPAY_KEY_SECRET", "")
|
| 31 |
+
RAZORPAY_WEBHOOK_SECRET = os.environ.get("RAZORPAY_WEBHOOK_SECRET", "")
|
| 32 |
+
|
| 33 |
+
# Plan prices in paise (βΉ1 = 100 paise)
|
| 34 |
+
PLAN_PRICES = {
|
| 35 |
+
"starter": 49900, # βΉ499
|
| 36 |
+
"pro": 149900, # βΉ1,499
|
| 37 |
+
}
|
| 38 |
+
|
| 39 |
+
|
| 40 |
+
class CreateOrderRequest(BaseModel):
|
| 41 |
+
plan: str # "starter" or "pro"
|
| 42 |
+
user_id: str # Supabase user UUID
|
| 43 |
+
|
| 44 |
+
|
| 45 |
+
class VerifyPaymentRequest(BaseModel):
|
| 46 |
+
razorpay_order_id: str
|
| 47 |
+
razorpay_payment_id: str
|
| 48 |
+
razorpay_signature: str
|
| 49 |
+
user_id: str
|
| 50 |
+
plan: str
|
| 51 |
+
|
| 52 |
+
|
| 53 |
+
# βββ Create Razorpay Order ββββββββββββββββββββββββββββββββββββββββββ
|
| 54 |
+
@router.post("/create-order")
async def create_order(req: CreateOrderRequest):
    """Create a Razorpay order for a plan upgrade.

    Returns the order id plus the public key id the frontend needs to open
    Razorpay checkout. When Supabase auth is configured, also records a
    'pending' payment row that verify-payment later reconciles.
    """
    if not RAZORPAY_KEY_ID or not RAZORPAY_KEY_SECRET:
        raise HTTPException(status_code=503, detail="Payment system not configured")

    if req.plan not in PLAN_PRICES:
        raise HTTPException(status_code=400, detail=f"Invalid plan: {req.plan}. Choose 'starter' or 'pro'.")

    amount = PLAN_PRICES[req.plan]

    # FIX: use the async client. The previous httpx.post() was a blocking
    # synchronous call inside an `async def` endpoint, stalling the event
    # loop for up to the 15s timeout on every order.
    async with httpx.AsyncClient() as client:
        resp = await client.post(
            "https://api.razorpay.com/v1/orders",
            auth=(RAZORPAY_KEY_ID, RAZORPAY_KEY_SECRET),
            json={
                "amount": amount,  # paise
                "currency": "INR",
                "receipt": f"agentic_{req.user_id[:8]}_{req.plan}",
                "notes": {
                    "user_id": req.user_id,
                    "plan": req.plan,
                },
            },
            timeout=15,
        )
    if resp.status_code != 200:
        raise HTTPException(status_code=502, detail="Failed to create Razorpay order")

    order = resp.json()

    # Record pending payment
    if AUTH_ENABLED:
        _supabase_insert("payments", {
            "user_id": req.user_id,
            "razorpay_order_id": order["id"],
            "amount_paise": amount,
            "plan": req.plan,
            "status": "pending",
        })

    return {
        "order_id": order["id"],
        "amount": amount,
        "currency": "INR",
        "key_id": RAZORPAY_KEY_ID,
        "plan": req.plan,
    }
|
| 102 |
+
|
| 103 |
+
|
| 104 |
+
# βββ Verify Payment (client-side callback) βββββββββββββββββββββββββ
|
| 105 |
+
@router.post("/verify-payment")
|
| 106 |
+
async def verify_payment(req: VerifyPaymentRequest):
|
| 107 |
+
"""Verify Razorpay payment signature and upgrade user plan."""
|
| 108 |
+
if not RAZORPAY_KEY_SECRET:
|
| 109 |
+
raise HTTPException(status_code=503, detail="Payment system not configured")
|
| 110 |
+
|
| 111 |
+
# Verify signature: SHA256 HMAC of order_id|payment_id
|
| 112 |
+
message = f"{req.razorpay_order_id}|{req.razorpay_payment_id}"
|
| 113 |
+
expected = hmac.new(
|
| 114 |
+
RAZORPAY_KEY_SECRET.encode(),
|
| 115 |
+
message.encode(),
|
| 116 |
+
hashlib.sha256,
|
| 117 |
+
).hexdigest()
|
| 118 |
+
|
| 119 |
+
if not hmac.compare_digest(expected, req.razorpay_signature):
|
| 120 |
+
raise HTTPException(status_code=400, detail="Payment verification failed β signature mismatch")
|
| 121 |
+
|
| 122 |
+
if AUTH_ENABLED:
|
| 123 |
+
# Update payment record
|
| 124 |
+
_supabase_update(
|
| 125 |
+
"payments",
|
| 126 |
+
f"razorpay_order_id=eq.{req.razorpay_order_id}",
|
| 127 |
+
{
|
| 128 |
+
"razorpay_payment_id": req.razorpay_payment_id,
|
| 129 |
+
"razorpay_signature": req.razorpay_signature,
|
| 130 |
+
"status": "captured",
|
| 131 |
+
},
|
| 132 |
+
)
|
| 133 |
+
|
| 134 |
+
# Upgrade user plan
|
| 135 |
+
_supabase_update(
|
| 136 |
+
"profiles",
|
| 137 |
+
f"id=eq.{req.user_id}",
|
| 138 |
+
{"plan": req.plan, "successful_builds": 0},
|
| 139 |
+
)
|
| 140 |
+
|
| 141 |
+
return {"success": True, "plan": req.plan, "message": f"Upgraded to {req.plan} plan!"}
|
| 142 |
+
|
| 143 |
+
|
| 144 |
+
# βββ Razorpay Webhook (server-to-server) βββββββββββββββββββββββββββ
|
| 145 |
+
@router.post("/webhook/razorpay")
|
| 146 |
+
async def razorpay_webhook(request: Request):
|
| 147 |
+
"""Handle Razorpay webhook events (payment.captured, payment.failed).
|
| 148 |
+
|
| 149 |
+
Razorpay sends a POST with a JSON body and X-Razorpay-Signature header.
|
| 150 |
+
We verify the HMAC-SHA256 signature before processing.
|
| 151 |
+
"""
|
| 152 |
+
if not RAZORPAY_WEBHOOK_SECRET:
|
| 153 |
+
raise HTTPException(status_code=503, detail="Webhook secret not configured")
|
| 154 |
+
|
| 155 |
+
body = await request.body()
|
| 156 |
+
signature = request.headers.get("X-Razorpay-Signature", "")
|
| 157 |
+
|
| 158 |
+
# Verify webhook signature
|
| 159 |
+
expected = hmac.new(
|
| 160 |
+
RAZORPAY_WEBHOOK_SECRET.encode(),
|
| 161 |
+
body,
|
| 162 |
+
hashlib.sha256,
|
| 163 |
+
).hexdigest()
|
| 164 |
+
|
| 165 |
+
if not hmac.compare_digest(expected, signature):
|
| 166 |
+
raise HTTPException(status_code=400, detail="Invalid webhook signature")
|
| 167 |
+
|
| 168 |
+
payload = json.loads(body)
|
| 169 |
+
event = payload.get("event", "")
|
| 170 |
+
|
| 171 |
+
if event == "payment.captured":
|
| 172 |
+
payment = payload.get("payload", {}).get("payment", {}).get("entity", {})
|
| 173 |
+
order_id = payment.get("order_id", "")
|
| 174 |
+
notes = payment.get("notes", {})
|
| 175 |
+
user_id = notes.get("user_id", "")
|
| 176 |
+
plan = notes.get("plan", "")
|
| 177 |
+
|
| 178 |
+
if user_id and plan and AUTH_ENABLED:
|
| 179 |
+
# Update payment status
|
| 180 |
+
_supabase_update(
|
| 181 |
+
"payments",
|
| 182 |
+
f"razorpay_order_id=eq.{order_id}",
|
| 183 |
+
{
|
| 184 |
+
"razorpay_payment_id": payment.get("id", ""),
|
| 185 |
+
"status": "captured",
|
| 186 |
+
},
|
| 187 |
+
)
|
| 188 |
+
|
| 189 |
+
# Upgrade user plan and reset build count
|
| 190 |
+
_supabase_update(
|
| 191 |
+
"profiles",
|
| 192 |
+
f"id=eq.{user_id}",
|
| 193 |
+
{"plan": plan, "successful_builds": 0},
|
| 194 |
+
)
|
| 195 |
+
|
| 196 |
+
elif event == "payment.failed":
|
| 197 |
+
payment = payload.get("payload", {}).get("payment", {}).get("entity", {})
|
| 198 |
+
order_id = payment.get("order_id", "")
|
| 199 |
+
if order_id and AUTH_ENABLED:
|
| 200 |
+
_supabase_update(
|
| 201 |
+
"payments",
|
| 202 |
+
f"razorpay_order_id=eq.{order_id}",
|
| 203 |
+
{"status": "failed"},
|
| 204 |
+
)
|
| 205 |
+
|
| 206 |
+
# Razorpay expects 200 OK to acknowledge receipt
|
| 207 |
+
return {"status": "ok"}
|
|
@@ -0,0 +1,133 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
-- ============================================================
|
| 2 |
+
-- AgentIC Auth & Billing Schema β Supabase (PostgreSQL)
|
| 3 |
+
-- ============================================================
|
| 4 |
+
-- Run this in Supabase SQL Editor (Dashboard β SQL Editor β New query)
|
| 5 |
+
|
| 6 |
+
-- Enable Row Level Security on all tables
|
| 7 |
+
-- Enable the pgcrypto extension for encryption
|
| 8 |
+
create extension if not exists pgcrypto;
|
| 9 |
+
|
| 10 |
+
-- βββ 1. User Profiles ββββββββββββββββββββββββββββββββββββββ
|
| 11 |
+
-- Links to Supabase auth.users via id (UUID)
|
| 12 |
+
create table if not exists public.profiles (
|
| 13 |
+
id uuid primary key references auth.users(id) on delete cascade,
|
| 14 |
+
email text not null,
|
| 15 |
+
full_name text,
|
| 16 |
+
plan text not null default 'free'
|
| 17 |
+
check (plan in ('free', 'starter', 'pro', 'byok')),
|
| 18 |
+
successful_builds int not null default 0,
|
| 19 |
+
llm_api_key text, -- encrypted via pgp_sym_encrypt
|
| 20 |
+
razorpay_customer_id text,
|
| 21 |
+
created_at timestamptz not null default now(),
|
| 22 |
+
updated_at timestamptz not null default now()
|
| 23 |
+
);
|
| 24 |
+
|
| 25 |
+
alter table public.profiles enable row level security;
|
| 26 |
+
|
| 27 |
+
-- Users can read/update only their own profile
|
| 28 |
+
create policy "Users read own profile"
|
| 29 |
+
on public.profiles for select
|
| 30 |
+
using (auth.uid() = id);
|
| 31 |
+
|
| 32 |
+
create policy "Users update own profile"
|
| 33 |
+
on public.profiles for update
|
| 34 |
+
using (auth.uid() = id);
|
| 35 |
+
|
| 36 |
+
-- βββ 2. Build History ββββββββββββββββββββββββββββββββββββββ
|
| 37 |
+
create table if not exists public.builds (
|
| 38 |
+
id uuid primary key default gen_random_uuid(),
|
| 39 |
+
user_id uuid not null references public.profiles(id) on delete cascade,
|
| 40 |
+
job_id text not null, -- maps to backend JOB_STORE key
|
| 41 |
+
design_name text not null,
|
| 42 |
+
status text not null default 'queued'
|
| 43 |
+
check (status in ('queued', 'running', 'done', 'failed', 'cancelled')),
|
| 44 |
+
created_at timestamptz not null default now(),
|
| 45 |
+
finished_at timestamptz
|
| 46 |
+
);
|
| 47 |
+
|
| 48 |
+
alter table public.builds enable row level security;
|
| 49 |
+
|
| 50 |
+
create policy "Users read own builds"
|
| 51 |
+
on public.builds for select
|
| 52 |
+
using (auth.uid() = user_id);
|
| 53 |
+
|
| 54 |
+
create policy "Service role inserts builds"
|
| 55 |
+
on public.builds for insert
|
| 56 |
+
with check (true); -- insert via service-role key from backend
|
| 57 |
+
|
| 58 |
+
create policy "Service role updates builds"
|
| 59 |
+
on public.builds for update
|
| 60 |
+
using (true);
|
| 61 |
+
|
| 62 |
+
-- βββ 3. Payment Events ββββββββββββββββββββββββββββββββββββ
|
| 63 |
+
create table if not exists public.payments (
|
| 64 |
+
id uuid primary key default gen_random_uuid(),
|
| 65 |
+
user_id uuid not null references public.profiles(id) on delete cascade,
|
| 66 |
+
razorpay_order_id text,
|
| 67 |
+
razorpay_payment_id text,
|
| 68 |
+
razorpay_signature text,
|
| 69 |
+
amount_paise int not null, -- amount in paise (βΉ1 = 100 paise)
|
| 70 |
+
plan text not null
|
| 71 |
+
check (plan in ('starter', 'pro', 'byok')),
|
| 72 |
+
status text not null default 'pending'
|
| 73 |
+
check (status in ('pending', 'captured', 'failed', 'refunded')),
|
| 74 |
+
created_at timestamptz not null default now()
|
| 75 |
+
);
|
| 76 |
+
|
| 77 |
+
alter table public.payments enable row level security;
|
| 78 |
+
|
| 79 |
+
create policy "Users view own payments"
|
| 80 |
+
on public.payments for select
|
| 81 |
+
using (auth.uid() = user_id);
|
| 82 |
+
|
| 83 |
+
-- βββ 4. Plan Limits (reference table) βββββββββββββββββββββ
|
| 84 |
+
create table if not exists public.plan_limits (
|
| 85 |
+
plan text primary key
|
| 86 |
+
check (plan in ('free', 'starter', 'pro', 'byok')),
|
| 87 |
+
max_builds int, -- NULL = unlimited
|
| 88 |
+
price_paise int not null default 0,
|
| 89 |
+
label text not null
|
| 90 |
+
);
|
| 91 |
+
|
| 92 |
+
insert into public.plan_limits (plan, max_builds, price_paise, label) values
|
| 93 |
+
('free', 2, 0, 'Free Tier β 2 builds'),
|
| 94 |
+
('starter', 25, 49900, 'Starter β 25 builds (βΉ499)'),
|
| 95 |
+
('pro', null, 149900, 'Pro β Unlimited builds (βΉ1,499)'),
|
| 96 |
+
('byok', null, 0, 'BYOK β Bring Your Own Key')
|
| 97 |
+
on conflict (plan) do nothing;
|
| 98 |
+
|
| 99 |
+
-- βββ 5. Auto-create profile on signup ββββββββββββββββββββββ
|
| 100 |
+
create or replace function public.handle_new_user()
|
| 101 |
+
returns trigger
|
| 102 |
+
language plpgsql
|
| 103 |
+
security definer set search_path = public
|
| 104 |
+
as $$
|
| 105 |
+
begin
|
| 106 |
+
insert into public.profiles (id, email, full_name)
|
| 107 |
+
values (
|
| 108 |
+
new.id,
|
| 109 |
+
new.email,
|
| 110 |
+
coalesce(new.raw_user_meta_data->>'full_name', split_part(new.email, '@', 1))
|
| 111 |
+
);
|
| 112 |
+
return new;
|
| 113 |
+
end;
|
| 114 |
+
$$;
|
| 115 |
+
|
| 116 |
+
drop trigger if exists on_auth_user_created on auth.users;
|
| 117 |
+
create trigger on_auth_user_created
|
| 118 |
+
after insert on auth.users
|
| 119 |
+
for each row execute procedure public.handle_new_user();
|
| 120 |
+
|
| 121 |
+
-- βββ 6. Helper: increment builds ββββββββββββββββββββββββββ
|
| 122 |
+
create or replace function public.increment_successful_builds(uid uuid)
|
| 123 |
+
returns void
|
| 124 |
+
language plpgsql
|
| 125 |
+
security definer
|
| 126 |
+
as $$
|
| 127 |
+
begin
|
| 128 |
+
update public.profiles
|
| 129 |
+
set successful_builds = successful_builds + 1,
|
| 130 |
+
updated_at = now()
|
| 131 |
+
where id = uid;
|
| 132 |
+
end;
|
| 133 |
+
$$;
|
|
@@ -1,11 +1,11 @@
|
|
| 1 |
import { useEffect, useMemo, useState } from 'react';
|
| 2 |
-
import axios from 'axios';
|
| 3 |
import { Dashboard } from './pages/Dashboard';
|
| 4 |
import { DesignStudio } from './pages/DesignStudio';
|
| 5 |
import { HumanInLoopBuild } from './pages/HumanInLoopBuild';
|
| 6 |
import { Benchmarking } from './pages/Benchmarking';
|
| 7 |
import { Fabrication } from './pages/Fabrication';
|
| 8 |
import { Documentation } from './pages/Documentation';
|
|
|
|
| 9 |
import './index.css';
|
| 10 |
|
| 11 |
const App = () => {
|
|
@@ -17,8 +17,7 @@ const App = () => {
|
|
| 17 |
return saved === 'dark' ? 'dark' : 'light';
|
| 18 |
});
|
| 19 |
|
| 20 |
-
|
| 21 |
-
axios.defaults.headers.common['ngrok-skip-browser-warning'] = 'true';
|
| 22 |
|
| 23 |
useEffect(() => {
|
| 24 |
document.documentElement.setAttribute('data-theme', theme);
|
|
@@ -26,8 +25,7 @@ const App = () => {
|
|
| 26 |
}, [theme]);
|
| 27 |
|
| 28 |
useEffect(() => {
|
| 29 |
-
|
| 30 |
-
axios.get(`${API_BASE_URL}/designs`)
|
| 31 |
.then(res => {
|
| 32 |
const data = res.data?.designs || [];
|
| 33 |
setDesigns(data);
|
|
|
|
| 1 |
import { useEffect, useMemo, useState } from 'react';
|
|
|
|
| 2 |
import { Dashboard } from './pages/Dashboard';
|
| 3 |
import { DesignStudio } from './pages/DesignStudio';
|
| 4 |
import { HumanInLoopBuild } from './pages/HumanInLoopBuild';
|
| 5 |
import { Benchmarking } from './pages/Benchmarking';
|
| 6 |
import { Fabrication } from './pages/Fabrication';
|
| 7 |
import { Documentation } from './pages/Documentation';
|
| 8 |
+
import { api } from './api';
|
| 9 |
import './index.css';
|
| 10 |
|
| 11 |
const App = () => {
|
|
|
|
| 17 |
return saved === 'dark' ? 'dark' : 'light';
|
| 18 |
});
|
| 19 |
|
| 20 |
+
|
|
|
|
| 21 |
|
| 22 |
useEffect(() => {
|
| 23 |
document.documentElement.setAttribute('data-theme', theme);
|
|
|
|
| 25 |
}, [theme]);
|
| 26 |
|
| 27 |
useEffect(() => {
|
| 28 |
+
api.get('/designs')
|
|
|
|
| 29 |
.then(res => {
|
| 30 |
const data = res.data?.designs || [];
|
| 31 |
setDesigns(data);
|
|
@@ -0,0 +1,9 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import axios from 'axios';
|
| 2 |
+
|
| 3 |
+
export const API_BASE = (import.meta.env.VITE_API_BASE_URL || 'http://localhost:7860').replace(/\/$/, '');
|
| 4 |
+
|
| 5 |
+
// Pre-configured axios instance with ngrok header
|
| 6 |
+
export const api = axios.create({
|
| 7 |
+
baseURL: API_BASE,
|
| 8 |
+
headers: { 'ngrok-skip-browser-warning': 'true' },
|
| 9 |
+
});
|
|
@@ -1,8 +1,6 @@
|
|
| 1 |
import React, { useEffect, useRef } from 'react';
|
| 2 |
import { motion } from 'framer-motion';
|
| 3 |
-
import
|
| 4 |
-
|
| 5 |
-
const API = (import.meta.env.VITE_API_BASE_URL || 'http://localhost:7860').replace(/\/$/, '');
|
| 6 |
|
| 7 |
const STATES_DISPLAY: Record<string, { label: string; icon: string }> = {
|
| 8 |
INIT: { label: 'Initializing Workspace', icon: 'π§' },
|
|
@@ -92,7 +90,7 @@ export const BuildMonitor: React.FC<Props> = ({ designName, jobId, events, jobSt
|
|
| 92 |
if (!jobId || cancelling) return;
|
| 93 |
setCancelling(true);
|
| 94 |
try {
|
| 95 |
-
await
|
| 96 |
} catch {
|
| 97 |
setCancelling(false);
|
| 98 |
}
|
|
|
|
| 1 |
import React, { useEffect, useRef } from 'react';
|
| 2 |
import { motion } from 'framer-motion';
|
| 3 |
+
import { api } from '../api';
|
|
|
|
|
|
|
| 4 |
|
| 5 |
const STATES_DISPLAY: Record<string, { label: string; icon: string }> = {
|
| 6 |
INIT: { label: 'Initializing Workspace', icon: 'π§' },
|
|
|
|
| 90 |
if (!jobId || cancelling) return;
|
| 91 |
setCancelling(true);
|
| 92 |
try {
|
| 93 |
+
await api.post(`/build/cancel/${jobId}`);
|
| 94 |
} catch {
|
| 95 |
setCancelling(false);
|
| 96 |
}
|
|
@@ -1,5 +1,5 @@
|
|
| 1 |
import React, { useState, useEffect } from 'react';
|
| 2 |
-
import
|
| 3 |
|
| 4 |
interface DashboardProps {
|
| 5 |
selectedDesign: string;
|
|
@@ -17,10 +17,8 @@ export const Dashboard: React.FC<DashboardProps> = ({ selectedDesign }) => {
|
|
| 17 |
if (!selectedDesign) return;
|
| 18 |
setLoading(true);
|
| 19 |
|
| 20 |
-
const API_BASE_URL = (import.meta.env.VITE_API_BASE_URL || 'http://localhost:7860').replace(/\/$/, '');
|
| 21 |
-
|
| 22 |
// Fetch Quick Metrics
|
| 23 |
-
|
| 24 |
.then(res => {
|
| 25 |
if (res.data.metrics) setMetrics(res.data.metrics);
|
| 26 |
})
|
|
@@ -29,7 +27,7 @@ export const Dashboard: React.FC<DashboardProps> = ({ selectedDesign }) => {
|
|
| 29 |
});
|
| 30 |
|
| 31 |
// Fetch Full LLM Signoff Report
|
| 32 |
-
|
| 33 |
.then(res => {
|
| 34 |
setSignoffData({ report: res.data.report, pass: res.data.success });
|
| 35 |
})
|
|
@@ -39,7 +37,7 @@ export const Dashboard: React.FC<DashboardProps> = ({ selectedDesign }) => {
|
|
| 39 |
.finally(() => setLoading(false));
|
| 40 |
|
| 41 |
// Fetch recent jobs
|
| 42 |
-
|
| 43 |
.then(res => {
|
| 44 |
const jobs = (res.data?.jobs || [])
|
| 45 |
.filter((j: any) => j.design_name === selectedDesign)
|
|
|
|
| 1 |
import React, { useState, useEffect } from 'react';
|
| 2 |
+
import { api } from '../api';
|
| 3 |
|
| 4 |
interface DashboardProps {
|
| 5 |
selectedDesign: string;
|
|
|
|
| 17 |
if (!selectedDesign) return;
|
| 18 |
setLoading(true);
|
| 19 |
|
|
|
|
|
|
|
| 20 |
// Fetch Quick Metrics
|
| 21 |
+
api.get(`/metrics/${selectedDesign}`)
|
| 22 |
.then(res => {
|
| 23 |
if (res.data.metrics) setMetrics(res.data.metrics);
|
| 24 |
})
|
|
|
|
| 27 |
});
|
| 28 |
|
| 29 |
// Fetch Full LLM Signoff Report
|
| 30 |
+
api.get(`/signoff/${selectedDesign}`)
|
| 31 |
.then(res => {
|
| 32 |
setSignoffData({ report: res.data.report, pass: res.data.success });
|
| 33 |
})
|
|
|
|
| 37 |
.finally(() => setLoading(false));
|
| 38 |
|
| 39 |
// Fetch recent jobs
|
| 40 |
+
api.get(`/jobs`)
|
| 41 |
.then(res => {
|
| 42 |
const jobs = (res.data?.jobs || [])
|
| 43 |
.filter((j: any) => j.design_name === selectedDesign)
|
|
@@ -1,11 +1,9 @@
|
|
| 1 |
import { useState, useEffect, useRef } from 'react';
|
| 2 |
import { motion, AnimatePresence } from 'framer-motion';
|
| 3 |
-
import axios from 'axios';
|
| 4 |
import { BuildMonitor } from '../components/BuildMonitor';
|
| 5 |
import { ChipSummary } from '../components/ChipSummary';
|
| 6 |
import { fetchEventSource } from '@microsoft/fetch-event-source';
|
| 7 |
-
|
| 8 |
-
const API = (import.meta.env.VITE_API_BASE_URL || 'http://localhost:7860').replace(/\/$/, '');
|
| 9 |
|
| 10 |
type Phase = 'prompt' | 'building' | 'done';
|
| 11 |
|
|
@@ -80,7 +78,7 @@ export const DesignStudio = () => {
|
|
| 80 |
if (!prompt.trim()) return;
|
| 81 |
setError('');
|
| 82 |
try {
|
| 83 |
-
const res = await
|
| 84 |
design_name: designName || slugify(prompt),
|
| 85 |
description: prompt,
|
| 86 |
skip_openlane: skipOpenlane,
|
|
@@ -123,7 +121,7 @@ export const DesignStudio = () => {
|
|
| 123 |
// (server replays all events from the beginning on each connection)
|
| 124 |
setEvents([]);
|
| 125 |
|
| 126 |
-
fetchEventSource(`${
|
| 127 |
method: 'GET',
|
| 128 |
headers: {
|
| 129 |
'ngrok-skip-browser-warning': 'true',
|
|
@@ -160,7 +158,7 @@ export const DesignStudio = () => {
|
|
| 160 |
const fetchResult = async (jid: string, status: string) => {
|
| 161 |
setJobStatus(status === 'done' ? 'done' : 'failed');
|
| 162 |
try {
|
| 163 |
-
const res = await
|
| 164 |
setResult(res.data.result);
|
| 165 |
} catch { /* result might not exist if failed early */ }
|
| 166 |
setPhase('done');
|
|
@@ -190,7 +188,7 @@ export const DesignStudio = () => {
|
|
| 190 |
if ('Notification' in window && Notification.permission === 'default') {
|
| 191 |
Notification.requestPermission();
|
| 192 |
}
|
| 193 |
-
|
| 194 |
.then(res => setStageSchema(res.data?.stages || []))
|
| 195 |
.catch(() => setStageSchema([]));
|
| 196 |
return () => abortCtrlRef.current?.abort();
|
|
|
|
| 1 |
import { useState, useEffect, useRef } from 'react';
|
| 2 |
import { motion, AnimatePresence } from 'framer-motion';
|
|
|
|
| 3 |
import { BuildMonitor } from '../components/BuildMonitor';
|
| 4 |
import { ChipSummary } from '../components/ChipSummary';
|
| 5 |
import { fetchEventSource } from '@microsoft/fetch-event-source';
|
| 6 |
+
import { api, API_BASE } from '../api';
|
|
|
|
| 7 |
|
| 8 |
type Phase = 'prompt' | 'building' | 'done';
|
| 9 |
|
|
|
|
| 78 |
if (!prompt.trim()) return;
|
| 79 |
setError('');
|
| 80 |
try {
|
| 81 |
+
const res = await api.post(`/build`, {
|
| 82 |
design_name: designName || slugify(prompt),
|
| 83 |
description: prompt,
|
| 84 |
skip_openlane: skipOpenlane,
|
|
|
|
| 121 |
// (server replays all events from the beginning on each connection)
|
| 122 |
setEvents([]);
|
| 123 |
|
| 124 |
+
fetchEventSource(`${API_BASE}/build/stream/${jid}`, {
|
| 125 |
method: 'GET',
|
| 126 |
headers: {
|
| 127 |
'ngrok-skip-browser-warning': 'true',
|
|
|
|
| 158 |
const fetchResult = async (jid: string, status: string) => {
|
| 159 |
setJobStatus(status === 'done' ? 'done' : 'failed');
|
| 160 |
try {
|
| 161 |
+
const res = await api.get(`/build/result/${jid}`);
|
| 162 |
setResult(res.data.result);
|
| 163 |
} catch { /* result might not exist if failed early */ }
|
| 164 |
setPhase('done');
|
|
|
|
| 188 |
if ('Notification' in window && Notification.permission === 'default') {
|
| 189 |
Notification.requestPermission();
|
| 190 |
}
|
| 191 |
+
api.get(`/pipeline/schema`)
|
| 192 |
.then(res => setStageSchema(res.data?.stages || []))
|
| 193 |
.catch(() => setStageSchema([]));
|
| 194 |
return () => abortCtrlRef.current?.abort();
|
|
@@ -1,9 +1,7 @@
|
|
| 1 |
import { useEffect, useMemo, useState, useRef, useCallback } from 'react';
|
| 2 |
-
import axios from 'axios';
|
| 3 |
import ReactMarkdown from 'react-markdown';
|
| 4 |
import remarkGfm from 'remark-gfm';
|
| 5 |
-
|
| 6 |
-
const API = (import.meta.env.VITE_API_BASE_URL || 'http://localhost:7860').replace(/\/$/, '');
|
| 7 |
|
| 8 |
interface DocItem {
|
| 9 |
id: string;
|
|
@@ -66,9 +64,9 @@ export const Documentation = () => {
|
|
| 66 |
const loadIndex = async () => {
|
| 67 |
try {
|
| 68 |
const [docsRes, optionsRes, schemaRes] = await Promise.all([
|
| 69 |
-
|
| 70 |
-
|
| 71 |
-
|
| 72 |
]);
|
| 73 |
const docsData: DocItem[] = docsRes.data?.docs || [];
|
| 74 |
setDocs(docsData);
|
|
@@ -88,7 +86,7 @@ export const Documentation = () => {
|
|
| 88 |
useEffect(() => {
|
| 89 |
if (!selectedDoc) return;
|
| 90 |
setLoading(true);
|
| 91 |
-
|
| 92 |
.then((res) => {
|
| 93 |
setDocTitle(res.data?.title || selectedDoc);
|
| 94 |
setContent(res.data?.content || 'No content available.');
|
|
|
|
| 1 |
import { useEffect, useMemo, useState, useRef, useCallback } from 'react';
|
|
|
|
| 2 |
import ReactMarkdown from 'react-markdown';
|
| 3 |
import remarkGfm from 'remark-gfm';
|
| 4 |
+
import { api } from '../api';
|
|
|
|
| 5 |
|
| 6 |
interface DocItem {
|
| 7 |
id: string;
|
|
|
|
| 64 |
const loadIndex = async () => {
|
| 65 |
try {
|
| 66 |
const [docsRes, optionsRes, schemaRes] = await Promise.all([
|
| 67 |
+
api.get(`/docs/index`),
|
| 68 |
+
api.get(`/build/options`),
|
| 69 |
+
api.get(`/pipeline/schema`),
|
| 70 |
]);
|
| 71 |
const docsData: DocItem[] = docsRes.data?.docs || [];
|
| 72 |
setDocs(docsData);
|
|
|
|
| 86 |
useEffect(() => {
|
| 87 |
if (!selectedDoc) return;
|
| 88 |
setLoading(true);
|
| 89 |
+
api.get(`/docs/content/${selectedDoc}`)
|
| 90 |
.then((res) => {
|
| 91 |
setDocTitle(res.data?.title || selectedDoc);
|
| 92 |
setContent(res.data?.content || 'No content available.');
|
|
@@ -1,13 +1,11 @@
|
|
| 1 |
import { useState, useEffect, useRef } from 'react';
|
| 2 |
-
import axios from 'axios';
|
| 3 |
import { fetchEventSource } from '@microsoft/fetch-event-source';
|
| 4 |
import { ActivityFeed } from '../components/ActivityFeed';
|
| 5 |
import { StageProgressBar } from '../components/StageProgressBar';
|
| 6 |
import { ApprovalCard } from '../components/ApprovalCard';
|
|
|
|
| 7 |
import '../hitl.css';
|
| 8 |
|
| 9 |
-
const API = (import.meta.env.VITE_API_BASE_URL || 'http://localhost:7860').replace(/\/$/, '');
|
| 10 |
-
|
| 11 |
const PIPELINE_STAGES = [
|
| 12 |
'INIT', 'SPEC', 'RTL_GEN', 'RTL_FIX', 'VERIFICATION', 'FORMAL_VERIFY',
|
| 13 |
'COVERAGE_CHECK', 'REGRESSION', 'SDC_GEN', 'FLOORPLAN', 'HARDENING',
|
|
@@ -136,7 +134,7 @@ export const HumanInLoopBuild = () => {
|
|
| 136 |
const effectiveSkipOpenlane = buildMode === 'quick' || skipOpenlane;
|
| 137 |
const effectiveSkipCoverage = skipCoverage || skipStages.has('COVERAGE_CHECK');
|
| 138 |
try {
|
| 139 |
-
const res = await
|
| 140 |
design_name: designName || slugify(prompt),
|
| 141 |
description: prompt,
|
| 142 |
skip_openlane: effectiveSkipOpenlane,
|
|
@@ -169,7 +167,7 @@ export const HumanInLoopBuild = () => {
|
|
| 169 |
abortCtrlRef.current = ctrl;
|
| 170 |
setEvents([]);
|
| 171 |
|
| 172 |
-
fetchEventSource(`${
|
| 173 |
method: 'GET',
|
| 174 |
headers: {
|
| 175 |
'ngrok-skip-browser-warning': 'true',
|
|
@@ -259,13 +257,13 @@ export const HumanInLoopBuild = () => {
|
|
| 259 |
const fetchResult = async (jid: string, status: string) => {
|
| 260 |
setJobStatus(status === 'done' ? 'done' : 'failed');
|
| 261 |
try {
|
| 262 |
-
const res = await
|
| 263 |
setResult(res.data.result);
|
| 264 |
} catch { /* */ }
|
| 265 |
// On failure, fetch partial artifacts from disk
|
| 266 |
if (status !== 'done' && designName) {
|
| 267 |
try {
|
| 268 |
-
const artRes = await
|
| 269 |
setPartialArtifacts(artRes.data.artifacts || []);
|
| 270 |
} catch { /* */ }
|
| 271 |
}
|
|
@@ -276,7 +274,7 @@ export const HumanInLoopBuild = () => {
|
|
| 276 |
if (!approvalData || isSubmitting) return;
|
| 277 |
setIsSubmitting(true);
|
| 278 |
try {
|
| 279 |
-
await
|
| 280 |
stage: approvalData.stage_name,
|
| 281 |
design_name: designName,
|
| 282 |
});
|
|
@@ -309,7 +307,7 @@ export const HumanInLoopBuild = () => {
|
|
| 309 |
if (!approvalData || isSubmitting) return;
|
| 310 |
setIsSubmitting(true);
|
| 311 |
try {
|
| 312 |
-
await
|
| 313 |
stage: approvalData.stage_name,
|
| 314 |
design_name: designName,
|
| 315 |
feedback: feedback || undefined,
|
|
@@ -370,7 +368,7 @@ export const HumanInLoopBuild = () => {
|
|
| 370 |
const handleCancel = async () => {
|
| 371 |
if (abortCtrlRef.current) abortCtrlRef.current.abort();
|
| 372 |
if (jobId) {
|
| 373 |
-
try { await
|
| 374 |
}
|
| 375 |
handleReset();
|
| 376 |
};
|
|
@@ -719,7 +717,7 @@ export const HumanInLoopBuild = () => {
|
|
| 719 |
{a.size > 1024 ? `${(a.size / 1024).toFixed(1)} KB` : `${a.size} B`}
|
| 720 |
</span>
|
| 721 |
<a
|
| 722 |
-
href={`${
|
| 723 |
className="hitl-fail-artifact-dl"
|
| 724 |
download
|
| 725 |
>
|
|
|
|
| 1 |
import { useState, useEffect, useRef } from 'react';
|
|
|
|
| 2 |
import { fetchEventSource } from '@microsoft/fetch-event-source';
|
| 3 |
import { ActivityFeed } from '../components/ActivityFeed';
|
| 4 |
import { StageProgressBar } from '../components/StageProgressBar';
|
| 5 |
import { ApprovalCard } from '../components/ApprovalCard';
|
| 6 |
+
import { api, API_BASE } from '../api';
|
| 7 |
import '../hitl.css';
|
| 8 |
|
|
|
|
|
|
|
| 9 |
const PIPELINE_STAGES = [
|
| 10 |
'INIT', 'SPEC', 'RTL_GEN', 'RTL_FIX', 'VERIFICATION', 'FORMAL_VERIFY',
|
| 11 |
'COVERAGE_CHECK', 'REGRESSION', 'SDC_GEN', 'FLOORPLAN', 'HARDENING',
|
|
|
|
| 134 |
const effectiveSkipOpenlane = buildMode === 'quick' || skipOpenlane;
|
| 135 |
const effectiveSkipCoverage = skipCoverage || skipStages.has('COVERAGE_CHECK');
|
| 136 |
try {
|
| 137 |
+
const res = await api.post(`/build`, {
|
| 138 |
design_name: designName || slugify(prompt),
|
| 139 |
description: prompt,
|
| 140 |
skip_openlane: effectiveSkipOpenlane,
|
|
|
|
| 167 |
abortCtrlRef.current = ctrl;
|
| 168 |
setEvents([]);
|
| 169 |
|
| 170 |
+
fetchEventSource(`${API_BASE}/build/stream/${jid}`, {
|
| 171 |
method: 'GET',
|
| 172 |
headers: {
|
| 173 |
'ngrok-skip-browser-warning': 'true',
|
|
|
|
| 257 |
const fetchResult = async (jid: string, status: string) => {
|
| 258 |
setJobStatus(status === 'done' ? 'done' : 'failed');
|
| 259 |
try {
|
| 260 |
+
const res = await api.get(`/build/result/${jid}`);
|
| 261 |
setResult(res.data.result);
|
| 262 |
} catch { /* */ }
|
| 263 |
// On failure, fetch partial artifacts from disk
|
| 264 |
if (status !== 'done' && designName) {
|
| 265 |
try {
|
| 266 |
+
const artRes = await api.get(`/build/artifacts/${designName}`);
|
| 267 |
setPartialArtifacts(artRes.data.artifacts || []);
|
| 268 |
} catch { /* */ }
|
| 269 |
}
|
|
|
|
| 274 |
if (!approvalData || isSubmitting) return;
|
| 275 |
setIsSubmitting(true);
|
| 276 |
try {
|
| 277 |
+
await api.post(`/approve`, {
|
| 278 |
stage: approvalData.stage_name,
|
| 279 |
design_name: designName,
|
| 280 |
});
|
|
|
|
| 307 |
if (!approvalData || isSubmitting) return;
|
| 308 |
setIsSubmitting(true);
|
| 309 |
try {
|
| 310 |
+
await api.post(`/reject`, {
|
| 311 |
stage: approvalData.stage_name,
|
| 312 |
design_name: designName,
|
| 313 |
feedback: feedback || undefined,
|
|
|
|
| 368 |
const handleCancel = async () => {
|
| 369 |
if (abortCtrlRef.current) abortCtrlRef.current.abort();
|
| 370 |
if (jobId) {
|
| 371 |
+
try { await api.post(`/build/cancel/${jobId}`); } catch { /* */ }
|
| 372 |
}
|
| 373 |
handleReset();
|
| 374 |
};
|
|
|
|
| 717 |
{a.size > 1024 ? `${(a.size / 1024).toFixed(1)} KB` : `${a.size} B`}
|
| 718 |
</span>
|
| 719 |
<a
|
| 720 |
+
href={`${API_BASE}/build/artifacts/${designName}/${encodeURIComponent(a.name)}`}
|
| 721 |
className="hitl-fail-artifact-dl"
|
| 722 |
download
|
| 723 |
>
|