# server.py — Executor API
# (renamed from server.pu; upstream commit 4fdfed7)
import os, time, json, uuid, shlex, asyncio
from pathlib import Path
from typing import Optional, Dict, Any, List
import subprocess
from fastapi import FastAPI, Header, HTTPException, Request
from fastapi.responses import StreamingResponse
from pydantic import BaseModel
from cryptography.fernet import Fernet
# -------------------------
# Config
# -------------------------
APP_TOKEN = os.getenv("APP_TOKEN", "") # required auth token
# Root directory that holds every project workspace; created eagerly at import.
BASE = Path(os.getenv("WORKDIR", "/home/appuser/workspace")).resolve()
BASE.mkdir(parents=True, exist_ok=True)
MASTER_KEY = os.getenv("MASTER_KEY", "") # encrypts in-app secrets (project secrets)
# Fernet cipher derived from MASTER_KEY; None disables the secrets endpoints.
FERNET = Fernet(MASTER_KEY.encode()) if MASTER_KEY else None
MAX_LOG_LINES = 5000 # per-project log is truncated to the newest N lines
DEFAULT_TIMEOUT = 600 # seconds
app = FastAPI(title="Executor API", version="0.2")
# -------------------------
# Helpers
# -------------------------
def require_auth(x_token: Optional[str]) -> None:
    """Reject the request unless *x_token* matches the configured APP_TOKEN.

    Raises 500 when the server itself is misconfigured (no token set) and
    401 when the caller's token does not match.
    """
    expected = APP_TOKEN
    if not expected:
        raise HTTPException(status_code=500, detail="APP_TOKEN is not set")
    if x_token != expected:
        raise HTTPException(status_code=401, detail="Invalid token")
def jail(project_id: str, rel: str = "") -> Path:
    """Resolve a path inside a project's workspace, refusing escapes.

    Both *project_id* and *rel* are untrusted input. The previous
    implementation compared string prefixes (``str(target).startswith(str(root))``),
    which wrongly accepted sibling directories such as ``<root>-evil`` and never
    validated *project_id* itself (e.g. ``../other`` escaped BASE entirely).
    Containment is now checked with exact parent relationships at both levels.

    Raises HTTPException(400) on any path that leaves the workspace.
    """
    root = (BASE / project_id).resolve()
    # project_id must land at or under BASE (guards "../" and absolute ids).
    if root != BASE and BASE not in root.parents:
        raise HTTPException(status_code=400, detail="Path escapes project workspace")
    target = (root / rel).resolve() if rel else root
    # rel must land at or under the project root.
    if target != root and root not in target.parents:
        raise HTTPException(status_code=400, detail="Path escapes project workspace")
    return target
def proj_paths(project_id: str) -> Dict[str, Path]:
    """Return the well-known per-project file locations, keyed by role."""
    root = jail(project_id)
    # Hidden bookkeeping files live directly under the project root.
    special = {
        "log": ".executor.log",
        "cfg": ".aibuilder.json",
        "secrets": ".secrets.json.enc",
    }
    mapping: Dict[str, Path] = {"root": root}
    for role, filename in special.items():
        mapping[role] = root / filename
    return mapping
def log_append(log_file: Path, line: str) -> None:
    """Append *line* to the project log, keeping only the newest MAX_LOG_LINES.

    The whole file is rewritten on each call — simple, and bounded because the
    log is capped. The original primed a missing file with an empty write and
    then re-read it (LBYL, two extra I/O ops); a missing file is now simply
    treated as an empty log (EAFP).
    """
    log_file.parent.mkdir(parents=True, exist_ok=True)
    try:
        lines = log_file.read_text(encoding="utf-8", errors="ignore").splitlines()
    except FileNotFoundError:
        lines = []  # first append creates the file below
    lines.append(line)
    if len(lines) > MAX_LOG_LINES:
        lines = lines[-MAX_LOG_LINES:]
    log_file.write_text("\n".join(lines) + "\n", encoding="utf-8")
def read_cfg(cfg_path: Path) -> Dict[str, Any]:
    """Load a project's JSON config; a missing or unreadable file yields {}."""
    try:
        raw = cfg_path.read_text(encoding="utf-8")
        return json.loads(raw)
    except Exception:
        # Best-effort by design: absent file or malformed JSON means "no config".
        return {}
def write_cfg(cfg_path: Path, cfg: Dict[str, Any]) -> None:
    """Persist *cfg* to disk as pretty-printed (indent=2) JSON."""
    payload = json.dumps(cfg, indent=2)
    cfg_path.write_text(payload, encoding="utf-8")
def detect_stack(root: Path) -> List[str]:
    """Guess the project's toolchains from well-known marker files.

    Returns stack names in a fixed order, or ["unknown"] when nothing matches.
    """
    # (stack name, marker files that imply it) — order defines output order.
    markers = [
        ("node", ("package.json",)),
        ("python", ("pyproject.toml", "requirements.txt")),
        ("go", ("go.mod",)),
        ("rust", ("Cargo.toml",)),
        ("java", ("pom.xml", "build.gradle", "build.gradle.kts")),
        ("compose", ("docker-compose.yml", "compose.yml")),
    ]
    stacks = [
        name
        for name, files in markers
        if any((root / f).exists() for f in files)
    ]
    return stacks or ["unknown"]
def load_secrets(paths: Dict[str, Path]) -> Dict[str, str]:
    """Decrypt and return the project's secret map.

    Returns {} when secrets are disabled (no master key) or none are stored.
    """
    secrets_file = paths["secrets"]
    if FERNET is None or not secrets_file.exists():
        return {}
    decrypted = FERNET.decrypt(secrets_file.read_bytes())
    return json.loads(decrypted.decode("utf-8"))
def save_secrets(paths: Dict[str, Path], secrets: Dict[str, str]) -> None:
    """Encrypt *secrets* with the master key and write the blob to disk.

    Raises HTTPException(500) when no MASTER_KEY is configured.
    """
    if FERNET is None:
        raise HTTPException(status_code=500, detail="MASTER_KEY not set; secrets disabled")
    blob = FERNET.encrypt(json.dumps(secrets).encode("utf-8"))
    paths["secrets"].write_bytes(blob)
def run_process(
    *,
    cmd: str,
    cwd: Path,
    log_file: Path,
    env_extra: Optional[Dict[str, str]] = None,
    timeout: int = DEFAULT_TIMEOUT,
) -> Dict[str, Any]:
    """Run *cmd* (tokenized with shlex, no shell) and append its output to the log.

    Returns {"id", "code", "stdout", "stderr", "seconds"}; a timeout is
    reported with the conventional exit code 124. Because no shell is
    involved, shell operators such as '&&' or '|' reach the program as
    literal arguments.

    Fix vs. original: on timeout, whatever the child wrote before being
    killed was discarded; it is now logged and returned as partial stdout.
    """
    run_id = str(uuid.uuid4())[:8]
    started = time.time()
    log_append(log_file, f"\n[{run_id}]$ {cmd}")
    args = shlex.split(cmd)  # no shell by default
    env = os.environ.copy()
    if env_extra:
        env.update(env_extra)
    try:
        p = subprocess.run(
            args,
            cwd=str(cwd),
            capture_output=True,
            text=True,
            env=env,
            timeout=timeout,
        )
    except subprocess.TimeoutExpired as exc:
        # TimeoutExpired.stdout may be None, bytes, or str depending on
        # platform/version — normalize defensively before logging.
        partial = exc.stdout or ""
        if isinstance(partial, bytes):
            partial = partial.decode("utf-8", errors="ignore")
        if partial:
            log_append(log_file, partial.rstrip())
        log_append(log_file, f"[{run_id}] TIMEOUT after {timeout}s")
        return {"id": run_id, "code": 124, "stdout": partial, "stderr": f"TIMEOUT after {timeout}s", "seconds": timeout}
    dur = time.time() - started
    if p.stdout:
        log_append(log_file, p.stdout.rstrip())
    if p.stderr:
        log_append(log_file, "[stderr]")
        log_append(log_file, p.stderr.rstrip())
    log_append(log_file, f"[{run_id}] exit={p.returncode} time={dur:.2f}s")
    return {"id": run_id, "code": p.returncode, "stdout": p.stdout, "stderr": p.stderr, "seconds": dur}
# -------------------------
# Models
# -------------------------
class CreateProjectReq(BaseModel):
    """Request body for POST /projects."""
    name: Optional[str] = None  # optional display name; falls back to the generated id
class ApplyReq(BaseModel):
    """Request body for POST /projects/{id}/apply — write one file."""
    path: str     # workspace-relative file path (passed through jail())
    content: str  # full file contents, written as UTF-8
class ExecReq(BaseModel):
    """Request body for POST /projects/{id}/exec."""
    cmd: str                            # command line, tokenized by shlex (no shell)
    cwd: Optional[str] = None           # workspace-relative working dir; default: project root
    timeout_sec: int = DEFAULT_TIMEOUT  # process is killed after this many seconds
class SecretSetReq(BaseModel):
    """Request body for POST /projects/{id}/secrets/set."""
    key: str    # env-var name injected into exec/verify runs
    value: str  # plaintext value; stored Fernet-encrypted on disk
class VerifyReq(BaseModel):
    """Request body for POST /projects/{id}/verify.

    Each stage, when set, replaces the auto-detected command list for that stage.
    """
    install: Optional[List[str]] = None
    build: Optional[List[str]] = None
    test: Optional[List[str]] = None
# -------------------------
# Routes
# -------------------------
@app.get("/")
def root():
    """Liveness probe; also points callers at the interactive docs."""
    payload = {"ok": True, "service": "executor", "docs": "/docs"}
    return payload
@app.post("/projects")
def create_project(req: CreateProjectReq, x_token: Optional[str] = Header(default=None)):
    """Create a fresh workspace with a default config and empty log."""
    require_auth(x_token)
    pid = str(uuid.uuid4())[:12]
    paths = proj_paths(pid)
    paths["root"].mkdir(parents=True, exist_ok=True)
    paths["log"].write_text("", encoding="utf-8")
    # Per-stack command defaults; override later by writing .aibuilder.json
    # through /apply if a project needs something different.
    defaults = {
        "pip_install_cmd": "python -m pip install -r requirements.txt",
        "pytest_cmd": "pytest -q",
        "node_install_cmd": "npm ci",
        "node_build_cmd": "npm run build",
        "node_test_cmd": "npm test",
    }
    cfg = {"name": req.name or pid, "created_at": time.time(), **defaults}
    write_cfg(paths["cfg"], cfg)
    log_append(paths["log"], f"[system] project created: {cfg['name']} ({pid})")
    return {"project_id": pid, "name": cfg["name"]}
@app.post("/projects/{project_id}/apply")
def apply_file(project_id: str, req: ApplyReq, x_token: Optional[str] = Header(default=None)):
    """Write (or overwrite) one file inside the project workspace."""
    require_auth(x_token)
    paths = proj_paths(project_id)
    target = jail(project_id, req.path)  # reject paths escaping the workspace
    target.parent.mkdir(parents=True, exist_ok=True)
    target.write_text(req.content, encoding="utf-8")
    log_append(paths["log"], f"[write] {req.path} ({len(req.content)} bytes)")
    return {"ok": True, "path": req.path}
@app.post("/projects/{project_id}/exec")
def exec_cmd(project_id: str, req: ExecReq, x_token: Optional[str] = Header(default=None)):
    """Run one command inside the project, with project secrets in its env."""
    require_auth(x_token)
    paths = proj_paths(project_id)
    workdir = jail(project_id, req.cwd) if req.cwd else paths["root"]
    result = run_process(
        cmd=req.cmd,
        cwd=workdir,
        log_file=paths["log"],
        env_extra=load_secrets(paths),
        timeout=req.timeout_sec,
    )
    return result
@app.get("/projects/{project_id}/logs")
def get_logs(project_id: str, lines: int = 200, x_token: Optional[str] = Header(default=None)):
    """Return the last *lines* log lines (clamped to 1..2000)."""
    require_auth(x_token)
    paths = proj_paths(project_id)
    all_lines = paths["log"].read_text(encoding="utf-8", errors="ignore").splitlines()
    count = min(max(lines, 1), 2000)  # same clamp as max(1, min(lines, 2000))
    return {"lines": all_lines[-count:]}
@app.get("/projects/{project_id}/events")
async def sse_events(project_id: str, request: Request, x_token: Optional[str] = Header(default=None)):
    """Stream new log lines as server-sent events, polling the file every 0.5s."""
    require_auth(x_token)
    paths = proj_paths(project_id)

    async def event_stream():
        emitted = 0  # number of characters already sent to this client
        while not await request.is_disconnected():
            text = paths["log"].read_text(encoding="utf-8", errors="ignore")
            if len(text) != emitted:
                fresh = text[emitted:]
                emitted = len(text)
                for line in fresh.splitlines():
                    yield f"data: {line}\n\n"
            await asyncio.sleep(0.5)

    return StreamingResponse(event_stream(), media_type="text/event-stream")
# In-app secrets (per project)
@app.post("/projects/{project_id}/secrets/set")
def set_secret(project_id: str, req: SecretSetReq, x_token: Optional[str] = Header(default=None)):
    """Upsert one encrypted project secret; only the key name is logged."""
    require_auth(x_token)
    paths = proj_paths(project_id)
    current = load_secrets(paths)
    current[req.key] = req.value
    save_secrets(paths, current)  # raises 500 if MASTER_KEY is unset
    log_append(paths["log"], f"[secret] set {req.key}")
    return {"ok": True}
@app.post("/projects/{project_id}/verify")
def verify(project_id: str, req: VerifyReq, x_token: Optional[str] = Header(default=None)):
    """
    "Done means done": ok=True only if install+build+test all succeed.

    Commands are auto-detected from the stacks present in the workspace
    (overridable per stage via the request body) and executed through
    run_process, which tokenizes with shlex and uses no shell.
    """
    require_auth(x_token)
    paths = proj_paths(project_id)
    root = paths["root"]
    cfg = read_cfg(paths["cfg"])
    secrets = load_secrets(paths)
    stacks = detect_stack(root)
    install_cmds: List[str] = []
    build_cmds: List[str] = []
    test_cmds: List[str] = []
    if "node" in stacks:
        install_cmds.append(cfg.get("node_install_cmd", "npm ci"))
        build_cmds.append(cfg.get("node_build_cmd", "npm run build"))
        test_cmds.append(cfg.get("node_test_cmd", "npm test"))
    if "python" in stacks:
        if (root / "requirements.txt").exists():
            install_cmds.append(cfg.get("pip_install_cmd", "python -m pip install -r requirements.txt"))
        if (root / "pyproject.toml").exists():
            # BUGFIX: run_process executes without a shell, so the former single
            # string "python -m pip install -U pip && python -m pip install ."
            # handed "&&" to python as a literal argument and always failed.
            # Issue the two steps as separate commands instead.
            install_cmds.append("python -m pip install -U pip")
            install_cmds.append("python -m pip install .")
        test_cmds.append(cfg.get("pytest_cmd", "pytest -q"))
    # Explicit per-stage overrides from the request replace detection entirely.
    if req.install is not None:
        install_cmds = req.install
    if req.build is not None:
        build_cmds = req.build
    if req.test is not None:
        test_cmds = req.test
    log_append(paths["log"], f"[verify] stacks={stacks}")
    results: Dict[str, List[Dict[str, Any]]] = {"install": [], "build": [], "test": []}

    def run_stage(cmds: List[str], bucket: List[Dict[str, Any]]) -> bool:
        # Run commands in order, recording each result; stop at first failure.
        for cmd in cmds:
            r = run_process(cmd=cmd, cwd=root, log_file=paths["log"], env_extra=secrets, timeout=DEFAULT_TIMEOUT)
            bucket.append(r)
            if r["code"] != 0:
                return False
        return True

    # `and` short-circuits, so later stages are skipped once one fails —
    # same semantics as the original's nested `if ok:` blocks.
    ok = (
        run_stage(install_cmds, results["install"])
        and run_stage(build_cmds, results["build"])
        and run_stage(test_cmds, results["test"])
    )
    return {
        "ok": ok,
        "stacks": stacks,
        "pipeline": {"install": install_cmds, "build": build_cmds, "test": test_cmds},
        "results": results,
        "done_definition": "ok==True means install+build+test all exited with code 0"
    }