#!/usr/bin/env python
"""Minimal API smoke test for fic-agent.
Checks:
1) LLM chat completion
2) Embedding API call
Exit code:
0 = all requested checks passed
1 = at least one requested check failed
"""
from __future__ import annotations
import argparse
import sys
from fic_agent.config import RuntimeConfig
def _mask_len(value: str | None) -> str:
if not value:
return "0"
return str(len(value))
def _test_llm(cfg: RuntimeConfig) -> bool:
if not cfg.llm_api_key:
print("[LLM] FAIL: missing llm_api_key")
return False
try:
from openai import OpenAI # type: ignore
except Exception as e:
print(f"[LLM] FAIL: openai import error: {e}")
return False
try:
client = OpenAI(base_url=cfg.llm_base_url, api_key=cfg.llm_api_key)
resp = client.chat.completions.create(
model=cfg.llm_model,
messages=[
{"role": "system", "content": "You are a concise assistant."},
{"role": "user", "content": "Reply with exactly: API_OK"},
],
temperature=0.0,
max_tokens=20,
)
text = (resp.choices[0].message.content or "").strip()
usage = getattr(resp, "usage", None)
total = getattr(usage, "total_tokens", None) if usage is not None else None
print(f"[LLM] PASS: model={cfg.llm_model} total_tokens={total} reply={text!r}")
return True
except Exception as e:
print(f"[LLM] FAIL: {type(e).__name__}: {e}")
return False
def _test_embedding(cfg: RuntimeConfig) -> bool:
if not cfg.embedding_api_key:
print("[EMBED] FAIL: missing embedding_api_key")
return False
try:
from openai import OpenAI # type: ignore
except Exception as e:
print(f"[EMBED] FAIL: openai import error: {e}")
return False
try:
client = OpenAI(base_url=cfg.embedding_base_url, api_key=cfg.embedding_api_key)
resp = client.embeddings.create(
model=cfg.embedding_model,
input=["api smoke test"],
)
data = getattr(resp, "data", None) or []
if not data:
print("[EMBED] FAIL: empty data")
return False
vec = getattr(data[0], "embedding", None)
dim = len(vec) if isinstance(vec, list) else 0
usage = getattr(resp, "usage", None)
total = getattr(usage, "total_tokens", None) if usage is not None else None
print(f"[EMBED] PASS: model={cfg.embedding_model} dim={dim} total_tokens={total}")
return True
except Exception as e:
print(f"[EMBED] FAIL: {type(e).__name__}: {e}")
return False
def main() -> int:
    """Entry point: run the requested API checks and report the result.

    Returns 0 when every requested check passed (or all were skipped),
    1 when at least one requested check failed.
    """
    parser = argparse.ArgumentParser(description="Minimal API smoke test for fic-agent")
    parser.add_argument("--skip-llm", action="store_true", help="Skip LLM chat test")
    parser.add_argument("--skip-embedding", action="store_true", help="Skip embedding test")
    args = parser.parse_args()

    cfg = RuntimeConfig()
    # Dump endpoints/models plus key *lengths* only — never the keys themselves.
    print(
        "[CFG] "
        f"llm_base_url={cfg.llm_base_url} llm_model={cfg.llm_model} llm_key_len={_mask_len(cfg.llm_api_key)}"
    )
    print(
        "[CFG] "
        f"embedding_base_url={cfg.embedding_base_url} embedding_model={cfg.embedding_model} "
        f"embedding_key_len={_mask_len(cfg.embedding_api_key)}"
    )

    # Run every requested check even if an earlier one fails, so the
    # output always covers all of them.
    results: list = []
    if not args.skip_llm:
        results.append(_test_llm(cfg))
    if not args.skip_embedding:
        results.append(_test_embedding(cfg))

    if all(results):
        print("API smoke test: PASS")
        return 0
    print("API smoke test: FAIL")
    return 1
if __name__ == "__main__":
    # Propagate main()'s exit code to the shell.
    raise SystemExit(main())
|