"""
id: rag
title: Lightweight RAG
author: admin
description: Store documents and retrieve by simple keyword scoring.
version: 0.1.0
license: Proprietary
"""
| import json | |
| import re | |
| import time | |
| from pathlib import Path | |
| ROOT = Path("/data/adaptai/rag") | |
| ROOT.mkdir(parents=True, exist_ok=True) | |
| INDEX = ROOT / "index.json" | |
class Tools:
    """Lightweight keyword-based RAG store.

    Documents live as "<doc_id>.txt" files under the module-level ROOT; the
    module-level INDEX file holds a JSON map of doc_id -> {token: count} and
    is rebuilt after every mutation.
    """

    def add_text(self, text: str, doc_id: str = "") -> dict:
        """Store *text* under *doc_id* (auto-generated when empty) and reindex.

        The id is sanitized so it cannot escape ROOT; returns
        {"ok": True, "doc_id": <id actually used>}.
        """
        # time_ns() avoids the same-second overwrite that int(time.time()) allowed.
        doc_id = self._safe_id(doc_id) or f"doc_{time.time_ns()}"
        (ROOT / f"{doc_id}.txt").write_text(text, encoding="utf-8")
        self._reindex()
        return {"ok": True, "doc_id": doc_id}

    def add_file(self, path: str, doc_id: str = "") -> dict:
        """Read the text file at *path* and store it via add_text."""
        p = Path(path)
        if not p.exists():
            return {"ok": False, "error": "file not found"}
        text = p.read_text(encoding="utf-8", errors="replace")
        return self.add_text(text, doc_id)

    def query(self, q: str, top_k: int = 5) -> dict:
        """Rank documents by summed token-frequency overlap with *q*.

        Returns {"query": q, "hits": [{"doc_id", "score", "preview"}, ...]}
        with at most *top_k* hits; a non-positive top_k yields no hits.
        """
        idx = self._load_index()
        tokens = self._tok(q)
        scored = []
        for doc_id, counts in idx.items():
            score = sum(counts.get(t, 0) for t in tokens)
            if score:
                scored.append((score, doc_id))
        # Highest score first; ties broken by doc_id for deterministic output.
        scored.sort(key=lambda pair: (-pair[0], pair[1]))
        hits = []
        for score, doc_id in scored[: max(top_k, 0)]:
            try:
                text = (ROOT / f"{doc_id}.txt").read_text(
                    encoding="utf-8", errors="replace"
                )
            except FileNotFoundError:
                # File deleted since the last reindex; skip the stale entry.
                continue
            hits.append({"doc_id": doc_id, "score": score, "preview": text[:400]})
        return {"query": q, "hits": hits}

    def list(self) -> dict:  # noqa: A003 - shadows builtin; public tool API name kept for compat
        """Return {"docs": [doc_id, ...]} for every indexed document."""
        idx = self._load_index()
        return {"docs": list(idx.keys())}

    def remove(self, doc_id: str) -> dict:
        """Delete a document and rebuild the index; a missing id is a no-op."""
        try:
            # Sanitize so a crafted doc_id cannot unlink files outside ROOT.
            (ROOT / f"{self._safe_id(doc_id)}.txt").unlink(missing_ok=True)
            self._reindex()
            return {"ok": True}
        except Exception as e:  # surface unexpected filesystem errors to the caller
            return {"ok": False, "error": str(e)}

    def _safe_id(self, doc_id: str) -> str:
        """Reduce *doc_id* to filename-safe chars (path-traversal guard)."""
        # Replace separators/oddities, then drop leading dots ("../" residue).
        return re.sub(r"[^A-Za-z0-9_.-]", "_", doc_id).lstrip(".")

    def _tok(self, s: str) -> list:
        """Lowercase alphanumeric tokenizer shared by indexing and querying."""
        return re.findall(r"[a-z0-9]+", s.lower())

    def _reindex(self):
        """Rebuild INDEX from every *.txt file currently under ROOT."""
        idx = {}
        for f in ROOT.glob("*.txt"):
            counts = {}
            for t in self._tok(f.read_text(encoding="utf-8", errors="replace")):
                counts[t] = counts.get(t, 0) + 1
            idx[f.stem] = counts
        INDEX.write_text(json.dumps(idx), encoding="utf-8")

    def _load_index(self) -> dict:
        """Load the JSON index; empty dict when no index exists yet."""
        if not INDEX.exists():
            return {}
        return json.loads(INDEX.read_text(encoding="utf-8"))