# Custom-LLM-Chat — tests/smoke_test.py
# Author: Bhaskar Ram
# Commit a465955 — feat: apply all 15 upgrades — BGE embeddings, cosine FAISS,
# streaming LLM, tenacity retry, dotenv, Dockerfile, tests
"""
tests/smoke_test.py
Quick sanity check — verifies imports and a basic FAISS index round-trip.
Run with: python -m pytest tests/smoke_test.py -v
"""
import sys
import os
# Prepend the project root (one level above tests/) to sys.path so that
# `from rag import ...` resolves regardless of the directory pytest runs from.
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), "..")))
def test_imports():
    """Smoke-check that every RAG module can be imported without error."""
    import rag.document_loader  # noqa: F401
    import rag.embedder  # noqa: F401
    import rag.retriever  # noqa: F401
    import rag.chain  # noqa: F401
def test_index_and_retrieve():
    """Build a tiny FAISS index and assert a basic retrieval round-trip.

    Checks that:
    - the freshly built index contains at least one vector,
    - a relevant query returns at least one result,
    - each result carries ``source``/``text`` keys and a score inside the
      valid cosine-similarity range.
    """
    from rag.embedder import build_index
    from rag.retriever import retrieve

    docs = [
        {"source": "test.txt", "text": "The refund policy allows returns within 30 days of purchase."},
        {"source": "test.txt", "text": "Contact support at support@example.com for assistance."},
    ]
    idx = build_index(docs)
    assert idx.index.ntotal > 0, "Index should have at least one vector"

    results = retrieve("What is the refund policy?", idx, top_k=2)
    assert len(results) > 0, "Should return at least one result"

    # Cosine similarity lies in [-1, 1]; allow a small tolerance for float
    # rounding.  (The previous check used a magic 1.01 upper bound and a 0.0
    # lower bound that contradicted its own "(0, 1]" comment and could
    # falsely fail for dissimilar chunks.)
    eps = 1e-6
    for r in results:
        assert -1.0 - eps <= r["score"] <= 1.0 + eps, f"Unexpected score: {r['score']}"
        assert "source" in r and "text" in r
def test_chunk_not_empty():
    """Every chunk produced by the chunker must contain non-whitespace text."""
    from rag.embedder import _chunk_text

    sample = "Hello world. " * 100
    chunks = _chunk_text("doc.txt", sample)
    assert len(chunks) > 0
    assert all(chunk["text"].strip() for chunk in chunks)