File size: 1,589 Bytes
2dd29df
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
import os
import pickle
import faiss
import gradio as gr
from sentence_transformers import SentenceTransformer

# NOTE: App originally created for Chroma; however, Apple Silicon errors
# proved too difficult to resolve. Accordingly, ChatGPT 5.1 was consulted at
# 12:50pm on 11/25/25 for assistance in adopting FAISS as a suitable alternative.

# Directory containing this script; index/metadata files live alongside it.
ROOT = os.path.dirname(os.path.abspath(__file__))

INDEX_PATH = os.path.join(ROOT, "faiss_index.bin")
META_PATH = os.path.join(ROOT, "faiss_meta.pkl")

# Load the prebuilt FAISS index from disk.
index = faiss.read_index(INDEX_PATH)

# Load metadata: parallel sequences of chunk texts, ids, and per-chunk
# metadata dicts (each with "source" and "chunk" keys — see semantic_search).
# `with` ensures the file handle is closed (the original left it dangling).
# NOTE(review): pickle is only safe on files produced by our own indexer.
with open(META_PATH, "rb") as f:
    texts, ids, meta = pickle.load(f)

# Query-embedding model; must match the model used when building the index.
model = SentenceTransformer("sentence-transformers/all-MiniLM-L6-v2")

def semantic_search(query, k=3):
    """Return the top-*k* chunks most similar to *query*, as Markdown.

    Args:
        query: Free-text search string. Blank/whitespace input yields a prompt.
        k: Number of results to return (Gradio sliders may pass a float,
           so it is coerced to int before the FAISS call).

    Returns:
        A Markdown string with one "Result" section per retrieved chunk.
    """
    if not query.strip():
        return "Enter a search query."

    # FAISS requires an integer k; the UI slider may deliver e.g. 3.0.
    k = int(k)

    q_emb = model.encode([query]).astype("float32")
    D, I = index.search(q_emb, k)

    # Build the output as a list of parts and join once (avoids quadratic +=).
    parts = ["# Search Results\n\n"]
    for rank, idx in enumerate(I[0], start=1):
        # FAISS pads with -1 when the index holds fewer than k vectors;
        # meta[-1]/texts[-1] would silently present the last chunk as a hit.
        if idx < 0:
            continue
        parts.append(f"### Result {rank}\n")
        parts.append(
            f"**Source:** {meta[idx]['source']} | **Chunk:** {meta[idx]['chunk']}\n\n"
        )
        parts.append(f"{texts[idx]}\n\n---\n\n")

    return "".join(parts)

# Wire the search function into a simple Gradio UI: a query textbox plus a
# result-count slider (1–10, default 3) mapped onto semantic_search(query, k),
# with the Markdown string rendered as the output.
demo = gr.Interface(
    fn=semantic_search,
    inputs=[
        gr.Textbox(label="Query", lines=2),
        gr.Slider(1, 10, value=3, step=1, label="Results")
    ],
    outputs=gr.Markdown(label="Results"),
    title="FAISS Semantic Search Engine",
    description="Search Substack posts using semantic similarity."
)

# Launch the local Gradio server only when run as a script (not on import).
if __name__ == "__main__":
    demo.launch()