gk2410 committed on
Commit
dea41fe
·
verified ·
1 Parent(s): d687f6f

Create app.py

Browse files
Files changed (1) hide show
  1. app.py +106 -0
app.py ADDED
@@ -0,0 +1,106 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import os
2
+ import requests
3
+ import numpy as np
4
+ import faiss
5
+ import gradio as gr
6
+ from sentence_transformers import SentenceTransformer
7
+ from huggingface_hub import InferenceClient
8
+
9
# 1. INITIALIZE MODELS
# The MiniLM sentence-transformer embedder runs locally on CPU; the
# Llama 3.3 chat model is invoked remotely via the Hugging Face Inference
# API, authenticated with the HF_TOKEN environment variable.
embedder = SentenceTransformer('all-MiniLM-L6-v2')
client = InferenceClient("meta-llama/Llama-3.3-70B-Instruct", token=os.getenv("HF_TOKEN"))
13
+
14
def fetch_and_index(query):
    """Fetch live book records from Open Library and index them with FAISS.

    Args:
        query: Free-text search string typed by the user.

    Returns:
        ``(index, catalog)`` on success, where ``index`` is a FAISS
        ``IndexFlatL2`` over the catalog embeddings and ``catalog`` is a
        list of "Title by Author(s)" strings; ``(None, None)`` when the
        search yields nothing or any network/parsing error occurs
        (deliberate best-effort degradation).
    """
    try:
        # Let requests build the query string so user input containing
        # spaces, '&', '#', or non-ASCII characters is URL-encoded
        # correctly instead of being interpolated raw into the URL.
        resp = requests.get(
            "https://openlibrary.org/search.json",
            params={"q": query, "limit": 8},
            timeout=5,
        )
        docs = resp.json().get("docs", [])
        if not docs:
            return None, None

        # Extract strictly Title and Author(s) for each record.
        catalog = [
            f"{d.get('title')} by {', '.join(d.get('author_name', ['Unknown Author']))}"
            for d in docs
        ]

        # Build the vector index: embed the catalog, then add the
        # embeddings to an L2 flat index (FAISS expects float32).
        embeddings = embedder.encode(catalog)
        index = faiss.IndexFlatL2(embeddings.shape[1])
        index.add(np.array(embeddings).astype('float32'))
        return index, catalog
    except Exception:
        # Best-effort by design: any failure degrades to "no results"
        # rather than crashing the chat handler.
        return None, None
36
+
37
def librarian_logic(message, history, user_state):
    """Chat handler: two-step intake gate (age, then location) followed by
    live book search, semantic FAISS ranking, and LLM answer synthesis.

    Args:
        message: The user's latest chat message.
        history: Gradio chat history as a list of (user, bot) tuples;
            mutated in place by appending the new exchange.
        user_state: Per-session dict with keys "step", "age", "location";
            ``None`` on the first turn.

    Returns:
        ``(history, user_state, "")`` — the trailing empty string clears
        the input textbox.
    """
    # First turn: start the intake flow.
    if user_state is None:
        user_state = {"step": "ASK_AGE", "age": None, "location": None}

    # --- PHASE 1: SAFETY GATE ---
    if user_state["step"] == "ASK_AGE":
        if message.isdigit():
            user_state["age"] = int(message)
            user_state["step"] = "ASK_LOCATION"
            reply = "Understood. For regional safety compliance, what is your general location (City/Country)?"
            history.append((message, reply))
            return history, user_state, ""

        # Non-numeric input: (re-)prompt for the age.
        reply = "Welcome to the AI Library! Before we start, for safety and compliance: How old are you?"
        history.append((message, reply))
        return history, user_state, ""

    if user_state["step"] == "ASK_LOCATION":
        user_state["location"] = message
        user_state["step"] = "SEARCH_READY"
        reply = f"System verified for {user_state['location']}. I am now your active Librarian. What books are you looking for?"
        history.append((message, reply))
        return history, user_state, ""

    # --- PHASE 2: SEARCH ACTION ---
    index, catalog = fetch_and_index(message)

    # Explicit None check: fetch_and_index signals failure with None, and
    # boolean coercion of a FAISS index object is not a reliable test.
    if index is None:
        reply = "I couldn't find any live records for that. Try another title or author?"
        history.append((message, reply))
        return history, user_state, ""

    # Semantic retrieval: top-k nearest catalog entries to the query.
    query_vec = embedder.encode([message])
    _, I = index.search(np.array(query_vec).astype('float32'), k=min(3, len(catalog)))
    results = [catalog[i] for i in I[0]]

    # Agent synthesis: ground the LLM strictly in the retrieved titles,
    # with an extra guardrail for under-13 users.
    safety_rule = "The user is a child. Strictly recommend age-appropriate titles." if user_state["age"] < 13 else ""
    prompt = (
        f"Context: {results}\n"
        f"User (Age {user_state['age']}, Loc {user_state['location']}) asks: {message}\n"
        f"{safety_rule}\n"
        "Present the Title and Author of these matches clearly and briefly."
    )

    response = client.chat_completion(
        [{"role": "system", "content": "You are a professional librarian agent."},
         {"role": "user", "content": prompt}],
        max_tokens=250
    ).choices[0].message.content

    history.append((message, response))
    return history, user_state, ""
92
+
93
# --- UI SETUP ---
with gr.Blocks(theme=gr.themes.Soft()) as demo:
    gr.Markdown("# 📚 AI Librarian Agent\n*Live Web Search + Semantic FAISS Ranking*")

    # Per-session conversation state and the chat widgets.
    user_state = gr.State()
    chatbot = gr.Chatbot()
    msg = gr.Textbox(label="Your Message", placeholder="Type age first, then chat...")
    clear = gr.Button("Reset Session")

    def _reset_session():
        # Clear the chat pane, drop the intake state, blank the textbox.
        return None, None, ""

    # Enter in the textbox drives the agent; the button wipes everything.
    msg.submit(librarian_logic, [msg, chatbot, user_state], [chatbot, user_state, msg])
    clear.click(_reset_session, None, [chatbot, user_state, msg])

if __name__ == "__main__":
    demo.launch()