gk2410 committed on
Commit
2d6b1ed
·
verified ·
1 Parent(s): 4b8bfab

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +37 -49
app.py CHANGED
@@ -1,39 +1,27 @@
1
- import os
2
- import requests
3
- import numpy as np
4
- import faiss
5
  import gradio as gr
6
  from sentence_transformers import SentenceTransformer
7
  from huggingface_hub import InferenceClient
8
 
9
- # 1. INITIALIZE MODELS
10
  embedder = SentenceTransformer('all-MiniLM-L6-v2')
11
- # Ensure you have HF_TOKEN in your Space Secrets!
12
  client = InferenceClient("meta-llama/Llama-3.3-70B-Instruct", token=os.getenv("HF_TOKEN"))
13
 
14
  def fetch_and_index(query):
15
- """Fetches live book data and indexes it in FAISS."""
16
  try:
17
- url = f"https://openlibrary.org/search.json?q={query}&limit=8"
18
- response = requests.get(url, timeout=5)
19
- data = response.json().get("docs", [])
20
-
21
- if not data: return None, None
22
-
23
- catalog = [
24
- f"{d.get('title')} by {', '.join(d.get('author_name', ['Unknown Author']))}"
25
- for d in data
26
- ]
27
 
28
  embeddings = embedder.encode(catalog)
29
  index = faiss.IndexFlatL2(embeddings.shape[1])
30
  index.add(np.array(embeddings).astype('float32'))
31
-
32
  return index, catalog
33
- except Exception:
34
- return None, None
35
 
36
- def librarian_logic(message, history, user_state):
 
37
  if user_state is None:
38
  user_state = {"step": "ASK_AGE", "age": None, "location": None}
39
 
@@ -42,47 +30,47 @@ def librarian_logic(message, history, user_state):
42
  if message.isdigit():
43
  user_state["age"] = int(message)
44
  user_state["step"] = "ASK_LOCATION"
45
- reply = "Got it. For safety compliance, what is your general location (City/Country)?"
46
- return history + [[message, reply]], user_state, ""
47
- return history + [[message, "How old are you?"]], user_state, ""
 
48
 
49
  if user_state["step"] == "ASK_LOCATION":
50
  user_state["location"] = message
51
  user_state["step"] = "SEARCH_READY"
52
- reply = f"System verified for {user_state['location']}. What book can I find for you?"
53
- return history + [[message, reply]], user_state, ""
54
 
55
  # --- PHASE 2: SEARCH & LLM ---
56
  index, catalog = fetch_and_index(message)
57
  context = "\n- ".join(catalog[:3]) if catalog else "No books found."
58
-
59
- # THE FIX: This specific list-of-dicts format
60
  messages = [
61
- {"role": "system", "content": "You are a professional librarian. Respond only with book titles and authors based on the provided context."},
62
- {"role": "user", "content": f"User Age: {user_state['age']}. Context: {context}. Query: {message}"}
63
  ]
64
 
 
65
  response = ""
66
- # Explicitly use messages=messages
67
- for msg in client.chat_completion(
68
- messages=messages,
69
- max_tokens=300,
70
- stream=True
71
- ):
72
- token = msg.choices[0].delta.content
73
- if token:
74
- response += token
75
- yield history + [[message, response]], user_state, ""
76
 
77
- # --- UI SETUP ---
78
- with gr.Blocks(theme=gr.themes.Soft()) as demo:
79
- gr.Markdown("# 📚 AI Librarian Agent\n*Search for books by title or author.*")
80
-
81
- user_state = gr.State()
 
82
  chatbot = gr.Chatbot()
83
- msg = gr.Textbox(label="Your Input", placeholder="Type age first...")
84
 
85
- msg.submit(librarian_logic, [msg, chatbot, user_state], [chatbot, user_state, msg])
 
 
 
86
 
87
- if __name__ == "__main__":
88
- demo.launch()
 
1
+ import os, requests, numpy as np, faiss
 
 
 
2
  import gradio as gr
3
  from sentence_transformers import SentenceTransformer
4
  from huggingface_hub import InferenceClient
5
 
6
# 1. MODELS
# Sentence embedder used to vectorize the book catalog for FAISS search.
embedder = SentenceTransformer('all-MiniLM-L6-v2')
# Hosted LLM client; reads HF_TOKEN from the environment (Space secret) —
# NOTE(review): if HF_TOKEN is unset, token=None and gated-model calls will fail.
client = InferenceClient("meta-llama/Llama-3.3-70B-Instruct", token=os.getenv("HF_TOKEN"))
9
 
10
def fetch_and_index(query):
    """Fetch live book data from Open Library and index it with FAISS.

    Args:
        query: Free-text book search string typed by the user.

    Returns:
        Tuple ``(index, catalog)`` where ``index`` is a ``faiss.IndexFlatL2``
        built over the embedded catalog strings and ``catalog`` is a list of
        ``"Title by Author"`` strings; ``(None, None)`` when the search
        returns no results or any step fails.
    """
    try:
        # Let requests URL-encode the user text via params= instead of
        # interpolating it raw into the query string (spaces/'&' broke it).
        response = requests.get(
            "https://openlibrary.org/search.json",
            params={"q": query, "limit": 5},
            timeout=5,
        )
        data = response.json().get("docs", [])
        # BUG FIX: was `return None` (a single value), which made the
        # caller's `index, catalog = fetch_and_index(...)` unpacking raise
        # TypeError on empty results. Always return a 2-tuple.
        if not data:
            return None, None
        catalog = [
            f"{d.get('title')} by {', '.join(d.get('author_name', ['Unknown']))}"
            for d in data
        ]
        embeddings = embedder.encode(catalog)
        index = faiss.IndexFlatL2(embeddings.shape[1])
        index.add(np.array(embeddings).astype('float32'))
        return index, catalog
    except Exception:
        # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
        # still propagate; any network/parse/FAISS failure degrades to
        # "no results" rather than crashing the chat handler.
        return None, None
 
22
 
23
+ def librarian_bot(message, history, user_state):
24
+ # Initialize State
25
  if user_state is None:
26
  user_state = {"step": "ASK_AGE", "age": None, "location": None}
27
 
 
30
  if message.isdigit():
31
  user_state["age"] = int(message)
32
  user_state["step"] = "ASK_LOCATION"
33
+ reply = "Got it. What is your City/Country for compliance?"
34
+ else:
35
+ reply = "Welcome! I am your Librarian. To start, how old are you?"
36
+ return history + [[message, reply]], user_state
37
 
38
  if user_state["step"] == "ASK_LOCATION":
39
  user_state["location"] = message
40
  user_state["step"] = "SEARCH_READY"
41
+ reply = f"Verification complete. I'm searching for books in {message}. What can I find for you?"
42
+ return history + [[message, reply]], user_state
43
 
44
  # --- PHASE 2: SEARCH & LLM ---
45
  index, catalog = fetch_and_index(message)
46
  context = "\n- ".join(catalog[:3]) if catalog else "No books found."
47
+
 
48
  messages = [
49
+ {"role": "system", "content": "You are a professional librarian. Give titles and authors."},
50
+ {"role": "user", "content": f"Context: {context}\nQuery: {message}"}
51
  ]
52
 
53
+ # Generating response
54
  response = ""
55
+ try:
56
+ # Non-streaming call for maximum reliability in this debug phase
57
+ completion = client.chat_completion(messages=messages, max_tokens=200)
58
+ response = completion.choices[0].message.content
59
+ except Exception as e:
60
+ response = f"Connection error: {str(e)}"
 
 
 
 
61
 
62
+ return history + [[message, response]], user_state
63
+
64
# --- CLEAN UI SETUP ---
# Minimal Gradio Blocks app: one chatbot, one textbox, per-session state.
with gr.Blocks() as demo:
    gr.Markdown("# 📚 AI Librarian Agent")
    # Per-session state object passed to and returned by librarian_bot.
    state = gr.State()
    chatbot = gr.Chatbot()
    msg = gr.Textbox(placeholder="Enter age, then location, then book query...")

    # We use a simple .submit() that updates the chatbot and state simultaneously
    msg.submit(librarian_bot, [msg, chatbot, state], [chatbot, state])
    # Clear the textbox after submit
    # NOTE(review): two .submit() handlers on the same event — Gradio queues
    # both; the lambda only clears the textbox and touches no other output.
    msg.submit(lambda: "", None, [msg])

demo.launch()