gk2410 committed on
Commit
96828e9
·
verified ·
1 Parent(s): b5401b5

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +54 -22
app.py CHANGED
@@ -3,42 +3,74 @@ import requests
3
  import gradio as gr
4
  from huggingface_hub import InferenceClient
5
 
6
- # Initialize the LLM
7
  client = InferenceClient("meta-llama/Llama-3.3-70B-Instruct", token=os.getenv("HF_TOKEN"))
8
 
9
  def search_books(query):
10
- """Simple Web Search for books."""
11
  try:
12
  url = f"https://openlibrary.org/search.json?q={query}&limit=3"
13
  data = requests.get(url, timeout=5).json().get("docs", [])
14
- if not data: return "No books found."
15
  return "\n".join([f"{d.get('title')} by {d.get('author_name', ['Unknown'])[0]}" for d in data])
16
  except:
17
- return "Search error."
18
 
19
- def chat_fn(message, history):
20
- # 1. Get live data
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
21
  context = search_books(message)
22
 
23
- # 2. Build the API prompt
 
 
24
  messages = [
25
- {"role": "system", "content": "You are a librarian. Use the context to suggest books."},
26
- {"role": "user", "content": f"Context: {context}\nUser Query: {message}"}
27
  ]
28
-
29
- # 3. Get LLM response
30
  try:
31
- response = client.chat_completion(messages=messages, max_tokens=200)
32
- return response.choices[0].message.content
33
  except Exception as e:
34
- return f"Error: {str(e)}"
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
35
 
36
- # The most basic Gradio Interface - No custom state, no complex types.
37
- demo = gr.ChatInterface(
38
- fn=chat_fn,
39
- title="AI Librarian (Minimalist Build)",
40
- description="Ask me about any book. I'll search the web and answer."
41
- )
42
 
43
- if __name__ == "__main__":
44
- demo.launch()
 
3
  import gradio as gr
4
  from huggingface_hub import InferenceClient
5
 
 
6
  client = InferenceClient("meta-llama/Llama-3.3-70B-Instruct", token=os.getenv("HF_TOKEN"))
7
 
8
def search_books(query):
    """Look up *query* on the Open Library search API.

    Returns a newline-separated "Title by Author" string for up to three
    matches, or a human-readable fallback message when there are no
    results or the request fails.
    """
    try:
        # Pass the query via `params=` so requests URL-encodes it;
        # interpolating raw user text into the URL breaks on '&', '#', etc.
        resp = requests.get(
            "https://openlibrary.org/search.json",
            params={"q": query, "limit": 3},
            timeout=5,
        )
        data = resp.json().get("docs", [])
        if not data:
            return "No catalog records found."
        return "\n".join(
            f"{d.get('title')} by {d.get('author_name', ['Unknown'])[0]}"
            for d in data
        )
    except Exception:
        # Narrowed from a bare `except:` (which would also trap
        # KeyboardInterrupt/SystemExit). Network errors, bad JSON, and
        # unexpected payload shapes all map to one user-facing message.
        return "Library catalog is currently unreachable."
16
 
17
def librarian_bot(message, history, session_state):
    """Age-gated librarian chatbot turn handler.

    Parameters
    ----------
    message : str
        The user's latest message. Until the session is verified it is
        interpreted as the user's age.
    history : list
        Gradio chat history (unused here; kept for the Gradio signature).
    session_state : dict | None
        Per-session dict tracking:
        - 'verified': bool  -- whether the age gate has been passed
        - 'age': int | None -- the verified age

    Returns
    -------
    tuple[str, dict]
        The bot reply and the (possibly updated) session state.
    """
    # 1. Initialize state on the first turn of a session.
    if session_state is None:
        session_state = {"verified": False, "age": None}

    # 2. ETHICAL GATE: require an age before answering anything.
    if not session_state["verified"]:
        # Strip surrounding whitespace so inputs like " 25 " still verify.
        candidate = message.strip()
        if candidate.isdigit():
            age = int(candidate)
            if age < 13:
                reply = "I'm sorry, but I can only assist users aged 13 and older for safety reasons. Please ask a parent for help."
                return reply, session_state
            session_state["age"] = age
            session_state["verified"] = True
            return f"Age {age} verified. Welcome! How can I help you in the library today?", session_state
        return "Hello! I am your AI Librarian. To comply with safety guidelines, please enter your age as a number to continue.", session_state

    # 3. CORE LOGIC: RAG + LLM (only reached once verified).
    context = search_books(message)

    # Adjust the system-prompt tone to the verified age bracket.
    tone = "simple and educational" if session_state["age"] < 18 else "professional and academic"

    messages = [
        {"role": "system", "content": f"You are a librarian for a {tone} audience. Use the context to answer."},
        {"role": "user", "content": f"Context: {context}\nQuestion: {message}"},
    ]

    try:
        response = client.chat_completion(messages=messages, max_tokens=1000)
        return response.choices[0].message.content, session_state
    except Exception as e:
        return f"Error connecting to the stacks: {str(e)}", session_state
57
+
58
# --- Gradio UI wiring (per-tab session state) ---
with gr.Blocks() as demo:
    gr.Markdown("# 📚 Ethical AI Librarian")

    # gr.State gives each browser tab its own independent session dict.
    session = gr.State(None)

    chat_window = gr.Chatbot()
    user_box = gr.Textbox(placeholder="Enter age to start, then ask about books...")

    def _on_submit(text, log, sess):
        # Delegate to the bot, append the exchange to the transcript,
        # and clear the textbox for the next message.
        answer, sess = librarian_bot(text, log, sess)
        log.append((text, answer))
        return "", log, sess

    user_box.submit(_on_submit, [user_box, chat_window, session], [user_box, chat_window, session])

demo.launch()