gk2410 committed on
Commit
4976770
Β·
verified Β·
1 Parent(s): 6e14405

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +59 -32
app.py CHANGED
@@ -1,21 +1,26 @@
1
  import os, requests, gradio as gr
2
  from huggingface_hub import InferenceClient
3
 
 
4
  client = InferenceClient("meta-llama/Llama-3.3-70B-Instruct", token=os.getenv("HF_TOKEN"))
5
 
6
  def safe_int(val, default=0):
 
7
  try:
8
  return int(val) if val is not None else default
9
  except (ValueError, TypeError):
10
  return default
11
 
12
  def fetch_catalog_detailed(query, limit):
13
- """Fetches and normalizes data based on user limit."""
 
 
14
  try:
15
- # Use the limit provided by the slider
16
  url = f"https://openlibrary.org/search.json?q={query}&limit={limit}"
17
- resp = requests.get(url, timeout=5).json()
18
- docs = resp.get("docs", [])
 
19
 
20
  books = []
21
  for d in docs:
@@ -26,85 +31,107 @@ def fetch_catalog_detailed(query, limit):
26
  "editions": safe_int(d.get("edition_count"), 0)
27
  })
28
  return books
29
- except:
 
30
  return []
31
 
32
- # --- FUNCTION ARGUMENTS MUST MATCH gr.submit(inputs=[...]) ORDER ---
33
  def librarian_agent(message, history, session_state, sort_by, num_results):
 
 
 
 
 
 
 
34
  if session_state is None:
35
  session_state = {"verified": False, "age": None}
36
 
37
- # 1. Gate Logic
38
  if not session_state["verified"]:
39
  age_digits = "".join(filter(str.isdigit, message))
40
  if age_digits:
41
  session_state["age"] = int(age_digits)
42
  session_state["verified"] = True
43
- reply = "βœ… Verified. What topic are we researching today?"
44
  else:
45
- reply = "πŸ‘‹ Please enter your age to start."
46
  history.append({"role": "assistant", "content": reply})
47
  return history, session_state
48
 
49
- # 2. Retrieval with the Slider Value
50
  raw_books = fetch_catalog_detailed(message, int(num_results))
51
 
52
- # 3. Sorting Logic
53
  if sort_by == "Newest First":
54
  raw_books = sorted(raw_books, key=lambda x: x['year'], reverse=True)
55
  elif sort_by == "Popularity":
56
  raw_books = sorted(raw_books, key=lambda x: x['editions'], reverse=True)
57
 
58
- # 4. Context Preparation
59
  catalog_summary = ""
60
  for b in raw_books:
61
- year_str = b['year'] if b['year'] > 1000 else "Unknown"
62
- catalog_summary += f"BOOK: {b['title']} | BY: {b['author']} | YEAR: {year_str} | EDITIONS: {b['editions']}\n"
63
 
64
- # 5. LLM Synthesis
65
  llm_messages = [
66
  {
67
  "role": "system",
68
  "content": (
69
- f"You are a librarian for a {session_state['age']} year old. "
70
- "Output ONLY a Markdown table: | Book & Author | Year | Editions | Summary |."
 
 
71
  )
72
  },
73
- {"role": "user", "content": f"Catalog:\n{catalog_summary}\n\nUser: {message}"}
74
  ]
75
 
76
  try:
77
  output = client.chat_completion(messages=llm_messages, max_tokens=1500)
78
  bot_res = output.choices[0].message.content
79
  except Exception as e:
80
- bot_res = f"Service error: {str(e)}"
81
 
82
  history.append({"role": "user", "content": message})
83
  history.append({"role": "assistant", "content": bot_res})
84
  return history, session_state
85
 
86
- # --- GRADIO UI ---
87
- with gr.Blocks() as demo:
88
- gr.Markdown("## πŸ“š AI Librarian Agent")
89
  state = gr.State(None)
90
 
91
  with gr.Row():
92
  # Input 4
93
- sort_option = gr.Dropdown(["Relevance", "Newest First", "Popularity"], label="Sort", value="Relevance")
 
 
 
 
94
  # Input 5
95
- result_count = gr.Slider(minimum=3, maximum=15, step=1, value=5, label="Count")
 
 
 
 
 
 
96
 
97
  # Input 2
98
- chatbot = gr.Chatbot(label="Catalog", value=[{"role": "assistant", "content": "πŸ‘‹ Enter your **age**."}])
 
 
 
99
  # Input 1
100
- msg = gr.Textbox(label="Query", placeholder="Search...")
101
 
102
- # THE CRITICAL STEP: Mapping UI components to function arguments
103
- # 1: msg -> message
104
- # 2: chatbot -> history
105
- # 3: state -> session_state
106
- # 4: sort_option -> sort_by
107
- # 5: result_count -> num_results
108
  msg.submit(
109
  fn=librarian_agent,
110
  inputs=[msg, chatbot, state, sort_option, result_count],
 
1
  import os, requests, gradio as gr
2
  from huggingface_hub import InferenceClient
3
 
4
+ # Using a high-capacity model for better tabular summaries
5
  client = InferenceClient("meta-llama/Llama-3.3-70B-Instruct", token=os.getenv("HF_TOKEN"))
6
 
7
def safe_int(val, default=0):
    """Best-effort integer coercion.

    Converts *val* to ``int``; returns *default* when *val* is ``None``
    or cannot be converted (e.g. non-numeric strings, lists).
    """
    if val is None:
        return default
    try:
        return int(val)
    except (ValueError, TypeError):
        # Anything unconvertible falls back to the caller-supplied default.
        return default
13
 
14
  def fetch_catalog_detailed(query, limit):
15
+ """
16
+ Retrieves exactly the number of results requested by the user.
17
+ """
18
  try:
19
+ # Crucial: Passing the 'limit' directly to the API
20
  url = f"https://openlibrary.org/search.json?q={query}&limit={limit}"
21
+ response = requests.get(url, timeout=5)
22
+ response.raise_for_status()
23
+ docs = response.json().get("docs", [])
24
 
25
  books = []
26
  for d in docs:
 
31
  "editions": safe_int(d.get("edition_count"), 0)
32
  })
33
  return books
34
+ except Exception as e:
35
+ print(f"API Error: {e}")
36
  return []
37
 
 
38
def librarian_agent(message, history, session_state, sort_by, num_results):
    """Chat handler: age-gate, catalog retrieval, sorting, and LLM synthesis.

    Argument order MUST match the ``inputs`` list wired up in ``msg.submit``
    below: (msg, chatbot, state, sort_option, result_count).

    Returns the updated ``(history, session_state)`` pair.
    """
    # Guard clause: ignore empty submissions entirely.
    if not message:
        return history, session_state

    if session_state is None:
        session_state = {"verified": False, "age": None}

    # --- 1. ETHICAL GATE ---
    # Until verified, every message is scanned for digits and treated as an age.
    if not session_state["verified"]:
        age_digits = "".join(ch for ch in message if ch.isdigit())
        if age_digits:
            session_state["age"] = int(age_digits)
            session_state["verified"] = True
            reply = "βœ… **Verified.** My archives are now open. What subject can I help you with?"
        else:
            reply = "πŸ‘‹ Welcome. To comply with safety guidelines, please enter your **age** to begin."
        history.append({"role": "assistant", "content": reply})
        return history, session_state

    # --- 2. RAG RETRIEVAL (Respecting Slider) ---
    raw_books = fetch_catalog_detailed(message, int(num_results))

    # --- 3. DETERMINISTIC SORTING ---
    # Dispatch table: sort option -> record field ("Relevance" keeps API order).
    sort_field = {"Newest First": "year", "Popularity": "editions"}.get(sort_by)
    if sort_field is not None:
        raw_books = sorted(raw_books, key=lambda rec: rec[sort_field], reverse=True)

    # Prepare data for LLM: one pipe-delimited line per record.
    summary_lines = []
    for rec in raw_books:
        display_year = rec['year'] if rec['year'] > 1000 else "Unknown"
        summary_lines.append(
            f"BOOK: {rec['title']} | AUTHOR: {rec['author']} | YEAR: {display_year} | EDITIONS: {rec['editions']}\n"
        )
    catalog_summary = "".join(summary_lines)

    # --- 4. AGENTIC SYNTHESIS ---
    llm_messages = [
        {
            "role": "system",
            "content": (
                f"You are a professional librarian for a {session_state['age']}-year-old. "
                "You provide high-density information. "
                "Respond ONLY with a Markdown table: | Book & Author | Year | Editions | Summary |."
                "Do not mention internal lists. Summaries must be 1-2 lines of insight."
            )
        },
        {"role": "user", "content": f"INTERNAL CATALOG DATA:\n{catalog_summary}\n\nUSER QUERY: {message}"}
    ]

    try:
        output = client.chat_completion(messages=llm_messages, max_tokens=1500)
        bot_res = output.choices[0].message.content
    except Exception as e:
        # Surface the error in-chat rather than crashing the UI.
        bot_res = f"The library is experiencing technical issues: {str(e)}"

    history.append({"role": "user", "content": message})
    history.append({"role": "assistant", "content": bot_res})
    return history, session_state
99
 
100
+ # --- GRADIO INTERFACE ---
101
+ with gr.Blocks(title="AI Librarian Agent") as demo:
102
+ gr.Markdown("## πŸ“š AI Librarian Agent (RAG Optimized)")
103
  state = gr.State(None)
104
 
105
  with gr.Row():
106
  # Input 4
107
+ sort_option = gr.Dropdown(
108
+ choices=["Relevance", "Newest First", "Popularity"],
109
+ label="Sort Priority",
110
+ value="Relevance"
111
+ )
112
  # Input 5
113
+ result_count = gr.Slider(
114
+ minimum=3,
115
+ maximum=15,
116
+ step=1,
117
+ value=5,
118
+ label="Results to Retrieve"
119
+ )
120
 
121
  # Input 2
122
+ chatbot = gr.Chatbot(
123
+ label="Librarian Consultation",
124
+ value=[{"role": "assistant", "content": "πŸ‘‹ Please enter your **age** to access the library."}]
125
+ )
126
  # Input 1
127
+ msg = gr.Textbox(label="Message", placeholder="Enter age first, then search books...")
128
 
129
+ # INPUT MAPPING CHECK:
130
+ # 1. message (msg)
131
+ # 2. history (chatbot)
132
+ # 3. session_state (state)
133
+ # 4. sort_by (sort_option)
134
+ # 5. num_results (result_count)
135
  msg.submit(
136
  fn=librarian_agent,
137
  inputs=[msg, chatbot, state, sort_option, result_count],