gk2410 committed on
Commit
9fb864c
·
verified ·
1 Parent(s): 6168601

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +19 -17
app.py CHANGED
@@ -18,6 +18,7 @@ def fetch_catalog(query):
18
  return "Search service offline."
19
 
20
  def librarian_agent(message, history, session_state):
 
21
  if session_state is None:
22
  session_state = {"verified": False, "age": None}
23
 
@@ -27,7 +28,7 @@ def librarian_agent(message, history, session_state):
27
  if age_input:
28
  age = int(age_input)
29
  if age < 13:
30
- reply = "πŸ›‘οΈ Safety: You must be 13+ to use this service."
31
  else:
32
  session_state["age"] = age
33
  session_state["verified"] = True
@@ -35,50 +36,51 @@ def librarian_agent(message, history, session_state):
35
  else:
36
  reply = "πŸ‘‹ Please enter your age as a number to start."
37
 
38
- # GRADIO 6 FORMAT: Role/Content Dictionaries
39
  history.append({"role": "user", "content": message})
40
  history.append({"role": "assistant", "content": reply})
41
  return history, session_state
42
 
43
  # --- PHASE 2: RAG PIPELINE ---
44
  catalog_context = fetch_catalog(message)
45
- messages = [
 
 
46
  {"role": "system", "content": f"You are a librarian for a {session_state['age']} year old. Use: {catalog_context}"},
47
  {"role": "user", "content": message}
48
  ]
49
 
50
  try:
51
- output = client.chat_completion(messages=messages, max_tokens=1000)
52
  bot_res = output.choices[0].message.content
53
  except Exception as e:
54
- bot_res = f"Error: {str(e)}"
55
 
56
  history.append({"role": "user", "content": message})
57
  history.append({"role": "assistant", "content": bot_res})
58
  return history, session_state
59
 
60
# --- GRADIO UI ---
# FIX: 'theme' is a gr.Blocks() constructor argument, not a launch() argument.
# Blocks.launch() has no 'theme' keyword and would raise TypeError at startup.
with gr.Blocks(theme=gr.themes.Soft()) as demo:
    gr.Markdown("## 📚 AI Librarian (Gradio 6 Build)")

    # Per-session state; librarian_agent initializes it on first call.
    state = gr.State(None)

    # Initial chat value must follow the {"role": "...", "content": "..."} format.
    initial_msg = [
        {"role": "assistant", "content": "👋 Welcome! Please enter your **age** to begin."}
    ]
    chatbot = gr.Chatbot(label="Librarian Consultation", value=initial_msg, type="messages")

    msg = gr.Textbox(label="Input", placeholder="Enter age (e.g. 25)")
    clear = gr.Button("Clear")

    def respond(user_input, chat_history, current_state):
        """Route one turn through librarian_agent and clear the textbox."""
        updated_history, updated_state = librarian_agent(user_input, chat_history, current_state)
        return "", updated_history, updated_state

    msg.submit(respond, [msg, chatbot, state], [msg, chatbot, state])
    # Reset textbox, chat history (back to the greeting), and session state.
    clear.click(lambda: (None, initial_msg, None), None, [msg, chatbot, state])

if __name__ == "__main__":
    demo.launch()
 
18
  return "Search service offline."
19
 
20
  def librarian_agent(message, history, session_state):
21
+ # Standardize state initialization
22
  if session_state is None:
23
  session_state = {"verified": False, "age": None}
24
 
 
28
  if age_input:
29
  age = int(age_input)
30
  if age < 13:
31
+ reply = "πŸ›‘οΈ Safety: Access restricted for users under 13."
32
  else:
33
  session_state["age"] = age
34
  session_state["verified"] = True
 
36
  else:
37
  reply = "πŸ‘‹ Please enter your age as a number to start."
38
 
39
+ # history is a list of {"role": "...", "content": "..."}
40
  history.append({"role": "user", "content": message})
41
  history.append({"role": "assistant", "content": reply})
42
  return history, session_state
43
 
44
  # --- PHASE 2: RAG PIPELINE ---
45
  catalog_context = fetch_catalog(message)
46
+
47
+ # Building the internal LLM prompt
48
+ llm_messages = [
49
  {"role": "system", "content": f"You are a librarian for a {session_state['age']} year old. Use: {catalog_context}"},
50
  {"role": "user", "content": message}
51
  ]
52
 
53
  try:
54
+ output = client.chat_completion(messages=llm_messages, max_tokens=1000)
55
  bot_res = output.choices[0].message.content
56
  except Exception as e:
57
+ bot_res = f"Service Error: {str(e)}"
58
 
59
  history.append({"role": "user", "content": message})
60
  history.append({"role": "assistant", "content": bot_res})
61
  return history, session_state
62
 
63
# --- UI BLOCK (Minimalist) ---
with gr.Blocks() as demo:
    gr.Markdown("## 📚 AI Librarian Agent")

    # Per-session state; librarian_agent initializes it on first call.
    state = gr.State(None)

    # Seed the chat with a greeting in the {"role": "...", "content": "..."}
    # dict format. 'type' is omitted so Gradio infers the message format.
    chatbot = gr.Chatbot(
        label="Librarian Consultation",
        value=[{"role": "assistant", "content": "👋 Welcome! Please enter your **age** to begin."}]
    )

    msg = gr.Textbox(label="Input", placeholder="Enter age (e.g. 25)")

    def respond(user_input, chat_history, current_state):
        """Route one turn through librarian_agent and clear the textbox."""
        updated_history, updated_state = librarian_agent(user_input, chat_history, current_state)
        return "", updated_history, updated_state

    msg.submit(respond, [msg, chatbot, state], [msg, chatbot, state])

# FIX: guard the launch so importing this module (e.g. from tests or another
# app) does not start the server as a side effect.
if __name__ == "__main__":
    demo.launch()