arjunverma2004 committed on
Commit
8008b94
·
verified ·
1 Parent(s): f493bd5

Update src/streamlit_app.py

Browse files
Files changed (1) hide show
  1. src/streamlit_app.py +53 -44
src/streamlit_app.py CHANGED
@@ -1,37 +1,61 @@
1
  import os
2
- import json
3
  import streamlit as st
4
  import google.generativeai as genai
5
  from dotenv import load_dotenv
 
6
 
7
  # --- CONFIGURATION ---
8
- # Load environment variables from .env file for local development
9
  load_dotenv()
10
 
11
- # Configure the Gemini API key
12
  try:
13
  genai.configure(api_key=os.getenv("GEMINI_API_KEY"))
14
- except AttributeError:
15
- st.error("⚠️ Gemini API key not found. Please set it in your secrets.")
16
- st.stop() # Halts execution if no key is found
17
-
18
- # --- 1. CONTEXT PROVIDER (Simulated Notion) ---
19
- # Use st.cache_data to load the database only once
20
-
21
- @st.cache_data
22
- def load_mock_db():
23
- """Loads the mock database from the JSON file."""
24
- # Get the absolute path to the directory where this script is located
25
- script_dir = os.path.dirname(os.path.abspath(__file__))
26
- # Join the script's directory path with the filename
27
- file_path = os.path.join(script_dir, 'mock-notion-db.json')
28
- with open(file_path, 'r') as f:
29
- return json.load(f)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
30
 
31
- notion_data = load_mock_db()
 
32
 
33
  def get_context(query: str) -> str | None:
34
- """Finds the most relevant context using keyword matching."""
 
 
 
35
  query_words = set(query.lower().split())
36
  best_match = None
37
  max_score = 0
@@ -44,16 +68,14 @@ def get_context(query: str) -> str | None:
44
  best_match = item
45
  return best_match["content"] if best_match else None
46
 
47
- # --- 2. LLM PROVIDER (Gemini) ---
48
  model = genai.GenerativeModel('gemini-1.5-flash')
49
 
50
  def generate_response(query: str, context: str) -> str:
51
- """Generates a response using the Gemini model with provided context."""
52
  prompt = f"""
53
- You are a helpful and friendly campus assistant chatbot named Campus Helper Bot.
54
  Use the following piece of context to answer the user's question.
55
- If the context doesn't contain the answer, state that you don't have information on that topic. Do not make up information.
56
- Keep your answer concise and clear.
57
 
58
  Context: "{context or 'No context available.'}"
59
  Question: "{query}"
@@ -63,43 +85,30 @@ def generate_response(query: str, context: str) -> str:
63
  response = model.generate_content(prompt)
64
  return response.text
65
  except Exception as e:
66
- print(f"Error generating response: {e}")
67
- return "Sorry, I'm having trouble connecting right now. Please try again later."
68
-
69
- # --- 3. STREAMLIT UI AND CHAT LOGIC ---
70
 
71
- # Set page title and icon
72
  st.set_page_config(page_title="Campus Helper Bot", page_icon="🤖")
73
-
74
- # Display header
75
  st.title("🤖 Campus Helper Bot")
76
- st.caption("Your AI-powered guide to campus information")
77
 
78
- # Initialize chat history in session state if it doesn't exist
79
  if "messages" not in st.session_state:
80
  st.session_state.messages = [
81
- {"role": "assistant", "content": "Hello! How can I help you with campus information today?"}
82
  ]
83
 
84
- # Display past messages from session state
85
  for message in st.session_state.messages:
86
  with st.chat_message(message["role"]):
87
  st.markdown(message["content"])
88
 
89
- # Main chat input logic
90
  if prompt := st.chat_input("Ask about fee deadlines, scholarships, etc."):
91
- # Add user message to session state and display it
92
  st.session_state.messages.append({"role": "user", "content": prompt})
93
  with st.chat_message("user"):
94
  st.markdown(prompt)
95
 
96
- # Get and display bot response
97
  with st.chat_message("assistant"):
98
- with st.spinner("Thinking..."):
99
- # 1. Retrieve context
100
  context = get_context(prompt)
101
- # 2. Generate response
102
  response = generate_response(prompt, context)
103
- # 3. Display response and add to session state
104
  st.markdown(response)
105
  st.session_state.messages.append({"role": "assistant", "content": response})
 
1
  import os
 
2
  import streamlit as st
3
  import google.generativeai as genai
4
  from dotenv import load_dotenv
5
+ from notion_client import Client
6
 
7
# --- CONFIGURATION ---
load_dotenv()

# Configure APIs.
# NOTE: os.getenv returns None (it does not raise) when a variable is missing,
# and neither genai.configure(api_key=None) nor Client(auth=None) is guaranteed
# to raise at call time — so a try/except around them cannot reliably detect a
# missing key. Validate the values explicitly instead.
GEMINI_API_KEY = os.getenv("GEMINI_API_KEY")
NOTION_KEY = os.getenv("NOTION_KEY")
NOTION_DATABASE_ID = os.getenv("NOTION_DATABASE_ID")

if not (GEMINI_API_KEY and NOTION_KEY and NOTION_DATABASE_ID):
    st.error("⚠️ API keys or Database ID not found. Please set them in your secrets.")
    st.stop()  # Halt the script run; nothing below is safe without credentials.

genai.configure(api_key=GEMINI_API_KEY)
notion = Client(auth=NOTION_KEY)
18
+
19
# --- 1. CONTEXT PROVIDER (Live from Notion) ---
@st.cache_data(ttl=600)  # Cache the data for 10 minutes
def fetch_notion_database():
    """Fetch the Notion database and normalize each page into a plain dict.

    Returns:
        list[dict]: one ``{"id", "keywords", "content"}`` entry per page, or
        an empty list when the query fails (the error is shown in the UI).
    """
    try:
        response = notion.databases.query(database_id=NOTION_DATABASE_ID)
        results = []
        for page in response.get("results", []):
            properties = page.get("properties", {})

            # Each Notion property payload is a LIST of rich-text fragments.
            topic_prop = properties.get("Topic", {}).get("title", [])
            content_prop = properties.get("Content", {}).get("rich_text", [])
            keywords_prop = properties.get("Keywords", {}).get("rich_text", [])

            # Join ALL fragments: Notion splits long or styled text into
            # multiple fragments, so reading only [0] would truncate it.
            topic = "".join(t["plain_text"] for t in topic_prop) if topic_prop else "No Topic"
            content = "".join(t["plain_text"] for t in content_prop)
            keywords_str = "".join(t["plain_text"] for t in keywords_prop)

            results.append({
                "id": topic.lower().replace(" ", "-"),
                # Guard the empty cell: "".split(',') yields [''], which would
                # otherwise inject a bogus empty keyword.
                "keywords": [k.strip() for k in keywords_str.split(',')] if keywords_str else [],
                "content": content,
            })
        st.success("Successfully connected to Notion!")
        return results
    except Exception as e:
        # Boundary-level catch: any API/parse failure degrades to "no data"
        # and is reported in the UI instead of crashing the app.
        st.error(f"Failed to connect to Notion: {e}")
        return []
50
 
51
# Fetch the data once per script run (results cached by st.cache_data above).
notion_data = fetch_notion_database()
53
 
54
  def get_context(query: str) -> str | None:
55
+ """Finds the most relevant context from the fetched Notion data."""
56
+ if not notion_data:
57
+ return None
58
+
59
  query_words = set(query.lower().split())
60
  best_match = None
61
  max_score = 0
 
68
  best_match = item
69
  return best_match["content"] if best_match else None
70
 
71
+ # --- 2. LLM PROVIDER (Gemini - No changes here) ---
72
  model = genai.GenerativeModel('gemini-1.5-flash')
73
 
74
  def generate_response(query: str, context: str) -> str:
 
75
  prompt = f"""
76
+ You are a helpful and friendly campus assistant chatbot.
77
  Use the following piece of context to answer the user's question.
78
+ If the context doesn't contain the answer, state that you don't have information on that topic.
 
79
 
80
  Context: "{context or 'No context available.'}"
81
  Question: "{query}"
 
85
  response = model.generate_content(prompt)
86
  return response.text
87
  except Exception as e:
88
+ return f"Error generating response: {e}"
 
 
 
89
 
90
# --- 3. STREAMLIT UI (No changes here) ---
st.set_page_config(page_title="Campus Helper Bot", page_icon="🤖")

st.title("🤖 Campus Helper Bot")
st.caption("Your AI-powered guide, now connected to Notion!")

# Seed the conversation with a greeting on the first run of the session.
if "messages" not in st.session_state:
    greeting = "Hello! I'm now connected to a live Notion database. How can I help?"
    st.session_state.messages = [{"role": "assistant", "content": greeting}]

# Replay the conversation so far (Streamlit reruns the script on every input).
for msg in st.session_state.messages:
    with st.chat_message(msg["role"]):
        st.markdown(msg["content"])

user_query = st.chat_input("Ask about fee deadlines, scholarships, etc.")
if user_query:
    # Persist and echo the user's message.
    st.session_state.messages.append({"role": "user", "content": user_query})
    with st.chat_message("user"):
        st.markdown(user_query)

    # Retrieve context, ask the model, then display and persist the answer.
    with st.chat_message("assistant"), st.spinner("Searching Notion..."):
        context = get_context(user_query)
        answer = generate_response(user_query, context)
        st.markdown(answer)
        st.session_state.messages.append({"role": "assistant", "content": answer})