import os

import streamlit as st
import google.generativeai as genai
from dotenv import load_dotenv
from notion_client import Client

# --- CONFIGURATION ---
load_dotenv()

# Fail fast with a clear message when any required secret is missing, instead
# of relying on whatever AttributeError/TypeError the client libraries might
# (or might not) raise when handed a None credential.
GEMINI_API_KEY = os.getenv("GEMINI_API_KEY")
NOTION_KEY = os.getenv("NOTION_KEY")
NOTION_DATABASE_ID = os.getenv("NOTION_DATABASE_ID")

if not all((GEMINI_API_KEY, NOTION_KEY, NOTION_DATABASE_ID)):
    st.error("⚠️ API keys or Database ID not found. Please set them in your secrets.")
    st.stop()

genai.configure(api_key=GEMINI_API_KEY)
notion = Client(auth=NOTION_KEY)


# --- 1. CONTEXT PROVIDER (Live from Notion) ---
@st.cache_data(ttl=600)  # Cache the data for 10 minutes
def fetch_notion_database():
    """Fetch and parse the Notion database into topic records.

    Returns:
        list[dict]: one dict per page with keys:
            "id"       - slugified topic title,
            "keywords" - lower-cased, stripped, non-empty keyword list,
            "content"  - plain-text content of the page's Content property.
        Returns an empty list when the Notion API call fails.
    """
    try:
        results = []
        start_cursor = None
        # Notion returns at most 100 results per request; follow the cursor
        # so databases larger than one page are fully loaded.
        while True:
            response = notion.databases.query(
                database_id=NOTION_DATABASE_ID,
                **({"start_cursor": start_cursor} if start_cursor else {}),
            )
            for page in response.get("results", []):
                properties = page.get("properties", {})

                # Extract data from Notion properties.
                topic_prop = properties.get("Topic", {}).get("title", [])
                content_prop = properties.get("Content", {}).get("rich_text", [])
                keywords_prop = properties.get("Keywords", {}).get("rich_text", [])

                # Safely get the plain text content (any property may be empty).
                topic = topic_prop[0]["plain_text"] if topic_prop else "No Topic"
                content = content_prop[0]["plain_text"] if content_prop else ""
                keywords_str = keywords_prop[0]["plain_text"] if keywords_prop else ""

                # Format into the structure the app expects. Keywords are
                # lower-cased so matching in get_context (which lower-cases the
                # query) is case-insensitive, and empty entries are dropped so
                # "" never counts as a keyword when the property is blank.
                results.append({
                    "id": topic.lower().replace(" ", "-"),
                    "keywords": [
                        k.strip().lower()
                        for k in keywords_str.split(",")
                        if k.strip()
                    ],
                    "content": content,
                })

            if not response.get("has_more"):
                return results
            start_cursor = response.get("next_cursor")
    except Exception as e:
        st.error(f"Failed to connect to Notion: {e}")
        return []


# Fetch the data (served from Streamlit's cache within the TTL window).
notion_data = fetch_notion_database()


def get_context(query: str) -> str | None:
    """Find the most relevant context from the fetched Notion data.

    Scores each topic by how many of its keywords appear as whole words in
    the lower-cased query; ties keep the earlier topic. Returns the matching
    topic's content, or None when nothing scores above zero (or no data).
    """
    if not notion_data:
        return None

    query_words = set(query.lower().split())
    best_match = None
    max_score = 0

    for item in notion_data:
        keywords = set(item.get("keywords", []))
        score = len(query_words.intersection(keywords))
        if score > max_score:
            max_score = score
            best_match = item

    return best_match["content"] if best_match else None


# --- 2. LLM PROVIDER (Gemini) ---
model = genai.GenerativeModel('gemini-1.5-flash')


def generate_response(query: str, context: str | None) -> str:
    """Ask Gemini to answer *query* grounded in *context*.

    Returns the model's text, or an error message string if the API call
    fails (the UI displays whatever comes back, so we never raise).
    """
    prompt = f"""
    You are a helpful and friendly campus assistant chatbot.
    Use the following piece of context to answer the user's question.
    If the context doesn't contain the answer, state that you don't have information on that topic.

    Context: "{context or 'No context available.'}"

    Question: "{query}"

    Answer:
    """
    try:
        response = model.generate_content(prompt)
        return response.text
    except Exception as e:
        return f"Error generating response: {e}"


# --- 3. STREAMLIT UI ---
st.set_page_config(page_title="Campus Helper Bot", page_icon="🤖")
st.title("🤖 Campus Helper Bot")
st.caption("Your AI-powered guide!")

if "messages" not in st.session_state:
    st.session_state.messages = [
        {"role": "assistant", "content": "Hello! I'm now connected to a live Notion database. How can I help?"}
    ]

# Replay the conversation so far (Streamlit re-runs the script on each event).
for message in st.session_state.messages:
    with st.chat_message(message["role"]):
        st.markdown(message["content"])

# Main chat input logic
if prompt := st.chat_input("Ask about fee deadlines, scholarships, etc."):
    # Add user message to session state and display it
    st.session_state.messages.append({"role": "user", "content": prompt})
    with st.chat_message("user"):
        st.markdown(prompt)

    # Get and display bot response
    with st.chat_message("assistant"):
        with st.spinner("Thinking..."):
            # 1. Retrieve context
            context = get_context(prompt)
            # 2. Generate response
            response = generate_response(prompt, context)
            # 3. Display response INSIDE the bubble
            st.markdown(response)

    # 4. Add the response to the history for the next re-run
    st.session_state.messages.append({"role": "assistant", "content": response})