|
|
import streamlit as st |
|
|
import google.generativeai as genai |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# Gemini API key read from Streamlit secrets; defaults to "" when unset so
# the guard further down can show a setup hint instead of raising KeyError.
gemini_api_key = st.secrets.get("GEN_API_KEY", "")
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# Page chrome. NOTE: st.set_page_config must be the first Streamlit command
# executed on each run, so it stays ahead of every other st.* call.
st.set_page_config(page_title="Academic Tutor AI", layout="wide")


st.title("📚 Academic Tutor AI")


st.write("Ask questions about your courses and get clear explanations, examples, and study tips.")
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# Hard stop with a setup hint when no API key is configured — nothing below
# can work without credentials.
if not gemini_api_key:


    st.error("⚠️ Please set your 'GEN_API_KEY' in Streamlit secrets.")


    st.stop()




# Authenticate the google-generativeai client; all later genai.* calls reuse this.
genai.configure(api_key=gemini_api_key)
|
|
|
|
|
|
|
|
# Discover which Gemini models this key can use for text generation.
# list_models() performs a network call and can raise (invalid key, quota,
# connectivity) — surface that as a friendly error instead of a traceback.
try:
    available_models = [
        m.name
        for m in genai.list_models()
        if "generateContent" in m.supported_generation_methods
    ]
except Exception as e:
    st.error(f"⚠️ Could not list Gemini models: {e}")
    st.stop()


if not available_models:


    st.error("⚠️ No Gemini models available for your API key.")


    st.stop()
|
|
|
|
|
|
|
|
# Forget a remembered model choice that the API no longer offers, so the
# selectbox below never points at an invalid entry.
if "model" in st.session_state:
    if st.session_state["model"] not in available_models:
        del st.session_state["model"]


# Sidebar picker for which Gemini model backs the tutor.
model = st.sidebar.selectbox(
    "Model",
    available_models,
    index=0,
    help="Choose which AI model to use. Most users can keep the default model.",
)
|
|
|
|
|
|
|
|
# (Re)create the Gemini chat session on first run or whenever the user
# switches models in the sidebar.
needs_new_chat = (
    "gemini_chat" not in st.session_state
    or st.session_state.get("model") != model
)
if needs_new_chat:
    st.session_state.model = model
    try:
        tutor_model = genai.GenerativeModel(model)
        st.session_state.gemini_chat = tutor_model.start_chat(history=[])
    except Exception as e:
        # Surface initialization failures in the UI and halt this run.
        st.error(f"⚠️ Could not initialize Gemini model: {e}")
        st.stop()
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# Default persona for the assistant; editable from the sidebar.
_default_tutor_prompt = (
    "You are a friendly academic tutor for college students. "
    "Provide clear explanations, examples, and study tips. "
    "Encourage understanding rather than just giving answers."
)
system_prompt = st.sidebar.text_area(
    "System Prompt",
    _default_tutor_prompt,
    help="This defines how the AI behaves. You can customize it if you want the AI to act differently.",
)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# Persistent UI transcript: a list of {"role": ..., "content": ...} dicts
# that survives Streamlit reruns via session state.
if "messages" not in st.session_state:


    st.session_state.messages = []


# Sidebar control that wipes the transcript and starts a fresh Gemini chat.
if st.sidebar.button("Reset Conversation"):


    st.session_state.messages = []


    gemini_model = genai.GenerativeModel(model)


    st.session_state.gemini_chat = gemini_model.start_chat(history=[])


    # st.experimental_rerun() was removed in Streamlit 1.27+ and now raises
    # AttributeError; st.rerun() is the supported replacement.
    st.rerun()
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# Re-render the stored transcript so the conversation persists across reruns.
for past_msg in st.session_state.messages:
    with st.chat_message(past_msg["role"]):
        st.markdown(past_msg["content"])
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# Chat input box pinned to the bottom of the page; returns None until the
# user submits a message.
user_input = st.chat_input("Type your academic question here (e.g., 'Explain Bayes' Theorem with an example.')")


if user_input:

    # Echo the user's message immediately and record it in the transcript.
    st.chat_message("user").markdown(user_input)


    st.session_state.messages.append({"role": "user", "content": user_input})


    try:


        with st.spinner("🤔 Thinking..."):

            # Prepend the system prompt on every turn; the Gemini chat
            # session object keeps the running conversation history itself.
            full_input = f"{system_prompt}\n\nUser: {user_input}"


            resp = st.session_state.gemini_chat.send_message(full_input)


            bot_text = resp.text


    except Exception as e:

        # Degrade gracefully on API errors instead of crashing the app.
        bot_text = f"⚠️ Gemini could not respond right now. Please try again. ({e})"


    with st.chat_message("assistant"):


        st.markdown(bot_text)


    st.session_state.messages.append({"role": "assistant", "content": bot_text})


    # st.experimental_rerun() was removed in Streamlit 1.27+ and now raises
    # AttributeError; st.rerun() is the supported replacement. The rerun
    # simply redraws the page from session state.
    st.rerun()
|
|
|