# Conversational AI Data Science Tutor — Streamlit chat app backed by Google Gemini.
import os
import streamlit as st
from langchain.memory import ConversationBufferMemory
from langchain.chains import ConversationChain
from langchain_google_genai import ChatGoogleGenerativeAI
# The Gemini API key is read from the environment (on Hugging Face Spaces,
# set it under Settings → Variables and secrets).
GEMINI_API_KEY = os.getenv("GEMINI_API_KEY")

st.set_page_config(page_title="Conversational AI Data Science Tutor", page_icon="🤖")
st.title("🤖 Conversational AI Data Science Tutor")
st.write("Ask me any **Data Science** related question!")

if not GEMINI_API_KEY:
    st.error("⚠️ GEMINI_API_KEY not found. Please add it in Hugging Face → Settings → Variables and secrets.")
else:
    # Build the LLM chain once per session. Streamlit reruns this entire
    # script on every user interaction, so constructing a fresh
    # ConversationBufferMemory each run would silently erase the model's
    # conversational memory; keeping the chain in session_state preserves it.
    if "conversation" not in st.session_state:
        llm = ChatGoogleGenerativeAI(
            model="gemini-1.5-pro",
            google_api_key=GEMINI_API_KEY,
        )
        st.session_state.conversation = ConversationChain(
            llm=llm,
            memory=ConversationBufferMemory(),
            verbose=False,
        )
    conversation = st.session_state.conversation

    # Displayed transcript, also persisted across reruns in session_state.
    if "messages" not in st.session_state:
        st.session_state.messages = []

    # Replay the prior transcript so the UI shows the whole conversation.
    for msg in st.session_state.messages:
        st.chat_message(msg["role"]).markdown(msg["content"])

    # On a new prompt: echo it, query the chain, then render and store the reply.
    if prompt := st.chat_input("Ask a data science question..."):
        st.session_state.messages.append({"role": "user", "content": prompt})
        st.chat_message("user").markdown(prompt)

        response = conversation.predict(input=prompt)

        st.session_state.messages.append({"role": "assistant", "content": response})
        st.chat_message("assistant").markdown(response)