# (Non-code residue from the scraped Hugging Face Space page — "Spaces: Sleeping")
| import streamlit as st | |
| from langchain_google_genai import ChatGoogleGenerativeAI | |
| from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder | |
| from langchain_core.output_parsers import StrOutputParser | |
| from langchain_community.chat_message_histories import SQLChatMessageHistory | |
| from langchain_core.runnables.history import RunnableWithMessageHistory | |
| import os | |
# Read the Gemini API key from the environment instead of hard-coding it.
API_KEY = os.getenv("GOOGLE_API_KEY")

# System persona: keeps the assistant scoped to Data Science topics.
_SYSTEM_PROMPT = """You are a highly knowledgeable and helpful AI assistant specializing in Data Science.
Your primary objective is to assist users with topics related to Data Science, including but not limited to Machine Learning,
Deep Learning, Natural Language Processing (NLP), Computer Vision (CV), Data Engineering, Statistics,
Data Analysis, and related programming techniques in Python, SQL, and relevant tools.
You should provide accurate, well-explained, and relevant answers, ensuring clarity and conciseness.
If a query is outside the scope of Data Science, politely inform the user that you can only answer Data Science-related questions."""

# Prompt layout: system persona, then the accumulated history, then the
# newest human turn (filled in via the "input" variable).
template = ChatPromptTemplate.from_messages(
    [
        ("system", _SYSTEM_PROMPT),
        MessagesPlaceholder(variable_name="chat_history"),
        ("human", "{input}"),
    ]
)

# LCEL pipeline: prompt -> Gemini chat model -> plain string output.
model = ChatGoogleGenerativeAI(api_key=API_KEY, model="models/gemini-2.0-flash")
output = StrOutputParser()
chain = template | model | output
# --- Chat history management ----------------------------------------------
def messages_history(session_id):
    """Return the SQLite-backed message history for *session_id*.

    Each distinct session id gets its own conversation thread inside the
    local ``sqlite.db`` file.
    """
    return SQLChatMessageHistory(
        session_id=session_id,
        connection="sqlite:///sqlite.db",
    )
# Wrap the chain so every invocation loads/stores turns in the per-session
# SQL-backed history.  FIX: the correct keyword is `input_messages_key`
# (plural) — the original `input_message_key` is not a recognized parameter
# of RunnableWithMessageHistory, so the user's input was never wired to the
# "{input}" slot of the prompt as intended.
conversation_chain = RunnableWithMessageHistory(
    chain,
    messages_history,
    input_messages_key="input",
    history_messages_key="chat_history",
)
# --- Streamlit UI enhancements --------------------------------------------
# Page-level configuration; must run before any other Streamlit command.
st.set_page_config(page_title="AI Data Science Chatbot", layout="wide")
# Small CSS override: light page background and rounded chat bubbles.
st.markdown("""
<style>
body { background-color: #f5f5f5; }
.stChatMessage { border-radius: 12px; padding: 10px; }
</style>
""", unsafe_allow_html=True)
# --- Sidebar: branding plus a lightweight "login" -------------------------
with st.sidebar:
    st.image("b48c8274-61df-480f-9cd9-47d697ef03e9.jpg", width=150)  # Optional: Add chatbot logo
    st.title("π€ AI Data Science Chatbot")
    st.markdown("π‘ Ask me anything about Data Science!")
    st.divider()
    st.header("User Login")
    # The user id doubles as the chat-history session id further below.
    user_id = st.text_input("Enter your User ID:", key="user_id_input")
    # Logout wipes all session state and forces a fresh script run.
    if st.button("Logout"):
        st.session_state.clear()
        st.rerun()
# Guard clause: halt the script until a user id has been entered.
if not user_id:
    st.warning("Please enter a User ID to start chatting.")
    st.stop()
# --- Per-user history bootstrap -------------------------------------------
# When the user id changes (first login or a switch), reload that user's
# persisted conversation from the SQLite store into session state.
if "last_user_id" not in st.session_state or st.session_state.last_user_id != user_id:
    st.session_state.last_user_id = user_id
    # FIX: SQLChatMessageHistory yields message types "human"/"ai", but the
    # render loop keys on "user"/"assistant" — without this normalization
    # every restored turn was attributed to the assistant.  Also drops the
    # redundant `chat_history = []` that was immediately overwritten.
    st.session_state.chat_history = [
        ("user" if msg.type == "human" else "assistant", msg.content)
        for msg in messages_history(user_id).messages
    ]
# Page heading, then replay the stored conversation in order.
st.markdown("<h2 style='text-align: center;'>π¬ Chat with the AI Assistant</h2>", unsafe_allow_html=True)
for role, message in st.session_state.chat_history:
    # Anything not flagged as the user is rendered as the assistant.
    speaker = "user" if role == "user" else "assistant"
    label = "**π§βπ» You:**" if speaker == "user" else "**π€ AI:**"
    st.chat_message(speaker).markdown(f"{label} {message}")
# --- User input & AI response ---------------------------------------------
user_input = st.chat_input("Type your message...")
if user_input:
    # Echo the user's turn immediately and remember it for future reruns.
    st.chat_message("user").write(user_input)
    st.session_state.chat_history.append(("user", user_input))
    # Route through the history-aware chain; session_id selects which
    # SQL-backed conversation thread to read and append to.
    answer = conversation_chain.invoke(
        {"input": user_input},
        config={"configurable": {"session_id": user_id}},
    )
    st.session_state.chat_history.append(("assistant", answer))
    st.chat_message("assistant").write(answer)