# ML Galaxy — a Streamlit chat UI backed by Google's Gemini-Pro model.
import os

import streamlit as st
# These two imports were commented out while load_dotenv() and gen_ai.*
# are still called below — that raised NameError at startup (the build
# error). They must be active for the app to run.
from dotenv import load_dotenv
import google.generativeai as gen_ai

# Load variables from a local .env file (expects GOOGLE_API_KEY) into
# the process environment so os.getenv can see them.
load_dotenv()
# Basic Streamlit page chrome: browser-tab title, favicon, and layout.
_PAGE_SETTINGS = {
    "page_title": "ML Galaxy!",
    "page_icon": ":brain:",  # emoji rendered as the favicon
    "layout": "centered",
}
st.set_page_config(**_PAGE_SETTINGS)
# Pull the API key from the environment; abort this script run early when
# it is missing or empty (falsy), showing the problem in the UI instead.
if not (GOOGLE_API_KEY := os.getenv("GOOGLE_API_KEY")):
    st.error("API key not found! Please set the GOOGLE_API_KEY in your .env file.")
    st.stop()
# Wire up the Gemini-Pro client. Any failure (bad key, network, unknown
# model name) is surfaced in the UI and halts the script run.
try:
    gen_ai.configure(api_key=GOOGLE_API_KEY)
    model = gen_ai.GenerativeModel("gemini-pro")
except Exception as exc:
    st.error(f"Error initializing the Gemini-Pro model: {exc}")
    st.stop()
def translate_role_for_streamlit(user_role):
    """Map a Gemini history role to the name Streamlit's chat UI expects.

    Gemini labels its own turns "model"; Streamlit renders those under
    the "assistant" role. Every other role (e.g. "user") passes through
    unchanged.
    """
    if user_role == "model":
        return "assistant"
    return user_role
# Create one chat session per browser session and keep it in
# st.session_state so the conversation survives Streamlit's reruns.
if "chat_session" not in st.session_state:
    try:
        st.session_state.chat_session = model.start_chat(history=[])
    except Exception as exc:
        st.error(f"Error initializing chat session: {exc}")
        st.stop()
# Page heading.
st.title("🤖 ML Galaxy")

# Replay the stored conversation so earlier turns remain visible after
# each rerun of the script.
try:
    for entry in st.session_state.chat_session.history:
        role = translate_role_for_streamlit(entry.role)
        with st.chat_message(role):
            st.markdown(entry.parts[0].text)
except Exception as exc:
    st.error(f"Error displaying chat history: {exc}")
# Chat input box; evaluates falsy until the user submits a message.
user_prompt = st.chat_input("Ask Gemini-Pro...")
if user_prompt:
    # Echo the user's message into the transcript.
    st.chat_message("user").markdown(user_prompt)

    # Forward the prompt to Gemini-Pro and render its reply; surface any
    # API failure inline rather than crashing the app.
    try:
        gemini_response = st.session_state.chat_session.send_message(user_prompt)
        with st.chat_message("assistant"):
            st.markdown(gemini_response.text)
    except Exception as exc:
        st.error(f"Error processing your message: {exc}")