# Hugging Face Spaces page residue ("Spaces: Sleeping") captured when this
# code was copied out of the Space's web UI; not part of the application code.
# Imports for the Streamlit + Gemini QnA chat application.
from dotenv import load_dotenv
import streamlit as st
import os
import google.generativeai as genai

# Pull environment variables (e.g. google_api_key) from a local .env file.
load_dotenv()

# Configure the Gemini SDK with the key from the environment.
# os.environ[...] raises KeyError if "google_api_key" is not set.
api_key = os.environ["google_api_key"]
genai.configure(api_key=api_key)

# Create the model once at import time and open a chat session.
# start_chat(history=[]) is what gives the conversation multi-turn memory.
model = genai.GenerativeModel('gemini-pro')
chat = model.start_chat(history=[])
def gemini_response(question):
    """Send *question* to the ongoing chat session and return the streamed reply.

    With ``stream=True`` the SDK returns an iterable of response chunks
    rather than a single complete message; the caller iterates over it.
    Conversation memory comes from the module-level ``chat`` session
    (created with ``start_chat``), not from the stream flag.
    """
    return chat.send_message(question, stream=True)
st.set_page_config(page_title="Gemini QnA ChatBot")
st.header("Gemini LLM Application")

# Initialize the session-state chat history exactly once per session.
# FIX: the guard previously checked the misspelled key 'chat_hisotry',
# so it never matched and the history was reset on every Streamlit rerun.
# The key checked must be the same key written below.
if 'chat_history' not in st.session_state:
    st.session_state['chat_history'] = []

# 'user_input' instead of 'input' to avoid shadowing the built-in input().
user_input = st.text_input("Input", key='input')
submit = st.button("Ask the question")

if submit and user_input:
    response = gemini_response(user_input)
    # Record the user's turn before streaming the model's reply.
    st.session_state['chat_history'].append(("You", user_input))
    st.subheader("The Response is:")
    # stream=True yields the reply in chunks; display and record each
    # chunk as it arrives so the history mirrors what was shown.
    for chunk in response:
        st.write(chunk.text)
        st.session_state['chat_history'].append(("Bot", chunk.text))

st.subheader("The Chat History is")
for role, text in st.session_state['chat_history']:
    st.write(f"{role}:{text}")