File size: 1,330 Bytes
8bf2613
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
from dotenv import load_dotenv 
import streamlit as st
import os
import google.generativeai as genai

load_dotenv() 
# Load variables from a local .env file into the process environment.
# NOTE(review): os.environ lookup is case-sensitive on most platforms —
# confirm the .env file defines "google_api_key" exactly (not "GOOGLE_API_KEY").
genai.configure(api_key=os.environ["google_api_key"])
## Build the model client and open a chat session that the whole script shares.


model=genai.GenerativeModel('gemini-pro')

# history=[] starts an empty multi-turn session; the ChatSession object
# accumulates the conversation turns across send_message calls.
chat=model.start_chat(history=[])

def gemini_response(question):
    """Send *question* to the shared chat session and return the streamed reply.

    ``stream=True`` makes ``send_message`` return an iterable of response
    chunks rather than blocking for the full answer; conversation history
    is maintained by the module-level ``chat`` session, not by this flag.
    """
    return chat.send_message(question, stream=True)
st.set_page_config(page_title="Gemini QnA ChatBot")
st.header("Gemini LLM Application")

# Initialize the session-state chat history on the first run only.
# BUG FIX: the original tested the misspelled key 'chat_hisotry', so this
# guard never matched and 'chat_history' was reset to [] on every Streamlit
# rerun, wiping the conversation after each interaction.
if 'chat_history' not in st.session_state:
    st.session_state['chat_history'] = []

# Renamed from `input` to avoid shadowing the builtin; the widget key
# stays 'input' so existing session-state entries keep working.
user_input = st.text_input("Input", key='input')
submit = st.button("Ask the question")

if submit and user_input:
    response = gemini_response(user_input)
    # Record the user's query, then stream the reply chunk by chunk,
    # appending each chunk to the history as it arrives.
    st.session_state['chat_history'].append(("You", user_input))
    st.subheader("The Response is:")
    for chunk in response:
        st.write(chunk.text)
        st.session_state['chat_history'].append(("Bot", chunk.text))

st.subheader("The Chat History is")
for role, text in st.session_state['chat_history']:
    st.write(f"{role}:{text}")