Mithu96 commited on
Commit
4cb4ae7
·
verified ·
1 Parent(s): 68c1344

Create app.py

Browse files
Files changed (1) hide show
  1. app.py +61 -0
app.py ADDED
@@ -0,0 +1,61 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import streamlit as st
2
+ import os
3
+ import google.generativeai as genai
4
+
5
st.title("CHATBOT")

# SECURITY: the original code committed a hard-coded Google API key to the
# repository. That key is compromised and must be revoked/rotated. Read the
# key from the environment instead and stop the app with a clear message
# when it is missing.
api_key = os.environ.get("GOOGLE_API_KEY")
if not api_key:
    st.error("GOOGLE_API_KEY is not set. Export it in the environment before starting the app.")
    st.stop()
genai.configure(api_key=api_key)

# Select the model used to answer every query.
model = genai.GenerativeModel('gemini-pro')

# Initialize chat history once per browser session, seeded with a greeting.
if "messages" not in st.session_state:
    st.session_state.messages = [
        {
            "role": "assistant",
            "content": "Ask me Anything",
        }
    ]

# Replay the stored conversation on every Streamlit rerun so the full
# history stays visible above the input box.
for message in st.session_state.messages:
    with st.chat_message(message["role"]):
        st.markdown(message["content"])
28
def llm_function(query):
    """Send *query* to the Gemini model, render the reply, and persist both
    messages to the session chat history.

    Args:
        query: The raw text the user typed into the chat input.

    Side effects:
        Renders an assistant chat bubble and appends two entries (user,
        assistant) to ``st.session_state.messages``.
    """
    try:
        response = model.generate_content(query)
        # NOTE(review): ``response.text`` raises when the prompt/response is
        # blocked by safety filters — surface that as a UI error instead of
        # crashing the whole app.
        reply = response.text
    except Exception as exc:
        st.error(f"Generation failed: {exc}")
        return

    # Display the assistant's reply immediately (history replay happens on
    # the next rerun).
    with st.chat_message("assistant"):
        st.markdown(reply)

    # Store the user message.
    st.session_state.messages.append(
        {
            "role": "user",
            "content": query,
        }
    )

    # Store the assistant reply (the original comment here was a copy-paste
    # of "Storing the User Message", which was wrong).
    st.session_state.messages.append(
        {
            "role": "assistant",
            "content": reply,
        }
    )
51
+
52
# Read the next user message from the chat box pinned to the bottom of
# the page; ``st.chat_input`` returns None until the user submits text.
user_query = st.chat_input("What's up?")

if user_query:
    # Echo the user's message into the conversation view.
    with st.chat_message("user"):
        st.markdown(user_query)

    # Generate and render the assistant's reply.
    llm_function(user_query)