tayawelba commited on
Commit
5d63ac2
·
verified ·
1 Parent(s): fa40f1e

Upload 2 files

Browse files
Files changed (2) hide show
  1. .env +1 -0
  2. app.py +50 -0
.env ADDED
@@ -0,0 +1 @@
 
 
1
+ # SECURITY: a real API key was committed here — it is publicly exposed and must be revoked/rotated immediately. Never commit .env files; add .env to .gitignore.
+ GOOGLE_API_KEY = "your-google-api-key-here"
app.py ADDED
@@ -0,0 +1,50 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
"""Module bootstrap: configure the Gemini SDK and open a chat session.

Reads GOOGLE_API_KEY from a .env file (via python-dotenv) and creates a
module-level ``chat`` session that the rest of the app sends messages to.
"""

import os

import google.generativeai as genai
import streamlit as st
from dotenv import load_dotenv

# Load .env into the process environment BEFORE reading GOOGLE_API_KEY.
load_dotenv()

# Configure the Gemini client with the key from the environment.
# NOTE(review): os.getenv returns None when the key is missing, which only
# fails later at request time — consider failing fast here instead.
genai.configure(api_key=os.getenv("GOOGLE_API_KEY"))

# Load the Gemini chat model and start a conversation with an empty history.
# NOTE(review): "gemini-pro" is a legacy model alias — confirm it is still
# served by the API version in use.
model = genai.GenerativeModel("gemini-pro")
chat = model.start_chat(history=[])
19
+
20
def get_gemini_response(question):
    """Send *question* to the module-level chat session.

    Returns the streamed response iterable (chunks arrive incrementally
    because ``stream=True``).
    """
    return chat.send_message(question, stream=True)
23
+
24
+
25
# --- Streamlit app ---

st.set_page_config(page_title="Q&A DEMO")
st.header("GEMINI LLM APPLICATION")

# Start a chat-history list in session state if there isn't one yet, so the
# transcript survives Streamlit reruns.
if 'chat_history' not in st.session_state:
    st.session_state['chat_history'] = []

# Renamed from `input` to avoid shadowing the builtin; the widget key
# "input" is kept so existing session state keeps working.
user_input = st.text_input("Input: ", key="input")
submit = st.button("Ask the question")

if submit and user_input:
    response = get_gemini_response(user_input)
    # Record the user's query in the chat history.
    st.session_state['chat_history'].append(("You ", user_input))
    st.subheader("The response is ")
    # The reply is streamed: render and record each chunk as it arrives.
    for chunk in response:
        st.write(chunk.text)
        st.session_state['chat_history'].append(("chatbot", chunk.text))

# Always show the accumulated transcript, including turns from prior reruns.
st.subheader("the chat history is")
for role, text in st.session_state['chat_history']:
    st.write(f"{role}: {text}")