nedum committed on
Commit
3cb9827
·
verified ·
1 Parent(s): bacda12
Files changed (1) hide show
  1. app.py +44 -0
app.py ADDED
@@ -0,0 +1,44 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
# Streamlit chat UI backed by the Hugging Face Inference API
# (OpenAI-compatible endpoint) running Google Gemma.
from openai import OpenAI
import streamlit as st
import os
from dotenv import load_dotenv

# Pull HUGGINGFACEHUB_API_TOKEN (and any other settings) from a local .env file.
load_dotenv()

# Initialize the OpenAI-compatible client pointed at the HF inference endpoint.
client = OpenAI(
    base_url="https://api-inference.huggingface.co/v1",
    api_key=os.environ.get('HUGGINGFACEHUB_API_TOKEN'),  # set in .env; never hard-code
)

st.title("💬 Chatbot")
st.caption("🚀 A streamlit chatbot powered by Google Gemma")

# Initialize chat history once per browser session.
if 'messages' not in st.session_state:
    st.session_state['messages'] = []

# Replay the conversation so far — Streamlit reruns the whole script on
# every interaction, so history must be redrawn each time.
for message in st.session_state.messages:
    st.chat_message(message["role"]).write(message["content"])

# React to user input.
if prompt := st.chat_input():
    # Display the user's message and add it to the history BEFORE the API
    # call so the model sees the full conversation including this turn.
    st.chat_message("user").write(prompt)
    st.session_state.messages.append({"role": "user", "content": prompt})

    # Ask the model for a reply; surface API/network failures in the UI
    # instead of crashing the app with a traceback.
    try:
        response = client.chat.completions.create(
            model="google/gemma-2b-it",
            messages=st.session_state.messages,
        )
        msg = response.choices[0].message.content
    except Exception as exc:  # top-level UI boundary: report, don't crash
        st.error(f"Model request failed: {exc}")
    else:
        # Display the assistant's reply and record it so the next turn
        # includes it as conversation context.
        st.chat_message("assistant").write(msg)
        st.session_state.messages.append({"role": "assistant", "content": msg})