S.Sai Yashasvini committed on
Commit
22a4d86
·
verified ·
1 Parent(s): d39a89c

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +28 -30
app.py CHANGED
@@ -1,63 +1,61 @@
1
  import os
2
- import json
3
-
4
  import streamlit as st
5
  from groq import Groq
6
 
7
-
8
- # streamlit page configuration
9
  st.set_page_config(
10
- page_title="LLAMA 3.1. Chat",
11
  page_icon="🦙",
12
  layout="centered"
13
  )
14
 
15
- working_dir = os.path.dirname(os.path.abspath(__file__))
16
- config_data = json.load(open(f"{working_dir}/config.json"))
17
 
18
- GROQ_API_KEY = config_data["GROQ_API_KEY"]
 
 
19
 
20
- # save the api key to environment variable
21
- os.environ["GROQ_API_KEY"] = GROQ_API_KEY
22
 
23
- client = Groq()
24
-
25
- # initialize the chat history as streamlit session state of not present already
26
  if "chat_history" not in st.session_state:
27
  st.session_state.chat_history = []
28
 
 
 
29
 
30
- # streamlit page title
31
- st.title("🦙 LLAMA 3.1. ChatBot")
32
-
33
- # display chat history
34
  for message in st.session_state.chat_history:
35
  with st.chat_message(message["role"]):
36
  st.markdown(message["content"])
37
 
38
-
39
- # input field for user's message:
40
  user_prompt = st.chat_input("Ask LLAMA...")
41
 
42
  if user_prompt:
43
-
44
  st.chat_message("user").markdown(user_prompt)
45
  st.session_state.chat_history.append({"role": "user", "content": user_prompt})
46
 
47
- # sens user's message to the LLM and get a response
48
  messages = [
49
  {"role": "system", "content": "You are a helpful assistant"},
50
  *st.session_state.chat_history
51
  ]
52
 
53
- response = client.chat.completions.create(
54
- model="llama-3.1-8b-instant",
55
- messages=messages
56
- )
 
 
 
 
57
 
58
- assistant_response = response.choices[0].message.content
59
- st.session_state.chat_history.append({"role": "assistant", "content": assistant_response})
 
60
 
61
- # display the LLM's response
62
- with st.chat_message("assistant"):
63
- st.markdown(assistant_response)
 
1
import os

import streamlit as st
from groq import Groq

# Configure the Streamlit page; must run before any other UI calls.
st.set_page_config(
    page_title="LLAMA 3.1 Chat",
    page_icon="🦙",
    layout="centered",
)

# The Groq API key is expected in the environment (Hugging Face Secrets).
GROQ_API_KEY = os.getenv("GROQ_API_KEY")
if not GROQ_API_KEY:
    # Fail fast with a visible message rather than crashing on the first API call.
    st.error("⚠️ Error: GROQ_API_KEY is missing! Please add it to Hugging Face Secrets.")
    st.stop()

client = Groq(api_key=GROQ_API_KEY)

# Chat history lives in session state so it survives Streamlit reruns.
if "chat_history" not in st.session_state:
    st.session_state["chat_history"] = []

st.title("🦙 LLAMA 3.1 ChatBot")

# Replay the conversation so far on every rerun.
for turn in st.session_state.chat_history:
    with st.chat_message(turn["role"]):
        st.markdown(turn["content"])

user_prompt = st.chat_input("Ask LLAMA...")

if user_prompt:
    # Echo the user's message and record it in the history.
    st.chat_message("user").markdown(user_prompt)
    st.session_state.chat_history.append({"role": "user", "content": user_prompt})

    # Prepend the system prompt to the full history for the model call.
    messages = [{"role": "system", "content": "You are a helpful assistant"}]
    messages.extend(st.session_state.chat_history)

    try:
        response = client.chat.completions.create(
            model="llama-3.1-8b-instant",
            messages=messages,
        )
        assistant_response = response.choices[0].message.content
        st.session_state.chat_history.append(
            {"role": "assistant", "content": assistant_response}
        )
        # Render the assistant's reply.
        with st.chat_message("assistant"):
            st.markdown(assistant_response)
    except Exception as e:
        # Surface API/network failures in the UI instead of a traceback.
        st.error(f"⚠️ Error: {str(e)}")