ZealAI committed on
Commit
27873f7
·
verified ·
1 Parent(s): 44c8073

Update src/streamlit_app.py

Browse files
Files changed (1) hide show
  1. src/streamlit_app.py +68 -28
src/streamlit_app.py CHANGED
@@ -1,6 +1,9 @@
1
-
2
  import streamlit as st
3
  from transformers import pipeline
 
 
 
 
4
 
5
  # ---------------------------
6
  # Config
@@ -12,58 +15,95 @@ st.set_page_config(
12
  )
13
 
14
  # ---------------------------
15
- # Load model (Hugging Face free ungated model)
 
 
 
 
 
 
 
 
 
 
16
  # ---------------------------
17
- @st.cache_resource
18
- def load_pipeline():
19
  return pipeline(
20
  "text-generation",
21
- model="meta-llama/Llama-2-7b-chat-hf",
22
  device_map="auto",
 
 
23
  )
24
 
25
- chatbot = load_pipeline()
26
 
27
  # ---------------------------
28
- # Chat memory
29
  # ---------------------------
30
  if "chat_history" not in st.session_state:
31
  st.session_state.chat_history = []
32
 
 
 
 
 
 
 
 
 
33
  # ---------------------------
34
- # Chat function
35
  # ---------------------------
36
  def get_ai_response(user_text):
37
- # Build chat context
38
- prompt = "You are ZEAL AI, a Bible-based assistant. Always answer with scripture, wisdom, and encouragement.\n\n"
39
- for msg in st.session_state.chat_history[-5:]:
40
- role = "User" if msg["role"] == "user" else "ZEAL AI"
41
- prompt += f"{role}: {msg['content']}\n"
42
- prompt += f"User: {user_text}\nZEAL AI:"
 
 
 
 
43
 
44
- response = chatbot(prompt, max_new_tokens=300, temperature=0.7, do_sample=True)
45
- reply = response[0]["generated_text"].split("ZEAL AI:")[-1].strip()
46
 
47
- # Save chat
48
  st.session_state.chat_history.append({"role": "user", "content": user_text})
49
  st.session_state.chat_history.append({"role": "assistant", "content": reply})
50
  return reply
51
 
 
 
 
 
 
 
 
 
 
52
  # ---------------------------
53
  # Streamlit UI
54
  # ---------------------------
55
- st.title("🕊️ Zeal AI Bible Assistant")
 
 
 
 
56
 
57
- user_input = st.chat_input("Type your message here...")
58
 
59
  if user_input:
60
- with st.spinner("Praying over your answer... 🙏"):
61
- reply = get_ai_response(user_input)
62
- st.chat_message("assistant").markdown(reply)
63
 
64
- # Show history
65
- for msg in st.session_state.chat_history:
66
- if msg["role"] == "user":
67
- st.chat_message("user").markdown(msg["content"])
68
- else:
69
- st.chat_message("assistant").markdown(msg["content"])
 
 
 
1
  import streamlit as st
2
  from transformers import pipeline
3
+ import random
4
+ from gtts import gTTS
5
+ import os
6
+ import time
7
 
8
  # ---------------------------
9
  # Config
 
15
  )
16
 
17
# ---------------------------
# Custom CSS
# ---------------------------
# Hide Streamlit chrome (hamburger menu, footer) and constrain the content column.
_CUSTOM_CSS = (
    "\n"
    "#MainMenu {visibility: hidden;}\n"
    "footer {visibility: hidden;}\n"
    ".block-container {max-width: 980px; padding-top: 1.5rem;}\n"
)
st.markdown("<style>" + _CUSTOM_CSS + "</style>", unsafe_allow_html=True)
26
+
27
# ---------------------------
# Load RedPajama Model
# ---------------------------
@st.cache_resource(show_spinner=True)
def load_model():
    """Build and cache the text-generation pipeline.

    Wrapped in st.cache_resource so the 7B model is loaded once per
    process instead of on every Streamlit rerun.
    """
    return pipeline(
        "text-generation",
        model="togethercomputer/RedPajama-INCITE-7B-Chat",
        device_map="auto",   # let accelerate place weights on available devices
        do_sample=True,      # sampling must be on, otherwise temperature is ignored
        temperature=0.7,
        max_new_tokens=300,
    )

chatbot = load_model()
41
 
42
# ---------------------------
# Global chat history
# ---------------------------
# Seed the per-session conversation memory exactly once.
if "chat_history" not in st.session_state:
    st.session_state["chat_history"] = []
47
 
48
# Canned trivia lines served when the user asks for a "bible fact".
BIBLE_FACTS = [
    "The Bible has around 611,000 words.",
    "Psalm 119 is the longest chapter with 176 verses!",
    "The shortest verse is John 11:35: 'Jesus wept.'",
    "The word 'Christian' appears only three times.",
    "Job is believed to be the oldest book.",
]
55
+
56
# ---------------------------
# Chat Function
# ---------------------------
def get_ai_response(user_text):
    """Produce ZEAL AI's reply to *user_text* and record the exchange.

    Short-circuits to a canned trivia line when the user asks for a
    "bible fact"; otherwise prompts the language model with the last
    five turns of conversation context. Both the user message and the
    reply are appended to st.session_state.chat_history.
    """
    lowered = user_text.lower()
    wants_fact = "bible fact" in lowered or "tell me something cool" in lowered

    if wants_fact:
        reply = random.choice(BIBLE_FACTS)
    else:
        # Assemble the prompt: system line, recent history, then the new turn.
        lines = ["You are ZEAL AI, a Bible-based assistant."]
        for msg in st.session_state.chat_history[-5:]:
            speaker = "User" if msg["role"] == "user" else "ZEAL AI"
            lines.append(f"{speaker}: {msg['content']}")
        prompt = "\n".join(lines) + "\n" + f"User: {user_text}\nZEAL AI:"

        # The pipeline echoes the prompt, so keep only the text that
        # follows the final "ZEAL AI:" marker.
        generated = chatbot(prompt)[0]["generated_text"]
        reply = generated.split("ZEAL AI:")[-1].strip()

    # Save chat history
    st.session_state.chat_history.append({"role": "user", "content": user_text})
    st.session_state.chat_history.append({"role": "assistant", "content": reply})
    return reply
78
 
79
# ---------------------------
# TTS Function
# ---------------------------
def text_to_speech(text):
    """Render *text* to speech with gTTS and return the MP3 file path.

    Writes to a unique temp file instead of a fixed "response.mp3" in
    the working directory, so concurrent sessions cannot overwrite each
    other's audio and the app still works when the app directory is
    read-only (common on hosted deployments).
    """
    import tempfile  # stdlib; scoped here to leave the top-level imports untouched

    tts = gTTS(text=text, lang="en")
    fd, path = tempfile.mkstemp(suffix=".mp3")
    os.close(fd)  # gTTS.save reopens the file by path; release the raw descriptor
    tts.save(path)
    return path
87
+
88
# ---------------------------
# Streamlit UI
# ---------------------------
st.title("🕊️ ZealAI - Bible-Based Assistant")

# Replay the stored conversation so it survives Streamlit reruns.
for past in st.session_state.chat_history:
    with st.chat_message(past["role"]):
        st.markdown(past["content"])

user_input = st.chat_input("Message ZEAL AI…")

if user_input:
    # Echo the new user turn immediately.
    with st.chat_message("user"):
        st.markdown(user_input)

    # Generate, display, and voice the assistant's answer.
    answer = get_ai_response(user_input)
    with st.chat_message("assistant"):
        st.markdown(answer)
    st.audio(text_to_speech(answer), format="audio/mp3")