# NOTE(review): removed extraction artifacts that were never part of the
# Python source (a "File size" banner plus git-blame hash and line-number
# gutter rows). They are not valid Python and would break the module.
import streamlit as st
from transformers import pipeline
import random
from gtts import gTTS
import os
import time
# ---------------------------
# Config
# ---------------------------
st.set_page_config(page_title="ZealAI", page_icon="🕊️", layout="wide")

# ---------------------------
# Custom CSS
# ---------------------------
# Hide Streamlit's default chrome (hamburger menu, footer) and constrain
# the main content column for a cleaner chat layout.
_HIDE_CHROME_CSS = """
#MainMenu {visibility: hidden;}
footer {visibility: hidden;}
.block-container {max-width: 980px; padding-top: 1.5rem;}
"""
st.markdown("<style>" + _HIDE_CHROME_CSS + "</style>", unsafe_allow_html=True)
# ---------------------------
# Load RedPajama Model
# ---------------------------
# ---------------------------
# Load RedPajama Model
# ---------------------------
@st.cache_resource(show_spinner=True)
def load_model():
    """Build and cache the RedPajama chat text-generation pipeline.

    Cached via ``st.cache_resource`` so the 7B model is loaded once per
    process rather than on every Streamlit rerun.

    Returns:
        A Hugging Face ``text-generation`` pipeline.
    """
    return pipeline(
        "text-generation",
        model="togethercomputer/RedPajama-INCITE-7B-Chat",
        device_map="auto",
        # FIX: text-generation pipelines default to greedy decoding, under
        # which `temperature` is silently ignored. Sampling must be enabled
        # explicitly for the 0.7 temperature to have any effect.
        do_sample=True,
        temperature=0.7,
        max_new_tokens=300,
    )


chatbot = load_model()
# ---------------------------
# Global chat history
# ---------------------------
# Initialise the per-session conversation history exactly once; Streamlit
# reruns this script top-to-bottom on every interaction, and session_state
# is what persists across those reruns.
if "chat_history" not in st.session_state:
    st.session_state.chat_history = []

# Canned trivia answers served without a model call when the user asks for
# a "bible fact" (see get_ai_response below).
BIBLE_FACTS = [
    "The Bible has around 611,000 words.",
    "Psalm 119 is the longest chapter with 176 verses!",
    "The shortest verse is John 11:35: 'Jesus wept.'",
    "The word 'Christian' appears only three times.",
    "Job is believed to be the oldest book."
]
# ---------------------------
# Chat Function
# ---------------------------
# ---------------------------
# Chat Function
# ---------------------------
def get_ai_response(user_text):
    """Produce a reply to *user_text* and record both turns in history.

    A message mentioning "bible fact" or "tell me something cool" is
    answered from the canned BIBLE_FACTS list (fast, deterministic, no
    model call); anything else is sent to the model with up to the last
    five turns of conversation as context.

    Args:
        user_text: The raw message the user submitted.

    Returns:
        The assistant's reply as a plain string.
    """
    lowered = user_text.lower()
    if "bible fact" in lowered or "tell me something cool" in lowered:
        reply = random.choice(BIBLE_FACTS)
    else:
        # Build the prompt from the most recent chat history.
        prompt = "You are ZEAL AI, a Bible-based assistant.\n"
        for msg in st.session_state.chat_history[-5:]:
            role = "User" if msg["role"] == "user" else "ZEAL AI"
            prompt += f"{role}: {msg['content']}\n"
        prompt += f"User: {user_text}\nZEAL AI:"
        response = chatbot(prompt)
        generated = response[0]["generated_text"]
        # FIX: the pipeline returns prompt + completion by default. Slice
        # the prompt off rather than split on "ZEAL AI:" — splitting breaks
        # whenever the model's own completion contains that marker.
        if generated.startswith(prompt):
            reply = generated[len(prompt):]
        else:
            reply = generated.split("ZEAL AI:")[-1]
        # FIX: truncate at the first hallucinated follow-up turn so invented
        # "User: ..." dialogue never leaks into the displayed reply.
        reply = reply.split("\nUser:")[0].strip()
    # Persist both sides of the exchange so later reruns can replay it.
    st.session_state.chat_history.append({"role": "user", "content": user_text})
    st.session_state.chat_history.append({"role": "assistant", "content": reply})
    return reply
# ---------------------------
# TTS Function
# ---------------------------
# ---------------------------
# TTS Function
# ---------------------------
def text_to_speech(text):
    """Synthesize *text* to an MP3 with gTTS and return the file path.

    FIX: writes to a unique temporary file instead of a fixed
    "response.mp3" in the working directory — concurrent Streamlit
    sessions would otherwise race on (and clobber) the same file, and
    the CWD may be read-only in hosted deployments.

    Args:
        text: The reply text to speak (English).

    Returns:
        Filesystem path of the generated MP3.
    """
    import tempfile  # stdlib; local import keeps the fix self-contained

    tts = gTTS(text=text, lang="en")
    with tempfile.NamedTemporaryFile(suffix=".mp3", delete=False) as handle:
        path = handle.name
    tts.save(path)
    return path
# ---------------------------
# Streamlit UI
# ---------------------------
# ---------------------------
# Streamlit UI
# ---------------------------
st.title("🕊️ ZealAI - Bible-Based Assistant")

# Replay the conversation so far — Streamlit reruns this script from the
# top on every interaction, so the transcript must be redrawn each time.
for turn in st.session_state.chat_history:
    with st.chat_message(turn["role"]):
        st.markdown(turn["content"])

# Handle a freshly submitted message, if any.
if user_input := st.chat_input("Message ZEAL AI…"):
    with st.chat_message("user"):
        st.markdown(user_input)
    reply = get_ai_response(user_input)
    with st.chat_message("assistant"):
        st.markdown(reply)
        # Speak the reply as well as showing it.
        # NOTE(review): source indentation was lost in extraction; the audio
        # widget is assumed to belong inside the assistant bubble — confirm.
        audio_file = text_to_speech(reply)
        st.audio(audio_file, format="audio/mp3")