Update app.py
Browse files
app.py
CHANGED
|
@@ -45,7 +45,7 @@ if "llama_model" not in st.session_state or "llama_tokenizer" not in st.session_
|
|
| 45 |
st.session_state.llama_model = model
|
| 46 |
st.session_state.llama_tokenizer = tokenizer
|
| 47 |
|
| 48 |
-
st.set_page_config(page_title="
|
| 49 |
|
| 50 |
def get_llama_response(query):
|
| 51 |
# model = st.session_state.llama_model
|
|
@@ -69,9 +69,9 @@ def generate_pdf(convo, topic):
|
|
| 69 |
html = f"<h2>{topic}</h2><hr>"
|
| 70 |
for msg in convo:
|
| 71 |
if msg["role"] == "user":
|
| 72 |
-
html += f"<p><strong
|
| 73 |
elif msg["role"] == "assistant":
|
| 74 |
-
html += f"<p><strong
|
| 75 |
|
| 76 |
result = io.BytesIO()
|
| 77 |
pisa_status = pisa.CreatePDF(io.StringIO(html), dest=result)
|
|
@@ -88,7 +88,7 @@ if "edit_mode" not in st.session_state:
|
|
| 88 |
st.session_state.edit_mode = {}
|
| 89 |
|
| 90 |
# --- App Title ---
|
| 91 |
-
st.title("π¬
|
| 92 |
|
| 93 |
# --- Custom CSS ---
|
| 94 |
st.markdown("""
|
|
@@ -215,7 +215,7 @@ if st.session_state.current_conversation:
|
|
| 215 |
try:
|
| 216 |
new_response = get_llama_response(prompt)
|
| 217 |
except:
|
| 218 |
-
new_response = "Failed to retrieve
|
| 219 |
if idx + 1 < len(convo) and convo[idx + 1]["role"] == "assistant":
|
| 220 |
convo[idx + 1]["text"] = new_response
|
| 221 |
st.session_state.edit_mode[idx] = False
|
|
@@ -234,7 +234,7 @@ if st.session_state.current_conversation:
|
|
| 234 |
with col2:
|
| 235 |
st.markdown(f'''
|
| 236 |
<div class="user-bubble">
|
| 237 |
-
<div class="bubble-header">π€ <span>
|
| 238 |
{msg["text"]}
|
| 239 |
</div>
|
| 240 |
''', unsafe_allow_html=True)
|
|
@@ -242,7 +242,7 @@ if st.session_state.current_conversation:
|
|
| 242 |
elif msg["role"] == "assistant":
|
| 243 |
st.markdown(f'''
|
| 244 |
<div class="assistant-bubble">
|
| 245 |
-
<div class="bubble-header">π <span>
|
| 246 |
{msg["text"]}
|
| 247 |
</div>
|
| 248 |
''', unsafe_allow_html=True)
|
|
|
|
| 45 |
st.session_state.llama_model = model
|
| 46 |
st.session_state.llama_tokenizer = tokenizer
|
| 47 |
|
| 48 |
+
st.set_page_config(page_title="Tell Me Why", page_icon="❓", layout="wide")
|
| 49 |
|
| 50 |
def get_llama_response(query):
|
| 51 |
# model = st.session_state.llama_model
|
|
|
|
| 69 |
html = f"<h2>{topic}</h2><hr>"
|
| 70 |
for msg in convo:
|
| 71 |
if msg["role"] == "user":
|
| 72 |
+
html += f"<p><strong>You:</strong> {msg['text']}</p>"
|
| 73 |
elif msg["role"] == "assistant":
|
| 74 |
+
html += f"<p><strong>AI Assistant:</strong> {msg['text']}</p>"
|
| 75 |
|
| 76 |
result = io.BytesIO()
|
| 77 |
pisa_status = pisa.CreatePDF(io.StringIO(html), dest=result)
|
|
|
|
| 88 |
st.session_state.edit_mode = {}
|
| 89 |
|
| 90 |
# --- App Title ---
|
| 91 |
+
st.title("💬 Tell Me Why")
|
| 92 |
|
| 93 |
# --- Custom CSS ---
|
| 94 |
st.markdown("""
|
|
|
|
| 215 |
try:
|
| 216 |
new_response = get_llama_response(prompt)
|
| 217 |
except:
|
| 218 |
+
new_response = "Failed to retrieve response."
|
| 219 |
if idx + 1 < len(convo) and convo[idx + 1]["role"] == "assistant":
|
| 220 |
convo[idx + 1]["text"] = new_response
|
| 221 |
st.session_state.edit_mode[idx] = False
|
|
|
|
| 234 |
with col2:
|
| 235 |
st.markdown(f'''
|
| 236 |
<div class="user-bubble">
|
| 237 |
+
<div class="bubble-header">👤 <span>You</span></div>
|
| 238 |
{msg["text"]}
|
| 239 |
</div>
|
| 240 |
''', unsafe_allow_html=True)
|
|
|
|
| 242 |
elif msg["role"] == "assistant":
|
| 243 |
st.markdown(f'''
|
| 244 |
<div class="assistant-bubble">
|
| 245 |
+
<div class="bubble-header">📚 <span>AI Assistant</span></div>
|
| 246 |
{msg["text"]}
|
| 247 |
</div>
|
| 248 |
''', unsafe_allow_html=True)
|