istiak101 committed on
Commit
c68cbf1
Β·
verified Β·
1 Parent(s): 121eaed

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +42 -25
app.py CHANGED
@@ -1,25 +1,36 @@
1
  import os
 
2
  import streamlit as st
3
  from dotenv import load_dotenv
4
- import wikipedia
5
  from xhtml2pdf import pisa
6
  import io
7
-
8
- # Load environment variables
9
- load_dotenv()
 
 
 
 
 
 
 
 
 
 
 
 
 
10
 
11
  st.set_page_config(page_title="Ask Wikipedia", page_icon="πŸ“˜", layout="wide")
12
 
13
# --- Wikipedia Summary ---
def get_wikipedia_summary(query):
    """Return a two-sentence Wikipedia summary for *query*.

    All failures are reported as user-facing strings rather than raised,
    so the Streamlit UI can always render a reply.

    Args:
        query: Free-text topic to look up.

    Returns:
        str: The summary, or a human-readable error message.
    """
    try:
        return wikipedia.summary(query, sentences=2)
    except wikipedia.exceptions.DisambiguationError as e:
        return f"Your query is ambiguous, here are some options: {e.options}"
    except wikipedia.exceptions.PageError:
        # Added: a missing article previously fell through to the generic
        # handler and produced an unhelpful "An error occurred" message.
        return f"No Wikipedia page found for '{query}'."
    except wikipedia.exceptions.HTTPTimeoutError:
        return "Request timed out. Please try again later."
    except Exception as e:
        # Last-resort catch-all so the UI never crashes on a lookup.
        return f"An error occurred: {e}"
23
 
24
  # --- PDF Generation ---
25
  def generate_pdf(convo, topic):
@@ -153,10 +164,7 @@ if st.session_state.current_conversation:
153
  with col1:
154
  if st.button("βœ… Save", key=f"save_{idx}"):
155
  msg["text"] = new_text
156
- try:
157
- new_response = get_wikipedia_summary(new_text)
158
- except:
159
- new_response = "Failed to retrieve summary."
160
  if idx + 1 < len(convo) and convo[idx + 1]["role"] == "assistant":
161
  convo[idx + 1]["text"] = new_response
162
  st.session_state.edit_mode[idx] = False
@@ -191,9 +199,9 @@ if st.session_state.current_conversation:
191
 
192
  # --- Export PDF ---
193
  if st.button("πŸ“₯ Export Conversation as PDF"):
194
- pdf_bytes = generate_pdf(convo,st.session_state.current_conversation)
195
  if pdf_bytes:
196
- st.download_button("Download PDF", pdf_bytes, file_name="AskWikipedia_Conversation.pdf", mime="application/pdf")
197
  else:
198
  st.error("❌ Failed to generate PDF.")
199
 
@@ -201,9 +209,18 @@ if st.session_state.current_conversation:
201
  user_input = st.chat_input("Ask Wikipedia...")
202
  if user_input:
203
  convo.append({"role": "user", "text": user_input})
 
 
 
 
 
 
 
 
204
  try:
205
- reply = get_wikipedia_summary(user_input)
206
- except:
207
- reply = "Could not fetch response."
208
- convo.append({"role": "assistant", "text": reply})
209
- st.rerun()
 
 
1
  import os
2
+ import subprocess
3
  import streamlit as st
4
  from dotenv import load_dotenv
 
5
  from xhtml2pdf import pisa
6
  import io
7
+ from transformers import AutoTokenizer, AutoModelForCausalLM # for loading llama tokenizer
8
+
9
# --- Load Model Resources ---
def load_resources():
    """Load the fine-tuned TinyLlama model and its tokenizer.

    Reads HUGGINGFACE_TOKEN from the environment (populated from .env) and
    passes it directly to ``from_pretrained`` via the ``token=`` argument.
    The previous ``subprocess.run(["huggingface-cli", "login", ...])`` call
    exposed the token in the process argument list (visible via ``ps``),
    mutated global CLI state, and silently ignored login failures because
    the return code was never checked.

    Returns:
        tuple: (model, tokenizer) for "istiak101/TinyLlama-1.1B-Finetuned".
    """
    load_dotenv()
    # Empty/missing token becomes None so public-repo loads still work.
    huggingface_token = os.getenv("HUGGINGFACE_TOKEN") or None
    model_id = "istiak101/TinyLlama-1.1B-Finetuned"
    tokenizer = AutoTokenizer.from_pretrained(model_id, token=huggingface_token)
    model = AutoModelForCausalLM.from_pretrained(model_id, token=huggingface_token)
    return model, tokenizer
17
+
18
# --- Cache the model and tokenizer in Streamlit's session state ---
# Loading is expensive, so do it at most once per browser session; any
# subsequent rerun reuses the cached objects.
_required_keys = ("llama_model", "llama_tokenizer")
if any(key not in st.session_state for key in _required_keys):
    llama_model, llama_tokenizer = load_resources()
    st.session_state.llama_model = llama_model
    st.session_state.llama_tokenizer = llama_tokenizer

st.set_page_config(page_title="Ask Wikipedia", page_icon="πŸ“˜", layout="wide")
25
 
26
def get_llama_response(query):
    """Generate a reply to *query* using the session-cached TinyLlama model.

    Fix vs. the original: for decoder-only (causal) language models,
    ``generate`` returns the prompt tokens followed by the completion, so
    decoding ``outputs[0]`` verbatim echoed the user's question back at
    the start of every reply. We now decode only the newly generated
    tokens.

    Args:
        query: The user's prompt text.

    Returns:
        str: The model's completion, without the echoed prompt.
    """
    model = st.session_state.llama_model
    tokenizer = st.session_state.llama_tokenizer

    inputs = tokenizer(query, return_tensors="pt")
    outputs = model.generate(**inputs, max_new_tokens=300)
    # Strip the prompt: everything before prompt_length is the input echo.
    prompt_length = inputs["input_ids"].shape[1]
    response = tokenizer.decode(outputs[0][prompt_length:], skip_special_tokens=True)
    return response
 
 
34
 
35
  # --- PDF Generation ---
36
  def generate_pdf(convo, topic):
 
164
  with col1:
165
  if st.button("βœ… Save", key=f"save_{idx}"):
166
  msg["text"] = new_text
167
+ new_response = get_llama_response(new_text)
 
 
 
168
  if idx + 1 < len(convo) and convo[idx + 1]["role"] == "assistant":
169
  convo[idx + 1]["text"] = new_response
170
  st.session_state.edit_mode[idx] = False
 
199
 
200
  # --- Export PDF ---
201
  if st.button("πŸ“₯ Export Conversation as PDF"):
202
+ pdf_bytes = generate_pdf(convo, st.session_state.current_conversation)
203
  if pdf_bytes:
204
+ st.download_button("Download PDF", pdf_bytes, file_name=f"{st.session_state.current_conversation}_Conversation.pdf", mime="application/pdf")
205
  else:
206
  st.error("❌ Failed to generate PDF.")
207
 
 
209
  user_input = st.chat_input("Ask Wikipedia...")
210
  if user_input:
211
  convo.append({"role": "user", "text": user_input})
212
+ st.rerun()
213
+
214
+ # Display assistant response after rerun
215
+ if st.session_state.current_conversation and len(st.session_state.chat_sessions[st.session_state.current_conversation]) % 2 == 1:
216
+ convo = st.session_state.chat_sessions[st.session_state.current_conversation]
217
+ last_user_msg = convo[-1]["text"]
218
+
219
+ with st.spinner("Generating response..."):
220
  try:
221
+ assistant_reply = get_llama_response(last_user_msg)
222
+ except Exception as e:
223
+ assistant_reply = f"⚠️ Failed to generate response"
224
+
225
+ convo.append({"role": "assistant", "text": assistant_reply})
226
+ st.rerun()