Ani14 committed on
Commit
83d8666
Β·
verified Β·
1 Parent(s): 5c479dc

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +53 -21
app.py CHANGED
@@ -2,6 +2,7 @@ import os
2
  import streamlit as st
3
  import requests
4
  import datetime
 
5
  from dotenv import load_dotenv
6
  from tavily import TavilyClient
7
  import feedparser
@@ -29,13 +30,23 @@ def call_llm(messages, model="deepseek/deepseek-chat-v3-0324:free", max_tokens=3
29
  "model": model,
30
  "messages": messages,
31
  "max_tokens": max_tokens,
32
- "temperature": temperature
 
33
  }
34
- response = requests.post(url, headers=headers, json=data)
35
- result = response.json()
36
- if response.status_code != 200:
37
- raise RuntimeError(result.get("error", {}).get("message", "LLM API error"))
38
- return result["choices"][0]["message"]["content"]
 
 
 
 
 
 
 
 
 
39
 
40
  def get_sources(topic, domains=None):
41
  query = topic
@@ -47,6 +58,7 @@ def get_sources(topic, domains=None):
47
  "title": r["title"],
48
  "url": r["url"],
49
  "snippet": r.get("content", ""),
 
50
  "source": "web"
51
  } for r in response.get("results", [])]
52
 
@@ -125,7 +137,6 @@ def generate_download_button(file, label, mime_type):
125
  </a>
126
  """
127
 
128
- # --- Streamlit UI ---
129
  st.set_page_config("Deep Research Bot", layout="centered")
130
  st.markdown("""
131
  <style>
@@ -148,17 +159,35 @@ st.title("πŸ“˜ Research Output")
148
 
149
  if research_button and topic:
150
  try:
151
- with st.spinner("Gathering sources and analyzing deeply..."):
152
- all_sources = []
153
- if source_type in ["Web Only", "Hybrid"]:
154
- all_sources += get_sources(topic, custom_domains)
155
- if source_type in ["Academic Only", "Hybrid"]:
156
- all_sources += get_arxiv_papers(topic)
157
- all_sources += get_semantic_papers(topic)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
158
 
159
  merged = merge_duplicates(all_sources)
160
  citations = [generate_apa_citation(m['title'], m['url'], m['source']) for m in merged]
161
- combined_text = "\n\n".join([f"- [{m['title']}]({m['url']})\n> {m.get('snippet', m.get('summary', ''))[:300]}..." for m in merged])
 
 
 
162
 
163
  prompt = f"""
164
  You are an expert assistant. Based on the following sources, write a {report_type.lower()} in a {tone.lower()} tone on the topic: {topic}.
@@ -171,16 +200,19 @@ APA Citations:
171
  """
172
 
173
  st.subheader(f"πŸ“ {report_type} on '{topic}'")
174
- final_output = call_llm([{"role": "user", "content": prompt}])
175
- st.markdown(final_output, unsafe_allow_html=True)
 
 
 
176
 
177
  if report_type == "Thorough Academic Research":
178
  st.markdown("---")
179
  st.subheader("πŸ“„ Downloads")
180
- st.markdown(generate_download_button(generate_pdf(final_output), "Research_Report.pdf", "application/pdf"), unsafe_allow_html=True)
181
- st.markdown(generate_download_button(generate_latex(final_output), "Research_Report.tex", "application/x-latex"), unsafe_allow_html=True)
182
 
183
- overlaps = check_plagiarism(final_output, topic)
184
  st.markdown("---")
185
  st.subheader("πŸ”Ž Plagiarism Check")
186
  if overlaps:
@@ -191,4 +223,4 @@ APA Citations:
191
  st.success("βœ… No major overlaps found.")
192
 
193
  except Exception as e:
194
- st.error(f"❌ Error occurred: {e}")
 
2
  import streamlit as st
3
  import requests
4
  import datetime
5
+ import time
6
  from dotenv import load_dotenv
7
  from tavily import TavilyClient
8
  import feedparser
 
30
  "model": model,
31
  "messages": messages,
32
  "max_tokens": max_tokens,
33
+ "temperature": temperature,
34
# NOTE(review): "stream": True switches the API to SSE-style chunked responses, consumed line-by-line below.
+ "stream": True
35
  }
36
+ with requests.post(url, headers=headers, json=data, stream=True) as response:
37
+ content = ""
38
+ for line in response.iter_lines():
39
+ if line:
40
+ decoded = line.decode("utf-8")
41
+ if decoded.startswith("data: "):
42
# NOTE(review): str.replace strips EVERY "data: " occurrence in the line; removeprefix("data: ") would be safer.
+ piece = decoded.replace("data: ", "").strip()
43
+ if piece != "[DONE]":
44
+ try:
45
# SECURITY(review): eval() executes untrusted text returned by the remote API — must be json.loads(piece).
+ content += eval(piece)['choices'][0]['delta'].get('content', '')
46
# NOTE(review): yields the CUMULATIVE text so far (not the delta); consumers must assign, not append.
+ yield content
47
# NOTE(review): bare except hides real failures — catch (json.JSONDecodeError, KeyError, IndexError) explicitly.
+ except:
48
+ continue
49
+
50
 
51
  def get_sources(topic, domains=None):
52
  query = topic
 
58
  "title": r["title"],
59
  "url": r["url"],
60
  "snippet": r.get("content", ""),
61
+ "image_url": r.get("image_url"),
62
  "source": "web"
63
  } for r in response.get("results", [])]
64
 
 
137
  </a>
138
  """
139
 
 
140
  st.set_page_config("Deep Research Bot", layout="centered")
141
  st.markdown("""
142
  <style>
 
159
 
160
  if research_button and topic:
161
  try:
162
+ with st.status("πŸ” Gathering and analyzing sources...") as status:
163
def fetch_all_sources():
    """Collect sources for the current topic per the selected source_type.

    Reads source_type, topic, custom_domains, and status from the
    enclosing scope; returns a (possibly empty) list of source dicts.
    """
    collected = []
    if source_type in ["Web Only", "Hybrid"]:
        collected.extend(get_sources(topic, custom_domains))
    if source_type in ["Academic Only", "Hybrid"]:
        status.update(label="πŸ“š Fetching academic sources...")
        # `or []` guards against helpers that return None on failure
        collected.extend(get_arxiv_papers(topic) or [])
        collected.extend(get_semantic_papers(topic) or [])
    return collected
172
+
173
# NOTE(review): retry wrapper — up to 3 attempts with a fixed 2 s pause between tries.
+ all_sources, retries = [], 0
174
+ while retries < 3:
175
+ all_sources = fetch_all_sources()
176
+ if all_sources:
177
+ break
178
+ retries += 1
179
+ status.update(label=f"πŸ” Retrying... ({retries}) Deeper research underway...")
180
+ time.sleep(2)
181
+
182
# NOTE(review): this ValueError is caught by the enclosing try/except and surfaced to the user via st.error.
+ if not all_sources:
183
+ raise ValueError("Unable to fetch any sources. Please try again later.")
184
 
185
  merged = merge_duplicates(all_sources)
186
  citations = [generate_apa_citation(m['title'], m['url'], m['source']) for m in merged]
187
+ combined_text = "\n\n".join(
188
+ [f"- [{m['title']}]({m['url']})\n> {m.get('snippet', m.get('summary', ''))[:300]}..." for m in merged]
189
+ )
190
+ status.update(label="🧠 Synthesizing final report...")
191
 
192
  prompt = f"""
193
  You are an expert assistant. Based on the following sources, write a {report_type.lower()} in a {tone.lower()} tone on the topic: {topic}.
 
200
  """
201
 
202
  st.subheader(f"πŸ“ {report_type} on '{topic}'")
203
# NOTE(review): st.empty() placeholder is overwritten once per chunk to simulate streaming output.
+ output_placeholder = st.empty()
204
+ full_output = ""
205
+ for chunk in call_llm([{"role": "user", "content": prompt}]):
206
# NOTE(review): call_llm yields the cumulative text each iteration, so plain assignment (not +=) is intended — confirm against call_llm's contract.
+ full_output = chunk
207
# NOTE(review): unsafe_allow_html=True renders raw model output as HTML — potential XSS if the output is untrusted.
+ output_placeholder.markdown(full_output, unsafe_allow_html=True)
208
 
209
  if report_type == "Thorough Academic Research":
210
  st.markdown("---")
211
  st.subheader("πŸ“„ Downloads")
212
+ st.markdown(generate_download_button(generate_pdf(full_output), "Research_Report.pdf", "application/pdf"), unsafe_allow_html=True)
213
+ st.markdown(generate_download_button(generate_latex(full_output), "Research_Report.tex", "application/x-latex"), unsafe_allow_html=True)
214
 
215
+ overlaps = check_plagiarism(full_output, topic)
216
  st.markdown("---")
217
  st.subheader("πŸ”Ž Plagiarism Check")
218
  if overlaps:
 
223
  st.success("βœ… No major overlaps found.")
224
 
225
  except Exception as e:
226
+ st.error(f"❌ Error occurred: {e}")