Dmitry Beresnev
committed on
Commit
·
534d90b
1
Parent(s):
8ff4dc9
add progressive loading (show results as they arrive)
Browse files- app/pages/05_Dashboard.py +58 -47
app/pages/05_Dashboard.py
CHANGED
|
@@ -204,7 +204,7 @@ force_refresh = st.session_state.get('force_refresh', False)
|
|
| 204 |
|
| 205 |
# Fetch news from all sources IN PARALLEL for maximum performance
|
| 206 |
import pandas as pd
|
| 207 |
-
from concurrent.futures import ThreadPoolExecutor
|
| 208 |
|
| 209 |
twitter_df = pd.DataFrame()
|
| 210 |
reddit_df = pd.DataFrame()
|
|
@@ -294,52 +294,63 @@ def fetch_ai_tech_news():
|
|
| 294 |
return pd.DataFrame(), f"AI/Tech news unavailable: {e}"
|
| 295 |
return pd.DataFrame(), None
|
| 296 |
|
| 297 |
-
|
| 298 |
-
|
| 299 |
-
|
| 300 |
-
|
| 301 |
-
|
| 302 |
-
|
| 303 |
-
|
| 304 |
-
|
| 305 |
-
|
| 306 |
-
|
| 307 |
-
|
| 308 |
-
|
| 309 |
-
|
| 310 |
-
|
| 311 |
-
|
| 312 |
-
|
| 313 |
-
|
| 314 |
-
|
| 315 |
-
|
| 316 |
-
|
| 317 |
-
|
| 318 |
-
|
| 319 |
-
|
| 320 |
-
|
| 321 |
-
|
| 322 |
-
|
| 323 |
-
|
| 324 |
-
|
| 325 |
-
|
| 326 |
-
|
| 327 |
-
|
| 328 |
-
|
| 329 |
-
|
| 330 |
-
|
| 331 |
-
|
| 332 |
-
|
| 333 |
-
|
| 334 |
-
|
| 335 |
-
|
| 336 |
-
|
| 337 |
-
|
| 338 |
-
|
| 339 |
-
|
| 340 |
-
|
| 341 |
-
|
| 342 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 343 |
|
| 344 |
# Debug output (remove in production)
|
| 345 |
if st.session_state.get('debug_mode', False):
|
|
|
|
| 204 |
|
| 205 |
# Fetch news from all sources IN PARALLEL for maximum performance
|
| 206 |
import pandas as pd
|
| 207 |
+
from concurrent.futures import ThreadPoolExecutor, as_completed
|
| 208 |
|
| 209 |
twitter_df = pd.DataFrame()
|
| 210 |
reddit_df = pd.DataFrame()
|
|
|
|
| 294 |
return pd.DataFrame(), f"AI/Tech news unavailable: {e}"
|
| 295 |
return pd.DataFrame(), None
|
| 296 |
|
| 297 |
+
# Progressive loading: display each source's results as soon as its worker
# finishes, instead of blocking until all four fetches complete.
# NOTE(review): the emoji in the status strings were mojibake in the scraped
# source view; plausible originals restored — confirm against the repo.
from concurrent.futures import TimeoutError as FuturesTimeout  # pre-3.11: not the builtin TimeoutError

status_placeholder = st.empty()
status_placeholder.info("🚀 Starting news fetch from 4 sources...")

# Ensure every per-source frame exists even if that source times out or
# raises before its branch below runs. twitter_df/reddit_df are initialised
# earlier in the script; these three otherwise risk a NameError downstream.
rss_all_df = pd.DataFrame()
rss_main_df = pd.DataFrame()
ai_tech_df = pd.DataFrame()

# Run all four fetchers concurrently; they are I/O-bound, so threads overlap
# their network waits.
with ThreadPoolExecutor(max_workers=4) as executor:
    # Map each future back to a human-readable source name.
    futures_map = {
        executor.submit(fetch_twitter_news): 'twitter',
        executor.submit(fetch_reddit_news): 'reddit',
        executor.submit(fetch_rss_news): 'rss',
        executor.submit(fetch_ai_tech_news): 'ai_tech',
    }

    fetch_errors = []        # per-source error messages, shown later
    completed_sources = []   # names of sources that finished (ok or error)

    try:
        # Process results in completion order so the UI updates progressively.
        for future in as_completed(futures_map, timeout=90):
            source_name = futures_map[future]
            try:
                # Each fetcher returns (DataFrame, error_message_or_None).
                result_df, error = future.result()
                if error:
                    fetch_errors.append(error)

                completed_sources.append(source_name)
                status_placeholder.info(
                    f"📊 Loaded {len(completed_sources)}/4 sources "
                    f"({', '.join(completed_sources)})"
                )

                if source_name == 'twitter':
                    twitter_df = result_df
                elif source_name == 'reddit':
                    reddit_df = result_df
                elif source_name == 'rss':
                    rss_all_df = result_df
                    # Main-page subset of RSS = rows flagged as from_web.
                    if not rss_all_df.empty and 'from_web' in rss_all_df.columns:
                        rss_main_df = rss_all_df[rss_all_df['from_web'] == True].copy()
                elif source_name == 'ai_tech':
                    ai_tech_df = result_df
            except Exception as e:
                # One failed source must not abort the others.
                fetch_errors.append(f"Error fetching {source_name} news: {e}")
                completed_sources.append(f"{source_name} (error)")
                status_placeholder.warning(
                    f"⚠️ {source_name} failed, continuing with other sources..."
                )
    except FuturesTimeout:
        # BUG FIX: as_completed(timeout=90) raises once the overall deadline
        # passes; previously this escaped uncaught and crashed the page.
        # Record which sources never finished and carry on with what we have.
        pending = [name for fut, name in futures_map.items() if not fut.done()]
        fetch_errors.extend(f"{name} news timed out after 90s" for name in pending)
        status_placeholder.warning(
            f"⚠️ Timed out waiting for: {', '.join(pending)}"
        )

# BUG FIX: only count sources that actually succeeded — entries appended as
# "... (error)" above must not inflate the "successfully" count.
ok_count = sum(1 for s in completed_sources if not s.endswith('(error)'))
status_placeholder.success(f"✅ Loaded {ok_count}/4 sources successfully")
|
| 354 |
|
| 355 |
# Debug output (remove in production)
|
| 356 |
if st.session_state.get('debug_mode', False):
|