Dmitry Beresnev committed on
Commit
a0ea8f4
·
1 Parent(s): 534d90b

fix news dashboard

Browse files
Files changed (1) hide show
  1. app/pages/05_Dashboard.py +63 -50
app/pages/05_Dashboard.py CHANGED
@@ -297,60 +297,73 @@ def fetch_ai_tech_news():
297
  # Progressive loading: Display results as they arrive
298
  # Create a status placeholder to show progress
299
  status_placeholder = st.empty()
300
- status_placeholder.info("πŸ” Starting news fetch from 4 sources...")
301
 
302
  # Execute all news fetching operations in parallel using ThreadPoolExecutor
303
- with ThreadPoolExecutor(max_workers=4) as executor:
304
- # Submit all tasks with source name attached
305
- futures_map = {
306
- executor.submit(fetch_twitter_news): 'twitter',
307
- executor.submit(fetch_reddit_news): 'reddit',
308
- executor.submit(fetch_rss_news): 'rss',
309
- executor.submit(fetch_ai_tech_news): 'ai_tech'
310
- }
311
-
312
- # Track errors and completion
313
- fetch_errors = []
314
- completed_sources = []
315
-
316
- # Process results as they complete (progressive loading)
317
- for future in as_completed(futures_map, timeout=90):
318
- source_name = futures_map[future]
319
 
 
320
  try:
321
- result_df, error = future.result()
322
-
323
- # Update status
324
- completed_sources.append(source_name)
325
- status_placeholder.info(f"πŸ” Loaded {len(completed_sources)}/4 sources ({', '.join(completed_sources)})")
326
-
327
- if source_name == 'twitter':
328
- twitter_df = result_df
329
- if error:
330
- fetch_errors.append(error)
331
- elif source_name == 'reddit':
332
- reddit_df = result_df
333
- if error:
334
- fetch_errors.append(error)
335
- elif source_name == 'rss':
336
- rss_all_df = result_df
337
- if error:
338
- fetch_errors.append(error)
339
- # Get main page news subset for RSS
340
- if not rss_all_df.empty and 'from_web' in rss_all_df.columns:
341
- rss_main_df = rss_all_df[rss_all_df['from_web'] == True].copy()
342
- elif source_name == 'ai_tech':
343
- ai_tech_df = result_df
344
- if error:
345
- fetch_errors.append(error)
346
-
347
- except Exception as e:
348
- fetch_errors.append(f"Error fetching {source_name} news: {e}")
349
- completed_sources.append(f"{source_name} (error)")
350
- status_placeholder.warning(f"⚠️ {source_name} failed, continuing with other sources...")
351
-
352
- # Clear the status message after all sources complete
353
- status_placeholder.success(f"βœ… Loaded {len(completed_sources)}/4 sources successfully")
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
354
 
355
  # Debug output (remove in production)
356
  if st.session_state.get('debug_mode', False):
 
297
  # Progressive loading: Display results as they arrive
298
  # Create a status placeholder to show progress
299
  status_placeholder = st.empty()
 
300
 
301
  # Execute all news fetching operations in parallel using ThreadPoolExecutor
302
+ with st.spinner("πŸ” Loading news from 4 sources..."):
303
+ with ThreadPoolExecutor(max_workers=4) as executor:
304
+ # Submit all tasks with source name attached
305
+ futures_map = {
306
+ executor.submit(fetch_twitter_news): 'twitter',
307
+ executor.submit(fetch_reddit_news): 'reddit',
308
+ executor.submit(fetch_rss_news): 'rss',
309
+ executor.submit(fetch_ai_tech_news): 'ai_tech'
310
+ }
311
+
312
+ # Track errors and completion
313
+ fetch_errors = []
314
+ completed_sources = []
 
 
 
315
 
316
+ # Process results as they complete (progressive loading)
317
  try:
318
+ for future in as_completed(futures_map, timeout=90):
319
+ source_name = futures_map[future]
320
+
321
+ try:
322
+ result_df, error = future.result()
323
+
324
+ # Update status
325
+ completed_sources.append(source_name)
326
+ status_placeholder.info(f"πŸ” Loaded {len(completed_sources)}/4 sources ({', '.join(completed_sources)})")
327
+
328
+ if source_name == 'twitter':
329
+ twitter_df = result_df
330
+ if error:
331
+ fetch_errors.append(error)
332
+ elif source_name == 'reddit':
333
+ reddit_df = result_df
334
+ if error:
335
+ fetch_errors.append(error)
336
+ elif source_name == 'rss':
337
+ rss_all_df = result_df
338
+ if error:
339
+ fetch_errors.append(error)
340
+ # Get main page news subset for RSS
341
+ if not rss_all_df.empty and 'from_web' in rss_all_df.columns:
342
+ rss_main_df = rss_all_df[rss_all_df['from_web'] == True].copy()
343
+ elif source_name == 'ai_tech':
344
+ ai_tech_df = result_df
345
+ if error:
346
+ fetch_errors.append(error)
347
+
348
+ except Exception as e:
349
+ fetch_errors.append(f"Error fetching {source_name} news: {e}")
350
+ completed_sources.append(f"{source_name} (error)")
351
+ status_placeholder.warning(f"⚠️ {source_name} failed, continuing with other sources...")
352
+
353
+ except TimeoutError:
354
+ # Handle timeout gracefully - continue with whatever results we have
355
+ fetch_errors.append("⏱️ Some sources timed out after 90 seconds - displaying available results")
356
+ status_placeholder.warning(f"⚠️ {len(completed_sources)}/4 sources loaded (some timed out)")
357
+
358
+ # Mark incomplete sources
359
+ all_sources = set(futures_map.values())
360
+ incomplete_sources = all_sources - set(completed_sources)
361
+ for source in incomplete_sources:
362
+ fetch_errors.append(f"{source} timed out - skipped")
363
+ completed_sources.append(f"{source} (timeout)")
364
+
365
+ # Clear the status message after all sources complete
366
+ status_placeholder.success(f"βœ… Loaded {len(completed_sources)}/4 sources successfully")
367
 
368
  # Debug output (remove in production)
369
  if st.session_state.get('debug_mode', False):