Dmitry Beresnev
committed on
Commit
·
a0ea8f4
1
Parent(s):
534d90b
fix news dashboard
Browse files- app/pages/05_Dashboard.py +63 -50
app/pages/05_Dashboard.py
CHANGED
|
@@ -297,60 +297,73 @@ def fetch_ai_tech_news():
|
|
| 297 |
# Progressive loading: Display results as they arrive
|
| 298 |
# Create a status placeholder to show progress
|
| 299 |
status_placeholder = st.empty()
|
| 300 |
-
status_placeholder.info("π Starting news fetch from 4 sources...")
|
| 301 |
|
| 302 |
# Execute all news fetching operations in parallel using ThreadPoolExecutor
|
| 303 |
-
with
|
| 304 |
-
|
| 305 |
-
|
| 306 |
-
|
| 307 |
-
|
| 308 |
-
|
| 309 |
-
|
| 310 |
-
|
| 311 |
-
|
| 312 |
-
|
| 313 |
-
|
| 314 |
-
|
| 315 |
-
|
| 316 |
-
# Process results as they complete (progressive loading)
|
| 317 |
-
for future in as_completed(futures_map, timeout=90):
|
| 318 |
-
source_name = futures_map[future]
|
| 319 |
|
|
|
|
| 320 |
try:
|
| 321 |
-
|
| 322 |
-
|
| 323 |
-
|
| 324 |
-
|
| 325 |
-
|
| 326 |
-
|
| 327 |
-
|
| 328 |
-
|
| 329 |
-
|
| 330 |
-
|
| 331 |
-
|
| 332 |
-
|
| 333 |
-
|
| 334 |
-
|
| 335 |
-
|
| 336 |
-
|
| 337 |
-
|
| 338 |
-
|
| 339 |
-
|
| 340 |
-
|
| 341 |
-
|
| 342 |
-
|
| 343 |
-
|
| 344 |
-
|
| 345 |
-
|
| 346 |
-
|
| 347 |
-
|
| 348 |
-
|
| 349 |
-
|
| 350 |
-
|
| 351 |
-
|
| 352 |
-
|
| 353 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 354 |
|
| 355 |
# Debug output (remove in production)
|
| 356 |
if st.session_state.get('debug_mode', False):
|
|
|
|
# Progressive loading: display results from each news source as it arrives.
# Create a status placeholder so progress messages can be updated in place.
status_placeholder = st.empty()

# Execute all news-fetching operations in parallel using ThreadPoolExecutor.
# NOTE(fix): as_completed(timeout=...) raises concurrent.futures.TimeoutError,
# which is NOT the builtin TimeoutError before Python 3.11 (they were aliased
# only in 3.11). Import the futures alias so the timeout handler below
# actually fires on older interpreters instead of crashing the page.
from concurrent.futures import TimeoutError as FuturesTimeoutError

with st.spinner("π Loading news from 4 sources..."):
    with ThreadPoolExecutor(max_workers=4) as executor:
        # Submit all tasks, mapping each future back to its source name.
        futures_map = {
            executor.submit(fetch_twitter_news): 'twitter',
            executor.submit(fetch_reddit_news): 'reddit',
            executor.submit(fetch_rss_news): 'rss',
            executor.submit(fetch_ai_tech_news): 'ai_tech'
        }

        # Track per-source errors and completion order. Entries appended as
        # "<name> (error)" / "<name> (timeout)" mark failed sources.
        fetch_errors = []
        completed_sources = []

        # Process results as they complete (progressive loading).
        try:
            for future in as_completed(futures_map, timeout=90):
                source_name = futures_map[future]

                try:
                    # Each fetcher returns a (DataFrame, error_message) pair;
                    # error is falsy on success.
                    result_df, error = future.result()

                    # Update progress status in place.
                    completed_sources.append(source_name)
                    status_placeholder.info(f"π Loaded {len(completed_sources)}/4 sources ({', '.join(completed_sources)})")

                    # Route the result DataFrame to the variable the rest of
                    # the page reads for that source.
                    if source_name == 'twitter':
                        twitter_df = result_df
                        if error:
                            fetch_errors.append(error)
                    elif source_name == 'reddit':
                        reddit_df = result_df
                        if error:
                            fetch_errors.append(error)
                    elif source_name == 'rss':
                        rss_all_df = result_df
                        if error:
                            fetch_errors.append(error)
                        # Get the main-page news subset for RSS.
                        if not rss_all_df.empty and 'from_web' in rss_all_df.columns:
                            rss_main_df = rss_all_df[rss_all_df['from_web'] == True].copy()
                    elif source_name == 'ai_tech':
                        ai_tech_df = result_df
                        if error:
                            fetch_errors.append(error)

                except Exception as e:
                    # A single failed source must not abort the others.
                    fetch_errors.append(f"Error fetching {source_name} news: {e}")
                    completed_sources.append(f"{source_name} (error)")
                    status_placeholder.warning(f"β οΈ {source_name} failed, continuing with other sources...")

        except FuturesTimeoutError:
            # Handle timeout gracefully - continue with whatever results we have.
            fetch_errors.append("β±οΈ Some sources timed out after 90 seconds - displaying available results")
            status_placeholder.warning(f"β οΈ {len(completed_sources)}/4 sources loaded (some timed out)")

            # Mark sources that never completed.
            all_sources = set(futures_map.values())
            incomplete_sources = all_sources - set(completed_sources)
            for source in incomplete_sources:
                fetch_errors.append(f"{source} timed out - skipped")
                completed_sources.append(f"{source} (timeout)")

        # Final status: count only sources that loaded cleanly — entries
        # tagged "(error)" / "(timeout)" are failures, not successes.
        # (Previously every entry was counted, misreporting failures as loaded.)
        successful = [s for s in completed_sources if not s.endswith(')')]
        status_placeholder.success(f"β Loaded {len(successful)}/4 sources successfully")
|
| 368 |
# Debug output (remove in production)
|
| 369 |
if st.session_state.get('debug_mode', False):
|