Dmitry Beresnev committed on
Commit ·
84f7cdc
1
Parent(s): 92860fb
fix AI news in dashboard
Browse files- app/pages/05_Dashboard.py +41 -14
- app/utils/news_cache.py +10 -4
app/pages/05_Dashboard.py
CHANGED
|
@@ -341,6 +341,18 @@ with st.spinner("🔍 Fetching latest financial & tech news in parallel..."):
|
|
| 341 |
except Exception as e:
|
| 342 |
fetch_errors.append(f"Error fetching {source_name} news: {e}")
|
| 343 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 344 |
# Clear force refresh flag after fetching is complete
|
| 345 |
if force_refresh:
|
| 346 |
st.session_state.force_refresh = False
|
|
@@ -515,20 +527,35 @@ with col4:
|
|
| 515 |
height="700px"
|
| 516 |
)
|
| 517 |
else:
|
| 518 |
-
|
| 519 |
-
|
| 520 |
-
|
| 521 |
-
|
| 522 |
-
|
| 523 |
-
|
| 524 |
-
|
| 525 |
-
|
| 526 |
-
|
| 527 |
-
|
| 528 |
-
|
| 529 |
-
|
| 530 |
-
|
| 531 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 532 |
|
| 533 |
# Display fetch errors in expander (less intrusive)
|
| 534 |
if 'fetch_errors' in locals() and fetch_errors:
|
|
|
|
| 341 |
except Exception as e:
|
| 342 |
fetch_errors.append(f"Error fetching {source_name} news: {e}")
|
| 343 |
|
| 344 |
+
# Debug output (remove in production)
|
| 345 |
+
if st.session_state.get('debug_mode', False):
|
| 346 |
+
st.write("### Debug: News Fetch Results")
|
| 347 |
+
st.write(f"- Twitter: {len(twitter_df)} items")
|
| 348 |
+
st.write(f"- Reddit: {len(reddit_df)} items")
|
| 349 |
+
st.write(f"- RSS: {len(rss_all_df)} items")
|
| 350 |
+
st.write(f"- AI/Tech: {len(ai_tech_df)} items")
|
| 351 |
+
if fetch_errors:
|
| 352 |
+
st.write(f"- Errors: {len(fetch_errors)}")
|
| 353 |
+
for err in fetch_errors:
|
| 354 |
+
st.write(f" - {err}")
|
| 355 |
+
|
| 356 |
# Clear force refresh flag after fetching is complete
|
| 357 |
if force_refresh:
|
| 358 |
st.session_state.force_refresh = False
|
|
|
|
| 527 |
height="700px"
|
| 528 |
)
|
| 529 |
else:
|
| 530 |
+
# Debug: Check if there's an AI/Tech specific error
|
| 531 |
+
ai_tech_error = next((err for err in fetch_errors if 'ai_tech' in err.lower() or 'AI/Tech' in err), None) if 'fetch_errors' in locals() else None
|
| 532 |
+
|
| 533 |
+
if ai_tech_error:
|
| 534 |
+
# Show error message
|
| 535 |
+
st.markdown(f"""
|
| 536 |
+
<div style="background: linear-gradient(135deg, #1E222D 0%, #131722 100%); border: 1px solid #2A2E39; border-radius: 8px; padding: 30px; text-align: center;">
|
| 537 |
+
<div style="font-size: 48px; margin-bottom: 16px;">⚠️</div>
|
| 538 |
+
<div style="color: #D1D4DC; font-size: 16px; font-weight: 600; margin-bottom: 8px;">AI & Tech News Unavailable</div>
|
| 539 |
+
<div style="color: #787B86; font-size: 13px;">{ai_tech_error}</div>
|
| 540 |
+
</div>
|
| 541 |
+
""", unsafe_allow_html=True)
|
| 542 |
+
else:
|
| 543 |
+
# Show loading message
|
| 544 |
+
st.markdown("""
|
| 545 |
+
<div style="background: linear-gradient(135deg, #1E222D 0%, #131722 100%); border: 1px solid #2A2E39; border-radius: 8px; padding: 30px; text-align: center;">
|
| 546 |
+
<div style="font-size: 48px; margin-bottom: 16px; animation: pulse 2s ease-in-out infinite;">⏳</div>
|
| 547 |
+
<div style="color: #D1D4DC; font-size: 16px; font-weight: 600; margin-bottom: 8px;">Loading AI & Tech News</div>
|
| 548 |
+
<div style="color: #787B86; font-size: 13px;">Aggregating from tech blogs & research...</div>
|
| 549 |
+
<div style="color: #787B86; font-size: 12px; margin-top: 8px; opacity: 0.7;">OpenAI, Google AI, Microsoft, Meta & more</div>
|
| 550 |
+
<div style="color: #FF9500; font-size: 12px; margin-top: 12px;">If this persists, check the "Source Fetch Warnings" section below</div>
|
| 551 |
+
</div>
|
| 552 |
+
<style>
|
| 553 |
+
@keyframes pulse {
|
| 554 |
+
0%, 100% { opacity: 1; transform: scale(1); }
|
| 555 |
+
50% { opacity: 0.6; transform: scale(1.1); }
|
| 556 |
+
}
|
| 557 |
+
</style>
|
| 558 |
+
""", unsafe_allow_html=True)
|
| 559 |
|
| 560 |
# Display fetch errors in expander (less intrusive)
|
| 561 |
if 'fetch_errors' in locals() and fetch_errors:
|
app/utils/news_cache.py
CHANGED
|
@@ -1,6 +1,6 @@
|
|
| 1 |
"""
|
| 2 |
Unified News Caching System
|
| 3 |
-
Centralized cache manager for Twitter, Reddit, and
|
| 4 |
"""
|
| 5 |
|
| 6 |
import hashlib
|
|
@@ -33,6 +33,7 @@ class NewsCacheManager:
|
|
| 33 |
'twitter': {'raw_news': [], 'last_fetch': None, 'ttl': default_ttl},
|
| 34 |
'reddit': {'raw_news': [], 'last_fetch': None, 'ttl': default_ttl},
|
| 35 |
'rss': {'raw_news': [], 'last_fetch': None, 'ttl': default_ttl},
|
|
|
|
| 36 |
'dedup_index': {}, # Global deduplication index
|
| 37 |
'filtered_cache': {} # Cached filtered results
|
| 38 |
}
|
|
@@ -49,7 +50,7 @@ class NewsCacheManager:
|
|
| 49 |
Get news from cache or fetch fresh if needed
|
| 50 |
|
| 51 |
Args:
|
| 52 |
-
source: News source ('twitter', 'reddit', 'rss')
|
| 53 |
fetcher_func: Function to fetch fresh news
|
| 54 |
force_refresh: If True, bypass cache and fetch fresh
|
| 55 |
**kwargs: Arguments to pass to fetcher_func
|
|
@@ -57,7 +58,7 @@ class NewsCacheManager:
|
|
| 57 |
Returns:
|
| 58 |
List of news items
|
| 59 |
"""
|
| 60 |
-
if source not in ['twitter', 'reddit', 'rss']:
|
| 61 |
logger.error(f"Invalid source: {source}")
|
| 62 |
return []
|
| 63 |
|
|
@@ -311,7 +312,7 @@ class NewsCacheManager:
|
|
| 311 |
self._clear_source_from_dedup(source)
|
| 312 |
logger.info(f"🗑️ Cleared cache for {source}")
|
| 313 |
else:
|
| 314 |
-
for src in ['twitter', 'reddit', 'rss']:
|
| 315 |
self.cache[src] = {'raw_news': [], 'last_fetch': None, 'ttl': 180}
|
| 316 |
self.cache['dedup_index'] = {}
|
| 317 |
self.cache['filtered_cache'] = {}
|
|
@@ -340,6 +341,11 @@ class NewsCacheManager:
|
|
| 340 |
'age_seconds': self._get_cache_age('rss'),
|
| 341 |
'is_valid': self._is_cache_valid('rss')
|
| 342 |
},
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 343 |
'dedup_index_size': len(self.cache['dedup_index']),
|
| 344 |
'filtered_cache_size': len(self.cache['filtered_cache'])
|
| 345 |
}
|
|
|
|
| 1 |
"""
|
| 2 |
Unified News Caching System
|
| 3 |
+
Centralized cache manager for Twitter, Reddit, RSS, and AI/Tech news feeds
|
| 4 |
"""
|
| 5 |
|
| 6 |
import hashlib
|
|
|
|
| 33 |
'twitter': {'raw_news': [], 'last_fetch': None, 'ttl': default_ttl},
|
| 34 |
'reddit': {'raw_news': [], 'last_fetch': None, 'ttl': default_ttl},
|
| 35 |
'rss': {'raw_news': [], 'last_fetch': None, 'ttl': default_ttl},
|
| 36 |
+
'ai_tech': {'raw_news': [], 'last_fetch': None, 'ttl': default_ttl},
|
| 37 |
'dedup_index': {}, # Global deduplication index
|
| 38 |
'filtered_cache': {} # Cached filtered results
|
| 39 |
}
|
|
|
|
| 50 |
Get news from cache or fetch fresh if needed
|
| 51 |
|
| 52 |
Args:
|
| 53 |
+
source: News source ('twitter', 'reddit', 'rss', 'ai_tech')
|
| 54 |
fetcher_func: Function to fetch fresh news
|
| 55 |
force_refresh: If True, bypass cache and fetch fresh
|
| 56 |
**kwargs: Arguments to pass to fetcher_func
|
|
|
|
| 58 |
Returns:
|
| 59 |
List of news items
|
| 60 |
"""
|
| 61 |
+
if source not in ['twitter', 'reddit', 'rss', 'ai_tech']:
|
| 62 |
logger.error(f"Invalid source: {source}")
|
| 63 |
return []
|
| 64 |
|
|
|
|
| 312 |
self._clear_source_from_dedup(source)
|
| 313 |
logger.info(f"🗑️ Cleared cache for {source}")
|
| 314 |
else:
|
| 315 |
+
for src in ['twitter', 'reddit', 'rss', 'ai_tech']:
|
| 316 |
self.cache[src] = {'raw_news': [], 'last_fetch': None, 'ttl': 180}
|
| 317 |
self.cache['dedup_index'] = {}
|
| 318 |
self.cache['filtered_cache'] = {}
|
|
|
|
| 341 |
'age_seconds': self._get_cache_age('rss'),
|
| 342 |
'is_valid': self._is_cache_valid('rss')
|
| 343 |
},
|
| 344 |
+
'ai_tech': {
|
| 345 |
+
'items': len(self.cache['ai_tech']['raw_news']),
|
| 346 |
+
'age_seconds': self._get_cache_age('ai_tech'),
|
| 347 |
+
'is_valid': self._is_cache_valid('ai_tech')
|
| 348 |
+
},
|
| 349 |
'dedup_index_size': len(self.cache['dedup_index']),
|
| 350 |
'filtered_cache_size': len(self.cache['filtered_cache'])
|
| 351 |
}
|