Riy777 committed on
Commit
aad7415
·
1 Parent(s): 35b000d

Create sentiment_news.py

Browse files
Files changed (1) hide show
  1. sentiment_news.py +200 -0
sentiment_news.py ADDED
@@ -0,0 +1,200 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import os, asyncio
2
+ import httpx
3
+ from gnews import GNews
4
+ import feedparser
5
+ from datetime import datetime
6
+
7
+ CRYPTO_RSS_FEEDS = {
8
+ "Cointelegraph": "https://cointelegraph.com/rss",
9
+ "CoinDesk": "https://www.coindesk.com/arc/outboundfeeds/rss/",
10
+ "CryptoSlate": "https://cryptoslate.com/feed/",
11
+ "NewsBTC": "https://www.newsbtc.com/feed/",
12
+ "Bitcoin.com": "https://news.bitcoin.com/feed/"
13
+ }
14
+
15
class NewsFetcher:
    """Fetches recent, symbol-specific crypto news from GNews and RSS feeds.

    All fetch methods are best-effort: failures are printed and an empty
    list is returned so one dead source never breaks the aggregate.
    """

    def __init__(self):
        # Shared async HTTP client for all RSS requests. Never closed in the
        # original; call aclose() on shutdown to release the connection pool.
        self.http_client = httpx.AsyncClient(
            timeout=10.0, follow_redirects=True,
            headers={
                'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36',
                'Accept': 'application/json, text/plain, */*',
                'Accept-Language': 'en-US,en;q=0.9',
                'Cache-Control': 'no-cache'
            }
        )
        self.gnews = GNews(language='en', country='US', period='3h', max_results=8)

    async def aclose(self):
        """Close the underlying HTTP client (new, backward-compatible helper)."""
        await self.http_client.aclose()

    async def _fetch_from_gnews(self, symbol: str) -> list:
        """Return GNews result dicts for *symbol* (e.g. 'SOL/USDT'); [] on failure."""
        try:
            base_symbol = symbol.split("/")[0]
            # BUGFIX: the original unconditionally appended
            # '-bitcoin -ethereum -BTC -ETH', which made the query
            # self-contradictory for BTC/ETH symbols ('"BTC" ... -BTC') and
            # guaranteed zero results. Only exclude the majors for altcoins.
            query = f'"{base_symbol}" cryptocurrency'
            if base_symbol.upper() not in ('BTC', 'ETH'):
                query += ' -bitcoin -ethereum -BTC -ETH'
            # GNews is a blocking library; keep it off the event loop.
            news_items = await asyncio.to_thread(self.gnews.get_news, query)
            return news_items
        except Exception as e:
            print(f"Failed to fetch specific news from GNews for {symbol}: {e}")
            return []

    async def _fetch_from_rss_feed(self, feed_url: str, source_name: str, symbol: str) -> list:
        """Fetch *feed_url* and return entries mentioning the symbol; [] on failure.

        Only the first 15 feed entries are scanned, and an entry is kept only
        if the base symbol appears in its title or summary/description.
        """
        try:
            base_symbol = symbol.split('/')[0]
            # The client already follows redirects; this loop is a fallback for
            # servers whose 3xx surfaces as an HTTPStatusError anyway.
            max_redirects = 2
            current_url = feed_url
            response = None
            for attempt in range(max_redirects):
                try:
                    response = await self.http_client.get(current_url)
                    response.raise_for_status()
                    break
                except httpx.HTTPStatusError as e:
                    if e.response.status_code in (301, 302, 307, 308) and 'Location' in e.response.headers:
                        current_url = e.response.headers['Location']
                        continue
                    raise
            else:
                # BUGFIX: the original fell through after exhausting the loop
                # and parsed the last redirect response's body as a feed.
                # Fail loudly instead; the outer handler turns this into [].
                raise httpx.TooManyRedirects(
                    f"Gave up after {max_redirects} redirects for {feed_url}")

            feed = feedparser.parse(response.text)
            news_items = []
            search_term = base_symbol.lower()

            for entry in feed.entries[:15]:
                title = entry.title.lower() if hasattr(entry, 'title') else ''
                # Feeds use either 'summary' (Atom) or 'description' (RSS).
                if hasattr(entry, 'summary'):
                    summary = entry.summary.lower()
                elif hasattr(entry, 'description'):
                    summary = entry.description.lower()
                else:
                    summary = ''
                if search_term in title or search_term in summary:
                    news_items.append({
                        'title': entry.title,
                        'description': summary,
                        'source': source_name,
                        'published': entry.get('published', '')
                    })
            return news_items
        except Exception as e:
            print(f"Failed to fetch specific news from {source_name} RSS for {symbol}: {e}")
            return []

    async def get_news_for_symbol(self, symbol: str) -> str:
        """Aggregate GNews + all RSS sources into one ' | '-joined headline string.

        Fetches every source concurrently, keeps only items that pass the
        relevance filter, and returns at most the first five entries.
        """
        base_symbol = symbol.split("/")[0]
        tasks = [self._fetch_from_gnews(symbol)]
        for name, url in CRYPTO_RSS_FEEDS.items():
            tasks.append(self._fetch_from_rss_feed(url, name, symbol))

        # return_exceptions=True: one failed source must not cancel the rest.
        results = await asyncio.gather(*tasks, return_exceptions=True)
        all_news_text = []

        for result in results:
            if isinstance(result, Exception):
                continue
            for item in result:
                if self._is_directly_relevant_to_symbol(item, base_symbol):
                    title = item.get('title', 'No Title')
                    description = item.get('description', 'No Description')
                    source = item.get('source', 'Unknown Source')
                    published = item.get('published', '')

                    news_entry = f"[{source}] {title}. {description}"
                    if published:
                        news_entry += f" (Published: {published})"

                    all_news_text.append(news_entry)

        if not all_news_text:
            return f"No specific news found for {base_symbol} in the last 3 hours."

        important_news = all_news_text[:5]
        return " | ".join(important_news)

    def _is_directly_relevant_to_symbol(self, news_item: dict, base_symbol: str) -> bool:
        """True if the item mentions the symbol AND at least one crypto keyword.

        The keyword check filters out unrelated hits where the ticker happens
        to appear in non-crypto text (e.g. 'SOL' as a Spanish word).
        """
        title = news_item.get('title', '').lower()
        description = news_item.get('description', '').lower()
        symbol_lower = base_symbol.lower()

        if symbol_lower not in title and symbol_lower not in description:
            return False

        crypto_keywords = [
            'crypto', 'cryptocurrency', 'token', 'blockchain',
            'price', 'market', 'trading', 'exchange', 'defi',
            'coin', 'digital currency', 'altcoin'
        ]

        return any(keyword in title or keyword in description for keyword in crypto_keywords)
120
+
121
class SentimentAnalyzer:
    """Provides market-sentiment context and human-readable whale-activity reports."""

    def __init__(self, data_manager):
        # Data manager must expose an async get_market_context_async() method.
        self.data_manager = data_manager

    async def get_market_sentiment(self):
        """Return the live market context, falling back to neutral data on failure."""
        try:
            context = await self.data_manager.get_market_context_async()
            return context if context else await self.get_fallback_market_context()
        except Exception as e:
            print(f"Failed to get market sentiment: {e}")
            return await self.get_fallback_market_context()

    async def get_fallback_market_context(self):
        """Build a neutral placeholder context used while real data is unavailable."""
        neutral_whale_activity = {
            'sentiment': 'NEUTRAL',
            'description': 'Fallback mode - system initializing',
            'critical_alert': False,
            'transaction_count': 0,
            'total_volume_usd': 0,
            'netflow_analysis': {
                'net_flow': 0,
                'flow_direction': 'BALANCED',
                'market_impact': 'LOW',
            },
        }
        return {
            'timestamp': datetime.now().isoformat(),
            'general_whale_activity': neutral_whale_activity,
            'btc_sentiment': 'NEUTRAL',
            'fear_and_greed_index': 50,
        }

    def format_whale_analysis(self, general_whale_activity, symbol_whale_data, symbol):
        """Render general-market and symbol-specific whale data as a multi-line report."""
        report = []
        critical_flag = " CRITICAL ALERT" if general_whale_activity.get('critical_alert') else ''

        # --- general market section ---
        if not general_whale_activity.get('data_available', False):
            report.append("General Market: No significant general whale data available")
        else:
            netflow = general_whale_activity.get('netflow_analysis', {})
            if not netflow:
                report.append(f"General Market: {general_whale_activity.get('description', 'Activity detected')}{critical_flag}")
            else:
                report.append("General Market Netflow Analysis:")
                report.append(f" • Inflow to Exchanges: ${netflow.get('inflow_to_exchanges', 0):,.0f}")
                report.append(f" • Outflow from Exchanges: ${netflow.get('outflow_from_exchanges', 0):,.0f}")
                report.append(f" • Net Flow: ${netflow.get('net_flow', 0):,.0f} ({netflow.get('flow_direction', 'BALANCED')})")
                report.append(f" • Market Impact: {netflow.get('market_impact', 'UNKNOWN')}{critical_flag}")

                signals = general_whale_activity.get('trading_signals', [])
                if signals:
                    report.append(f" • Trading Signals: {len(signals)} active signals")
                    for sig in signals[:3]:
                        report.append(f" ◦ {sig.get('action')}: {sig.get('reason')} (Confidence: {sig.get('confidence', 0):.2f})")

        # --- symbol-specific section ---
        if not symbol_whale_data.get('data_available', False):
            report.append(f"{symbol} Specific: No contract-based whale data available")
        else:
            report.append(f"{symbol} Specific Whale Activity:")
            report.append(f" • Activity Level: {symbol_whale_data.get('activity_level', 'UNKNOWN')}")
            report.append(f" • Large Transfers: {symbol_whale_data.get('large_transfers_count', 0)}")
            report.append(f" • Total Volume: ${symbol_whale_data.get('total_volume', 0):,.0f}")

            recent = symbol_whale_data.get('recent_large_transfers', [])
            if recent:
                report.append(f" • Recent Large Transfers: {len(recent)}")

        return "\n".join(report)