Dmitry Beresnev committed on
Commit
d34f6ef
·
1 Parent(s): 9e5d0ad

fix logger

Browse files
app/services/news_monitor.py CHANGED
@@ -9,13 +9,19 @@ from datetime import datetime, timedelta
9
  from typing import List, Dict, Optional
10
  import streamlit as st
11
  import time
 
 
 
 
 
 
12
 
13
  try:
14
  import snscrape.modules.twitter as sntwitter
15
  SNSCRAPE_AVAILABLE = True
16
  except ImportError:
17
  SNSCRAPE_AVAILABLE = False
18
- print("Warning: snscrape not available. Install with: pip install snscrape")
19
 
20
 
21
  class FinanceNewsMonitor:
@@ -207,7 +213,7 @@ class FinanceNewsMonitor:
207
  max_tweets: Total tweets to fetch (distributed across sources)
208
  """
209
  if not SNSCRAPE_AVAILABLE:
210
- print("⚠️ snscrape not available - using mock data")
211
  return _self._get_mock_news()
212
 
213
  all_tweets = []
@@ -256,14 +262,14 @@ class FinanceNewsMonitor:
256
  failed_sources += 1
257
  error_msg = str(e).lower()
258
  if 'blocked' in error_msg or '404' in error_msg:
259
- print(f"⚠️ Twitter/X API blocked access for {source_name}")
260
  else:
261
- print(f"Error scraping {source_name}: {e}")
262
  continue
263
 
264
  # If Twitter/X blocked all sources, fall back to mock data
265
  if failed_sources >= len(_self.SOURCES) or len(all_tweets) == 0:
266
- print("⚠️ Twitter/X API unavailable - falling back to mock data for demonstration")
267
  return _self._get_mock_news()
268
 
269
  # Sort by impact and timestamp
 
9
  from typing import List, Dict, Optional
10
  import streamlit as st
11
  import time
12
+ import logging
13
+ import re
14
+
15
+ # Configure logging
16
+ logging.basicConfig(level=logging.INFO)
17
+ logger = logging.getLogger(__name__)
18
 
19
  try:
20
  import snscrape.modules.twitter as sntwitter
21
  SNSCRAPE_AVAILABLE = True
22
  except ImportError:
23
  SNSCRAPE_AVAILABLE = False
24
+ logger.warning("snscrape not available. Install with: pip install snscrape")
25
 
26
 
27
  class FinanceNewsMonitor:
 
213
  max_tweets: Total tweets to fetch (distributed across sources)
214
  """
215
  if not SNSCRAPE_AVAILABLE:
216
+ logger.info("snscrape not available - using mock data")
217
  return _self._get_mock_news()
218
 
219
  all_tweets = []
 
262
  failed_sources += 1
263
  error_msg = str(e).lower()
264
  if 'blocked' in error_msg or '404' in error_msg:
265
+ logger.warning(f"Twitter/X API blocked access for {source_name}")
266
  else:
267
+ logger.error(f"Error scraping {source_name}: {e}")
268
  continue
269
 
270
  # If Twitter/X blocked all sources, fall back to mock data
271
  if failed_sources >= len(_self.SOURCES) or len(all_tweets) == 0:
272
+ logger.warning("Twitter/X API unavailable - falling back to mock data for demonstration")
273
  return _self._get_mock_news()
274
 
275
  # Sort by impact and timestamp
app/services/news_monitor_twikit.py CHANGED
@@ -11,8 +11,13 @@ import streamlit as st
11
  import os
12
  import asyncio
13
  import re
 
14
  from dotenv import load_dotenv
15
 
 
 
 
 
16
  # Load environment variables
17
  load_dotenv()
18
 
@@ -21,7 +26,7 @@ try:
21
  TWIKIT_AVAILABLE = True
22
  except ImportError:
23
  TWIKIT_AVAILABLE = False
24
- print("Warning: twikit not available. Install with: pip install twikit")
25
 
26
 
27
  class FinanceNewsMonitor:
@@ -208,8 +213,8 @@ class FinanceNewsMonitor:
208
  password = os.getenv('TWITTER_PASSWORD')
209
 
210
  if not all([username, email, password]):
211
- print("⚠️ Twitter credentials not found in environment variables")
212
- print(" Set TWITTER_USERNAME, TWITTER_EMAIL, TWITTER_PASSWORD in .env")
213
  return False
214
 
215
  await self.client.login(
@@ -219,11 +224,11 @@ class FinanceNewsMonitor:
219
  )
220
 
221
  self.authenticated = True
222
- print("Successfully authenticated with Twitter/X")
223
  return True
224
 
225
  except Exception as e:
226
- print(f"⚠️ Twitter authentication failed: {e}")
227
  return False
228
 
229
  async def _scrape_twitter_async(self, max_tweets: int = 100) -> List[Dict]:
@@ -284,16 +289,16 @@ class FinanceNewsMonitor:
284
  failed_sources += 1
285
  error_msg = str(e).lower()
286
  if 'rate limit' in error_msg:
287
- print(f"⚠️ Rate limited for {source_name}")
288
  elif 'unauthorized' in error_msg or 'forbidden' in error_msg:
289
- print(f"⚠️ Access denied for {source_name}")
290
  else:
291
- print(f"Error scraping {source_name}: {e}")
292
  continue
293
 
294
  # If all sources failed, fall back to mock data
295
  if failed_sources >= len(self.SOURCES) or len(all_tweets) == 0:
296
- print("⚠️ Twitter/X scraping failed - falling back to mock data")
297
  return self._get_mock_news()
298
 
299
  # Sort by impact and timestamp
@@ -311,7 +316,7 @@ class FinanceNewsMonitor:
311
  max_tweets: Total tweets to fetch (distributed across sources)
312
  """
313
  if not TWIKIT_AVAILABLE:
314
- print("⚠️ Twikit not available - using mock data")
315
  return _self._get_mock_news()
316
 
317
  try:
@@ -322,7 +327,7 @@ class FinanceNewsMonitor:
322
  loop.close()
323
  return result
324
  except Exception as e:
325
- print(f"⚠️ Error in async scraping: {e}")
326
  return _self._get_mock_news()
327
 
328
  def _categorize_tweet(self, text: str, source_specialization: List[str]) -> str:
 
11
  import os
12
  import asyncio
13
  import re
14
+ import logging
15
  from dotenv import load_dotenv
16
 
17
+ # Configure logging
18
+ logging.basicConfig(level=logging.INFO)
19
+ logger = logging.getLogger(__name__)
20
+
21
  # Load environment variables
22
  load_dotenv()
23
 
 
26
  TWIKIT_AVAILABLE = True
27
  except ImportError:
28
  TWIKIT_AVAILABLE = False
29
+ logger.warning("twikit not available. Install with: pip install twikit")
30
 
31
 
32
  class FinanceNewsMonitor:
 
213
  password = os.getenv('TWITTER_PASSWORD')
214
 
215
  if not all([username, email, password]):
216
+ logger.warning("Twitter credentials not found in environment variables")
217
+ logger.info("Set TWITTER_USERNAME, TWITTER_EMAIL, TWITTER_PASSWORD in .env")
218
  return False
219
 
220
  await self.client.login(
 
224
  )
225
 
226
  self.authenticated = True
227
+ logger.info("Successfully authenticated with Twitter/X")
228
  return True
229
 
230
  except Exception as e:
231
+ logger.error(f"Twitter authentication failed: {e}")
232
  return False
233
 
234
  async def _scrape_twitter_async(self, max_tweets: int = 100) -> List[Dict]:
 
289
  failed_sources += 1
290
  error_msg = str(e).lower()
291
  if 'rate limit' in error_msg:
292
+ logger.warning(f"Rate limited for {source_name}")
293
  elif 'unauthorized' in error_msg or 'forbidden' in error_msg:
294
+ logger.warning(f"Access denied for {source_name}")
295
  else:
296
+ logger.error(f"Error scraping {source_name}: {e}")
297
  continue
298
 
299
  # If all sources failed, fall back to mock data
300
  if failed_sources >= len(self.SOURCES) or len(all_tweets) == 0:
301
+ logger.warning("Twitter/X scraping failed - falling back to mock data")
302
  return self._get_mock_news()
303
 
304
  # Sort by impact and timestamp
 
316
  max_tweets: Total tweets to fetch (distributed across sources)
317
  """
318
  if not TWIKIT_AVAILABLE:
319
+ logger.info("Twikit not available - using mock data")
320
  return _self._get_mock_news()
321
 
322
  try:
 
327
  loop.close()
328
  return result
329
  except Exception as e:
330
+ logger.error(f"Error in async scraping: {e}")
331
  return _self._get_mock_news()
332
 
333
  def _categorize_tweet(self, text: str, source_specialization: List[str]) -> str: