jetpackjules Claude committed on
Commit
86aebf5
·
1 Parent(s): 2c3b593

Add comprehensive logging for debugging Hugging Face deployment

Browse files

🔍 Enhanced Logging Features:
- Added detailed startup logging with timestamps and component initialization
- Comprehensive sentiment analysis logging with timing information
- Step-by-step progress tracking for each stock analysis
- Error handling with detailed tracebacks and error types
- Performance timing for Reddit/Google News searches
- Sample content logging for debugging data quality

📊 Startup Logging:
- Python version and working directory info
- Alpaca client initialization status
- Sentiment analyzer (VADER/TextBlob) initialization
- Dashboard creation and event handler setup

🧠 Sentiment Analysis Logging:
- Individual stock analysis progress with timing
- News source breakdown (Reddit posts vs Google News articles)
- Sample titles from each source for verification
- Prediction results with confidence levels
- Detailed error handling for failed analyses

⚡ Performance Tracking:
- Time spent on Reddit searches vs Google News
- Total analysis time per stock
- Overall table refresh completion status

This will help identify exactly where the deployment gets stuck during the "Preparing Space" phase.

🤖 Generated with [Claude Code](https://claude.ai/code)

Co-Authored-By: Claude <noreply@anthropic.com>

Files changed (1) hide show
  1. app.py +127 -18
app.py CHANGED
@@ -5,6 +5,7 @@ Beautiful Vercel-style dashboard with VM data integration
5
  """
6
 
7
  import os
 
8
  import pandas as pd
9
  import gradio as gr
10
  import plotly.graph_objects as go
@@ -27,17 +28,58 @@ API_KEY = os.getenv('ALPACA_API_KEY', 'PK2FD9B2S86LHR7ZBHG1')
27
  SECRET_KEY = os.getenv('ALPACA_SECRET_KEY', 'QPmGPDgbPArvHv6cldBXc7uWddapYcIAnBhtkuBW')
28
  VM_API_URL = os.getenv('VM_API_URL', 'http://34.56.193.18:8090') # Set this in Hugging Face
29
 
30
- # Configure logging
31
- logging.basicConfig(level=logging.INFO)
 
 
 
 
 
 
32
  logger = logging.getLogger(__name__)
33
 
 
 
 
 
 
34
  # Initialize Alpaca clients
35
- trading_client = TradingClient(api_key=API_KEY, secret_key=SECRET_KEY)
36
- data_client = StockHistoricalDataClient(API_KEY, SECRET_KEY)
 
 
 
 
 
 
 
 
 
 
 
 
 
37
 
38
  # Initialize sentiment analyzers
39
- vader = SentimentIntensityAnalyzer()
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
40
  headers = {'User-Agent': 'TradingHistoryBacktester/1.0'}
 
41
 
42
  # Modern color scheme
43
  COLORS = {
@@ -353,10 +395,20 @@ def get_order_history():
353
 
354
  def refresh_investment_performance_table():
355
  """Refresh investment performance table with P&L and sentiment analysis for all trading symbols"""
 
 
356
  # Get IPO data and orders
 
357
  ipos = fetch_from_vm('ipos?limit=100', [])
 
 
 
358
  orders = get_order_history()
 
 
 
359
  positions = get_current_positions()
 
360
 
361
  # Create proper empty DataFrame with correct column names
362
  columns = ['Symbol', 'Status', 'IPO Price', 'Buy Price', 'Sell Price', 'Investment', 'P&L ($)', 'P&L (%)', 'Sentiment', 'Predicted', 'Date']
@@ -465,14 +517,21 @@ def refresh_investment_performance_table():
465
  pl_percent_str = f"{pl_arrow} <span style='color: {pl_color}; font-weight: 600;'>{abs(pl_percent):.2f}%</span>"
466
 
467
  # ADD SENTIMENT ANALYSIS FOR EACH STOCK
468
- logger.info(f"Running sentiment analysis for {symbol}...")
 
469
  try:
470
  # Get pre-investment news (quick version)
 
471
  news_items = get_pre_investment_news(symbol, investment_time, hours_before=12)
 
472
 
473
  # Analyze sentiment
 
474
  avg_sentiment, predicted_change, prediction_label, source_breakdown = analyze_pre_investment_sentiment(news_items)
475
 
 
 
 
476
  # Format sentiment display
477
  if prediction_label == "bullish":
478
  sentiment_display = f"<span style='color: #00d647; font-weight: 600;'>πŸš€ {prediction_label.title()}</span>"
@@ -491,12 +550,27 @@ def refresh_investment_performance_table():
491
 
492
  reddit_count = len(source_breakdown.get('Reddit', []))
493
  news_count = len(source_breakdown.get('Google News', []))
494
- logger.info(f"{symbol} sentiment: {prediction_label} ({predicted_change:+.1f}%) - Reddit: {reddit_count}, News: {news_count}")
 
 
 
 
 
 
 
 
495
 
496
  except Exception as e:
497
- logger.warning(f"Sentiment analysis failed for {symbol}: {e}")
 
 
 
 
498
  sentiment_display = "<span style='color: #8b949e;'>❓ Error</span>"
499
  predicted_display = "<span style='color: #8b949e;'>N/A</span>"
 
 
 
500
 
501
  invested_data.append({
502
  'Symbol': symbol,
@@ -516,8 +590,10 @@ def refresh_investment_performance_table():
516
 
517
  # Sort by date (most recent first)
518
  invested_data.sort(key=lambda x: x['_sort_date'], reverse=True)
 
519
 
520
  df = pd.DataFrame(invested_data)
 
521
  return df
522
 
523
  def refresh_investment_performance_html():
@@ -1313,19 +1389,29 @@ def get_pre_investment_news(symbol, investment_time, hours_before=12):
1313
  cutoff_time = investment_time - timedelta(minutes=30) # 30 min buffer
1314
  search_start = investment_time - timedelta(hours=hours_before)
1315
 
1316
- logger.info(f"Getting news for {symbol} between {search_start.strftime('%Y-%m-%d %H:%M')} and {cutoff_time.strftime('%Y-%m-%d %H:%M')}")
 
 
1317
 
1318
  all_news = []
1319
 
1320
  # Get Reddit posts
 
 
1321
  reddit_posts = get_reddit_pre_investment(symbol, search_start, cutoff_time)
 
 
1322
  all_news.extend(reddit_posts)
1323
 
1324
  # Get Google News
 
 
1325
  google_news = get_google_news_pre_investment(symbol, search_start, cutoff_time)
 
 
1326
  all_news.extend(google_news)
1327
 
1328
- logger.info(f"Total news sources found: {len(all_news)}")
1329
  return all_news
1330
 
1331
  def get_reddit_pre_investment(symbol, start_time, cutoff_time):
@@ -1910,11 +1996,15 @@ custom_css = """
1910
  """
1911
 
1912
  def create_dashboard():
1913
- with gr.Blocks(
1914
- title="πŸš€ Premium Trading Dashboard",
1915
- theme=gr.themes.Soft(primary_hue="blue"),
1916
- css=custom_css
1917
- ) as demo:
 
 
 
 
1918
 
1919
  # Header
1920
  gr.HTML("""
@@ -2096,6 +2186,8 @@ def create_dashboard():
2096
  </div>
2097
  """)
2098
 
 
 
2099
  # Event Handlers
2100
 
2101
  # Portfolio tab
@@ -2322,10 +2414,27 @@ def create_dashboard():
2322
  demo.load(fn=refresh_ipo_discoveries_table, outputs=[ipo_table])
2323
  demo.load(fn=refresh_investment_performance_html, outputs=[investment_performance_table])
2324
 
2325
- return demo
 
 
 
 
 
2326
 
2327
  # Create and launch
2328
- demo = create_dashboard()
 
 
 
 
 
 
2329
 
2330
  if __name__ == "__main__":
2331
- demo.launch()
 
 
 
 
 
 
 
5
  """
6
 
7
  import os
8
+ import sys
9
  import pandas as pd
10
  import gradio as gr
11
  import plotly.graph_objects as go
 
28
  SECRET_KEY = os.getenv('ALPACA_SECRET_KEY', 'QPmGPDgbPArvHv6cldBXc7uWddapYcIAnBhtkuBW')
29
  VM_API_URL = os.getenv('VM_API_URL', 'http://34.56.193.18:8090') # Set this in Hugging Face
30
 
31
+ # Configure detailed logging for debugging
32
+ logging.basicConfig(
33
+ level=logging.INFO,
34
+ format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
35
+ handlers=[
36
+ logging.StreamHandler(),
37
+ ]
38
+ )
39
  logger = logging.getLogger(__name__)
40
 
41
+ # Log startup information
42
+ logger.info("πŸš€ Starting Premium Trading Dashboard...")
43
+ logger.info(f"Python version: {sys.version}")
44
+ logger.info(f"Working directory: {os.getcwd()}")
45
+
46
  # Initialize Alpaca clients
47
+ logger.info("πŸ”Œ Initializing Alpaca trading client...")
48
+ try:
49
+ trading_client = TradingClient(api_key=API_KEY, secret_key=SECRET_KEY)
50
+ logger.info("βœ… Alpaca trading client initialized successfully")
51
+ except Exception as e:
52
+ logger.error(f"❌ Failed to initialize Alpaca trading client: {e}")
53
+ raise
54
+
55
+ logger.info("πŸ“Š Initializing Alpaca data client...")
56
+ try:
57
+ data_client = StockHistoricalDataClient(API_KEY, SECRET_KEY)
58
+ logger.info("βœ… Alpaca data client initialized successfully")
59
+ except Exception as e:
60
+ logger.error(f"❌ Failed to initialize Alpaca data client: {e}")
61
+ raise
62
 
63
  # Initialize sentiment analyzers
64
+ logger.info("🧠 Initializing sentiment analysis engines...")
65
+ try:
66
+ vader = SentimentIntensityAnalyzer()
67
+ logger.info("βœ… VADER sentiment analyzer initialized")
68
+ except Exception as e:
69
+ logger.error(f"❌ Failed to initialize VADER: {e}")
70
+ raise
71
+
72
+ try:
73
+ from textblob import TextBlob
74
+ # Test TextBlob
75
+ test_blob = TextBlob("test")
76
+ logger.info("βœ… TextBlob sentiment analyzer initialized")
77
+ except Exception as e:
78
+ logger.error(f"❌ Failed to initialize TextBlob: {e}")
79
+ raise
80
+
81
  headers = {'User-Agent': 'TradingHistoryBacktester/1.0'}
82
+ logger.info("βœ… HTTP headers configured")
83
 
84
  # Modern color scheme
85
  COLORS = {
 
395
 
396
  def refresh_investment_performance_table():
397
  """Refresh investment performance table with P&L and sentiment analysis for all trading symbols"""
398
+ logger.info("πŸ“Š Starting investment performance table refresh...")
399
+
400
  # Get IPO data and orders
401
+ logger.info("πŸ”Œ Fetching IPO data from VM...")
402
  ipos = fetch_from_vm('ipos?limit=100', [])
403
+ logger.info(f"πŸ“ˆ Retrieved {len(ipos)} IPO records from VM")
404
+
405
+ logger.info("πŸ“‹ Fetching order history from Alpaca...")
406
  orders = get_order_history()
407
+ logger.info(f"πŸ“ Retrieved {len(orders)} orders from Alpaca")
408
+
409
+ logger.info("πŸ’Ό Fetching current positions from Alpaca...")
410
  positions = get_current_positions()
411
+ logger.info(f"🏦 Retrieved {len(positions)} current positions")
412
 
413
  # Create proper empty DataFrame with correct column names
414
  columns = ['Symbol', 'Status', 'IPO Price', 'Buy Price', 'Sell Price', 'Investment', 'P&L ($)', 'P&L (%)', 'Sentiment', 'Predicted', 'Date']
 
517
  pl_percent_str = f"{pl_arrow} <span style='color: {pl_color}; font-weight: 600;'>{abs(pl_percent):.2f}%</span>"
518
 
519
  # ADD SENTIMENT ANALYSIS FOR EACH STOCK
520
+ logger.info(f"🧠 Starting sentiment analysis for {symbol}...")
521
+ start_time = time.time()
522
  try:
523
  # Get pre-investment news (quick version)
524
+ logger.info(f"πŸ“° Gathering pre-investment news for {symbol}...")
525
  news_items = get_pre_investment_news(symbol, investment_time, hours_before=12)
526
+ logger.info(f"πŸ“‘ Found {len(news_items)} total news items for {symbol}")
527
 
528
  # Analyze sentiment
529
+ logger.info(f"πŸ” Analyzing sentiment for {symbol}...")
530
  avg_sentiment, predicted_change, prediction_label, source_breakdown = analyze_pre_investment_sentiment(news_items)
531
 
532
+ analysis_time = time.time() - start_time
533
+ logger.info(f"⚑ Sentiment analysis for {symbol} completed in {analysis_time:.1f}s")
534
+
535
  # Format sentiment display
536
  if prediction_label == "bullish":
537
  sentiment_display = f"<span style='color: #00d647; font-weight: 600;'>πŸš€ {prediction_label.title()}</span>"
 
550
 
551
  reddit_count = len(source_breakdown.get('Reddit', []))
552
  news_count = len(source_breakdown.get('Google News', []))
553
+ logger.info(f"🎯 {symbol} RESULTS: {prediction_label.upper()} ({predicted_change:+.1f}%) | Reddit: {reddit_count} posts | News: {news_count} articles")
554
+
555
+ # Log sample titles for debugging
556
+ if reddit_count > 0:
557
+ sample_reddit = source_breakdown['Reddit'][0]['title'][:50]
558
+ logger.info(f"πŸ“± Sample Reddit: {sample_reddit}...")
559
+ if news_count > 0:
560
+ sample_news = source_breakdown['Google News'][0]['title'][:50]
561
+ logger.info(f"πŸ“° Sample News: {sample_news}...")
562
 
563
  except Exception as e:
564
+ analysis_time = time.time() - start_time
565
+ logger.error(f"❌ Sentiment analysis failed for {symbol} after {analysis_time:.1f}s: {str(e)}")
566
+ logger.error(f"πŸ” Error type: {type(e).__name__}")
567
+ import traceback
568
+ logger.error(f"πŸ“‹ Traceback: {traceback.format_exc()[:200]}...")
569
  sentiment_display = "<span style='color: #8b949e;'>❓ Error</span>"
570
  predicted_display = "<span style='color: #8b949e;'>N/A</span>"
571
+
572
+ # Continue with next stock instead of failing completely
573
+ pass
574
 
575
  invested_data.append({
576
  'Symbol': symbol,
 
590
 
591
  # Sort by date (most recent first)
592
  invested_data.sort(key=lambda x: x['_sort_date'], reverse=True)
593
+ logger.info(f"πŸ“‹ Processed {len(invested_data)} investments with sentiment analysis")
594
 
595
  df = pd.DataFrame(invested_data)
596
+ logger.info(f"βœ… Investment performance table refresh completed - {len(df)} rows")
597
  return df
598
 
599
  def refresh_investment_performance_html():
 
1389
  cutoff_time = investment_time - timedelta(minutes=30) # 30 min buffer
1390
  search_start = investment_time - timedelta(hours=hours_before)
1391
 
1392
+ logger.info(f"πŸ” NEWS SEARCH for {symbol}:")
1393
+ logger.info(f" πŸ“… Time window: {search_start.strftime('%Y-%m-%d %H:%M')} β†’ {cutoff_time.strftime('%Y-%m-%d %H:%M')}")
1394
+ logger.info(f" ⏰ Search duration: {hours_before} hours before investment")
1395
 
1396
  all_news = []
1397
 
1398
  # Get Reddit posts
1399
+ logger.info(f"🧡 Starting Reddit search for {symbol}...")
1400
+ reddit_start = time.time()
1401
  reddit_posts = get_reddit_pre_investment(symbol, search_start, cutoff_time)
1402
+ reddit_time = time.time() - reddit_start
1403
+ logger.info(f"βœ… Reddit search completed in {reddit_time:.1f}s - found {len(reddit_posts)} posts")
1404
  all_news.extend(reddit_posts)
1405
 
1406
  # Get Google News
1407
+ logger.info(f"πŸ“° Starting Google News search for {symbol}...")
1408
+ news_start = time.time()
1409
  google_news = get_google_news_pre_investment(symbol, search_start, cutoff_time)
1410
+ news_time = time.time() - news_start
1411
+ logger.info(f"βœ… Google News search completed in {news_time:.1f}s - found {len(google_news)} articles")
1412
  all_news.extend(google_news)
1413
 
1414
+ logger.info(f"πŸ“Š TOTAL NEWS GATHERED for {symbol}: {len(all_news)} items ({len(reddit_posts)} Reddit + {len(google_news)} News)")
1415
  return all_news
1416
 
1417
  def get_reddit_pre_investment(symbol, start_time, cutoff_time):
 
1996
  """
1997
 
1998
  def create_dashboard():
1999
+ logger.info("🎨 Creating Gradio dashboard interface...")
2000
+
2001
+ try:
2002
+ with gr.Blocks(
2003
+ title="πŸš€ Premium Trading Dashboard",
2004
+ theme=gr.themes.Soft(primary_hue="blue"),
2005
+ css=custom_css
2006
+ ) as demo:
2007
+ logger.info("πŸ–ΌοΈ Dashboard blocks created successfully")
2008
 
2009
  # Header
2010
  gr.HTML("""
 
2186
  </div>
2187
  """)
2188
 
2189
+ logger.info("πŸ”— Setting up event handlers...")
2190
+
2191
  # Event Handlers
2192
 
2193
  # Portfolio tab
 
2414
  demo.load(fn=refresh_ipo_discoveries_table, outputs=[ipo_table])
2415
  demo.load(fn=refresh_investment_performance_html, outputs=[investment_performance_table])
2416
 
2417
+ logger.info("βœ… All event handlers configured successfully")
2418
+ return demo
2419
+
2420
+ except Exception as e:
2421
+ logger.error(f"❌ Failed to create dashboard: {e}")
2422
+ raise
2423
 
2424
  # Create and launch
2425
+ logger.info("πŸ—οΈ Building dashboard...")
2426
+ try:
2427
+ demo = create_dashboard()
2428
+ logger.info("βœ… Dashboard created successfully!")
2429
+ except Exception as e:
2430
+ logger.error(f"❌ Dashboard creation failed: {e}")
2431
+ raise
2432
 
2433
  if __name__ == "__main__":
2434
+ logger.info("πŸš€ Launching dashboard server...")
2435
+ try:
2436
+ demo.launch()
2437
+ logger.info("βœ… Dashboard launched successfully!")
2438
+ except Exception as e:
2439
+ logger.error(f"❌ Dashboard launch failed: {e}")
2440
+ raise