khjhs60199 committed on
Commit
deb1a9b
·
verified ·
1 Parent(s): 6616e71

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +270 -125
app.py CHANGED
@@ -8,11 +8,8 @@ import time
8
  from datetime import datetime, timedelta
9
  from typing import List, Dict, Optional
10
  import os
11
- from fastapi import FastAPI, HTTPException
12
- from fastapi.middleware.cors import CORSMiddleware
13
- from fastapi.responses import JSONResponse
14
  import json
15
- from pydantic import BaseModel
16
 
17
  from crawler import CnYesNewsCrawler
18
  from sentiment_analyzer import SentimentAnalyzer
@@ -24,6 +21,9 @@ from utils import setup_logging, format_news_for_display
24
  setup_logging()
25
  logger = logging.getLogger(__name__)
26
 
 
 
 
27
  class NewsApp:
28
  def __init__(self):
29
  self.db = NewsDatabase()
@@ -37,7 +37,7 @@ class NewsApp:
37
  self.current_progress = "正在初始化系統..."
38
  self.is_crawling = False
39
  self.is_initialized = False
40
- self.auto_crawl_completed = False
41
 
42
  # 上次新聞更新時間,用於防止無意義的刷新
43
  self.last_news_update = 0
@@ -74,7 +74,7 @@ class NewsApp:
74
  self.update_progress("系統初始化完成,開始自動爬取新聞...")
75
  logger.info("所有組件初始化完成")
76
 
77
- # 自動執行第一次爬取
78
  self._auto_initial_crawl()
79
 
80
  except Exception as e:
@@ -90,6 +90,7 @@ class NewsApp:
90
  """自動執行初始爬取"""
91
  def auto_crawl_task():
92
  try:
 
93
  time.sleep(3)
94
 
95
  self.update_progress("🚀 自動開始首次爬取...")
@@ -98,7 +99,7 @@ class NewsApp:
98
  # 檢查資料庫是否已有最近的新聞
99
  recent_news = self.db.get_recent_news(category="all", days=1)
100
 
101
- if len(recent_news) < 10:
102
  self.update_progress("📊 檢測到新聞數量較少,開始自動爬取...")
103
  results = self.crawler.crawl_all_categories(unlimited=True)
104
 
@@ -120,6 +121,7 @@ class NewsApp:
120
  finally:
121
  self.is_crawling = False
122
 
 
123
  auto_crawl_thread = threading.Thread(target=auto_crawl_task, daemon=True)
124
  auto_crawl_thread.start()
125
 
@@ -133,20 +135,24 @@ class NewsApp:
133
  def get_progress(self) -> tuple:
134
  """獲取當前進度和是否需要更新"""
135
  current_time = time.time()
136
- needs_update = (current_time - self.last_progress_update) < 5
 
137
  return self.current_progress, needs_update
138
 
139
  def get_latest_news(self, category: str = "all", days: int = 7,
140
  keyword: str = "", sentiment_filter: str = "all",
141
  force_refresh: bool = False) -> str:
142
- """獲取最新新聞並格式化顯示"""
143
  try:
 
144
  current_time = time.time()
145
  if not force_refresh and (current_time - self.last_news_update) < 5:
 
146
  pass
147
 
148
  self.last_news_update = current_time
149
 
 
150
  logger.info(f"獲取新聞 - 分類: {category}, 天數: {days}, 關鍵字: '{keyword}', 情緒: {sentiment_filter}")
151
 
152
  news_data = self.db.get_recent_news(
@@ -157,6 +163,7 @@ class NewsApp:
157
  )
158
 
159
  if not news_data:
 
160
  if not self.auto_crawl_completed:
161
  return "⏳ 系統正在自動爬取新聞,請稍候..."
162
 
@@ -173,6 +180,7 @@ class NewsApp:
173
  filter_text = "、".join(filter_desc) if filter_desc else "所有條件"
174
  return f"📰 暫無符合條件的新聞資料 ({filter_text}),請調整篩選條件或執行爬蟲任務"
175
 
 
176
  filter_parts = []
177
  if category != "all":
178
  filter_parts.append(self._get_category_name(category))
@@ -217,7 +225,7 @@ class NewsApp:
217
  return sentiment_names.get(sentiment, sentiment)
218
 
219
  def manual_crawl(self, unlimited: bool = True) -> str:
220
- """手動觸發爬蟲"""
221
  if not self.is_initialized:
222
  return "⚠️ 系統還在初始化中,請稍後再試"
223
 
@@ -230,6 +238,7 @@ class NewsApp:
230
 
231
  self.update_progress(f"🚀 手動爬蟲開始({mode_text}模式)")
232
 
 
233
  results = self.crawler.crawl_all_categories(unlimited=unlimited)
234
 
235
  total_articles = sum(len(articles) for articles in results.values())
@@ -250,6 +259,7 @@ class NewsApp:
250
  try:
251
  stats = self.db.get_statistics()
252
 
 
253
  auto_status = "✅ 已完成" if self.auto_crawl_completed else "⏳ 進行中" if self.is_crawling else "⚠️ 未執行"
254
 
255
  return f"""
@@ -303,8 +313,7 @@ class NewsApp:
303
  'success': True,
304
  'count': len(api_data),
305
  'data': api_data,
306
- 'auto_crawl_completed': self.auto_crawl_completed,
307
- 'timestamp': datetime.now().isoformat()
308
  }
309
 
310
  except Exception as e:
@@ -312,16 +321,89 @@ class NewsApp:
312
  return {
313
  'success': False,
314
  'error': str(e),
315
- 'data': [],
316
- 'timestamp': datetime.now().isoformat()
317
  }
318
 
319
  # 初始化應用
320
  app = NewsApp()
321
 
322
- # 定義請求模型
323
- class CrawlRequest(BaseModel):
324
- unlimited: bool = True
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
325
 
326
  # 創建 Gradio 介面
327
  def create_interface():
@@ -350,128 +432,191 @@ def create_interface():
350
  🎯 **智能分析**: 使用 RoBERTa 模型進行情緒分析
351
  🔍 **多條件篩選**: 支援時間段、關鍵字、情緒篩選
352
  📊 **即時統計**: 提供詳細的新聞統計資訊
353
- 📡 **API接口**: https://khjhs60199-pycrawing.hf.space/api
354
  """)
355
 
356
- # ... (其餘Gradio介面代碼保持不變) ...
357
-
358
- return interface
359
-
360
- # 設定API路由
361
- def setup_api_routes(app_instance):
362
- """設定API路由"""
363
-
364
- @app_instance.get("/api")
365
- async def api_info():
366
- """API資訊頁面"""
367
- return {
368
- 'name': '股市新聞情緒分析API',
369
- 'version': '1.0',
370
- 'status': 'running',
371
- 'endpoints': {
372
- 'news': '/api/news',
373
- 'stats': '/api/stats',
374
- 'crawl': '/api/crawl',
375
- 'progress': '/api/progress'
376
- },
377
- 'timestamp': datetime.now().isoformat()
378
- }
379
-
380
- @app_instance.get("/api/news")
381
- async def api_get_news(
382
- category: str = "all",
383
- days: int = 7,
384
- keyword: str = "",
385
- sentiment: str = "all"
386
- ):
387
- """獲取新聞列表API"""
388
- try:
389
- result = app.get_news_api_data(category, days, keyword, sentiment)
390
- return result
391
- except Exception as e:
392
- raise HTTPException(status_code=500, detail=str(e))
393
-
394
- @app_instance.get("/api/stats")
395
- async def api_get_stats():
396
- """獲取統計信息API"""
397
- try:
398
- stats = app.db.get_statistics()
399
- return {
400
- 'success': True,
401
- 'data': stats,
402
- 'auto_crawl_completed': app.auto_crawl_completed,
403
- 'is_initialized': app.is_initialized,
404
- 'is_crawling': app.is_crawling,
405
- 'timestamp': datetime.now().isoformat()
406
- }
407
- except Exception as e:
408
- raise HTTPException(status_code=500, detail=str(e))
409
-
410
- @app_instance.post("/api/crawl")
411
- async def api_manual_crawl(request: CrawlRequest):
412
- """手動觸發爬蟲API"""
413
- try:
414
- if not app.is_initialized:
415
- raise HTTPException(status_code=400, detail="系統還在初始化中")
416
 
417
- if app.is_crawling:
418
- raise HTTPException(status_code=400, detail="爬蟲正在運行中")
 
 
 
 
 
 
 
 
 
 
 
 
419
 
420
- # 在背景執行爬蟲
421
- def run_crawl():
422
- app.manual_crawl(unlimited=request.unlimited)
 
 
423
 
424
- threading.Thread(target=run_crawl, daemon=True).start()
 
 
 
 
 
 
 
425
 
426
- mode_text = "無限制" if request.unlimited else "限制"
427
- return {
428
- 'success': True,
429
- 'message': f'爬蟲任務已啟動({mode_text}模式)',
430
- 'timestamp': datetime.now().isoformat()
431
- }
432
- except HTTPException:
433
- raise
434
- except Exception as e:
435
- raise HTTPException(status_code=500, detail=str(e))
436
-
437
- @app_instance.get("/api/progress")
438
- async def api_get_progress():
439
- """獲取爬蟲進度API"""
440
- try:
441
- progress, needs_update = app.get_progress()
442
- return {
443
- 'progress': progress,
444
- 'is_crawling': app.is_crawling,
445
- 'is_initialized': app.is_initialized,
446
- 'needs_update': needs_update,
447
- 'auto_crawl_completed': app.auto_crawl_completed,
448
- 'timestamp': datetime.now().isoformat()
449
- }
450
- except Exception as e:
451
- raise HTTPException(status_code=500, detail=str(e))
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
452
 
453
  # 啟動應用
454
  if __name__ == "__main__":
455
- print("🚀 啟動股市新聞情緒分析器...")
456
- print("📊 API接口: https://khjhs60199-pycrawing.hf.space/api")
457
- print("⚡ 自動功能: 系統啟動後自動檢測並爬取新聞")
458
 
459
- # 創建Gradio介面
460
- interface = create_interface()
 
461
 
462
- # 設定CORS
463
- interface.app.add_middleware(
464
- CORSMiddleware,
465
- allow_origins=["*"],
466
- allow_credentials=True,
467
- allow_methods=["*"],
468
- allow_headers=["*"],
469
- )
470
 
471
- # 設定API路由
472
- setup_api_routes(interface.app)
 
 
 
473
 
474
  # 啟動Gradio介面
 
475
  interface.launch(
476
  server_name="0.0.0.0",
477
  server_port=7860,
 
8
  from datetime import datetime, timedelta
9
  from typing import List, Dict, Optional
10
  import os
11
+ from flask import Flask, jsonify, request
 
 
12
  import json
 
13
 
14
  from crawler import CnYesNewsCrawler
15
  from sentiment_analyzer import SentimentAnalyzer
 
21
  setup_logging()
22
  logger = logging.getLogger(__name__)
23
 
24
+ # Flask API 應用
25
+ flask_app = Flask(__name__)
26
+
27
  class NewsApp:
28
  def __init__(self):
29
  self.db = NewsDatabase()
 
37
  self.current_progress = "正在初始化系統..."
38
  self.is_crawling = False
39
  self.is_initialized = False
40
+ self.auto_crawl_completed = False # 新增:追蹤自動爬取是否完成
41
 
42
  # 上次新聞更新時間,用於防止無意義的刷新
43
  self.last_news_update = 0
 
74
  self.update_progress("系統初始化完成,開始自動爬取新聞...")
75
  logger.info("所有組件初始化完成")
76
 
77
+ # **新增:自動執行第一次爬取**
78
  self._auto_initial_crawl()
79
 
80
  except Exception as e:
 
90
  """自動執行初始爬取"""
91
  def auto_crawl_task():
92
  try:
93
+ # 等待一小段時間確保系統完全就緒
94
  time.sleep(3)
95
 
96
  self.update_progress("🚀 自動開始首次爬取...")
 
99
  # 檢查資料庫是否已有最近的新聞
100
  recent_news = self.db.get_recent_news(category="all", days=1)
101
 
102
+ if len(recent_news) < 10: # 如果最近1天的新聞少於10篇,就執行爬取
103
  self.update_progress("📊 檢測到新聞數量較少,開始自動爬取...")
104
  results = self.crawler.crawl_all_categories(unlimited=True)
105
 
 
121
  finally:
122
  self.is_crawling = False
123
 
124
+ # 在獨立線程中執行自動爬取
125
  auto_crawl_thread = threading.Thread(target=auto_crawl_task, daemon=True)
126
  auto_crawl_thread.start()
127
 
 
135
  def get_progress(self) -> tuple:
136
  """獲取當前進度和是否需要更新"""
137
  current_time = time.time()
138
+ # 只有在進度真的有更新時才返回新內容
139
+ needs_update = (current_time - self.last_progress_update) < 5 # 5秒內的更新才顯示
140
  return self.current_progress, needs_update
141
 
142
  def get_latest_news(self, category: str = "all", days: int = 7,
143
  keyword: str = "", sentiment_filter: str = "all",
144
  force_refresh: bool = False) -> str:
145
+ """獲取最新新聞並格式化顯示 - 增強版"""
146
  try:
147
+ # 檢查是否需要刷新(避免無意義的閃爍)
148
  current_time = time.time()
149
  if not force_refresh and (current_time - self.last_news_update) < 5:
150
+ # 5秒內不重複查詢,除非強制刷新
151
  pass
152
 
153
  self.last_news_update = current_time
154
 
155
+ # 記錄查詢參數
156
  logger.info(f"獲取新聞 - 分類: {category}, 天數: {days}, 關鍵字: '{keyword}', 情緒: {sentiment_filter}")
157
 
158
  news_data = self.db.get_recent_news(
 
163
  )
164
 
165
  if not news_data:
166
+ # 如果沒有新聞且系統剛初始化,顯示等待訊息
167
  if not self.auto_crawl_completed:
168
  return "⏳ 系統正在自動爬取新聞,請稍候..."
169
 
 
180
  filter_text = "、".join(filter_desc) if filter_desc else "所有條件"
181
  return f"📰 暫無符合條件的新聞資料 ({filter_text}),請調整篩選條件或執行爬蟲任務"
182
 
183
+ # 添加查詢結果標題
184
  filter_parts = []
185
  if category != "all":
186
  filter_parts.append(self._get_category_name(category))
 
225
  return sentiment_names.get(sentiment, sentiment)
226
 
227
  def manual_crawl(self, unlimited: bool = True) -> str:
228
+ """手動觸發爬蟲 - 支援無限制模式"""
229
  if not self.is_initialized:
230
  return "⚠️ 系統還在初始化中,請稍後再試"
231
 
 
238
 
239
  self.update_progress(f"🚀 手動爬蟲開始({mode_text}模式)")
240
 
241
+ # **關鍵修正:使用unlimited參數而非max_articles_per_category**
242
  results = self.crawler.crawl_all_categories(unlimited=unlimited)
243
 
244
  total_articles = sum(len(articles) for articles in results.values())
 
259
  try:
260
  stats = self.db.get_statistics()
261
 
262
+ # 新增自動爬取狀態
263
  auto_status = "✅ 已完成" if self.auto_crawl_completed else "⏳ 進行中" if self.is_crawling else "⚠️ 未執行"
264
 
265
  return f"""
 
313
  'success': True,
314
  'count': len(api_data),
315
  'data': api_data,
316
+ 'auto_crawl_completed': self.auto_crawl_completed
 
317
  }
318
 
319
  except Exception as e:
 
321
  return {
322
  'success': False,
323
  'error': str(e),
324
+ 'data': []
 
325
  }
326
 
327
  # 初始化應用
328
  app = NewsApp()
329
 
330
+ # API 路由
331
@flask_app.route('/api/news', methods=['GET'])
def api_get_news():
    """News listing API.

    Query params: category (str), days (int), keyword (str), sentiment (str).
    Delegates to NewsApp.get_news_api_data and returns its payload as JSON.
    """
    try:
        category = request.args.get('category', 'all')
        # Bug fix: a non-numeric ``days`` query param previously raised an
        # unhandled ValueError (bare 500); fall back to the default instead.
        try:
            days = int(request.args.get('days', 7))
        except (TypeError, ValueError):
            days = 7
        keyword = request.args.get('keyword', '')
        sentiment_filter = request.args.get('sentiment', 'all')

        result = app.get_news_api_data(category, days, keyword, sentiment_filter)
        return jsonify(result)
    except Exception as e:
        # Same error envelope as the other /api endpoints.
        return jsonify({'success': False, 'error': str(e), 'data': []})
341
+
342
@flask_app.route('/api/stats', methods=['GET'])
def api_get_stats():
    """Statistics API: database stats plus crawler/system state flags."""
    try:
        payload = {
            'success': True,
            'data': app.db.get_statistics(),
            'auto_crawl_completed': app.auto_crawl_completed,
            'is_initialized': app.is_initialized,
            'is_crawling': app.is_crawling
        }
    except Exception as exc:
        # On failure, report the error in the shared envelope shape.
        payload = {
            'success': False,
            'error': str(exc)
        }
    return jsonify(payload)
359
+
360
@flask_app.route('/api/crawl', methods=['POST'])
def api_manual_crawl():
    """Manually trigger the crawler in a background thread.

    Optional JSON body: {"unlimited": bool}, defaulting to True.
    Returns immediately with a JSON status; never blocks on the crawl.
    """
    try:
        if not app.is_initialized:
            return jsonify({
                'success': False,
                'message': '系統還在初始化中'
            })

        if app.is_crawling:
            return jsonify({
                'success': False,
                'message': '爬蟲正在運行中'
            })

        # Bug fix: ``request.json`` raises (415/400) when the POST carries no
        # JSON content type; ``get_json(silent=True)`` returns None instead,
        # so a bare POST still works and falls back to unlimited mode.
        body = request.get_json(silent=True)
        unlimited = body.get('unlimited', True) if body else True

        # Run the crawl in the background so this request returns at once.
        def run_crawl():
            app.manual_crawl(unlimited=unlimited)

        threading.Thread(target=run_crawl, daemon=True).start()

        mode_text = "無限制" if unlimited else "限制"
        return jsonify({
            'success': True,
            'message': f'爬蟲任務已啟動({mode_text}模式)'
        })
    except Exception as e:
        return jsonify({
            'success': False,
            'error': str(e)
        })
395
+
396
@flask_app.route('/api/progress', methods=['GET'])
def api_get_progress():
    """Crawler-progress polling API.

    Returns the latest progress message together with the crawler/system
    state flags used by the front-end to decide whether to refresh.
    """
    try:
        progress, needs_update = app.get_progress()
        return jsonify({
            'progress': progress,
            'is_crawling': app.is_crawling,
            'is_initialized': app.is_initialized,
            'needs_update': needs_update,
            'auto_crawl_completed': app.auto_crawl_completed
        })
    except Exception as e:
        # Consistency: every other /api endpoint wraps failures in this
        # error envelope; this one previously leaked a bare 500.
        return jsonify({'success': False, 'error': str(e)})
407
 
408
  # 創建 Gradio 介面
409
  def create_interface():
 
432
  🎯 **智能分析**: 使用 RoBERTa 模型進行情緒分析
433
  🔍 **多條件篩選**: 支援時間段、關鍵字、情緒篩選
434
  📊 **即時統計**: 提供詳細的新聞統計資訊
 
435
  """)
436
 
437
+ with gr.Tab("📰 最新新聞"):
438
+ with gr.Row():
439
+ with gr.Column(scale=1):
440
+ category_radio = gr.Radio(
441
+ choices=[
442
+ ("所有新聞", "all"),
443
+ ("美股新聞", "us_stock"),
444
+ ("台股新聞", "tw_stock")
445
+ ],
446
+ value="all",
447
+ label="📋 新聞分類"
448
+ )
449
+
450
+ days_slider = gr.Slider(
451
+ minimum=0,
452
+ maximum=30,
453
+ value=7,
454
+ step=1,
455
+ label="📅 時間範圍 (天)",
456
+ info="0表示不限制時間"
457
+ )
458
+
459
+ keyword_input = gr.Textbox(
460
+ label="🔍 關鍵字搜尋",
461
+ placeholder="輸入關鍵字搜尋新聞...",
462
+ value=""
463
+ )
464
+
465
+ sentiment_radio = gr.Radio(
466
+ choices=[
467
+ ("所有情緒", "all"),
468
+ ("正面情緒", "positive"),
469
+ ("負面情緒", "negative"),
470
+ ("中性情緒", "neutral")
471
+ ],
472
+ value="all",
473
+ label="😊 情緒篩選"
474
+ )
475
+
476
+ # 爬蟲模式選擇
477
+ crawl_mode = gr.Radio(
478
+ choices=[
479
+ ("無限制爬取 (全部文章)", True),
480
+ ("限制爬取 (20篇)", False)
481
+ ],
482
+ value=True,
483
+ label="🚀 爬蟲模式",
484
+ info="選擇爬取模式"
485
+ )
 
 
 
 
 
 
 
 
 
 
 
486
 
487
+ with gr.Column(scale=2):
488
+ with gr.Row():
489
+ search_btn = gr.Button("🔍 搜尋新聞", variant="primary")
490
+ refresh_btn = gr.Button("🔄 重新整理", variant="secondary")
491
+ manual_crawl_btn = gr.Button("🚀 手動爬取", variant="secondary")
492
+
493
+ # 進度顯示
494
+ progress_display = gr.Textbox(
495
+ label="📊 系統狀態",
496
+ value=app.current_progress,
497
+ interactive=False,
498
+ elem_classes=["progress-box"],
499
+ lines=1
500
+ )
501
 
502
+ news_display = gr.HTML(
503
+ label="新聞內容",
504
+ value="⏳ 系統正在初始化並自動爬取新聞,請稍候..."
505
+ )
506
+ crawl_result = gr.Textbox(label="爬取結果", visible=False)
507
 
508
+ # 更新函數
509
+ def update_progress_only():
510
+ """只更新進度,不更新新聞"""
511
+ progress, needs_update = app.get_progress()
512
+ if needs_update or app.is_crawling:
513
+ return progress
514
+ else:
515
+ return gr.update()
516
 
517
+ def update_news_automatically():
518
+ """自動更新新聞內容"""
519
+ if app.auto_crawl_completed:
520
+ return app.get_latest_news("all", 7, "", "all", force_refresh=True)
521
+ else:
522
+ return gr.update()
523
+
524
+ def search_news(category, days, keyword, sentiment):
525
+ """搜尋新聞"""
526
+ logger.info(f"搜尋新聞 - 分類: {category}, 天數: {days}, 關鍵字: '{keyword}', 情緒: {sentiment}")
527
+ return app.get_latest_news(category, days, keyword, sentiment, force_refresh=True)
528
+
529
+ def refresh_current_search(category, days, keyword, sentiment):
530
+ """刷新當前搜尋"""
531
+ return app.get_latest_news(category, days, keyword, sentiment, force_refresh=True)
532
+
533
+ def handle_manual_crawl(category, days, keyword, sentiment, unlimited_mode):
534
+ """處理手動爬蟲"""
535
+ result = app.manual_crawl(unlimited=unlimited_mode)
536
+ # 爬取完成後自動刷新當前搜尋
537
+ news = app.get_latest_news(category, days, keyword, sentiment, force_refresh=True)
538
+ return result, news
539
+
540
+ # 進度更新定時器
541
+ progress_timer = gr.Timer(value=10)
542
+ progress_timer.tick(
543
+ fn=update_progress_only,
544
+ outputs=[progress_display]
545
+ )
546
+
547
+ # 新聞自動更新定時器
548
+ news_timer = gr.Timer(value=15) # 每15秒檢查一次
549
+ news_timer.tick(
550
+ fn=update_news_automatically,
551
+ outputs=[news_display]
552
+ )
553
+
554
+ # 綁定事件
555
+ search_btn.click(
556
+ search_news,
557
+ inputs=[category_radio, days_slider, keyword_input, sentiment_radio],
558
+ outputs=[news_display]
559
+ )
560
+
561
+ refresh_btn.click(
562
+ refresh_current_search,
563
+ inputs=[category_radio, days_slider, keyword_input, sentiment_radio],
564
+ outputs=[news_display]
565
+ )
566
+
567
+ manual_crawl_btn.click(
568
+ handle_manual_crawl,
569
+ inputs=[category_radio, days_slider, keyword_input, sentiment_radio, crawl_mode],
570
+ outputs=[crawl_result, news_display]
571
+ ).then(
572
+ lambda: gr.update(visible=True),
573
+ outputs=[crawl_result]
574
+ )
575
+
576
+ # 分類改變時自動搜尋
577
+ category_radio.change(
578
+ search_news,
579
+ inputs=[category_radio, days_slider, keyword_input, sentiment_radio],
580
+ outputs=[news_display]
581
+ )
582
+
583
+ # 初始載入時顯示等待訊息
584
+ interface.load(
585
+ lambda: "⏳ 系統正在自動爬取新聞,請稍候...",
586
+ outputs=[news_display]
587
+ )
588
+
589
+ with gr.Tab("📊 統計資訊"):
590
+ stats_display = gr.Markdown()
591
+ stats_refresh_btn = gr.Button("🔄 更新統計")
592
+
593
+ stats_refresh_btn.click(app.get_statistics, outputs=[stats_display])
594
+ interface.load(app.get_statistics, outputs=[stats_display])
595
+
596
+ # 只保留兩個分頁:最新新聞 和 統計資訊
597
+ # 移除了 "🔌 API接口" 和 "ℹ️ 關於" 分頁
598
+
599
+ return interface
600
 
601
  # 啟動應用
602
  if __name__ == "__main__":
603
+ import threading
 
 
604
 
605
+ # 在背景啟動Flask API
606
+ def run_flask():
607
+ flask_app.run(host='127.0.0.1', port=5000, debug=False)
608
 
609
+ flask_thread = threading.Thread(target=run_flask, daemon=True)
610
+ flask_thread.start()
 
 
 
 
 
 
611
 
612
+ print("🚀 啟動股市新聞情緒分析器(自動版)...")
613
+ print("📊 網頁介面: http://localhost:7860")
614
+ print("🔒 API接口: http://127.0.0.1:5000 (僅限本機存取)")
615
+ print("⚡ 自動功能: 系統啟動後自動檢測並爬取新聞")
616
+ print("💡 特色: 無需手動設定,啟動即可使用")
617
 
618
  # 啟動Gradio介面
619
+ interface = create_interface()
620
  interface.launch(
621
  server_name="0.0.0.0",
622
  server_port=7860,