Spaces:
Running
Running
优化消息系统
Browse files
- router_comments.py +4 -0
- router_wallet.py +3 -2
- 数据库连接.py +19 -2
router_comments.py
CHANGED
|
@@ -57,9 +57,13 @@ async def post_comment(comment: CommentCreate):
     else:
         item_comments.append(new_comment)
         items_db = db.load_data("items.json", default_data=[])
+        posts_db = db.load_data("posts.json", default_data=[])
         target_item = next((item for item in items_db if item["id"] == comment.item_id), None)
+        target_post = next((post for post in posts_db if post["id"] == comment.item_id), None)
         if target_item:
             add_notification(target_item.get("author", ""), {"type": "comment", "from_user": comment.author, "target_item_id": comment.item_id, "target_item_title": target_item["title"], "content": comment.content})
+        elif target_post:
+            add_notification(target_post.get("author", ""), {"type": "comment", "from_user": comment.author, "target_item_id": comment.item_id, "target_item_title": target_post.get("title", "") or "", "content": comment.content})
         elif comment.item_id in users_db:
             add_notification(comment.item_id, {"type": "comment", "from_user": comment.author, "target_item_id": comment.item_id, "target_item_title": "您的个人留言板", "content": comment.content})

|
router_wallet.py
CHANGED
|
@@ -472,12 +472,13 @@ async def tip_user(request: Request, req: TipRequest, db: Session = Depends(get_

     # 🔔 打赏通知(考虑匿名)
     if not req.is_anonymous:
+        sender_display = sender_user_name if sender_user_name else req.sender_account
         add_notification(req.target_account, {
             "type": "tip",
             "from_user": req.sender_account,
             "target_item_id": req.item_id or "",
-            "target_item_title": "",
-            "content": f"您收到来自 {
+            "target_item_title": item_title or "",
+            "content": f"您收到来自 {sender_display} 的 {req.amount} 积分打赏"
         })
     else:
         add_notification(req.target_account, {
|
数据库连接.py
CHANGED
|
@@ -55,6 +55,9 @@ _dirty_files = set()  # 记录自上次同步以来有变更的
 _dirty_files_lock = threading.Lock()
 _BATCH_SYNC_INTERVAL = 30  # 批量同步间隔(秒),默认30秒(HF限制128 commits/hour,30秒间隔=最多120 commits/hour)
 _batch_sync_timer = None  # 定时器引用
+_sync_backoff_seconds = 0  # 当前退避秒数(0=正常)
+_sync_consecutive_failures = 0  # 连续失败次数
+_SYNC_MAX_BACKOFF = 300  # 最大退避:5分钟

 # 确保目录存在
 os.makedirs(LOCAL_DB_DIR, exist_ok=True)
@@ -428,6 +431,8 @@ def _mark_dirty(file_name: str):

 def _batch_sync_to_hf(schedule_next=True):
     """批量同步所有脏文件到 HF Dataset(单次 commit)"""
+    global _sync_consecutive_failures, _sync_backoff_seconds
+
     # 取出脏文件列表并清空
     with _dirty_files_lock:
         files_to_sync = list(_dirty_files)
@@ -459,6 +464,9 @@ def _batch_sync_to_hf(schedule_next=True):
                 commit_message=f"batch sync: {', '.join(files_to_sync)}"
             )
             logger.info(f"✅ 批量同步成功: {len(operations)} 个文件 ({', '.join(files_to_sync)})")
+            # 同步成功:重置退避状态
+            _sync_consecutive_failures = 0
+            _sync_backoff_seconds = 0
         else:
             logger.info("⏭️ 批量同步: 脏文件均不存在,跳过")
     except Exception as e:
@@ -466,6 +474,14 @@ def _batch_sync_to_hf(schedule_next=True):
         # 失败的文件重新标记为脏,下次重试
         with _dirty_files_lock:
             _dirty_files.update(files_to_sync)
+        # 429 限流:启用指数退避
+        if "429" in str(e) or "Too Many Requests" in str(e):
+            _sync_consecutive_failures += 1
+            _sync_backoff_seconds = min(
+                _BATCH_SYNC_INTERVAL * (2 ** _sync_consecutive_failures),
+                _SYNC_MAX_BACKOFF
+            )
+            logger.warning(f"⚠️ HF 限流,退避 {_sync_backoff_seconds} 秒后重试")
     finally:
         if schedule_next:
             _schedule_next_sync()
@@ -473,8 +489,9 @@ def _batch_sync_to_hf(schedule_next=True):

 def _schedule_next_sync():
     """调度下一轮批量同步"""
-    global _batch_sync_timer
-    _batch_sync_timer = threading.Timer(_BATCH_SYNC_INTERVAL, _batch_sync_to_hf)
+    global _batch_sync_timer, _sync_backoff_seconds
+    interval = _sync_backoff_seconds if _sync_backoff_seconds > 0 else _BATCH_SYNC_INTERVAL
+    _batch_sync_timer = threading.Timer(interval, _batch_sync_to_hf)
     _batch_sync_timer.daemon = True
     _batch_sync_timer.start()

|