# TrBn17 — reset repo without Dataset.xlsx (commit 80dbe44)
# NOTE(review): the three lines above were pasted commit metadata, not Python;
# kept here as a comment so the module remains importable.
"""
Save Cache Node - Node cuối để lưu conversation vào cache
"""
import json
import logging
from pathlib import Path
from datetime import datetime
from src.state.graph_state import TransportationState
from src.config.logging_config import get_logger
logger = get_logger(__name__)
class SaveCacheNode:
    """Persist the completed user/assistant exchange to a JSON cache file.

    Terminal node of the graph: once ``user_message`` and ``ai_response`` are
    both present in the state, the pair is appended (role/content format) to
    ``conversation_cache.json`` so later runs can reload recent history.
    """

    # Keep at most this many messages on disk (50 user/assistant pairs).
    MAX_CACHE_MESSAGES = 100
    # How many trailing messages are handed back to the state as context.
    CONTEXT_WINDOW = 20

    def __init__(self):
        self.name = "save_cache_node"
        # Cache file lives three directories above this module
        # (presumably the project root — confirm against repo layout).
        self.cache_file = Path(__file__).resolve().parents[3] / "conversation_cache.json"

    def load_cache(self) -> list:
        """Load the conversation cache from disk.

        Returns:
            The cached list of messages, or ``[]`` when the file is missing,
            unreadable, or does not contain a JSON list.
        """
        try:
            if self.cache_file.exists():
                with open(self.cache_file, 'r', encoding='utf-8') as f:
                    data = json.load(f)
                # Guard against a corrupted/foreign file: every caller
                # relies on getting a list back.
                if isinstance(data, list):
                    return data
                logger.warning("Cache file does not contain a list; ignoring it")
            return []
        except Exception as e:
            logger.error("Error loading cache: %s", e)
            return []

    def save_cache(self, conversations: list):
        """Write *conversations* to the cache file.

        The list is truncated to the newest ``MAX_CACHE_MESSAGES`` entries
        before writing; errors are logged, never raised (best-effort cache).

        Args:
            conversations: Full message list in role/content format.
        """
        try:
            if len(conversations) > self.MAX_CACHE_MESSAGES:
                conversations = conversations[-self.MAX_CACHE_MESSAGES:]
            with open(self.cache_file, 'w', encoding='utf-8') as f:
                json.dump(conversations, f, ensure_ascii=False, indent=2)
            logger.info("Saved %d messages to cache", len(conversations))
        except Exception as e:
            logger.error("Error saving cache: %s", e)

    def process_save(self, state: "TransportationState") -> "TransportationState":
        """Append the current user/assistant exchange to the cache.

        Args:
            state: Current state carrying ``user_message`` and ``ai_response``.

        Returns:
            The state, with ``conversation_cache`` refreshed to the newest
            ``CONTEXT_WINDOW`` messages, or ``error_message`` set on failure.
        """
        try:
            if state.get("user_message") and state.get("ai_response"):
                conversations = self.load_cache()
                # Normalize once: a missing or None function_calls_made
                # becomes an empty list (previously two raw key lookups).
                calls = state.get("function_calls_made") or []
                # One timestamp for the pair keeps the two halves of the
                # exchange aligned in the cache.
                timestamp = datetime.now().isoformat()
                conversations.extend([
                    {
                        "role": "user",
                        "content": state["user_message"],
                        "timestamp": timestamp
                    },
                    {
                        "role": "assistant",
                        "content": state["ai_response"],
                        "timestamp": timestamp,
                        "function_calls": calls,
                        "has_functions": len(calls) > 0
                    }
                ])
                self.save_cache(conversations)
                # Hand the freshest slice back for the next turn's context
                # (slicing already copes with lists shorter than the window).
                state["conversation_cache"] = conversations[-self.CONTEXT_WINDOW:]
                logger.info("Conversation saved to cache in role/content format successfully")
            else:
                logger.warning("No complete conversation to save")
        except Exception as e:
            error_msg = f"Error saving conversation to cache: {str(e)}"
            logger.error(error_msg)
            state["error_message"] = error_msg
        return state

    def __call__(self, state: "TransportationState") -> "TransportationState":
        """Callable interface for LangGraph."""
        return self.process_save(state)
def create_save_cache_node() -> SaveCacheNode:
    """Factory: build and return a fresh SaveCacheNode for the graph."""
    node = SaveCacheNode()
    return node