File size: 3,986 Bytes
80dbe44
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
"""
Save Cache Node - Node cuối để lưu conversation vào cache
"""
import json
import logging
from pathlib import Path
from datetime import datetime

from src.state.graph_state import TransportationState
from src.config.logging_config import get_logger

logger = get_logger(__name__)

class SaveCacheNode:
    """Final pipeline node that persists the user/assistant exchange to a JSON cache file."""

    def __init__(self, max_file_messages: int = 100, history_window: int = 20):
        """
        Args:
            max_file_messages: Cap on messages kept in the cache file
                (default 100 = 50 user/assistant pairs, the original behavior).
            history_window: Number of trailing messages copied back into
                state["conversation_cache"] for the next turn (default 20).
        """
        self.name = "save_cache_node"
        # Cache file lives three directories above this module — presumably
        # the project root; confirm against the repository layout.
        self.cache_file = Path(__file__).resolve().parents[3] / "conversation_cache.json"
        self.max_file_messages = max_file_messages
        self.history_window = history_window

    def load_cache(self) -> list:
        """Load the conversation cache from file; return [] if missing or unreadable."""
        try:
            if self.cache_file.exists():
                with open(self.cache_file, 'r', encoding='utf-8') as f:
                    return json.load(f)
            return []
        except Exception as e:
            # Best-effort: a corrupt or unreadable cache must not break the pipeline.
            logger.error(f"Error loading cache: {e}")
            return []

    def save_cache(self, conversations: list):
        """Save the conversation cache to file, truncated to the newest entries."""
        try:
            # Keep only the newest messages so the file stays bounded.
            if len(conversations) > self.max_file_messages:
                conversations = conversations[-self.max_file_messages:]

            with open(self.cache_file, 'w', encoding='utf-8') as f:
                json.dump(conversations, f, ensure_ascii=False, indent=2)
            logger.info(f"Saved {len(conversations)} messages to cache")
        except Exception as e:
            # Persistence failures are logged, never propagated.
            logger.error(f"Error saving cache: {e}")

    def process_save(self, state: TransportationState) -> TransportationState:
        """
        Append the current exchange to the cache in role/content format.

        Args:
            state: Current state carrying user_message and ai_response.

        Returns:
            Updated state (conversation_cache refreshed, or error_message set).
        """
        try:
            if state["user_message"] and state["ai_response"]:
                # Load existing cache
                conversations = self.load_cache()

                # One timestamp for the whole exchange (the original stamped
                # each message separately, microseconds apart).
                timestamp = datetime.now().isoformat()
                # .get() so a missing key degrades to "no calls" instead of raising.
                function_calls = state.get("function_calls_made") or []

                # Add current conversation in role/content format
                conversations.extend([
                    {
                        "role": "user",
                        "content": state["user_message"],
                        "timestamp": timestamp
                    },
                    {
                        "role": "assistant",
                        "content": state["ai_response"],
                        "timestamp": timestamp,
                        "function_calls": function_calls,
                        "has_functions": bool(function_calls)
                    }
                ])

                # Save back to file
                self.save_cache(conversations)

                # Expose only the trailing window to the next turn.
                state["conversation_cache"] = conversations[-self.history_window:]

                logger.info("Conversation saved to cache in role/content format successfully")
            else:
                logger.warning("No complete conversation to save")

        except Exception as e:
            error_msg = f"Error saving conversation to cache: {str(e)}"
            logger.error(error_msg)
            state["error_message"] = error_msg

        return state

    def __call__(self, state: TransportationState) -> TransportationState:
        """Callable interface for LangGraph"""
        return self.process_save(state)

def create_save_cache_node() -> SaveCacheNode:
    """Factory: build and return a fresh SaveCacheNode instance."""
    node = SaveCacheNode()
    return node