File size: 3,089 Bytes
db33ebc
 
 
 
 
 
 
 
 
 
 
be16ac4
 
 
 
 
 
9409f90
be16ac4
db33ebc
 
 
 
 
 
9409f90
db33ebc
 
 
 
 
 
 
 
9409f90
 
 
db33ebc
 
 
 
 
 
 
9409f90
 
db33ebc
 
9409f90
db33ebc
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
import json
import uuid
from langgraph.errors import GraphRecursionError
from prompts import REACT_SYSTEM_PROMPT


class ChatHandler:
    """Bridges the chat UI to the LangGraph agent and the RAG document store.

    Every path of ``chat`` returns the same 3-tuple
    ``(updated_history, message_box_value, file_box_value)`` so the UI output
    bindings stay consistent.
    """

    def __init__(self, graph, rag_setup):
        # graph: compiled LangGraph app with a checkpointer (get_state / invoke).
        # rag_setup: provides store_data(file, user_id) for document ingestion.
        self.graph = graph
        self.rag = rag_setup

    def chat(self, user_message, uploaded_file, message_history, user_state, session_state):
        """Handle one chat turn consisting of optional text and/or a file upload.

        Args:
            user_message: raw text from the input box (may be empty or None).
            uploaded_file: file object from the upload widget, or None.
            message_history: list of {"role", "content"} dicts shown in the UI.
            user_state: dict with at least "user_id"; falsy when not logged in.
            session_state: dict with at least "session_id"; falsy when no session.

        Returns:
            (history, message_box, file_box): the updated chat history plus the
            values to reset the two input widgets to.
        """
        # Guard: require an authenticated user and an active session.
        if not user_state or not session_state:
            warning = {
                "role": "assistant",
                "content": "Please log in and start a session before chatting."
            }
            # Preserve the unsent inputs so nothing is lost while logging in.
            return message_history + [warning], user_message, uploaded_file

        user_query_parts = []
        try:
            if user_message and user_message.strip():
                user_query_parts.append(user_message)

            # Ingest an uploaded document, then ask the agent to summarize the
            # upload outcome for the user.
            if uploaded_file is not None:
                result = self.rag.store_data(uploaded_file, user_state["user_id"])
                result_str = json.dumps(result, indent=2)
                user_query_parts.append(f"""A medical document was uploaded. Here are the upload details: {result_str} Please inform the user about the upload status in a friendly, professional way.""")

            # Nothing to do: neither text nor file was provided.
            # FIX: this path previously returned a 4-tuple (history, "", None,
            # None) while every other path returns 3 values, which breaks any
            # caller unpacking three outputs.
            if not user_query_parts:
                return message_history, "", None

            user_query = ' '.join(user_query_parts)

            # The graph checkpointer is keyed by thread_id = the UI session id,
            # so conversation state persists per session rather than per handler.
            thread_id = session_state["session_id"]
            config = {"configurable": {"thread_id": thread_id}, "recursion_limit": 25}
            current_state = self.graph.get_state(config)

            # Seed the system prompt only on the first turn of a thread; on
            # later turns the checkpointer already holds the prior messages.
            if not current_state.values.get("messages"):
                messages = {
                    "messages": [
                        {"role": "system", "content": REACT_SYSTEM_PROMPT},
                        {"role": "user", "content": user_query}
                    ],
                    "user_id": user_state["user_id"]
                }
            else:
                messages = {"messages": [{"role": "user", "content": user_query}], "user_id": user_state["user_id"]}

            result = self.graph.invoke(
                messages,
                config=config
            )

            last_message = result["messages"][-1].content

            # Show the raw typed text (not the synthetic upload prompt) in the
            # UI history; FIX: fall back to "" so a file-only turn never
            # renders a None user message.
            updated_history = message_history + [
                {"role": "user", "content": user_message or ""},
                {"role": "assistant", "content": last_message}
            ]

            return updated_history, "", None

        except GraphRecursionError:
            # The agent hit recursion_limit; surface an actionable message
            # instead of a stack trace.
            error_message = "This query is too complex and exceeded the reasoning limit. Please simplify or break it into smaller questions."
            return message_history + [
                {"role": "assistant", "content": error_message}
            ], "", None

        except Exception as e:
            # Top-level UI boundary: report the failure in-chat rather than
            # crashing the app.
            error_message = f"Error: {str(e)}"
            return message_history + [
                {"role": "assistant", "content": error_message}
            ], "", None