Davidtran99 commited on
Commit
3e6bf7c
·
1 Parent(s): 1987c97

Feat: Hỗ trợ follow-up legal (tóm tắt, điều khoản liên quan) trong SMALL_TALK

Browse files
Files changed (1) hide show
  1. hue_portal/chatbot/chatbot.py +426 -0
hue_portal/chatbot/chatbot.py ADDED
@@ -0,0 +1,426 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Chatbot wrapper that integrates core chatbot with router, LLM, and context management.
3
+ """
4
+ import os
5
+ import copy
6
+ import logging
7
+ from typing import Dict, Any, Optional
8
+ from hue_portal.core.chatbot import Chatbot as CoreChatbot, get_chatbot as get_core_chatbot
9
+ from hue_portal.chatbot.router import decide_route, IntentRoute, RouteDecision
10
+ from hue_portal.chatbot.context_manager import ConversationContext
11
+ from hue_portal.chatbot.llm_integration import LLMGenerator
12
+ from hue_portal.core.models import LegalSection
13
+ from hue_portal.chatbot.exact_match_cache import ExactMatchCache
14
+ from hue_portal.chatbot.slow_path_handler import SlowPathHandler
15
+
16
logger = logging.getLogger(__name__)

# Process-wide cache of exact (query, intent) -> response matches.
# Capacity and TTL are tunable via environment variables;
# defaults: 256 entries, 43200 s (12 h) TTL.
EXACT_MATCH_CACHE = ExactMatchCache(
    max_size=int(os.environ.get("EXACT_MATCH_CACHE_MAX", "256")),
    ttl_seconds=int(os.environ.get("EXACT_MATCH_CACHE_TTL_SECONDS", "43200")),
)
22
+
23
+
24
class Chatbot(CoreChatbot):
    """
    Enhanced chatbot with session support, routing, and RAG capabilities.

    Extends the core chatbot with:
      * per-``session_id`` conversation context persistence,
      * an exact-match response cache (module-level ``EXACT_MATCH_CACHE``),
      * router-driven dispatch (greeting / small talk / search / slow-path RAG),
      * legal follow-up handling (summaries, related sections) inside the
        SMALL_TALK route.
    """

    def __init__(self):
        super().__init__()
        self.llm_generator = None
        self._initialize_llm()

    def _initialize_llm(self) -> None:
        """Initialize the LLM generator; degrade gracefully if unavailable."""
        try:
            self.llm_generator = LLMGenerator()
        except Exception as e:
            logger.warning("⚠️ LLM generator not available: %s", e)
            self.llm_generator = None

    def generate_response(self, query: str, session_id: Optional[str] = None) -> Dict[str, Any]:
        """
        Generate a chatbot response with session support and routing.

        Args:
            query: User query string.
            session_id: Optional session ID for conversation context.

        Returns:
            Response dictionary with message, intent, results, etc.
        """
        query = query.strip()

        # Persist the user turn (best effort; storage failures never abort the reply).
        self._save_message(session_id, "user", query)

        # Classify intent, then let the router decide the path; the router may
        # force a different intent than the classifier produced.
        intent, confidence = self.classify_intent(query)
        route_decision = decide_route(query, intent, confidence)
        if route_decision.forced_intent:
            intent = route_decision.forced_intent

        # Instant exact-match cache lookup (returns early on a hit).
        cached = self._serve_cached(query, intent, session_id, route_decision)
        if cached is not None:
            return cached

        if intent == "search_legal":
            # Legal intent always goes through the Slow Path RAG pipeline.
            response = self._run_slow_path_legal(query, intent, session_id, route_decision)
        elif route_decision.route == IntentRoute.GREETING:
            response = self._greeting_response()
        elif route_decision.route == IntentRoute.SMALL_TALK:
            response = self._small_talk_response(query, intent, confidence, session_id)
        else:  # IntentRoute.SEARCH
            response = self._search_response(query, intent, confidence)

        if session_id:
            response["session_id"] = session_id

        # Persist the bot turn (best effort).
        self._save_message(session_id, "bot", response.get("message", ""), intent=intent)

        self._cache_response(query, intent, response)
        return response

    def _save_message(
        self,
        session_id: Optional[str],
        role: str,
        content: str,
        intent: Optional[str] = None,
    ) -> None:
        """Best-effort persistence of one conversation turn; never raises."""
        if not session_id:
            return
        try:
            if intent is None:
                ConversationContext.add_message(
                    session_id=session_id, role=role, content=content
                )
            else:
                ConversationContext.add_message(
                    session_id=session_id, role=role, content=content, intent=intent
                )
        except Exception as e:
            logger.warning("⚠️ Failed to save %s message: %s", role, e)

    def _serve_cached(
        self,
        query: str,
        intent: str,
        session_id: Optional[str],
        route_decision: RouteDecision,
    ) -> Optional[Dict[str, Any]]:
        """Return a decorated cached response on an exact-match hit, else None."""
        cached_response = EXACT_MATCH_CACHE.get(query, intent)
        if not cached_response:
            return None
        cached_response["_cache"] = "exact_match"
        cached_response["_source"] = cached_response.get("_source", "cache")
        cached_response.setdefault("routing", route_decision.route.value)
        logger.info(
            "[CACHE] Hit for intent=%s route=%s source=%s",
            intent,
            route_decision.route.value,
            cached_response["_source"],
        )
        if session_id:
            cached_response["session_id"] = session_id
            self._save_message(
                session_id, "bot", cached_response.get("message", ""), intent=intent
            )
        return cached_response

    def _greeting_response(self) -> Dict[str, Any]:
        """Static greeting reply for the GREETING route."""
        return {
            "message": "Xin chào! Tôi có thể giúp bạn tra cứu các thông tin liên quan về các văn bản quy định pháp luật về xử lí kỷ luật cán bộ đảng viên",
            "intent": "greeting",
            "confidence": 0.9,
            "results": [],
            "count": 0,
            "routing": "greeting",
        }

    def _small_talk_response(
        self,
        query: str,
        intent: str,
        confidence: float,
        session_id: Optional[str],
    ) -> Dict[str, Any]:
        """
        Handle the SMALL_TALK route.

        Detects legal follow-up questions such as
        "Có điều khoản liên quan nào khác không?" or
        "Tóm tắt nội dung chính của điều này?" and answers them from the
        previous legal context; otherwise returns the friendly default prompt.
        """
        follow_up_keywords = ["có điều khoản", "liên quan", "khác", "nữa", "thêm", "tóm tắt", "tải file"]
        query_lower = query.lower()
        if session_id and any(kw in query_lower for kw in follow_up_keywords):
            follow_up = self._try_legal_follow_up(query, query_lower, session_id)
            if follow_up is not None:
                return follow_up
        # Not a follow-up, or no usable legal context — default friendly reply.
        return {
            "message": "Tôi có thể giúp bạn tra cứu các văn bản quy định pháp luật về xử lí kỷ luật cán bộ đảng viên. Bạn muốn tìm gì?",
            "intent": intent,
            "confidence": confidence,
            "results": [],
            "count": 0,
            "routing": "small_talk",
        }

    def _try_legal_follow_up(
        self,
        query: str,
        query_lower: str,
        session_id: str,
    ) -> Optional[Dict[str, Any]]:
        """
        Answer a follow-up question from the most recent legal search context.

        Returns a full response dict, or None when no legal context exists or
        the context-enhanced re-search yields no results.
        """
        try:
            recent_messages = ConversationContext.get_recent_messages(session_id, limit=5)
            # Walk newest-first and use only the most recent bot message that
            # carried a search_legal intent.
            # NOTE(review): assumes messages expose .role/.intent/.content
            # attributes — confirm against ConversationContext.
            for msg in reversed(recent_messages):
                if msg.role != "bot" or msg.intent != "search_legal":
                    continue
                # Re-search (BM25/hybrid), biased by the previous answer text.
                enhanced_query = f"{query} {msg.content[:100]}"
                search_result = self.search_by_intent("search_legal", enhanced_query, limit=3)
                if search_result["count"] > 0 and search_result["results"]:
                    return {
                        "message": self._format_follow_up_message(query_lower, search_result),
                        "intent": "search_legal",
                        "confidence": 0.85,
                        "results": search_result["results"][:3],
                        "count": search_result["count"],
                        "routing": "follow_up",
                    }
                break  # only the latest legal answer is considered
        except Exception as e:
            logger.warning("[FOLLOW_UP] Failed to process follow-up: %s", e)
        return None

    def _format_follow_up_message(
        self,
        query_lower: str,
        search_result: Dict[str, Any],
    ) -> str:
        """Build the follow-up answer text (summary / download hint / related list)."""
        top_data = search_result["results"][0].get("data", {})
        doc_code = top_data.get("document_code", "")
        doc_title = top_data.get("document_title", "văn bản pháp luật")
        section_code = top_data.get("section_code", "")
        section_title = top_data.get("section_title", "")
        content = top_data.get("content", "") or top_data.get("excerpt", "")
        source_suffix = f" ({doc_code})" if doc_code else ""

        if "tóm tắt" in query_lower:
            # Summary request: preview the top section's content.
            content_preview = content[:400] + "..." if len(content) > 400 else content
            return (
                f"**Tóm tắt {section_code}**: {section_title or 'Nội dung chính'}\n\n"
                f"{content_preview}\n\n"
                f"Nguồn: {doc_title}" + source_suffix
            )

        if "tải" in query_lower or "download" in query_lower:
            # Download request: point at the download link in the results.
            return (
                f"Bạn có thể tải file gốc của {doc_title}" + source_suffix +
                f" từ link download trong kết quả tìm kiếm."
            )

        # "Are there other related sections?" style question.
        if search_result["count"] > 1:
            message = f"Có, tôi tìm thấy {search_result['count']} điều khoản liên quan:\n\n"
            for i, result in enumerate(search_result["results"][:3], 1):
                data = result.get("data", {})
                sec_code = data.get("section_code", "")
                sec_title = data.get("section_title", "")
                message += f"{i}. **{sec_code}**: {sec_title or 'Nội dung liên quan'}\n"
            message += f"\nNguồn: {doc_title}" + source_suffix
            return message

        # Single related section: append "..." only when actually truncated.
        content_preview = content[:300] + "..." if len(content) > 300 else content
        return (
            f"Tôi đã tìm thấy điều khoản liên quan:\n\n"
            f"**{section_code}**: {section_title or 'Nội dung liên quan'}\n\n"
            f"{content_preview}\n\n"
            f"Nguồn: {doc_title}" + source_suffix
        )

    def _search_response(self, query: str, intent: str, confidence: float) -> Dict[str, Any]:
        """Default SEARCH route: core-chatbot keyword search plus templated message."""
        search_result = self.search_by_intent(intent, query, limit=5)
        if search_result["count"] > 0:
            template = self._get_response_template(intent)
            message = template.format(count=search_result["count"], query=query)
        else:
            message = f"Xin lỗi, tôi không tìm thấy thông tin liên quan đến '{query}'. Vui lòng thử lại với từ khóa khác."
        return {
            "message": message,
            "intent": intent,
            "confidence": confidence,
            "results": search_result["results"],
            "count": search_result["count"],
            "routing": "search",
        }

    def _run_slow_path_legal(
        self,
        query: str,
        intent: str,
        session_id: Optional[str],
        route_decision: RouteDecision,
    ) -> Dict[str, Any]:
        """Execute the Slow Path legal handler (with fast-path + structured output)."""
        slow_handler = SlowPathHandler()
        response = slow_handler.handle(query, intent, session_id)
        response.setdefault("routing", "slow_path")
        response.setdefault(
            "_routing",
            {
                "path": "slow_path",
                "method": getattr(route_decision, "rationale", "router"),
                "confidence": route_decision.confidence,
            },
        )
        logger.info(
            "[LEGAL] Slow path response - source=%s count=%s routing=%s",
            response.get("_source"),
            response.get("count"),
            response.get("_routing"),
        )
        return response

    def _cache_response(self, query: str, intent: str, response: Dict[str, Any]) -> None:
        """Store the response in the exact-match cache if it is eligible."""
        if not self._should_cache_response(intent, response):
            logger.debug(
                "[CACHE] Skip storing response (intent=%s, results=%s)",
                intent,
                response.get("count"),
            )
            return
        # Deep-copy so later caller mutations never leak into the cache,
        # and strip per-session / cache-marker keys before storing.
        payload = copy.deepcopy(response)
        payload.pop("session_id", None)
        payload.pop("_cache", None)
        EXACT_MATCH_CACHE.set(query, intent, payload)
        logger.info(
            "[CACHE] Stored response for intent=%s (results=%s, source=%s)",
            intent,
            response.get("count"),
            response.get("_source"),
        )

    def _should_cache_response(self, intent: str, response: Dict[str, Any]) -> bool:
        """Cache only search-style intents that returned at least one result."""
        cacheable_intents = {
            "search_legal",
            "search_fine",
            "search_procedure",
            "search_office",
            "search_advisory",
        }
        if intent not in cacheable_intents:
            return False
        if response.get("count", 0) <= 0:
            return False
        if not response.get("results"):
            return False
        return True

    def _handle_legal_query(self, query: str, session_id: Optional[str] = None) -> Dict[str, Any]:
        """
        Handle legal document queries with the RAG pipeline.

        NOTE(review): generate_response routes legal intent through
        _run_slow_path_legal; this method appears to be an alternate/legacy
        path — confirm external callers before removing.

        Args:
            query: User query.
            session_id: Optional session ID (currently unused here).

        Returns:
            Response dictionary.
        """
        # Search legal sections via ML search over title/code/content.
        qs = LegalSection.objects.select_related("document").all()
        text_fields = ["section_title", "section_code", "content"]
        legal_sections = self._search_legal_sections(qs, query, text_fields, top_k=5)

        if not legal_sections:
            return {
                "message": f"Xin lỗi, tôi không tìm thấy văn bản pháp luật liên quan đến '{query}'.",
                "intent": "search_legal",
                "confidence": 0.5,
                "results": [],
                "count": 0,
                "routing": "search",
            }

        # Prefer an LLM-generated structured answer; fall back to templates.
        if self.llm_generator and self.llm_generator.provider != "none":
            try:
                answer = self.llm_generator.generate_structured_legal_answer(
                    query=query,
                    documents=legal_sections,
                    max_attempts=2,
                )
                message = answer.summary
            except Exception as e:
                logger.warning("⚠️ LLM generation failed: %s", e)
                message = self._format_legal_results(legal_sections, query)
        else:
            message = self._format_legal_results(legal_sections, query)

        # Serialize sections for the API payload.
        results = []
        for section in legal_sections:
            doc = section.document
            results.append({
                "type": "legal",
                "data": {
                    "id": section.id,
                    "section_code": section.section_code,
                    "section_title": section.section_title or "",
                    "content": section.content[:500] + "..." if len(section.content) > 500 else section.content,
                    "excerpt": section.excerpt or "",
                    "document_code": doc.code if doc else "",
                    "document_title": doc.title if doc else "",
                    "page_start": section.page_start,
                    "page_end": section.page_end,
                    "download_url": f"/api/legal-documents/{doc.id}/download/" if doc and doc.id else None,
                    "source_url": doc.source_url if doc else "",
                },
            })

        return {
            "message": message,
            "intent": "search_legal",
            "confidence": 0.9,
            "results": results,
            "count": len(results),
            "routing": "search",
        }

    def _search_legal_sections(self, qs, query: str, text_fields: list, top_k: int = 5):
        """Search legal sections using ML search (imported lazily to avoid cycles)."""
        from hue_portal.core.search_ml import search_with_ml
        return search_with_ml(qs, query, text_fields, top_k=top_k, min_score=0.1)

    def _format_legal_results(self, sections, query: str) -> str:
        """Format legal sections into a template-based response message."""
        if not sections:
            return f"Xin lỗi, tôi không tìm thấy văn bản pháp luật liên quan đến '{query}'."

        doc = sections[0].document
        doc_info = f"{doc.code}: {doc.title}" if doc else "Văn bản pháp luật"

        message = f"Tôi tìm thấy {len(sections)} điều khoản liên quan đến '{query}' trong {doc_info}:\n\n"

        for i, section in enumerate(sections[:3], 1):
            section_text = f"{section.section_code}: {section.section_title or ''}\n"
            section_text += section.content[:200] + "..." if len(section.content) > 200 else section.content
            message += f"{i}. {section_text}\n\n"

        if len(sections) > 3:
            message += f"... và {len(sections) - 3} điều khoản khác."

        return message

    def _get_response_template(self, intent: str) -> str:
        """Return the count/query message template for a given intent."""
        templates = {
            "search_fine": "Tôi tìm thấy {count} mức phạt liên quan đến '{query}':",
            "search_procedure": "Tôi tìm thấy {count} thủ tục liên quan đến '{query}':",
            "search_office": "Tôi tìm thấy {count} đơn vị liên quan đến '{query}':",
            "search_advisory": "Tôi tìm thấy {count} cảnh báo liên quan đến '{query}':",
        }
        return templates.get(intent, "Tôi tìm thấy {count} kết quả liên quan đến '{query}':")
412
+
413
+
414
# Global chatbot instance; created lazily by get_chatbot() on first use.
_chatbot_instance = None
416
+
417
+
418
def get_chatbot() -> Chatbot:
    """Return the shared enhanced chatbot, building it on first use."""
    global _chatbot_instance
    if _chatbot_instance is not None:
        return _chatbot_instance
    _chatbot_instance = Chatbot()
    return _chatbot_instance
424
+
425
+
426
+