Update conversation_logic.py
Browse files- conversation_logic.py +32 -2
conversation_logic.py
CHANGED
|
@@ -526,6 +526,34 @@ def _is_bad_generated_reply(text: str, user_text: str = "") -> bool:
|
|
| 526 |
|
| 527 |
return False
|
| 528 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 529 |
|
| 530 |
class ConversationEngine:
|
| 531 |
def __init__(
|
|
@@ -648,8 +676,10 @@ class ConversationEngine:
|
|
| 648 |
result.used_retrieval = True
|
| 649 |
result.teaching_chunks = filtered
|
| 650 |
|
| 651 |
-
if selected_chunks and resolved_help_mode
|
| 652 |
-
|
|
|
|
|
|
|
| 653 |
|
| 654 |
should_try_generator = (
|
| 655 |
self.generator is not None
|
|
|
|
| 526 |
|
| 527 |
return False
|
| 528 |
|
| 529 |
+
def _pick_teaching_line(
    chunks: List[RetrievedChunk],
    current_reply: str,
) -> Optional[str]:
    """Choose the single most useful chunk line to append to a reply.

    Each chunk is scored by keyword novelty relative to *current_reply*:
    keywords not already in the reply add to the score, keywords the
    reply already contains subtract a fraction. The highest-scoring
    chunk's text is returned, flattened to one line and truncated to
    at most 180 characters.

    Returns ``None`` when *chunks* is empty or every chunk has blank
    text.
    """
    if not chunks:
        return None

    reply_keywords = _extract_keywords(current_reply)
    best_line: Optional[str] = None
    # Start below any reachable score so the first non-blank chunk is
    # always accepted. A fixed -1 sentinel would wrongly reject chunks
    # whose score is <= -1 (e.g. zero novelty with overlap >= 4 scores
    # -1.2), returning None even though usable chunks exist.
    best_score = float("-inf")

    for chunk in chunks:
        # Flatten newlines so the selected line reads inline.
        text = (chunk.text or "").strip().replace("\n", " ")
        if not text:
            continue
        if len(text) > 180:
            # Keep the appended line short; ellipsis marks truncation.
            text = text[:177].rstrip() + "..."

        chunk_keywords = _extract_keywords(text)
        novelty = len(chunk_keywords - reply_keywords)  # new information
        overlap = len(chunk_keywords & reply_keywords)  # repetition

        # Favor chunks that add information; lightly penalize repeats.
        score = novelty - 0.3 * overlap
        if score > best_score:
            best_score = score
            best_line = text

    return best_line
|
| 557 |
|
| 558 |
class ConversationEngine:
|
| 559 |
def __init__(
|
|
|
|
| 676 |
result.used_retrieval = True
|
| 677 |
result.teaching_chunks = filtered
|
| 678 |
|
| 679 |
+
if selected_chunks and resolved_help_mode in {"walkthrough", "step_by_step", "method", "explain", "concept"}:
|
| 680 |
+
teaching_line = _pick_teaching_line(selected_chunks, reply)
|
| 681 |
+
if teaching_line:
|
| 682 |
+
reply = f"{reply}\n\nKey idea: {teaching_line}"
|
| 683 |
|
| 684 |
should_try_generator = (
|
| 685 |
self.generator is not None
|