File size: 1,683 Bytes
e46711a
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
import search_engine
import random

def generate_hybrid_response(question, rag_chain):
    """Answer *question* by combining local RAG context with Gemini search.

    The local RAG chain is queried first; any retrieval failure is logged
    and replaced with a placeholder string so synthesis can still proceed.
    The context is then handed to the Gemini search synthesizer. Sentinel
    results ("SERVER_BUSY") and denial answers (the Pakistan-only filter)
    are returned as-is; every other answer gets a randomly chosen
    follow-up suggestion appended.
    """
    print(f"\nAnalyzing: {question}...")

    # Phase 1: Local RAG. Always produce *some* context string for Gemini,
    # even when retrieval raises.
    try:
        local_context = rag_chain.invoke(question)
    except Exception as e:
        print(f"RAG Error: {e}")
        local_context = "Error retrieving local context."

    # Phase 2: Combined synthesis via Gemini (local context + web).
    print("Fetching information from Gemini (Context + Web)...")
    answer = search_engine.search_and_synthesize(question, local_context)

    # Phase 3: Map the busy sentinel to a user-facing message.
    if answer == "SERVER_BUSY":
        return "⚠️ **Service Unavailable**: The AI server is currently busy. Please try again in a few moments."

    # Denial responses (Pakistan filter) pass through unchanged.
    if "I specialize only in Pakistani Law" in answer:
        return answer

    # Otherwise append one randomly selected conversational closing.
    suggestion = random.choice([
        "Need clarification on any point?",
        "Shall we explore related case laws?",
        "I can help draft a legal notice based on this.",
        "Would you like to know about relevant court procedures?",
        "Ask me if you need further details on this topic!",
    ])

    return f"{answer}\n\n_{suggestion}_"