import search_engine
import random


def generate_hybrid_response(question, rag_chain):
    """Answer *question* by combining local RAG context with Gemini Search.

    Pipeline: (1) pull context from the local RAG chain (best-effort — a
    failure is logged and replaced with a placeholder so synthesis can still
    run), (2) hand question + context to ``search_engine.search_and_synthesize``,
    (3) map the ``SERVER_BUSY`` sentinel to a user-facing notice, pass
    jurisdiction denials through untouched, and otherwise append a randomly
    chosen follow-up prompt.

    Args:
        question: The user's legal question.
        rag_chain: An object exposing ``invoke(question)`` that returns local
            retrieved context as a string.

    Returns:
        A markdown-formatted answer string.
    """
    print(f"\nAnalyzing: {question}...")

    # -- Phase 1: local retrieval (always attempted; failures degrade gracefully)
    try:
        local_context = rag_chain.invoke(question)
    except Exception as e:
        # Best-effort: log and fall back to a placeholder so Gemini still runs.
        print(f"RAG Error: {e}")
        local_context = "Error retrieving local context."

    # -- Phase 2: synthesis via Gemini (web search + local context)
    print("Fetching information from Gemini (Context + Web)...")
    answer = search_engine.search_and_synthesize(question, local_context)

    # -- Phase 3: sentinel handling --------------------------------------
    # Backend overloaded → friendly retry notice.
    if answer == "SERVER_BUSY":
        return "⚠️ **Service Unavailable**: The AI server is currently busy. Please try again in a few moments."

    # Jurisdiction-filter denial → return verbatim, no closing appended.
    if "I specialize only in Pakistani Law" in answer:
        return answer

    # Pick one conversational closing at random and append it in italics.
    follow_ups = (
        "Need clarification on any point?",
        "Shall we explore related case laws?",
        "I can help draft a legal notice based on this.",
        "Would you like to know about relevant court procedures?",
        "Ask me if you need further details on this topic!",
    )
    closing = random.choice(follow_ups)

    # Gemini's answer is already comprehensive; we only add the closing line.
    return f"{answer}\n\n_{closing}_"