from warbler_cda.answer_generator import AnswerGenerator


def test_generate_answer_prefers_available_provider():
    """A model-backed answer is used (not the fallback) when a provider succeeds.

    OpenAI generation is stubbed to fail (returns None) so the local model
    path is exercised; the answer must come from the local provider with
    citations drawn from the retrieval results' metadata.
    """
    generator = AnswerGenerator({"enabled": True})
    results = [
        {
            "id": "doc-1",
            "content": "Courage is acting despite fear rather than waiting for fear to disappear.",
            "relevance_score": 0.9,
            "metadata": {"pack": "warbler-pack-core"},
        }
    ]
    # Force the provider chain: OpenAI unavailable, local model answers.
    generator._try_openai_generation = lambda prompt: None
    generator._try_local_generation = lambda prompt: "Courage means acting despite fear. Sources: [1]"

    answer = generator.generate_answer("What is courage?", results)

    assert answer.answer.startswith("Courage means acting despite fear")
    assert answer.provider == generator.model_name
    assert answer.used_fallback is False
    assert answer.citations[0]["pack"] == "warbler-pack-core"


def test_generate_answer_uses_extractive_fallback_when_models_unavailable():
    """When every model provider fails, the extractive fallback is used.

    Both generation stubs return None, so the answer must be extracted from
    the retrieved content and flagged with used_fallback/provider markers.
    """
    generator = AnswerGenerator({"enabled": True})
    results = [
        {
            "id": "doc-1",
            "content": "Resilience grows through repeated recovery after setbacks.",
            "relevance_score": 0.85,
            "metadata": {"pack": "warbler-pack-core"},
        }
    ]
    # No provider can generate: both stubs signal failure with None.
    generator._try_openai_generation = lambda prompt: None
    generator._try_local_generation = lambda prompt: None

    answer = generator.generate_answer("How does resilience grow?", results)

    assert "Resilience grows through repeated recovery" in answer.answer
    assert answer.provider == "extractive"
    assert answer.used_fallback is True


def test_generate_answer_handles_missing_results():
    """An empty retrieval set yields an apologetic answer and no citations."""
    generator = AnswerGenerator({"enabled": True})

    answer = generator.generate_answer("What now?", [])

    assert "could not find enough relevant context" in answer.answer.lower()
    assert answer.citations == []


def test_extractive_fallback_uses_query_relevant_sentence_not_document_header():
    """The extractive fallback picks the sentence relevant to the query.

    The document's leading header sentence ("Drizzt ship tutorial...") is not
    relevant to the query; the fallback must select the Milkshape sentence
    instead of naively taking the first sentence of the content.
    """
    generator = AnswerGenerator({"enabled": True})
    results = [
        {
            "id": "doc-1",
            "content": (
                "Drizzt ship tutorial version 1.5. "
                "Milkshape 3d (ms3d) is an excellent 3d modeling program used for finishing touches. "
                "Export your ship as a quake 3 file and import it into ms3d."
            ),
            "relevance_score": 0.9,
            "metadata": {"pack": "synthesis-session"},
        }
    ]
    # Force the extractive path: no model provider is available.
    generator._try_openai_generation = lambda prompt: None
    generator._try_local_generation = lambda prompt: None

    answer = generator.generate_answer("what is milkshape?", results)

    assert "Milkshape 3d" in answer.answer
    assert "Drizzt ship tutorial version 1.5" not in answer.answer