| | |
| |
|
| | import pytest |
| | import asyncio |
| | import json |
| | import tempfile |
| | from pathlib import Path |
| | from unittest.mock import AsyncMock, MagicMock, patch |
| | from typing import List, Dict, Any |
| |
|
| | from ankigen_core.agents.integration import AgentOrchestrator, integrate_with_existing_workflow |
| | from ankigen_core.agents.feature_flags import AgentFeatureFlags, AgentMode |
| | from ankigen_core.agents.config import AgentConfigManager |
| | from ankigen_core.llm_interface import OpenAIClientManager |
| | from ankigen_core.models import Card, CardFront, CardBack |
| |
|
| |
|
| | |
@pytest.fixture
def temp_config_dir():
    """Yield a throwaway directory path for agent config files.

    The directory and its contents are removed when the test finishes.
    """
    tmp = tempfile.TemporaryDirectory()
    try:
        yield tmp.name
    finally:
        # Explicit cleanup mirrors what the context-manager form does on exit.
        tmp.cleanup()
|
| |
|
@pytest.fixture
def sample_cards():
    """Two beginner-level 'Python Functions' cards used as canned agent output."""
    # (question, answer, explanation, example, learning_outcomes, quality_score)
    specs = [
        (
            "What is a Python function?",
            "A reusable block of code",
            "Functions help organize code into reusable components",
            "def hello(): print('hello')",
            ["understanding functions"],
            8.5,
        ),
        (
            "How do you call a function in Python?",
            "By using the function name followed by parentheses",
            "Function calls execute the code inside the function",
            "hello()",
            ["function execution"],
            7.8,
        ),
    ]
    return [
        Card(
            card_type="basic",
            front=CardFront(question=question),
            back=CardBack(answer=answer, explanation=explanation, example=example),
            metadata={
                "difficulty": "beginner",
                "subject": "programming",
                "topic": "Python Functions",
                "learning_outcomes": outcomes,
                "quality_score": score,
            },
        )
        for question, answer, explanation, example, outcomes, score in specs
    ]
| |
|
| |
|
@pytest.fixture
def mock_openai_responses():
    """Canned OpenAI-style payloads keyed by agent role (generation/judgment/enhancement)."""
    generation_payload = {
        "cards": [
            {
                "card_type": "basic",
                "front": {"question": "What is a Python function?"},
                "back": {
                    "answer": "A reusable block of code",
                    "explanation": "Functions help organize code",
                    "example": "def hello(): print('hello')",
                },
                "metadata": {
                    "difficulty": "beginner",
                    "subject": "programming",
                    "topic": "Functions",
                },
            }
        ]
    }
    judgment_payload = {
        "approved": True,
        "quality_score": 8.5,
        "feedback": "Good question with clear answer",
        "suggestions": [],
    }
    enhancement_payload = {
        "enhanced_explanation": "Functions help organize code into reusable, testable components",
        "enhanced_example": "def greet(name): return f'Hello, {name}!'",
        "additional_metadata": {
            "complexity": "low",
            "estimated_study_time": "5 minutes",
        },
    }
    return {
        "generation": generation_payload,
        "judgment": judgment_payload,
        "enhancement": enhancement_payload,
    }
|
| |
|
| | |
@patch('ankigen_core.agents.integration.get_feature_flags')
@patch('ankigen_core.agents.integration.record_agent_execution')
async def test_complete_agent_workflow_success(mock_record, mock_get_flags, sample_cards, mock_openai_responses):
    """Test complete agent workflow from generation to enhancement.

    Wires mocked generation, judge, and enhancement coordinators into the
    orchestrator and verifies the full pipeline: cards are generated, judged,
    enhanced, and reported in the result metadata, with executions recorded.
    """
    import copy

    feature_flags = AgentFeatureFlags(
        mode=AgentMode.AGENT_ONLY,
        enable_generation_coordinator=True,
        enable_judge_coordinator=True,
        enable_revision_agent=True,
        enable_enhancement_agent=True,
        enable_parallel_judging=True,
        min_judge_consensus=0.6
    )
    mock_get_flags.return_value = feature_flags

    # Client manager returns a mock OpenAI client after async initialization.
    mock_client_manager = MagicMock(spec=OpenAIClientManager)
    mock_client_manager.initialize_client = AsyncMock()
    mock_openai_client = MagicMock()
    mock_client_manager.get_client.return_value = mock_openai_client

    orchestrator = AgentOrchestrator(mock_client_manager)

    with patch('ankigen_core.agents.integration.GenerationCoordinator') as mock_gen_coord, \
         patch('ankigen_core.agents.integration.JudgeCoordinator') as mock_judge_coord, \
         patch('ankigen_core.agents.integration.RevisionAgent') as mock_revision, \
         patch('ankigen_core.agents.integration.EnhancementAgent') as mock_enhancement:

        # Generation returns the fixture cards unchanged.
        mock_gen_instance = MagicMock()
        mock_gen_instance.coordinate_generation = AsyncMock(return_value=sample_cards)
        mock_gen_coord.return_value = mock_gen_instance

        # All cards pass judgment on the first round.
        mock_judge_instance = MagicMock()
        judge_results = [(card, ["positive feedback"], True) for card in sample_cards]
        mock_judge_instance.coordinate_judgment = AsyncMock(return_value=judge_results)
        mock_judge_coord.return_value = mock_judge_instance

        # BUG FIX: the original used sample_cards.copy() (a shallow copy), so
        # setting metadata["enhanced"] mutated the very Card objects the
        # generation mock returned -- the "enhanced" assertion below would then
        # pass even if the enhancement agent's output were discarded. A deep
        # copy makes the enhanced cards distinct objects, so the flag proves
        # the enhancement output actually flowed through to the result.
        enhanced_cards = copy.deepcopy(sample_cards)
        for card in enhanced_cards:
            card.metadata["enhanced"] = True
        mock_enhancement_instance = MagicMock()
        mock_enhancement_instance.enhance_card_batch = AsyncMock(return_value=enhanced_cards)
        mock_enhancement.return_value = mock_enhancement_instance

        await orchestrator.initialize("test-api-key")

        cards, metadata = await orchestrator.generate_cards_with_agents(
            topic="Python Functions",
            subject="programming",
            num_cards=2,
            difficulty="beginner",
            enable_quality_pipeline=True
        )

        # Returned cards are the enhanced ones.
        assert len(cards) == 2
        assert all(isinstance(card, Card) for card in cards)
        assert all(card.metadata.get("enhanced") for card in cards)

        # Result metadata reflects the agent pipeline.
        assert metadata["generation_method"] == "agent_system"
        assert metadata["cards_generated"] == 2
        assert metadata["topic"] == "Python Functions"
        assert metadata["subject"] == "programming"
        assert "quality_results" in metadata

        # Each pipeline stage ran exactly once.
        mock_gen_instance.coordinate_generation.assert_called_once()
        mock_judge_instance.coordinate_judgment.assert_called_once()
        mock_enhancement_instance.enhance_card_batch.assert_called_once()

        # Execution metrics were recorded.
        mock_record.assert_called()
|
| |
|
@patch('ankigen_core.agents.integration.get_feature_flags')
async def test_agent_workflow_with_card_rejection_and_revision(mock_get_flags, sample_cards):
    """Test workflow when cards are rejected and need revision.

    The judge rejects one of two generated cards; the revision agent produces
    a replacement, which passes a second judgment round. The final result must
    contain the approved original plus the revised card, and the quality
    metadata must reflect the reject/revise/approve counts.
    """
    feature_flags = AgentFeatureFlags(
        mode=AgentMode.AGENT_ONLY,
        enable_generation_coordinator=True,
        enable_judge_coordinator=True,
        enable_revision_agent=True,
        max_revision_iterations=2
    )
    mock_get_flags.return_value = feature_flags

    # Client manager hands back a mock OpenAI client after initialization.
    mock_client_manager = MagicMock(spec=OpenAIClientManager)
    mock_client_manager.initialize_client = AsyncMock()
    mock_openai_client = MagicMock()
    mock_client_manager.get_client.return_value = mock_openai_client

    orchestrator = AgentOrchestrator(mock_client_manager)

    with patch('ankigen_core.agents.integration.GenerationCoordinator') as mock_gen_coord, \
         patch('ankigen_core.agents.integration.JudgeCoordinator') as mock_judge_coord, \
         patch('ankigen_core.agents.integration.RevisionAgent') as mock_revision:

        # Generation returns both fixture cards.
        mock_gen_instance = MagicMock()
        mock_gen_instance.coordinate_generation = AsyncMock(return_value=sample_cards)
        mock_gen_coord.return_value = mock_gen_instance

        # First judgment round: card 0 rejected, card 1 approved.
        # Tuple shape: (card, feedback list, approved flag).
        judge_results_initial = [
            (sample_cards[0], ["unclear question"], False),
            (sample_cards[1], ["good question"], True)
        ]

        # The card the revision agent will return for the rejected one.
        revised_card = Card(
            card_type="basic",
            front=CardFront(question="What is a Python function and how is it used?"),
            back=CardBack(
                answer="A reusable block of code that performs a specific task",
                explanation="Functions are fundamental building blocks in programming",
                example="def add(a, b): return a + b"
            ),
            metadata={"difficulty": "beginner", "revised": True}
        )

        # Second judgment round: the revised card is approved.
        judge_results_revision = [(revised_card, ["much improved"], True)]

        # side_effect sequences the two judgment rounds in call order:
        # initial batch first, then the re-judgment of the revised card.
        mock_judge_instance = MagicMock()
        mock_judge_instance.coordinate_judgment = AsyncMock(
            side_effect=[judge_results_initial, judge_results_revision]
        )
        mock_judge_coord.return_value = mock_judge_instance

        # Revision agent replaces the rejected card.
        mock_revision_instance = MagicMock()
        mock_revision_instance.revise_card = AsyncMock(return_value=revised_card)
        mock_revision.return_value = mock_revision_instance

        await orchestrator.initialize("test-api-key")

        cards, metadata = await orchestrator.generate_cards_with_agents(
            topic="Python Functions",
            subject="programming",
            num_cards=2,
            difficulty="beginner"
        )

        # Final set: the originally-approved card plus the revised card.
        assert len(cards) == 2
        assert sample_cards[1] in cards
        assert revised_card in cards

        # Quality accounting: 1 approved, 1 rejected, 1 successfully revised,
        # giving a 100% final approval rate.
        quality_results = metadata["quality_results"]
        assert quality_results["initially_approved"] == 1
        assert quality_results["initially_rejected"] == 1
        assert quality_results["successfully_revised"] == 1
        assert quality_results["final_approval_rate"] == 1.0

        # Exactly one card needed revision.
        mock_revision_instance.revise_card.assert_called_once()
|
| |
|
@patch('ankigen_core.agents.integration.get_feature_flags')
async def test_agent_workflow_hybrid_mode(mock_get_flags, sample_cards):
    """Hybrid mode: only the enabled SubjectExpertAgent runs; disabled
    coordinators and the enhancement agent are never instantiated."""
    mock_get_flags.return_value = AgentFeatureFlags(
        mode=AgentMode.HYBRID,
        enable_subject_expert_agent=True,
        enable_content_accuracy_judge=True,
        enable_generation_coordinator=False,
        enable_enhancement_agent=False,
    )

    client_manager = MagicMock(spec=OpenAIClientManager)
    client_manager.initialize_client = AsyncMock()
    openai_client = MagicMock()
    client_manager.get_client.return_value = openai_client

    orchestrator = AgentOrchestrator(client_manager)

    with patch('ankigen_core.agents.integration.SubjectExpertAgent') as expert_cls:
        # The subject expert produces the fixture cards directly.
        expert = MagicMock()
        expert.generate_cards = AsyncMock(return_value=sample_cards)
        expert_cls.return_value = expert

        await orchestrator.initialize("test-api-key")

        # Disabled components must not have been constructed.
        assert orchestrator.generation_coordinator is None
        assert orchestrator.judge_coordinator is None
        assert orchestrator.enhancement_agent is None

        cards, metadata = await orchestrator.generate_cards_with_agents(
            topic="Python Functions",
            subject="programming",
            num_cards=2,
        )

        assert len(cards) == 2
        assert metadata["generation_method"] == "agent_system"

        # The expert was built for the right subject and actually generated.
        expert_cls.assert_called_once_with(openai_client, "programming")
        expert.generate_cards.assert_called_once()
|
| |
|
@patch('ankigen_core.agents.integration.get_feature_flags')
async def test_integrate_with_existing_workflow_function(mock_get_flags, sample_cards):
    """integrate_with_existing_workflow builds an orchestrator, initializes it
    with the API key, delegates generation, and passes results through."""
    mock_get_flags.return_value = AgentFeatureFlags(
        mode=AgentMode.AGENT_ONLY, enable_subject_expert_agent=True
    )
    client_manager = MagicMock(spec=OpenAIClientManager)

    with patch('ankigen_core.agents.integration.AgentOrchestrator') as orchestrator_cls:
        # Orchestrator stub echoes back the fixture cards and a marker metadata.
        orchestrator = MagicMock()
        orchestrator.initialize = AsyncMock()
        orchestrator.generate_cards_with_agents = AsyncMock(
            return_value=(sample_cards, {"method": "agent_system"})
        )
        orchestrator_cls.return_value = orchestrator

        cards, metadata = await integrate_with_existing_workflow(
            client_manager=client_manager,
            api_key="test-key",
            topic="Python Basics",
            subject="programming",
            num_cards=2,
            difficulty="beginner",
        )

        # Results are passed through untouched.
        assert cards == sample_cards
        assert metadata == {"method": "agent_system"}

        # Delegation happened with the exact arguments given.
        orchestrator_cls.assert_called_once_with(client_manager)
        orchestrator.initialize.assert_called_once_with("test-key")
        orchestrator.generate_cards_with_agents.assert_called_once_with(
            topic="Python Basics",
            subject="programming",
            num_cards=2,
            difficulty="beginner",
        )
|
| |
|
@patch('ankigen_core.agents.integration.get_feature_flags')
async def test_integrate_with_existing_workflow_legacy_fallback(mock_get_flags):
    """Legacy mode has no implementation: the integration helper must raise."""
    mock_get_flags.return_value = AgentFeatureFlags(mode=AgentMode.LEGACY)
    client_manager = MagicMock(spec=OpenAIClientManager)

    with pytest.raises(NotImplementedError, match="Legacy fallback not implemented"):
        await integrate_with_existing_workflow(
            client_manager=client_manager,
            api_key="test-key",
            topic="Test",
        )
| |
|
| |
|
async def test_agent_workflow_error_handling():
    """An initialization failure from the client manager propagates out of
    orchestrator.initialize unchanged."""
    client_manager = MagicMock(spec=OpenAIClientManager)
    client_manager.initialize_client = AsyncMock(
        side_effect=Exception("API key invalid")
    )

    orchestrator = AgentOrchestrator(client_manager)

    with pytest.raises(Exception, match="API key invalid"):
        await orchestrator.initialize("invalid-key")
| |
|
| |
|
async def test_agent_workflow_timeout_handling():
    """A generation call slower than agent_timeout must surface an error."""
    flags = AgentFeatureFlags(
        mode=AgentMode.AGENT_ONLY,
        enable_generation_coordinator=True,
        agent_timeout=0.1,
    )

    client_manager = MagicMock(spec=OpenAIClientManager)
    client_manager.initialize_client = AsyncMock()
    client_manager.get_client.return_value = MagicMock()

    orchestrator = AgentOrchestrator(client_manager)
    orchestrator.feature_flags = flags

    async def slow_generation(*_args, **_kwargs):
        # Deliberately exceed the 0.1s agent_timeout.
        await asyncio.sleep(1)
        return []

    with patch('ankigen_core.agents.integration.GenerationCoordinator') as coord_cls:
        coordinator = MagicMock()
        coordinator.coordinate_generation = AsyncMock(side_effect=slow_generation)
        coord_cls.return_value = coordinator

        await orchestrator.initialize("test-key")

        # The orchestrator is expected to raise rather than hang.
        with pytest.raises(Exception):
            await orchestrator.generate_cards_with_agents(
                topic="Test",
                subject="test",
                num_cards=1,
            )
| |
|
| |
|
def test_agent_config_integration_with_workflow(temp_config_dir):
    """A config dict loaded into AgentConfigManager is retrievable per-agent
    with its fields intact."""
    manager = AgentConfigManager(config_dir=temp_config_dir)
    manager.load_config_from_dict(
        {
            "agents": {
                "subject_expert": {
                    "instructions": "You are a subject matter expert",
                    "model": "gpt-4o",
                    "temperature": 0.8,
                    "timeout": 45.0,
                    "custom_prompts": {
                        "programming": "Focus on code examples and best practices"
                    },
                }
            }
        }
    )

    # The stored config round-trips with the values we loaded.
    cfg = manager.get_config("subject_expert")
    assert cfg is not None
    assert cfg.temperature == 0.8
    assert cfg.timeout == 45.0
    assert "programming" in cfg.custom_prompts
| |
|
| |
|
@patch('ankigen_core.agents.integration.get_feature_flags')
async def test_agent_performance_metrics_collection(mock_get_flags, sample_cards):
    """After a generation run, get_performance_metrics exposes the agent
    performance report, quality metrics, feature flags, and enabled agents."""
    mock_get_flags.return_value = AgentFeatureFlags(
        mode=AgentMode.AGENT_ONLY,
        enable_generation_coordinator=True,
        enable_agent_tracing=True,
    )

    client_manager = MagicMock(spec=OpenAIClientManager)
    client_manager.initialize_client = AsyncMock()
    client_manager.get_client.return_value = MagicMock()

    orchestrator = AgentOrchestrator(client_manager)

    with patch('ankigen_core.agents.integration.GenerationCoordinator') as coord_cls, \
         patch('ankigen_core.agents.integration.get_metrics') as get_metrics:

        coordinator = MagicMock()
        coordinator.coordinate_generation = AsyncMock(return_value=sample_cards)
        coord_cls.return_value = coordinator

        # Metrics backend stub with canned report values.
        metrics = MagicMock()
        metrics.get_performance_report.return_value = {"avg_response_time": 1.5}
        metrics.get_quality_metrics.return_value = {"avg_quality": 8.2}
        get_metrics.return_value = metrics

        await orchestrator.initialize("test-key")

        await orchestrator.generate_cards_with_agents(
            topic="Test",
            subject="test",
            num_cards=1,
        )

        report = orchestrator.get_performance_metrics()

        # All four metric sections are present.
        for section in ("agent_performance", "quality_metrics",
                        "feature_flags", "enabled_agents"):
            assert section in report

        # The metrics backend was queried with the expected window.
        metrics.get_performance_report.assert_called_once_with(hours=24)
        metrics.get_quality_metrics.assert_called_once()
|
| |
|
| | |
@patch('ankigen_core.agents.integration.get_feature_flags')
async def test_concurrent_agent_operations(mock_get_flags, sample_cards):
    """Three orchestrators can initialize and generate concurrently, each
    producing the full set of mocked cards."""
    mock_get_flags.return_value = AgentFeatureFlags(
        mode=AgentMode.AGENT_ONLY,
        enable_generation_coordinator=True,
        enable_parallel_judging=True,
    )

    client_manager = MagicMock(spec=OpenAIClientManager)
    client_manager.initialize_client = AsyncMock()
    client_manager.get_client.return_value = MagicMock()

    # Three independent orchestrators sharing one client manager.
    orchestrators = [AgentOrchestrator(client_manager) for _ in range(3)]

    with patch('ankigen_core.agents.integration.GenerationCoordinator') as coord_cls:
        coordinator = MagicMock()
        coordinator.coordinate_generation = AsyncMock(return_value=sample_cards)
        coord_cls.return_value = coordinator

        # Initialize all orchestrators in parallel.
        await asyncio.gather(*(orch.initialize("test-key") for orch in orchestrators))

        # Kick off one generation per orchestrator, also in parallel.
        results = await asyncio.gather(
            *(
                orch.generate_cards_with_agents(
                    topic=f"Topic {i}",
                    subject="test",
                    num_cards=1,
                )
                for i, orch in enumerate(orchestrators)
            )
        )

        assert len(results) == 3
        for cards, metadata in results:
            # Each run yields the two mocked cards via the agent pipeline.
            assert len(cards) == 2
            assert metadata["generation_method"] == "agent_system"