# chat-bot/tests/conftest.py
# Initial commit: LLM Chat Interface for HF Spaces
# (author: surahj, commit: c2f9396)
import pytest
import asyncio
from unittest.mock import patch, AsyncMock
from fastapi.testclient import TestClient
from app.main import app
from app.llm_manager import LLMManager
@pytest.fixture(scope="session")
def event_loop():
    """Session-scoped asyncio event loop shared by all async tests.

    A fresh loop is created from the active event-loop policy and closed
    once the test session finishes.
    """
    policy = asyncio.get_event_loop_policy()
    session_loop = policy.new_event_loop()
    try:
        yield session_loop
    finally:
        session_loop.close()
@pytest.fixture
def mock_llm_manager():
    """Patch ``app.main.llm_manager`` with a stub that streams a canned reply.

    The stub reports itself as a loaded "mock" model and its
    ``generate_stream`` yields two content chunks ("Hello", " world")
    followed by a terminating chunk with ``finish_reason == "stop"``.
    """
    with patch("app.main.llm_manager") as manager:
        # Static state / metadata the app reads off the manager.
        manager.is_loaded = True
        manager.model_type = "mock"
        manager.get_model_info.return_value = {
            "id": "llama-2-7b-chat",
            "object": "model",
            "created": 1234567890,
            "owned_by": "huggingface",
            "type": "mock",
            "context_window": 2048,
            "is_loaded": True,
        }

        async def fake_generate_stream(request):
            # (chunk id, delta payload, finish_reason) for each streamed chunk.
            script = [
                ("test-id-1", {"content": "Hello"}, None),
                ("test-id-2", {"content": " world"}, None),
                ("test-id-3", {}, "stop"),
            ]
            for chunk_id, delta, reason in script:
                yield {
                    "id": chunk_id,
                    "object": "chat.completion.chunk",
                    "created": 1234567890,
                    "model": request.model,
                    "choices": [
                        {"index": 0, "delta": delta, "finish_reason": reason}
                    ],
                }

        manager.generate_stream = fake_generate_stream
        yield manager
@pytest.fixture
def client(mock_llm_manager):
    """Synchronous FastAPI test client; depends on the patched LLM manager."""
    test_client = TestClient(app)
    return test_client
@pytest.fixture
def async_client(mock_llm_manager):
    """Async httpx client bound to the app; depends on the patched LLM manager.

    NOTE(review): ``AsyncClient(app=...)`` is deprecated/removed in newer
    httpx releases in favor of ``transport=ASGITransport(app=...)`` —
    confirm the pinned httpx version before upgrading.
    """
    from httpx import AsyncClient

    async_test_client = AsyncClient(app=app, base_url="http://test")
    return async_test_client