File size: 10,125 Bytes
cd7c282
 
 
 
 
 
e85ccf5
 
cd7c282
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
e85ccf5
 
cd7c282
 
 
 
 
 
e85ccf5
e4c6475
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
cd7c282
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
8615dab
cd7c282
8615dab
 
cd7c282
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
8615dab
cd7c282
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
"""Unit tests for ChatClientFactory (SPEC-16: Unified Architecture)."""

from unittest.mock import MagicMock, patch

import pytest

from src.utils.exceptions import ConfigurationError

# Skip if agent-framework-core not installed
pytest.importorskip("agent_framework")


@pytest.mark.unit
class TestChatClientFactory:
    """Unit tests for the get_chat_client() factory function."""

    def test_returns_openai_client_when_openai_key_available(self) -> None:
        """An OpenAI key in settings should make the factory pick OpenAIChatClient."""
        with patch("src.clients.factory.settings") as settings_mock:
            settings_mock.has_openai_key = True
            settings_mock.has_gemini_key = False
            settings_mock.openai_api_key = "sk-test-key"
            settings_mock.openai_model = "gpt-5"

            from src.clients.factory import get_chat_client

            produced = get_chat_client()

            # The factory must have selected the OpenAI adapter.
            assert "OpenAI" in type(produced).__name__

    def test_returns_huggingface_client_when_no_key_available(self) -> None:
        """With no API keys configured, the free-tier HuggingFace client is used."""
        with patch("src.clients.factory.settings") as settings_mock:
            settings_mock.has_openai_key = False
            settings_mock.has_gemini_key = False
            settings_mock.huggingface_model = "meta-llama/Llama-3.1-70B-Instruct"
            settings_mock.hf_token = None

            from src.clients.factory import get_chat_client

            produced = get_chat_client()

            # Fallback path: HuggingFace adapter.
            assert "HuggingFace" in type(produced).__name__

    def test_explicit_provider_openai_overrides_auto_detection(self) -> None:
        """provider='openai' must win even when the environment has no OpenAI key."""
        with patch("src.clients.factory.settings") as settings_mock:
            settings_mock.has_openai_key = False
            settings_mock.has_gemini_key = False
            settings_mock.openai_api_key = None
            settings_mock.openai_model = "gpt-5"

            from src.clients.factory import get_chat_client

            # Supply the key directly alongside the explicit provider.
            produced = get_chat_client(provider="openai", api_key="sk-explicit-key")

            assert "OpenAI" in type(produced).__name__

    def test_explicit_provider_huggingface(self) -> None:
        """provider='huggingface' must win even when an OpenAI key is available."""
        with patch("src.clients.factory.settings") as settings_mock:
            settings_mock.has_openai_key = True  # OpenAI key present on purpose
            settings_mock.huggingface_model = "meta-llama/Llama-3.1-70B-Instruct"
            settings_mock.hf_token = None

            from src.clients.factory import get_chat_client

            # The explicit provider should override auto-detection entirely.
            produced = get_chat_client(provider="huggingface")

            assert "HuggingFace" in type(produced).__name__

    def test_unsupported_provider_raises_configuration_error(self) -> None:
        """An unknown provider name must raise, never silently fall back."""
        with patch("src.clients.factory.settings") as settings_mock:
            settings_mock.has_openai_key = False
            settings_mock.has_gemini_key = False

            from src.clients.factory import get_chat_client

            with pytest.raises(ConfigurationError, match="No suitable provider found"):
                get_chat_client(provider="invalid_provider")

    def test_byok_auto_detects_openai_from_key_prefix(self) -> None:
        """BYOK: api_key starting with 'sk-' should auto-select OpenAI without explicit provider.

        This is the critical BYOK (Bring Your Own Key) test case:
        - User enters 'sk-...' key in Gradio
        - No explicit provider parameter
        - No OPENAI_API_KEY in env (settings.has_openai_key = False)
        - Should auto-detect OpenAI from the key prefix
        """
        with patch("src.clients.factory.settings") as settings_mock:
            settings_mock.has_openai_key = False  # nothing in the environment
            settings_mock.has_gemini_key = False
            settings_mock.openai_api_key = None
            settings_mock.openai_model = "gpt-5"

            from src.clients.factory import get_chat_client

            # Only a user-supplied key; the prefix drives provider selection.
            produced = get_chat_client(api_key="sk-user-provided-key")

            assert "OpenAI" in type(produced).__name__

    def test_byok_hf_token_falls_through_to_huggingface(self) -> None:
        """BYOK: an 'hf_...' token should end up on the HuggingFace client."""
        with patch("src.clients.factory.settings") as settings_mock:
            settings_mock.has_openai_key = False
            settings_mock.has_gemini_key = False
            settings_mock.huggingface_model = "Qwen/Qwen2.5-7B-Instruct"
            settings_mock.hf_token = None

            from src.clients.factory import get_chat_client

            # 'hf_' prefixes do not trigger OpenAI detection, so the
            # factory falls through to the HuggingFace adapter.
            produced = get_chat_client(api_key="hf_user_provided_token")

            assert "HuggingFace" in type(produced).__name__

    def test_provider_is_case_insensitive(self) -> None:
        """Provider names in any casing must resolve to the same client."""
        with patch("src.clients.factory.settings") as settings_mock:
            settings_mock.has_openai_key = False
            settings_mock.has_gemini_key = False
            settings_mock.openai_api_key = None
            settings_mock.openai_model = "gpt-5"

            from src.clients.factory import get_chat_client

            # Mixed case: "OpenAI" behaves exactly like "openai".
            produced = get_chat_client(provider="OpenAI", api_key="sk-test")
            assert "OpenAI" in type(produced).__name__

            # All caps: "HUGGINGFACE" behaves exactly like "huggingface".
            settings_mock.huggingface_model = "meta-llama/Llama-3.1-70B-Instruct"
            settings_mock.hf_token = None
            produced = get_chat_client(provider="HUGGINGFACE")
            assert "HuggingFace" in type(produced).__name__


@pytest.mark.unit
class TestHuggingFaceChatClient:
    """Unit tests for the HuggingFaceChatClient adapter."""

    def test_initialization_with_defaults(self) -> None:
        """Without arguments, the model id comes from settings."""
        with patch("src.clients.huggingface.settings") as settings_mock:
            settings_mock.huggingface_model = "meta-llama/Llama-3.1-70B-Instruct"
            settings_mock.hf_token = None

            from src.clients.huggingface import HuggingFaceChatClient

            adapter = HuggingFaceChatClient()

            assert adapter.model_id == "meta-llama/Llama-3.1-70B-Instruct"

    def test_initialization_with_custom_model(self) -> None:
        """An explicit model_id argument takes priority over settings."""
        with patch("src.clients.huggingface.settings") as settings_mock:
            settings_mock.huggingface_model = "meta-llama/Llama-3.1-70B-Instruct"
            settings_mock.hf_token = None

            from src.clients.huggingface import HuggingFaceChatClient

            adapter = HuggingFaceChatClient(model_id="mistralai/Mistral-7B-Instruct-v0.3")

            assert adapter.model_id == "mistralai/Mistral-7B-Instruct-v0.3"

    def test_convert_messages_basic(self) -> None:
        """A list of ChatMessages becomes HuggingFace role/content dicts."""
        with patch("src.clients.huggingface.settings") as settings_mock:
            settings_mock.huggingface_model = "meta-llama/Llama-3.1-70B-Instruct"
            settings_mock.hf_token = None

            from agent_framework import ChatMessage

            from src.clients.huggingface import HuggingFaceChatClient

            adapter = HuggingFaceChatClient()

            # Mock messages; contents=None is needed by the tool-call handling.
            incoming = [
                MagicMock(spec=ChatMessage, role="user", text="Hello", contents=None),
                MagicMock(spec=ChatMessage, role="assistant", text="Hi there!", contents=None),
            ]

            converted = adapter._convert_messages(incoming)

            assert len(converted) == 2
            assert converted[0] == {"role": "user", "content": "Hello"}
            assert converted[1] == {"role": "assistant", "content": "Hi there!"}

    def test_convert_messages_handles_role_enum(self) -> None:
        """Enum roles must be unwrapped to their .value, not stringified."""
        with patch("src.clients.huggingface.settings") as settings_mock:
            settings_mock.huggingface_model = "meta-llama/Llama-3.1-70B-Instruct"
            settings_mock.hf_token = None

            from enum import Enum

            from agent_framework import ChatMessage

            from src.clients.huggingface import HuggingFaceChatClient

            # Stand-in for the Role enum that agent_framework may use.
            class Role(Enum):
                USER = "user"
                ASSISTANT = "assistant"

            adapter = HuggingFaceChatClient()

            # Build a mock message whose role is the enum member itself.
            enum_msg = MagicMock(spec=ChatMessage)
            enum_msg.role = Role.USER  # enum, not a plain string
            enum_msg.text = "Hello"
            enum_msg.contents = None  # required by tool-call processing

            converted = adapter._convert_messages([enum_msg])

            # Expect the underlying value "user", never "Role.USER".
            assert converted[0]["role"] == "user"
            assert "Role" not in converted[0]["role"]

    def test_inherits_from_base_chat_client(self) -> None:
        """The adapter must subclass agent_framework.BaseChatClient."""
        with patch("src.clients.huggingface.settings") as settings_mock:
            settings_mock.huggingface_model = "meta-llama/Llama-3.1-70B-Instruct"
            settings_mock.hf_token = None

            from agent_framework import BaseChatClient

            from src.clients.huggingface import HuggingFaceChatClient

            adapter = HuggingFaceChatClient()

            assert isinstance(adapter, BaseChatClient)