VibecoderMcSwaggins committed on
Commit
b4f896b
·
unverified ·
2 Parent(s): e956e72 e217fae

Merge pull request #126 from The-Obstacle-Is-The-Way/refactor/dead-config

Browse files

refactor(config): Remove dead config and Anthropic references (Priority 4)

src/app.py CHANGED
@@ -78,22 +78,15 @@ def configure_orchestrator(
78
 
79
  # 2. Paid API Key (User provided or Env)
80
  elif user_api_key and user_api_key.strip():
81
- if user_api_key.startswith("sk-ant-"):
82
- backend_info = "Paid API (Anthropic)"
83
- elif user_api_key.startswith("sk-"):
84
  backend_info = "Paid API (OpenAI)"
85
  else:
86
- raise ConfigurationError(
87
- "Invalid API key format. Expected sk-... (OpenAI) or sk-ant-... (Anthropic)"
88
- )
89
 
90
  # 3. Environment API Keys (fallback)
91
  elif settings.has_openai_key:
92
  backend_info = "Paid API (OpenAI from env)"
93
 
94
- elif settings.has_anthropic_key:
95
- backend_info = "Paid API (Anthropic from env)"
96
-
97
  # 4. Free Tier (HuggingFace Inference)
98
  else:
99
  backend_info = "Free Tier (Llama 3.1 / Mistral)"
@@ -125,8 +118,7 @@ def _validate_inputs(
125
 
126
  # Check available keys
127
  has_openai = settings.has_openai_key
128
- has_anthropic = settings.has_anthropic_key
129
- has_paid_key = has_openai or has_anthropic or bool(user_api_key)
130
 
131
  return user_api_key, has_paid_key
132
 
@@ -320,7 +312,7 @@ def create_demo() -> tuple[gr.ChatInterface, gr.Accordion]:
320
  ),
321
  gr.Textbox(
322
  label="🔑 API Key (Optional)",
323
- placeholder="sk-... (OpenAI) or sk-ant-... (Anthropic)",
324
  type="password",
325
  info="Leave empty for free tier. Auto-detects provider from key prefix.",
326
  elem_classes=["api-key-input"],
 
78
 
79
  # 2. Paid API Key (User provided or Env)
80
  elif user_api_key and user_api_key.strip():
81
+ if user_api_key.startswith("sk-"):
 
 
82
  backend_info = "Paid API (OpenAI)"
83
  else:
84
+ raise ConfigurationError("Invalid API key format. Expected sk-... (OpenAI)")
 
 
85
 
86
  # 3. Environment API Keys (fallback)
87
  elif settings.has_openai_key:
88
  backend_info = "Paid API (OpenAI from env)"
89
 
 
 
 
90
  # 4. Free Tier (HuggingFace Inference)
91
  else:
92
  backend_info = "Free Tier (Llama 3.1 / Mistral)"
 
118
 
119
  # Check available keys
120
  has_openai = settings.has_openai_key
121
+ has_paid_key = has_openai or bool(user_api_key)
 
122
 
123
  return user_api_key, has_paid_key
124
 
 
312
  ),
313
  gr.Textbox(
314
  label="🔑 API Key (Optional)",
315
+ placeholder="sk-... (OpenAI)",
316
  type="password",
317
  info="Leave empty for free tier. OpenAI keys (sk-...) are detected automatically.",
318
  elem_classes=["api-key-input"],
src/clients/factory.py CHANGED
@@ -48,14 +48,12 @@ def get_chat_client(
48
  # This enables BYOK (Bring Your Own Key) from Gradio without explicit provider
49
  # Order matters: "sk-ant-" must be checked before "sk-" (both start with "sk-")
50
  if normalized is None and api_key:
51
- if api_key.startswith("sk-ant-"):
52
- normalized = "anthropic"
53
- elif api_key.startswith("sk-"):
54
  normalized = "openai"
55
  # HF tokens start with "hf_" - no auto-detection needed (falls through to default)
56
 
57
  # Validate explicit provider requests early
58
- valid_providers = (None, "openai", "anthropic", "gemini", "huggingface")
59
  if normalized not in valid_providers:
60
  raise ValueError(f"Unsupported provider: {provider!r}")
61
 
@@ -68,23 +66,6 @@ def get_chat_client(
68
  **kwargs,
69
  )
70
 
71
- # 2. Anthropic (Detected from sk-ant- prefix or explicit)
72
- if normalized == "anthropic":
73
- # Anthropic key was detected or explicitly requested - fail loudly
74
- raise NotImplementedError(
75
- "Anthropic client not yet implemented. "
76
- "Use OpenAI key (sk-...) or leave empty for free HuggingFace tier."
77
- )
78
-
79
- # 3. Gemini (High Performance / Alternative)
80
- if normalized == "gemini":
81
- # Explicit request for Gemini - fail loudly
82
- raise NotImplementedError("Gemini client not yet implemented (Planned Phase 4)")
83
-
84
- if normalized is None and settings.has_gemini_key:
85
- # Implicit (has key but not explicit) - log warning and fall through
86
- logger.warning("Gemini key detected but client not yet implemented; falling back")
87
-
88
  # 4. HuggingFace (Free Fallback)
89
  # This is the default if no other keys are present
90
  logger.info("Using HuggingFace Chat Client (Free Tier)")
 
48
  # This enables BYOK (Bring Your Own Key) from Gradio without explicit provider
49
  # Auto-detect OpenAI keys from the "sk-" prefix; other tokens fall through to the default
50
  if normalized is None and api_key:
51
+ if api_key.startswith("sk-"):
 
 
52
  normalized = "openai"
53
  # HF tokens start with "hf_" - no auto-detection needed (falls through to default)
54
 
55
  # Validate explicit provider requests early
56
+ valid_providers = (None, "openai", "huggingface")
57
  if normalized not in valid_providers:
58
  raise ValueError(f"Unsupported provider: {provider!r}")
59
 
 
66
  **kwargs,
67
  )
68
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
69
  # 2. HuggingFace (Free Fallback)
70
  # This is the default if no other keys are present
71
  logger.info("Using HuggingFace Chat Client (Free Tier)")
src/utils/config.py CHANGED
@@ -26,15 +26,10 @@ class Settings(BaseSettings):
26
 
27
  # LLM Configuration
28
  openai_api_key: str | None = Field(default=None, description="OpenAI API key")
29
- anthropic_api_key: str | None = Field(default=None, description="Anthropic API key")
30
- gemini_api_key: str | None = Field(default=None, description="Google Gemini API key")
31
- llm_provider: Literal["openai", "anthropic", "huggingface", "gemini"] = Field(
32
  default="openai", description="Which LLM provider to use"
33
  )
34
  openai_model: str = Field(default="gpt-5", description="OpenAI model name")
35
- anthropic_model: str = Field(
36
- default="claude-sonnet-4-5-20250929", description="Anthropic model"
37
- )
38
  # HuggingFace (free tier)
39
  # NOTE: Large models (70B+) are routed to third-party providers (Novita, Hyperbolic) which are
40
  # unreliable (500/401 errors). We use Qwen2.5-7B-Instruct as it is small enough to run on
@@ -77,10 +72,6 @@ class Settings(BaseSettings):
77
  description="Timeout for Advanced mode in seconds (default 10 min)",
78
  )
79
  search_timeout: int = Field(default=30, description="Seconds to wait for search")
80
- magentic_timeout: int = Field(
81
- default=600,
82
- description="Timeout for Magentic mode in seconds (deprecated, use advanced_timeout)",
83
- )
84
 
85
  # Logging
86
  log_level: Literal["DEBUG", "INFO", "WARNING", "ERROR"] = "INFO"
@@ -105,11 +96,6 @@ class Settings(BaseSettings):
105
  raise ConfigurationError("OPENAI_API_KEY not set")
106
  return self.openai_api_key
107
 
108
- if provider_lower == "anthropic":
109
- if not self.anthropic_api_key:
110
- raise ConfigurationError("ANTHROPIC_API_KEY not set")
111
- return self.anthropic_api_key
112
-
113
  raise ConfigurationError(f"Unknown LLM provider: {self.llm_provider}")
114
 
115
  def get_openai_api_key(self) -> str:
@@ -126,16 +112,6 @@ class Settings(BaseSettings):
126
  """Check if OpenAI API key is available."""
127
  return bool(self.openai_api_key)
128
 
129
- @property
130
- def has_anthropic_key(self) -> bool:
131
- """Check if Anthropic API key is available."""
132
- return bool(self.anthropic_api_key)
133
-
134
- @property
135
- def has_gemini_key(self) -> bool:
136
- """Check if Gemini API key is available."""
137
- return bool(self.gemini_api_key)
138
-
139
  @property
140
  def has_huggingface_key(self) -> bool:
141
  """Check if HuggingFace token is available."""
@@ -144,12 +120,7 @@ class Settings(BaseSettings):
144
  @property
145
  def has_any_llm_key(self) -> bool:
146
  """Check if any LLM API key is available."""
147
- return (
148
- self.has_openai_key
149
- or self.has_anthropic_key
150
- or self.has_huggingface_key
151
- or self.has_gemini_key
152
- )
153
 
154
 
155
  def get_settings() -> Settings:
 
26
 
27
  # LLM Configuration
28
  openai_api_key: str | None = Field(default=None, description="OpenAI API key")
29
+ llm_provider: Literal["openai", "huggingface"] = Field(
 
 
30
  default="openai", description="Which LLM provider to use"
31
  )
32
  openai_model: str = Field(default="gpt-5", description="OpenAI model name")
 
 
 
33
  # HuggingFace (free tier)
34
  # NOTE: Large models (70B+) are routed to third-party providers (Novita, Hyperbolic) which are
35
  # unreliable (500/401 errors). We use Qwen2.5-7B-Instruct as it is small enough to run on
 
72
  description="Timeout for Advanced mode in seconds (default 10 min)",
73
  )
74
  search_timeout: int = Field(default=30, description="Seconds to wait for search")
 
 
 
 
75
 
76
  # Logging
77
  log_level: Literal["DEBUG", "INFO", "WARNING", "ERROR"] = "INFO"
 
96
  raise ConfigurationError("OPENAI_API_KEY not set")
97
  return self.openai_api_key
98
 
 
 
 
 
 
99
  raise ConfigurationError(f"Unknown LLM provider: {self.llm_provider}")
100
 
101
  def get_openai_api_key(self) -> str:
 
112
  """Check if OpenAI API key is available."""
113
  return bool(self.openai_api_key)
114
 
 
 
 
 
 
 
 
 
 
 
115
  @property
116
  def has_huggingface_key(self) -> bool:
117
  """Check if HuggingFace token is available."""
 
120
  @property
121
  def has_any_llm_key(self) -> bool:
122
  """Check if any LLM API key is available."""
123
+ return self.has_openai_key or self.has_huggingface_key
 
 
 
 
 
124
 
125
 
126
  def get_settings() -> Settings:
tests/unit/clients/test_chat_client_factory.py CHANGED
@@ -71,17 +71,6 @@ class TestChatClientFactory:
71
 
72
  assert "HuggingFace" in type(client).__name__
73
 
74
- def test_gemini_provider_raises_not_implemented(self) -> None:
75
- """Explicit provider='gemini' should raise NotImplementedError (Phase 4)."""
76
- with patch("src.clients.factory.settings") as mock_settings:
77
- mock_settings.has_openai_key = False
78
- mock_settings.has_gemini_key = False
79
-
80
- from src.clients.factory import get_chat_client
81
-
82
- with pytest.raises(NotImplementedError, match="Gemini client not yet implemented"):
83
- get_chat_client(provider="gemini")
84
-
85
  def test_unsupported_provider_raises_value_error(self) -> None:
86
  """Unsupported provider should raise ValueError, not silently fallback."""
87
  with patch("src.clients.factory.settings") as mock_settings:
@@ -93,17 +82,6 @@ class TestChatClientFactory:
93
  with pytest.raises(ValueError, match="Unsupported provider"):
94
  get_chat_client(provider="invalid_provider")
95
 
96
- def test_anthropic_provider_raises_not_implemented(self) -> None:
97
- """Anthropic provider should raise NotImplementedError (not yet implemented)."""
98
- with patch("src.clients.factory.settings") as mock_settings:
99
- mock_settings.has_openai_key = False
100
- mock_settings.has_gemini_key = False
101
-
102
- from src.clients.factory import get_chat_client
103
-
104
- with pytest.raises(NotImplementedError, match="Anthropic client not yet implemented"):
105
- get_chat_client(provider="anthropic")
106
-
107
  def test_byok_auto_detects_openai_from_key_prefix(self) -> None:
108
  """BYOK: api_key starting with 'sk-' should auto-select OpenAI without explicit provider.
109
 
@@ -127,22 +105,6 @@ class TestChatClientFactory:
127
  # Should auto-detect OpenAI from 'sk-' prefix
128
  assert "OpenAI" in type(client).__name__
129
 
130
- def test_byok_auto_detects_anthropic_from_key_prefix(self) -> None:
131
- """BYOK: api_key starting with 'sk-ant-' should auto-detect Anthropic.
132
-
133
- Anthropic keys start with 'sk-ant-' which is a superset of 'sk-'.
134
- Detection must check 'sk-ant-' first to avoid misdetecting as OpenAI.
135
- """
136
- with patch("src.clients.factory.settings") as mock_settings:
137
- mock_settings.has_openai_key = False
138
- mock_settings.has_gemini_key = False
139
-
140
- from src.clients.factory import get_chat_client
141
-
142
- # BYOK: Anthropic key should raise NotImplementedError (not fall to HuggingFace!)
143
- with pytest.raises(NotImplementedError, match="Anthropic client not yet implemented"):
144
- get_chat_client(api_key="sk-ant-user-anthropic-key")
145
-
146
  def test_byok_hf_token_falls_through_to_huggingface(self) -> None:
147
  """BYOK: HuggingFace tokens (hf_...) should use HuggingFace client."""
148
  with patch("src.clients.factory.settings") as mock_settings:
 
71
 
72
  assert "HuggingFace" in type(client).__name__
73
 
 
 
 
 
 
 
 
 
 
 
 
74
  def test_unsupported_provider_raises_value_error(self) -> None:
75
  """Unsupported provider should raise ValueError, not silently fallback."""
76
  with patch("src.clients.factory.settings") as mock_settings:
 
82
  with pytest.raises(ValueError, match="Unsupported provider"):
83
  get_chat_client(provider="invalid_provider")
84
 
 
 
 
 
 
 
 
 
 
 
 
85
  def test_byok_auto_detects_openai_from_key_prefix(self) -> None:
86
  """BYOK: api_key starting with 'sk-' should auto-select OpenAI without explicit provider.
87
 
 
105
  # Should auto-detect OpenAI from 'sk-' prefix
106
  assert "OpenAI" in type(client).__name__
107
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
108
  def test_byok_hf_token_falls_through_to_huggingface(self) -> None:
109
  """BYOK: HuggingFace tokens (hf_...) should use HuggingFace client."""
110
  with patch("src.clients.factory.settings") as mock_settings:
tests/unit/test_app_timeout.py CHANGED
@@ -1,6 +1,5 @@
1
  """Tests for app timeout and history preservation."""
2
 
3
- import os
4
  from unittest.mock import MagicMock, patch
5
 
6
  import pytest
@@ -48,13 +47,3 @@ async def test_complete_event_preserves_history():
48
  assert "Step 1: Thinking..." in final_output
49
  assert "Step 2: Found data" in final_output
50
  assert "Timeout: Synthesizing..." in final_output
51
-
52
-
53
- @pytest.mark.asyncio
54
- async def test_timeout_configurable():
55
- """Verify MAGENTIC_TIMEOUT env var is respected."""
56
- from src.utils.config import Settings
57
-
58
- with patch.dict(os.environ, {"MAGENTIC_TIMEOUT": "120"}):
59
- settings = Settings()
60
- assert settings.magentic_timeout == 120
 
1
  """Tests for app timeout and history preservation."""
2
 
 
3
  from unittest.mock import MagicMock, patch
4
 
5
  import pytest
 
47
  assert "Step 1: Thinking..." in final_output
48
  assert "Step 2: Found data" in final_output
49
  assert "Timeout: Synthesizing..." in final_output
 
 
 
 
 
 
 
 
 
 
tests/unit/utils/test_config.py CHANGED
@@ -52,20 +52,3 @@ class TestSettings:
52
  settings = Settings(_env_file=None)
53
  with pytest.raises(ConfigurationError, match="OPENAI_API_KEY not set"):
54
  settings.get_api_key()
55
-
56
- def test_get_api_key_anthropic(self) -> None:
57
- """get_api_key should return Anthropic key when provider is anthropic."""
58
- with patch.dict(
59
- os.environ,
60
- {"LLM_PROVIDER": "anthropic", "ANTHROPIC_API_KEY": "sk-ant-test-key"},
61
- clear=True,
62
- ):
63
- settings = Settings(_env_file=None)
64
- assert settings.get_api_key() == "sk-ant-test-key"
65
-
66
- def test_get_api_key_anthropic_missing_raises(self) -> None:
67
- """get_api_key should raise ConfigurationError when Anthropic key is not set."""
68
- with patch.dict(os.environ, {"LLM_PROVIDER": "anthropic"}, clear=True):
69
- settings = Settings(_env_file=None)
70
- with pytest.raises(ConfigurationError, match="ANTHROPIC_API_KEY not set"):
71
- settings.get_api_key()
 
52
  settings = Settings(_env_file=None)
53
  with pytest.raises(ConfigurationError, match="OPENAI_API_KEY not set"):
54
  settings.get_api_key()