File size: 1,443 Bytes
0355450
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
"""Configuration management for the analyzer."""

import os
from enum import Enum
from typing import Optional


class LLMProvider(str, Enum):
    """Supported LLM providers.

    Subclasses ``str`` so members compare equal to their plain string
    values (e.g. ``LLMProvider.OPENAI == "openai"``), which lets the
    raw ``LLM_PROVIDER`` env value be used interchangeably.
    """

    OPENAI = "openai"  # hosted OpenAI API (requires OPENAI_API_KEY)
    LOCAL = "local"    # locally served model
    MOCK = "mock"      # stub provider; the fallback default


class Config:
    """Application configuration read from environment variables.

    Recognized variables:
        LLM_PROVIDER   -- provider name (case-insensitive; default "mock").
                          Raises ValueError at construction if the value is
                          not a member of LLMProvider.
        OPENAI_API_KEY -- API key; required only when provider is "openai".
        LLM_MODEL      -- explicit model name; overrides provider defaults.
        DEBUG          -- "true" (case-insensitive) enables debug mode.
    """

    def __init__(self):
        provider_name = os.getenv("LLM_PROVIDER", "mock").lower()
        # Raises ValueError for values outside the LLMProvider enum.
        self.llm_provider = LLMProvider(provider_name)
        self.openai_api_key: Optional[str] = os.getenv("OPENAI_API_KEY")
        self.llm_model: Optional[str] = os.getenv("LLM_MODEL")
        self.debug = os.getenv("DEBUG", "false").lower() == "true"

    def validate(self) -> None:
        """Check provider-specific requirements.

        Raises:
            ValueError: if the OpenAI provider is selected but no
                (non-empty) OPENAI_API_KEY was supplied.
        """
        if self.llm_provider != LLMProvider.OPENAI:
            return
        # Truthiness check on purpose: an empty-string key is also invalid.
        if not self.openai_api_key:
            raise ValueError(
                "OPENAI_API_KEY environment variable required "
                "when LLM_PROVIDER=openai"
            )

    @property
    def model_name(self) -> str:
        """Model to use: the explicit LLM_MODEL override if set (and
        non-empty), otherwise the per-provider default."""
        if self.llm_model:
            return self.llm_model

        if self.llm_provider == LLMProvider.OPENAI:
            return "gpt-4-turbo"
        if self.llm_provider == LLMProvider.LOCAL:
            return "mistral:7b"
        # MOCK — and any future provider — falls back to the mock model,
        # matching the original defaults-dict .get() fallback.
        return "mock-analyzer-v1"


# Module-level singleton: constructed once at import time from the
# environment as it exists when this module is first imported.
# NOTE(review): validate() is not called here — presumably the entry
# point is expected to call config.validate(); confirm against callers.
config = Config()