File size: 7,981 Bytes
c59d808
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
# Configuration settings for the Recipe Recommendation Bot
import json
import os
from typing import List, Optional

from dotenv import load_dotenv

# Load variables from a local .env file (if present) into os.environ at import
# time, so the os.getenv calls in Settings.__init__ below can see them.
load_dotenv()

class Settings:
    """Application configuration read from environment variables.

    Values are snapshotted once in ``__init__`` (after ``load_dotenv()`` has
    run at import time), so mutating ``os.environ`` later does not affect an
    existing instance. List-valued settings (CORS_*) accept a JSON-style
    string such as '["a","b"]'.
    """

    def __init__(self):
        # ===========================================
        # Server Configuration
        # ===========================================
        self.PORT = int(os.getenv("PORT", 8000))
        self.HOST = os.getenv("HOST", "0.0.0.0")
        self.ENVIRONMENT = os.getenv("ENVIRONMENT", "development")
        # Flag envs: only the literal string "true" (any casing) enables them.
        self.DEBUG = os.getenv("DEBUG", "true").lower() == "true"

        # ===========================================
        # CORS Configuration
        # ===========================================
        cors_origins = os.getenv("CORS_ORIGINS", '["http://localhost:3000","http://localhost:5173","http://localhost:8080"]')
        self.CORS_ORIGINS = self._parse_list(cors_origins)
        self.CORS_ALLOW_CREDENTIALS = os.getenv("CORS_ALLOW_CREDENTIALS", "true").lower() == "true"

        cors_methods = os.getenv("CORS_ALLOW_METHODS", '["GET","POST","PUT","DELETE","OPTIONS"]')
        self.CORS_ALLOW_METHODS = self._parse_list(cors_methods)

        cors_headers = os.getenv("CORS_ALLOW_HEADERS", '["*"]')
        self.CORS_ALLOW_HEADERS = self._parse_list(cors_headers)

        # ===========================================
        # LLM & Embedding Provider Configuration
        # ===========================================
        self.LLM_PROVIDER = os.getenv("LLM_PROVIDER", "google")
        # Embeddings may use a different provider; defaults to the LLM's.
        self.EMBEDDING_PROVIDER = os.getenv("EMBEDDING_PROVIDER", self.LLM_PROVIDER)

        # OpenAI Configuration
        self.OPENAI_API_KEY = os.getenv("OPENAI_API_KEY")
        self.OPENAI_MODEL = os.getenv("OPENAI_MODEL", "gpt-5-nano")
        self.OPENAI_TEMPERATURE = float(os.getenv("OPENAI_TEMPERATURE", "0.7"))
        self.OPENAI_MAX_TOKENS = int(os.getenv("OPENAI_MAX_TOKENS", "1000"))

        # Google AI Configuration
        self.GOOGLE_API_KEY = os.getenv("GOOGLE_API_KEY")
        self.GOOGLE_MODEL = os.getenv("GOOGLE_MODEL", "gemini-2.5-flash")
        self.GOOGLE_TEMPERATURE = float(os.getenv("GOOGLE_TEMPERATURE", "0.7"))
        self.GOOGLE_MAX_TOKENS = int(os.getenv("GOOGLE_MAX_TOKENS", "1000"))

        # Hugging Face Configuration
        self.HUGGINGFACE_API_TOKEN = os.getenv("HUGGINGFACE_API_TOKEN")
        self.HUGGINGFACE_MODEL = os.getenv("HUGGINGFACE_MODEL", "microsoft/DialoGPT-medium")
        self.HUGGINGFACE_API_URL = os.getenv("HUGGINGFACE_API_URL", "https://api-inference.huggingface.co/models/")
        self.HUGGINGFACE_USE_GPU = os.getenv("HUGGINGFACE_USE_GPU", "false").lower() == "true"
        self.HUGGINGFACE_USE_API = os.getenv("HUGGINGFACE_USE_API", "false").lower() == "true"

        # Ollama Configuration
        self.OLLAMA_BASE_URL = os.getenv("OLLAMA_BASE_URL", "http://localhost:11434")
        self.OLLAMA_MODEL = os.getenv("OLLAMA_MODEL", "llama3.1:8b")
        self.OLLAMA_TEMPERATURE = float(os.getenv("OLLAMA_TEMPERATURE", "0.7"))

        # ===========================================
        # Embedding Model Configuration
        # ===========================================
        # Note: which of these models is used is selected by the
        # EMBEDDING_PROVIDER setting above (defaults to LLM_PROVIDER).

        # OpenAI Embeddings
        self.OPENAI_EMBEDDING_MODEL = os.getenv("OPENAI_EMBEDDING_MODEL", "text-embedding-ada-002")

        # Google Embeddings
        self.GOOGLE_EMBEDDING_MODEL = os.getenv("GOOGLE_EMBEDDING_MODEL", "models/embedding-001")

        # Hugging Face Embeddings
        self.HUGGINGFACE_EMBEDDING_MODEL = os.getenv("HUGGINGFACE_EMBEDDING_MODEL", "sentence-transformers/all-MiniLM-L6-v2")

        # Ollama Embeddings
        self.OLLAMA_EMBEDDING_MODEL = os.getenv("OLLAMA_EMBEDDING_MODEL", "nomic-embed-text")

        # ===========================================
        # Logging Configuration
        # ===========================================
        self.LOG_LEVEL = os.getenv("LOG_LEVEL", "INFO")
        self.LOG_FORMAT = os.getenv("LOG_FORMAT", "%(asctime)s - %(name)s - %(levelname)s - %(message)s")
        self.LOG_FILE = os.getenv("LOG_FILE", "./logs/app.log")

        # ===========================================
        # Langchain Debugging Configuration
        # ===========================================
        # Note: set to "true" to enable detailed Langchain logs
        self.LANGCHAIN_DEBUG = os.getenv("LANGCHAIN_DEBUG", "false").lower() == "true"

    @staticmethod
    def _parse_list(value: str) -> List[str]:
        """Parse a string representation of a list into a list of strings.

        Tries ``json.loads`` first (the defaults are valid JSON, and JSON
        parsing correctly preserves commas inside quoted items); falls back
        to a naive comma split for non-JSON input such as ``"a, b"``.
        Returns ``["*"]`` (allow all) only if the value is unusable.
        """
        try:
            parsed = json.loads(value)
            if isinstance(parsed, list):
                return [str(item) for item in parsed]
            # Valid JSON but not a list (e.g. a bare string/number):
            # fall through to the naive parser below.
        except (TypeError, ValueError):
            pass  # not valid JSON -- use the naive parser
        try:
            # Remove surrounding brackets, then split on commas and strip quotes.
            if value.startswith('[') and value.endswith(']'):
                value = value[1:-1]
            items = [item.strip().strip('"').strip("'") for item in value.split(',')]
            return [item for item in items if item]  # drop empty items
        except AttributeError:
            # Not a string at all -- fall back to allow-all.
            return ["*"]

    def get_llm_config(self) -> dict:
        """Return the chat-model config dict for the active LLM_PROVIDER.

        The keys vary by provider: openai/google use api_key, model,
        temperature, max_tokens; huggingface uses api_token, model, api_url,
        use_gpu, use_api; ollama uses base_url, model, temperature.

        Raises:
            ValueError: if LLM_PROVIDER is not a supported provider.
        """
        if self.LLM_PROVIDER == "openai":
            return {
                "provider": "openai",
                "api_key": self.OPENAI_API_KEY,
                "model": self.OPENAI_MODEL,
                "temperature": self.OPENAI_TEMPERATURE,
                "max_tokens": self.OPENAI_MAX_TOKENS
            }
        elif self.LLM_PROVIDER == "google":
            return {
                "provider": "google",
                "api_key": self.GOOGLE_API_KEY,
                "model": self.GOOGLE_MODEL,
                "temperature": self.GOOGLE_TEMPERATURE,
                "max_tokens": self.GOOGLE_MAX_TOKENS
            }
        elif self.LLM_PROVIDER == "huggingface":
            return {
                "provider": "huggingface",
                "api_token": self.HUGGINGFACE_API_TOKEN,
                "model": self.HUGGINGFACE_MODEL,
                "api_url": self.HUGGINGFACE_API_URL,
                "use_gpu": self.HUGGINGFACE_USE_GPU,
                "use_api": self.HUGGINGFACE_USE_API
            }
        elif self.LLM_PROVIDER == "ollama":
            return {
                "provider": "ollama",
                "base_url": self.OLLAMA_BASE_URL,
                "model": self.OLLAMA_MODEL,
                "temperature": self.OLLAMA_TEMPERATURE
            }
        else:
            # Message kept consistent with get_embedding_config's error.
            raise ValueError(f"Unsupported LLM provider: {self.LLM_PROVIDER}. Supported providers: openai, google, huggingface, ollama")

    def get_embedding_config(self) -> dict:
        """Return the embedding-model config dict for EMBEDDING_PROVIDER.

        Raises:
            ValueError: if EMBEDDING_PROVIDER is not a supported provider.
        """
        provider = self.EMBEDDING_PROVIDER

        if provider == "openai":
            return {
                "provider": "openai",
                "api_key": self.OPENAI_API_KEY,
                "model": self.OPENAI_EMBEDDING_MODEL
            }
        elif provider == "google":
            return {
                "provider": "google",
                "api_key": self.GOOGLE_API_KEY,
                "model": self.GOOGLE_EMBEDDING_MODEL
            }
        elif provider == "huggingface":
            # Local sentence-transformers model; no API key required.
            return {
                "provider": "huggingface",
                "model": self.HUGGINGFACE_EMBEDDING_MODEL
            }
        elif provider == "ollama":
            return {
                "provider": "ollama",
                "base_url": self.OLLAMA_BASE_URL,
                "model": self.OLLAMA_EMBEDDING_MODEL
            }
        else:
            raise ValueError(f"Unsupported provider: {provider}. Supported providers: openai, google, huggingface, ollama")

# Module-level singleton: import this instance rather than constructing
# Settings() again, so every module shares the same configuration snapshot.
settings = Settings()

# Note: Vector store and database configuration is in database.py
# from config.database import db_settings