File size: 4,216 Bytes
dff68cb
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
2e3671b
dff68cb
2e3671b
dff68cb
2e3671b
dff68cb
 
 
 
 
 
e0dea15
dff68cb
 
3c90289
 
 
 
 
 
 
 
 
 
 
 
 
 
dff68cb
 
 
 
5f9a01b
 
 
dff68cb
 
 
 
 
 
 
 
 
 
 
 
 
5f9a01b
 
dff68cb
 
 
 
 
 
 
5c38e71
 
 
dff68cb
 
 
 
5c38e71
dff68cb
5c38e71
 
 
 
dff68cb
 
 
 
 
 
5c38e71
dff68cb
5c38e71
dff68cb
 
 
5c38e71
dff68cb
5c38e71
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
"""
Configuration module for model initialization and environment setup.
CRITICAL: Includes Ollama integration fix for Google ADK.
"""
import os
from dotenv import load_dotenv
from google.adk.models.lite_llm import LiteLlm
from google.adk.sessions import DatabaseSessionService
from google.genai import types
from .utils import logger

# Load environment variables
load_dotenv()

# ===== SSL CONFIGURATION =====
# Fix for SSL certificate errors on Windows
# Point the SSL stack at certifi's bundled CA file so HTTPS verification
# works even when the system certificate store is missing or broken.
import certifi
os.environ['SSL_CERT_FILE'] = certifi.where()
logger.info(f"πŸ” SSL Cert File configured: {os.environ['SSL_CERT_FILE']}")

# ===== MODEL INITIALIZATION =====
# Using OpenRouter (Grok) via LiteLLM
def get_model():
    """Return a configured LiteLlm model instance for OpenRouter.

    Points LiteLLM's OpenAI-compatible client at the OpenRouter endpoint
    via environment variables, then wraps the GLM 4.5 Air free-tier model.

    Returns:
        LiteLlm: Model wrapper for ``openai/z-ai/glm-4.5-air:free``.
    """
    # Configure OpenRouter endpoint
    os.environ["OPENAI_API_BASE"] = "https://openrouter.ai/api/v1"

    # Fix: os.getenv() returns None when OPENROUTER_API_KEY is unset, and
    # assigning None into os.environ raises TypeError at import time.
    # Warn and continue instead, mirroring get_gemini_model()'s handling.
    api_key = os.getenv("OPENROUTER_API_KEY")
    if api_key:
        os.environ["OPENAI_API_KEY"] = api_key
    else:
        logger.warning("⚠️ OPENROUTER_API_KEY not found in environment. OpenRouter calls may fail.")

    # Use GLM 4.5 Air - free tier optimized for speed with function calling
    # LiteLLM uses 'openai/' prefix for OpenAI-compatible endpoints
    model = LiteLlm(model="openai/z-ai/glm-4.5-air:free")

    logger.info("βœ… Model initialized: z-ai/glm-4.5-air:free via OpenRouter")
    return model


# ===== GEMINI MODEL INITIALIZATION =====
# Using Google Gemini for Search Agents
from google.adk.models.google_llm import Gemini
# Model id consumed by get_gemini_model() below.
# NOTE(review): PEP 8 would name this MODEL (module constant); kept as-is
# because other modules may import it under this name.
Model="gemini-2.5-flash"
def get_gemini_model():
    """Return a Gemini model instance configured with HTTP retry options.

    Warns (but does not fail) when GOOGLE_API_KEY is absent from the
    environment; the actual failure would surface on first model call.
    """
    # Surface a missing key early rather than at first request time.
    if not os.getenv("GOOGLE_API_KEY"):
        logger.warning("⚠️ GOOGLE_API_KEY not found in environment. Gemini may fail.")

    # Retry transient HTTP failures: up to 10 attempts, 10s initial delay.
    retry_options = types.HttpRetryOptions(initial_delay=10, attempts=10)
    content_config = types.GenerateContentConfig(
        http_options=types.HttpOptions(retry_options=retry_options)
    )
    gemini = Gemini(model=Model, generate_content_config=content_config)
    logger.info(f"βœ… Model initialized: {Model} with Retry Options")
    return gemini


# ===== SESSION SERVICE INITIALIZATION =====
# Using LazyDatabaseSessionService to prevent empty sessions on load
from .lazy_session import LazyDatabaseSessionService

def get_session_service(db_url=None):
    """
    Build and return a lazily-initialized database session service.

    Args:
        db_url: Database connection string. Falls back to the DATABASE_URL
                environment variable, then to a local SQLite file.
    """
    # Resolution order: explicit argument > env var > local default.
    # legacy_solver.db is the default because it holds the existing sessions.
    resolved = db_url or os.getenv("DATABASE_URL", "sqlite+aiosqlite:///legacy_solver.db")

    service = LazyDatabaseSessionService(db_url=resolved)
    # Log only the URL scheme so credentials never end up in the logs.
    logger.info(f"βœ… Session service initialized (Lazy): {resolved.split('://')[0]}://...")
    return service


# ===== MEMORY SERVICE INITIALIZATION =====
# Using InMemoryMemoryService for simplicity (DatabaseMemoryService not available in this ADK version)
from google.adk.memory import InMemoryMemoryService

# Global cache for memory service
# Module-level singleton, populated lazily by get_memory_service() so that
# embeddings / vector-store clients are only constructed once per process.
_memory_service_instance = None

def get_memory_service():
    """
    Return the process-wide MemoryService singleton.

    Prefers the Pinecone long-term vector store when PINECONE_API_KEY is
    set; otherwise (or on Pinecone init failure) falls back to the
    ephemeral InMemoryMemoryService. Cached globally so embeddings are
    not reloaded on every call.
    """
    global _memory_service_instance
    if _memory_service_instance:
        return _memory_service_instance

    key = os.getenv("PINECONE_API_KEY")
    logger.info(f"πŸ” Checking PINECONE_API_KEY: {'Found' if key else 'Missing'}")

    if key:
        try:
            # Imported lazily so the pinecone dependency is optional.
            from .memory import PineconeMemoryService
            _memory_service_instance = PineconeMemoryService(api_key=key)
            logger.info("βœ… Memory service initialized: Pinecone (Long-Term Vector Store)")
        except Exception as e:
            # Best-effort: degrade to the in-memory store rather than crash.
            logger.error(f"❌ Failed to init Pinecone, falling back to InMemory: {e}")
        else:
            return _memory_service_instance

    _memory_service_instance = InMemoryMemoryService()
    logger.info("βœ… Memory service initialized: InMemory (Ephemeral)")
    return _memory_service_instance