# src/llm.py
from langchain_openai import ChatOpenAI

from src.config.config import (
    LLM_MODEL,
    OPENROUTER_API_KEY,
    OPENROUTER_BASE_URL,
)


def load_llm():
    """Construct the chat-model client configured for OpenRouter.

    Reads the model name, base URL, and API key from ``src.config.config``.

    Returns:
        ChatOpenAI: a client pointed at the OpenRouter endpoint.

    Raises:
        EnvironmentError: if ``OPENROUTER_API_KEY`` is unset or empty.
    """
    # Fail fast with a clear message rather than letting the HTTP layer
    # surface an opaque auth error later.
    if not OPENROUTER_API_KEY:
        raise EnvironmentError("OPENROUTER_API_KEY not set")

    client = ChatOpenAI(
        model=LLM_MODEL,
        base_url=OPENROUTER_BASE_URL,
        api_key=OPENROUTER_API_KEY,
    )
    return client