# LibreChat configuration file.
# NOTE(review): the original file was collapsed onto a single physical line,
# which made everything after `version: 1.0.0` part of one trailing comment —
# i.e. the entire config below was silently ignored by the parser. Restored to
# block-style YAML.
version: "1.0.0"

# Cache settings: set to true to enable caching
cache: true

# Definition of custom endpoints
endpoints:
  custom:
    # Mistral AI API
    - name: "Mistral"
      apiKey: "${MISTRAL_API_KEY}"
      baseURL: "https://api.mistral.ai/v1"
      models:
        default: ["mistral-tiny", "mistral-small", "mistral-medium"]
        # Also fetch the live model list from the provider
        fetch: true
      titleConvo: true
      titleMethod: "completion"
      titleModel: "mistral-tiny"
      summarize: false
      summaryModel: "mistral-tiny"
      forcePrompt: false
      modelDisplayLabel: "Mistral"
      addParams:
        # NOTE(review): Mistral's API renamed this parameter to `safe_prompt`;
        # `safe_mode` may be rejected or ignored by current API versions — confirm.
        safe_mode: true
      # Dropping `temperature` and `top_p` means user sampling settings are
      # never forwarded to this endpoint — confirm that is intentional.
      dropParams: ["stop", "temperature", "top_p"]

    # OpenRouter.ai
    - name: "OpenRouter"
      apiKey: "${OPENROUTER_KEY}"
      baseURL: "https://openrouter.ai/api/v1"
      models:
        default:
          - "nousresearch/nous-capybara-7b:free"
          - "mistralai/mistral-7b-instruct:free"
          - "huggingfaceh4/zephyr-7b-beta:free"
        fetch: true
      titleConvo: true
      titleModel: "gpt-3.5-turbo"
      summarize: false
      summaryModel: "gpt-3.5-turbo"
      forcePrompt: false
      modelDisplayLabel: "OpenRouter"

    # Reverse Proxy: both key and URL are supplied by the user at runtime
    - name: "Reverse Proxy"
      apiKey: "user_provided"
      baseURL: "user_provided"
      models:
        default: ["gpt-3.5-turbo"]
        fetch: true
      titleConvo: true
      titleModel: "gpt-3.5-turbo"
      summarize: false
      summaryModel: "gpt-3.5-turbo"
      forcePrompt: false
      modelDisplayLabel: "AI"

# NOTE(review): this entry was originally listed under `endpoints.custom`, but
# `label` / `preset` / `showIconInMenu` / `showIconInHeader` / `default` are
# model-spec fields, and it lacks the `apiKey` / `baseURL` / `models` a custom
# endpoint requires. Moved to `modelSpecs.list` — confirm against the
# LibreChat config schema.
modelSpecs:
  list:
    # Google: Gemini 2.0 Flash Experimental
    - name: "Google Gemini Flash"
      label: "Google: Gemini 2.0 Flash Experimental"
      default: false
      showIconInMenu: true
      showIconInHeader: true
      preset:
        endpoint: "google"
        model: "gemini-2.0-flash-exp"
        modelLabel: "Google: Gemini 2.0 Flash Experimental"