# NOTE(review): the lines below were non-YAML scraper residue (hosting-page
# chrome, a file-size banner, and hex/line-number gutter columns); they are
# commented out so this file parses as YAML. Original text, abridged:
#   Spaces / Sleeping / Sleeping / File size: 1,822 Bytes
#   3772222 5b6bf58 ... (gutter columns 1..65)
# Config schema version. Quoted so a future edit like `1.0` cannot be
# implicitly typed as a float by the parser.
version: "1.0.0"
# Cache settings: Set to true to enable caching
cache: true
# Definition of custom endpoints.
# NOTE(review): the original paste had all indentation stripped, which makes
# every key below `custom:` parse as a duplicate top-level key (silent
# last-wins). Nesting reconstructed with 2-space indentation; `fetch` belongs
# under `models` alongside `default`.
endpoints:
  custom:
    # Mistral AI API
    - name: "Mistral"
      apiKey: "${MISTRAL_API_KEY}"
      baseURL: "https://api.mistral.ai/v1"
      models:
        default: ["mistral-tiny", "mistral-small", "mistral-medium"]
        fetch: true
      titleConvo: true
      titleMethod: "completion"
      titleModel: "mistral-tiny"
      summarize: false
      summaryModel: "mistral-tiny"
      forcePrompt: false
      modelDisplayLabel: "Mistral"
      # Extra body params sent with every request to this endpoint.
      addParams:
        safe_mode: true
      # Params stripped from requests before sending.
      dropParams: ["stop", "temperature", "top_p"]

    # OpenRouter.ai
    - name: "OpenRouter"
      apiKey: "${OPENROUTER_KEY}"
      baseURL: "https://openrouter.ai/api/v1"
      models:
        default:
          - "nousresearch/nous-capybara-7b:free"
          - "mistralai/mistral-7b-instruct:free"
          - "huggingfaceh4/zephyr-7b-beta:free"
        fetch: true
      titleConvo: true
      titleModel: "gpt-3.5-turbo"
      summarize: false
      summaryModel: "gpt-3.5-turbo"
      forcePrompt: false
      modelDisplayLabel: "OpenRouter"

    # Reverse Proxy (user supplies key and base URL at runtime)
    - name: "Reverse Proxy"
      apiKey: "user_provided"
      baseURL: "user_provided"
      models:
        default: ["gpt-3.5-turbo"]
        fetch: true
      titleConvo: true
      titleModel: "gpt-3.5-turbo"
      summarize: false
      summaryModel: "gpt-3.5-turbo"
      forcePrompt: false
      modelDisplayLabel: "AI"
# Google: Gemini 2.0 Flash Experimental
# NOTE(review): this entry's fields (label, preset, showIconInMenu/Header,
# default) match LibreChat's modelSpecs schema, not a custom endpoint entry;
# the flattened paste lost its nesting, so it is reconstructed here under a
# top-level `modelSpecs.list` — confirm against the original file.
modelSpecs:
  list:
    - name: "Google Gemini Flash"
      label: "Google: Gemini 2.0 Flash Experimental"
      showIconInMenu: true
      showIconInHeader: true
      # Not pre-selected by default in the UI.
      default: false
      preset:
        endpoint: "google"
        model: "gemini-2.0-flash-exp"
        modelLabel: "Google: Gemini 2.0 Flash Experimental"