Cole Medin
committed on
Commit
·
a6d81b1
1
Parent(s):
e7ce257
Making Ollama work within the Docker container, very important fix
Browse files
- app/lib/.server/llm/api-key.ts +5 -1
- app/utils/constants.ts +17 -1
- docker-compose.yaml +6 -0
app/lib/.server/llm/api-key.ts
CHANGED
|
@@ -35,7 +35,11 @@ export function getBaseURL(cloudflareEnv: Env, provider: string) {
|
|
| 35 |
case 'OpenAILike':
|
| 36 |
return env.OPENAI_LIKE_API_BASE_URL || cloudflareEnv.OPENAI_LIKE_API_BASE_URL;
|
| 37 |
case 'Ollama':
|
| 38 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
| 39 |
default:
|
| 40 |
return "";
|
| 41 |
}
|
|
|
|
| 35 |
case 'OpenAILike':
|
| 36 |
return env.OPENAI_LIKE_API_BASE_URL || cloudflareEnv.OPENAI_LIKE_API_BASE_URL;
|
| 37 |
case 'Ollama':
|
| 38 |
+
let baseUrl = env.OLLAMA_API_BASE_URL || cloudflareEnv.OLLAMA_API_BASE_URL || "http://localhost:11434";
|
| 39 |
+
if (env.RUNNING_IN_DOCKER === 'true') {
|
| 40 |
+
baseUrl = baseUrl.replace("localhost", "host.docker.internal");
|
| 41 |
+
}
|
| 42 |
+
return baseUrl;
|
| 43 |
default:
|
| 44 |
return "";
|
| 45 |
}
|
app/utils/constants.ts
CHANGED
|
@@ -47,9 +47,25 @@ const staticModels: ModelInfo[] = [
|
|
| 47 |
|
| 48 |
export let MODEL_LIST: ModelInfo[] = [...staticModels];
|
| 49 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 50 |
async function getOllamaModels(): Promise<ModelInfo[]> {
|
| 51 |
try {
|
| 52 |
-
const base_url =
|
| 53 |
const response = await fetch(`${base_url}/api/tags`);
|
| 54 |
const data = await response.json() as OllamaApiResponse;
|
| 55 |
|
|
|
|
| 47 |
|
| 48 |
export let MODEL_LIST: ModelInfo[] = [...staticModels];
|
| 49 |
|
| 50 |
+
const getOllamaBaseUrl = () => {
|
| 51 |
+
const defaultBaseUrl = import.meta.env.OLLAMA_API_BASE_URL || 'http://localhost:11434';
|
| 52 |
+
// Check if we're in the browser
|
| 53 |
+
if (typeof window !== 'undefined') {
|
| 54 |
+
// Frontend always uses localhost
|
| 55 |
+
return defaultBaseUrl;
|
| 56 |
+
}
|
| 57 |
+
|
| 58 |
+
// Backend: Check if we're running in Docker
|
| 59 |
+
const isDocker = process.env.RUNNING_IN_DOCKER === 'true';
|
| 60 |
+
|
| 61 |
+
return isDocker
|
| 62 |
+
? defaultBaseUrl.replace("localhost", "host.docker.internal")
|
| 63 |
+
: defaultBaseUrl;
|
| 64 |
+
};
|
| 65 |
+
|
| 66 |
async function getOllamaModels(): Promise<ModelInfo[]> {
|
| 67 |
try {
|
| 68 |
+
const base_url = getOllamaBaseUrl();
|
| 69 |
const response = await fetch(`${base_url}/api/tags`);
|
| 70 |
const data = await response.json() as OllamaApiResponse;
|
| 71 |
|
docker-compose.yaml
CHANGED
|
@@ -20,6 +20,9 @@ services:
|
|
| 20 |
- GOOGLE_GENERATIVE_AI_API_KEY=${GOOGLE_GENERATIVE_AI_API_KEY}
|
| 21 |
- OLLAMA_API_BASE_URL=${OLLAMA_API_BASE_URL}
|
| 22 |
- VITE_LOG_LEVEL=${VITE_LOG_LEVEL:-debug}
|
|
|
|
|
|
|
|
|
|
| 23 |
command: pnpm run dockerstart
|
| 24 |
profiles:
|
| 25 |
- production # This service only runs in the production profile
|
|
@@ -43,6 +46,9 @@ services:
|
|
| 43 |
- GOOGLE_GENERATIVE_AI_API_KEY=${GOOGLE_GENERATIVE_AI_API_KEY}
|
| 44 |
- OLLAMA_API_BASE_URL=${OLLAMA_API_BASE_URL}
|
| 45 |
- VITE_LOG_LEVEL=${VITE_LOG_LEVEL:-debug}
|
|
|
|
|
|
|
|
|
|
| 46 |
volumes:
|
| 47 |
- type: bind
|
| 48 |
source: .
|
|
|
|
| 20 |
- GOOGLE_GENERATIVE_AI_API_KEY=${GOOGLE_GENERATIVE_AI_API_KEY}
|
| 21 |
- OLLAMA_API_BASE_URL=${OLLAMA_API_BASE_URL}
|
| 22 |
- VITE_LOG_LEVEL=${VITE_LOG_LEVEL:-debug}
|
| 23 |
+
- RUNNING_IN_DOCKER=true
|
| 24 |
+
extra_hosts:
|
| 25 |
+
- "host.docker.internal:host-gateway"
|
| 26 |
command: pnpm run dockerstart
|
| 27 |
profiles:
|
| 28 |
- production # This service only runs in the production profile
|
|
|
|
| 46 |
- GOOGLE_GENERATIVE_AI_API_KEY=${GOOGLE_GENERATIVE_AI_API_KEY}
|
| 47 |
- OLLAMA_API_BASE_URL=${OLLAMA_API_BASE_URL}
|
| 48 |
- VITE_LOG_LEVEL=${VITE_LOG_LEVEL:-debug}
|
| 49 |
+
- RUNNING_IN_DOCKER=true
|
| 50 |
+
extra_hosts:
|
| 51 |
+
- "host.docker.internal:host-gateway"
|
| 52 |
volumes:
|
| 53 |
- type: bind
|
| 54 |
source: .
|