htaf committed on
Commit
365aa28
·
1 Parent(s): 0bd72ac

initial pipeline test passed

Browse files
Files changed (1) hide show
  1. src/providers/ollama_provider.mjs +44 -23
src/providers/ollama_provider.mjs CHANGED
@@ -1,37 +1,58 @@
1
  // src/providers/ollama_provider.mjs
2
- import { BaseProvider } from "./base.mjs";
 
 
 
 
 
3
 
4
  export class OllamaProvider extends BaseProvider {
5
- constructor(stage = "generator") {
 
 
 
 
 
6
  super();
7
- this.url = process.env.OLLAMA_URL || "http://localhost:11434/api/generate";
8
 
9
- // STAGE-SELECTED MODEL
 
 
 
 
10
  this.model =
11
- process.env[`${stage.toUpperCase()}_MODEL`] ||
12
- process.env.MODEL ||
13
- "qwen3-8b-instruct";
 
14
  }
15
 
 
 
 
 
 
16
  async generate(prompt) {
17
- const body = {
18
- model: this.model,
19
- prompt,
20
- stream: false,
21
- options: {
22
- temperature: Number(process.env.TEMP || "0.3")
23
- }
24
- };
25
-
26
- const res = await fetch(this.url, {
27
- method: "POST",
28
- headers: { "Content-Type": "application/json" },
29
- body: JSON.stringify(body)
30
  });
31
 
32
- if (!res.ok) throw new Error(`OllamaProvider error: ${res.status}`);
 
 
33
 
34
- const json = await res.json();
35
- return json.response;
 
36
  }
37
  }
 
 
 
1
  // src/providers/ollama_provider.mjs
2
+ import { BaseProvider } from './base.mjs';
3
+
4
function normalizeBase(url) {
  // Remove every trailing slash so endpoint paths can be appended safely.
  let trimmed = url;
  while (trimmed.endsWith('/')) {
    trimmed = trimmed.slice(0, -1);
  }
  return trimmed;
}
8
 
9
  export class OllamaProvider extends BaseProvider {
10
+ /**
11
+ * @param {object} opts
12
+ * @param {string} [opts.model] - model name/tag in Ollama
13
+ * @param {string} [opts.baseUrl] - base Ollama URL (without /api/generate)
14
+ */
15
+ constructor(opts = {}) {
16
  super();
 
17
 
18
+ // Base URL: env or default, WITHOUT endpoint path
19
+ const envBase = process.env.OLLAMA_URL || 'http://localhost:11434';
20
+ this.baseUrl = normalizeBase(opts.baseUrl || envBase);
21
+
22
+ // Model: allow stage-specific env, then generic, then default
23
  this.model =
24
+ opts.model ||
25
+ process.env.GENERATOR_MODEL ||
26
+ process.env.OLLAMA_MODEL ||
27
+ 'qwen3-vl:8b-thinking';
28
  }
29
 
30
+ /**
31
+ * Call Ollama /api/generate
32
+ * @param {string} prompt
33
+ * @returns {Promise<string>} the model's response text
34
+ */
35
  async generate(prompt) {
36
+ const url = `${this.baseUrl}/api/generate`;
37
+
38
+ const res = await fetch(url, {
39
+ method: 'POST',
40
+ headers: { 'Content-Type': 'application/json' },
41
+ body: JSON.stringify({
42
+ model: this.model,
43
+ prompt,
44
+ stream: false,
45
+ }),
 
 
 
46
  });
47
 
48
+ if (!res.ok) {
49
+ throw new Error(`OllamaProvider error: ${res.status}`);
50
+ }
51
 
52
+ const data = await res.json();
53
+ // Standard Ollama /api/generate response uses `response`
54
+ return data.response ?? '';
55
  }
56
  }
57
+
58
+ export default OllamaProvider;