| import fetch from 'node-fetch'; |
| import { setAdditionalHeadersByType } from '../additional-headers.js'; |
| import { TEXTGEN_TYPES } from '../constants.js'; |
|
|
| |
| |
| |
| |
| |
| |
| |
| |
| |
/**
 * Gets embedding vectors for a batch of texts from an Ollama server.
 * Requests are issued strictly one at a time, in input order, so a single
 * Ollama instance is never hit with concurrent embedding calls.
 * @param {string[]} texts - Texts to embed
 * @param {string} apiUrl - Base URL of the Ollama API server
 * @param {string} model - Name of the embedding model
 * @param {boolean} keep - Whether to keep the model loaded (forwarded to keep_alive)
 * @param {object} directories - User directories, forwarded for header resolution
 * @returns {Promise<number[][]>} One embedding vector per input text, same order
 */
export async function getOllamaBatchVector(texts, apiUrl, model, keep, directories) {
    const vectors = [];
    for (let i = 0; i < texts.length; i++) {
        vectors.push(await getOllamaVector(texts[i], apiUrl, model, keep, directories));
    }
    return vectors;
}
|
|
| |
| |
| |
| |
| |
| |
| |
| |
| |
/**
 * Gets an embedding vector for a single text from an Ollama server.
 * @param {string} text - Text to embed
 * @param {string} apiUrl - Base URL of the Ollama API server
 * @param {string} model - Name of the embedding model
 * @param {boolean} keep - When true, keeps the model resident (keep_alive: -1);
 *   otherwise the field is omitted and the server default applies
 * @param {object} directories - User directories, forwarded for header resolution
 * @returns {Promise<number[]>} The embedding vector for the text
 * @throws {Error} If the HTTP request fails or the response has no embedding array
 */
export async function getOllamaVector(text, apiUrl, model, keep, directories) {
    // Keep host/port/credentials from apiUrl; only the path is fixed.
    const url = new URL(apiUrl);
    url.pathname = '/api/embeddings';

    const headers = {};
    setAdditionalHeadersByType(headers, TEXTGEN_TYPES.OLLAMA, apiUrl, directories);

    const response = await fetch(url, {
        method: 'POST',
        headers: {
            'Content-Type': 'application/json',
            ...headers,
        },
        body: JSON.stringify({
            prompt: text,
            model: model,
            // JSON.stringify drops undefined values, so keep_alive is omitted
            // entirely unless `keep` is set.
            keep_alive: keep ? -1 : undefined,
            truncate: true,
        }),
    });

    if (!response.ok) {
        const responseText = await response.text();
        throw new Error(`Ollama: Failed to get vector for text: ${response.statusText} ${responseText}`);
    }

    const data = await response.json();

    if (!Array.isArray(data?.embedding)) {
        // Fix: the old message ("API response was not an array") misstated the
        // check — the response is an object whose `embedding` field must be an
        // array. Also add the "Ollama:" prefix for consistency with the error above.
        throw new Error('Ollama: API response did not contain an embedding array');
    }

    return data.embedding;
}
|
|