// VirtualLabo — server/aiCore.js
// Source: VirtualLabo/server/aiCore.js (commit e1633a4, by rinogeek)
// Groq-backed AI core: describes chemistry mixtures via Groq's Responses /
// Chat Completions APIs with model fallback and strict output validation.
import https from "node:https";
import { URL } from "node:url";
/**
 * Error thrown when a call to the upstream Groq API fails.
 * `status` mirrors the HTTP status (0 means a network-level failure);
 * `upstreamMessage` carries the raw error text returned by Groq, if any.
 */
export class UpstreamError extends Error {
  /**
   * @param {number} status - HTTP status of the upstream response (0 for network errors).
   * @param {string} message - Human-readable error message.
   * @param {string} [upstreamMessage] - Raw upstream error message, when available.
   */
  constructor(status, message, upstreamMessage) {
    super(message);
    this.name = "UpstreamError"; // fix: identify the subclass in logs/stack traces
    this.status = status;
    this.upstreamMessage = upstreamMessage;
  }
}
// Reads one config value: the caller-supplied env object wins over
// process.env. Always returns a trimmed string ("" when unset).
function readEnv(env, key) {
  const fromOverride = env?.[key];
  const raw = fromOverride ?? process.env?.[key] ?? "";
  return raw.toString().trim();
}
// Builds the configured model rotation: GROQ_MODELS (comma-separated) takes
// precedence over GROQ_MODEL (single entry). Blank entries are dropped and
// duplicates removed (first occurrence wins). May return an empty array.
function parseModelList(env) {
  const csv = readEnv(env, "GROQ_MODELS");
  const single = readEnv(env, "GROQ_MODEL");
  const fromCsv = csv
    ? csv.split(",").map((entry) => entry.trim()).filter(Boolean)
    : [];
  const seed = fromCsv.length > 0 ? fromCsv : single ? [single] : [];
  // Set preserves insertion order, so this de-duplicates while keeping order.
  return [...new Set(seed)];
}
// Hard-coded fallback rotation used when no model is configured via env.
function defaultModelList() {
  const fallbackRotation = [
    "openai/gpt-oss-120b",
    "llama-3.1-8b-instant",
    "llama-3.3-70b-versatile",
    "openai/gpt-oss-20b",
  ];
  return fallbackRotation;
}
// Removes "safeguard" moderation models from the rotation; if that would
// leave nothing to call, keeps the original list unchanged.
function filterGenerationModels(models) {
  const usable = [];
  for (const model of models) {
    if (!/safeguard/i.test(model)) usable.push(model);
  }
  return usable.length === 0 ? models : usable;
}
// Decides whether an upstream failure justifies trying another model or
// endpoint. Auth errors (401/403) are fatal; rate limits (429), network
// failures (status 0), 5xx, and request-shape errors (400/404/422) are
// considered retryable; everything else is not.
function shouldFallbackOnUpstreamError(err) {
  const { status } = err;
  if (status === 401 || status === 403) return false;
  switch (status) {
    case 0:
    case 400:
    case 404:
    case 422:
    case 429:
      return true;
    default:
      return status >= 500 && status <= 599;
  }
}
// Counts sentence terminators (., !, ?) that are followed by whitespace or
// the end of the string, so "1.5" does not count as a sentence boundary.
function countSentences(text) {
  const terminators = text.match(/[.!?](\s|$)/g) ?? [];
  return terminators.length;
}
// Validates the raw model answer against the strict
// "Observation: ... Equation: ... Application: ..." contract.
// Returns { ok, reason } and, on success, the three parsed sections.
function validateObservation({ text, temperatureC, hasHazard }) {
  const answer = (text || "").trim();
  if (!/^Observation\s*:/i.test(answer)) return { ok: false, reason: "Ne commence pas par Observation:" };
  if (!/Equation\s*:/i.test(answer)) return { ok: false, reason: "equation manquante (Equation:)" };
  if (!/Application\s*:/i.test(answer)) return { ok: false, reason: "application manquante (Application:)" };
  // Split out the three labelled sections (labels are case-insensitive).
  const segments = answer.split(/Equation\s*:/i);
  const observation = segments[0].replace(/^Observation\s*:/i, "").trim();
  const afterEquation = segments[1].split(/Application\s*:/i);
  const equation = afterEquation[0].trim();
  const application = afterEquation[1] ? afterEquation[1].trim() : "";
  if (observation.length < 10) return { ok: false, reason: "observation trop courte" };
  // Inline sentence count: terminators (., !, ?) followed by whitespace/end.
  const terminators = observation.match(/[.!?](\s|$)/g);
  const sentenceCount = terminators ? terminators.length : 0;
  if (sentenceCount > 5) return { ok: false, reason: `observation trop longue (${sentenceCount} phrases)` };
  if (equation.length < 2) return { ok: false, reason: "equation invalide" };
  if (application.length < 10) return { ok: false, reason: "application invalide ou trop courte" };
  // NOTE: when a hazardous substance is present we could require safety
  // wording (secur/prud/attention/protection/danger/gants/lunettes/corros)
  // in the observation, but we deliberately accept the answer either way.
  return { ok: true, reason: null, parsed: { observation, equation, application } };
}
/**
 * Collapses the model answer into one clean paragraph: every whitespace run
 * (newlines, tabs, and non-breaking spaces — JS's \s already matches U+00A0)
 * becomes a single space, and the result is trimmed.
 * @param {string|null|undefined} text
 * @returns {string}
 */
function normalizeOneParagraph(text) {
  // The old second pass replacing \u00a0 was dead code: it ran AFTER the
  // \s+ collapse, and \s matches U+00A0 anyway, so one pass suffices.
  return (text || "").replace(/\s+/g, " ").trim();
}
// Builds the system instructions and the user message describing the mixture
// (temperature plus one YAML-ish bullet per substance). The system prompt
// pins the strict "Observation/Equation/Application" output format.
function buildPrompt({ temperatureC, substances }) {
  const describeSubstance = (s) =>
    [
      `- id: ${s.id}`,
      ` nom: ${s.name}`,
      ` formule: ${s.formula}`,
      ` etat: ${s.state}`,
      ` danger: ${s.hazard}`,
      ` description: ${s.description}`,
    ].join("\n");
  const system = [
    "Tu es un assistant de laboratoire de chimie. ",
    "Decris l'observation du melange en etant TRES CONCIS (MAXIMUM 2 a 3 phrases), en allant droit au but et en enlevant tous les details inutiles. Inclus les rappels de securite si necessaire. ",
    "Donne ensuite l'equation correspondante. ",
    "Termine avec une application pratique de ce melange dans la vie courante ou l'industrie (ex: 'ce melange sert a la fabrication de detergent'). ",
    "Important: Tu dois repondre UNIQUEMENT sous ce format textuel exact (sans markdown, sans blabla):\n",
    "Observation: <tes 2 a 3 phrases ici>\n",
    "Equation: <equation chimique avec -> ou -> ou →>\n",
    "Application: <1 a 2 phrases sur l'utilite pratique>",
  ].join("");
  const user = [
    "Contexte:",
    `- Temperature du milieu: ${temperatureC} degres Celsius`,
    "- Substances presentes (a melanger):",
    ...substances.map(describeSubstance),
    "",
    "Format attendu strictement: ",
    "Observation: <...>",
    "Equation: <...>",
    "Application: <...>",
  ].join("\n");
  return { system, user };
}
/**
 * Minimal JSON POST over node:https (adapts the callback API to a Promise).
 * Resolves with the status, raw body text, and a best-effort parsed JSON
 * body (`json` is null when the body is empty or not valid JSON — callers
 * inspect `.text` in that case). Rejects on request OR response stream errors.
 * @param {{url: string, headers: Object<string,string>, body: object}} opts
 * @returns {Promise<{status: number, statusText: string, text: string, json: object|null}>}
 */
async function httpsPostJson({ url, headers, body }) {
  const u = new URL(url);
  const bodyStr = JSON.stringify(body);
  return new Promise((resolve, reject) => {
    const req = https.request(
      {
        protocol: u.protocol,
        hostname: u.hostname,
        port: u.port ? Number(u.port) : undefined,
        path: `${u.pathname}${u.search}`,
        method: "POST",
        headers: {
          ...headers,
          "Content-Length": Buffer.byteLength(bodyStr).toString(),
        },
      },
      (res) => {
        let data = "";
        res.setEncoding("utf8");
        res.on("data", (chunk) => (data += chunk));
        // Fix: a connection dropped mid-response previously left this promise
        // pending forever (req "error" does not cover response-stream errors).
        res.on("error", reject);
        res.on("end", () => {
          let parsed = null;
          try {
            parsed = data ? JSON.parse(data) : null;
          } catch {
            parsed = null; // non-JSON body is not fatal; surface it via .text
          }
          resolve({
            status: res.statusCode || 0,
            statusText: res.statusMessage || "",
            text: data,
            json: parsed,
          });
        });
      },
    );
    req.on("error", reject);
    req.write(bodyStr);
    req.end();
  });
}
// Pulls the assistant text out of a Groq "Responses" API payload.
// Prefers the convenience field `output_text`; otherwise walks every output
// item (both `content` and `message.content`) collecting text parts of type
// "output_text" or "text", joined with newlines. Returns null when nothing
// textual is found.
function extractResponsesText(data) {
  const direct = data?.output_text;
  if (typeof direct === "string" && direct.trim()) return direct.trim();
  const collectText = (parts) => {
    const found = [];
    for (const part of Array.isArray(parts) ? parts : []) {
      const isTextPart = part?.type === "output_text" || part?.type === "text";
      if (isTextPart && typeof part?.text === "string") found.push(part.text);
    }
    return found;
  };
  const collected = [];
  for (const item of Array.isArray(data?.output) ? data.output : []) {
    collected.push(...collectText(item?.content));
    collected.push(...collectText(item?.message?.content));
  }
  const joined = collected.join("\n").trim();
  return joined || null;
}
// Calls Groq's Responses endpoint and returns the extracted answer text.
// Throws UpstreamError on network failure (status 0), on a non-2xx response
// (carrying the upstream error message when present), and with status 502
// when the response contains no usable text.
async function callGroqResponsesText({ apiKey, model, system, user }) {
  let resp;
  try {
    resp = await httpsPostJson({
      url: "https://api.groq.com/openai/v1/responses",
      headers: { "Content-Type": "application/json", Authorization: `Bearer ${apiKey}` },
      body: { model, instructions: system, input: user, temperature: 0.2, max_output_tokens: 350 },
    });
  } catch (e) {
    const code = e?.code ? String(e.code) : "NETWORK_ERROR";
    const msg = e?.message ? String(e.message) : "Network error";
    throw new UpstreamError(0, `Network error calling Groq: ${code} ${msg}`);
  }
  const isSuccess = resp.status >= 200 && resp.status < 300;
  if (!isSuccess) {
    const upstreamMessage = resp?.json?.error?.message || resp?.json?.message;
    const detail = upstreamMessage || resp.statusText || "Unknown";
    throw new UpstreamError(resp.status, `Groq API error ${resp.status}: ${detail}`, upstreamMessage);
  }
  const text = extractResponsesText(resp.json);
  if (!text) throw new UpstreamError(502, "Groq Responses returned no output text");
  return text;
}
// Calls Groq's Chat Completions endpoint and returns the assistant text.
// Handles the several shapes models use for content (plain string, legacy
// choice.text / delta.content, part arrays, {text} objects), surfaces
// refusals and tool-call-only answers as 422, and accepts a "reasoning"-only
// answer when it already looks like a final formatted reply. The fallback
// order below is significant and mirrors the original implementation.
async function callGroqChatCompletionsText({ apiKey, model, system, user }) {
  let resp;
  try {
    resp = await httpsPostJson({
      url: "https://api.groq.com/openai/v1/chat/completions",
      headers: { "Content-Type": "application/json", Authorization: `Bearer ${apiKey}` },
      body: {
        model,
        messages: [
          { role: "system", content: system },
          { role: "user", content: user },
        ],
        temperature: 0.2,
        max_tokens: 350,
      },
    });
  } catch (e) {
    const code = e?.code ? String(e.code) : "NETWORK_ERROR";
    const msg = e?.message ? String(e.message) : "Network error";
    throw new UpstreamError(0, `Network error calling Groq: ${code} ${msg}`);
  }
  const isSuccess = resp.status >= 200 && resp.status < 300;
  if (!isSuccess) {
    const upstreamMessage = resp?.json?.error?.message || resp?.json?.message;
    const detail = upstreamMessage || resp.statusText || "Unknown";
    throw new UpstreamError(resp.status, `Groq API error ${resp.status}: ${detail}`, upstreamMessage);
  }
  const firstChoice = Array.isArray(resp.json?.choices) ? resp.json.choices[0] : null;
  const message = firstChoice?.message;
  const content = message?.content;
  // 1) Plain string content — the common case.
  if (typeof content === "string" && content.trim()) return content.trim();
  // 2) Legacy completion-style fields.
  if (typeof firstChoice?.text === "string" && firstChoice.text.trim()) return firstChoice.text.trim();
  if (typeof firstChoice?.delta?.content === "string" && firstChoice.delta.content.trim()) {
    return firstChoice.delta.content.trim();
  }
  // 3) Content as an array of {text} parts.
  if (Array.isArray(content)) {
    const joined = content
      .map((part) => (typeof part?.text === "string" ? part.text : ""))
      .filter(Boolean)
      .join("\n")
      .trim();
    if (joined) return joined;
  }
  // 4) Content as a single {text} object.
  if (content && typeof content === "object") {
    const inner = content.text;
    if (typeof inner === "string" && inner.trim()) return inner.trim();
  }
  // 5) Explicit refusal from the model.
  if (typeof message?.refusal === "string" && message.refusal.trim()) {
    throw new UpstreamError(422, "Model refusal", message.refusal.trim());
  }
  // 6) Some models place the final answer into "reasoning". Accept it only if
  // it already carries the expected labels and an arrow; full validation with
  // the real temperature/hazard context happens later in mix().
  if (typeof message?.reasoning === "string" && message.reasoning.trim()) {
    const candidate = normalizeOneParagraph(message.reasoning);
    if (/equation\s*:/i.test(candidate) && /application\s*:/i.test(candidate) && /(->|→)/.test(candidate)) {
      return candidate;
    }
    throw new UpstreamError(422, "Model returned reasoning without final content");
  }
  // 7) Tool-call-only answers carry no usable text for our flow.
  if (Array.isArray(message?.tool_calls) && message.tool_calls.length > 0) {
    throw new UpstreamError(422, "Model returned tool_calls with no text");
  }
  throw new UpstreamError(502, "Groq Chat Completions returned no message content");
}
/**
 * Creates the AI core backed by Groq. `env` overrides process.env for config
 * (GROQ_API_KEY, GROQ_MODELS, GROQ_MODEL).
 *
 * Returned API:
 *  - ping(): configuration snapshot (no network call).
 *  - mix({substances, temperatureC}): asks the model to describe the mixture,
 *    validates the strict "Observation/Equation/Application" format, retries
 *    once per model with feedback, falls back from the Responses endpoint to
 *    Chat Completions, then across models on retryable upstream errors.
 */
export function createAiCore(env = {}) {
  const apiKey = readEnv(env, "GROQ_API_KEY");
  const modelsRaw = parseModelList(env);
  const models = modelsRaw.length > 0 ? modelsRaw : defaultModelList();
  const generationModels = filterGenerationModels(models);
  return {
    // Diagnostic snapshot: never leaks more than the key's 4-char prefix.
    ping() {
      return {
        ok: true,
        hasGroqApiKey: !!apiKey && !apiKey.includes("your_groq_api_key_here"),
        groqApiKeyPrefix: apiKey ? apiKey.slice(0, 4) : null,
        models,
        generationModels,
        node: process.version,
      };
    },
    /**
     * @param {{substances: Array<object>, temperatureC: number}} params
     * @returns {Promise<{text: {observation, equation, application}, modelUsed: string, triedModels: string[]}>}
     * @throws {Error|UpstreamError} on missing key, invalid input, or when
     *   every model fails (the thrown error carries triedModels/lastModel).
     */
    async mix({ substances, temperatureC }) {
      if (!apiKey || apiKey.includes("your_groq_api_key_here")) {
        throw new Error("GROQ_API_KEY manquant. Ajoute-le dans l'environnement/Secrets.");
      }
      // Keep only well-formed entries and de-duplicate by id (first wins).
      const picked = Array.isArray(substances) ? substances.filter((s) => s && s.id) : [];
      const uniqueById = [];
      const seen = new Set();
      for (const s of picked) {
        if (seen.has(s.id)) continue;
        seen.add(s.id);
        uniqueById.push(s);
      }
      if (uniqueById.length < 2) throw new Error("substances doit contenir au moins 2 substances valides");
      const safeTemp = Number.isFinite(Number(temperatureC)) ? Number(temperatureC) : 20;
      const hasHazard = uniqueById.some((s) => s.hazard && s.hazard !== "none");
      const { system, user } = buildPrompt({ temperatureC: safeTemp, substances: uniqueById });
      const triedModels = [];
      let lastModel = null;
      let lastErr = null;
      for (const model of generationModels) {
        triedModels.push(model);
        lastModel = model;
        let lastValidation = null;
        // Up to 2 attempts per model: the second attempt tells the model why
        // its previous answer was rejected.
        for (let attempt = 1; attempt <= 2; attempt++) {
          // BUGFIX: the retry message used to demand labels (Temperature:,
          // Cas d'usage:, Suggestions:) that validateObservation rejects,
          // guaranteeing the retry failed too; it now restates the actual
          // required Observation:/Equation:/Application: format.
          const attemptUser =
            attempt === 1
              ? user
              : `${user}\n\nLe texte precedent ne respecte pas les contraintes (${lastValidation?.reason || "incomplet"}). Regenere en respectant STRICTEMENT le format avec les labels Observation:, Equation:, Application:.`;
          try {
            let text;
            try {
              text = await callGroqResponsesText({ apiKey, model, system, user: attemptUser });
            } catch (e) {
              if (e instanceof UpstreamError && shouldFallbackOnUpstreamError(e)) {
                // Try chat completions for the same model before moving on.
                text = await callGroqChatCompletionsText({ apiKey, model, system, user: attemptUser });
              } else {
                throw e;
              }
            }
            text = normalizeOneParagraph(text);
            const v = validateObservation({ text, temperatureC: safeTemp, hasHazard });
            lastValidation = v;
            if (v.ok) return { text: v.parsed, modelUsed: model, triedModels };
          } catch (e) {
            lastErr = e;
            if (e instanceof UpstreamError) {
              if (!shouldFallbackOnUpstreamError(e)) throw e; // fatal (e.g. auth)
              break; // retryable: give the next model a chance
            }
            throw e; // non-upstream errors are programming errors — propagate
          }
        }
        // NOTE: keeps the FIRST error across models (original behavior).
        lastErr = lastErr || new UpstreamError(400, `Validation failed for model ${model}`, lastValidation?.reason || "validation failed");
      }
      if (lastErr instanceof UpstreamError) {
        const err = lastErr;
        err.triedModels = triedModels;
        err.lastModel = lastModel;
        throw err;
      }
      const e = new Error("All models failed");
      e.triedModels = triedModels;
      e.lastModel = lastModel;
      throw e;
    },
  };
}