Genus-gpt-api / index.js
Domify's picture
Create index.js
607561b verified
const express = require('express');
const cors = require('cors');
const fetch = require('node-fetch');
const app = express();
app.use(cors());
app.use(express.json());
const NVIDIA_API_KEY = process.env.NVIDIA_API_KEY;
const NVIDIA_BASE_URL = 'https://integrate.api.nvidia.com/v1';
const TEXT_MODELS = [
'meta/llama-3.1-405b-instruct',
'moonshotai/kimi-k2-instruct',
'qwen/qwen3-coder-480b-a35b-instruct',
'mistralai/mistral-large-3-675b-instruct-2512'
];
const IMAGE_MODEL = 'nvidia/sdxl';
const IMAGE_FALLBACK = 'black-forest-labs/flux-1-dev';
const VISION_MODEL = 'nvidia/neva-22b';
async function describeImage(base64Image) {
try {
const res = await fetch(`${NVIDIA_BASE_URL}/chat/completions`, {
method: 'POST',
headers: {
'Authorization': `Bearer ${NVIDIA_API_KEY}`,
'Content-Type': 'application/json'
},
body: JSON.stringify({
model: VISION_MODEL,
messages: [{
role: 'user',
content: [
{ type: 'text', text: 'Describe this image in detail, including any text visible.' },
{ type: 'image_url', image_url: { url: `data:image/png;base64,${base64Image}` } }
]
}],
max_tokens: 300,
temperature: 0.2
})
});
if (!res.ok) return null;
const json = await res.json();
return json.choices?.[0]?.message?.content || null;
} catch (e) { return null; }
}
async function generateImage(prompt) {
const models = [IMAGE_MODEL, IMAGE_FALLBACK];
for (const model of models) {
try {
const res = await fetch(`${NVIDIA_BASE_URL}/images/generations`, {
method: 'POST',
headers: {
'Authorization': `Bearer ${NVIDIA_API_KEY}`,
'Content-Type': 'application/json'
},
body: JSON.stringify({ model, prompt, n: 1, size: '1024x1024', response_format: 'b64_json' })
});
if (!res.ok) continue;
const json = await res.json();
const b64 = json.data?.[0]?.b64_json;
if (b64) return `data:image/png;base64,${b64}`;
} catch (e) { continue; }
}
return null;
}
async function callNVIDIA(messages, temperature = 0.6, max_tokens = 800) {
for (const model of TEXT_MODELS) {
try {
const res = await fetch(`${NVIDIA_BASE_URL}/chat/completions`, {
method: 'POST',
headers: {
'Authorization': `Bearer ${NVIDIA_API_KEY}`,
'Content-Type': 'application/json'
},
body: JSON.stringify({ model, messages, temperature, max_tokens })
});
if (!res.ok) continue;
const json = await res.json();
return json.choices?.[0]?.message?.content || '';
} catch (e) { continue; }
}
throw new Error('All models failed');
}
app.post('/ask', async (req, res) => {
const { question, system_prompt, image_base64 } = req.body;
const today = new Date().toDateString();
let systemContent = system_prompt || `Your name is GENUS GPT. Today: ${today}`;
let userMessage = question || 'What do you see?';
if (image_base64) {
const desc = await describeImage(image_base64);
if (desc) userMessage = `[Image: ${desc}]\n\nQuestion: ${question || 'What do you see?'}`;
}
const messages = [
{ role: 'system', content: systemContent },
{ role: 'user', content: userMessage }
];
try {
const rawAnswer = await callNVIDIA(messages);
let reasoning = '', answer = rawAnswer;
const m = rawAnswer.match(/\[REASONING\]([\s\S]*?)\[\/REASONING\]/i);
if (m) { reasoning = m[1].trim(); answer = rawAnswer.replace(/\[REASONING\][\s\S]*?\[\/REASONING\]/i, '').trim(); }
let asset = null, type = 'text';
if (answer.includes('[GENERATE_IMG:')) {
const p = answer.split('[GENERATE_IMG:')[1].split(']')[0];
asset = await generateImage(p);
type = asset ? 'image' : 'text';
}
res.json({ answer, reasoning, asset, type });
} catch (e) {
res.status(500).json({ error: 'Overloaded' });
}
});
app.listen(7860, () => console.log('Genus GPT Engine Live on NVIDIA'));