zhlajiex
committed on
Commit
·
acc3f7d
1
Parent(s):
a541c47
Identity: Full Theme Sync - CODEX_COLLECTIVE v5.0 with image generation and renamed engines
Browse files- backend/controllers/ai.js +46 -15
- backend/public/chat.html +8 -3
backend/controllers/ai.js
CHANGED
|
@@ -16,8 +16,9 @@ const MODELS = {
|
|
| 16 |
'Codex Ratio': 'llama-3.3-70b',
|
| 17 |
'Codex Nexus': 'deepseek-v3.2',
|
| 18 |
'Codex Fero': 'Qwen/Qwen3-Coder-480B-A35B-Instruct',
|
| 19 |
-
'Codex Zenith': '
|
| 20 |
-
'Codex Magna': '
|
|
|
|
| 21 |
};
|
| 22 |
|
| 23 |
const PROVIDERS = {
|
|
@@ -34,7 +35,8 @@ const SPECIALIZATIONS = {
|
|
| 34 |
'Codex Nexus': 'ROLE: GENERALIST_ELITE. Deep understanding and versatility.',
|
| 35 |
'Codex Fero': 'CORE_PROTOCOL: FERO_ENGINE. Heavyweight architecture.',
|
| 36 |
'Codex Zenith': 'CORE_PROTOCOL: ZENITH_REASONING. Deep thinking phase.',
|
| 37 |
-
'Codex Magna': 'CORE_PROTOCOL: MAGNA_FRONTIER. Ultimate intelligence and nuance.'
|
|
|
|
| 38 |
};
|
| 39 |
|
| 40 |
const SYSTEM_PROMPT = `CORE_IDENTITY:
|
|
@@ -48,13 +50,21 @@ OPERATIONAL_RULES:
|
|
| 48 |
5. MODEL_ACKNOWLEDGEMENT: List available models as: {AVAILABLE_MODELS}.`;
|
| 49 |
|
| 50 |
exports.chat = asyncHandler(async (req, res, next) => {
|
| 51 |
-
|
| 52 |
const user = req.user;
|
| 53 |
|
| 54 |
if (user.usage.requestsToday >= 150 && user.role !== 'owner') {
|
| 55 |
return next(new ErrorResponse('DAILY_PROTOCOL_LIMIT_EXCEEDED', 429));
|
| 56 |
}
|
| 57 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 58 |
const activeModelName = model || 'Codex Velox';
|
| 59 |
|
| 60 |
// ZENITH LIMITATION LOGIC
|
|
@@ -87,7 +97,7 @@ exports.chat = asyncHandler(async (req, res, next) => {
|
|
| 87 |
// 3. Build History
|
| 88 |
const history = await Message.find({ sessionId: session._id }).sort({ createdAt: 1 }).limit(10);
|
| 89 |
const availableModelsList = Object.keys(MODELS).join(', ');
|
| 90 |
-
const specialization = SPECIALIZATIONS[activeModelName];
|
| 91 |
|
| 92 |
const apiMessages = [
|
| 93 |
{ role: 'system', content: SYSTEM_PROMPT.replace('{ACTIVE_MODEL}', activeModelName).replace('{AVAILABLE_MODELS}', availableModelsList) },
|
|
@@ -105,6 +115,35 @@ exports.chat = asyncHandler(async (req, res, next) => {
|
|
| 105 |
res.setHeader('Connection', 'keep-alive');
|
| 106 |
|
| 107 |
try {
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 108 |
if (['Codex Fero', 'Codex Zenith', 'Codex Magna'].includes(activeModelName)) {
|
| 109 |
const spaceName = process.env.HF_SPACE_ID || "zhlajiex/aimodel";
|
| 110 |
const hfToken = process.env.HF_TOKEN;
|
|
@@ -127,23 +166,18 @@ exports.chat = asyncHandler(async (req, res, next) => {
|
|
| 127 |
if (newContent) res.write(`data: ${JSON.stringify({ message: newContent })}\n\n`);
|
| 128 |
}
|
| 129 |
}
|
| 130 |
-
} catch (loopErr) {
|
| 131 |
-
console.error("Gradio_Loop_Error:", loopErr.message);
|
| 132 |
-
res.write(`data: ${JSON.stringify({ message: "\n\n[NEURAL_LINK_ERROR]: Request interrupted. Processing partial response." })}\n\n`);
|
| 133 |
-
}
|
| 134 |
|
| 135 |
await Message.create({ sessionId: session._id, sender: 'user', content: message || "[SIGNAL]", attachmentUrl });
|
| 136 |
await Message.create({ sessionId: session._id, sender: 'ai', content: fullAIResponse || "[EMPTY_SIGNAL]", modelUsed: activeModelName });
|
| 137 |
user.usage.requestsToday += 1;
|
| 138 |
await user.save();
|
| 139 |
-
|
| 140 |
-
// CRITICAL: Always send done signal
|
| 141 |
res.write(`data: ${JSON.stringify({ done: true, sessionId: session._id })}\n\n`);
|
| 142 |
res.end();
|
| 143 |
return;
|
| 144 |
}
|
| 145 |
|
| 146 |
-
// Standard API Call
|
| 147 |
let apiUrl = 'https://api.cerebras.ai/v1/chat/completions', apiKey = 'csk-mvww3vy29hykeektyv65w9rkjx94hw4r6mrcj5tjcw9942d2';
|
| 148 |
if (activeModelName === 'Codex Nexus') {
|
| 149 |
apiUrl = process.env.FLOWEY_URL || 'https://api.flowey.dev/v1/chat/completions';
|
|
@@ -179,15 +213,12 @@ exports.chat = asyncHandler(async (req, res, next) => {
|
|
| 179 |
await Message.create({ sessionId: session._id, sender: 'ai', content: fullAIResponse, modelUsed: activeModelName });
|
| 180 |
user.usage.requestsToday += 1;
|
| 181 |
await user.save();
|
| 182 |
-
// Ensure done signal sent for standard providers too if not caught by [DONE]
|
| 183 |
res.write(`data: ${JSON.stringify({ done: true, sessionId: session._id })}\n\n`);
|
| 184 |
res.end();
|
| 185 |
});
|
| 186 |
|
| 187 |
} catch (err) {
|
| 188 |
-
console.error("Master_Chat_Error:", err.message);
|
| 189 |
res.write(`data: ${JSON.stringify({ error: "NEURAL_LINK_SEVERED", details: err.message })}\n\n`);
|
| 190 |
-
res.write(`data: ${JSON.stringify({ done: true })}\n\n`);
|
| 191 |
res.end();
|
| 192 |
}
|
| 193 |
});
|
|
|
|
| 16 |
'Codex Ratio': 'llama-3.3-70b',
|
| 17 |
'Codex Nexus': 'deepseek-v3.2',
|
| 18 |
'Codex Fero': 'Qwen/Qwen3-Coder-480B-A35B-Instruct',
|
| 19 |
+
'Codex Zenith': 'deepseek-ai/DeepSeek-R1-Distill-Qwen-7B',
|
| 20 |
+
'Codex Magna': 'meta-llama/Llama-3.3-70B-Instruct',
|
| 21 |
+
'Codex Vision': 'zai-org/GLM-Image'
|
| 22 |
};
|
| 23 |
|
| 24 |
const PROVIDERS = {
|
|
|
|
| 35 |
'Codex Nexus': 'ROLE: GENERALIST_ELITE. Deep understanding and versatility.',
|
| 36 |
'Codex Fero': 'CORE_PROTOCOL: FERO_ENGINE. Heavyweight architecture.',
|
| 37 |
'Codex Zenith': 'CORE_PROTOCOL: ZENITH_REASONING. Deep thinking phase.',
|
| 38 |
+
'Codex Magna': 'CORE_PROTOCOL: MAGNA_FRONTIER. Ultimate intelligence and nuance.',
|
| 39 |
+
'Codex Vision': 'CORE_PROTOCOL: VISUAL_PROJECTION. Direct image synthesis.'
|
| 40 |
};
|
| 41 |
|
| 42 |
const SYSTEM_PROMPT = `CORE_IDENTITY:
|
|
|
|
| 50 |
5. MODEL_ACKNOWLEDGEMENT: List available models as: {AVAILABLE_MODELS}.`;
|
| 51 |
|
| 52 |
exports.chat = asyncHandler(async (req, res, next) => {
|
| 53 |
+
let { message, sessionId, model } = req.body;
|
| 54 |
const user = req.user;
|
| 55 |
|
| 56 |
if (user.usage.requestsToday >= 150 && user.role !== 'owner') {
|
| 57 |
return next(new ErrorResponse('DAILY_PROTOCOL_LIMIT_EXCEEDED', 429));
|
| 58 |
}
|
| 59 |
|
| 60 |
+
// --- AUTOMATIC VISION ROUTING ---
|
| 61 |
+
const visionTriggers = ["make me an image", "generate an image", "create an image", "visualize"];
|
| 62 |
+
const lowerMsg = message.toLowerCase();
|
| 63 |
+
if (visionTriggers.some(trigger => lowerMsg.startsWith(trigger))) {
|
| 64 |
+
model = 'Codex Vision';
|
| 65 |
+
console.log(`[Auto-Switch] Detected visual request. Engaging Codex Vision.`);
|
| 66 |
+
}
|
| 67 |
+
|
| 68 |
const activeModelName = model || 'Codex Velox';
|
| 69 |
|
| 70 |
// ZENITH LIMITATION LOGIC
|
|
|
|
| 97 |
// 3. Build History
|
| 98 |
const history = await Message.find({ sessionId: session._id }).sort({ createdAt: 1 }).limit(10);
|
| 99 |
const availableModelsList = Object.keys(MODELS).join(', ');
|
| 100 |
+
const specialization = SPECIALIZATIONS[activeModelName] || SPECIALIZATIONS['Codex Velox'];
|
| 101 |
|
| 102 |
const apiMessages = [
|
| 103 |
{ role: 'system', content: SYSTEM_PROMPT.replace('{ACTIVE_MODEL}', activeModelName).replace('{AVAILABLE_MODELS}', availableModelsList) },
|
|
|
|
| 115 |
res.setHeader('Connection', 'keep-alive');
|
| 116 |
|
| 117 |
try {
|
| 118 |
+
// --- VISION CORE EXECUTION ---
|
| 119 |
+
if (activeModelName === 'Codex Vision') {
|
| 120 |
+
const prompt = message.replace(/make me an image|generate an image|create an image|visualize/gi, "").trim();
|
| 121 |
+
const hfToken = process.env.HF_TOKEN;
|
| 122 |
+
const response = await axios.post(
|
| 123 |
+
`https://api-inference.huggingface.co/models/${MODELS['Codex Vision']}`,
|
| 124 |
+
{ inputs: prompt },
|
| 125 |
+
{ headers: { Authorization: `Bearer ${hfToken}` }, responseType: 'arraybuffer' }
|
| 126 |
+
);
|
| 127 |
+
|
| 128 |
+
const filename = `vision-${Date.now()}.png`;
|
| 129 |
+
const filepath = `./public/uploads/${filename}`;
|
| 130 |
+
if (!fs.existsSync('./public/uploads')) fs.mkdirSync('./public/uploads', { recursive: true });
|
| 131 |
+
fs.writeFileSync(filepath, response.data);
|
| 132 |
+
const imageUrl = `/uploads/${filename}`;
|
| 133 |
+
const aiResponse = `[VISUAL_PROJECTION_COMPLETE]\n\n![Generated Image](${imageUrl})`;
|
| 134 |
+
|
| 135 |
+
await Message.create({ sessionId: session._id, sender: 'user', content: message, attachmentUrl: '' });
|
| 136 |
+
await Message.create({ sessionId: session._id, sender: 'ai', content: aiResponse, modelUsed: activeModelName });
|
| 137 |
+
user.usage.requestsToday += 1;
|
| 138 |
+
await user.save();
|
| 139 |
+
|
| 140 |
+
res.write(`data: ${JSON.stringify({ message: aiResponse })}\n\n`);
|
| 141 |
+
res.write(`data: ${JSON.stringify({ done: true, sessionId: session._id })}\n\n`);
|
| 142 |
+
res.end();
|
| 143 |
+
return;
|
| 144 |
+
}
|
| 145 |
+
|
| 146 |
+
// --- GRADIO CORES (Fero, Zenith, Magna) ---
|
| 147 |
if (['Codex Fero', 'Codex Zenith', 'Codex Magna'].includes(activeModelName)) {
|
| 148 |
const spaceName = process.env.HF_SPACE_ID || "zhlajiex/aimodel";
|
| 149 |
const hfToken = process.env.HF_TOKEN;
|
|
|
|
| 166 |
if (newContent) res.write(`data: ${JSON.stringify({ message: newContent })}\n\n`);
|
| 167 |
}
|
| 168 |
}
|
| 169 |
+
} catch (e) { console.error("Gradio Error", e); }
|
|
|
|
|
|
|
|
|
|
| 170 |
|
| 171 |
await Message.create({ sessionId: session._id, sender: 'user', content: message || "[SIGNAL]", attachmentUrl });
|
| 172 |
await Message.create({ sessionId: session._id, sender: 'ai', content: fullAIResponse || "[EMPTY_SIGNAL]", modelUsed: activeModelName });
|
| 173 |
user.usage.requestsToday += 1;
|
| 174 |
await user.save();
|
|
|
|
|
|
|
| 175 |
res.write(`data: ${JSON.stringify({ done: true, sessionId: session._id })}\n\n`);
|
| 176 |
res.end();
|
| 177 |
return;
|
| 178 |
}
|
| 179 |
|
| 180 |
+
// Standard API Call
|
| 181 |
let apiUrl = 'https://api.cerebras.ai/v1/chat/completions', apiKey = 'csk-mvww3vy29hykeektyv65w9rkjx94hw4r6mrcj5tjcw9942d2';
|
| 182 |
if (activeModelName === 'Codex Nexus') {
|
| 183 |
apiUrl = process.env.FLOWEY_URL || 'https://api.flowey.dev/v1/chat/completions';
|
|
|
|
| 213 |
await Message.create({ sessionId: session._id, sender: 'ai', content: fullAIResponse, modelUsed: activeModelName });
|
| 214 |
user.usage.requestsToday += 1;
|
| 215 |
await user.save();
|
|
|
|
| 216 |
res.write(`data: ${JSON.stringify({ done: true, sessionId: session._id })}\n\n`);
|
| 217 |
res.end();
|
| 218 |
});
|
| 219 |
|
| 220 |
} catch (err) {
|
|
|
|
| 221 |
res.write(`data: ${JSON.stringify({ error: "NEURAL_LINK_SEVERED", details: err.message })}\n\n`);
|
|
|
|
| 222 |
res.end();
|
| 223 |
}
|
| 224 |
});
|
backend/public/chat.html
CHANGED
|
@@ -295,7 +295,7 @@
|
|
| 295 |
document.getElementById('budget-val').innerText = p.reasoningBudget || 1024;
|
| 296 |
document.getElementById('temp-slider').value = p.temperature || 0.7;
|
| 297 |
document.getElementById('temp-val').innerText = p.temperature || 0.7;
|
| 298 |
-
document.getElementById('usage-count').innerText = `${String(data.data.usage.requestsToday).padStart(2, '0')}/
|
| 299 |
if (data.data.isOwner) document.getElementById('owner-tag').classList.remove('hidden');
|
| 300 |
}
|
| 301 |
loadHistory();
|
|
@@ -350,15 +350,20 @@
|
|
| 350 |
|
| 351 |
try {
|
| 352 |
const res = await fetch(`${API_BASE}/api/ai/chat`, { method: 'POST', headers: { 'Authorization': `Bearer ${token}` }, body: fd });
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 353 |
const reader = res.body.getReader(); const decoder = new TextDecoder();
|
| 354 |
let aiNode = appendMessage('ai', '', activeModel); let fullText = "";
|
| 355 |
|
| 356 |
-
// Safety Timeout: Reset button if no activity for 60 seconds
|
| 357 |
const safetyTimeout = setTimeout(() => {
|
| 358 |
if (isProcessing) {
|
| 359 |
isProcessing = false;
|
| 360 |
document.getElementById('send-btn').innerHTML = '<i class="fas fa-arrow-up text-sm"></i>';
|
| 361 |
-
console.warn("[SYSTEM] Connection timeout. Interface reset.");
|
| 362 |
}
|
| 363 |
}, 60000);
|
| 364 |
|
|
|
|
| 295 |
document.getElementById('budget-val').innerText = p.reasoningBudget || 1024;
|
| 296 |
document.getElementById('temp-slider').value = p.temperature || 0.7;
|
| 297 |
document.getElementById('temp-val').innerText = p.temperature || 0.7;
|
| 298 |
+
document.getElementById('usage-count').innerText = `${String(data.data.usage.requestsToday).padStart(2, '0')}/150`;
|
| 299 |
if (data.data.isOwner) document.getElementById('owner-tag').classList.remove('hidden');
|
| 300 |
}
|
| 301 |
loadHistory();
|
|
|
|
| 350 |
|
| 351 |
try {
|
| 352 |
const res = await fetch(`${API_BASE}/api/ai/chat`, { method: 'POST', headers: { 'Authorization': `Bearer ${token}` }, body: fd });
|
| 353 |
+
|
| 354 |
+
if (res.status === 401) {
|
| 355 |
+
localStorage.removeItem('token');
|
| 356 |
+
window.location.href = '/auth?error=SESSION_EXPIRED';
|
| 357 |
+
return;
|
| 358 |
+
}
|
| 359 |
+
|
| 360 |
const reader = res.body.getReader(); const decoder = new TextDecoder();
|
| 361 |
let aiNode = appendMessage('ai', '', activeModel); let fullText = "";
|
| 362 |
|
|
|
|
| 363 |
const safetyTimeout = setTimeout(() => {
|
| 364 |
if (isProcessing) {
|
| 365 |
isProcessing = false;
|
| 366 |
document.getElementById('send-btn').innerHTML = '<i class="fas fa-arrow-up text-sm"></i>';
|
|
|
|
| 367 |
}
|
| 368 |
}, 60000);
|
| 369 |
|