// Abmacode12's picture
// Merci sincèrement pour votre travail et pour la base solide de CodeFlow Station.
// 146f017 verified
const express = require('express');
const fetch = require('node-fetch');
const cors = require('cors');
const path = require('path');
// Express application: CORS enabled for all origins, JSON bodies parsed.
const app = express();
app.use(cors());
app.use(express.json());
// Runtime configuration, sourced from the environment with safe defaults.
const CONFIG = {
  // NOTE(review): the placeholder fallback is not a usable key — confirm
  // ROSALINDA_API_KEY is set in every deployment environment.
  AI_API_KEY: process.env.ROSALINDA_API_KEY || 'your-ai-api-key',
  // `??` instead of `||`: only an *unset* PORT falls back to 3000; an
  // explicitly configured value is never discarded.
  PORT: process.env.PORT ?? 3000,
  MAX_RETRIES: 5,          // attempts per upstream AI call
  WATCHDOG_INTERVAL: 5000  // liveness-log period in milliseconds
};
// Watchdog to avoid any shutdown: periodic liveness log every
// CONFIG.WATCHDOG_INTERVAL ms. The interval is never cleared, which also
// keeps the event loop from draining.
setInterval(() => {
console.log("🟒 Rosalinda alive -", new Date().toISOString());
}, CONFIG.WATCHDOG_INTERVAL);
// NOTE(review): despite the log message, nothing here restarts the process —
// it merely logs and keeps running, which can leave the app in an undefined
// state after an uncaught exception. Consider exiting under a supervisor.
process.on('uncaughtException', (err) => {
console.error('⚠️ Uncaught Exception - Restarting process:', err);
});
// Unhandled promise rejections are logged but deliberately not fatal.
process.on('unhandledRejection', (err) => {
console.error('⚠️ Unhandled Rejection:', err);
});
// Health check endpoint: reports service status, current time, version,
// and the feature set this backend advertises.
app.get('/api/health', (req, res) => {
  const payload = {
    status: 'online',
    timestamp: new Date().toISOString(),
    version: '1.0.0',
    features: ['chat', 'images', 'videos']
  };
  res.json(payload);
});
// AI proxy endpoint with automatic retry.
// A single in-flight flag serializes upstream calls: requests arriving while
// one is being processed receive a "resume" response instead of queueing.
let isProcessing = false;
app.post('/api/rosalinda', async (req, res) => {
  if (isProcessing) {
    return res.json({ status: 'resume', message: 'Reprise en cours...' });
  }
  isProcessing = true;
  let lastError = null;
  try {
    for (let attempt = 1; attempt <= CONFIG.MAX_RETRIES; attempt++) {
      try {
        const response = await fetch('https://api.openai.com/v1/chat/completions', {
          method: 'POST',
          headers: {
            'Content-Type': 'application/json',
            'Authorization': `Bearer ${CONFIG.AI_API_KEY}`
          },
          body: JSON.stringify({
            model: "gpt-4",
            messages: [{ role: "user", content: req.body.prompt }],
            temperature: 0.7,
            max_tokens: 2000
          })
        });
        if (!response.ok) throw new Error(`AI API error: ${response.status}`);
        const data = await response.json();
        return res.json({
          result: data.choices[0]?.message?.content || "No response",
          usage: data.usage
        });
      } catch (err) {
        lastError = err;
        console.error(`Attempt ${attempt} failed:`, err);
        if (attempt < CONFIG.MAX_RETRIES) {
          // Linear backoff: 2s, 4s, 6s, ...
          await new Promise(resolve => setTimeout(resolve, 2000 * attempt));
        }
      }
    }
    // All retries exhausted: answer with 500 instead of throwing. The
    // original threw from an async Express 4 handler (unhandled rejection,
    // no response ever sent) and its fallback line referenced `err` outside
    // the catch scope, which would have raised a ReferenceError.
    res.status(500).json({ error: lastError?.message ?? 'Unknown error' });
  } finally {
    // Release the in-flight flag on every exit path.
    isProcessing = false;
  }
});
// Upstream media-generation targets, keyed by media type. Each entry holds
// the provider endpoint plus default request-body fields spread into every
// call (see processMedia).
const mediaGenerators = Object.fromEntries([
  [
    'image',
    {
      endpoint: 'https://api.openai.com/v1/images/generations',
      defaults: { n: 1, size: "1024x1024" }
    }
  ],
  [
    'video',
    {
      endpoint: 'https://api.deepai.org/api/video-generator',
      defaults: { length: 5, fps: 24 }
    }
  ]
]);
// Call the configured upstream generator for `type` ('image' | 'video'),
// retrying up to CONFIG.MAX_RETRIES times with linear backoff, then respond
// with the resulting media URL. Throws on an unknown type or once every
// retry has failed — callers translate the throw into an HTTP 500.
const processMedia = async (type, req, res) => {
  const config = mediaGenerators[type];
  if (!config) {
    // The original crashed with a TypeError on unknown types; fail explicitly.
    throw new Error(`Unknown media type: ${type}`);
  }
  let lastError;
  for (let attempt = 1; attempt <= CONFIG.MAX_RETRIES; attempt++) {
    try {
      const response = await fetch(config.endpoint, {
        method: 'POST',
        headers: {
          'Content-Type': 'application/json',
          'Authorization': `Bearer ${CONFIG.AI_API_KEY}`
        },
        body: JSON.stringify({
          // `||` (not `??`) is kept on purpose: an empty prompt also falls
          // back to the generic one.
          prompt: req.body.prompt || `creative ${type}`,
          ...config.defaults
        })
      });
      if (!response.ok) throw new Error(`${type} generation failed: ${response.status}`);
      const data = await response.json();
      // Providers differ in which field carries the result URL.
      return res.json({
        url: data.url || data.output_url,
        details: data
      });
    } catch (err) {
      lastError = err;
      console.error(`${type} generation attempt ${attempt} failed:`, err);
      if (attempt < CONFIG.MAX_RETRIES) {
        // Linear backoff: 3s, 6s, 9s, ...
        await new Promise(resolve => setTimeout(resolve, 3000 * attempt));
      }
    }
  }
  // All retries failed — propagate the last error. The original could fall
  // through here and resolve undefined without ever sending a response.
  throw lastError;
};
// Image generation route: delegates to the shared media pipeline and maps
// any failure to HTTP 500 with the error message.
app.post('/api/generate/image', async (req, res) => {
  try {
    await processMedia('image', req, res);
  } catch (err) {
    res.status(500).json({ error: err.message });
  }
});
// Video generation route: same shape as the image route — shared pipeline,
// failures surfaced as HTTP 500.
app.post('/api/generate/video', async (req, res) => {
  try {
    await processMedia('video', req, res);
  } catch (err) {
    res.status(500).json({ error: err.message });
  }
});
// Static files for preview (served from ./previews under /preview).
app.use('/preview', express.static(path.join(__dirname, 'previews')));
// Start the HTTP server and print the startup banner plus the endpoint list.
// NOTE(review): the banner and several log strings below contain mojibake
// (UTF-8 read as Latin-1) — left byte-identical here; re-encode the file to
// restore the intended box-drawing art and accented French text.
app.listen(CONFIG.PORT, () => {
console.log(`
β–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ•— β–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ•— β–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ•— β–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ•— β–ˆβ–ˆβ•— β–ˆβ–ˆβ–ˆβ•— β–ˆβ–ˆβ•—β–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ•— β–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ•—
β–ˆβ–ˆβ•”β•β•β–ˆβ–ˆβ•—β–ˆβ–ˆβ•”β•β•β•β–ˆβ–ˆβ•—β–ˆβ–ˆβ•”β•β•β•β•β•β–ˆβ–ˆβ•”β•β•β–ˆβ–ˆβ•—β–ˆβ–ˆβ•‘ β–ˆβ–ˆβ–ˆβ–ˆβ•— β–ˆβ–ˆβ•‘β–ˆβ–ˆβ•”β•β•β–ˆβ–ˆβ•—β–ˆβ–ˆβ•”β•β•β–ˆβ–ˆβ•—
β–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ•”β•β–ˆβ–ˆβ•‘ β–ˆβ–ˆβ•‘β–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ•—β–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ•‘β–ˆβ–ˆβ•‘ β–ˆβ–ˆβ•”β–ˆβ–ˆβ•— β–ˆβ–ˆβ•‘β–ˆβ–ˆβ•‘ β–ˆβ–ˆβ•‘β–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ•‘
β–ˆβ–ˆβ•”β•β•β–ˆβ–ˆβ•—β–ˆβ–ˆβ•‘ β–ˆβ–ˆβ•‘β•šβ•β•β•β•β–ˆβ–ˆβ•‘β–ˆβ–ˆβ•”β•β•β–ˆβ–ˆβ•‘β–ˆβ–ˆβ•‘ β–ˆβ–ˆβ•‘β•šβ–ˆβ–ˆβ•—β–ˆβ–ˆβ•‘β–ˆβ–ˆβ•‘ β–ˆβ–ˆβ•‘β–ˆβ–ˆβ•”β•β•β–ˆβ–ˆβ•‘
β–ˆβ–ˆβ•‘ β–ˆβ–ˆβ•‘β•šβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ•”β•β–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ•‘β–ˆβ–ˆβ•‘ β–ˆβ–ˆβ•‘β–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ•—β–ˆβ–ˆβ•‘ β•šβ–ˆβ–ˆβ–ˆβ–ˆβ•‘β–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ•”β•β–ˆβ–ˆβ•‘ β–ˆβ–ˆβ•‘
β•šβ•β• β•šβ•β• β•šβ•β•β•β•β•β• β•šβ•β•β•β•β•β•β•β•šβ•β• β•šβ•β•β•šβ•β•β•β•β•β•β•β•šβ•β• β•šβ•β•β•β•β•šβ•β•β•β•β•β• β•šβ•β• β•šβ•β•
`);
console.log(`βœ… Rosalinda backend ONLINE - Port ${CONFIG.PORT}`);
console.log(`πŸ”— Endpoints disponibles:`);
console.log(`- POST /api/rosalinda - Chat AI`);
console.log(`- POST /api/generate/image - GΓ©nΓ©ration d'images`);
console.log(`- POST /api/generate/video - GΓ©nΓ©ration de vidΓ©os`);
console.log(`- GET /api/health - VΓ©rification du statut`);
console.log(`\nπŸ›‘οΈ Watchdog actif - Intervalle: ${CONFIG.WATCHDOG_INTERVAL}ms`);
});