|
|
|
|
|
const express = require('express'); |
|
|
const fetch = require('node-fetch'); |
|
|
const cors = require('cors'); |
|
|
const path = require('path'); |
|
|
|
|
|
// Create the Express application and install app-wide middleware.
const app = express();

// Allow cross-origin requests (browser front-end served from another origin).
app.use(cors());

// Parse JSON request bodies into req.body for all routes.
app.use(express.json());
|
|
|
|
|
|
|
|
// Runtime configuration, resolved once at startup from the environment.
const runtimeEnv = process.env;

const CONFIG = {
  // Upstream AI API key; falls back to a placeholder when unset.
  AI_API_KEY: runtimeEnv.ROSALINDA_API_KEY || 'your-ai-api-key',
  // HTTP listen port (string from env is fine for app.listen).
  PORT: runtimeEnv.PORT || 3000,
  // Maximum attempts for each upstream call before giving up.
  MAX_RETRIES: 5,
  // Heartbeat log period in milliseconds.
  WATCHDOG_INTERVAL: 5000
};
|
|
|
|
|
|
|
|
// Heartbeat: periodically log that the process is alive so external log
// monitors can detect a hang.
// NOTE(review): the original log prefix was mojibake ("π’", an emoji whose
// UTF-8 bytes were decoded with the wrong charset); replaced with a readable tag.
setInterval(() => {
  console.log("[watchdog] Rosalinda alive -", new Date().toISOString());
}, CONFIG.WATCHDOG_INTERVAL);
|
|
|
|
|
// Last-resort handler for synchronous errors that escaped every try/catch.
// The original handler only logged (while its message claimed "Restarting
// process") and then resumed, leaving the process in an undefined state.
// Per the Node.js docs, after an uncaught exception the process should exit
// and be restarted by a supervisor (pm2 / systemd / docker).
process.on('uncaughtException', (err) => {
  console.error('Uncaught Exception - shutting down so a supervisor can restart:', err);
  process.exit(1);
});
|
|
|
|
|
// Log promise rejections that no .catch()/await handled. Deliberately kept
// non-fatal (best-effort service); only the mojibake log prefix was fixed.
process.on('unhandledRejection', (reason) => {
  console.error('Unhandled Rejection:', reason);
});
|
|
|
|
|
|
|
|
/**
 * GET /api/health — liveness probe.
 * Responds with service status, the current timestamp, the version string,
 * and the list of advertised features.
 */
app.get('/api/health', (req, res) => {
  const payload = {
    status: 'online',
    timestamp: new Date().toISOString(),
    version: '1.0.0',
    features: ['chat', 'images', 'videos']
  };
  res.json(payload);
});
|
|
|
|
|
|
|
|
// Coarse busy flag: only one chat request is processed at a time; concurrent
// callers get a 'resume' status instead of queueing.
let isProcessing = false;

/**
 * POST /api/rosalinda — proxy a chat prompt to the OpenAI chat-completions API.
 *
 * Retries up to CONFIG.MAX_RETRIES times with a linearly growing backoff
 * (2s, 4s, ...). On success responds with { result, usage }; after the last
 * failed attempt responds 500 with the last error's message.
 *
 * Fixes vs. the original:
 *  - the post-loop failure path referenced `err`, which only existed inside
 *    the catch block (ReferenceError); the last error is now captured.
 *  - `throw err` inside the async handler produced an unhandled rejection and
 *    left the client without a response; it now sends a 500 instead.
 *  - the busy flag is reset in `finally`, so it can never stay stuck on true.
 */
app.post('/api/rosalinda', async (req, res) => {
  if (isProcessing) {
    return res.json({ status: 'resume', message: 'Reprise en cours...' });
  }

  isProcessing = true;
  let lastError = null;

  try {
    for (let attempt = 1; attempt <= CONFIG.MAX_RETRIES; attempt++) {
      try {
        const response = await fetch('https://api.openai.com/v1/chat/completions', {
          method: 'POST',
          headers: {
            'Content-Type': 'application/json',
            'Authorization': `Bearer ${CONFIG.AI_API_KEY}`
          },
          body: JSON.stringify({
            model: "gpt-4",
            messages: [{ role: "user", content: req.body.prompt }],
            temperature: 0.7,
            max_tokens: 2000
          })
        });

        if (!response.ok) throw new Error(`AI API error: ${response.status}`);

        const data = await response.json();
        return res.json({
          result: data.choices[0]?.message?.content || "No response",
          usage: data.usage
        });
      } catch (err) {
        lastError = err;
        console.error(`Attempt ${attempt} failed:`, err);
        if (attempt < CONFIG.MAX_RETRIES) {
          // Linear backoff before the next attempt.
          await new Promise(resolve => setTimeout(resolve, 2000 * attempt));
        }
      }
    }
    // Every attempt failed: report the last error to the client.
    res.status(500).json({ error: lastError?.message ?? 'Unknown error' });
  } finally {
    isProcessing = false;
  }
});
|
|
|
|
|
|
|
|
// Endpoint and default request parameters for each supported media type.
const IMAGE_GENERATOR = {
  endpoint: 'https://api.openai.com/v1/images/generations',
  defaults: { n: 1, size: "1024x1024" }
};

const VIDEO_GENERATOR = {
  endpoint: 'https://api.deepai.org/api/video-generator',
  defaults: { length: 5, fps: 24 }
};

// Lookup table keyed by the media type string used in the routes.
const mediaGenerators = {
  image: IMAGE_GENERATOR,
  video: VIDEO_GENERATOR
};
|
|
|
|
|
/**
 * Shared generation pipeline for the image/video routes.
 *
 * Looks up the endpoint/defaults for `type` in mediaGenerators, posts the
 * prompt (falling back to `creative <type>`), and retries up to
 * CONFIG.MAX_RETRIES times with a linearly growing backoff (3s, 6s, ...).
 * Responds with { url, details } on success; rethrows the final error so the
 * calling route can translate it into a 500.
 *
 * NOTE(review): both endpoints are called with the same Bearer key
 * (CONFIG.AI_API_KEY) — confirm the DeepAI endpoint accepts this auth scheme.
 */
const processMedia = async (type, req, res) => {
  const { endpoint, defaults } = mediaGenerators[type];

  for (let attempt = 1; attempt <= CONFIG.MAX_RETRIES; attempt++) {
    try {
      const payload = {
        prompt: req.body.prompt || `creative ${type}`,
        ...defaults
      };

      const response = await fetch(endpoint, {
        method: 'POST',
        headers: {
          'Content-Type': 'application/json',
          'Authorization': `Bearer ${CONFIG.AI_API_KEY}`
        },
        body: JSON.stringify(payload)
      });

      if (!response.ok) throw new Error(`${type} generation failed: ${response.status}`);

      const data = await response.json();
      return res.json({
        url: data.url || data.output_url,
        details: data
      });
    } catch (err) {
      console.error(`${type} generation attempt ${attempt} failed:`, err);
      if (attempt >= CONFIG.MAX_RETRIES) {
        throw err;
      }
      await new Promise((resolve) => setTimeout(resolve, 3000 * attempt));
    }
  }
};
|
|
|
|
|
// POST /api/generate/image — delegate to the shared media pipeline and
// translate any final failure into a 500 response.
app.post('/api/generate/image', (req, res) =>
  processMedia('image', req, res).catch((err) =>
    res.status(500).json({ error: err.message })
  )
);
|
|
|
|
|
// POST /api/generate/video — delegate to the shared media pipeline and
// translate any final failure into a 500 response.
app.post('/api/generate/video', (req, res) =>
  processMedia('video', req, res).catch((err) =>
    res.status(500).json({ error: err.message })
  )
);
|
|
|
|
|
// Serve generated preview assets from ./previews under the /preview URL prefix.
app.use('/preview', express.static(path.join(__dirname, 'previews')));
|
|
|
|
|
/**
 * Start the HTTP server and print a startup summary.
 *
 * NOTE(review): the original banner art and several log strings were mojibake
 * (UTF-8 text decoded with the wrong charset — e.g. "GΓ©nΓ©ration" for
 * "Génération", and garbled block characters in the banner). They are
 * restored to readable text here; the French wording is preserved.
 */
app.listen(CONFIG.PORT, () => {
  console.log(`
  =============================
       R O S A L I N D A
  =============================
  `);
  console.log(`Rosalinda backend ONLINE - Port ${CONFIG.PORT}`);
  console.log(`Endpoints disponibles:`);
  console.log(`- POST /api/rosalinda - Chat AI`);
  console.log(`- POST /api/generate/image - Génération d'images`);
  console.log(`- POST /api/generate/video - Génération de vidéos`);
  console.log(`- GET /api/health - Vérification du statut`);
  console.log(`\nWatchdog actif - Intervalle: ${CONFIG.WATCHDOG_INTERVAL}ms`);
});
|
|
|