File size: 5,860 Bytes
146f017
241ddf3
146f017
 
241ddf3
 
 
146f017
241ddf3
 
 
 
146f017
241ddf3
146f017
 
241ddf3
 
146f017
 
 
 
 
 
 
 
 
 
 
 
 
241ddf3
 
146f017
 
 
 
 
 
241ddf3
 
146f017
 
241ddf3
146f017
 
 
 
 
 
 
 
241ddf3
146f017
241ddf3
 
 
 
 
 
 
 
146f017
 
241ddf3
 
 
 
146f017
 
 
 
 
 
 
241ddf3
146f017
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
241ddf3
146f017
 
 
 
241ddf3
146f017
 
 
241ddf3
 
 
 
 
146f017
241ddf3
146f017
241ddf3
 
 
 
 
 
 
 
146f017
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173

// Module dependencies: HTTP framework, outbound HTTP client, CORS middleware,
// and path utilities for serving static files.
const express = require('express');
const fetch = require('node-fetch');
const cors = require('cors');
const path = require('path');

// Create the Express app; enable CORS for all origins and JSON body parsing
// on every route (the API endpoints below all read req.body).
const app = express();
app.use(cors());
app.use(express.json());

// Configuration — resolved once at startup; environment variables win.
// Frozen so no later code can mutate shared settings by accident.
const CONFIG = Object.freeze({
  // Upstream AI API key. The fallback is a non-functional placeholder that the
  // provider will reject — set ROSALINDA_API_KEY in production.
  AI_API_KEY: process.env.ROSALINDA_API_KEY ?? 'your-ai-api-key',
  // `??` (not `||`) so only a missing variable falls back to the default.
  PORT: process.env.PORT ?? 3000,
  MAX_RETRIES: 5,          // attempts per upstream call before giving up
  WATCHDOG_INTERVAL: 5000  // ms between keep-alive heartbeat log lines
});

// Watchdog to avoid any shutdown: periodic heartbeat log so supervisors and
// log watchers can see the process is still alive. NOTE(review): this only
// logs — it does not restart or recover anything by itself.
setInterval(() => {
  console.log("🟒 Rosalinda alive -", new Date().toISOString());
}, CONFIG.WATCHDOG_INTERVAL);

// Swallow uncaught exceptions so the process keeps running.
// NOTE(review): the log message says "Restarting process" but no restart is
// performed — the error is only logged and execution continues, which may
// leave the app in an inconsistent state.
process.on('uncaughtException', (err) => {
  console.error('⚠️ Uncaught Exception - Restarting process:', err);
});

// Log unhandled promise rejections (e.g. the bare `throw` inside the async
// retry handlers below) instead of letting them terminate the process.
process.on('unhandledRejection', (err) => {
  console.error('⚠️ Unhandled Rejection:', err);
});

// Health check endpoint
// GET /api/health — liveness probe reporting status, timestamp, version, and
// the list of supported features.
app.get('/api/health', (_req, res) => {
  const payload = {
    status: 'online',
    timestamp: new Date().toISOString(),
    version: '1.0.0',
    features: ['chat', 'images', 'videos']
  };
  res.json(payload);
});

// AI proxy endpoint with automatic retry.
// POST /api/rosalinda { prompt } -> { result, usage } on success,
// { status: 'resume' } while another request is already in flight,
// or HTTP 500 { error } once all retries are exhausted.
//
// Fixes over the previous version:
// - On retry exhaustion the handler used to `throw` inside an async Express
//   handler (unhandled rejection; the client never received a response) and
//   the trailing `res.status(500).json({ error: err.message })` referenced
//   `err` out of scope. We now keep the last error and answer 500 with it.
// - The `isProcessing` lock is released in `finally`, so it cannot get stuck.
let isProcessing = false;
app.post('/api/rosalinda', async (req, res) => {
    if (isProcessing) {
      return res.json({ status: 'resume', message: 'Reprise en cours...' });
    }

    isProcessing = true;
    try {
      let lastError;
      for (let attempt = 1; attempt <= CONFIG.MAX_RETRIES; attempt++) {
        try {
          const response = await fetch('https://api.openai.com/v1/chat/completions', {
            method: 'POST',
            headers: {
              'Content-Type': 'application/json',
              'Authorization': `Bearer ${CONFIG.AI_API_KEY}`
            },
            body: JSON.stringify({
              model: "gpt-4",
              messages: [{ role: "user", content: req.body.prompt }],
              temperature: 0.7,
              max_tokens: 2000
            })
          });

          if (!response.ok) throw new Error(`AI API error: ${response.status}`);

          const data = await response.json();
          return res.json({
            result: data.choices[0]?.message?.content || "No response",
            usage: data.usage
          });
        } catch (err) {
          lastError = err;
          console.error(`Attempt ${attempt} failed:`, err);
          if (attempt < CONFIG.MAX_RETRIES) {
            // Linear backoff: 2s, 4s, 6s, ...
            await new Promise(resolve => setTimeout(resolve, 2000 * attempt));
          }
        }
      }
      // All retries failed — report the last error to the client.
      res.status(500).json({ error: lastError.message });
    } finally {
      // Always release the lock, even on unexpected failures.
      isProcessing = false;
    }
});

// Media-generation registry, keyed by media type. Each entry holds the
// upstream API endpoint plus default request parameters merged into every
// generation call by processMedia.
const imageGenerator = {
  endpoint: 'https://api.openai.com/v1/images/generations',
  defaults: { n: 1, size: "1024x1024" }
};

const videoGenerator = {
  endpoint: 'https://api.deepai.org/api/video-generator',
  defaults: { length: 5, fps: 24 }
};

const mediaGenerators = {
  image: imageGenerator,
  video: videoGenerator
};

// Shared retry driver for the media generation endpoints.
// type — key into mediaGenerators ('image' | 'video').
// Sends req.body.prompt (or a generic fallback prompt) to the configured
// upstream API, retrying with linear backoff (3s, 6s, ...) up to
// CONFIG.MAX_RETRIES attempts. On success responds with { url, details }.
// On exhaustion — or an unknown type — the error is thrown for the route
// handler to convert into an HTTP 500 response.
const processMedia = async (type, req, res) => {
  const config = mediaGenerators[type];
  if (!config) {
    // Fail fast with a clear message instead of an opaque TypeError when
    // reading `config.endpoint` below.
    throw new Error(`Unknown media type: ${type}`);
  }

  let retries = 0;
  while (retries < CONFIG.MAX_RETRIES) {
    try {
      const response = await fetch(config.endpoint, {
        method: 'POST',
        headers: {
          'Content-Type': 'application/json',
          'Authorization': `Bearer ${CONFIG.AI_API_KEY}`
        },
        body: JSON.stringify({
          prompt: req.body.prompt || `creative ${type}`,
          ...config.defaults
        })
      });

      if (!response.ok) throw new Error(`${type} generation failed: ${response.status}`);

      const data = await response.json();
      // Different providers name the result field differently.
      return res.json({
        url: data.url || data.output_url,
        details: data
      });
    } catch (err) {
      retries++;
      console.error(`${type} generation attempt ${retries} failed:`, err);
      if (retries >= CONFIG.MAX_RETRIES) {
        throw err;
      }
      await new Promise(resolve => setTimeout(resolve, 3000 * retries));
    }
  }
};

// POST /api/generate/image — image generation via the shared media driver;
// any failure (including retry exhaustion) becomes an HTTP 500 response.
app.post('/api/generate/image', (req, res) =>
  processMedia('image', req, res).catch((err) => {
    res.status(500).json({ error: err.message });
  })
);

// POST /api/generate/video — video generation via the shared media driver;
// any failure (including retry exhaustion) becomes an HTTP 500 response.
app.post('/api/generate/video', (req, res) =>
  processMedia('video', req, res).catch((err) => {
    res.status(500).json({ error: err.message });
  })
);
// Static files for preview
// Serves generated preview assets from ./previews under the /preview prefix.
app.use('/preview', express.static(path.join(__dirname, 'previews')));

// Start the HTTP server: print the ASCII banner, then a summary of the
// available endpoints and the watchdog interval.
app.listen(CONFIG.PORT, () => {
  console.log(`
  β–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ•—  β–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ•— β–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ•— β–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ•— β–ˆβ–ˆβ•—     β–ˆβ–ˆβ–ˆβ•—   β–ˆβ–ˆβ•—β–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ•—  β–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ•— 
  β–ˆβ–ˆβ•”β•β•β–ˆβ–ˆβ•—β–ˆβ–ˆβ•”β•β•β•β–ˆβ–ˆβ•—β–ˆβ–ˆβ•”β•β•β•β•β•β–ˆβ–ˆβ•”β•β•β–ˆβ–ˆβ•—β–ˆβ–ˆβ•‘     β–ˆβ–ˆβ–ˆβ–ˆβ•—  β–ˆβ–ˆβ•‘β–ˆβ–ˆβ•”β•β•β–ˆβ–ˆβ•—β–ˆβ–ˆβ•”β•β•β–ˆβ–ˆβ•—
  β–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ•”β•β–ˆβ–ˆβ•‘   β–ˆβ–ˆβ•‘β–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ•—β–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ•‘β–ˆβ–ˆβ•‘     β–ˆβ–ˆβ•”β–ˆβ–ˆβ•— β–ˆβ–ˆβ•‘β–ˆβ–ˆβ•‘  β–ˆβ–ˆβ•‘β–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ•‘
  β–ˆβ–ˆβ•”β•β•β–ˆβ–ˆβ•—β–ˆβ–ˆβ•‘   β–ˆβ–ˆβ•‘β•šβ•β•β•β•β–ˆβ–ˆβ•‘β–ˆβ–ˆβ•”β•β•β–ˆβ–ˆβ•‘β–ˆβ–ˆβ•‘     β–ˆβ–ˆβ•‘β•šβ–ˆβ–ˆβ•—β–ˆβ–ˆβ•‘β–ˆβ–ˆβ•‘  β–ˆβ–ˆβ•‘β–ˆβ–ˆβ•”β•β•β–ˆβ–ˆβ•‘
  β–ˆβ–ˆβ•‘  β–ˆβ–ˆβ•‘β•šβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ•”β•β–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ•‘β–ˆβ–ˆβ•‘  β–ˆβ–ˆβ•‘β–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ•—β–ˆβ–ˆβ•‘ β•šβ–ˆβ–ˆβ–ˆβ–ˆβ•‘β–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ•”β•β–ˆβ–ˆβ•‘  β–ˆβ–ˆβ•‘
  β•šβ•β•  β•šβ•β• β•šβ•β•β•β•β•β• β•šβ•β•β•β•β•β•β•β•šβ•β•  β•šβ•β•β•šβ•β•β•β•β•β•β•β•šβ•β•  β•šβ•β•β•β•β•šβ•β•β•β•β•β• β•šβ•β•  β•šβ•β•
  `);
  console.log(`βœ… Rosalinda backend ONLINE - Port ${CONFIG.PORT}`);
  console.log(`πŸ”— Endpoints disponibles:`);
  console.log(`- POST /api/rosalinda - Chat AI`);
  console.log(`- POST /api/generate/image - GΓ©nΓ©ration d'images`);
  console.log(`- POST /api/generate/video - GΓ©nΓ©ration de vidΓ©os`);
  console.log(`- GET /api/health - VΓ©rification du statut`);
  console.log(`\nπŸ›‘οΈ Watchdog actif - Intervalle: ${CONFIG.WATCHDOG_INTERVAL}ms`);
});