MoShow committed on
Commit
d7ed7b3
·
verified ·
1 Parent(s): 6f7e76a

Check Hugging Face API inference; it's not reachable and nothing is working

Browse files
components/gallery-grid.js CHANGED
@@ -158,15 +158,20 @@ class CustomGalleryGrid extends HTMLElement {
158
  </style>
159
 
160
  <div class="gallery-grid">
161
- ${this.items.length === 0 ? `
162
  <div class="loading-skeleton"></div>
163
  <div class="loading-skeleton"></div>
164
  <div class="loading-skeleton"></div>
165
  <div class="loading-skeleton"></div>
166
  ` : this.items.map(item => `
167
  <div class="gallery-item" data-id="${item.id}">
168
- ${item.result.qualityLabel ? `<span class="resolution-badge">${item.result.qualityLabel}</span>` : ''}
169
- <img src="${item.result.url}" alt="${item.prompt}" loading="lazy">
 
 
 
 
 
170
  <div class="item-overlay">
171
  <p class="item-prompt">${item.prompt}</p>
172
  <div class="item-meta">
 
158
  </style>
159
 
160
  <div class="gallery-grid">
161
+ ${this.items.length === 0 ? `
162
  <div class="loading-skeleton"></div>
163
  <div class="loading-skeleton"></div>
164
  <div class="loading-skeleton"></div>
165
  <div class="loading-skeleton"></div>
166
  ` : this.items.map(item => `
167
  <div class="gallery-item" data-id="${item.id}">
168
+ ${item.result.qualityLabel ? `
169
+ <span class="resolution-badge ${item.isFallback ? 'bg-amber-500/20 text-amber-400' : ''}">
170
+ ${item.result.qualityLabel}
171
+ ${item.isFallback ? ' (Demo)' : ''}
172
+ </span>
173
+ ` : ''}
174
+ <img src="${item.result.url}" alt="${item.prompt}" loading="lazy">
175
  <div class="item-overlay">
176
  <p class="item-prompt">${item.prompt}</p>
177
  <div class="item-meta">
components/generation-modal.js CHANGED
@@ -128,15 +128,25 @@ class CustomGenerationModal extends HTMLElement {
128
  .progress-info {
129
  text-align: center;
130
  }
131
-
132
  .progress-text {
133
  font-size: 1.5rem;
134
  color: #e2e8f0;
135
  font-weight: 600;
136
  margin-bottom: 0.5rem;
 
 
 
137
  }
138
-
139
- .progress-subtext {
 
 
 
 
 
 
 
 
140
  color: #64748b;
141
  font-size: 0.9375rem;
142
  }
@@ -277,9 +287,12 @@ class CustomGenerationModal extends HTMLElement {
277
  <div class="neural-ring"></div>
278
  </div>
279
  <div class="progress-info">
280
- <div class="progress-text" id="progress-text">Initializing AI...</div>
281
- <div class="progress-subtext" id="progress-subtext">Preparing high-resolution pipeline</div>
282
- <div class="progress-bar">
 
 
 
283
  <div class="progress-fill" id="progress-fill"></div>
284
  </div>
285
  </div>
 
128
  .progress-info {
129
  text-align: center;
130
  }
 
131
  .progress-text {
132
  font-size: 1.5rem;
133
  color: #e2e8f0;
134
  font-weight: 600;
135
  margin-bottom: 0.5rem;
136
+ display: flex;
137
+ align-items: center;
138
+ gap: 0.5rem;
139
  }
140
+
141
+ .fallback-badge {
142
+ padding: 0.25rem 0.5rem;
143
+ background: rgba(245, 158, 11, 0.2);
144
+ border-radius: 6px;
145
+ font-size: 0.75rem;
146
+ color: #f59e0b;
147
+ font-weight: 600;
148
+ }
149
+ .progress-subtext {
150
  color: #64748b;
151
  font-size: 0.9375rem;
152
  }
 
287
  <div class="neural-ring"></div>
288
  </div>
289
  <div class="progress-info">
290
+ <div class="progress-text" id="progress-text">
291
+ <span id="progress-status">Initializing AI...</span>
292
+ <span id="fallback-badge" class="fallback-badge" style="display: none;">Fallback Mode</span>
293
+ </div>
294
+ <div class="progress-subtext" id="progress-subtext">Preparing pipeline</div>
295
+ <div class="progress-bar">
296
  <div class="progress-fill" id="progress-fill"></div>
297
  </div>
298
  </div>
script.js CHANGED
@@ -1,8 +1,8 @@
1
  // VortexAI - Main Application Script
2
-
3
- // Hugging Face API Configuration
4
  const HF_CONFIG = {
5
  baseURL: 'https://api-inference.huggingface.co/models/',
 
6
  models: {
7
  image: {
8
  'stable-diffusion-xl': 'stabilityai/stable-diffusion-xl-base-1.0',
@@ -13,7 +13,7 @@ const HF_CONFIG = {
13
  'sdxl-8k': 'stabilityai/stable-diffusion-xl-base-1.0',
14
  'esrgan': 'Real-ESRGAN'
15
  },
16
- video: {
17
  'modelscope-t2v': 'damo-vilab/text-to-video-ms-1.7b',
18
  'zeroscope': 'cerspense/zeroscope_v2_576w'
19
  },
@@ -26,9 +26,26 @@ video: {
26
  headers: (token) => ({
27
  'Authorization': `Bearer ${token}`,
28
  'Content-Type': 'application/json'
29
- })
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
30
  };
31
-
32
  // Global State
33
  const state = {
34
  currentMode: 'image',
@@ -134,8 +151,7 @@ class NeuralNetwork {
134
  requestAnimationFrame(() => this.animate());
135
  }
136
  }
137
-
138
- // Generation Service
139
  class GenerationService {
140
  static async generateImage(prompt, options = {}) {
141
  const modelId = HF_CONFIG.models.image[options.model || 'stable-diffusion-xl'];
@@ -149,42 +165,76 @@ class GenerationService {
149
  else if (pixels >= 8294400) qualityTier = 'ultra'; // 4K+
150
  else if (pixels >= 4194304) qualityTier = 'high'; // 2K+
151
 
152
- const payload = {
153
- inputs: prompt,
154
- parameters: {
155
- width: Math.min(targetWidth, 2048), // Base generation at max supported
156
- height: Math.min(targetHeight, 2048),
157
- num_inference_steps: options.steps || 100,
158
- guidance_scale: options.guidance || 7.5,
159
- seed: options.seed || Math.floor(Math.random() * 1000000),
160
- upscale_factor: options.upscale || Math.ceil(targetWidth / 2048),
161
- detail_enhancement: options.detailEnhancement || 75
162
- }
163
- };
164
 
165
  try {
166
- // Progressive loading based on resolution
167
- const baseDuration = qualityTier === 'maximum' ? 30000 :
168
- qualityTier === 'ultra' ? 20000 :
169
- qualityTier === 'high' ? 12000 : 8000;
170
- await this.simulateProgress(baseDuration);
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
171
 
172
- // Return high-res mock data
173
  return {
174
- url: `https://static.photos/${qualityTier === 'maximum' ? 'technology' : qualityTier === 'ultra' ? 'nature' : 'abstract'}/${Math.min(targetWidth, 1200)}x${Math.min(targetHeight, 630)}/${payload.parameters.seed}`,
175
  seed: payload.parameters.seed,
176
  model: modelId,
177
  resolution: `${targetWidth}x${targetHeight}`,
178
- qualityTier: qualityTier,
179
- fileSize: this.estimateFileSize(targetWidth, targetHeight, options.mode || 'image'),
180
- format: targetWidth >= 7680 ? 'TIFF/RAW' : targetWidth >= 3840 ? 'PNG-24' : 'PNG'
181
  };
182
  } catch (error) {
183
- throw new Error(`Image generation failed: ${error.message}`);
 
184
  }
185
  }
186
-
187
- static estimateFileSize(width, height, mode) {
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
188
  const pixels = width * height;
189
  if (mode === 'image') {
190
  if (pixels >= 33177600) return '~250-500 MB';
@@ -195,39 +245,86 @@ class GenerationService {
195
  return 'Unknown';
196
  }
197
  static async generateVideo(prompt, options = {}) {
198
- const modelId = HF_CONFIG.models.video['modelscope-t2v'];
199
 
 
 
 
 
 
200
  try {
201
- await this.simulateProgress(15000); // Longer for video
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
202
 
203
  return {
204
- url: `https://static.photos/video/640x360/${Date.now()}`,
205
- frames: 16,
206
- fps: 8,
207
- duration: 2,
208
  model: modelId
209
  };
210
  } catch (error) {
211
- throw new Error(`Video generation failed: ${error.message}`);
 
212
  }
213
  }
214
-
215
  static async generate3D(prompt, options = {}) {
 
 
 
 
 
 
 
216
  try {
217
- await this.simulateProgress(20000); // Longest for 3D
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
218
 
219
  return {
220
- url: `https://static.photos/3d/512x512/${Date.now()}`,
221
  format: 'obj',
222
  vertices: 5000,
223
  faces: 3000,
224
- model: HF_CONFIG.models['3d']['shap-e']
225
  };
226
  } catch (error) {
227
- throw new Error(`3D generation failed: ${error.message}`);
 
228
  }
229
  }
230
-
231
  static simulateProgress(duration = 5000) {
232
  return new Promise((resolve) => {
233
  const steps = 20;
@@ -239,7 +336,12 @@ static async generateVideo(prompt, options = {}) {
239
  const progress = (currentStep / steps) * 100;
240
 
241
  document.dispatchEvent(new CustomEvent('generationProgress', {
242
- detail: { progress, step: currentStep, total: steps }
 
 
 
 
 
243
  }));
244
 
245
  if (currentStep >= steps) {
@@ -250,24 +352,39 @@ static async generateVideo(prompt, options = {}) {
250
  });
251
  }
252
  }
253
-
254
  // UI Controllers
255
  const UIController = {
256
- init() {
257
  this.initNeuralBackground();
258
- this.initApiStatus();
259
  this.initEventListeners();
260
  this.loadGallery();
261
  },
262
-
263
- initNeuralBackground() {
264
  const canvas = document.getElementById('neural-canvas');
265
  if (canvas) {
266
  new NeuralNetwork(canvas);
267
  }
268
  },
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
269
 
270
- initApiStatus() {
271
  const services = [
272
  { name: 'Stable Diffusion XL', status: 'operational', latency: '124ms' },
273
  { name: 'ModelScope T2V', status: 'operational', latency: '892ms' },
@@ -275,23 +392,19 @@ const UIController = {
275
  { name: 'Realistic Vision', status: 'operational', latency: '156ms' }
276
  ];
277
 
278
- const grid = document.getElementById('api-status-grid');
279
- if (grid) {
280
- grid.innerHTML = services.map(s => `
281
- <div class="glass-card rounded-xl p-4 flex items-center justify-between">
282
- <div>
283
- <p class="font-medium text-sm">${s.name}</p>
284
- <p class="text-xs text-slate-400">${s.latency}</p>
285
- </div>
286
- <span class="px-2 py-1 rounded-full text-xs font-medium ${s.status === 'operational' ? 'bg-emerald-500/20 text-emerald-400' : 'bg-amber-500/20 text-amber-400'}">
287
- ${s.status}
288
- </span>
289
  </div>
290
- `).join('');
291
- }
 
 
 
292
  },
293
-
294
- initEventListeners() {
295
  // Prompt submission
296
  document.addEventListener('submitPrompt', async (e) => {
297
  const { prompt, mode, settings } = e.detail;
@@ -308,12 +421,20 @@ const UIController = {
308
  this.loadGallery();
309
  });
310
  },
311
-
312
  async handleGeneration(prompt, mode, settings) {
313
  const modal = document.getElementById('generation-modal');
314
  modal.show();
315
-
316
  try {
 
 
 
 
 
 
 
 
 
317
  let result;
318
  switch (mode) {
319
  case 'image':
@@ -338,7 +459,8 @@ const UIController = {
338
  prompt,
339
  mode,
340
  result,
341
- timestamp: new Date().toISOString()
 
342
  };
343
  state.history.unshift(entry);
344
  localStorage.setItem('vortex_history', JSON.stringify(state.history.slice(0, 50)));
@@ -346,22 +468,35 @@ const UIController = {
346
  modal.showResult(result, mode);
347
 
348
  } catch (error) {
 
349
  modal.showError(error.message);
350
  }
351
  },
352
-
353
- updateProgress(progress) {
354
  const modal = document.getElementById('generation-modal');
355
  if (modal && modal.updateProgress) {
356
- modal.updateProgress(progress);
 
 
 
 
 
357
  }
358
  },
359
-
360
  loadGallery() {
361
  const gallery = document.getElementById('gallery');
362
  if (gallery) {
363
  // Load from history or generate demo items
364
- const items = state.history.length > 0 ? state.history : this.generateDemoGallery();
 
 
 
 
 
 
 
 
 
365
  gallery.setItems(items.slice(0, 12));
366
  }
367
  },
@@ -391,9 +526,11 @@ const UIController = {
391
  result: {
392
  url: `https://static.photos/${['technology', 'nature', 'abstract', 'architecture', 'people', 'science'][i]}/1200x630/${i + 1}`,
393
  resolution: `${resolutions[i].w}x${resolutions[i].h}`,
394
- qualityLabel: resolutions[i].label
 
395
  },
396
- timestamp: new Date(Date.now() - i * 86400000).toISOString()
 
397
  }));
398
  }
399
  };
 
1
  // VortexAI - Main Application Script
2
+ // Hugging Face API Configuration with fallback
 
3
  const HF_CONFIG = {
4
  baseURL: 'https://api-inference.huggingface.co/models/',
5
+ fallbackMode: true, // Set to false when API is working
6
  models: {
7
  image: {
8
  'stable-diffusion-xl': 'stabilityai/stable-diffusion-xl-base-1.0',
 
13
  'sdxl-8k': 'stabilityai/stable-diffusion-xl-base-1.0',
14
  'esrgan': 'Real-ESRGAN'
15
  },
16
+ video: {
17
  'modelscope-t2v': 'damo-vilab/text-to-video-ms-1.7b',
18
  'zeroscope': 'cerspense/zeroscope_v2_576w'
19
  },
 
26
  headers: (token) => ({
27
  'Authorization': `Bearer ${token}`,
28
  'Content-Type': 'application/json'
29
+ }),
30
+
31
+ // Check API status
32
+ async checkStatus() {
33
+ try {
34
+ const response = await fetch(this.baseURL, {
35
+ method: 'HEAD',
36
+ headers: this.headers('')
37
+ });
38
+ if (response.status === 200 || response.status === 401) {
39
+ this.fallbackMode = false;
40
+ return true;
41
+ }
42
+ } catch (error) {
43
+ console.error('Hugging Face API unreachable:', error);
44
+ this.fallbackMode = true;
45
+ return false;
46
+ }
47
+ }
48
  };
 
49
  // Global State
50
  const state = {
51
  currentMode: 'image',
 
151
  requestAnimationFrame(() => this.animate());
152
  }
153
  }
154
+ // Generation Service with API fallback
 
155
  class GenerationService {
156
  static async generateImage(prompt, options = {}) {
157
  const modelId = HF_CONFIG.models.image[options.model || 'stable-diffusion-xl'];
 
165
  else if (pixels >= 8294400) qualityTier = 'ultra'; // 4K+
166
  else if (pixels >= 4194304) qualityTier = 'high'; // 2K+
167
 
168
+ // Check API status first
169
+ const apiAvailable = await HF_CONFIG.checkStatus();
170
+
171
+ if (!apiAvailable || HF_CONFIG.fallbackMode) {
172
+ console.warn('Using fallback generation mode - API unavailable');
173
+ return this.fallbackGeneration(prompt, options, 'image');
174
+ }
 
 
 
 
 
175
 
176
  try {
177
+ const payload = {
178
+ inputs: prompt,
179
+ parameters: {
180
+ width: Math.min(targetWidth, 2048),
181
+ height: Math.min(targetHeight, 2048),
182
+ num_inference_steps: options.steps || 100,
183
+ guidance_scale: options.guidance || 7.5,
184
+ seed: options.seed || Math.floor(Math.random() * 1000000)
185
+ }
186
+ };
187
+
188
+ const response = await fetch(HF_CONFIG.baseURL + modelId, {
189
+ method: 'POST',
190
+ headers: HF_CONFIG.headers('YOUR_HF_TOKEN_HERE'),
191
+ body: JSON.stringify(payload)
192
+ });
193
+
194
+ if (!response.ok) {
195
+ throw new Error(`API Error: ${response.status}`);
196
+ }
197
+
198
+ const blob = await response.blob();
199
+ const url = URL.createObjectURL(blob);
200
 
 
201
  return {
202
+ url,
203
  seed: payload.parameters.seed,
204
  model: modelId,
205
  resolution: `${targetWidth}x${targetHeight}`,
206
+ qualityTier,
207
+ fileSize: this.estimateFileSize(targetWidth, targetHeight, 'image'),
208
+ format: 'PNG'
209
  };
210
  } catch (error) {
211
+ console.error('API generation failed, falling back:', error);
212
+ return this.fallbackGeneration(prompt, options, 'image');
213
  }
214
  }
215
+
216
+ static fallbackGeneration(prompt, options, mode) {
217
+ const baseDuration = 5000; // Shorter duration for fallback
218
+ const seed = options.seed || Math.floor(Math.random() * 1000000);
219
+ const width = options.width || 1024;
220
+ const height = options.height || 1024;
221
+
222
+ return new Promise((resolve) => {
223
+ setTimeout(() => {
224
+ resolve({
225
+ url: `https://static.photos/${mode === 'image' ? 'abstract' : mode === 'video' ? 'technology' : '3d'}/${Math.min(width, 1200)}x${Math.min(height, 630)}/${seed}`,
226
+ seed,
227
+ model: 'fallback',
228
+ resolution: `${width}x${height}`,
229
+ qualityTier: 'standard',
230
+ fileSize: this.estimateFileSize(width, height, mode),
231
+ format: 'PNG',
232
+ isFallback: true
233
+ });
234
+ }, baseDuration);
235
+ });
236
+ }
237
+ static estimateFileSize(width, height, mode) {
238
  const pixels = width * height;
239
  if (mode === 'image') {
240
  if (pixels >= 33177600) return '~250-500 MB';
 
245
  return 'Unknown';
246
  }
247
  static async generateVideo(prompt, options = {}) {
248
+ const apiAvailable = await HF_CONFIG.checkStatus();
249
 
250
+ if (!apiAvailable || HF_CONFIG.fallbackMode) {
251
+ console.warn('Using fallback generation mode - API unavailable');
252
+ return this.fallbackGeneration(prompt, options, 'video');
253
+ }
254
+
255
  try {
256
+ const modelId = HF_CONFIG.models.video['modelscope-t2v'];
257
+ const response = await fetch(HF_CONFIG.baseURL + modelId, {
258
+ method: 'POST',
259
+ headers: HF_CONFIG.headers('YOUR_HF_TOKEN_HERE'),
260
+ body: JSON.stringify({
261
+ inputs: prompt,
262
+ parameters: {
263
+ num_frames: options.frames || 16,
264
+ fps: options.fps || 8
265
+ }
266
+ })
267
+ });
268
+
269
+ if (!response.ok) {
270
+ throw new Error(`API Error: ${response.status}`);
271
+ }
272
+
273
+ const blob = await response.blob();
274
+ const url = URL.createObjectURL(blob);
275
 
276
  return {
277
+ url,
278
+ frames: options.frames || 16,
279
+ fps: options.fps || 8,
280
+ duration: Math.round((options.frames || 16) / (options.fps || 8)),
281
  model: modelId
282
  };
283
  } catch (error) {
284
+ console.error('API generation failed, falling back:', error);
285
+ return this.fallbackGeneration(prompt, options, 'video');
286
  }
287
  }
 
288
  static async generate3D(prompt, options = {}) {
289
+ const apiAvailable = await HF_CONFIG.checkStatus();
290
+
291
+ if (!apiAvailable || HF_CONFIG.fallbackMode) {
292
+ console.warn('Using fallback generation mode - API unavailable');
293
+ return this.fallbackGeneration(prompt, options, '3d');
294
+ }
295
+
296
  try {
297
+ const modelId = HF_CONFIG.models['3d']['shap-e'];
298
+ const response = await fetch(HF_CONFIG.baseURL + modelId, {
299
+ method: 'POST',
300
+ headers: HF_CONFIG.headers('YOUR_HF_TOKEN_HERE'),
301
+ body: JSON.stringify({
302
+ inputs: prompt,
303
+ parameters: {
304
+ resolution: options.resolution || 256
305
+ }
306
+ })
307
+ });
308
+
309
+ if (!response.ok) {
310
+ throw new Error(`API Error: ${response.status}`);
311
+ }
312
+
313
+ const blob = await response.blob();
314
+ const url = URL.createObjectURL(blob);
315
 
316
  return {
317
+ url,
318
  format: 'obj',
319
  vertices: 5000,
320
  faces: 3000,
321
+ model: modelId
322
  };
323
  } catch (error) {
324
+ console.error('API generation failed, falling back:', error);
325
+ return this.fallbackGeneration(prompt, options, '3d');
326
  }
327
  }
 
328
  static simulateProgress(duration = 5000) {
329
  return new Promise((resolve) => {
330
  const steps = 20;
 
336
  const progress = (currentStep / steps) * 100;
337
 
338
  document.dispatchEvent(new CustomEvent('generationProgress', {
339
+ detail: {
340
+ progress,
341
+ step: currentStep,
342
+ total: steps,
343
+ status: currentStep === steps ? 'Completed' : 'Processing'
344
+ }
345
  }));
346
 
347
  if (currentStep >= steps) {
 
352
  });
353
  }
354
  }
 
355
  // UI Controllers
356
  const UIController = {
357
+ async init() {
358
  this.initNeuralBackground();
359
+ await this.initApiStatus();
360
  this.initEventListeners();
361
  this.loadGallery();
362
  },
363
+ initNeuralBackground() {
 
364
  const canvas = document.getElementById('neural-canvas');
365
  if (canvas) {
366
  new NeuralNetwork(canvas);
367
  }
368
  },
369
+ async initApiStatus() {
370
+ const apiAvailable = await HF_CONFIG.checkStatus();
371
+ const grid = document.getElementById('api-status-grid');
372
+
373
+ if (!grid) return;
374
+
375
+ if (!apiAvailable) {
376
+ grid.innerHTML = `
377
+ <div class="glass-card rounded-xl p-4 col-span-4 text-center">
378
+ <div class="flex items-center justify-center gap-2 mb-2">
379
+ <span class="w-3 h-3 rounded-full bg-amber-500 animate-pulse"></span>
380
+ <span class="text-amber-400 font-medium">API Service Disrupted</span>
381
+ </div>
382
+ <p class="text-sm text-slate-400">Using fallback generation mode</p>
383
+ </div>
384
+ `;
385
+ return;
386
+ }
387
 
 
388
  const services = [
389
  { name: 'Stable Diffusion XL', status: 'operational', latency: '124ms' },
390
  { name: 'ModelScope T2V', status: 'operational', latency: '892ms' },
 
392
  { name: 'Realistic Vision', status: 'operational', latency: '156ms' }
393
  ];
394
 
395
+ grid.innerHTML = services.map(s => `
396
+ <div class="glass-card rounded-xl p-4 flex items-center justify-between">
397
+ <div>
398
+ <p class="font-medium text-sm">${s.name}</p>
399
+ <p class="text-xs text-slate-400">${s.latency}</p>
 
 
 
 
 
 
400
  </div>
401
+ <span class="px-2 py-1 rounded-full text-xs font-medium ${s.status === 'operational' ? 'bg-emerald-500/20 text-emerald-400' : 'bg-amber-500/20 text-amber-400'}">
402
+ ${s.status}
403
+ </span>
404
+ </div>
405
+ `).join('');
406
  },
407
+ initEventListeners() {
 
408
  // Prompt submission
409
  document.addEventListener('submitPrompt', async (e) => {
410
  const { prompt, mode, settings } = e.detail;
 
421
  this.loadGallery();
422
  });
423
  },
 
424
  async handleGeneration(prompt, mode, settings) {
425
  const modal = document.getElementById('generation-modal');
426
  modal.show();
427
+
428
  try {
429
+ document.dispatchEvent(new CustomEvent('generationProgress', {
430
+ detail: {
431
+ progress: 0,
432
+ step: 0,
433
+ total: 20,
434
+ status: 'Initializing...'
435
+ }
436
+ }));
437
+
438
  let result;
439
  switch (mode) {
440
  case 'image':
 
459
  prompt,
460
  mode,
461
  result,
462
+ timestamp: new Date().toISOString(),
463
+ isFallback: result.isFallback || false
464
  };
465
  state.history.unshift(entry);
466
  localStorage.setItem('vortex_history', JSON.stringify(state.history.slice(0, 50)));
 
468
  modal.showResult(result, mode);
469
 
470
  } catch (error) {
471
+ console.error('Generation error:', error);
472
  modal.showError(error.message);
473
  }
474
  },
475
+ updateProgress(detail) {
 
476
  const modal = document.getElementById('generation-modal');
477
  if (modal && modal.updateProgress) {
478
+ modal.updateProgress(detail.progress, detail.status);
479
+ }
480
+
481
+ // Also update API status if progress indicates completion
482
+ if (detail.progress === 100) {
483
+ setTimeout(() => this.initApiStatus(), 1000);
484
  }
485
  },
 
486
  loadGallery() {
487
  const gallery = document.getElementById('gallery');
488
  if (gallery) {
489
  // Load from history or generate demo items
490
+ let items = state.history.length > 0 ? state.history : this.generateDemoGallery();
491
+
492
+ // Mark demo items as fallback
493
+ if (state.history.length === 0) {
494
+ items = items.map(item => ({
495
+ ...item,
496
+ isFallback: true
497
+ }));
498
+ }
499
+
500
  gallery.setItems(items.slice(0, 12));
501
  }
502
  },
 
526
  result: {
527
  url: `https://static.photos/${['technology', 'nature', 'abstract', 'architecture', 'people', 'science'][i]}/1200x630/${i + 1}`,
528
  resolution: `${resolutions[i].w}x${resolutions[i].h}`,
529
+ qualityLabel: resolutions[i].label,
530
+ isFallback: true
531
  },
532
+ timestamp: new Date(Date.now() - i * 86400000).toISOString(),
533
+ isFallback: true
534
  }));
535
  }
536
  };