|
|
|
|
|
|
|
|
// Backend base URL: the same origin that served this page (the Flask server).
const BACKEND_URL = window.location.origin;
|
|
|
|
|
class SystemSimulator { |
|
|
// Wires up DOM handles, generation state, tunable parameters and Director
// Mode bookkeeping, then kicks off init().
constructor() {
// Cached DOM handles: log console and output rendering surface.
this.logs = document.getElementById('system-logs');
this.outputCanvas = document.getElementById('output-canvas');
// Optional chaining: ctx stays undefined if the canvas element is missing.
this.outputCtx = this.outputCanvas?.getContext('2d');
// True while a generation sequence is running (guards re-entry and resize).
this.isGenerating = false;
// Source image for I2V generation; set by handleImage()/prepareNextContext().
this.sourceImage = null;
// User-tunable parameters, kept in sync by setupListeners().
this.config = {
prompt: '',
influence: 5,
depth: 16,
method: 'adaptive'
};
// Director Mode: accumulate rendered frames into a downloadable timeline.
this.directorMode = true;
this.movieFrames = [];
this.accumulatedFrames = 0;
this.init();
}
|
|
|
|
|
async callBackendApi(endpoint, data) { |
|
|
try { |
|
|
const response = await fetch(`${BACKEND_URL}${endpoint}`, { |
|
|
method: 'POST', |
|
|
headers: { |
|
|
'Content-Type': 'application/json', |
|
|
}, |
|
|
body: JSON.stringify(data), |
|
|
}); |
|
|
const jsonResponse = await response.json(); |
|
|
if (!response.ok) { |
|
|
throw new Error(jsonResponse.error || `Backend error: ${response.statusText}`); |
|
|
} |
|
|
return jsonResponse; |
|
|
} catch (error) { |
|
|
this.log(`Backend API Error (${endpoint}): ${error.message}`, 'error'); |
|
|
console.error(`Backend API Error (${endpoint}):`, error); |
|
|
throw error; |
|
|
} |
|
|
} |
|
|
|
|
|
init() { |
|
|
this.setupListeners(); |
|
|
this.resizeCanvas(); |
|
|
window.addEventListener('resize', () => this.resizeCanvas()); |
|
|
|
|
|
|
|
|
this.drawStaticNoise(); |
|
|
} |
|
|
|
|
|
setupListeners() { |
|
|
|
|
|
const dropZone = document.getElementById('drop-zone'); |
|
|
const fileInput = document.getElementById('image-input'); |
|
|
|
|
|
dropZone.addEventListener('click', () => fileInput.click()); |
|
|
|
|
|
dropZone.addEventListener('dragover', (e) => { |
|
|
e.preventDefault(); |
|
|
dropZone.classList.add('drag-over'); |
|
|
}); |
|
|
|
|
|
dropZone.addEventListener('dragleave', () => { |
|
|
dropZone.classList.remove('drag-over'); |
|
|
}); |
|
|
|
|
|
dropZone.addEventListener('drop', (e) => { |
|
|
e.preventDefault(); |
|
|
dropZone.classList.remove('drag-over'); |
|
|
if(e.dataTransfer.files.length) { |
|
|
this.handleImage(e.dataTransfer.files[0]); |
|
|
} |
|
|
}); |
|
|
|
|
|
fileInput.addEventListener('change', (e) => { |
|
|
if(e.target.files.length) { |
|
|
this.handleImage(e.target.files[0]); |
|
|
} |
|
|
}); |
|
|
|
|
|
|
|
|
const directorToggle = document.getElementById('director-mode-toggle'); |
|
|
if (directorToggle) { |
|
|
directorToggle.addEventListener('change', (e) => { |
|
|
this.directorMode = e.target.checked; |
|
|
this.log(`Director Mode: ${this.directorMode ? 'ENABLED' : 'DISABLED'}`, 'info'); |
|
|
}); |
|
|
} |
|
|
|
|
|
document.getElementById('download-btn').addEventListener('click', () => this.downloadMovie()); |
|
|
document.getElementById('reset-movie-btn').addEventListener('click', () => this.resetMovie()); |
|
|
|
|
|
|
|
|
document.getElementById('quantum-influence').addEventListener('input', (e) => { |
|
|
document.getElementById('influence-val').textContent = `${e.target.value}%`; |
|
|
this.config.influence = parseInt(e.target.value); |
|
|
}); |
|
|
|
|
|
document.getElementById('entanglement-depth').addEventListener('input', (e) => { |
|
|
document.getElementById('depth-val').textContent = e.target.value; |
|
|
this.config.depth = parseInt(e.target.value); |
|
|
}); |
|
|
|
|
|
|
|
|
document.querySelectorAll('.viz-tab').forEach(tab => { |
|
|
tab.addEventListener('click', () => { |
|
|
document.querySelectorAll('.viz-tab').forEach(t => t.classList.remove('active')); |
|
|
document.querySelectorAll('.viz-view').forEach(v => v.classList.remove('active')); |
|
|
|
|
|
tab.classList.add('active'); |
|
|
document.getElementById(`view-${tab.dataset.view}`).classList.add('active'); |
|
|
}); |
|
|
}); |
|
|
|
|
|
|
|
|
document.getElementById('start-btn').addEventListener('click', () => this.startGeneration()); |
|
|
} |
|
|
|
|
|
handleImage(file) { |
|
|
const reader = new FileReader(); |
|
|
reader.onload = async (e) => { |
|
|
this.sourceImage = new Image(); |
|
|
this.sourceImage.onload = async () => { |
|
|
|
|
|
const preview = document.getElementById('preview-img'); |
|
|
preview.src = this.sourceImage.src; |
|
|
preview.classList.remove('hidden'); |
|
|
document.querySelector('.drop-content').style.opacity = '0'; |
|
|
this.log(`Image loaded: ${file.name} (${this.sourceImage.width}x${this.sourceImage.height})`, 'success'); |
|
|
|
|
|
|
|
|
try { |
|
|
await this.analyzeImageContext(e.target.result); |
|
|
} catch (error) { |
|
|
this.log(`Failed CLIP analysis for ${file.name}: ${error.message}`, 'error'); |
|
|
} |
|
|
}; |
|
|
this.sourceImage.src = e.target.result; |
|
|
}; |
|
|
reader.readAsDataURL(file); |
|
|
} |
|
|
|
|
|
async analyzeImageContext(imageDataURL) { |
|
|
this.log('CLIP-Encoder: Sending image for feature extraction...', 'info'); |
|
|
try { |
|
|
const response = await this.callBackendApi('/embed_image', { image: imageDataURL }); |
|
|
const embeddings = response.embeddings; |
|
|
this.log(`CLIP-Encoder: Extracted feature vector [${embeddings[0].toFixed(4)}, ${embeddings[1].toFixed(4)}, ${embeddings[2].toFixed(4)}, ...]`, 'success'); |
|
|
} catch (error) { |
|
|
this.log(`CLIP-Encoder: Failed to get embeddings. Is backend running? ${error.message}`, 'error'); |
|
|
throw error; |
|
|
} |
|
|
} |
|
|
|
|
|
updateDirectorUI() { |
|
|
document.getElementById('total-frames').textContent = `${this.movieFrames.length} FRAMES`; |
|
|
document.getElementById('download-btn').disabled = this.movieFrames.length === 0; |
|
|
document.getElementById('reset-movie-btn').disabled = this.movieFrames.length === 0; |
|
|
} |
|
|
|
|
|
resetMovie() { |
|
|
this.movieFrames = []; |
|
|
this.updateDirectorUI(); |
|
|
this.log('Director Mode: Timeline cleared.', 'warn'); |
|
|
} |
|
|
|
|
|
resizeCanvas() { |
|
|
if (!this.outputCanvas) return; |
|
|
const rect = this.outputCanvas.parentElement.getBoundingClientRect(); |
|
|
this.outputCanvas.width = rect.width; |
|
|
this.outputCanvas.height = rect.height; |
|
|
if (!this.isGenerating) this.drawStaticNoise(); |
|
|
} |
|
|
|
|
|
log(message, type = 'info') { |
|
|
const div = document.createElement('div'); |
|
|
div.className = `log-line ${type}`; |
|
|
const time = new Date().toLocaleTimeString('en-US', { hour12: false }); |
|
|
div.innerHTML = `<span class="ts">[${time}]</span> ${message}`; |
|
|
this.logs.appendChild(div); |
|
|
this.logs.scrollTop = this.logs.scrollHeight; |
|
|
} |
|
|
|
|
|
async startGeneration() { |
|
|
if (this.isGenerating) return; |
|
|
|
|
|
|
|
|
this.isGenerating = true; |
|
|
document.getElementById('start-btn').disabled = true; |
|
|
document.getElementById('prompt-input').disabled = true; |
|
|
document.getElementById('image-input').disabled = true; |
|
|
document.getElementById('quantum-influence').disabled = true; |
|
|
document.getElementById('entanglement-depth').disabled = true; |
|
|
document.getElementById('sampling-method').disabled = true; |
|
|
|
|
|
document.getElementById('generation-stats').style.display = 'block'; |
|
|
|
|
|
try { |
|
|
if (!this.sourceImage) { |
|
|
this.log('Error: Source image required for I2V generation.', 'error'); |
|
|
alert("Please upload a source image first."); |
|
|
return; |
|
|
} |
|
|
|
|
|
const prompt = document.getElementById('prompt-input').value.trim() || "Quantum interpolation"; |
|
|
|
|
|
|
|
|
this.log('Checking backend availability...', 'info'); |
|
|
try { |
|
|
const health = await this.callBackendApi('/'); |
|
|
this.log(`Backend Status: ${health.status} (LLM: ${health.llm_status}, CLIP: ${health.clip_status})`, 'success'); |
|
|
if (health.llm_status.includes("not loaded") || health.clip_status.includes("not loaded")) { |
|
|
throw new Error("One or more AI models not loaded on backend. Check backend console."); |
|
|
} |
|
|
} catch (error) { |
|
|
this.log(`Backend not available or unhealthy: ${error.message}. Please ensure your Python Flask backend is running.`, 'error'); |
|
|
alert(`Backend Error: ${error.message}. Please start the backend.`); |
|
|
return; |
|
|
} |
|
|
|
|
|
|
|
|
this.log(`Initializing I2V pipeline for: "${prompt.substring(0, 30)}..."`, 'info'); |
|
|
|
|
|
|
|
|
await this.phaseInitialization(); |
|
|
|
|
|
|
|
|
await this.phaseQuantumCircuit(); |
|
|
|
|
|
|
|
|
await this.phaseWebGPU(); |
|
|
|
|
|
|
|
|
|
|
|
await this.phaseRealDiffusion(prompt); |
|
|
|
|
|
this.log('Generation Sequence Complete.', 'success'); |
|
|
document.getElementById('generation-stats').innerHTML = 'GENERATION COMPLETE'; |
|
|
|
|
|
|
|
|
if (this.directorMode && this.movieFrames.length > 0) { |
|
|
this.prepareNextContext(); |
|
|
} |
|
|
|
|
|
} catch (error) { |
|
|
this.log(`System Error during generation: ${error.message}`, 'error'); |
|
|
document.getElementById('generation-stats').innerHTML = `ERROR: ${error.message}`; |
|
|
console.error(error); |
|
|
} finally { |
|
|
|
|
|
this.isGenerating = false; |
|
|
document.getElementById('start-btn').disabled = false; |
|
|
document.getElementById('prompt-input').disabled = false; |
|
|
document.getElementById('image-input').disabled = false; |
|
|
document.getElementById('quantum-influence').disabled = false; |
|
|
document.getElementById('entanglement-depth').disabled = false; |
|
|
document.getElementById('sampling-method').disabled = false; |
|
|
} |
|
|
} |
|
|
|
|
|
prepareNextContext() { |
|
|
|
|
|
const lastFrameBitmap = this.movieFrames[this.movieFrames.length - 1]; |
|
|
|
|
|
|
|
|
const canvas = document.createElement('canvas'); |
|
|
canvas.width = this.outputCanvas.width; |
|
|
canvas.height = this.outputCanvas.height; |
|
|
const ctx = canvas.getContext('2d'); |
|
|
ctx.drawImage(lastFrameBitmap, 0, 0); |
|
|
|
|
|
|
|
|
const newUrl = canvas.toDataURL(); |
|
|
const nextImg = new Image(); |
|
|
nextImg.onload = () => { |
|
|
this.sourceImage = nextImg; |
|
|
|
|
|
const preview = document.getElementById('preview-img'); |
|
|
preview.src = newUrl; |
|
|
this.log('Director Mode: Context refreshed. Last frame set as input for next sequence.', 'secondary'); |
|
|
}; |
|
|
nextImg.src = newUrl; |
|
|
} |
|
|
|
|
|
async sleep(ms) { |
|
|
return new Promise(r => setTimeout(r, ms)); |
|
|
} |
|
|
|
|
|
async phaseInitialization() { |
|
|
this.log('Allocating WebGPU buffers for I2V tensor...', 'info'); |
|
|
await this.sleep(600); |
|
|
this.log('Quantizing source image to 512-dim latent space...', 'info'); |
|
|
await this.sleep(800); |
|
|
} |
|
|
|
|
|
async phaseQuantumCircuit() { |
|
|
this.log(`Constructing ${this.config.depth}-layer quantum circuit...`, 'info'); |
|
|
|
|
|
if (window.circuitViz) window.circuitViz.updateVizParameters(this.config.influence, this.config.depth); |
|
|
|
|
|
await this.sleep(1000); |
|
|
this.log('Applying Hadamard gates to initialization layer...', 'info'); |
|
|
await this.sleep(400); |
|
|
this.log(`Entangling qubits 0-511 with depth ${this.config.depth}...`, 'info'); |
|
|
await this.sleep(800); |
|
|
} |
|
|
|
|
|
async phaseWebGPU() { |
|
|
this.log('Compiling circuit to WGSL shaders...', 'info'); |
|
|
await this.sleep(600); |
|
|
this.log('Injecting quantum noise into CLIP embeddings...', 'info'); |
|
|
|
|
|
|
|
|
if (window.stateViz) window.stateViz.updateVizParameters(this.config.influence, this.config.depth); |
|
|
|
|
|
for (let i = 0; i < 5; i++) { |
|
|
await this.sleep(200); |
|
|
} |
|
|
|
|
|
const entropy = (Math.random() * 3 + 0.5).toFixed(4); |
|
|
document.getElementById('entropy-value').textContent = entropy; |
|
|
this.log(`Latent perturbation complete. Entropy: ${entropy}`, 'success'); |
|
|
} |
|
|
|
|
|
async phaseRealDiffusion(prompt) { |
|
|
this.log('Starting Frame-by-Frame Quantum Diffusion...', 'warn'); |
|
|
|
|
|
|
|
|
document.querySelector('[data-view="output"]').click(); |
|
|
|
|
|
|
|
|
let currentImage = this.sourceImage; |
|
|
const totalFrames = 48; |
|
|
let currentFrameDataURL = currentImage.src; |
|
|
|
|
|
for (let frame = 0; frame < totalFrames; frame++) { |
|
|
this.log(`Requesting guidance for Frame ${frame + 1}/${totalFrames}...`, 'info'); |
|
|
document.getElementById('generation-stats').innerHTML = `GETTING GUIDANCE FOR FRAME ${frame + 1}/${totalFrames}<br>Quantum-Diffusing...`; |
|
|
|
|
|
|
|
|
const guidanceResponse = await this.callBackendApi('/generate_frame_guidance', { |
|
|
image: currentFrameDataURL, |
|
|
prompt: prompt, |
|
|
influence: this.config.influence, |
|
|
depth: this.config.depth, |
|
|
frame_number: frame |
|
|
}); |
|
|
|
|
|
const llmGuidance = guidanceResponse.guidance; |
|
|
this.log(`LLM Guidance (Frame ${frame + 1}): ${llmGuidance.substring(0, 80)}...`, 'secondary'); |
|
|
|
|
|
document.getElementById('generation-stats').innerHTML = `RENDERING FRAME ${frame + 1}/${totalFrames}<br>Applying Quantum Effects...`; |
|
|
|
|
|
|
|
|
const newFrameDataURL = await this.renderFrameTransition(currentImage, this.config.influence, llmGuidance, frame); |
|
|
|
|
|
|
|
|
currentImage = await this this.loadImageFromDataURL(newFrameDataURL); |
|
|
currentFrameDataURL = newFrameDataURL; |
|
|
|
|
|
|
|
|
if (this.directorMode) { |
|
|
const bitmap = await createImageBitmap(this.outputCanvas); |
|
|
this.movieFrames.push(bitmap); |
|
|
this.updateDirectorUI(); |
|
|
} |
|
|
|
|
|
await this.sleep(50); |
|
|
} |
|
|
} |
|
|
|
|
|
async loadImageFromDataURL(dataURL) { |
|
|
return new Promise((resolve, reject) => { |
|
|
const img = new Image(); |
|
|
img.onload = () => resolve(img); |
|
|
img.onerror = reject; |
|
|
img.src = dataURL; |
|
|
}); |
|
|
} |
|
|
|
|
|
async renderFrameTransition(currentImage, influence, llmGuidance, frameNumber) { |
|
|
const w = this.outputCanvas.width; |
|
|
const h = this.outputCanvas.height; |
|
|
this.outputCtx.clearRect(0, 0, w, h); |
|
|
|
|
|
|
|
|
const tempCanvas = document.createElement('canvas'); |
|
|
tempCanvas.width = w; |
|
|
tempCanvas.height = h; |
|
|
const tempCtx = tempCanvas.getContext('2d'); |
|
|
|
|
|
|
|
|
const aspectRatio = currentImage.width / currentImage.height; |
|
|
let drawWidth = w; |
|
|
let drawHeight = h; |
|
|
if (w / h > aspectRatio) { |
|
|
drawWidth = h * aspectRatio; |
|
|
} else { |
|
|
drawHeight = w / aspectRatio; |
|
|
} |
|
|
const offsetX = (w - drawWidth) / 2; |
|
|
const offsetY = (h - drawHeight) / 2; |
|
|
tempCtx.drawImage(currentImage, offsetX, offsetY, drawWidth, drawHeight); |
|
|
|
|
|
|
|
|
let imageData = tempCtx.getImageData(0, 0, w, h); |
|
|
let data = imageData.data; |
|
|
|
|
|
|
|
|
const instructions = llmGuidance.toLowerCase().split(',').map(s => s.trim()); |
|
|
let pixelShiftX = 0; |
|
|
let pixelShiftY = 0; |
|
|
let colorShiftR = 0; |
|
|
let colorShiftG = 0; |
|
|
let colorShiftB = 0; |
|
|
let blurRadius = 0; |
|
|
let zoomFactor = 1; |
|
|
let staticOverlayOpacity = 0; |
|
|
|
|
|
for (const instruction of instructions) { |
|
|
if (instruction.includes("shift red by")) { |
|
|
colorShiftR += parseInt(instruction.match(/by (-?\d+)/)?.[1] || "0"); |
|
|
} else if (instruction.includes("shift green by")) { |
|
|
colorShiftG += parseInt(instruction.match(/by (-?\d+)/)?.[1] || "0"); |
|
|
} else if (instruction.includes("shift blue by")) { |
|
|
colorShiftB += parseInt(instruction.match(/by (-?\d+)/)?.[1] || "0"); |
|
|
} else if (instruction.includes("pixel displacement x-axis")) { |
|
|
pixelShiftX += parseInt(instruction.match(/random (-?\d+)px/)?.[1] || "0"); |
|
|
} else if (instruction.includes("pixel displacement y-axis")) { |
|
|
pixelShiftY += parseInt(instruction.match(/random (-?\d+)px/)?.[1] || "0"); |
|
|
} else if (instruction.includes("apply gaussian blur radius")) { |
|
|
blurRadius = Math.max(blurRadius, parseInt(instruction.match(/radius (\d+)/)?.[1] || "0")); |
|
|
} else if (instruction.includes("zoom in")) { |
|
|
zoomFactor *= (1 + parseFloat(instruction.match(/zoom in (\d+(\.\d+)?)/)?.[1] || "0")); |
|
|
} else if (instruction.includes("zoom out")) { |
|
|
zoomFactor /= (1 + parseFloat(instruction.match(/zoom out (\d+(\.\d+)?)/)?.[1] || "0")); |
|
|
} else if (instruction.includes("static pattern opacity")) { |
|
|
staticOverlayOpacity = Math.max(staticOverlayOpacity, parseFloat(instruction.match(/opacity (\d+(\.\d+)?)/)?.[1] || "0")); |
|
|
} |
|
|
|
|
|
} |
|
|
|
|
|
|
|
|
const tempImageData = tempCtx.createImageData(w, h); |
|
|
const tempData = tempImageData.data; |
|
|
|
|
|
for (let y = 0; y < h; y++) { |
|
|
for (let x = 0; x < w; x++) { |
|
|
const originalIndex = (y * w + x) * 4; |
|
|
|
|
|
const shiftedX = (x - pixelShiftX + w) % w; |
|
|
const shiftedY = (y - pixelShiftY + h) % h; |
|
|
const shiftedIndex = (shiftedY * w + shiftedX) * 4; |
|
|
|
|
|
if (shiftedIndex >= 0 && shiftedIndex < data.length) { |
|
|
tempData[originalIndex] = Math.min(255, Math.max(0, data[shiftedIndex] + colorShiftR)); |
|
|
tempData[originalIndex + 1] = Math.min(255, Math.max(0, data[shiftedIndex + 1] + colorShiftG)); |
|
|
tempData[originalIndex + 2] = Math.min(255, Math.max(0, data[shiftedIndex + 2] + colorShiftB)); |
|
|
tempData[originalIndex + 3] = data[shiftedIndex + 3]; |
|
|
} else { |
|
|
|
|
|
tempData[originalIndex] = 0; |
|
|
tempData[originalIndex + 1] = 0; |
|
|
tempData[originalIndex + 2] = 0; |
|
|
tempData[originalIndex + 3] = 255; |
|
|
} |
|
|
} |
|
|
} |
|
|
imageData = tempImageData; |
|
|
|
|
|
|
|
|
if (blurRadius > 0) { |
|
|
const blurredImageData = tempCtx.createImageData(w, h); |
|
|
const blurredData = blurredImageData.data; |
|
|
for (let y = 0; y < h; y++) { |
|
|
for (let x = 0; x < w; x++) { |
|
|
let rSum = 0, gSum = 0, bSum = 0, aSum = 0; |
|
|
let count = 0; |
|
|
for (let ky = -blurRadius; ky <= blurRadius; ky++) { |
|
|
for (let kx = -blurRadius; kx <= blurRadius; kx++) { |
|
|
const nx = x + kx; |
|
|
const ny = y + ky; |
|
|
if (nx >= 0 && nx < w && ny >= 0 && ny < h) { |
|
|
const index = (ny * w + nx) * 4; |
|
|
rSum += data[index]; |
|
|
gSum += data[index + 1]; |
|
|
bSum += data[index + 2]; |
|
|
aSum += data[index + 3]; |
|
|
count++; |
|
|
} |
|
|
} |
|
|
} |
|
|
const outputIndex = (y * w + x) * 4; |
|
|
blurredData[outputIndex] = rSum / count; |
|
|
blurredData[outputIndex + 1] = gSum / count; |
|
|
blurredData[outputIndex + 2] = bSum / count; |
|
|
blurredData[outputIndex + 3] = aSum / count; |
|
|
} |
|
|
} |
|
|
imageData = blurredImageData; |
|
|
} |
|
|
|
|
|
|
|
|
if (staticOverlayOpacity > 0) { |
|
|
for (let i = 0; i < imageData.data.length; i += 4) { |
|
|
const staticValue = Math.random() * 255; |
|
|
imageData.data[i] = (imageData.data[i] * (1 - staticOverlayOpacity)) + (staticValue * staticOverlayOpacity); |
|
|
imageData.data[i+1] = (imageData.data[i+1] * (1 - staticOverlayOpacity)) + (staticValue * staticOverlayOpacity); |
|
|
imageData.data[i+2] = (imageData.data[i+2] * (1 - staticOverlayOpacity)) + (staticValue * staticOverlayOpacity); |
|
|
} |
|
|
} |
|
|
|
|
|
|
|
|
tempCtx.putImageData(imageData, 0, 0); |
|
|
|
|
|
|
|
|
const zoomedWidth = w * zoomFactor; |
|
|
const zoomedHeight = h * zoomFactor; |
|
|
const zoomOffsetX = (w - zoomedWidth) / 2; |
|
|
const zoomOffsetY = (h - zoomedHeight) / 2; |
|
|
|
|
|
this.outputCtx.drawImage(tempCanvas, zoomOffsetX, zoomOffsetY, zoomedWidth, zoomedHeight); |
|
|
|
|
|
|
|
|
if (influence > 50 && frameNumber % 5 === 0) { |
|
|
this.drawCircuitOverlay(); |
|
|
} |
|
|
|
|
|
|
|
|
return this.outputCanvas.toDataURL(); |
|
|
} |
|
|
|
|
|
drawCircuitOverlay() { |
|
|
const ctx = this.outputCtx; |
|
|
const w = this.outputCanvas.width; |
|
|
const h = this.outputCanvas.height; |
|
|
|
|
|
ctx.strokeStyle = 'rgba(0, 240, 255, 0.3)'; |
|
|
ctx.lineWidth = 1; |
|
|
ctx.beginPath(); |
|
|
const y = Math.random() * h; |
|
|
ctx.moveTo(0, y); |
|
|
ctx.lineTo(w, y); |
|
|
ctx.stroke(); |
|
|
|
|
|
ctx.fillStyle = 'rgba(0, 240, 255, 0.5)'; |
|
|
|
|
|
const fontSize = Math.max(10, Math.min(w, h) / 30); |
|
|
ctx.font = `${fontSize}px Arial`; |
|
|
ctx.fillText(`Q-GATE-${Math.floor(Math.random()*100)}`, 10, y - 5); |
|
|
} |
|
|
|
|
|
drawStaticNoise() { |
|
|
const w = this.outputCanvas.width; |
|
|
const h = this.outputCanvas.height; |
|
|
const id = this.outputCtx.createImageData(w, h); |
|
|
const d = id.data; |
|
|
|
|
|
for (let i = 0; i < d.length; i += 4) { |
|
|
const v = Math.random() * 20; |
|
|
d[i] = v; d[i+1] = v; d[i+2] = v + 10; d[i+3] = 255; |
|
|
} |
|
|
this.outputCtx.putImageData(id, 0, 0); |
|
|
} |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
// Render the accumulated Director Mode frames into a WebM video (via a
// MediaRecorder capturing an offscreen canvas at 30fps) and trigger a
// browser download. The download button is locked while rendering and
// restored both on success (in onstop) and on failure (in catch).
async downloadMovie() {
// Nothing to export without at least one captured frame.
if (this.movieFrames.length === 0) return;
// Lock the button and remember its label so it can be restored later.
const btn = document.getElementById('download-btn');
const originalText = btn.innerHTML;
btn.disabled = true;
btn.innerHTML = 'RENDER...';
this.log('Starting Movie Rendering...', 'info');
try {
// Offscreen canvas matching the output dimensions; its stream is recorded.
const canvas = document.createElement('canvas');
canvas.width = this.outputCanvas.width;
canvas.height = this.outputCanvas.height;
const ctx = canvas.getContext('2d');
// 30fps capture stream; prefer VP9 where the browser supports it.
const stream = canvas.captureStream(30);
const mimeType = MediaRecorder.isTypeSupported('video/webm;codecs=vp9')
? 'video/webm;codecs=vp9'
: 'video/webm';
const recorder = new MediaRecorder(stream, {
mimeType: mimeType,
videoBitsPerSecond: 5000000
});
// Collect encoded chunks as they become available.
const chunks = [];
recorder.ondataavailable = (e) => {
if (e.data.size > 0) chunks.push(e.data);
};
// On stop: assemble the blob, trigger the download, restore the button.
recorder.onstop = () => {
const blob = new Blob(chunks, { type: 'video/webm' });
const url = URL.createObjectURL(blob);
const a = document.createElement('a');
a.href = url;
a.download = `wan-quantum-director-cut-${Date.now()}.webm`;
a.click();
URL.revokeObjectURL(url);
this.log('Movie Downloaded Successfully.', 'success');
btn.innerHTML = originalText;
btn.disabled = false;
};
// Recording must start before frames are drawn so every frame is captured.
recorder.start();
// Pace frame drawing in real time (~33ms each) so the captured stream
// plays back at 30fps.
const frameDuration = 1000 / 30;
for (const bitmap of this.movieFrames) {
ctx.drawImage(bitmap, 0, 0);
await new Promise(r => setTimeout(r, frameDuration));
}
recorder.stop();
} catch (e) {
this.log(`Export failed: ${e.message}`, 'error');
// Restore the button here too; onstop never fires on a setup failure.
btn.innerHTML = originalText;
btn.disabled = false;
}
}