<!doctype html>
<html lang="en">
<head>
  <meta charset="UTF-8">
  <meta name="viewport" content="width=device-width, initial-scale=1.0">
  <title>Anycoder Raytrace Morph FX</title>
  <!-- Tailwind CSS for the utility classes used throughout the markup -->
  <script src="https://cdn.tailwindcss.com"></script>
  <!-- NOTE(review): Three.js is loaded but never referenced by the inline script; kept for compatibility — confirm before removing -->
  <script src="https://cdnjs.cloudflare.com/ajax/libs/three.js/r128/three.min.js"></script>
  <!-- Google Fonts: Orbitron (HUD/headings) and Rajdhani (body text) -->
  <link href="https://fonts.googleapis.com/css2?family=Orbitron:wght@400;700;900&family=Rajdhani:wght@300;500;700&display=swap" rel="stylesheet">
  <style>
    :root {
      --neon-primary: #00f3ff;
      --neon-secondary: #bc13fe;
      --bg-dark: #050505;
      --panel-bg: rgba(20, 20, 30, 0.85);
    }
    body {
      background-color: var(--bg-dark);
      color: #ffffff;
      font-family: 'Rajdhani', sans-serif;
      overflow: hidden; /* Prevent scrolling, app-like feel */
      margin: 0;
      height: 100vh;
      width: 100vw;
    }
    /* Custom scrollbar (sidebar overflow) */
    ::-webkit-scrollbar {
      width: 6px;
    }
    ::-webkit-scrollbar-track {
      background: #111;
    }
    ::-webkit-scrollbar-thumb {
      background: var(--neon-primary);
      border-radius: 3px;
    }
    /* Canvas container */
    #canvas-wrapper {
      position: relative;
      width: 100%;
      height: 100%;
      display: flex;
      justify-content: center;
      align-items: center;
      background: radial-gradient(circle at center, #1a1a2e 0%, #000000 100%);
      box-shadow: inset 0 0 100px rgba(0,0,0,0.9);
    }
    canvas {
      max-width: 100%;
      max-height: 100%;
      object-fit: contain;
      box-shadow: 0 0 30px rgba(0, 243, 255, 0.1);
    }
    /* UI overlays */
    .hud-panel {
      background: var(--panel-bg);
      backdrop-filter: blur(10px);
      border: 1px solid rgba(255, 255, 255, 0.1);
      border-left: 3px solid var(--neon-primary);
      transition: all 0.3s ease;
    }
    .hud-panel:hover {
      border-left: 3px solid var(--neon-secondary);
      box-shadow: 0 0 15px rgba(188, 19, 254, 0.2);
    }
    /* Range slider styling */
    input[type=range] {
      -webkit-appearance: none;
      width: 100%;
      background: transparent;
    }
    input[type=range]::-webkit-slider-thumb {
      -webkit-appearance: none;
      height: 16px;
      width: 16px;
      border-radius: 50%;
      background: var(--neon-primary);
      cursor: pointer;
      margin-top: -6px;
      box-shadow: 0 0 10px var(--neon-primary);
    }
    input[type=range]::-webkit-slider-runnable-track {
      width: 100%;
      height: 4px;
      cursor: pointer;
      background: #333;
      border-radius: 2px;
    }
    /* Animations */
    @keyframes pulse-border {
      0% { border-color: rgba(0, 243, 255, 0.3); }
      50% { border-color: rgba(0, 243, 255, 1); }
      100% { border-color: rgba(0, 243, 255, 0.3); }
    }
    .active-mode {
      animation: pulse-border 2s infinite;
      background: rgba(0, 243, 255, 0.1);
    }
    /* Hidden by default; JS switches display to flex while processing.
       (Fix: removed a duplicate `display: flex` that was always overridden
       by the `display: none` below — behavior unchanged.) */
    .loading-overlay {
      position: absolute;
      top: 0; left: 0; right: 0; bottom: 0;
      background: rgba(0,0,0,0.8);
      justify-content: center;
      align-items: center;
      z-index: 50;
      display: none;
    }
    /* Typography */
    .font-tech {
      font-family: 'Orbitron', sans-serif;
    }
    .brand-link {
      color: var(--neon-primary);
      text-decoration: none;
      position: relative;
    }
    .brand-link::after {
      content: '';
      position: absolute;
      width: 0;
      height: 1px;
      bottom: -2px;
      left: 0;
      background-color: var(--neon-primary);
      transition: width 0.3s;
    }
    .brand-link:hover::after {
      width: 100%;
    }
    /* CRT scanline effect overlay (pure CSS, sits above the canvas) */
    .scanlines {
      position: absolute;
      top: 0; left: 0; width: 100%; height: 100%;
      background: linear-gradient(rgba(18, 16, 16, 0) 50%, rgba(0, 0, 0, 0.1) 50%), linear-gradient(90deg, rgba(255, 0, 0, 0.03), rgba(0, 255, 0, 0.01), rgba(0, 0, 255, 0.03));
      background-size: 100% 4px, 6px 100%;
      pointer-events: none;
      z-index: 10;
    }
  </style>
</head>
<body class="flex flex-col h-screen">
  <!-- Header -->
  <header class="h-14 flex items-center justify-between px-6 bg-black border-b border-gray-800 z-20">
    <div class="flex items-center gap-3">
      <div class="w-3 h-3 bg-cyan-400 rounded-full shadow-[0_0_10px_#00f3ff]"></div>
      <h1 class="text-xl font-tech font-bold tracking-wider text-white">RAYTRACE <span class="text-cyan-400">MORPH</span></h1>
    </div>
    <div class="text-xs text-gray-400 font-mono">
      Built with <a class="brand-link font-bold" href="https://huggingface.co/spaces/akhaliq/anycoder" target="_blank" rel="noopener noreferrer">anycoder</a>
    </div>
  </header>
  <!-- Main workspace -->
  <main class="flex-1 flex overflow-hidden relative">
    <!-- Sidebar controls -->
    <aside class="w-80 bg-gray-900 border-r border-gray-800 flex flex-col overflow-y-auto z-20 shadow-2xl">
      <!-- Input section -->
      <div class="p-4 border-b border-gray-800">
        <h2 class="text-sm font-tech text-cyan-400 mb-3 uppercase tracking-widest">Source Input</h2>
        <!-- Image input (fix: labels now reference their controls via for/id) -->
        <div class="mb-4">
          <label class="block text-xs text-gray-500 mb-1" for="imageInput">IMAGES (Multi)</label>
          <input class="block w-full text-xs text-gray-400 file:mr-2 file:py-2 file:px-2 file:rounded file:border-0 file:text-xs file:font-semibold file:bg-cyan-900 file:text-cyan-400 hover:file:bg-cyan-800 cursor-pointer" id="imageInput" type="file" multiple accept="image/*">
        </div>
        <!-- Video input -->
        <div class="mb-4">
          <label class="block text-xs text-gray-500 mb-1" for="videoInput">VIDEO SOURCE</label>
          <input class="block w-full text-xs text-gray-400 file:mr-2 file:py-2 file:px-2 file:rounded file:border-0 file:text-xs file:font-semibold file:bg-purple-900 file:text-purple-400 hover:file:bg-purple-800 cursor-pointer" id="videoInput" type="file" accept="video/*">
        </div>
        <!-- Audio input -->
        <div>
          <label class="block text-xs text-gray-500 mb-1" for="audioInput">AUDIO (BPM SYNC)</label>
          <input class="block w-full text-xs text-gray-400 file:mr-2 file:py-2 file:px-2 file:rounded file:border-0 file:text-xs file:font-semibold file:bg-green-900 file:text-green-400 hover:file:bg-green-800 cursor-pointer" id="audioInput" type="file" accept="audio/*">
          <div id="bpmDisplay" class="text-right text-xs font-mono text-green-400 mt-1">BPM: AUTO</div>
        </div>
      </div>
      <!-- FX controls -->
      <div class="p-4 border-b border-gray-800 space-y-5">
        <h2 class="text-sm font-tech text-cyan-400 mb-1 uppercase tracking-widest">FX Parameters</h2>
        <!-- FPS -->
        <div>
          <div class="flex justify-between mb-1">
            <label class="text-xs text-gray-400" for="fpsInput">Target FPS</label>
            <input class="w-12 bg-black border border-gray-700 text-right text-xs text-cyan-400 focus:outline-none focus:border-cyan-500 rounded px-1" id="fpsInput" type="number" value="60">
          </div>
          <input id="fpsSlider" type="range" min="1" max="144" value="60" aria-label="Target FPS">
        </div>
        <!-- Morph intensity -->
        <div>
          <div class="flex justify-between mb-1">
            <label class="text-xs text-gray-400" for="morphSlider">Morph Intensity</label>
            <span id="morphVal" class="text-xs font-mono text-purple-400">50%</span>
          </div>
          <input id="morphSlider" type="range" min="0" max="100" value="50">
        </div>
        <!-- Contrast shaping -->
        <div>
          <div class="flex justify-between mb-1">
            <label class="text-xs text-gray-400" for="contrastSlider">Contrast / Shaping</label>
            <span id="contrastVal" class="text-xs font-mono text-yellow-400">1.0</span>
          </div>
          <input id="contrastSlider" type="range" min="0" max="300" value="100">
        </div>
        <!-- Transition speed -->
        <div>
          <div class="flex justify-between mb-1">
            <label class="text-xs text-gray-400" for="speedSlider">Transition Speed (ms)</label>
            <span id="speedVal" class="text-xs font-mono text-white">1000ms</span>
          </div>
          <input id="speedSlider" type="range" min="100" max="5000" step="100" value="1000">
        </div>
        <!-- Aspect-ratio lock (fix: explicit type="button" on all buttons) -->
        <div class="flex items-center justify-between">
          <label class="text-xs text-gray-400" for="cropToggle">Force 9:16 Crop</label>
          <button class="w-10 h-5 bg-gray-700 rounded-full relative transition-colors duration-300" id="cropToggle" type="button">
            <div class="w-3 h-3 bg-white rounded-full absolute top-1 left-1 transition-transform duration-300"></div>
          </button>
        </div>
      </div>
      <!-- Action buttons -->
      <div class="p-4 mt-auto">
        <button class="w-full py-3 bg-cyan-600 hover:bg-cyan-500 text-white font-tech font-bold rounded shadow-[0_0_15px_rgba(8,145,178,0.5)] transition-all mb-2" id="playPauseBtn" type="button">
          START RENDER
        </button>
        <button class="w-full py-2 border border-gray-600 hover:border-white text-gray-400 hover:text-white text-xs rounded transition-all" id="resetBtn" type="button">
          RESET SYSTEM
        </button>
      </div>
    </aside>
    <!-- Canvas area -->
    <div id="canvas-wrapper" class="flex-1 relative">
      <div class="scanlines" aria-hidden="true"></div>
      <!-- Main render canvas: fixed 1080x1920 (9:16) internal buffer -->
      <canvas id="mainCanvas" width="1080" height="1920"></canvas>
      <!-- Hidden video element used as a drawImage source -->
      <video id="sourceVideo" loop muted playsinline style="display: none;"></video>
      <!-- HUD overlay -->
      <div class="absolute top-4 right-4 text-right pointer-events-none">
        <div class="text-4xl font-tech font-bold text-white/20" id="fpsCounter">00</div>
        <div class="text-xs text-cyan-400/60 font-mono">REALTIME FPS</div>
      </div>
      <!-- Loading indicator (shown/hidden by the script) -->
      <div id="loading" class="loading-overlay">
        <div class="text-center">
          <div class="w-12 h-12 border-4 border-cyan-500 border-t-transparent rounded-full animate-spin mx-auto mb-4"></div>
          <div class="text-cyan-400 font-mono text-sm tracking-widest">PROCESSING DATA...</div>
        </div>
      </div>
      <!-- Hidden element for audio analysis/playback -->
      <audio id="audioPlayer" style="display:none;"></audio>
    </div>
  </main>
<script>
/**
 * APPLICATION STATE & CONFIGURATION
 * Single mutable state object shared by every handler below.
 */
const state = {
  isPlaying: false,
  fps: 60,
  targetFps: 60,
  lastFrameTime: 0,
  frameInterval: 1000 / 60,     // ms between rendered frames at targetFps
  // Assets
  images: [],                   // ImageBitmap[] decoded from the file input
  currentImageIndex: 0,
  nextImageIndex: 1,
  videoMode: false,             // true while a video is the active source
  // FX parameters
  morphAmount: 0.5,             // 0..1 (morph slider / 100)
  contrast: 1.0,                // 0..3 (contrast slider / 100)
  transitionDuration: 1000,     // ms
  transitionProgress: 0,        // 0..1 during a transition
  isTransitioning: false,
  transitionStartTime: 0,
  // Layout
  forceCrop: false,             // true => cover (crop), false => contain
  aspectRatio: 9 / 16,
  // Audio
  audioContext: null,
  analyser: null,
  dataArray: null,              // Uint8Array of frequency bins
  bpm: 0,
  bassEnergy: 0
};

// Cached DOM references.
const canvas = document.getElementById('mainCanvas');
const ctx = canvas.getContext('2d', { willReadFrequently: true });
const video = document.getElementById('sourceVideo');
const audioPlayer = document.getElementById('audioPlayer');
const loading = document.getElementById('loading');
const fpsCounter = document.getElementById('fpsCounter');

/**
 * (Re)applies the fixed internal render resolution. The buffer stays at
 * 1080x1920 (9:16) for performance and consistency; CSS scales it to fit.
 * Fix: removed unused locals that queried the wrapper's bounding rect.
 */
function resizeCanvas() {
  canvas.width = 1080;
  canvas.height = 1920;
}
resizeCanvas();
/**
 * IMAGE HANDLING
 * Decodes every selected file to an ImageBitmap and resets the slideshow.
 * Fixes: decode errors no longer leave the loading overlay stuck visible
 * (try/finally), and nextImageIndex is only 1 when a second image exists
 * (previously a single image led to an out-of-range index after the first
 * transition).
 */
document.getElementById('imageInput').addEventListener('change', async (e) => {
  const files = Array.from(e.target.files);
  if (files.length === 0) return;
  loading.style.display = 'flex';
  state.images = [];
  state.videoMode = false;
  video.pause();
  try {
    for (const file of files) {
      state.images.push(await createImageBitmap(file));
    }
  } catch (err) {
    console.error('Failed to decode image:', err);
  } finally {
    loading.style.display = 'none';
  }
  state.currentImageIndex = 0;
  state.nextImageIndex = state.images.length > 1 ? 1 : 0;
  // Show the first image immediately when the loop is not running.
  if (!state.isPlaying) drawFrame(0);
});
/**
 * VIDEO HANDLING
 * Loads the selected file into the hidden <video> element and starts the
 * render loop once the first frame is decodable.
 * Fix: the previous blob URL is revoked before a new one is created —
 * re-selecting videos used to leak an object URL per selection.
 */
document.getElementById('videoInput').addEventListener('change', (e) => {
  const file = e.target.files[0];
  if (!file) return;
  if (video.src) URL.revokeObjectURL(video.src);
  video.src = URL.createObjectURL(file);
  state.videoMode = true;
  state.images = []; // image slideshow is superseded by the video
  video.onloadeddata = () => {
    video.play();
    if (!state.isPlaying) {
      state.isPlaying = true;
      state.lastFrameTime = performance.now();
      loop();
    }
  };
});
/**
 * AUDIO HANDLING & ANALYSIS
 * Routes the selected file through a Web Audio AnalyserNode so the render
 * loop can read frequency data (bass energy drives several effects).
 * Fix: resume the AudioContext if it was created in the 'suspended' state —
 * autoplay policies otherwise leave analysis silently producing zeros.
 * This change handler is a user gesture, so resume() is permitted here.
 */
document.getElementById('audioInput').addEventListener('change', (e) => {
  const file = e.target.files[0];
  if (!file) return;
  audioPlayer.src = URL.createObjectURL(file);
  if (!state.audioContext) {
    initAudio();
  }
  if (state.audioContext.state === 'suspended') {
    state.audioContext.resume();
  }
  audioPlayer.play();
});

/**
 * One-time Web Audio graph setup: media element -> analyser -> speakers.
 * Must run at most once; createMediaElementSource may only be called once
 * per media element (guarded by the !state.audioContext check above).
 */
function initAudio() {
  const AudioContext = window.AudioContext || window.webkitAudioContext;
  state.audioContext = new AudioContext();
  const source = state.audioContext.createMediaElementSource(audioPlayer);
  state.analyser = state.audioContext.createAnalyser();
  state.analyser.fftSize = 256; // 128 frequency bins
  source.connect(state.analyser);
  state.analyser.connect(state.audioContext.destination);
  state.dataArray = new Uint8Array(state.analyser.frequencyBinCount);
}
/**
 * Samples the analyser and refreshes state.bassEnergy; returns the current
 * bass level as a 0..1 value (0 when no analyser exists).
 * Fix: state.bassEnergy is now stored NORMALIZED to 0..1. It was previously
 * the raw byte average (0..255) while its consumers treat it as 0..1 — as a
 * stroke alpha, against the 0.1 threshold in applyPostProcessing, and as a
 * brightness boost in drawFrame (where /5 of 255 produced absurd values).
 * Also removed the unused `isBeat` local.
 */
function analyzeAudio() {
  if (!state.analyser) return 0;
  state.analyser.getByteFrequencyData(state.dataArray);
  // Average the 10 lowest-frequency bins for a crude bass estimate.
  let bassSum = 0;
  for (let i = 0; i < 10; i++) {
    bassSum += state.dataArray[i];
  }
  state.bassEnergy = (bassSum / 10) / 255; // normalized 0..1
  return state.bassEnergy;
}
/**
 * UI CONTROLS
 * Wires the sidebar sliders/inputs to state and their value read-outs.
 */
const fpsSlider = document.getElementById('fpsSlider');
const fpsInput = document.getElementById('fpsInput');

/**
 * Applies a new FPS target from either control, keeping both in sync.
 * Fix: the value is validated and clamped to the slider's 1-144 range;
 * previously a cleared/non-numeric number input yielded NaN, making
 * frameInterval NaN and silently freezing the render loop.
 */
function updateFps(val) {
  let fps = parseInt(val, 10);
  if (!Number.isFinite(fps)) fps = 60;
  fps = Math.min(144, Math.max(1, fps));
  state.targetFps = fps;
  state.frameInterval = 1000 / fps;
  fpsSlider.value = fps;
  fpsInput.value = fps;
}
fpsSlider.addEventListener('input', (e) => updateFps(e.target.value));
fpsInput.addEventListener('change', (e) => updateFps(e.target.value));

// Morph intensity: slider 0-100 mapped to 0..1.
document.getElementById('morphSlider').addEventListener('input', (e) => {
  state.morphAmount = e.target.value / 100;
  document.getElementById('morphVal').innerText = e.target.value + '%';
});
// Contrast: slider 0-300 mapped to a 0..3 CSS filter multiplier.
document.getElementById('contrastSlider').addEventListener('input', (e) => {
  state.contrast = e.target.value / 100;
  document.getElementById('contrastVal').innerText = state.contrast.toFixed(1);
});
// Transition duration in milliseconds.
document.getElementById('speedSlider').addEventListener('input', (e) => {
  state.transitionDuration = parseInt(e.target.value, 10);
  document.getElementById('speedVal').innerText = state.transitionDuration + 'ms';
});
// Crop toggle: flips state.forceCrop and restyles the switch to match.
const cropToggle = document.getElementById('cropToggle');
const cropKnob = cropToggle.querySelector('div');
cropToggle.addEventListener('click', () => {
  state.forceCrop = !state.forceCrop;
  const enabled = state.forceCrop;
  // classList.toggle with a force flag keeps classes in lockstep with state.
  cropToggle.classList.toggle('bg-cyan-600', enabled);
  cropToggle.classList.toggle('bg-gray-700', !enabled);
  cropKnob.classList.toggle('translate-x-5', enabled);
});
// Play/Pause: toggles the render loop; refuses to start with no source.
const playBtn = document.getElementById('playPauseBtn');
playBtn.addEventListener('click', () => {
  if (state.images.length === 0 && !state.videoMode) {
    alert("Please upload images or a video first.");
    return;
  }
  state.isPlaying = !state.isPlaying;
  playBtn.innerText = state.isPlaying ? "STOP RENDER" : "START RENDER";
  // Fix: force the classes from state (rather than blind toggles) so the
  // button color can never drift out of sync after a reset.
  playBtn.classList.toggle('bg-red-600', state.isPlaying);
  playBtn.classList.toggle('bg-cyan-600', !state.isPlaying);
  if (state.isPlaying) {
    state.lastFrameTime = performance.now();
    loop();
  }
});

// Reset: stops rendering AND playback, clears transition state and canvas.
// Fix: previously only the loop halted — the audio (and hidden video)
// kept playing after "RESET SYSTEM".
document.getElementById('resetBtn').addEventListener('click', () => {
  state.isPlaying = false;
  video.pause();
  audioPlayer.pause();
  state.isTransitioning = false;
  state.transitionProgress = 0;
  state.lastTransitionTime = 0;
  ctx.clearRect(0, 0, canvas.width, canvas.height);
  playBtn.innerText = "START RENDER";
  playBtn.classList.add('bg-cyan-600');
  playBtn.classList.remove('bg-red-600');
});
/**
 * CORE RENDER LOOP
 * Driven by requestAnimationFrame and throttled to state.targetFps.
 * Fix: `timestamp` defaults to performance.now() — the loop is kicked off
 * with a bare loop() call (play button / video load), so the first frame
 * previously computed NaN for `elapsed` and was silently dropped.
 */
function loop(timestamp = performance.now()) {
  if (!state.isPlaying) return;
  requestAnimationFrame(loop);
  const elapsed = timestamp - state.lastFrameTime;
  if (elapsed > state.frameInterval) {
    // Carry the remainder so throttling does not drift over time.
    state.lastFrameTime = timestamp - (elapsed % state.frameInterval);
    // Display the measured (not target) frame rate.
    fpsCounter.innerText = Math.round(1000 / elapsed);
    updateLogic(timestamp);
    drawFrame(timestamp);
  }
}
/**
 * Per-frame slideshow logic: decides when to start an image transition
 * (beat-triggered when bass spikes, timer-based otherwise) and advances a
 * running transition's progress.
 * Fix: bail out with fewer than two images — the old code still ran
 * transitions and advanced currentImageIndex past the end of state.images,
 * crashing drawFrame on the next frame.
 */
function updateLogic(timestamp) {
  const audioLevel = analyzeAudio();
  // Video mode: plain playback, no slideshow transitions.
  if (state.videoMode) {
    return;
  }
  if (state.images.length < 2) {
    return; // nothing to morph to
  }
  if (!state.isTransitioning) {
    if (!state.lastTransitionTime) state.lastTransitionTime = timestamp;
    // Beat trigger: bass spike, with a 500ms floor between transitions.
    if (audioLevel > 0.6 && (timestamp - state.lastTransitionTime > 500)) {
      startTransition(timestamp);
    }
    // Fallback timer: hold each image for twice the transition duration.
    else if ((timestamp - state.lastTransitionTime) > (state.transitionDuration * 2)) {
      startTransition(timestamp);
    }
  } else {
    const progress = timestamp - state.transitionStartTime;
    state.transitionProgress = Math.min(progress / state.transitionDuration, 1);
    if (state.transitionProgress >= 1) {
      // Transition complete: promote the incoming image and queue the next.
      state.isTransitioning = false;
      state.currentImageIndex = state.nextImageIndex;
      state.nextImageIndex = (state.currentImageIndex + 1) % state.images.length;
      state.transitionProgress = 0;
      state.lastTransitionTime = timestamp;
    }
  }
}

/** Marks a transition as started at `timestamp` (progress tracked in updateLogic). */
function startTransition(timestamp) {
  state.isTransitioning = true;
  state.transitionStartTime = timestamp;
}
/**
 * DRAWING & EFFECTS
 * Composes one output frame: base image/video layer (contrast + bass-driven
 * brightness), an optional cross-morph to the incoming image, then post FX.
 * Fix: in video mode, frames are skipped until the video reports decoded
 * dimensions — drawing while videoWidth was 0 fed a division by zero into
 * calculateCover and produced NaN geometry.
 */
function drawFrame(timestamp) {
  // 1. Clear the frame.
  ctx.clearRect(0, 0, canvas.width, canvas.height);
  // 2. Resolve sources and their intrinsic sizes.
  let source1 = null;
  let source2 = null;
  let w1, h1, w2, h2;
  if (state.videoMode) {
    if (video.readyState < 2 || !video.videoWidth) return;
    source1 = video;
    w1 = video.videoWidth;
    h1 = video.videoHeight;
  } else {
    if (state.images.length === 0) return;
    source1 = state.images[state.currentImageIndex];
    w1 = source1.width;
    h1 = source1.height;
    if (state.images.length > 1) {
      source2 = state.images[state.nextImageIndex];
      w2 = source2.width;
      h2 = source2.height;
    }
  }
  // 3. Base layer geometry (cover/contain per the crop toggle).
  const drawParams1 = calculateCover(canvas.width, canvas.height, w1, h1);
  // 4. Base layer with contrast + bass brightness boost.
  // NOTE(review): the brightness term assumes bassEnergy is a small value
  // (0..1); verify against analyzeAudio's scale.
  ctx.save();
  ctx.filter = `contrast(${state.contrast}) brightness(${1 + (state.bassEnergy / 5)})`;
  ctx.drawImage(source1, drawParams1.x, drawParams1.y, drawParams1.w, drawParams1.h);
  ctx.restore();
  // 5. Incoming layer during a transition: alpha fade, a sinusoidal scale
  // pulse, and a horizontal slide — all scaled by the morph slider.
  if (state.isTransitioning && source2) {
    const drawParams2 = calculateCover(canvas.width, canvas.height, w2, h2);
    ctx.save();
    ctx.globalAlpha = state.transitionProgress; // simple fade-in
    const scaleEffect = 1 + (Math.sin(state.transitionProgress * Math.PI) * state.morphAmount * 0.1);
    const centerX = canvas.width / 2;
    const centerY = canvas.height / 2;
    ctx.translate(centerX, centerY);
    ctx.scale(scaleEffect, scaleEffect);
    ctx.translate(-centerX, -centerY);
    ctx.filter = `contrast(${state.contrast})`;
    // Slide from the right toward rest as progress approaches 1.
    const slideX = (1 - state.transitionProgress) * (canvas.width * state.morphAmount);
    ctx.drawImage(source2, drawParams2.x + slideX, drawParams2.y, drawParams2.w, drawParams2.h);
    ctx.restore();
  }
  // 6. Post-processing overlays (vignette, aberration, waveform).
  applyPostProcessing(timestamp);
}
/**
 * Computes draw geometry for fitting an imgW x imgH source into a
 * canvasW x canvasH target, centered on both axes.
 * state.forceCrop true  => "cover": fill the canvas, cropping overflow.
 * state.forceCrop false => "contain": letterbox so the whole image shows.
 * Returns { x, y, w, h } in canvas coordinates.
 */
function calculateCover(canvasW, canvasH, imgW, imgH) {
  const imgRatio = imgW / imgH;
  const canvasRatio = canvasW / canvasH;
  // The two modes pick opposite branches of the same ratio comparison:
  // cover fills the canvas height when the image is the WIDER shape,
  // contain fills the height when the image is the NARROWER shape.
  const fillHeight = state.forceCrop
    ? (imgRatio > canvasRatio)
    : (imgRatio <= canvasRatio);
  if (fillHeight) {
    const drawH = canvasH;
    const drawW = imgW * (canvasH / imgH);
    return { x: (canvasW - drawW) / 2, y: 0, w: drawW, h: drawH };
  }
  const drawW = canvasW;
  const drawH = imgH * (canvasW / imgW);
  return { x: 0, y: (canvasH - drawH) / 2, w: drawW, h: drawH };
}
/**
 * Post-processing pass drawn on top of the composed frame:
 *   1. Vignette (radial darkening toward the edges).
 *   2. Bass-driven chromatic-aberration-style tint (screen-blended
 *      red/cyan rectangles offset horizontally by the bass level).
 *   3. Live frequency waveform stroked near the bottom of the frame.
 * The CRT scanline effect is handled purely in CSS (.scanlines overlay).
 * Fix: removed dead commented-out scanline-drawing code; logic unchanged.
 */
function applyPostProcessing(timestamp) {
  // 1. Vignette ("raytrace" light falloff).
  const gradient = ctx.createRadialGradient(
    canvas.width / 2, canvas.height / 2, canvas.height / 3,
    canvas.width / 2, canvas.height / 2, canvas.height
  );
  gradient.addColorStop(0, "rgba(0,0,0,0)");
  gradient.addColorStop(1, "rgba(0,0,0,0.8)");
  ctx.fillStyle = gradient;
  ctx.fillRect(0, 0, canvas.width, canvas.height);
  // 2. Chromatic aberration tint. The threshold and shift treat
  // bassEnergy as a small 0..1-style level (see analyzeAudio).
  if (state.bassEnergy > 0.1) {
    const shift = state.bassEnergy * 10;
    ctx.save();
    ctx.globalCompositeOperation = 'screen';
    ctx.fillStyle = `rgba(255, 0, 0, 0.05)`;
    ctx.fillRect(-shift, 0, canvas.width, canvas.height);
    ctx.fillStyle = `rgba(0, 255, 255, 0.05)`;
    ctx.fillRect(shift, 0, canvas.width, canvas.height);
    ctx.restore();
  }
  // 3. Waveform visualizer; stroke opacity follows the bass level
  // (the canvas clamps alpha values outside [0,1]).
  if (state.analyser) {
    ctx.save();
    ctx.beginPath();
    ctx.strokeStyle = `rgba(0, 243, 255, ${state.bassEnergy})`;
    ctx.lineWidth = 2;
    const sliceWidth = canvas.width / state.dataArray.length;
    let x = 0;
    for (let i = 0; i < state.dataArray.length; i++) {
      const v = state.dataArray[i] / 128.0;
      const y = v * (canvas.height / 4) + (canvas.height - 200); // near bottom
      if (i === 0) ctx.moveTo(x, y);
      else ctx.lineTo(x, y);
      x += sliceWidth;
    }
    ctx.stroke();
    ctx.restore();
  }
}
// Initial placeholder frame shown before any media is loaded.
ctx.fillStyle = "#111";
ctx.fillRect(0, 0, canvas.width, canvas.height);
ctx.fillStyle = "#333";
ctx.font = "30px Orbitron";
ctx.textAlign = "center";
ctx.fillText("NO SIGNAL", canvas.width / 2, canvas.height / 2);
</script>
</body>
</html>