<!-- anycoder-bf32b817 / index.html — uploaded via huggingface_hub by HI7RAI (commit 0ebb725, verified).
     Hosting-UI header residue converted to a comment so the document starts with a valid DOCTYPE. -->
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>Raytrace Morphing Slideshow</title>
<!-- Import Three.js -->
<script src="https://cdnjs.cloudflare.com/ajax/libs/three.js/r128/three.min.js"></script>
<style>
:root {
--bg-color: #050505;
--accent: #00f3ff;
--text: #ffffff;
--panel-bg: rgba(20, 20, 20, 0.8);
}
* {
box-sizing: border-box;
margin: 0;
padding: 0;
}
body {
background-color: var(--bg-color);
color: var(--text);
font-family: 'Segoe UI', Roboto, Helvetica, Arial, sans-serif;
height: 100vh;
width: 100vw;
display: flex;
flex-direction: column;
align-items: center;
justify-content: center;
overflow: hidden;
}
/* Header */
header {
position: absolute;
top: 0;
left: 0;
width: 100%;
padding: 15px;
z-index: 100;
display: flex;
justify-content: space-between;
align-items: center;
background: linear-gradient(to bottom, rgba(0,0,0,0.8), transparent);
}
header a {
color: var(--accent);
text-decoration: none;
font-weight: bold;
font-size: 0.9rem;
text-transform: uppercase;
letter-spacing: 2px;
text-shadow: 0 0 10px var(--accent);
}
/* Main Stage - 9:16 Aspect Ratio */
#stage-container {
position: relative;
width: 100%;
max-width: 60vh; /* Keeps height constraint on desktop */
aspect-ratio: 9 / 16;
background: #000;
box-shadow: 0 0 50px rgba(0, 243, 255, 0.1);
overflow: hidden;
border-radius: 12px;
}
/* WebGL Raytracing Overlay */
#fx-canvas {
position: absolute;
top: 0;
left: 0;
width: 100%;
height: 100%;
z-index: 10;
pointer-events: none;
mix-blend-mode: overlay;
opacity: 0.6;
}
/* Slideshow Layer */
#slideshow-layer {
position: absolute;
top: 0;
left: 0;
width: 100%;
height: 100%;
z-index: 5;
display: flex;
overflow: hidden;
}
.slide {
min-width: 100%;
height: 100%;
position: absolute;
top: 0;
left: 0;
will-change: transform, clip-path;
}
.slide img {
width: 100%;
height: 100%;
object-fit: cover;
/* Contrast shaping via CSS initially */
filter: contrast(1.1) saturate(1.1);
}
/* UI Controls Overlay */
.ui-overlay {
position: absolute;
bottom: 20px;
left: 0;
width: 100%;
z-index: 20;
padding: 20px;
background: linear-gradient(to top, rgba(0,0,0,0.9), transparent);
display: flex;
flex-direction: column;
gap: 10px;
}
.controls-row {
display: flex;
justify-content: space-between;
align-items: center;
}
.stat-box {
font-family: 'Courier New', monospace;
font-size: 0.8rem;
color: #aaa;
}
.stat-val {
color: var(--accent);
font-weight: bold;
}
input[type="file"] {
display: none;
}
.btn {
background: rgba(255,255,255,0.1);
border: 1px solid var(--accent);
color: var(--accent);
padding: 8px 16px;
border-radius: 20px;
cursor: pointer;
font-size: 0.8rem;
transition: all 0.3s ease;
}
.btn:hover {
background: var(--accent);
color: #000;
box-shadow: 0 0 15px var(--accent);
}
/* Hidden raw audio element */
#audio-source {
display: none;
}
/* BPM Meter */
#bpm-meter {
width: 100%;
height: 4px;
background: #333;
margin-top: 5px;
position: relative;
}
#bpm-bar {
height: 100%;
background: var(--accent);
width: 0%;
transition: width 0.1s linear;
}
</style>
</head>
<body>
<header>
<div class="stat-box">RAYTRACER <span class="stat-val">V.1.0</span></div>
<a href="https://huggingface.co/spaces/akhaliq/anycoder" target="_blank">Built with anycoder</a>
</header>
<div id="stage-container">
<!-- Raytracing/WebGL Background -->
<canvas id="fx-canvas"></canvas>
<!-- Slideshow Images -->
<div id="slideshow-layer">
<!-- Images injected via JS -->
</div>
<!-- Controls -->
<div class="ui-overlay">
<div class="controls-row">
<div class="stat-box">FPS: <span id="fps-counter" class="stat-val">0</span></div>
<div class="stat-box">BPM: <span id="bpm-counter" class="stat-val">--</span></div>
<div class="stat-box">EXP: <span id="exposure-counter" class="stat-val">100%</span></div>
</div>
<div id="bpm-meter"><div id="bpm-bar"></div></div>
<div class="controls-row" style="justify-content: center; margin-top: 10px;">
<button class="btn" onclick="document.getElementById('audio-upload').click()">Upload Audio</button>
<button class="btn" id="play-btn">Play / Pause</button>
</div>
</div>
</div>
<input type="file" id="audio-upload" accept="audio/*">
<audio id="audio-source"></audio>
<script>
// --- Configuration ---
// Tunable knobs for the shader overlay and slideshow pacing.
const config = {
  contrastFactor: 2.5,     // Math formula for contrast shaping
  raytracingSpeed: 1.0,    // FX speed multiplier
  transitionDuration: 1.2, // slide transition length, in seconds
  baseExposure: 1.0,       // neutral exposure level
};
// --- State ---
// Mutable runtime state shared by the audio, slideshow and render loops.
let state = {
  isPlaying: false,
  currentSlide: 0,
  progress: 0,        // 0 to 1 for current transition
  audioContext: null, // created lazily on first user gesture
  analyser: null,     // AnalyserNode feeding the beat detector
  dataArray: null,    // Uint8Array of frequency bins
  bpm: 0,
  beatLocked: false,  // fix: peak-detection debounce flag was read/written later but never declared
  lastFrameTime: 0,
  frameCount: 0,
  fps: 60,
  exposure: 1.0
};
// --- Placeholder Images (9:16 Vertical) ---
// Five portrait-orientation Unsplash photos; each id is expanded to a full
// crop/format URL with identical query parameters.
const imageSources = [
  'photo-1534528741775-53994a69daeb', // Portrait 1
  'photo-1517841905240-472988babdf9', // Portrait 2
  'photo-1531746020798-e6953c6e8e04', // Portrait 3
  'photo-1506794778202-cad84cf45f1d', // Portrait 4
  'photo-1500917293891-ef795e70e1f6'  // Portrait 5
].map((id) => `https://images.unsplash.com/${id}?ixlib=rb-4.0.3&auto=format&fit=crop&w=500&q=80`);
// --- DOM Elements ---
// Cache every element the loops touch; lookups happen once at load time.
const byId = (id) => document.getElementById(id);
const stage = byId('slideshow-layer');
const audioEl = byId('audio-source');
const fpsEl = byId('fps-counter');
const bpmEl = byId('bpm-counter');
const expEl = byId('exposure-counter');
const bpmBar = byId('bpm-bar');
const playBtn = byId('play-btn');
const fileInput = byId('audio-upload');
// --- Three.js Raytracing Shader Setup ---
// Renderer draws into the #fx-canvas overlay; alpha lets the slides beneath
// show through (the canvas is blended via CSS mix-blend-mode).
const canvas = document.getElementById('fx-canvas');
const renderer = new THREE.WebGLRenderer({ canvas: canvas, alpha: true });
// Size the GL canvas to the slideshow layer so the FX exactly covers the slides.
renderer.setSize(stage.clientWidth, stage.clientHeight);
const scene = new THREE.Scene();
// Unit ortho frustum; combined with the 2x2 plane below it forms a full-screen quad.
const camera = new THREE.OrthographicCamera(-1, 1, 1, -1, 0, 1);
// GLSL Shader for "Raytracing" / Morphing Overlay
// Fragment shader: value-noise RGB field with beat-driven chromatic shift,
// exposure-based contrast shaping, and a scanline overlay. Uniforms are fed
// each frame from the JS animation loop.
const fragmentShader = `
uniform float uTime;
uniform float uExposure;
uniform vec2 uResolution;
uniform float uBeat; // Audio reactivity
// Pseudo-Raymarching / Noise Function
float random(vec2 st) {
return fract(sin(dot(st.xy, vec2(12.9898,78.233))) * 43758.5453123);
}
float noise(vec2 st) {
vec2 i = floor(st);
vec2 f = fract(st);
float a = random(i);
float b = random(i + vec2(1.0, 0.0));
float c = random(i + vec2(0.0, 1.0));
float d = random(i + vec2(1.0, 1.0));
vec2 u = f * f * (3.0 - 2.0 * f);
return mix(a, b, u.x) + (c - a)* u.y * (1.0 - u.x) + (d - b) * u.x * u.y;
}
void main() {
vec2 uv = gl_FragCoord.xy / uResolution.xy;
// RGB Shift / Chromatic Aberration based on beat
float shift = 0.005 + (uBeat * 0.02);
float r = noise(uv * 10.0 + uTime * 0.5 + shift);
float g = noise(uv * 10.0 + uTime * 0.5);
float b = noise(uv * 10.0 + uTime * 0.5 - shift);
// Apply Contrast Shaping Formula
vec3 color = vec3(r, g, b);
color = (color - 0.5) * uExposure + 0.5;
// Scanline effect
float scanline = sin(uv.y * 800.0) * 0.05;
gl_FragColor = vec4(color - scanline, 1.0);
}
`;
// Vertex shader: pass-through for the full-screen quad (clip-space positions
// come straight from the 2x2 plane geometry).
const vertexShader = `
void main() {
gl_Position = vec4( position, 1.0 );
}
`;
// Uniforms shared between the JS animation loop and the GLSL overlay shader.
const uniforms = {
  uTime: { value: 0 },       // scrolling noise phase
  uResolution: { value: new THREE.Vector2(stage.clientWidth, stage.clientHeight) },
  uExposure: { value: 1.0 }, // audio-driven contrast gain
  uBeat: { value: 0.0 },     // normalised bass level
};
// A 2x2 plane under the [-1, 1] ortho camera covers the whole canvas.
const material = new THREE.ShaderMaterial({ uniforms, vertexShader, fragmentShader });
const plane = new THREE.Mesh(new THREE.PlaneGeometry(2, 2), material);
scene.add(plane);
// --- Initialization ---
/**
 * (Re)builds the slide stack from imageSources inside the slideshow layer,
 * then applies the initial layout via updateSlideVisuals().
 */
function initSlideshow() {
  // Drop slides from any previous run before rebuilding.
  stage.innerHTML = '';
  for (const [index, src] of imageSources.entries()) {
    const slide = document.createElement('div');
    slide.className = 'slide';
    slide.id = `slide-${index}`;
    const img = document.createElement('img');
    img.src = src;
    img.draggable = false; // block native image dragging over the stage
    slide.appendChild(img);
    stage.appendChild(slide);
  }
  updateSlideVisuals();
}
// --- Transition Logic (The Math) ---
/**
 * Resets every slide to its resting style, then stacks the active slide on
 * top (z-index 2) and parks the following slide off-screen to the right.
 */
function updateSlideVisuals() {
  const slides = document.querySelectorAll('.slide');
  if (slides.length === 0) return; // nothing to lay out yet
  const nextIndex = (state.currentSlide + 1) % imageSources.length;
  // Reset positions
  slides.forEach((s) => {
    const img = s.querySelector('img');
    s.style.transform = `translateX(0) scale(1)`;
    s.style.zIndex = 1;
    s.style.clipPath = 'inset(0 0 0 0)';
    // Fix: keep the saturate() boost from the stylesheet; the old reset
    // ('contrast(1.1)' alone) silently dropped it after the first call.
    img.style.filter = 'contrast(1.1) saturate(1.1)';
  });
  // Set Active Slide
  const currentSlideEl = slides[state.currentSlide];
  const nextSlideEl = slides[nextIndex];
  currentSlideEl.style.zIndex = 2;
  nextSlideEl.style.zIndex = 1;
  nextSlideEl.style.transform = `translateX(100%)`; // Start off-screen
}
/**
 * Main render loop: tracks FPS, reads the audio analyser to drive the
 * exposure / beat uniforms, and renders the shader overlay. Re-queues
 * itself via requestAnimationFrame while state.isPlaying is true.
 * @param {DOMHighResTimeStamp} timestamp - supplied by requestAnimationFrame.
 */
function animateSlides(timestamp) {
  if (!state.isPlaying) return;
  // FPS Calculation — count this frame before testing the 1s window so the
  // sample isn't off by one frame.
  state.frameCount++;
  const delta = timestamp - state.lastFrameTime;
  if (delta >= 1000) {
    state.fps = Math.round(state.frameCount * 1000 / delta);
    fpsEl.innerText = state.fps;
    state.frameCount = 0;
    state.lastFrameTime = timestamp;
  }
  // Audio Analysis
  if (state.analyser) {
    state.analyser.getByteFrequencyData(state.dataArray);
    // Average the lower 1/4 of the spectrum as a crude bass/beat estimate.
    const bufferLength = state.analyser.frequencyBinCount;
    let sum = 0;
    for (let i = 0; i < bufferLength / 4; i++) {
      sum += state.dataArray[i];
    }
    const bass = sum / (bufferLength / 4);
    const bassNorm = bass / 255;
    // Math: Auto Exposure
    // Formula: TargetExposure = Base + (Bass * Sensitivity), smoothed lerp.
    const targetExposure = 1.0 + (bassNorm * 1.5);
    state.exposure += (targetExposure - state.exposure) * 0.1;
    // Update UI
    expEl.innerText = Math.round(state.exposure * 100) + '%';
    uniforms.uBeat.value = bassNorm;
    // BPM Detection Logic (simple peak detection with hysteresis:
    // lock above 200, release below 100 to avoid double-triggering).
    if (bass > 200 && !state.beatLocked) {
      state.beatLocked = true;
      triggerBPMFlash();
      // Auto transition logic could go here based on strong beats
    }
    if (bass < 100) {
      state.beatLocked = false;
    }
  }
  // Raytracing Shader Update
  // Fix: honour config.raytracingSpeed — it was declared but never used.
  uniforms.uTime.value += 0.01 * config.raytracingSpeed;
  uniforms.uExposure.value = state.exposure; // Sync shader contrast with audio
  renderer.render(scene, camera);
  requestAnimationFrame(animateSlides);
}
/**
 * Visual beat pulse: fills the BPM meter bar, then empties it after 150ms.
 * Fix: the old version toggled 'stat-val' on the BPM counter — the add() was
 * a no-op (the span already carries that class in the markup), so the later
 * remove() permanently stripped its accent styling after the first beat.
 */
function triggerBPMFlash() {
  bpmBar.style.width = '100%';
  setTimeout(() => {
    bpmBar.style.width = '0%';
  }, 150);
}
/**
 * Slides the current image out (scaled/skewed) while the next slides in,
 * then re-schedules itself one bar (4 beats at an assumed 120 BPM) later.
 */
function performTransition() {
  // Fix: stop transitioning while paused; poll so the chain resumes with playback.
  if (!state.isPlaying) {
    setTimeout(performTransition, 500);
    return;
  }
  const slides = document.querySelectorAll('.slide');
  const nextIndex = (state.currentSlide + 1) % imageSources.length;
  const currentSlideEl = slides[state.currentSlide];
  const nextSlideEl = slides[nextIndex];
  // Reset
  nextSlideEl.style.transform = `translateX(100%) scale(1.2)`;
  // Animate using Web Animations API for performance
  const animOut = currentSlideEl.animate([
    { transform: 'translateX(0) scale(1) skewX(0deg)', filter: `brightness(${state.exposure}) contrast(1.1)` },
    { transform: 'translateX(-50%) scale(0.8) skewX(10deg)', filter: `brightness(${state.exposure + 0.5}) contrast(1.5)` }
  ], {
    duration: config.transitionDuration * 1000,
    easing: 'cubic-bezier(0.4, 0.0, 0.2, 1)',
    fill: 'forwards'
  });
  const animIn = nextSlideEl.animate([
    { transform: 'translateX(100%) scale(1.2) skewX(-10deg)' },
    { transform: 'translateX(0) scale(1) skewX(0deg)' }
  ], {
    duration: config.transitionDuration * 1000,
    easing: 'cubic-bezier(0.4, 0.0, 0.2, 1)',
    fill: 'forwards'
  });
  animOut.onfinish = () => {
    // Fix: a forwards-filling animation overrides inline styles, so the
    // resets below had no effect until the animations are cancelled.
    animOut.cancel();
    animIn.cancel();
    currentSlideEl.style.transform = 'translateX(0)'; // Reset logic for next loop
    currentSlideEl.style.zIndex = 1;
    currentSlideEl.querySelector('img').style.filter = 'contrast(1.1)';
    // Pin the incoming slide where the cancelled animation left it.
    nextSlideEl.style.transform = 'translateX(0)';
    nextSlideEl.style.zIndex = 2;
    state.currentSlide = nextIndex;
    // Schedule next slide based on BPM or timer
    const beatInterval = (60 / 120) * 1000; // Assuming 120BPM default
    setTimeout(performTransition, beatInterval * 4); // Change every 4 beats (1 bar)
  };
}
// --- Audio Handling ---
/**
 * Loads an uploaded audio file, starts playback, and kicks off the render
 * and transition loops.
 */
fileInput.addEventListener('change', function(e) {
  const file = e.target.files[0];
  if (!file) return;
  // Fix: release the previous blob URL so repeated uploads don't leak memory.
  if (audioEl.src && audioEl.src.startsWith('blob:')) {
    URL.revokeObjectURL(audioEl.src);
  }
  audioEl.src = URL.createObjectURL(file);
  audioEl.load();
  if (!state.audioContext) {
    setupAudioContext();
  }
  // Fix: browsers create AudioContexts 'suspended' until a user gesture —
  // this change event is one, so resume here or the analyser stays silent.
  if (state.audioContext.state === 'suspended') {
    state.audioContext.resume();
  }
  state.isPlaying = true;
  playBtn.innerText = "Pause";
  audioEl.play();
  initSlideshow();
  requestAnimationFrame(animateSlides);
  // Start transition loop
  setTimeout(performTransition, 2000);
});
/**
 * Toggles playback, lazily building the audio graph on first use.
 */
playBtn.addEventListener('click', () => {
  if (!state.audioContext) setupAudioContext();
  // Fix: resume a gesture-suspended AudioContext, otherwise the analyser
  // (and therefore all audio reactivity) stays silent after the first Play.
  if (state.audioContext.state === 'suspended') state.audioContext.resume();
  if (audioEl.paused) {
    audioEl.play();
    state.isPlaying = true;
    playBtn.innerText = "Pause";
    requestAnimationFrame(animateSlides);
  } else {
    audioEl.pause();
    state.isPlaying = false;
    playBtn.innerText = "Play";
  }
});
/**
 * Builds the WebAudio graph once: audio element -> analyser -> speakers,
 * and stores the context, analyser and frequency buffer on `state`.
 * Callers guard with `state.audioContext` so this runs a single time.
 */
function setupAudioContext() {
  const Ctor = window.AudioContext || window.webkitAudioContext; // Safari prefix fallback
  const ctx = new Ctor();
  const analyser = ctx.createAnalyser();
  analyser.fftSize = 256; // 128 frequency bins
  ctx.createMediaElementSource(audioEl).connect(analyser);
  analyser.connect(ctx.destination); // keep audible output flowing
  state.audioContext = ctx;
  state.analyser = analyser;
  state.dataArray = new Uint8Array(analyser.frequencyBinCount);
}
// --- Resize Handler ---
// Keep the GL canvas and the shader's resolution uniform in sync with the
// stage whenever the window geometry changes.
window.addEventListener('resize', () => {
  const { clientWidth, clientHeight } = stage;
  renderer.setSize(clientWidth, clientHeight);
  uniforms.uResolution.value.set(clientWidth, clientHeight);
});
// Init
initSlideshow();
// Draw one static frame so the overlay isn't blank before playback starts,
// and seed the FPS timer with the first frame's timestamp.
requestAnimationFrame((timestamp) => {
  state.lastFrameTime = timestamp;
  renderer.render(scene, camera);
});
</script>
</body>
</html>