<!-- duqing2026 · Update: Automate asset generation and exclude binaries from git (23b4a3c) -->
<!DOCTYPE html>
<html lang="zh-CN">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>Audiogram Studio - 声波视频生成器</title>
<script src="https://unpkg.com/vue@3/dist/vue.global.js"></script>
<script src="https://cdn.tailwindcss.com"></script>
<link href="https://cdnjs.cloudflare.com/ajax/libs/font-awesome/6.0.0/css/all.min.css" rel="stylesheet">
<style>
@import url('https://fonts.googleapis.com/css2?family=Inter:wght@400;600;700&display=swap');
body {
font-family: 'Inter', sans-serif;
background-color: #f3f4f6;
}
.canvas-container {
box-shadow: 0 10px 25px -5px rgba(0, 0, 0, 0.1), 0 10px 10px -5px rgba(0, 0, 0, 0.04);
}
/* Custom scrollbar */
::-webkit-scrollbar {
width: 8px;
}
::-webkit-scrollbar-track {
background: #f1f1f1;
}
::-webkit-scrollbar-thumb {
background: #888;
border-radius: 4px;
}
::-webkit-scrollbar-thumb:hover {
background: #555;
}
</style>
</head>
<body>
<script>
// Pass server-side variables to client
window.SERVER_CONFIG = {
ffmpegAvailable: {{ 'true' if ffmpeg_available else 'false' }}
};
</script>
{% raw %}
<div id="app" class="min-h-screen flex flex-col">
<!-- Header -->
<header class="bg-white shadow-sm z-10">
<div class="max-w-7xl mx-auto px-4 sm:px-6 lg:px-8 py-4 flex justify-between items-center">
<div class="flex items-center space-x-3">
<div class="bg-gradient-to-r from-purple-600 to-indigo-600 text-white p-2 rounded-lg">
<i class="fa-solid fa-wave-square text-xl"></i>
</div>
<div>
<h1 class="text-2xl font-bold text-gray-900 tracking-tight">Audiogram Studio</h1>
<div v-if="!serverConfig.ffmpegAvailable" class="text-xs text-orange-500 font-medium">
<i class="fa-solid fa-circle-info mr-1"></i>WebM 模式 (无 FFmpeg)
</div>
</div>
</div>
<!-- <div class="flex items-center space-x-4">
<a href="https://github.com/duqing26" target="_blank" class="text-gray-500 hover:text-gray-900 transition-colors">
<i class="fa-brands fa-github text-xl"></i>
</a>
</div> -->
</div>
</header>
<!-- Main Content -->
<main class="flex-grow container mx-auto px-4 py-8 flex flex-col lg:flex-row gap-8">
<!-- Sidebar: Controls -->
<div class="w-full lg:w-1/3 space-y-6">
<!-- Global Actions -->
<div class="flex space-x-3">
<button @click="loadDemo" class="flex-1 bg-white border border-gray-200 hover:bg-indigo-50 hover:border-indigo-200 text-indigo-600 py-2.5 rounded-xl font-medium text-sm transition-all shadow-sm">
<i class="fa-solid fa-magic mr-2"></i>加载演示
</button>
<button @click="clearAll" class="flex-1 bg-white border border-gray-200 hover:bg-red-50 hover:border-red-200 text-red-500 py-2.5 rounded-xl font-medium text-sm transition-all shadow-sm">
<i class="fa-solid fa-trash-can mr-2"></i>清空全部
</button>
</div>
<!-- 1. Uploads -->
<div class="bg-white rounded-xl shadow-sm border border-gray-100 p-6 transition-all hover:shadow-md">
<h2 class="text-lg font-semibold text-gray-800 mb-4 flex items-center">
<i class="fa-solid fa-cloud-arrow-up mr-2 text-indigo-500"></i> 素材上传
</h2>
<!-- Audio Upload -->
<div class="mb-4">
<label class="block text-sm font-medium text-gray-700 mb-2">音频文件 (MP3/WAV)</label>
<div class="relative group">
<input type="file" accept="audio/*" @change="handleAudioUpload" class="absolute inset-0 w-full h-full opacity-0 cursor-pointer z-10">
<div class="border-2 border-dashed border-gray-300 rounded-lg p-4 text-center group-hover:border-indigo-500 transition-colors bg-gray-50">
<div v-if="audioFile" class="text-indigo-600 font-medium truncate">
<i class="fa-solid fa-music mr-2"></i> {{ audioFile.name }}
</div>
<div v-else-if="hasAudio" class="text-green-600 font-medium truncate">
<i class="fa-solid fa-music mr-2"></i> 示例音频 (10s)
</div>
<div v-else class="text-gray-500">
<i class="fa-solid fa-file-audio text-2xl mb-2 block text-gray-400"></i>
点击或拖拽上传音频
</div>
</div>
</div>
</div>
<!-- Image Upload -->
<div class="mb-4">
<label class="block text-sm font-medium text-gray-700 mb-2">背景图片</label>
<div class="relative group">
<input type="file" accept="image/*" @change="handleImageUpload" class="absolute inset-0 w-full h-full opacity-0 cursor-pointer z-10">
<div class="border-2 border-dashed border-gray-300 rounded-lg p-4 text-center group-hover:border-indigo-500 transition-colors bg-gray-50">
<div v-if="bgImageSrc && !bgImageSrc.startsWith('data:image')" class="text-indigo-600 font-medium truncate">
<i class="fa-solid fa-image mr-2"></i> 已加载自定义图片
</div>
<div v-else-if="bgImageSrc" class="text-green-600 font-medium truncate">
<i class="fa-solid fa-image mr-2"></i> 已加载默认背景
</div>
<div v-else class="text-gray-500">
<i class="fa-regular fa-image text-2xl mb-2 block text-gray-400"></i>
点击或拖拽上传背景
</div>
</div>
</div>
</div>
<!-- Watermark Upload -->
<div class="mb-4">
<label class="block text-sm font-medium text-gray-700 mb-2">水印/Logo (可选)</label>
<div class="relative group">
<input type="file" accept="image/*" @change="handleWatermarkUpload" class="absolute inset-0 w-full h-full opacity-0 cursor-pointer z-10">
<div class="border border-dashed border-gray-300 rounded-lg p-2 text-center group-hover:border-indigo-500 transition-colors bg-gray-50 text-sm">
<div v-if="watermarkSrc" class="text-indigo-600 truncate">
<i class="fa-solid fa-stamp mr-1"></i> 已加载 Logo
</div>
<div v-else class="text-gray-500">
<i class="fa-regular fa-id-badge mb-1 block"></i>
点击上传 Logo
</div>
</div>
</div>
<div v-if="watermarkSrc" class="mt-2 flex items-center justify-between">
<label class="text-xs text-gray-500">位置</label>
<select v-model="watermarkPos" class="text-xs border-gray-300 rounded p-1">
<option value="top-left">左上</option>
<option value="top-right">右上</option>
<option value="bottom-left">左下</option>
<option value="bottom-right">右下</option>
</select>
</div>
</div>
</div>
<!-- 2. Settings -->
<div class="bg-white rounded-xl shadow-sm border border-gray-100 p-6 transition-all hover:shadow-md">
<h2 class="text-lg font-semibold text-gray-800 mb-4 flex items-center">
<i class="fa-solid fa-sliders mr-2 text-indigo-500"></i> 样式设置
</h2>
<!-- Presets -->
<div class="mb-4">
<label class="block text-sm font-medium text-gray-700 mb-2">快速配色</label>
<div class="flex space-x-2">
<button @click="applyPalette('#ffffff')" class="w-6 h-6 rounded-full bg-white border border-gray-300 shadow-sm" title="White"></button>
<button @click="applyPalette('#fbbf24')" class="w-6 h-6 rounded-full bg-yellow-400 shadow-sm" title="Amber"></button>
<button @click="applyPalette('#f472b6')" class="w-6 h-6 rounded-full bg-pink-400 shadow-sm" title="Pink"></button>
<button @click="applyPalette('#60a5fa')" class="w-6 h-6 rounded-full bg-blue-400 shadow-sm" title="Blue"></button>
<button @click="applyPalette('#34d399')" class="w-6 h-6 rounded-full bg-emerald-400 shadow-sm" title="Green"></button>
</div>
</div>
<!-- Canvas Size -->
<div class="mb-4">
<label class="block text-sm font-medium text-gray-700 mb-2">画布比例</label>
<div class="grid grid-cols-3 gap-2">
<button @click="setAspectRatio(1, 1)" :class="{'bg-indigo-600 text-white': aspectRatio === 1, 'bg-gray-100 text-gray-600 hover:bg-gray-200': aspectRatio !== 1}" class="px-3 py-2 rounded-md text-sm font-medium transition-colors">
1:1 (正方)
</button>
<button @click="setAspectRatio(9, 16)" :class="{'bg-indigo-600 text-white': aspectRatio === 9/16, 'bg-gray-100 text-gray-600 hover:bg-gray-200': aspectRatio !== 9/16}" class="px-3 py-2 rounded-md text-sm font-medium transition-colors">
9:16 (手机)
</button>
<button @click="setAspectRatio(16, 9)" :class="{'bg-indigo-600 text-white': aspectRatio === 16/9, 'bg-gray-100 text-gray-600 hover:bg-gray-200': aspectRatio !== 16/9}" class="px-3 py-2 rounded-md text-sm font-medium transition-colors">
16:9 (横屏)
</button>
</div>
</div>
<!-- Wave Type -->
<div class="mb-4">
<label class="block text-sm font-medium text-gray-700 mb-2">波形样式</label>
<select v-model="waveType" class="w-full border-gray-300 rounded-md shadow-sm focus:border-indigo-500 focus:ring focus:ring-indigo-200 focus:ring-opacity-50 p-2 border">
<option value="bars">柱状图 (Bars)</option>
<option value="line">线条 (Line)</option>
<option value="circle">圆形 (Circle)</option>
<option value="particles">粒子 (Particles)</option>
</select>
</div>
<!-- Bar Settings -->
<div class="mb-4" v-if="waveType === 'bars'">
<div class="grid grid-cols-2 gap-4">
<div>
<label class="block text-xs text-gray-500 mb-1">宽度系数 ({{ barScale }})</label>
<input type="range" v-model.number="barScale" min="0.1" max="10" step="0.1" class="w-full h-1 bg-gray-200 rounded-lg appearance-none cursor-pointer">
</div>
<div>
<label class="block text-xs text-gray-500 mb-1">间距 ({{ barGap }}px)</label>
<input type="range" v-model.number="barGap" min="0" max="20" step="1" class="w-full h-1 bg-gray-200 rounded-lg appearance-none cursor-pointer">
</div>
</div>
</div>
<!-- Color Picker -->
<div class="mb-4">
<label class="block text-sm font-medium text-gray-700 mb-2">波形颜色</label>
<div class="flex items-center space-x-2">
<input type="color" v-model="waveColor" class="h-10 w-full rounded cursor-pointer border-0 p-0">
</div>
</div>
<!-- Text Overlay -->
<div class="mb-4">
<label class="block text-sm font-medium text-gray-700 mb-2">标题文字</label>
<input type="text" v-model="titleText" placeholder="输入标题..." class="w-full border-gray-300 rounded-md shadow-sm focus:border-indigo-500 focus:ring focus:ring-indigo-200 focus:ring-opacity-50 p-2 border mb-2">
<div v-if="titleText">
<div class="flex items-center space-x-2 mb-2">
<label class="block text-xs text-gray-500">颜色</label>
<input type="color" v-model="titleColor" class="h-6 w-8 rounded cursor-pointer border-0 p-0">
</div>
<label class="block text-xs text-gray-500 mb-1">字体大小 ({{ titleFontSize }}px)</label>
<input type="range" v-model.number="titleFontSize" min="20" max="120" step="1" class="w-full h-1 bg-gray-200 rounded-lg appearance-none cursor-pointer">
</div>
</div>
<!-- Position -->
<div class="mb-4">
<label class="block text-sm font-medium text-gray-700 mb-2">波形位置 Y轴 ({{ waveY }}%)</label>
<input type="range" v-model.number="waveY" min="0" max="100" class="w-full h-2 bg-gray-200 rounded-lg appearance-none cursor-pointer accent-indigo-600">
</div>
</div>
<!-- 3. Actions -->
<div class="bg-white rounded-xl shadow-sm border border-gray-100 p-6 transition-all hover:shadow-md">
<h2 class="text-lg font-semibold text-gray-800 mb-4 flex items-center">
<i class="fa-solid fa-clapperboard mr-2 text-indigo-500"></i> 生成操作
</h2>
<div class="space-y-3">
<button @click="togglePlay" :disabled="!hasAudio"
class="w-full py-3 rounded-lg font-semibold flex items-center justify-center space-x-2 transition-all disabled:opacity-50 disabled:cursor-not-allowed"
:class="isPlaying ? 'bg-yellow-500 hover:bg-yellow-600 text-white' : 'bg-green-500 hover:bg-green-600 text-white'">
<i class="fa-solid" :class="isPlaying ? 'fa-pause' : 'fa-play'"></i>
<span>{{ isPlaying ? '暂停预览' : '播放预览' }}</span>
</button>
<button @click="startRecording" :disabled="!hasAudio || isRecording || isConverting"
class="w-full py-3 rounded-lg font-semibold flex items-center justify-center space-x-2 transition-all disabled:opacity-50 disabled:cursor-not-allowed bg-indigo-600 hover:bg-indigo-700 text-white">
<i class="fa-solid" :class="isRecording ? 'fa-spinner fa-spin' : (isConverting ? 'fa-cog fa-spin' : 'fa-video')"></i>
<span>{{ isRecording ? '正在录制...' : (isConverting ? '正在转码为 MP4...' : '开始生成视频') }}</span>
</button>
<div v-if="isRecording" class="text-center text-sm text-gray-500 mt-2">
请等待音频播放结束... ({{ formatTime(currentTime) }} / {{ formatTime(duration) }})
</div>
</div>
</div>
</div>
<!-- Main Area: Canvas Preview -->
<div class="w-full lg:w-2/3 flex flex-col items-center bg-gray-100 rounded-xl border border-gray-200 p-4 lg:p-8 min-h-[500px]">
<div class="canvas-container relative bg-white shadow-2xl rounded-sm overflow-hidden" :style="{width: canvasWidth + 'px', height: canvasHeight + 'px'}">
<canvas ref="canvas" :width="canvasWidth" :height="canvasHeight" class="block"></canvas>
<!-- Overlay Text (Optional) -->
<!-- <div class="absolute bottom-8 left-8 text-white font-bold text-2xl drop-shadow-md">My Podcast Episode</div> -->
</div>
<div class="mt-6 text-gray-500 text-sm flex items-center">
<i class="fa-solid fa-info-circle mr-2"></i>
<span>预览区域即为最终视频效果。建议使用 Chrome 浏览器以获得最佳性能。</span>
</div>
</div>
</main>
<!-- Result Modal -->
<div v-if="showResultModal" class="fixed inset-0 bg-black bg-opacity-50 flex items-center justify-center z-50 p-4">
<div class="bg-white rounded-xl shadow-2xl max-w-lg w-full p-6 animate-fade-in-up">
<div class="flex justify-between items-center mb-4">
<h3 class="text-xl font-bold text-gray-900">视频生成成功!</h3>
<button @click="showResultModal = false" class="text-gray-400 hover:text-gray-600">
<i class="fa-solid fa-times text-xl"></i>
</button>
</div>
<div class="bg-gray-100 rounded-lg p-4 mb-6 text-center">
<p class="text-gray-600 mb-2">您的视频已生成 ({{ videoType.toUpperCase() }}格式)</p>
<p class="text-xs text-gray-500" v-if="videoType === 'webm'">注意:WebM 格式在 Windows/Android 上兼容性良好。如需 MP4,请使用在线工具转换。</p>
<p class="text-xs text-gray-500" v-else>MP4 格式,兼容所有平台 (iOS/Android/PC)。</p>
</div>
<div class="flex space-x-3">
<a :href="videoUrl" :download="'audiogram.' + videoType" class="flex-1 bg-indigo-600 text-white py-3 rounded-lg font-semibold text-center hover:bg-indigo-700 transition-colors">
<i class="fa-solid fa-download mr-2"></i> 下载视频
</a>
</div>
</div>
</div>
</div>
{% endraw %}
<script>
// Vue 3 composition API entry points (global CDN build exposes `Vue`).
const { createApp, ref, onMounted, watch, computed } = Vue;
createApp({
setup() {
// Template ref bound to the preview <canvas> element.
const canvas = ref(null);
// Uploaded audio File (or a fake {name} object for the demo track).
const audioFile = ref(null);
const hasAudio = ref(false); // Track if audio is loaded (file or demo)
// Background image source: data URL (upload) or static path (demo).
const bgImageSrc = ref(null);
// UI state flags.
const isPlaying = ref(false);
const isRecording = ref(false);
const isConverting = ref(false);
const showResultModal = ref(false);
const videoUrl = ref('');
const videoType = ref('webm');
// Injected by the server-rendered inline script; default to no-FFmpeg mode.
const serverConfig = ref(window.SERVER_CONFIG || { ffmpegAvailable: false });
// Settings
const aspectRatio = ref(1); // 1:1 default
const baseSize = 600; // Base resolution size
const waveType = ref('bars');
const waveColor = ref('#ffffff');
const waveY = ref(80); // Y position percentage
const barScale = ref(2.5);
const barGap = ref(1);
const titleText = ref('');
const titleFontSize = ref(40);
const titleColor = ref('#ffffff');
// Watermark/logo: data URL or static path, plus corner placement.
const watermarkSrc = ref(null);
const watermarkPos = ref('top-right');
// Image Cache
// Decoded Image objects kept in sync with the src refs by watchers below,
// so drawCanvas never has to wait on image loading.
const bgImageObj = ref(null);
const watermarkObj = ref(null);
// Keep a decoded Image object in sync with a source ref (data URL or path).
// Returns a watcher callback that loads the new source into `objRef` and
// repaints the idle preview once the image is ready (or cleared).
const syncImage = (objRef) => (newVal) => {
  if (!newVal) {
    objRef.value = null;
    if (!isPlaying.value) drawCanvas();
    return;
  }
  const img = new Image();
  img.crossOrigin = "anonymous";
  img.onload = () => {
    objRef.value = img;
    if (!isPlaying.value) drawCanvas();
  };
  img.src = newVal;
};
watch(bgImageSrc, syncImage(bgImageObj));
watch(watermarkSrc, syncImage(watermarkObj));
// Audio Context
// WebAudio plumbing lives outside Vue reactivity: none of these need to
// trigger re-renders.
let audioCtx = null;
let analyser = null;
let source = null;
let dataArray = null;
let audioBuffer = null;
let startTime = 0;
let animationId = null;
// Playback state
const currentTime = ref(0);
const duration = ref(0);
// Recording
let mediaRecorder = null;
let recordedChunks = [];
let dest = null; // Audio destination for recording
// Canvas is a fixed base width; height follows the chosen aspect ratio.
const canvasWidth = computed(() => {
return baseSize;
});
const canvasHeight = computed(() => {
return baseSize / aspectRatio.value;
});
// Init
onMounted(() => {
drawCanvas();
loadDemo();
});
// Repaint the idle preview whenever any style setting changes
// (during playback the animation loop repaints every frame anyway).
watch([aspectRatio, waveType, waveColor, waveY, barScale, barGap, bgImageSrc, titleText, titleFontSize, titleColor, watermarkSrc, watermarkPos], () => {
if (!isPlaying.value) drawCanvas();
});
// Switch the canvas aspect ratio (stored as width / height) and refresh
// the idle preview once the canvas element has been resized.
function setAspectRatio(width, height) {
  aspectRatio.value = width / height;
  // Give the DOM a beat to apply the new canvas dimensions before drawing.
  setTimeout(() => {
    if (!isPlaying.value) drawCanvas();
  }, 50);
}
// Lazily create the shared AudioContext (idempotent; Safari needs the
// webkit-prefixed constructor).
function initAudioCtx() {
  if (audioCtx) return;
  const Ctor = window.AudioContext || window.webkitAudioContext;
  audioCtx = new Ctor();
}
// Populate the editor with the bundled demo assets: a 10s audio clip,
// a background image, a logo watermark and a title.
async function loadDemo() {
  initAudioCtx();
  try {
    const res = await fetch('/static/demo.wav');
    const bytes = await res.arrayBuffer();
    audioCtx.decodeAudioData(bytes, function(decoded) {
      audioBuffer = decoded;
      duration.value = decoded.duration;
      hasAudio.value = true;
      // Fake File-like object so the upload box shows a filename.
      audioFile.value = { name: "demo.wav" };
      if (!isPlaying.value) drawCanvas();
    });
  } catch (e) {
    console.error("Demo audio load failed", e);
  }
  // Demo visuals (watchers pick these up and decode the images).
  bgImageSrc.value = '/static/demo-bg.jpg';
  watermarkSrc.value = '/static/demo-logo.png';
  watermarkPos.value = 'top-right';
  titleText.value = "Audiogram Demo";
  titleColor.value = "#ffffff";
  // Repaint after the new settings have propagated.
  setTimeout(drawCanvas, 100);
}
// Reset every user-provided asset and restore the default styling.
function clearAll() {
stopPlayback();
audioFile.value = null;
hasAudio.value = false;
audioBuffer = null;
bgImageSrc.value = null;
watermarkSrc.value = null;
titleText.value = '';
// Reset to defaults
waveColor.value = '#ffffff';
waveY.value = 80;
// Reset canvas (after the image watchers have cleared their caches)
setTimeout(drawCanvas, 50);
}
// Load a user-selected audio file, enforce the size cap, and decode it
// into an AudioBuffer for visualization/recording.
// Fix: decode and read failures were silently swallowed, leaving the UI
// showing a filename for audio that never loaded — report them instead.
function handleAudioUpload(event) {
  const file = event.target.files[0];
  if (!file) return;
  // 100MB limit
  if (file.size > 100 * 1024 * 1024) {
    alert("音频文件过大 (超过 100MB)。请上传较小的文件。");
    return;
  }
  audioFile.value = file;
  initAudioCtx();
  // Decode audio data
  const reader = new FileReader();
  reader.onload = function(e) {
    audioCtx.decodeAudioData(e.target.result, function(buffer) {
      audioBuffer = buffer;
      duration.value = buffer.duration;
      hasAudio.value = true;
      drawCanvas(); // Redraw to clear previous state
    }, function(err) {
      console.error("Audio decode failed", err);
      alert("音频解码失败,请尝试其他文件。");
      audioFile.value = null;
      hasAudio.value = false;
    });
  };
  reader.onerror = function() {
    console.error("Audio file read failed");
    audioFile.value = null;
  };
  reader.readAsArrayBuffer(file);
}
// Read a user-selected background image as a data URL (capped at 10MB);
// the bgImageSrc watcher decodes it for drawing.
function handleImageUpload(event) {
  const file = event.target.files[0];
  if (!file) return;
  const MAX_BYTES = 10 * 1024 * 1024;
  if (file.size > MAX_BYTES) {
    alert("图片文件过大 (超过 10MB)。");
    return;
  }
  const reader = new FileReader();
  reader.onload = (e) => {
    bgImageSrc.value = e.target.result;
  };
  reader.readAsDataURL(file);
}
// Read a user-selected watermark/logo image as a data URL.
// Fix: apply the same 10MB cap used by the background-image handler;
// previously arbitrarily large files were accepted here.
function handleWatermarkUpload(event) {
  const file = event.target.files[0];
  if (!file) return;
  if (file.size > 10 * 1024 * 1024) {
    alert("图片文件过大 (超过 10MB)。");
    return;
  }
  const reader = new FileReader();
  reader.onload = function(e) {
    watermarkSrc.value = e.target.result;
  };
  reader.readAsDataURL(file);
}
// Apply a preset swatch color to the waveform.
function applyPalette(color) {
waveColor.value = color;
}
// Format a duration in seconds as "m:ss" (e.g. 65 -> "1:05").
function formatTime(seconds) {
  const whole = Math.floor(seconds);
  const mins = Math.floor(whole / 60);
  const secs = whole % 60;
  return `${mins}:${String(secs).padStart(2, '0')}`;
}
// Flip between preview playback and the stopped state.
function togglePlay() {
  isPlaying.value ? stopPlayback() : startPlayback();
}
// Build the WebAudio graph and start playback from the beginning.
// isRecordingMode: when true, also route audio into a MediaStreamDestination
// (`dest`) so startRecording() can mux it with the canvas video track, and
// stop the recorder when the track ends naturally.
function startPlayback(isRecordingMode = false) {
if (!audioBuffer) return;
// Browsers suspend the context until a user gesture; resume if needed.
if (audioCtx.state === 'suspended') {
audioCtx.resume();
}
source = audioCtx.createBufferSource();
source.buffer = audioBuffer;
analyser = audioCtx.createAnalyser();
analyser.fftSize = 256; // 128 frequency bins via frequencyBinCount
// Graph: source -> analyser (for visualization), plus outputs below.
source.connect(analyser);
if (isRecordingMode) {
// Recording sink consumed by startRecording() for the audio track.
dest = audioCtx.createMediaStreamDestination();
source.connect(dest);
// Keep playback audible on the speakers while recording.
source.connect(audioCtx.destination);
} else {
source.connect(audioCtx.destination);
}
dataArray = new Uint8Array(analyser.frequencyBinCount);
source.start(0);
startTime = audioCtx.currentTime;
isPlaying.value = true;
// Fires both on natural end and on manual source.stop().
source.onended = () => {
isPlaying.value = false;
if (isRecordingMode) {
stopRecording();
} else {
cancelAnimationFrame(animationId);
currentTime.value = 0;
drawCanvas(); // Reset view
}
};
animate();
}
// Stop preview playback and the animation loop.
// Fix: AudioBufferSourceNode.stop() throws InvalidStateError when the node
// was never started or is already stopped; guard so a stray call can't
// crash the handler.
function stopPlayback() {
  if (source) {
    try {
      source.stop();
    } catch (e) {
      // Node already stopped — nothing to do.
    }
    source = null;
  }
  isPlaying.value = false;
  cancelAnimationFrame(animationId);
}
// Per-frame loop: advance the displayed clock, sample the analyser,
// and repaint the canvas. Re-schedules itself while playing.
function animate() {
  if (!isPlaying.value) return;
  animationId = requestAnimationFrame(animate);
  // Clamp the displayed time to the track duration.
  const elapsed = audioCtx.currentTime - startTime;
  currentTime.value = Math.min(elapsed, duration.value);
  if (analyser) {
    analyser.getByteFrequencyData(dataArray);
  }
  drawCanvas(dataArray);
}
// Paint one complete frame: background, waveform (live data or idle
// placeholder), title text, then watermark.
// audioData: Uint8Array of byte frequency magnitudes, or null when idle.
function drawCanvas(audioData = null) {
const ctx = canvas.value.getContext('2d');
const w = canvas.value.width;
const h = canvas.value.height;
// 1. Background: solid base, then cover-fit image or default gradient.
ctx.fillStyle = '#111';
ctx.fillRect(0, 0, w, h);
if (bgImageObj.value) {
const img = bgImageObj.value;
// Scale image to cover (like CSS background-size: cover), centered.
const scale = Math.max(w / img.width, h / img.height);
const x = (w / 2) - (img.width / 2) * scale;
const y = (h / 2) - (img.height / 2) * scale;
try {
ctx.drawImage(img, x, y, img.width * scale, img.height * scale);
} catch (e) {
// Ignore — e.g. image became unusable; the solid base remains.
}
} else if (!bgImageSrc.value) {
// Default Gradient (only when no source is set at all; while a source
// is still decoding we keep the solid base instead).
const grd = ctx.createLinearGradient(0, 0, w, h);
grd.addColorStop(0, '#4f46e5');
grd.addColorStop(1, '#9333ea');
ctx.fillStyle = grd;
ctx.fillRect(0, 0, w, h);
}
// 2. Waveform
if (audioData) {
drawWaveform(ctx, w, h, audioData);
} else {
// Idle state: a faint horizontal line at the configured Y position.
ctx.beginPath();
ctx.strokeStyle = waveColor.value;
ctx.globalAlpha = 0.5;
const y = h * (waveY.value / 100);
ctx.moveTo(0, y);
ctx.lineTo(w, y);
ctx.stroke();
ctx.globalAlpha = 1.0;
}
// 3. Title Text (centered horizontally, fixed 100px from the top)
if (titleText.value) {
ctx.fillStyle = titleColor.value;
ctx.font = 'bold ' + titleFontSize.value + 'px sans-serif';
ctx.textAlign = 'center';
ctx.shadowColor = 'rgba(0,0,0,0.5)';
ctx.shadowBlur = 10;
ctx.fillText(titleText.value, w / 2, 100);
ctx.shadowBlur = 0;
}
// 4. Watermark (square, sized to 15% of the shorter canvas edge)
if (watermarkObj.value) {
const logo = watermarkObj.value;
const logoSize = Math.min(w, h) * 0.15; // 15% of min dimension
const padding = 20;
let lx = padding;
let ly = padding;
if (watermarkPos.value.includes('right')) {
lx = w - logoSize - padding;
}
if (watermarkPos.value.includes('bottom')) {
ly = h - logoSize - padding;
}
try {
ctx.drawImage(logo, lx, ly, logoSize, logoSize);
} catch (e) {
// Ignore — skip the watermark this frame rather than crash the loop.
}
}
}
// Render the active waveform style onto the canvas.
// ctx: 2D context; w/h: canvas size in px; data: Uint8Array of byte
// frequency magnitudes (0-255) from the AnalyserNode.
// Fixes: removed leftover draft-reasoning comments that shipped in the bars
// branch, and pinned lineWidth in the circle branch (it previously inherited
// whatever the last draw left behind — 3 after 'line', 1 on a fresh context).
function drawWaveform(ctx, w, h, data) {
  const centerY = h * (waveY.value / 100);
  ctx.fillStyle = waveColor.value;
  ctx.strokeStyle = waveColor.value;
  const bufferLength = data.length;
  // High bins are mostly empty for typical audio; use the lower 70%.
  const usableLength = Math.floor(bufferLength * 0.7);
  if (waveType.value === 'bars' || waveType.value === 'mirrored-bars') {
    const barWidth = (w / usableLength) * barScale.value;
    let x = 0;
    for (let i = 0; i < usableLength; i++) {
      const v = data[i] / 255.0;
      const barHeight = v * h * 0.4; // Max height: 40% of canvas
      if (waveType.value === 'mirrored-bars') {
        // Centered vertically around centerY.
        ctx.fillRect(x, centerY - barHeight / 2, barWidth, barHeight);
      } else {
        // Bottom-aligned: bars grow upward from centerY (the "floor").
        ctx.fillRect(x, centerY - barHeight, barWidth, barHeight);
      }
      x += barWidth + barGap.value;
    }
  } else if (waveType.value === 'line') {
    ctx.lineWidth = 3;
    ctx.beginPath();
    const sliceWidth = w * 1.0 / usableLength;
    let x = 0;
    for (let i = 0; i < usableLength; i++) {
      const v = data[i] / 128.0;
      const y = v * (h / 4) + centerY - (h / 4); // Oscillates around centerY
      if (i === 0) ctx.moveTo(x, y);
      else ctx.lineTo(x, y);
      x += sliceWidth;
    }
    ctx.stroke();
  } else if (waveType.value === 'circle') {
    // Radial spikes around a ring centered at (w/2, centerY).
    ctx.lineWidth = 2; // Deterministic stroke width (see fix note above).
    const radius = Math.min(w, h) * 0.25;
    const centerX = w / 2;
    ctx.beginPath();
    for (let i = 0; i < usableLength; i++) {
      const v = data[i] / 255.0;
      const barHeight = v * (radius * 0.8);
      const angle = (i / usableLength) * Math.PI * 2;
      const x1 = centerX + Math.cos(angle) * radius;
      const y1 = centerY + Math.sin(angle) * radius;
      const x2 = centerX + Math.cos(angle) * (radius + barHeight);
      const y2 = centerY + Math.sin(angle) * (radius + barHeight);
      ctx.moveTo(x1, y1);
      ctx.lineTo(x2, y2);
    }
    ctx.stroke();
  } else if (waveType.value === 'particles') {
    // Dots pushed outward from (w/2, waveY) proportional to magnitude.
    const centerX = w / 2;
    const cy = h * (waveY.value / 100);
    for (let i = 0; i < usableLength; i += 2) {
      const v = data[i];
      if (v < 10) continue; // Skip near-silence
      const angle = (i / usableLength) * Math.PI * 2;
      const radius = v * (h * 0.005);
      const distance = 50 + (v * 0.5);
      const x = centerX + Math.cos(angle) * distance;
      const y = cy + Math.sin(angle) * distance;
      ctx.beginPath();
      ctx.arc(x, y, radius, 0, Math.PI * 2);
      // fillStyle is already waveColor (set at the top of the function).
      ctx.fill();
    }
  }
}
// Record the canvas (30 FPS) plus the audio into a WebM, then try to have
// the server transcode it to MP4 via POST /convert; on any failure we fall
// back to offering the raw WebM blob for download.
async function startRecording() {
if (!audioBuffer) return;
if (isPlaying.value) stopPlayback();
isRecording.value = true;
recordedChunks = [];
// Setup MediaRecorder
const canvasStream = canvas.value.captureStream(30); // 30 FPS
// Start Playback in recording mode — this creates the `dest`
// MediaStreamDestination whose audio tracks we mux in below, and its
// onended handler calls stopRecording() when the track finishes.
startPlayback(true);
// Combine the canvas video track with the WebAudio audio track.
const combinedStream = new MediaStream([
...canvasStream.getVideoTracks(),
...dest.stream.getAudioTracks()
]);
try {
// Codec negotiation: prefer VP9, fall back to VP8, then generic WebM.
const options = { mimeType: 'video/webm;codecs=vp9' };
if (!MediaRecorder.isTypeSupported(options.mimeType)) {
options.mimeType = 'video/webm;codecs=vp8';
}
if (!MediaRecorder.isTypeSupported(options.mimeType)) {
options.mimeType = 'video/webm';
}
mediaRecorder = new MediaRecorder(combinedStream, options);
mediaRecorder.ondataavailable = (event) => {
if (event.data.size > 0) {
recordedChunks.push(event.data);
}
};
mediaRecorder.onstop = async () => {
const blob = new Blob(recordedChunks, { type: 'video/webm' });
// Upload the WebM for server-side MP4 conversion.
isConverting.value = true;
const formData = new FormData();
formData.append('video', blob, 'recording.webm');
try {
const response = await fetch('/convert', {
method: 'POST',
body: formData
});
if (response.ok) {
const data = await response.json();
videoUrl.value = data.url;
videoType.value = 'mp4';
} else {
// Server refused/failed: serve the local WebM blob instead.
const errorData = await response.json().catch(() => ({}));
console.warn("Server returned error:", response.status, errorData);
videoUrl.value = URL.createObjectURL(blob);
videoType.value = 'webm';
if (response.status === 503 && errorData.fallback) {
// FFmpeg missing but handled gracefully — just show the result.
} else {
// Unexpected server error; still usable via WebM.
console.error("Conversion failed, using WebM fallback");
}
}
} catch (e) {
console.error("Network error, falling back to WebM", e);
videoUrl.value = URL.createObjectURL(blob);
videoType.value = 'webm';
} finally {
isConverting.value = false;
showResultModal.value = true;
isRecording.value = false;
}
};
mediaRecorder.start();
} catch (e) {
console.error("Recording error:", e);
alert("录制初始化失败,您的浏览器可能不支持 MediaRecorder。");
isRecording.value = false;
stopPlayback();
}
}
// Ask the MediaRecorder to finalize; its onstop handler does the upload.
// (Audio playback ends on its own via source.onended.)
function stopRecording() {
  if (!mediaRecorder) return;
  if (mediaRecorder.state !== 'inactive') {
    mediaRecorder.stop();
  }
}
return {
canvas,
audioFile,
bgImageSrc,
isPlaying,
isRecording,
aspectRatio,
waveType,
waveColor,
waveY,
canvasWidth,
canvasHeight,
currentTime,
duration,
showResultModal,
videoUrl,
isConverting,
videoType,
serverConfig,
titleText,
watermarkSrc,
watermarkPos,
handleAudioUpload,
handleImageUpload,
handleWatermarkUpload,
applyPalette,
setAspectRatio,
togglePlay,
startRecording,
formatTime,
loadDemo,
clearAll
};
}
}).mount('#app');
</script>
</body>
</html>