// daw-audio-workstation/components/AudioProcessingPanel.tsx
// (from commit 46c3324 — "🚀 Deploy AudioForge: AI-Powered Digital Audio Workstation")
"use client";
import { useState } from "react";
import { FileAudio, X, Music } from "lucide-react";
import { AudioUpload } from "./AudioUpload";
import { StemSeparationPanel } from "./StemSeparationPanel";
import { MIDIExtractionPanel } from "./MIDIExtractionPanel";
import { AgenticWorkflowPanel } from "./AgenticWorkflowPanel";
import { MusicGenerator } from "./MusicGenerator";
import { useAudioStore } from "@/store/audio-store";
/**
 * Static tab definitions, hoisted to module scope so the array is not
 * re-created on every render.
 * NOTE(review): every tab except "music" reuses the FileAudio icon — confirm
 * whether distinct icons were intended for workflow/stems/midi.
 */
const TABS = [
  { id: "upload", label: "Upload", icon: FileAudio },
  { id: "workflow", label: "Workflow", icon: FileAudio },
  { id: "stems", label: "Stems", icon: FileAudio },
  { id: "midi", label: "MIDI", icon: FileAudio },
  { id: "music", label: "Generate", icon: Music },
] as const;

/** Valid tab ids, derived from TABS so the union cannot drift from the data. */
type TabId = (typeof TABS)[number]["id"];

/**
 * AudioProcessingPanel — tabbed container hosting audio upload and the
 * downstream processing views (agentic workflow, stem separation, MIDI
 * extraction, AI music generation).
 *
 * The uploaded File lives in local state and is threaded into each processing
 * panel. MIDI produced by the MusicGenerator is pushed into the global audio
 * store via `loadGeneratedMIDI`.
 */
export function AudioProcessingPanel() {
  const loadGeneratedMIDI = useAudioStore((state) => state.loadGeneratedMIDI);
  const [uploadedFile, setUploadedFile] = useState<File | null>(null);
  const [activeTab, setActiveTab] = useState<TabId>("upload");

  // AudioUpload also hands us the decoded AudioBuffer, but this panel only
  // needs the File; the underscore keeps unused-parameter lint quiet while
  // preserving the callback's two-argument shape.
  const handleFileUploaded = (file: File, _audioBuffer: AudioBuffer) => {
    setUploadedFile(file);
    setActiveTab("workflow"); // Switch to workflow tab after upload
  };

  return (
    <div className="w-full h-full flex flex-col bg-gray-950">
      {/* Tab Navigation */}
      <div className="flex border-b border-gray-800">
        {TABS.map((tab) => {
          const Icon = tab.icon;
          return (
            <button
              key={tab.id}
              onClick={() => setActiveTab(tab.id)}
              className={`
                flex items-center gap-2 px-4 py-3 text-sm font-medium transition-colors
                ${
                  activeTab === tab.id
                    ? "text-blue-500 border-b-2 border-blue-500"
                    : "text-gray-400 hover:text-gray-300"
                }
              `}
            >
              <Icon className="w-4 h-4" />
              {tab.label}
            </button>
          );
        })}
      </div>
      {/* Content */}
      <div className="flex-1 overflow-y-auto p-4">
        {activeTab === "upload" && (
          <div className="space-y-4">
            <div className="flex items-center justify-between">
              <h2 className="text-xl font-semibold text-white">Audio Upload</h2>
              {uploadedFile && (
                <button
                  onClick={() => setUploadedFile(null)}
                  className="text-gray-400 hover:text-white"
                >
                  <X className="w-5 h-5" />
                </button>
              )}
            </div>
            <AudioUpload
              onFileUploaded={handleFileUploaded}
              maxSize={200}
            />
            {uploadedFile && (
              <div className="bg-gray-900 rounded-lg p-4">
                <p className="text-sm text-gray-400">Uploaded:</p>
                <p className="text-white font-medium">{uploadedFile.name}</p>
                <p className="text-xs text-gray-500 mt-1">
                  {(uploadedFile.size / (1024 * 1024)).toFixed(2)} MB
                </p>
              </div>
            )}
          </div>
        )}
        {activeTab === "workflow" && (
          <div className="space-y-4">
            <h2 className="text-xl font-semibold text-white">Agentic Workflow</h2>
            <AgenticWorkflowPanel
              audioFile={uploadedFile}
              onWorkflowComplete={(result) => {
                console.log("Workflow complete:", result);
              }}
            />
          </div>
        )}
        {activeTab === "stems" && (
          <div className="space-y-4">
            <h2 className="text-xl font-semibold text-white">Stem Separation</h2>
            <StemSeparationPanel
              audioFile={uploadedFile}
              onStemsSeparated={(result) => {
                console.log("Stems separated:", result);
              }}
            />
          </div>
        )}
        {activeTab === "midi" && (
          <div className="space-y-4">
            <h2 className="text-xl font-semibold text-white">MIDI Extraction</h2>
            <MIDIExtractionPanel
              audioFile={uploadedFile}
              onMIDIExtracted={(result) => {
                console.log("MIDI extracted:", result);
              }}
            />
          </div>
        )}
        {activeTab === "music" && (
          <div className="space-y-4">
            <h2 className="text-xl font-semibold text-white">AI Music Generation</h2>
            <MusicGenerator
              onTrackGenerated={async (midiFile, audioFile) => {
                console.log("Music generated:", { midiFile, audioFile });
                try {
                  if (midiFile) {
                    const trackId = await loadGeneratedMIDI(midiFile, "AI Generated Music");
                    console.log("Added generated MIDI to track:", trackId);
                  }
                } catch (error) {
                  console.error("Failed to load generated MIDI:", error);
                }
              }}
            />
          </div>
        )}
      </div>
    </div>
  );
}