import React, { useState, useEffect, useRef, useCallback, Suspense, Component } from 'react';
import {
Container,
Box,
Tabs,
Tab,
Typography,
Paper,
Button,
IconButton,
TextField,
Alert,
CircularProgress,
Grid,
Card,
CardContent,
Chip,
Divider,
Dialog,
DialogTitle,
DialogContent,
DialogActions,
LinearProgress,
Slider,
FormControl,
InputLabel,
Select,
MenuItem,
Accordion,
AccordionSummary,
AccordionDetails,
List,
ListItem,
ListItemText,
ThemeProvider,
createTheme,
Backdrop,
Fade,
Checkbox,
FormControlLabel,
Snackbar
} from '@mui/material';
import {
Add as AddIcon,
Delete as DeleteIcon,
Upload as UploadIcon,
PlayArrow as PlayIcon,
Stop as StopIcon,
Download as DownloadIcon,
Refresh as RefreshIcon,
ExpandMore as ExpandMoreIcon,
CloudDownload as CloudDownloadIcon,
Close as CloseIcon,
Info as InfoIcon,
MenuBook as MenuBookIcon,
Settings as SettingsIcon,
FolderOpen as FolderOpenIcon
} from '@mui/icons-material';
import { useDropzone } from 'react-dropzone';
import ReactPlayer from './react-player-config';
import axios from 'axios';
import { LineChart, Line, XAxis, YAxis, CartesianGrid, Tooltip, ResponsiveContainer } from 'recharts';
// Error boundary wrapped around ReactPlayer instances: if the player throws
// during render, we flip hasError and fall back to a native audio element
// (or nothing when no audioUrl prop was given).
// NOTE(review): the fallback JSX markup appears to have been stripped from
// this file (the `?` branch at the ternary below is empty) — restore the
// original <audio> element from version control.
class AudioPlayerErrorBoundary extends Component {
constructor(props) {
super(props);
// hasError is set once a child render throws; never reset here.
this.state = { hasError: false };
}
static getDerivedStateFromError() {
return { hasError: true };
}
componentDidCatch(error, info) {
// Non-fatal: log and let render() show the native-audio fallback.
console.warn('ReactPlayer failed to load, using native audio:', error);
}
render() {
if (this.state.hasError) {
return this.props.audioUrl
?
: null;
}
return this.props.children;
}
}
// Dark, GitHub-inspired MUI theme. The single accent color is #3a6fec, i.e.
// rgb(58, 111, 236). All translucent accent tints below are normalized to
// rgba(58, 111, 236, …); the previous version mixed leftover tints from
// earlier palette iterations (orange rgba(255, 107, 53, …) and several
// off-blues rgba(53, 100/124/134/147, 255, …)) that no longer matched the
// primary color.
const theme = createTheme({
  palette: {
    mode: 'dark',
    primary: {
      // Flat accent: main/light/dark intentionally identical.
      main: '#3a6fec',
      light: '#3a6fec',
      dark: '#3a6fec',
      contrastText: '#ffffff',
    },
    secondary: {
      main: '#9198A1',
      light: '#C9D1D9',
      dark: '#6E7681',
      contrastText: '#ffffff',
    },
    background: {
      default: '#0D1117',
      paper: '#161B22',
    },
    text: {
      primary: '#E6EDF3',
      secondary: '#9198A1',
    },
    divider: '#30363D',
    error: {
      main: '#DC5145',
    },
    warning: {
      main: '#EB8B3A',
    },
    success: {
      // NOTE(review): success reuses the primary blue rather than a green;
      // presumed intentional branding — confirm before changing.
      main: '#3A6FEC',
    },
  },
  typography: {
    fontFamily: [
      'Helvetica Neue',
      'Helvetica',
      'Arial',
      'sans-serif'
    ].join(','),
    // Per-variant fontFamily repeats the base stack; redundant but explicit.
    h1: {
      fontFamily: 'Helvetica Neue, Helvetica, Arial, sans-serif',
      fontWeight: 300,
    },
    h2: {
      fontFamily: 'Helvetica Neue, Helvetica, Arial, sans-serif',
      fontWeight: 300,
    },
    h3: {
      fontFamily: 'Helvetica Neue, Helvetica, Arial, sans-serif',
      fontWeight: 400,
    },
    h4: {
      fontFamily: 'Helvetica Neue, Helvetica, Arial, sans-serif',
      fontWeight: 400,
    },
    h5: {
      fontFamily: 'Helvetica Neue, Helvetica, Arial, sans-serif',
      fontWeight: 500,
    },
    h6: {
      fontFamily: 'Helvetica Neue, Helvetica, Arial, sans-serif',
      fontWeight: 500,
    },
    body1: {
      fontFamily: 'Helvetica Neue, Helvetica, Arial, sans-serif',
      fontWeight: 400,
    },
    body2: {
      fontFamily: 'Helvetica Neue, Helvetica, Arial, sans-serif',
      fontWeight: 400,
    },
    button: {
      fontFamily: 'Helvetica Neue, Helvetica, Arial, sans-serif',
      fontWeight: 500,
    },
  },
  components: {
    MuiCssBaseline: {
      styleOverrides: {
        body: {
          background: 'radial-gradient(ellipse at center, #0D1117 0%, #1C2128 50%, #0A0D10 100%)',
          minHeight: '100vh',
        },
        // Slim dark scrollbars: WebKit pseudo-elements plus the Firefox
        // scrollbarWidth/scrollbarColor equivalents below.
        '*::-webkit-scrollbar': {
          width: '8px',
          height: '8px',
        },
        '*::-webkit-scrollbar-track': {
          background: '#30363D',
          borderRadius: '4px',
        },
        '*::-webkit-scrollbar-thumb': {
          background: '#484F58',
          borderRadius: '4px',
          '&:hover': {
            background: '#6E7681',
          },
        },
        '*::-webkit-scrollbar-corner': {
          background: '#30363D',
        },
        '*': {
          scrollbarWidth: 'thin',
          scrollbarColor: '#484F58 #30363D',
        },
      },
    },
    MuiPaper: {
      styleOverrides: {
        root: {
          backgroundColor: '#161B22',
          backgroundImage: 'none',
          border: '1px solid #30363D',
        },
      },
    },
    MuiCard: {
      styleOverrides: {
        root: {
          backgroundColor: '#161B22',
          backgroundImage: 'none',
          border: '1px solid #30363D',
          '&:hover': {
            borderColor: '#484F58',
            boxShadow: '0 8px 24px rgba(0, 0, 0, 0.4), 0 4px 12px rgba(0, 0, 0, 0.2)',
          },
        },
      },
    },
    MuiButton: {
      styleOverrides: {
        root: {
          textTransform: 'none',
          borderRadius: '8px',
          fontWeight: 500,
        },
        contained: {
          boxShadow: '0 1px 3px rgba(0, 0, 0, 0.3)',
          '&:hover': {
            boxShadow: '0 2px 6px rgba(0, 0, 0, 0.4)',
          },
        },
        outlined: {
          borderColor: '#30363D',
          '&:hover': {
            borderColor: '#3a6fec',
            // Normalized from orange rgba(255, 107, 53, 0.08).
            backgroundColor: 'rgba(58, 111, 236, 0.08)',
          },
        },
      },
    },
    MuiTextField: {
      styleOverrides: {
        root: {
          '& .MuiOutlinedInput-root': {
            backgroundColor: '#0D1117',
            '& fieldset': {
              borderColor: '#30363D',
            },
            '&:hover fieldset': {
              borderColor: '#6E7681',
            },
            '&.Mui-focused fieldset': {
              borderColor: '#3a6fec',
            },
          },
        },
      },
    },
    MuiSelect: {
      styleOverrides: {
        root: {
          backgroundColor: '#0D1117',
          '& .MuiOutlinedInput-notchedOutline': {
            borderColor: '#30363D',
          },
          '&:hover .MuiOutlinedInput-notchedOutline': {
            borderColor: '#6E7681',
          },
          '&.Mui-focused .MuiOutlinedInput-notchedOutline': {
            borderColor: '#3a6fec',
          },
        },
      },
    },
    MuiMenuItem: {
      styleOverrides: {
        root: {
          backgroundColor: '#161B22',
          '&:hover': {
            backgroundColor: '#21262D',
          },
          '&.Mui-selected': {
            backgroundColor: 'rgba(58, 111, 236, 0.12)',
            '&:hover': {
              backgroundColor: 'rgba(58, 111, 236, 0.2)',
            },
          },
        },
      },
    },
    MuiChip: {
      styleOverrides: {
        root: {
          backgroundColor: '#21262D',
          color: '#E6EDF3',
          '&.MuiChip-colorPrimary': {
            backgroundColor: 'rgba(58, 111, 236, 0.2)',
            color: '#3a6fec',
          },
        },
        outlined: {
          borderColor: '#30363D',
          '&.MuiChip-colorPrimary': {
            borderColor: '#3a6fec',
            color: '#3a6fec'
          },
        },
      },
    },
    MuiAccordion: {
      styleOverrides: {
        root: {
          backgroundColor: '#161B22',
          border: '1px solid #30363D',
          // Suppress MUI's default divider pseudo-element and the expand
          // margin jump.
          '&:before': {
            display: 'none',
          },
          '&.Mui-expanded': {
            margin: 0,
          },
        },
      },
    },
    MuiAccordionSummary: {
      styleOverrides: {
        root: {
          backgroundColor: '#21262D',
          '&:hover': {
            backgroundColor: '#262C36',
          },
        },
      },
    },
    MuiDialog: {
      styleOverrides: {
        paper: {
          backgroundColor: '#161B22',
          border: '1px solid #30363D',
          borderRadius: 12,
          boxShadow: '0 25px 50px -12px rgba(0, 0, 0, 0.5)',
        },
      },
    },
    MuiDialogTitle: {
      styleOverrides: {
        root: {
          backgroundColor: '#21262D',
          borderBottom: '1px solid #30363D',
          color: '#F0F6FC',
          fontWeight: 600,
          fontSize: '1.25rem',
        },
      },
    },
    MuiDialogContent: {
      styleOverrides: {
        root: {
          backgroundColor: '#161B22',
          color: '#C9D1D9',
        },
      },
    },
    MuiDialogActions: {
      styleOverrides: {
        root: {
          backgroundColor: '#161B22',
          borderTop: '1px solid #30363D',
          padding: '16px 24px',
        },
      },
    },
    MuiListItem: {
      styleOverrides: {
        root: {
          '&:hover': {
            backgroundColor: '#21262D',
          },
          '&.Mui-selected': {
            backgroundColor: 'rgba(58, 111, 236, 0.12)',
            '&:hover': {
              backgroundColor: 'rgba(58, 111, 236, 0.2)',
            },
          },
        },
      },
    },
    MuiCheckbox: {
      styleOverrides: {
        root: {
          color: '#6E7681',
          '&.Mui-checked': {
            color: '#3a6fec',
          },
          '&:hover': {
            backgroundColor: 'rgba(58, 111, 236, 0.08)',
          },
        },
      },
    },
    MuiFormControlLabel: {
      styleOverrides: {
        label: {
          color: '#C9D1D9',
          fontSize: '0.875rem',
        },
      },
    },
    MuiSlider: {
      styleOverrides: {
        root: {
          color: '#3a6fec',
        },
        rail: {
          backgroundColor: '#30363D',
        },
        track: {
          backgroundColor: '#3a6fec',
        },
        thumb: {
          backgroundColor: '#3a6fec',
          '&:hover': {
            boxShadow: '0 0 0 8px rgba(58, 111, 236, 0.16)',
          },
        },
      },
    },
    MuiLinearProgress: {
      styleOverrides: {
        root: {
          backgroundColor: '#30363D',
        },
        bar: {
          backgroundColor: '#3a6fec',
        },
      },
    },
    MuiCircularProgress: {
      styleOverrides: {
        root: {
          color: '#3a6fec',
        },
      },
    },
    MuiTabs: {
      styleOverrides: {
        root: {
          '& .MuiTabs-indicator': {
            backgroundColor: '#3a6fec',
          },
        },
      },
    },
    MuiTab: {
      styleOverrides: {
        root: {
          color: '#9198A1',
          '&.Mui-selected': {
            color: '#3a6fec',
          },
          '&:hover': {
            color: '#E6EDF3',
          },
        },
      },
    },
    MuiBackdrop: {
      styleOverrides: {
        root: {
          backgroundColor: 'rgba(0, 0, 0, 0.8)',
        },
      },
    },
    MuiDivider: {
      styleOverrides: {
        root: {
          borderColor: '#30363D',
        },
      },
    },
    MuiIconButton: {
      styleOverrides: {
        root: {
          color: '#9198A1',
          '&:hover': {
            // Normalized from orange rgba(255, 107, 53, 0.08).
            backgroundColor: 'rgba(58, 111, 236, 0.08)',
            color: '#3a6fec',
          },
        },
      },
    },
    MuiContainer: {
      styleOverrides: {
        root: {
          // Let the body's radial gradient show through containers.
          backgroundColor: 'transparent',
          background: 'transparent',
        },
      },
    },
  },
});
// Standard MUI tab-panel wrapper: renders children only while this panel's
// index matches the active tab value.
// NOTE(review): the wrapping JSX element appears to have been stripped from
// this file — as written the return body is not valid JS; restore the markup
// from version control.
function TabPanel({ children, value, index, ...other }) {
return (
{value === index && (
{children}
)}
);
}
function AudioUploadRow({ index, data, onChange, onRemove }) {
const [audioFile, setAudioFile] = useState(null);
const [audioUrl, setAudioUrl] = useState('');
useEffect(() => {
if (!data.file && !data.audioUrl) {
if (audioUrl) {
URL.revokeObjectURL(audioUrl);
}
setAudioFile(null);
setAudioUrl('');
}
}, [data.file, data.audioUrl, audioUrl]);
const { getRootProps, getInputProps, isDragActive } = useDropzone({
accept: {
'audio/*': ['.mp3', '.wav', '.flac', '.m4a', '.aac']
},
multiple: false,
onDrop: (acceptedFiles) => {
const file = acceptedFiles[0];
setAudioFile(file);
setAudioUrl(URL.createObjectURL(file));
onChange(index, { ...data, file, audioUrl: URL.createObjectURL(file) });
}
});
return (
{audioFile ? (
{audioFile.name}
{audioUrl && (
}>
)}
) : (
{isDragActive ? 'Drop audio here' : 'Click or drag audio file'}
)}
onChange(index, { ...data, prompt: e.target.value })}
variant="outlined"
/>
onRemove(index)}
sx={{ alignSelf: 'flex-start' }}
>
);
}
/**
 * Format a duration in seconds as "H:MM:SS".
 * Hours are not zero-padded; minutes and seconds always are. Fractional
 * seconds are truncated.
 * @param {number} seconds - duration in seconds
 * @returns {string} e.g. 3661 -> "1:01:01"
 */
function formatDuration(seconds) {
  const hours = Math.floor(seconds / 3600);
  const minutes = Math.floor((seconds / 60) % 60);
  const secs = Math.floor(seconds % 60);
  const pad = (n) => n.toString().padStart(2, '0');
  return `${hours}:${pad(minutes)}:${pad(secs)}`;
}
// Live training dashboard: progress percentage, device info reported by the
// backend, epoch/step counters, checkpoint count, loss value, a loss-history
// chart, and an error banner. All data arrives via props polled elsewhere.
// NOTE(review): the JSX markup appears to have been stripped from the return
// body — restore from version control.
function TrainingMonitor({
isTraining,
trainingProgress,
trainingStatus,
trainingHistory,
trainingStartTime,
trainingError,
trainingConfig,
systemStatus
}) {
// Seconds elapsed since training started; 0 before a start time is set.
const getElapsedTime = () => {
if (!trainingStartTime) return 0;
return Math.floor((Date.now() - trainingStartTime) / 1000);
};
// Linear extrapolation of remaining seconds from percent progress; null
// until progress is nonzero (avoids division by zero).
const getEstimatedTimeRemaining = () => {
if (!trainingStartTime || trainingProgress === 0) return null;
const elapsed = getElapsedTime();
const estimatedTotal = (elapsed / trainingProgress) * 100;
return Math.max(0, estimatedTotal - elapsed);
};
// MUI color key for the progress bar: error > complete > in-progress.
const getProgressColor = () => {
if (trainingError) return 'error';
if (trainingProgress === 100) return 'success';
return 'primary';
};
return (
Training Monitor
{isTraining && (
)}
Progress
{trainingProgress}%
{trainingStatus?.device_info && (
Device Used for Training
Device: {trainingStatus.device_info.device} ({trainingStatus.device_info.memory_gb?.toFixed(2)}GB VRAM)
Info: {trainingStatus.device_info.type === 'cuda' ? 'CUDA GPU available and selected for training' :
trainingStatus.device_info.type === 'cpu' ? 'Using CPU (no CUDA GPU available or compatible)' :
'Using MPS (Apple Silicon GPU)'}
)}
Current Epoch
{trainingStatus?.current_epoch !== undefined ?
`${trainingStatus.current_epoch + 1} / ${trainingConfig.epochs}` :
'0 / ' + trainingConfig.epochs}
Global Step / Total Steps
{trainingStatus?.global_step !== undefined && trainingStatus?.total_steps !== undefined ?
`${trainingStatus.global_step} / ${trainingStatus.total_steps}` :
'N/A'}
Checkpoints Saved
{trainingStatus?.checkpoints_saved || 0}
Current Loss
{trainingStatus?.loss ? parseFloat(trainingStatus.loss).toFixed(4) : 'N/A'}
{trainingStatus?.loss_history && trainingStatus.loss_history.length > 0 && (
Loss History
{/* X-axis tick/tooltip formatters render seconds as min:sec */}
`${Math.floor(value / 60)}:${(value % 60).toString().padStart(2, '0')}`}
label={{ value: 'Time (min:sec)', position: 'insideBottom', offset: -5 }}
/>
`Time: ${Math.floor(value / 60)}:${(value % 60).toString().padStart(2, '0')}`}
formatter={(value, name) => [value.toFixed(4), 'Loss']}
/>
)}
{trainingError && (
Training Error: {trainingError}
)}
);
}
// Button that asks the backend to "unwrap" a trained checkpoint into an
// inference-ready model via POST /api/unwrap-model, then notifies the parent
// through onUnwrap(result) and triggers onRefresh() to reload model lists.
// NOTE(review): the JSX markup appears stripped from the return body —
// restore from version control.
function ModelUnwrapButton({ model, onUnwrap, onRefresh }) {
const [loading, setLoading] = useState(false);
const [result, setResult] = useState(null);
const [error, setError] = useState(null);
const handleUnwrap = async () => {
// Reset prior outcome before starting a new request.
setLoading(true);
setResult(null);
setError(null);
try {
const response = await axios.post('/api/unwrap-model', {
model_config: model.configPath,
ckpt_path: model.ckptPath,
name: model.name + '_unwrapped'
});
setResult(response.data);
if (onUnwrap) onUnwrap(response.data);
if (onRefresh) onRefresh(); // Refresh model list after unwrapping
} catch (err) {
console.error('Unwrap error:', err);
// Prefer the backend's error message; fall back to the axios message.
setError(err.response?.data?.error || err.message);
} finally {
setLoading(false);
}
};
return (
}
onClick={handleUnwrap}
disabled={loading}
>
{loading ? 'Unwrapping...' : 'Unwrap for Inference'}
{result && result.unwrapped_path && (
)}
{error && (
{error}
)}
);
}
// Per-model checkpoint management panel: lists wrapped checkpoints, lets the
// user unwrap each one for inference (POST /api/unwrap-model) or delete the
// wrapped file (POST /api/delete-checkpoint). Per-checkpoint busy flags are
// keyed by checkpoint.path in loadingStates.
// NOTE(review): uses bare `confirm`/`alert` browser globals, and the JSX
// markup appears stripped from the return body — restore from version
// control.
function CheckpointManager({ model, onRefresh }) {
const [loadingStates, setLoadingStates] = useState({});
const [error, setError] = useState(null);
const [expandedCheckpoint, setExpandedCheckpoint] = useState(null);
const handleUnwrapCheckpoint = async (checkpoint) => {
const checkpointId = checkpoint.path;
setLoadingStates(prev => ({ ...prev, [checkpointId]: { unwrapping: true } }));
setError(null);
try {
const response = await axios.post('/api/unwrap-model', {
model_config: model.config_path,
ckpt_path: checkpoint.path,
name: `${checkpoint.name}_unwrapped`
});
setError(null);
alert(`Checkpoint "${checkpoint.name}" unwrapped successfully!`);
onRefresh();
} catch (err) {
setError(`Failed to unwrap ${checkpoint.name}: ${err.response?.data?.error || err.message}`);
} finally {
setLoadingStates(prev => ({ ...prev, [checkpointId]: { unwrapping: false } }));
}
};
const handleDeleteCheckpoint = async (checkpoint) => {
// Destructive action: require explicit confirmation first.
if (!confirm(`Are you sure you want to delete the wrapped checkpoint "${checkpoint.name}"? This action cannot be undone.`)) {
return;
}
const checkpointId = checkpoint.path;
setLoadingStates(prev => ({ ...prev, [checkpointId]: { deleting: true } }));
setError(null);
try {
await axios.post('/api/delete-checkpoint', {
checkpoint_path: checkpoint.path
});
alert(`Checkpoint "${checkpoint.name}" deleted successfully.`);
onRefresh();
} catch (err) {
setError(`Failed to delete ${checkpoint.name}: ${err.response?.data?.error || err.message}`);
} finally {
setLoadingStates(prev => ({ ...prev, [checkpointId]: { deleting: false } }));
}
};
const checkpoints = model.checkpoints || [];
return (
Checkpoint Management for {model.name}
{checkpoints.length === 0 ? (
No checkpoints found for this model.
) : (
<>
Available Checkpoints: {checkpoints.length}
Unwrapped Models: {model.unwrapped_models?.length || 0}
{/* Individual Checkpoint Cards */}
{checkpoints.map((checkpoint, index) => {
const checkpointId = checkpoint.path;
const isUnwrapping = loadingStates[checkpointId]?.unwrapping;
const isDeleting = loadingStates[checkpointId]?.deleting;
{/* Heuristic name match: either side containing the other counts as
    "already unwrapped". */}
const hasUnwrappedVersion = model.unwrapped_models?.some(unwrapped =>
unwrapped.name.includes(checkpoint.name) ||
checkpoint.name.includes(unwrapped.name.replace('_unwrapped', ''))
);
return (
{checkpoint.name}
{hasUnwrappedVersion && (
)}
Size: {checkpoint.size_mb} MB
{checkpoint.epoch !== undefined && (
| Epoch: {checkpoint.epoch}
)}
{checkpoint.step !== undefined && (
| Step: {checkpoint.step}
)}
{!hasUnwrappedVersion && (
}
onClick={() => handleUnwrapCheckpoint(checkpoint)}
disabled={isUnwrapping || isDeleting}
>
{isUnwrapping ? 'Unwrapping...' : 'Unwrap'}
)}
{hasUnwrappedVersion && (
}
onClick={() => handleDeleteCheckpoint(checkpoint)}
disabled={isDeleting}
>
{isDeleting ? 'Deleting Wrapped...' : 'Delete Wrapped Checkpoint'}
)}
);
})}
>
)}
{error && (
{error}
)}
);
}
// List of generated audio fragments (newest first) with play/pause and
// download controls. A single fragment plays at a time: starting one pauses
// whichever was playing. Native audio elements are tracked in a ref map
// keyed by fragment.id.
// NOTE(review): audioRefs entries are never removed when fragments go away —
// presumably harmless for this list size, but verify. The JSX markup appears
// stripped from the return body — restore from version control.
function GeneratedFragmentsWindow({ fragments, onDownload, isDocker, onDockerMessage }) {
const [playingFragment, setPlayingFragment] = useState(null);
const audioRefs = useRef({});
const handlePlayPause = (fragment) => {
const audio = audioRefs.current[fragment.id];
if (!audio) return;
if (playingFragment === fragment.id) {
// Toggle off the currently playing fragment.
audio.pause();
setPlayingFragment(null);
} else {
// Pause whatever else is playing before starting this one.
if (playingFragment && audioRefs.current[playingFragment]) {
audioRefs.current[playingFragment].pause();
}
audio.play();
setPlayingFragment(fragment.id);
}
};
// Ref callback: register each fragment's audio element as it mounts.
const setAudioRef = useCallback((fragmentId, audioElement) => {
if (audioElement) {
audioRefs.current[fragmentId] = audioElement;
}
}, []);
return (
Generated Fragments ({fragments.length})
{fragments.length === 0 ? (
No fragments generated yet
) : (
{fragments.slice().reverse().map((fragment, index) => (
{fragment.prompt}
{fragment.duration}s • {fragment.timestamp}
handlePlayPause(fragment)}
color={playingFragment === fragment.id ? "primary" : "default"}
sx={{
border: '1px solid',
borderColor: playingFragment === fragment.id ? 'primary.main' : 'divider'
}}
>
{playingFragment === fragment.id ? : }
}
onClick={() => onDownload(fragment)}
>
Download
))}
)}
);
}
// Full-screen welcome splash: staggers the title (after 500ms) and the body
// text (after 1500ms) into view when opened, and resets both when closed.
// Clicking anywhere dismisses it via onClose.
// NOTE(review): the JSX markup appears stripped from the return body —
// restore from version control.
function WelcomePage({ open, onClose }) {
const [titleVisible, setTitleVisible] = useState(false);
const [textVisible, setTextVisible] = useState(false);
useEffect(() => {
if (open) {
const titleTimer = setTimeout(() => {
setTitleVisible(true);
}, 500);
const textTimer = setTimeout(() => {
setTextVisible(true);
}, 1500);
// Clear pending timers if the splash closes (or unmounts) early.
return () => {
clearTimeout(titleTimer);
clearTimeout(textTimer);
};
} else {
setTitleVisible(false);
setTextVisible(false);
}
}, [open]);
if (!open) return null;
return (
e.stopPropagation()}
>
Welcome to Fragmenta!
An End-to-End Pipeline to Fine-Tune and Use Text-to-Audio Models.
Made for composers and audio creators.
@2025 Misagh Azimi
Version 0.0.1
or click anywhere to continue
);
}
function App() {
const [tabValue, setTabValue] = useState(0);
const [uploadRows, setUploadRows] = useState([
{ file: null, prompt: '', audioUrl: '' }
]);
const [processingStatus, setProcessingStatus] = useState('');
const [isProcessing, setIsProcessing] = useState(false);
const [processedCount, setProcessedCount] = useState(0);
const [chunksPreview, setChunksPreview] = useState([]);
const [showWelcomePage, setShowWelcomePage] = useState(true);
// Docker mode state
const [isDocker, setIsDocker] = useState(false);
const [showDownloadDialog, setShowDownloadDialog] = useState(false);
const [showAboutDialog, setShowAboutDialog] = useState(false);
const [showOutputFilesDialog, setShowOutputFilesDialog] = useState(false);
const [outputFiles, setOutputFiles] = useState([]);
const [downloadingModels, setDownloadingModels] = useState({});
const [downloadProgress, setDownloadProgress] = useState({});
const [dockerSnackbar, setDockerSnackbar] = useState({ open: false, message: '' });
const [hfToken, setHfToken] = useState('');
const [hfAuthStatus, setHfAuthStatus] = useState({ authenticated: false, username: null });
const [hfTokenSaving, setHfTokenSaving] = useState(false);
const [hfTokenError, setHfTokenError] = useState('');
const [trainingConfig, setTrainingConfig] = useState({
epochs: 50,
checkpointSteps: 100,
batchSize: 4,
learningRate: 1e-4,
modelName: 'my_fine_tuned_model',
baseModel: 'stable-audio-open-small',
saveWrappedCheckpoint: false
});
const [isTraining, setIsTraining] = useState(false);
const [trainingProgress, setTrainingProgress] = useState(0);
const [trainingStatus, setTrainingStatus] = useState(null);
const [trainingHistory, setTrainingHistory] = useState([]);
const [trainingStartTime, setTrainingStartTime] = useState(null);
const [trainingError, setTrainingError] = useState(null);
const [generationPrompt, setGenerationPrompt] = useState('');
const [generationDuration, setGenerationDuration] = useState(10);
const [generatedAudio, setGeneratedAudio] = useState(null);
const [generatedAudioBlob, setGeneratedAudioBlob] = useState(null);
const [isGenerating, setIsGenerating] = useState(false);
const [generationProgress, setGenerationProgress] = useState(0);
const [selectedModel, setSelectedModel] = useState('');
const [selectedUnwrappedModel, setSelectedUnwrappedModel] = useState('');
const [outputCounter, setOutputCounter] = useState(0);
const [generatedFragments, setGeneratedFragments] = useState([]);
const generateFileName = () => {
return `fragmenta_output${outputCounter.toString().padStart(3, '0')}.wav`;
};
const downloadAudio = () => {
if (generatedAudioBlob) {
const url = URL.createObjectURL(generatedAudioBlob);
const link = document.createElement('a');
link.href = url;
link.download = generateFileName();
document.body.appendChild(link);
link.click();
document.body.removeChild(link);
URL.revokeObjectURL(url);
}
};
const downloadFragment = (fragment) => {
const link = document.createElement('a');
link.href = fragment.audioUrl;
link.download = fragment.filename;
document.body.appendChild(link);
link.click();
document.body.removeChild(link);
};
const [systemStatus, setSystemStatus] = useState(null);
const [availableModels, setAvailableModels] = useState([]);
const [gpuMemoryStatus, setGpuMemoryStatus] = useState(null);
const [isUpdatingGpuMemory, setIsUpdatingGpuMemory] = useState(false);
const [baseModels, setBaseModels] = useState([
{
name: 'stable-audio-open-small',
displayName: 'Stable Audio Open Small (Recommended)',
description: 'Faster - Lower memory usage',
type: 'base',
path: '/models/pretrained/stable-audio-open-small-model.safetensors',
configPath: '/models/config/model_config_small.json',
downloaded: false
},
{
name: 'stable-audio-open-1.0',
displayName: 'Stable Audio Open 1.0',
description: 'Higher quality - Requires more memory',
type: 'base',
path: '/models/pretrained/stable-audio-open-model.safetensors',
configPath: '/models/config/model_config.json',
downloaded: false
}
]);
const [showStartFreshDialog, setShowStartFreshDialog] = useState(false);
const [isStartingFresh, setIsStartingFresh] = useState(false);
const [uploadKey, setUploadKey] = useState(0);
const [isFreeingGPU, setIsFreeingGPU] = useState(false);
const [showFreeGPUDialog, setShowFreeGPUDialog] = useState(false);
useEffect(() => {
setSelectedUnwrappedModel('');
}, [selectedModel]);
useEffect(() => {
console.log('Model changed:', selectedModel);
}, [selectedModel]);
const getMaxDuration = () => {
if (!selectedModel) return 10;
const baseModel = baseModels.find(m => m.name === selectedModel);
if (baseModel) {
if (baseModel.name === 'stable-audio-open-small') {
return 11;
} else if (baseModel.name === 'stable-audio-open-1.0') {
return 47;
}
}
const model = availableModels.find(m => m.name === selectedModel);
if (model && selectedUnwrappedModel) {
const selectedUnwrapped = model.unwrapped_models?.find(u => u.path === selectedUnwrappedModel);
if (selectedUnwrapped) {
const sizeMB = selectedUnwrapped.size_mb || 0;
return sizeMB < 2000 ? 11 : 47;
}
}
return 10;
};
useEffect(() => {
const maxDuration = getMaxDuration();
if (generationDuration > maxDuration) {
setGenerationDuration(maxDuration);
}
}, [selectedModel, selectedUnwrappedModel]);
const handleTabChange = (event, newValue) => {
setTabValue(newValue);
};
const addUploadRow = () => {
setUploadRows([...uploadRows, { file: null, prompt: '', audioUrl: '' }]);
};
const removeUploadRow = (index) => {
const newRows = uploadRows.filter((_, i) => i !== index);
setUploadRows(newRows);
};
const updateUploadRow = (index, data) => {
const newRows = [...uploadRows];
newRows[index] = data;
setUploadRows(newRows);
};
const fetchSystemStatus = async () => {
try {
const response = await axios.get('/api/status');
setSystemStatus(response.data);
} catch (error) {
console.error('Error fetching system status:', error);
}
};
const fetchAvailableModels = async () => {
try {
const response = await axios.get('/api/models');
console.log('Fetched models:', response.data.models);
setAvailableModels(response.data.models || []);
} catch (error) {
console.error('Error fetching available models:', error);
}
};
const fetchBaseModelsStatus = async () => {
try {
const response = await axios.get('/api/base-models/status');
const baseModelsStatus = response.data.base_models;
setBaseModels(prevModels =>
prevModels.map(model => ({
...model,
downloaded: baseModelsStatus[model.name]?.downloaded || false
}))
);
} catch (error) {
console.error('Error fetching base models status:', error);
}
};
const refreshAllModels = async () => {
await Promise.all([
fetchAvailableModels(),
fetchBaseModelsStatus()
]);
};
const fetchGpuMemoryStatus = async () => {
try {
setIsUpdatingGpuMemory(true);
const response = await axios.get('/api/gpu-memory-status');
console.log('GPU Memory Response:', response.data);
setGpuMemoryStatus(response.data.memory_info);
} catch (error) {
console.error('Error fetching GPU memory status:', error.response?.data?.error || error.message || error);
setGpuMemoryStatus(null);
} finally {
setIsUpdatingGpuMemory(false);
}
};
useEffect(() => {
fetchSystemStatus();
fetchAvailableModels();
fetchBaseModelsStatus();
fetchGpuMemoryStatus();
}, []);
// Detect Docker mode on mount
useEffect(() => {
const fetchEnvironment = async () => {
try {
const response = await axios.get('/api/environment');
const docker = response.data.docker === true;
setIsDocker(docker);
// If Docker, also check HF auth status
if (docker) {
try {
const authResp = await axios.get('/api/hf-token/status');
setHfAuthStatus(authResp.data);
} catch (e) {
console.error('Error checking HF auth status:', e);
}
}
} catch (error) {
console.error('Error fetching environment:', error);
}
};
fetchEnvironment();
}, []);
// Check HF auth status when download dialog opens
const checkHfAuthStatus = async () => {
try {
const response = await axios.get('/api/hf-token/status');
setHfAuthStatus(response.data);
return response.data.authenticated;
} catch (e) {
return false;
}
};
// Save HF token
const handleSaveHfToken = async () => {
if (!hfToken.trim()) return;
setHfTokenSaving(true);
setHfTokenError('');
try {
const response = await axios.post('/api/hf-token', { token: hfToken.trim() });
if (response.data.success) {
setHfAuthStatus({ authenticated: true, username: response.data.username });
setHfToken('');
setDockerSnackbar({ open: true, message: `Authenticated as ${response.data.username}` });
}
} catch (error) {
const errMsg = error.response?.data?.error || 'Failed to set token';
setHfTokenError(errMsg);
} finally {
setHfTokenSaving(false);
}
};
// Show download dialog in Docker mode when welcome page dismissed and no models downloaded
useEffect(() => {
if (!showWelcomePage && isDocker) {
// Fresh API check to avoid race with stale initial state
const checkModels = async () => {
try {
const response = await axios.get('/api/base-models/status');
const statuses = response.data.base_models;
// Update baseModels with fresh data
setBaseModels(prevModels =>
prevModels.map(model => ({
...model,
downloaded: statuses[model.name]?.downloaded || false
}))
);
const allNotDownloaded = Object.values(statuses).every(m => !m.downloaded);
if (allNotDownloaded) {
setShowDownloadDialog(true);
}
} catch (error) {
// API error — fall back to current state
const allNotDownloaded = baseModels.every(m => !m.downloaded);
if (allNotDownloaded) {
setShowDownloadDialog(true);
}
}
};
checkModels();
}
}, [showWelcomePage, isDocker]); // eslint-disable-line
// HF model page URLs for terms acceptance
const modelPageUrls = {
'stable-audio-open-small': 'https://huggingface.co/stabilityai/stable-audio-open-small',
'stable-audio-open-1.0': 'https://huggingface.co/stabilityai/stable-audio-open-1.0',
};
// Helper: accept terms then download a model (async with progress polling)
const handleDockerModelDownload = async (modelId) => {
setDownloadingModels(prev => ({ ...prev, [modelId]: 'downloading' }));
setDownloadProgress(prev => ({ ...prev, [modelId]: { percent: 0, message: 'Starting...' } }));
try {
// Accept terms first
await axios.post(`/api/models/${modelId}/accept-terms`);
// Kick off download (backend runs it in background thread)
const response = await axios.post(`/api/models/${modelId}/download`);
if (!response.data.success) {
setDownloadingModels(prev => ({ ...prev, [modelId]: 'error' }));
return;
}
// Poll for progress
const pollInterval = setInterval(async () => {
try {
const prog = await axios.get(`/api/models/${modelId}/download/progress`);
const { percent, message, status } = prog.data;
setDownloadProgress(prev => ({ ...prev, [modelId]: { percent, message } }));
if (status === 'done') {
clearInterval(pollInterval);
setDownloadingModels(prev => ({ ...prev, [modelId]: 'done' }));
await fetchBaseModelsStatus();
} else if (status === 'error') {
clearInterval(pollInterval);
setDownloadingModels(prev => ({ ...prev, [modelId]: 'error' }));
setDownloadProgress(prev => ({ ...prev, [modelId]: { percent: 0, message } }));
}
} catch (err) {
console.error('Error polling progress:', err);
}
}, 1500);
} catch (error) {
console.error(`Error downloading model ${modelId}:`, error);
setDownloadingModels(prev => ({ ...prev, [modelId]: 'error' }));
}
};
// Helper: fetch output files list (Docker)
const fetchOutputFiles = async () => {
try {
const response = await axios.get('/api/output-files');
setOutputFiles(response.data.files || []);
} catch (error) {
console.error('Error fetching output files:', error);
}
};
useEffect(() => {
const interval = setInterval(() => {
fetchGpuMemoryStatus();
}, isTraining ? 2000 : 10000);
return () => clearInterval(interval);
}, [isTraining]);
useEffect(() => {
let statusInterval;
if (isTraining) {
statusInterval = setInterval(async () => {
try {
const statusResponse = await axios.get('/api/training-status');
const currentStatus = statusResponse.data;
setTrainingStatus(currentStatus);
if (currentStatus.progress !== undefined) {
setTrainingProgress(prevProgress => {
if (currentStatus.progress >= prevProgress && (prevProgress > 0 || currentStatus.progress > 0)) {
return currentStatus.progress;
}
return prevProgress;
});
}
setTrainingHistory(prev => {
const newEntry = {
timestamp: Date.now(),
progress: currentStatus.progress || 0,
current_epoch: currentStatus.current_epoch || 0,
current_step: currentStatus.current_step || 0,
loss: currentStatus.loss,
checkpoints_saved: currentStatus.checkpoints_saved || 0,
is_training: currentStatus.is_training,
message: currentStatus.error ||
(currentStatus.progress > 0 ? `Progress: ${currentStatus.progress}%` : 'Starting...')
};
const lastEntry = prev[prev.length - 1];
if (!lastEntry ||
lastEntry.progress !== newEntry.progress ||
lastEntry.current_epoch !== newEntry.current_epoch ||
lastEntry.current_step !== newEntry.current_step ||
lastEntry.loss !== newEntry.loss ||
lastEntry.checkpoints_saved !== newEntry.checkpoints_saved ||
lastEntry.message !== newEntry.message) {
return [...prev, newEntry];
}
return prev;
});
if (currentStatus.is_training) {
setTrainingProgress(currentStatus.progress || 0);
} else {
setIsTraining(false);
if (currentStatus.error) {
setTrainingError(currentStatus.error);
setProcessingStatus(`Training failed: ${currentStatus.error}`);
} else {
setProcessingStatus('Training completed successfully!');
setTrainingProgress(100);
}
setTimeout(() => {
fetchSystemStatus();
fetchAvailableModels();
}, 0);
}
} catch (statusError) {
console.error('Error fetching training status:', statusError);
setTrainingError('Failed to fetch training status');
}
}, 2000);
}
return () => {
if (statusInterval) {
clearInterval(statusInterval);
}
};
}, [isTraining]);
const processFiles = async () => {
setIsProcessing(true);
setProcessingStatus('Processing files...');
try {
const formData = new FormData();
uploadRows.forEach((row, index) => {
if (row.file && row.prompt) {
formData.append(`file_${index}`, row.file);
formData.append(`prompt_${index}`, row.prompt);
}
});
const response = await axios.post('/api/process-files', formData, {
headers: { 'Content-Type': 'multipart/form-data' }
});
setProcessingStatus(response.data.message);
setProcessedCount(response.data.processed_count);
setChunksPreview(response.data.chunks_preview || []);
setUploadRows([{ file: null, prompt: '', audioUrl: '' }]);
fetchSystemStatus();
} catch (error) {
setProcessingStatus(`Error: ${error.response?.data?.error || error.message}`);
} finally {
setIsProcessing(false);
}
};
const startTraining = async () => {
setIsTraining(true);
setTrainingProgress(0);
setTrainingError(null);
setTrainingStartTime(Date.now());
setTrainingHistory([]);
try {
const response = await axios.post('/api/start-training', trainingConfig);
setProcessingStatus('Training started successfully!');
} catch (error) {
const errorData = error.response?.data;
const errorMessage = errorData?.error || error.message;
if (errorData?.checkpoint_warning) {
setTrainingError(errorMessage);
setProcessingStatus(errorMessage);
} else {
setTrainingError(errorMessage);
setProcessingStatus(`Training error: ${errorMessage}`);
}
setIsTraining(false);
}
};
const stopTraining = async () => {
  // Ask the backend to halt the training run, then reset local training state.
  try {
    await axios.post('/api/stop-training');
    setProcessingStatus('Training stopped gracefully');
    setIsTraining(false);
    setTrainingProgress(0);
    setTrainingError(null);
  } catch (error) {
    // Compute the message once instead of evaluating the fallback chain twice.
    const message = error.response?.data?.error || error.message;
    setTrainingError(message);
    setProcessingStatus(`Stop training error: ${message}`);
  }
};
const generateAudio = async () => {
  // Generate an audio clip from the prompt using either a base model or a
  // fine-tuned (unwrapped) checkpoint, then store it as a playable fragment.
  if (!generationPrompt.trim()) {
    setProcessingStatus('Please enter a prompt');
    return;
  }
  let requestData = {
    prompt: generationPrompt,
    duration: generationDuration
  };
  console.log('=== FRONTEND DEBUG: MODEL SELECTION ===');
  console.log('selectedModel:', selectedModel);
  console.log('selectedUnwrappedModel:', selectedUnwrappedModel);
  console.log('baseModels:', baseModels);
  console.log('availableModels:', availableModels);
  const baseModel = baseModels.find(m => m.name === selectedModel);
  if (baseModel) {
    requestData.model_name = selectedModel;
    console.log('FRONTEND: Using base model:', selectedModel);
    console.log('FRONTEND: Base model details:', baseModel);
  } else if (selectedUnwrappedModel) {
    requestData.unwrapped_model_path = selectedUnwrappedModel;
    console.log('FRONTEND: Using unwrapped model:', selectedUnwrappedModel);
    const parentModel = availableModels.find(m => m.name === selectedModel);
    console.log('FRONTEND: Parent model info:', parentModel);
  } else {
    console.log('FRONTEND: No model selected!');
    setProcessingStatus('Please select a model');
    return;
  }
  console.log('FRONTEND: Final request data:', requestData);
  setIsGenerating(true);
  setGenerationProgress(0);
  setProcessingStatus('Starting audio generation...');
  // Fake incremental progress up to 90%; real completion jumps it to 100.
  const progressInterval = setInterval(() => {
    setGenerationProgress(prev => {
      if (prev >= 90) return prev;
      const newProgress = prev + Math.random() * 3; // Reduced from 10 to 3
      setProcessingStatus(`Generating audio... ${Math.round(newProgress)}%`);
      return newProgress;
    });
  }, 1000); // Increased from 500ms to 1000ms
  try {
    console.log('FRONTEND: Sending request to /api/generate with data:', requestData);
    const response = await axios.post('/api/generate', requestData, {
      responseType: 'blob'
    });
    clearInterval(progressInterval);
    setGenerationProgress(100);
    const audioUrl = URL.createObjectURL(response.data);
    setGeneratedAudio(audioUrl);
    setGeneratedAudioBlob(response.data);
    const newFragment = {
      id: Date.now(),
      prompt: generationPrompt,
      duration: generationDuration,
      audioUrl: audioUrl,
      audioBlob: response.data,
      filename: generateFileName(),
      timestamp: new Date().toLocaleString()
    };
    setGeneratedFragments(prev => [...prev, newFragment]);
    setOutputCounter(prev => prev + 1);
    setProcessingStatus('Audio generated successfully!');
    setTimeout(() => {
      setGenerationProgress(0);
    }, 2000);
  } catch (error) {
    clearInterval(progressInterval);
    setGenerationProgress(0);
    console.log('FRONTEND: Generation error:', error);
    console.log('FRONTEND: Error response:', error.response);
    // BUG FIX: with responseType 'blob', axios delivers JSON error bodies as a
    // Blob, so error.response.data.error was always undefined and the server's
    // real error message was lost. Decode the Blob before reading `.error`.
    let errorMessage = error.message;
    const errorBody = error.response?.data;
    if (errorBody instanceof Blob) {
      try {
        const parsed = JSON.parse(await errorBody.text());
        errorMessage = parsed.error || errorMessage;
      } catch {
        // Body was not JSON; keep the generic axios message.
      }
    } else if (errorBody?.error) {
      errorMessage = errorBody.error;
    }
    setProcessingStatus(`Generation error: ${errorMessage}`);
  } finally {
    setIsGenerating(false);
  }
};
const handleStartFresh = async () => {
  // Wipe the backend workspace and reset all client-side state for a clean
  // session. (Removed a dead setProcessingStatus('') write that was
  // immediately overwritten by the response message in the same batch.)
  setIsStartingFresh(true);
  setShowStartFreshDialog(false);
  try {
    const response = await axios.post('/api/start-fresh');
    setUploadRows([{ file: null, prompt: '', audioUrl: '' }]);
    setProcessedCount(0);
    setChunksPreview([]);
    setGeneratedAudio(null);
    setGeneratedAudioBlob(null);
    setGeneratedFragments([]);
    setGenerationPrompt('');
    setUploadKey(prev => prev + 1); // bump key to force upload widgets to remount
    setProcessingStatus(response.data.message);
    fetchSystemStatus();
  } catch (error) {
    setProcessingStatus(`Start fresh error: ${error.response?.data?.error || error.message}`);
  } finally {
    setIsStartingFresh(false);
  }
};
const handleFreeGPUMemory = async () => {
  // Ask the backend to release cached GPU memory and report the outcome.
  // (Previously the status was set twice, with the first write dead whenever
  // memory_info.cuda was present — now computed once and set once.)
  setIsFreeingGPU(true);
  setShowFreeGPUDialog(false);
  try {
    const response = await axios.post('/api/free-gpu-memory');
    const cuda = response.data.memory_info?.cuda;
    // Prefer the detailed free/total figures when the backend provides them.
    setProcessingStatus(cuda
      ? `GPU Memory Freed: ${cuda.free.toFixed(2)}GB free of ${cuda.total.toFixed(2)}GB total`
      : `GPU Memory Freed: ${response.data.message}`);
    fetchGpuMemoryStatus();
  } catch (error) {
    setProcessingStatus(`Free GPU Memory error: ${error.response?.data?.error || error.message}`);
  } finally {
    setIsFreeingGPU(false);
  }
};
const getSelectedModelDisplayName = () => {
  // Resolve a human-readable label for the current model selection:
  // base model display name, "<model> (<checkpoint>)" for fine-tuned models
  // with a chosen unwrapped checkpoint, or the raw name as a fallback.
  console.log('=== GETTING DISPLAY NAME ===');
  console.log('selectedModel:', selectedModel);
  console.log('selectedUnwrappedModel:', selectedUnwrappedModel);
  if (!selectedModel) {
    console.log('No selectedModel, returning empty string');
    return '';
  }
  const base = baseModels.find(({ name }) => name === selectedModel);
  if (base) {
    console.log('Found base model:', base.displayName);
    return base.displayName;
  }
  const fineTuned = availableModels.find(({ name }) => name === selectedModel);
  const unwrapped = fineTuned && selectedUnwrappedModel
    ? fineTuned.unwrapped_models?.find(({ path }) => path === selectedUnwrappedModel)
    : undefined;
  if (unwrapped) {
    const label = `${fineTuned.name} (${unwrapped.name})`;
    console.log('Generated fine-tuned display name:', label);
    return label;
  }
  console.log('Using fallback name:', selectedModel);
  return selectedModel;
};
// Combined list of base and fine-tuned models for the model selector UI.
const allAvailableModels = [
...baseModels,
...availableModels
];
// Switching models invalidates any previously chosen unwrapped checkpoint,
// so the checkpoint selection is cleared alongside the model update.
const handleModelChange = (event) => {
  const { value } = event.target;
  setSelectedModel(value);
  setSelectedUnwrappedModel('');
};
return (
{
setShowWelcomePage(false);
axios.post('http://127.0.0.1:5001/api/welcome-page-closed')
.then(() => {
console.log('Welcome page closure signal sent successfully');
})
.catch((error) => {
console.error('Failed to signal welcome page closure:', error);
});
}}
/>
{/* NOTE(review): JSX element tags in this render tree appear stripped/truncated in this copy of the file — the markup below is incomplete; verify against version control before editing */}
{/* Docker Mode Menu Bar */}
{isDocker && (
}
onClick={() => setShowDownloadDialog(true)}
sx={{ textTransform: 'none', fontSize: '0.8rem', color: 'text.secondary', '&:hover': { color: 'text.primary' } }}
>
Model Setup
}
onClick={() => { fetchOutputFiles(); setShowOutputFilesDialog(true); }}
sx={{ textTransform: 'none', fontSize: '0.8rem', color: 'text.secondary', '&:hover': { color: 'text.primary' } }}
>
Output Files
}
onClick={() => window.open('https://github.com/MAz-Codes/Fragmenta', '_blank')}
sx={{ textTransform: 'none', fontSize: '0.8rem', color: 'text.secondary', '&:hover': { color: 'text.primary' } }}
>
Documentation
}
onClick={() => setShowAboutDialog(true)}
sx={{ textTransform: 'none', fontSize: '0.8rem', color: 'text.secondary', '&:hover': { color: 'text.primary' } }}
>
About
)}
{/* Logo */}
{/* Title */}
Fragmenta
{/* Action Buttons - Left Side */}
}
onClick={() => setShowFreeGPUDialog(true)}
disabled={isFreeingGPU || !(gpuMemoryStatus && gpuMemoryStatus.cuda)}
sx={{
fontSize: '0.65rem',
py: 0.25,
px: 1,
minWidth: 90,
height: 28,
opacity: !(gpuMemoryStatus && gpuMemoryStatus.cuda) ? 0.5 : 1
}}
>
{isFreeingGPU ? 'Freeing...' : 'Free GPU'}
}
onClick={() => setShowStartFreshDialog(true)}
disabled={isStartingFresh}
sx={{
fontSize: '0.65rem',
py: 0.25,
px: 1,
minWidth: 90,
height: 28
}}
>
{isStartingFresh ? 'Starting...' : 'Fresh Start'}
{/* GPU Memory Status - Right Side */}
{gpuMemoryStatus && gpuMemoryStatus.cuda ? (
<>
{/* Status Indicator */}
2 ? 'success.main' :
gpuMemoryStatus.cuda.free > 0.5 ? 'warning.main' : 'error.main'
}} />
{/* Header */}
GPU Memory
2 ? 'success.main' :
gpuMemoryStatus.cuda.free > 0.5 ? 'warning.main' : 'error.main',
animation: 'pulse 2s infinite',
'@keyframes pulse': {
'0%': { opacity: 1 },
'50%': { opacity: 0.5 },
'100%': { opacity: 1 }
}
}} />
{gpuMemoryStatus.cuda.free > 2 ? 'Good' :
gpuMemoryStatus.cuda.free > 0.5 ? 'Low' : 'Critical'}
{/* Memory Bar */}
{/* Used Memory */}
{/* Cached Memory */}
{/* Memory Details */}
{gpuMemoryStatus.cuda.free.toFixed(1)}GB free
{gpuMemoryStatus.cuda.total.toFixed(1)}GB total
>
) : (
<>
{/* Status Indicator - No GPU */}
{/* Header */}
GPU Status
No GPU
{/* No GPU Message */}
No CUDA GPU detected
Using CPU for processing
>
)}
{/* Main Content with Sidebar Layout */}
{/* Left Sidebar with Vertical Tabs */}
{/* Main Content Area */}
{/* Data Processing Tab */}
Upload Audio Files with Annotations
{uploadRows.map((row, index) => (
))}
}
onClick={addUploadRow}
sx={{ mb: 3 }}
>
Add Another Row
: }
fullWidth
>
{isProcessing ? 'Processing...' : 'Process Files'}
Processing Status
{processingStatus && (
{processingStatus}
)}
{systemStatus && (
System Status
Raw Files: {systemStatus.raw_files}
Processed Segments: {systemStatus.processed_segments}
Total Duration: {formatDuration(systemStatus.total_duration || 0)}
Custom Metadata: {systemStatus.has_metadata_json ? 'Yes' : 'Not Found'}
{systemStatus.raw_file_names && systemStatus.raw_file_names.length > 0 && (
Recent files: {systemStatus.raw_file_names.join(', ')}
)}
)}
{/* Training Tab */}
Training Configuration
Base Model
setTrainingConfig({
...trainingConfig,
modelName: e.target.value
})}
sx={{ mb: 2 }}
/>
}>
Advanced Settings
{/* Row 1 */}
Epochs
setTrainingConfig({
...trainingConfig,
epochs: value
})}
min={1}
max={1000}
valueLabelDisplay="auto"
sx={{ flex: 1 }}
/>
{
const val = parseInt(e.target.value) || 1;
setTrainingConfig({
...trainingConfig,
epochs: Math.max(1, Math.min(1000, val))
});
}}
inputProps={{ min: 1, max: 1000, step: 1 }}
sx={{ width: '80px' }}
size="small"
/>
Checkpoint Interval (steps)
setTrainingConfig({
...trainingConfig,
checkpointSteps: value
})}
min={10}
max={1000}
step={10}
valueLabelDisplay="auto"
sx={{ flex: 1 }}
/>
{
const val = parseInt(e.target.value) || 10;
setTrainingConfig({
...trainingConfig,
checkpointSteps: Math.max(10, Math.min(1000, val))
});
}}
inputProps={{ min: 10, max: 1000, step: 10 }}
sx={{ width: '80px' }}
size="small"
/>
{/* Row 2 */}
Learning Rate
setTrainingConfig({
...trainingConfig,
learningRate: value
})}
min={1e-6}
max={1e-3}
step={1e-6}
valueLabelDisplay="auto"
sx={{ flex: 1 }}
/>
{
const val = parseFloat(e.target.value) || 1e-6;
setTrainingConfig({
...trainingConfig,
learningRate: Math.max(1e-6, Math.min(1e-3, val))
});
}}
inputProps={{ min: 1e-6, max: 1e-3, step: 1e-6 }}
sx={{ width: '100px' }}
size="small"
/>
}
sx={{ flex: 1 }}
>
Stop Training
{/* Warning when base model is not downloaded */}
{(() => {
const baseModel = baseModels.find(m => m.name === trainingConfig.baseModel);
if (baseModel && !baseModel.downloaded) {
return (
The selected base model "{baseModel.displayName}" is not downloaded.
{isDocker
? ' Click "Model Setup" in the menu bar above to download it.'
: ' Please use the File Authentication menu to download it before training.'}
{isDocker && (
)}
);
}
return null;
})()}
Training Monitor
{/* Generation Tab */}
Audio Generation
{/* Unwrapped Model Selection for Fine-tuned Models */}
{selectedModel && availableModels.find(m => m.name === selectedModel)?.unwrapped_models?.length > 0 && (
(() => {
const unwrappedModels = availableModels.find(m => m.name === selectedModel)?.unwrapped_models || [];
const validPaths = unwrappedModels.map(u => String(u.path));
// Only allow the value if it's in the list, otherwise set to ''
const safeSelected = validPaths.includes(selectedUnwrappedModel) ? selectedUnwrappedModel : '';
return (
<>
>
);
})()
)}
setGenerationPrompt(e.target.value)}
sx={{ mb: 3 }}
/>
Desired Duration (seconds):
setGenerationDuration(value)}
min={1}
max={getMaxDuration()}
step={1}
marks
valueLabelDisplay="auto"
/>
{generationDuration}s
{isGenerating ? (
Generating audio... {Math.round(generationProgress)}%
Generation time may vary considerably depending on your hardware.
) : (
)}
{/* Warnings for model issues */}
{selectedModel &&
availableModels.find(m => m.name === selectedModel) &&
availableModels.find(m => m.name === selectedModel)?.unwrapped_models?.length > 0 &&
!selectedUnwrappedModel && (
Please select a checkpoint for the selected fine-tuned model before generating audio.
)}
{/* Warning when base model is not downloaded */}
{(() => {
const baseModel = baseModels.find(m => m.name === selectedModel);
if (baseModel && !baseModel.downloaded) {
return (
The selected base model "{baseModel.displayName}" is not downloaded.
{isDocker
? ' Click "Model Setup" in the menu bar above to download it.'
: ' Please use the Authentication menu to download it before generating audio.'}
{isDocker && (
)}
);
}
return null;
})()}
Selected Model
{selectedModel ? (
(() => {
// Check if it's a base model
const baseModel = baseModels.find(m => m.name === selectedModel);
if (baseModel) {
const maxDuration = getMaxDuration();
return (
{baseModel.displayName}
Type: Base Model
{baseModel.description}
{baseModel.downloaded ? (
Ready for inference
) : (
Model not downloaded
)}
);
}
// Check if it's a fine-tuned model
const model = availableModels.find(m => m.name === selectedModel);
if (model) {
const maxDuration = getMaxDuration();
return (
{model.name}
Type: Fine-tuned Model
Path: {model.path}
Checkpoint: {model.has_checkpoint ? 'Available' : 'Missing'}
{model.unwrapped_models && model.unwrapped_models.length > 0 && (
Selected Unwrapped Model for Generation
{selectedUnwrappedModel ? (
(() => {
const selectedUnwrapped = model.unwrapped_models.find(u => u.path === selectedUnwrappedModel);
if (selectedUnwrapped) {
const isLargeModel = selectedUnwrapped.size_mb >= 2000;
return (
{selectedUnwrapped.name}
Size: {selectedUnwrapped.size_mb} MB
Max Duration: {maxDuration} seconds ({isLargeModel ? 'Large Model' : 'Small Model'})
Ready for inference
);
}
return null;
})()
) : (
No checkpoint selected.
)}
)}
);
}
return (
Model not found
);
})()
) : (
Please select a model to generate audio
)}
{/* Checkpoint Management Section */}
{selectedModel && availableModels.find(m => m.name === selectedModel) && (
m.name === selectedModel)}
onRefresh={refreshAllModels}
/>
)}
Generated Fragments
setDockerSnackbar({ open: true, message: msg })}
/>
{/* Start Fresh Confirmation Dialog */}
{/* Free GPU Memory Confirmation Dialog */}
{/* Docker: Model Download Dialog */}
{/* Docker: About Dialog */}
{/* Docker: Output Files Browser Dialog */}
{/* Docker: Snackbar for messages */}
setDockerSnackbar({ open: false, message: '' })}
message={dockerSnackbar.message}
/>
);
}
export default App;