import { useState, useEffect, useRef } from 'react'; import { Upload, Cpu, HardDrive, Database, CheckCircle, AlertCircle, Loader2, Package, Trash2, Sparkles, Clock, Download } from 'lucide-react'; import { useSystemStore } from '../store'; import { motion, AnimatePresence } from 'framer-motion'; /** * ModelLoader page - load HuggingFace models with progress tracking */ export default function ModelLoader() { const systemInfo = useSystemStore((state) => state.systemInfo); const [modelName, setModelName] = useState(''); const [exampleModels, setExampleModels] = useState(null); const [loadResult, setLoadResult] = useState(null); const [isLoading, setIsLoading] = useState(false); const [progress, setProgress] = useState(null); const [cachedModels, setCachedModels] = useState([]); const [modelInfo, setModelInfo] = useState(null); const progressPollRef = useRef(null); /* interval id set by pollProgress; NOTE(review): no useEffect cleanup is visible for this interval, so it can outlive the component if it unmounts mid-download -- confirm and add a cleanup */ // Fetch example models and cache info on mount useEffect(() => { // Optimistic load from cache const cachedExamples = localStorage.getItem('example_models'); if (cachedExamples) { try { setExampleModels(JSON.parse(cachedExamples)); } catch (e) { /* corrupt localStorage entry: ignore, network fetch below will overwrite */ } } fetch('/api/models/examples') .then(res => res.json()) .then(data => { setExampleModels(data); localStorage.setItem('example_models', JSON.stringify(data)); }) .catch(() => { }); fetchCacheInfo(); fetchModelInfo(); }, []); /* GET /api/models/cache and store the cached-model list; errors are deliberately swallowed (best-effort refresh). */ const fetchCacheInfo = async () => { try { const res = await fetch('/api/models/cache'); const data = await res.json(); setCachedModels(data.models || []); } catch (e) { } }; /* GET /api/models/info; only stores the payload when data.loaded is truthy. */ const fetchModelInfo = async () => { try { const res = await fetch('/api/models/info'); const data = await res.json(); if (data.loaded) { setModelInfo(data); } } catch (e) { } }; /* Start polling /api/models/progress/<name> every 500ms, replacing any interval already running; progress state is only updated while the server reports data.downloading. */ const pollProgress = (name) => { if (progressPollRef.current) { clearInterval(progressPollRef.current); } progressPollRef.current = setInterval(async () => { try { const res = await fetch(`/api/models/progress/${encodeURIComponent(name)}`); const data = await res.json(); if (data.downloading) { 
setProgress(data); } } catch (e) { } }, 500); }; /* Clear the interval started by pollProgress, if any. */ const stopPolling = () => { if (progressPollRef.current) { clearInterval(progressPollRef.current); progressPollRef.current = null; } }; /* POST /api/models/load with the typed model name, polling progress while the request is in flight. NOTE(review): trust_remote_code is hard-coded to true, which lets arbitrary model repo code run server-side -- confirm this is intended. */ const handleLoadModel = async () => { if (!modelName.trim() || isLoading) return; setIsLoading(true); setLoadResult(null); setProgress({ status: 'starting', percent: 0, message: 'Starting download...' }); // Start polling for progress pollProgress(modelName.trim()); try { const response = await fetch('/api/models/load', { method: 'POST', headers: { 'Content-Type': 'application/json' }, body: JSON.stringify({ model_name: modelName.trim(), dtype: 'auto', device: 'auto', trust_remote_code: true }) }); const data = await response.json(); setLoadResult(data); if (data.success) { setModelInfo(data.model_info); setProgress({ status: 'complete', percent: 100, message: 'Model loaded!' }); fetchCacheInfo(); } else { setProgress(null); } } catch (err) { setLoadResult({ success: false, error: err.message }); setProgress(null); } finally { setIsLoading(false); stopPolling(); } }; /* Copy an example model id into the input; does not trigger the load itself. */ const handleQuickLoad = (modelId) => { setModelName(modelId); }; /* POST /api/models/unload and reset all local model state; best-effort, errors swallowed. */ const handleUnload = async () => { try { await fetch('/api/models/unload', { method: 'POST' }); setModelInfo(null); setLoadResult(null); setProgress(null); } catch (e) { } }; /* DELETE a single cached model by name, then refresh the cache list; best-effort. */ const handleDeleteFromCache = async (name) => { try { await fetch(`/api/models/cache/${encodeURIComponent(name)}`, { method: 'DELETE' }); fetchCacheInfo(); } catch (e) { } }; /* Trigger server-side cache cleanup and report the deleted count via alert(); best-effort. */ const handleCleanup = async () => { try { const res = await fetch('/api/models/cache/cleanup', { method: 'POST' }); const data = await res.json(); fetchCacheInfo(); alert(`Cleaned up ${data.deleted_count} models`); } catch (e) { } }; return (
{/* Header */}

Load HuggingFace Model

Download and analyze models directly from HuggingFace Hub

{/* Main Content */}
{/* Load Model Card */}

Load Model

setModelName(e.target.value)} onKeyDown={(e) => e.key === 'Enter' && handleLoadModel()} disabled={isLoading} />

Enter the HuggingFace model identifier (organization/model-name)

{/* Progress Bar */} {progress && (
{progress.message || progress.status} {progress.percent || 0}%
{progress.speed_mbps && (
{progress.speed_mbps} MB/s {progress.eta_seconds && ETA: {progress.eta_seconds}s}
)}
)}
{/* Result Message */} {loadResult && !isLoading && ( {loadResult.success ? ( <>
Model loaded successfully!

{loadResult.model_info?.architecture} - {loadResult.model_info?.num_params_millions}M params

) : ( <>
Failed to load model

{loadResult.error}

{loadResult.suggestion &&

{loadResult.suggestion}

}
)}
)}
{/* Currently Loaded Model */} {modelInfo && (

Loaded Model

Name {modelInfo.name}
Parameters {modelInfo.num_params_millions}M
Memory {modelInfo.memory_mb?.toFixed(1)} MB
Device {modelInfo.device}
Quantizable Layers {modelInfo.num_quantizable_layers}
)} {/* Quick Start */}

Quick Start

Click to select a model:

{exampleModels ? ( <> {exampleModels.sample_models?.length > 0 && (

⭐ Sample Models (Pre-cached)

{exampleModels.sample_models.map((model) => ( ))}
)}

Small Models

{exampleModels.small_models?.map((model) => ( ))}
) : (
Loading examples...
)}
{/* System Status */}

System

{systemInfo ? (
Device {systemInfo.cuda_available ? '🟢 CUDA GPU' : systemInfo.mps_available ? '🟢 Apple MPS' : '🟡 CPU'}
{systemInfo.gpus?.length > 0 && (
GPU {systemInfo.gpus[0].name}
)}
RAM {systemInfo.ram_available_gb?.toFixed(1)} GB
) : (

Loading...

)}
{/* Cached Models */}

Model Cache

Models auto-delete after 4 hours (except samples)

{cachedModels.length > 0 ? (
{cachedModels.map((model) => (
{model.is_sample && '⭐ '} {model.name} {model.size_mb} MB
{!model.is_sample && ( )}
))}
) : (

No models cached

)}
); }