// hooks/useFileUpload.ts (from repo "uploadv2", commit 8f35e50)
import { useState, useCallback, useRef } from 'react';
import { FileItem, UploadStatus } from '../types';
import { uploadBatchToHub } from '../services/hfService';
/**
 * Hook managing a client-side upload queue for Hugging Face Hub.
 *
 * Exposes the file list, an `isUploading` flag, and mutators to add,
 * remove, and re-path files, plus `startUpload` which batches pending
 * files and pushes them to the Hub with bounded concurrency.
 */
export const useFileUpload = () => {
  const [files, setFiles] = useState<FileItem[]>([]);
  const [isUploading, setIsUploading] = useState(false);

  // Ref mirrors the latest `files` so async code can read fresh state
  // without adding `files` to callback dependency lists.
  const filesRef = useRef<FileItem[]>([]);
  filesRef.current = files;

  // --- CONFIGURATION FOR SPEED ---
  // Larger batches mean fewer commits created on the Hub.
  const BATCH_SIZE = 10;
  // Number of batches uploaded in parallel (browsers typically limit
  // simultaneous connections per domain to ~6).
  const CONCURRENCY_LIMIT = 5;

  /** Append newly selected files to the list. */
  const addFiles = useCallback((newFilesList: FileItem[]) => {
    setFiles((prev) => [...prev, ...newFilesList]);
  }, []);

  /** Remove a file (by id) from the list. */
  const removeFile = useCallback((id: string) => {
    setFiles((prev) => prev.filter((f) => f.id !== id));
  }, []);

  /** Change the destination path of a single file. */
  const updateFilePath = useCallback((id: string, newPath: string) => {
    setFiles((prev) => prev.map((f) => (f.id === id ? { ...f, path: newPath } : f)));
  }, []);

  /**
   * Upload every pending file (status IDLE, or ERROR for retries).
   * Files are chunked into batches of BATCH_SIZE; a pool of up to
   * CONCURRENCY_LIMIT workers drains the shared batch queue.
   */
  const startUpload = useCallback(async () => {
    // Filter pending files from the ref (not the stale closure).
    const pendingFiles = filesRef.current.filter(
      (f) => f.status === UploadStatus.IDLE || f.status === UploadStatus.ERROR
    );
    if (pendingFiles.length === 0) return;
    setIsUploading(true);

    // 1. Chunk files into batches.
    const batches: FileItem[][] = [];
    for (let i = 0; i < pendingFiles.length; i += BATCH_SIZE) {
      batches.push(pendingFiles.slice(i, i + BATCH_SIZE));
    }
    // Shared queue of batches still to process.
    const queue = [...batches];

    // Apply one status (and optional result) to every file in a batch.
    const updateBatchStatus = (
      batchItems: FileItem[],
      status: UploadStatus,
      result?: { urls?: string[]; error?: string }
    ) => {
      setFiles((prev) =>
        prev.map((f) => {
          const batchIndex = batchItems.findIndex((b) => b.id === f.id);
          if (batchIndex === -1) return f;
          return {
            ...f,
            status: status,
            // uploadBatchToHub is assumed to return urls in payload
            // order, so the index within the batch selects the url.
            url: status === UploadStatus.SUCCESS ? result?.urls?.[batchIndex] : f.url,
            error: status === UploadStatus.ERROR ? result?.error : undefined,
          };
        })
      );
    };

    // 2. Worker: iteratively pull batches until the queue is empty.
    //    A loop (rather than the recursive "process next in finally"
    //    pattern) keeps the async call chain bounded for long queues.
    const worker = async (): Promise<void> => {
      for (;;) {
        const batch = queue.shift();
        if (!batch) return;

        // Update UI -> UPLOADING
        updateBatchStatus(batch, UploadStatus.UPLOADING);
        try {
          const payload = batch.map((item) => ({
            id: item.id,
            file: item.file,
            path: item.path,
          }));
          const urls = await uploadBatchToHub(payload);
          // Update UI -> SUCCESS
          updateBatchStatus(batch, UploadStatus.SUCCESS, { urls });
        } catch (err: unknown) {
          console.error("Batch failed:", err);
          // Narrow the unknown error before reading .message.
          const message =
            err instanceof Error && err.message ? err.message : "Upload failed";
          // Update UI -> ERROR
          updateBatchStatus(batch, UploadStatus.ERROR, { error: message });
        }
      }
    };

    // 3. Start the worker pool and wait for the queue to drain.
    const limit = Math.min(CONCURRENCY_LIMIT, batches.length);
    await Promise.all(Array.from({ length: limit }, () => worker()));
    setIsUploading(false);
  }, []); // deliberately empty: latest state is read through filesRef

  return {
    files,
    isUploading,
    addFiles,
    removeFile,
    updateFilePath,
    startUpload,
  };
};