File size: 3,052 Bytes
c0659f6
b49e394
 
c0659f6
b49e394
 
 
 
 
c0659f6
 
 
 
b49e394
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
c0659f6
 
 
 
 
 
 
 
 
b49e394
 
c0659f6
 
 
b49e394
 
 
 
c0659f6
 
 
b49e394
 
c0659f6
 
 
 
b49e394
c0659f6
b49e394
c0659f6
 
 
 
 
 
 
b49e394
 
c0659f6
b49e394
 
c0659f6
 
 
b49e394
 
 
c0659f6
 
 
 
 
 
 
 
b49e394
 
 
 
 
 
 
 
 
 
 
 
c0659f6
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106

import { useState, useCallback } from 'react';
import { FileItem, UploadStatus } from '../types';
import { uploadBatchToHub } from '../services/hfService';

/**
 * Manages a queue of files and uploads them to the Hub in batches.
 *
 * Files are chunked into groups of BATCH_SIZE; up to CONCURRENCY_LIMIT
 * batches run in parallel. Per-file status (IDLE → UPLOADING →
 * SUCCESS/ERROR) is reflected in `files` as each batch progresses.
 */
export const useFileUpload = () => {
  const [files, setFiles] = useState<FileItem[]>([]);
  const [isUploading, setIsUploading] = useState(false);

  // --- CONFIGURATION ---
  const BATCH_SIZE = 5; // Files per request
  const CONCURRENCY_LIMIT = 3; // Parallel requests

  // --- UPLOAD LOGIC ---

  /** Appends newly selected files to the queue. */
  const addFiles = useCallback((newFilesList: FileItem[]) => {
    setFiles((prev) => [...prev, ...newFilesList]);
  }, []);

  /** Removes a single queued file by id. */
  const removeFile = useCallback((id: string) => {
    setFiles((prev) => prev.filter((f) => f.id !== id));
  }, []);

  /** Updates the destination path of a single queued file. */
  const updateFilePath = useCallback((id: string, newPath: string) => {
    setFiles((prev) => prev.map((f) => (f.id === id ? { ...f, path: newPath } : f)));
  }, []);

  /**
   * Uploads every IDLE or ERROR file. ERROR files are retried.
   * No-op while an upload is already in flight.
   */
  const startUpload = useCallback(async () => {
    // Guard against re-entry: a second call mid-upload would re-batch
    // files whose status updates haven't landed yet.
    if (isUploading) return;

    const filesToUpload = files.filter(
      (f) => f.status === UploadStatus.IDLE || f.status === UploadStatus.ERROR
    );

    if (filesToUpload.length === 0) return;

    setIsUploading(true);

    try {
      // 1. Create Batches (Chunks)
      const batches: FileItem[][] = [];
      for (let i = 0; i < filesToUpload.length; i += BATCH_SIZE) {
        batches.push(filesToUpload.slice(i, i + BATCH_SIZE));
      }

      // 2. Process a single batch: mark its files UPLOADING, call the
      //    service, then mark them SUCCESS (with URL) or ERROR.
      const processBatch = async (batch: FileItem[]) => {
        // O(1) id membership/index lookup instead of batch.find()
        // inside every state-updater map pass.
        const indexById = new Map(batch.map((b, i) => [b.id, i] as const));

        setFiles((prev) =>
          prev.map((f) =>
            indexById.has(f.id)
              ? { ...f, status: UploadStatus.UPLOADING, error: undefined }
              : f
          )
        );

        try {
          // Prepare payload for service
          const batchPayload = batch.map((item) => ({
            id: item.id,
            file: item.file,
            path: item.path,
          }));

          // Call API — presumably returns URLs in payload order;
          // NOTE(review): confirm against uploadBatchToHub's contract.
          const urls = await uploadBatchToHub(batchPayload);

          // Success: update status and attach the returned URL.
          setFiles((prev) =>
            prev.map((f) => {
              const index = indexById.get(f.id);
              return index !== undefined
                ? { ...f, status: UploadStatus.SUCCESS, url: urls[index] }
                : f;
            })
          );
        } catch (err: unknown) {
          // Narrow before reading .message — thrown values are not
          // guaranteed to be Error instances.
          const message = err instanceof Error ? err.message : String(err);
          setFiles((prev) =>
            prev.map((f) =>
              indexById.has(f.id)
                ? { ...f, status: UploadStatus.ERROR, error: message }
                : f
            )
          );
        }
      };

      // 3. Execute with Concurrency Limit: run CONCURRENCY_LIMIT batches
      //    in parallel, wait for the whole group, then start the next.
      //    processBatch never rejects (it catches internally), but
      //    allSettled keeps the group alive regardless.
      for (let i = 0; i < batches.length; i += CONCURRENCY_LIMIT) {
        const activeBatches = batches.slice(i, i + CONCURRENCY_LIMIT);
        await Promise.allSettled(activeBatches.map((batch) => processBatch(batch)));
      }
    } finally {
      // Always release the flag, even if batching itself throws.
      setIsUploading(false);
    }
  }, [files, isUploading]);

  return {
    files,
    isUploading,
    addFiles,
    removeFile,
    updateFilePath,
    startUpload,
  };
};