File size: 6,804 Bytes
cb35bc5
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
# Upload Q:\OE42000_Aria\00_Data_Collection to HF datasets repo in 20GB chunks (no admin, no symlinks)
# - Repo: FraunhoferIPK/IndEgo (dataset)
# - Branch: branch_1
# - Dest folder in repo: 00_Data_Collection/
# - Preserves full source structure
# - Skips any single file > 50,000 MB (decimal)

param(
  # Clone URL of the Hugging Face dataset repository to push into.
  [string]$RepoUrl  = "https://huggingface.co/datasets/FraunhoferIPK/IndEgo",
  # Local working directory where the repo is cloned and chunk files are staged.
  [string]$WorkDir  = "$env:USERPROFILE\hf_ingest_branch1",
  # Root of the source tree to upload; its directory structure is preserved.
  [string]$SrcRoot  = "Q:\OE42000_Aria\00_Data_Collection",
  # Remote branch that receives the chunked commits.
  [string]$Branch   = "branch_1",
  # Top-level folder inside the repo under which all files are placed.
  [string]$DestRoot = "00_Data_Collection"
)

# -------- SETTINGS (do not change unless you need to) --------
[int64]$ChunkMaxBytes = 20 * 1GB          # ~20 GiB per commit (binary units)
# 50,000 *decimal* MB = 5e10 bytes, matching the "(decimal)" intent stated in
# the header and the skip message below, and Hugging Face's 50 GB per-file
# hard limit. The previous `50000 * 1MB` used PowerShell's binary 1MB
# (1,048,576 B) and therefore allowed files up to ~52.4e9 bytes.
[int64]$MaxFileBytes  = 50000 * 1000000
# ------------------------------------------------------------

function HumanBytes([Int64]$n) {
  # Render a byte count as a short human-readable string using binary
  # (1024-based) units, e.g. 1536 -> "1.50 KB".
  $suffixes  = "B","KB","MB","GB","TB","PB"
  $value     = [double]$n
  $unitIndex = 0
  for (; $value -ge 1024 -and $unitIndex -lt ($suffixes.Length - 1); $unitIndex++) {
    $value /= 1024
  }
  "{0:N2} {1}" -f $value, $suffixes[$unitIndex]
}

# --- sanity checks: git must be on PATH, source tree must exist ---
$gitCmd = Get-Command git -ErrorAction SilentlyContinue
if ($null -eq $gitCmd) {
  Write-Error "Git not found. Install Git for Windows (user-level) and retry."
  exit 1
}
if (-not (Test-Path $SrcRoot)) {
  Write-Error "Source path not found: $SrcRoot"
  exit 1
}

# --- prepare working directory / clone once ---
New-Item -ItemType Directory -Force -Path $WorkDir | Out-Null
Set-Location $WorkDir
if (-not (Test-Path (Join-Path $WorkDir ".git"))) {
  # `cmd || exit 1` is a PowerShell 7+ pipeline-chain operator and a parse
  # error on Windows PowerShell 5.1; use the explicit $LASTEXITCODE pattern
  # the rest of this script already follows.
  git clone $RepoUrl .
  if ($LASTEXITCODE -ne 0) { Write-Error "git clone failed"; exit 1 }
} else {
  git remote -v | Out-Null   # repo already cloned; no-op sanity touch
}
git config core.longpaths true | Out-Null   # avoid MAX_PATH failures on deep trees

# --- ensure branch exists, checkout clean branch state ---
# BUG FIX: the previous check `(git ls-remote ...) -ne ""` was always $true
# when the branch is absent, because git emits no output ($null) and
# `$null -ne ""` evaluates to $true in PowerShell — so the create-branch path
# was unreachable. Test for actual output instead. The PS7-only `|| exit 1`
# chains are replaced with $LASTEXITCODE checks (Windows PowerShell 5.1 safe).
$headRef = git ls-remote --heads origin $Branch
$exists  = -not [string]::IsNullOrWhiteSpace(($headRef | Out-String))
if (-not $exists) {
  git checkout -b $Branch
  if ($LASTEXITCODE -ne 0) { Write-Error "git checkout -b $Branch failed"; exit 1 }
  git push -u origin $Branch
  if ($LASTEXITCODE -ne 0) { Write-Error "git push -u origin $Branch failed"; exit 1 }
} else {
  git fetch origin $Branch
  if ($LASTEXITCODE -ne 0) { Write-Error "git fetch failed"; exit 1 }
  git checkout $Branch
  if ($LASTEXITCODE -ne 0) { Write-Error "git checkout $Branch failed"; exit 1 }
  git reset --hard origin/$Branch
  if ($LASTEXITCODE -ne 0) { Write-Error "git reset --hard failed"; exit 1 }
}

# --- enumerate source files (preserve structure) ---
$all = Get-ChildItem -LiteralPath $SrcRoot -Recurse -File -Force
if (-not $all) { Write-Error "No files under $SrcRoot"; exit 1 }

# map to repo-relative paths under DestRoot
# PERF: resolve the source root ONCE — the original re-ran Resolve-Path for
# both the root and each file inside the loop (loop-invariant work, and
# FileInfo.FullName is already an absolute resolved path).
$srcRootFull = (Resolve-Path -LiteralPath $SrcRoot).Path
$destPrefix  = $DestRoot.TrimEnd('\','/')
$items = $all | ForEach-Object {
  # strip the root prefix, then normalise to forward slashes for git
  $rel = $_.FullName.Substring($srcRootFull.Length).TrimStart('\','/')
  [PSCustomObject]@{
    SrcPath = $_.FullName                                   # absolute source path
    RelPath = ($destPrefix + '/' + $rel).Replace('\','/')   # repo-relative, '/'-separated
    Size    = $_.Length                                     # bytes
  }
} | Sort-Object RelPath

# --- chunking: ≤ 20 GiB per commit, skip > 50,000 MB ---
# First-fit packing in RelPath order: files are appended to the current chunk
# until the next one would overflow $ChunkMaxBytes, then the chunk is closed.
# The leading comma in `,@(...)` appends the inner array as ONE element of
# $chunks instead of splatting its items into the outer array.
$skipped = @()   # files over the per-file threshold (reported, never uploaded)
$chunks  = @()   # array of chunks; each chunk is an array of item objects
$cur     = New-Object System.Collections.Generic.List[object]
[int64]$curSize = 0                                                     # running byte total of $cur
[int64]$totalAll = ($items | Measure-Object -Property Size -Sum).Sum    # for the plan report

foreach ($it in $items) {
  $sz = [int64]$it.Size
  # over the hard per-file limit: record and never upload
  if ($sz -gt $MaxFileBytes) { $skipped += $it; continue }

  if ($sz -gt $ChunkMaxBytes) {
    # file is allowed but bigger than a whole chunk: flush the open chunk,
    # then give this file a chunk of its own
    if ($cur.Count -gt 0) { $chunks += ,@($cur.ToArray()); $cur = New-Object System.Collections.Generic.List[object]; $curSize = 0 }
    $chunks += ,@($it) # own chunk (single large-but-allowed file)
    continue
  }

  # normal case: append if it fits, otherwise close the chunk and start fresh
  if ($curSize + $sz -le $ChunkMaxBytes) { $cur.Add($it); $curSize += $sz }
  else { $chunks += ,@($cur.ToArray()); $cur = New-Object System.Collections.Generic.List[object]; $cur.Add($it); $curSize = $sz }
}
# flush the final partially-filled chunk
if ($cur.Count -gt 0) { $chunks += ,@($cur.ToArray()) }

# Print the upload plan before any work starts (strings unchanged).
$planLines = @(
  ""
  "Source              : $SrcRoot"
  "Repo                : $RepoUrl"
  "Branch              : $Branch"
  "Dest in repo        : $DestRoot/"
  "Total files found   : $($items.Count) ($(HumanBytes $totalAll))"
  "Planned chunks      : $($chunks.Count) (≤ $(HumanBytes $ChunkMaxBytes) each)"
)
foreach ($line in $planLines) { Write-Host $line }
if ($skipped.Count -gt 0) {
  Write-Host "Skipping $($skipped.Count) file(s) > 50,000 MB:"
  $skipped | ForEach-Object { Write-Host "  SKIP: $($_.RelPath) ($(HumanBytes $_.Size))" }
}

# --- process each chunk: copy -> add -> commit -> push -> delete local copies ---
# NOTE(review): deleting working-tree copies reclaims only part of the disk —
# the pushed blobs remain in .git\objects, so the clone still grows by roughly
# the total uploaded size; confirm $WorkDir's drive has that headroom.
$overallStart = Get-Date
$idx = 0
foreach ($chunk in $chunks) {
  $idx++
  $chunkBytes = ($chunk | Measure-Object Size -Sum).Sum
  Write-Host ""
  Write-Host "==> Chunk $idx / $($chunks.Count): $($chunk.Count) files, $(HumanBytes $chunkBytes)"

  # copy chunk files into repo working tree (preserving subfolders)
  foreach ($it in $chunk) {
    $destPath = Join-Path $WorkDir ($it.RelPath -replace '/','\')
    $destDir  = Split-Path $destPath -Parent
    New-Item -ItemType Directory -Force -Path $destDir | Out-Null
    Copy-Item -LiteralPath $it.SrcPath -Destination $destPath -Force
  }

  # stage using a pathspec file (avoid cmd length limits)
  # BUG FIX: `Set-Content -Encoding UTF8` writes a UTF-8 BOM under Windows
  # PowerShell 5.1; git does not strip it, so the BOM corrupts the first
  # pathspec entry. File.WriteAllLines emits UTF-8 *without* BOM on both
  # PS 5.1 and PS 7.
  $listFile = Join-Path $WorkDir "paths_chunk_$idx.txt"
  [System.IO.File]::WriteAllLines($listFile, [string[]]($chunk | ForEach-Object { $_.RelPath }))

  $t0 = Get-Date
  git add --pathspec-from-file="$listFile"
  if ($LASTEXITCODE -ne 0) { Write-Error "git add failed"; exit 1 }

  # A non-zero commit status on a re-run usually means "nothing to commit";
  # treat it as skip-and-continue rather than fatal.
  git commit -m "[branch_1][chunk $idx/$($chunks.Count)] Add $($chunk.Count) files ($(HumanBytes $chunkBytes))"
  if ($LASTEXITCODE -ne 0) {
    Write-Host "Nothing to commit (maybe already added). Skipping push."
  } else {
    git push origin $Branch
    if ($LASTEXITCODE -ne 0) { Write-Error "git push failed"; exit 1 }
  }
  $dt = ((Get-Date) - $t0).TotalSeconds
  if ($dt -gt 0) {
    $mbps = (($chunkBytes/1MB) / $dt)   # MB here is PowerShell's binary 1MB
    Write-Host ("   ✔ Done chunk {0}: {1} in {2:n1}s (~{3:n2} MB/s)" -f $idx,(HumanBytes $chunkBytes),$dt,$mbps)
  }

  Remove-Item -Force -LiteralPath $listFile

  # delete copied files to reclaim working-tree disk after push
  foreach ($it in $chunk) {
    $destPath = Join-Path $WorkDir ($it.RelPath -replace '/','\')
    if (Test-Path $destPath) { Remove-Item -Force $destPath }
  }
  # clean up directories under DestRoot that no longer contain any file
  if (Test-Path (Join-Path $WorkDir $DestRoot)) {
    Get-ChildItem -LiteralPath (Join-Path $WorkDir $DestRoot) -Recurse -Directory -Force |
      Where-Object { (Get-ChildItem $_.FullName -Recurse -File -Force | Measure-Object).Count -eq 0 } |
      Remove-Item -Force -Recurse
  }
}

# Final roll-up across all committed chunks (strings unchanged).
$overallDt = ((Get-Date) - $overallStart).TotalSeconds
# Piping each chunk through ForEach-Object flattens the array-of-arrays so
# Measure-Object sums every item's Size in one pass.
$totalUp   = ($chunks | ForEach-Object { $_ } | Measure-Object -Property Size -Sum).Sum
Write-Host ""
Write-Host "========== Summary =========="
Write-Host ("Chunks committed : {0}" -f $chunks.Count)
Write-Host ("Uploaded bytes   : {0}" -f (HumanBytes $totalUp))
Write-Host ("Skipped files    : {0}" -f $skipped.Count)
if ($overallDt -gt 0) {
  $avgMBps = ($totalUp/1MB) / $overallDt
  Write-Host ("Elapsed time     : {0:n1}s (avg {1:n2} MB/s)" -f $overallDt, $avgMBps)
}
Write-Host ("Branch           : {0}" -f $Branch)