# Provenance (from the Hugging Face web UI — kept as comments so the script parses):
#   author: vivek9chavan
#   commit: "Move folder '00_Data_Collection' from branch_1" (cb35bc5, verified)
# Upload Q:\OE42000_Aria\00_Data_Collection to HF datasets repo in 20GB chunks (no admin, no symlinks)
# - Repo: FraunhoferIPK/IndEgo (dataset)
# - Branch: branch_1
# - Dest folder in repo: 00_Data_Collection/
# - Preserves full source structure
# - Skips any single file > 50,000 MB (decimal)
# Script parameters — all overridable from the command line.
param(
# HTTPS URL of the target Hugging Face dataset repo (cloned with git).
[string]$RepoUrl = "https://huggingface.co/datasets/FraunhoferIPK/IndEgo",
# Local scratch clone of the repo; files are copied here, pushed, then deleted.
[string]$WorkDir = "$env:USERPROFILE\hf_ingest_branch1",
# Root of the source tree to upload; its structure is preserved in the repo.
[string]$SrcRoot = "Q:\OE42000_Aria\00_Data_Collection",
# Repo branch to push to (created on the remote if it does not exist yet).
[string]$Branch = "branch_1",
# Folder inside the repo under which the source tree is placed.
[string]$DestRoot = "00_Data_Collection"
)
# -------- SETTINGS (do not change unless you need to) --------
# Upper bound per commit. PowerShell's 1GB literal is binary (2^30), so this
# is ~20 GiB, slightly above the "20GB" in the header — intentional headroom cap.
[int64]$ChunkMaxBytes = 20 * 1GB # ~20 GiB per commit
# Per-file skip threshold: 50,000 MB *decimal* (Hugging Face's 50 GB hard
# per-file limit). NB: `50000 * 1MB` would use the binary 1MB literal
# (1,048,576 bytes) and yield ~52.4 GB, letting 50-52.4 GB files through
# and failing the push — so the decimal factor is spelled out explicitly.
[int64]$MaxFileBytes = 50000 * 1000 * 1000 # 50,000 MB (decimal) skip threshold
# ------------------------------------------------------------
function HumanBytes([Int64]$n) {
    # Render a byte count as a short human-readable string ("1.50 GB").
    # Steps are binary (1024-based); output keeps two decimal places.
    $suffixes = "B","KB","MB","GB","TB","PB"
    $value = [double]$n
    $pos = 0
    for (; ($value -ge 1024) -and ($pos -lt $suffixes.Length - 1); $pos++) {
        $value = $value / 1024
    }
    return ("{0:N2} {1}" -f $value, $suffixes[$pos])
}
# --- sanity checks: fail fast before touching anything ---
$gitCmd = Get-Command git -ErrorAction SilentlyContinue
if ($null -eq $gitCmd) {
    # No elevated install needed; the user-level Git for Windows build suffices.
    Write-Error "Git not found. Install Git for Windows (user-level) and retry."
    exit 1
}
if (-not (Test-Path $SrcRoot)) {
    Write-Error "Source path not found: $SrcRoot"
    exit 1
}
# --- working directory: create it, clone the repo only on the first run ---
$null = New-Item -ItemType Directory -Force -Path $WorkDir
Set-Location $WorkDir
$gitMeta = Join-Path $WorkDir ".git"
if (Test-Path $gitMeta) {
    # Already cloned; list remotes so a corrupt repo surfaces early.
    git remote -v | Out-Null
} else {
    # `||` pipeline-chain requires PowerShell 7+ (used throughout this script).
    git clone $RepoUrl . || exit 1
}
# Windows MAX_PATH workaround for the deep dataset tree.
git config core.longpaths true | Out-Null
# --- ensure branch exists remotely, then check out a clean copy of it ---
# BUG FIX: when the branch is absent, `git ls-remote` prints nothing and the
# capture is $null; the original test `$null -ne ""` is $true in PowerShell,
# so $exists was ALWAYS true and the create-branch path was unreachable.
# A cast to [bool] is $false for $null/empty and $true for any ref line.
$remoteRef = git ls-remote --heads origin $Branch
$exists = [bool]$remoteRef
if (-not $exists) {
    # Branch missing on the remote: create it locally and publish it.
    git checkout -b $Branch || exit 1
    git push -u origin $Branch || exit 1
} else {
    # Branch present: sync to the remote tip, discarding local drift.
    git fetch origin $Branch || exit 1
    git checkout $Branch || exit 1
    git reset --hard origin/$Branch || exit 1
}
# --- enumerate source files (full structure under $SrcRoot is preserved) ---
$all = Get-ChildItem -LiteralPath $SrcRoot -Recurse -File -Force
if (-not $all) { Write-Error "No files under $SrcRoot"; exit 1 }
# Map each file to its repo-relative path under $DestRoot.
# Hoist the loop invariants: the original re-ran Resolve-Path on $SrcRoot for
# every single file, which is pure overhead on a tree with many files.
$rootLen = (Resolve-Path -LiteralPath $SrcRoot).Path.Length
$destPrefix = $DestRoot.TrimEnd('\','/')
$items = $all | ForEach-Object {
    # Strip the source root, drop leading separators, normalize to '/' for git.
    $rel = (Resolve-Path -LiteralPath $_.FullName).Path.Substring($rootLen).TrimStart('\','/')
    [PSCustomObject]@{
        SrcPath = $_.FullName                                  # absolute local path
        RelPath = ($destPrefix + '/' + $rel).Replace('\','/')  # repo-relative path
        Size    = $_.Length                                    # bytes
    }
} | Sort-Object RelPath
# --- chunking: ≤ 20 GiB per commit, skip > 50,000 MB ---
# First-fit packing over the RelPath-sorted list: files accumulate into the
# current chunk until adding one would exceed $ChunkMaxBytes, then a new
# chunk starts. Sorted order is preserved, so chunks follow folder order.
$skipped = @()   # files over $MaxFileBytes — reported, never uploaded
$chunks = @()    # array of arrays; each inner array is one commit's files
$cur = New-Object System.Collections.Generic.List[object]
[int64]$curSize = 0
[int64]$totalAll = ($items | Measure-Object -Property Size -Sum).Sum
foreach ($it in $items) {
$sz = [int64]$it.Size
# Over the per-file limit: record and move on (checked before chunk logic).
if ($sz -gt $MaxFileBytes) { $skipped += $it; continue }
if ($sz -gt $ChunkMaxBytes) {
# File is allowed but larger than a whole chunk: flush the current chunk
# (if any), then give this file a chunk of its own.
# NB: the leading ',' wraps the array so += appends it as ONE element
# instead of splatting its items into $chunks.
if ($cur.Count -gt 0) { $chunks += ,@($cur.ToArray()); $cur = New-Object System.Collections.Generic.List[object]; $curSize = 0 }
$chunks += ,@($it) # own chunk (single large-but-allowed file)
continue
}
# Normal case: append if it fits, otherwise close this chunk and start a
# fresh one seeded with the current file.
if ($curSize + $sz -le $ChunkMaxBytes) { $cur.Add($it); $curSize += $sz }
else { $chunks += ,@($cur.ToArray()); $cur = New-Object System.Collections.Generic.List[object]; $cur.Add($it); $curSize = $sz }
}
# Flush the trailing partial chunk.
if ($cur.Count -gt 0) { $chunks += ,@($cur.ToArray()) }
# --- print the upload plan before any work starts ---
$fileCount = $items.Count
$chunkCount = $chunks.Count
Write-Host ""
Write-Host "Source : $SrcRoot"
Write-Host "Repo : $RepoUrl"
Write-Host "Branch : $Branch"
Write-Host "Dest in repo : $DestRoot/"
Write-Host "Total files found : $fileCount ($(HumanBytes $totalAll))"
Write-Host "Planned chunks : $chunkCount (≤ $(HumanBytes $ChunkMaxBytes) each)"
if ($skipped.Count -gt 0) {
    # List every file excluded by the per-file size cap.
    Write-Host "Skipping $($skipped.Count) file(s) > 50,000 MB:"
    $skipped | ForEach-Object { Write-Host " SKIP: $($_.RelPath) ($(HumanBytes $_.Size))" }
}
# --- process each chunk: copy -> add -> commit -> push -> delete local copies ---
$overallStart = Get-Date
$idx = 0
foreach ($chunk in $chunks) {
    $idx++
    $chunkBytes = ($chunk | Measure-Object Size -Sum).Sum
    Write-Host ""
    Write-Host "==> Chunk $idx / $($chunks.Count): $($chunk.Count) files, $(HumanBytes $chunkBytes)"
    # Copy this chunk's files into the repo working tree, preserving subfolders.
    foreach ($it in $chunk) {
        $destPath = Join-Path $WorkDir ($it.RelPath -replace '/','\')
        $destDir = Split-Path $destPath -Parent
        New-Item -ItemType Directory -Force -Path $destDir | Out-Null
        Copy-Item -LiteralPath $it.SrcPath -Destination $destPath -Force
    }
    # Stage via a pathspec file to avoid command-line length limits.
    # BUG FIX: Set-Content -Encoding UTF8 emits a BOM under Windows
    # PowerShell 5.1, which corrupts the first pathspec entry for git.
    # File.WriteAllLines writes BOM-less UTF-8 on every PowerShell version.
    $listFile = Join-Path $WorkDir "paths_chunk_$idx.txt"
    [System.IO.File]::WriteAllLines($listFile, [string[]]($chunk | ForEach-Object { $_.RelPath }))
    $t0 = Get-Date
    git add --pathspec-from-file="$listFile"
    if ($LASTEXITCODE -ne 0) { Write-Error "git add failed"; exit 1 }
    git commit -m "[branch_1][chunk $idx/$($chunks.Count)] Add $($chunk.Count) files ($(HumanBytes $chunkBytes))"
    if ($LASTEXITCODE -ne 0) {
        # git commit exits non-zero when the tree is unchanged (e.g. rerun).
        Write-Host "Nothing to commit (maybe already added). Skipping push."
    } else {
        git push origin $Branch
        if ($LASTEXITCODE -ne 0) { Write-Error "git push failed"; exit 1 }
    }
    $dt = ((Get-Date) - $t0).TotalSeconds
    if ($dt -gt 0) {
        $mbps = (($chunkBytes/1MB) / $dt)
        Write-Host (" ✔ Done chunk {0}: {1} in {2:n1}s (~{3:n2} MB/s)" -f $idx,(HumanBytes $chunkBytes),$dt,$mbps)
    }
    Remove-Item -Force -LiteralPath $listFile
    # Delete the local copies to reclaim disk space after the push.
    # BUG FIX: data-derived paths go through -LiteralPath everywhere below;
    # the default -Path parameter treats '[' and ']' in filenames as
    # wildcard ranges and would silently miss (or mis-match) such files.
    foreach ($it in $chunk) {
        $destPath = Join-Path $WorkDir ($it.RelPath -replace '/','\')
        if (Test-Path -LiteralPath $destPath) { Remove-Item -Force -LiteralPath $destPath }
    }
    # Prune directories under DestRoot that no longer contain any files.
    $destTree = Join-Path $WorkDir $DestRoot
    if (Test-Path -LiteralPath $destTree) {
        Get-ChildItem -LiteralPath $destTree -Recurse -Directory -Force |
            Where-Object { (Get-ChildItem -LiteralPath $_.FullName -Recurse -File -Force | Measure-Object).Count -eq 0 } |
            Remove-Item -Force -Recurse
    }
}
# --- final report ---
$elapsedSec = ((Get-Date) - $overallStart).TotalSeconds
# Re-total the planned chunks; skipped files are excluded by construction.
$uploadedBytes = ($chunks | ForEach-Object { ($_ | Measure-Object Size -Sum).Sum } | Measure-Object -Sum).Sum
Write-Host ""
Write-Host "========== Summary =========="
Write-Host ("Chunks committed : {0}" -f $chunks.Count)
Write-Host ("Uploaded bytes : {0}" -f (HumanBytes $uploadedBytes))
Write-Host ("Skipped files : {0}" -f $skipped.Count)
if ($elapsedSec -gt 0) {
    Write-Host ("Elapsed time : {0:n1}s (avg {1:n2} MB/s)" -f $elapsedSec, (($uploadedBytes/1MB)/$elapsedSec))
}
Write-Host ("Branch : {0}" -f $Branch)