#!/usr/bin/env bash
set -euo pipefail

# Required tools: git-lfs, coreutils, awk
# Already executed: huggingface-cli lfs-enable-largefiles .

# Files larger than 50 GB that must be dropped from the plain-git index
# and replaced by line-split LFS shards.
BIG_FILES=(
  "fix_compute/c4_12B.json"      # 58 GB
  "fix_compute/c4_13B5.json"     # 66 GB
  "fix_compute/c4_23B.json"      # 112 GB
  "fix_compute/c4_23B8.json"     # 116 GB
  "fix_compute/c4_23B8_2ep.json" # 58 GB
  "fix_compute/c4_23B_2ep.json"  # 56 GB
  "fix_compute/c4_40B.json"      # 195 GB
  "fix_compute/c4_40B_2ep.json"  # 97 GB
)

# Target size (GB), used to estimate lines per shard; the effective target
# includes a 10% safety margin so shards land safely under the limit.
TARGET_GB=10
SAFETY=0.9                # effective target = TARGET_GB * SAFETY
OUTDIR="data_line_shards"
SUFFIX=".jsonl"           # add .jsonl suffix to all shards to mark them as JSONL

# Idempotent; safe to run again even though it was already executed once.
huggingface-cli lfs-enable-largefiles .

# LFS tracking (line-split shards + possible .json/.jsonl original files).
git lfs track "*.jsonl"
git lfs track "*.json"
git lfs track "${OUTDIR}/*.part-*${SUFFIX}"
git add .gitattributes

mkdir -p "${OUTDIR}"

bytes_in_gb=$((1024*1024*1024))
# awk handles the fractional SAFETY factor in-process; no need to spawn a
# python3 interpreter just for one multiplication.
target_bytes=$(awk -v gb="$TARGET_GB" -v b="$bytes_in_gb" -v s="$SAFETY" \
  'BEGIN { printf "%d", gb * b * s }')
#######################################
# Estimate how many lines fit into one ~target_bytes shard of a file.
# Globals:   target_bytes (read)
# Arguments: $1 - path to the file to be split
# Outputs:   lines-per-shard count on stdout
#######################################
calc_lines_per_shard() {
  local f="$1"
  local size lines
  # File size in bytes: GNU stat first, BSD/macOS stat as fallback.
  size=$(stat -c%s "$f" 2>/dev/null || stat -f%z "$f")
  # Counting lines of a multi-hundred-GB file takes a while.
  lines=$(wc -l < "$f" || echo 0)
  if (( lines <= 0 )); then
    # No newline detected (or wc failed): fall back to a fixed chunk size
    # instead of dividing by zero; can also be overridden manually.
    echo 10000000
    return
  fi
  # Average bytes per line, floored; clamp to 1 so the division below
  # can never divide by zero on pathologically short lines.
  local bpl=$(( size / lines ))
  (( bpl > 0 )) || bpl=1
  # Lines per shard (floor division against the global byte target).
  local lps=$(( target_bytes / bpl ))
  # Lower bound: avoid emitting an excessive number of tiny shards...
  if (( lps < 100000 )); then
    lps=100000
  fi
  # ...and never ask split for more lines than the file actually has
  # (the original comment promised this cap but the code lacked it; the
  # shard layout is unchanged since split stops at EOF either way).
  if (( lps > lines )); then
    lps=$lines
  fi
  echo "$lps"
}
#######################################
# Stage the removal of one oversized file from the git index; the
# working-tree copy is left untouched. The original line-splitting logic
# is kept below, commented out, for reference.
# Arguments: $1 - path of the oversized file
#######################################
shard_one_jsonl() {
  local f="$1"
  # Guard clause: silently skip paths that are not present on disk.
  if [[ ! -f "$f" ]]; then
    echo "Skip: file not found $f"
    return
  fi
  # local base="$(basename "$f")"
  # local stem="${base%.*}" # Remove extension
  # local prefix="${OUTDIR}/${stem}.part-"
  # echo "==> Estimating lines per shard: $f"
  # local LPS
  # LPS=$(calc_lines_per_shard "$f")
  # echo " Lines per shard ~ ${LPS} lines (target ${TARGET_GB}GB, including safety margin)"
  # echo "==> Splitting by lines: $f → ${prefix}000${SUFFIX}, ${prefix}001${SUFFIX}, ..."
  # # Avoid duplicate splitting
  # if compgen -G "${prefix}*${SUFFIX}" >/dev/null; then
  # echo " Shards already exist, skipping split"
  # else
  # # -d numeric suffix, -a 3 generates 000..999; change to -a 4 if more needed
  # split -d -a 3 -l "$LPS" --additional-suffix="${SUFFIX}" "$f" "$prefix"
  # fi
  echo "==> Replacing original file in staging area with shards (keeping working directory original file)"
  # `|| true`: the file may already be absent from the index; not fatal.
  git rm --cached -f "$f" || true
  # echo "==> Adding shards to LFS"
  # git add "${prefix}"*"${SUFFIX}"
  # echo "Completed: $f"
}
# Stage the removal of every oversized file, then record the index change.
for f in "${BIG_FILES[@]}"; do
  shard_one_jsonl "$f"
done

# Under `set -e`, `git commit` with nothing staged exits non-zero and would
# abort the whole script with a confusing failure; report it instead.
git commit -m "Line-split >50GB JSONL-like files into ~${TARGET_GB}GB LFS shards (${OUTDIR})" \
  || echo "git commit skipped: nothing staged or commit failed" >&2