# .github/workflows/generate_cached_results.yml
# Provenance (from the original file listing): author isaacchung,
# commit "Add Git LFS support for large cache files in GitHub Actions
# workflow (#379)", revision aa79a85 (unverified signature).
# Regenerates __cached_results.json.gz and publishes it to the `cached-data`
# branch, storing the (potentially large) archive via Git LFS.
name: Generate Cached Results

on:
  push:
    branches: [main]
  # Allow manual trigger for testing
  workflow_dispatch:

# The job commits and pushes to the cached-data branch with the default
# GITHUB_TOKEN, so it needs explicit write access to repository contents
# (the default token permission may be read-only, in which case `git push`
# below would be rejected).
permissions:
  contents: write

jobs:
  generate-cache:
    runs-on: ubuntu-latest
    steps:
      # Reclaim disk space the hosted runner spends on toolchains this job
      # never uses; the generated cache file can be large.
      - name: Free disk space
        run: |
          sudo rm -rf /usr/share/dotnet
          sudo rm -rf /opt/ghc
          sudo rm -rf /usr/local/share/boost
          docker system prune -af

      - name: Checkout repository
        uses: actions/checkout@v4
        with:
          # Full history so the existing cached-data branch can be checked out.
          fetch-depth: 0
          token: ${{ secrets.GITHUB_TOKEN }}
          lfs: true

      - name: Setup Python
        uses: actions/setup-python@v5
        with:
          python-version: '3.10'
          cache: 'pip'

      - name: Install dependencies
        run: |
          pip install git+https://github.com/embeddings-benchmark/mteb.git

      - name: Generate cached results
        run: |
          python scripts/generate_cached_results.py
        env:
          PYTHONUNBUFFERED: "1"

      - name: Install Git LFS
        run: |
          sudo apt-get update
          sudo apt-get install -y git-lfs
          git lfs install
          # Explicitly pull LFS files to work around checkout action issues
          git lfs pull || true

      - name: Configure Git
        run: |
          git config --global user.name "github-actions[bot]"
          git config --global user.email "github-actions[bot]@users.noreply.github.com"

      - name: Update cached-data branch
        run: |
          # Check if __cached_results.json.gz was created
          if [ ! -f "__cached_results.json.gz" ]; then
            echo "❌ Cached results file not found"
            exit 1
          fi

          # Get file size for logging (`stat -f%z` is the BSD form and fails on
          # Linux, so the GNU `stat -c%s` fallback is what actually runs here).
          FILE_SIZE=$(stat -f%z __cached_results.json.gz 2>/dev/null || stat -c%s __cached_results.json.gz)
          echo "πŸ“¦ Generated cache file: $(echo "scale=1; $FILE_SIZE/1024/1024" | bc -l)MB"

          # Temporarily move the cache file to avoid checkout conflicts
          if [ -f "__cached_results.json.gz" ]; then
            mv __cached_results.json.gz __cached_results.json.gz.tmp
          fi

          # Check if cached-data branch exists
          if git show-ref --verify --quiet refs/remotes/origin/cached-data; then
            echo "πŸ“‹ Switching to existing cached-data branch"
            git checkout cached-data
            git pull origin cached-data
            # Ensure LFS files are available after branch checkout
            git lfs pull || true
          else
            echo "πŸ†• Creating new cached-data branch"
            git checkout --orphan cached-data
            # Remove all files from staging area when creating orphan branch
            git rm -rf . 2>/dev/null || true
          fi

          # Restore the cache file
          if [ -f "__cached_results.json.gz.tmp" ]; then
            mv __cached_results.json.gz.tmp __cached_results.json.gz
          fi

          # Setup Git LFS tracking for the cache file (if not already tracked)
          if ! git lfs ls-files | grep -q "__cached_results.json.gz"; then
            git lfs track "__cached_results.json.gz"
          fi

          # Ensure we only have the files we want
          # Remove all tracked files except README.md and .gitattributes
          if [ -f "README.md" ]; then
            git ls-files | grep -v "README.md" | grep -v ".gitattributes" | xargs -r git rm 2>/dev/null || true
          else
            git ls-files | grep -v ".gitattributes" | xargs -r git rm 2>/dev/null || true
          fi

          # Add the cached results file (will be tracked by LFS)
          git add __cached_results.json.gz

          # Preserve README.md if it exists in the working directory
          if [ -f "README.md" ]; then
            git add README.md
          fi

          # Add .gitattributes file (required for LFS tracking)
          if [ -f ".gitattributes" ]; then
            git add .gitattributes
          fi

          # Check if there are changes to commit
          if git diff --staged --quiet; then
            echo "βœ… No changes in cached results, skipping commit"
          else
            # Verify we're not committing too many files (safety check)
            STAGED_FILES=$(git diff --staged --name-only | wc -l)
            if [ "$STAGED_FILES" -gt 10 ]; then
              echo "❌ ERROR: Too many files staged ($STAGED_FILES). Expected only 1-3 files (__cached_results.json.gz, README.md, .gitattributes)."
              echo "Staged files:"
              git diff --staged --name-only
              exit 1
            fi

            # Commit with timestamp and file size
            TIMESTAMP=$(date -u '+%Y-%m-%d %H:%M:%S UTC')
            COMMIT_MSG="Update cached results - $TIMESTAMP ($(echo "scale=1; $FILE_SIZE/1024/1024" | bc -l)MB)"
            git commit -m "$COMMIT_MSG"

            # Push to remote (LFS will handle the large file automatically)
            git push origin cached-data
            echo "βœ… Successfully updated cached-data branch"
          fi

      - name: Report status
        if: always()
        run: |
          if [ -f "__cached_results.json.gz" ]; then
            FILE_SIZE=$(stat -f%z __cached_results.json.gz 2>/dev/null || stat -c%s __cached_results.json.gz)
            echo "βœ… Workflow completed. Cache file size: $(echo "scale=1; $FILE_SIZE/1024/1024" | bc -l)MB"
          else
            echo "❌ Workflow failed - no cache file generated"
          fi