File size: 3,153 Bytes
da2430e
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
#!/bin/bash
# Automated deployment script for Hugging Face Spaces
# This script checks precompute status and handles deployment

# Strict mode: exit on error (-e), error on unset variables (-u), and
# fail a pipeline when any stage fails (pipefail). Upgraded from plain
# 'set -e', which misses the latter two failure classes.
set -euo pipefail

# Resolve the directory containing this script and run from there, so
# relative paths (precomputed_data/, backend/, …) work regardless of
# the caller's working directory.
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
cd "$SCRIPT_DIR"

echo "╔══════════════════════════════════════════════════════════╗"
echo "β•‘     HF Spaces Auto-Deployment Script                    β•‘"
echo "β•šβ•β•β•β•β•β•β•β•β•β•β•β•β•β•β•β•β•β•β•β•β•β•β•β•β•β•β•β•β•β•β•β•β•β•β•β•β•β•β•β•β•β•β•β•β•β•β•β•β•β•β•β•β•β•β•β•β•β•β•"
echo ""
# Check if precompute is complete
# Check whether the precompute stage has produced both parquet files.
# Outputs: a status line to stdout.
# Returns: 0 when data is ready, 1 otherwise.
check_precompute() {
    local stem
    for stem in models_v1 chunk_index_v1; do
        if [ ! -f "precomputed_data/${stem}.parquet" ]; then
            echo "⏳ Precomputed data not ready yet"
            return 1
        fi
    done
    echo "βœ… Precomputed data files found"
    return 0
}

# Upload data to HF Dataset
# Upload the chunked precomputed data to a Hugging Face Dataset repo.
# Bootstraps (or reuses) the backend virtualenv so huggingface-hub is
# available, then runs the upload script from the project root.
# Globals: none written. Outputs: progress messages to stdout.
upload_data() {
    echo ""
    echo "πŸ“€ Uploading chunked data to Hugging Face Dataset..."
    echo ""
    
    cd backend
    # Bug fix: the previous one-liner
    #   source venv/bin/activate 2>/dev/null || python3 -m venv venv && source venv/bin/activate
    # parses as '(a || b) && c', so the trailing 'source' ran even when the
    # first activation succeeded, and a failed fallback chain killed the
    # script under 'set -e'. Create the venv only when missing, then
    # activate exactly once.
    if [ ! -d venv ]; then
        python3 -m venv venv
    fi
    # shellcheck disable=SC1091 — generated at runtime, not lintable
    source venv/bin/activate
    pip install -q huggingface-hub tqdm 2>/dev/null
    
    cd ..
    python upload_to_hf_dataset.py --dataset-id modelbiome/hf-viz-precomputed --version v1
    
    echo ""
    echo "βœ… Data upload complete!"
}

# Prepare Space files
# Stage every file the HF Space needs into a deployment directory.
# Arguments: $1 - target directory (default: hf-viz-space)
# Globals:   SPACE_DIR (written; holds the resolved target directory)
# Outputs:   progress and follow-up git instructions to stdout.
prepare_space() {
    SPACE_DIR="${1:-hf-viz-space}"
    
    echo ""
    echo "πŸ“¦ Preparing files for HF Space..."
    echo ""
    
    mkdir -p "$SPACE_DIR"
    
    # Stage the standalone app files.
    local item
    for item in app.py requirements.txt Dockerfile; do
        cp "$item" "$SPACE_DIR/"
    done
    # The Space-specific README becomes the Space's README.md.
    cp README_SPACE.md "$SPACE_DIR/README.md"
    
    # Stage the backend and frontend trees wholesale.
    cp -r backend "$SPACE_DIR/"
    cp -r frontend "$SPACE_DIR/"
    
    # Placeholder so the (initially empty) data directory survives git.
    mkdir -p "$SPACE_DIR/precomputed_data"
    touch "$SPACE_DIR/precomputed_data/.gitkeep"
    
    echo "βœ… Files prepared in: $SPACE_DIR"
    echo ""
    echo "Next steps:"
    echo "  1. cd $SPACE_DIR"
    echo "  2. git init"
    echo "  3. git remote add origin https://huggingface.co/spaces/YOUR_USERNAME/YOUR_SPACE_NAME"
    echo "  4. git add ."
    echo "  5. git commit -m 'Deploy HF Model Ecosystem Visualizer'"
    echo "  6. git push"
}

# Main execution
# Top-level flow: when precompute is finished, interactively offer the
# data upload and the Space staging steps; otherwise print progress hints.
# Arguments: forwarded CLI args (currently unused).
main() {
    if check_precompute; then
        echo ""
        read -p "Precompute complete! Upload data to HF Dataset? (y/n) " -n 1 -r
        echo ""
        if [[ $REPLY =~ ^[Yy]$ ]]; then
            upload_data
        fi
        
        echo ""
        read -p "Prepare files for HF Space deployment? (y/n) " -n 1 -r
        echo ""
        if [[ $REPLY =~ ^[Yy]$ ]]; then
            prepare_space
        fi
    else
        echo ""
        echo "⏳ Waiting for precompute to complete..."
        echo "   Check progress: tail -f precompute_full.log"
        echo "   Or run this script again when precompute is done"
        echo ""
        echo "Current status:"
        # pgrep replaces the 'ps aux | grep [p]attern' idiom (ShellCheck
        # SC2009); an explicit if avoids the 'A && B || C' pitfall where
        # C also runs if B fails.
        if pgrep -f "precompute_data.py" >/dev/null; then
            echo "   Precompute is running"
        else
            echo "   Precompute not running"
        fi
    fi
}

# Entry point: forward all CLI arguments to main().
main "$@"