#!/bin/bash
#
# deploy_hf.sh — Deploy Gapura AI to Hugging Face Spaces.
#
# Provenance: gapura-oneclick/scripts/deploy_hf.sh,
# Muhammad Ridzki Nugraha, "Deploy API and config (Batch 3)", commit 07476a1.
# (These lines previously sat ABOVE the shebang, which made the shebang
# ineffective; the shebang must be the first line of the file.)
#
# Usage: deploy_hf.sh [HF_USERNAME] [SPACE_NAME] [SKIP_MODELS]
#   HF_USERNAME  Hugging Face account name     (default: ridzki-nrzngr)
#   SPACE_NAME   target Space repository name  (default: gapura-ai)
#   SKIP_MODELS  "true" to omit model files    (default: false)

# Abort on any command failure and on use of unset variables.
# NOTE: pipefail is deliberately NOT enabled — the `find | head` preview
# below would trip it via SIGPIPE when head exits early.
set -eu

HF_USERNAME=${1:-ridzki-nrzngr}
SPACE_NAME=${2:-gapura-ai}
SKIP_MODELS=${3:-false}

echo "=== Deploying to Hugging Face Spaces ==="
echo "Username: $HF_USERNAME"
echo "Space: $SPACE_NAME"
echo "Skip models: $SKIP_MODELS"
echo ""
# Ensure the `hf` CLI (shipped with the huggingface-hub package) is available.
if ! command -v hf &> /dev/null; then
  echo "Installing huggingface-hub..."
  pip install huggingface-hub
  # Re-check: pip may install console scripts into a bin dir that is not
  # on PATH, in which case continuing would fail confusingly later.
  if ! command -v hf &> /dev/null; then
    echo "Error: 'hf' still not found after installing huggingface-hub; check your PATH." >&2
    exit 1
  fi
fi

# Ensure the user is authenticated before doing any real work.
# Diagnostics go to stderr so they are not mistaken for normal output.
if ! hf auth whoami &> /dev/null; then
  {
    echo ""
    echo "Please login to Hugging Face:"
    echo " hf auth login"
    echo ""
    echo "You need a HF token from: https://huggingface.co/settings/tokens"
  } >&2
  exit 1
fi
# Stage everything in a throwaway directory so a failed run leaves no
# partial state in the working tree.
DEPLOY_DIR=$(mktemp -d)
# Remove the staging dir on ANY exit (success, error, Ctrl-C). Without
# this trap, an early failure under `set -e` would leak the temp dir,
# since the final `rm -rf` at the bottom is only reached on success.
# `${DEPLOY_DIR:?}` aborts rather than expand to an empty path.
trap 'rm -rf -- "${DEPLOY_DIR:?}"' EXIT
echo "Creating deployment package in $DEPLOY_DIR..."

# Copy the application source trees (quoted: SC2086).
cp -r api "$DEPLOY_DIR/"
cp -r data "$DEPLOY_DIR/"
cp -r scripts "$DEPLOY_DIR/"
cp -r training "$DEPLOY_DIR/"

# Use HF-specific requirements to include huggingface-hub.
cp hf-space/requirements.txt "$DEPLOY_DIR/requirements.txt"
# scripts/ already exists from the cp -r above; mkdir -p is a harmless no-op.
mkdir -p "$DEPLOY_DIR/scripts"
cp hf-space/scripts/start.sh "$DEPLOY_DIR/scripts/start.sh"

# Record the build timestamp (UTC, ISO-8601) for traceability.
date -u +"%Y-%m-%dT%H:%M:%SZ" > "$DEPLOY_DIR/BUILD_INFO.txt"
if [ "$SKIP_MODELS" != "true" ]; then
  mkdir -p "$DEPLOY_DIR/models/regression"
  mkdir -p "$DEPLOY_DIR/models/nlp/severity_classifier"

  # Regression model: the predictor pickle is the required artifact;
  # metrics, SHAP explainer and anomaly stats are best-effort extras
  # (hence the intentional `2>/dev/null || true`).
  if [ -f "models/regression/resolution_predictor_latest.pkl" ]; then
    cp models/regression/resolution_predictor_latest.pkl "$DEPLOY_DIR/models/regression/"
    cp models/regression/resolution_predictor_latest_metrics.json "$DEPLOY_DIR/models/regression/" 2>/dev/null || true
    cp models/regression/shap_explainer.pkl "$DEPLOY_DIR/models/regression/" 2>/dev/null || true
    cp models/regression/anomaly_stats.pkl "$DEPLOY_DIR/models/regression/" 2>/dev/null || true
    echo " ✓ Regression model"
  fi

  # NLP model (TF-IDF + RandomForest pickles).
  # NOTE(review): under `set -e` this cp fails hard if the directory exists
  # but contains no *.pkl — presumably intentional (a half-trained model
  # dir should stop the deploy); confirm before relaxing.
  if [ -d "models/nlp/severity_classifier" ]; then
    cp models/nlp/severity_classifier/*.pkl "$DEPLOY_DIR/models/nlp/severity_classifier/"
    cp models/nlp/severity_classifier/config.json "$DEPLOY_DIR/models/nlp/severity_classifier/" 2>/dev/null || true
    echo " ✓ NLP model (TF-IDF + RandomForest)"
  fi
else
  echo " ⚠️ Skipping model files to reduce upload size"
  # Keep an (almost) empty models/ dir so the app's expected path layout
  # still exists in the uploaded Space.
  mkdir -p "$DEPLOY_DIR/models"
  echo "placeholder" > "$DEPLOY_DIR/models/.keep"
fi
# Copy the container build files for the Docker-SDK Space.
cp hf-space/Dockerfile "$DEPLOY_DIR/"
cp hf-space/README.md "$DEPLOY_DIR/"

# Create .env.example. The quoted 'EOF' delimiter keeps the heredoc
# literal so the $-style placeholders are not expanded.
cat > "$DEPLOY_DIR/.env.example" << 'EOF'
# Google Sheets Configuration
GOOGLE_SERVICE_ACCOUNT_EMAIL=your-service@project.iam.gserviceaccount.com
GOOGLE_PRIVATE_KEY="-----BEGIN PRIVATE KEY-----\n...\n-----END PRIVATE KEY-----\n"
GOOGLE_SHEET_ID=your-google-sheet-id
# Optional: For faster model downloads
# HF_TOKEN=your-huggingface-token
EOF
echo " ✓ Configuration files"
# Preview what will be uploaded (first 20 files plus total size).
echo ""
echo "Files to upload:"
find "$DEPLOY_DIR" -type f | head -20
echo ""
echo "Total size: $(du -sh "$DEPLOY_DIR" | cut -f1)"

# Upload to Hugging Face.
echo ""
echo "Uploading to Hugging Face Spaces..."
cd "$DEPLOY_DIR"

# Generous hub timeout for large model uploads.
export HF_HUB_TIMEOUT=600

# Create the Space if it does not exist yet. `--exist-ok` already makes
# re-creation a no-op; the `|| true` additionally swallows transient
# errors — intentional, since a genuinely broken setup will still fail
# loudly at the upload step below.
hf repo create "$SPACE_NAME" --repo-type space --space-sdk docker --exist-ok || true

# Upload the entire staging directory (cwd) to the Space repo root.
hf upload "$HF_USERNAME/$SPACE_NAME" . . --repo-type space
echo ""
echo "✓ Deployment complete!"
echo ""
echo "Your Space: https://huggingface.co/spaces/$HF_USERNAME/$SPACE_NAME"
echo ""
echo "IMPORTANT: Go to your Space Settings > Repository secrets and add:"
echo " - GOOGLE_SERVICE_ACCOUNT_EMAIL"
echo " - GOOGLE_PRIVATE_KEY"
echo " - GOOGLE_SHEET_ID"
echo ""

# Cleanup the staging directory. `${DEPLOY_DIR:?}` aborts instead of
# expanding to an empty path (which would make this `rm -rf /` adjacent),
# and `--` guards against a path that starts with a dash.
# NOTE(review): the cwd is still inside $DEPLOY_DIR at this point (the
# `cd` above is never undone) — works on Linux, but confirm on macOS.
rm -rf -- "${DEPLOY_DIR:?}"