File size: 3,627 Bytes
07476a1
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
#!/bin/bash
# Deploy Gapura AI to Hugging Face Spaces.
#
# Usage: ./deploy-hf.sh [username] [space] [skip_models]
#   $1  Hugging Face username                       (default: ridzki-nrzngr)
#   $2  Space name                                  (default: gapura-ai)
#   $3  "true" to omit model artifacts from upload  (default: false)
#
# Requires the 'hf' CLI (installed with huggingface-hub) and a logged-in
# Hugging Face session.

# -e: abort on error; -u: error on unset variables; pipefail: a pipeline
# fails if any stage fails.
set -euo pipefail

HF_USERNAME=${1:-ridzki-nrzngr}
SPACE_NAME=${2:-gapura-ai}
SKIP_MODELS=${3:-false}

echo "=== Deploying to Hugging Face Spaces ==="
echo "Username: $HF_USERNAME"
echo "Space: $SPACE_NAME"
echo "Skip models: $SKIP_MODELS"
echo ""

# Ensure the 'hf' CLI is installed (it ships with the huggingface-hub package).
if ! command -v hf &> /dev/null; then
    echo "Installing huggingface-hub..."
    pip install huggingface-hub
    # Re-check: pip's script directory may not be on PATH in this shell.
    if ! command -v hf &> /dev/null; then
        echo "ERROR: 'hf' command still not found after install; check your PATH." >&2
        exit 1
    fi
fi

# Check if logged in
if ! hf auth whoami &> /dev/null; then
    echo ""
    echo "Please login to Hugging Face:"
    echo "  hf auth login"
    echo ""
    echo "You need a HF token from: https://huggingface.co/settings/tokens"
    exit 1
fi

# Create a temp staging directory and remove it on ANY exit path (success,
# error, or interrupt) — previously it was only cleaned up after a fully
# successful run, leaking the directory on every failure under 'set -e'.
DEPLOY_DIR=$(mktemp -d)
cleanup() { rm -rf -- "${DEPLOY_DIR:?}"; }
trap cleanup EXIT
echo "Creating deployment package in $DEPLOY_DIR..."

# Copy necessary files (expansions quoted: paths may contain spaces).
cp -r api "$DEPLOY_DIR/"
cp -r data "$DEPLOY_DIR/"
cp -r scripts "$DEPLOY_DIR/"
cp -r training "$DEPLOY_DIR/"
# Use HF-specific requirements to include huggingface-hub
cp hf-space/requirements.txt "$DEPLOY_DIR/requirements.txt"
mkdir -p "$DEPLOY_DIR/scripts"
cp hf-space/scripts/start.sh "$DEPLOY_DIR/scripts/start.sh"
# Stamp the build time (UTC, ISO-8601) for traceability.
date -u +"%Y-%m-%dT%H:%M:%SZ" > "$DEPLOY_DIR/BUILD_INFO.txt"

if [ "$SKIP_MODELS" != "true" ]; then
    mkdir -p "$DEPLOY_DIR/models/regression"
    mkdir -p "$DEPLOY_DIR/models/nlp/severity_classifier"
    if [ -f "models/regression/resolution_predictor_latest.pkl" ]; then
        cp models/regression/resolution_predictor_latest.pkl "$DEPLOY_DIR/models/regression/"
        # Auxiliary artifacts are optional; ignore them if absent.
        cp models/regression/resolution_predictor_latest_metrics.json "$DEPLOY_DIR/models/regression/" 2>/dev/null || true
        cp models/regression/shap_explainer.pkl "$DEPLOY_DIR/models/regression/" 2>/dev/null || true
        cp models/regression/anomaly_stats.pkl "$DEPLOY_DIR/models/regression/" 2>/dev/null || true
        echo "  ✓ Regression model"
    fi
    if [ -d "models/nlp/severity_classifier" ]; then
        cp models/nlp/severity_classifier/*.pkl "$DEPLOY_DIR/models/nlp/severity_classifier/"
        cp models/nlp/severity_classifier/config.json "$DEPLOY_DIR/models/nlp/severity_classifier/" 2>/dev/null || true
        echo "  ✓ NLP model (TF-IDF + RandomForest)"
    fi
else
    echo "  ⚠️ Skipping model files to reduce upload size"
    mkdir -p "$DEPLOY_DIR/models"
    # Keep the (otherwise empty) models directory present in the repo.
    echo "placeholder" > "$DEPLOY_DIR/models/.keep"
fi

# Copy Dockerfile and the Space README (carries the Space metadata header).
cp hf-space/Dockerfile "$DEPLOY_DIR/"
cp hf-space/README.md "$DEPLOY_DIR/"

# Create .env.example (quoted heredoc delimiter: contents are literal,
# no variable expansion).
cat > "$DEPLOY_DIR/.env.example" << 'EOF'
# Google Sheets Configuration
GOOGLE_SERVICE_ACCOUNT_EMAIL=your-service@project.iam.gserviceaccount.com
GOOGLE_PRIVATE_KEY="-----BEGIN PRIVATE KEY-----\n...\n-----END PRIVATE KEY-----\n"
GOOGLE_SHEET_ID=your-google-sheet-id

# Optional: For faster model downloads
# HF_TOKEN=your-huggingface-token
EOF

echo "  ✓ Configuration files"

# Show what will be uploaded
echo ""
echo "Files to upload:"
find "$DEPLOY_DIR" -type f | head -20
echo ""
echo "Total size: $(du -sh "$DEPLOY_DIR" | cut -f1)"

# Upload to Hugging Face
echo ""
echo "Uploading to Hugging Face Spaces..."
cd "$DEPLOY_DIR"

# Create the Space if it does not exist yet; model uploads can be large,
# so give the hub client a generous HTTP timeout.
export HF_HUB_TIMEOUT=600
hf repo create "$SPACE_NAME" --repo-type space --space-sdk docker --exist-ok || true

# Upload the staged tree ('.') to the Space repo root ('.').
hf upload "$HF_USERNAME/$SPACE_NAME" . . --repo-type space

echo ""
echo "✓ Deployment complete!"
echo ""
echo "Your Space: https://huggingface.co/spaces/$HF_USERNAME/$SPACE_NAME"
echo ""
echo "IMPORTANT: Go to your Space Settings > Repository secrets and add:"
echo "  - GOOGLE_SERVICE_ACCOUNT_EMAIL"
echo "  - GOOGLE_PRIVATE_KEY"
echo "  - GOOGLE_SHEET_ID"
echo ""

# Temp directory removal is handled by the EXIT trap above.