---
# EMOTIA CI/CD pipeline: lint/type/security checks, backend + model + frontend
# tests, image build & push to GHCR, staged Kubernetes deploys, and docs publish.
name: EMOTIA CI/CD Pipeline

on:
  push:
    branches: [main, develop]
  pull_request:
    branches: [main]
  release:
    types: [published]

env:
  REGISTRY: ghcr.io
  BACKEND_IMAGE: ${{ github.repository }}/backend
  FRONTEND_IMAGE: ${{ github.repository }}/frontend

jobs:
  # Code Quality Checks: linting, formatting, typing, dependency audit.
  quality-check:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3

      - name: Set up Python
        uses: actions/setup-python@v4
        with:
          python-version: '3.9'

      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          pip install -r requirements.txt
          pip install -r requirements-dev.txt

      - name: Run linting
        run: |
          flake8 models/ scripts/ backend/
          black --check models/ scripts/ backend/

      - name: Run type checking
        run: mypy models/ scripts/ backend/

      - name: Run security scan
        run: |
          pip install safety
          safety check

  # Backend Tests: pytest with coverage against a Redis service container.
  backend-test:
    runs-on: ubuntu-latest
    needs: quality-check
    services:
      redis:
        image: redis:7-alpine
        ports:
          # Quoted: colon-separated digits would hit the YAML 1.1
          # sexagesimal-integer trap in strict parsers.
          - "6379:6379"
        options: >-
          --health-cmd "redis-cli ping"
          --health-interval 10s
          --health-timeout 5s
          --health-retries 5
    steps:
      - uses: actions/checkout@v3

      - name: Set up Python
        uses: actions/setup-python@v4
        with:
          python-version: '3.9'

      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          pip install -r requirements.txt
          pip install -r requirements-dev.txt

      - name: Run backend tests
        run: |
          cd backend
          python -m pytest --cov=. --cov-report=xml --cov-report=html
        env:
          REDIS_URL: redis://localhost:6379

      - name: Upload coverage reports
        uses: codecov/codecov-action@v3
        with:
          file: ./backend/coverage.xml
          flags: backend
          name: backend-coverage

  # Model Tests: CPU-only PyTorch, unit tests, a 1-epoch smoke training run,
  # and a validation pass over the test checkpoint.
  model-test:
    runs-on: ubuntu-latest
    needs: quality-check
    steps:
      - uses: actions/checkout@v3

      - name: Set up Python
        uses: actions/setup-python@v4
        with:
          python-version: '3.9'

      - name: Install PyTorch
        run: |
          pip install torch torchvision torchaudio --index-url https://download.pytorch.org/whl/cpu

      - name: Install model dependencies
        run: |
          pip install -r requirements.txt
          pip install transformers datasets

      - name: Run model tests
        run: |
          python -m pytest models/ scripts/ -v --tb=short
          python scripts/train.py --test-run --epochs 1

      - name: Run model validation
        run: |
          python scripts/evaluate.py --model-path models/checkpoints/test_model.pth --test-data

  # Frontend Tests: lint, type-check, unit tests with coverage, then a
  # production build uploaded as an artifact.
  frontend-test:
    runs-on: ubuntu-latest
    needs: quality-check
    steps:
      - uses: actions/checkout@v3

      - name: Set up Node.js
        uses: actions/setup-node@v3
        with:
          node-version: '18'
          cache: 'npm'
          cache-dependency-path: frontend/package-lock.json

      - name: Install dependencies
        run: |
          cd frontend
          npm ci

      - name: Run linting
        run: |
          cd frontend
          npm run lint

      - name: Run type checking
        run: |
          cd frontend
          npm run type-check

      - name: Run tests
        run: |
          cd frontend
          npm test -- --coverage --watchAll=false
        env:
          CI: true

      - name: Build application
        run: |
          cd frontend
          npm run build

      - name: Upload build artifacts
        uses: actions/upload-artifact@v3
        with:
          name: frontend-build
          path: frontend/build/

  # Security Scan: filesystem scan with Trivy, results uploaded as SARIF.
  security-scan:
    runs-on: ubuntu-latest
    needs: [backend-test, frontend-test]
    steps:
      - uses: actions/checkout@v3

      - name: Run Trivy vulnerability scanner
        # NOTE(review): "@master" is a mutable ref — pin to a released tag
        # (or commit SHA) for reproducible, supply-chain-safe builds.
        uses: aquasecurity/trivy-action@master
        with:
          scan-type: 'fs'
          scan-ref: '.'
          format: 'sarif'
          output: 'trivy-results.sarif'

      - name: Upload Trivy scan results
        uses: github/codeql-action/upload-sarif@v2
        if: always()
        with:
          sarif_file: 'trivy-results.sarif'

  # Build and Push Docker Images (main/develop pushes only).
  build-and-push:
    runs-on: ubuntu-latest
    needs: [backend-test, model-test, frontend-test, security-scan]
    if: github.event_name == 'push' && (github.ref == 'refs/heads/main' || github.ref == 'refs/heads/develop')
    steps:
      - name: Checkout code
        uses: actions/checkout@v3

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v2

      - name: Log in to Container Registry
        uses: docker/login-action@v2
        with:
          registry: ${{ env.REGISTRY }}
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}

      - name: Extract metadata for backend
        id: meta-backend
        uses: docker/metadata-action@v4
        with:
          images: ${{ env.REGISTRY }}/${{ env.BACKEND_IMAGE }}
          tags: |
            type=ref,event=branch
            type=ref,event=pr
            type=sha,prefix={{branch}}-
            type=raw,value=latest,enable={{is_default_branch}}

      - name: Build and push backend image
        uses: docker/build-push-action@v4
        with:
          context: .
          file: ./Dockerfile.backend
          push: true
          tags: ${{ steps.meta-backend.outputs.tags }}
          labels: ${{ steps.meta-backend.outputs.labels }}
          cache-from: type=gha
          cache-to: type=gha,mode=max

      - name: Extract metadata for frontend
        id: meta-frontend
        uses: docker/metadata-action@v4
        with:
          images: ${{ env.REGISTRY }}/${{ env.FRONTEND_IMAGE }}
          tags: |
            type=ref,event=branch
            type=ref,event=pr
            type=sha,prefix={{branch}}-
            type=raw,value=latest,enable={{is_default_branch}}

      - name: Build and push frontend image
        uses: docker/build-push-action@v4
        with:
          context: ./frontend
          push: true
          tags: ${{ steps.meta-frontend.outputs.tags }}
          labels: ${{ steps.meta-frontend.outputs.labels }}
          cache-from: type=gha
          cache-to: type=gha,mode=max

  # Deploy to Staging (develop branch only, gated by the 'staging' environment).
  deploy-staging:
    runs-on: ubuntu-latest
    needs: build-and-push
    if: github.ref == 'refs/heads/develop'
    environment: staging
    steps:
      - name: Checkout code
        uses: actions/checkout@v3

      - name: Configure kubectl
        uses: azure/k8s-set-context@v3
        with:
          method: kubeconfig
          kubeconfig: ${{ secrets.KUBE_CONFIG_STAGING }}

      - name: Deploy to staging
        run: |
          kubectl apply -f infrastructure/kubernetes/namespace.yaml
          kubectl apply -f infrastructure/kubernetes/configmaps.yaml
          kubectl apply -f infrastructure/kubernetes/storage.yaml
          kubectl apply -f infrastructure/kubernetes/deployments.yaml
          kubectl apply -f infrastructure/kubernetes/services.yaml
          kubectl set image deployment/emotia-backend emotia-api=${{ env.REGISTRY }}/${{ env.BACKEND_IMAGE }}:develop
          kubectl set image deployment/emotia-frontend emotia-web=${{ env.REGISTRY }}/${{ env.FRONTEND_IMAGE }}:develop
          kubectl rollout status deployment/emotia-backend
          kubectl rollout status deployment/emotia-frontend

  # Deploy to Production (release events only, gated by the 'production'
  # environment); images are tagged with the release tag name.
  deploy-production:
    runs-on: ubuntu-latest
    needs: build-and-push
    if: github.event_name == 'release'
    environment: production
    steps:
      - name: Checkout code
        uses: actions/checkout@v3

      - name: Configure kubectl
        uses: azure/k8s-set-context@v3
        with:
          method: kubeconfig
          kubeconfig: ${{ secrets.KUBE_CONFIG_PRODUCTION }}

      - name: Deploy to production
        run: |
          kubectl apply -f infrastructure/kubernetes/namespace.yaml
          kubectl apply -f infrastructure/kubernetes/configmaps.yaml
          kubectl apply -f infrastructure/kubernetes/storage.yaml
          kubectl apply -f infrastructure/kubernetes/deployments.yaml
          kubectl apply -f infrastructure/kubernetes/services.yaml
          kubectl apply -f infrastructure/kubernetes/scaling.yaml
          kubectl set image deployment/emotia-backend emotia-api=${{ env.REGISTRY }}/${{ env.BACKEND_IMAGE }}:${{ github.event.release.tag_name }}
          kubectl set image deployment/emotia-frontend emotia-web=${{ env.REGISTRY }}/${{ env.FRONTEND_IMAGE }}:${{ github.event.release.tag_name }}
          kubectl rollout status deployment/emotia-backend --timeout=600s
          kubectl rollout status deployment/emotia-frontend --timeout=300s

      - name: Run post-deployment tests
        # NOTE(review): hardcoded example.com URLs — presumably placeholders;
        # confirm the real production hostnames before relying on this gate.
        run: |
          # Wait for services to be ready
          sleep 60
          # Run smoke tests
          curl -f https://api.emotia.example.com/health || exit 1
          curl -f https://emotia.example.com/ || exit 1

  # Performance Testing against the staging API (develop branch only).
  performance-test:
    runs-on: ubuntu-latest
    needs: deploy-staging
    if: github.ref == 'refs/heads/develop'
    steps:
      - name: Checkout code
        uses: actions/checkout@v3

      - name: Run k6 performance tests
        uses: k6io/action@v0.1
        with:
          filename: tests/performance/k6-script.js
        env:
          K6_API_URL: https://api-staging.emotia.example.com

      - name: Generate performance report
        run: |
          # Generate and upload performance metrics
          echo "Performance test completed"

  # Model Performance Regression Test (pull requests only).
  model-regression-test:
    runs-on: ubuntu-latest
    needs: model-test
    if: github.event_name == 'pull_request'
    steps:
      - name: Checkout code
        uses: actions/checkout@v3

      - name: Download baseline model
        # NOTE(review): no job in this workflow uploads a 'baseline-model'
        # artifact — verify it is produced elsewhere, or this step will fail.
        uses: actions/download-artifact@v3
        with:
          name: baseline-model
          path: models/baseline/

      - name: Run regression tests
        run: |
          python scripts/evaluate.py \
            --model-path models/checkpoints/latest_model.pth \
            --baseline-path models/baseline/model.pth \
            --regression-test \
            --accuracy-threshold 0.95 \
            --latency-threshold 1.2

  # Documentation: generate API docs and publish to GitHub Pages (main only).
  docs:
    runs-on: ubuntu-latest
    needs: [backend-test, frontend-test]
    steps:
      - name: Checkout code
        uses: actions/checkout@v3

      - name: Generate API documentation
        run: |
          cd backend
          python -m pydoc -w ./
          # Generate OpenAPI spec
          python scripts/generate_openapi.py

      - name: Deploy documentation
        uses: peaceiris/actions-gh-pages@v3
        if: github.ref == 'refs/heads/main'
        with:
          github_token: ${{ secrets.GITHUB_TOKEN }}
          publish_dir: ./docs