# mofh-api-proxy / github_deploy.py
# Uploaded by mrsavage1 (commit ece2064, verified)
# GitHub Deployment Module for Celestine Hosting
# Handles GitHub App integration, workflow injection, and deployment tracking
import requests
import base64
import json
import time
import jwt
from typing import Dict, List, Optional, Tuple
from datetime import datetime, timedelta
from nacl import encoding, public
import logging
logger = logging.getLogger(__name__)
class GitHubDeployment:
    """
    Manages GitHub deployments for Celestine Hosting.

    Responsibilities:
      - GitHub App authentication (JWT + installation tokens)
      - Repository access via the GitHub REST API
      - Workflow file injection
      - Deployment status tracking
    """

    def __init__(self, app_id: str, client_id: str, client_secret: str, private_key: str):
        """Store the GitHub App credentials and the REST API base URL."""
        self.app_id = app_id
        self.client_id = client_id
        self.client_secret = client_secret
        self.private_key = private_key
        # Base URL for all REST calls made by this class.
        self.github_api = "https://api.github.com"
def generate_jwt(self) -> str:
    """Build a short-lived RS256 JWT for GitHub App authentication.

    The token is backdated 60 seconds to tolerate clock skew and
    expires after 10 minutes.
    """
    issued_at = int(time.time())
    claims = {
        'iat': issued_at - 60,   # Issued 60 seconds in the past
        'exp': issued_at + 600,  # Expires in 10 minutes
        'iss': self.app_id,
    }
    try:
        token = jwt.encode(claims, self.private_key, algorithm='RS256')
        logger.debug(f"Generated JWT for app_id: {self.app_id}")
        return token
    except Exception as e:
        logger.error(f"Failed to generate JWT: {e}")
        raise
def get_installation_access_token(self, installation_id: str) -> str:
    """Exchange the app JWT for an installation access token.

    Raises on any non-201 response from the GitHub API.
    """
    auth_headers = {
        'Authorization': f'Bearer {self.generate_jwt()}',
        'Accept': 'application/vnd.github.v3+json',
    }
    url = f'{self.github_api}/app/installations/{installation_id}/access_tokens'
    response = requests.post(url, headers=auth_headers, timeout=30)
    # GitHub answers 201 Created on success.
    if response.status_code != 201:
        raise Exception(f"Failed to get access token: {response.text}")
    return response.json()['token']
def get_repo_installation_id(self, owner: str, repo: str) -> str:
    """Resolve the GitHub App installation ID that covers ``owner/repo``.

    Tries the repo-specific endpoint first, then falls back to scanning
    all installations of the app. Raises when no installation matches.
    """
    headers = {
        'Authorization': f'Bearer {self.generate_jwt()}',
        'Accept': 'application/vnd.github.v3+json',
    }
    # Preferred: ask for the installation of this specific repository.
    response = requests.get(
        f'{self.github_api}/repos/{owner}/{repo}/installation',
        headers=headers,
        timeout=30,
    )
    if response.status_code == 200:
        return str(response.json()['id'])
    # Fallback: enumerate every installation and match on the owner login.
    logger.warning(f"Method 1 failed ({response.status_code}), trying method 2...")
    response = requests.get(
        f'{self.github_api}/app/installations',
        headers=headers,
        timeout=30,
    )
    if response.status_code != 200:
        raise Exception(f"Failed to get installations: {response.text}")
    installations = response.json()
    if len(installations) == 1:
        # A single installation is unambiguous — use it directly.
        logger.info(f"Found single installation: {installations[0]['id']}")
        return str(installations[0]['id'])
    for candidate in installations:
        if candidate['account']['login'] == owner:
            logger.info(f"Found installation for {owner}: {candidate['id']}")
            return str(candidate['id'])
    raise Exception(f"No installation found for {owner}/{repo}")
def exchange_code_for_token(self, code: str) -> Dict:
    """Trade an OAuth authorization code for a user access token.

    Returns GitHub's JSON response; raises on a non-200 status.
    """
    form = {
        'client_id': self.client_id,
        'client_secret': self.client_secret,
        'code': code,
    }
    response = requests.post(
        'https://github.com/login/oauth/access_token',
        headers={'Accept': 'application/json'},
        data=form,
        timeout=30,
    )
    if response.status_code != 200:
        raise Exception(f"Failed to exchange code: {response.text}")
    return response.json()
def get_user_repos(self, access_token: str) -> List[Dict]:
    """List the authenticated user's repositories, newest-updated first.

    Fetches a single page of up to 100 repos; raises on API failure.
    """
    response = requests.get(
        f'{self.github_api}/user/repos',
        headers={
            'Authorization': f'token {access_token}',
            'Accept': 'application/vnd.github.v3+json',
        },
        params={'per_page': 100, 'sort': 'updated'},
        timeout=30,
    )
    if response.status_code != 200:
        raise Exception(f"Failed to get repositories: {response.text}")
    return response.json()
def get_repo_public_key(self, owner: str, repo: str, access_token: str) -> Tuple[str, str]:
    """Fetch the repo's Actions public key used for secret encryption.

    Returns a ``(key, key_id)`` pair; raises on API failure.
    """
    url = f'{self.github_api}/repos/{owner}/{repo}/actions/secrets/public-key'
    response = requests.get(
        url,
        headers={
            'Authorization': f'token {access_token}',
            'Accept': 'application/vnd.github.v3+json',
        },
        timeout=30,
    )
    if response.status_code != 200:
        raise Exception(f"Failed to get public key: {response.text}")
    payload = response.json()
    return payload['key'], payload['key_id']
def encrypt_secret(self, public_key: str, secret_value: str) -> str:
    """Seal ``secret_value`` with the repo's base64 public key.

    Uses a libsodium sealed box as required by the GitHub secrets API,
    returning the ciphertext base64-encoded.
    """
    recipient = public.PublicKey(public_key.encode("utf-8"), encoding.Base64Encoder)
    ciphertext = public.SealedBox(recipient).encrypt(secret_value.encode("utf-8"))
    return base64.b64encode(ciphertext).decode("utf-8")
def create_or_update_secret(self, owner: str, repo: str, secret_name: str,
                            secret_value: str, access_token: str) -> bool:
    """Encrypt and upsert a repository Actions secret.

    Returns True on success; all failures are logged and reported as False.
    """
    try:
        logger.info(f"Creating secret {secret_name} (value length: {len(secret_value) if secret_value else 0})")
        # The secret must be sealed with the repo's current public key.
        public_key, key_id = self.get_repo_public_key(owner, repo, access_token)
        encrypted_value = self.encrypt_secret(public_key, secret_value)
        response = requests.put(
            f'{self.github_api}/repos/{owner}/{repo}/actions/secrets/{secret_name}',
            headers={
                'Authorization': f'token {access_token}',
                'Accept': 'application/vnd.github.v3+json',
            },
            json={'encrypted_value': encrypted_value, 'key_id': key_id},
            timeout=30,
        )
        # 201 = secret created, 204 = existing secret updated.
        if response.status_code in [201, 204]:
            logger.info(f"Secret {secret_name} created successfully")
            return True
        logger.error(f"Failed to create secret {secret_name}: {response.status_code} - {response.text}")
        return False
    except Exception as e:
        logger.error(f"Failed to create secret {secret_name}: {e}")
        return False
def detect_project_type(self, owner: str, repo: str, access_token: str) -> Dict:
    """
    Enhanced project type detection
    Supports: React, Vue, Angular, Next.js, Nuxt, Svelte, Gatsby, Astro,
    Hugo, Jekyll, Python, PHP, Ruby, Go, Rust, and 30+ frameworks

    Inspects only the repository's ROOT file listing, then checks marker
    files in priority order (.celestine.yml > package.json > requirements
    > composer.json > Gemfile > go.mod > Cargo.toml > pom.xml/gradle >
    index.html). Returns a dict with at least 'type', 'build_command',
    'output_dir', 'install_command' and 'language' (plus runtime version
    keys where known); falls back to a static-site config when the repo
    cannot be read or nothing matches.
    """
    headers = {
        'Authorization': f'token {access_token}',
        'Accept': 'application/vnd.github.v3+json'
    }
    # Get repository contents (root directory listing only)
    response = requests.get(
        f'{self.github_api}/repos/{owner}/{repo}/contents',
        headers=headers,
        timeout=30
    )
    if response.status_code != 200:
        # Repo unreadable: assume a plain static HTML site.
        return {
            'type': 'static',
            'build_command': '',
            'output_dir': '.',
            'install_command': '',
            'language': 'html'
        }
    # Map of root-level file name -> content-API item (directories excluded).
    files = {item['name']: item for item in response.json() if item['type'] == 'file'}
    # Priority 1: Check for .celestine.yml (custom configuration)
    if '.celestine.yml' in files or '.celestine.yaml' in files:
        try:
            config_file = '.celestine.yml' if '.celestine.yml' in files else '.celestine.yaml'
            # NOTE(review): download_url is fetched without auth — presumably
            # fine for public repos; confirm behavior for private ones.
            config_response = requests.get(files[config_file]['download_url'], timeout=30)
            import yaml
            config = yaml.safe_load(config_response.text)
            # User config wins outright over all auto-detection below.
            return {
                'type': config.get('project', {}).get('type', 'custom'),
                'build_command': config.get('build', {}).get('command', ''),
                'output_dir': config.get('build', {}).get('output', 'dist'),
                'install_command': config.get('build', {}).get('install', ''),
                'language': config.get('project', {}).get('language', 'unknown'),
                'node_version': config.get('runtime', {}).get('node', '18'),
                'python_version': config.get('runtime', {}).get('python', '3.11'),
                'php_version': config.get('runtime', {}).get('php', '8.2')
            }
        except Exception as e:
            # A broken config file is ignored; auto-detection continues.
            logger.warning(f"Failed to parse .celestine.yml: {e}")
    # Priority 2: Node.js/JavaScript/TypeScript projects
    if 'package.json' in files:
        try:
            pkg_response = requests.get(files['package.json']['download_url'], timeout=30)
            package_json = pkg_response.json()
            dependencies = package_json.get('dependencies', {})
            dev_dependencies = package_json.get('devDependencies', {})
            scripts = package_json.get('scripts', {})
            # Framework checks ordered most-specific first: meta-frameworks
            # (Next/Nuxt/Gatsby/...) before plain React/Vue, Vite last.
            # Next.js
            if 'next' in dependencies or 'next' in dev_dependencies:
                return {
                    'type': 'nextjs',
                    'build_command': 'npm run build && npm run export',
                    'output_dir': 'out',
                    'install_command': 'npm ci',
                    'language': 'javascript',
                    'node_version': '18'
                }
            # Nuxt.js
            elif 'nuxt' in dependencies or 'nuxt' in dev_dependencies:
                return {
                    'type': 'nuxt',
                    'build_command': 'npm run generate',
                    'output_dir': 'dist',
                    'install_command': 'npm ci',
                    'language': 'javascript',
                    'node_version': '18'
                }
            # Gatsby
            elif 'gatsby' in dependencies or 'gatsby' in dev_dependencies:
                return {
                    'type': 'gatsby',
                    'build_command': 'npm run build',
                    'output_dir': 'public',
                    'install_command': 'npm ci',
                    'language': 'javascript',
                    'node_version': '18'
                }
            # Astro
            elif 'astro' in dependencies or 'astro' in dev_dependencies:
                return {
                    'type': 'astro',
                    'build_command': 'npm run build',
                    'output_dir': 'dist',
                    'install_command': 'npm ci',
                    'language': 'javascript',
                    'node_version': '18'
                }
            # Docusaurus
            elif '@docusaurus/core' in dependencies:
                return {
                    'type': 'docusaurus',
                    'build_command': 'npm run build',
                    'output_dir': 'build',
                    'install_command': 'npm ci',
                    'language': 'javascript',
                    'node_version': '18'
                }
            # Eleventy (11ty)
            elif '@11ty/eleventy' in dependencies or '@11ty/eleventy' in dev_dependencies:
                return {
                    'type': 'eleventy',
                    'build_command': 'npm run build',
                    'output_dir': '_site',
                    'install_command': 'npm ci',
                    'language': 'javascript',
                    'node_version': '18'
                }
            # VuePress
            elif 'vuepress' in dependencies or 'vuepress' in dev_dependencies:
                return {
                    'type': 'vuepress',
                    'build_command': 'npm run build',
                    'output_dir': 'docs/.vuepress/dist',
                    'install_command': 'npm ci',
                    'language': 'javascript',
                    'node_version': '18'
                }
            # Hexo
            elif 'hexo' in dependencies or 'hexo' in dev_dependencies:
                return {
                    'type': 'hexo',
                    'build_command': 'npm run build',
                    'output_dir': 'public',
                    'install_command': 'npm ci',
                    'language': 'javascript',
                    'node_version': '18'
                }
            # React
            elif 'react' in dependencies:
                # Always use npm run build to use the script from package.json
                build_cmd = 'npm run build'
                # Check if using Vite (requires Node 20+)
                if 'vite' in dev_dependencies:
                    output_dir = 'dist'
                    node_version = '20'
                else:
                    output_dir = 'build'
                    node_version = '18'
                return {
                    'type': 'react',
                    'build_command': build_cmd,
                    'output_dir': output_dir,
                    'install_command': 'npm ci',
                    'language': 'javascript',
                    'node_version': node_version
                }
            # Vue.js
            elif 'vue' in dependencies:
                return {
                    'type': 'vue',
                    'build_command': 'npm run build',
                    'output_dir': 'dist',
                    'install_command': 'npm ci',
                    'language': 'javascript',
                    'node_version': '18'
                }
            # Angular
            elif '@angular/core' in dependencies:
                return {
                    'type': 'angular',
                    'build_command': 'npm run build -- --configuration production',
                    'output_dir': 'dist',
                    'install_command': 'npm ci',
                    'language': 'typescript',
                    'node_version': '18'
                }
            # Svelte/SvelteKit
            elif 'svelte' in dependencies or 'svelte' in dev_dependencies:
                if '@sveltejs/kit' in dependencies or '@sveltejs/kit' in dev_dependencies:
                    return {
                        'type': 'sveltekit',
                        'build_command': 'npm run build',
                        'output_dir': 'build',
                        'install_command': 'npm ci',
                        'language': 'javascript',
                        'node_version': '18'
                    }
                return {
                    'type': 'svelte',
                    'build_command': 'npm run build',
                    'output_dir': 'public',
                    'install_command': 'npm ci',
                    'language': 'javascript',
                    'node_version': '18'
                }
            # Vite (plain Vite project with no recognized framework)
            elif 'vite' in dev_dependencies:
                return {
                    'type': 'vite',
                    'build_command': 'npm run build',
                    'output_dir': 'dist',
                    'install_command': 'npm ci',
                    'language': 'javascript',
                    'node_version': '18'
                }
            # Generic Node.js
            else:
                return {
                    'type': 'nodejs',
                    'build_command': scripts.get('build', 'npm run build'),
                    'output_dir': 'dist',
                    'install_command': 'npm ci',
                    'language': 'javascript',
                    'node_version': '18'
                }
        except Exception as e:
            # Unreadable package.json: fall through to other language checks.
            logger.warning(f"Failed to parse package.json: {e}")
    # Priority 3: Python projects
    if 'requirements.txt' in files or 'pyproject.toml' in files:
        # Django (detected by the standard manage.py entry point)
        if 'manage.py' in files:
            return {
                'type': 'django',
                'build_command': 'python manage.py collectstatic --noinput',
                'output_dir': 'staticfiles',
                'install_command': 'pip install -r requirements.txt',
                'language': 'python',
                'python_version': '3.11'
            }
        # Flask
        elif 'app.py' in files or 'wsgi.py' in files:
            return {
                'type': 'flask',
                'build_command': 'python build.py',
                'output_dir': 'build',
                'install_command': 'pip install -r requirements.txt',
                'language': 'python',
                'python_version': '3.11'
            }
        # Generic Python
        else:
            return {
                'type': 'python',
                'build_command': 'python build.py',
                'output_dir': 'dist',
                'install_command': 'pip install -r requirements.txt',
                'language': 'python',
                'python_version': '3.11'
            }
    # Priority 4: PHP projects
    if 'composer.json' in files:
        try:
            composer_response = requests.get(files['composer.json']['download_url'], timeout=30)
            composer_json = composer_response.json()
            require = composer_json.get('require', {})
            # Laravel
            if 'laravel/framework' in require:
                return {
                    'type': 'laravel',
                    'build_command': 'composer install --no-dev --optimize-autoloader && npm run build',
                    'output_dir': 'public',
                    'install_command': 'composer install',
                    'language': 'php',
                    'php_version': '8.2',
                    'node_version': '18'
                }
            # Symfony
            elif 'symfony/framework-bundle' in require:
                return {
                    'type': 'symfony',
                    'build_command': 'composer install --no-dev --optimize-autoloader && npm run build',
                    'output_dir': 'public',
                    'install_command': 'composer install',
                    'language': 'php',
                    'php_version': '8.2',
                    'node_version': '18'
                }
            # CodeIgniter
            elif 'codeigniter4/framework' in require:
                return {
                    'type': 'codeigniter',
                    'build_command': 'composer install --no-dev',
                    'output_dir': 'public',
                    'install_command': 'composer install',
                    'language': 'php',
                    'php_version': '8.1'
                }
            # Generic PHP
            else:
                return {
                    'type': 'php',
                    'build_command': 'composer install --no-dev',
                    'output_dir': '.',
                    'install_command': 'composer install',
                    'language': 'php',
                    'php_version': '8.1'
                }
        except Exception as e:
            # Unreadable composer.json: fall through to other checks.
            logger.warning(f"Failed to parse composer.json: {e}")
    # Priority 5: Ruby projects
    if 'Gemfile' in files:
        # Jekyll (detected by its conventional _config.yml)
        if '_config.yml' in files:
            return {
                'type': 'jekyll',
                'build_command': 'bundle exec jekyll build',
                'output_dir': '_site',
                'install_command': 'bundle install',
                'language': 'ruby',
                'ruby_version': '3.2'
            }
        # Middleman
        elif 'config.rb' in files:
            return {
                'type': 'middleman',
                'build_command': 'bundle exec middleman build',
                'output_dir': 'build',
                'install_command': 'bundle install',
                'language': 'ruby',
                'ruby_version': '3.2'
            }
        # Generic Ruby
        else:
            return {
                'type': 'ruby',
                'build_command': 'bundle exec rake build',
                'output_dir': 'build',
                'install_command': 'bundle install',
                'language': 'ruby',
                'ruby_version': '3.2'
            }
    # Priority 6: Go projects
    if 'go.mod' in files:
        # Hugo (a Hugo site with modules has go.mod plus a Hugo config file)
        if 'config.toml' in files or 'config.yaml' in files or 'hugo.toml' in files:
            return {
                'type': 'hugo',
                'build_command': 'hugo --minify',
                'output_dir': 'public',
                'install_command': '',
                'language': 'go',
                'hugo_version': 'latest'
            }
        # Generic Go
        else:
            return {
                'type': 'go',
                'build_command': 'go build -o dist/',
                'output_dir': 'dist',
                'install_command': 'go mod download',
                'language': 'go',
                'go_version': '1.21'
            }
    # Priority 7: Rust projects (assumed to be Trunk-based WASM sites)
    if 'Cargo.toml' in files:
        return {
            'type': 'rust',
            'build_command': 'trunk build --release',
            'output_dir': 'dist',
            'install_command': 'cargo install trunk',
            'language': 'rust',
            'rust_version': 'stable'
        }
    # Priority 8: Java projects
    if 'pom.xml' in files:
        return {
            'type': 'maven',
            'build_command': './mvnw clean package',
            'output_dir': 'target/classes/static',
            'install_command': '',
            'language': 'java',
            'java_version': '17'
        }
    if 'build.gradle' in files or 'build.gradle.kts' in files:
        return {
            'type': 'gradle',
            'build_command': './gradlew build',
            'output_dir': 'build/resources/main/static',
            'install_command': '',
            'language': 'java',
            'java_version': '17'
        }
    # Priority 9: Static HTML
    if 'index.html' in files:
        return {
            'type': 'static',
            'build_command': '',
            'output_dir': '.',
            'install_command': '',
            'language': 'html'
        }
    # Default: Static (nothing recognized at the repo root)
    return {
        'type': 'static',
        'build_command': '',
        'output_dir': '.',
        'install_command': '',
        'language': 'unknown'
    }
def generate_workflow_yaml(self, project_config: Dict, ftp_server: str,
                           ftp_username: str, branch: str = 'main') -> str:
    """Generate GitHub Actions workflow YAML for deployment with multi-language support.

    Args:
        project_config: Output of detect_project_type() — type, build/install
            commands, output dir, language and optional runtime versions.
        ftp_server: FTP hostname baked into the workflow environment.
        ftp_username: Currently unused; credentials are read from repo secrets.
        branch: Branch whose pushes trigger the deployment.

    Returns:
        The complete workflow file contents as a string.

    Fixes vs. previous revision: the language dispatch checked
    ``or project_config.get('node_version')`` first, which hijacked PHP
    configs (Laravel/Symfony carry a node_version) into the Node-only
    branch and skipped the PHP toolchain; mojibake emoji in the emitted
    step names/log text were repaired.
    """
    language = project_config.get('language', 'unknown')
    project_type = project_config['type']
    workflow = f"""name: Deploy to Celestine Hosting

on:
  push:
    branches: [ {branch} ]
  workflow_dispatch:

jobs:
  deploy:
    runs-on: ubuntu-latest
    steps:
      - name: 📥 Checkout code
        uses: actions/checkout@v4
"""
    # Language-specific toolchain setup. Only js/ts takes the plain Node
    # branch; PHP adds its own Node step below when node_version is set.
    if language in ['javascript', 'typescript']:
        node_version = project_config.get('node_version', '18')
        workflow += f"""      - name: 📦 Setup Node.js
        uses: actions/setup-node@v4
        with:
          node-version: '{node_version}'
          cache: 'npm'
"""
    elif language == 'python':
        python_version = project_config.get('python_version', '3.11')
        workflow += f"""      - name: 🐍 Setup Python
        uses: actions/setup-python@v4
        with:
          python-version: '{python_version}'
          cache: 'pip'
"""
    elif language == 'php':
        php_version = project_config.get('php_version', '8.2')
        workflow += f"""      - name: 🐘 Setup PHP
        uses: shivammathur/setup-php@v2
        with:
          php-version: '{php_version}'
          tools: composer
"""
        # PHP projects might also need Node.js (asset pipelines).
        if project_config.get('node_version'):
            workflow += f"""      - name: 📦 Setup Node.js
        uses: actions/setup-node@v4
        with:
          node-version: '{project_config.get('node_version', '18')}'
          cache: 'npm'
"""
    elif language == 'ruby':
        ruby_version = project_config.get('ruby_version', '3.2')
        workflow += f"""      - name: 💎 Setup Ruby
        uses: ruby/setup-ruby@v1
        with:
          ruby-version: '{ruby_version}'
          bundler-cache: true
"""
    elif language == 'go' or project_type == 'hugo':
        if project_type == 'hugo':
            hugo_version = project_config.get('hugo_version', 'latest')
            workflow += f"""      - name: 🚀 Setup Hugo
        uses: peaceiris/actions-hugo@v2
        with:
          hugo-version: '{hugo_version}'
          extended: true
"""
        else:
            go_version = project_config.get('go_version', '1.21')
            workflow += f"""      - name: 🔷 Setup Go
        uses: actions/setup-go@v4
        with:
          go-version: '{go_version}'
"""
    elif language == 'rust':
        workflow += """      - name: 🦀 Setup Rust
        uses: actions-rs/toolchain@v1
        with:
          toolchain: stable
          override: true
      - name: 📦 Install Trunk
        run: cargo install trunk
"""
    elif language == 'java':
        java_version = project_config.get('java_version', '17')
        workflow += f"""      - name: ☕ Setup Java
        uses: actions/setup-java@v3
        with:
          java-version: '{java_version}'
          distribution: 'temurin'
"""
    # Install dependencies (skipped when no install command is configured).
    if project_config['install_command']:
        workflow += f"""      - name: 📚 Install dependencies
        run: {project_config['install_command']}
"""
    # Build project (skipped for plain static sites).
    if project_config['build_command']:
        workflow += f"""      - name: 🔨 Build project
        run: {project_config['build_command']}
        env:
          CI: false
          NODE_ENV: production
"""
    # Universal FTP deployment via an inline Python script (works for all
    # languages). Doubled braces emit literal braces in the script;
    # ${{{{...}}}} emits ${{...}} for GitHub Actions secret expansion.
    workflow += f"""      - name: 🚀 Deploy to VistaPanel via FTP
        env:
          FTP_USER: ${{{{ secrets.FTP_USERNAME }}}}
          FTP_PASS: ${{{{ secrets.FTP_PASSWORD }}}}
          FTP_SERVER: {ftp_server}
          BUILD_DIR: {project_config['output_dir']}
        run: |
          python3 << 'EOF'
          import os
          import sys
          import ftplib
          from pathlib import Path

          def upload_ftp():
              # Get environment variables
              ftp_server = os.environ.get('FTP_SERVER')
              ftp_user = os.environ.get('FTP_USER')
              ftp_pass = os.environ.get('FTP_PASS')
              build_dir = os.environ.get('BUILD_DIR', '.')
              remote_dir = '/htdocs'
              if not all([ftp_server, ftp_user, ftp_pass]):
                  print("❌ Missing FTP credentials!")
                  sys.exit(1)
              print(f"🔌 Connecting to {{ftp_server}}...")
              try:
                  # Connect to FTP server
                  ftp = ftplib.FTP(ftp_server, timeout=60)
                  ftp.login(ftp_user, ftp_pass)
                  print(f"✅ Logged in as {{ftp_user}}")
                  # Change to remote directory
                  try:
                      ftp.cwd(remote_dir)
                      print(f"📂 Working in {{remote_dir}}")
                  except ftplib.error_perm:
                      print(f"📁 Creating {{remote_dir}}...")
                      ftp.mkd(remote_dir)
                      ftp.cwd(remote_dir)
                  uploaded_files = 0
                  uploaded_dirs = 0

                  def upload_directory(local_path, remote_base=''):
                      nonlocal uploaded_files, uploaded_dirs
                      local_path = Path(local_path)
                      if not local_path.exists():
                          print(f"⚠️ Build directory {{local_path}} not found!")
                          return
                      items = sorted(local_path.iterdir())
                      for item in items:
                          item_name = item.name
                          # Skip hidden files and common excludes
                          if item_name.startswith('.') or item_name in ['node_modules', '__pycache__', 'vendor']:
                              continue
                          if item.is_file():
                              try:
                                  with open(item, 'rb') as f:
                                      ftp.storbinary(f'STOR {{item_name}}', f)
                                  uploaded_files += 1
                                  print(f" ✓ {{remote_base}}/{{item_name}}")
                              except Exception as e:
                                  print(f" ✗ {{item_name}}: {{e}}")
                          elif item.is_dir():
                              # Create and enter directory
                              try:
                                  ftp.mkd(item_name)
                                  uploaded_dirs += 1
                              except ftplib.error_perm:
                                  pass  # Directory exists
                              current_dir = ftp.pwd()
                              ftp.cwd(item_name)
                              print(f"📁 {{remote_base}}/{{item_name}}/")
                              # Recursively upload subdirectory
                              upload_directory(item, f"{{remote_base}}/{{item_name}}")
                              # Go back to parent directory
                              ftp.cwd(current_dir)

                  # Start upload
                  print(f"\\n🚀 Uploading from {{build_dir}}...\\n")
                  upload_directory(build_dir)
                  # Close connection
                  ftp.quit()
                  print(f"\\n✅ Deployment complete!")
                  print(f"📊 Uploaded {{uploaded_files}} files in {{uploaded_dirs}} directories")
                  return 0
              except ftplib.error_perm as e:
                  print(f"❌ FTP Permission Error: {{e}}")
                  return 1
              except ftplib.error_temp as e:
                  print(f"❌ FTP Temporary Error: {{e}}")
                  return 1
              except Exception as e:
                  print(f"❌ Error: {{e}}")
                  return 1

          # Run upload
          exit_code = upload_ftp()
          sys.exit(exit_code)
          EOF
      - name: ✅ Deployment complete
        run: |
          echo ""
          echo "════════════════════════════════════════════════════════════"
          echo "🎉 Deployment Successful!"
          echo "════════════════════════════════════════════════════════════"
          echo "📦 Project Type: {project_type}"
          echo "🔨 Build Command: {project_config['build_command'] or 'None'}"
          echo "📂 Output Directory: {project_config['output_dir']}"
          echo "🌐 Your site is now live!"
          echo "════════════════════════════════════════════════════════════"
"""
    return workflow
def inject_workflow(self, owner: str, repo: str, workflow_content: str,
                    access_token: str, branch: str = 'main') -> bool:
    """Create or update ``.github/workflows/celestine_deploy.yml`` on `branch`.

    Returns True when GitHub accepts the commit (200 updated / 201 created),
    False on any failure.

    Fixes vs. previous revision: the SHA lookup used a bare ``except:``
    which swallowed every exception (including KeyboardInterrupt); it now
    catches only requests.RequestException. The mojibake commit-message
    emoji was also repaired.
    """
    try:
        headers = {
            'Authorization': f'token {access_token}',
            'Accept': 'application/vnd.github.v3+json'
        }
        workflow_path = '.github/workflows/celestine_deploy.yml'
        # Updating an existing file requires its current blob SHA.
        sha = None
        try:
            response = requests.get(
                f'{self.github_api}/repos/{owner}/{repo}/contents/{workflow_path}',
                headers=headers,
                params={'ref': branch},
                timeout=30
            )
            if response.status_code == 200:
                sha = response.json()['sha']
        except requests.RequestException:
            # Treat lookup failures as "file does not exist yet".
            pass
        # Create or update file via the contents API.
        payload = {
            'message': '🚀 Add Celestine Hosting deployment workflow',
            'content': base64.b64encode(workflow_content.encode()).decode(),
            'branch': branch
        }
        if sha:
            payload['sha'] = sha
        response = requests.put(
            f'{self.github_api}/repos/{owner}/{repo}/contents/{workflow_path}',
            headers=headers,
            json=payload,
            timeout=30
        )
        return response.status_code in [200, 201]
    except Exception as e:
        logger.error(f"Failed to inject workflow: {e}")
        return False
def setup_deployment(self, owner: str, repo: str, access_token: str,
                     ftp_server: str, ftp_username: str, ftp_password: str,
                     branch: str = 'main') -> Dict:
    """
    Complete deployment setup:
    1. Detect project type
    2. Create FTP secrets
    3. Inject workflow file

    Returns {'success': True, ...project/workflow details...} on success,
    or {'success': False, 'error': str} on failure.

    Fixes vs. previous revision: mojibake box-drawing/emoji characters in
    the log banners were repaired, and the plaintext FTP username is no
    longer written to the logs (credentials should never be logged —
    presence and length are reported instead).
    """
    try:
        logger.info("┌" + "─" * 78 + "┐")
        logger.info(f"│ 🚀 SETUP DEPLOYMENT: {owner}/{repo}")
        logger.info("├" + "─" * 78 + "─")
        logger.info(f"│ 📥 Received FTP Server: {ftp_server}")
        # Do not log credential values — report presence/length only.
        logger.info(f"│ 📥 Received FTP Username: {'✓ Present' if ftp_username else '⚠️ EMPTY'} (length: {len(ftp_username) if ftp_username else 0})")
        logger.info(f"│ 📥 Received FTP Password: {'✓ Present' if ftp_password else '⚠️ EMPTY'} (length: {len(ftp_password) if ftp_password else 0})")
        logger.info(f"│ 🌿 Branch: {branch}")
        logger.info("└" + "─" * 78 + "┘")
        # Step 1: secrets must be written with an installation token.
        logger.info("🔐 Step 1/5: Getting installation access token...")
        installation_id = self.get_repo_installation_id(owner, repo)
        logger.info(f" ✓ Installation ID: {installation_id}")
        installation_token = self.get_installation_access_token(installation_id)
        logger.info(f" ✓ Installation token obtained (length: {len(installation_token)})")
        # Step 2: repo contents are read with the user's OAuth token.
        logger.info("🔍 Step 2/5: Detecting project type...")
        project_config = self.detect_project_type(owner, repo, access_token)
        logger.info(f" ✓ Project Type: {project_config['type']}")
        logger.info(f" ✓ Build Command: {project_config.get('build_command', 'None')}")
        logger.info(f" ✓ Output Directory: {project_config.get('output_dir', 'None')}")
        logger.info(f" ✓ Language: {project_config.get('language', 'unknown')}")
        # Step 3: store FTP credentials as encrypted repository secrets.
        logger.info("🔑 Step 3/5: Creating FTP secrets in GitHub...")
        logger.info(" → Creating FTP_USERNAME")
        ftp_user_created = self.create_or_update_secret(
            owner, repo, 'FTP_USERNAME', ftp_username, installation_token
        )
        logger.info(f" → Creating FTP_PASSWORD (length: {len(ftp_password)})")
        ftp_pass_created = self.create_or_update_secret(
            owner, repo, 'FTP_PASSWORD', ftp_password, installation_token
        )
        if not ftp_user_created:
            logger.error(" ✗ Failed to create FTP_USERNAME secret")
            raise Exception("Failed to create FTP_USERNAME secret")
        if not ftp_pass_created:
            logger.error(" ✗ Failed to create FTP_PASSWORD secret")
            raise Exception("Failed to create FTP_PASSWORD secret")
        logger.info(" ✓ Both secrets created successfully")
        # Step 4: render the workflow file for the detected stack.
        logger.info("📝 Step 4/5: Generating workflow YAML...")
        workflow_content = self.generate_workflow_yaml(
            project_config, ftp_server, ftp_username, branch
        )
        logger.info(f" ✓ Workflow generated ({len(workflow_content)} bytes)")
        # Step 5: commit the workflow with the user's token.
        logger.info("📤 Step 5/5: Injecting workflow into repository...")
        workflow_injected = self.inject_workflow(
            owner, repo, workflow_content, access_token, branch
        )
        if not workflow_injected:
            logger.error(" ✗ Failed to inject workflow file")
            raise Exception("Failed to inject workflow file")
        logger.info(" ✓ Workflow file injected successfully")
        logger.info("┌" + "─" * 78 + "┐")
        logger.info("│ ✅ DEPLOYMENT SETUP COMPLETE")
        logger.info("└" + "─" * 78 + "┘")
        return {
            'success': True,
            'project_type': project_config['type'],
            'build_command': project_config['build_command'],
            'output_dir': project_config['output_dir'],
            'workflow_url': f'https://github.com/{owner}/{repo}/blob/{branch}/.github/workflows/celestine_deploy.yml',
            'actions_url': f'https://github.com/{owner}/{repo}/actions'
        }
    except Exception as e:
        logger.error("┌" + "─" * 78 + "┐")
        logger.error(f"│ ❌ DEPLOYMENT SETUP FAILED: {str(e)}")
        logger.error("└" + "─" * 78 + "┘")
        return {
            'success': False,
            'error': str(e)
        }
def get_workflow_runs(self, owner: str, repo: str, access_token: str,
                      limit: int = 10) -> List[Dict]:
    """Return a trimmed summary of the repo's most recent workflow runs.

    Each entry keeps only the fields the UI needs; any failure (HTTP or
    otherwise) yields an empty list.
    """
    try:
        response = requests.get(
            f'{self.github_api}/repos/{owner}/{repo}/actions/runs',
            headers={
                'Authorization': f'token {access_token}',
                'Accept': 'application/vnd.github.v3+json',
            },
            params={'per_page': limit},
            timeout=30,
        )
        if response.status_code != 200:
            return []
        summary_keys = ('id', 'name', 'status', 'conclusion',
                        'created_at', 'updated_at', 'html_url', 'run_number')
        return [{key: run[key] for key in summary_keys}
                for run in response.json()['workflow_runs']]
    except Exception as e:
        logger.error(f"Failed to get workflow runs: {e}")
        return []
def get_workflow_logs(self, owner: str, repo: str, run_id: int,
                      access_token: str) -> str:
    """Fetch the logs for a workflow run; empty string on any failure.

    NOTE(review): this endpoint redirects to a ZIP archive, so
    ``response.text`` is presumably decoded binary — confirm callers
    actually expect raw archive bytes as text.
    """
    log_url = f'{self.github_api}/repos/{owner}/{repo}/actions/runs/{run_id}/logs'
    try:
        response = requests.get(
            log_url,
            headers={
                'Authorization': f'token {access_token}',
                'Accept': 'application/vnd.github.v3+json',
            },
            timeout=30,
            allow_redirects=True,
        )
    except Exception as e:
        logger.error(f"Failed to get logs: {e}")
        return ""
    return response.text if response.status_code == 200 else ""
def trigger_workflow(self, owner: str, repo: str, workflow_file: str,
                     access_token: str, branch: str = 'main') -> bool:
    """Kick off a workflow_dispatch run of ``workflow_file`` on ``branch``."""
    dispatch_url = (f'{self.github_api}/repos/{owner}/{repo}'
                    f'/actions/workflows/{workflow_file}/dispatches')
    try:
        response = requests.post(
            dispatch_url,
            headers={
                'Authorization': f'token {access_token}',
                'Accept': 'application/vnd.github.v3+json',
            },
            json={'ref': branch},
            timeout=30,
        )
    except Exception as e:
        logger.error(f"Failed to trigger workflow: {e}")
        return False
    # GitHub answers 204 No Content on a successful dispatch.
    return response.status_code == 204