Spaces:
Runtime error
Runtime error
Factor Studios
commited on
Upload 96 files
Browse filesThis view is limited to 50 files because it contains too many changes.
See raw diff
- .gitattributes +35 -35
- Dockerfile +35 -0
- admin_routes.py +166 -0
- app.py +819 -0
- ca/ca.crt +30 -0
- ca/ca.key +52 -0
- config/outline_config.json +11 -0
- config/server.json +33 -0
- config/server_config.json +11 -0
- config/users.json +1 -0
- core/__pycache__/database.cpython-311.pyc +0 -0
- core/__pycache__/database_init.cpython-311.pyc +0 -0
- core/__pycache__/ikev2_server.cpython-311.pyc +0 -0
- core/__pycache__/ip_parser.cpython-311.pyc +0 -0
- core/__pycache__/logger.cpython-311.pyc +0 -0
- core/__pycache__/nat_engine.cpython-311.pyc +0 -0
- core/__pycache__/outline_config.cpython-311.pyc +0 -0
- core/__pycache__/outline_server.cpython-311.pyc +0 -0
- core/__pycache__/port_manager.cpython-311.pyc +0 -0
- core/__pycache__/session_tracker.cpython-311.pyc +0 -0
- core/__pycache__/shadowsocks_protocol.cpython-311.pyc +0 -0
- core/__pycache__/tcp_engine.cpython-311.pyc +0 -0
- core/__pycache__/tcp_forward.cpython-311.pyc +0 -0
- core/__pycache__/traffic_router.cpython-311.pyc +0 -0
- core/__pycache__/vpn_auth.cpython-311.pyc +0 -0
- core/auth.py +74 -0
- core/database.py +31 -0
- core/database_init.py +52 -0
- core/error_handlers.py +51 -0
- core/ikev2_server.py +207 -0
- core/ip_parser.py +230 -0
- core/l2tp_server.py +201 -0
- core/logger.py +282 -0
- core/middleware.py +58 -0
- core/models/__pycache__/user.cpython-311.pyc +0 -0
- core/models/user.py +98 -0
- core/nat_engine.py +135 -0
- core/outline_config.py +126 -0
- core/outline_server.py +181 -0
- core/port_manager.py +113 -0
- core/pptp_server.py +179 -0
- core/process_lock.py +51 -0
- core/services/__pycache__/user_service.cpython-311.pyc +0 -0
- core/services/user_service.py +99 -0
- core/session_tracker.py +287 -0
- core/shadowsocks_protocol.py +121 -0
- core/socket_translator.py +339 -0
- core/tcp_engine.py +356 -0
- core/tcp_forward.py +159 -0
- core/traffic_forwarder.py +185 -0
.gitattributes
CHANGED
|
@@ -1,35 +1,35 @@
|
|
| 1 |
-
*.7z filter=lfs diff=lfs merge=lfs -text
|
| 2 |
-
*.arrow filter=lfs diff=lfs merge=lfs -text
|
| 3 |
-
*.bin filter=lfs diff=lfs merge=lfs -text
|
| 4 |
-
*.bz2 filter=lfs diff=lfs merge=lfs -text
|
| 5 |
-
*.ckpt filter=lfs diff=lfs merge=lfs -text
|
| 6 |
-
*.ftz filter=lfs diff=lfs merge=lfs -text
|
| 7 |
-
*.gz filter=lfs diff=lfs merge=lfs -text
|
| 8 |
-
*.h5 filter=lfs diff=lfs merge=lfs -text
|
| 9 |
-
*.joblib filter=lfs diff=lfs merge=lfs -text
|
| 10 |
-
*.lfs.* filter=lfs diff=lfs merge=lfs -text
|
| 11 |
-
*.mlmodel filter=lfs diff=lfs merge=lfs -text
|
| 12 |
-
*.model filter=lfs diff=lfs merge=lfs -text
|
| 13 |
-
*.msgpack filter=lfs diff=lfs merge=lfs -text
|
| 14 |
-
*.npy filter=lfs diff=lfs merge=lfs -text
|
| 15 |
-
*.npz filter=lfs diff=lfs merge=lfs -text
|
| 16 |
-
*.onnx filter=lfs diff=lfs merge=lfs -text
|
| 17 |
-
*.ot filter=lfs diff=lfs merge=lfs -text
|
| 18 |
-
*.parquet filter=lfs diff=lfs merge=lfs -text
|
| 19 |
-
*.pb filter=lfs diff=lfs merge=lfs -text
|
| 20 |
-
*.pickle filter=lfs diff=lfs merge=lfs -text
|
| 21 |
-
*.pkl filter=lfs diff=lfs merge=lfs -text
|
| 22 |
-
*.pt filter=lfs diff=lfs merge=lfs -text
|
| 23 |
-
*.pth filter=lfs diff=lfs merge=lfs -text
|
| 24 |
-
*.rar filter=lfs diff=lfs merge=lfs -text
|
| 25 |
-
*.safetensors filter=lfs diff=lfs merge=lfs -text
|
| 26 |
-
saved_model/**/* filter=lfs diff=lfs merge=lfs -text
|
| 27 |
-
*.tar.* filter=lfs diff=lfs merge=lfs -text
|
| 28 |
-
*.tar filter=lfs diff=lfs merge=lfs -text
|
| 29 |
-
*.tflite filter=lfs diff=lfs merge=lfs -text
|
| 30 |
-
*.tgz filter=lfs diff=lfs merge=lfs -text
|
| 31 |
-
*.wasm filter=lfs diff=lfs merge=lfs -text
|
| 32 |
-
*.xz filter=lfs diff=lfs merge=lfs -text
|
| 33 |
-
*.zip filter=lfs diff=lfs merge=lfs -text
|
| 34 |
-
*.zst filter=lfs diff=lfs merge=lfs -text
|
| 35 |
-
*tfevents* filter=lfs diff=lfs merge=lfs -text
|
|
|
|
| 1 |
+
*.7z filter=lfs diff=lfs merge=lfs -text
|
| 2 |
+
*.arrow filter=lfs diff=lfs merge=lfs -text
|
| 3 |
+
*.bin filter=lfs diff=lfs merge=lfs -text
|
| 4 |
+
*.bz2 filter=lfs diff=lfs merge=lfs -text
|
| 5 |
+
*.ckpt filter=lfs diff=lfs merge=lfs -text
|
| 6 |
+
*.ftz filter=lfs diff=lfs merge=lfs -text
|
| 7 |
+
*.gz filter=lfs diff=lfs merge=lfs -text
|
| 8 |
+
*.h5 filter=lfs diff=lfs merge=lfs -text
|
| 9 |
+
*.joblib filter=lfs diff=lfs merge=lfs -text
|
| 10 |
+
*.lfs.* filter=lfs diff=lfs merge=lfs -text
|
| 11 |
+
*.mlmodel filter=lfs diff=lfs merge=lfs -text
|
| 12 |
+
*.model filter=lfs diff=lfs merge=lfs -text
|
| 13 |
+
*.msgpack filter=lfs diff=lfs merge=lfs -text
|
| 14 |
+
*.npy filter=lfs diff=lfs merge=lfs -text
|
| 15 |
+
*.npz filter=lfs diff=lfs merge=lfs -text
|
| 16 |
+
*.onnx filter=lfs diff=lfs merge=lfs -text
|
| 17 |
+
*.ot filter=lfs diff=lfs merge=lfs -text
|
| 18 |
+
*.parquet filter=lfs diff=lfs merge=lfs -text
|
| 19 |
+
*.pb filter=lfs diff=lfs merge=lfs -text
|
| 20 |
+
*.pickle filter=lfs diff=lfs merge=lfs -text
|
| 21 |
+
*.pkl filter=lfs diff=lfs merge=lfs -text
|
| 22 |
+
*.pt filter=lfs diff=lfs merge=lfs -text
|
| 23 |
+
*.pth filter=lfs diff=lfs merge=lfs -text
|
| 24 |
+
*.rar filter=lfs diff=lfs merge=lfs -text
|
| 25 |
+
*.safetensors filter=lfs diff=lfs merge=lfs -text
|
| 26 |
+
saved_model/**/* filter=lfs diff=lfs merge=lfs -text
|
| 27 |
+
*.tar.* filter=lfs diff=lfs merge=lfs -text
|
| 28 |
+
*.tar filter=lfs diff=lfs merge=lfs -text
|
| 29 |
+
*.tflite filter=lfs diff=lfs merge=lfs -text
|
| 30 |
+
*.tgz filter=lfs diff=lfs merge=lfs -text
|
| 31 |
+
*.wasm filter=lfs diff=lfs merge=lfs -text
|
| 32 |
+
*.xz filter=lfs diff=lfs merge=lfs -text
|
| 33 |
+
*.zip filter=lfs diff=lfs merge=lfs -text
|
| 34 |
+
*.zst filter=lfs diff=lfs merge=lfs -text
|
| 35 |
+
*tfevents* filter=lfs diff=lfs merge=lfs -text
|
Dockerfile
ADDED
|
@@ -0,0 +1,35 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# VPN Server with FastAPI
|
| 2 |
+
FROM python:3.11-slim
|
| 3 |
+
|
| 4 |
+
# Set working directory
|
| 5 |
+
WORKDIR /app
|
| 6 |
+
|
| 7 |
+
# Install system dependencies
|
| 8 |
+
RUN apt-get update && apt-get install -y \
|
| 9 |
+
build-essential \
|
| 10 |
+
python3-dev \
|
| 11 |
+
&& rm -rf /var/lib/apt/lists/*
|
| 12 |
+
|
| 13 |
+
# Copy requirements and install Python dependencies
|
| 14 |
+
COPY requirements.txt .
|
| 15 |
+
RUN pip install --no-cache-dir -r requirements.txt
|
| 16 |
+
|
| 17 |
+
# Create non-root user
|
| 18 |
+
RUN useradd -m -u 1000 vpnuser && \
|
| 19 |
+
chown -R vpnuser:vpnuser /app
|
| 20 |
+
|
| 21 |
+
# Copy application files
|
| 22 |
+
COPY --chown=vpnuser:vpnuser . .
|
| 23 |
+
|
| 24 |
+
# Set environment variables
|
| 25 |
+
ENV PYTHONPATH=/app
|
| 26 |
+
ENV PYTHONUNBUFFERED=1
|
| 27 |
+
|
| 28 |
+
# Switch to non-root user
|
| 29 |
+
USER vpnuser
|
| 30 |
+
|
| 31 |
+
# Expose port
|
| 32 |
+
EXPOSE 7860
|
| 33 |
+
|
| 34 |
+
# Run the application with uvicorn
|
| 35 |
+
CMD ["uvicorn", "app:app", "--host", "0.0.0.0", "--port", "7860", "--workers", "4"]
|
admin_routes.py
ADDED
|
@@ -0,0 +1,166 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Admin routes and functionality for Outline VPN
|
| 3 |
+
"""
|
| 4 |
+
import os
|
| 5 |
+
import json
|
| 6 |
+
import psutil
|
| 7 |
+
import zipfile
|
| 8 |
+
from datetime import datetime
|
| 9 |
+
from flask import jsonify, request, send_file, flash, redirect, url_for
|
| 10 |
+
from flask_login import login_required, current_user
|
| 11 |
+
from . import app
|
| 12 |
+
from .models import User, UserRole, SystemHealth, AuditLog, Alert
|
| 13 |
+
from .services import backup_service, monitoring_service
|
| 14 |
+
|
| 15 |
+
def admin_required(f):
|
| 16 |
+
"""Decorator to require admin role for routes"""
|
| 17 |
+
@wraps(f)
|
| 18 |
+
def decorated_function(*args, **kwargs):
|
| 19 |
+
if not current_user.is_authenticated or current_user.role != UserRole.ADMIN:
|
| 20 |
+
flash('You need administrator privileges to access this page.')
|
| 21 |
+
return redirect(url_for('dashboard'))
|
| 22 |
+
return f(*args, **kwargs)
|
| 23 |
+
return decorated_function
|
| 24 |
+
|
| 25 |
+
@app.route('/admin')
|
| 26 |
+
@login_required
|
| 27 |
+
@admin_required
|
| 28 |
+
def admin_dashboard():
|
| 29 |
+
"""Admin dashboard view"""
|
| 30 |
+
system_health = monitoring_service.get_system_health()
|
| 31 |
+
active_alerts = Alert.query.filter_by(status='active').order_by(Alert.created_at.desc()).all()
|
| 32 |
+
audit_logs = AuditLog.query.order_by(AuditLog.timestamp.desc()).limit(50).all()
|
| 33 |
+
|
| 34 |
+
return render_template('admin.html',
|
| 35 |
+
system_health=system_health,
|
| 36 |
+
active_alerts=active_alerts,
|
| 37 |
+
audit_logs=audit_logs)
|
| 38 |
+
|
| 39 |
+
@app.route('/api/system-health')
|
| 40 |
+
@login_required
|
| 41 |
+
@admin_required
|
| 42 |
+
def get_system_health():
|
| 43 |
+
"""Get real-time system health metrics"""
|
| 44 |
+
return jsonify(monitoring_service.get_system_health())
|
| 45 |
+
|
| 46 |
+
@app.route('/api/update-server-config', methods=['POST'])
|
| 47 |
+
@login_required
|
| 48 |
+
@admin_required
|
| 49 |
+
def update_server_config():
|
| 50 |
+
"""Update server configuration"""
|
| 51 |
+
try:
|
| 52 |
+
config = request.get_json()
|
| 53 |
+
backup_service.backup_config('pre_update') # Create backup before updating
|
| 54 |
+
|
| 55 |
+
# Update configuration
|
| 56 |
+
current_config = ServerConfig.query.first()
|
| 57 |
+
for key, value in config.items():
|
| 58 |
+
setattr(current_config, key, value)
|
| 59 |
+
|
| 60 |
+
db.session.commit()
|
| 61 |
+
|
| 62 |
+
# Log the change
|
| 63 |
+
AuditLog.create(
|
| 64 |
+
user_id=current_user.id,
|
| 65 |
+
action='update_config',
|
| 66 |
+
details='Server configuration updated'
|
| 67 |
+
)
|
| 68 |
+
|
| 69 |
+
# Restart required services
|
| 70 |
+
monitoring_service.restart_services()
|
| 71 |
+
|
| 72 |
+
return jsonify({'status': 'success'})
|
| 73 |
+
except Exception as e:
|
| 74 |
+
return jsonify({'status': 'error', 'message': str(e)}), 500
|
| 75 |
+
|
| 76 |
+
@app.route('/api/create-backup')
|
| 77 |
+
@login_required
|
| 78 |
+
@admin_required
|
| 79 |
+
def create_backup():
|
| 80 |
+
"""Create a backup of server configuration"""
|
| 81 |
+
try:
|
| 82 |
+
include_user_data = request.args.get('include_user_data', 'false') == 'true'
|
| 83 |
+
backup_path = backup_service.create_backup(include_user_data)
|
| 84 |
+
|
| 85 |
+
# Log the backup creation
|
| 86 |
+
AuditLog.create(
|
| 87 |
+
user_id=current_user.id,
|
| 88 |
+
action='create_backup',
|
| 89 |
+
details=f'Backup created: {os.path.basename(backup_path)}'
|
| 90 |
+
)
|
| 91 |
+
|
| 92 |
+
return send_file(
|
| 93 |
+
backup_path,
|
| 94 |
+
as_attachment=True,
|
| 95 |
+
download_name=f'outline_backup_{datetime.now().strftime("%Y%m%d_%H%M%S")}.zip'
|
| 96 |
+
)
|
| 97 |
+
except Exception as e:
|
| 98 |
+
flash(f'Error creating backup: {str(e)}', 'error')
|
| 99 |
+
return redirect(url_for('admin_dashboard'))
|
| 100 |
+
|
| 101 |
+
@app.route('/api/restore-config', methods=['POST'])
|
| 102 |
+
@login_required
|
| 103 |
+
@admin_required
|
| 104 |
+
def restore_config():
|
| 105 |
+
"""Restore server configuration from backup"""
|
| 106 |
+
try:
|
| 107 |
+
if 'backup_file' not in request.files:
|
| 108 |
+
flash('No backup file provided', 'error')
|
| 109 |
+
return redirect(url_for('admin_dashboard'))
|
| 110 |
+
|
| 111 |
+
backup_file = request.files['backup_file']
|
| 112 |
+
if backup_file.filename == '':
|
| 113 |
+
flash('No backup file selected', 'error')
|
| 114 |
+
return redirect(url_for('admin_dashboard'))
|
| 115 |
+
|
| 116 |
+
# Create backup of current configuration
|
| 117 |
+
backup_service.backup_config('pre_restore')
|
| 118 |
+
|
| 119 |
+
# Restore from backup
|
| 120 |
+
backup_service.restore_from_backup(backup_file)
|
| 121 |
+
|
| 122 |
+
# Log the restore
|
| 123 |
+
AuditLog.create(
|
| 124 |
+
user_id=current_user.id,
|
| 125 |
+
action='restore_config',
|
| 126 |
+
details=f'Configuration restored from {backup_file.filename}'
|
| 127 |
+
)
|
| 128 |
+
|
| 129 |
+
flash('Configuration restored successfully', 'success')
|
| 130 |
+
return redirect(url_for('admin_dashboard'))
|
| 131 |
+
except Exception as e:
|
| 132 |
+
flash(f'Error restoring configuration: {str(e)}', 'error')
|
| 133 |
+
return redirect(url_for('admin_dashboard'))
|
| 134 |
+
|
| 135 |
+
@app.route('/api/export-audit-log')
|
| 136 |
+
@login_required
|
| 137 |
+
@admin_required
|
| 138 |
+
def export_audit_log():
|
| 139 |
+
"""Export audit log in specified format"""
|
| 140 |
+
format = request.args.get('format', 'csv')
|
| 141 |
+
logs = AuditLog.query.order_by(AuditLog.timestamp.desc()).all()
|
| 142 |
+
|
| 143 |
+
if format == 'csv':
|
| 144 |
+
output = io.StringIO()
|
| 145 |
+
writer = csv.writer(output)
|
| 146 |
+
writer.writerow(['Timestamp', 'User', 'Action', 'Details'])
|
| 147 |
+
for log in logs:
|
| 148 |
+
writer.writerow([
|
| 149 |
+
log.timestamp.strftime('%Y-%m-%d %H:%M:%S'),
|
| 150 |
+
log.user.username,
|
| 151 |
+
log.action,
|
| 152 |
+
log.details
|
| 153 |
+
])
|
| 154 |
+
|
| 155 |
+
return Response(
|
| 156 |
+
output.getvalue(),
|
| 157 |
+
mimetype='text/csv',
|
| 158 |
+
headers={'Content-Disposition': 'attachment; filename=audit_log.csv'}
|
| 159 |
+
)
|
| 160 |
+
elif format == 'json':
|
| 161 |
+
return jsonify([{
|
| 162 |
+
'timestamp': log.timestamp.strftime('%Y-%m-%d %H:%M:%S'),
|
| 163 |
+
'user': log.user.username,
|
| 164 |
+
'action': log.action,
|
| 165 |
+
'details': log.details
|
| 166 |
+
} for log in logs])
|
app.py
ADDED
|
@@ -0,0 +1,819 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Main application entry point
|
| 3 |
+
"""
|
| 4 |
+
from fastapi import FastAPI, Request
|
| 5 |
+
from fastapi.middleware.cors import CORSMiddleware
|
| 6 |
+
from fastapi.staticfiles import StaticFiles
|
| 7 |
+
from fastapi.templating import Jinja2Templates
|
| 8 |
+
|
| 9 |
+
from routers import admin, users, vpn
|
| 10 |
+
from core.error_handlers import setup_error_handlers
|
| 11 |
+
from core.database import init_db
|
| 12 |
+
from core.logger import setup_logging
|
| 13 |
+
from core.middleware import RequestLoggerMiddleware, ErrorHandlerMiddleware
|
| 14 |
+
|
| 15 |
+
import logging
|
| 16 |
+
import os
|
| 17 |
+
import requests
|
| 18 |
+
import socket
|
| 19 |
+
from starlette.responses import RedirectResponse as StarletteRedirect
|
| 20 |
+
from starlette.status import HTTP_302_FOUND, HTTP_303_SEE_OTHER
|
| 21 |
+
import logging
|
| 22 |
+
import json
|
| 23 |
+
import asyncio
|
| 24 |
+
import threading
|
| 25 |
+
import os
|
| 26 |
+
import json
|
| 27 |
+
import uuid
|
| 28 |
+
import bcrypt
|
| 29 |
+
from datetime import datetime, timedelta
|
| 30 |
+
import logging
|
| 31 |
+
from typing import Dict, Optional, List
|
| 32 |
+
from sqlalchemy.orm import Session
|
| 33 |
+
# Initialize logging
|
| 34 |
+
setup_logging()
|
| 35 |
+
logger = logging.getLogger(__name__)
|
| 36 |
+
|
| 37 |
+
# Create FastAPI application
|
| 38 |
+
app = FastAPI(
|
| 39 |
+
title="VPN Server API",
|
| 40 |
+
description="API for managing VPN server and users",
|
| 41 |
+
version="1.0.0"
|
| 42 |
+
)
|
| 43 |
+
|
| 44 |
+
# Configure CORS
|
| 45 |
+
app.add_middleware(
|
| 46 |
+
CORSMiddleware,
|
| 47 |
+
allow_origins=["*"], # Configure this properly in production
|
| 48 |
+
allow_credentials=True,
|
| 49 |
+
allow_methods=["*"],
|
| 50 |
+
allow_headers=["*"],
|
| 51 |
+
)
|
| 52 |
+
|
| 53 |
+
# Add custom middleware
|
| 54 |
+
app.add_middleware(RequestLoggerMiddleware)
|
| 55 |
+
app.add_middleware(ErrorHandlerMiddleware)
|
| 56 |
+
|
| 57 |
+
# Configure static files and templates
|
| 58 |
+
app.mount("/static", StaticFiles(directory="static"), name="static")
|
| 59 |
+
templates = Jinja2Templates(directory="templates")
|
| 60 |
+
|
| 61 |
+
# Include routers
|
| 62 |
+
app.include_router(admin.router, prefix="/api")
|
| 63 |
+
app.include_router(users.router, prefix="/api")
|
| 64 |
+
app.include_router(vpn.router, prefix="/api")
|
| 65 |
+
|
| 66 |
+
# Setup error handlers
|
| 67 |
+
setup_error_handlers(app)
|
| 68 |
+
|
| 69 |
+
@app.on_event("startup")
|
| 70 |
+
async def startup_event():
|
| 71 |
+
"""Initialize application on startup"""
|
| 72 |
+
try:
|
| 73 |
+
# Initialize database
|
| 74 |
+
await init_db()
|
| 75 |
+
logger.info("Database initialized successfully")
|
| 76 |
+
|
| 77 |
+
except Exception as e:
|
| 78 |
+
logger.error(f"Failed to initialize application: {e}")
|
| 79 |
+
raise
|
| 80 |
+
|
| 81 |
+
@app.get("/")
|
| 82 |
+
async def root(request: Request):
|
| 83 |
+
"""Root endpoint - renders the main template"""
|
| 84 |
+
return templates.TemplateResponse(
|
| 85 |
+
"index.html",
|
| 86 |
+
{"request": request}
|
| 87 |
+
)
|
| 88 |
+
|
| 89 |
+
@app.get("/health")
|
| 90 |
+
async def health_check():
|
| 91 |
+
"""Health check endpoint"""
|
| 92 |
+
return {"status": "healthy"}
|
| 93 |
+
# Database dependency
|
| 94 |
+
def get_db():
|
| 95 |
+
db = SessionLocal()
|
| 96 |
+
try:
|
| 97 |
+
yield db
|
| 98 |
+
finally:
|
| 99 |
+
db.close()
|
| 100 |
+
|
| 101 |
+
# OAuth2 password bearer for token auth
|
| 102 |
+
oauth2_scheme = OAuth2PasswordBearer(tokenUrl="token", auto_error=False)
|
| 103 |
+
|
| 104 |
+
# Pydantic models for request/response validation
|
| 105 |
+
class Token(BaseModel):
|
| 106 |
+
access_token: str
|
| 107 |
+
token_type: str
|
| 108 |
+
|
| 109 |
+
app = FastAPI()
|
| 110 |
+
|
| 111 |
+
# Configure static files and templates
|
| 112 |
+
app.mount("/static", StaticFiles(directory="web/static"), name="static")
|
| 113 |
+
templates = Jinja2Templates(directory="web/templates")
|
| 114 |
+
|
| 115 |
+
# Add template context processor for static URLs and other global helpers
|
| 116 |
+
def static_url(path: str) -> str:
|
| 117 |
+
return f"/static/{path}"
|
| 118 |
+
|
| 119 |
+
@app.get("/api/user/current")
|
| 120 |
+
async def get_current_user(request: Request, db: Session = Depends(get_db)):
|
| 121 |
+
try:
|
| 122 |
+
user = await get_optional_user(request, db)
|
| 123 |
+
if user:
|
| 124 |
+
return {
|
| 125 |
+
"username": user.username,
|
| 126 |
+
"id": str(user.id),
|
| 127 |
+
"config_id": user.config_id
|
| 128 |
+
}
|
| 129 |
+
return None
|
| 130 |
+
except Exception:
|
| 131 |
+
return None
|
| 132 |
+
|
| 133 |
+
templates.env.globals.update({
|
| 134 |
+
"static_url": static_url,
|
| 135 |
+
"url_for": lambda name, **params: f"/{name}" if name != "static" else static_url(params.get("filename", "")),
|
| 136 |
+
})
|
| 137 |
+
|
| 138 |
+
# OAuth2 password bearer for token auth
|
| 139 |
+
oauth2_scheme = OAuth2PasswordBearer(tokenUrl="token")
|
| 140 |
+
|
| 141 |
+
# Pydantic models for request/response validation
|
| 142 |
+
class Token(BaseModel):
|
| 143 |
+
access_token: str
|
| 144 |
+
token_type: str
|
| 145 |
+
|
| 146 |
+
class TokenData(BaseModel):
|
| 147 |
+
username: Optional[str] = None
|
| 148 |
+
|
| 149 |
+
class UserBase(BaseModel):
|
| 150 |
+
email: EmailStr
|
| 151 |
+
|
| 152 |
+
class UserCreate(UserBase):
|
| 153 |
+
password: str
|
| 154 |
+
|
| 155 |
+
class UserInDB(UserBase):
|
| 156 |
+
hashed_password: str
|
| 157 |
+
config_id: str
|
| 158 |
+
created_at: datetime
|
| 159 |
+
|
| 160 |
+
class Config:
|
| 161 |
+
orm_mode = True
|
| 162 |
+
|
| 163 |
+
async def get_current_user(token: str = Depends(oauth2_scheme), db: Session = Depends(get_db)):
|
| 164 |
+
credentials_exception = HTTPException(
|
| 165 |
+
status_code=status.HTTP_401_UNAUTHORIZED,
|
| 166 |
+
detail="Could not validate credentials",
|
| 167 |
+
headers={"WWW-Authenticate": "Bearer"},
|
| 168 |
+
)
|
| 169 |
+
try:
|
| 170 |
+
payload = jwt.decode(token, SECRET_KEY, algorithms=[ALGORITHM])
|
| 171 |
+
username: str = payload.get("sub")
|
| 172 |
+
if username is None:
|
| 173 |
+
raise credentials_exception
|
| 174 |
+
token_data = TokenData(username=username)
|
| 175 |
+
except JWTError:
|
| 176 |
+
raise credentials_exception
|
| 177 |
+
|
| 178 |
+
user = db.query(User).filter(User.username == token_data.username).first()
|
| 179 |
+
if user is None:
|
| 180 |
+
raise credentials_exception
|
| 181 |
+
return user
|
| 182 |
+
|
| 183 |
+
def create_access_token(data: dict, expires_delta: Optional[timedelta] = None):
|
| 184 |
+
to_encode = data.copy()
|
| 185 |
+
if expires_delta:
|
| 186 |
+
expire = datetime.utcnow() + expires_delta
|
| 187 |
+
else:
|
| 188 |
+
expire = datetime.utcnow() + timedelta(minutes=15)
|
| 189 |
+
to_encode.update({"exp": expire})
|
| 190 |
+
encoded_jwt = jwt.encode(to_encode, SECRET_KEY, algorithm=ALGORITHM)
|
| 191 |
+
return encoded_jwt
|
| 192 |
+
|
| 193 |
+
# Global VPN server state
|
| 194 |
+
vpn_server: Optional[OutlineServer] = None
|
| 195 |
+
session_tracker: Optional[SessionTracker] = None
|
| 196 |
+
logger: Optional[LogManager] = None
|
| 197 |
+
|
| 198 |
+
# Initialize database
|
| 199 |
+
init_db()
|
| 200 |
+
|
| 201 |
+
CONFIG_DIR = 'config'
|
| 202 |
+
USERS_FILE = os.path.join(CONFIG_DIR, 'users.json')
|
| 203 |
+
os.makedirs(CONFIG_DIR, exist_ok=True)
|
| 204 |
+
|
| 205 |
+
def load_users():
|
| 206 |
+
if os.path.exists(USERS_FILE):
|
| 207 |
+
with open(USERS_FILE, 'r') as f:
|
| 208 |
+
return json.load(f)
|
| 209 |
+
return {}
|
| 210 |
+
|
| 211 |
+
def save_users(users):
|
| 212 |
+
with open(USERS_FILE, 'w') as f:
|
| 213 |
+
json.dump(users, f)
|
| 214 |
+
|
| 215 |
+
def get_server_ip():
|
| 216 |
+
"""Get the server's public IP address"""
|
| 217 |
+
try:
|
| 218 |
+
# First try to get public IP from external service
|
| 219 |
+
response = requests.get('https://api.ipify.org', timeout=5)
|
| 220 |
+
if response.status_code == 200:
|
| 221 |
+
return response.text.strip()
|
| 222 |
+
except:
|
| 223 |
+
pass
|
| 224 |
+
|
| 225 |
+
try:
|
| 226 |
+
# Try another public IP service as backup
|
| 227 |
+
response = requests.get('https://ifconfig.me', timeout=5)
|
| 228 |
+
if response.status_code == 200:
|
| 229 |
+
return response.text.strip()
|
| 230 |
+
except:
|
| 231 |
+
pass
|
| 232 |
+
|
| 233 |
+
# Fallback: Get local IP
|
| 234 |
+
try:
|
| 235 |
+
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
|
| 236 |
+
s.connect(('8.8.8.8', 80))
|
| 237 |
+
local_ip = s.getsockname()[0]
|
| 238 |
+
s.close()
|
| 239 |
+
return local_ip
|
| 240 |
+
except:
|
| 241 |
+
# Last resort fallback
|
| 242 |
+
return '127.0.0.1'
|
| 243 |
+
|
| 244 |
+
def initialize_ikev2_server():
|
| 245 |
+
"""Initialize IKEv2 server"""
|
| 246 |
+
global ikev2_server
|
| 247 |
+
server_ip = get_server_ip()
|
| 248 |
+
ikev2_server = IKEv2Server(server_ip, logger)
|
| 249 |
+
logger.log(LogLevel.INFO, LogCategory.SYSTEM, "app", "IKEv2 server initialized")
|
| 250 |
+
|
| 251 |
+
def initialize_vpn_server():
|
| 252 |
+
"""Initialize the VPN server components"""
|
| 253 |
+
global vpn_server, session_tracker, logger, ikev2_server
|
| 254 |
+
|
| 255 |
+
# Initialize logger
|
| 256 |
+
logger = LogManager()
|
| 257 |
+
logger.log(LogLevel.INFO, LogCategory.SYSTEM, "app", "Initializing VPN server")
|
| 258 |
+
|
| 259 |
+
# Initialize session tracker
|
| 260 |
+
session_tracker = SessionTracker()
|
| 261 |
+
|
| 262 |
+
# Initialize IKEv2 server
|
| 263 |
+
initialize_ikev2_server()
|
| 264 |
+
|
| 265 |
+
# Initialize VPN server
|
| 266 |
+
server_ip = get_server_ip()
|
| 267 |
+
vpn_server_config = {
|
| 268 |
+
"server": {
|
| 269 |
+
"host": server_ip, # Use automatically detected server IP
|
| 270 |
+
"port": 8388, # Default Shadowsocks port
|
| 271 |
+
"virtual_network": "10.7.0.0/24", # Virtual network for client IPs
|
| 272 |
+
"protocols": {
|
| 273 |
+
"shadowsocks": {
|
| 274 |
+
"enabled": True,
|
| 275 |
+
"port": 8388
|
| 276 |
+
},
|
| 277 |
+
"wireguard": {
|
| 278 |
+
"enabled": True,
|
| 279 |
+
"port": 51820
|
| 280 |
+
},
|
| 281 |
+
"openvpn": {
|
| 282 |
+
"enabled": True,
|
| 283 |
+
"port": 1194
|
| 284 |
+
},
|
| 285 |
+
"ikev2": {
|
| 286 |
+
"enabled": True,
|
| 287 |
+
"port": 500
|
| 288 |
+
}
|
| 289 |
+
}
|
| 290 |
+
},
|
| 291 |
+
"security": {
|
| 292 |
+
"cipher": "aes-256-gcm",
|
| 293 |
+
"auth": "sha256",
|
| 294 |
+
"enable_perfect_forward_secrecy": True
|
| 295 |
+
}
|
| 296 |
+
}
|
| 297 |
+
vpn_server = OutlineServer(vpn_server_config)
|
| 298 |
+
# Start the VPN server in a separate thread
|
| 299 |
+
def run_server():
|
| 300 |
+
loop = asyncio.new_event_loop()
|
| 301 |
+
asyncio.set_event_loop(loop)
|
| 302 |
+
loop.run_until_complete(vpn_server.start())
|
| 303 |
+
loop.run_forever()
|
| 304 |
+
|
| 305 |
+
server_thread = threading.Thread(target=run_server, daemon=True)
|
| 306 |
+
server_thread.start()
|
| 307 |
+
logger.log(LogLevel.INFO, LogCategory.SYSTEM, "app", f"VPN server initialized and started on {server_ip}")
|
| 308 |
+
|
| 309 |
+
def load_users():
|
| 310 |
+
if os.path.exists(USERS_FILE):
|
| 311 |
+
with open(USERS_FILE, 'r') as f:
|
| 312 |
+
return json.load(f)
|
| 313 |
+
return {}
|
| 314 |
+
|
| 315 |
+
def save_users(users):
|
| 316 |
+
with open(USERS_FILE, 'w') as f:
|
| 317 |
+
json.dump(users, f)
|
| 318 |
+
|
| 319 |
+
def login_required(func):
|
| 320 |
+
@wraps(func)
|
| 321 |
+
async def wrapper(*args, **kwargs):
|
| 322 |
+
token = kwargs.get('token')
|
| 323 |
+
if not token:
|
| 324 |
+
return StarletteRedirect('/login', status_code=HTTP_303_SEE_OTHER)
|
| 325 |
+
try:
|
| 326 |
+
payload = jwt.decode(token, SECRET_KEY, algorithms=[ALGORITHM])
|
| 327 |
+
username: str = payload.get("sub")
|
| 328 |
+
if username is None:
|
| 329 |
+
return StarletteRedirect('/login', status_code=HTTP_303_SEE_OTHER)
|
| 330 |
+
except JWTError:
|
| 331 |
+
return StarletteRedirect('/login', status_code=HTTP_303_SEE_OTHER)
|
| 332 |
+
return await func(*args, **kwargs)
|
| 333 |
+
return wrapper
|
| 334 |
+
|
| 335 |
+
async def get_optional_user(
    request: Request,
    db: Session = Depends(get_db)
) -> Optional[User]:
    """Resolve the user behind a Bearer token, or None when absent/invalid.

    Best-effort lookup: any parsing, decoding, or database failure yields
    None rather than an error response.
    """
    try:
        header = request.headers.get("Authorization")
        if not header:
            return None
        scheme, _, token = header.partition(" ")
        if scheme.lower() != "bearer":
            return None
        try:
            claims = jwt.decode(token, SECRET_KEY, algorithms=[ALGORITHM])
            subject = claims.get("sub")
            if subject is None:
                return None
            return db.query(User).filter(User.username == subject).first()
        except JWTError:
            return None
    except Exception:
        # Deliberate catch-all: an optional user must never break the request.
        return None
|
| 357 |
+
|
| 358 |
+
@app.get("/", response_class=HTMLResponse)
async def index(request: Request):
    """Render the landing page."""
    context = {"request": request}
    return templates.TemplateResponse("index.html", context)
|
| 361 |
+
|
| 362 |
+
@app.post("/token")
async def login(
    request: Request,
    form_data: OAuth2PasswordRequestForm = Depends(),
    db: Session = Depends(get_db)
):
    """Authenticate a user and issue a JWT access token.

    Raises 401 for an unknown username or wrong password, 403 when the
    account is locked. Failed password attempts are now recorded so the
    lockout policy in ``User.record_login_attempt`` can actually trigger
    (the original only ever recorded successes).
    """
    user = db.query(User).filter(User.username == form_data.username).first()
    if not user:
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="Incorrect username or password",
            headers={"WWW-Authenticate": "Bearer"},
        )

    # Verify password; persist the failure so repeated attempts can lock the account.
    if not verify_password(form_data.password, user.hashed_password):
        user.record_login_attempt(success=False)
        db.commit()
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="Incorrect username or password",
            headers={"WWW-Authenticate": "Bearer"},
        )

    # Check if account is locked
    if user.status == UserStatus.LOCKED:
        raise HTTPException(
            status_code=status.HTTP_403_FORBIDDEN,
            detail="Account is locked. Please contact support."
        )

    # Create access token
    access_token_expires = timedelta(minutes=ACCESS_TOKEN_EXPIRE_MINUTES)
    access_token = create_access_token(
        data={"sub": user.username}, expires_delta=access_token_expires
    )

    # Record successful login
    user_service = UserService(db)
    user_service.create_session(
        user=user,
        ip_address=request.client.host,
        device_info=request.headers.get("user-agent", "")
    )
    user.record_login_attempt(success=True)
    db.commit()

    return {"access_token": access_token, "token_type": "bearer"}
|
| 408 |
+
|
| 409 |
+
@app.post("/signup", response_model=Token)
async def signup(user: UserCreate, db: Session = Depends(get_db)):
    """Register a new account, provision its VPN config, and return a JWT."""
    # Reject duplicate registrations.
    existing = db.query(User).filter(User.username == user.email).first()
    if existing:
        raise HTTPException(status_code=400, detail="Email already registered")

    # Persist the new account with a fresh per-user config id.
    config_id = str(uuid.uuid4())
    new_user = User(
        username=user.email,
        hashed_password=get_password_hash(user.password),
        config_id=config_id,
        created_at=datetime.utcnow()
    )
    db.add(new_user)
    db.commit()
    db.refresh(new_user)

    # Provision the per-user VPN configuration files.
    create_user_config(config_id)

    # Issue the session token.
    token = create_access_token(
        data={"sub": user.email},
        expires_delta=timedelta(minutes=ACCESS_TOKEN_EXPIRE_MINUTES)
    )
    return {"access_token": token, "token_type": "bearer"}
|
| 439 |
+
|
| 440 |
+
@app.get("/signup", response_class=HTMLResponse)
async def signup_form(request: Request):
    """Render the registration form."""
    context = {"request": request}
    return templates.TemplateResponse("signup.html", context)
|
| 443 |
+
|
| 444 |
+
@app.get("/dashboard", response_class=HTMLResponse)
async def dashboard(request: Request, current_user: User = Depends(get_current_user)):
    """Render the per-user dashboard with current usage statistics."""
    context = {
        "request": request,
        "stats": get_user_stats(current_user.config_id),
    }
    return templates.TemplateResponse("dashboard.html", context)
|
| 451 |
+
|
| 452 |
+
@app.get('/download_config')
async def download_config(current_user: User = Depends(get_current_user)):
    """Return the caller's VPN configuration as JSON; 404 when missing."""
    config_path = os.path.join(CONFIG_DIR, f"{current_user.config_id}.json")
    if not os.path.exists(config_path):
        raise HTTPException(
            status_code=404,
            detail="Configuration not found"
        )
    with open(config_path, 'r') as fh:
        payload = json.load(fh)
    return JSONResponse(content=payload)
|
| 466 |
+
|
| 467 |
+
@app.get('/api/stats')
async def get_stats(current_user: User = Depends(get_current_user)):
    """Expose the caller's usage statistics as a JSON API."""
    stats = get_user_stats(current_user.config_id)
    return JSONResponse(content=stats)
|
| 470 |
+
|
| 471 |
+
def get_server_ip():
    """Get the server's public IP address.

    Tries an external lookup first, then derives the primary local
    interface address, and finally falls back to loopback. Never raises.
    """
    # First try to get public IP from external service.
    try:
        # Timeout added: the original unbounded call could hang startup
        # indefinitely when outbound HTTP is blocked.
        response = requests.get('https://api.ipify.org', timeout=5)
        if response.status_code == 200:
            return response.text.strip()
    except Exception:
        # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit propagate.
        pass

    # Fallback: "connect" a UDP socket (no packets are sent) to learn the
    # local address the OS would route through.
    try:
        s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        try:
            s.connect(('8.8.8.8', 80))
            return s.getsockname()[0]
        finally:
            # Close even when connect() raises (the original leaked the socket).
            s.close()
    except Exception:
        return '127.0.0.1'  # Last resort fallback
|
| 490 |
+
|
| 491 |
+
def initialize_ikev2_server():
    """Create the module-global IKEv2 server bound to this host's IP."""
    global ikev2_server
    host_ip = get_server_ip()
    ikev2_server = IKEv2Server(host_ip, logger)
    logger.log(LogLevel.INFO, LogCategory.SYSTEM, "app", "IKEv2 server initialized")
|
| 497 |
+
|
| 498 |
+
def generate_ikev2_certificate(config_id: str) -> Dict:
    """Generate IKEv2 credentials and certificates for a user.

    Returns the certificate data from the IKEv2 server, or None when
    generation fails (the failure is logged).
    """
    user_name = f"user_{config_id[:8]}"
    user_password = str(uuid.uuid4())
    shared_key = str(uuid.uuid4())

    try:
        certs = ikev2_server.add_user(config_id, user_name, user_password, shared_key)
        logger.info(LogCategory.SYSTEM, "app", f"Generated IKEv2 certificates for user {config_id}")
        return certs
    except Exception as e:
        logger.error(LogCategory.SYSTEM, "app", f"Failed to generate IKEv2 certificates: {e}")
        return None
|
| 511 |
+
|
| 512 |
+
def create_user_config(config_id):
    """Create and persist the multi-protocol VPN configuration for a new user.

    Writes ``CONFIG_DIR/<config_id>.json`` containing per-protocol settings
    for Shadowsocks, IKEv2, L2TP/IPsec, PPTP, OpenVPN and WireGuard.

    Fixes over the original:
    - the first (dead) ``l2tp_config`` assignment, overwritten before use,
      was removed;
    - ``generate_wireguard_keys`` is called exactly once and its result
      reused, instead of three separate calls that could yield a
      mismatched client keypair;
    - the OpenVPN and WireGuard configs (previously built but silently
      dropped) are now included in the saved ``protocols`` map — an
      additive, backward-compatible schema change.
    """
    if not os.path.exists(CONFIG_DIR):
        os.makedirs(CONFIG_DIR)

    server_ip = get_server_ip()
    access_key = str(uuid.uuid4())

    # Outline/Shadowsocks config
    ss_config = {
        'id': config_id,
        'server': {
            'host': server_ip,
            'port': 8388  # Shadowsocks port
        },
        'access_key': access_key,
        'protocol': 'shadowsocks',
        'created_at': datetime.now().isoformat()
    }

    # IKEv2 config (Windows 10/11, Android 10+)
    ikev2_config = {
        'id': f"{config_id}_ikev2",
        'server': {
            'host': server_ip,
            'port': 500  # IKEv2 port
        },
        'credentials': {
            'username': f"user_{config_id[:8]}",
            'password': str(uuid.uuid4()),
        },
        'psk': str(uuid.uuid4()),  # Pre-shared key
        'certificate': generate_ikev2_certificate(config_id),
        'protocol': 'ikev2',
        'created_at': datetime.now().isoformat()
    }

    # PPTP config (Legacy support - Windows, Android)
    pptp_config = {
        'id': f"{config_id}_pptp",
        'server': {
            'host': server_ip,
            'port': 1723  # PPTP port
        },
        'credentials': {
            'username': f"user_{config_id[:8]}",
            'password': str(uuid.uuid4())
        },
        'protocol': 'pptp',
        'encryption': 'require-mppe',  # Maximum PPTP security
        'warning': 'PPTP is considered less secure, use IKEv2 or L2TP/IPsec when possible',
        'created_at': datetime.now().isoformat()
    }

    # OpenVPN config (Universal support)
    openvpn_config = {
        'id': f"{config_id}_openvpn",
        'server': {
            'host': server_ip,
            'port': 1194,  # OpenVPN default port
            'protocol': 'udp'  # UDP for better performance
        },
        'credentials': {
            'username': f"user_{config_id[:8]}",
            'password': str(uuid.uuid4())
        },
        'certificates': generate_openvpn_certificates(config_id),
        'protocol': 'openvpn',
        'created_at': datetime.now().isoformat(),
        'config_file': generate_openvpn_config(config_id, server_ip)
    }

    # WireGuard config (Built-in Windows 11, Android, iOS).
    # Generate the keypair ONCE so the client's private and public keys match.
    wg_keys = generate_wireguard_keys(config_id)
    wireguard_config = {
        'id': f"{config_id}_wireguard",
        'server': {
            'host': server_ip,
            'port': 51820,  # WireGuard default port
            'public_key': wg_keys['server_public'],
            'allowed_ips': ['0.0.0.0/0', '::/0']  # Route all traffic
        },
        'client': {
            'private_key': wg_keys['client_private'],
            'public_key': wg_keys['client_public'],
            'address': f'10.7.0.{2 + len(load_users())}',  # Unique IP for each client
            'dns': ['1.1.1.1', '8.8.8.8']
        },
        'protocol': 'wireguard',
        'created_at': datetime.now().isoformat()
    }

    # L2TP/IPsec config (Built-in Windows, Android, iOS)
    l2tp_config = {
        'id': f"{config_id}_l2tp",
        'server': {
            'host': server_ip,
            'port': 1701,  # L2TP port
        },
        'credentials': {
            'username': f"user_{config_id[:8]}",
            'password': str(uuid.uuid4())
        },
        'ipsec': {
            'psk': str(uuid.uuid4())  # Pre-shared key for IPsec
        },
        'protocol': 'l2tp_ipsec',
        'created_at': datetime.now().isoformat()
    }

    # Combined config with all supported protocols
    config = {
        'id': config_id,
        'protocols': {
            'shadowsocks': ss_config,
            'ikev2': ikev2_config,
            'l2tp': l2tp_config,
            'pptp': pptp_config,
            'openvpn': openvpn_config,
            'wireguard': wireguard_config
        },
        'recommended_protocol': {
            'windows': 'ikev2',
            'android': 'ikev2',
            'fallback': 'l2tp'
        },
        'created_at': datetime.now().isoformat()
    }

    config_path = os.path.join(CONFIG_DIR, f"{config_id}.json")
    with open(config_path, 'w') as f:
        json.dump(config, f)
|
| 664 |
+
|
| 665 |
+
def get_user_stats(config_id):
    """Get real VPN usage statistics for a user from all active sessions.

    Returns a dict with aggregate byte counters, connection timestamps,
    overall status and per-session details, or None when the session
    tracker is unavailable or an unexpected error occurs.
    """
    try:
        # The module-level tracker is set up elsewhere at startup; without it
        # there is nothing to report.
        if not session_tracker:
            logger.error(LogCategory.SYSTEM, "app", "Session tracker not initialized")
            return None

        # Get all sessions for this user
        user_sessions = session_tracker.get_user_sessions(config_id)
        if not user_sessions:
            # No sessions at all: report an explicit "disconnected" shape so
            # callers always see the same keys.
            return {
                'bytes_sent': 0,
                'bytes_received': 0,
                'connected_since': None,
                'last_seen': None,
                'status': 'disconnected',
                'active_sessions': [],
                'protocols': []
            }

        # Aggregate stats from all active sessions
        total_bytes_sent = 0
        total_bytes_received = 0
        earliest_connection = None
        latest_seen = None
        active_sessions = []
        used_protocols = set()

        for sess in user_sessions:
            # Update totals (bytes_out = sent to peer, bytes_in = received)
            total_bytes_sent += sess.bytes_out
            total_bytes_received += sess.bytes_in

            # Track connection times
            # NOTE(review): start_time/last_seen are assumed to be epoch
            # seconds in local time (naive datetimes) — confirm against the
            # session tracker implementation.
            session_start = datetime.fromtimestamp(sess.start_time)
            session_last_seen = datetime.fromtimestamp(sess.last_seen)

            if not earliest_connection or session_start < earliest_connection:
                earliest_connection = session_start
            if not latest_seen or session_last_seen > latest_seen:
                latest_seen = session_last_seen

            # Track protocols
            used_protocols.add(sess.protocol)

            # Get session details
            session_info = {
                'id': sess.session_id,
                'protocol': sess.protocol,
                'assigned_ip': sess.assigned_ip,
                'connected_since': session_start.isoformat(),
                'last_seen': session_last_seen.isoformat(),
                'bytes_sent': sess.bytes_out,
                'bytes_received': sess.bytes_in,
                'is_offline': sess.is_offline
            }
            active_sessions.append(session_info)

        # Determine overall status: "active" when any session was seen within
        # the last 5 minutes.
        current_time = datetime.now()
        is_active = any(
            (current_time - datetime.fromtimestamp(s.last_seen)).total_seconds() < 300  # 5 minutes
            for s in user_sessions
        )

        status = 'active' if is_active else 'offline'
        if not is_active and any(s.is_offline for s in user_sessions):
            # At least one session supports offline access even though none
            # are currently live.
            status = 'offline_available'

        return {
            'bytes_sent': total_bytes_sent,
            'bytes_received': total_bytes_received,
            'connected_since': earliest_connection.isoformat() if earliest_connection else None,
            'last_seen': latest_seen.isoformat() if latest_seen else None,
            'status': status,
            'active_sessions': active_sessions,
            'protocols': list(used_protocols)
        }

    except Exception as e:
        # Best-effort endpoint: log and degrade to None rather than erroring.
        logger.error(LogCategory.SYSTEM, "app", f"Error getting user stats: {e}")
        return None
|
| 747 |
+
|
| 748 |
+
@app.post('/logout')
async def logout(current_user: User = Depends(get_current_user), db: Session = Depends(get_db)):
    """Expire the caller's most recent session and redirect to the home page.

    Raises 500 on unexpected failure; the original discarded the caught
    exception entirely — it is now logged and chained so the cause survives.
    """
    try:
        # Find and end the current session (most recent one for this user).
        current_session = (
            db.query(UserSession)
            .filter(UserSession.user_id == current_user.id)
            .order_by(UserSession.created_at.desc())
            .first()
        )
        if current_session:
            current_session.expires_at = datetime.utcnow()
            db.commit()

        return StarletteRedirect('/', status_code=HTTP_303_SEE_OTHER)
    except Exception as e:
        # Keep the original cause visible in logs and in exception chaining.
        logger.error(LogCategory.SYSTEM, "app", f"Error during logout: {e}")
        raise HTTPException(
            status_code=500,
            detail="Error during logout"
        ) from e
|
| 768 |
+
|
| 769 |
+
@app.get('/forgot-password')
async def forgot_password_form(request: Request):
    """Render the password-reset request form."""
    context = {"request": request}
    return templates.TemplateResponse("forgot_password.html", context)
|
| 772 |
+
|
| 773 |
+
@app.post('/forgot-password')
async def forgot_password(email: str, db: Session = Depends(get_db)):
    """Start a password reset for *email*.

    The response is identical whether or not the address is registered,
    which prevents user enumeration.
    """
    try:
        user = db.query(User).filter(User.username == email).first()
        if user:
            # Generate and store a 24-hour reset token.
            user_service = UserService(db)
            user.reset_token = user_service.generate_reset_token()
            user.reset_token_expires = datetime.utcnow() + timedelta(hours=24)
            db.commit()
            # TODO: Send reset email with token
            # For now, just return success message

        # Single response for both branches — same wording either way.
        return JSONResponse(
            content={"message": "Password reset link has been sent to your email address"}
        )
    except Exception as e:
        raise HTTPException(
            status_code=500,
            detail="Error processing password reset request"
        )
|
| 800 |
+
|
| 801 |
+
@app.on_event("startup")
async def startup_event():
    """Bring up the embedded VPN server when the web application starts."""
    initialize_vpn_server()
|
| 805 |
+
|
| 806 |
+
@app.on_event("shutdown")
async def shutdown_event():
    """Stop the VPN server cleanly when the application shuts down."""
    global vpn_server
    running = vpn_server and vpn_server.is_running
    if running:
        await vpn_server.stop()
        logger.log(LogLevel.INFO, LogCategory.SYSTEM, "app", "VPN server shut down.")
|
| 813 |
+
|
| 814 |
+
if __name__ == '__main__':
    # Direct/dev entry point; port 7860 matches the Hugging Face Spaces
    # serving convention (this repo is a Space per its header).
    import uvicorn
    uvicorn.run(app, host="0.0.0.0", port=7860)
|
| 817 |
+
|
| 818 |
+
|
| 819 |
+
|
ca/ca.crt
ADDED
|
@@ -0,0 +1,30 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
-----BEGIN CERTIFICATE-----
|
| 2 |
+
MIIFEzCCAvugAwIBAgIUVVFA6g5tK6la3NhsAuNIHSgtKcIwDQYJKoZIhvcNAQEL
|
| 3 |
+
BQAwGTEXMBUGA1UEAwwOT3V0bGluZSBWUE4gQ0EwHhcNMjUwODExMjA0MzMyWhcN
|
| 4 |
+
MzUwODA5MjA0MzMyWjAZMRcwFQYDVQQDDA5PdXRsaW5lIFZQTiBDQTCCAiIwDQYJ
|
| 5 |
+
KoZIhvcNAQEBBQADggIPADCCAgoCggIBANV3/kuWVymGqMMtjvKZRNgMFoQqCOMG
|
| 6 |
+
nAJi7bTehIFgd+uRmKnDwoHvtYjkGEmsY6D4Mq3Z6OxL7Wp3Ny2h1CouTQGdpR+I
|
| 7 |
+
sTvJEoVVNO8dNxJp9TrAXX3FQkSbW4+c7vfImZwoOfxMTu8AZ+VcI3+XeWKmyxdg
|
| 8 |
+
YgQMcTbXAMqtmIcDPxkM+/llniT4SfddG+J2MrPRuEEBcrr0vhRVPqTZXoBZ0XeM
|
| 9 |
+
TnLb4b7rcyo6L/MhrT07dBiDWt3J96RSM8uz/24pEFbQk3J3ppVGTPp0tpbL6DOB
|
| 10 |
+
3fijvOW+/Dtcq3pUiS6baM07bzD9czHatOq7Z2LbZshrrkxckl0Z83tHAvWvUOM+
|
| 11 |
+
DAp+7FhxaxJQfHodKc5/QX4taEDR8iJj7uNe3tTE6gIIHlw+9oB6JN1RwSm/WQjW
|
| 12 |
+
QlZDrLXG9rCH6Rye2nRchIsYthDhjo1Q/FWEe14sIAxio/GKhPDK+GqWypSqbb1D
|
| 13 |
+
dcTQ0xduwXaP0DpA+ZJCLbZ+siX9tFyHW23YmhG14tP9IE2O9JsrFGDXDwLB4sAA
|
| 14 |
+
/DN+InQdW1SABFZk4TChcUs0BvaRuitzfsmiMFF1nR9Cytu3m82+FLSgFcDTNJv/
|
| 15 |
+
n/nFE9/3eMOWwu786JDqJ/oAmnqRP+0qpfepMWTxbUJuhtAXpFlmjSTiiDhoyO//
|
| 16 |
+
w925yr1PUW8xAgMBAAGjUzBRMB0GA1UdDgQWBBRcrKe+jD6bZWSXI//HpXairOb6
|
| 17 |
+
5zAfBgNVHSMEGDAWgBRcrKe+jD6bZWSXI//HpXairOb65zAPBgNVHRMBAf8EBTAD
|
| 18 |
+
AQH/MA0GCSqGSIb3DQEBCwUAA4ICAQAr7zGUZ4golbeNUlwf//Ex08/PT+wHo93C
|
| 19 |
+
A2yqnRE5aX4HcGSHBdXnfJIajGzqg1ha6G9KoOAeUwrz43MKQJ5MlTPsstw3F5eG
|
| 20 |
+
PTf8pvXtqoKCUz+n5NflVAnPi2rkwg3xek3FNnRxeFtRNQk7JdiZyUrrkls9zdXG
|
| 21 |
+
b51nabQulMyU+S2AgS8tZieis9zKtaxL6FIkw/Ppd3kkC+IKSDqWq73UKesUwmwz
|
| 22 |
+
wVxmAjCwGNkzQmIriAEMOrjrUU4TqTlcYELmKjWjQtFn3DHYx0dVtQ+EtXvpvQTh
|
| 23 |
+
5y5E//O5crj8w8APWZ9AbV+RnHEzzUmn+CWEmKlw6INc5GipGJiF4iOgyIBKKTg0
|
| 24 |
+
gfomG2nFrmmiQMLO+SMONdb6qWbdUMO8tptATV5+NEyjgcyoD4zhiQIjnDovil5F
|
| 25 |
+
klX8IwKMWSu7969uzXqNbr0gNFzUbpRtaoVoVJbaZMgDBSA+eWVJYlcDgE+qR6bE
|
| 26 |
+
NWTJiXZfVE3rat/pp/Nn25UHdtOBt1ZLGOItI7saB9xHqfS+aL+/Kj+mp8f3Tb/q
|
| 27 |
+
A20AeJUECG2jjErLYdUWY8+jMcPK6H34boUBuvmcdPRfBREsfotk0bNLiY+NlcnO
|
| 28 |
+
qCslUskUdquWVjYZ4Vce22byIJz2TdHWqfek4HAmU5p7VdKZ9Ux82pggWSHlEmUD
|
| 29 |
+
7YNttdciIA==
|
| 30 |
+
-----END CERTIFICATE-----
|
ca/ca.key
ADDED
|
@@ -0,0 +1,52 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
-----BEGIN PRIVATE KEY-----
|
| 2 |
+
MIIJQwIBADANBgkqhkiG9w0BAQEFAASCCS0wggkpAgEAAoICAQDVd/5LllcphqjD
|
| 3 |
+
LY7ymUTYDBaEKgjjBpwCYu203oSBYHfrkZipw8KB77WI5BhJrGOg+DKt2ejsS+1q
|
| 4 |
+
dzctodQqLk0BnaUfiLE7yRKFVTTvHTcSafU6wF19xUJEm1uPnO73yJmcKDn8TE7v
|
| 5 |
+
AGflXCN/l3lipssXYGIEDHE21wDKrZiHAz8ZDPv5ZZ4k+En3XRvidjKz0bhBAXK6
|
| 6 |
+
9L4UVT6k2V6AWdF3jE5y2+G+63MqOi/zIa09O3QYg1rdyfekUjPLs/9uKRBW0JNy
|
| 7 |
+
d6aVRkz6dLaWy+gzgd34o7zlvvw7XKt6VIkum2jNO28w/XMx2rTqu2di22bIa65M
|
| 8 |
+
XJJdGfN7RwL1r1DjPgwKfuxYcWsSUHx6HSnOf0F+LWhA0fIiY+7jXt7UxOoCCB5c
|
| 9 |
+
PvaAeiTdUcEpv1kI1kJWQ6y1xvawh+kcntp0XISLGLYQ4Y6NUPxVhHteLCAMYqPx
|
| 10 |
+
ioTwyvhqlsqUqm29Q3XE0NMXbsF2j9A6QPmSQi22frIl/bRch1tt2JoRteLT/SBN
|
| 11 |
+
jvSbKxRg1w8CweLAAPwzfiJ0HVtUgARWZOEwoXFLNAb2kborc37JojBRdZ0fQsrb
|
| 12 |
+
t5vNvhS0oBXA0zSb/5/5xRPf93jDlsLu/OiQ6if6AJp6kT/tKqX3qTFk8W1CbobQ
|
| 13 |
+
F6RZZo0k4og4aMjv/8Pducq9T1FvMQIDAQABAoICAF4EpfcjpYsQGIcyKxn1YGlp
|
| 14 |
+
VYdrPhPDhvXUHY7CTIjw9JBHxX3LzwDMk19R2tKj/xNYDXYdmiVswYnZLO/HrTrQ
|
| 15 |
+
vrDd/mp3mVvUEPixkQlZjDZrfYsdS3AH78poxHhpraRrcSBiZTuWXlOMkbXmkWny
|
| 16 |
+
TI+jF6LZnAHdewWkx1/8+kdIqkM9wULUO0VcJ7OvigcBeQ5S6XyUBzSJc6hf7SHM
|
| 17 |
+
7P7J0GR/YtPavUAJ0mTAUPscE4F7DIR5Yg16FTyFyfNHeVJK+rvJzI8nXLK1TlUn
|
| 18 |
+
D342G7SH17xZXWqw5cW9aHcOAYeKAiwWJ8BjeJd2FKWn2X6kVE4kgxV11i70LZSJ
|
| 19 |
+
eRtp3oEsUXOyTBu1XnVqkyokhvT4SW+UzCSbTVpCaDVbN223Ep2sw2ntjfL9OKwc
|
| 20 |
+
DmZaHhwycDtHABlERBpom+IZofVHH6NBX7CMsTNrD4y/qOSwdkbWrGhzGfNHBgrq
|
| 21 |
+
WPb9nSok+6m3LQT+8i7bpGj0LImYei5tp2rmQJt1psQOFTIksd7blOhy9dNeknj7
|
| 22 |
+
zrfLXfT6GkeZ2CPStg7yUDTdQ7mVYa5FeTqpi09bfGZ9Y1ACxSdvdfMlrkLqTRQH
|
| 23 |
+
KkXKlwg607mK5t/QSt7tlUiE9AFCrciXviheQd2hfij2R5Kfoprp4PoGiWz/o478
|
| 24 |
+
gsezT4CzugpEyZ/ICVqTAoIBAQD2jMVbqr5ivRsXdMhdru4MaZ5t/K+fHtL2V5c5
|
| 25 |
+
SkDzrOIdOpy25zNbIpbNdSyVIv2zm69VTZHV3lDGT3RSFjr7T3jK1h9HrCBk9duV
|
| 26 |
+
IC1NDYNoIajJWGzF3VjAdRz5gsGNu74+04uNkkYBJzY4m6KYNsCYSPQw3Q01HdOh
|
| 27 |
+
hVcIbxyzbUFfUb2Nck+bnTYJAACnkCMxAZNE9g6THnTnjNMMwhuBvt19W/L4Qurl
|
| 28 |
+
F+Ix1kaBPe+3BFI4IyMhe3W3LvEraESDri5dTS0BlgM/lpGh9cme1JQc34xV/eUA
|
| 29 |
+
E31JG60prQCTWkkZuZxOTjgiiE7Z7/9L0hZqfcIVWHpF5gb3AoIBAQDdpp59OAaY
|
| 30 |
+
3Wud+lGc3KTkoiPHZYzAcVshUhg2FxkAKrwUJA7GGjcFWETh+E9LYYy0IfQcRKiN
|
| 31 |
+
ZmV8aXYnL26O/N6Dr2mHOt5WHvlzkEIZCp0QMJImh0If25fZXuMRl9eRVOQZn+Gh
|
| 32 |
+
eU8mhzNKS+5apXuofY04hch8VXCaGkwPVeD0EKXx9xuKcatGUCjQ1zkp7I9hXI69
|
| 33 |
+
uX3pxZWHN2zCisFB9yEomq9G8xueUc86t0inFEsecESGLNOQmgxXscQTDt4tWG9h
|
| 34 |
+
7QQA1KA+TQeLg+i5Mr9xv++kTJNmPI63Jw0rWYvxk8mJRY5KUtSR+iSyYF960IJP
|
| 35 |
+
UlssOAFLMOkXAoIBAQDE6vpWlLErO87/lQ7ThHws/c7EGiZK+NuWVa862su11Edl
|
| 36 |
+
AQNaMp8aEy5PO184XpIzeg04HJR2NPJe8eb+CTNitb7MgujI3fmhqZyQJvsHp9tk
|
| 37 |
+
uD2PU0jNYFUaom9Z+c2N3n28wEmd8U5obWEpJWVgHZsGBn7C6Es8OW5me5Ff8x8B
|
| 38 |
+
UCn+b9LtvndG2vHljlL3gnAZHCD722sYpiLJLfkDH6XIoyFUlrQhBZGHGORY2cPG
|
| 39 |
+
RinIC3N/0tCkVW9Xt+53tPfEFMKDUri3o5FEoIYAzccTTMZfqUz1Aax9uxM96RUN
|
| 40 |
+
TFhBWMM6AL2O7Xp4WlZgSwelD09IDtmNIvXGDktRAoIBAQCt2h6+AM/L3wCmLM0O
|
| 41 |
+
yFHdsv91SsWXvFHKVOYApyVI6DwVYCLmZ3F4k7+TrnwjmCQQtgEOmxvJrOM1LlMq
|
| 42 |
+
cR26scSmbVPMafQygKEQb7ooghanuDEqXzUSX98+9BoOlpbSu08eejUzvj7C7ZDh
|
| 43 |
+
WaVfHCVeBvxZtTWHsExd0vqNnMKRLO28WCIV+QpqYD1jcSy5IX9k0oBzd6a3Ue7y
|
| 44 |
+
3BpGjScAYqJzgsCwWcbz6x8r4s7tnhE9krlstIRNC0dbEWfFuwexcYgLuyhEroHx
|
| 45 |
+
2+FrIM/NU2yt/+oraJTEwAMAzXSa5+XIWi7dqNzulwF8bkOSVd0OK7XKGcLBcDwz
|
| 46 |
+
ie2JAoIBAAEES8SwOYxbKxfle5zfEIJ33F9xvJZaoIHKK39oUsv6akG95QLuYi/O
|
| 47 |
+
EzNbvFs2DulZNijro9HavZzVTq8yevIsdJY3fUyVk/Rw408c8NiLPMqQs7CbuB43
|
| 48 |
+
e8mexW0rECPM0BWJaBAbVbYfUyNPWFgAucmiPj8EKTiA64P/xHvwfeRF7cDsnBYl
|
| 49 |
+
ecAePGTGa0Kkrmbg1VbTxNu9Y5POdw/yYYfYw2D0dSX2g/wPovvcjdBemmdHBMr4
|
| 50 |
+
UlL6YR/tc6/TuECC9FCR5Y/l9isd7v3lIanQKgQBG1+NkpIZ2rW/zDeBscAUGlYw
|
| 51 |
+
Xt2St+RJucRsevZirIfm7k2Dwyn22NY=
|
| 52 |
+
-----END PRIVATE KEY-----
|
config/outline_config.json
ADDED
|
@@ -0,0 +1,11 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"server": {
|
| 3 |
+
"port": 443,
|
| 4 |
+
"cipher": "chacha20-ietf-poly1305",
|
| 5 |
+
"timeout": 600,
|
| 6 |
+
"workers": 4,
|
| 7 |
+
"bind_address": "0.0.0.0",
|
| 8 |
+
"access_key_salt": "974edfe0ae852e69d07515fa724334668cea0b34b83c5e4e5f6c83c0fe4fd3ed"
|
| 9 |
+
},
|
| 10 |
+
"users": []
|
| 11 |
+
}
|
config/server.json
ADDED
|
@@ -0,0 +1,33 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"server": {
|
| 3 |
+
"host": "0.0.0.0",
|
| 4 |
+
"port": 8388,
|
| 5 |
+
"interface": "eth0",
|
| 6 |
+
"virtual_network": "10.0.0.0/16"
|
| 7 |
+
},
|
| 8 |
+
"protocols": {
|
| 9 |
+
"shadowsocks": true,
|
| 10 |
+
"l2tp": true,
|
| 11 |
+
"pptp": true
|
| 12 |
+
},
|
| 13 |
+
"offline_access": {
|
| 14 |
+
"enabled": true,
|
| 15 |
+
"timeout": 604800,
|
| 16 |
+
"max_offline_sessions": 5
|
| 17 |
+
},
|
| 18 |
+
"internet_sharing": {
|
| 19 |
+
"enabled": true,
|
| 20 |
+
"rate_limit": "10mbps",
|
| 21 |
+
"allowed_ports": ["80", "443", "53", "22"]
|
| 22 |
+
},
|
| 23 |
+
"security": {
|
| 24 |
+
"encryption": "aes-256-gcm",
|
| 25 |
+
"ipsec_psk": "your_pre_shared_key_here",
|
| 26 |
+
"certificate_path": "/path/to/server.crt",
|
| 27 |
+
"key_path": "/path/to/server.key"
|
| 28 |
+
},
|
| 29 |
+
"logging": {
|
| 30 |
+
"level": "info",
|
| 31 |
+
"file": "/var/log/vpn-server.log"
|
| 32 |
+
}
|
| 33 |
+
}
|
config/server_config.json
ADDED
|
@@ -0,0 +1,11 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"server": {
|
| 3 |
+
"host": "0.0.0.0",
|
| 4 |
+
"port": 8443,
|
| 5 |
+
"virtual_network": "10.0.0.0/24",
|
| 6 |
+
"dns_servers": [
|
| 7 |
+
"8.8.8.8",
|
| 8 |
+
"8.8.4.4"
|
| 9 |
+
]
|
| 10 |
+
}
|
| 11 |
+
}
|
config/users.json
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
{"test@example.com": {"password": "$2b$12$r7y/a..c8zrgBL/nlN1yQOCDYxNkJu/I8iQzBxBpTzxndZ16TIiuC", "created_at": "2025-08-18T16:26:15.307068", "config_id": "e58fa5bc-30b0-4c16-9533-3761727a56fa"}}
|
core/__pycache__/database.cpython-311.pyc
ADDED
|
Binary file (1.55 kB). View file
|
|
|
core/__pycache__/database_init.cpython-311.pyc
ADDED
|
Binary file (3.09 kB). View file
|
|
|
core/__pycache__/ikev2_server.cpython-311.pyc
ADDED
|
Binary file (9.63 kB). View file
|
|
|
core/__pycache__/ip_parser.cpython-311.pyc
ADDED
|
Binary file (10.4 kB). View file
|
|
|
core/__pycache__/logger.cpython-311.pyc
ADDED
|
Binary file (15.4 kB). View file
|
|
|
core/__pycache__/nat_engine.cpython-311.pyc
ADDED
|
Binary file (8.84 kB). View file
|
|
|
core/__pycache__/outline_config.cpython-311.pyc
ADDED
|
Binary file (9.86 kB). View file
|
|
|
core/__pycache__/outline_server.cpython-311.pyc
ADDED
|
Binary file (11.1 kB). View file
|
|
|
core/__pycache__/port_manager.cpython-311.pyc
ADDED
|
Binary file (6.08 kB). View file
|
|
|
core/__pycache__/session_tracker.cpython-311.pyc
ADDED
|
Binary file (16.7 kB). View file
|
|
|
core/__pycache__/shadowsocks_protocol.cpython-311.pyc
ADDED
|
Binary file (8.39 kB). View file
|
|
|
core/__pycache__/tcp_engine.cpython-311.pyc
ADDED
|
Binary file (17.8 kB). View file
|
|
|
core/__pycache__/tcp_forward.cpython-311.pyc
ADDED
|
Binary file (10.3 kB). View file
|
|
|
core/__pycache__/traffic_router.cpython-311.pyc
ADDED
|
Binary file (11.7 kB). View file
|
|
|
core/__pycache__/vpn_auth.cpython-311.pyc
ADDED
|
Binary file (3.22 kB). View file
|
|
|
core/auth.py
ADDED
|
@@ -0,0 +1,74 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
Authentication and authorization utilities
"""
from fastapi import Depends, HTTPException, status
from fastapi.security import OAuth2PasswordBearer
from jose import JWTError, jwt
from sqlalchemy.orm import Session
from datetime import datetime, timedelta
from typing import Optional

from core.database import get_db
from models.user import User, UserRole
from schemas.auth import TokenData

# JWT settings
# NOTE(review): the signing key is hard-coded and committed; move it to an
# environment variable/secret store. If changed, make sure app.py (which
# decodes tokens with its own SECRET_KEY) uses the same value — confirm.
SECRET_KEY = "your-secret-key"  # Change this to a secure secret key in production
ALGORITHM = "HS256"
ACCESS_TOKEN_EXPIRE_MINUTES = 30

# Extracts the Bearer token from requests; token endpoint is POST /token.
oauth2_scheme = OAuth2PasswordBearer(tokenUrl="token")
|
| 21 |
+
|
| 22 |
+
def create_access_token(data: dict, expires_delta: Optional[timedelta] = None):
    """Create a signed JWT carrying *data* plus an ``exp`` expiry claim.

    When *expires_delta* is falsy, the token lives for 15 minutes.
    """
    claims = data.copy()
    lifetime = expires_delta if expires_delta else timedelta(minutes=15)
    claims.update({"exp": datetime.utcnow() + lifetime})
    return jwt.encode(claims, SECRET_KEY, algorithm=ALGORITHM)
|
| 32 |
+
|
| 33 |
+
async def get_current_user(
    token: str = Depends(oauth2_scheme),
    db: Session = Depends(get_db)
) -> User:
    """Resolve the authenticated user from a JWT; raise 401 on any failure."""
    credentials_exception = HTTPException(
        status_code=status.HTTP_401_UNAUTHORIZED,
        detail="Could not validate credentials",
        headers={"WWW-Authenticate": "Bearer"},
    )
    try:
        payload = jwt.decode(token, SECRET_KEY, algorithms=[ALGORITHM])
        subject = payload.get("sub")
        if subject is None:
            raise credentials_exception
        token_data = TokenData(username=subject)
    except JWTError:
        raise credentials_exception

    user = db.query(User).filter(User.username == token_data.username).first()
    if user is None:
        raise credentials_exception
    return user
|
| 56 |
+
|
| 57 |
+
async def get_current_active_user(
    current_user: User = Depends(get_current_user)
) -> User:
    """Pass through the user unless flagged inactive (400)."""
    if current_user.is_active:
        return current_user
    raise HTTPException(status_code=400, detail="Inactive user")
|
| 64 |
+
|
| 65 |
+
async def get_current_admin_user(
    current_user: User = Depends(get_current_active_user)
) -> User:
    """Restrict an endpoint to administrators; non-admins get HTTP 403."""
    if current_user.role == UserRole.ADMIN:
        return current_user
    raise HTTPException(
        status_code=status.HTTP_403_FORBIDDEN,
        detail="Not enough privileges"
    )
|
core/database.py
ADDED
|
@@ -0,0 +1,31 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Database configuration and base models
|
| 3 |
+
"""
|
| 4 |
+
from sqlalchemy import create_engine
|
| 5 |
+
from sqlalchemy.ext.declarative import declarative_base
|
| 6 |
+
from sqlalchemy.orm import sessionmaker
|
| 7 |
+
import os
|
| 8 |
+
|
| 9 |
+
# Get the database file path
|
| 10 |
+
DB_PATH = os.path.join(os.path.dirname(os.path.dirname(__file__)), 'data', 'vpn.db')
|
| 11 |
+
os.makedirs(os.path.dirname(DB_PATH), exist_ok=True)
|
| 12 |
+
|
| 13 |
+
# Create database engine
|
| 14 |
+
SQLALCHEMY_DATABASE_URL = f"sqlite:///{DB_PATH}"
|
| 15 |
+
engine = create_engine(
|
| 16 |
+
SQLALCHEMY_DATABASE_URL, connect_args={"check_same_thread": False}
|
| 17 |
+
)
|
| 18 |
+
|
| 19 |
+
# Create SessionLocal class
|
| 20 |
+
SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)
|
| 21 |
+
|
| 22 |
+
# Create Base class
|
| 23 |
+
Base = declarative_base()
|
| 24 |
+
|
| 25 |
+
def get_db():
|
| 26 |
+
"""Dependency to get database session"""
|
| 27 |
+
db = SessionLocal()
|
| 28 |
+
try:
|
| 29 |
+
yield db
|
| 30 |
+
finally:
|
| 31 |
+
db.close()
|
core/database_init.py
ADDED
|
@@ -0,0 +1,52 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Database initialization and migration
|
| 3 |
+
"""
|
| 4 |
+
from sqlalchemy import event
|
| 5 |
+
from .database import Base, engine, SessionLocal
|
| 6 |
+
from .models.user import User, UserRole, UserStatus
|
| 7 |
+
import logging
|
| 8 |
+
|
| 9 |
+
logger = logging.getLogger(__name__)
|
| 10 |
+
|
| 11 |
+
def init_db():
    """Initialize database and create default admin user.

    Creates all ORM tables and, on first run, seeds an ``admin``/``admin``
    account (which should be changed immediately after first login).

    Returns:
        True on success, False if initialization failed (the error is logged).
    """
    db = None
    try:
        # Create all tables
        Base.metadata.create_all(bind=engine)

        # Create a session
        db = SessionLocal()

        # Check if admin user exists
        admin = db.query(User).filter(User.role == UserRole.ADMIN).first()
        if not admin:
            # Create default admin user with password 'admin'
            # Note: This should be changed immediately after first login
            admin = User(
                username="admin",
                password_hash=User.hash_password("admin"),
                role=UserRole.ADMIN,
                status=UserStatus.ACTIVE
            )
            db.add(admin)
            db.commit()
            logger.info("Created default admin user")

        logger.info("Database initialized successfully")
        return True

    except Exception as e:
        logger.error(f"Error initializing database: {e}")
        return False
    finally:
        # Always release the session; the previous version skipped
        # db.close() whenever an exception was raised, leaking the session.
        if db is not None:
            db.close()
|
| 42 |
+
|
| 43 |
+
@event.listens_for(User, 'after_insert')
def user_created(mapper, connection, target):
    """Audit hook: record every newly inserted user."""
    logger.info("New user created: %s", target.username)
|
| 47 |
+
|
| 48 |
+
@event.listens_for(User.status, 'set')
def user_status_changed(target, value, oldvalue, initiator):
    """Audit hook: record user status transitions (skips the initial set)."""
    if oldvalue and value != oldvalue:
        logger.info(
            "User %s status changed from %s to %s" % (target.username, oldvalue, value)
        )
|
core/error_handlers.py
ADDED
|
@@ -0,0 +1,51 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Global error handlers and exceptions
|
| 3 |
+
"""
|
| 4 |
+
from fastapi import FastAPI, Request, status
|
| 5 |
+
from fastapi.responses import JSONResponse
|
| 6 |
+
from sqlalchemy.exc import SQLAlchemyError
|
| 7 |
+
from typing import Union, Dict, Any
|
| 8 |
+
|
| 9 |
+
class AppException(Exception):
|
| 10 |
+
"""Base application exception"""
|
| 11 |
+
def __init__(
|
| 12 |
+
self,
|
| 13 |
+
status_code: int,
|
| 14 |
+
detail: Union[str, Dict[str, Any]],
|
| 15 |
+
headers: Dict[str, str] = None
|
| 16 |
+
):
|
| 17 |
+
self.status_code = status_code
|
| 18 |
+
self.detail = detail
|
| 19 |
+
self.headers = headers
|
| 20 |
+
|
| 21 |
+
def setup_error_handlers(app: FastAPI):
    """Setup global error handlers"""

    @app.exception_handler(AppException)
    async def app_exception_handler(request: Request, exc: AppException):
        # Fall back to an empty header mapping when none was supplied.
        return JSONResponse(
            status_code=exc.status_code,
            content={"detail": exc.detail},
            headers=exc.headers or {}
        )

    @app.exception_handler(SQLAlchemyError)
    async def sqlalchemy_exception_handler(request: Request, exc: SQLAlchemyError):
        # NOTE(review): str(exc) is echoed to the client and may leak internal
        # details — consider logging it server-side only. Kept as-is here.
        return JSONResponse(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            content={
                "detail": "Database error occurred",
                "message": str(exc)
            }
        )

    @app.exception_handler(Exception)
    async def general_exception_handler(request: Request, exc: Exception):
        # Catch-all boundary handler for anything the routes did not handle.
        return JSONResponse(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            content={
                "detail": "An unexpected error occurred",
                "message": str(exc)
            }
        )
|
core/ikev2_server.py
ADDED
|
@@ -0,0 +1,207 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
IKEv2 Server Implementation for Outline VPN
|
| 3 |
+
"""
|
| 4 |
+
|
| 5 |
+
import os
|
| 6 |
+
import subprocess
|
| 7 |
+
import tempfile
|
| 8 |
+
from typing import Dict, Optional
|
| 9 |
+
import uuid
|
| 10 |
+
from datetime import datetime, timedelta
|
| 11 |
+
|
| 12 |
+
class IKEv2Server:
    """Manages an IKEv2 PKI and per-user strongSwan configuration.

    Shells out to the ``openssl`` CLI to maintain a local CA (``ca/``),
    issue client certificates (``certs/``) and write per-user strongSwan
    connection snippets (``config/``). The async start/stop methods are
    placeholders; no daemon is launched here.
    """

    def __init__(self, server_ip: str, logger):
        # server_ip is only used when writing the PSK line to ipsec.secrets.
        self.server_ip = server_ip
        self.logger = logger
        self.ca_dir = "ca"
        self.cert_dir = "certs"
        self.config_dir = "config"
        # Side effects at construction time: creates directories and,
        # if missing, generates the CA key/cert via openssl.
        self._setup_directories()
        self._initialize_ca()

    def _setup_directories(self):
        """Create necessary directories"""
        for directory in [self.ca_dir, self.cert_dir, self.config_dir]:
            os.makedirs(directory, exist_ok=True)

    def _initialize_ca(self):
        """Initialize Certificate Authority if not already done.

        Generates a 4096-bit RSA CA key and a self-signed certificate valid
        for 10 years, only when either file is missing. Raises
        CalledProcessError if openssl fails (check=True).
        """
        ca_key = os.path.join(self.ca_dir, "ca.key")
        ca_cert = os.path.join(self.ca_dir, "ca.crt")

        if not os.path.exists(ca_key) or not os.path.exists(ca_cert):
            # Generate CA private key
            subprocess.run([
                "openssl", "genrsa",
                "-out", ca_key,
                "4096"
            ], check=True)

            # Generate CA certificate
            subprocess.run([
                "openssl", "req",
                "-x509",
                "-new",
                "-nodes",
                "-key", ca_key,
                "-sha256",
                "-days", "3650",
                "-out", ca_cert,
                "-subj", f"/CN=Outline VPN CA"
            ], check=True)

    def generate_certificate(self, user_id: str) -> Dict[str, str]:
        """Generate client certificate for IKEv2.

        Issues a 2048-bit RSA key, signs a one-year certificate with the CA,
        and bundles both into a PKCS#12 file protected by a random UUID
        password.

        Returns:
            Dict with PEM text for 'certificate', 'private_key' and
            'ca_certificate', plus the 'p12_bundle' file path (not its
            contents) and the 'p12_password'.

        Raises:
            Re-raises any openssl/file error after logging it.
        """
        cert_name = f"client_{user_id}"
        key_path = os.path.join(self.cert_dir, f"{cert_name}.key")
        csr_path = os.path.join(self.cert_dir, f"{cert_name}.csr")
        cert_path = os.path.join(self.cert_dir, f"{cert_name}.crt")
        p12_path = os.path.join(self.cert_dir, f"{cert_name}.p12")

        try:
            # Generate client private key
            subprocess.run([
                "openssl", "genrsa",
                "-out", key_path,
                "2048"
            ], check=True)

            # Generate CSR
            subprocess.run([
                "openssl", "req",
                "-new",
                "-key", key_path,
                "-out", csr_path,
                "-subj", f"/CN=client_{user_id}"
            ], check=True)

            # Sign client certificate with CA
            subprocess.run([
                "openssl", "x509",
                "-req",
                "-in", csr_path,
                "-CA", os.path.join(self.ca_dir, "ca.crt"),
                "-CAkey", os.path.join(self.ca_dir, "ca.key"),
                "-CAcreateserial",
                "-out", cert_path,
                "-days", "365",
                "-sha256"
            ], check=True)

            # Create PKCS12 bundle with a random one-time export password.
            export_password = str(uuid.uuid4())
            subprocess.run([
                "openssl", "pkcs12",
                "-export",
                "-in", cert_path,
                "-inkey", key_path,
                "-out", p12_path,
                "-password", f"pass:{export_password}"
            ], check=True)

            # Read certificate files back as PEM text for the caller.
            with open(cert_path, 'r') as f:
                cert_data = f.read()
            with open(key_path, 'r') as f:
                key_data = f.read()
            with open(os.path.join(self.ca_dir, "ca.crt"), 'r') as f:
                ca_data = f.read()

            return {
                'certificate': cert_data,
                'private_key': key_data,
                'ca_certificate': ca_data,
                'p12_bundle': p12_path,
                'p12_password': export_password
            }

        except Exception as e:
            self.logger.error("Error generating certificate: " + str(e))
            raise

    def generate_strongswan_config(self, user_id: str, psk: str) -> str:
        """Generate strongSwan configuration for a user.

        Writes a per-user ``conn`` section to config/outline-<user_id>.conf
        and returns its path.

        NOTE(review): the ``psk`` parameter is accepted but never used in the
        generated config — the PSK is only written by add_user() into
        ipsec.secrets. Confirm whether it was meant to appear here.
        """
        config = f"""
conn outline-{user_id}
    auto=add
    compress=no
    type=tunnel
    keyexchange=ikev2
    fragmentation=yes
    forceencaps=yes

    # Local/Server configuration
    left=%any
    leftsubnet=0.0.0.0/0
    leftcert=/etc/ipsec.d/certs/server.crt
    leftsendcert=always
    leftid=@outline.vpn

    # Remote/Client configuration
    right=%any
    rightid=%any
    rightauth=eap-mschapv2
    rightsourceip=10.10.10.0/24
    rightdns=8.8.8.8,8.8.4.4

    # Security parameters
    ike=aes256-sha256-modp2048,aes128-sha1-modp2048
    esp=aes256-sha256,aes128-sha1
    dpdaction=clear
    dpddelay=300s
    rekey=no
"""
        config_path = os.path.join(self.config_dir, f"outline-{user_id}.conf")
        with open(config_path, 'w') as f:
            f.write(config)

        return config_path

    def add_user(self, user_id: str, username: str, password: str, psk: str):
        """Add a new VPN user.

        Issues certificates, writes the per-user strongSwan config and
        appends EAP and PSK credentials to config/ipsec.secrets.

        NOTE(review): the server PSK line is appended on every call, so
        repeated add_user calls duplicate it in ipsec.secrets.

        Returns:
            The certificate dict from generate_certificate().
        """
        # Generate certificates
        cert_data = self.generate_certificate(user_id)

        # Generate strongSwan config
        config_path = self.generate_strongswan_config(user_id, psk)

        # Add user credentials to strongSwan secrets
        secrets_path = os.path.join(self.config_dir, "ipsec.secrets")
        with open(secrets_path, 'a') as f:
            f.write(f'{username} : EAP "{password}"\n')
            f.write(f'{self.server_ip} %any : PSK "{psk}"\n')

        return cert_data

    def remove_user(self, user_id: str):
        """Remove a VPN user.

        Deletes the user's certificate artifacts and strongSwan config.
        Credentials already written to ipsec.secrets are NOT removed.
        """
        # Remove certificates
        cert_name = f"client_{user_id}"
        for ext in ['.key', '.csr', '.crt', '.p12']:
            path = os.path.join(self.cert_dir, f"{cert_name}{ext}")
            if os.path.exists(path):
                os.remove(path)

        # Remove config
        config_path = os.path.join(self.config_dir, f"outline-{user_id}.conf")
        if os.path.exists(config_path):
            os.remove(config_path)

        # Remove from secrets (would need to rewrite the file)
        # This is a bit more complex and would require parsing and rewriting ipsec.secrets

    async def start(self):
        """Start the IKEv2 service (placeholder — logs only)."""
        self.logger.info("Starting IKEv2 service...")
        # Placeholder for actual IKEv2 service startup logic
        # This might involve starting strongSwan or similar
        pass

    async def stop(self):
        """Stop the IKEv2 service (placeholder — logs only)."""
        self.logger.info("Stopping IKEv2 service...")
        # Placeholder for actual IKEv2 service shutdown logic
        pass
|
| 206 |
+
|
| 207 |
+
|
core/ip_parser.py
ADDED
|
@@ -0,0 +1,230 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
IP Parser/Assembler Module
|
| 3 |
+
|
| 4 |
+
Handles IPv4 packet parsing and construction:
|
| 5 |
+
- Parse IPv4, UDP, and TCP headers
|
| 6 |
+
- Calculate and verify checksums
|
| 7 |
+
- Handle packet fragmentation and reassembly
|
| 8 |
+
- Support various IP options
|
| 9 |
+
"""
|
| 10 |
+
|
| 11 |
+
import struct
|
| 12 |
+
import socket
|
| 13 |
+
from typing import Dict, List, Optional, Tuple
|
| 14 |
+
from dataclasses import dataclass
|
| 15 |
+
from enum import Enum
|
| 16 |
+
|
| 17 |
+
|
| 18 |
+
class IPProtocol(Enum):
    """IANA IP protocol numbers handled by the parser."""

    ICMP = 1
    TCP = 6
    UDP = 17
|
| 22 |
+
|
| 23 |
+
|
| 24 |
+
@dataclass
class IPv4Header:
    """Parsed IPv4 header fields with convenience flag accessors."""

    version: int = 4
    ihl: int = 5  # Internet Header Length, counted in 32-bit words
    tos: int = 0  # Type of Service
    total_length: int = 0
    identification: int = 0
    flags: int = 0  # 3-bit field: reserved / Don't Fragment / More Fragments
    fragment_offset: int = 0  # 13-bit fragment offset
    ttl: int = 64  # Time to Live
    protocol: int = 0
    header_checksum: int = 0
    source_ip: str = '0.0.0.0'
    dest_ip: str = '0.0.0.0'
    options: bytes = b''

    @property
    def header_length(self) -> int:
        """Header length in bytes (IHL * 4)."""
        return 4 * self.ihl

    @property
    def dont_fragment(self) -> bool:
        """True when the DF bit (0b010 of the 3-bit flags field) is set."""
        return (self.flags & 0x2) != 0

    @property
    def more_fragments(self) -> bool:
        """True when the MF bit (0b001 of the 3-bit flags field) is set."""
        return (self.flags & 0x1) != 0

    @property
    def is_fragment(self) -> bool:
        """True for any fragment: MF set or a non-zero fragment offset."""
        return self.fragment_offset > 0 or self.more_fragments
|
| 60 |
+
|
| 61 |
+
|
| 62 |
+
@dataclass
class TCPHeader:
    """Parsed TCP header fields with per-flag boolean accessors."""

    source_port: int = 0
    dest_port: int = 0
    seq_num: int = 0
    ack_num: int = 0
    data_offset: int = 5  # header length in 32-bit words
    reserved: int = 0
    flags: int = 0  # 9-bit flag field: NS CWR ECE URG ACK PSH RST SYN FIN
    window_size: int = 65535
    checksum: int = 0
    urgent_pointer: int = 0
    options: bytes = b''

    def _flag(self, mask: int) -> bool:
        """True when the given bit of the flags field is set."""
        return (self.flags & mask) != 0

    @property
    def header_length(self) -> int:
        """Header length in bytes (data offset * 4)."""
        return 4 * self.data_offset

    # TCP flag accessors, one bit each from FIN (0x01) up to NS (0x100).
    @property
    def fin(self) -> bool:
        return self._flag(0x01)

    @property
    def syn(self) -> bool:
        return self._flag(0x02)

    @property
    def rst(self) -> bool:
        return self._flag(0x04)

    @property
    def psh(self) -> bool:
        return self._flag(0x08)

    @property
    def ack(self) -> bool:
        return self._flag(0x10)

    @property
    def urg(self) -> bool:
        return self._flag(0x20)

    @property
    def ece(self) -> bool:
        return self._flag(0x40)

    @property
    def cwr(self) -> bool:
        return self._flag(0x80)

    @property
    def ns(self) -> bool:
        return self._flag(0x100)
|
| 118 |
+
|
| 119 |
+
|
| 120 |
+
@dataclass
class UDPHeader:
    """Parsed UDP header fields (fixed 8-byte header)."""

    source_port: int = 0
    dest_port: int = 0
    length: int = 8  # header (8 bytes) + payload length
    checksum: int = 0
|
| 127 |
+
|
| 128 |
+
|
| 129 |
+
class IPParser:
    """Stateless parser/assembler helpers for IPv4, TCP and UDP headers."""

    @staticmethod
    def parse_ipv4_header(packet: bytes) -> Tuple[IPv4Header, bytes]:
        """Parse an IPv4 header; return (header, payload after options)."""
        if len(packet) < 20:
            raise ValueError("Packet too short for IPv4 header")

        (ver_ihl, tos, total_len, ident, flags_frag,
         ttl, proto, checksum, src, dst) = struct.unpack('!BBHHHBBH4s4s', packet[:20])

        header = IPv4Header(
            version=ver_ihl >> 4,
            ihl=ver_ihl & 0x0F,
            tos=tos,
            total_length=total_len,
            identification=ident,
            flags=flags_frag >> 13,          # top 3 bits
            fragment_offset=flags_frag & 0x1FFF,  # low 13 bits
            ttl=ttl,
            protocol=proto,
            header_checksum=checksum,
            source_ip=socket.inet_ntoa(src),
            dest_ip=socket.inet_ntoa(dst)
        )

        # Anything between byte 20 and the IHL-derived length is options.
        hdr_len = header.header_length
        if hdr_len > 20:
            header.options = packet[20:hdr_len]

        return header, packet[hdr_len:]

    @staticmethod
    def parse_tcp_header(packet: bytes) -> Tuple[TCPHeader, bytes]:
        """Parse a TCP header; return (header, payload after options)."""
        if len(packet) < 20:
            raise ValueError("Packet too short for TCP header")

        (sport, dport, seq, ack_no,
         offset_flags, window, checksum, urgent) = struct.unpack('!HHIIHHH', packet[:20])

        header = TCPHeader(
            source_port=sport,
            dest_port=dport,
            seq_num=seq,
            ack_num=ack_no,
            data_offset=offset_flags >> 12,  # top 4 bits
            flags=offset_flags & 0x1FF,      # low 9 bits
            window_size=window,
            checksum=checksum,
            urgent_pointer=urgent
        )

        # Options occupy the space between the fixed header and data offset.
        hdr_len = header.header_length
        if hdr_len > 20:
            header.options = packet[20:hdr_len]

        return header, packet[hdr_len:]

    @staticmethod
    def parse_udp_header(packet: bytes) -> Tuple[UDPHeader, bytes]:
        """Parse the fixed 8-byte UDP header; return (header, payload)."""
        if len(packet) < 8:
            raise ValueError("Packet too short for UDP header")

        sport, dport, length, checksum = struct.unpack('!HHHH', packet[:8])

        return UDPHeader(
            source_port=sport,
            dest_port=dport,
            length=length,
            checksum=checksum
        ), packet[8:]

    @staticmethod
    def calculate_checksum(data: bytes) -> int:
        """Internet checksum: ones'-complement of the 16-bit ones'-complement sum."""
        if len(data) % 2:
            data += b'\0'  # pad odd-length input with a zero byte

        total = sum(struct.unpack('!%dH' % (len(data) // 2), data))

        # Fold any carries out of the low 16 bits back in.
        while total >> 16:
            total = (total & 0xFFFF) + (total >> 16)

        return ~total & 0xFFFF
|
core/l2tp_server.py
ADDED
|
@@ -0,0 +1,201 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
L2TP/IPsec Server Implementation
|
| 3 |
+
Handles L2TP tunneling with IPsec encryption
|
| 4 |
+
"""
|
| 5 |
+
|
| 6 |
+
import asyncio
|
| 7 |
+
import socket
|
| 8 |
+
import struct
|
| 9 |
+
from typing import Dict, Optional, Tuple
|
| 10 |
+
from dataclasses import dataclass
|
| 11 |
+
import os
|
| 12 |
+
import hmac
|
| 13 |
+
import hashlib
|
| 14 |
+
from .ip_parser import IPv4Header, IPParser
|
| 15 |
+
from .logger import Logger, LogCategory
|
| 16 |
+
|
| 17 |
+
@dataclass
class L2TPSession:
    """Per-session L2TP state, keyed elsewhere by (tunnel_id, session_id)."""

    tunnel_id: int
    session_id: int
    client_ip: str
    assigned_ip: str
    created_at: float
    last_seen: float
    bytes_in: int = 0   # traffic counters start at zero
    bytes_out: int = 0
|
| 27 |
+
|
| 28 |
+
class L2TPServer:
    """L2TP/IPsec server implementation.

    Listens for L2TP over UDP (default port 1701), tracks sessions keyed by
    (tunnel_id, session_id) and hands decapsulated IP packets to the
    forwarding engines (currently placeholders).
    """

    def __init__(self, logger: Logger, ip_pool_start: str = "10.10.0.2"):
        self.logger = logger
        self.sessions: Dict[Tuple[int, int], L2TPSession] = {}  # (tunnel_id, session_id) -> session
        self.next_tunnel_id = 1
        self.next_session_id = 1
        self.next_ip = ip_pool_start
        self._running = False
        self._transport = None
        self._ipsec = IPSecHandler(logger)

    async def start(self, host: str = "0.0.0.0", port: int = 1701):
        """Start L2TP server (binds a UDP datagram endpoint)."""
        loop = asyncio.get_running_loop()
        self._transport, _ = await loop.create_datagram_endpoint(
            lambda: L2TPProtocol(self),
            local_addr=(host, port)
        )
        self._running = True
        self.logger.info(LogCategory.SYSTEM, "l2tp_server", f"L2TP server started on {host}:{port}")

    async def stop(self):
        """Stop L2TP server and close the UDP transport."""
        if self._transport:
            self._transport.close()
        self._running = False
        self.logger.info(LogCategory.SYSTEM, "l2tp_server", "L2TP server stopped")

    def allocate_ip(self) -> str:
        """Allocate next available IP from pool.

        Fix: derive the network prefix from the current pool address instead
        of hard-coding "10.10.0.", so a non-default ip_pool_start works.

        Raises:
            ValueError: when the last octet reaches 254 (pool exhausted).
        """
        allocated = self.next_ip
        prefix, _, last = self.next_ip.rpartition('.')
        last_octet = int(last)
        if last_octet >= 254:
            raise ValueError("IP pool exhausted")
        self.next_ip = f"{prefix}.{last_octet + 1}"
        return allocated

    async def handle_packet(self, data: bytes, addr: Tuple[str, int]):
        """Handle incoming L2TP packet: strip IPsec, parse the L2TP header,
        and dispatch to the control or data path."""
        try:
            # Decrypt IPsec if present
            if self._ipsec.is_ipsec_packet(data):
                data = self._ipsec.decrypt_packet(data)
                if not data:
                    return

            # Parse L2TP header (flags + tunnel id + session id = 6 bytes)
            if len(data) < 6:
                return

            flags = struct.unpack('!H', data[0:2])[0]
            tunnel_id = struct.unpack('!H', data[2:4])[0]
            session_id = struct.unpack('!H', data[4:6])[0]

            # Bit 15 of the flags word marks a control message.
            if flags & 0x8000:  # Control message
                await self._handle_control(data[6:], tunnel_id, session_id, addr)
            else:  # Data message
                await self._handle_data(data[6:], tunnel_id, session_id)

        except Exception as e:
            self.logger.error(LogCategory.SYSTEM, "l2tp_server", f"Error handling packet: {e}")

    async def _handle_control(self, data: bytes, tunnel_id: int, session_id: int, addr: Tuple[str, int]):
        """Handle L2TP control message (SCCRQ and ICRQ only)."""
        if len(data) < 2:  # guard against truncated control payloads
            return
        msg_type = struct.unpack('!H', data[0:2])[0]

        if msg_type == 1:  # SCCRQ - Start-Control-Connection-Request
            # Send SCCRP - Start-Control-Connection-Reply
            reply = self._build_sccrp(tunnel_id)
            await self._send_control(reply, addr)

        elif msg_type == 3:  # ICRQ - Incoming-Call-Request
            # Create new session
            session = L2TPSession(
                tunnel_id=tunnel_id,
                session_id=self.next_session_id,
                client_ip=addr[0],
                assigned_ip=self.allocate_ip(),
                created_at=asyncio.get_running_loop().time(),
                last_seen=asyncio.get_running_loop().time()
            )
            self.sessions[(tunnel_id, session_id)] = session
            self.next_session_id += 1

            # Send ICRP - Incoming-Call-Reply
            reply = self._build_icrp(tunnel_id, session_id)
            await self._send_control(reply, addr)

    def _build_sccrp(self, tunnel_id: int) -> bytes:
        """Build a minimal SCCRP (message type 2) control frame.

        Fix: this method was called by _handle_control but never defined,
        so every SCCRQ crashed with AttributeError. Placeholder framing:
        control flag + tunnel id + session 0 + message type.
        """
        return struct.pack('!HHHH', 0x8000, tunnel_id, 0, 2)

    def _build_icrp(self, tunnel_id: int, session_id: int) -> bytes:
        """Build a minimal ICRP (message type 11) control frame.

        Fix: previously referenced but undefined (AttributeError on ICRQ).
        """
        return struct.pack('!HHHH', 0x8000, tunnel_id, session_id, 11)

    async def _send_control(self, data: bytes, addr: Tuple[str, int]):
        """Send a control frame to the peer over the UDP transport.

        Fix: previously referenced but undefined.
        """
        if self._transport:
            self._transport.sendto(data, addr)

    async def _handle_data(self, data: bytes, tunnel_id: int, session_id: int):
        """Handle L2TP data message: update liveness and unwrap the PPP frame."""
        session_key = (tunnel_id, session_id)
        if session_key not in self.sessions:
            return

        session = self.sessions[session_key]
        session.last_seen = asyncio.get_running_loop().time()

        # Parse PPP frame (protocol field lives at bytes 2-4)
        if len(data) < 4:
            return

        ppp_protocol = struct.unpack('!H', data[2:4])[0]
        if ppp_protocol == 0x0021:  # IP
            ip_packet = data[4:]
            await self._handle_ip_packet(ip_packet, session)

    async def _handle_ip_packet(self, data: bytes, session: L2TPSession):
        """Handle IP packet inside PPP frame and route by protocol."""
        try:
            # Parse IP header.
            # Fix: parse_ipv4_header returns a (header, payload) tuple; the
            # old code assigned the tuple itself and then crashed on
            # ip_header.protocol.
            ip_header, _payload = IPParser.parse_ipv4_header(data)

            # Update statistics
            session.bytes_in += len(data)

            # Forward packet to destination
            if ip_header.protocol == socket.IPPROTO_TCP:
                await self._forward_tcp(data, session)
            elif ip_header.protocol == socket.IPPROTO_UDP:
                await self._forward_udp(data, session)

        except Exception as e:
            self.logger.error(LogCategory.SYSTEM, "l2tp_server", f"Error handling IP packet: {e}")

    async def _forward_tcp(self, data: bytes, session: L2TPSession):
        """Forward TCP packet (placeholder for the TCP forwarding engine)."""
        pass

    async def _forward_udp(self, data: bytes, session: L2TPSession):
        """Forward UDP packet (placeholder for the UDP forwarding engine)."""
        pass
|
| 167 |
+
|
| 168 |
+
class IPSecHandler:
    """Handles IPsec encryption/decryption (placeholder implementation)."""

    def __init__(self, logger: Logger):
        self.logger = logger
        self.security_associations: Dict[str, Dict] = {}

    def is_ipsec_packet(self, data: bytes) -> bool:
        """Check if packet is IPsec encrypted (first byte 50=ESP or 51=AH)."""
        if len(data) < 8:
            return False
        return data[0] in (50, 51)

    def decrypt_packet(self, data: bytes) -> Optional[bytes]:
        """Decrypt IPsec packet; returns None on failure.

        Placeholder: real decryption depends on the negotiated SA — for now
        the 8-byte ESP header is simply stripped.
        """
        try:
            return data[8:]  # Skip ESP header for now
        except Exception as e:
            self.logger.error(LogCategory.SYSTEM, "ipsec_handler", f"Decryption error: {e}")
            return None

    def encrypt_packet(self, data: bytes, sa_id: str) -> bytes:
        """Encrypt packet using IPsec; returns the input unchanged on failure.

        Placeholder: prepends an empty ESP header (SPI=0, sequence=0).
        """
        try:
            return struct.pack('!II', 0, 0) + data
        except Exception as e:
            self.logger.error(LogCategory.SYSTEM, "ipsec_handler", f"Encryption error: {e}")
            return data
|
core/logger.py
ADDED
|
@@ -0,0 +1,282 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Logger Module
|
| 3 |
+
|
| 4 |
+
Centralized logging system for the virtual ISP stack:
|
| 5 |
+
- Structured logging with multiple levels
|
| 6 |
+
- Log aggregation and filtering
|
| 7 |
+
- Real-time log streaming
|
| 8 |
+
- Log persistence and rotation
|
| 9 |
+
"""
|
| 10 |
+
|
| 11 |
+
import logging
|
| 12 |
+
import logging.handlers
|
| 13 |
+
import time
|
| 14 |
+
import threading
|
| 15 |
+
import json
|
| 16 |
+
import os
|
| 17 |
+
from typing import Dict, List, Optional, Any, Callable
|
| 18 |
+
from dataclasses import dataclass, asdict
|
| 19 |
+
from enum import Enum
|
| 20 |
+
from collections import deque
|
| 21 |
+
import queue
|
| 22 |
+
|
| 23 |
+
|
| 24 |
+
class LogLevel(Enum):
    """Log severity levels.

    Values are the names of the stdlib logging levels; LogFilter relies on
    this by resolving them with getattr(logging, ...), and LogManager.log
    resolves the matching logging function via value.lower().
    """
    DEBUG = "DEBUG"
    INFO = "INFO"
    WARNING = "WARNING"
    ERROR = "ERROR"
    CRITICAL = "CRITICAL"
|
| 30 |
+
|
| 31 |
+
|
| 32 |
+
class LogCategory(Enum):
    """Subsystem tag attached to every LogEntry (stored as the string value)."""
    SYSTEM = "SYSTEM"
    OUTLINE = "OUTLINE"
    NAT = "NAT"
    TCP = "TCP"
    ROUTER = "ROUTER"
    BRIDGE = "BRIDGE"
    SOCKET = "SOCKET"
    SESSION = "SESSION"
    SECURITY = "SECURITY"
    PERFORMANCE = "PERFORMANCE"
|
| 43 |
+
|
| 44 |
+
|
| 45 |
+
@dataclass
class LogEntry:
    """Structured log entry.

    ``timestamp`` is epoch seconds; callers may pass 0 to have it filled in
    with the current time in ``__post_init__``. ``level`` and ``category``
    hold the string *values* of LogLevel / LogCategory, not the enum members.
    """
    timestamp: float
    level: str
    category: str
    module: str
    message: str
    session_id: Optional[str] = None
    client_id: Optional[str] = None
    source_ip: Optional[str] = None
    dest_ip: Optional[str] = None
    protocol: Optional[str] = None
    # FIX: was annotated `Dict[str, Any] = None` (an implicit Optional, which
    # is invalid typing). The None default avoids the shared-mutable-default
    # trap; __post_init__ normalizes it to a fresh dict per instance.
    metadata: Optional[Dict[str, Any]] = None

    def __post_init__(self):
        # 0 acts as a "not supplied" sentinel for the required timestamp field.
        if self.timestamp == 0:
            self.timestamp = time.time()
        if self.metadata is None:
            self.metadata = {}

    def to_dict(self) -> Dict:
        """Convert to a plain dictionary (recursively, via dataclasses.asdict)."""
        return asdict(self)

    def to_json(self) -> str:
        """Serialize to JSON; non-serializable values fall back to str()."""
        return json.dumps(self.to_dict(), default=str)
|
| 73 |
+
|
| 74 |
+
|
| 75 |
+
class LogFilter:
    """Criteria object used to select a subset of log entries.

    Every attribute left at None (or falsy) is ignored; an entry matches
    only when it satisfies all active criteria.
    """

    def __init__(self):
        self.level_filter: Optional["LogLevel"] = None
        self.category_filter: Optional["LogCategory"] = None
        self.module_filter: Optional[str] = None
        self.session_filter: Optional[str] = None
        self.client_filter: Optional[str] = None
        self.ip_filter: Optional[str] = None
        self.text_filter: Optional[str] = None
        self.time_range: Optional[tuple] = None  # (start_epoch, end_epoch), inclusive

    def matches(self, entry: "LogEntry") -> bool:
        """Return True when *entry* satisfies every active criterion."""
        # Minimum severity: compare via the numeric stdlib logging levels.
        if self.level_filter:
            if getattr(logging, entry.level) < getattr(logging, self.level_filter.value):
                return False

        # Exact-match criteria.
        if self.category_filter and entry.category != self.category_filter.value:
            return False
        if self.module_filter and entry.module != self.module_filter:
            return False
        if self.session_filter and entry.session_id != self.session_filter:
            return False
        if self.client_filter and entry.client_id != self.client_filter:
            return False

        # The IP criterion matches either endpoint of the logged flow.
        if self.ip_filter and self.ip_filter not in (entry.source_ip, entry.dest_ip):
            return False

        # Case-insensitive substring search over the message text.
        if self.text_filter and self.text_filter.lower() not in entry.message.lower():
            return False

        # Inclusive timestamp window.
        if self.time_range:
            start_time, end_time = self.time_range
            if entry.timestamp < start_time or entry.timestamp > end_time:
                return False

        return True
|
| 131 |
+
|
| 132 |
+
|
| 133 |
+
class LogAggregator:
    """Bounded in-memory log store with synchronous fan-out.

    Keeps at most ``max_entries`` recent entries in a ring buffer and
    notifies every registered subscriber callback as each entry arrives.
    All operations are serialized through a single lock.
    """

    def __init__(self, max_entries: int = 10000):
        self.entries: deque = deque(maxlen=max_entries)
        self.subscribers: List[Callable[["LogEntry"], None]] = []
        self.lock = threading.Lock()

    def add_entry(self, entry: "LogEntry"):
        """Append *entry* and notify subscribers; a failing subscriber is logged, not raised."""
        with self.lock:
            self.entries.append(entry)
            for callback in self.subscribers:
                try:
                    callback(entry)
                except Exception as exc:
                    logging.error(f"Error notifying subscriber: {exc}")

    def get_entries(self, log_filter: Optional["LogFilter"] = None) -> List["LogEntry"]:
        """Return stored entries, restricted to those matching *log_filter* if given."""
        with self.lock:
            if log_filter is None:
                return list(self.entries)
            return [item for item in self.entries if log_filter.matches(item)]

    def subscribe(self, callback: Callable[["LogEntry"], None]):
        """Register *callback* to receive every future entry."""
        with self.lock:
            self.subscribers.append(callback)

    def unsubscribe(self, callback: Callable[["LogEntry"], None]):
        """Remove *callback*; a no-op if it was never registered."""
        with self.lock:
            if callback in self.subscribers:
                self.subscribers.remove(callback)
|
| 171 |
+
|
| 172 |
+
|
| 173 |
+
class LogManager:
    """Central logging manager (process-wide singleton).

    Bridges the structured LogEntry pipeline (queue -> background thread ->
    LogAggregator) with Python's stdlib logging (rotating file handler plus
    console handler on the root logger). Construct via LogManager();
    repeated construction returns the same, already-initialized instance.
    """

    # Singleton state shared by every construction attempt.
    _instance = None
    _lock = threading.Lock()

    def __new__(cls):
        # The lock is taken on every call, so instance creation is race-free
        # at the cost of a small per-call overhead.
        with cls._lock:
            if cls._instance is None:
                cls._instance = super().__new__(cls)
                # Flag consumed by __init__ so setup runs exactly once.
                cls._instance._initialized = False
            return cls._instance

    def __init__(self):
        # __init__ runs on every LogManager() call; bail out after the first.
        if self._initialized:
            return

        self.aggregator = LogAggregator()
        self.log_queue = queue.Queue()
        self.file_handler = None
        self.console_handler = None

        # Configure logging
        self._configure_logging()

        # Start processing thread.
        # Daemon so it never blocks interpreter exit; shutdown() stops it cleanly.
        self.running = True
        self.process_thread = threading.Thread(target=self._process_queue)
        self.process_thread.daemon = True
        self.process_thread.start()

        self._initialized = True

    def _configure_logging(self):
        """Configure logging handlers (rotating file + console) on the root logger."""
        # Logs live in ../logs relative to this module's directory.
        log_dir = os.path.join(os.path.dirname(__file__), '../logs')
        os.makedirs(log_dir, exist_ok=True)

        # File handler with rotation: 5 backups of 10 MiB each.
        log_file = os.path.join(log_dir, 'outline.log')
        self.file_handler = logging.handlers.RotatingFileHandler(
            log_file, maxBytes=10*1024*1024, backupCount=5
        )
        self.file_handler.setFormatter(
            logging.Formatter('%(asctime)s [%(levelname)s] %(message)s')
        )

        # Console handler
        self.console_handler = logging.StreamHandler()
        self.console_handler.setFormatter(
            logging.Formatter('[%(levelname)s] %(message)s')
        )

        # Configure root logger.
        # NOTE(review): handlers are added to the *root* logger, so another
        # configuration pass in the same process could duplicate output.
        root_logger = logging.getLogger()
        root_logger.setLevel(logging.INFO)
        root_logger.addHandler(self.file_handler)
        root_logger.addHandler(self.console_handler)

    def _process_queue(self):
        """Drain the queue into the aggregator until shutdown() clears `running`."""
        while self.running:
            try:
                # 1s timeout so the `running` flag is re-checked regularly.
                entry = self.log_queue.get(timeout=1)
                self.aggregator.add_entry(entry)
            except queue.Empty:
                continue
            except Exception as e:
                logging.error(f"Error processing log entry: {e}")

    def log(self, level: LogLevel, category: LogCategory, module: str,
            message: str, **kwargs):
        """Add a log entry.

        Extra kwargs are forwarded to LogEntry (session_id, client_id,
        source_ip, dest_ip, protocol, metadata).
        """
        entry = LogEntry(
            timestamp=time.time(),
            level=level.value,
            category=category.value,
            module=module,
            message=message,
            **kwargs
        )
        self.log_queue.put(entry)

        # Also log to Python's logging system.
        # LogLevel values match the names of logging's module-level functions.
        log_func = getattr(logging, level.value.lower())
        log_func(f"[{category.value}] {message}")

    def get_logs(self, log_filter: Optional[LogFilter] = None) -> List[LogEntry]:
        """Get stored logs, optionally restricted by *log_filter*."""
        return self.aggregator.get_entries(log_filter)

    def subscribe(self, callback: Callable[[LogEntry], None]):
        """Subscribe *callback* to receive every new log entry."""
        self.aggregator.subscribe(callback)

    def unsubscribe(self, callback: Callable[[LogEntry], None]):
        """Unsubscribe a previously registered callback."""
        self.aggregator.unsubscribe(callback)

    def shutdown(self):
        """Stop the processing thread and close both logging handlers."""
        self.running = False
        if self.process_thread.is_alive():
            # The thread exits within ~1s thanks to the queue.get timeout.
            self.process_thread.join()

        # Close handlers
        if self.file_handler:
            self.file_handler.close()
        if self.console_handler:
            self.console_handler.close()
|
core/middleware.py
ADDED
|
@@ -0,0 +1,58 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Custom middleware for the application
|
| 3 |
+
"""
|
| 4 |
+
from fastapi import Request
|
| 5 |
+
from starlette.middleware.base import BaseHTTPMiddleware
|
| 6 |
+
from starlette.responses import Response
|
| 7 |
+
import logging
|
| 8 |
+
import time
|
| 9 |
+
from typing import Callable
|
| 10 |
+
|
| 11 |
+
logger = logging.getLogger(__name__)
|
| 12 |
+
|
| 13 |
+
class RequestLoggerMiddleware(BaseHTTPMiddleware):
    """Logs every request's method, URL, response status and latency."""

    async def dispatch(self, request: Request, call_next: Callable) -> Response:
        started = time.time()

        # Capture the identifying details up front, so they are available
        # even when the downstream handler raises.
        method = request.method
        url = str(request.url)

        try:
            response = await call_next(request)

            elapsed = time.time() - started
            logger.info(
                f"Request: {method} {url} - Status: {response.status_code} - "
                f"Processing Time: {elapsed:.3f}s"
            )
            return response

        except Exception as exc:
            # Log the failure and let the exception propagate unchanged.
            logger.error(
                f"Request failed: {method} {url} - Error: {str(exc)}"
            )
            raise
|
| 43 |
+
|
| 44 |
+
class ErrorHandlerMiddleware(BaseHTTPMiddleware):
    """Converts any unhandled exception into a generic 500 JSON response."""

    async def dispatch(self, request: Request, call_next: Callable) -> Response:
        try:
            return await call_next(request)
        except Exception as e:
            # Log the error with the full traceback for diagnosis.
            logger.error(f"Unhandled error: {str(e)}", exc_info=True)

            # BUG FIX: starlette's Response expects str/bytes content — the
            # previous code passed a dict, which raised a TypeError while
            # rendering the error response. Send pre-serialized JSON with an
            # explicit media type instead.
            return Response(
                content='{"detail": "Internal server error"}',
                status_code=500,
                media_type="application/json",
            )
|
core/models/__pycache__/user.cpython-311.pyc
ADDED
|
Binary file (6.18 kB). View file
|
|
|
core/models/user.py
ADDED
|
@@ -0,0 +1,98 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
User models and authentication
|
| 3 |
+
"""
|
| 4 |
+
from datetime import datetime, timedelta
|
| 5 |
+
from typing import Optional, List
|
| 6 |
+
from sqlalchemy import Column, Integer, String, DateTime, Boolean, Enum as SQLEnum
|
| 7 |
+
from sqlalchemy.sql import func
|
| 8 |
+
import enum
|
| 9 |
+
from passlib.context import CryptContext
|
| 10 |
+
from core.database import Base
|
| 11 |
+
|
| 12 |
+
# Password hashing context
|
| 13 |
+
pwd_context = CryptContext(
|
| 14 |
+
schemes=["argon2"],
|
| 15 |
+
deprecated="auto",
|
| 16 |
+
argon2__memory_cost=65536,
|
| 17 |
+
argon2__parallelism=4,
|
| 18 |
+
argon2__time_cost=3
|
| 19 |
+
)
|
| 20 |
+
|
| 21 |
+
class UserRole(enum.Enum):
    """Role assigned to an account; persisted in the users.role column."""
    ADMIN = "admin"
    POWER_USER = "power_user"
    USER = "user"  # default role for new accounts (see User.role)
    GUEST = "guest"
|
| 26 |
+
|
| 27 |
+
class UserStatus(enum.Enum):
    """Lifecycle state of an account; persisted in the users.status column."""
    ACTIVE = "active"
    SUSPENDED = "suspended"
    LOCKED = "locked"
    PENDING = "pending"  # default status for new accounts (see User.status)
    EXPIRED = "expired"
|
| 33 |
+
|
| 34 |
+
class User(Base):
    """User model for authentication and authorization.

    Passwords are stored as Argon2 hashes (via the module-level
    ``pwd_context``); five consecutive failed logins trigger a
    15-minute lockout.
    """
    __tablename__ = "users"

    id = Column(Integer, primary_key=True, index=True)
    username = Column(String, unique=True, index=True, nullable=False)
    password_hash = Column(String, nullable=False)  # Argon2 hash, never plaintext
    role = Column(SQLEnum(UserRole), nullable=False, default=UserRole.USER)
    status = Column(SQLEnum(UserStatus), nullable=False, default=UserStatus.PENDING)

    # Account tracking
    created_at = Column(DateTime(timezone=True), server_default=func.now())
    last_login = Column(DateTime(timezone=True), nullable=True)
    failed_attempts = Column(Integer, default=0)  # consecutive failures since last success
    lockout_until = Column(DateTime(timezone=True), nullable=True)

    @staticmethod
    def _now_matching(reference: datetime) -> datetime:
        """Return the current UTC time with the same tz-awareness as *reference*."""
        if reference.tzinfo is not None:
            from datetime import timezone
            return datetime.now(timezone.utc)
        return datetime.utcnow()

    def verify_password(self, password: str) -> bool:
        """Verify a plaintext password against the stored Argon2 hash."""
        return pwd_context.verify(password, self.password_hash)

    @staticmethod
    def hash_password(password: str) -> str:
        """Hash a plaintext password using Argon2."""
        return pwd_context.hash(password)

    def is_locked(self) -> bool:
        """Return True while the account's lockout window is still active.

        BUG FIX: the column is DateTime(timezone=True), so backends such as
        PostgreSQL return tz-aware datetimes; comparing those against the
        naive datetime.utcnow() raised TypeError. Compare against a
        timestamp of matching awareness instead.
        """
        if self.lockout_until is None:
            return False
        return self.lockout_until > self._now_matching(self.lockout_until)

    def record_login_attempt(self, success: bool):
        """Record a login attempt; lock the account after 5 straight failures."""
        if success:
            self.failed_attempts = 0
            self.last_login = datetime.utcnow()
            self.lockout_until = None
        else:
            # `or 0` guards against failed_attempts still being None before
            # the column default has been applied (i.e. pre-flush instances).
            self.failed_attempts = (self.failed_attempts or 0) + 1
            if self.failed_attempts >= 5:  # Lock after 5 failed attempts
                self.lockout_until = datetime.utcnow() + timedelta(minutes=15)
|
| 75 |
+
|
| 76 |
+
class UserSession(Base):
    """Server-side record of one issued authentication token."""
    __tablename__ = "user_sessions"

    id = Column(Integer, primary_key=True, index=True)
    user_id = Column(Integer, nullable=False)  # owning User.id (no FK constraint declared)
    token = Column(String, unique=True, index=True, nullable=False)  # opaque session token
    ip_address = Column(String)   # presumably the client address at login — TODO confirm against caller
    device_info = Column(String)  # free-form client/device description
    created_at = Column(DateTime(timezone=True), server_default=func.now())
    expires_at = Column(DateTime(timezone=True), nullable=False)  # hard expiry of the token
    last_active = Column(DateTime(timezone=True), server_default=func.now())
| 88 |
+
|
| 89 |
+
class UserPermission(Base):
    """A single permission granted to a user, optionally scoped to a resource."""
    __tablename__ = "user_permissions"

    id = Column(Integer, primary_key=True, index=True)
    user_id = Column(Integer, nullable=False)  # recipient User.id (no FK constraint declared)
    permission_type = Column(String, nullable=False)  # free-form permission name
    resource_id = Column(String, nullable=True)  # Optional resource identifier
    granted_at = Column(DateTime(timezone=True), server_default=func.now())
    granted_by = Column(Integer, nullable=True)  # User ID who granted the permission
|
core/nat_engine.py
ADDED
|
@@ -0,0 +1,135 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
NAT Engine Module for VPN
|
| 3 |
+
Implements Network Address Translation
|
| 4 |
+
"""
|
| 5 |
+
|
| 6 |
+
import asyncio
|
| 7 |
+
import time
|
| 8 |
+
from typing import Dict, Optional, Tuple
|
| 9 |
+
from dataclasses import dataclass
|
| 10 |
+
import logging
|
| 11 |
+
import subprocess
|
| 12 |
+
|
| 13 |
+
logger = logging.getLogger(__name__)
|
| 14 |
+
|
| 15 |
+
@dataclass
class NATSession:
    """One NAT translation entry.

    Maps a client-side (virtual_ip, virtual_port) endpoint to a real
    destination (real_ip, real_port) through a host_port allocated by
    NATEngine.
    """
    virtual_ip: str      # client-side (tunnel) address
    virtual_port: int
    real_ip: str         # destination address on the real network
    real_port: int
    host_port: int       # local port allocated by NATEngine._allocate_port
    created_time: float  # epoch seconds at creation
    last_activity: float  # epoch seconds; used by the cleanup loop to expire idle sessions
    bytes_in: int = 0
    bytes_out: int = 0
|
| 26 |
+
|
| 27 |
+
class NATEngine:
    """Application-level NAT engine.

    Tracks (virtual_ip, virtual_port) -> (real_ip, real_port) translations
    keyed by the virtual endpoint, allocates local host ports from 10000
    upward, and expires sessions idle for longer than ``cleanup_interval``.
    """

    def __init__(self, logger_instance=None):
        # Keyed by "virtual_ip:virtual_port".
        self.sessions: Dict[str, "NATSession"] = {}
        # host_port -> (virtual_ip, virtual_port), for reverse lookups.
        self.port_mappings: Dict[int, Tuple[str, int]] = {}
        self.next_port = 10000
        self.cleanup_interval = 300  # 5 minutes; doubles as the idle timeout
        self._cleanup_task = None
        self.logger = logger_instance if logger_instance else logging.getLogger(__name__)

    async def start(self):
        """Start the background cleanup task."""
        self._cleanup_task = asyncio.create_task(self._cleanup_loop())
        self.logger.info("NAT engine started")

    async def stop(self):
        """Cancel the cleanup task and drop all NAT state."""
        if self._cleanup_task:
            self._cleanup_task.cancel()
            try:
                await self._cleanup_task
            except asyncio.CancelledError:
                pass
        self.sessions.clear()
        self.port_mappings.clear()
        self.logger.info("NAT engine stopped")

    def create_session(self, virtual_ip: str, virtual_port: int,
                       real_ip: str, real_port: int) -> "NATSession":
        """Create and register a new NAT session, allocating a host port."""
        host_port = self._allocate_port()
        now = time.time()
        session = NATSession(
            virtual_ip=virtual_ip,
            virtual_port=virtual_port,
            real_ip=real_ip,
            real_port=real_port,
            host_port=host_port,
            created_time=now,
            last_activity=now,
        )

        session_key = f"{virtual_ip}:{virtual_port}"
        self.sessions[session_key] = session
        self.port_mappings[host_port] = (virtual_ip, virtual_port)

        self.logger.debug(f"Created NAT session: {session_key} -> {real_ip}:{real_port}")
        return session

    def lookup_session(self, ip: str, port: int) -> Optional["NATSession"]:
        """Find a session by its real (destination) IP and port; O(n) scan."""
        for session in self.sessions.values():
            if session.real_ip == ip and session.real_port == port:
                return session
        return None

    def get_session_by_virtual(self, ip: str, port: int) -> Optional["NATSession"]:
        """Find a session by its virtual (client-side) IP and port; O(1)."""
        return self.sessions.get(f"{ip}:{port}")

    def remove_session(self, session: "NATSession"):
        """Remove a session and release its host port; a no-op if unknown."""
        session_key = f"{session.virtual_ip}:{session.virtual_port}"
        if session_key in self.sessions:
            del self.sessions[session_key]
        if session.host_port in self.port_mappings:
            del self.port_mappings[session.host_port]

    def _allocate_port(self) -> int:
        """Return the next free host port in [10000, 65535].

        The port is only reserved once create_session registers it in
        port_mappings. NOTE(review): spins forever if the whole range is
        in use.
        """
        while self.next_port in self.port_mappings:
            self.next_port += 1
            if self.next_port > 65535:
                self.next_port = 10000
        return self.next_port

    async def _cleanup_loop(self):
        """Periodically drop sessions idle for longer than cleanup_interval."""
        while True:
            try:
                await asyncio.sleep(self.cleanup_interval)
                current_time = time.time()

                # Iterate a snapshot: remove_session mutates self.sessions.
                for session_key, session in list(self.sessions.items()):
                    if current_time - session.last_activity > self.cleanup_interval:
                        self.remove_session(session)
                        self.logger.debug(f"Removed expired session: {session_key}")

            except asyncio.CancelledError:
                break
            except Exception as e:
                self.logger.error(f"Error in cleanup loop: {e}")

    def get_stats(self) -> Dict:
        """Return counts of sessions/ports and aggregate traffic totals."""
        return {
            "active_sessions": len(self.sessions),
            "allocated_ports": len(self.port_mappings),
            "total_bytes_in": sum(s.bytes_in for s in self.sessions.values()),
            "total_bytes_out": sum(s.bytes_out for s in self.sessions.values())
        }

    async def setup_nat(self, interface: str):
        """Prepare NAT for *interface*.

        No system-level configuration is performed: translation happens in
        the application-level forwarding engine.
        """
        # FIX: previously logged via the module-level logger, bypassing the
        # injected logger instance used by every other method of this class.
        self.logger.info(f"NAT configuration ready for interface {interface}")
|
core/outline_config.py
ADDED
|
@@ -0,0 +1,126 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Outline VPN Configuration Manager
|
| 3 |
+
"""
|
| 4 |
+
|
| 5 |
+
import os
import json
from dataclasses import dataclass, field
from typing import List, Optional, Dict
|
| 9 |
+
|
| 10 |
+
@dataclass
class OutlineConfig:
    """Server-side Outline/Shadowsocks settings.

    Persisted under the "server" key of outline_config.json by
    OutlineManager; access_key_salt is the secret used to derive per-user
    access keys.
    """
    port: int = 443
    cipher: str = "chacha20-ietf-poly1305"
    timeout: int = 600
    workers: int = 4
    bind_address: str = "0.0.0.0"
    # BUG FIX: the default was `os.urandom(32).hex()` evaluated once at class
    # definition, so every instance created in a process silently shared one
    # salt. default_factory generates a fresh salt per instance; a persisted
    # salt loaded from config still overrides it as before.
    access_key_salt: str = field(default_factory=lambda: os.urandom(32).hex())
|
| 18 |
+
|
| 19 |
+
@dataclass
class UserConfig:
    """Per-user Outline access record, persisted in outline_config.json."""
    user_id: str  # caller-chosen unique identifier
    access_key: str  # derived from user_id + server salt (see _generate_access_key)
    data_limit: Optional[int] = None  # bytes; None means unlimited
    expiry_date: Optional[str] = None  # not referenced anywhere in this module
    is_active: bool = True  # cleared by update_user_bandwidth when data_limit is exceeded
    last_connection: Optional[str] = None  # not updated within this module
    bandwidth_usage: int = 0  # bytes, accumulated via update_user_bandwidth
|
| 29 |
+
class OutlineManager:
    """Manages the Outline server configuration and its user records.

    State is persisted as JSON at ``config_path`` with the shape
    ``{"server": {...}, "users": [...]}``; mutating operations rewrite the
    file.
    """

    def __init__(self, config_path: str = "config/outline_config.json"):
        self.config_path = config_path
        self.config = OutlineConfig()
        self.users: List["UserConfig"] = []
        self._load_config()

    def _load_config(self):
        """Load configuration from file, or persist the defaults if absent."""
        # FIX: guard against a bare filename (empty dirname), which made
        # os.makedirs("") raise FileNotFoundError.
        config_dir = os.path.dirname(self.config_path)
        if config_dir:
            os.makedirs(config_dir, exist_ok=True)

        if os.path.exists(self.config_path):
            with open(self.config_path, 'r') as f:
                data = json.load(f)
            self.config = OutlineConfig(**data.get('server', {}))
            self.users = [UserConfig(**u) for u in data.get('users', [])]
        else:
            self.save_config()

    def save_config(self):
        """Write the current server config and user list to config_path."""
        data = {
            'server': self.config.__dict__,
            'users': [u.__dict__ for u in self.users]
        }
        with open(self.config_path, 'w') as f:
            json.dump(data, f, indent=4)

    def add_user(self, user_id: str, data_limit: Optional[int] = None) -> "UserConfig":
        """Add a new user, derive their access key, and persist.

        Raises ValueError if *user_id* already exists.
        """
        if any(u.user_id == user_id for u in self.users):
            raise ValueError(f"User {user_id} already exists")

        access_key = self._generate_access_key(user_id)
        user = UserConfig(
            user_id=user_id,
            access_key=access_key,
            data_limit=data_limit
        )
        self.users.append(user)
        self.save_config()
        return user

    def remove_user(self, user_id: str) -> bool:
        """Remove a user by ID; returns True if a record was removed."""
        initial_length = len(self.users)
        self.users = [u for u in self.users if u.user_id != user_id]
        if len(self.users) < initial_length:
            self.save_config()
            return True
        return False

    def get_user_by_key(self, access_key: str) -> Optional["UserConfig"]:
        """Return the *active* user holding *access_key*, or None."""
        for user in self.users:
            if user.access_key == access_key and user.is_active:
                return user
        return None

    def update_user_bandwidth(self, user_id: str, bytes_used: int):
        """Add *bytes_used* to the user's tally; deactivate at the data limit."""
        for user in self.users:
            if user.user_id == user_id:
                user.bandwidth_usage += bytes_used
                if user.data_limit and user.bandwidth_usage >= user.data_limit:
                    user.is_active = False
                # BUG FIX: usage was only persisted when the limit was hit,
                # so accumulated usage was lost on every restart and limits
                # could not be enforced across runs. Persist every update.
                self.save_config()
                break

    def _generate_access_key(self, user_id: str) -> str:
        """Derive a deterministic access key from user_id + the server salt."""
        import hashlib
        key = hashlib.sha256(f"{user_id}{self.config.access_key_salt}".encode()).hexdigest()
        return key[:32]  # Return first 32 chars as access key

    def get_server_stats(self) -> Dict:
        """Return user counts and total bandwidth across all users."""
        return {
            "total_users": len(self.users),
            "active_users": sum(1 for u in self.users if u.is_active),
            "total_bandwidth": sum(u.bandwidth_usage for u in self.users)
        }
|
| 112 |
+
|
| 113 |
+
|
| 114 |
+
def generate_openvpn_certificates(config_id: str) -> Dict:
    """Placeholder certificate material for OpenVPN (no real PKI yet)."""
    return dict(cert="placeholder_cert", key="placeholder_key")
|
| 117 |
+
|
| 118 |
+
def generate_openvpn_config(config_id: str, server_ip: str) -> str:
    """Build a minimal OpenVPN client profile pointing at *server_ip* (placeholder)."""
    directives = [
        "client",
        "dev tun",
        "proto udp",
        f"remote {server_ip} 1194",
        "resolv-retry infinite",
        "nobind",
        "persist-key",
        "persist-tun",
        "remote-cert-tls server",
        "cipher AES-256-CBC",
        "verb 3",
    ]
    return "\n".join(directives)
|
| 121 |
+
|
| 122 |
+
def generate_wireguard_keys(config_id: str) -> Dict:
    """Placeholder for WireGuard key generation (returns dummy key material)."""
    return dict(
        server_public="placeholder_server_public",
        client_private="placeholder_client_private",
        client_public="placeholder_client_public",
    )
|
| 125 |
+
|
| 126 |
+
|
core/outline_server.py
ADDED
|
@@ -0,0 +1,181 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Enhanced Outline VPN Server Implementation
|
| 3 |
+
Core server component with offline access and internet sharing support
|
| 4 |
+
"""
|
| 5 |
+
|
| 6 |
+
import asyncio
|
| 7 |
+
import logging
|
| 8 |
+
import json
|
| 9 |
+
import os
|
| 10 |
+
import psutil
|
| 11 |
+
from typing import Dict, Optional, List, Tuple
|
| 12 |
+
from .shadowsocks_protocol import ShadowsocksProtocol
|
| 13 |
+
from .nat_engine import NATEngine
|
| 14 |
+
from .traffic_router import TrafficRouter
|
| 15 |
+
from .outline_config import OutlineManager, OutlineConfig
|
| 16 |
+
from .ikev2_server import IKEv2Server as IPsecManager
|
| 17 |
+
from .session_tracker import SessionTracker
|
| 18 |
+
from .port_manager import PortManager
|
| 19 |
+
|
| 20 |
+
logger = logging.getLogger(__name__)
|
| 21 |
+
|
| 22 |
+
class OutlineServer:
    """Coordinates every sub-service of the Outline VPN server.

    Owns the NAT engine, traffic router, IKEv2/IPsec service, auth manager,
    port manager and session tracker, and drives their start/stop ordering.
    """

    def __init__(self, config: Dict):
        # config is expected to contain config["server"] with at least
        # "host", "port", "virtual_network" and optionally "interface"
        # (those are the keys read below).
        self.config = config
        self.outline_manager = OutlineManager()
        self.ipsec_manager = IPsecManager(config["server"]["host"], logger)

        # Initialize authentication
        # Imported lazily here rather than at module top — presumably to
        # avoid a circular import; TODO confirm.
        from .vpn_auth import VPNAuthManager
        from .database_init import init_db

        # Initialize database; refuse to construct the server without it.
        if not init_db():
            raise RuntimeError("Failed to initialize database")

        self.auth_manager = VPNAuthManager()

        # Initialize port manager
        self.port_manager = PortManager()

        # Initialize components
        self.nat_engine = NATEngine(logger)
        self.traffic_router = TrafficRouter({
            "vpn_host": "0.0.0.0",  # Listen on all interfaces
            "vpn_port": config["server"]["port"],
            "virtual_network": config["server"]["virtual_network"]
        }, logger)

        # Initialize session tracker for offline support
        self.session_tracker = SessionTracker()

        # Active per-user protocol handlers, keyed by user_id
        # (populated in _handle_client, drained in stop()).
        self.sessions = {}
        self.is_running = False
        self.bound_ports: List[Tuple[int, str]] = []  # List of (port, service) tuples

    async def setup_internet_sharing(self):
        """Configure internet sharing (NAT) for VPN clients.

        Raises whatever the NAT engine raises after logging the failure.
        """
        try:
            # Default to eth0 when no interface is configured.
            interface = self.config["server"].get("interface", "eth0")
            await self.nat_engine.setup_nat(interface)
            logger.info(f"Internet sharing enabled on interface {interface}")
        except Exception as e:
            logger.error(f"Failed to setup internet sharing: {e}")
            raise

    async def start(self):
        """Start the VPN server.

        Components start in dependency order (NAT -> traffic router ->
        IKEv2); when a later component fails, the earlier ones are stopped
        again before the error propagates. The outer handler additionally
        calls stop() so a partial start never leaves services running.
        """
        if self.is_running:
            logger.warning("Outline VPN server is already running")
            return

        try:
            # Clean up any existing connections first
            await self.cleanup_existing_connections()

            # Setup network
            await self.setup_internet_sharing()

            # Start components in order with proper error handling
            try:
                await self.nat_engine.start()
            except Exception as e:
                logger.error(f"Failed to start NAT engine: {e}")
                raise

            try:
                await self.traffic_router.start()
            except Exception as e:
                # Roll back the already-started NAT engine before propagating.
                await self.nat_engine.stop()
                logger.error(f"Failed to start traffic router: {e}")
                raise

            try:
                await self.ipsec_manager.start()
            except Exception as e:
                # Roll back both previously started components.
                await self.nat_engine.stop()
                await self.traffic_router.stop()
                logger.error(f"Failed to start IKEv2 service: {e}")
                raise

            self.is_running = True
            logger.info(f"Outline VPN server started successfully on port {self.config['server']['port']}")

        except Exception as e:
            logger.error(f"Failed to start server: {e}")
            await self.stop()  # Ensure cleanup on failure
            raise

    async def cleanup_existing_connections(self):
        """Terminate any other process already bound to our server port."""
        import psutil
        current_pid = os.getpid()

        # Find and clean up any existing VPN processes.
        # NOTE(review): proc.connections() is deprecated in newer psutil in
        # favor of net_connections() — confirm against the pinned version.
        for proc in psutil.process_iter(['pid', 'name', 'connections']):
            try:
                if proc.pid != current_pid:
                    for conn in proc.connections():
                        if conn.laddr.port == self.config['server']['port']:
                            logger.warning(f"Found existing process using port {self.config['server']['port']}, terminating...")
                            proc.terminate()
                            proc.wait(timeout=5)
            except (psutil.NoSuchProcess, psutil.AccessDenied, psutil.TimeoutExpired):
                continue

    async def stop(self):
        """Stop the VPN server components and close all active sessions.

        Safe to call after a partial start: each component's stop() is
        expected to tolerate being stopped when not running.
        """
        self.is_running = False
        await self.traffic_router.stop()
        await self.nat_engine.stop()
        await self.ipsec_manager.stop()

        # Close all active sessions
        for session in self.sessions.values():
            await session.close()
        self.sessions.clear()

        logger.info("VPN server stopped")

    async def _handle_client(self, reader: asyncio.StreamReader, writer: asyncio.StreamWriter):
        """Handle a new client connection.

        Reads the first packet, extracts and validates the access key, then
        delegates the stream to a per-user ShadowsocksProtocol handler.
        """
        peer = writer.get_extra_info('peername')
        logger.info(f"New connection from {peer}")

        try:
            # First packet contains access key (only up to 64 bytes are read).
            first_packet = await reader.read(64)
            access_key = self._extract_access_key(first_packet)

            # Validate access key
            user = self.outline_manager.get_user_by_key(access_key)
            if not user:
                logger.warning(f"Invalid access key from {peer}")
                writer.close()
                return

            # Create protocol handler; a reconnecting user replaces their
            # previous entry in self.sessions.
            protocol = ShadowsocksProtocol(access_key)
            self.sessions[user.user_id] = protocol

            # Handle the connection
            await protocol.handle_connection(reader, writer)

        except Exception as e:
            logger.error(f"Error handling client {peer}: {e}")
        finally:
            # Close only if not already closed (e.g. by the invalid-key path).
            if not writer.is_closing():
                writer.close()
                await writer.wait_closed()

    def _extract_access_key(self, packet: bytes) -> str:
        """Interpret the first 32 bytes of the initial packet as the access
        key and return it hex-encoded (64 hex chars for a full packet)."""
        return packet[:32].hex()

    def get_stats(self) -> Dict:
        """Aggregate runtime statistics from this server, the traffic
        router and the NAT engine."""
        return {
            "is_running": self.is_running,
            "active_sessions": len(self.sessions),
            "traffic_stats": self.traffic_router.get_stats(),
            "nat_stats": self.nat_engine.get_stats()
        }
|
core/port_manager.py
ADDED
|
@@ -0,0 +1,113 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Port Manager for VPN Server
|
| 3 |
+
Handles port allocation, testing, and management
|
| 4 |
+
"""
|
| 5 |
+
|
| 6 |
+
import socket
|
| 7 |
+
import asyncio
|
| 8 |
+
import logging
|
| 9 |
+
from typing import List, Optional, Tuple
|
| 10 |
+
from contextlib import closing
|
| 11 |
+
|
| 12 |
+
logger = logging.getLogger(__name__)
|
| 13 |
+
|
| 14 |
+
class PortManager:
    """Allocates, tests, reserves and tracks TCP/UDP ports for VPN services.

    A port is considered available only when it can be bound for BOTH TCP
    and UDP on the requested address and is not already tracked as bound
    or reserved by this manager.
    """

    # Common VPN ports to try as fallbacks, in preference order.
    DEFAULT_VPN_PORTS = [
        443,   # HTTPS
        8388,  # Shadowsocks default
        8443,  # Alternative HTTPS
        1194,  # OpenVPN default
        1984,  # Outline default
        8000,  # Alternative
        8080,  # Alternative HTTP
    ]

    def __init__(self):
        self.bound_ports: List[int] = []     # ports successfully claimed via bind_port()
        self.reserved_ports: List[int] = []  # ports set aside but not yet bound

    async def find_available_port(self, preferred_port: int,
                                  fallback_ports: List[int] = None,
                                  bind_address: str = '0.0.0.0') -> Tuple[int, bool]:
        """
        Find an available port, starting with preferred_port, then the
        caller-supplied fallbacks, then DEFAULT_VPN_PORTS, then a scan.
        Returns: (port_number, is_preferred_port)
        Raises RuntimeError if no port at all is available.
        """
        # Try preferred port first
        if await self._test_port(preferred_port, bind_address):
            return preferred_port, True

        # Try fallback ports, skipping duplicates and the preferred port
        # (each _test_port call performs real bind attempts, so avoid
        # re-testing the same port).
        seen = {preferred_port}
        for port in (fallback_ports or []) + self.DEFAULT_VPN_PORTS:
            if port in seen:
                continue
            seen.add(port)
            if await self._test_port(port, bind_address):
                return port, False

        # If no predefined ports work, scan for any available port
        port = await self._scan_for_available_port(bind_address)
        return port, False

    async def _test_port(self, port: int, bind_address: str) -> bool:
        """Return True if *port* is free for both TCP and UDP on bind_address
        and not already tracked by this manager."""
        if port in self.bound_ports or port in self.reserved_ports:
            return False

        try:
            # Test TCP. `closing` guarantees the socket is released even if
            # bind() raises (the previous version leaked the fd on failure).
            with closing(socket.socket(socket.AF_INET, socket.SOCK_STREAM)) as tcp_sock:
                tcp_sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
                tcp_sock.bind((bind_address, port))

            # Test UDP
            with closing(socket.socket(socket.AF_INET, socket.SOCK_DGRAM)) as udp_sock:
                udp_sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
                udp_sock.bind((bind_address, port))

            return True

        except OSError:
            return False

    async def _scan_for_available_port(self, bind_address: str,
                                       start_port: int = 10000,
                                       end_port: int = 65535) -> int:
        """Scan for any available port in [start_port, end_port] inclusive.

        Raises RuntimeError when every port in the range is taken.
        """
        # end_port + 1 so the range is inclusive (65535 is a valid port;
        # the previous version never tried it).
        for port in range(start_port, end_port + 1):
            if await self._test_port(port, bind_address):
                return port
        raise RuntimeError("No available ports found")

    async def reserve_port(self, port: int):
        """Reserve a port for future use (no-op if already tracked)."""
        if port not in self.bound_ports and port not in self.reserved_ports:
            self.reserved_ports.append(port)

    async def release_port(self, port: int):
        """Release a reserved or bound port from tracking."""
        if port in self.bound_ports:
            self.bound_ports.remove(port)
        if port in self.reserved_ports:
            self.reserved_ports.remove(port)

    async def bind_port(self, port: int, bind_address: str = '0.0.0.0') -> bool:
        """
        Attempt to bind to a port and mark it as bound if successful.
        A reservation for the same port is promoted to bound.
        Returns True if binding was successful.
        """
        if await self._test_port(port, bind_address):
            self.bound_ports.append(port)
            if port in self.reserved_ports:
                self.reserved_ports.remove(port)
            return True
        return False

    def get_bound_ports(self) -> List[int]:
        """Get a copy of the currently bound ports."""
        return self.bound_ports.copy()

    def get_reserved_ports(self) -> List[int]:
        """Get a copy of the currently reserved ports."""
        return self.reserved_ports.copy()
|
core/pptp_server.py
ADDED
|
@@ -0,0 +1,179 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
PPTP Server Implementation
|
| 3 |
+
Handles PPTP tunneling and packet forwarding
|
| 4 |
+
"""
|
| 5 |
+
|
| 6 |
+
import asyncio
|
| 7 |
+
import socket
|
| 8 |
+
import struct
|
| 9 |
+
from typing import Dict, Optional, Tuple
|
| 10 |
+
from dataclasses import dataclass
|
| 11 |
+
import os
|
| 12 |
+
import hmac
|
| 13 |
+
import hashlib
|
| 14 |
+
from .ip_parser import IPv4Header, IPParser
|
| 15 |
+
from .logger import Logger, LogCategory
|
| 16 |
+
|
| 17 |
+
@dataclass
class PPTPSession:
    """Per-call PPTP session state and traffic counters."""
    call_id: int        # client's call id, taken from the Outgoing-Call-Request
    peer_call_id: int   # server-assigned id (from the server's call-id counter)
    client_ip: str      # real source address of the client
    assigned_ip: str    # tunnel-internal IP handed out from the pool
    created_at: float   # event-loop timestamp when the call was set up
    last_seen: float    # event-loop timestamp of the most recent packet
    bytes_in: int = 0   # bytes received from the client
    bytes_out: int = 0  # bytes sent toward the client
|
| 27 |
+
|
| 28 |
+
class PPTPServer:
    """PPTP server implementation.

    Accepts PPTP control traffic, maintains per-call sessions, allocates
    tunnel IPs from a simple pool, and unwraps GRE/PPP payloads down to
    raw IP packets for the forwarding engines.
    """

    def __init__(self, logger: Logger, ip_pool_start: str = "10.20.0.2"):
        self.logger = logger
        self.sessions: Dict[int, PPTPSession] = {}  # call_id -> session
        self.next_call_id = 1
        # Next address to hand out; the pool's network prefix is derived
        # from this value, so any a.b.c.x start address works.
        self.next_ip = ip_pool_start
        self._running = False
        self._transport = None  # asyncio.Server instance once start() has run

    async def start(self, host: str = "0.0.0.0", port: int = 1723):
        """Start PPTP server listening on host:port."""
        loop = asyncio.get_running_loop()
        # BUG FIX: loop.create_server() returns a single asyncio.Server
        # object — NOT a (transport, protocol) pair like create_connection —
        # so the old 2-tuple unpacking raised at startup.
        self._transport = await loop.create_server(
            lambda: PPTPProtocol(self),
            host, port
        )
        self._running = True
        self.logger.info(LogCategory.SYSTEM, "pptp_server", f"PPTP server started on {host}:{port}")

    async def stop(self):
        """Stop PPTP server."""
        if self._transport:
            self._transport.close()
        self._running = False
        self.logger.info(LogCategory.SYSTEM, "pptp_server", "PPTP server stopped")

    def allocate_ip(self) -> str:
        """Allocate the next available IP from the pool.

        The network prefix is derived from the current pool cursor rather
        than being hard-coded (the old version always wrote "10.20.0.x",
        ignoring a non-default ip_pool_start).
        Raises ValueError once the last octet reaches 254.
        """
        allocated = self.next_ip
        prefix, _, last = allocated.rpartition('.')
        last_octet = int(last)
        if last_octet >= 254:
            raise ValueError("IP pool exhausted")
        self.next_ip = f"{prefix}.{last_octet + 1}"
        return allocated

    async def handle_packet(self, data: bytes, addr: Tuple[str, int]):
        """Dispatch an incoming PPTP packet to control or GRE handling."""
        try:
            if len(data) < 8:
                return  # too short to carry a PPTP header

            # Parse PPTP header: message type lives at offset 2.
            message_type = struct.unpack('!H', data[2:4])[0]

            if message_type == 1:  # Control Message
                await self._handle_control(data, addr)
            else:  # Data Message (GRE)
                await self._handle_gre(data, addr)

        except Exception as e:
            self.logger.error(LogCategory.SYSTEM, "pptp_server", f"Error handling packet: {e}")

    async def _handle_control(self, data: bytes, addr: Tuple[str, int]):
        """Handle a PPTP control message (connection setup / call setup)."""
        control_type = struct.unpack('!H', data[8:10])[0]

        if control_type == 1:  # Start-Control-Connection-Request
            # Send Start-Control-Connection-Reply
            reply = self._build_start_control_reply()
            await self._send_control(reply, addr)

        elif control_type == 7:  # Outgoing-Call-Request
            call_id = struct.unpack('!H', data[12:14])[0]

            # Create new session; both timestamps start at "now".
            now = asyncio.get_running_loop().time()
            session = PPTPSession(
                call_id=call_id,
                peer_call_id=self.next_call_id,
                client_ip=addr[0],
                assigned_ip=self.allocate_ip(),
                created_at=now,
                last_seen=now
            )
            self.sessions[call_id] = session
            self.next_call_id += 1

            # Send Outgoing-Call-Reply
            reply = self._build_outgoing_call_reply(session)
            await self._send_control(reply, addr)

    async def _handle_gre(self, data: bytes, addr: Tuple[str, int]):
        """Handle GRE-encapsulated data for an established call."""
        try:
            if len(data) < 12:  # GRE v1 header size
                return

            # GRE v1 header layout: flags(2) protocol(2) payload_len(2) call_id(2) ...
            # Only protocol and call_id are needed here.
            protocol = struct.unpack('!H', data[2:4])[0]
            call_id = struct.unpack('!H', data[6:8])[0]

            if call_id not in self.sessions:
                return  # data for an unknown/expired call is dropped

            session = self.sessions[call_id]
            session.last_seen = asyncio.get_running_loop().time()

            # Handle PPP payload
            if protocol == 0x880B:  # PPP
                await self._handle_ppp(data[8:], session)

        except Exception as e:
            self.logger.error(LogCategory.SYSTEM, "pptp_server", f"Error handling GRE packet: {e}")

    async def _handle_ppp(self, data: bytes, session: PPTPSession):
        """Handle a PPP frame: only IP (0x0021) payloads are forwarded."""
        try:
            if len(data) < 4:
                return

            protocol = struct.unpack('!H', data[2:4])[0]

            if protocol == 0x0021:  # IP
                ip_packet = data[4:]
                await self._handle_ip_packet(ip_packet, session)

        except Exception as e:
            self.logger.error(LogCategory.SYSTEM, "pptp_server", f"Error handling PPP packet: {e}")

    async def _handle_ip_packet(self, data: bytes, session: PPTPSession):
        """Handle an IP packet carried inside a PPP frame."""
        try:
            # Parse IP header
            ip_header = IPParser.parse_ipv4_header(data)

            # Update statistics
            session.bytes_in += len(data)

            # Forward packet to destination by transport protocol
            if ip_header.protocol == socket.IPPROTO_TCP:
                await self._forward_tcp(data, session)
            elif ip_header.protocol == socket.IPPROTO_UDP:
                await self._forward_udp(data, session)

        except Exception as e:
            self.logger.error(LogCategory.SYSTEM, "pptp_server", f"Error handling IP packet: {e}")

    async def _forward_tcp(self, data: bytes, session: PPTPSession):
        """Forward TCP packet.

        Placeholder — actual forwarding is handled by the TCP forwarding engine.
        """
        pass

    async def _forward_udp(self, data: bytes, session: PPTPSession):
        """Forward UDP packet.

        Placeholder — actual forwarding is handled by the UDP forwarding engine.
        """
        pass
|
core/process_lock.py
ADDED
|
@@ -0,0 +1,51 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Process Lock Handler for VPN Server
|
| 3 |
+
Ensures only one instance of the server is running
|
| 4 |
+
"""
|
| 5 |
+
|
| 6 |
+
import os
|
| 7 |
+
import fcntl
|
| 8 |
+
import errno
|
| 9 |
+
import atexit
|
| 10 |
+
import logging
|
| 11 |
+
from typing import Optional
|
| 12 |
+
|
| 13 |
+
logger = logging.getLogger(__name__)
|
| 14 |
+
|
| 15 |
+
class ProcessLock:
    """File-based singleton lock: ensures only one server instance runs.

    Uses flock(2) with LOCK_NB on a well-known lock file; the holder's PID
    is written into the file for diagnostics. The lock is released
    automatically at interpreter exit via atexit.
    """

    def __init__(self, lock_file: str = "/tmp/outline_vpn.lock"):
        self.lock_file = lock_file
        self.lock_fd: Optional[int] = None
        atexit.register(self.release)

    def acquire(self) -> bool:
        """Acquire process lock. Returns True if successful, False if already locked."""
        try:
            # Create or open lock file
            self.lock_fd = os.open(self.lock_file, os.O_CREAT | os.O_RDWR)
            fcntl.flock(self.lock_fd, fcntl.LOCK_EX | fcntl.LOCK_NB)

            # Write PID to lock file
            os.ftruncate(self.lock_fd, 0)
            os.write(self.lock_fd, str(os.getpid()).encode())

            return True

        except (IOError, OSError) as e:
            log = logging.getLogger(__name__)
            if e.errno == errno.EWOULDBLOCK:
                # Another instance is running
                log.warning("Another instance of the VPN server is already running")
            else:
                log.error(f"Failed to acquire process lock: {e}")
            # BUG FIX: close and forget our fd on failure. Previously the fd
            # leaked AND stayed in self.lock_fd, so a later release() (e.g.
            # the atexit hook) would unlink the lock file out from under the
            # process that actually holds the lock.
            if self.lock_fd is not None:
                try:
                    os.close(self.lock_fd)
                except OSError:
                    pass
                self.lock_fd = None
            return False

    def release(self):
        """Release the process lock and remove the lock file (idempotent)."""
        if self.lock_fd is None:
            return
        try:
            fcntl.flock(self.lock_fd, fcntl.LOCK_UN)
            os.close(self.lock_fd)
            os.unlink(self.lock_file)
        except (IOError, OSError) as e:
            logging.getLogger(__name__).error(f"Failed to release process lock: {e}")
        finally:
            # Always clear the fd so release() never acts on a stale handle.
            self.lock_fd = None
|
core/services/__pycache__/user_service.cpython-311.pyc
ADDED
|
Binary file (6.56 kB). View file
|
|
|
core/services/user_service.py
ADDED
|
@@ -0,0 +1,99 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
User management and authentication services
|
| 3 |
+
"""
|
| 4 |
+
from datetime import datetime, timedelta
|
| 5 |
+
from typing import Optional, List, Tuple
|
| 6 |
+
from sqlalchemy.orm import Session
|
| 7 |
+
from sqlalchemy.exc import IntegrityError
|
| 8 |
+
from core.models.user import User, UserRole, UserStatus, UserSession, UserPermission
|
| 9 |
+
|
| 10 |
+
class UserService:
|
| 11 |
+
def __init__(self, db: Session):
|
| 12 |
+
self.db = db
|
| 13 |
+
|
| 14 |
+
def create_user(self, username: str, password: str, role: UserRole = UserRole.USER) -> Tuple[bool, str, Optional[User]]:
|
| 15 |
+
"""
|
| 16 |
+
Create a new user
|
| 17 |
+
Returns: (success, message, user)
|
| 18 |
+
"""
|
| 19 |
+
try:
|
| 20 |
+
user = User(
|
| 21 |
+
username=username,
|
| 22 |
+
password_hash=User.hash_password(password),
|
| 23 |
+
role=role,
|
| 24 |
+
status=UserStatus.ACTIVE
|
| 25 |
+
)
|
| 26 |
+
self.db.add(user)
|
| 27 |
+
self.db.commit()
|
| 28 |
+
self.db.refresh(user)
|
| 29 |
+
return True, "User created successfully", user
|
| 30 |
+
except IntegrityError:
|
| 31 |
+
self.db.rollback()
|
| 32 |
+
return False, "Username already exists", None
|
| 33 |
+
except Exception as e:
|
| 34 |
+
self.db.rollback()
|
| 35 |
+
return False, f"Error creating user: {str(e)}", None
|
| 36 |
+
|
| 37 |
+
def authenticate_user(self, username: str, password: str) -> Tuple[bool, str, Optional[User]]:
|
| 38 |
+
"""
|
| 39 |
+
Authenticate user credentials
|
| 40 |
+
Returns: (success, message, user)
|
| 41 |
+
"""
|
| 42 |
+
user = self.db.query(User).filter(User.username == username).first()
|
| 43 |
+
|
| 44 |
+
if not user:
|
| 45 |
+
return False, "Invalid username or password", None
|
| 46 |
+
|
| 47 |
+
if user.is_locked():
|
| 48 |
+
return False, f"Account is locked until {user.lockout_until}", None
|
| 49 |
+
|
| 50 |
+
if user.status != UserStatus.ACTIVE:
|
| 51 |
+
return False, f"Account is {user.status.value}", None
|
| 52 |
+
|
| 53 |
+
if not user.verify_password(password):
|
| 54 |
+
user.record_login_attempt(success=False)
|
| 55 |
+
self.db.commit()
|
| 56 |
+
return False, "Invalid username or password", None
|
| 57 |
+
|
| 58 |
+
user.record_login_attempt(success=True)
|
| 59 |
+
self.db.commit()
|
| 60 |
+
return True, "Authentication successful", user
|
| 61 |
+
|
| 62 |
+
def create_session(self, user: User, ip_address: str, device_info: str = None) -> UserSession:
|
| 63 |
+
"""Create a new session for user"""
|
| 64 |
+
session = UserSession(
|
| 65 |
+
user_id=user.id,
|
| 66 |
+
token=self._generate_session_token(),
|
| 67 |
+
ip_address=ip_address,
|
| 68 |
+
device_info=device_info,
|
| 69 |
+
expires_at=datetime.utcnow() + timedelta(days=1)
|
| 70 |
+
)
|
| 71 |
+
self.db.add(session)
|
| 72 |
+
self.db.commit()
|
| 73 |
+
self.db.refresh(session)
|
| 74 |
+
return session
|
| 75 |
+
|
| 76 |
+
def validate_session(self, token: str) -> Tuple[bool, str, Optional[UserSession]]:
|
| 77 |
+
"""Validate session token"""
|
| 78 |
+
session = self.db.query(UserSession).filter(UserSession.token == token).first()
|
| 79 |
+
|
| 80 |
+
if not session:
|
| 81 |
+
return False, "Invalid session", None
|
| 82 |
+
|
| 83 |
+
if session.expires_at < datetime.utcnow():
|
| 84 |
+
return False, "Session expired", None
|
| 85 |
+
|
| 86 |
+
# Update last active
|
| 87 |
+
session.last_active = datetime.utcnow()
|
| 88 |
+
self.db.commit()
|
| 89 |
+
|
| 90 |
+
return True, "Session valid", session
|
| 91 |
+
|
| 92 |
+
def get_user_permissions(self, user_id: int) -> List[UserPermission]:
|
| 93 |
+
"""Get user permissions"""
|
| 94 |
+
return self.db.query(UserPermission).filter(UserPermission.user_id == user_id).all()
|
| 95 |
+
|
| 96 |
+
def _generate_session_token(self) -> str:
|
| 97 |
+
"""Generate a unique session token"""
|
| 98 |
+
import secrets
|
| 99 |
+
return secrets.token_urlsafe(32)
|
core/session_tracker.py
ADDED
|
@@ -0,0 +1,287 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Session Tracker Module
|
| 3 |
+
|
| 4 |
+
Manages and tracks all network sessions across the virtual ISP stack:
|
| 5 |
+
- Unified session management across all modules
|
| 6 |
+
- Session lifecycle tracking
|
| 7 |
+
- Performance metrics and analytics
|
| 8 |
+
- Session correlation and debugging
|
| 9 |
+
"""
|
| 10 |
+
|
| 11 |
+
import time
|
| 12 |
+
import threading
|
| 13 |
+
import uuid
|
| 14 |
+
from typing import Dict, List, Optional, Set, Any, Tuple
|
| 15 |
+
from dataclasses import dataclass, field
|
| 16 |
+
from enum import Enum
|
| 17 |
+
import json
|
| 18 |
+
|
| 19 |
+
from .tcp_engine import TCPConnection
|
| 20 |
+
from .nat_engine import NATSession
|
| 21 |
+
|
| 22 |
+
|
| 23 |
+
class SessionType(Enum):
    """Kind of tracked session, mirroring the module it originated from."""
    NAT_SESSION = "NAT_SESSION"
    TCP_CONNECTION = "TCP_CONNECTION"
    SOCKET_CONNECTION = "SOCKET_CONNECTION"
|
| 27 |
+
|
| 28 |
+
|
| 29 |
+
class SessionState(Enum):
    """Lifecycle state of a tracked session."""
    INITIALIZING = "INITIALIZING"
    ACTIVE = "ACTIVE"
    IDLE = "IDLE"
    CLOSING = "CLOSING"
    CLOSED = "CLOSED"
    ERROR = "ERROR"
|
| 36 |
+
|
| 37 |
+
|
| 38 |
+
@dataclass
class SessionMetrics:
    """Per-session traffic counters and latency samples."""
    bytes_in: int = 0
    bytes_out: int = 0
    packets_in: int = 0
    packets_out: int = 0
    errors: int = 0
    retransmits: int = 0
    rtt_samples: List[float] = field(default_factory=list)

    @property
    def total_bytes(self) -> int:
        """Combined inbound and outbound byte count."""
        return self.bytes_out + self.bytes_in

    @property
    def total_packets(self) -> int:
        """Combined inbound and outbound packet count."""
        return self.packets_out + self.packets_in

    @property
    def average_rtt(self) -> float:
        """Mean of recorded RTT samples, or 0.0 when none exist."""
        if not self.rtt_samples:
            return 0.0
        return sum(self.rtt_samples) / len(self.rtt_samples)

    def update_bytes(self, bytes_in: int = 0, bytes_out: int = 0):
        """Accumulate byte counters."""
        self.bytes_in += bytes_in
        self.bytes_out += bytes_out

    def update_packets(self, packets_in: int = 0, packets_out: int = 0):
        """Accumulate packet counters."""
        self.packets_in += packets_in
        self.packets_out += packets_out

    def add_rtt_sample(self, rtt: float):
        """Record an RTT sample, retaining only the 100 most recent."""
        samples = self.rtt_samples
        samples.append(rtt)
        if len(samples) > 100:
            # Drop everything but the trailing 100 samples, in place.
            del samples[:-100]

    def to_dict(self) -> Dict:
        """Serialize all counters and derived values to a plain dictionary."""
        return {
            'bytes_in': self.bytes_in,
            'bytes_out': self.bytes_out,
            'packets_in': self.packets_in,
            'packets_out': self.packets_out,
            'total_bytes': self.total_bytes,
            'total_packets': self.total_packets,
            'errors': self.errors,
            'retransmits': self.retransmits,
            'average_rtt': self.average_rtt,
            'rtt_samples_count': len(self.rtt_samples)
        }
|
| 92 |
+
|
| 93 |
+
|
| 94 |
+
@dataclass
class UnifiedSession:
    """Protocol-agnostic record of one tracked network session."""
    id: str
    type: SessionType
    state: SessionState
    start_time: float
    last_active: float
    source_ip: str
    source_port: int
    dest_ip: str
    dest_port: int
    metrics: SessionMetrics = field(default_factory=SessionMetrics)
    metadata: Dict[str, Any] = field(default_factory=dict)

    @property
    def duration(self) -> float:
        """Seconds elapsed since the session was created."""
        now = time.time()
        return now - self.start_time

    @property
    def idle_time(self) -> float:
        """Seconds since the last recorded activity."""
        now = time.time()
        return now - self.last_active

    def update_activity(self):
        """Stamp the session as active right now."""
        self.last_active = time.time()

    def to_dict(self) -> Dict:
        """Serialize the session, its metrics and metadata to a plain dict."""
        info = {
            'id': self.id,
            'type': self.type.value,
            'state': self.state.value,
            'start_time': self.start_time,
            'last_active': self.last_active,
            'duration': self.duration,
            'idle_time': self.idle_time,
            'source_ip': self.source_ip,
            'source_port': self.source_port,
            'dest_ip': self.dest_ip,
            'dest_port': self.dest_port,
            'metrics': self.metrics.to_dict(),
            'metadata': self.metadata,
        }
        return info
|
| 140 |
+
|
| 141 |
+
|
| 142 |
+
class SessionTracker:
    """Thread-safe singleton registry of all active network sessions.

    Sessions are created via create_session(), mutated through
    update_session(), and removed by a background cleanup thread:
    CLOSED sessions after 5 minutes, idle ones after 30 minutes.
    """

    _instance = None
    _lock = threading.Lock()

    def __new__(cls):
        # Class-level lock guards both instance creation and the
        # _initialized flag so concurrent first calls stay safe.
        with cls._lock:
            if cls._instance is None:
                cls._instance = super().__new__(cls)
                cls._instance._initialized = False
        return cls._instance

    def __init__(self):
        if self._initialized:
            return

        self.sessions: Dict[str, UnifiedSession] = {}
        self.lock = threading.Lock()
        # Fix: an Event (instead of a bare flag polled between 60 s sleeps)
        # lets shutdown() interrupt the cleanup thread immediately rather
        # than waiting for the current sleep to elapse.
        self._stop_event = threading.Event()
        self.running = True
        self.cleanup_thread = threading.Thread(target=self._cleanup_loop)
        self.cleanup_thread.daemon = True
        self.cleanup_thread.start()

        self._initialized = True

    def create_session(self, session_type: SessionType, source_ip: str,
                       source_port: int, dest_ip: str, dest_port: int,
                       **kwargs) -> UnifiedSession:
        """Create, register and return a new session.

        Extra keyword arguments are stored as session metadata.
        """
        now = time.time()
        session = UnifiedSession(
            id=str(uuid.uuid4()),
            type=session_type,
            state=SessionState.INITIALIZING,
            start_time=now,
            last_active=now,
            source_ip=source_ip,
            source_port=source_port,
            dest_ip=dest_ip,
            dest_port=dest_port,
            metadata=kwargs
        )

        with self.lock:
            self.sessions[session.id] = session

        return session

    def get_session(self, session_id: str) -> Optional[UnifiedSession]:
        """Return the session with the given ID, or None if unknown."""
        # Fix: read the shared dict under the lock for consistency with
        # the mutation paths.
        with self.lock:
            return self.sessions.get(session_id)

    def update_session(self, session_id: str, state: Optional[SessionState] = None,
                       metrics_update: Optional[Dict] = None,
                       metadata_update: Optional[Dict] = None) -> bool:
        """Apply state/metrics/metadata updates to a session.

        Returns False when the session ID is unknown, True otherwise.
        Recognized metrics_update keys: bytes_in, bytes_out, packets_in,
        packets_out, rtt.
        """
        with self.lock:
            session = self.sessions.get(session_id)
            if not session:
                return False

            if state:
                session.state = state

            if metrics_update:
                session.metrics.update_bytes(
                    metrics_update.get('bytes_in', 0),
                    metrics_update.get('bytes_out', 0)
                )
                session.metrics.update_packets(
                    metrics_update.get('packets_in', 0),
                    metrics_update.get('packets_out', 0)
                )
                if 'rtt' in metrics_update:
                    session.metrics.add_rtt_sample(metrics_update['rtt'])

            if metadata_update:
                session.metadata.update(metadata_update)

            session.update_activity()

        return True

    def close_session(self, session_id: str):
        """Mark a session CLOSED; cleanup removes it after the grace period."""
        with self.lock:
            session = self.sessions.get(session_id)
            if session:
                session.state = SessionState.CLOSED
                # Fix: refresh the timestamp so the 5-minute removal grace
                # period starts at close time, not at the last data transfer
                # (previously a long-idle session was purged immediately).
                session.update_activity()

    def get_all_sessions(self) -> List[UnifiedSession]:
        """Return every session that is not CLOSED."""
        with self.lock:
            return [s for s in self.sessions.values()
                    if s.state != SessionState.CLOSED]

    def get_sessions_by_type(self, session_type: SessionType) -> List[UnifiedSession]:
        """Return non-CLOSED sessions of the given type."""
        with self.lock:
            return [s for s in self.sessions.values()
                    if s.type == session_type and s.state != SessionState.CLOSED]

    def get_sessions_by_ip(self, ip_address: str) -> List[UnifiedSession]:
        """Return non-CLOSED sessions whose source or destination matches."""
        with self.lock:
            return [s for s in self.sessions.values()
                    if (s.source_ip == ip_address or s.dest_ip == ip_address)
                    and s.state != SessionState.CLOSED]

    def _cleanup_loop(self):
        """Background loop: purge stale sessions once a minute.

        Event.wait(60) doubles as the sleep and the stop signal, so
        shutdown() wakes this thread immediately.
        """
        while self.running and not self._stop_event.wait(60):
            try:
                self._cleanup_sessions()
            except Exception as e:
                print(f"Error in cleanup loop: {e}")

    def _cleanup_sessions(self):
        """Remove CLOSED sessions after 5 min and force-close idle ones after 30 min."""
        current_time = time.time()
        to_remove = []

        with self.lock:
            for session_id, session in self.sessions.items():
                if (session.state == SessionState.CLOSED and
                    current_time - session.last_active > 300):
                    to_remove.append(session_id)
                elif (session.state != SessionState.CLOSED and
                      current_time - session.last_active > 1800):
                    session.state = SessionState.CLOSED
                    to_remove.append(session_id)

            for session_id in to_remove:
                del self.sessions[session_id]

    def shutdown(self):
        """Stop the cleanup thread promptly and drop all tracked sessions."""
        self.running = False
        self._stop_event.set()  # wakes the cleanup thread out of its wait
        if self.cleanup_thread.is_alive():
            self.cleanup_thread.join()

        with self.lock:
            self.sessions.clear()
|
core/shadowsocks_protocol.py
ADDED
|
@@ -0,0 +1,121 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Shadowsocks Protocol Implementation
|
| 3 |
+
"""
|
| 4 |
+
|
| 5 |
+
import os
|
| 6 |
+
import asyncio
|
| 7 |
+
import hashlib
|
| 8 |
+
from typing import Optional, Tuple
|
| 9 |
+
from cryptography.hazmat.primitives.ciphers.aead import ChaCha20Poly1305
|
| 10 |
+
import logging
|
| 11 |
+
|
| 12 |
+
logger = logging.getLogger(__name__)
|
| 13 |
+
|
| 14 |
+
class ShadowsocksProtocol:
    """Minimal Shadowsocks-style proxy using ChaCha20-Poly1305.

    Wire format per packet: 12-byte random nonce || AEAD ciphertext.
    NOTE(review): packets are framed by single stream reads; a partial
    read of one packet will fail decryption. A production implementation
    needs explicit length-prefixed chunking — confirm against the spec.
    """
    CHUNK_SIZE = 8192

    def __init__(self, access_key: str):
        self.access_key = access_key
        self.cipher = self._create_cipher()
        self.buffer = bytearray()

    def _create_cipher(self) -> ChaCha20Poly1305:
        """Derive a 256-bit AEAD key from the access key via SHA-256."""
        key = hashlib.sha256(self.access_key.encode()).digest()
        return ChaCha20Poly1305(key)

    async def handle_connection(self, reader: asyncio.StreamReader, writer: asyncio.StreamWriter):
        """Handle one client: parse the target header, then proxy both ways."""
        target_writer = None
        try:
            # Read and decrypt the initial packet (address header + payload)
            data = await reader.read(self.CHUNK_SIZE)
            if not data:
                return

            decrypted = self._decrypt_packet(data)
            parsed = self._extract_address(decrypted)
            if not parsed:
                logger.error("Invalid target address")
                return
            host, port, payload_start = parsed

            # Connect to target
            target_reader, target_writer = await asyncio.open_connection(host, port)

            # Fix: forward any payload that followed the address header in
            # the first packet (previously computed but silently dropped).
            initial_payload = decrypted[payload_start:]
            if initial_payload:
                target_writer.write(initial_payload)
                await target_writer.drain()

            # Start bidirectional forwarding
            await self._proxy_data(reader, writer, target_reader, target_writer)

        except Exception as e:
            logger.error(f"Connection error: {e}")
        finally:
            # Fix: also close the target writer (it leaked when an error
            # occurred before/inside _proxy_data).
            for w in (writer, target_writer):
                if w is None:
                    continue
                try:
                    w.close()
                    await w.wait_closed()
                except Exception:
                    pass

    async def _proxy_data(self,
                          client_reader: asyncio.StreamReader,
                          client_writer: asyncio.StreamWriter,
                          target_reader: asyncio.StreamReader,
                          target_writer: asyncio.StreamWriter):
        """Bidirectional pump: decrypt client->target, encrypt target->client."""
        async def forward(reader: asyncio.StreamReader,
                          writer: asyncio.StreamWriter,
                          transform=None):
            try:
                while True:
                    data = await reader.read(self.CHUNK_SIZE)
                    if not data:
                        break
                    if transform:
                        data = transform(data)
                    writer.write(data)
                    await writer.drain()
            except Exception as e:
                logger.error(f"Forward error: {e}")
            finally:
                try:
                    writer.close()
                    await writer.wait_closed()
                except Exception:
                    pass

        await asyncio.gather(
            # Fix: client->target traffic must be decrypted, mirroring how
            # the first packet is handled (it was previously forwarded raw,
            # so the target received ciphertext).
            forward(client_reader, target_writer, transform=self._decrypt_packet),
            forward(target_reader, client_writer, transform=self._encrypt_packet)
        )

    def _encrypt_packet(self, data: bytes) -> bytes:
        """Encrypt one packet; prepend the fresh 12-byte nonce."""
        nonce = os.urandom(12)
        encrypted = self.cipher.encrypt(nonce, data, None)
        return nonce + encrypted

    def _decrypt_packet(self, data: bytes) -> bytes:
        """Decrypt one packet laid out as nonce || ciphertext."""
        nonce, ciphertext = data[:12], data[12:]
        return self.cipher.decrypt(nonce, ciphertext, None)

    def _extract_address(self, data: bytes) -> Optional[Tuple[str, int, int]]:
        """Parse the SOCKS5-style address header.

        Returns (host, port, payload_offset) — payload_offset is the index
        of the first payload byte after the header — or None when the
        address type is unknown or the header is malformed.
        """
        try:
            atyp = data[0]  # address type

            if atyp == 1:  # IPv4
                addr = '.'.join(str(b) for b in data[1:5])
                port = int.from_bytes(data[5:7], 'big')
                payload_start = 7
            elif atyp == 3:  # Domain name
                length = data[1]
                addr = data[2:2 + length].decode()
                port = int.from_bytes(data[2 + length:4 + length], 'big')
                payload_start = 4 + length
            elif atyp == 4:  # IPv6
                # Fix: group the 16 address bytes into 8 two-byte hextets
                # (previously each byte became its own colon group, which
                # is not a valid IPv6 textual address).
                hex_form = bytes(data[1:17]).hex()
                addr = ':'.join(hex_form[i:i + 4] for i in range(0, 32, 4))
                port = int.from_bytes(data[17:19], 'big')
                payload_start = 19
            else:
                return None

            return addr, port, payload_start

        except Exception as e:
            logger.error(f"Error extracting address: {e}")
            return None
|
core/socket_translator.py
ADDED
|
@@ -0,0 +1,339 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Socket Translator Module
|
| 3 |
+
|
| 4 |
+
Bridges virtual connections to real host sockets:
|
| 5 |
+
- Map virtual connections to host sockets/HTTP clients
|
| 6 |
+
- Bidirectional data streaming
|
| 7 |
+
- Connection lifecycle management
|
| 8 |
+
- Protocol translation (TCP/UDP to host sockets)
|
| 9 |
+
"""
|
| 10 |
+
|
| 11 |
+
import socket
|
| 12 |
+
import threading
|
| 13 |
+
import time
|
| 14 |
+
import asyncio
|
| 15 |
+
import aiohttp
|
| 16 |
+
import ssl
|
| 17 |
+
from typing import Dict, Optional, Callable, Tuple, Any
|
| 18 |
+
from dataclasses import dataclass
|
| 19 |
+
from enum import Enum
|
| 20 |
+
import urllib.parse
|
| 21 |
+
import json
|
| 22 |
+
|
| 23 |
+
from .tcp_engine import TCPConnection
|
| 24 |
+
|
| 25 |
+
|
| 26 |
+
class ConnectionType(Enum):
    """Host-side transport used for a translated connection."""
    TCP_SOCKET = "TCP_SOCKET"      # raw non-blocking TCP socket
    UDP_SOCKET = "UDP_SOCKET"      # datagram socket
    HTTP_CLIENT = "HTTP_CLIENT"    # per-request aiohttp client session
    HTTPS_CLIENT = "HTTPS_CLIENT"  # aiohttp with the translator's relaxed SSL context
|
| 31 |
+
|
| 32 |
+
|
| 33 |
+
@dataclass
class SocketConnection:
    """State and traffic counters for one host-side translated connection."""
    connection_id: str
    connection_type: ConnectionType
    virtual_connection: Optional[TCPConnection]
    host_socket: Optional[socket.socket]
    remote_host: str
    remote_port: int
    created_time: float
    last_activity: float
    bytes_sent: int = 0
    bytes_received: int = 0
    is_connected: bool = False
    error_count: int = 0

    def update_activity(self, bytes_transferred: int = 0, direction: str = 'sent'):
        """Refresh the activity timestamp and add to the matching counter."""
        self.last_activity = time.time()
        if direction != 'sent':
            self.bytes_received += bytes_transferred
        else:
            self.bytes_sent += bytes_transferred

    def to_dict(self) -> Dict:
        """Serialize the connection's state for reporting/diagnostics."""
        now = time.time()
        snapshot = {
            'connection_id': self.connection_id,
            'connection_type': self.connection_type.value,
            'remote_host': self.remote_host,
            'remote_port': self.remote_port,
            'created_time': self.created_time,
            'last_activity': self.last_activity,
            'bytes_sent': self.bytes_sent,
            'bytes_received': self.bytes_received,
            'is_connected': self.is_connected,
            'error_count': self.error_count,
            'duration': now - self.created_time,
        }
        return snapshot
|
| 72 |
+
|
| 73 |
+
|
| 74 |
+
class SocketTranslator:
    """Singleton that bridges virtual connections to real host sockets/HTTP.

    Owns a private asyncio event loop running on a daemon thread, plus a
    cleanup thread that expires stale connections once a minute.
    """

    _instance = None
    _lock = threading.Lock()

    def __new__(cls):
        # Class-level lock guards both instance creation and _initialized.
        with cls._lock:
            if cls._instance is None:
                cls._instance = super().__new__(cls)
                cls._instance._initialized = False
        return cls._instance

    def __init__(self):
        if self._initialized:
            return

        self.connections: Dict[str, SocketConnection] = {}
        self.lock = threading.Lock()
        # Fix: an Event (instead of a bare flag polled between 60 s sleeps)
        # lets shutdown() interrupt the cleanup thread immediately.
        self._stop_event = threading.Event()
        self.running = True
        self.cleanup_thread = threading.Thread(target=self._cleanup_loop)
        self.cleanup_thread.daemon = True
        self.cleanup_thread.start()

        # SSL context for HTTPS; verification is disabled deliberately —
        # this translator forwards opaque client traffic.
        self.ssl_context = ssl.create_default_context()
        self.ssl_context.check_hostname = False
        self.ssl_context.verify_mode = ssl.CERT_NONE

        # Dedicated asyncio loop on its own daemon thread.
        self.loop = asyncio.new_event_loop()
        self.async_thread = threading.Thread(target=self._run_async_loop)
        self.async_thread.daemon = True
        self.async_thread.start()

        self._initialized = True

    def create_connection(self, connection_type: ConnectionType,
                          remote_host: str, remote_port: int,
                          virtual_connection: Optional[TCPConnection] = None) -> str:
        """Register a new (not yet connected) connection; returns its ID."""
        now = time.time()
        connection = SocketConnection(
            connection_id=str(time.time_ns()),
            connection_type=connection_type,
            virtual_connection=virtual_connection,
            host_socket=None,
            remote_host=remote_host,
            remote_port=remote_port,
            created_time=now,
            last_activity=now
        )

        with self.lock:
            self.connections[connection.connection_id] = connection

        return connection.connection_id

    async def connect(self, connection_id: str) -> bool:
        """Establish the host-side transport for a registered connection.

        Must run on self.loop. Returns False for unknown IDs or failures.
        """
        with self.lock:
            connection = self.connections.get(connection_id)
        if not connection:
            return False

        try:
            if connection.connection_type in (ConnectionType.TCP_SOCKET, ConnectionType.UDP_SOCKET):
                sock_type = socket.SOCK_STREAM if connection.connection_type == ConnectionType.TCP_SOCKET else socket.SOCK_DGRAM
                sock = socket.socket(socket.AF_INET, sock_type)
                sock.setblocking(False)

                try:
                    sock.connect((connection.remote_host, connection.remote_port))
                except BlockingIOError:
                    pass  # expected for a non-blocking connect in progress

                connection.host_socket = sock
                connection.is_connected = True

                # Watch the socket for incoming data on the owning loop.
                asyncio.create_task(self._monitor_socket(connection))

            elif connection.connection_type in (ConnectionType.HTTP_CLIENT, ConnectionType.HTTPS_CLIENT):
                # HTTP(S) connections are made per request in send_data().
                connection.is_connected = True

            connection.update_activity()
            return True

        except Exception as e:
            print(f"Connection error: {e}")
            connection.error_count += 1
            return False

    async def send_data(self, connection_id: str, data: bytes) -> bool:
        """Send raw bytes (sockets) or a parsed HTTP request (HTTP clients).

        For HTTP(S) connections the response body is pushed back into the
        associated virtual connection. Returns False on any failure.
        """
        with self.lock:
            connection = self.connections.get(connection_id)
        if not connection or not connection.is_connected:
            return False

        try:
            if connection.connection_type in (ConnectionType.TCP_SOCKET, ConnectionType.UDP_SOCKET):
                await self.loop.sock_sendall(connection.host_socket, data)
                connection.update_activity(len(data), 'sent')

            elif connection.connection_type in (ConnectionType.HTTP_CLIENT, ConnectionType.HTTPS_CLIENT):
                request = self._parse_http_request(data)
                if not request:
                    return False

                async with aiohttp.ClientSession() as session:
                    url = f"{'https' if connection.connection_type == ConnectionType.HTTPS_CLIENT else 'http'}://{connection.remote_host}:{connection.remote_port}{request['path']}"

                    async with session.request(
                        method=request['method'],
                        url=url,
                        headers=request['headers'],
                        data=request.get('body', b''),
                        ssl=self.ssl_context if connection.connection_type == ConnectionType.HTTPS_CLIENT else None
                    ) as response:
                        # Forward the response into the virtual connection.
                        response_data = await response.read()
                        if connection.virtual_connection:
                            connection.virtual_connection.send_data(response_data)

                        connection.update_activity(len(data), 'sent')
                        connection.update_activity(len(response_data), 'received')

            return True

        except Exception as e:
            print(f"Send error: {e}")
            connection.error_count += 1
            return False

    def close_connection(self, connection_id: str):
        """Close the host socket (if any) and mark the connection inactive."""
        connection = self.connections.get(connection_id)
        if connection:
            if connection.host_socket:
                try:
                    connection.host_socket.close()
                except OSError:
                    # Fix: narrowed from a bare except; only socket-close
                    # failures are expected and safe to ignore here.
                    pass
            connection.is_connected = False
            connection.update_activity()

    def get_connection(self, connection_id: str) -> Optional[SocketConnection]:
        """Return the connection record for an ID, or None if unknown."""
        with self.lock:
            return self.connections.get(connection_id)

    def _run_async_loop(self):
        """Thread target: run the translator's private event loop forever."""
        asyncio.set_event_loop(self.loop)
        self.loop.run_forever()

    async def _monitor_socket(self, connection: SocketConnection):
        """Pump incoming socket data into the virtual connection until EOF/error."""
        while connection.is_connected:
            try:
                data = await self.loop.sock_recv(connection.host_socket, 8192)
                if not data:
                    break

                connection.update_activity(len(data), 'received')

                if connection.virtual_connection:
                    connection.virtual_connection.send_data(data)

            except Exception as e:
                print(f"Monitor error: {e}")
                connection.error_count += 1
                break

        connection.is_connected = False

    def _cleanup_loop(self):
        """Background loop: expire stale connections once a minute.

        Event.wait(60) doubles as the sleep and the stop signal, so
        shutdown() wakes this thread immediately.
        """
        while self.running and not self._stop_event.wait(60):
            try:
                self._cleanup_connections()
            except Exception as e:
                print(f"Cleanup error: {e}")

    def _cleanup_connections(self):
        """Drop disconnected (>5 min), idle (>30 min) or error-heavy connections."""
        current_time = time.time()
        to_remove = []

        with self.lock:
            for connection_id, connection in self.connections.items():
                if ((not connection.is_connected and current_time - connection.last_activity > 300) or
                    (connection.is_connected and current_time - connection.last_activity > 1800) or
                    connection.error_count > 5):
                    self.close_connection(connection_id)
                    to_remove.append(connection_id)

            for connection_id in to_remove:
                del self.connections[connection_id]

    def _parse_http_request(self, data: bytes) -> Optional[Dict]:
        """Parse a raw HTTP/1.x request into method/path/version/headers/body.

        Returns None on malformed input.
        """
        try:
            lines = data.decode('utf-8', errors='ignore').split('\r\n')
            if not lines:
                return None

            request_line = lines[0].split(' ')
            if len(request_line) < 3:
                return None

            method, path, version = request_line[0], request_line[1], request_line[2]

            headers = {}
            i = 1
            while i < len(lines):
                line = lines[i].strip()
                if not line:
                    break  # blank line separates headers from body
                if ':' in line:
                    key, value = line.split(':', 1)
                    headers[key.strip()] = value.strip()
                i += 1

            body = '\r\n'.join(lines[i + 1:]).encode('utf-8') if i + 1 < len(lines) else b''

            return {
                'method': method,
                'path': path,
                'version': version,
                'headers': headers,
                'body': body
            }

        except Exception as e:
            print(f"HTTP parse error: {e}")
            return None

    def shutdown(self):
        """Stop background threads, close all connections, stop the loop."""
        self.running = False
        self._stop_event.set()  # wakes the cleanup thread out of its wait
        if self.cleanup_thread.is_alive():
            self.cleanup_thread.join()

        with self.lock:
            for connection_id in list(self.connections.keys()):
                self.close_connection(connection_id)
            self.connections.clear()

        # Fix: loop.stop() is not thread-safe when called from another
        # thread; schedule it on the loop instead.
        self.loop.call_soon_threadsafe(self.loop.stop)
        if self.async_thread.is_alive():
            self.async_thread.join()
|
core/tcp_engine.py
ADDED
|
@@ -0,0 +1,356 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
TCP Engine Module
|
| 3 |
+
|
| 4 |
+
Implements a complete TCP state machine in user-space:
|
| 5 |
+
- Full TCP state machine (SYN, SYN-ACK, ESTABLISHED, FIN, RST)
|
| 6 |
+
- Sequence and acknowledgment number tracking
|
| 7 |
+
- Sliding window implementation
|
| 8 |
+
- Retransmission and timeout handling
|
| 9 |
+
- Congestion control
|
| 10 |
+
"""
|
| 11 |
+
|
| 12 |
+
import time
|
| 13 |
+
import threading
|
| 14 |
+
import random
|
| 15 |
+
from typing import Dict, List, Optional, Tuple, Callable
|
| 16 |
+
from dataclasses import dataclass, field
|
| 17 |
+
from enum import Enum
|
| 18 |
+
from collections import deque
|
| 19 |
+
|
| 20 |
+
from .ip_parser import TCPHeader, IPv4Header, IPParser
|
| 21 |
+
|
| 22 |
+
|
| 23 |
+
class TCPState(Enum):
    """TCP connection states for the user-space state machine
    (the classic RFC 793 state set)."""
    CLOSED = "CLOSED"
    LISTEN = "LISTEN"
    SYN_SENT = "SYN_SENT"
    SYN_RECEIVED = "SYN_RECEIVED"
    ESTABLISHED = "ESTABLISHED"
    FIN_WAIT_1 = "FIN_WAIT_1"
    FIN_WAIT_2 = "FIN_WAIT_2"
    CLOSE_WAIT = "CLOSE_WAIT"
    CLOSING = "CLOSING"
    LAST_ACK = "LAST_ACK"
    TIME_WAIT = "TIME_WAIT"
|
| 35 |
+
|
| 36 |
+
|
| 37 |
+
@dataclass
class TCPSegment:
    """A single TCP segment queued for (re)transmission.

    Attributes:
        seq_num: Sequence number of the first byte of this segment.
        ack_num: Acknowledgment number carried by the segment.
        flags: TCP flag bits (FIN=0x01, SYN=0x02, RST=0x04, PSH=0x08, ACK=0x10).
        window: Advertised receive window.
        data: Segment payload.
        timestamp: Creation time, used for RTT estimation.
        retransmit_count: Number of times the segment has been retransmitted.
    """
    seq_num: int
    ack_num: int
    flags: int
    window: int
    data: bytes
    timestamp: float = field(default_factory=time.time)
    retransmit_count: int = 0

    @property
    def data_length(self) -> int:
        """Length of the payload in bytes."""
        return len(self.data)

    @property
    def seq_end(self) -> int:
        """Sequence number immediately after this segment, modulo 2**32.

        SYN and FIN each consume one sequence number in addition to the
        payload bytes (RFC 793).  The result wraps at 2**32 to stay
        consistent with the rest of the module's sequence arithmetic
        (every other update uses ``% 0x100000000``); the original
        implementation could overflow past the 32-bit sequence space.
        """
        length = self.data_length
        if self.flags & 0x02:  # SYN occupies one sequence number
            length += 1
        if self.flags & 0x01:  # FIN occupies one sequence number
            length += 1
        return (self.seq_num + length) % 0x100000000
|
| 63 |
+
|
| 64 |
+
|
| 65 |
+
@dataclass
class TCPConnection:
    """User-space TCP connection state machine.

    Tracks sequence/acknowledgment numbers, send and receive buffers,
    retransmission state and a simple congestion window for a single
    (local_ip, local_port, remote_ip, remote_port) flow.

    NOTE(review): ``_send_packet`` only updates local bookkeeping -- the
    actual wire transmission is still a TODO, so this class currently
    models the state machine without emitting packets.
    """
    # Connection identification (the flow 4-tuple)
    local_ip: str
    local_port: int
    remote_ip: str
    remote_port: int

    # Current position in the TCP state machine
    state: TCPState = TCPState.CLOSED

    # Sequence numbers (all arithmetic is modulo 2**32)
    local_seq: int = field(default_factory=lambda: random.randint(0, 0xFFFFFFFF))
    local_ack: int = 0   # next byte we expect from the peer (our ACK value)
    remote_seq: int = 0  # last sequence number seen from the peer
    remote_ack: int = 0
    initial_seq: int = 0  # our ISN, captured in __post_init__

    # Window management
    local_window: int = 65535
    remote_window: int = 65535
    window_scale: int = 0

    # Buffers
    send_buffer: deque = field(default_factory=deque)
    recv_buffer: deque = field(default_factory=deque)
    out_of_order_buffer: Dict[int, bytes] = field(default_factory=dict)

    # Retransmission state
    unacked_segments: Dict[int, TCPSegment] = field(default_factory=dict)
    retransmit_timer: Optional[float] = None
    rto: float = 1.0     # Retransmission timeout (seconds)
    srtt: float = 0.0    # Smoothed round-trip time
    rttvar: float = 0.0  # Round-trip time variation

    # Congestion control
    cwnd: int = 1          # Congestion window in MSS units; becomes float in congestion avoidance
    ssthresh: int = 65535  # Slow start threshold
    dupacks: int = 0       # Duplicate ACK count
    mss: int = 1460        # Maximum segment size

    # Callbacks
    on_data_received: Optional[Callable[[bytes], None]] = None
    on_state_change: Optional[Callable[[TCPState], None]] = None

    def __post_init__(self):
        # Remember our initial sequence number for handshake validation.
        self.initial_seq = self.local_seq

    def handle_packet(self, packet: bytes):
        """Process one raw incoming IPv4+TCP packet for this connection."""
        try:
            # Parse headers
            ip_header, payload = IPParser.parse_ipv4_header(packet)
            tcp_header, data = IPParser.parse_tcp_header(payload)

            # Dispatch on current state
            if self.state == TCPState.LISTEN:
                self._handle_listen(tcp_header, data)
            elif self.state == TCPState.SYN_SENT:
                self._handle_syn_sent(tcp_header, data)
            elif self.state == TCPState.SYN_RECEIVED:
                self._handle_syn_received(tcp_header, data)
            elif self.state == TCPState.ESTABLISHED:
                self._handle_established(tcp_header, data)
            elif self.state in (TCPState.FIN_WAIT_1, TCPState.FIN_WAIT_2):
                self._handle_fin_wait(tcp_header, data)
            elif self.state == TCPState.CLOSE_WAIT:
                self._handle_close_wait(tcp_header, data)
            elif self.state == TCPState.LAST_ACK:
                self._handle_last_ack(tcp_header, data)

            # Update RTT if this is an ACK for a sent packet.
            # NOTE(review): comparing ack_num against local_seq (already
            # advanced past all sent data) looks suspect -- the oldest
            # unacked sequence number would be the usual reference; confirm.
            if tcp_header.ack and tcp_header.ack_num > self.local_seq:
                self._update_rtt(tcp_header.ack_num)

            # Arm/check the retransmission timer
            self._manage_retransmission_timer()

        except Exception as e:
            print(f"Error handling packet: {e}")

    def send_data(self, data: bytes):
        """Queue *data* for transmission; returns False unless ESTABLISHED."""
        if self.state != TCPState.ESTABLISHED:
            return False

        # Add to send buffer (byte-by-byte into the deque)
        self.send_buffer.extend(data)

        # Try to send what the windows currently allow
        self._send_from_buffer()

        return True

    def close(self):
        """Initiate an orderly close (active close or reply to peer's FIN)."""
        if self.state == TCPState.ESTABLISHED:
            self._send_fin()
            self._set_state(TCPState.FIN_WAIT_1)
        elif self.state == TCPState.CLOSE_WAIT:
            self._send_fin()
            self._set_state(TCPState.LAST_ACK)

    def _set_state(self, new_state: TCPState):
        """Change connection state and fire the on_state_change callback."""
        if new_state != self.state:
            self.state = new_state
            if self.on_state_change:
                self.on_state_change(new_state)

    def _send_packet(self, flags: int, data: bytes = b''):
        """Build a TCP segment, track it for retransmission, advance local_seq.

        NOTE(review): transmission itself is still a TODO below.
        """
        segment = TCPSegment(
            seq_num=self.local_seq,
            ack_num=self.local_ack,
            flags=flags,
            window=self.local_window,
            data=data
        )

        # Track for retransmission unless it is a pure ACK (which is
        # never retransmitted in TCP).
        if data or flags != 0x10:  # Not pure ACK
            self.unacked_segments[self.local_seq] = segment

        # Advance our sequence number: payload bytes plus one each for
        # SYN and FIN (they occupy sequence space), modulo 2**32.
        self.local_seq = (self.local_seq + len(data)) % 0x100000000
        if flags & 0x02:  # SYN
            self.local_seq = (self.local_seq + 1) % 0x100000000
        if flags & 0x01:  # FIN
            self.local_seq = (self.local_seq + 1) % 0x100000000

        # TODO: Actually send the packet

    def _handle_listen(self, header: TCPHeader, data: bytes):
        """LISTEN: on SYN, reply SYN-ACK and move to SYN_RECEIVED."""
        if header.syn:
            self.remote_seq = header.seq_num
            self.local_ack = (header.seq_num + 1) % 0x100000000
            self._send_packet(0x12)  # SYN-ACK
            self._set_state(TCPState.SYN_RECEIVED)

    def _handle_syn_sent(self, header: TCPHeader, data: bytes):
        """SYN_SENT: on valid SYN-ACK, complete the handshake with an ACK."""
        if header.syn and header.ack:
            if header.ack_num == (self.initial_seq + 1) % 0x100000000:
                self.remote_seq = header.seq_num
                self.local_ack = (header.seq_num + 1) % 0x100000000
                self._send_packet(0x10)  # ACK
                self._set_state(TCPState.ESTABLISHED)

    def _handle_syn_received(self, header: TCPHeader, data: bytes):
        """SYN_RECEIVED: on ACK of our SYN-ACK, the handshake completes.

        Added fix: handle_packet dispatched to this method but it was
        never defined, so any passive-open connection raised
        AttributeError as soon as the final handshake ACK arrived.
        """
        if header.ack and header.ack_num == self.local_seq:
            self._set_state(TCPState.ESTABLISHED)

    def _handle_established(self, header: TCPHeader, data: bytes):
        """ESTABLISHED: deliver in-order data, buffer out-of-order, track ACK/FIN."""
        if data:
            if header.seq_num == self.local_ack:
                # In-order segment: deliver and advance our ACK point
                if self.on_data_received:
                    self.on_data_received(data)
                self.local_ack = (self.local_ack + len(data)) % 0x100000000
                self._send_packet(0x10)  # ACK
            elif header.seq_num > self.local_ack:
                # Out-of-order segment: stash and re-ACK the expected seq.
                # NOTE(review): out_of_order_buffer is never drained once
                # the gap fills -- buffered data is currently lost; confirm.
                self.out_of_order_buffer[header.seq_num] = data
                self._send_packet(0x10)  # ACK
            else:
                # Duplicate segment: re-ACK so the peer stops resending
                self._send_packet(0x10)  # ACK

        if header.ack:
            # Process acknowledgments
            self._handle_ack(header.ack_num)

        if header.fin:
            # Peer initiated close: ACK the FIN and await local close()
            self.local_ack = (self.local_ack + 1) % 0x100000000
            self._send_packet(0x10)  # ACK
            self._set_state(TCPState.CLOSE_WAIT)

    def _handle_ack(self, ack_num: int):
        """Retire acknowledged segments and grow the congestion window."""
        # Remove fully acknowledged segments.
        # NOTE(review): the plain "<" comparison mishandles 32-bit sequence
        # wrap-around; modular comparison would be needed for long flows.
        acknowledged = [seq for seq in self.unacked_segments.keys()
                        if seq < ack_num]
        for seq in acknowledged:
            del self.unacked_segments[seq]

        # Congestion window growth
        if self.cwnd < self.ssthresh:
            # Slow start: exponential growth (one MSS per ACK)
            self.cwnd += 1
        else:
            # Congestion avoidance: ~one MSS per RTT (makes cwnd a float)
            self.cwnd += 1 / self.cwnd

        # Newly freed window may allow more data out
        self._send_from_buffer()

    def _send_from_buffer(self):
        """Drain as much of the send buffer as the windows permit."""
        while self.send_buffer:
            # Effective window: receiver's advertised window capped by the
            # congestion window.  int() is required because cwnd becomes a
            # float during congestion avoidance (cwnd += 1/cwnd), and
            # slicing a list with a float raises TypeError.
            window = int(min(self.remote_window, self.cwnd * self.mss))
            if not window:
                break

            # Take up to `window` bytes from the head of the buffer
            data = bytes(list(self.send_buffer)[:window])
            if not data:
                break

            # Remove the taken bytes, then transmit
            for _ in range(len(data)):
                self.send_buffer.popleft()
            self._send_packet(0x18, data)  # PSH-ACK

    def _update_rtt(self, ack_num: int):
        """Update SRTT/RTTVAR/RTO from the segment this ACK covers.

        NOTE(review): the ``seq == ack_num - 1`` match only finds segments
        whose seq_end happens to equal ack_num with 1-byte spacing; segments
        with payloads are unlikely to ever match -- confirm intent.
        """
        for seq, segment in self.unacked_segments.items():
            if seq == ack_num - 1:
                rtt = time.time() - segment.timestamp
                if self.srtt == 0:
                    # First sample (RFC 6298 initialization)
                    self.srtt = rtt
                    self.rttvar = rtt / 2
                else:
                    # Exponentially weighted moving averages
                    self.rttvar = (0.75 * self.rttvar +
                                   0.25 * abs(self.srtt - rtt))
                    self.srtt = 0.875 * self.srtt + 0.125 * rtt
                self.rto = self.srtt + max(4 * self.rttvar, 0.5)
                break

    def _manage_retransmission_timer(self):
        """Arm the retransmission timer, or fire a timeout if it expired."""
        if not self.unacked_segments:
            self.retransmit_timer = None
            return

        current_time = time.time()
        if self.retransmit_timer is None:
            self.retransmit_timer = current_time + self.rto
        elif current_time >= self.retransmit_timer:
            # Timeout occurred
            self._handle_timeout()

    def _handle_timeout(self):
        """React to a retransmission timeout (RFC 6298-style backoff)."""
        # Exponential backoff of the RTO
        self.rto *= 2

        # Collapse the congestion window (loss signal)
        self.ssthresh = max(2, self.cwnd // 2)
        self.cwnd = 1

        # Retransmit the oldest unacked segment.
        # NOTE(review): _send_packet assigns a *new* sequence number and
        # re-registers the segment, so this duplicates sequence space
        # instead of resending the original seq -- confirm before relying
        # on retransmission.
        if self.unacked_segments:
            oldest_seq = min(self.unacked_segments.keys())
            segment = self.unacked_segments[oldest_seq]
            if segment.retransmit_count < 5:
                segment.retransmit_count += 1
                self._send_packet(segment.flags, segment.data)
            else:
                # Too many retransmissions: give up on the connection
                self._set_state(TCPState.CLOSED)

        # Re-arm the timer with the backed-off RTO
        self.retransmit_timer = time.time() + self.rto

    def _send_fin(self):
        """Send a FIN (with ACK) to begin our half of the close."""
        self._send_packet(0x11)  # FIN-ACK

    def _handle_fin_wait(self, header: TCPHeader, data: bytes):
        """FIN_WAIT_1/2: track the ACK of our FIN and the peer's FIN."""
        if self.state == TCPState.FIN_WAIT_1:
            if header.ack and header.ack_num == self.local_seq:
                # Our FIN is acknowledged
                self._set_state(TCPState.FIN_WAIT_2)

        if header.fin:
            # Peer's FIN: ACK it, then either simultaneous close (CLOSING)
            # or the normal path into TIME_WAIT.
            self.local_ack = (header.seq_num + 1) % 0x100000000
            self._send_packet(0x10)  # ACK
            if self.state == TCPState.FIN_WAIT_1:
                self._set_state(TCPState.CLOSING)
            else:  # FIN_WAIT_2
                self._set_state(TCPState.TIME_WAIT)

    def _handle_close_wait(self, header: TCPHeader, data: bytes):
        """CLOSE_WAIT: keep processing ACKs until the application closes."""
        if header.ack:
            self._handle_ack(header.ack_num)

    def _handle_last_ack(self, header: TCPHeader, data: bytes):
        """LAST_ACK: when our FIN is acknowledged, the connection is done."""
        if header.ack and header.ack_num == self.local_seq:
            self._set_state(TCPState.CLOSED)
|
core/tcp_forward.py
ADDED
|
@@ -0,0 +1,159 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
TCP Forwarding Engine Module for Outline VPN
|
| 3 |
+
|
| 4 |
+
Handles TCP traffic forwarding and connection tracking with Shadowsocks protocol support
|
| 5 |
+
"""
|
| 6 |
+
|
| 7 |
+
import asyncio
|
| 8 |
+
import logging
|
| 9 |
+
import socket
|
| 10 |
+
from typing import Dict, Set, Optional, Tuple
|
| 11 |
+
from dataclasses import dataclass, field
|
| 12 |
+
from datetime import datetime
|
| 13 |
+
from .shadowsocks_protocol import ShadowsocksProtocol
|
| 14 |
+
|
| 15 |
+
logger = logging.getLogger(__name__)
|
| 16 |
+
|
| 17 |
+
@dataclass
class OutlineTCPConnection:
    """One client<->target TCP relay and its Shadowsocks codec plus counters."""
    client_addr: Tuple[str, int]            # peer address of the connecting client
    target_addr: Tuple[str, int]            # upstream destination
    client_writer: asyncio.StreamWriter
    target_writer: asyncio.StreamWriter
    shadowsocks: ShadowsocksProtocol        # per-connection encrypt/decrypt handler
    bytes_in: int = 0                       # bytes relayed client -> target
    bytes_out: int = 0                      # bytes relayed target -> client
    # default_factory is required here: the original defaults called
    # datetime.now() once at class-definition time, so every connection
    # shared a single frozen timestamp and idle-cleanup saw bogus ages.
    created_at: datetime = field(default_factory=datetime.now)
    last_activity: datetime = field(default_factory=datetime.now)
|
| 28 |
+
|
| 29 |
+
class OutlineTCPForwardingEngine:
    """Relays TCP streams between Outline clients and their targets.

    Each accepted client stream is paired with an upstream connection;
    traffic is decrypted on the client->target leg and encrypted on the
    target->client leg via the shared Shadowsocks handler.
    """

    def __init__(self, access_key: str):
        # conn_id ("cip:cport-thost:tport") -> live connection record
        self.connections: Dict[str, OutlineTCPConnection] = {}
        self.active_ports: Set[int] = set()
        self._lock = asyncio.Lock()
        self.buffer_size = 8192
        self.shadowsocks = ShadowsocksProtocol(access_key)
        # Keep strong references to forwarding tasks: asyncio only holds
        # weak references, so un-referenced tasks can be garbage-collected
        # mid-flight and silently stop relaying.
        self._tasks: Set[asyncio.Task] = set()

    async def create_connection(self,
                                client_reader: asyncio.StreamReader,
                                client_writer: asyncio.StreamWriter,
                                target_host: str,
                                target_port: int) -> Optional[OutlineTCPConnection]:
        """Open the upstream socket and start bidirectional forwarding.

        Returns the connection record, or None (with the client stream
        closed) if the upstream connect fails.
        """
        try:
            # Get client information
            client_addr = client_writer.get_extra_info('peername')

            # Connect to target
            target_reader, target_writer = await asyncio.open_connection(
                target_host, target_port
            )

            # Create connection object
            conn = OutlineTCPConnection(
                client_addr=client_addr,
                target_addr=(target_host, target_port),
                client_writer=client_writer,
                target_writer=target_writer,
                shadowsocks=self.shadowsocks
            )

            # Store connection under its id
            conn_id = f"{client_addr[0]}:{client_addr[1]}-{target_host}:{target_port}"
            async with self._lock:
                self.connections[conn_id] = conn

            # Start bidirectional forwarding, holding task references so
            # they survive garbage collection until they finish.
            for coro in (
                self._forward_stream(client_reader, target_writer, conn, 'in'),
                self._forward_stream(target_reader, client_writer, conn, 'out'),
            ):
                task = asyncio.create_task(coro)
                self._tasks.add(task)
                task.add_done_callback(self._tasks.discard)

            logger.info(f"Created Outline TCP connection: {conn_id}")
            return conn

        except Exception as e:
            logger.error(f"Error creating Outline TCP connection: {e}")
            if client_writer:
                client_writer.close()
                await client_writer.wait_closed()
            return None

    async def _forward_stream(self,
                              reader: asyncio.StreamReader,
                              writer: asyncio.StreamWriter,
                              conn: OutlineTCPConnection,
                              direction: str):
        """Pump one direction of the relay until EOF or error.

        direction 'in' decrypts client traffic; 'out' encrypts replies.
        """
        try:
            while True:
                data = await reader.read(self.buffer_size)
                if not data:
                    break  # EOF from the peer

                # Handle encryption/decryption.
                # NOTE(review): bytes_in counts the *decrypted* length while
                # bytes_out counts the *encrypted* length -- the two counters
                # are not symmetric; confirm which accounting is intended.
                if direction == 'in':
                    # Decrypt incoming data from client
                    data = conn.shadowsocks._decrypt_packet(data)
                    conn.bytes_in += len(data)
                else:
                    # Encrypt outgoing data to client
                    data = conn.shadowsocks._encrypt_packet(data)
                    conn.bytes_out += len(data)

                writer.write(data)
                await writer.drain()
                conn.last_activity = datetime.now()

        except Exception as e:
            logger.error(f"Error forwarding Outline data: {e}")

        finally:
            # Best-effort close: the peer may already be gone, and an
            # exception here must not skip the registry cleanup below.
            try:
                writer.close()
                await writer.wait_closed()
            except Exception:
                pass
            # Remove the connection record (both directions run this;
            # the membership check makes it idempotent).
            conn_id = (f"{conn.client_addr[0]}:{conn.client_addr[1]}-"
                       f"{conn.target_addr[0]}:{conn.target_addr[1]}")
            async with self._lock:
                if conn_id in self.connections:
                    del self.connections[conn_id]

    async def cleanup_inactive(self, timeout: int = 300):
        """Background loop: close and drop connections idle for > timeout s."""
        while True:
            try:
                now = datetime.now()
                to_remove = []

                async with self._lock:
                    for conn_id, conn in self.connections.items():
                        # total_seconds(), not .seconds: .seconds is only the
                        # sub-day remainder, so a connection idle for a day
                        # plus a few seconds would look freshly active.
                        if (now - conn.last_activity).total_seconds() > timeout:
                            to_remove.append(conn_id)

                    for conn_id in to_remove:
                        if conn_id in self.connections:
                            conn = self.connections[conn_id]
                            conn.client_writer.close()
                            conn.target_writer.close()
                            del self.connections[conn_id]
                            logger.info(f"Cleaned up inactive connection: {conn_id}")

                await asyncio.sleep(60)  # Check every minute

            except Exception as e:
                logger.error(f"Error in connection cleanup: {e}")
                await asyncio.sleep(60)  # Retry after error

    def get_connection_stats(self) -> Dict[str, Dict]:
        """Return per-connection byte counts, timestamps and endpoints."""
        stats = {}
        for conn_id, conn in self.connections.items():
            stats[conn_id] = {
                'bytes_in': conn.bytes_in,
                'bytes_out': conn.bytes_out,
                'created_at': conn.created_at.isoformat(),
                'last_activity': conn.last_activity.isoformat(),
                'client_addr': f"{conn.client_addr[0]}:{conn.client_addr[1]}",
                'target_addr': f"{conn.target_addr[0]}:{conn.target_addr[1]}"
            }
        return stats
|
core/traffic_forwarder.py
ADDED
|
@@ -0,0 +1,185 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Traffic Forwarding Engine
|
| 3 |
+
Handles IP packet forwarding and NAT for VPN tunnels
|
| 4 |
+
"""
|
| 5 |
+
|
| 6 |
+
import asyncio
|
| 7 |
+
import socket
|
| 8 |
+
import struct
|
| 9 |
+
from typing import Dict, Optional, Tuple, Union
|
| 10 |
+
from dataclasses import dataclass
|
| 11 |
+
import os
|
| 12 |
+
from .ip_parser import IPv4Header, IPParser
|
| 13 |
+
from .logger import Logger, LogCategory
|
| 14 |
+
from .nat_engine import NATEngine
|
| 15 |
+
|
| 16 |
+
@dataclass
class ForwardSession:
    """Bookkeeping record for one forwarded flow (5-tuple plus counters)."""
    src_ip: str        # source address of the flow
    dst_ip: str        # destination address
    src_port: int      # source port (0 when the protocol has no ports)
    dst_port: int      # destination port (0 when the protocol has no ports)
    protocol: int      # IP protocol number (e.g. 6 = TCP, 17 = UDP)
    created_at: float  # event-loop time when the session was first seen
    last_seen: float   # event-loop time of the most recent packet
    bytes_in: int = 0  # bytes received for this flow
    bytes_out: int = 0  # bytes forwarded outbound for this flow
|
| 27 |
+
|
| 28 |
+
class TrafficForwarder:
    """Handles IP packet forwarding with NAT and per-flow session tracking."""

    def __init__(self, logger: Logger, nat_engine: NATEngine):
        self.logger = logger
        self.nat_engine = nat_engine
        # (src_ip, dst_ip, protocol, src_port, dst_port) -> session record
        self.sessions: Dict[Tuple[str, str, int, int, int], ForwardSession] = {}
        # (src_ip, src_port, dst_ip, dst_port) -> {'state', 'seq', 'ack', 'last_seen'}
        self.tcp_connections = {}
        # (src_ip, src_port, dst_ip, dst_port) -> last-seen loop time
        self.udp_endpoints = {}

    async def forward_packet(self, data: bytes, client_ip: str) -> Optional[bytes]:
        """NAT-translate an outbound IP packet, track its session, forward it.

        Returns the translated packet bytes, or None on drop/error.
        """
        try:
            # Parse IP header.
            # NOTE(review): tcp_engine.py unpacks parse_ipv4_header() as a
            # (header, payload) tuple while this module treats the return
            # value as a single header object -- one of the two usages must
            # be wrong; verify against core/ip_parser.py.
            ip_header = IPParser.parse_ipv4_header(data)

            # Apply NAT; a None result means the engine dropped the packet
            translated_packet = self.nat_engine.translate_outbound(data)
            if not translated_packet:
                return None

            # Track session keyed by the 5-tuple
            session_key = (
                ip_header.src_ip,
                ip_header.dst_ip,
                ip_header.protocol,
                self._get_src_port(data[ip_header.ihl*4:], ip_header.protocol),
                self._get_dst_port(data[ip_header.ihl*4:], ip_header.protocol)
            )

            if session_key not in self.sessions:
                self.sessions[session_key] = ForwardSession(
                    src_ip=ip_header.src_ip,
                    dst_ip=ip_header.dst_ip,
                    src_port=session_key[3],
                    dst_port=session_key[4],
                    protocol=ip_header.protocol,
                    created_at=asyncio.get_running_loop().time(),
                    last_seen=asyncio.get_running_loop().time()
                )

            session = self.sessions[session_key]
            session.last_seen = asyncio.get_running_loop().time()
            session.bytes_out += len(data)

            # Forward based on transport protocol
            if ip_header.protocol == socket.IPPROTO_TCP:
                return await self._forward_tcp(translated_packet, session)
            elif ip_header.protocol == socket.IPPROTO_UDP:
                return await self._forward_udp(translated_packet, session)
            else:
                # Forward other IP protocols directly
                return translated_packet

        except Exception as e:
            self.logger.error(LogCategory.SYSTEM, "traffic_forwarder", f"Error forwarding packet: {e}")
            return None

    async def _forward_tcp(self, data: bytes, session: ForwardSession) -> Optional[bytes]:
        """Track coarse TCP connection state and forward the packet."""
        try:
            ip_header = IPParser.parse_ipv4_header(data)
            tcp_header_offset = ip_header.ihl * 4

            if len(data) < tcp_header_offset + 20:  # TCP header is at least 20 bytes
                return None

            # Parse the fixed part of the TCP header
            tcp_header = data[tcp_header_offset:tcp_header_offset + 20]
            flags = tcp_header[13]
            seq_num = struct.unpack('!I', tcp_header[4:8])[0]
            ack_num = struct.unpack('!I', tcp_header[8:12])[0]

            conn_key = (session.src_ip, session.src_port, session.dst_ip, session.dst_port)
            loop_time = asyncio.get_running_loop().time()

            # Coarse TCP state tracking
            if flags & 0x02:  # SYN
                if conn_key not in self.tcp_connections:
                    self.tcp_connections[conn_key] = {
                        'state': 'SYN_SENT',
                        'seq': seq_num,
                        'ack': 0,
                        # Record creation time; without this, cleanup() saw
                        # age = loop_time - 0 and evicted every tracked
                        # connection on each pass.
                        'last_seen': loop_time,
                    }
            elif flags & 0x01:  # FIN
                if conn_key in self.tcp_connections:
                    self.tcp_connections[conn_key]['state'] = 'FIN_WAIT'
            elif flags & 0x04:  # RST
                if conn_key in self.tcp_connections:
                    del self.tcp_connections[conn_key]

            # Refresh activity so cleanup() only evicts genuinely idle flows
            if conn_key in self.tcp_connections:
                self.tcp_connections[conn_key]['last_seen'] = loop_time

            # Forward the packet
            return await self._send_packet(data)

        except Exception as e:
            self.logger.error(LogCategory.SYSTEM, "traffic_forwarder", f"Error forwarding TCP: {e}")
            return None

    async def _forward_udp(self, data: bytes, session: ForwardSession) -> Optional[bytes]:
        """Track the UDP endpoint's last activity and forward the packet."""
        try:
            ip_header = IPParser.parse_ipv4_header(data)
            udp_header_offset = ip_header.ihl * 4

            if len(data) < udp_header_offset + 8:  # UDP header is 8 bytes
                return None

            # Track UDP endpoint activity for idle cleanup
            endpoint_key = (session.src_ip, session.src_port, session.dst_ip, session.dst_port)
            self.udp_endpoints[endpoint_key] = asyncio.get_running_loop().time()

            # Forward the packet
            return await self._send_packet(data)

        except Exception as e:
            self.logger.error(LogCategory.SYSTEM, "traffic_forwarder", f"Error forwarding UDP: {e}")
            return None

    async def _send_packet(self, data: bytes) -> Optional[bytes]:
        """Hand the packet back to the VPN server layer.

        Transmission is not implemented here; the packet is returned so
        the caller can put it on the wire.
        """
        try:
            # This is where you'd actually send the packet
            # For now, we'll just return it for the VPN server to handle
            return data

        except Exception as e:
            self.logger.error(LogCategory.SYSTEM, "traffic_forwarder", f"Error sending packet: {e}")
            return None

    def _get_src_port(self, transport_header: bytes, protocol: int) -> int:
        """Extract the source port (first 2 bytes of TCP/UDP header), else 0."""
        if len(transport_header) >= 2:
            return struct.unpack('!H', transport_header[0:2])[0]
        return 0

    def _get_dst_port(self, transport_header: bytes, protocol: int) -> int:
        """Extract the destination port (bytes 2-4 of TCP/UDP header), else 0."""
        if len(transport_header) >= 4:
            return struct.unpack('!H', transport_header[2:4])[0]
        return 0

    async def cleanup(self):
        """Evict TCP/UDP/session records that have been idle past their timeout."""
        current_time = asyncio.get_running_loop().time()

        # Clean TCP connections (5 minute timeout; last_seen is now
        # maintained by _forward_tcp)
        for key, conn in list(self.tcp_connections.items()):
            if current_time - conn.get('last_seen', 0) > 300:
                del self.tcp_connections[key]

        # Clean UDP endpoints (1 minute timeout)
        for key, last_seen in list(self.udp_endpoints.items()):
            if current_time - last_seen > 60:
                del self.udp_endpoints[key]

        # Clean sessions (5 minute timeout)
        for key, session in list(self.sessions.items()):
            if current_time - session.last_seen > 300:
                del self.sessions[key]
|