"""
S3 Backup Manager

Handles automatic backup of SQLite database to S3-compatible storage.
Provides debouncing to prevent excessive S3 uploads and background
threading for non-blocking backup operations.
"""

import os
import sqlite3
import hashlib
import threading
import time
import logging
import platform
from datetime import datetime, timezone
from typing import Optional
from dataclasses import dataclass

import boto3
from botocore.exceptions import ClientError

from ..utils import s3_logger
from .s3_config import (
    S3Config,
    S3BackupError,
    S3CredentialsError,
    S3BucketNotFoundError,
    S3ConnectionError,
    DatabaseCorruptedError
)


logger = logging.getLogger(__name__)


@dataclass
class BackupMetadata:
    """Metadata for a backup file in S3."""
    s3_key: str
    last_modified: datetime
    size_bytes: int
    checksum_sha256: Optional[str] = None


class BackupManager:
    """
    Manages automatic backup of SQLite database to S3.
    
    Features:
    - Non-blocking backup requests with debouncing
    - Background thread execution
    - Retry logic with exponential backoff
    - Checksum validation
    - Graceful error handling
    """
    
    def __init__(self, config: S3Config, db_path: str):
        """
        Initialize the backup manager.
        
        Args:
            config: S3 configuration object
            db_path: Absolute path to SQLite database file
        
        Raises:
            ValueError: If db_path does not exist or is not readable
        """
        if not os.path.exists(db_path):
            raise ValueError(f"Database file does not exist: {db_path}")
        
        if not os.access(db_path, os.R_OK):
            raise ValueError(f"Database file is not readable: {db_path}")
        
        self.config = config
        self.db_path = db_path
        self.last_backup_request = None
        self.backup_lock = threading.Lock()
        self._debounce_thread = None
        
        if config.enabled:
            self.s3_client = config.create_s3_client()
            logger.info(f"BackupManager initialized for {db_path}")
        else:
            self.s3_client = None
            logger.info("BackupManager initialized but S3 is disabled")
    
    def request_backup(self) -> None:
        """
        Non-blocking method to request a database backup.
        
        Uses debouncing to prevent excessive S3 uploads. Multiple requests
        within the debounce period are collapsed into a single backup.
        
        Side Effects:
            - Starts background thread if not already running
            - Updates last_backup_request timestamp
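        
        Example (illustrative, with debounce_seconds=30): three calls at
        t=0s, t=5s and t=10s collapse into a single backup, executed by
        the background thread once a full 30s has passed with no further
        requests.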
        """
        if not self.config.enabled:
            s3_logger.backup_skip_reason('s3_not_enabled', 'backup_requested')
            return
        
        with self.backup_lock:
            self.last_backup_request = time.time()
            
            # Start debounce thread if not already running
            if self._debounce_thread is None or not self._debounce_thread.is_alive():
                self._debounce_thread = threading.Thread(
                    target=self._debounced_backup,
                    daemon=True
                )
                self._debounce_thread.start()
                logger.debug("Started debounce thread for backup")
    
    def _debounced_backup(self) -> None:
        """
        Wait for the debounce period, then execute the backup.
        
        Runs in a background thread. Because request_backup() never starts
        a second thread while this one is alive, this thread must keep
        waiting when newer requests arrive during its sleep; exiting early
        would leave those requests unserviced.
        """
        debounce_seconds = self.config.debounce_seconds
        
        while True:
            # Wait for the debounce period
            time.sleep(debounce_seconds)
            
            # Check if another request came in during the sleep; if so,
            # extend the wait rather than dropping the backup, since no
            # other thread will execute it
            with self.backup_lock:
                if time.time() - self.last_backup_request >= debounce_seconds:
                    break
            logger.debug("Newer backup request pending - extending debounce wait")
        
        # Execute the backup
        self._execute_backup()
    
    def execute_backup_now(self) -> bool:
        """
        Synchronous method to execute backup immediately, bypassing debounce.
        
        Returns:
            True if backup succeeded, False if it failed
        
        Raises:
            S3CredentialsError: Invalid S3 credentials
            S3BucketNotFoundError: Bucket does not exist
            DatabaseCorruptedError: Source database failed integrity check
        """
        if not self.config.enabled:
            logger.warning("Backup requested but S3 is disabled")
            return False
        
        return self._execute_backup()
    
    def _execute_backup(self) -> bool:
        """
        Execute the actual backup operation.
        
        Process:
        1. Check database integrity
        2. Create hot backup using sqlite3.backup()
        3. Calculate checksum
        4. Upload to S3 with retries
        5. Clean up temp files
        
        Returns:
            True if backup succeeded, False otherwise
        
        Raises:
            DatabaseCorruptedError: Source database failed integrity check
            S3CredentialsError: Invalid S3 credentials
            S3BucketNotFoundError: Bucket does not exist
        """
        start_time = time.time()
        db_size = os.path.getsize(self.db_path)
        
        s3_logger.backup_started(self.db_path, db_size)
        logger.info(f"Starting backup of {self.db_path} ({db_size} bytes)")
        
        temp_path = None
        
        try:
            # Validate source database integrity
            if not self._validate_database(self.db_path):
                raise DatabaseCorruptedError("Source database failed integrity check")
            
            # Create backup using sqlite3.backup() API
            temp_path = f"{self.db_path}.backup"
            self._create_hot_backup(temp_path)
            
            # Calculate checksum
            checksum = self._calculate_checksum(temp_path)
            
            # Upload to S3 with timestamp
            timestamp = datetime.now(timezone.utc).strftime('%Y-%m-%d-%H-%M-%S')
            s3_key = f"contacts-{timestamp}.db"
            
            if not self._upload_to_s3(temp_path, s3_key, checksum):
                return False
            
            # Success
            duration = time.time() - start_time
            upload_size = os.path.getsize(temp_path)
            s3_logger.backup_completed(duration, s3_key, upload_size)
            logger.info(f"Backup completed successfully: {s3_key} ({duration:.2f}s)")
            
            return True
            
        except DatabaseCorruptedError as e:
            s3_logger.backup_failed(str(e))
            logger.error(f"Backup failed - database corrupted: {e}")
            raise
            
        except (S3CredentialsError, S3BucketNotFoundError) as e:
            # Permanent S3 errors raised by _upload_to_s3; re-raise so
            # callers of execute_backup_now() see them, as documented
            s3_logger.backup_failed(str(e))
            logger.error(f"Backup failed - permanent S3 error: {e}")
            raise
            
        except Exception as e:
            s3_logger.backup_failed(str(e))
            logger.error(f"Backup failed: {e}", exc_info=True)
            return False
            
        finally:
            # Clean up temp file
            if temp_path and os.path.exists(temp_path):
                try:
                    os.remove(temp_path)
                    logger.debug(f"Cleaned up temp file: {temp_path}")
                except OSError as e:
                    logger.warning(f"Failed to clean up temp file: {e}")
    
    def _create_hot_backup(self, dest_path: str) -> None:
        """
        Create a hot backup of the database using sqlite3.backup() API.
        
        This method is safe to use while the database is being written to,
        as sqlite3.backup() handles concurrent access properly.
        
        Args:
            dest_path: Path where backup should be created
        """
        logger.debug(f"Creating hot backup to {dest_path}")
        
        # Connect to source database
        source_conn = sqlite3.connect(self.db_path)
        
        try:
            # Create destination connection
            dest_conn = sqlite3.connect(dest_path)
            
            try:
                # Execute hot backup
                source_conn.backup(dest_conn)
                logger.debug("Hot backup completed")
            finally:
                dest_conn.close()
        finally:
            source_conn.close()
    
    def _calculate_checksum(self, file_path: str) -> str:
        """
        Calculate SHA-256 checksum of a file.
        
        Args:
            file_path: Path to file
        
        Returns:
            Hexadecimal SHA-256 checksum string
        """
        sha256_hash = hashlib.sha256()
        
        with open(file_path, 'rb') as f:
            # Read in chunks for memory efficiency
            for chunk in iter(lambda: f.read(8192), b''):
                sha256_hash.update(chunk)
        
        checksum = sha256_hash.hexdigest()
        logger.debug(f"Calculated checksum: {checksum}")
        return checksum
    
    def _upload_to_s3(
        self,
        file_path: str,
        s3_key: str,
        checksum: str,
        max_retries: int = 3
    ) -> bool:
        """
        Upload file to S3 with retry logic and exponential backoff.
        
        Args:
            file_path: Path to file to upload
            s3_key: S3 object key
            checksum: SHA-256 checksum to store in metadata
            max_retries: Maximum number of retry attempts
        
        Returns:
            True if upload succeeded, False otherwise
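        
        Backoff example (illustrative, max_retries=3): attempt 1 fails ->
        wait 1s, attempt 2 fails -> wait 2s, attempt 3 fails -> give up.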
        """
        file_size = os.path.getsize(file_path)
        
        for attempt in range(max_retries):
            try:
                logger.debug(f"Uploading to S3: {s3_key} (attempt {attempt + 1}/{max_retries})")
                
                # Upload with metadata
                with open(file_path, 'rb') as f:
                    self.s3_client.upload_fileobj(
                        f,
                        self.config.bucket,
                        s3_key,
                        ExtraArgs={
                            'Metadata': {
                                'sha256': checksum,
                                'source_host': platform.node(),
                                'db_version': sqlite3.sqlite_version
                            }
                        }
                    )
                
                logger.info(f"Upload successful: {s3_key} ({file_size} bytes)")
                return True
                
            except ClientError as e:
                error_code = e.response['Error']['Code']
                
                # Permanent errors - don't retry
                if error_code in ('NoSuchBucket', 'AccessDenied', 'InvalidAccessKeyId'):
                    s3_logger.backup_failed(error_code, attempt + 1, max_retries)
                    logger.error(f"Permanent S3 error: {error_code}")
                    
                    if error_code == 'NoSuchBucket':
                        raise S3BucketNotFoundError(f"Bucket not found: {self.config.bucket}")
                    raise S3CredentialsError(f"Invalid credentials: {error_code}")
                
                # Transient errors - retry with backoff
                if attempt < max_retries - 1:
                    backoff = 2 ** attempt  # 1s, then 2s with the default max_retries=3
                    logger.warning(
                        f"S3 upload failed (attempt {attempt + 1}/{max_retries}): {error_code}, "
                        f"retrying in {backoff}s"
                    )
                    time.sleep(backoff)
                else:
                    s3_logger.backup_failed(error_code, attempt + 1, max_retries)
                    logger.error(f"S3 upload failed after {max_retries} attempts: {error_code}")
                    return False
                    
            except Exception as e:
                s3_logger.backup_failed(str(e), attempt + 1, max_retries)
                logger.error(f"Unexpected error during S3 upload: {e}", exc_info=True)
                return False
        
        return False
    
    def _validate_database(self, db_path: str) -> bool:
        """
        Validate SQLite database integrity.
        
        Args:
            db_path: Path to database file
        
        Returns:
            True if database passes integrity check, False otherwise
        """
        try:
            conn = sqlite3.connect(db_path)
            try:
                cursor = conn.cursor()
                cursor.execute("PRAGMA integrity_check")
                result = cursor.fetchone()[0]
            finally:
                # Close the connection even if the check raises
                conn.close()
            
            if result == 'ok':
                logger.debug(f"Database integrity check passed: {db_path}")
                return True
            else:
                logger.error(f"Database integrity check failed: {result}")
                return False
                
        except Exception as e:
            logger.error(f"Database validation failed: {e}")
            return False
    
    def get_latest_backup(self) -> Optional[BackupMetadata]:
        """
        Query S3 for the latest backup file.
        
        Returns:
            BackupMetadata object with latest backup info, or None if no backups found
        
        Raises:
            S3ConnectionError: Network or S3 service error
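        
        Example (illustrative):
            meta = manager.get_latest_backup()
            if meta is not None:
                logger.info(f"Latest: {meta.s3_key} ({meta.size_bytes} bytes)")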
        """
        if not self.config.enabled:
            return None
        
        try:
            logger.debug(f"Querying S3 for latest backup in bucket: {self.config.bucket}")
            
            response = self.s3_client.list_objects_v2(
                Bucket=self.config.bucket,
                Prefix='contacts-'
            )
            
            if 'Contents' not in response or len(response['Contents']) == 0:
                logger.info("No backups found in S3")
                return None
            
            # Find latest by LastModified
            latest = max(response['Contents'], key=lambda x: x['LastModified'])
            
            # Get metadata if available
            try:
                head_response = self.s3_client.head_object(
                    Bucket=self.config.bucket,
                    Key=latest['Key']
                )
                checksum = head_response.get('Metadata', {}).get('sha256')
            except Exception as e:
                logger.warning(f"Failed to get metadata for {latest['Key']}: {e}")
                checksum = None
            
            metadata = BackupMetadata(
                s3_key=latest['Key'],
                last_modified=latest['LastModified'],
                size_bytes=latest['Size'],
                checksum_sha256=checksum
            )
            
            logger.info(f"Latest backup: {metadata.s3_key} ({metadata.last_modified})")
            return metadata
            
        except ClientError as e:
            error_code = e.response['Error']['Code']
            logger.error(f"S3 error querying backups: {error_code}")
            raise S3ConnectionError(f"S3 error: {error_code}") from e
            
        except Exception as e:
            logger.error(f"Unexpected error querying backups: {e}")
            raise S3ConnectionError(f"Error querying backups: {e}") from e