Spaces:
Sleeping
Sleeping
File size: 3,983 Bytes
b610d23 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 |
#!/usr/bin/env python3
"""
Integration test for NASA Solar Image Downloader components.
This script tests the URL generator, storage organizer, and image fetcher together.
"""
import sys
from pathlib import Path
from datetime import datetime, timedelta
# Add src to Python path
sys.path.insert(0, str(Path(__file__).parent / "src"))
from src.downloader.url_generator import URLGenerator
from src.storage.storage_organizer import StorageOrganizer
from src.downloader.image_fetcher import ImageFetcher, DownloadManager
from src.models import DownloadTask
def test_integration():
    """Run the URL generator, storage organizer, and image fetcher together.

    Generates yesterday's image URLs, probes the first few against NASA's
    servers, downloads any that exist into ``test_data``, and prints a
    summary.

    Returns:
        bool: True if at least one image was downloaded successfully.
    """
    print("🚀 NASA Solar Image Downloader - Integration Test")
    print("=" * 50)

    # Initialize components
    url_gen = URLGenerator()
    storage = StorageOrganizer("test_data")
    fetcher = ImageFetcher(rate_limit_delay=0.5)  # Be respectful to NASA servers
    manager = DownloadManager(fetcher, storage)
    print("✅ Components initialized")

    # Test URL generation for yesterday — today's images may not exist yet
    test_date = datetime.now() - timedelta(days=1)
    daily_urls = url_gen.generate_daily_urls(test_date)
    print(f"✅ Generated {len(daily_urls)} URLs for {test_date.strftime('%Y-%m-%d')}")

    # Only probe the first 3 URLs to keep load on NASA servers low
    test_urls = daily_urls[:3]
    print(f"🔍 Testing {len(test_urls)} URLs...")

    successful_downloads = 0
    for i, url in enumerate(test_urls, 1):
        print(f"\n📥 Testing URL {i}/{len(test_urls)}: {url}")

        # Validate URL format before touching the network
        if not url_gen.validate_url(url):
            print("❌ Invalid URL format")
            continue

        # Skip URLs whose image is not present on the server (normal for
        # time slots with no published image)
        if not fetcher.check_image_exists(url):
            print("⚠️ Image doesn't exist (404) - this is normal")
            continue

        # Extract metadata and derive the local filename from the URL
        date, time_seq = url_gen.extract_metadata_from_url(url)
        filename = Path(url).name
        print(f"📅 Date: {date}, Time: {time_seq}, File: {filename}")

        # Create a download task targeting the organized storage path
        local_path = storage.get_local_path(filename, date)
        task = DownloadTask(url=url, target_path=local_path)

        # Attempt download
        success = manager.download_and_save(task)
        if success:
            print("✅ Download successful!")
            successful_downloads += 1
            # Verify the file actually landed on disk
            if storage.file_exists(filename, date):
                file_size = storage.get_file_size(filename, date)
                print(f"📁 File saved: {file_size} bytes")
            else:
                print("❌ File not found after download")
        else:
            print(f"❌ Download failed: {task.error_message}")

    print("\n📊 Summary:")
    print(f"  • URLs tested: {len(test_urls)}")
    print(f"  • Successful downloads: {successful_downloads}")
    print(f"  • Total downloads: {manager.get_download_count()}")
    print(f"  • Failed tasks: {len(manager.get_failed_tasks())}")

    # List downloaded files
    if successful_downloads > 0:
        print("\n📂 Downloaded files:")
        for image in storage.list_local_images(test_date):
            print(f"  • {image}")

    print("\n🎉 Integration test completed!")
    return successful_downloads > 0
if __name__ == "__main__":
    # Script entry point: run the integration test and translate the result
    # into a process exit code. A run with zero downloads still exits 0,
    # because missing recent images is an expected (non-failure) condition.
    try:
        success = test_integration()
        if success:
            print("✅ Integration test PASSED")
            sys.exit(0)
        else:
            print("⚠️ No images were downloaded (this may be normal if no recent images exist)")
            sys.exit(0)
    except Exception as e:
        # Any unexpected error fails the test run with a full traceback
        print(f"❌ Integration test FAILED: {e}")
        import traceback
        traceback.print_exc()
        sys.exit(1)