# generate_report.py
"""
Generate performance report comparing Lab 1 vs Lab 5
"""
import time

import numpy as np
import matplotlib.pyplot as plt
import pandas as pd
from PIL import Image

# Import both implementations
from mosaic_generator import SimpleMosaicGenerator  # Baseline
from mosaic_generator import MosaicBuilder, TileManager, ImageProcessor  # Optimized
def comprehensive_benchmark():
    """Run the comprehensive Lab 1 vs Lab 5 performance comparison.

    Benchmarks the baseline and optimized mosaic implementations over
    three image/grid configurations, prints per-run timings and a final
    results table, generates the comparison charts, and writes
    ``performance_results.csv``.

    Returns:
        pd.DataFrame: one row per configuration with columns
        'Image Size', 'Grid Size', 'Lab 1 Time (s)', 'Lab 5 Time (s)',
        and 'Speedup'.
    """
    print("=" * 60)
    print("LAB 5 PERFORMANCE REPORT GENERATOR")
    print("=" * 60)

    # Test configurations: (width, height, grid_size)
    configs = [
        (256, 256, 16),
        (512, 512, 32),
        (1024, 1024, 64),
    ]

    results = []
    for width, height, grid_size in configs:
        print(f"\nTesting {width}×{height} with {grid_size}×{grid_size} grid...")

        # Create test image (or load your sample)
        test_img = Image.new('RGB', (width, height))
        # test_img = Image.open('your_sample.jpg').resize((width, height))

        # Test Lab 1 (Baseline).
        # BUGFIX: use time.perf_counter() rather than time.time() — it is
        # the monotonic, high-resolution timer recommended for benchmarking,
        # whereas time.time() can jump if the system clock is adjusted.
        print(" Running baseline...")
        gen_baseline = SimpleMosaicGenerator('tiles/', grid_size=(grid_size, grid_size))
        start = time.perf_counter()
        mosaic_baseline = gen_baseline.create_mosaic(test_img)
        time_baseline = time.perf_counter() - start
        print(f" Baseline: {time_baseline:.3f}s")

        # Test Lab 5 (Optimized)
        print(" Running optimized...")
        tile_manager = TileManager('tiles/')
        processor = ImageProcessor()
        img_array = processor.from_pil(test_img)
        builder = MosaicBuilder(tile_manager, grid_size=(grid_size, grid_size))
        start = time.perf_counter()
        mosaic_optimized, metadata = builder.create_mosaic(img_array)
        time_optimized = time.perf_counter() - start
        print(f" Optimized: {time_optimized:.3f}s")

        # BUGFIX: guard against a zero elapsed time on very fast runs,
        # which previously raised ZeroDivisionError.
        speedup = time_baseline / time_optimized if time_optimized > 0 else float('inf')
        print(f" Speedup: {speedup:.1f}×")

        results.append({
            'Image Size': f"{width}×{height}",
            'Grid Size': f"{grid_size}×{grid_size}",
            'Lab 1 Time (s)': time_baseline,
            'Lab 5 Time (s)': time_optimized,
            'Speedup': speedup,
        })

    # Create DataFrame
    df = pd.DataFrame(results)
    print("\n" + "=" * 60)
    print("FINAL RESULTS")
    print("=" * 60)
    print(df.to_string(index=False))

    # Generate visualizations
    create_visualizations(df)

    # Save to CSV
    df.to_csv('performance_results.csv', index=False)
    print("\n✓ Saved: performance_results.csv")
    return df
def create_visualizations(df):
    """Create the four-panel performance chart and save it to PNG.

    Args:
        df: results DataFrame with columns 'Image Size', 'Grid Size',
            'Lab 1 Time (s)', 'Lab 5 Time (s)', and 'Speedup'.

    Side effects:
        Writes ``performance_report.png`` (300 dpi) and opens an
        interactive window via ``plt.show()``.
    """
    fig = plt.figure(figsize=(16, 10))

    # Chart 1: Time Comparison (Bar Chart)
    ax1 = plt.subplot(2, 2, 1)
    x = np.arange(len(df))
    width = 0.35
    ax1.bar(x - width/2, df['Lab 1 Time (s)'], width, label='Lab 1 (Baseline)', color='#e74c3c')
    ax1.bar(x + width/2, df['Lab 5 Time (s)'], width, label='Lab 5 (Optimized)', color='#2ecc71')
    ax1.set_xlabel('Configuration', fontsize=12)
    ax1.set_ylabel('Time (seconds)', fontsize=12)
    ax1.set_title('Execution Time Comparison', fontsize=14, fontweight='bold')
    ax1.set_xticks(x)
    ax1.set_xticklabels(df['Image Size'], rotation=45)
    ax1.legend()
    ax1.grid(True, alpha=0.3)

    # Chart 2: Speedup Factors (Bar Chart)
    ax2 = plt.subplot(2, 2, 2)
    bars = ax2.bar(df['Image Size'], df['Speedup'], color='#3498db')
    # Add 20x target line
    ax2.axhline(y=20, color='red', linestyle='--', label='Target (20×)')
    # Add value labels on bars
    for bar in bars:
        height = bar.get_height()
        ax2.text(bar.get_x() + bar.get_width()/2., height,
                 f'{height:.1f}×',
                 ha='center', va='bottom', fontweight='bold')
    ax2.set_xlabel('Configuration', fontsize=12)
    ax2.set_ylabel('Speedup Factor', fontsize=12)
    ax2.set_title('Speedup Achieved', fontsize=14, fontweight='bold')
    # BUGFIX: the original called set_xticklabels() without first fixing
    # the tick locations with set_xticks(), which triggers a matplotlib
    # warning and can mislabel ticks. The categorical bar() call above
    # already labels the ticks, so only the rotation needs to be applied.
    ax2.tick_params(axis='x', rotation=45)
    ax2.legend()
    ax2.grid(True, alpha=0.3)

    # Chart 3: Time Comparison (Log Scale)
    ax3 = plt.subplot(2, 2, 3)
    ax3.plot(df['Image Size'], df['Lab 1 Time (s)'],
             marker='o', linewidth=2, markersize=8,
             label='Lab 1 (Baseline)', color='#e74c3c')
    ax3.plot(df['Image Size'], df['Lab 5 Time (s)'],
             marker='s', linewidth=2, markersize=8,
             label='Lab 5 (Optimized)', color='#2ecc71')
    ax3.set_yscale('log')
    ax3.set_xlabel('Image Size', fontsize=12)
    ax3.set_ylabel('Time (seconds, log scale)', fontsize=12)
    ax3.set_title('Scaling Behavior (Log Scale)', fontsize=14, fontweight='bold')
    ax3.legend()
    ax3.grid(True, alpha=0.3, which='both')

    # Chart 4: Processing Throughput
    ax4 = plt.subplot(2, 2, 4)
    # Calculate cells per second; 'Grid Size' strings look like "16×16",
    # so the first field squared is the total cell count.
    cells = [int(gs.split('×')[0])**2 for gs in df['Grid Size']]
    throughput_lab1 = [c/t for c, t in zip(cells, df['Lab 1 Time (s)'])]
    throughput_lab5 = [c/t for c, t in zip(cells, df['Lab 5 Time (s)'])]
    x = np.arange(len(df))
    width = 0.35
    ax4.bar(x - width/2, throughput_lab1, width,
            label='Lab 1', color='#e74c3c', alpha=0.7)
    ax4.bar(x + width/2, throughput_lab5, width,
            label='Lab 5', color='#2ecc71', alpha=0.7)
    ax4.set_xlabel('Configuration', fontsize=12)
    ax4.set_ylabel('Cells/second', fontsize=12)
    ax4.set_title('Processing Throughput', fontsize=14, fontweight='bold')
    ax4.set_xticks(x)
    ax4.set_xticklabels(df['Image Size'], rotation=45)
    ax4.legend()
    ax4.grid(True, alpha=0.3)

    plt.tight_layout()
    plt.savefig('performance_report.png', dpi=300, bbox_inches='tight')
    print("✓ Saved: performance_report.png")
    plt.show()
def generate_summary_stats(df):
    """Print summary statistics and report whether the 20× target is met.

    Args:
        df: results DataFrame containing 'Speedup', 'Lab 1 Time (s)'
            and 'Lab 5 Time (s)' columns.

    Returns:
        True (numpy bool) when every configuration reached a 20× speedup,
        otherwise False.
    """
    separator = "=" * 60
    print("\n" + separator)
    print("SUMMARY STATISTICS")
    print(separator)

    speedups = df['Speedup']
    print(f"\nAverage Speedup: {speedups.mean():.1f}×")
    print(f"Minimum Speedup: {speedups.min():.1f}×")
    print(f"Maximum Speedup: {speedups.max():.1f}×")

    time_saved = df['Lab 1 Time (s)'].sum() - df['Lab 5 Time (s)'].sum()
    print(f"\nTotal Time Saved: {time_saved:.1f}s")

    target_met = (speedups >= 20).all()
    message = (
        "\n✓ SUCCESS: All configurations meet 20× speedup target!"
        if target_met
        else "\n⚠ Some configurations below 20× target"
    )
    print(message)
    return target_met
if __name__ == "__main__":
    # Run the full benchmark, then print the summary statistics.
    results_df = comprehensive_benchmark()
    generate_summary_stats(results_df)

    banner = "=" * 60
    print("\n" + banner)
    print("NEXT STEPS")
    print(banner)
    print("""
1. Review performance_report.png for visualizations
2. Use performance_results.csv for your report
3. Copy key statistics to your PDF report
4. Add explanations of optimizations
5. Include profiling screenshots
""")