DrRORAL committed on
Commit
06e1b4b
·
verified ·
1 Parent(s): 723881b

[validation] Upload validation examples - 2026-03-12T21:57:46.931478

Browse files
validation_examples/all_epochs_metrics.json ADDED
@@ -0,0 +1,26 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "2": {
3
+ "epoch": 2,
4
+ "num_samples": 5,
5
+ "psnr": {
6
+ "mean": 8.42457007356943,
7
+ "std": 2.0490775580319913,
8
+ "min": 5.580395004140195,
9
+ "max": 10.949018083124695
10
+ },
11
+ "ssim": {
12
+ "mean": 0.19272444483279338,
13
+ "std": 0.06094948968742925,
14
+ "min": 0.13446655088904022,
15
+ "max": 0.27528859925476507
16
+ },
17
+ "lpips": {
18
+ "mean": 0.7371994853019714,
19
+ "std": 0.040124533781707016
20
+ },
21
+ "clip_similarity": {
22
+ "mean": 0.29375,
23
+ "std": 0.019739128114254125
24
+ }
25
+ }
26
+ }
validation_examples/epoch_2/metrics.json ADDED
@@ -0,0 +1,24 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "epoch": 2,
3
+ "num_samples": 5,
4
+ "psnr": {
5
+ "mean": 8.42457007356943,
6
+ "std": 2.0490775580319913,
7
+ "min": 5.580395004140195,
8
+ "max": 10.949018083124695
9
+ },
10
+ "ssim": {
11
+ "mean": 0.19272444483279338,
12
+ "std": 0.06094948968742925,
13
+ "min": 0.13446655088904022,
14
+ "max": 0.27528859925476507
15
+ },
16
+ "lpips": {
17
+ "mean": 0.7371994853019714,
18
+ "std": 0.040124533781707016
19
+ },
20
+ "clip_similarity": {
21
+ "mean": 0.29375,
22
+ "std": 0.019739128114254125
23
+ }
24
+ }
validation_examples/epoch_2/sample_0000.png ADDED

Git LFS Details

  • SHA256: 3ca2504f977c2d4d80282e4978881312485faa94de64541a78b462b22763e263
  • Pointer size: 132 Bytes
  • Size of remote file: 1.12 MB
validation_examples/metrics_across_epochs.png ADDED

Git LFS Details

  • SHA256: 5024568e8837a340db6f332502407ab3abe5c289796c4393ab55519c4b12c846
  • Pointer size: 131 Bytes
  • Size of remote file: 327 kB
validation_examples/test_epoch_validation.py ADDED
@@ -0,0 +1,134 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #!/usr/bin/env python3
2
+ """
3
+ Quick test for validate_epochs.py
4
+
5
+ Tests basic functionality without full validation:
6
+ 1. Import check
7
+ 2. Config loading
8
+ 3. Dataset loading
9
+ 4. Checkpoint detection
10
+ """
11
+
12
+ import sys
13
+ from pathlib import Path
14
+
15
+ print("=" * 70)
16
+ print("🧪 Testing Epoch Validation Pipeline")
17
+ print("=" * 70)
18
+
19
+ # Test 1: Import check
20
+ print("\n[1/4] Testing imports...")
21
+ try:
22
+ sys.path.insert(0, str(Path(__file__).parent))
23
+ sys.path.insert(0, str(Path(__file__).parent / "src"))
24
+
25
+ from validate_epochs import EpochValidator
26
+ from configs.config import get_default_config
27
+ from datasets.sketchy_dataset import SketchyDataset
28
+ print(" ✅ All imports successful")
29
+ except Exception as e:
30
+ print(f" ❌ Import failed: {e}")
31
+ sys.exit(1)
32
+
33
+ # Test 2: Config loading
34
+ print("\n[2/4] Testing config...")
35
+ try:
36
+ config = get_default_config()
37
+ print(f" ✅ Config loaded")
38
+ print(f" - Image size: {config['data'].image_size}")
39
+ print(f" - Pretrained model: {config['model'].pretrained_model_name}")
40
+ except Exception as e:
41
+ print(f" ❌ Config failed: {e}")
42
+ sys.exit(1)
43
+
44
+ # Test 3: Dataset check
45
+ print("\n[3/4] Testing dataset...")
46
+ try:
47
+ import os
48
+ dataset_root = "/workspace/sketchy"
49
+
50
+ if not os.path.exists(dataset_root):
51
+ print(f" ⚠️ Dataset not found at {dataset_root}")
52
+ dataset_root = "/root/Dual-Stage-Controllable-Diffusion-with-Adaptive-Modality-Fusion/sketchy"
53
+
54
+ if os.path.exists(dataset_root):
55
+ dataset = SketchyDataset(
56
+ root_dir=dataset_root,
57
+ split='test',
58
+ image_size=config['data'].image_size,
59
+ augment=False
60
+ )
61
+ print(f" ✅ Dataset loaded: {len(dataset)} samples")
62
+ else:
63
+ print(f" ⚠️ Dataset not available (will be needed for actual validation)")
64
+ except Exception as e:
65
+ print(f" ⚠️ Dataset check skipped: {e}")
66
+
67
+ # Test 4: Checkpoint detection
68
+ print("\n[4/4] Testing checkpoint detection...")
69
+ try:
70
+ # Check local checkpoints
71
+ local_checkpoint_dir = Path("/root/checkpoints/stage1")
72
+ if local_checkpoint_dir.exists():
73
+ checkpoints = list(local_checkpoint_dir.glob("epoch_*.pt"))
74
+ if checkpoints:
75
+ print(f" ✅ Found {len(checkpoints)} local checkpoints:")
76
+ for cp in sorted(checkpoints)[:5]: # Show first 5
77
+ print(f" - {cp.name}")
78
+ if len(checkpoints) > 5:
79
+ print(f" ... and {len(checkpoints) - 5} more")
80
+ else:
81
+ print(f" ⚠️ No epoch checkpoints found locally")
82
+ else:
83
+ print(f" ⚠️ Local checkpoint directory not found")
84
+
85
+ # Try HuggingFace detection
86
+ print("\n Testing HuggingFace checkpoint detection...")
87
+ try:
88
+ from huggingface_hub import list_repo_files
89
+
90
+ # Replace with actual repo ID
91
+ hf_repo_id = "DrRORAL/ragaf-diffusion-checkpoints"
92
+
93
+ files = list_repo_files(hf_repo_id)
94
+ epoch_files = [f for f in files if 'epoch' in f.lower() and f.endswith('.pt')]
95
+
96
+ if epoch_files:
97
+ print(f" ✅ Found {len(epoch_files)} checkpoint files on HuggingFace:")
98
+ for f in sorted(epoch_files)[:5]:
99
+ print(f" - {f}")
100
+ if len(epoch_files) > 5:
101
+ print(f" ... and {len(epoch_files) - 5} more")
102
+ else:
103
+ print(f" ⚠️ No epoch checkpoints found on HuggingFace")
104
+
105
+ except Exception as e:
106
+ print(f" ⚠️ HuggingFace check skipped: {e}")
107
+
108
+ except Exception as e:
109
+ print(f" ⚠️ Checkpoint detection failed: {e}")
110
+
111
+ # Summary
112
+ print("\n" + "=" * 70)
113
+ print("📊 Test Summary")
114
+ print("=" * 70)
115
+ print("""
116
+ ✅ Basic functionality working!
117
+
118
+ To run actual validation:
119
+ python validate_epochs.py --num_samples 20
120
+
121
+ Options:
122
+ --hf_repo HuggingFace repo ID (default: DrRORAL/ragaf-diffusion-checkpoints)
123
+ --dataset_root Path to Sketchy dataset (default: /workspace/sketchy)
124
+ --epochs Specific epochs to validate (default: all)
125
+ --num_samples Samples per epoch (default: 50)
126
+ --output_dir Output directory (default: validation_results)
127
+ --guidance_scale Guidance scale (default: 2.5)
128
+
129
+ Quick start:
130
+ python validate_epochs.py --num_samples 10 # Quick test
131
+
132
+ See docs/EPOCH_VALIDATION_GUIDE.md for complete documentation.
133
+ """)
134
+ print("=" * 70)
validation_examples/test_validation_quick.py ADDED
@@ -0,0 +1,122 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
#!/usr/bin/env python3
"""
Quick validation test - simplified version for debugging

Runs a seven-step end-to-end smoke test of the Stage-1 pipeline:
config -> dataset -> sample -> model -> checkpoint -> pipeline ->
single guided generation. Prints progress for each step and exits with
status 1 as soon as a step fails.
"""

import sys
from pathlib import Path

import torch


def main():
    """Run the end-to-end smoke test of the Stage-1 diffusion pipeline."""
    # Make the project root and src/ importable before touching project
    # modules; doing this inside main() keeps the module import-safe.
    sys.path.insert(0, str(Path(__file__).parent))
    sys.path.insert(0, str(Path(__file__).parent / "src"))

    from models.stage1_diffusion import Stage1SketchGuidedDiffusion, Stage1DiffusionPipeline
    from datasets.sketchy_dataset import SketchyDataset
    from configs.config import get_default_config

    print("=" * 80)
    print("Quick Validation Test")
    print("=" * 80)

    # Step 1: config supplies the dataset root and image size used below.
    print("\n1. Loading config...")
    config = get_default_config()
    print(f" Dataset root: {config['data'].sketchy_root}")
    print(f" Image size: {config['data'].image_size}")

    # Step 2: dataset (test split, no augmentation for reproducibility).
    print("\n2. Loading dataset...")
    dataset = SketchyDataset(
        root_dir=config['data'].sketchy_root,
        split='test',
        image_size=config['data'].image_size,
        augment=False,
    )
    print(f" Total samples: {len(dataset)}")

    # Step 3: load one sample and echo its fields.
    print("\n3. Loading one sample...")
    data = dataset[0]
    print(f" Sketch shape: {data['sketch'].shape}")
    print(f" Photo shape: {data['photo'].shape}")
    print(f" Prompt: {data['text_prompt']}")
    print(f" Category: {data['category']}")

    # Step 4: construct the model on GPU when available.
    print("\n4. Loading model...")
    device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
    print(f" Device: {device}")

    model_config = config['model']
    print(f" Model name: {model_config.pretrained_model_name}")
    print(f" Sketch encoder channels: {model_config.sketch_encoder_channels}")

    try:
        model = Stage1SketchGuidedDiffusion(
            pretrained_model_name=model_config.pretrained_model_name,
            sketch_encoder_channels=model_config.sketch_encoder_channels,
            freeze_base_unet=model_config.freeze_stage1_unet,
            use_lora=model_config.use_lora,
            lora_rank=model_config.lora_rank,
        ).to(device)
        print(" ✅ Model created successfully")
    except Exception as e:
        print(f" ❌ Model creation failed: {e}")
        sys.exit(1)

    # Step 5: load the final checkpoint's weights into the model.
    print("\n5. Loading checkpoint...")
    checkpoint_path = "/root/checkpoints/stage1/final.pt"
    try:
        # NOTE(review): torch.load without weights_only=True unpickles
        # arbitrary objects -- only load checkpoints from trusted sources.
        checkpoint = torch.load(checkpoint_path, map_location=device)
        model.load_state_dict(checkpoint['model_state_dict'])
        model.eval()
        print(" ✅ Checkpoint loaded")
    except Exception as e:
        print(f" ❌ Checkpoint loading failed: {e}")
        sys.exit(1)

    # Step 6: wrap the model in the inference pipeline.
    print("\n6. Creating pipeline...")
    try:
        pipeline = Stage1DiffusionPipeline(
            model=model,
            num_inference_steps=20,  # Reduced for testing
            guidance_scale=2.5,
            device=device,
        )
        print(" ✅ Pipeline created")
    except Exception as e:
        print(f" ❌ Pipeline creation failed: {e}")
        sys.exit(1)

    # Step 7: one fixed-seed generation from the sample's sketch + prompt.
    print("\n7. Testing generation...")
    sketch = data['sketch'].unsqueeze(0)  # Add batch dimension
    prompt = data['text_prompt']
    img_size = config['data'].image_size  # Use configured image size

    print(f" Sketch input shape: {sketch.shape}")
    print(f" Prompt: {prompt}")
    print(f" Image size: {img_size}")

    try:
        with torch.no_grad():
            generated = pipeline.generate(
                sketch=sketch,
                text_prompt=prompt,
                height=img_size,  # Use dataset image size
                width=img_size,  # Use dataset image size
                seed=42,
            )
        print(" ✅ Generation successful!")
        print(f" Generated shape: {generated.shape}")
    except Exception as e:
        print(f" ❌ Generation failed: {e}")
        import traceback
        traceback.print_exc()
        sys.exit(1)

    print("\n" + "=" * 80)
    print("✅ All tests passed!")
    print("=" * 80)


if __name__ == "__main__":
    main()
validation_examples/upload_summary.json ADDED
@@ -0,0 +1,40 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "timestamp": "2026-03-12T21:51:05.217893",
3
+ "repo_id": "DrRORAL/ragaf-diffusion-checkpoints",
4
+ "subfolder": "validation_examples",
5
+ "local_source": "/root/Dual-Stage-Controllable-Diffusion-with-Adaptive-Modality-Fusion/test_validation",
6
+ "files": [
7
+ {
8
+ "path": "all_epochs_metrics.json",
9
+ "size_mb": 0.0
10
+ },
11
+ {
12
+ "path": "epoch_2/metrics.json",
13
+ "size_mb": 0.0
14
+ },
15
+ {
16
+ "path": "epoch_2/sample_0000.png",
17
+ "size_mb": 1.07
18
+ },
19
+ {
20
+ "path": "metrics_across_epochs.png",
21
+ "size_mb": 0.31
22
+ },
23
+ {
24
+ "path": "test_epoch_validation.py",
25
+ "size_mb": 0.0
26
+ },
27
+ {
28
+ "path": "test_validation_quick.py",
29
+ "size_mb": 0.0
30
+ },
31
+ {
32
+ "path": "upload_summary.json",
33
+ "size_mb": 0.0
34
+ },
35
+ {
36
+ "path": "validation_report.html",
37
+ "size_mb": 0.0
38
+ }
39
+ ]
40
+ }
validation_examples/validation_report.html ADDED
@@ -0,0 +1,62 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+
2
+ <!DOCTYPE html>
3
+ <html>
4
+ <head>
5
+ <title>Stage-1 Validation Report</title>
6
+ <style>
7
+ body { font-family: Arial, sans-serif; margin: 40px; background: #f5f5f5; }
8
+ h1 { color: #333; border-bottom: 3px solid #4CAF50; padding-bottom: 10px; }
9
+ h2 { color: #555; margin-top: 30px; }
10
+ table { border-collapse: collapse; width: 100%; margin: 20px 0; background: white; }
11
+ th, td { border: 1px solid #ddd; padding: 12px; text-align: left; }
12
+ th { background-color: #4CAF50; color: white; }
13
+ tr:nth-child(even) { background-color: #f2f2f2; }
14
+ .best { background-color: #90EE90 !important; font-weight: bold; }
15
+ .metric-card { background: white; padding: 20px; margin: 20px 0;
16
+ border-radius: 8px; box-shadow: 0 2px 4px rgba(0,0,0,0.1); }
17
+ .images { display: flex; flex-wrap: wrap; gap: 10px; }
18
+ .images img { max-width: 300px; border: 1px solid #ddd; border-radius: 4px; }
19
+ </style>
20
+ </head>
21
+ <body>
22
+ <h1>🎨 Stage-1 Sketch-Guided Diffusion - Validation Report</h1>
23
+ <div class="metric-card">
24
+ <h2>📊 Metrics Across Epochs</h2>
25
+ <table>
26
+ <tr>
27
+ <th>Epoch</th>
28
+ <th>PSNR (dB)</th>
29
+ <th>SSIM</th>
30
+ <th>LPIPS</th>
31
+ <th>FID</th>
32
+ <th>CLIP Sim</th>
33
+ <th>Samples</th>
34
+ </tr>
35
+
36
+ <tr>
37
+ <td><strong>Epoch 2</strong></td>
38
+ <td class="best">8.42 ± 2.05</td>
39
+ <td class="best">0.1927 ± 0.0609</td>
40
+ <td>0.7371994853019714</td>
41
+ <td>N/A</td>
42
+ <td>0.29375</td>
43
+ <td>5</td>
44
+ </tr>
45
+
46
+ </table>
47
+ </div>
48
+
49
+ <div class="metric-card">
50
+ <h2>📈 Visualization</h2>
51
+ <img src="metrics_across_epochs.png" style="max-width: 100%;">
52
+ </div>
53
+
54
+ <div class="metric-card">
55
+ <h2>🎯 Recommendations</h2>
56
+ <ul>
57
+ <li>⚠️ SSIM is low (&lt;0.5). Consider training for more epochs.</li>
58
+ </ul>
59
+ </div>
60
+ </body>
61
+ </html>
62
+