File size: 7,266 Bytes
6d6d94e
4eb7038
 
a37a6c6
 
c2bccc1
4eb7038
c2bccc1
4eb7038
a37a6c6
c2bccc1
 
 
 
 
 
 
4eb7038
c2bccc1
 
 
 
 
 
 
 
 
 
0b2fb9a
c2bccc1
a37a6c6
c1d9caf
a37a6c6
c2bccc1
 
 
4eb7038
c2bccc1
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
a37a6c6
c1d9caf
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
c2bccc1
c1d9caf
 
 
 
 
 
a37a6c6
c1d9caf
c2bccc1
a37a6c6
c2bccc1
 
 
 
 
c1d9caf
a37a6c6
c2bccc1
 
 
a37a6c6
 
c2bccc1
 
 
 
 
 
 
 
a37a6c6
c2bccc1
 
 
 
 
 
c1d9caf
 
c2bccc1
 
a37a6c6
c2bccc1
a37a6c6
 
4eb7038
 
c2bccc1
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
c1d9caf
 
 
 
 
 
 
 
 
 
 
 
 
 
 
c2bccc1
 
 
 
 
 
 
 
 
 
 
 
0b2fb9a
c2bccc1
0b2fb9a
c2bccc1
a37a6c6
c2bccc1
 
 
6d6d94e
 
a37a6c6
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
import gradio as gr
import tensorflow as tf
import numpy as np
from PIL import Image
import json
import os

print("=== DIAGNOSTIC MODE ===")

# Load class labels from disk; fall back to generic names so the app still
# runs (and the diagnostics still print) when the file is absent or invalid.
class_labels = ["Class 0", "Class 1"]  # Default
try:
    with open("class_labels.json", "r") as f:
        class_labels = json.load(f)
    print(f"βœ… Loaded class labels: {class_labels}")
except Exception as e:
    print(f"❌ Error loading class labels: {e}")

# Load the first *.keras model found in the working directory.
model = None
keras_files = [f for f in os.listdir(".") if f.endswith('.keras')]
if keras_files:
    if len(keras_files) > 1:
        # Directory listing order is arbitrary; tell the user which file won.
        print(f"⚠️ Multiple .keras files found {keras_files}; using {keras_files[0]}")
    try:
        model = tf.keras.models.load_model(keras_files[0])
        print(f"βœ… Model loaded: {keras_files[0]}")
        print(f"Model output shape: {model.output_shape}")
    except Exception as e:
        print(f"❌ Model loading error: {e}")
else:
    # FIX: previously a missing model file failed silently — the UI later
    # reported only "Model not loaded" with no hint in the logs as to why.
    print("❌ No .keras model file found in the working directory")

def diagnose_prediction(image):
    """
    Run one prediction with verbose diagnostics printed to the app logs.

    Args:
        image: PIL.Image uploaded through the Gradio Image widget.

    Returns:
        dict mapping class label -> probability, sorted descending,
        or ``{"Error": ...}`` when the model is missing or prediction fails.
    """
    if model is None:
        return {"Error": "Model not loaded"}
    
    try:
        print("\n" + "="*50)
        print("DIAGNOSTIC PREDICTION")
        print("="*50)
        
        # Preprocess image.
        # FIX: force 3-channel RGB first. Uploads can be grayscale (1 ch) or
        # PNG with alpha (4 ch); those produce (H, W) / (H, W, 4) arrays that
        # do not match the model's expected (H, W, 3) input and crash predict.
        img_array = np.array(image.convert("RGB"))
        img_resized = tf.image.resize(img_array, [256, 256])
        img_normalized = tf.cast(img_resized, tf.float32) / 255.0
        img_batch = tf.expand_dims(img_normalized, 0)
        
        print(f"Input shape to model: {img_batch.shape}")
        
        # Get raw predictions
        raw_predictions = model.predict(img_batch, verbose=0)
        print(f"Raw model output: {raw_predictions}")
        print(f"Raw output shape: {raw_predictions.shape}")
        print(f"Raw output type: {type(raw_predictions)}")
        
        # Normalize the raw output to a 1-D probability vector, based on the
        # number of output units.
        if raw_predictions.shape[1] == 1:
            # Single output unit: assume sigmoid activation (binary classifier).
            print("Detected binary classification with sigmoid output")
            sigmoid_output = float(raw_predictions[0][0])
            print(f"Sigmoid output: {sigmoid_output}")
            
            # Expand the single sigmoid value into per-class probabilities.
            prob_class_1 = sigmoid_output
            prob_class_0 = 1.0 - sigmoid_output
            
            probabilities = np.array([prob_class_0, prob_class_1])
            print(f"Calculated probabilities: [Class 0: {prob_class_0:.4f}, Class 1: {prob_class_1:.4f}]")
            
        elif raw_predictions.shape[1] == 2:
            # Two output units: treat them as logits and softmax them.
            # NOTE(review): if the model's last layer already applies softmax,
            # this re-normalizes (flattens) the distribution — confirm the
            # final activation of the exported model.
            print("Detected two-output classification")
            logits = raw_predictions[0]
            probabilities = tf.nn.softmax(logits).numpy()
            print(f"Softmax probabilities: {probabilities}")
            
        else:
            # Three or more output units: multi-class, softmax the logits.
            print("Detected multi-class classification")
            logits = raw_predictions[0]
            probabilities = tf.nn.softmax(logits).numpy()
            print(f"Multi-class probabilities: {probabilities}")
        
        print(f"Final probabilities shape: {probabilities.shape}")
        print(f"Probabilities sum: {np.sum(probabilities)}")
        
        # Get the predicted class
        predicted_class_index = np.argmax(probabilities)
        print(f"Predicted class index: {predicted_class_index}")
        print(f"Predicted class name: {class_labels[predicted_class_index] if predicted_class_index < len(class_labels) else 'Unknown'}")
        
        # Build the {label: probability} dict Gradio's Label component expects.
        results = {}
        print(f"Number of probabilities: {len(probabilities)}")
        print(f"Number of class labels: {len(class_labels)}")
        
        for i, prob in enumerate(probabilities):
            if i < len(class_labels):
                class_name = class_labels[i]
                prob_value = float(prob)
                results[class_name] = prob_value
                print(f"  {class_name} (index {i}): {prob_value:.4f}")
            else:
                print(f"  WARNING: More probabilities than class labels at index {i}")
        
        print(f"Final results dictionary: {results}")
        
        # Sanity checks that flag the two most common wiring bugs in the logs.
        if len(set(results.values())) == 1:
            print("🚨 BUG DETECTED: All probabilities are identical!")
        
        if len(results) != len(class_labels):
            print("🚨 BUG DETECTED: Results count doesn't match class labels!")
        else:
            print("βœ… Results count matches class labels!")
            
        # Sort results by probability (Gradio expects this)
        sorted_results = dict(sorted(results.items(), key=lambda x: x[1], reverse=True))
        print(f"Sorted results: {sorted_results}")
        
        return sorted_results
        
    except Exception as e:
        error_msg = f"Prediction failed: {str(e)}"
        print(f"❌ {error_msg}")
        import traceback
        traceback.print_exc()
        return {"Error": error_msg}

# Test the model with a simple synthetic input
def test_model_directly():
    """Probe the loaded model with three synthetic images (black, white,
    random noise) and report whether its outputs actually differ."""
    if model is None:
        return

    print("\n" + "="*50)
    print("TESTING MODEL WITH SYNTHETIC INPUTS")
    print("="*50)

    # Three probe batches, built in a fixed order: black, white, noise.
    probe_batches = [
        np.zeros((1, 256, 256, 3), dtype=np.float32),
        np.ones((1, 256, 256, 3), dtype=np.float32),
        np.random.random((1, 256, 256, 3)).astype(np.float32),
    ]
    raw_outputs = [model.predict(batch, verbose=0) for batch in probe_batches]

    # Normalize each raw output to a per-class probability vector.
    if raw_outputs[0].shape[1] == 1:
        # One sigmoid unit: expand into [P(class 0), P(class 1)].
        all_probs = [np.array([1 - out[0][0], out[0][0]]) for out in raw_outputs]
    else:
        # Several units: treat them as logits and softmax.
        all_probs = [tf.nn.softmax(out[0]).numpy() for out in raw_outputs]

    black_probs, white_probs, noise_probs = all_probs
    print(f"Black image prediction: {black_probs}")
    print(f"White image prediction: {white_probs}")
    print(f"Noise image prediction: {noise_probs}")

    # A healthy model should respond differently to such different inputs.
    if np.allclose(black_probs, white_probs, atol=1e-6) and np.allclose(white_probs, noise_probs, atol=1e-6):
        print("🚨 MODEL ISSUE: Model gives identical predictions for different inputs!")
        print("   This suggests the model is broken or not properly trained")
    else:
        print("βœ… Model gives different predictions for different inputs")

# Run diagnostic test when app starts
if model is not None:
    test_model_directly()

# Create interface
# Single-image classifier UI; gr.Label renders the {class: probability}
# dict returned by diagnose_prediction as a ranked bar list.
interface = gr.Interface(
    fn=diagnose_prediction,
    inputs=gr.Image(type="pil", label="Upload an image"),
    outputs=gr.Label(label="Diagnostic Results - Check Logs!"),
    title="DIAGNOSTIC MODE - Check Application Logs",
    description="This version shows detailed diagnostic info in the logs. Upload an image and check the logs tab."
)

if __name__ == "__main__":
    # Launch only when executed as a script (not on import).
    interface.launch()