# app.py
import streamlit as st
import numpy as np
import os
import tensorflow as tf
import logging
from PIL import Image

# Configure logging
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

# Set page configuration
st.set_page_config(
    page_title="Breast Cancer Prediction",
    page_icon="๐ฉบ",
    layout="wide",
    initial_sidebar_state="expanded"
)

# Disable GPU to save memory
tf.config.set_visible_devices([], 'GPU')
logger.info("TensorFlow configured for CPU-only")

# ===== Model Loading =====
MODEL_FILE = "final_combined_model.keras"


@st.cache_resource(show_spinner=False)
def load_model():
    """Load the TensorFlow model from the local file, cached across reruns.

    Returns:
        The loaded Keras model, or None when the file is missing or
        loading fails. Errors are logged instead of raised so the UI
        can degrade gracefully.
    """
    try:
        # Verify file exists before handing the path to Keras.
        if not os.path.exists(MODEL_FILE):
            logger.error("โ Model file not found: %s", MODEL_FILE)
            return None

        logger.info("โณ Loading model from local file: %s", MODEL_FILE)

        # compile=False: inference only, skip optimizer/loss state.
        model = tf.keras.models.load_model(MODEL_FILE, compile=False)

        # Smoke-test with a dummy (1, 224, 224, 1) batch so a broken
        # model is caught at startup rather than on first user upload.
        test_input = np.random.rand(1, 224, 224, 1).astype(np.float32)
        test_pred = model.predict(test_input, verbose=0)
        logger.info("๐งช Test prediction: %.4f", test_pred[0][0])

        logger.info("โ Model loaded successfully")
        return model
    except Exception as e:
        # logger.exception records the full traceback automatically --
        # no need for a manual `import traceback` / format_exc().
        logger.exception("โ Error loading model: %s", e)
        return None


# Load model at startup
model = load_model()


# ===== Image Preprocessing =====
def preprocess_image(image):
    """Convert an uploaded image to the model's expected input tensor.

    Pipeline: grayscale -> resize to 224x224 -> scale to [0, 1] ->
    add batch and channel axes, yielding shape (1, 224, 224, 1).

    Args:
        image: a PIL.Image or a numpy array convertible to uint8.

    Returns:
        A float numpy array of shape (1, 224, 224, 1), or None on
        failure (the error is logged).
    """
    try:
        # Convert to PIL Image if a raw array was supplied.
        if isinstance(image, np.ndarray):
            img = Image.fromarray(image.astype('uint8'))
        else:
            img = image

        # Processing pipeline
        img = img.convert('L')            # Grayscale
        img = img.resize((224, 224))      # Resize
        img_array = np.array(img) / 255.0  # Normalize

        # Add batch and channel dimensions
        return img_array[np.newaxis, ..., np.newaxis]
    except Exception as e:
        logger.exception("๐ผ๏ธ Image preprocessing error: %s", e)
        return None
# ===== Prediction Function =====
def predict(image):
    """Run the cancer classifier on one image.

    Args:
        image: a PIL.Image or numpy array to classify.

    Returns:
        A (label, confidence_text, image) tuple. On error the first
        element is a human-readable message instead of a class label.
    """
    if model is None:
        return "Model failed to load", "Check logs", None

    try:
        # Preprocess image
        processed_image = preprocess_image(image)
        if processed_image is None:
            return "Invalid image", "Try another", image

        # Single sigmoid output; > 0.5 means Malignant.
        prediction = model.predict(processed_image, verbose=0)[0][0]

        # Format results: fold the raw score onto a 0.5-1.0 confidence
        # scale (distance from the decision boundary).
        confidence = abs(prediction - 0.5) + 0.5
        result = "Malignant" if prediction > 0.5 else "Benign"
        return result, f"{confidence*100:.2f}%", image
    except Exception as e:
        error_msg = f"Prediction error: {str(e)}"
        logger.error(error_msg)
        return error_msg, "Try again", image


# ===== Streamlit UI =====
# Custom CSS for styling
# NOTE(review): the CSS payload appears to have been lost in extraction --
# the markdown body below is empty. Restore the original stylesheet if
# available.
st.markdown(""" """, unsafe_allow_html=True)

# Header
# NOTE(review): the original header st.markdown("...") call is truncated
# at this point in the extracted source (its string continues on garbled
# lines that include "Confidence: {st.session_state.confidence}"); it is
# not reconstructed here to avoid inventing markup -- recover it from the
# original file.
Confidence: {st.session_state.confidence}
" "