# brain_tumourrr / train_model.py
# (Hugging Face repository upload by saad1BM, commit a55aba1)
# train_model.py
import tensorflow as tf
from tensorflow.keras.applications import VGG16
from tensorflow.keras.preprocessing.image import ImageDataGenerator
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Flatten, Dense, Dropout
# --- Step 1: Define Data Paths and Parameters ---
IMAGE_SIZE = (224, 224) # VGG16 expects 224x224 RGB input
BATCH_SIZE = 32
EPOCHS = 5 # increase to 10 or 15 for better results
# Update this path to your dataset folder (one subdirectory per class)
DATA_DIR = 'dataset'
# --- Step 2: Data Augmentation and Loading ---
# Augmentation (shear/zoom/flip) helps the model generalize, but it must only
# be applied to TRAINING images. The original code reused the augmenting
# generator for the validation subset, so validation metrics were computed on
# distorted images. Use a rescale-only generator for validation instead.
# Both generators use the same validation_split, and Keras splits the file
# list deterministically, so the train/validation partition is identical.
train_datagen = ImageDataGenerator(
    rescale=1./255,        # normalize pixel values to [0, 1]
    shear_range=0.2,
    zoom_range=0.2,
    horizontal_flip=True,
    validation_split=0.2   # hold out 20% of the data for validation
)
val_datagen = ImageDataGenerator(
    rescale=1./255,        # validation images are only rescaled, never augmented
    validation_split=0.2
)
train_generator = train_datagen.flow_from_directory(
    DATA_DIR,
    target_size=IMAGE_SIZE,
    batch_size=BATCH_SIZE,
    # Binary labels. NOTE: class indices are assigned alphabetically by
    # subdirectory name — check train_generator.class_indices for the mapping.
    class_mode='binary',
    subset='training'
)
validation_generator = val_datagen.flow_from_directory(
    DATA_DIR,
    target_size=IMAGE_SIZE,
    batch_size=BATCH_SIZE,
    class_mode='binary',
    subset='validation'    # rescale-only validation subset
)
print("Data Generators Ready!")
# --- Step 3: Build the Transfer Learning Model (VGG16) ---
# Start from the ImageNet-pretrained VGG16 convolutional base, excluding its
# fully-connected classifier head so we can attach our own.
base_model = VGG16(
    weights='imagenet',
    include_top=False,          # drop VGG16's original classifier layers
    input_shape=(224, 224, 3)
)
# Keep the pretrained convolutional weights fixed during training.
base_model.trainable = False

# Attach a small custom classifier head on top of the frozen base.
model = Sequential()
model.add(base_model)                      # frozen VGG16 feature extractor
model.add(Flatten())                       # 3D feature maps -> 1D vector
model.add(Dense(512, activation='relu'))   # custom hidden layer
model.add(Dropout(0.5))                    # regularization against overfitting
model.add(Dense(1, activation='sigmoid'))  # single probability for binary output
# --- Step 4: Compile and Train the Model ---
model.compile(
    optimizer='adam',
    loss='binary_crossentropy',  # standard loss for a single-sigmoid binary head
    metrics=['accuracy']
)
model.summary()

print("Model Training Started...")
# NOTE: steps_per_epoch / validation_steps are intentionally omitted.
# The original code passed `samples // BATCH_SIZE`, which silently drops the
# final partial batch every epoch and evaluates to 0 (breaking training) when
# a subset holds fewer than BATCH_SIZE images. Keras infers the correct step
# count — ceil(samples / batch_size) — from len(generator), so every image is
# seen exactly once per epoch.
history = model.fit(
    train_generator,
    epochs=EPOCHS,
    validation_data=validation_generator
)
print("Model Training Complete!")
print("Model Training Complete!")
# --- Step 5: Save the Model (Requirement) ---
MODEL_FILE_NAME = 'model.keras'
model.save(MODEL_FILE_NAME)
print(f"Model saved as: {MODEL_FILE_NAME}")
# Optional: Print final accuracy
print(f"Final Validation Accuracy: {history.history['val_accuracy'][-1]:.4f}")