# Brain-tumor MRI classifier -- VGG16 transfer-learning training script.
import tensorflow as tf
from tensorflow.keras.preprocessing.image import ImageDataGenerator
from tensorflow.keras.applications import VGG16
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense, Flatten, Dropout
import os
# --- 1. Configuration ---
# Path to the training data; expects one subdirectory per class under it
# (the directory names become the class labels for flow_from_directory).
DATA_DIR = 'dataset/Training'
IMAGE_SIZE = (224, 224) # standard input size expected by VGG16
BATCH_SIZE = 32
NUM_CLASSES = 4 # glioma_tumor, meningioma_tumor, no_tumor, pituitary_tumor
# --- 2. Data Preprocessing and Augmentation ---
# Pixel values are rescaled into [0, 1]; 20% of the training images are
# held back as a validation set via `validation_split`.
# (Augmentation such as rotation/zoom could be added here as well.)
train_datagen = ImageDataGenerator(
    rescale=1./255,
    validation_split=0.2,
)


def _flow(subset):
    # Build a directory iterator for the requested split
    # ('training' or 'validation') with the shared settings.
    return train_datagen.flow_from_directory(
        DATA_DIR,
        target_size=IMAGE_SIZE,
        batch_size=BATCH_SIZE,
        class_mode='categorical',  # one-hot labels for the 4 classes
        subset=subset,
    )


train_generator = _flow('training')         # ~80% of the images
validation_generator = _flow('validation')  # ~20% of the images
# --- 3. Transfer Learning Model Setup (VGG16) ---
# Load the ImageNet-pretrained convolutional base; include_top=False drops
# VGG16's original fully-connected classifier head.
base_model = VGG16(
    weights='imagenet',
    include_top=False,
    input_shape=(IMAGE_SIZE[0], IMAGE_SIZE[1], 3),
)

# Freeze every pretrained layer so only the new head is trained.
for layer in base_model.layers:
    layer.trainable = False

# --- 4. Custom Classification Head ---
model = Sequential()
model.add(base_model)
model.add(Flatten())                      # conv feature maps -> 1D vector
model.add(Dense(256, activation='relu')) # hidden layer
model.add(Dropout(0.5))                  # regularization against overfitting
model.add(Dense(NUM_CLASSES, activation='softmax'))  # 4-class output

# Categorical cross-entropy matches the one-hot labels from the generators.
model.compile(
    optimizer='adam',
    loss='categorical_crossentropy',
    metrics=['accuracy'],
)
model.summary()
# --- 5. Model Training ---
print("\n--- Starting Model Training ---")
# Epochs default to 10 but can be raised (e.g. 20-30 for better accuracy)
# without editing this file:  EPOCHS=30 python train.py
EPOCHS = int(os.environ.get('EPOCHS', '10'))
history = model.fit(
    train_generator,
    epochs=EPOCHS,
    validation_data=validation_generator,
)
# --- 6. Save the Model ---
# Persist the trained model in the native Keras format so the Streamlit
# app can reload it later.
MODEL_FILENAME = 'brain_tumor_model.keras'
model.save(MODEL_FILENAME)
print(f"\nModel successfully saved as: {MODEL_FILENAME}")