"""Train a VGG16-based transfer-learning classifier for brain-tumor MRI images.

Expects ``DATA_DIR`` to contain one sub-directory per class (e.g.
glioma_tumor, meningioma_tumor, no_tumor, pituitary_tumor). Trains a
frozen VGG16 base with a small dense head and saves the result to
``MODEL_FILENAME`` for downstream use (e.g. a Streamlit app).
"""

import os

import tensorflow as tf
from tensorflow.keras.applications import VGG16
from tensorflow.keras.layers import Dense, Dropout, Flatten
from tensorflow.keras.models import Sequential
from tensorflow.keras.preprocessing.image import ImageDataGenerator

# --- 1. Configuration ---
DATA_DIR = 'dataset/Training'
IMAGE_SIZE = (224, 224)  # standard VGG16 input size
BATCH_SIZE = 32

# Fail early with a clear message if the dataset is missing, instead of
# letting flow_from_directory raise an opaque error later.
if not os.path.isdir(DATA_DIR):
    raise FileNotFoundError(
        f"Dataset directory not found: {DATA_DIR!r}. "
        "Expected one sub-directory per class inside it."
    )

# --- 2. Data Preprocessing and Augmentation ---
# Normalize pixel values to [0, 1] and carve a validation split out of
# the single training directory.
train_datagen = ImageDataGenerator(
    rescale=1. / 255,
    validation_split=0.2,  # 20% of the images are held out for validation
    # Data augmentation (rotation, zoom, flips, ...) can be added here.
)

# Training data generator (80% of the images).
train_generator = train_datagen.flow_from_directory(
    DATA_DIR,
    target_size=IMAGE_SIZE,
    batch_size=BATCH_SIZE,
    class_mode='categorical',  # one-hot labels for multi-class softmax
    subset='training',
)

# Validation data generator (remaining 20%).
validation_generator = train_datagen.flow_from_directory(
    DATA_DIR,
    target_size=IMAGE_SIZE,
    batch_size=BATCH_SIZE,
    class_mode='categorical',
    subset='validation',
)

# Derive the class count from the data instead of hard-coding 4, so the
# script keeps working if classes are added or removed on disk.
NUM_CLASSES = train_generator.num_classes

# --- 3. Transfer-Learning Base (VGG16) ---
# include_top=False drops VGG16's ImageNet classification head so we can
# attach our own task-specific head below.
base_model = VGG16(
    weights='imagenet',
    include_top=False,
    input_shape=(IMAGE_SIZE[0], IMAGE_SIZE[1], 3),
)

# Freeze the convolutional base so the pretrained weights are not updated.
for layer in base_model.layers:
    layer.trainable = False

# --- 4. Custom Classification Head ---
model = Sequential([
    base_model,
    Flatten(),                                  # conv feature maps -> 1D vector
    Dense(256, activation='relu'),              # hidden layer
    Dropout(0.5),                               # regularization against overfitting
    Dense(NUM_CLASSES, activation='softmax'),   # one probability per class
])

model.compile(
    optimizer='adam',
    loss='categorical_crossentropy',
    metrics=['accuracy'],
)
model.summary()

# --- 5. Model Training ---
print("\n--- Starting Model Training ---")
# Increase epochs (e.g. 20 or 30) if accuracy plateaus too early.
history = model.fit(
    train_generator,
    epochs=10,
    validation_data=validation_generator,
)

# --- 6. Save the Model ---
# Persist the trained model so other tools (e.g. Streamlit) can load it.
MODEL_FILENAME = 'brain_tumor_model.keras'
model.save(MODEL_FILENAME)
print(f"\nModel successfully saved as: {MODEL_FILENAME}")