File size: 2,035 Bytes
242e5dc
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
import os
# Suppress TensorFlow's C++ log output (INFO/WARNING/ERROR) — must be set
# BEFORE tensorflow is imported for it to take effect.
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3'

import warnings
# Silence Python-level warnings (e.g. deprecation notices from Keras).
warnings.filterwarnings('ignore')

import tensorflow as tf
from keras.models import Sequential
from keras.layers import Conv2D, MaxPooling2D, Flatten, Dense, Dropout
from keras.callbacks import EarlyStopping
import numpy as np

# Fix NumPy's RNG for reproducibility. NOTE(review): this does not seed
# TensorFlow itself (tf.random.set_seed), so training is still nondeterministic.
np.random.seed(1337)
# Small sequential CNN: three conv/pool stages that progressively shrink the
# 128x128x3 input, then a dense head with dropout ending in a 10-way softmax.
classifier = Sequential()

classifier.add(Conv2D(32, (3, 3), input_shape=(128, 128, 3), activation='relu'))
classifier.add(MaxPooling2D(pool_size=(2, 2)))
classifier.add(Conv2D(16, (3, 3), activation='relu'))
classifier.add(MaxPooling2D(pool_size=(2, 2)))
classifier.add(Conv2D(8, (3, 3), activation='relu'))
classifier.add(MaxPooling2D(pool_size=(2, 2)))
classifier.add(Flatten())
classifier.add(Dense(128, activation='relu'))
classifier.add(Dropout(0.5))  # regularize the dense head
classifier.add(Dense(10, activation='softmax'))  # one unit per class

# categorical_crossentropy matches the one-hot labels produced below by
# label_mode='categorical'.
classifier.compile(optimizer='adam', loss='categorical_crossentropy', metrics=['accuracy'])
# BUG FIX: Model.summary() prints the table itself and returns None, so the
# original print(classifier.summary()) emitted a stray trailing "None".
classifier.summary()

train_dir = '/home/vignesh/tomato_data/train'
val_dir = '/home/vignesh/tomato_data/val'


def _load_split(directory, shuffle):
    """Build a batched 128x128 image dataset with one-hot labels from a directory tree."""
    return tf.keras.utils.image_dataset_from_directory(
        directory,
        labels='inferred',
        label_mode='categorical',
        image_size=(128, 128),
        batch_size=32,
        shuffle=shuffle,
    )


def _rescale(images, labels):
    """Scale pixel values from [0, 255] down to [0, 1]."""
    return images / 255.0, labels


train_data_raw = _load_split(train_dir, shuffle=True)
class_names = train_data_raw.class_names  # capture before map(): mapped datasets lose this attribute
train_data = train_data_raw.map(_rescale).prefetch(tf.data.AUTOTUNE)

# Validation split: same preprocessing, but no shuffling.
val_data = _load_split(val_dir, shuffle=False).map(_rescale).prefetch(tf.data.AUTOTUNE)

# Show which integer index was assigned to each class directory.
print({name: idx for idx, name in enumerate(class_names)})

# Stop training once val_loss has not improved for 10 epochs, and roll the
# model back to the best weights seen.
early_stop = EarlyStopping(monitor='val_loss', patience=10, restore_best_weights=True)

classifier.fit(
    train_data,
    epochs=30,
    validation_data=val_data,
    # BUG FIX: early_stop was created but never passed to fit(), so early
    # stopping (and best-weight restoration) silently never ran.
    callbacks=[early_stop],
)

# BUG FIX: the save path contained a stray "(2" ('keras_potato_trained_model(2.h5')
# and disagreed with the filename announced by the print below; use one name.
model_path = 'keras_potato_trained_model.h5'
classifier.save(model_path)
print('Saved trained model as %s ' % model_path)