S1223 committed on
Commit
ed96dd1
·
verified ·
1 Parent(s): 5d7c0ce

Update imageMulticlassClassification.py

Browse files
Files changed (1) hide show
  1. imageMulticlassClassification.py +530 -535
imageMulticlassClassification.py CHANGED
@@ -1,536 +1,531 @@
1
class ImageMulticlassClassification:
    """End-to-end helper for building, training, and evaluating an image
    classifier (DenseNet121 backbone by default).

    Wraps dataset construction from a URL or local .tgz archive,
    preprocessing/augmentation, (multi-GPU) training with per-epoch timing,
    and evaluation reporting (confusion matrix, ROC curve, Excel reports).
    """

    def __init__(self, imgWidth=300, imgHeight=300, batchSize=32):
        # Heavy dependencies are imported lazily at construction time and kept
        # as instance attributes; every method accesses them via self.*.
        from time import time
        import tensorflow as tf
        import matplotlib.pyplot as plt
        import pathlib
        import datetime
        from sklearn.metrics import roc_curve, auc, roc_auc_score
        import os
        import keras
        import numpy as np
        import pandas as pd
        import tarfile
        import sklearn

        self.time = time
        self.sklearn = sklearn
        self.tf = tf
        self.plt = plt
        self.pathlib = pathlib
        self.datetime = datetime
        self.roc_curve = roc_curve
        self.roc_auc_score = roc_auc_score
        # NOTE(review): self.auc initially holds sklearn's auc *function*;
        # the ROC helper later overwrites it with the computed AUC *value*.
        self.auc = auc
        self.os = os
        self.keras = keras
        self.np = np
        self.AUTOTUNE = tf.data.AUTOTUNE
        self.pd = pd
        self.tarfile = tarfile

        self.imgWidth = imgWidth
        self.imgHeight = imgHeight
        # Scale the effective batch size with the number of visible GPUs so a
        # MirroredStrategy run keeps the per-replica batch size constant.
        self.numGPU = len(self.tf.config.list_physical_devices('GPU'))
        if self.numGPU > 0:
            self.batchSize = batchSize * self.numGPU
        else:
            self.batchSize = batchSize
        # Mutable training/evaluation state, populated by the other methods.
        self.Model = None
        self.time_callback = None
        self.history = None
        self.confusionMatrix = None
        self.validation_label = None
        self.trainDataset = None
        self.validationDataset = None
        self.accuracy = None
        self.recall = None
        self.precision = None
        self.f1Score = None
        self.modelName = ""
- def data_MakeDataset(self, datasetUrl=None, datasetPath=None, datasetDirectoryName="Dataset Covid19 Training", ratioValidation=0.2):
54
- """
55
- Purpose:
56
- - Make dataset from parameter
57
-
58
- Parameter:
59
- - datasetUrl: url of dataset
60
- - type: string
61
- - example: "https://storage.googleapis.com/fdataset/Dataset%20Covid19%20Training.tgz"
62
- - datasetPath: path of dataset
63
- - type: string
64
- - example: "C:/Users/User/Desktop/Dataset Covid19 Training.tgz"
65
- - datasetDirectoryName: name of dataset directory
66
- - type: string
67
- - example: "Dataset Covid19 Training"
68
- - ratioValidation: ratio of validation data
69
- - type: float
70
- - example: 0.2
71
-
72
- Return:
73
- - {"success":True, "code":200, "detail":"success"}
74
- """
75
- try:
76
- if datasetUrl is not None:
77
- dataset_url = datasetUrl
78
- data_dir = self.tf.keras.utils.get_file(datasetDirectoryName, origin=dataset_url, untar=True)
79
- data_dir = self.pathlib.Path(data_dir)
80
- elif datasetPath is not None:
81
- currentPath = self.os.getcwd()
82
- if self.os.path.exists(currentPath + "/" + datasetDirectoryName):
83
- # remove dataset directory with all file inside
84
- self.os.system("rm -rf " + currentPath + "/" + datasetDirectoryName)
85
- # extract dataset
86
- my_tar = self.tarfile.open(datasetPath)
87
- # check if dataset directory exist then delete it
88
- my_tar.extractall(currentPath) # specify which folder to extract to
89
- my_tar.close()
90
- data_dir = self.pathlib.Path(f'{currentPath}/{datasetDirectoryName}/')
91
-
92
- image_count = len(list(data_dir.glob('*/*.jpg')))
93
-
94
- train_ds = self.tf.keras.preprocessing.image_dataset_from_directory(
95
- data_dir,
96
- seed=123,
97
- subset="training",
98
- validation_split=ratioValidation,
99
- image_size=(self.imgWidth, self.imgHeight),
100
- batch_size=self.batchSize)
101
-
102
- val_ds = self.tf.keras.preprocessing.image_dataset_from_directory(
103
- data_dir,
104
- seed=123,
105
- subset="validation",
106
- validation_split=ratioValidation,
107
- image_size=(self.imgWidth, self.imgHeight),
108
- batch_size=self.batchSize)
109
-
110
- self.trainDataset = train_ds.cache().shuffle(1000).prefetch(buffer_size=self.AUTOTUNE)
111
- self.validationDataset = val_ds.cache().prefetch(buffer_size=self.AUTOTUNE)
112
-
113
- return {"success":True, "code":200, "detail":"success"}
114
- except Exception as e:
115
- return {"success":False, "code":500, "detail":str(e)}
116
-
117
- def data_PreprocessingDataset(self, typeRandomFlip="horizontal_and_vertical", RandomRotation=0.3, RandomZoom=0.2, shuffleTrainDataset=True, augmentTrainDataset=True):
118
- """
119
- Purpose:
120
- - Preprocessing dataset
121
-
122
- Parameter:
123
- - typeRandomFlip: type of random flip
124
- - type: string
125
- - example: "horizontal_and_vertical"
126
- - options: "horizontal", "vertical", "horizontal_and_vertical"
127
- - RandomRotation: random rotation
128
- - type: float
129
- - example: 0.3
130
- - RandomZoom: random zoom
131
- - type: float
132
- - example: 0.2
133
- - shuffleTrainDataset: shuffle train dataset
134
- - type: bool
135
- - example: True
136
- - augmentTrainDataset: augment train dataset
137
- - type: bool
138
- - example: True
139
-
140
- Return:
141
- - {"success":True, "code":200, "detail":"success"}
142
- """
143
- try:
144
- rescale = self.tf.keras.layers.Rescaling(1.0 / 255, input_shape=(self.imgWidth, self.imgHeight, 3))
145
-
146
- data_augmentation = self.tf.keras.Sequential(
147
- [
148
- self.tf.keras.layers.RandomFlip(typeRandomFlip, input_shape=(self.imgWidth,self.imgHeight,3)),
149
- self.tf.keras.layers.RandomRotation(RandomRotation),
150
- self.tf.keras.layers.RandomZoom(RandomZoom),
151
- ]
152
- )
153
-
154
-
155
- def prepare(ds, shuffle=False, augment=False):
156
- # Rescale dataset
157
- ds = ds.map(lambda x, y: (rescale(x), y), num_parallel_calls=self.AUTOTUNE)
158
-
159
- if shuffle:
160
- ds = ds.shuffle(1024)
161
-
162
- # Use data augmentation only on the training set
163
- if augment:
164
- ds = ds.map(lambda x, y: (data_augmentation(x), y), num_parallel_calls=self.AUTOTUNE,)
165
-
166
- # Use buffered prefecting
167
- return ds.prefetch(buffer_size=self.AUTOTUNE)
168
-
169
- self.trainDataset = prepare(self.trainDataset, shuffle=shuffleTrainDataset, augment=augmentTrainDataset)
170
- self.validationDataset = prepare(self.validationDataset)
171
-
172
- return {"success":True, "code":200, "detail":"success"}
173
- except Exception as e:
174
- return {"success":False, "code":500, "detail":str(e)}
175
-
176
- def data_GetLabelFromDataset(self, dataset):
177
- """
178
- Purpose:
179
- - Get label from dataset
180
-
181
- Parameter:
182
- - dataset: dataset
183
- - type: tf.data.Dataset
184
- - example: trainDataset
185
-
186
- Return:
187
- - {"success":True, "code":200, "detail":"success", "label":array([0, 0, 0, 0, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 0, 1, 0, 0, 0, 0, 0, 0,
188
- 0, 0, 1, 1, 1, 1, 0, 1, 1, 1, 0, 0, 0, 0, 1, 1, 0, 0, 1, 1, 1, 1,
189
- 1, 0, 0, 1, 1, 0, 1, 0, 0, 0, 1, 1, 0, 0, 1, 1, 0, 1, 1, 0, 0, 0,
190
- 1, 1, 0, 0, 0, 0, 0, 0], dtype=int32)}
191
- """
192
- try:
193
- label = self.np.concatenate([y for x, y in dataset], axis=0)
194
- return {"success":True, "code":200, "detail":"success", "label":label}
195
- except Exception as e:
196
- return {"success":False, "code":500, "detail":str(e)}
197
-
198
- def model_make(self, model=None):
199
- """
200
- Purpose:
201
- - Make default model
202
-
203
- Parameter:
204
- - model: model
205
- - type: tf.keras.Model
206
- - example: model
207
- - default: None
208
-
209
- Return:
210
- - {"success":True, "code":200, "detail":"success", "model":model}
211
- """
212
- try:
213
- if model is None:
214
- model = self.tf.keras.Sequential()
215
- base_model = self.tf.keras.applications.DenseNet121(include_top=False, input_shape=(self.imgWidth, self.imgHeight, 3))
216
- base_model.trainable=True
217
- model.add(base_model)
218
- model.add(self.tf.keras.layers.Dropout(0.4))
219
- model.add(self.tf.keras.layers.Flatten())
220
- model.add(self.tf.keras.layers.Dense(128,activation='relu'))
221
- model.add(self.tf.keras.layers.Dropout(0.5))
222
- model.add(self.tf.keras.layers.Dense(32,activation='relu'))
223
- model.add(self.tf.keras.layers.Dense(1, activation="sigmoid"))
224
- self.Model = model
225
- else:
226
- self.Model = model
227
- return {"success":True, "code":200, "detail":"success", "model":self.Model}
228
- except Exception as e:
229
- return {"success":False, "code":500, "detail":str(e)}
230
-
231
- def training_model(self, epochs=10, lossFunction="binary_crossentropy", optimizer="adam", metrics=["accuracy"], device='/GPU:0', modelName=None):
232
- """
233
- Purpose:
234
- - Training model
235
-
236
- Parameter:
237
- - model: model
238
- - type: tf.keras.Model
239
- - example: model
240
- - default: True
241
- - epochs: epochs
242
- - type: int
243
- - example: 10
244
- - lossFunction: loss function
245
- - type: string
246
- - example: "binary_crossentropy"
247
- - options: "binary_crossentropy", "categorical_crossentropy", "sparse_categorical_crossentropy"
248
- - optimizer: optimizer
249
- - type: string
250
- - example: "adam"
251
- - options: "adam", "adamax", "nadam", "rmsprop", "sgd", tf.keras.optimizers.RMSprop(learning_rate=1e-4)
252
- - metrics: metrics
253
- - type: list
254
- - example: ["accuracy"]
255
- - device: device
256
- - type: string
257
- - example: "/GPU:0"
258
- - options: "/CPU:0", "/GPU:0"
259
- - modelName: model name
260
- - type: string
261
- - example: "model"
262
-
263
- Return:
264
- - {"success":True, "code":200, "detail":"success"}
265
- """
266
- try:
267
- if modelName is not None:
268
- self.modelName = modelName
269
-
270
- self.time_callback = TimeHistory()
271
- self.Model.compile(
272
- loss=lossFunction,
273
- optimizer=optimizer,
274
- metrics=metrics,
275
- )
276
-
277
- print(self.Model.summary())
278
-
279
- with self.tf.device(device):
280
- self.history = self.Model.fit(
281
- self.trainDataset, validation_data=self.validationDataset, epochs=epochs, verbose=1, callbacks=[self.time_callback]
282
- )
283
- # make excel file report.xlsx and save data in column 1 is number of training loss, column 2 is training accuracy, column 3 is validation loss, column 4 is validation accuracy, column 5 is training time
284
- dataFrameHistory = self.pd.DataFrame({"training_loss":self.history.history["loss"], "training_accuracy":self.history.history["accuracy"], "validation_loss":self.history.history["val_loss"], "validation_accuracy":self.history.history["val_accuracy"], "training_time":self.time_callback.times})
285
- dataFrameHistory.to_excel(f"report_{self.modelName}.xlsx")
286
-
287
- return {"success":True, "code":200, "detail":"success"}
288
- except Exception as e:
289
- return {"success":False, "code":500, "detail":str(e)}
290
-
291
- def training_model_multiGPU(self, epochs=10, lossFunction="binary_crossentropy", optimizer="adam", metrics=["accuracy"], device='/GPU:0', modelName=None):
292
- """
293
- Purpose:
294
- - Training model with multi GPU support, with mirrored strategy
295
-
296
- Parameter:
297
- - model: model
298
- - type: tf.keras.Model
299
- - example: model
300
- - default: True
301
- - epochs: epochs
302
- - type: int
303
- - example: 10
304
- - lossFunction: loss function
305
- - type: string
306
- - example: "binary_crossentropy"
307
- - options: "binary_crossentropy", "categorical_crossentropy", "sparse_categorical_crossentropy"
308
- - optimizer: optimizer
309
- - type: string
310
- - example: "adam"
311
- - options: "adam", "adamax", "nadam", "rmsprop", "sgd", tf.keras.optimizers.RMSprop(learning_rate=1e-4)
312
- - metrics: metrics
313
- - type: list
314
- - example: ["accuracy"]
315
- - device: device
316
- - type: string
317
- - example: "/GPU:0"
318
- - options: "/CPU:0", "/GPU:0"
319
-
320
- Return:
321
- - {"success":True, "code":200, "detail":"success"}
322
- """
323
- try:
324
- if modelName is not None:
325
- self.modelName = modelName
326
-
327
- self.time_callback = TimeHistory()
328
-
329
- print(self.Model.summary())
330
- strategy = self.tf.distribute.MirroredStrategy()
331
- with strategy.scope():
332
- model = self.Model
333
- model.compile(loss=lossFunction, optimizer=optimizer, metrics=metrics)
334
-
335
- self.history = model.fit(self.trainDataset, validation_data=self.validationDataset, epochs=epochs, verbose=1, callbacks=[self.time_callback])
336
- # make excel file report.xlsx and save data in column 1 is number of training loss, column 2 is training accuracy, column 3 is validation loss, column 4 is validation accuracy, column 5 is training time
337
- dataFrameHistory = self.pd.DataFrame({"training_loss":self.history.history["loss"], "training_accuracy":self.history.history["accuracy"], "validation_loss":self.history.history["val_loss"], "validation_accuracy":self.history.history["val_accuracy"], "training_time":self.time_callback.times})
338
- dataFrameHistory.to_excel(f"report_{self.modelName}.xlsx")
339
-
340
- return {"success":True, "code":200, "detail":"success"}
341
- except Exception as e:
342
- return {"success":False, "code":500, "detail":str(e)}
343
-
344
- def evaluation(self, labelName=["COVID19", "NORMAL"]):
345
- """
346
- Purpose:
347
- - Evaluation model with confusionMatrix, precision, recall, f1Score, accuracy
348
-
349
- Parameter:
350
- - labelName: label name
351
- - type: list
352
- - example: ["COVID19", "NORMAL"]
353
-
354
- Return:
355
- - {"success":True, "code":200, "detail":"success", "confusionMatrix":confusionMatrix, "precision":precision, "recall":recall, "f1Score":f1Score, "accuracy":accuracy}
356
- """
357
- try:
358
- self.Model.evaluate(self.validationDataset)
359
- # get prediction result as label
360
- prediction_result = self.Model.predict(self.validationDataset)
361
- prediction_result = self.np.argmax(prediction_result, axis=1)
362
- self.validation_label = self.np.concatenate([y for x, y in self.validationDataset], axis=0)
363
- # make confusion matrix for multi class using tensorflow
364
- self.confusionMatrix = self.tf.math.confusion_matrix(labels=self.validation_label, predictions=prediction_result).numpy()
365
- # get accuracy, precision, recall, f1Score for multi class
366
- self.accuracy = self.sklearn.metrics.accuracy_score(self.validation_label, prediction_result)
367
- self.precision = self.sklearn.metrics.precision_score(self.validation_label, prediction_result, average="macro")
368
- self.recall = self.sklearn.metrics.recall_score(self.validation_label, prediction_result, average="macro")
369
- self.f1Score = self.sklearn.metrics.f1_score(self.validation_label, prediction_result, average="macro")
370
- self.__drawConfusionMatrix(labelName)
371
- self.__drawROC()
372
- # save accuracy, recall, precision, f1Score in excel file which name is reportScore_.xlsx
373
- dataFrameScore = self.pd.DataFrame({"accuracy":[self.accuracy], "recall":[self.recall], "precision":[self.precision], "f1Score":[self.f1Score]})
374
- dataFrameScore.to_excel(f"reportScore_{self.modelName}.xlsx")
375
- # draw history accuracy with training and validation dataset
376
- self.__drawHistoryAccuracy()
377
- # draw history loss with training and validation dataset
378
- self.__drawHistoryLoss()
379
- return {"success":True, "code":200, "detail":"success"}
380
- except Exception as e:
381
- return {"success":False, "code":500, "detail":str(e)}
382
-
383
- def __drawConfusionMatrix(self, labelName=["COVID19", "NORMAL"]):
384
- # draw confusion matrix with numeric value on the center and library matplotlib with label of validation dataset like this sample https://scikit-learn.org/stable/_images/sklearn-metrics-plot_confusion_matrix-1.png
385
- labelName.sort()
386
- fig, ax = self.plt.subplots()
387
- im = ax.imshow(self.confusionMatrix)
388
- ax.figure.colorbar(im, ax=ax)
389
- ax.set(xticks=self.np.arange(self.confusionMatrix.shape[1]), yticks=self.np.arange(self.confusionMatrix.shape[0]), xticklabels=labelName, yticklabels=labelName, title="Confusion Matrix", ylabel="True label", xlabel="Predicted label")
390
- ax.set_xlabel("Predicted")
391
- ax.set_ylabel("True")
392
- self.plt.setp(ax.get_xticklabels(), rotation=45, ha="right", rotation_mode="anchor")
393
- for i in range(self.confusionMatrix.shape[0]):
394
- for j in range(self.confusionMatrix.shape[1]):
395
- ax.text(j, i, self.confusionMatrix[i, j], ha="center", va="center", color="w")
396
- self.plt.tight_layout()
397
- self.plt.savefig(f"confusionMatrix_{self.modelName}.png")
398
- self.plt.show()
399
- self.plt.close()
400
- # save confusion matrix to excel file
401
- dataFrameConfusionMatrix = self.pd.DataFrame(self.confusionMatrix)
402
- dataFrameConfusionMatrix.to_excel(f"confusionMatrix_{self.modelName}.xlsx")
403
-
404
- def __drawROC(self):
405
- """
406
- Purpose:
407
- - Draw ROC curve like this sample https://scikit-learn.org/stable/_images/sphx_glr_plot_roc_001.png for multi class
408
- """
409
- predictResult = self.Model.predict(self.validationDataset)
410
- fpr, tpr, thresholds = self.sklearn.metrics.roc_curve(self.validation_label, predictResult[:, 1], pos_label=1)
411
- self.auc = self.sklearn.metrics.auc(fpr, tpr)
412
- fig, ax = self.plt.subplots()
413
- ax.plot(fpr, tpr, label="ROC curve (area = %0.2f)" % self.auc)
414
- ax.plot([0, 1], [0, 1], "k--")
415
- ax.set_xlim([0.0, 1.0])
416
- ax.set_ylim([0.0, 1.05])
417
- ax.set_xlabel("False Positive Rate")
418
- ax.set_ylabel("True Positive Rate")
419
- ax.set_title("Receiver operating characteristic")
420
- ax.legend(loc="best")
421
- self.plt.savefig(f"ROC_{self.modelName}.png")
422
- self.plt.show()
423
- self.plt.close()
424
- # save ROC curve to excel file
425
- dataFrameROC = self.pd.DataFrame({"fpr":fpr, "tpr":tpr, "thresholds":thresholds, "auc":self.auc})
426
- dataFrameROC.to_excel(f"ROC_{self.modelName}.xlsx")
427
-
428
- def __drawHistoryAccuracy(self):
429
- """
430
- Purpose:
431
- - Draw history accuracy with training and validation dataset
432
- """
433
- fig, ax = self.plt.subplots()
434
- ax.plot(self.history.history["accuracy"], label="training dataset")
435
- ax.plot(self.history.history["val_accuracy"], label="validation dataset")
436
- ax.set_xlabel("Epoch")
437
- ax.set_ylabel("Accuracy")
438
- ax.set_title("Accuracy")
439
- ax.legend(loc="best")
440
- self.plt.savefig(f"historyAccuracy_{self.modelName}.png")
441
- self.plt.show()
442
- self.plt.close()
443
-
444
- def __drawHistoryLoss(self):
445
- """
446
- Purpose:
447
- - Draw history loss with training and validation dataset
448
- """
449
- fig, ax = self.plt.subplots()
450
- ax.plot(self.history.history["loss"], label="training dataset")
451
- ax.plot(self.history.history["val_loss"], label="validation dataset")
452
- ax.set_xlabel("Epoch")
453
- ax.set_ylabel("Loss")
454
- ax.set_title("Loss")
455
- ax.legend(loc="best")
456
- self.plt.savefig(f"historyLoss_{self.modelName}.png")
457
- self.plt.show()
458
- self.plt.close()
459
-
460
- def import_data_Dataset(self, trainDataset, validationDataset):
461
- """
462
- Purpose:
463
- - Import dataset
464
-
465
- Parameter:
466
- - trainDataset: dataset
467
- - type: tf.data.Dataset
468
- - example: trainDataset
469
- - validationDataset: dataset
470
- - type: tf.data.Dataset
471
- - example: validationDataset
472
-
473
- Return:
474
- - {"success":True, "code":200, "detail":"success"}
475
- """
476
- try:
477
- self.trainDataset = trainDataset
478
- self.validationDataset = validationDataset
479
- return {"success":True, "code":200, "detail":"success"}
480
- except Exception as e:
481
- return {"success":False, "code":500, "detail":str(e)}
482
-
483
- def saveModelWithWeight(self, fileName):
484
- """
485
- Purpose:
486
- - Save model with weight
487
-
488
- Parameter:
489
- - fileName: file name
490
- - type: string
491
- - example: "my_model"
492
- - options: "my_model", "gs://bucket/my_model"
493
-
494
- Return:
495
- - {"success":True, "code":200, "detail":"success"}
496
- """
497
- try:
498
- self.Model.save(fileName)
499
- return {"success":True, "code":200, "detail":"success"}
500
- except Exception as e:
501
- return {"success":False, "code":500, "detail":str(e)}
502
-
503
- def loadModelWithWeightAndCustomObject(self, fileName, customObject):
504
- """
505
- Purpose:
506
- - Load model with weight and custom object
507
-
508
- Parameter:
509
- - fileName: file name
510
- - type: string
511
- - example: "my_model"
512
- - options: "my_model", "gs://bucket/my_model"
513
- - customObject: custom object
514
- - type: dict
515
- - example: {"MyCustomObject":MyCustomObject}
516
-
517
- Return:
518
- - {"success":True, "code":200, "detail":"success"}
519
- """
520
- try:
521
- self.Model = self.tf.keras.models.load_model(fileName, custom_objects=customObject)
522
- return {"success":True, "code":200, "detail":"success"}
523
- except Exception as e:
524
- return {"success":False, "code":500, "detail":str(e)}
525
-
526
import tensorflow as tf
from time import time


class TimeHistory(tf.keras.callbacks.Callback):
    """Keras callback that records wall-clock seconds spent in each epoch
    (collected in self.times, one entry per epoch)."""

    def on_train_begin(self, logs=None):
        # Fresh list per training run. logs=None replaces the old mutable
        # default argument (logs={}), which is shared across calls.
        self.times = []

    def on_epoch_begin(self, epoch, logs=None):
        # Parameter renamed from the misleading "batch": Keras passes the
        # epoch index to the epoch-level hooks.
        self.epoch_time_start = time()

    def on_epoch_end(self, epoch, logs=None):
        self.times.append(time() - self.epoch_time_start)
 
1
+ class ImageMulticlassClassification:
2
+ def __init__(self, imgWidth=300, imgHeight=300, batchSize=32):
3
+ from time import time
4
+ import tensorflow as tf
5
+ import matplotlib.pyplot as plt
6
+ import pathlib
7
+ import datetime
8
+ from sklearn.metrics import roc_curve, auc, roc_auc_score
9
+ import os
10
+ import keras
11
+ import numpy as np
12
+ import pandas as pd
13
+ import tarfile
14
+ import sklearn
15
+
16
+ self.time = time
17
+ self.sklearn = sklearn
18
+ self.tf = tf
19
+ self.plt = plt
20
+ self.pathlib = pathlib
21
+ self.datetime = datetime
22
+ self.roc_curve = roc_curve
23
+ self.roc_auc_score = roc_auc_score
24
+ self.auc = auc
25
+ self.os = os
26
+ self.keras = keras
27
+ self.np = np
28
+ self.AUTOTUNE = tf.data.AUTOTUNE
29
+ self.pd = pd
30
+ self.tarfile = tarfile
31
+
32
+ self.imgWidth = imgWidth
33
+ self.imgHeight = imgHeight
34
+ self.numGPU = len(self.tf.config.list_physical_devices('GPU'))
35
+ if self.numGPU > 0:
36
+ self.batchSize = batchSize * self.numGPU
37
+ else:
38
+ self.batchSize = batchSize
39
+ self.Model = None
40
+ self.time_callback = None
41
+ self.history = None
42
+ self.confusionMatrix = None
43
+ self.validation_label = None
44
+ self.trainDataset = None
45
+ self.validationDataset = None
46
+ self.accuracy = None
47
+ self.recall = None
48
+ self.precision = None
49
+ self.f1Score = None
50
+ self.modelName = ""
51
+
52
+
53
+ def data_MakeDataset(self, datasetUrl=None, datasetPath=None, datasetDirectoryName="Dataset Covid19 Training", ratioValidation=0.2):
54
+ """
55
+ Purpose:
56
+ - Make dataset from parameter
57
+
58
+ Parameter:
59
+ - datasetUrl: url of dataset
60
+ - type: string
61
+ - example: "https://storage.googleapis.com/fdataset/Dataset%20Covid19%20Training.tgz"
62
+ - datasetPath: path of dataset
63
+ - type: string
64
+ - example: "C:/Users/User/Desktop/Dataset Covid19 Training.tgz"
65
+ - datasetDirectoryName: name of dataset directory
66
+ - type: string
67
+ - example: "Dataset Covid19 Training"
68
+ - ratioValidation: ratio of validation data
69
+ - type: float
70
+ - example: 0.2
71
+
72
+ Return:
73
+ - {"success":True, "code":200, "detail":"success"}
74
+ """
75
+ try:
76
+ if datasetUrl is not None:
77
+ dataset_url = datasetUrl
78
+ data_dir = self.tf.keras.utils.get_file(datasetDirectoryName, origin=dataset_url, untar=True)
79
+ data_dir = self.pathlib.Path(data_dir)
80
+ elif datasetPath is not None:
81
+ currentPath = self.os.getcwd()
82
+ if self.os.path.exists(currentPath + "/" + datasetDirectoryName):
83
+ # remove dataset directory with all file inside
84
+ self.os.system("rm -rf " + currentPath + "/" + datasetDirectoryName)
85
+ # extract dataset
86
+ my_tar = self.tarfile.open(datasetPath)
87
+ # check if dataset directory exist then delete it
88
+ my_tar.extractall(currentPath) # specify which folder to extract to
89
+ my_tar.close()
90
+ data_dir = self.pathlib.Path(f'{currentPath}/{datasetDirectoryName}/')
91
+
92
+ image_count = len(list(data_dir.glob('*/*.jpg')))
93
+
94
+ train_ds = self.tf.keras.preprocessing.image_dataset_from_directory(
95
+ data_dir,
96
+ seed=123,
97
+ subset="training",
98
+ validation_split=ratioValidation,
99
+ image_size=(self.imgWidth, self.imgHeight),
100
+ batch_size=self.batchSize)
101
+
102
+ val_ds = self.tf.keras.preprocessing.image_dataset_from_directory(
103
+ data_dir,
104
+ seed=123,
105
+ subset="validation",
106
+ validation_split=ratioValidation,
107
+ image_size=(self.imgWidth, self.imgHeight),
108
+ batch_size=self.batchSize)
109
+
110
+ self.trainDataset = train_ds.cache().shuffle(1000).prefetch(buffer_size=self.AUTOTUNE)
111
+ self.validationDataset = val_ds.cache().prefetch(buffer_size=self.AUTOTUNE)
112
+
113
+ return {"success":True, "code":200, "detail":"success"}
114
+ except Exception as e:
115
+ return {"success":False, "code":500, "detail":str(e)}
116
+
117
+ def data_PreprocessingDataset(self, typeRandomFlip="horizontal_and_vertical", RandomRotation=0.3, RandomZoom=0.2, shuffleTrainDataset=True, augmentTrainDataset=True):
118
+ """
119
+ Purpose:
120
+ - Preprocessing dataset
121
+
122
+ Parameter:
123
+ - typeRandomFlip: type of random flip
124
+ - type: string
125
+ - example: "horizontal_and_vertical"
126
+ - options: "horizontal", "vertical", "horizontal_and_vertical"
127
+ - RandomRotation: random rotation
128
+ - type: float
129
+ - example: 0.3
130
+ - RandomZoom: random zoom
131
+ - type: float
132
+ - example: 0.2
133
+ - shuffleTrainDataset: shuffle train dataset
134
+ - type: bool
135
+ - example: True
136
+ - augmentTrainDataset: augment train dataset
137
+ - type: bool
138
+ - example: True
139
+
140
+ Return:
141
+ - {"success":True, "code":200, "detail":"success"}
142
+ """
143
+ try:
144
+ rescale = self.tf.keras.layers.Rescaling(1.0 / 255, input_shape=(self.imgWidth, self.imgHeight, 3))
145
+
146
+ data_augmentation = self.tf.keras.Sequential(
147
+ [
148
+ self.tf.keras.layers.RandomFlip(typeRandomFlip, input_shape=(self.imgWidth,self.imgHeight,3)),
149
+ self.tf.keras.layers.RandomRotation(RandomRotation),
150
+ self.tf.keras.layers.RandomZoom(RandomZoom),
151
+ ]
152
+ )
153
+
154
+
155
+ def prepare(ds, shuffle=False, augment=False):
156
+ # Rescale dataset
157
+ ds = ds.map(lambda x, y: (rescale(x), y), num_parallel_calls=self.AUTOTUNE)
158
+
159
+ if shuffle:
160
+ ds = ds.shuffle(1024)
161
+
162
+ # Use data augmentation only on the training set
163
+ if augment:
164
+ ds = ds.map(lambda x, y: (data_augmentation(x), y), num_parallel_calls=self.AUTOTUNE,)
165
+
166
+ # Use buffered prefecting
167
+ return ds.prefetch(buffer_size=self.AUTOTUNE)
168
+
169
+ self.trainDataset = prepare(self.trainDataset, shuffle=shuffleTrainDataset, augment=augmentTrainDataset)
170
+ self.validationDataset = prepare(self.validationDataset)
171
+
172
+ return {"success":True, "code":200, "detail":"success"}
173
+ except Exception as e:
174
+ return {"success":False, "code":500, "detail":str(e)}
175
+
176
+ def data_GetLabelFromDataset(self, dataset):
177
+ """
178
+ Purpose:
179
+ - Get label from dataset
180
+
181
+ Parameter:
182
+ - dataset: dataset
183
+ - type: tf.data.Dataset
184
+ - example: trainDataset
185
+
186
+ Return:
187
+ - {"success":True, "code":200, "detail":"success", "label":array([0, 0, 0, 0, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 0, 1, 0, 0, 0, 0, 0, 0,
188
+ 0, 0, 1, 1, 1, 1, 0, 1, 1, 1, 0, 0, 0, 0, 1, 1, 0, 0, 1, 1, 1, 1,
189
+ 1, 0, 0, 1, 1, 0, 1, 0, 0, 0, 1, 1, 0, 0, 1, 1, 0, 1, 1, 0, 0, 0,
190
+ 1, 1, 0, 0, 0, 0, 0, 0], dtype=int32)}
191
+ """
192
+ try:
193
+ label = self.np.concatenate([y for x, y in dataset], axis=0)
194
+ return {"success":True, "code":200, "detail":"success", "label":label}
195
+ except Exception as e:
196
+ return {"success":False, "code":500, "detail":str(e)}
197
+
198
+ def model_make(self, model=None):
199
+ """
200
+ Purpose:
201
+ - Make default model
202
+
203
+ Parameter:
204
+ - model: model
205
+ - type: tf.keras.Model
206
+ - example: model
207
+ - default: None
208
+
209
+ Return:
210
+ - {"success":True, "code":200, "detail":"success", "model":model}
211
+ """
212
+ try:
213
+ if model is None:
214
+ model = self.tf.keras.Sequential()
215
+ base_model = self.tf.keras.applications.DenseNet121(include_top=False, input_shape=(self.imgWidth, self.imgHeight, 3))
216
+ base_model.trainable=True
217
+ model.add(base_model)
218
+ model.add(self.tf.keras.layers.Dropout(0.4))
219
+ model.add(self.tf.keras.layers.Flatten())
220
+ model.add(self.tf.keras.layers.Dense(128,activation='relu'))
221
+ model.add(self.tf.keras.layers.Dropout(0.5))
222
+ model.add(self.tf.keras.layers.Dense(32,activation='relu'))
223
+ model.add(self.tf.keras.layers.Dense(1, activation="sigmoid"))
224
+ self.Model = model
225
+ else:
226
+ self.Model = model
227
+ return {"success":True, "code":200, "detail":"success", "model":self.Model}
228
+ except Exception as e:
229
+ return {"success":False, "code":500, "detail":str(e)}
230
+
231
+ def training_model(self, epochs=10, lossFunction="binary_crossentropy", optimizer="adam", metrics=["accuracy"], device='/GPU:0', modelName=None):
232
+ """
233
+ Purpose:
234
+ - Training model
235
+
236
+ Parameter:
237
+ - model: model
238
+ - type: tf.keras.Model
239
+ - example: model
240
+ - default: True
241
+ - epochs: epochs
242
+ - type: int
243
+ - example: 10
244
+ - lossFunction: loss function
245
+ - type: string
246
+ - example: "binary_crossentropy"
247
+ - options: "binary_crossentropy", "categorical_crossentropy", "sparse_categorical_crossentropy"
248
+ - optimizer: optimizer
249
+ - type: string
250
+ - example: "adam"
251
+ - options: "adam", "adamax", "nadam", "rmsprop", "sgd", tf.keras.optimizers.RMSprop(learning_rate=1e-4)
252
+ - metrics: metrics
253
+ - type: list
254
+ - example: ["accuracy"]
255
+ - device: device
256
+ - type: string
257
+ - example: "/GPU:0"
258
+ - options: "/CPU:0", "/GPU:0"
259
+ - modelName: model name
260
+ - type: string
261
+ - example: "model"
262
+
263
+ Return:
264
+ - {"success":True, "code":200, "detail":"success"}
265
+ """
266
+ try:
267
+ if modelName is not None:
268
+ self.modelName = modelName
269
+
270
+ self.time_callback = TimeHistory()
271
+ self.Model.compile(
272
+ loss=lossFunction,
273
+ optimizer=optimizer,
274
+ metrics=metrics,
275
+ )
276
+
277
+ print(self.Model.summary())
278
+
279
+ with self.tf.device(device):
280
+ self.history = self.Model.fit(
281
+ self.trainDataset, validation_data=self.validationDataset, epochs=epochs, verbose=1, callbacks=[self.time_callback]
282
+ )
283
+ # make excel file report.xlsx and save data in column 1 is number of training loss, column 2 is training accuracy, column 3 is validation loss, column 4 is validation accuracy, column 5 is training time
284
+ dataFrameHistory = self.pd.DataFrame({"training_loss":self.history.history["loss"], "training_accuracy":self.history.history["accuracy"], "validation_loss":self.history.history["val_loss"], "validation_accuracy":self.history.history["val_accuracy"], "training_time":self.time_callback.times})
285
+ dataFrameHistory.to_excel(f"report_{self.modelName}.xlsx")
286
+
287
+ return {"success":True, "code":200, "detail":"success"}
288
+ except Exception as e:
289
+ return {"success":False, "code":500, "detail":str(e)}
290
+
291
+ def training_model_multiGPU(self, epochs=10, lossFunction="binary_crossentropy", optimizer="adam", metrics=["accuracy"], device='/GPU:0', modelName=None):
292
+ """
293
+ Purpose:
294
+ - Training model with multi GPU support, with mirrored strategy
295
+
296
+ Parameter:
297
+ - model: model
298
+ - type: tf.keras.Model
299
+ - example: model
300
+ - default: True
301
+ - epochs: epochs
302
+ - type: int
303
+ - example: 10
304
+ - lossFunction: loss function
305
+ - type: string
306
+ - example: "binary_crossentropy"
307
+ - options: "binary_crossentropy", "categorical_crossentropy", "sparse_categorical_crossentropy"
308
+ - optimizer: optimizer
309
+ - type: string
310
+ - example: "adam"
311
+ - options: "adam", "adamax", "nadam", "rmsprop", "sgd", tf.keras.optimizers.RMSprop(learning_rate=1e-4)
312
+ - metrics: metrics
313
+ - type: list
314
+ - example: ["accuracy"]
315
+ - device: device
316
+ - type: string
317
+ - example: "/GPU:0"
318
+ - options: "/CPU:0", "/GPU:0"
319
+
320
+ Return:
321
+ - {"success":True, "code":200, "detail":"success"}
322
+ """
323
+ try:
324
+ if modelName is not None:
325
+ self.modelName = modelName
326
+
327
+ self.time_callback = TimeHistory()
328
+
329
+ print(self.Model.summary())
330
+ strategy = self.tf.distribute.MirroredStrategy()
331
+ with strategy.scope():
332
+ model = self.Model
333
+ model.compile(loss=lossFunction, optimizer=optimizer, metrics=metrics)
334
+
335
+ self.history = model.fit(self.trainDataset, validation_data=self.validationDataset, epochs=epochs, verbose=1, callbacks=[self.time_callback])
336
+ # make excel file report.xlsx and save data in column 1 is number of training loss, column 2 is training accuracy, column 3 is validation loss, column 4 is validation accuracy, column 5 is training time
337
+ dataFrameHistory = self.pd.DataFrame({"training_loss":self.history.history["loss"], "training_accuracy":self.history.history["accuracy"], "validation_loss":self.history.history["val_loss"], "validation_accuracy":self.history.history["val_accuracy"], "training_time":self.time_callback.times})
338
+ dataFrameHistory.to_excel(f"report_{self.modelName}.xlsx")
339
+
340
+ return {"success":True, "code":200, "detail":"success"}
341
+ except Exception as e:
342
+ return {"success":False, "code":500, "detail":str(e)}
343
+
344
+ def evaluation(self, labelName=["COVID19", "NORMAL"]):
345
+ """
346
+ Purpose:
347
+ - Evaluation model with confusionMatrix, precision, recall, f1Score, accuracy
348
+
349
+ Parameter:
350
+ - labelName: label name
351
+ - type: list
352
+ - example: ["COVID19", "NORMAL"]
353
+
354
+ Return:
355
+ - {"success":True, "code":200, "detail":"success", "confusionMatrix":confusionMatrix, "precision":precision, "recall":recall, "f1Score":f1Score, "accuracy":accuracy}
356
+ """
357
+ try:
358
+ self.Model.evaluate(self.validationDataset)
359
+ prediction_result = self.Model.predict(self.validationDataset)
360
+ prediction_result = self.np.argmax(prediction_result, axis=1)
361
+ self.validation_label = self.np.concatenate([y for x, y in self.validationDataset], axis=0)
362
+ self.confusionMatrix = self.tf.math.confusion_matrix(labels=self.validation_label, predictions=prediction_result).numpy()
363
+ self.accuracy = self.sklearn.metrics.accuracy_score(self.validation_label, prediction_result)
364
+ self.precision = self.sklearn.metrics.precision_score(self.validation_label, prediction_result, average="macro", zero_division=0)
365
+ self.recall = self.sklearn.metrics.recall_score(self.validation_label, prediction_result, average="macro")
366
+ self.f1Score = self.sklearn.metrics.f1_score(self.validation_label, prediction_result, average="macro")
367
+ self.__drawConfusionMatrix(labelName)
368
+ self.__drawROC()
369
+ dataFrameScore = self.pd.DataFrame({"accuracy":[self.accuracy], "recall":[self.recall], "precision":[self.precision], "f1Score":[self.f1Score]})
370
+ dataFrameScore.to_excel(f"reportScore_{self.modelName}.xlsx")
371
+ self.__drawHistoryAccuracy()
372
+ self.__drawHistoryLoss()
373
+ return {"success":True, "code":200, "detail":"success"}
374
+ except Exception as e:
375
+ return {"success":False, "code":500, "detail":str(e)}
376
+
377
+
378
+ def __drawConfusionMatrix(self, labelName=["COVID19", "NORMAL"]):
379
+ # draw confusion matrix with numeric value on the center and library matplotlib with label of validation dataset like this sample https://scikit-learn.org/stable/_images/sklearn-metrics-plot_confusion_matrix-1.png
380
+ labelName.sort()
381
+ fig, ax = self.plt.subplots()
382
+ im = ax.imshow(self.confusionMatrix)
383
+ ax.figure.colorbar(im, ax=ax)
384
+ ax.set(xticks=self.np.arange(self.confusionMatrix.shape[1]), yticks=self.np.arange(self.confusionMatrix.shape[0]), xticklabels=labelName, yticklabels=labelName, title="Confusion Matrix", ylabel="True label", xlabel="Predicted label")
385
+ ax.set_xlabel("Predicted")
386
+ ax.set_ylabel("True")
387
+ self.plt.setp(ax.get_xticklabels(), rotation=45, ha="right", rotation_mode="anchor")
388
+ for i in range(self.confusionMatrix.shape[0]):
389
+ for j in range(self.confusionMatrix.shape[1]):
390
+ ax.text(j, i, self.confusionMatrix[i, j], ha="center", va="center", color="w")
391
+ self.plt.tight_layout()
392
+ self.plt.savefig(f"confusionMatrix_{self.modelName}.png")
393
+ self.plt.show()
394
+ self.plt.close()
395
+ # save confusion matrix to excel file
396
+ dataFrameConfusionMatrix = self.pd.DataFrame(self.confusionMatrix)
397
+ dataFrameConfusionMatrix.to_excel(f"confusionMatrix_{self.modelName}.xlsx")
398
+
399
    def __drawROC(self):
        """
        Purpose:
            - Draw ROC curve like this sample https://scikit-learn.org/stable/_images/sphx_glr_plot_roc_001.png for multi class

        Notes:
            - NOTE(review): only the score of class index 1 is used
              (predictResult[:, 1] with pos_label=1), so this is effectively a
              binary (one-vs-rest for class 1) ROC despite the "multi class"
              wording above — confirm the intent for more than two classes.
        """
        predictResult = self.Model.predict(self.validationDataset)
        fpr, tpr, thresholds = self.sklearn.metrics.roc_curve(self.validation_label, predictResult[:, 1], pos_label=1)
        # NOTE(review): this replaces self.auc (bound to sklearn's auc *function*
        # in __init__) with the computed scalar AUC value.
        self.auc = self.sklearn.metrics.auc(fpr, tpr)
        fig, ax = self.plt.subplots()
        ax.plot(fpr, tpr, label="ROC curve (area = %0.2f)" % self.auc)
        # Diagonal reference line for a random classifier.
        ax.plot([0, 1], [0, 1], "k--")
        ax.set_xlim([0.0, 1.0])
        ax.set_ylim([0.0, 1.05])
        ax.set_xlabel("False Positive Rate")
        ax.set_ylabel("True Positive Rate")
        ax.set_title("Receiver operating characteristic")
        ax.legend(loc="best")
        self.plt.savefig(f"ROC_{self.modelName}.png")
        self.plt.show()
        self.plt.close()
        # save ROC curve to excel file
        # (the scalar "auc" column is broadcast by pandas to the length of fpr/tpr)
        dataFrameROC = self.pd.DataFrame({"fpr":fpr, "tpr":tpr, "thresholds":thresholds, "auc":self.auc})
        dataFrameROC.to_excel(f"ROC_{self.modelName}.xlsx")
422
+
423
+ def __drawHistoryAccuracy(self):
424
+ """
425
+ Purpose:
426
+ - Draw history accuracy with training and validation dataset
427
+ """
428
+ fig, ax = self.plt.subplots()
429
+ ax.plot(self.history.history["accuracy"], label="training dataset")
430
+ ax.plot(self.history.history["val_accuracy"], label="validation dataset")
431
+ ax.set_xlabel("Epoch")
432
+ ax.set_ylabel("Accuracy")
433
+ ax.set_title("Accuracy")
434
+ ax.legend(loc="best")
435
+ self.plt.savefig(f"historyAccuracy_{self.modelName}.png")
436
+ self.plt.show()
437
+ self.plt.close()
438
+
439
+ def __drawHistoryLoss(self):
440
+ """
441
+ Purpose:
442
+ - Draw history loss with training and validation dataset
443
+ """
444
+ fig, ax = self.plt.subplots()
445
+ ax.plot(self.history.history["loss"], label="training dataset")
446
+ ax.plot(self.history.history["val_loss"], label="validation dataset")
447
+ ax.set_xlabel("Epoch")
448
+ ax.set_ylabel("Loss")
449
+ ax.set_title("Loss")
450
+ ax.legend(loc="best")
451
+ self.plt.savefig(f"historyLoss_{self.modelName}.png")
452
+ self.plt.show()
453
+ self.plt.close()
454
+
455
+ def import_data_Dataset(self, trainDataset, validationDataset):
456
+ """
457
+ Purpose:
458
+ - Import dataset
459
+
460
+ Parameter:
461
+ - trainDataset: dataset
462
+ - type: tf.data.Dataset
463
+ - example: trainDataset
464
+ - validationDataset: dataset
465
+ - type: tf.data.Dataset
466
+ - example: validationDataset
467
+
468
+ Return:
469
+ - {"success":True, "code":200, "detail":"success"}
470
+ """
471
+ try:
472
+ self.trainDataset = trainDataset
473
+ self.validationDataset = validationDataset
474
+ return {"success":True, "code":200, "detail":"success"}
475
+ except Exception as e:
476
+ return {"success":False, "code":500, "detail":str(e)}
477
+
478
+ def saveModelWithWeight(self, fileName):
479
+ """
480
+ Purpose:
481
+ - Save model with weight
482
+
483
+ Parameter:
484
+ - fileName: file name
485
+ - type: string
486
+ - example: "my_model"
487
+ - options: "my_model", "gs://bucket/my_model"
488
+
489
+ Return:
490
+ - {"success":True, "code":200, "detail":"success"}
491
+ """
492
+ try:
493
+ self.Model.save(fileName)
494
+ return {"success":True, "code":200, "detail":"success"}
495
+ except Exception as e:
496
+ return {"success":False, "code":500, "detail":str(e)}
497
+
498
+ def loadModelWithWeightAndCustomObject(self, fileName, customObject):
499
+ """
500
+ Purpose:
501
+ - Load model with weight and custom object
502
+
503
+ Parameter:
504
+ - fileName: file name
505
+ - type: string
506
+ - example: "my_model"
507
+ - options: "my_model", "gs://bucket/my_model"
508
+ - customObject: custom object
509
+ - type: dict
510
+ - example: {"MyCustomObject":MyCustomObject}
511
+
512
+ Return:
513
+ - {"success":True, "code":200, "detail":"success"}
514
+ """
515
+ try:
516
+ self.Model = self.tf.keras.models.load_model(fileName, custom_objects=customObject)
517
+ return {"success":True, "code":200, "detail":"success"}
518
+ except Exception as e:
519
+ return {"success":False, "code":500, "detail":str(e)}
520
+
521
+ import tensorflow as tf
522
+ from time import time
523
class TimeHistory(tf.keras.callbacks.Callback):
    """Keras callback that records the wall-clock duration of each epoch.

    After training, ``self.times`` holds one float (seconds) per completed epoch;
    the training methods above export it into the Excel report.
    """

    def on_train_begin(self, logs=None):
        # Reset the record at the start of every training run.
        # (logs defaults to None instead of a shared mutable {} default.)
        self.times = []

    def on_epoch_begin(self, batch, logs=None):
        # NOTE(review): Keras passes the epoch index as the first argument here;
        # the parameter keeps its original name "batch" for interface compatibility.
        self.epoch_time_start = time()

    def on_epoch_end(self, batch, logs=None):
        self.times.append(time() - self.epoch_time_start)