codinguru999 committed on
Commit
f07f08e
·
1 Parent(s): 3e034da

App file created

Browse files
Files changed (1) hide show
  1. app.py +179 -0
app.py ADDED
@@ -0,0 +1,179 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # -*- coding: utf-8 -*-
2
+ """lung cancerdetection.ipynb
3
+
4
+ Automatically generated by Colaboratory.
5
+
6
+ Original file is located at
7
+ https://colab.research.google.com/drive/1f7VybSnYLPbUVLRLMNQboxQkCYaBCXMs
8
+ """
9
+
10
# --- Environment setup ---
# Originally written as Colab "!" shell magics, which are a SyntaxError in a
# plain .py file; rewritten as explicit subprocess calls so the script parses
# and runs outside a notebook.
import subprocess
import sys
from pathlib import Path


def _run(cmd):
    """Run a command (list form, no shell), echoing it; raise on failure."""
    print("+", " ".join(cmd))
    subprocess.run(cmd, check=True)


_run([sys.executable, "-m", "pip", "install", "-q", "kaggle"])
_run([sys.executable, "-m", "pip", "install", "gradio"])

# Upload kaggle.json credentials (Colab-only helper; on any other host, place
# kaggle.json next to this script before running).
from google.colab import files
files.upload()

# Install the Kaggle API token where the CLI expects it, with owner-only perms.
kaggle_dir = Path.home() / ".kaggle"
kaggle_dir.mkdir(exist_ok=True)
_run(["cp", "kaggle.json", str(kaggle_dir)])
(kaggle_dir / "kaggle.json").chmod(0o600)

_run(["kaggle", "datasets", "list"])
_run(["kaggle", "datasets", "download", "-d",
      "andrewmvd/lung-and-colon-cancer-histopathological-images"])
_run(["unzip", "/content/lung-and-colon-cancer-histopathological-images.zip"])
28
+
29
# Kaggle-template boilerplate: the kaggle/python Docker image ships with many
# analytics libraries pre-installed; load the usual suspects.

import numpy as np   # linear algebra
import pandas as pd  # data processing, CSV file I/O (e.g. pd.read_csv)

import os

# Print the path of every file found under the data directory.
# NOTE(review): '/kaggle/content' mixes the Kaggle ('/kaggle/input') and Colab
# ('/content') path conventions — on Colab this walk finds nothing; confirm
# which host this is meant to run on.
for _dir_path, _, _file_names in os.walk('/kaggle/content'):
    for _file_name in _file_names:
        print(os.path.join(_dir_path, _file_name))

# Up to 20GB may be written to the current directory (/kaggle/working/) and is
# preserved as output when a version is created via "Save & Run All".
# Temporary files can go to /kaggle/temp/ but do not survive the session.
46
+
47
+ # importing libraries
48
+
49
+ import tensorflow as tf
50
+ from tensorflow.keras.layers import Input, Lambda, Dense, Flatten
51
+ from tensorflow.keras.models import Model
52
+ from tensorflow.keras.applications.resnet50 import ResNet50
53
+ from tensorflow.keras.applications.resnet50 import preprocess_input
54
+ from tensorflow.keras.preprocessing import image
55
+ from tensorflow.keras.preprocessing.image import ImageDataGenerator,load_img
56
+ from tensorflow.keras.models import Sequential
57
+ import numpy as np
58
+ from glob import glob
59
+ import matplotlib.pyplot as plt
60
+
61
# Root of the lung histopathology images (three class subfolders:
# lung_aca, lung_n, lung_scc).
image_set = "../content/lung_colon_image_set/lung_image_sets"

# ResNet50's expected input resolution.
SIZE_X = SIZE_Y = 224

# Hold out 20% of the images for validation.
# NOTE(review): no rescaling / ResNet50 preprocess_input is applied, so the
# network sees raw 0-255 pixels — confirm this is intentional.
datagen = tf.keras.preprocessing.image.ImageDataGenerator(validation_split=0.2)

# Keyword arguments shared by both generators.
_flow_kwargs = dict(
    class_mode="categorical",
    target_size=(SIZE_X, SIZE_Y),
    color_mode="rgb",
    batch_size=128,
    seed=42,
)

# Training generator. Fix: shuffle the training data — the original passed
# shuffle=False, which feeds entire classes in directory order and hurts
# convergence.
train_set = datagen.flow_from_directory(image_set,
                                        subset='training',
                                        shuffle=True,
                                        **_flow_kwargs)

# Validation generator. shuffle must stay False here so validate_set.classes
# remains aligned with model.predict() row order for the confusion matrix
# computed later in the script.
validate_set = datagen.flow_from_directory(image_set,
                                           subset='validation',
                                           shuffle=False,
                                           **_flow_kwargs)
84
+
85
# Mount Google Drive (Colab-only; nothing below reads from it — candidate for
# removal, kept for parity with the original notebook).
from google.colab import drive
drive.mount('/content/drive')

# ResNet50 input resolution (height, width).
IMAGE_SIZE = [224, 224]

# ImageNet-pretrained ResNet50 backbone without its classification head.
resnet = ResNet50(input_shape=IMAGE_SIZE + [3], weights='imagenet', include_top=False)

# Freeze the backbone: only the new dense head is trained.
for layer in resnet.layers:
    layer.trainable = False

# New classification head: flatten -> 256 -> 128 -> 3-way softmax
# (one output per class: lung_aca / lung_n / lung_scc).
flatten = Flatten()(resnet.output)
dense = Dense(256, activation='relu')(flatten)
dense = Dense(128, activation='relu')(dense)
prediction = Dense(3, activation='softmax')(dense)

# Full model: frozen ResNet50 features + trainable dense head.
model = Model(inputs=resnet.input, outputs=prediction)

model.summary()

model.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy'])

# Train the head for 5 epochs.
# Fix: Model.fit_generator was deprecated in TF 2.1 and removed in TF 2.6+;
# Model.fit accepts generators directly with identical semantics.
history = model.fit(train_set, validation_data=validate_set, epochs=5, verbose=1)
110
+
111
# --- Training curves ---

# Per-epoch loss, training vs. validation.
for _key, _label in (('loss', 'train_loss'), ('val_loss', 'testing_loss')):
    plt.plot(history.history[_key], label=_label)
plt.title('loss')
plt.legend()
plt.show()

# Per-epoch accuracy, training vs. validation.
for _key, _label in (('accuracy', 'training_accuracy'),
                     ('val_accuracy', 'validation accuracy')):
    plt.plot(history.history[_key], label=_label)
plt.title('Accuracy')
plt.legend()
plt.show()
125
+
126
# --- Evaluation: confusion matrix and per-class report on the validation set ---

from sklearn.metrics import classification_report
from sklearn.metrics import confusion_matrix
from sklearn.metrics import f1_score

# Class probabilities for every validation image. The validation generator is
# unshuffled, so prediction rows line up with validate_set.classes.
Y_pred = model.predict(validate_set)
y_pred = np.argmax(Y_pred, axis=1)

print('Confusion Matrix')
# Fix: the original assigned the result back to the name `confusion_matrix`,
# shadowing the sklearn function it had just imported; store it in `cm`.
cm = confusion_matrix(validate_set.classes, y_pred)
print(cm)

print('Classification Report')
# aca = adenocarcinoma, n = normal tissue, scc = squamous cell carcinoma.
target_names = ['aca','n', 'scc']
print(classification_report(validate_set.classes, y_pred, target_names=target_names))

# Overall loss / accuracy on the validation split.
result = model.evaluate(validate_set, batch_size=128)
print("test_loss, test accuracy", result)
142
+
143
import pickle

# Persist the trained model.
# Fix: the original wrote `with open(...) as files:`, shadowing the
# google.colab `files` module imported earlier in the script.
# NOTE(review): pickling a Keras model is fragile across TF versions —
# model.save('model.keras') is the supported route; confirm before relying
# on this artifact.
with open('model_pkl', 'wb') as model_file:
    pickle.dump(model, model_file)

# Smoke test: load one known adenocarcinoma slide and classify it.
img = tf.keras.utils.load_img(
    '/content/lung_colon_image_set/lung_image_sets/lung_aca/lungaca1.jpeg',
    target_size=(224, 224))
img_array = tf.keras.utils.img_to_array(img)
img_array = tf.expand_dims(img_array, 0)  # add batch dimension -> (1, 224, 224, 3)

# Reload the pickled model and predict.
with open('model_pkl', 'rb') as f:
    lr = pickle.load(f)
predi = lr.predict(img_array)
print(predi)
image_output_class = target_names[np.argmax(predi)]

print("The predicted class is", image_output_class)
160
+
161
import gradio as gd
from PIL import Image

# Class labels in the order the softmax head was trained on.
target_names = ['aca','n', 'scc']

# Load the pickled model once at startup instead of re-reading the pickle on
# every request (the original reloaded it inside the handler).
with open('/content/model_pkl', 'rb') as _model_file:
    _model = pickle.load(_model_file)


def greet_user(name):
    """Classify an uploaded lung-histopathology image.

    Parameters
    ----------
    name : numpy.ndarray
        Pixels handed over by the Gradio 'image' input — presumably an
        (H, W, 3) uint8-compatible array; TODO confirm against the Gradio
        version in use.

    Returns
    -------
    str
        Predicted class label: 'aca', 'n' or 'scc'.
    """
    # Fix: removed the original dead `image = gd.inputs.Image()` line — the
    # result was never used and `gd.inputs` was removed in Gradio 3+, so it
    # crashed every request on current Gradio.
    pil_image = Image.fromarray(name.astype('uint8'), 'RGB')
    pil_image_resized = pil_image.resize((224, 224))
    img_array = tf.keras.utils.img_to_array(pil_image_resized)
    img_array = tf.expand_dims(img_array, 0)  # batch dimension
    predi = _model.predict(img_array)
    return target_names[np.argmax(predi)]


app = gd.Interface(fn=greet_user, inputs='image', outputs='text')
app.launch(share=True, debug=True)