hightowerr committed on
Commit
95c0cea
·
1 Parent(s): 20bc1ec

trainer classifier

Browse files
Files changed (4) hide show
  1. .ipynb_checkpoints/app-checkpoint.ipynb +294 -0
  2. app.ipynb +303 -0
  3. app.py +28 -4
  4. export.pkl +3 -0
.ipynb_checkpoints/app-checkpoint.ipynb ADDED
@@ -0,0 +1,294 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "cells": [
3
+ {
4
+ "cell_type": "code",
5
+ "execution_count": 47,
6
+ "id": "88aaacc6-8379-4b6b-b3b4-eea4d9acaea0",
7
+ "metadata": {},
8
+ "outputs": [],
9
+ "source": [
10
+ "#|default_exp app"
11
+ ]
12
+ },
13
+ {
14
+ "cell_type": "markdown",
15
+ "id": "0d06de33-59f0-4957-87a2-6154102289a6",
16
+ "metadata": {},
17
+ "source": [
18
+ "#### Sneakers"
19
+ ]
20
+ },
21
+ {
22
+ "cell_type": "code",
23
+ "execution_count": 65,
24
+ "id": "844c3071-836e-46c5-b30e-2b8d96a8e369",
25
+ "metadata": {},
26
+ "outputs": [],
27
+ "source": [
28
+ "#|export\n",
29
+ "from fastai.vision.all import *\n",
30
+ "import gradio as gr\n",
31
+ "from fastai.vision.widgets import *"
32
+ ]
33
+ },
34
+ {
35
+ "cell_type": "code",
36
+ "execution_count": 66,
37
+ "id": "ded0d929-67f6-4a69-b223-dee3ad189139",
38
+ "metadata": {},
39
+ "outputs": [],
40
+ "source": [
41
+ "# btn_upload = SimpleNamespace(data = ['images/IMG_0115.jpg'])"
42
+ ]
43
+ },
44
+ {
45
+ "cell_type": "code",
46
+ "execution_count": 67,
47
+ "id": "6b7aeaaa-8260-45fa-9178-e064b0132c36",
48
+ "metadata": {},
49
+ "outputs": [],
50
+ "source": [
51
+ "# img = PILImage.create(btn_upload.data[-1])\n",
52
+ "# # img.thumbnail((192,192))\n",
53
+ "# # img"
54
+ ]
55
+ },
56
+ {
57
+ "cell_type": "code",
58
+ "execution_count": 68,
59
+ "id": "a0dfbe70-b826-4903-95cf-ae59c498a3b6",
60
+ "metadata": {},
61
+ "outputs": [],
62
+ "source": [
63
+ "# out_pl = widgets.Output()\n",
64
+ "# out_pl.clear_output()\n",
65
+ "# with out_pl: display(img.to_thumb(128,128))\n",
66
+ "# out_pl"
67
+ ]
68
+ },
69
+ {
70
+ "cell_type": "code",
71
+ "execution_count": 69,
72
+ "id": "a5aaeeb6-6a93-4de8-be39-36c4b5d90ee4",
73
+ "metadata": {},
74
+ "outputs": [],
75
+ "source": [
76
+ "# path"
77
+ ]
78
+ },
79
+ {
80
+ "cell_type": "code",
81
+ "execution_count": 70,
82
+ "id": "7fcc53f8-0326-49d0-a110-1140148d6301",
83
+ "metadata": {},
84
+ "outputs": [],
85
+ "source": [
86
+ "# learn_inf.predict(img)"
87
+ ]
88
+ },
89
+ {
90
+ "cell_type": "code",
91
+ "execution_count": 71,
92
+ "id": "6173ae0f-fa63-4a94-b9f6-e0f9534ff8d9",
93
+ "metadata": {},
94
+ "outputs": [],
95
+ "source": [
96
+ "# pred,pred_idx,probs = learn_inf.predict(img)"
97
+ ]
98
+ },
99
+ {
100
+ "cell_type": "code",
101
+ "execution_count": 72,
102
+ "id": "490af644-cb3c-4f8c-9e12-1a7cf9c18fc0",
103
+ "metadata": {},
104
+ "outputs": [],
105
+ "source": [
106
+ "# #hide_output\n",
107
+ "# lbl_pred = widgets.Label()\n",
108
+ "# lbl_pred.value = f'Prediction: {pred}; Probability: {probs[pred_idx]:.04f}'\n",
109
+ "# lbl_pred"
110
+ ]
111
+ },
112
+ {
113
+ "cell_type": "code",
114
+ "execution_count": 73,
115
+ "id": "e3618d64-5db3-4659-b30f-f917939f509a",
116
+ "metadata": {},
117
+ "outputs": [],
118
+ "source": [
119
+ "# #Putting back btn_upload to a widget for next cell\n",
120
+ "# btn_upload = widgets.FileUpload()"
121
+ ]
122
+ },
123
+ {
124
+ "cell_type": "code",
125
+ "execution_count": 74,
126
+ "id": "b2475bb3-b820-4a9d-b3b7-488e9e2971bc",
127
+ "metadata": {},
128
+ "outputs": [],
129
+ "source": [
130
+ "# VBox([widgets.Label('Select your sneakers!'), \n",
131
+ "# btn_upload, btn_run, out_pl, lbl_pred])"
132
+ ]
133
+ },
134
+ {
135
+ "cell_type": "code",
136
+ "execution_count": 75,
137
+ "id": "1cdad192-403a-44d4-b006-661afebe18da",
138
+ "metadata": {},
139
+ "outputs": [],
140
+ "source": [
141
+ "#|export\n",
142
+ "path = Path('../fastai')\n",
143
+ "learn_inf = load_learner(path/'export.pkl')"
144
+ ]
145
+ },
146
+ {
147
+ "cell_type": "code",
148
+ "execution_count": 76,
149
+ "id": "c72e52ad-30b5-4c06-83d4-35095e43d419",
150
+ "metadata": {},
151
+ "outputs": [
152
+ {
153
+ "data": {
154
+ "application/vnd.jupyter.widget-view+json": {
155
+ "model_id": "7ec47a7f52a449a398a190e295f76328",
156
+ "version_major": 2,
157
+ "version_minor": 0
158
+ },
159
+ "text/plain": [
160
+ "Button(description='Classify', style=ButtonStyle())"
161
+ ]
162
+ },
163
+ "execution_count": 76,
164
+ "metadata": {},
165
+ "output_type": "execute_result"
166
+ }
167
+ ],
168
+ "source": [
169
+ "#hide_output\n",
170
+ "btn_run = widgets.Button(description='Classify')\n",
171
+ "btn_run"
172
+ ]
173
+ },
174
+ {
175
+ "cell_type": "code",
176
+ "execution_count": 77,
177
+ "id": "936e0aa5-4344-4b86-9e7d-b21adbd6b5ab",
178
+ "metadata": {},
179
+ "outputs": [],
180
+ "source": [
181
+ "#|export\n",
182
+ "def on_click_classify(img):\n",
183
+ " # img = PILImage.create(btn_upload.data[-1])\n",
184
+ " out_pl = widgets.Output()\n",
185
+ " out_pl.clear_output()\n",
186
+ " with out_pl: display(img.to_thumb(128,128))\n",
187
+ " pred,pred_idx,probs = learn_inf.predict(img)\n",
188
+ " # lbl_pred.value = f'Prediction: {pred}; Probability: {probs[pred_idx]:.04f}'\n",
189
+ " return f'Prediction: {pred}; Probability: {probs[pred_idx]:.04f}'"
190
+ ]
191
+ },
192
+ {
193
+ "cell_type": "code",
194
+ "execution_count": 78,
195
+ "id": "3bb8c7a0-3fb4-4fbb-9361-c62095a541a8",
196
+ "metadata": {},
197
+ "outputs": [],
198
+ "source": [
199
+ "# btn_run.on_click(on_click_classify)"
200
+ ]
201
+ },
202
+ {
203
+ "cell_type": "code",
204
+ "execution_count": 79,
205
+ "id": "a6e2a398-f6c8-4444-b0f2-e538a1b36da2",
206
+ "metadata": {},
207
+ "outputs": [
208
+ {
209
+ "name": "stdout",
210
+ "output_type": "stream",
211
+ "text": [
212
+ "Running on local URL: http://127.0.0.1:7867\n",
213
+ "\n",
214
+ "To create a public link, set `share=True` in `launch()`.\n"
215
+ ]
216
+ },
217
+ {
218
+ "data": {
219
+ "text/plain": []
220
+ },
221
+ "execution_count": 79,
222
+ "metadata": {},
223
+ "output_type": "execute_result"
224
+ }
225
+ ],
226
+ "source": [
227
+ "#|export\n",
228
+ "image = gr.Image()\n",
229
+ "label = gr.Label()\n",
230
+ "examples = ['images/Adi_trainers.jpg', 'images/Nike_trainers.jpg', 'images/Puma_trainers.jpg', 'images/Adidas_trainers.jpg']\n",
231
+ "\n",
232
+ "intf = gr.Interface(fn=on_click_classify, inputs=image, outputs=label, examples=examples)\n",
233
+ "intf.launch(inline=False)"
234
+ ]
235
+ },
236
+ {
237
+ "cell_type": "markdown",
238
+ "id": "16e21449-1821-45a7-829c-acfd92badec1",
239
+ "metadata": {},
240
+ "source": [
241
+ "### Export"
242
+ ]
243
+ },
244
+ {
245
+ "cell_type": "code",
246
+ "execution_count": 80,
247
+ "id": "f5c57105-41fe-4d79-a2b0-d52ed75ea6cb",
248
+ "metadata": {},
249
+ "outputs": [
250
+ {
251
+ "name": "stdout",
252
+ "output_type": "stream",
253
+ "text": [
254
+ "Export successful\n"
255
+ ]
256
+ }
257
+ ],
258
+ "source": [
259
+ "import nbdev\n",
260
+ "nbdev.export.nb_export('app.ipynb', './')\n",
261
+ "print('Export successful')"
262
+ ]
263
+ },
264
+ {
265
+ "cell_type": "code",
266
+ "execution_count": null,
267
+ "id": "cb08b3c4-14a5-4e6b-a573-244954914d50",
268
+ "metadata": {},
269
+ "outputs": [],
270
+ "source": []
271
+ }
272
+ ],
273
+ "metadata": {
274
+ "kernelspec": {
275
+ "display_name": "Python 3 (ipykernel)",
276
+ "language": "python",
277
+ "name": "python3"
278
+ },
279
+ "language_info": {
280
+ "codemirror_mode": {
281
+ "name": "ipython",
282
+ "version": 3
283
+ },
284
+ "file_extension": ".py",
285
+ "mimetype": "text/x-python",
286
+ "name": "python",
287
+ "nbconvert_exporter": "python",
288
+ "pygments_lexer": "ipython3",
289
+ "version": "3.10.12"
290
+ }
291
+ },
292
+ "nbformat": 4,
293
+ "nbformat_minor": 5
294
+ }
app.ipynb ADDED
@@ -0,0 +1,303 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "cells": [
3
+ {
4
+ "cell_type": "code",
5
+ "execution_count": 1,
6
+ "id": "88aaacc6-8379-4b6b-b3b4-eea4d9acaea0",
7
+ "metadata": {},
8
+ "outputs": [],
9
+ "source": [
10
+ "#|default_exp app"
11
+ ]
12
+ },
13
+ {
14
+ "cell_type": "markdown",
15
+ "id": "0d06de33-59f0-4957-87a2-6154102289a6",
16
+ "metadata": {},
17
+ "source": [
18
+ "#### Sneakers"
19
+ ]
20
+ },
21
+ {
22
+ "cell_type": "code",
23
+ "execution_count": 2,
24
+ "id": "844c3071-836e-46c5-b30e-2b8d96a8e369",
25
+ "metadata": {},
26
+ "outputs": [
27
+ {
28
+ "name": "stderr",
29
+ "output_type": "stream",
30
+ "text": [
31
+ "/home/yunix/.local/lib/python3.10/site-packages/matplotlib/projections/__init__.py:63: UserWarning: Unable to import Axes3D. This may be due to multiple versions of Matplotlib being installed (e.g. as a system package and as a pip package). As a result, the 3D projection is not available.\n",
32
+ " warnings.warn(\"Unable to import Axes3D. This may be due to multiple versions of \"\n"
33
+ ]
34
+ }
35
+ ],
36
+ "source": [
37
+ "#|export\n",
38
+ "from fastai.vision.all import *\n",
39
+ "import gradio as gr\n",
40
+ "from fastai.vision.widgets import *"
41
+ ]
42
+ },
43
+ {
44
+ "cell_type": "code",
45
+ "execution_count": 66,
46
+ "id": "ded0d929-67f6-4a69-b223-dee3ad189139",
47
+ "metadata": {},
48
+ "outputs": [],
49
+ "source": [
50
+ "# btn_upload = SimpleNamespace(data = ['images/IMG_0115.jpg'])"
51
+ ]
52
+ },
53
+ {
54
+ "cell_type": "code",
55
+ "execution_count": 67,
56
+ "id": "6b7aeaaa-8260-45fa-9178-e064b0132c36",
57
+ "metadata": {},
58
+ "outputs": [],
59
+ "source": [
60
+ "# img = PILImage.create(btn_upload.data[-1])\n",
61
+ "# # img.thumbnail((192,192))\n",
62
+ "# # img"
63
+ ]
64
+ },
65
+ {
66
+ "cell_type": "code",
67
+ "execution_count": 68,
68
+ "id": "a0dfbe70-b826-4903-95cf-ae59c498a3b6",
69
+ "metadata": {},
70
+ "outputs": [],
71
+ "source": [
72
+ "# out_pl = widgets.Output()\n",
73
+ "# out_pl.clear_output()\n",
74
+ "# with out_pl: display(img.to_thumb(128,128))\n",
75
+ "# out_pl"
76
+ ]
77
+ },
78
+ {
79
+ "cell_type": "code",
80
+ "execution_count": 69,
81
+ "id": "a5aaeeb6-6a93-4de8-be39-36c4b5d90ee4",
82
+ "metadata": {},
83
+ "outputs": [],
84
+ "source": [
85
+ "# path"
86
+ ]
87
+ },
88
+ {
89
+ "cell_type": "code",
90
+ "execution_count": 70,
91
+ "id": "7fcc53f8-0326-49d0-a110-1140148d6301",
92
+ "metadata": {},
93
+ "outputs": [],
94
+ "source": [
95
+ "# learn_inf.predict(img)"
96
+ ]
97
+ },
98
+ {
99
+ "cell_type": "code",
100
+ "execution_count": 71,
101
+ "id": "6173ae0f-fa63-4a94-b9f6-e0f9534ff8d9",
102
+ "metadata": {},
103
+ "outputs": [],
104
+ "source": [
105
+ "# pred,pred_idx,probs = learn_inf.predict(img)"
106
+ ]
107
+ },
108
+ {
109
+ "cell_type": "code",
110
+ "execution_count": 72,
111
+ "id": "490af644-cb3c-4f8c-9e12-1a7cf9c18fc0",
112
+ "metadata": {},
113
+ "outputs": [],
114
+ "source": [
115
+ "# #hide_output\n",
116
+ "# lbl_pred = widgets.Label()\n",
117
+ "# lbl_pred.value = f'Prediction: {pred}; Probability: {probs[pred_idx]:.04f}'\n",
118
+ "# lbl_pred"
119
+ ]
120
+ },
121
+ {
122
+ "cell_type": "code",
123
+ "execution_count": 73,
124
+ "id": "e3618d64-5db3-4659-b30f-f917939f509a",
125
+ "metadata": {},
126
+ "outputs": [],
127
+ "source": [
128
+ "# #Putting back btn_upload to a widget for next cell\n",
129
+ "# btn_upload = widgets.FileUpload()"
130
+ ]
131
+ },
132
+ {
133
+ "cell_type": "code",
134
+ "execution_count": 74,
135
+ "id": "b2475bb3-b820-4a9d-b3b7-488e9e2971bc",
136
+ "metadata": {},
137
+ "outputs": [],
138
+ "source": [
139
+ "# VBox([widgets.Label('Select your sneakers!'), \n",
140
+ "# btn_upload, btn_run, out_pl, lbl_pred])"
141
+ ]
142
+ },
143
+ {
144
+ "cell_type": "code",
145
+ "execution_count": 5,
146
+ "id": "1cdad192-403a-44d4-b006-661afebe18da",
147
+ "metadata": {},
148
+ "outputs": [],
149
+ "source": [
150
+ "#|export\n",
151
+ "path = Path('.')\n",
152
+ "learn_inf = load_learner(path/'export.pkl')"
153
+ ]
154
+ },
155
+ {
156
+ "cell_type": "code",
157
+ "execution_count": 6,
158
+ "id": "c72e52ad-30b5-4c06-83d4-35095e43d419",
159
+ "metadata": {},
160
+ "outputs": [
161
+ {
162
+ "data": {
163
+ "application/vnd.jupyter.widget-view+json": {
164
+ "model_id": "ae40d4f5ffb846c8875f56eeb6c9249c",
165
+ "version_major": 2,
166
+ "version_minor": 0
167
+ },
168
+ "text/plain": [
169
+ "Button(description='Classify', style=ButtonStyle())"
170
+ ]
171
+ },
172
+ "execution_count": 6,
173
+ "metadata": {},
174
+ "output_type": "execute_result"
175
+ }
176
+ ],
177
+ "source": [
178
+ "#hide_output\n",
179
+ "btn_run = widgets.Button(description='Classify')\n",
180
+ "btn_run"
181
+ ]
182
+ },
183
+ {
184
+ "cell_type": "code",
185
+ "execution_count": 7,
186
+ "id": "936e0aa5-4344-4b86-9e7d-b21adbd6b5ab",
187
+ "metadata": {},
188
+ "outputs": [],
189
+ "source": [
190
+ "#|export\n",
191
+ "def on_click_classify(img):\n",
192
+ " # img = PILImage.create(btn_upload.data[-1])\n",
193
+ " out_pl = widgets.Output()\n",
194
+ " out_pl.clear_output()\n",
195
+ " with out_pl: display(img.to_thumb(128,128))\n",
196
+ " pred,pred_idx,probs = learn_inf.predict(img)\n",
197
+ " # lbl_pred.value = f'Prediction: {pred}; Probability: {probs[pred_idx]:.04f}'\n",
198
+ " return f'Prediction: {pred}; Probability: {probs[pred_idx]:.04f}'"
199
+ ]
200
+ },
201
+ {
202
+ "cell_type": "code",
203
+ "execution_count": 8,
204
+ "id": "3bb8c7a0-3fb4-4fbb-9361-c62095a541a8",
205
+ "metadata": {},
206
+ "outputs": [],
207
+ "source": [
208
+ "# btn_run.on_click(on_click_classify)"
209
+ ]
210
+ },
211
+ {
212
+ "cell_type": "code",
213
+ "execution_count": 12,
214
+ "id": "a6e2a398-f6c8-4444-b0f2-e538a1b36da2",
215
+ "metadata": {},
216
+ "outputs": [
217
+ {
218
+ "name": "stdout",
219
+ "output_type": "stream",
220
+ "text": [
221
+ "Running on local URL: http://127.0.0.1:7863\n",
222
+ "\n",
223
+ "To create a public link, set `share=True` in `launch()`.\n"
224
+ ]
225
+ },
226
+ {
227
+ "data": {
228
+ "text/plain": []
229
+ },
230
+ "execution_count": 12,
231
+ "metadata": {},
232
+ "output_type": "execute_result"
233
+ }
234
+ ],
235
+ "source": [
236
+ "#|export\n",
237
+ "image = gr.Image()\n",
238
+ "label = gr.Label()\n",
239
+ "examples = ['../images/Adi_trainers.jpg', '../images/Nike_trainers.jpg', '../images/Puma_trainers.jpg', '../images/Adidas_trainers.jpg']\n",
240
+ "\n",
241
+ "intf = gr.Interface(fn=on_click_classify, inputs=image, outputs=label, examples=examples)\n",
242
+ "intf.launch(inline=False)"
243
+ ]
244
+ },
245
+ {
246
+ "cell_type": "markdown",
247
+ "id": "16e21449-1821-45a7-829c-acfd92badec1",
248
+ "metadata": {},
249
+ "source": [
250
+ "### Export"
251
+ ]
252
+ },
253
+ {
254
+ "cell_type": "code",
255
+ "execution_count": 13,
256
+ "id": "f5c57105-41fe-4d79-a2b0-d52ed75ea6cb",
257
+ "metadata": {},
258
+ "outputs": [
259
+ {
260
+ "name": "stdout",
261
+ "output_type": "stream",
262
+ "text": [
263
+ "Export successful\n"
264
+ ]
265
+ }
266
+ ],
267
+ "source": [
268
+ "from nbdev.export import nb_export\n",
269
+ "nb_export('app.ipynb', './')\n",
270
+ "print('Export successful')"
271
+ ]
272
+ },
273
+ {
274
+ "cell_type": "code",
275
+ "execution_count": null,
276
+ "id": "cb08b3c4-14a5-4e6b-a573-244954914d50",
277
+ "metadata": {},
278
+ "outputs": [],
279
+ "source": []
280
+ }
281
+ ],
282
+ "metadata": {
283
+ "kernelspec": {
284
+ "display_name": "Python 3 (ipykernel)",
285
+ "language": "python",
286
+ "name": "python3"
287
+ },
288
+ "language_info": {
289
+ "codemirror_mode": {
290
+ "name": "ipython",
291
+ "version": 3
292
+ },
293
+ "file_extension": ".py",
294
+ "mimetype": "text/x-python",
295
+ "name": "python",
296
+ "nbconvert_exporter": "python",
297
+ "pygments_lexer": "ipython3",
298
+ "version": "3.10.12"
299
+ }
300
+ },
301
+ "nbformat": 4,
302
+ "nbformat_minor": 5
303
+ }
app.py CHANGED
@@ -1,7 +1,31 @@
 
 
 
 
 
 
 
1
  import gradio as gr
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
2
 
3
- def greet(name):
4
- return "Hello " + name + "!!"
 
 
5
 
6
- demo = gr.Interface(fn=greet, inputs="text", outputs="text")
7
- demo.launch()
 
1
+ # AUTOGENERATED! DO NOT EDIT! File to edit: app.ipynb.
2
+
3
+ # %% auto 0
4
+ __all__ = ['path', 'learn_inf', 'image', 'label', 'examples', 'intf', 'on_click_classify']
5
+
6
+ # %% app.ipynb 2
7
+ from fastai.vision.all import *
8
  import gradio as gr
9
+ from fastai.vision.widgets import *
10
+
11
+ # %% app.ipynb 12
12
+ path = Path('.')
13
+ learn_inf = load_learner(path/'export.pkl')
14
+
15
+ # %% app.ipynb 14
16
+ def on_click_classify(img):
17
+ # img = PILImage.create(btn_upload.data[-1])
18
+ out_pl = widgets.Output()
19
+ out_pl.clear_output()
20
+ with out_pl: display(img.to_thumb(128,128))
21
+ pred,pred_idx,probs = learn_inf.predict(img)
22
+ # lbl_pred.value = f'Prediction: {pred}; Probability: {probs[pred_idx]:.04f}'
23
+ return f'Prediction: {pred}; Probability: {probs[pred_idx]:.04f}'
24
 
25
+ # %% app.ipynb 16
26
+ image = gr.Image()
27
+ label = gr.Label()
28
+ examples = ['../images/Adi_trainers.jpg', '../images/Nike_trainers.jpg', '../images/Puma_trainers.jpg', '../images/Adidas_trainers.jpg']
29
 
30
+ intf = gr.Interface(fn=on_click_classify, inputs=image, outputs=label, examples=examples)
31
+ intf.launch(inline=False)
export.pkl ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:3d0301f60385c5509173862e4eb83b572112931ac8a859726b936ab2a921d1ff
3
+ size 46970622