franz96521 commited on
Commit
ad5cb02
·
1 Parent(s): 25db2cf
BilletesMexico/weights/checkpoint CHANGED
@@ -1,2 +1,2 @@
1
- model_checkpoint_path: "weights"
2
- all_model_checkpoint_paths: "weights"
 
1
+ model_checkpoint_path: "weights2"
2
+ all_model_checkpoint_paths: "weights2"
BilletesMexico/weights/weights2.data-00000-of-00001 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:b60e3f0c28466e1395942cde7827c958fb3c0fa444340fa699aaf69febbda0c2
3
+ size 97943034
BilletesMexico/weights/weights2.index ADDED
Binary file (29.5 kB). View file
 
billetes.ipynb CHANGED
@@ -2,7 +2,7 @@
2
  "cells": [
3
  {
4
  "cell_type": "code",
5
- "execution_count": 1,
6
  "metadata": {},
7
  "outputs": [
8
  {
@@ -19,7 +19,6 @@
19
  "import matplotlib.pyplot as plt\n",
20
  "import matplotlib.image as mpimg\n",
21
  "import numpy as np\n",
22
- "import tensorflow as tf\n",
23
  "from tensorflow import keras\n",
24
  "from tensorflow.keras import layers\n",
25
  "import numpy as np\n",
@@ -27,14 +26,15 @@
27
  "import os\n",
28
  "from IPython.display import clear_output\n",
29
  "import PIL.Image as Image\n",
30
- "print(tf.version.VERSION)\n",
31
- "\n",
32
- "\n"
 
33
  ]
34
  },
35
  {
36
  "cell_type": "code",
37
- "execution_count": 4,
38
  "metadata": {},
39
  "outputs": [],
40
  "source": [
@@ -49,17 +49,17 @@
49
  },
50
  {
51
  "cell_type": "code",
52
- "execution_count": 5,
53
  "metadata": {},
54
  "outputs": [
55
  {
56
  "name": "stdout",
57
  "output_type": "stream",
58
  "text": [
59
- "Found 395 files belonging to 5 classes.\n",
60
- "Using 316 files for training.\n",
61
- "Found 395 files belonging to 5 classes.\n",
62
- "Using 79 files for validation.\n"
63
  ]
64
  }
65
  ],
@@ -85,7 +85,7 @@
85
  },
86
  {
87
  "cell_type": "code",
88
- "execution_count": 6,
89
  "metadata": {},
90
  "outputs": [
91
  {
@@ -105,7 +105,7 @@
105
  },
106
  {
107
  "cell_type": "code",
108
- "execution_count": 7,
109
  "metadata": {},
110
  "outputs": [],
111
  "source": [
@@ -116,20 +116,25 @@
116
  },
117
  {
118
  "cell_type": "code",
119
- "execution_count": 8,
120
  "metadata": {},
121
  "outputs": [],
122
  "source": [
123
- "import tensorflow as tf\n",
124
- "import tensorflow_hub as hub\n",
125
- "from tensorflow.keras import layers\n",
126
  "movilenet =hub.KerasLayer('https://tfhub.dev/google/tf2-preview/mobilenet_v2/feature_vector/4')\n",
127
  "movilenet.trainable=False\n"
128
  ]
129
  },
 
 
 
 
 
 
 
130
  {
131
  "cell_type": "code",
132
- "execution_count": 9,
133
  "metadata": {},
134
  "outputs": [
135
  {
@@ -140,36 +145,31 @@
140
  "_________________________________________________________________\n",
141
  " Layer (type) Output Shape Param # \n",
142
  "=================================================================\n",
143
- " random_flip (RandomFlip) (None, 224, 224, 3) 0 \n",
144
- " \n",
145
- " random_rotation (RandomRota (None, 224, 224, 3) 0 \n",
146
- " tion) \n",
147
- " \n",
148
- " random_contrast (RandomCont (None, 224, 224, 3) 0 \n",
149
- " rast) \n",
150
  " \n",
151
- " random_zoom (RandomZoom) (None, 224, 224, 3) 0 \n",
152
  " \n",
153
- " keras_layer_1 (KerasLayer) (None, 1001) 3540265 \n",
154
- " \n",
155
- " dense (Dense) (None, 5) 5010 \n",
156
  " \n",
157
  "=================================================================\n",
158
- "Total params: 3,545,275\n",
159
- "Trainable params: 5,010\n",
160
- "Non-trainable params: 3,540,265\n",
161
  "_________________________________________________________________\n"
162
  ]
163
  }
164
  ],
165
  "source": [
166
- "model = tf.keras.Sequential([layers.RandomFlip(\"horizontal_and_vertical\"),\n",
167
- " layers.RandomRotation(0.2),\n",
168
- " layers.RandomContrast(.2),\n",
 
169
  " #layers.RandomBrightness(.2),\n",
170
- " layers.RandomZoom(.2),\n",
171
- " hub.KerasLayer(\"https://tfhub.dev/google/tf2-preview/mobilenet_v2/classification/4\", output_shape=[1001]),\n",
 
172
  " #movilenet,\n",
 
173
  " tf.keras.layers.Dense(num_classes)\n",
174
  " \n",
175
  "\n",
@@ -182,7 +182,7 @@
182
  },
183
  {
184
  "cell_type": "code",
185
- "execution_count": 10,
186
  "metadata": {},
187
  "outputs": [],
188
  "source": [
@@ -216,7 +216,7 @@
216
  },
217
  {
218
  "cell_type": "code",
219
- "execution_count": 11,
220
  "metadata": {},
221
  "outputs": [
222
  {
@@ -224,74 +224,74 @@
224
  "output_type": "stream",
225
  "text": [
226
  "Epoch 1/30\n",
227
- "3/3 [==============================] - 9s 921ms/step - loss: 2.4682 - acc: 0.2437 - val_loss: 2.2278 - val_acc: 0.2278\n",
228
  "Epoch 2/30\n",
229
- "3/3 [==============================] - 1s 149ms/step - loss: 1.6423 - acc: 0.3671 - val_loss: 1.9007 - val_acc: 0.3418\n",
230
  "Epoch 3/30\n",
231
- "3/3 [==============================] - 1s 151ms/step - loss: 1.1681 - acc: 0.5570 - val_loss: 1.3871 - val_acc: 0.4684\n",
232
  "Epoch 4/30\n",
233
- "3/3 [==============================] - 1s 151ms/step - loss: 0.7328 - acc: 0.7563 - val_loss: 1.0329 - val_acc: 0.6203\n",
234
  "Epoch 5/30\n",
235
- "3/3 [==============================] - 1s 147ms/step - loss: 0.5758 - acc: 0.8259 - val_loss: 0.9046 - val_acc: 0.6835\n",
236
  "Epoch 6/30\n",
237
- "3/3 [==============================] - 1s 151ms/step - loss: 0.3592 - acc: 0.9114 - val_loss: 0.8051 - val_acc: 0.7089\n",
238
  "Epoch 7/30\n",
239
- "3/3 [==============================] - 1s 146ms/step - loss: 0.2827 - acc: 0.9430 - val_loss: 0.7079 - val_acc: 0.7342\n",
240
  "Epoch 8/30\n",
241
- "3/3 [==============================] - 1s 149ms/step - loss: 0.2392 - acc: 0.9494 - val_loss: 0.6183 - val_acc: 0.7595\n",
242
  "Epoch 9/30\n",
243
- "3/3 [==============================] - 1s 153ms/step - loss: 0.2218 - acc: 0.9557 - val_loss: 0.5485 - val_acc: 0.7848\n",
244
  "Epoch 10/30\n",
245
- "3/3 [==============================] - 1s 146ms/step - loss: 0.2056 - acc: 0.9589 - val_loss: 0.4901 - val_acc: 0.8481\n",
246
  "Epoch 11/30\n",
247
- "3/3 [==============================] - 1s 153ms/step - loss: 0.1493 - acc: 0.9652 - val_loss: 0.4952 - val_acc: 0.8228\n",
248
  "Epoch 12/30\n",
249
- "3/3 [==============================] - 1s 150ms/step - loss: 0.1321 - acc: 0.9842 - val_loss: 0.5236 - val_acc: 0.7975\n",
250
  "Epoch 13/30\n",
251
- "3/3 [==============================] - 1s 150ms/step - loss: 0.1146 - acc: 0.9842 - val_loss: 0.5648 - val_acc: 0.7975\n",
252
  "Epoch 14/30\n",
253
- "3/3 [==============================] - 1s 149ms/step - loss: 0.1094 - acc: 0.9715 - val_loss: 0.6045 - val_acc: 0.7975\n",
254
  "Epoch 15/30\n",
255
- "3/3 [==============================] - 1s 150ms/step - loss: 0.0935 - acc: 0.9810 - val_loss: 0.6233 - val_acc: 0.7848\n",
256
  "Epoch 16/30\n",
257
- "3/3 [==============================] - 1s 145ms/step - loss: 0.0959 - acc: 0.9810 - val_loss: 0.6089 - val_acc: 0.7848\n",
258
  "Epoch 17/30\n",
259
- "3/3 [==============================] - 1s 153ms/step - loss: 0.0788 - acc: 0.9937 - val_loss: 0.5710 - val_acc: 0.7848\n",
260
  "Epoch 18/30\n",
261
- "3/3 [==============================] - 1s 149ms/step - loss: 0.0767 - acc: 0.9937 - val_loss: 0.5456 - val_acc: 0.7975\n",
262
  "Epoch 19/30\n",
263
- "3/3 [==============================] - 1s 148ms/step - loss: 0.0727 - acc: 0.9810 - val_loss: 0.5637 - val_acc: 0.7975\n",
264
  "Epoch 20/30\n",
265
- "3/3 [==============================] - 1s 152ms/step - loss: 0.0801 - acc: 0.9905 - val_loss: 0.5570 - val_acc: 0.7975\n",
266
  "Epoch 21/30\n",
267
- "3/3 [==============================] - 1s 152ms/step - loss: 0.0676 - acc: 0.9842 - val_loss: 0.5506 - val_acc: 0.7975\n",
268
  "Epoch 22/30\n",
269
- "3/3 [==============================] - 1s 144ms/step - loss: 0.0691 - acc: 0.9873 - val_loss: 0.5589 - val_acc: 0.7975\n",
270
  "Epoch 23/30\n",
271
- "3/3 [==============================] - 1s 149ms/step - loss: 0.0911 - acc: 0.9778 - val_loss: 0.5682 - val_acc: 0.7975\n",
272
  "Epoch 24/30\n",
273
- "3/3 [==============================] - 1s 150ms/step - loss: 0.0602 - acc: 0.9873 - val_loss: 0.5239 - val_acc: 0.7975\n",
274
  "Epoch 25/30\n",
275
- "3/3 [==============================] - 1s 149ms/step - loss: 0.0581 - acc: 0.9905 - val_loss: 0.4833 - val_acc: 0.8101\n",
276
  "Epoch 26/30\n",
277
- "3/3 [==============================] - 1s 150ms/step - loss: 0.0525 - acc: 0.9968 - val_loss: 0.4710 - val_acc: 0.8101\n",
278
  "Epoch 27/30\n",
279
- "3/3 [==============================] - 1s 148ms/step - loss: 0.0531 - acc: 0.9937 - val_loss: 0.4709 - val_acc: 0.8101\n",
280
  "Epoch 28/30\n",
281
- "3/3 [==============================] - 1s 146ms/step - loss: 0.0820 - acc: 0.9842 - val_loss: 0.4476 - val_acc: 0.8228\n",
282
  "Epoch 29/30\n",
283
- "3/3 [==============================] - 1s 150ms/step - loss: 0.0373 - acc: 1.0000 - val_loss: 0.4158 - val_acc: 0.8481\n",
284
  "Epoch 30/30\n",
285
- "3/3 [==============================] - 1s 147ms/step - loss: 0.0432 - acc: 0.9968 - val_loss: 0.4003 - val_acc: 0.8608\n"
286
  ]
287
  },
288
  {
289
  "data": {
290
  "text/plain": [
291
- "<keras.callbacks.History at 0x2087722aa30>"
292
  ]
293
  },
294
- "execution_count": 11,
295
  "metadata": {},
296
  "output_type": "execute_result"
297
  }
@@ -299,17 +299,24 @@
299
  "source": [
300
  "model.fit(train_ds,\n",
301
  " validation_data=val_ds,\n",
302
- " epochs=30,)\n",
303
  " "
304
  ]
305
  },
306
  {
307
  "cell_type": "code",
308
- "execution_count": 13,
 
 
 
 
 
 
 
309
  "metadata": {},
310
  "outputs": [],
311
  "source": [
312
- "model.save_weights(weights_path+'/weights')"
313
  ]
314
  },
315
  {
@@ -321,7 +328,7 @@
321
  },
322
  {
323
  "cell_type": "code",
324
- "execution_count": 15,
325
  "metadata": {},
326
  "outputs": [
327
  {
@@ -334,10 +341,10 @@
334
  {
335
  "data": {
336
  "text/plain": [
337
- "<matplotlib.image.AxesImage at 0x24f9b4d1130>"
338
  ]
339
  },
340
- "execution_count": 15,
341
  "metadata": {},
342
  "output_type": "execute_result"
343
  },
@@ -364,7 +371,7 @@
364
  },
365
  {
366
  "cell_type": "code",
367
- "execution_count": 16,
368
  "metadata": {},
369
  "outputs": [
370
  {
@@ -373,7 +380,7 @@
373
  "'50'"
374
  ]
375
  },
376
- "execution_count": 16,
377
  "metadata": {},
378
  "output_type": "execute_result"
379
  }
@@ -394,9 +401,17 @@
394
  },
395
  {
396
  "cell_type": "code",
397
- "execution_count": null,
398
  "metadata": {},
399
- "outputs": [],
 
 
 
 
 
 
 
 
400
  "source": [
401
  "from IPython.display import clear_output\n",
402
  "\n",
@@ -424,6 +439,77 @@
424
  "captura.release()\n",
425
  "cv2.destroyAllWindows()"
426
  ]
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
427
  }
428
  ],
429
  "metadata": {
 
2
  "cells": [
3
  {
4
  "cell_type": "code",
5
+ "execution_count": 7,
6
  "metadata": {},
7
  "outputs": [
8
  {
 
19
  "import matplotlib.pyplot as plt\n",
20
  "import matplotlib.image as mpimg\n",
21
  "import numpy as np\n",
 
22
  "from tensorflow import keras\n",
23
  "from tensorflow.keras import layers\n",
24
  "import numpy as np\n",
 
26
  "import os\n",
27
  "from IPython.display import clear_output\n",
28
  "import PIL.Image as Image\n",
29
+ "import tensorflow as tf\n",
30
+ "import tensorflow_hub as hub\n",
31
+ "from tensorflow.keras import layers\n",
32
+ "print(tf.version.VERSION)"
33
  ]
34
  },
35
  {
36
  "cell_type": "code",
37
+ "execution_count": 8,
38
  "metadata": {},
39
  "outputs": [],
40
  "source": [
 
49
  },
50
  {
51
  "cell_type": "code",
52
+ "execution_count": 9,
53
  "metadata": {},
54
  "outputs": [
55
  {
56
  "name": "stdout",
57
  "output_type": "stream",
58
  "text": [
59
+ "Found 17907 files belonging to 5 classes.\n",
60
+ "Using 14326 files for training.\n",
61
+ "Found 17907 files belonging to 5 classes.\n",
62
+ "Using 3581 files for validation.\n"
63
  ]
64
  }
65
  ],
 
85
  },
86
  {
87
  "cell_type": "code",
88
+ "execution_count": 10,
89
  "metadata": {},
90
  "outputs": [
91
  {
 
105
  },
106
  {
107
  "cell_type": "code",
108
+ "execution_count": 11,
109
  "metadata": {},
110
  "outputs": [],
111
  "source": [
 
116
  },
117
  {
118
  "cell_type": "code",
119
+ "execution_count": null,
120
  "metadata": {},
121
  "outputs": [],
122
  "source": [
123
+ "\n",
 
 
124
  "movilenet =hub.KerasLayer('https://tfhub.dev/google/tf2-preview/mobilenet_v2/feature_vector/4')\n",
125
  "movilenet.trainable=False\n"
126
  ]
127
  },
128
+ {
129
+ "cell_type": "markdown",
130
+ "metadata": {},
131
+ "source": [
132
+ "# modelo 1"
133
+ ]
134
+ },
135
  {
136
  "cell_type": "code",
137
+ "execution_count": 12,
138
  "metadata": {},
139
  "outputs": [
140
  {
 
145
  "_________________________________________________________________\n",
146
  " Layer (type) Output Shape Param # \n",
147
  "=================================================================\n",
148
+ " keras_layer (KerasLayer) (None, 1001) 23853833 \n",
 
 
 
 
 
 
149
  " \n",
150
+ " dense (Dense) (None, 200) 200400 \n",
151
  " \n",
152
+ " dense_1 (Dense) (None, 5) 1005 \n",
 
 
153
  " \n",
154
  "=================================================================\n",
155
+ "Total params: 24,055,238\n",
156
+ "Trainable params: 201,405\n",
157
+ "Non-trainable params: 23,853,833\n",
158
  "_________________________________________________________________\n"
159
  ]
160
  }
161
  ],
162
  "source": [
163
+ "model = tf.keras.Sequential([\n",
164
+ " #layers.RandomFlip(\"horizontal_and_vertical\"),\n",
165
+ " #layers.RandomRotation(0.2),\n",
166
+ " #layers.RandomContrast(.2),\n",
167
  " #layers.RandomBrightness(.2),\n",
168
+ " #layers.RandomZoom(.2),\n",
169
+ " # hub.KerasLayer(\"https://tfhub.dev/google/tf2-preview/mobilenet_v2/classification/4\", output_shape=[1001],trainable=False),\n",
170
+ " hub.KerasLayer(\"https://tfhub.dev/google/imagenet/inception_v3/classification/5\",trainable=False),\n",
171
  " #movilenet,\n",
172
+ " tf.keras.layers.Dense(int(1001/num_classes)),\n",
173
  " tf.keras.layers.Dense(num_classes)\n",
174
  " \n",
175
  "\n",
 
182
  },
183
  {
184
  "cell_type": "code",
185
+ "execution_count": 13,
186
  "metadata": {},
187
  "outputs": [],
188
  "source": [
 
216
  },
217
  {
218
  "cell_type": "code",
219
+ "execution_count": 14,
220
  "metadata": {},
221
  "outputs": [
222
  {
 
224
  "output_type": "stream",
225
  "text": [
226
  "Epoch 1/30\n",
227
+ "112/112 [==============================] - 51s 342ms/step - loss: 0.7923 - acc: 0.7865 - val_loss: 0.4684 - val_acc: 0.8472\n",
228
  "Epoch 2/30\n",
229
+ "112/112 [==============================] - 33s 292ms/step - loss: 0.4040 - acc: 0.8689 - val_loss: 0.3798 - val_acc: 0.8741\n",
230
  "Epoch 3/30\n",
231
+ "112/112 [==============================] - 33s 291ms/step - loss: 0.3405 - acc: 0.8910 - val_loss: 0.3805 - val_acc: 0.8735\n",
232
  "Epoch 4/30\n",
233
+ "112/112 [==============================] - 33s 291ms/step - loss: 0.3149 - acc: 0.8969 - val_loss: 0.3886 - val_acc: 0.8799\n",
234
  "Epoch 5/30\n",
235
+ "112/112 [==============================] - 33s 292ms/step - loss: 0.2916 - acc: 0.9032 - val_loss: 0.3701 - val_acc: 0.8802\n",
236
  "Epoch 6/30\n",
237
+ "112/112 [==============================] - 33s 293ms/step - loss: 0.2735 - acc: 0.9118 - val_loss: 0.3271 - val_acc: 0.8992\n",
238
  "Epoch 7/30\n",
239
+ "112/112 [==============================] - 33s 292ms/step - loss: 0.2507 - acc: 0.9169 - val_loss: 0.3618 - val_acc: 0.8897\n",
240
  "Epoch 8/30\n",
241
+ "112/112 [==============================] - 34s 296ms/step - loss: 0.2326 - acc: 0.9220 - val_loss: 0.3089 - val_acc: 0.9034\n",
242
  "Epoch 9/30\n",
243
+ "112/112 [==============================] - 34s 296ms/step - loss: 0.2302 - acc: 0.9229 - val_loss: 0.3106 - val_acc: 0.8989\n",
244
  "Epoch 10/30\n",
245
+ "112/112 [==============================] - 34s 299ms/step - loss: 0.2245 - acc: 0.9245 - val_loss: 0.2983 - val_acc: 0.9056\n",
246
  "Epoch 11/30\n",
247
+ "112/112 [==============================] - 33s 291ms/step - loss: 0.2203 - acc: 0.9250 - val_loss: 0.2885 - val_acc: 0.9084\n",
248
  "Epoch 12/30\n",
249
+ "112/112 [==============================] - 33s 291ms/step - loss: 0.1971 - acc: 0.9315 - val_loss: 0.2875 - val_acc: 0.9053\n",
250
  "Epoch 13/30\n",
251
+ "112/112 [==============================] - 33s 292ms/step - loss: 0.1951 - acc: 0.9338 - val_loss: 0.2849 - val_acc: 0.9101\n",
252
  "Epoch 14/30\n",
253
+ "112/112 [==============================] - 33s 292ms/step - loss: 0.1906 - acc: 0.9368 - val_loss: 0.2891 - val_acc: 0.9078\n",
254
  "Epoch 15/30\n",
255
+ "112/112 [==============================] - 33s 292ms/step - loss: 0.1937 - acc: 0.9338 - val_loss: 0.3313 - val_acc: 0.8953\n",
256
  "Epoch 16/30\n",
257
+ "112/112 [==============================] - 33s 291ms/step - loss: 0.1865 - acc: 0.9349 - val_loss: 0.2739 - val_acc: 0.9134\n",
258
  "Epoch 17/30\n",
259
+ "112/112 [==============================] - 33s 293ms/step - loss: 0.1842 - acc: 0.9357 - val_loss: 0.3166 - val_acc: 0.8995\n",
260
  "Epoch 18/30\n",
261
+ "112/112 [==============================] - 35s 306ms/step - loss: 0.1843 - acc: 0.9374 - val_loss: 0.2798 - val_acc: 0.9078\n",
262
  "Epoch 19/30\n",
263
+ "112/112 [==============================] - 34s 297ms/step - loss: 0.1736 - acc: 0.9395 - val_loss: 0.2665 - val_acc: 0.9140\n",
264
  "Epoch 20/30\n",
265
+ "112/112 [==============================] - 33s 294ms/step - loss: 0.1759 - acc: 0.9396 - val_loss: 0.2782 - val_acc: 0.9065\n",
266
  "Epoch 21/30\n",
267
+ "112/112 [==============================] - 33s 292ms/step - loss: 0.1713 - acc: 0.9423 - val_loss: 0.2938 - val_acc: 0.9073\n",
268
  "Epoch 22/30\n",
269
+ "112/112 [==============================] - 33s 292ms/step - loss: 0.1726 - acc: 0.9402 - val_loss: 0.3162 - val_acc: 0.9067\n",
270
  "Epoch 23/30\n",
271
+ "112/112 [==============================] - 33s 294ms/step - loss: 0.1714 - acc: 0.9430 - val_loss: 0.2765 - val_acc: 0.9123\n",
272
  "Epoch 24/30\n",
273
+ "112/112 [==============================] - 33s 291ms/step - loss: 0.1597 - acc: 0.9452 - val_loss: 0.2709 - val_acc: 0.9143\n",
274
  "Epoch 25/30\n",
275
+ "112/112 [==============================] - 33s 294ms/step - loss: 0.1550 - acc: 0.9451 - val_loss: 0.2921 - val_acc: 0.9025\n",
276
  "Epoch 26/30\n",
277
+ "112/112 [==============================] - 33s 294ms/step - loss: 0.1589 - acc: 0.9429 - val_loss: 0.2790 - val_acc: 0.9145\n",
278
  "Epoch 27/30\n",
279
+ "112/112 [==============================] - 33s 294ms/step - loss: 0.1605 - acc: 0.9449 - val_loss: 0.2705 - val_acc: 0.9162\n",
280
  "Epoch 28/30\n",
281
+ "112/112 [==============================] - 33s 293ms/step - loss: 0.1653 - acc: 0.9418 - val_loss: 0.2837 - val_acc: 0.9137\n",
282
  "Epoch 29/30\n",
283
+ "112/112 [==============================] - 33s 292ms/step - loss: 0.1657 - acc: 0.9423 - val_loss: 0.3002 - val_acc: 0.9059\n",
284
  "Epoch 30/30\n",
285
+ "112/112 [==============================] - 33s 292ms/step - loss: 0.1650 - acc: 0.9437 - val_loss: 0.2658 - val_acc: 0.9165\n"
286
  ]
287
  },
288
  {
289
  "data": {
290
  "text/plain": [
291
+ "<keras.callbacks.History at 0x1899825f400>"
292
  ]
293
  },
294
+ "execution_count": 14,
295
  "metadata": {},
296
  "output_type": "execute_result"
297
  }
 
299
  "source": [
300
  "model.fit(train_ds,\n",
301
  " validation_data=val_ds,\n",
302
+ " epochs=30)\n",
303
  " "
304
  ]
305
  },
306
  {
307
  "cell_type": "code",
308
+ "execution_count": null,
309
+ "metadata": {},
310
+ "outputs": [],
311
+ "source": []
312
+ },
313
+ {
314
+ "cell_type": "code",
315
+ "execution_count": 15,
316
  "metadata": {},
317
  "outputs": [],
318
  "source": [
319
+ "model.save_weights(weights_path+'/weights2')"
320
  ]
321
  },
322
  {
 
328
  },
329
  {
330
  "cell_type": "code",
331
+ "execution_count": 16,
332
  "metadata": {},
333
  "outputs": [
334
  {
 
341
  {
342
  "data": {
343
  "text/plain": [
344
+ "<matplotlib.image.AxesImage at 0x18999edd1c0>"
345
  ]
346
  },
347
+ "execution_count": 16,
348
  "metadata": {},
349
  "output_type": "execute_result"
350
  },
 
371
  },
372
  {
373
  "cell_type": "code",
374
+ "execution_count": 17,
375
  "metadata": {},
376
  "outputs": [
377
  {
 
380
  "'50'"
381
  ]
382
  },
383
+ "execution_count": 17,
384
  "metadata": {},
385
  "output_type": "execute_result"
386
  }
 
401
  },
402
  {
403
  "cell_type": "code",
404
+ "execution_count": 18,
405
  "metadata": {},
406
+ "outputs": [
407
+ {
408
+ "name": "stdout",
409
+ "output_type": "stream",
410
+ "text": [
411
+ "100\n"
412
+ ]
413
+ }
414
+ ],
415
  "source": [
416
  "from IPython.display import clear_output\n",
417
  "\n",
 
439
  "captura.release()\n",
440
  "cv2.destroyAllWindows()"
441
  ]
442
+ },
443
+ {
444
+ "cell_type": "markdown",
445
+ "metadata": {},
446
+ "source": [
447
+ "# imgae augmentation"
448
+ ]
449
+ },
450
+ {
451
+ "cell_type": "code",
452
+ "execution_count": 6,
453
+ "metadata": {},
454
+ "outputs": [
455
+ {
456
+ "name": "stdout",
457
+ "output_type": "stream",
458
+ "text": [
459
+ "Initialised with 1782 image(s) found.\n",
460
+ "Output directory set to BilletesMexico/BilletesMexico_img\\output."
461
+ ]
462
+ },
463
+ {
464
+ "name": "stderr",
465
+ "output_type": "stream",
466
+ "text": [
467
+ "Processing <PIL.Image.Image image mode=RGB size=640x480 at 0x18999F2B910>: 26%|██▌ | 1564/6000 [00:18<00:53, 83.09 Samples/s] \n"
468
+ ]
469
+ },
470
+ {
471
+ "ename": "ValueError",
472
+ "evalue": "image has wrong mode",
473
+ "output_type": "error",
474
+ "traceback": [
475
+ "\u001b[1;31m---------------------------------------------------------------------------\u001b[0m",
476
+ "\u001b[1;31mValueError\u001b[0m Traceback (most recent call last)",
477
+ "\u001b[1;32mc:\\Users\\franz\\Billdetector\\billetes.ipynb Cell 22'\u001b[0m in \u001b[0;36m<module>\u001b[1;34m\u001b[0m\n\u001b[0;32m <a href='vscode-notebook-cell:/c%3A/Users/franz/Billdetector/billetes.ipynb#ch0000028?line=12'>13</a>\u001b[0m p\u001b[39m.\u001b[39mrandom_color(\u001b[39m.3\u001b[39m,min_factor\u001b[39m=\u001b[39m\u001b[39m.5\u001b[39m,max_factor\u001b[39m=\u001b[39m\u001b[39m.99\u001b[39m)\n\u001b[0;32m <a href='vscode-notebook-cell:/c%3A/Users/franz/Billdetector/billetes.ipynb#ch0000028?line=14'>15</a>\u001b[0m \u001b[39m#p.random_erasing(.1,rectangle_area=.2)\u001b[39;00m\n\u001b[0;32m <a href='vscode-notebook-cell:/c%3A/Users/franz/Billdetector/billetes.ipynb#ch0000028?line=15'>16</a>\u001b[0m \u001b[39m#p.rotate_without_crop(.2,max_left_rotation=10,max_right_rotation=10)\u001b[39;00m\n\u001b[0;32m <a href='vscode-notebook-cell:/c%3A/Users/franz/Billdetector/billetes.ipynb#ch0000028?line=16'>17</a>\u001b[0m \u001b[39m#p.zoom_random(.2,percentage_area=.5)\u001b[39;00m\n\u001b[1;32m---> <a href='vscode-notebook-cell:/c%3A/Users/franz/Billdetector/billetes.ipynb#ch0000028?line=17'>18</a>\u001b[0m p\u001b[39m.\u001b[39;49msample(\u001b[39m6000\u001b[39;49m)\n",
478
+ "File \u001b[1;32m~\\.conda\\envs\\tf-gpu\\lib\\site-packages\\Augmentor\\Pipeline.py:364\u001b[0m, in \u001b[0;36mPipeline.sample\u001b[1;34m(self, n, multi_threaded)\u001b[0m\n\u001b[0;32m <a href='file:///c%3A/Users/franz/.conda/envs/tf-gpu/lib/site-packages/Augmentor/Pipeline.py?line=361'>362</a>\u001b[0m \u001b[39mwith\u001b[39;00m tqdm(total\u001b[39m=\u001b[39m\u001b[39mlen\u001b[39m(augmentor_images), desc\u001b[39m=\u001b[39m\u001b[39m\"\u001b[39m\u001b[39mExecuting Pipeline\u001b[39m\u001b[39m\"\u001b[39m, unit\u001b[39m=\u001b[39m\u001b[39m\"\u001b[39m\u001b[39m Samples\u001b[39m\u001b[39m\"\u001b[39m) \u001b[39mas\u001b[39;00m progress_bar:\n\u001b[0;32m <a href='file:///c%3A/Users/franz/.conda/envs/tf-gpu/lib/site-packages/Augmentor/Pipeline.py?line=362'>363</a>\u001b[0m \u001b[39mwith\u001b[39;00m ThreadPoolExecutor(max_workers\u001b[39m=\u001b[39m\u001b[39mNone\u001b[39;00m) \u001b[39mas\u001b[39;00m executor:\n\u001b[1;32m--> <a href='file:///c%3A/Users/franz/.conda/envs/tf-gpu/lib/site-packages/Augmentor/Pipeline.py?line=363'>364</a>\u001b[0m \u001b[39mfor\u001b[39;00m result \u001b[39min\u001b[39;00m executor\u001b[39m.\u001b[39mmap(\u001b[39mself\u001b[39m, augmentor_images):\n\u001b[0;32m <a href='file:///c%3A/Users/franz/.conda/envs/tf-gpu/lib/site-packages/Augmentor/Pipeline.py?line=364'>365</a>\u001b[0m progress_bar\u001b[39m.\u001b[39mset_description(\u001b[39m\"\u001b[39m\u001b[39mProcessing \u001b[39m\u001b[39m%s\u001b[39;00m\u001b[39m\"\u001b[39m \u001b[39m%\u001b[39m result)\n\u001b[0;32m <a href='file:///c%3A/Users/franz/.conda/envs/tf-gpu/lib/site-packages/Augmentor/Pipeline.py?line=365'>366</a>\u001b[0m progress_bar\u001b[39m.\u001b[39mupdate(\u001b[39m1\u001b[39m)\n",
479
+ "File \u001b[1;32m~\\.conda\\envs\\tf-gpu\\lib\\concurrent\\futures\\_base.py:608\u001b[0m, in \u001b[0;36mExecutor.map.<locals>.result_iterator\u001b[1;34m()\u001b[0m\n\u001b[0;32m <a href='file:///c%3A/Users/franz/.conda/envs/tf-gpu/lib/concurrent/futures/_base.py?line=604'>605</a>\u001b[0m \u001b[39mwhile\u001b[39;00m fs:\n\u001b[0;32m <a href='file:///c%3A/Users/franz/.conda/envs/tf-gpu/lib/concurrent/futures/_base.py?line=605'>606</a>\u001b[0m \u001b[39m# Careful not to keep a reference to the popped future\u001b[39;00m\n\u001b[0;32m <a href='file:///c%3A/Users/franz/.conda/envs/tf-gpu/lib/concurrent/futures/_base.py?line=606'>607</a>\u001b[0m \u001b[39mif\u001b[39;00m timeout \u001b[39mis\u001b[39;00m \u001b[39mNone\u001b[39;00m:\n\u001b[1;32m--> <a href='file:///c%3A/Users/franz/.conda/envs/tf-gpu/lib/concurrent/futures/_base.py?line=607'>608</a>\u001b[0m \u001b[39myield\u001b[39;00m fs\u001b[39m.\u001b[39;49mpop()\u001b[39m.\u001b[39;49mresult()\n\u001b[0;32m <a href='file:///c%3A/Users/franz/.conda/envs/tf-gpu/lib/concurrent/futures/_base.py?line=608'>609</a>\u001b[0m \u001b[39melse\u001b[39;00m:\n\u001b[0;32m <a href='file:///c%3A/Users/franz/.conda/envs/tf-gpu/lib/concurrent/futures/_base.py?line=609'>610</a>\u001b[0m \u001b[39myield\u001b[39;00m fs\u001b[39m.\u001b[39mpop()\u001b[39m.\u001b[39mresult(end_time \u001b[39m-\u001b[39m time\u001b[39m.\u001b[39mmonotonic())\n",
480
+ "File \u001b[1;32m~\\.conda\\envs\\tf-gpu\\lib\\concurrent\\futures\\_base.py:438\u001b[0m, in \u001b[0;36mFuture.result\u001b[1;34m(self, timeout)\u001b[0m\n\u001b[0;32m <a href='file:///c%3A/Users/franz/.conda/envs/tf-gpu/lib/concurrent/futures/_base.py?line=435'>436</a>\u001b[0m \u001b[39mraise\u001b[39;00m CancelledError()\n\u001b[0;32m <a href='file:///c%3A/Users/franz/.conda/envs/tf-gpu/lib/concurrent/futures/_base.py?line=436'>437</a>\u001b[0m \u001b[39melif\u001b[39;00m \u001b[39mself\u001b[39m\u001b[39m.\u001b[39m_state \u001b[39m==\u001b[39m FINISHED:\n\u001b[1;32m--> <a href='file:///c%3A/Users/franz/.conda/envs/tf-gpu/lib/concurrent/futures/_base.py?line=437'>438</a>\u001b[0m \u001b[39mreturn\u001b[39;00m \u001b[39mself\u001b[39;49m\u001b[39m.\u001b[39;49m__get_result()\n\u001b[0;32m <a href='file:///c%3A/Users/franz/.conda/envs/tf-gpu/lib/concurrent/futures/_base.py?line=439'>440</a>\u001b[0m \u001b[39mself\u001b[39m\u001b[39m.\u001b[39m_condition\u001b[39m.\u001b[39mwait(timeout)\n\u001b[0;32m <a href='file:///c%3A/Users/franz/.conda/envs/tf-gpu/lib/concurrent/futures/_base.py?line=441'>442</a>\u001b[0m \u001b[39mif\u001b[39;00m \u001b[39mself\u001b[39m\u001b[39m.\u001b[39m_state \u001b[39min\u001b[39;00m [CANCELLED, CANCELLED_AND_NOTIFIED]:\n",
481
+ "File \u001b[1;32m~\\.conda\\envs\\tf-gpu\\lib\\concurrent\\futures\\_base.py:390\u001b[0m, in \u001b[0;36mFuture.__get_result\u001b[1;34m(self)\u001b[0m\n\u001b[0;32m <a href='file:///c%3A/Users/franz/.conda/envs/tf-gpu/lib/concurrent/futures/_base.py?line=387'>388</a>\u001b[0m \u001b[39mif\u001b[39;00m \u001b[39mself\u001b[39m\u001b[39m.\u001b[39m_exception:\n\u001b[0;32m <a href='file:///c%3A/Users/franz/.conda/envs/tf-gpu/lib/concurrent/futures/_base.py?line=388'>389</a>\u001b[0m \u001b[39mtry\u001b[39;00m:\n\u001b[1;32m--> <a href='file:///c%3A/Users/franz/.conda/envs/tf-gpu/lib/concurrent/futures/_base.py?line=389'>390</a>\u001b[0m \u001b[39mraise\u001b[39;00m \u001b[39mself\u001b[39m\u001b[39m.\u001b[39m_exception\n\u001b[0;32m <a href='file:///c%3A/Users/franz/.conda/envs/tf-gpu/lib/concurrent/futures/_base.py?line=390'>391</a>\u001b[0m \u001b[39mfinally\u001b[39;00m:\n\u001b[0;32m <a href='file:///c%3A/Users/franz/.conda/envs/tf-gpu/lib/concurrent/futures/_base.py?line=391'>392</a>\u001b[0m \u001b[39m# Break a reference cycle with the exception in self._exception\u001b[39;00m\n\u001b[0;32m <a href='file:///c%3A/Users/franz/.conda/envs/tf-gpu/lib/concurrent/futures/_base.py?line=392'>393</a>\u001b[0m \u001b[39mself\u001b[39m \u001b[39m=\u001b[39m \u001b[39mNone\u001b[39;00m\n",
482
+ "File \u001b[1;32m~\\.conda\\envs\\tf-gpu\\lib\\concurrent\\futures\\thread.py:52\u001b[0m, in \u001b[0;36m_WorkItem.run\u001b[1;34m(self)\u001b[0m\n\u001b[0;32m <a href='file:///c%3A/Users/franz/.conda/envs/tf-gpu/lib/concurrent/futures/thread.py?line=48'>49</a>\u001b[0m \u001b[39mreturn\u001b[39;00m\n\u001b[0;32m <a href='file:///c%3A/Users/franz/.conda/envs/tf-gpu/lib/concurrent/futures/thread.py?line=50'>51</a>\u001b[0m \u001b[39mtry\u001b[39;00m:\n\u001b[1;32m---> <a href='file:///c%3A/Users/franz/.conda/envs/tf-gpu/lib/concurrent/futures/thread.py?line=51'>52</a>\u001b[0m result \u001b[39m=\u001b[39m \u001b[39mself\u001b[39m\u001b[39m.\u001b[39mfn(\u001b[39m*\u001b[39m\u001b[39mself\u001b[39m\u001b[39m.\u001b[39margs, \u001b[39m*\u001b[39m\u001b[39m*\u001b[39m\u001b[39mself\u001b[39m\u001b[39m.\u001b[39mkwargs)\n\u001b[0;32m <a href='file:///c%3A/Users/franz/.conda/envs/tf-gpu/lib/concurrent/futures/thread.py?line=52'>53</a>\u001b[0m \u001b[39mexcept\u001b[39;00m \u001b[39mBaseException\u001b[39;00m \u001b[39mas\u001b[39;00m exc:\n\u001b[0;32m <a href='file:///c%3A/Users/franz/.conda/envs/tf-gpu/lib/concurrent/futures/thread.py?line=53'>54</a>\u001b[0m \u001b[39mself\u001b[39m\u001b[39m.\u001b[39mfuture\u001b[39m.\u001b[39mset_exception(exc)\n",
483
+ "File \u001b[1;32m~\\.conda\\envs\\tf-gpu\\lib\\site-packages\\Augmentor\\Pipeline.py:105\u001b[0m, in \u001b[0;36mPipeline.__call__\u001b[1;34m(self, augmentor_image)\u001b[0m\n\u001b[0;32m <a href='file:///c%3A/Users/franz/.conda/envs/tf-gpu/lib/site-packages/Augmentor/Pipeline.py?line=91'>92</a>\u001b[0m \u001b[39mdef\u001b[39;00m \u001b[39m__call__\u001b[39m(\u001b[39mself\u001b[39m, augmentor_image):\n\u001b[0;32m <a href='file:///c%3A/Users/franz/.conda/envs/tf-gpu/lib/site-packages/Augmentor/Pipeline.py?line=92'>93</a>\u001b[0m \u001b[39m\"\"\"\u001b[39;00m\n\u001b[0;32m <a href='file:///c%3A/Users/franz/.conda/envs/tf-gpu/lib/site-packages/Augmentor/Pipeline.py?line=93'>94</a>\u001b[0m \u001b[39m Function used by the ThreadPoolExecutor to process the pipeline\u001b[39;00m\n\u001b[0;32m <a href='file:///c%3A/Users/franz/.conda/envs/tf-gpu/lib/site-packages/Augmentor/Pipeline.py?line=94'>95</a>\u001b[0m \u001b[39m using multiple threads. Do not call directly.\u001b[39;00m\n\u001b[1;32m (...)\u001b[0m\n\u001b[0;32m <a href='file:///c%3A/Users/franz/.conda/envs/tf-gpu/lib/site-packages/Augmentor/Pipeline.py?line=102'>103</a>\u001b[0m \u001b[39m :return: None\u001b[39;00m\n\u001b[0;32m <a href='file:///c%3A/Users/franz/.conda/envs/tf-gpu/lib/site-packages/Augmentor/Pipeline.py?line=103'>104</a>\u001b[0m \u001b[39m \"\"\"\u001b[39;00m\n\u001b[1;32m--> <a href='file:///c%3A/Users/franz/.conda/envs/tf-gpu/lib/site-packages/Augmentor/Pipeline.py?line=104'>105</a>\u001b[0m \u001b[39mreturn\u001b[39;00m \u001b[39mself\u001b[39;49m\u001b[39m.\u001b[39;49m_execute(augmentor_image)\n",
484
+ "File \u001b[1;32m~\\.conda\\envs\\tf-gpu\\lib\\site-packages\\Augmentor\\Pipeline.py:233\u001b[0m, in \u001b[0;36mPipeline._execute\u001b[1;34m(self, augmentor_image, save_to_disk, multi_threaded)\u001b[0m\n\u001b[0;32m <a href='file:///c%3A/Users/franz/.conda/envs/tf-gpu/lib/site-packages/Augmentor/Pipeline.py?line=230'>231</a>\u001b[0m r \u001b[39m=\u001b[39m \u001b[39mround\u001b[39m(random\u001b[39m.\u001b[39muniform(\u001b[39m0\u001b[39m, \u001b[39m1\u001b[39m), \u001b[39m1\u001b[39m)\n\u001b[0;32m <a href='file:///c%3A/Users/franz/.conda/envs/tf-gpu/lib/site-packages/Augmentor/Pipeline.py?line=231'>232</a>\u001b[0m \u001b[39mif\u001b[39;00m r \u001b[39m<\u001b[39m\u001b[39m=\u001b[39m operation\u001b[39m.\u001b[39mprobability:\n\u001b[1;32m--> <a href='file:///c%3A/Users/franz/.conda/envs/tf-gpu/lib/site-packages/Augmentor/Pipeline.py?line=232'>233</a>\u001b[0m images \u001b[39m=\u001b[39m operation\u001b[39m.\u001b[39;49mperform_operation(images)\n\u001b[0;32m <a href='file:///c%3A/Users/franz/.conda/envs/tf-gpu/lib/site-packages/Augmentor/Pipeline.py?line=234'>235</a>\u001b[0m \u001b[39m# TEMP FOR TESTING\u001b[39;00m\n\u001b[0;32m <a href='file:///c%3A/Users/franz/.conda/envs/tf-gpu/lib/site-packages/Augmentor/Pipeline.py?line=235'>236</a>\u001b[0m \u001b[39m# save_to_disk = False\u001b[39;00m\n\u001b[0;32m <a href='file:///c%3A/Users/franz/.conda/envs/tf-gpu/lib/site-packages/Augmentor/Pipeline.py?line=237'>238</a>\u001b[0m \u001b[39mif\u001b[39;00m save_to_disk:\n",
485
+ "File \u001b[1;32m~\\.conda\\envs\\tf-gpu\\lib\\site-packages\\Augmentor\\Operations.py:417\u001b[0m, in \u001b[0;36mRandomContrast.perform_operation\u001b[1;34m(self, images)\u001b[0m\n\u001b[0;32m <a href='file:///c%3A/Users/franz/.conda/envs/tf-gpu/lib/site-packages/Augmentor/Operations.py?line=413'>414</a>\u001b[0m augmented_images \u001b[39m=\u001b[39m []\n\u001b[0;32m <a href='file:///c%3A/Users/franz/.conda/envs/tf-gpu/lib/site-packages/Augmentor/Operations.py?line=415'>416</a>\u001b[0m \u001b[39mfor\u001b[39;00m image \u001b[39min\u001b[39;00m images:\n\u001b[1;32m--> <a href='file:///c%3A/Users/franz/.conda/envs/tf-gpu/lib/site-packages/Augmentor/Operations.py?line=416'>417</a>\u001b[0m augmented_images\u001b[39m.\u001b[39mappend(do(image))\n\u001b[0;32m <a href='file:///c%3A/Users/franz/.conda/envs/tf-gpu/lib/site-packages/Augmentor/Operations.py?line=418'>419</a>\u001b[0m \u001b[39mreturn\u001b[39;00m augmented_images\n",
486
+ "File \u001b[1;32m~\\.conda\\envs\\tf-gpu\\lib\\site-packages\\Augmentor\\Operations.py:412\u001b[0m, in \u001b[0;36mRandomContrast.perform_operation.<locals>.do\u001b[1;34m(image)\u001b[0m\n\u001b[0;32m <a href='file:///c%3A/Users/franz/.conda/envs/tf-gpu/lib/site-packages/Augmentor/Operations.py?line=408'>409</a>\u001b[0m \u001b[39mdef\u001b[39;00m \u001b[39mdo\u001b[39m(image):\n\u001b[0;32m <a href='file:///c%3A/Users/franz/.conda/envs/tf-gpu/lib/site-packages/Augmentor/Operations.py?line=410'>411</a>\u001b[0m image_enhancer_contrast \u001b[39m=\u001b[39m ImageEnhance\u001b[39m.\u001b[39mContrast(image)\n\u001b[1;32m--> <a href='file:///c%3A/Users/franz/.conda/envs/tf-gpu/lib/site-packages/Augmentor/Operations.py?line=411'>412</a>\u001b[0m \u001b[39mreturn\u001b[39;00m image_enhancer_contrast\u001b[39m.\u001b[39;49menhance(factor)\n",
487
+ "File \u001b[1;32m~\\.conda\\envs\\tf-gpu\\lib\\site-packages\\PIL\\ImageEnhance.py:36\u001b[0m, in \u001b[0;36m_Enhance.enhance\u001b[1;34m(self, factor)\u001b[0m\n\u001b[0;32m <a href='file:///c%3A/Users/franz/.conda/envs/tf-gpu/lib/site-packages/PIL/ImageEnhance.py?line=24'>25</a>\u001b[0m \u001b[39mdef\u001b[39;00m \u001b[39menhance\u001b[39m(\u001b[39mself\u001b[39m, factor):\n\u001b[0;32m <a href='file:///c%3A/Users/franz/.conda/envs/tf-gpu/lib/site-packages/PIL/ImageEnhance.py?line=25'>26</a>\u001b[0m \u001b[39m\"\"\"\u001b[39;00m\n\u001b[0;32m <a href='file:///c%3A/Users/franz/.conda/envs/tf-gpu/lib/site-packages/PIL/ImageEnhance.py?line=26'>27</a>\u001b[0m \u001b[39m Returns an enhanced image.\u001b[39;00m\n\u001b[0;32m <a href='file:///c%3A/Users/franz/.conda/envs/tf-gpu/lib/site-packages/PIL/ImageEnhance.py?line=27'>28</a>\u001b[0m \n\u001b[1;32m (...)\u001b[0m\n\u001b[0;32m <a href='file:///c%3A/Users/franz/.conda/envs/tf-gpu/lib/site-packages/PIL/ImageEnhance.py?line=33'>34</a>\u001b[0m \u001b[39m :rtype: :py:class:`~PIL.Image.Image`\u001b[39;00m\n\u001b[0;32m <a href='file:///c%3A/Users/franz/.conda/envs/tf-gpu/lib/site-packages/PIL/ImageEnhance.py?line=34'>35</a>\u001b[0m \u001b[39m \"\"\"\u001b[39;00m\n\u001b[1;32m---> <a href='file:///c%3A/Users/franz/.conda/envs/tf-gpu/lib/site-packages/PIL/ImageEnhance.py?line=35'>36</a>\u001b[0m \u001b[39mreturn\u001b[39;00m Image\u001b[39m.\u001b[39;49mblend(\u001b[39mself\u001b[39;49m\u001b[39m.\u001b[39;49mdegenerate, \u001b[39mself\u001b[39;49m\u001b[39m.\u001b[39;49mimage, factor)\n",
488
+ "File \u001b[1;32m~\\.conda\\envs\\tf-gpu\\lib\\site-packages\\PIL\\Image.py:3052\u001b[0m, in \u001b[0;36mblend\u001b[1;34m(im1, im2, alpha)\u001b[0m\n\u001b[0;32m <a href='file:///c%3A/Users/franz/.conda/envs/tf-gpu/lib/site-packages/PIL/Image.py?line=3049'>3050</a>\u001b[0m im1\u001b[39m.\u001b[39mload()\n\u001b[0;32m <a href='file:///c%3A/Users/franz/.conda/envs/tf-gpu/lib/site-packages/PIL/Image.py?line=3050'>3051</a>\u001b[0m im2\u001b[39m.\u001b[39mload()\n\u001b[1;32m-> <a href='file:///c%3A/Users/franz/.conda/envs/tf-gpu/lib/site-packages/PIL/Image.py?line=3051'>3052</a>\u001b[0m \u001b[39mreturn\u001b[39;00m im1\u001b[39m.\u001b[39m_new(core\u001b[39m.\u001b[39;49mblend(im1\u001b[39m.\u001b[39;49mim, im2\u001b[39m.\u001b[39;49mim, alpha))\n",
489
+ "\u001b[1;31mValueError\u001b[0m: image has wrong mode"
490
+ ]
491
+ }
492
+ ],
493
+ "source": [
494
+ "import Augmentor\n",
495
+ "# Passing the path of the image directory\n",
496
+ "p = Augmentor.Pipeline(data_path)\n",
497
+ " \n",
498
+ "# Defining augmentation parameters and generating 6000 samples\n",
499
+ "p.flip_left_right(0.5)\n",
500
+ "#p.black_and_white(0.1)\n",
501
+ "p.rotate(0.3, 10, 10)\n",
502
+ "p.skew(0.4, 0.5)\n",
503
+ "p.zoom(probability = 0.2, min_factor = .5, max_factor = 1.5)\n",
504
+ "\n",
505
+ "p.random_contrast(0.2,min_factor=0.3,max_factor=.9)\n",
506
+ "p.random_color(.3,min_factor=.5,max_factor=.99)\n",
507
+ "\n",
508
+ "#p.random_erasing(.1,rectangle_area=.2)\n",
509
+ "#p.rotate_without_crop(.2,max_left_rotation=10,max_right_rotation=10)\n",
510
+ "#p.zoom_random(.2,percentage_area=.5)\n",
511
+ "p.sample(6000)"
512
+ ]
513
  }
514
  ],
515
  "metadata": {