File size: 2,376 Bytes
0f1dee6
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
# -*- coding: utf-8 -*-
"""Untitled8.ipynb

Automatically generated by Colaboratory.

Original file is located at
    https://colab.research.google.com/drive/1VfD6Tk-uTrJOBlTJImKWkPqp3EPsrU6F
"""

import tensorflow as tf
import matplotlib.pyplot as plt
import seaborn as sn
import numpy as np
import pandas as pd
import math
import datetime
import platform

# Load Fashion-MNIST: 60k training / 10k test grayscale 28x28 images, 10 classes.
fashion_data = tf.keras.datasets.fashion_mnist
(x_train, y_train), (x_test, y_test) = fashion_data.load_data()

# Aliases to the raw (un-normalized) training arrays, kept because the
# shape prints below refer to them.
x_train_odd, y_train_odd = x_train, y_train

# Report array shapes so the expected dimensions are visible in the log.
for label, arr in (('x_train:', x_train_odd), ('y_train:', y_train_odd),
                   ('x_test:', x_test), ('y_test:', y_test)):
    print(label, arr.shape)

# Show the first test image as a quick sanity check of the data.
plt.imshow(x_test[0], cmap=plt.cm.binary)
plt.show()

# Scale pixel values from [0, 255] into [0, 1] for training stability.
x_train_normalized = x_train / 255
x_test_normalized = x_test / 255

# Fully-connected classifier: flatten the 28x28 image, two hidden layers of
# 256 ReLU units with L2 weight regularization, softmax over the 10 classes.
model = tf.keras.models.Sequential()

# Input layers.
model.add(tf.keras.layers.Flatten(
    input_shape=x_train_normalized.shape[1:]  # (28, 28) -> 784 features
))
model.add(tf.keras.layers.Dense(
    256,
    activation='relu',
    kernel_regularizer=tf.keras.regularizers.l2(0.002),
))

# Hidden layers.
model.add(tf.keras.layers.Dense(
    256,
    activation='relu',
    kernel_regularizer=tf.keras.regularizers.l2(0.002),
))

# Output layers.
model.add(tf.keras.layers.Dense(10, activation='softmax'))

# Print a textual overview of layers and parameter counts.
model.summary()

# Render the architecture diagram (requires pydot + graphviz in the runtime).
tf.keras.utils.plot_model(
    model,
    show_shapes=True,
    show_layer_names=True,
)

# Adam optimizer with an explicit (default) learning rate.
adam_optimizer = tf.keras.optimizers.Adam(learning_rate=0.001)

# Sparse categorical cross-entropy matches the integer (non-one-hot) labels.
model.compile(
    loss=tf.keras.losses.sparse_categorical_crossentropy,
    optimizer=adam_optimizer,
    metrics=['accuracy'],
)

# Timestamped TensorBoard log directory so successive runs do not collide.
run_stamp = datetime.datetime.now().strftime("%Y%m%d-%H%M%S")
log_dir = ".logs/fit/" + run_stamp
tensorboard_callback = tf.keras.callbacks.TensorBoard(log_dir=log_dir, histogram_freq=1)

# Train on the normalized images; the test split doubles as validation data.
training_history = model.fit(
    x_train_normalized,
    y_train,
    epochs=20,
    validation_data=(x_test_normalized, y_test),
    callbacks=[tensorboard_callback],
)

# Evaluate final loss/accuracy on both splits. NOTE: the Colab export had
# commented these two calls out (they lived under a `%%capture` cell magic),
# which left train_loss / validation_loss etc. undefined and made every
# print below raise NameError. Restoring the calls fixes that.
train_loss, train_accuracy = model.evaluate(x_train_normalized, y_train)
validation_loss, validation_accuracy = model.evaluate(x_test_normalized, y_test)

print('Training loss: ', train_loss)
print('Training accuracy: ', train_accuracy)
print('Validation loss: ', validation_loss)
print('Validation accuracy: ', validation_accuracy)