In [1]:
import tensorflow as tf
import numpy as np
import datetime, os

import matplotlib.pyplot as plt
plt.style.use('fivethirtyeight')

import warnings
warnings.filterwarnings('ignore')

Gradient Calculation

Example 1: Gradient of y = x^3 at x = 5

In [2]:
# Initialize a variable
x = tf.Variable(5.0)

# Initiate the gradient tape
with tf.GradientTape() as tape:
    y = x ** 3
    
# Access the gradient -- derivative of y with respect to x
dy_dx = tape.gradient(y, x)
In [3]:
print(f'{dy_dx} is the gradient of y=x^3 with x={x.numpy()}')
75.0 is the gradient of y=x^3 with x=5.0
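
As a quick sanity check, the analytic derivative dy/dx = 3x^2 evaluates to 3 * 25 = 75 at x = 5, which matches the tape result; a minimal sketch of that comparison, reusing the x and dy_dx defined above:

# analytic derivative of y = x^3 is 3 * x^2
analytic = 3 * x.numpy() ** 2
np.testing.assert_allclose(dy_dx.numpy(), analytic)   # both are 75.0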

Example 2: Estimating y = 4 using SGD

True y value

In [4]:
y = 4 

Initialize a random estimate (yhat) for y

In [5]:
yhat = tf.Variable([tf.random.normal([1])])   # note: the extra list makes yhat shape (1, 1)
print(f'Initializing yhat={yhat.numpy()}')
Initializing yhat=[[-0.3535073]]

Updating yhat using gradient descent

Loss function, L = (yhat - y)^2
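
Since L = (yhat - y)^2, its gradient with respect to yhat is dL/dyhat = 2 * (yhat - y); each SGD step therefore subtracts learning_rate * 2 * (yhat - y) from yhat, pulling the estimate toward the true y.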

In [6]:
learning_rate = 1e-2                            # learning rate for SGD
result = []
                                    
# Compute the derivative of the loss with respect to yhat, and perform the SGD update.
for i in range(500):
    with tf.GradientTape() as tape:
        loss = (yhat - y)**2 

    # compute the gradient of the loss with respect to yhat
    grad = tape.gradient(loss, yhat)
    new_yhat = yhat - learning_rate*grad        # SGD update
    yhat.assign(new_yhat)                       # update the value of yhat
    result.append(yhat.numpy()[0])

# Plot the evolution of yhat as we optimize towards y
plt.figure(figsize=(20,10))

plt.plot(result)
plt.plot([0, 500],[y,y])

plt.legend(('Predicted', 'True'))
plt.xlabel('Iteration')
plt.ylabel('y value')
plt.title('Evolution of yhat as we optimize towards y');
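
The manual update above can also be written with a built-in optimizer that applies the step for us; a minimal sketch, assuming the yhat, y and learning_rate defined above:

optimizer = tf.keras.optimizers.SGD(learning_rate=learning_rate)

for i in range(500):
    with tf.GradientTape() as tape:
        loss = (yhat - y) ** 2
    grad = tape.gradient(loss, yhat)
    optimizer.apply_gradients([(grad, yhat)])   # equivalent to yhat.assign_sub(learning_rate * grad)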

TensorBoard for Visualization

In [7]:
# Load the TensorBoard notebook extension
%load_ext tensorboard
In [8]:
fashion_mnist = tf.keras.datasets.fashion_mnist

(x_train, y_train),(x_test, y_test) = fashion_mnist.load_data()
x_train, x_test = x_train / 255.0, x_test / 255.0 # normalizing the data
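
A quick shape check confirms what was loaded (Fashion-MNIST ships 60,000 training and 10,000 test images of 28x28 pixels):

print(x_train.shape, y_train.shape)   # (60000, 28, 28) (60000,)
print(x_test.shape, y_test.shape)     # (10000, 28, 28) (10000,)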
In [9]:
def create_model():
    return tf.keras.models.Sequential([
        tf.keras.layers.Flatten(input_shape=(28, 28)),
        tf.keras.layers.Dense(512, activation='relu'),
        tf.keras.layers.Dropout(0.2),
        tf.keras.layers.Dense(10, activation='softmax')
  ])
In [10]:
def train_model():
    
    model = create_model()
    model.compile(optimizer='adam',
                  loss='sparse_categorical_crossentropy',
                  metrics=['accuracy'])

    # two extra lines of code for TensorBoard logging
    logdir = os.path.join("logs", datetime.datetime.now().strftime("%Y%m%d-%H%M%S"))
    tb_callbacks = [tf.keras.callbacks.TensorBoard(log_dir=logdir, histogram_freq=1)]    
    
    model.fit(x=x_train, y=y_train, epochs=5, validation_data=(x_test, y_test), callbacks=tb_callbacks)

train_model()
Epoch 1/5
1875/1875 [==============================] - 9s 4ms/step - loss: 0.4987 - accuracy: 0.8219 - val_loss: 0.4254 - val_accuracy: 0.8460
Epoch 2/5
1875/1875 [==============================] - 8s 4ms/step - loss: 0.3828 - accuracy: 0.8591 - val_loss: 0.3848 - val_accuracy: 0.8603
Epoch 3/5
1875/1875 [==============================] - 8s 4ms/step - loss: 0.3515 - accuracy: 0.8710 - val_loss: 0.3703 - val_accuracy: 0.8667
Epoch 4/5
1875/1875 [==============================] - 8s 4ms/step - loss: 0.3294 - accuracy: 0.8771 - val_loss: 0.3700 - val_accuracy: 0.8642
Epoch 5/5
1875/1875 [==============================] - 8s 4ms/step - loss: 0.3137 - accuracy: 0.8837 - val_loss: 0.3448 - val_accuracy: 0.8708
In [11]:
%tensorboard --logdir logs
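
Beyond the Keras callback, scalars can also be logged directly with the tf.summary API; a minimal sketch (the logs/custom directory name is just an illustrative choice):

writer = tf.summary.create_file_writer(os.path.join("logs", "custom"))
with writer.as_default():
    for step in range(100):
        tf.summary.scalar("my_metric", 1.0 / (step + 1), step=step)
writer.flush()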

Keras

Keras is a deep learning API for Python and one of the most powerful and easiest-to-use libraries for building deep learning models. It runs on top of backend libraries such as TensorFlow (and, historically, Theano).

In [12]:
import keras

Example 1: Using Keras to fit y = 2x - 1 from six input/label pairs

In [13]:
from tensorflow.keras.models import Sequential
from tensorflow.keras.utils import to_categorical
from tensorflow.keras.layers import Dense, MaxPooling2D, Conv2D, Flatten
In [14]:
model = Sequential()
model.add(Dense(1, input_shape=(1,)))
model.compile(optimizer='sgd', loss='mean_squared_error')

X = np.array([-1.0,0.0,1.0,2.0,3.0,4.0])
y = np.array([-3.,-1.,1.,3.,5.,7.])

model.fit(X, y, epochs=500)
print(model.predict([10.0]))
Epoch 1/500
1/1 [==============================] - 0s 194ms/step - loss: 30.2472
Epoch 2/500
1/1 [==============================] - 0s 4ms/step - loss: 24.1110
Epoch 3/500
1/1 [==============================] - 0s 3ms/step - loss: 19.2769
... (epochs 4-498 omitted; the loss decreases steadily toward zero) ...
Epoch 499/500
1/1 [==============================] - 0s 3ms/step - loss: 5.2872e-05
Epoch 500/500
1/1 [==============================] - 0s 3ms/step - loss: 5.1787e-05
[[18.979004]]
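
Since the network is a single Dense unit, its fitted weight and bias should be close to the slope and intercept of y = 2x - 1; a quick way to inspect them (exact values vary slightly from run to run):

weights, bias = model.layers[0].get_weights()
print(weights, bias)   # expected to be close to [[2.]] and [-1.]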

Example 2: Using Keras for an MNIST classification task

In [15]:
# Load Dataset

from keras.datasets import mnist

(train_images, train_labels), (test_images, test_labels) = mnist.load_data()
In [16]:
train_images.shape
Out[16]:
(60000, 28, 28)
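
Since matplotlib is already imported, one of the training digits can be displayed as a quick sanity check (index 0 is an arbitrary choice; run this before the labels are one-hot encoded below):

plt.figure(figsize=(4, 4))
plt.imshow(train_images[0], cmap='gray')
plt.title(f'Label: {train_labels[0]}')
plt.axis('off');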

Network Architecture

In [17]:
model = Sequential()

# First layer
model.add(Conv2D(32, (3,3), activation='relu', input_shape=(28,28,1)))
model.add(MaxPooling2D((2,2)))

# Second layer
model.add(Conv2D(64, (3,3), activation='relu'))
model.add(MaxPooling2D((2,2)))

# Third layer
model.add(Conv2D(64, (3,3), activation='relu'))
model.add(Flatten())

# Fourth layer
model.add(Dense(64, activation='relu'))

# Fifth layer : output
model.add(Dense(10, activation='softmax'))

Model Summary

In [18]:
model.summary()
Model: "sequential_2"
_________________________________________________________________
 Layer (type)                Output Shape              Param #   
=================================================================
 conv2d (Conv2D)             (None, 26, 26, 32)        320       
                                                                 
 max_pooling2d (MaxPooling2D  (None, 13, 13, 32)       0         
 )                                                               
                                                                 
 conv2d_1 (Conv2D)           (None, 11, 11, 64)        18496     
                                                                 
 max_pooling2d_1 (MaxPooling  (None, 5, 5, 64)         0         
 2D)                                                             
                                                                 
 conv2d_2 (Conv2D)           (None, 3, 3, 64)          36928     
                                                                 
 flatten_1 (Flatten)         (None, 576)               0         
                                                                 
 dense_3 (Dense)             (None, 64)                36928     
                                                                 
 dense_4 (Dense)             (None, 10)                650       
                                                                 
=================================================================
Total params: 93,322
Trainable params: 93,322
Non-trainable params: 0
_________________________________________________________________
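
The parameter counts follow directly from the layer shapes: the first Conv2D has (3*3*1)*32 + 32 = 320 parameters, the second (3*3*32)*64 + 64 = 18,496, the third (3*3*64)*64 + 64 = 36,928, the first Dense 576*64 + 64 = 36,928, and the output Dense 64*10 + 10 = 650, which sums to 93,322.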

Train the Convnet

In [19]:
# reshape training data
train_images = train_images.reshape((60000, 28, 28, 1))
train_images = train_images.astype('float32')/255

# reshape test data
test_images = test_images.reshape((10000, 28, 28, 1))
test_images = test_images.astype('float32')/255

# convert class vector (int) to binary class matrix
train_labels = to_categorical(train_labels)
test_labels = to_categorical(test_labels)

# compile the model
model.compile(optimizer='rmsprop',
              loss='categorical_crossentropy',
              metrics=['accuracy'])

# fit or train the model
model.fit(train_images, train_labels, epochs=5, batch_size=64)
Epoch 1/5
938/938 [==============================] - 26s 27ms/step - loss: 0.1701 - accuracy: 0.9477
Epoch 2/5
938/938 [==============================] - 25s 27ms/step - loss: 0.0460 - accuracy: 0.9858
Epoch 3/5
938/938 [==============================] - 22s 24ms/step - loss: 0.0320 - accuracy: 0.9902
Epoch 4/5
938/938 [==============================] - 21s 22ms/step - loss: 0.0248 - accuracy: 0.9921
Epoch 5/5
938/938 [==============================] - 21s 23ms/step - loss: 0.0191 - accuracy: 0.9939
Out[19]:
<keras.callbacks.History at 0x197db9dd520>
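
The 938 steps per epoch in the log come from the batch size: 60,000 training images split into batches of 64 gives ceil(60000 / 64) = 938 batches.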

Evaluate the Model

In [20]:
# measure loss and accuracy
test_loss, test_acc = model.evaluate(test_images, test_labels)
test_acc
313/313 [==============================] - 1s 4ms/step - loss: 0.0361 - accuracy: 0.9902
Out[20]:
0.9901999831199646

Save Model

In [21]:
# save model for future use
model.save('mnist_model.h5')
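
The .h5 extension stores the model as a single HDF5 file; with TF 2.x Keras, passing a path without an extension would instead write TensorFlow's SavedModel directory, e.g. model.save('mnist_model') (an alternative format, not what is used here).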

Making Predictions

In [22]:
from keras.models import load_model
from keras.preprocessing.image import load_img, img_to_array

model = load_model('mnist_model.h5')

Testing the model with the following sample image:

In [23]:
# load the image
img = load_img('sample_image.png', grayscale=True, target_size=(28,28))

# convert to array
img = img_to_array(img)

# reshape into required format
img = img.reshape(1,28,28,1)

# prepare pixel data
img = img.astype('float32')/255

# predict the class
digit = np.argmax(model.predict(img),axis=1)

# output predicted class
print(digit[0])
8
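
The same preprocessing steps can be wrapped into a small helper for reuse; predict_digit is a hypothetical name, not part of the original notebook:

def predict_digit(path, model):
    """Load a 28x28 grayscale image and return the predicted digit."""
    img = img_to_array(load_img(path, grayscale=True, target_size=(28, 28)))
    img = img.reshape(1, 28, 28, 1).astype('float32') / 255
    return int(np.argmax(model.predict(img), axis=1)[0])

print(predict_digit('sample_image.png', model))   # e.g. 8 for the sample image above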