1. Simple Autoencoder

In [1]:
# https://blog.keras.io/building-autoencoders-in-keras.html

from keras.layers import Input, Dense
from keras.models import Model

# this is the size of our encoded representations
encoding_dim = 32  # compress each 784-float image down to 32 floats

# this is our input placeholder
input_img = Input(shape=(784,))

# "encoded" is the encoded representation of the input
encoded = Dense(encoding_dim, activation='relu')(input_img)

# "decoded" is the lossy reconstruction of the input
decoded = Dense(784, activation='sigmoid')(encoded)

# this model maps an input to its reconstruction
autoencoder = Model(input_img, decoded)
autoencoder.summary()
Using TensorFlow backend.
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
input_1 (InputLayer)         (None, 784)               0         
_________________________________________________________________
dense_1 (Dense)              (None, 32)                25120     
_________________________________________________________________
dense_2 (Dense)              (None, 784)               25872     
=================================================================
Total params: 50,992
Trainable params: 50,992
Non-trainable params: 0
_________________________________________________________________
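The parameter counts are plain dense-layer arithmetic: 784 × 32 weights + 32 biases = 25,120 for the encoder, and 32 × 784 weights + 784 biases = 25,872 for the decoder.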
In [2]:
# this model maps an input to its encoded representation
encoder = Model(input_img, encoded)
encoder.summary()
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
input_1 (InputLayer)         (None, 784)               0         
_________________________________________________________________
dense_1 (Dense)              (None, 32)                25120     
=================================================================
Total params: 25,120
Trainable params: 25,120
Non-trainable params: 0
_________________________________________________________________
In [4]:
# create a placeholder for an encoded (32-dimensional) input
encoded_input = Input(shape=(encoding_dim,))
# retrieve the last layer of the autoencoder model
decoder_layer = autoencoder.layers[-1]
# create the decoder model
decoder = Model(encoded_input, decoder_layer(encoded_input))
decoder.summary()
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
input_3 (InputLayer)         (None, 32)                0         
_________________________________________________________________
dense_2 (Dense)              (None, 784)               25872     
=================================================================
Total params: 25,872
Trainable params: 25,872
Non-trainable params: 0
_________________________________________________________________
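Because `decoder_layer` is the very same Dense object that sits at the end of `autoencoder`, all three models share weights: training the autoencoder below also updates the standalone encoder and decoder.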
In [5]:
autoencoder.compile(optimizer='adadelta', loss='binary_crossentropy')
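With pixels scaled to [0, 1], each of the 784 outputs is treated as a per-pixel Bernoulli probability, so binary crossentropy is a natural reconstruction loss; the same compile settings are reused throughout this notebook.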
In [6]:
from keras.datasets import mnist
import numpy as np
(x_train, _), (x_test, _) = mnist.load_data()

x_train = x_train.astype('float32') / 255.
x_test = x_test.astype('float32') / 255.
x_train = x_train.reshape((len(x_train), np.prod(x_train.shape[1:])))
x_test = x_test.reshape((len(x_test), np.prod(x_test.shape[1:])))
print( x_train.shape )
print( x_test.shape )
(60000, 784)
(10000, 784)
In [7]:
autoencoder.fit(x_train, x_train,
                epochs=50,
                batch_size=256,
                shuffle=True,
                validation_data=(x_test, x_test))
Train on 60000 samples, validate on 10000 samples
Epoch 1/50
60000/60000 [==============================] - 6s - loss: 0.3792 - val_loss: 0.2732
...
Epoch 50/50
60000/60000 [==============================] - 2s - loss: 0.1058 - val_loss: 0.1039
Out[7]:
<keras.callbacks.History at 0x2e10ef65550>
In [13]:
# encode and decode some digits
# note that we take them from the *test* set
encoded_imgs = encoder.predict(x_test)
decoded_imgs = decoder.predict(encoded_imgs)
print(encoded_imgs.shape, decoded_imgs.shape)
(10000, 32) (10000, 784)
In [14]:
# use Matplotlib (don't ask)
import matplotlib.pyplot as plt

n = 10  # how many digits we will display
plt.figure(figsize=(20, 4))
for i in range(n):
    # display original
    ax = plt.subplot(2, n, i + 1)
    plt.imshow(x_test[i].reshape(28, 28))
    plt.gray()
    ax.get_xaxis().set_visible(False)
    ax.get_yaxis().set_visible(False)

    # display reconstruction
    ax = plt.subplot(2, n, i + 1 + n)
    plt.imshow(decoded_imgs[i].reshape(28, 28))
    plt.gray()
    ax.get_xaxis().set_visible(False)
    ax.get_yaxis().set_visible(False)
plt.show()

1.1 Regularization

In [15]:
from keras import regularizers

encoding_dim = 32

input_img = Input(shape=(784,))
# add a Dense layer with a L1 activity regularizer
encoded = Dense(encoding_dim, activation='relu',
                activity_regularizer=regularizers.l1(10e-5))(input_img)
decoded = Dense(784, activation='sigmoid')(encoded)

autoencoder = Model(input_img, decoded)
In [17]:
autoencoder.compile(optimizer='adadelta', loss='binary_crossentropy')
autoencoder.fit(x_train, x_train,
                epochs=100,
                batch_size=256,
                shuffle=True,
                validation_data=(x_test, x_test))
Train on 60000 samples, validate on 10000 samples
Epoch 1/100
60000/60000 [==============================] - 3s - loss: 0.6846 - val_loss: 0.6607
...
Epoch 100/100
60000/60000 [==============================] - 2s - loss: 0.2775 - val_loss: 0.2774
Out[17]:
<keras.callbacks.History at 0x2e19c2af470>
In [18]:
# rebuild the encoder/decoder for the regularized model
# (the earlier `encoder`/`decoder` still point at the first autoencoder)
encoder = Model(input_img, encoded)
encoded_input = Input(shape=(encoding_dim,))
decoder = Model(encoded_input, autoencoder.layers[-1](encoded_input))

# encode and decode some digits from the *test* set
encoded_imgs = encoder.predict(x_test)
decoded_imgs = decoder.predict(encoded_imgs)
print(encoded_imgs.shape, decoded_imgs.shape)
(10000, 32) (10000, 784)
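The L1 activity penalty should make these codes sparser. A quick check, using the rebuilt encoder above (the blog post linked at the top reports a mean activation of roughly 3.3 with the penalty versus about 7.3 without):

In [ ]:
# mean activation of the 32-dim codes over the test set; lower means sparser
print(encoded_imgs.mean())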
In [19]:
n = 10  # how many digits we will display
plt.figure(figsize=(20, 4))
for i in range(n):
    # display original
    ax = plt.subplot(2, n, i + 1)
    plt.imshow(x_test[i].reshape(28, 28))
    plt.gray()
    ax.get_xaxis().set_visible(False)
    ax.get_yaxis().set_visible(False)

    # display reconstruction
    ax = plt.subplot(2, n, i + 1 + n)
    plt.imshow(decoded_imgs[i].reshape(28, 28))
    plt.gray()
    ax.get_xaxis().set_visible(False)
    ax.get_yaxis().set_visible(False)
plt.show()
In [ ]:
 

2. Deep Autoencoder

In [20]:
input_img = Input(shape=(784,))
encoded = Dense(128, activation='relu')(input_img)
encoded = Dense(64, activation='relu')(encoded)
encoded = Dense(32, activation='relu')(encoded)

decoded = Dense(64, activation='relu')(encoded)
decoded = Dense(128, activation='relu')(decoded)
decoded = Dense(784, activation='sigmoid')(decoded)
In [30]:
autoencoder = Model(input_img, decoded)
autoencoder.compile(optimizer='adadelta', loss='binary_crossentropy')
history = autoencoder.fit(
    x_train, x_train,
    epochs=100,
    batch_size=256,
    shuffle=True,
    validation_data=(x_test, x_test))
Train on 60000 samples, validate on 10000 samples
Epoch 1/100
60000/60000 [==============================] - 4s - loss: 0.1025 - val_loss: 0.1007
...
Epoch 100/100
60000/60000 [==============================] - 3s - loss: 0.0926 - val_loss: 0.0921
In [39]:
score = autoencoder.evaluate(x_test, x_test)
score
 9888/10000 [============================>.] - ETA: 0s
Out[39]:
0.092111111736297602
In [40]:
# list all data in history
print(history.history.keys())
dict_keys(['val_loss', 'loss'])
In [42]:
# plot training and validation loss per epoch
plt.plot(history.history['loss'])
plt.plot(history.history['val_loss'])
plt.title('model loss')
plt.ylabel('binary_crossentropy')
plt.xlabel('epoch')
plt.legend(['train', 'test'], loc='upper right')
plt.show()
In [43]:
# rebuild the encoder for the deep model and reconstruct directly
# (the earlier single-layer decoder no longer matches this architecture)
encoder = Model(input_img, encoded)
encoded_imgs = encoder.predict(x_test)
decoded_imgs = autoencoder.predict(x_test)
print(encoded_imgs.shape, decoded_imgs.shape)
(10000, 32) (10000, 784)
In [44]:
n = 10  # how many digits we will display
plt.figure(figsize=(20, 4))
for i in range(n):
    # display original
    ax = plt.subplot(2, n, i + 1)
    plt.imshow(x_test[i].reshape(28, 28))
    plt.gray()
    ax.get_xaxis().set_visible(False)
    ax.get_yaxis().set_visible(False)

    # display reconstruction
    ax = plt.subplot(2, n, i + 1 + n)
    plt.imshow(decoded_imgs[i].reshape(28, 28))
    plt.gray()
    ax.get_xaxis().set_visible(False)
    ax.get_yaxis().set_visible(False)
plt.show()
In [ ]:
 

3. Convolutional autoencoder

In [45]:
from keras.layers import Input, Dense, Conv2D, MaxPooling2D, UpSampling2D
from keras.models import Model
from keras import backend as K

input_img = Input(shape=(28, 28, 1))  # adapt this if using `channels_first` image data format

x = Conv2D(16, (3, 3), activation='relu', padding='same')(input_img)
x = MaxPooling2D((2, 2), padding='same')(x)
x = Conv2D(8, (3, 3), activation='relu', padding='same')(x)
x = MaxPooling2D((2, 2), padding='same')(x)
x = Conv2D(8, (3, 3), activation='relu', padding='same')(x)
encoded = MaxPooling2D((2, 2), padding='same')(x)

# at this point the representation is (4, 4, 8) i.e. 128-dimensional

x = Conv2D(8, (3, 3), activation='relu', padding='same')(encoded)
x = UpSampling2D((2, 2))(x)
x = Conv2D(8, (3, 3), activation='relu', padding='same')(x)
x = UpSampling2D((2, 2))(x)
x = Conv2D(16, (3, 3), activation='relu')(x)  # no padding: 16x16 -> 14x14, so the final upsample yields 28x28
x = UpSampling2D((2, 2))(x)
decoded = Conv2D(1, (3, 3), activation='sigmoid', padding='same')(x)

autoencoder = Model(input_img, decoded)
autoencoder.compile(optimizer='adadelta', loss='binary_crossentropy')
autoencoder.summary()
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
input_6 (InputLayer)         (None, 28, 28, 1)         0         
_________________________________________________________________
conv2d_1 (Conv2D)            (None, 28, 28, 16)        160       
_________________________________________________________________
max_pooling2d_1 (MaxPooling2 (None, 14, 14, 16)        0         
_________________________________________________________________
conv2d_2 (Conv2D)            (None, 14, 14, 8)         1160      
_________________________________________________________________
max_pooling2d_2 (MaxPooling2 (None, 7, 7, 8)           0         
_________________________________________________________________
conv2d_3 (Conv2D)            (None, 7, 7, 8)           584       
_________________________________________________________________
max_pooling2d_3 (MaxPooling2 (None, 4, 4, 8)           0         
_________________________________________________________________
conv2d_4 (Conv2D)            (None, 4, 4, 8)           584       
_________________________________________________________________
up_sampling2d_1 (UpSampling2 (None, 8, 8, 8)           0         
_________________________________________________________________
conv2d_5 (Conv2D)            (None, 8, 8, 8)           584       
_________________________________________________________________
up_sampling2d_2 (UpSampling2 (None, 16, 16, 8)         0         
_________________________________________________________________
conv2d_6 (Conv2D)            (None, 14, 14, 16)        1168      
_________________________________________________________________
up_sampling2d_3 (UpSampling2 (None, 28, 28, 16)        0         
_________________________________________________________________
conv2d_7 (Conv2D)            (None, 28, 28, 1)         145       
=================================================================
Total params: 4,385
Trainable params: 4,385
Non-trainable params: 0
_________________________________________________________________
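The decoder's shape bookkeeping is visible in the summary: 4×4 is upsampled to 8×8, convolved with `same` padding at 8×8, upsampled to 16×16, reduced to 14×14 by the one unpadded convolution, and upsampled once more to 28×28, landing back on the input size.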
In [46]:
(x_train, _), (x_test, _) = mnist.load_data()
x_train = x_train.astype('float32') / 255.
x_test = x_test.astype('float32') / 255.
x_train = np.reshape(x_train, (len(x_train), 28, 28, 1))  
x_test = np.reshape(x_test, (len(x_test), 28, 28, 1))  
In [47]:
# monitor training in TensorBoard with: tensorboard --logdir=/tmp/autoencoder

from keras.callbacks import TensorBoard
autoencoder.fit(x_train, x_train,
                epochs=50,
                batch_size=128,
                shuffle=True,
                validation_data=(x_test, x_test),
                callbacks=[TensorBoard(log_dir='/tmp/autoencoder')])
Train on 60000 samples, validate on 10000 samples
Epoch 1/50
60000/60000 [==============================] - 29s - loss: 0.2223 - val_loss: 0.1730
...
Epoch 50/50
60000/60000 [==============================] - 24s - loss: 0.0989 - val_loss: 0.0967
Out[47]:
<keras.callbacks.History at 0x2e19dc1de80>
In [50]:
# no separate encoder/decoder submodels were built for the conv autoencoder,
# so evaluate the full model directly
score = autoencoder.evaluate(x_test, x_test)
print("\n", score)
 9728/10000 [============================>.] - ETA: 0s
 0.096730926919
In [ ]:
 

4. Denoising

In [52]:
from keras.datasets import mnist
import numpy as np

(x_train, _), (x_test, _) = mnist.load_data()

x_train = x_train.astype('float32') / 255.
x_test = x_test.astype('float32') / 255.
x_train = np.reshape(x_train, (len(x_train), 28, 28, 1))  # adapt this if using `channels_first` image data format
x_test = np.reshape(x_test, (len(x_test), 28, 28, 1))  # adapt this if using `channels_first` image data format

noise_factor = 0.5
x_train_noisy = x_train + noise_factor * np.random.normal(loc=0.0, scale=1.0, size=x_train.shape) 
x_test_noisy = x_test + noise_factor * np.random.normal(loc=0.0, scale=1.0, size=x_test.shape) 

x_train_noisy = np.clip(x_train_noisy, 0., 1.)
x_test_noisy = np.clip(x_test_noisy, 0., 1.)
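The clipping keeps pixels in [0, 1], which the sigmoid output and binary crossentropy both assume; the additive Gaussian noise (scale 0.5 on the normalized images) would otherwise push values outside that range.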
In [55]:
n = 10
plt.figure(figsize=(20, 2))
for i in range(n):
    ax = plt.subplot(1, n, i+1)
    plt.imshow(x_test_noisy[i].reshape(28, 28))
    plt.gray()
    ax.get_xaxis().set_visible(False)
    ax.get_yaxis().set_visible(False)
plt.show()
In [56]:
input_img = Input(shape=(28, 28, 1))  # adapt this if using `channels_first` image data format

x = Conv2D(32, (3, 3), activation='relu', padding='same')(input_img)
x = MaxPooling2D((2, 2), padding='same')(x)
x = Conv2D(32, (3, 3), activation='relu', padding='same')(x)
encoded = MaxPooling2D((2, 2), padding='same')(x)

# at this point the representation is (7, 7, 32)

x = Conv2D(32, (3, 3), activation='relu', padding='same')(encoded)
x = UpSampling2D((2, 2))(x)
x = Conv2D(32, (3, 3), activation='relu', padding='same')(x)
x = UpSampling2D((2, 2))(x)
decoded = Conv2D(1, (3, 3), activation='sigmoid', padding='same')(x)

autoencoder = Model(input_img, decoded)
autoencoder.compile(optimizer='adadelta', loss='binary_crossentropy')
autoencoder.summary()
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
input_7 (InputLayer)         (None, 28, 28, 1)         0         
_________________________________________________________________
conv2d_8 (Conv2D)            (None, 28, 28, 32)        320       
_________________________________________________________________
max_pooling2d_4 (MaxPooling2 (None, 14, 14, 32)        0         
_________________________________________________________________
conv2d_9 (Conv2D)            (None, 14, 14, 32)        9248      
_________________________________________________________________
max_pooling2d_5 (MaxPooling2 (None, 7, 7, 32)          0         
_________________________________________________________________
conv2d_10 (Conv2D)           (None, 7, 7, 32)          9248      
_________________________________________________________________
up_sampling2d_4 (UpSampling2 (None, 14, 14, 32)        0         
_________________________________________________________________
conv2d_11 (Conv2D)           (None, 14, 14, 32)        9248      
_________________________________________________________________
up_sampling2d_5 (UpSampling2 (None, 28, 28, 32)        0         
_________________________________________________________________
conv2d_12 (Conv2D)           (None, 28, 28, 1)         289       
=================================================================
Total params: 28,353
Trainable params: 28,353
Non-trainable params: 0
_________________________________________________________________
In [57]:
autoencoder.fit(x_train_noisy, x_train,
                epochs=100,
                batch_size=128,
                shuffle=True,
                validation_data=(x_test_noisy, x_test),
                callbacks=[TensorBoard(log_dir='/tmp/tb', histogram_freq=0, write_graph=False)])
Train on 60000 samples, validate on 10000 samples
Epoch 1/100
60000/60000 [==============================] - 35s - loss: 0.1890 - val_loss: 0.1292
...
Epoch 100/100
60000/60000 [==============================] - 35s - loss: 0.0945 - val_loss: 0.0941
Out[57]:
<keras.callbacks.History at 0x2e197f94668>
In [59]:
# score on clean inputs; to measure denoising proper, use
# autoencoder.evaluate(x_test_noisy, x_test) instead
score = autoencoder.evaluate(x_test, x_test)
print("\n", score)
 9952/10000 [============================>.] - ETA: 0s
 0.0745966090679
In [62]:
# show noisy inputs (top) against the trained model's denoised reconstructions (bottom)
decoded_imgs = autoencoder.predict(x_test_noisy)

n = 10
plt.figure(figsize=(20, 4))
for i in range(n):
    # display noisy input
    ax = plt.subplot(2, n, i + 1)
    plt.imshow(x_test_noisy[i].reshape(28, 28))
    plt.gray()
    ax.get_xaxis().set_visible(False)
    ax.get_yaxis().set_visible(False)

    # display denoised reconstruction
    ax = plt.subplot(2, n, i + 1 + n)
    plt.imshow(decoded_imgs[i].reshape(28, 28))
    plt.gray()
    ax.get_xaxis().set_visible(False)
    ax.get_yaxis().set_visible(False)

plt.show()
In [ ]:
 

5. Sequence to Sequence Autoencoder

In [ ]:
# sample code, not run here; the dimensions below are illustrative placeholders
from keras.layers import Input, LSTM, RepeatVector
from keras.models import Model

timesteps = 10   # length of each input sequence (example value)
input_dim = 8    # features per timestep (example value)
latent_dim = 4   # size of the sequence encoding (example value)

inputs = Input(shape=(timesteps, input_dim))
# encode the whole sequence into a single latent vector
encoded = LSTM(latent_dim)(inputs)

# repeat the latent vector once per timestep, then decode back into a sequence
decoded = RepeatVector(timesteps)(encoded)
decoded = LSTM(input_dim, return_sequences=True)(decoded)

sequence_autoencoder = Model(inputs, decoded)
encoder = Model(inputs, encoded)
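
A usual next step (illustrative, not part of the original code) would be sequence_autoencoder.compile(optimizer='adam', loss='mse') followed by fitting on arrays of shape (samples, timesteps, input_dim).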
In [ ]:
 

6. Variational Autoencoder (VAE)
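
This section was left empty; below is a minimal VAE sketch adapted from the Keras blog post linked at the top (not run in this notebook; layer sizes and optimizer are illustrative).

In [ ]:
# a minimal VAE *sketch*, following the blog post above -- not run here
from keras.layers import Input, Dense, Lambda
from keras.models import Model
from keras import backend as K
from keras import metrics

original_dim = 784      # flattened MNIST digits
intermediate_dim = 256  # example value
latent_dim = 2          # example value

x = Input(shape=(original_dim,))
h = Dense(intermediate_dim, activation='relu')(x)
z_mean = Dense(latent_dim)(h)     # mean of q(z|x)
z_log_var = Dense(latent_dim)(h)  # log-variance of q(z|x)

def sampling(args):
    # reparameterization trick: z = mu + sigma * epsilon
    z_mean, z_log_var = args
    epsilon = K.random_normal(shape=(K.shape(z_mean)[0], latent_dim))
    return z_mean + K.exp(0.5 * z_log_var) * epsilon

z = Lambda(sampling)([z_mean, z_log_var])

# decoder: map the sampled code back to pixel space
decoder_h = Dense(intermediate_dim, activation='relu')
decoder_mean = Dense(original_dim, activation='sigmoid')
x_decoded_mean = decoder_mean(decoder_h(z))

def vae_loss(x, x_decoded_mean):
    # reconstruction term plus KL divergence from the unit-Gaussian prior
    xent_loss = original_dim * metrics.binary_crossentropy(x, x_decoded_mean)
    kl_loss = -0.5 * K.sum(1 + z_log_var - K.square(z_mean) - K.exp(z_log_var), axis=-1)
    return K.mean(xent_loss + kl_loss)

vae = Model(x, x_decoded_mean)
vae.compile(optimizer='rmsprop', loss=vae_loss)
# vae.fit(x_train, x_train, epochs=50, batch_size=256, validation_data=(x_test, x_test))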

In [ ]:
 