Commit 7e7d206f authored by Saravanan G
Upload New File

parent 3a56a457
%% Cell type:code id: tags:
```
# Load CIFAR-100; the labels are discarded because the autoencoder only needs the images.
from keras.datasets import cifar100
(x_train, _), (x_test, _) = cifar100.load_data()
```
%% Output
Using TensorFlow backend.
%% Cell type:code id: tags:
```
import cv2
import numpy as np
```
%% Cell type:code id: tags:
```
# Build grayscale copies of the training and test images.
# Keras loads CIFAR images in RGB order, so COLOR_RGB2GRAY is the matching conversion.
x_traing = []
x_testg = []
for i in range(50000):
    x_traing.append(cv2.cvtColor(x_train[i], cv2.COLOR_RGB2GRAY))
for i in range(10000):
    x_testg.append(cv2.cvtColor(x_test[i], cv2.COLOR_RGB2GRAY))
```
%% Cell type:code id: tags:
```
# Stack the lists into arrays and add the single channel dimension expected by Conv2D.
x_traing = np.array(x_traing)
x_testg = np.array(x_testg)
x_traing = x_traing.reshape(50000, 32, 32, 1)
x_testg = x_testg.reshape(10000, 32, 32, 1)
```
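%% Cell type:markdown id: tags:
The loop above converts one image at a time; the same conversion can be written as a single vectorized NumPy call. This is only a sketch, assuming `x_train` and `x_test` are still the raw `(N, 32, 32, 3)` uint8 arrays at this point; the `x_traing_vec` / `x_testg_vec` names are illustrative. The weights are the BT.601 luminance coefficients used by `COLOR_RGB2GRAY`, so the result matches the cv2 loop up to rounding.
%% Cell type:code id: tags:
```
# Sketch: vectorized grayscale conversion equivalent to the cv2 loop above.
weights = np.array([0.299, 0.587, 0.114])                   # BT.601 R, G, B luminance weights
x_traing_vec = np.dot(x_train, weights)[..., np.newaxis]    # -> (50000, 32, 32, 1)
x_testg_vec = np.dot(x_test, weights)[..., np.newaxis]      # -> (10000, 32, 32, 1)
x_traing_vec.shape, x_testg_vec.shape
```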
%% Cell type:code id: tags:
```
# Scale pixel values from [0, 255] to [0, 1] for both the grayscale and color sets.
x_traing = x_traing / 255.0
x_testg = x_testg / 255.0
x_train = x_train / 255.0
x_test = x_test / 255.0
x_testg.shape
```
%% Output
(10000, 32, 32, 1)
%% Cell type:code id: tags:
```
from keras.layers import Input, Dense, Conv2D, MaxPooling2D, UpSampling2D
from keras.models import Model

# Encoder: 32x32x1 grayscale input, downsampled twice to an 8x8x128 bottleneck.
input_img = Input(shape=(32, 32, 1))
x = Conv2D(32, (3, 3), activation='relu', padding='same')(input_img)
x = MaxPooling2D((2, 2), padding='same')(x)                           # 16x16x32
x = Conv2D(64, (3, 3), activation='relu', padding='same')(x)
x = Conv2D(64, (3, 3), activation='relu', padding='same')(x)
x = MaxPooling2D((2, 2), padding='same')(x)                           # 8x8x64
encoded = Conv2D(128, (3, 3), activation='relu', padding='same')(x)   # 8x8x128

# Decoder: upsample back to 32x32 and predict the 3 color channels.
x = UpSampling2D((2, 2))(encoded)
x = Conv2D(128, (3, 3), activation='relu', padding='same')(x)
x = Conv2D(128, (3, 3), activation='relu', padding='same')(x)
x = UpSampling2D((2, 2))(x)
x = Conv2D(64, (3, 3), activation='relu', padding='same')(x)
decoded = Conv2D(3, (3, 3), activation='linear', padding='same')(x)

autoencoder = Model(input_img, decoded)
autoencoder.compile(optimizer='adam', loss='mean_squared_error', metrics=['accuracy'])
```
%% Cell type:code id: tags:
```
autoencoder.summary()
```
%% Output
_________________________________________________________________
Layer (type)                 Output Shape              Param #
=================================================================
input_6 (InputLayer)         (None, 32, 32, 1)         0
_________________________________________________________________
conv2d_35 (Conv2D)           (None, 32, 32, 32)        320
_________________________________________________________________
max_pooling2d_11 (MaxPooling (None, 16, 16, 32)        0
_________________________________________________________________
conv2d_36 (Conv2D)           (None, 16, 16, 64)        18496
_________________________________________________________________
conv2d_37 (Conv2D)           (None, 16, 16, 64)        36928
_________________________________________________________________
max_pooling2d_12 (MaxPooling (None, 8, 8, 64)          0
_________________________________________________________________
conv2d_38 (Conv2D)           (None, 8, 8, 128)         73856
_________________________________________________________________
up_sampling2d_11 (UpSampling (None, 16, 16, 128)       0
_________________________________________________________________
conv2d_39 (Conv2D)           (None, 16, 16, 128)       147584
_________________________________________________________________
conv2d_40 (Conv2D)           (None, 16, 16, 128)       147584
_________________________________________________________________
up_sampling2d_12 (UpSampling (None, 32, 32, 128)       0
_________________________________________________________________
conv2d_41 (Conv2D)           (None, 32, 32, 64)        73792
_________________________________________________________________
conv2d_42 (Conv2D)           (None, 32, 32, 3)         1731
=================================================================
Total params: 500,291
Trainable params: 500,291
Non-trainable params: 0
_________________________________________________________________
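%% Cell type:markdown id: tags:
Because `input_img` and `encoded` belong to the same graph, the 8x8x128 bottleneck shown in the summary can be exposed as a standalone model. A minimal sketch (the `encoder` and `codes` names and the 10-image slice are illustrative, not part of the original notebook):
%% Cell type:code id: tags:
```
# Sketch: a standalone encoder over the same layers, useful for inspecting the bottleneck.
encoder = Model(input_img, encoded)
codes = encoder.predict(x_testg[:10])
print(codes.shape)   # expected: (10, 8, 8, 128)
```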
%% Cell type:code id: tags:
```
autoencoder.fit(x_traing, x_train, epochs=5, batch_size=256)
```
%% Output
Epoch 1/5
50000/50000 [==============================] - 27s 549us/step - loss: 0.0095 - acc: 0.5086
Epoch 2/5
50000/50000 [==============================] - 27s 548us/step - loss: 0.0094 - acc: 0.5101
Epoch 3/5
50000/50000 [==============================] - 27s 549us/step - loss: 0.0094 - acc: 0.5113
Epoch 4/5
50000/50000 [==============================] - 27s 549us/step - loss: 0.0093 - acc: 0.5142
Epoch 5/5
50000/50000 [==============================] - 27s 548us/step - loss: 0.0093 - acc: 0.5141
<keras.callbacks.History at 0x7f7798d939e8>
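%% Cell type:markdown id: tags:
Training loss is reported above, but the held-out test images are never scored. A quick check would be to evaluate the model on the grayscale test images against their color originals, reusing the loss and metric from `compile` (a sketch):
%% Cell type:code id: tags:
```
# Sketch: mean-squared-error loss and pixel accuracy on the 10,000 test images.
test_loss, test_acc = autoencoder.evaluate(x_testg, x_test, batch_size=256)
print(test_loss, test_acc)
```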
%% Cell type:code id: tags:
```
import matplotlib.pyplot as plt
%matplotlib inline

# Sanity check: show a grayscale training image next to its color original.
t_imgg = x_traing[50]
t_imgg = t_imgg.reshape(32, 32)
plt.subplot(2, 1, 1)
plt.imshow(t_imgg, cmap='gray')
t_colorimg = x_train[50]
t_colorimg = t_colorimg.reshape(32, 32, 3)
plt.subplot(2, 1, 2)
plt.imshow(t_colorimg)
```
%% Output
<matplotlib.image.AxesImage at 0x7f77988ab358>
%% Cell type:code id: tags:
```
# Run the grayscale image through the autoencoder and view the predicted color image.
t_imgg = t_imgg.reshape(1, 32, 32, 1)
result = autoencoder.predict(t_imgg)
result = result[0]
plt.figure(figsize=(2, 2))
plt.imshow(result)
```
%% Output
Clipping input data to the valid range for imshow with RGB data ([0..1] for floats or [0..255] for integers).
<matplotlib.image.AxesImage at 0x7f77987bc0b8>
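%% Cell type:markdown id: tags:
The image reconstructed above comes from the training set. A sketch for colorizing a few unseen test images, showing grayscale input, prediction, and ground truth in each column (the choice of `n = 5` and the 3-row layout are illustrative):
%% Cell type:code id: tags:
```
# Sketch: colorize the first n test images; rows are input / prediction / ground truth.
n = 5
preds = autoencoder.predict(x_testg[:n])
plt.figure(figsize=(2 * n, 6))
for i in range(n):
    plt.subplot(3, n, i + 1)
    plt.imshow(x_testg[i].reshape(32, 32), cmap='gray')
    plt.axis('off')
    plt.subplot(3, n, n + i + 1)
    plt.imshow(np.clip(preds[i], 0, 1))     # clip: the linear output layer can leave [0, 1]
    plt.axis('off')
    plt.subplot(3, n, 2 * n + i + 1)
    plt.imshow(x_test[i])
    plt.axis('off')
plt.show()
```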