%% Cell type:code id: tags:
``` python
#1#Header
import csv
import numpy as np
import os
from os import urandom
from keras.models import model_from_json
```
%% Cell type:code id: tags:
``` python
#2#Defining Global Variables
num_rounds = 10
m = 0  # round-key index state advanced by f_function (right branch)
o = 0  # round-key index state advanced by ff_function (left branch)
```
%% Cell type:code id: tags:
``` python
#3#Defining WORDSIZE
def WORD_SIZE():
    return 16
```
%% Cell type:code id: tags:
``` python
#4#Defining S-Box
s_box_mapping_np = np.array([0, 4, 1, 5, 2, 6, 3, 7, 8, 12, 9, 13, 10, 14, 11, 15], dtype=np.uint8)
def s_box(input_bits):
    # Look up a 4-bit nibble (0-15) in the S-box table
    input_bits_int = int(input_bits)
    output_bits_int = s_box_mapping_np[input_bits_int]
    return output_bits_int
```
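%% Cell type:markdown id: tags:

As a quick sanity check of the cell above, the expected values below are read straight off the S-box lookup table (e.g. index 2 maps to 1, index 9 maps to 12):

%% Cell type:code id: tags:
``` python
# Spot-check a few S-box entries against the mapping table
for nibble, expected in [(0, 0), (2, 1), (9, 12), (15, 15)]:
    assert s_box(nibble) == expected
print("S-box spot checks passed")
```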
%% Cell type:code id: tags:
``` python
#5#Defining P-Box
def decimal_to_binary_list(value, num_bits=4):
    return np.array([int(x) for x in format(value, f'0{num_bits}b')], dtype=np.uint8)

def p_box(c_decimal, d_decimal):
    # Interleave the bits of two 4-bit S-box outputs into one 8-bit vector
    c = decimal_to_binary_list(c_decimal)
    d = decimal_to_binary_list(d_decimal)
    e = np.zeros(8, dtype=np.uint8)
    e[0] = c[0]
    e[1] = d[0]
    e[2] = c[3]
    e[3] = d[3]
    e[4] = c[1]
    e[5] = d[1]
    e[6] = c[2]
    e[7] = d[2]
    return e
```
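%% Cell type:markdown id: tags:

A small worked example of the P-box interleaving: for c = 0xA (1010) and d = 0x5 (0101), the output is [c0, d0, c3, d3, c1, d1, c2, d2] = [1, 0, 0, 1, 0, 1, 1, 0]:

%% Cell type:code id: tags:
``` python
# p_box(0xA, 0x5): c = 1010, d = 0101
assert np.array_equal(p_box(0xA, 0x5), [1, 0, 0, 1, 0, 1, 1, 0])
```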
%% Cell type:code id: tags:
``` python
#6#Defining L-Box
def l_box(f, g):
    # Merge two 8-bit P-box outputs into one 16-bit vector
    if len(f) != 8 or len(g) != 8:
        raise ValueError("Both input arrays f and g should have exactly 8 elements")
    h = np.zeros(16, dtype=np.uint8)
    h[0] = f[0]
    h[1] = g[0]
    h[2] = f[7]
    h[3] = g[7]
    h[4] = f[1]
    h[5] = g[1]
    h[6] = f[6]
    h[7] = g[6]
    h[8] = f[2]
    h[9] = g[2]
    h[10] = f[5]
    h[11] = g[5]
    h[12] = f[3]
    h[13] = g[3]
    h[14] = f[4]
    h[15] = g[4]
    return h
```
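%% Cell type:markdown id: tags:

The L-box pulls bits alternately from both ends of the two P-box outputs; a minimal check (with arbitrary test vectors) that the first four output bits come from f[0], g[0], f[7], g[7]:

%% Cell type:code id: tags:
``` python
f = np.array([1, 0, 0, 0, 0, 0, 0, 1], dtype=np.uint8)
g = np.array([0, 1, 1, 0, 0, 0, 0, 0], dtype=np.uint8)
h = l_box(f, g)
assert list(h[:4]) == [1, 0, 1, 0]  # f[0], g[0], f[7], g[7]
```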
%% Cell type:code id: tags:
``` python
#7#Defining F-function for Right Side of Plaintext
def binary_array_to_integer(output):
    # Join a bit array such as [1, 0, 1, ...] into an integer
    return int(''.join(map(str, output)), 2)

def f_function(x, key, d):
    # Round function for the right branch: split each 16-bit word into four
    # nibbles, pass them through the S-box, P-box and L-box layers, then XOR
    # the resulting 16-bit vector with a per-sample round key.
    q = 0
    global m
    if isinstance(x, int):
        x = [x]
    input_parts = np.zeros((len(x), 4), dtype=np.uint16)
    for i, val in enumerate(x):
        input_parts[i] = np.array([val >> 12, (val >> 8) & 0xF, (val >> 4) & 0xF, val & 0xF])
    s_box_outputs = np.array([[s_box(element) for element in part] for part in input_parts])
    p_box_outputs = np.zeros((len(x), 2, 8), dtype=np.uint8)
    for i in range(len(x)):
        p_box_outputs[i] = np.array([p_box(s_box_outputs[i][0], s_box_outputs[i][1]),
                                     p_box(s_box_outputs[i][2], s_box_outputs[i][3])])
    final_outputs = np.zeros(len(x), dtype=np.uint32)
    for i in range(len(x)):
        final_output = np.array(l_box(p_box_outputs[i][0], p_box_outputs[i][1]))
        k = key[q][(m + 1) % 4]
        output = binary_array_to_integer(final_output ^ k)
        final_outputs[i] = output
        q += 1
    # m alternates 0 -> 2 -> 0 across calls, so successive rounds use subkey
    # indices (m + 1) % 4 = 1, 3, 1, 3, ...
    if m < 2:
        m += 2
    else:
        m = 0
    return final_outputs
```
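%% Cell type:markdown id: tags:

The subkey schedule can be traced without touching the real global state. The hypothetical trace below simulates the update rule of m to show that f_function cycles through subkey indices 1, 3, 1, 3, ...; ff_function in the next cell analogously cycles o % 4 = 0, 2, 0, 2, ...:

%% Cell type:code id: tags:
``` python
# Simulate the m update rule for a few rounds (the global m is not modified)
m_sim, indices = 0, []
for _ in range(6):
    indices.append((m_sim + 1) % 4)        # subkey index used this round
    m_sim = m_sim + 2 if m_sim < 2 else 0  # update rule from f_function
print(indices)  # [1, 3, 1, 3, 1, 3]
```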
%% Cell type:code id: tags:
``` python
#8#Defining F-function for Left Side of Plaintext
def ff_function(x, key, d):
    # Round function for the left branch; identical to f_function except that
    # the counter o selects subkey indices o % 4 = 0, 2, 0, 2, ...
    q = 0
    global o
    if isinstance(x, int):
        x = [x]
    input_parts = np.zeros((len(x), 4), dtype=np.uint16)
    for i, val in enumerate(x):
        input_parts[i] = np.array([val >> 12, (val >> 8) & 0xF, (val >> 4) & 0xF, val & 0xF])
    s_box_outputs = np.array([[s_box(element) for element in part] for part in input_parts])
    p_box_outputs = np.zeros((len(x), 2, 8), dtype=np.uint8)
    for i in range(len(x)):
        p_box_outputs[i] = np.array([p_box(s_box_outputs[i][0], s_box_outputs[i][1]),
                                     p_box(s_box_outputs[i][2], s_box_outputs[i][3])])
    final_outputs = np.zeros(len(x), dtype=np.uint32)
    for i in range(len(x)):
        final_output = np.array(l_box(p_box_outputs[i][0], p_box_outputs[i][1]))
        k = key[q][o % 4]
        output = binary_array_to_integer(final_output ^ k)
        final_outputs[i] = output
        q += 1
    if o < 2:
        o += 2
    else:
        o = 0
    return final_outputs
```
%% Cell type:code id: tags:
``` python
#9#Convert the ciphertext pairs into Binary array
def convert_to_binary(row):
    # Pack four 16-bit ciphertext words into one 64-entry bit vector
    bin_array = np.zeros(64, dtype=np.uint8)
    for i, num in enumerate(row):
        binary_str = format(num, '016b')
        for j, b in enumerate(binary_str):
            bin_array[i * 16 + j] = int(b)
    return bin_array
```
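%% Cell type:markdown id: tags:

For example, a row of four 16-bit words becomes a 64-entry bit vector whose first 16 entries are the bits of the first word:

%% Cell type:code id: tags:
``` python
bits = convert_to_binary([0x0001, 0xFFFF, 0x0000, 0x8000])
assert bits.shape == (64,)
assert list(bits[:16]) == [0] * 15 + [1]  # 0x0001 -> 0000000000000001
assert list(bits[16:32]) == [1] * 16      # 0xFFFF -> all ones
```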
%% Cell type:code id: tags:
``` python
#10#Encryption Function
def lcb_encrypt(plaintext, key, rounds, d):
    # Feistel-style loop: each round the new left half is f_function of the
    # old right half and the new right half is ff_function of the old left half.
    left_plaintext = np.uint16(plaintext[0])
    right_plaintext = np.uint16(plaintext[1])
    L, R = left_plaintext, right_plaintext
    n = 0
    while n < rounds:
        L, R = f_function(R, key, d), ff_function(L, key, d)
        n += 1
    return (L, R)
```
%% Cell type:code id: tags:
``` python
#11#Function for generation of keys
import random

def generate_hex_keys(num_keys, length=16):
    hex_chars = "0123456789ABCDEF"
    keys_str = ["".join(random.choices(hex_chars, k=length)) for _ in range(num_keys)]
    return keys_str

def to_binary(value, bits):
    return format(value, f'0{bits}b')

def generate_round_keys(num_keys):
    # Derive four 16-bit subkeys K1-K4 from each random 64-bit master key and
    # store each subkey as a 16-element bit array; the result has shape
    # (num_keys, 4, 16).
    random_keys_hex = generate_hex_keys(num_keys)
    round_keys = []
    for random_key_hex in random_keys_hex:
        random_key = int(random_key_hex, 16)
        K1 = (random_key >> 48) & 0xFFFF
        K2 = (random_key >> 32) & 0xFFFF
        K3 = (random_key >> 16) & 0xFFFF
        K4 = random_key & 0xFFFF
        subkeys = [np.array([int(bit) for bit in to_binary(K, 16)]) for K in (K1, K2, K3, K4)]
        round_keys.append(np.array(subkeys))
    return np.array(round_keys)
```
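%% Cell type:markdown id: tags:

A minimal end-to-end sanity check, assuming the cells above have been run: generate one round key and encrypt a single plaintext pair. The globals m and o are reset first so the subkey schedule starts from round 0 and the ciphertext is reproducible:

%% Cell type:code id: tags:
``` python
m, o = 0, 0  # reset the round-key counters used by f_function / ff_function
rk = generate_round_keys(1)
ct_l, ct_r = lcb_encrypt((np.array([0x1234], dtype=np.uint16),
                          np.array([0x5678], dtype=np.uint16)),
                         rk, num_rounds, 1)
print(hex(int(ct_l[0])), hex(int(ct_r[0])))
```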
%% Cell type:code id: tags:
``` python
#12#Make dataset
def make_train_data(n, nr, diff=(0, 0x836F)):
    # Labels: Y=1 marks real pairs with the fixed input difference diff,
    # Y=0 marks pairs whose second plaintext is replaced by random data.
    Y = np.frombuffer(urandom(n), dtype=np.uint8)
    Y = Y & 1
    plaintext = np.frombuffer(urandom(4 * n), dtype=np.uint32)
    plain0l = np.empty(n, dtype=np.uint16)
    plain0r = np.empty(n, dtype=np.uint16)
    for i in range(n):
        plain0l[i] = (plaintext[i] >> 16) & 0xffff
        plain0r[i] = plaintext[i] & 0xffff
    plain1l = plain0l ^ diff[0]
    plain1r = plain0r ^ diff[1]
    num_rand_samples = np.sum(Y == 0)
    plain1l[Y == 0] = np.frombuffer(urandom(2 * num_rand_samples), dtype=np.uint16)
    plain1r[Y == 0] = np.frombuffer(urandom(2 * num_rand_samples), dtype=np.uint16)
    round_key = generate_round_keys(n)
    ctdata0l, ctdata0r = lcb_encrypt((plain0l, plain0r), round_key, nr, n)
    ctdata1l, ctdata1r = lcb_encrypt((plain1l, plain1r), round_key, nr, n)
    ctdata = np.vstack((ctdata0l, ctdata0r, ctdata1l, ctdata1r)).T
    X = np.array([convert_to_binary(row) for row in ctdata])
    with open("VDataset_NewP.csv", "w", newline='') as f:
        writer = csv.writer(f)
        writer.writerow(["plain0l", "plain0r", "plain1l", "plain1r", "Y"])
        for i in range(n):
            writer.writerow([plain0l[i], plain0r[i], plain1l[i], plain1r[i], Y[i]])
    with open("VDataset_NewC.csv", "w", newline='') as f:
        writer = csv.writer(f)
        writer.writerow(["ctdata0l", "ctdata0r", "ctdata1l", "ctdata1r", "Y"])
        for i in range(n):
            writer.writerow([ctdata0l[i], ctdata0r[i], ctdata1l[i], ctdata1r[i], Y[i]])
    return (X, Y)
```
%% Cell type:code id: tags:
``` python
make_train_data(10**5, num_rounds)
```
%% Output
(array([[1, 1, 0, ..., 0, 1, 0],
        [0, 0, 0, ..., 1, 0, 1],
        [1, 1, 1, ..., 1, 1, 0],
        ...,
        [0, 1, 0, ..., 0, 1, 0],
        [1, 0, 1, ..., 0, 0, 0],
        [1, 0, 0, ..., 1, 0, 0]], dtype=uint8),
 array([1, 1, 1, ..., 0, 0, 0], dtype=uint8))
%% Cell type:code id: tags:
``` python
#13#Creation of Model
from pickle import dump
from keras.callbacks import ModelCheckpoint, LearningRateScheduler
from keras.models import Model
from keras.optimizers import Adam
from keras.layers import Dense, Conv1D, Input, Reshape, Permute, Add, Flatten, BatchNormalization, Activation
from keras import backend as K
from keras.regularizers import l2
bs = 5000
wdir = './freshly_trained_nets/'
os.makedirs(wdir, exist_ok=True)  # make sure the checkpoint directory exists

def cyclic_lr(num_epochs, high_lr, low_lr):
    # Cyclic schedule: decay linearly from high_lr to low_lr over num_epochs,
    # then jump back to high_lr and repeat.
    res = lambda i: low_lr + ((num_epochs - 1) - i % num_epochs) / (num_epochs - 1) * (high_lr - low_lr)
    return res

def make_checkpoint(datei):
    res = ModelCheckpoint(datei, monitor='val_loss', save_best_only=True)
    return res

#make residual tower of convolutional blocks
def make_resnet(num_blocks=2, num_filters=32, num_outputs=1, d1=64, d2=64, word_size=16, ks=3, depth=5, reg_param=0.0001, final_activation='sigmoid'):
    #Input and preprocessing layers
    inp = Input(shape=(num_blocks * word_size * 2,))
    rs = Reshape((2 * num_blocks, word_size))(inp)
    perm = Permute((2, 1))(rs)
    #add a single residual layer that will expand the data to num_filters channels
    #this is a bit-sliced layer
    conv0 = Conv1D(num_filters, kernel_size=1, padding='same', kernel_regularizer=l2(reg_param))(perm)
    conv0 = BatchNormalization()(conv0)
    conv0 = Activation('relu')(conv0)
    #add residual blocks
    shortcut = conv0
    for i in range(depth):
        conv1 = Conv1D(num_filters, kernel_size=ks, padding='same', kernel_regularizer=l2(reg_param))(shortcut)
        conv1 = BatchNormalization()(conv1)
        conv1 = Activation('relu')(conv1)
        conv2 = Conv1D(num_filters, kernel_size=ks, padding='same', kernel_regularizer=l2(reg_param))(conv1)
        conv2 = BatchNormalization()(conv2)
        conv2 = Activation('relu')(conv2)
        shortcut = Add()([shortcut, conv2])
    #add prediction head
    flat1 = Flatten()(shortcut)
    dense1 = Dense(d1, kernel_regularizer=l2(reg_param))(flat1)
    dense1 = BatchNormalization()(dense1)
    dense1 = Activation('relu')(dense1)
    dense2 = Dense(d2, kernel_regularizer=l2(reg_param))(dense1)
    dense2 = BatchNormalization()(dense2)
    dense2 = Activation('relu')(dense2)
    out = Dense(num_outputs, activation=final_activation, kernel_regularizer=l2(reg_param))(dense2)
    model = Model(inputs=inp, outputs=out)
    return model

def train_LCB_distinguisher(num_epochs, num_rounds, depth):
    #create the network
    print(num_rounds)
    print(depth)
    net = make_resnet(depth=depth, reg_param=10**-5)
    net.compile(optimizer='adam', loss='mse', metrics=['acc'])
    #generate training and validation data
    X, Y = make_train_data(10**6, num_rounds)
    X_eval, Y_eval = make_train_data(10**5, num_rounds)
    #set up model checkpoint
    check = make_checkpoint(wdir + 'ghor_Rk_0000_836F_Round_' + str(num_rounds) + '_depth_' + str(depth) + '.h5')
    #create learnrate schedule
    lr = LearningRateScheduler(cyclic_lr(10, 0.002, 0.0001))
    #train and evaluate
    h = net.fit(X, Y, epochs=num_epochs, batch_size=bs, validation_data=(X_eval, Y_eval), callbacks=[lr, check])
    # use distinct filenames so val_loss does not overwrite val_acc
    np.save(wdir + 'h' + str(num_rounds) + 'r_depth' + str(depth) + '_val_acc.npy', h.history['val_acc'])
    np.save(wdir + 'h' + str(num_rounds) + 'r_depth' + str(depth) + '_val_loss.npy', h.history['val_loss'])
    dump(h.history, open(wdir + 'hist' + str(num_rounds) + 'r_depth' + str(depth) + '.p', 'wb'))
    print("Best validation accuracy: ", np.max(h.history['val_acc']))
    return (net, h)
```
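%% Cell type:markdown id: tags:

The 10-epoch learning-rate cycle is visible in the training log below (0.0020 at epoch 1 decaying to 1.0000e-04 at epoch 10, then jumping back up). One cycle of the schedule, printed directly:

%% Cell type:code id: tags:
``` python
schedule = cyclic_lr(10, 0.002, 0.0001)
print([round(schedule(i), 6) for i in range(10)])
# [0.002, 0.001789, 0.001578, ..., 0.000311, 0.0001]
```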
%% Cell type:code id: tags:
``` python
#14#Training the Model
num_epochs = 200
depth = 10
trained_net, history = train_LCB_distinguisher(num_epochs, num_rounds, depth)
```
%% Output
10
10
Epoch 1/200
200/200 [==============================] - 185s 909ms/step - loss: 0.0114 - acc: 0.9959 - val_loss: 0.0240 - val_acc: 0.9776 - lr: 0.0020
Epoch 2/200
200/200 [==============================] - 182s 909ms/step - loss: 0.0064 - acc: 1.0000 - val_loss: 0.0056 - val_acc: 0.9999 - lr: 0.0018
Epoch 3/200
200/200 [==============================] - 190s 951ms/step - loss: 0.0049 - acc: 1.0000 - val_loss: 0.0044 - val_acc: 1.0000 - lr: 0.0016
Epoch 4/200
200/200 [==============================] - 183s 916ms/step - loss: 0.0038 - acc: 1.0000 - val_loss: 0.0037 - val_acc: 1.0000 - lr: 0.0014
Epoch 5/200
200/200 [==============================] - 183s 914ms/step - loss: 0.0030 - acc: 1.0000 - val_loss: 0.0030 - val_acc: 1.0000 - lr: 0.0012
Epoch 6/200
200/200 [==============================] - 185s 923ms/step - loss: 0.0024 - acc: 1.0000 - val_loss: 0.0023 - val_acc: 1.0000 - lr: 9.4444e-04
Epoch 7/200
200/200 [==============================] - 183s 916ms/step - loss: 0.0020 - acc: 1.0000 - val_loss: 0.0019 - val_acc: 1.0000 - lr: 7.3333e-04
Epoch 8/200
200/200 [==============================] - 180s 902ms/step - loss: 0.0017 - acc: 1.0000 - val_loss: 0.0017 - val_acc: 1.0000 - lr: 5.2222e-04
Epoch 9/200
200/200 [==============================] - 181s 903ms/step - loss: 0.0016 - acc: 1.0000 - val_loss: 0.0015 - val_acc: 1.0000 - lr: 3.1111e-04
Epoch 10/200
200/200 [==============================] - 181s 905ms/step - loss: 0.0015 - acc: 1.0000 - val_loss: 0.0015 - val_acc: 1.0000 - lr: 1.0000e-04
Epoch 11/200
200/200 [==============================] - 181s 904ms/step - loss: 0.0011 - acc: 1.0000 - val_loss: 0.4747 - val_acc: 0.5018 - lr: 0.0020
Epoch 12/200
200/200 [==============================] - 181s 904ms/step - loss: 6.9676e-04 - acc: 1.0000 - val_loss: 0.1912 - val_acc: 0.6449 - lr: 0.0018
Epoch 13/200
200/200 [==============================] - 186s 931ms/step - loss: 5.0213e-04 - acc: 1.0000 - val_loss: 8.2996e-04 - val_acc: 0.9995 - lr: 0.0016
Epoch 14/200
200/200 [==============================] - 181s 908ms/step - loss: 3.9845e-04 - acc: 1.0000 - val_loss: 0.3903 - val_acc: 0.5018 - lr: 0.0014
Epoch 15/200
200/200 [==============================] - 183s 915ms/step - loss: 2.6665e-04 - acc: 1.0000 - val_loss: 0.4915 - val_acc: 0.5018 - lr: 0.0012
Epoch 16/200
200/200 [==============================] - 182s 910ms/step - loss: 2.0211e-04 - acc: 1.0000 - val_loss: 0.4945 - val_acc: 0.5018 - lr: 9.4444e-04
Epoch 17/200
200/200 [==============================] - 185s 926ms/step - loss: 1.6426e-04 - acc: 1.0000 - val_loss: 0.4659 - val_acc: 0.5018 - lr: 7.3333e-04
Epoch 18/200
200/200 [==============================] - 185s 926ms/step - loss: 1.4096e-04 - acc: 1.0000 - val_loss: 0.1450 - val_acc: 0.7232 - lr: 5.2222e-04
Epoch 19/200
200/200 [==============================] - 181s 904ms/step - loss: 1.2714e-04 - acc: 1.0000 - val_loss: 0.0015 - val_acc: 1.0000 - lr: 3.1111e-04
Epoch 20/200
200/200 [==============================] - 182s 908ms/step - loss: 1.2059e-04 - acc: 1.0000 - val_loss: 1.2217e-04 - val_acc: 1.0000 - lr: 1.0000e-04
Epoch 21/200
200/200 [==============================] - 181s 905ms/step - loss: 0.0044 - acc: 0.9980 - val_loss: 0.0059 - val_acc: 0.9974 - lr: 0.0020
Epoch 22/200
200/200 [==============================] - 181s 904ms/step - loss: 0.0037 - acc: 1.0000 - val_loss: 0.0036 - val_acc: 1.0000 - lr: 0.0018
Epoch 23/200
200/200 [==============================] - 181s 904ms/step - loss: 0.0034 - acc: 1.0000 - val_loss: 0.0033 - val_acc: 1.0000 - lr: 0.0016
Epoch 24/200
200/200 [==============================] - 181s 903ms/step - loss: 0.0032 - acc: 1.0000 - val_loss: 0.0031 - val_acc: 1.0000 - lr: 0.0014
Epoch 25/200
200/200 [==============================] - 181s 903ms/step - loss: 0.0031 - acc: 1.0000 - val_loss: 0.0030 - val_acc: 1.0000 - lr: 0.0012
Epoch 26/200
200/200 [==============================] - 182s 908ms/step - loss: 0.0029 - acc: 1.0000 - val_loss: 0.0029 - val_acc: 1.0000 - lr: 9.4444e-04
Epoch 27/200
200/200 [==============================] - 181s 905ms/step - loss: 0.0028 - acc: 1.0000 - val_loss: 0.0028 - val_acc: 1.0000 - lr: 7.3333e-04
Epoch 28/200
200/200 [==============================] - 181s 903ms/step - loss: 0.0027 - acc: 1.0000 - val_loss: 0.0027 - val_acc: 1.0000 - lr: 5.2222e-04
Epoch 29/200
200/200 [==============================] - 180s 902ms/step - loss: 0.0027 - acc: 1.0000 - val_loss: 0.0027 - val_acc: 1.0000 - lr: 3.1111e-04
Epoch 30/200
200/200 [==============================] - 181s 904ms/step - loss: 0.0026 - acc: 1.0000 - val_loss: 0.0026 - val_acc: 1.0000 - lr: 1.0000e-04
Epoch 31/200
200/200 [==============================] - 183s 917ms/step - loss: 0.0025 - acc: 1.0000 - val_loss: 0.0034 - val_acc: 1.0000 - lr: 0.0020
Epoch 32/200
200/200 [==============================] - 181s 905ms/step - loss: 0.0022 - acc: 1.0000 - val_loss: 0.0025 - val_acc: 1.0000 - lr: 0.0018
Epoch 33/200
200/200 [==============================] - 182s 908ms/step - loss: 0.0020 - acc: 1.0000 - val_loss: 0.0019 - val_acc: 1.0000 - lr: 0.0016
Epoch 34/200
200/200 [==============================] - 181s 907ms/step - loss: 0.0018 - acc: 1.0000 - val_loss: 0.0018 - val_acc: 1.0000 - lr: 0.0014
Epoch 35/200
200/200 [==============================] - 181s 907ms/step - loss: 0.0017 - acc: 1.0000 - val_loss: 0.0019 - val_acc: 1.0000 - lr: 0.0012
Epoch 36/200
200/200 [==============================] - 181s 905ms/step - loss: 0.0016 - acc: 1.0000 - val_loss: 0.0015 - val_acc: 1.0000 - lr: 9.4444e-04
Epoch 37/200
200/200 [==============================] - 182s 908ms/step - loss: 0.0015 - acc: 1.0000 - val_loss: 0.0015 - val_acc: 1.0000 - lr: 7.3333e-04
Epoch 38/200
200/200 [==============================] - 182s 909ms/step - loss: 0.0014 - acc: 1.0000 - val_loss: 0.0014 - val_acc: 1.0000 - lr: 5.2222e-04
Epoch 39/200
200/200 [==============================] - 182s 908ms/step - loss: 0.0014 - acc: 1.0000 - val_loss: 0.0014 - val_acc: 1.0000 - lr: 3.1111e-04
Epoch 40/200
200/200 [==============================] - 181s 907ms/step - loss: 0.0013 - acc: 1.0000 - val_loss: 0.0013 - val_acc: 1.0000 - lr: 1.0000e-04
Epoch 41/200
200/200 [==============================] - 182s 909ms/step - loss: 0.0012 - acc: 1.0000 - val_loss: 0.0375 - val_acc: 0.9868 - lr: 0.0020
Epoch 42/200
200/200 [==============================] - 182s 908ms/step - loss: 0.0010 - acc: 1.0000 - val_loss: 0.1601 - val_acc: 0.6503 - lr: 0.0018
Epoch 43/200
200/200 [==============================] - 182s 909ms/step - loss: 8.7272e-04 - acc: 1.0000 - val_loss: 0.0358 - val_acc: 0.9965 - lr: 0.0016
Epoch 44/200
200/200 [==============================] - 199s 996ms/step - loss: 7.5967e-04 - acc: 1.0000 - val_loss: 0.2835 - val_acc: 0.5024 - lr: 0.0014
Epoch 45/200
200/200 [==============================] - 187s 934ms/step - loss: 6.7187e-04 - acc: 1.0000 - val_loss: 0.0131 - val_acc: 1.0000 - lr: 0.0012
Epoch 46/200
200/200 [==============================] - 182s 908ms/step - loss: 6.0448e-04 - acc: 1.0000 - val_loss: 0.0045 - val_acc: 1.0000 - lr: 9.4444e-04
Epoch 47/200
200/200 [==============================] - 187s 938ms/step - loss: 5.5327e-04 - acc: 1.0000 - val_loss: 0.0013 - val_acc: 1.0000 - lr: 7.3333e-04
Epoch 48/200
200/200 [==============================] - 204s 1s/step - loss: 5.1591e-04 - acc: 1.0000 - val_loss: 5.5068e-04 - val_acc: 1.0000 - lr: 5.2222e-04
Epoch 49/200
200/200 [==============================] - 191s 956ms/step - loss: 4.9090e-04 - acc: 1.0000 - val_loss: 5.0161e-04 - val_acc: 1.0000 - lr: 3.1111e-04
Epoch 50/200
200/200 [==============================] - 187s 934ms/step - loss: 4.7809e-04 - acc: 1.0000 - val_loss: 4.7770e-04 - val_acc: 1.0000 - lr: 1.0000e-04
Epoch 51/200
200/200 [==============================] - 181s 904ms/step - loss: 0.0014 - acc: 0.9998 - val_loss: 0.1151 - val_acc: 0.8641 - lr: 0.0020
Epoch 52/200
200/200 [==============================] - 187s 934ms/step - loss: 9.5738e-04 - acc: 1.0000 - val_loss: 0.3482 - val_acc: 0.5020 - lr: 0.0018
Epoch 53/200
200/200 [==============================] - 181s 904ms/step - loss: 6.7179e-04 - acc: 1.0000 - val_loss: 0.4190 - val_acc: 0.5018 - lr: 0.0016
Epoch 54/200
200/200 [==============================] - 181s 904ms/step - loss: 5.1030e-04 - acc: 1.0000 - val_loss: 0.3700 - val_acc: 0.5019 - lr: 0.0014
Epoch 55/200
200/200 [==============================] - 181s 904ms/step - loss: 4.0714e-04 - acc: 1.0000 - val_loss: 0.2277 - val_acc: 0.5630 - lr: 0.0012
Epoch 56/200
200/200 [==============================] - 181s 905ms/step - loss: 3.3943e-04 - acc: 1.0000 - val_loss: 0.0590 - val_acc: 0.9308 - lr: 9.4444e-04
Epoch 57/200
200/200 [==============================] - 181s 904ms/step - loss: 2.9440e-04 - acc: 1.0000 - val_loss: 0.0220 - val_acc: 0.9963 - lr: 7.3333e-04
Epoch 58/200
200/200 [==============================] - 181s 904ms/step - loss: 2.6228e-04 - acc: 1.0000 - val_loss: 4.3736e-04 - val_acc: 1.0000 - lr: 5.2222e-04
Epoch 59/200
200/200 [==============================] - 181s 904ms/step - loss: 2.4230e-04 - acc: 1.0000 - val_loss: 2.6776e-04 - val_acc: 1.0000 - lr: 3.1111e-04
Epoch 60/200
200/200 [==============================] - 181s 904ms/step - loss: 2.3241e-04 - acc: 1.0000 - val_loss: 2.3119e-04 - val_acc: 1.0000 - lr: 1.0000e-04
Epoch 61/200
200/200 [==============================] - 180s 902ms/step - loss: 3.1112e-04 - acc: 1.0000 - val_loss: 8.1905e-04 - val_acc: 0.9997 - lr: 0.0020
Epoch 62/200
200/200 [==============================] - 181s 903ms/step - loss: 3.3318e-04 - acc: 1.0000 - val_loss: 0.4971 - val_acc: 0.5018 - lr: 0.0018
Epoch 63/200
200/200 [==============================] - 183s 914ms/step - loss: 1.6852e-04 - acc: 1.0000 - val_loss: 0.4981 - val_acc: 0.5018 - lr: 0.0016
Epoch 64/200
200/200 [==============================] - 187s 934ms/step - loss: 1.1411e-04 - acc: 1.0000 - val_loss: 0.4975 - val_acc: 0.5018 - lr: 0.0014
Epoch 65/200
200/200 [==============================] - 189s 943ms/step - loss: 8.4985e-05 - acc: 1.0000 - val_loss: 0.4952 - val_acc: 0.5018 - lr: 0.0012
Epoch 66/200
200/200 [==============================] - 181s 904ms/step - loss: 1.0603e-04 - acc: 1.0000 - val_loss: 0.0023 - val_acc: 0.9974 - lr: 9.4444e-04
Epoch 67/200
200/200 [==============================] - 181s 905ms/step - loss: 1.2690e-04 - acc: 1.0000 - val_loss: 0.0074 - val_acc: 0.9999 - lr: 7.3333e-04
Epoch 68/200
200/200 [==============================] - 187s 933ms/step - loss: 8.4066e-05 - acc: 1.0000 - val_loss: 0.0879 - val_acc: 0.8975 - lr: 5.2222e-04
Epoch 69/200
200/200 [==============================] - 181s 907ms/step - loss: 6.8841e-05 - acc: 1.0000 - val_loss: 5.1311e-04 - val_acc: 1.0000 - lr: 3.1111e-04
Epoch 70/200
200/200 [==============================] - 181s 907ms/step - loss: 6.3039e-05 - acc: 1.0000 - val_loss: 6.8196e-05 - val_acc: 1.0000 - lr: 1.0000e-04
Epoch 71/200
200/200 [==============================] - 181s 906ms/step - loss: 2.2853e-04 - acc: 1.0000 - val_loss: 0.5024 - val_acc: 0.4984 - lr: 0.0020
Epoch 72/200
200/200 [==============================] - 180s 900ms/step - loss: 4.0886e-04 - acc: 1.0000 - val_loss: 0.4943 - val_acc: 0.5018 - lr: 0.0018
Epoch 73/200
200/200 [==============================] - 183s 913ms/step - loss: 1.9104e-04 - acc: 1.0000 - val_loss: 0.4975 - val_acc: 0.5018 - lr: 0.0016
Epoch 74/200
200/200 [==============================] - 182s 908ms/step - loss: 1.1183e-04 - acc: 1.0000 - val_loss: 0.4980 - val_acc: 0.5018 - lr: 0.0014
Epoch 75/200
200/200 [==============================] - 188s 939ms/step - loss: 6.2444e-05 - acc: 1.0000 - val_loss: 0.4977 - val_acc: 0.5018 - lr: 0.0012
Epoch 76/200
200/200 [==============================] - 189s 943ms/step - loss: 4.3502e-05 - acc: 1.0000 - val_loss: 0.4976 - val_acc: 0.5018 - lr: 9.4444e-04
Epoch 77/200
200/200 [==============================] - 181s 905ms/step - loss: 3.2875e-05 - acc: 1.0000 - val_loss: 0.4972 - val_acc: 0.5018 - lr: 7.3333e-04
Epoch 78/200
200/200 [==============================] - 181s 904ms/step - loss: 2.6406e-05 - acc: 1.0000 - val_loss: 0.4931 - val_acc: 0.5018 - lr: 5.2222e-04
Epoch 79/200
200/200 [==============================] - 181s 906ms/step - loss: 2.3363e-05 - acc: 1.0000 - val_loss: 0.4424 - val_acc: 0.5018 - lr: 3.1111e-04
Epoch 80/200
200/200 [==============================] - 184s 920ms/step - loss: 2.2037e-05 - acc: 1.0000 - val_loss: 2.0193e-04 - val_acc: 1.0000 - lr: 1.0000e-04
Epoch 81/200
200/200 [==============================] - 193s 966ms/step - loss: 1.7932e-05 - acc: 1.0000 - val_loss: 0.4637 - val_acc: 0.5018 - lr: 0.0020
Epoch 82/200
200/200 [==============================] - 189s 944ms/step - loss: 0.0020 - acc: 0.9994 - val_loss: 0.5345 - val_acc: 0.4482 - lr: 0.0018
Epoch 83/200
200/200 [==============================] - 182s 910ms/step - loss: 0.0025 - acc: 1.0000 - val_loss: 0.0024 - val_acc: 1.0000 - lr: 0.0016
Epoch 84/200
200/200 [==============================] - 181s 907ms/step - loss: 0.0023 - acc: 1.0000 - val_loss: 0.0022 - val_acc: 1.0000 - lr: 0.0014
Epoch 85/200
200/200 [==============================] - 186s 930ms/step - loss: 0.0022 - acc: 1.0000 - val_loss: 0.0021 - val_acc: 1.0000 - lr: 0.0012
Epoch 86/200
200/200 [==============================] - 182s 908ms/step - loss: 0.0020 - acc: 1.0000 - val_loss: 0.0020 - val_acc: 1.0000 - lr: 9.4444e-04
Epoch 87/200
200/200 [==============================] - 182s 910ms/step - loss: 0.0019 - acc: 1.0000 - val_loss: 0.0019 - val_acc: 1.0000 - lr: 7.3333e-04
Epoch 88/200
200/200 [==============================] - 183s 915ms/step - loss: 0.0019 - acc: 1.0000 - val_loss: 0.0018 - val_acc: 1.0000 - lr: 5.2222e-04
Epoch 89/200
200/200 [==============================] - 183s 914ms/step - loss: 0.0018 - acc: 1.0000 - val_loss: 0.0018 - val_acc: 1.0000 - lr: 3.1111e-04
Epoch 90/200
200/200 [==============================] - 182s 911ms/step - loss: 0.0018 - acc: 1.0000 - val_loss: 0.0018 - val_acc: 1.0000 - lr: 1.0000e-04
Epoch 91/200
200/200 [==============================] - 182s 912ms/step - loss: 0.0017 - acc: 1.0000 - val_loss: 0.0016 - val_acc: 1.0000 - lr: 0.0020
Epoch 92/200
200/200 [==============================] - 182s 910ms/step - loss: 0.0015 - acc: 1.0000 - val_loss: 0.0015 - val_acc: 1.0000 - lr: 0.0018
Epoch 93/200
200/200 [==============================] - 182s 911ms/step - loss: 0.0013 - acc: 1.0000 - val_loss: 0.0013 - val_acc: 1.0000 - lr: 0.0016
Epoch 94/200
200/200 [==============================] - 185s 925ms/step - loss: 0.0012 - acc: 1.0000 - val_loss: 0.0012 - val_acc: 1.0000 - lr: 0.0014
Epoch 95/200
200/200 [==============================] - 181s 904ms/step - loss: 0.0011 - acc: 1.0000 - val_loss: 0.0011 - val_acc: 1.0000 - lr: 0.0012
Epoch 96/200
200/200 [==============================] - 180s 902ms/step - loss: 0.0010 - acc: 1.0000 - val_loss: 0.0010 - val_acc: 1.0000 - lr: 9.4444e-04
Epoch 97/200
200/200 [==============================] - 180s 902ms/step - loss: 9.7535e-04 - acc: 1.0000 - val_loss: 9.5283e-04 - val_acc: 1.0000 - lr: 7.3333e-04
Epoch 98/200
200/200 [==============================] - 180s 901ms/step - loss: 9.2974e-04 - acc: 1.0000 - val_loss: 9.1339e-04 - val_acc: 1.0000 - lr: 5.2222e-04
Epoch 99/200
200/200 [==============================] - 181s 907ms/step - loss: 8.9887e-04 - acc: 1.0000 - val_loss: 8.8789e-04 - val_acc: 1.0000 - lr: 3.1111e-04
Epoch 100/200
200/200 [==============================] - 187s 934ms/step - loss: 8.8294e-04 - acc: 1.0000 - val_loss: 8.7880e-04 - val_acc: 1.0000 - lr: 1.0000e-04
Epoch 101/200
200/200 [==============================] - 180s 902ms/step - loss: 0.0012 - acc: 0.9999 - val_loss: 0.0013 - val_acc: 0.9998 - lr: 0.0020
Epoch 102/200
200/200 [==============================] - 180s 902ms/step - loss: 0.0010 - acc: 1.0000 - val_loss: 9.3628e-04 - val_acc: 1.0000 - lr: 0.0018
Epoch 103/200
200/200 [==============================] - 179s 897ms/step - loss: 8.4824e-04 - acc: 1.0000 - val_loss: 8.5005e-04 - val_acc: 1.0000 - lr: 0.0016
Epoch 104/200
200/200 [==============================] - 180s 900ms/step - loss: 7.1619e-04 - acc: 1.0000 - val_loss: 7.0104e-04 - val_acc: 1.0000 - lr: 0.0014
Epoch 105/200
200/200 [==============================] - 214s 1s/step - loss: 6.2045e-04 - acc: 1.0000 - val_loss: 6.2361e-04 - val_acc: 1.0000 - lr: 0.0012
Epoch 106/200
200/200 [==============================] - 183s 915ms/step - loss: 5.5093e-04 - acc: 1.0000 - val_loss: 5.3495e-04 - val_acc: 1.0000 - lr: 9.4444e-04
Epoch 107/200
200/200 [==============================] - 185s 926ms/step - loss: 4.9948e-04 - acc: 1.0000 - val_loss: 4.9466e-04 - val_acc: 1.0000 - lr: 7.3333e-04
Epoch 108/200
200/200 [==============================] - 187s 935ms/step - loss: 4.6283e-04 - acc: 1.0000 - val_loss: 5.3213e-04 - val_acc: 1.0000 - lr: 5.2222e-04
Epoch 109/200
200/200 [==============================] - 188s 941ms/step - loss: 4.3871e-04 - acc: 1.0000 - val_loss: 4.3065e-04 - val_acc: 1.0000 - lr: 3.1111e-04
Epoch 110/200
200/200 [==============================] - 194s 973ms/step - loss: 4.2627e-04 - acc: 1.0000 - val_loss: 4.2329e-04 - val_acc: 1.0000 - lr: 1.0000e-04
Epoch 111/200
200/200 [==============================] - 206s 1s/step - loss: 3.6728e-04 - acc: 1.0000 - val_loss: 0.0929 - val_acc: 0.8698 - lr: 0.0020
Epoch 112/200
200/200 [==============================] - 214s 1s/step - loss: 2.8893e-04 - acc: 1.0000 - val_loss: 2.6767e-04 - val_acc: 1.0000 - lr: 0.0018
Epoch 113/200
200/200 [==============================] - 214s 1s/step - loss: 2.3273e-04 - acc: 1.0000 - val_loss: 0.1472 - val_acc: 0.6903 - lr: 0.0016
Epoch 114/200
200/200 [==============================] - 213s 1s/step - loss: 1.8670e-04 - acc: 1.0000 - val_loss: 0.1450 - val_acc: 0.7015 - lr: 0.0014
Epoch 115/200
200/200 [==============================] - 213s 1s/step - loss: 1.5695e-04 - acc: 1.0000 - val_loss: 0.0115 - val_acc: 1.0000 - lr: 0.0012
Epoch 116/200
200/200 [==============================] - 214s 1s/step - loss: 1.3597e-04 - acc: 1.0000 - val_loss: 3.2649e-04 - val_acc: 1.0000 - lr: 9.4444e-04
Epoch 117/200
200/200 [==============================] - 214s 1s/step - loss: 1.2097e-04 - acc: 1.0000 - val_loss: 0.4788 - val_acc: 0.5018 - lr: 7.3333e-04
Epoch 118/200
200/200 [==============================] - 214s 1s/step - loss: 2.2300e-04 - acc: 1.0000 - val_loss: 2.4725e-04 - val_acc: 1.0000 - lr: 5.2222e-04
Epoch 119/200
200/200 [==============================] - 214s 1s/step - loss: 2.1805e-04 - acc: 1.0000 - val_loss: 2.0893e-04 - val_acc: 1.0000 - lr: 3.1111e-04
Epoch 120/200
200/200 [==============================] - 214s 1s/step - loss: 2.0614e-04 - acc: 1.0000 - val_loss: 2.0339e-04 - val_acc: 1.0000 - lr: 1.0000e-04
Epoch 121/200
200/200 [==============================] - 185s 926ms/step - loss: 1.6093e-04 - acc: 1.0000 - val_loss: 0.4219 - val_acc: 0.5018 - lr: 0.0020
Epoch 122/200
200/200 [==============================] - 185s 926ms/step - loss: 1.1050e-04 - acc: 1.0000 - val_loss: 0.4460 - val_acc: 0.5018 - lr: 0.0018
Epoch 123/200
200/200 [==============================] - 183s 914ms/step - loss: 3.3806e-04 - acc: 1.0000 - val_loss: 2.9386e-04 - val_acc: 1.0000 - lr: 0.0016
Epoch 124/200
200/200 [==============================] - 181s 904ms/step - loss: 1.9949e-04 - acc: 1.0000 - val_loss: 0.4264 - val_acc: 0.5018 - lr: 0.0014
Epoch 125/200
200/200 [==============================] - 182s 908ms/step - loss: 1.4096e-04 - acc: 1.0000 - val_loss: 0.2692 - val_acc: 0.5107 - lr: 0.0012
Epoch 126/200
200/200 [==============================] - 181s 903ms/step - loss: 1.0977e-04 - acc: 1.0000 - val_loss: 0.0105 - val_acc: 1.0000 - lr: 9.4444e-04
Epoch 127/200
200/200 [==============================] - 182s 909ms/step - loss: 9.1792e-05 - acc: 1.0000 - val_loss: 4.5129e-04 - val_acc: 1.0000 - lr: 7.3333e-04
Epoch 128/200
200/200 [==============================] - 181s 905ms/step - loss: 8.0692e-05 - acc: 1.0000 - val_loss: 1.1738e-04 - val_acc: 1.0000 - lr: 5.2222e-04
Epoch 129/200
200/200 [==============================] - 181s 907ms/step - loss: 7.4031e-05 - acc: 1.0000 - val_loss: 7.6682e-05 - val_acc: 1.0000 - lr: 3.1111e-04
Epoch 130/200
200/200 [==============================] - 181s 906ms/step - loss: 7.0827e-05 - acc: 1.0000 - val_loss: 7.0510e-05 - val_acc: 1.0000 - lr: 1.0000e-04
Epoch 131/200
200/200 [==============================] - 181s 905ms/step - loss: 5.6762e-05 - acc: 1.0000 - val_loss: 0.4818 - val_acc: 0.5018 - lr: 0.0020
Epoch 132/200
200/200 [==============================] - 182s 908ms/step - loss: 3.9235e-05 - acc: 1.0000 - val_loss: 0.4837 - val_acc: 0.5018 - lr: 0.0018
Epoch 133/200
200/200 [==============================] - 181s 907ms/step - loss: 2.9483e-05 - acc: 1.0000 - val_loss: 0.4621 - val_acc: 0.5018 - lr: 0.0016
Epoch 134/200
200/200 [==============================] - 181s 906ms/step - loss: 8.8404e-04 - acc: 0.9999 - val_loss: 9.7526e-04 - val_acc: 1.0000 - lr: 0.0014
Epoch 135/200
200/200 [==============================] - 181s 906ms/step - loss: 8.3623e-04 - acc: 1.0000 - val_loss: 0.0013 - val_acc: 1.0000 - lr: 0.0012
Epoch 136/200
200/200 [==============================] - 181s 906ms/step - loss: 6.6218e-04 - acc: 1.0000 - val_loss: 0.0236 - val_acc: 0.9904 - lr: 9.4444e-04
Epoch 137/200
200/200 [==============================] - 181s 905ms/step - loss: 5.6179e-04 - acc: 1.0000 - val_loss: 8.9467e-04 - val_acc: 1.0000 - lr: 7.3333e-04
Epoch 138/200
200/200 [==============================] - 181s 906ms/step - loss: 4.9843e-04 - acc: 1.0000 - val_loss: 5.4970e-04 - val_acc: 1.0000 - lr: 5.2222e-04
Epoch 139/200
200/200 [==============================] - 181s 906ms/step - loss: 4.6030e-04 - acc: 1.0000 - val_loss: 4.5565e-04 - val_acc: 1.0000 - lr: 3.1111e-04
Epoch 140/200
200/200 [==============================] - 181s 905ms/step - loss: 4.4187e-04 - acc: 1.0000 - val_loss: 4.3794e-04 - val_acc: 1.0000 - lr: 1.0000e-04
Epoch 141/200
200/200 [==============================] - 181s 905ms/step - loss: 3.5985e-04 - acc: 1.0000 - val_loss: 0.4717 - val_acc: 0.5018 - lr: 0.0020
Epoch 142/200
200/200 [==============================] - 212s 1s/step - loss: 2.5639e-04 - acc: 1.0000 - val_loss: 0.3537 - val_acc: 0.5018 - lr: 0.0018
Epoch 143/200
200/200 [==============================] - 190s 952ms/step - loss: 1.9486e-04 - acc: 1.0000 - val_loss: 0.0238 - val_acc: 0.9944 - lr: 0.0016
Epoch 144/200
200/200 [==============================] - 192s 961ms/step - loss: 1.5588e-04 - acc: 1.0000 - val_loss: 5.5978e-04 - val_acc: 0.9998 - lr: 0.0014
Epoch 145/200
200/200 [==============================] - 181s 906ms/step - loss: 1.2962e-04 - acc: 1.0000 - val_loss: 9.1726e-04 - val_acc: 1.0000 - lr: 0.0012
Epoch 146/200
200/200 [==============================] - 181s 904ms/step - loss: 1.1111e-04 - acc: 1.0000 - val_loss: 2.5750e-04 - val_acc: 1.0000 - lr: 9.4444e-04
Epoch 147/200
200/200 [==============================] - 181s 903ms/step - loss: 9.8085e-05 - acc: 1.0000 - val_loss: 9.7593e-05 - val_acc: 1.0000 - lr: 7.3333e-04
Epoch 148/200
200/200 [==============================] - 181s 905ms/step - loss: 1.1093e-04 - acc: 1.0000 - val_loss: 0.6840 - val_acc: 0.2697 - lr: 5.2222e-04
Epoch 149/200
200/200 [==============================] - 181s 904ms/step - loss: 1.7264e-04 - acc: 1.0000 - val_loss: 1.5816e-04 - val_acc: 1.0000 - lr: 3.1111e-04
Epoch 150/200
200/200 [==============================] - 180s 902ms/step - loss: 1.5343e-04 - acc: 1.0000 - val_loss: 1.4886e-04 - val_acc: 1.0000 - lr: 1.0000e-04
Epoch 151/200
200/200 [==============================] - 180s 900ms/step - loss: 1.1442e-04 - acc: 1.0000 - val_loss: 0.4817 - val_acc: 0.5018 - lr: 0.0020
Epoch 152/200
200/200 [==============================] - 180s 900ms/step - loss: 7.5607e-05 - acc: 1.0000 - val_loss: 0.4857 - val_acc: 0.5018 - lr: 0.0018
Epoch 153/200
200/200 [==============================] - 180s 900ms/step - loss: 5.0521e-05 - acc: 1.0000 - val_loss: 0.4959 - val_acc: 0.5018 - lr: 0.0016
Epoch 154/200
200/200 [==============================] - 182s 909ms/step - loss: 3.6988e-05 - acc: 1.0000 - val_loss: 0.4636 - val_acc: 0.5018 - lr: 0.0014
Epoch 155/200
200/200 [==============================] - 181s 904ms/step - loss: 2.9382e-05 - acc: 1.0000 - val_loss: 0.1145 - val_acc: 0.8209 - lr: 0.0012
Epoch 156/200
200/200 [==============================] - 181s 905ms/step - loss: 2.4769e-05 - acc: 1.0000 - val_loss: 5.0902e-04 - val_acc: 1.0000 - lr: 9.4444e-04
Epoch 157/200
200/200 [==============================] - 181s 904ms/step - loss: 2.1742e-05 - acc: 1.0000 - val_loss: 4.3060e-05 - val_acc: 1.0000 - lr: 7.3333e-04
Epoch 158/200
200/200 [==============================] - 181s 905ms/step - loss: 1.9691e-05 - acc: 1.0000 - val_loss: 2.2383e-05 - val_acc: 1.0000 - lr: 5.2222e-04
Epoch 159/200
200/200 [==============================] - 181s 903ms/step - loss: 1.8412e-05 - acc: 1.0000 - val_loss: 1.8346e-05 - val_acc: 1.0000 - lr: 3.1111e-04
Epoch 160/200
200/200 [==============================] - 181s 904ms/step - loss: 1.7785e-05 - acc: 1.0000 - val_loss: 1.7664e-05 - val_acc: 1.0000 - lr: 1.0000e-04
Epoch 161/200
200/200 [==============================] - 181s 904ms/step - loss: 0.0051 - acc: 0.9974 - val_loss: 0.0642 - val_acc: 0.9316 - lr: 0.0020
Epoch 162/200
200/200 [==============================] - 181s 907ms/step - loss: 0.0029 - acc: 1.0000 - val_loss: 0.0028 - val_acc: 1.0000 - lr: 0.0018
Epoch 163/200
200/200 [==============================] - 181s 905ms/step - loss: 0.0027 - acc: 1.0000 - val_loss: 0.0026 - val_acc: 1.0000 - lr: 0.0016
Epoch 164/200
200/200 [==============================] - 181s 906ms/step - loss: 0.0025 - acc: 1.0000 - val_loss: 0.0025 - val_acc: 1.0000 - lr: 0.0014
Epoch 165/200
200/200 [==============================] - 181s 905ms/step - loss: 0.0024 - acc: 1.0000 - val_loss: 0.0023 - val_acc: 1.0000 - lr: 0.0012
Epoch 166/200
200/200 [==============================] - 181s 905ms/step - loss: 0.0023 - acc: 1.0000 - val_loss: 0.0022 - val_acc: 1.0000 - lr: 9.4444e-04
Epoch 167/200
200/200 [==============================] - 182s 908ms/step - loss: 0.0022 - acc: 1.0000 - val_loss: 0.0022 - val_acc: 1.0000 - lr: 7.3333e-04
Epoch 168/200
200/200 [==============================] - 181s 907ms/step - loss: 0.0022 - acc: 1.0000 - val_loss: 0.0021 - val_acc: 1.0000 - lr: 5.2222e-04
Epoch 169/200
200/200 [==============================] - 181s 905ms/step - loss: 0.0021 - acc: 1.0000 - val_loss: 0.0021 - val_acc: 1.0000 - lr: 3.1111e-04
Epoch 170/200
200/200 [==============================] - 182s 908ms/step - loss: 0.0021 - acc: 1.0000 - val_loss: 0.0021 - val_acc: 1.0000 - lr: 1.0000e-04
Epoch 171/200
200/200 [==============================] - 182s 908ms/step - loss: 0.0020 - acc: 1.0000 - val_loss: 0.0019 - val_acc: 1.0000 - lr: 0.0020
Epoch 172/200
200/200 [==============================] - 182s 911ms/step - loss: 0.0018 - acc: 1.0000 - val_loss: 0.0017 - val_acc: 1.0000 - lr: 0.0018
Epoch 173/200
200/200 [==============================] - 183s 915ms/step - loss: 0.0017 - acc: 1.0000 - val_loss: 0.0016 - val_acc: 1.0000 - lr: 0.0016
Epoch 174/200
200/200 [==============================] - 183s 917ms/step - loss: 0.0015 - acc: 1.0000 - val_loss: 0.0015 - val_acc: 1.0000 - lr: 0.0014
Epoch 175/200
200/200 [==============================] - 184s 920ms/step - loss: 0.0015 - acc: 1.0000 - val_loss: 0.0014 - val_acc: 1.0000 - lr: 0.0012
Epoch 176/200
200/200 [==============================] - 184s 918ms/step - loss: 0.0014 - acc: 1.0000 - val_loss: 0.0013 - val_acc: 1.0000 - lr: 9.4444e-04
Epoch 177/200
200/200 [==============================] - 183s 915ms/step - loss: 0.0013 - acc: 1.0000 - val_loss: 0.0013 - val_acc: 1.0000 - lr: 7.3333e-04
Epoch 178/200
200/200 [==============================] - 183s 913ms/step - loss: 0.0013 - acc: 1.0000 - val_loss: 0.0013 - val_acc: 1.0000 - lr: 5.2222e-04
Epoch 179/200
200/200 [==============================] - 182s 912ms/step - loss: 0.0013 - acc: 1.0000 - val_loss: 0.0012 - val_acc: 1.0000 - lr: 3.1111e-04
Epoch 180/200
200/200 [==============================] - 184s 918ms/step - loss: 0.0012 - acc: 1.0000 - val_loss: 0.0012 - val_acc: 1.0000 - lr: 1.0000e-04
Epoch 181/200
200/200 [==============================] - 187s 934ms/step - loss: 0.0012 - acc: 1.0000 - val_loss: 0.0011 - val_acc: 1.0000 - lr: 0.0020
Epoch 182/200
200/200 [==============================] - 185s 927ms/step - loss: 0.0010 - acc: 1.0000 - val_loss: 9.6330e-04 - val_acc: 1.0000 - lr: 0.0018
Epoch 183/200
200/200 [==============================] - 185s 926ms/step - loss: 9.1766e-04 - acc: 1.0000 - val_loss: 8.7060e-04 - val_acc: 1.0000 - lr: 0.0016
Epoch 184/200
200/200 [==============================] - 185s 927ms/step - loss: 8.3431e-04 - acc: 1.0000 - val_loss: 7.9592e-04 - val_acc: 1.0000 - lr: 0.0014
Epoch 185/200
200/200 [==============================] - 185s 926ms/step - loss: 7.6764e-04 - acc: 1.0000 - val_loss: 7.3644e-04 - val_acc: 1.0000 - lr: 0.0012
Epoch 186/200
200/200 [==============================] - 185s 927ms/step - loss: 7.1441e-04 - acc: 1.0000 - val_loss: 6.8889e-04 - val_acc: 1.0000 - lr: 9.4444e-04
Epoch 187/200
200/200 [==============================] - 185s 926ms/step - loss: 6.7345e-04 - acc: 1.0000 - val_loss: 6.5429e-04 - val_acc: 1.0000 - lr: 7.3333e-04
Epoch 188/200
200/200 [==============================] - 185s 925ms/step - loss: 6.4192e-04 - acc: 1.0000 - val_loss: 6.2633e-04 - val_acc: 1.0000 - lr: 5.2222e-04
Epoch 189/200
200/200 [==============================] - 185s 926ms/step - loss: 6.2029e-04 - acc: 1.0000 - val_loss: 6.0985e-04 - val_acc: 1.0000 - lr: 3.1111e-04
Epoch 190/200
200/200 [==============================] - 185s 926ms/step - loss: 6.0916e-04 - acc: 1.0000 - val_loss: 6.0429e-04 - val_acc: 1.0000 - lr: 1.0000e-04
Epoch 191/200
200/200 [==============================] - 184s 920ms/step - loss: 5.5169e-04 - acc: 1.0000 - val_loss: 5.0742e-04 - val_acc: 1.0000 - lr: 0.0020
Epoch 192/200
200/200 [==============================] - 184s 922ms/step - loss: 0.0027 - acc: 0.9986 - val_loss: 0.5014 - val_acc: 0.5005 - lr: 0.0018
Epoch 193/200
200/200 [==============================] - 184s 921ms/step - loss: 0.0024 - acc: 1.0000 - val_loss: 0.0023 - val_acc: 1.0000 - lr: 0.0016
Epoch 194/200
200/200 [==============================] - 185s 923ms/step - loss: 0.0023 - acc: 1.0000 - val_loss: 0.0022 - val_acc: 1.0000 - lr: 0.0014
Epoch 195/200
200/200 [==============================] - 183s 916ms/step - loss: 0.0022 - acc: 1.0000 - val_loss: 0.0021 - val_acc: 1.0000 - lr: 0.0012
Epoch 196/200
200/200 [==============================] - 183s 917ms/step - loss: 0.0021 - acc: 1.0000 - val_loss: 0.0020 - val_acc: 1.0000 - lr: 9.4444e-04
Epoch 197/200
200/200 [==============================] - 183s 917ms/step - loss: 0.0020 - acc: 1.0000 - val_loss: 0.0020 - val_acc: 1.0000 - lr: 7.3333e-04
Epoch 198/200
200/200 [==============================] - 184s 918ms/step - loss: 0.0020 - acc: 1.0000 - val_loss: 0.0019 - val_acc: 1.0000 - lr: 5.2222e-04
Epoch 199/200
200/200 [==============================] - 184s 918ms/step - loss: 0.0019 - acc: 1.0000 - val_loss: 0.0019 - val_acc: 1.0000 - lr: 3.1111e-04
Epoch 200/200
200/200 [==============================] - 183s 917ms/step - loss: 0.0019 - acc: 1.0000 - val_loss: 0.0019 - val_acc: 1.0000 - lr: 1.0000e-04
Best validation accuracy: 1.0
%% Cell type:code id: tags:
``` python
#15#Create JSON File
# Convert the model architecture to JSON format
import json
from keras.models import model_from_json
model_json = trained_net.to_json()
# Save the model architecture as a JSON file (optional)
filename = f'ghor_Rk_0000_836F_Round_{num_rounds}_depth_10.json'
print(filename)
with open(filename, "w") as json_file:
    json.dump(json.loads(model_json), json_file, indent=4)
```
%% Output
ghor_Rk_0000_836F_Round_10_depth_10.json
%% Cell type:code id: tags:
``` python
#16#Evaluate Function
def evaluate(net, X, Y):
    Z = net.predict(X, batch_size=10000).flatten()
    Zbin = (Z > 0.5)
    diff = Y - Z
    mse = np.mean(diff * diff)
    n = len(Z)
    n0 = np.sum(Y == 0)
    n1 = np.sum(Y == 1)
    acc = np.sum(Zbin == Y) / n
    tpr = np.sum(Zbin[Y == 1]) / n1        # true positive rate
    tnr = np.sum(Zbin[Y == 0] == 0) / n0   # true negative rate
    mreal = np.median(Z[Y == 1])
    high_random = np.sum(Z[Y == 0] > mreal) / n0
    print("Accuracy: ", acc, "TPR: ", tpr, "TNR: ", tnr, "MSE:", mse)
    print("Percentage of random pairs with score higher than median of real pairs:", 100 * high_random)
```
%% Cell type:code id: tags:
``` python
#17#Evaluate Function Call
import numpy as np
from keras.models import model_from_json
#load distinguishers
with open('ghor_Rk_0000_836F_Round_10_depth_10.json', 'r') as json_file:
    json_model = json_file.read()
net10 = model_from_json(json_model)
net10.load_weights('ghor_Rk_0000_836F_Round_10_depth_10.h5')
X_test_stacked, Y_test_stacked = make_train_data(100000, num_rounds)
evaluate(net10, X_test_stacked, Y_test_stacked)
```
%% Output
10/10 [==============================] - 3s 273ms/step
Accuracy: 1.0 TPR: 1.0 TNR: 1.0 MSE: 3.327519e-07
Percentage of random pairs with score higher than median of real pairs: 0.0