Computing the Jacobian Matrix in Tensorflow - python

I am trying to compute Jacobian Matrix in Tensorflow for the following Network:
but It didn't work with my Neural Network!
I found jacobian Matrix code in https://medium.com/unit8-machine-learning-publication/computing-the-jacobian-matrix-of-a-neural-network-in-python-4f162e5db180
Unfortunately it doesn't work with my network ... the problem message is "ValueError: Cannot feed value of shape (1, 51000) for Tensor 'dense_1_input:0', which has shape '(?, 6)'"
I think that is the problem in the loop function inside jacobian_tensorflow function?
# Importing some Libraries
from keras.models import Sequential
from keras.layers import Dense
from keras.optimizers import SGD
import numpy as np
import statsmodels.api as sm
from sklearn.metrics import mean_squared_error
import matplotlib.pyplot as plt
from tqdm import tqdm
import tensorflow as tf
# Simulation of some data
np.random.seed (245)
nobs =10000
# Definition normalverteilte Features
x1= np.random.normal(size=nobs ,scale=1)
x2= np.random.normal(size=nobs ,scale=1)
x3= np.random.normal(size=nobs ,scale=1)
x4= np.random.normal(size=nobs ,scale=1)
x5= np.random.normal(size=nobs ,scale=1)
# Features
X= np.c_[np.ones((nobs ,1)),x1,x2,x3,x4,x5]
y= np.cos(x1) + np.sin(x2) + 2*x3 + x4 + 0.01*x5 + np.random.normal(size=nobs , scale=0.01)
#Learningrate
LR=0.05
# Number of Neurons
Neuron_Out=1
Neuron_Hidden1=64
Neuron_Hidden2=32
#The Activation function
Activate_output='linear' # für letzte Schicht verwende ich linear
Activate_hidden='relu' # unterschied ist Hidden-Layer-Neuronen werden nicht linear transformiert
#The Optimizer
Optimizer= SGD(lr=LR)
# The loss function
loss='mean_squared_error'
# Splitting Data
from sklearn.model_selection import train_test_split
x_train , x_test , y_train , y_test = train_test_split(X, y, test_size =0.15, random_state =77)
## Neural Network
from tensorflow import set_random_seed
set_random_seed (245)
# As in Medium Essa
sess = tf.InteractiveSession()
sess.run(tf.initialize_all_variables())
#Initialize the ANN
model_ANN= Sequential()
# Hidden Layer-> hier wird Hidden Layer definiert-> Anzahl der Neuronen hier sind 64, 32
# input ist 6 (also 1,x1,x2,x3,x4,x5)-> one is the first column in X Matrix
model_ANN.add(Dense(Neuron_Hidden1, activation=Activate_hidden, input_shape=(6,), use_bias=True))
model_ANN.add(Dense(Neuron_Hidden2, activation=Activate_hidden, use_bias=True))
#Output Layer-> hier wird Output-Layer defniniert
model_ANN.add(Dense(Neuron_Out, activation=Activate_output,use_bias=True))
model_ANN.summary()
#Fit the model
history_ANN=model_ANN.fit(
x_train, # training data
y_train, # training targets
epochs=125)
def jacobian_tensorflow(x):
jacobian_matrix = []
for m in range(Neuron_Out):
# We iterate over the M elements of the output vector
grad_func = tf.gradients(model_ANN.output[:, m], model_ANN.input)
gradients = sess.run(grad_func, feed_dict={model_ANN.input: x.reshape((1, x.size))})
jacobian_matrix.append(gradients[0][0,:])
return np.array(jacobian_matrix)
#Jacobian matrix computation
def jacobian_tensorflow(x):
jacobian_matrix = []
for m in range(Neuron_Out):
# We iterate over the M elements of the output vector
grad_func = tf.gradients(model_ANN.output[:, m], model_ANN.input)
gradients = sess.run(grad_func, feed_dict={model_ANN.input: x.reshape((1, x.size))})
jacobian_matrix.append(gradients[0][0,:])
return np.array(jacobian_matrix)
jacobian_tensorflow(x_train)
How I could use Jacobian Computation Function for my Network?
Thanks in Advance

I have modified your code to fix the error and now its working. There were few errors like compile statement missing, function defined twice and forcing the reshape of input for the Dense layer even though the shape was good for feed_dict in jacobian_tensorflow function. Have added comments in the code for changes.
Fixed Code -
%tensorflow_version 1.x
# Importing some Libraries
from keras.models import Sequential
from keras.layers import Dense
from keras.optimizers import SGD
import numpy as np
import statsmodels.api as sm
from sklearn.metrics import mean_squared_error
import matplotlib.pyplot as plt
from tqdm import tqdm
import tensorflow as tf
# Simulation of some data
np.random.seed (245)
nobs =10000
# Definition normalverteilte Features
x1= np.random.normal(size=nobs ,scale=1)
x2= np.random.normal(size=nobs ,scale=1)
x3= np.random.normal(size=nobs ,scale=1)
x4= np.random.normal(size=nobs ,scale=1)
x5= np.random.normal(size=nobs ,scale=1)
# Features
X= np.c_[np.ones((nobs ,1)),x1,x2,x3,x4,x5]
y= np.cos(x1) + np.sin(x2) + 2*x3 + x4 + 0.01*x5 + np.random.normal(size=nobs , scale=0.01)
#Learningrate
LR=0.05
# Number of Neurons
Neuron_Out=1
Neuron_Hidden1=64
Neuron_Hidden2=32
#The Activation function
Activate_output='linear' # für letzte Schicht verwende ich linear
Activate_hidden='relu' # unterschied ist Hidden-Layer-Neuronen werden nicht linear transformiert
#The Optimizer
Optimizer= SGD(lr=LR)
# The loss function
loss='mean_squared_error'
# Splitting Data
from sklearn.model_selection import train_test_split
x_train , x_test , y_train , y_test = train_test_split(X, y, test_size =0.15, random_state =77)
## Neural Network
from tensorflow import set_random_seed
set_random_seed (245)
# As in Medium Essa
sess = tf.InteractiveSession()
sess.run(tf.initialize_all_variables())
#Initialize the ANN
model_ANN= Sequential()
# Hidden Layer-> hier wird Hidden Layer definiert-> Anzahl der Neuronen hier sind 64, 32
# input ist 6 (also 1,x1,x2,x3,x4,x5)-> one is the first column in X Matrix
model_ANN.add(Dense(Neuron_Hidden1, activation=Activate_hidden, input_shape=(6,), use_bias=True))
model_ANN.add(Dense(Neuron_Hidden2, activation=Activate_hidden, use_bias=True))
#Output Layer-> hier wird Output-Layer defniniert
model_ANN.add(Dense(Neuron_Out, activation=Activate_output,use_bias=True))
model_ANN.summary()
# Added the compile statement
model_ANN.compile(loss=loss, optimizer=Optimizer, metrics=['accuracy'])
#Fit the model
history_ANN=model_ANN.fit(
x_train, # training data
y_train, # training targets
epochs=125)
#Jacobian matrix computation
def jacobian_tensorflow(x):
jacobian_matrix = []
for m in range(Neuron_Out):
# We iterate over the M elements of the output vector
grad_func = tf.gradients(model_ANN.output[:, m],model_ANN.input)
gradients = sess.run(grad_func, feed_dict={model_ANN.input: x}) # Removed x.reshape((1, x.size)) as reshape is not required bcoz dense accepts the shape
jacobian_matrix.append(gradients[0][0,:])
return np.array(jacobian_matrix)
jacobian_tensorflow(x_train)
Output -
/tensorflow-1.15.2/python3.6/tensorflow_core/python/client/session.py:1750: UserWarning: An interactive session is already active. This can cause out-of-memory errors in some cases. You must explicitly call `InteractiveSession.close()` to release resources held by the other session(s).
warnings.warn('An interactive session is already active. This can '
Model: "sequential_4"
_________________________________________________________________
Layer (type) Output Shape Param #
=================================================================
dense_10 (Dense) (None, 64) 448
_________________________________________________________________
dense_11 (Dense) (None, 32) 2080
_________________________________________________________________
dense_12 (Dense) (None, 1) 33
=================================================================
Total params: 2,561
Trainable params: 2,561
Non-trainable params: 0
_________________________________________________________________
Epoch 1/125
8500/8500 [==============================] - 1s 82us/step - loss: 0.1999 - accuracy: 0.0000e+00
Epoch 2/125
8500/8500 [==============================] - 1s 79us/step - loss: 0.0501 - accuracy: 0.0000e+00
Epoch 3/125
8500/8500 [==============================] - 1s 80us/step - loss: 0.0277 - accuracy: 0.0000e+00
Epoch 4/125
8500/8500 [==============================] - 1s 79us/step - loss: 0.0219 - accuracy: 0.0000e+00
Epoch 5/125
8500/8500 [==============================] - 1s 80us/step - loss: 0.0190 - accuracy: 0.0000e+00
Epoch 6/125
8500/8500 [==============================] - 1s 79us/step - loss: 0.0114 - accuracy: 0.0000e+00
Epoch 7/125
8500/8500 [==============================] - 1s 78us/step - loss: 0.0138 - accuracy: 0.0000e+00
Epoch 8/125
8500/8500 [==============================] - 1s 81us/step - loss: 0.0092 - accuracy: 0.0000e+00
Epoch 9/125
8500/8500 [==============================] - 1s 80us/step - loss: 0.0072 - accuracy: 0.0000e+00
Epoch 10/125
8500/8500 [==============================] - 1s 79us/step - loss: 0.0073 - accuracy: 0.0000e+00
Epoch 11/125
8500/8500 [==============================] - 1s 79us/step - loss: 0.0065 - accuracy: 0.0000e+00
Epoch 12/125
8500/8500 [==============================] - 1s 80us/step - loss: 0.0070 - accuracy: 0.0000e+00
Epoch 13/125
8500/8500 [==============================] - 1s 78us/step - loss: 0.0055 - accuracy: 0.0000e+00
Epoch 14/125
8500/8500 [==============================] - 1s 78us/step - loss: 0.0055 - accuracy: 0.0000e+00
Epoch 15/125
8500/8500 [==============================] - 1s 79us/step - loss: 0.0055 - accuracy: 0.0000e+00
Epoch 16/125
8500/8500 [==============================] - 1s 80us/step - loss: 0.0057 - accuracy: 0.0000e+00
Epoch 17/125
8500/8500 [==============================] - 1s 79us/step - loss: 0.0045 - accuracy: 0.0000e+00
Epoch 18/125
8500/8500 [==============================] - 1s 79us/step - loss: 0.0052 - accuracy: 0.0000e+00
Epoch 19/125
8500/8500 [==============================] - 1s 79us/step - loss: 0.0055 - accuracy: 0.0000e+00
Epoch 20/125
8500/8500 [==============================] - 1s 79us/step - loss: 0.0040 - accuracy: 0.0000e+00
Epoch 21/125
8500/8500 [==============================] - 1s 80us/step - loss: 0.0045 - accuracy: 0.0000e+00
Epoch 22/125
8500/8500 [==============================] - 1s 80us/step - loss: 0.0040 - accuracy: 0.0000e+00
Epoch 23/125
8500/8500 [==============================] - 1s 78us/step - loss: 0.0041 - accuracy: 0.0000e+00
Epoch 24/125
8500/8500 [==============================] - 1s 80us/step - loss: 0.0044 - accuracy: 0.0000e+00
Epoch 25/125
8500/8500 [==============================] - 1s 80us/step - loss: 0.0037 - accuracy: 0.0000e+00
Epoch 26/125
8500/8500 [==============================] - 1s 78us/step - loss: 0.0046 - accuracy: 0.0000e+00
Epoch 27/125
8500/8500 [==============================] - 1s 79us/step - loss: 0.0042 - accuracy: 0.0000e+00
Epoch 28/125
8500/8500 [==============================] - 1s 80us/step - loss: 0.0039 - accuracy: 0.0000e+00
Epoch 29/125
8500/8500 [==============================] - 1s 78us/step - loss: 0.0037 - accuracy: 0.0000e+00
Epoch 30/125
8500/8500 [==============================] - 1s 80us/step - loss: 0.0042 - accuracy: 0.0000e+00
Epoch 31/125
8500/8500 [==============================] - 1s 79us/step - loss: 0.0033 - accuracy: 0.0000e+00
Epoch 32/125
8500/8500 [==============================] - 1s 79us/step - loss: 0.0032 - accuracy: 0.0000e+00
Epoch 33/125
8500/8500 [==============================] - 1s 78us/step - loss: 0.0033 - accuracy: 0.0000e+00
Epoch 34/125
8500/8500 [==============================] - 1s 79us/step - loss: 0.0038 - accuracy: 0.0000e+00
Epoch 35/125
8500/8500 [==============================] - 1s 78us/step - loss: 0.0035 - accuracy: 0.0000e+00
Epoch 36/125
8500/8500 [==============================] - 1s 78us/step - loss: 0.0032 - accuracy: 0.0000e+00
Epoch 37/125
8500/8500 [==============================] - 1s 80us/step - loss: 0.0032 - accuracy: 0.0000e+00
Epoch 38/125
8500/8500 [==============================] - 1s 78us/step - loss: 0.0032 - accuracy: 0.0000e+00
Epoch 39/125
8500/8500 [==============================] - 1s 79us/step - loss: 0.0032 - accuracy: 0.0000e+00
Epoch 40/125
8500/8500 [==============================] - 1s 81us/step - loss: 0.0028 - accuracy: 0.0000e+00
Epoch 41/125
8500/8500 [==============================] - 1s 80us/step - loss: 0.0032 - accuracy: 0.0000e+00
Epoch 42/125
8500/8500 [==============================] - 1s 77us/step - loss: 0.0029 - accuracy: 0.0000e+00
Epoch 43/125
8500/8500 [==============================] - 1s 80us/step - loss: 0.0029 - accuracy: 0.0000e+00
Epoch 44/125
8500/8500 [==============================] - 1s 80us/step - loss: 0.0029 - accuracy: 0.0000e+00
Epoch 45/125
8500/8500 [==============================] - 1s 79us/step - loss: 0.0028 - accuracy: 0.0000e+00
Epoch 46/125
8500/8500 [==============================] - 1s 80us/step - loss: 0.0031 - accuracy: 0.0000e+00
Epoch 47/125
8500/8500 [==============================] - 1s 80us/step - loss: 0.0025 - accuracy: 0.0000e+00
Epoch 48/125
8500/8500 [==============================] - 1s 79us/step - loss: 0.0022 - accuracy: 0.0000e+00
Epoch 49/125
8500/8500 [==============================] - 1s 80us/step - loss: 0.0027 - accuracy: 0.0000e+00
Epoch 50/125
8500/8500 [==============================] - 1s 79us/step - loss: 0.0028 - accuracy: 0.0000e+00
Epoch 51/125
8500/8500 [==============================] - 1s 80us/step - loss: 0.0024 - accuracy: 0.0000e+00
Epoch 52/125
8500/8500 [==============================] - 1s 78us/step - loss: 0.0024 - accuracy: 0.0000e+00
Epoch 53/125
8500/8500 [==============================] - 1s 79us/step - loss: 0.0023 - accuracy: 0.0000e+00
Epoch 54/125
8500/8500 [==============================] - 1s 78us/step - loss: 0.0026 - accuracy: 0.0000e+00
Epoch 55/125
8500/8500 [==============================] - 1s 81us/step - loss: 0.0023 - accuracy: 0.0000e+00
Epoch 56/125
8500/8500 [==============================] - 1s 80us/step - loss: 0.0020 - accuracy: 0.0000e+00
Epoch 57/125
8500/8500 [==============================] - 1s 78us/step - loss: 0.0024 - accuracy: 0.0000e+00
Epoch 58/125
8500/8500 [==============================] - 1s 80us/step - loss: 0.0022 - accuracy: 0.0000e+00
Epoch 59/125
8500/8500 [==============================] - 1s 80us/step - loss: 0.0021 - accuracy: 0.0000e+00
Epoch 60/125
8500/8500 [==============================] - 1s 78us/step - loss: 0.0022 - accuracy: 0.0000e+00
Epoch 61/125
8500/8500 [==============================] - 1s 80us/step - loss: 0.0020 - accuracy: 0.0000e+00
Epoch 62/125
8500/8500 [==============================] - 1s 81us/step - loss: 0.0022 - accuracy: 0.0000e+00
Epoch 63/125
8500/8500 [==============================] - 1s 80us/step - loss: 0.0022 - accuracy: 0.0000e+00
Epoch 64/125
8500/8500 [==============================] - 1s 82us/step - loss: 0.0023 - accuracy: 0.0000e+00
Epoch 65/125
8500/8500 [==============================] - 1s 81us/step - loss: 0.0023 - accuracy: 0.0000e+00
Epoch 66/125
8500/8500 [==============================] - 1s 80us/step - loss: 0.0018 - accuracy: 0.0000e+00
Epoch 67/125
8500/8500 [==============================] - 1s 81us/step - loss: 0.0021 - accuracy: 0.0000e+00
Epoch 68/125
8500/8500 [==============================] - 1s 80us/step - loss: 0.0020 - accuracy: 0.0000e+00
Epoch 69/125
8500/8500 [==============================] - 1s 80us/step - loss: 0.0020 - accuracy: 0.0000e+00
Epoch 70/125
8500/8500 [==============================] - 1s 81us/step - loss: 0.0018 - accuracy: 0.0000e+00
Epoch 71/125
8500/8500 [==============================] - 1s 80us/step - loss: 0.0019 - accuracy: 0.0000e+00
Epoch 72/125
8500/8500 [==============================] - 1s 79us/step - loss: 0.0018 - accuracy: 0.0000e+00
Epoch 73/125
8500/8500 [==============================] - 1s 80us/step - loss: 0.0018 - accuracy: 0.0000e+00
Epoch 74/125
8500/8500 [==============================] - 1s 81us/step - loss: 0.0018 - accuracy: 0.0000e+00
Epoch 75/125
8500/8500 [==============================] - 1s 81us/step - loss: 0.0019 - accuracy: 0.0000e+00
Epoch 76/125
8500/8500 [==============================] - 1s 81us/step - loss: 0.0018 - accuracy: 0.0000e+00
Epoch 77/125
8500/8500 [==============================] - 1s 83us/step - loss: 0.0016 - accuracy: 0.0000e+00
Epoch 78/125
8500/8500 [==============================] - 1s 79us/step - loss: 0.0019 - accuracy: 0.0000e+00
Epoch 79/125
8500/8500 [==============================] - 1s 79us/step - loss: 0.0019 - accuracy: 0.0000e+00
Epoch 80/125
8500/8500 [==============================] - 1s 80us/step - loss: 0.0016 - accuracy: 0.0000e+00
Epoch 81/125
8500/8500 [==============================] - 1s 80us/step - loss: 0.0018 - accuracy: 0.0000e+00
Epoch 82/125
8500/8500 [==============================] - 1s 81us/step - loss: 0.0017 - accuracy: 0.0000e+00
Epoch 83/125
8500/8500 [==============================] - 1s 81us/step - loss: 0.0017 - accuracy: 0.0000e+00
Epoch 84/125
8500/8500 [==============================] - 1s 80us/step - loss: 0.0016 - accuracy: 0.0000e+00
Epoch 85/125
8500/8500 [==============================] - 1s 82us/step - loss: 0.0017 - accuracy: 0.0000e+00
Epoch 86/125
8500/8500 [==============================] - 1s 81us/step - loss: 0.0017 - accuracy: 0.0000e+00
Epoch 87/125
8500/8500 [==============================] - 1s 83us/step - loss: 0.0017 - accuracy: 0.0000e+00
Epoch 88/125
8500/8500 [==============================] - 1s 80us/step - loss: 0.0016 - accuracy: 0.0000e+00
Epoch 89/125
8500/8500 [==============================] - 1s 82us/step - loss: 0.0016 - accuracy: 0.0000e+00
Epoch 90/125
8500/8500 [==============================] - 1s 81us/step - loss: 0.0018 - accuracy: 0.0000e+00
Epoch 91/125
8500/8500 [==============================] - 1s 81us/step - loss: 0.0015 - accuracy: 0.0000e+00
Epoch 92/125
8500/8500 [==============================] - 1s 82us/step - loss: 0.0014 - accuracy: 0.0000e+00
Epoch 93/125
8500/8500 [==============================] - 1s 80us/step - loss: 0.0016 - accuracy: 0.0000e+00
Epoch 94/125
8500/8500 [==============================] - 1s 81us/step - loss: 0.0016 - accuracy: 0.0000e+00
Epoch 95/125
8500/8500 [==============================] - 1s 83us/step - loss: 0.0014 - accuracy: 0.0000e+00
Epoch 96/125
8500/8500 [==============================] - 1s 81us/step - loss: 0.0014 - accuracy: 0.0000e+00
Epoch 97/125
8500/8500 [==============================] - 1s 81us/step - loss: 0.0016 - accuracy: 0.0000e+00
Epoch 98/125
8500/8500 [==============================] - 1s 81us/step - loss: 0.0015 - accuracy: 0.0000e+00
Epoch 99/125
8500/8500 [==============================] - 1s 81us/step - loss: 0.0014 - accuracy: 0.0000e+00
Epoch 100/125
8500/8500 [==============================] - 1s 80us/step - loss: 0.0015 - accuracy: 0.0000e+00
Epoch 101/125
8500/8500 [==============================] - 1s 81us/step - loss: 0.0014 - accuracy: 0.0000e+00
Epoch 102/125
8500/8500 [==============================] - 1s 82us/step - loss: 0.0015 - accuracy: 0.0000e+00
Epoch 103/125
8500/8500 [==============================] - 1s 81us/step - loss: 0.0013 - accuracy: 0.0000e+00
Epoch 104/125
8500/8500 [==============================] - 1s 81us/step - loss: 0.0014 - accuracy: 0.0000e+00
Epoch 105/125
8500/8500 [==============================] - 1s 81us/step - loss: 0.0013 - accuracy: 0.0000e+00
Epoch 106/125
8500/8500 [==============================] - 1s 81us/step - loss: 0.0012 - accuracy: 0.0000e+00
Epoch 107/125
8500/8500 [==============================] - 1s 82us/step - loss: 0.0014 - accuracy: 0.0000e+00
Epoch 108/125
8500/8500 [==============================] - 1s 80us/step - loss: 0.0012 - accuracy: 0.0000e+00
Epoch 109/125
8500/8500 [==============================] - 1s 81us/step - loss: 0.0013 - accuracy: 0.0000e+00
Epoch 110/125
8500/8500 [==============================] - 1s 81us/step - loss: 0.0012 - accuracy: 0.0000e+00
Epoch 111/125
8500/8500 [==============================] - 1s 81us/step - loss: 0.0012 - accuracy: 0.0000e+00
Epoch 112/125
8500/8500 [==============================] - 1s 80us/step - loss: 0.0011 - accuracy: 0.0000e+00
Epoch 113/125
8500/8500 [==============================] - 1s 81us/step - loss: 0.0014 - accuracy: 0.0000e+00
Epoch 114/125
8500/8500 [==============================] - 1s 81us/step - loss: 0.0011 - accuracy: 0.0000e+00
Epoch 115/125
8500/8500 [==============================] - 1s 79us/step - loss: 0.0012 - accuracy: 0.0000e+00
Epoch 116/125
8500/8500 [==============================] - 1s 82us/step - loss: 0.0013 - accuracy: 0.0000e+00
Epoch 117/125
8500/8500 [==============================] - 1s 80us/step - loss: 0.0013 - accuracy: 0.0000e+00
Epoch 118/125
8500/8500 [==============================] - 1s 82us/step - loss: 0.0011 - accuracy: 0.0000e+00
Epoch 119/125
8500/8500 [==============================] - 1s 79us/step - loss: 0.0012 - accuracy: 0.0000e+00
Epoch 120/125
8500/8500 [==============================] - 1s 80us/step - loss: 0.0012 - accuracy: 0.0000e+00
Epoch 121/125
8500/8500 [==============================] - 1s 80us/step - loss: 0.0011 - accuracy: 0.0000e+00
Epoch 122/125
8500/8500 [==============================] - 1s 80us/step - loss: 0.0011 - accuracy: 0.0000e+00
Epoch 123/125
8500/8500 [==============================] - 1s 82us/step - loss: 0.0012 - accuracy: 0.0000e+00
Epoch 124/125
8500/8500 [==============================] - 1s 81us/step - loss: 0.0011 - accuracy: 0.0000e+00
Epoch 125/125
8500/8500 [==============================] - 1s 80us/step - loss: 0.0018 - accuracy: 0.0000e+00
array([[ 0.6434634 , -0.09752402, 0.8342059 , 1.6331654 , 0.82901144,
-0.00917255]], dtype=float32)
Hope this answers your question. Happy Learning.

Related

keras sequential model returning loss: nan [closed]

Closed. This question does not meet Stack Overflow guidelines. It is not currently accepting answers.
This question does not appear to be about programming within the scope defined in the help center.
Closed 1 year ago.
Improve this question
I am trying to implement an artificial neural network in python using 'keras'. The problem I am facing is that my model is returning the 'loss: nan' for every epoch. I want to mention that the dataset that I have used from the csv file has a column with some missing values. I would like to asked if is this 'nan' is due to these missing data? and is there any way to get a numerical value of loss intead of 'nan'?
following is my code,
# Importing the Keras libraries and packages
import keras
from keras.models import Sequential
from keras.layers import Dense
# Initialising the ANN
classifier = Sequential()
# Adding the input layer and the first hidden layer
classifier.add(Dense(6, kernel_initializer = 'uniform', activation = 'relu', input_dim = 7))
# Adding the second hidden layer
classifier.add(Dense(6, kernel_initializer = 'uniform', activation = 'relu'))
# Adding the output layer
classifier.add(Dense(1, kernel_initializer = 'uniform', activation = 'sigmoid'))
# Compiling the ANN
classifier.compile(optimizer = 'adam', loss = 'binary_crossentropy', metrics = ['accuracy'])
# Fitting the ANN to the Training set
classifier.fit(X_train, y_train, batch_size = 10, epochs = 100)
# Part 3 - Making the predictions and evaluating the model
# Predicting the Test set results
y_pred = classifier.predict(X_test)
y_pred = (y_pred > 0.5)
# Making the Confusion Matrix
from sklearn.metrics import confusion_matrix
cm = confusion_matrix(y_test, y_pred)
print(cm)
following is the output I got,
Epoch 1/100
72/72 [==============================] - 1s 1ms/step - loss: nan - accuracy: 0.6299
Epoch 2/100
72/72 [==============================] - 0s 1ms/step - loss: nan - accuracy: 0.6133
Epoch 3/100
72/72 [==============================] - 0s 1ms/step - loss: nan - accuracy: 0.5996
Epoch 4/100
72/72 [==============================] - 0s 1ms/step - loss: nan - accuracy: 0.6477
Epoch 5/100
72/72 [==============================] - 0s 1ms/step - loss: nan - accuracy: 0.6131
Epoch 6/100
72/72 [==============================] - 0s 1ms/step - loss: nan - accuracy: 0.6256
Epoch 7/100
72/72 [==============================] - 0s 1ms/step - loss: nan - accuracy: 0.5933
Epoch 8/100
72/72 [==============================] - 0s 1ms/step - loss: nan - accuracy: 0.5806
Epoch 9/100
72/72 [==============================] - 0s 2ms/step - loss: nan - accuracy: 0.6123
Epoch 10/100
72/72 [==============================] - 0s 1ms/step - loss: nan - accuracy: 0.6342
Epoch 11/100
72/72 [==============================] - 0s 1ms/step - loss: nan - accuracy: 0.5910
Epoch 12/100
72/72 [==============================] - 0s 1ms/step - loss: nan - accuracy: 0.6151
Epoch 13/100
72/72 [==============================] - 0s 1ms/step - loss: nan - accuracy: 0.5999
Epoch 14/100
72/72 [==============================] - 0s 1ms/step - loss: nan - accuracy: 0.5828
Epoch 15/100
72/72 [==============================] - 0s 1ms/step - loss: nan - accuracy: 0.6317
Epoch 16/100
72/72 [==============================] - 0s 1ms/step - loss: nan - accuracy: 0.5782
Epoch 17/100
72/72 [==============================] - 0s 1ms/step - loss: nan - accuracy: 0.6085
Epoch 18/100
72/72 [==============================] - 0s 1ms/step - loss: nan - accuracy: 0.6383
Epoch 19/100
72/72 [==============================] - 0s 1ms/step - loss: nan - accuracy: 0.6102
Epoch 20/100
72/72 [==============================] - 0s 1ms/step - loss: nan - accuracy: 0.5944
Epoch 21/100
72/72 [==============================] - 0s 1ms/step - loss: nan - accuracy: 0.5966
Epoch 22/100
72/72 [==============================] - 0s 1ms/step - loss: nan - accuracy: 0.6168
Epoch 23/100
72/72 [==============================] - 0s 1ms/step - loss: nan - accuracy: 0.6010
Epoch 24/100
72/72 [==============================] - 0s 2ms/step - loss: nan - accuracy: 0.5946
Epoch 25/100
72/72 [==============================] - 0s 2ms/step - loss: nan - accuracy: 0.6586
Epoch 26/100
72/72 [==============================] - 0s 1ms/step - loss: nan - accuracy: 0.6527
Epoch 27/100
72/72 [==============================] - 0s 2ms/step - loss: nan - accuracy: 0.6222
Epoch 28/100
72/72 [==============================] - 0s 2ms/step - loss: nan - accuracy: 0.6322
Epoch 29/100
72/72 [==============================] - 0s 1ms/step - loss: nan - accuracy: 0.6211
Epoch 30/100
72/72 [==============================] - 0s 1ms/step - loss: nan - accuracy: 0.6261
Epoch 31/100
72/72 [==============================] - 0s 1ms/step - loss: nan - accuracy: 0.6164
Epoch 32/100
72/72 [==============================] - 0s 1ms/step - loss: nan - accuracy: 0.6242
Epoch 33/100
72/72 [==============================] - 0s 1ms/step - loss: nan - accuracy: 0.5785
Epoch 34/100
72/72 [==============================] - 0s 1ms/step - loss: nan - accuracy: 0.6115
Epoch 35/100
72/72 [==============================] - 0s 2ms/step - loss: nan - accuracy: 0.6290
Epoch 36/100
72/72 [==============================] - 0s 2ms/step - loss: nan - accuracy: 0.5886
Epoch 37/100
72/72 [==============================] - 0s 2ms/step - loss: nan - accuracy: 0.6225
Epoch 38/100
72/72 [==============================] - 0s 2ms/step - loss: nan - accuracy: 0.6109
Epoch 39/100
72/72 [==============================] - 0s 2ms/step - loss: nan - accuracy: 0.5951
Epoch 40/100
72/72 [==============================] - 0s 2ms/step - loss: nan - accuracy: 0.6126
Epoch 41/100
72/72 [==============================] - 0s 2ms/step - loss: nan - accuracy: 0.6096
Epoch 42/100
72/72 [==============================] - 0s 2ms/step - loss: nan - accuracy: 0.6146
Epoch 43/100
72/72 [==============================] - 0s 2ms/step - loss: nan - accuracy: 0.6031
Epoch 44/100
72/72 [==============================] - 0s 2ms/step - loss: nan - accuracy: 0.6232
Epoch 45/100
72/72 [==============================] - 0s 2ms/step - loss: nan - accuracy: 0.6403
Epoch 46/100
72/72 [==============================] - 0s 2ms/step - loss: nan - accuracy: 0.6068
Epoch 47/100
72/72 [==============================] - 0s 2ms/step - loss: nan - accuracy: 0.6126
Epoch 48/100
72/72 [==============================] - 0s 2ms/step - loss: nan - accuracy: 0.5879
Epoch 49/100
72/72 [==============================] - 0s 2ms/step - loss: nan - accuracy: 0.6173
Epoch 50/100
72/72 [==============================] - 0s 2ms/step - loss: nan - accuracy: 0.6379
Epoch 51/100
72/72 [==============================] - 0s 2ms/step - loss: nan - accuracy: 0.6323
Epoch 52/100
72/72 [==============================] - 0s 2ms/step - loss: nan - accuracy: 0.6202
Epoch 53/100
72/72 [==============================] - 0s 1ms/step - loss: nan - accuracy: 0.5956
Epoch 54/100
72/72 [==============================] - 0s 2ms/step - loss: nan - accuracy: 0.6069
Epoch 55/100
72/72 [==============================] - 0s 2ms/step - loss: nan - accuracy: 0.6090
Epoch 56/100
72/72 [==============================] - 0s 1ms/step - loss: nan - accuracy: 0.6400
Epoch 57/100
72/72 [==============================] - 0s 2ms/step - loss: nan - accuracy: 0.6293
Epoch 58/100
72/72 [==============================] - 0s 2ms/step - loss: nan - accuracy: 0.6008
Epoch 59/100
72/72 [==============================] - 0s 2ms/step - loss: nan - accuracy: 0.6422
Epoch 60/100
72/72 [==============================] - 0s 2ms/step - loss: nan - accuracy: 0.6270
Epoch 61/100
72/72 [==============================] - 0s 1ms/step - loss: nan - accuracy: 0.5997
Epoch 62/100
72/72 [==============================] - 0s 1ms/step - loss: nan - accuracy: 0.5959
Epoch 63/100
72/72 [==============================] - 0s 1ms/step - loss: nan - accuracy: 0.6409
Epoch 64/100
72/72 [==============================] - 0s 1ms/step - loss: nan - accuracy: 0.6185
Epoch 65/100
72/72 [==============================] - 0s 1ms/step - loss: nan - accuracy: 0.6096
Epoch 66/100
72/72 [==============================] - 0s 1ms/step - loss: nan - accuracy: 0.6240
Epoch 67/100
72/72 [==============================] - 0s 1ms/step - loss: nan - accuracy: 0.6176
Epoch 68/100
72/72 [==============================] - 0s 2ms/step - loss: nan - accuracy: 0.5958
Epoch 69/100
72/72 [==============================] - 0s 2ms/step - loss: nan - accuracy: 0.5994
Epoch 70/100
72/72 [==============================] - 0s 1ms/step - loss: nan - accuracy: 0.6375
Epoch 71/100
72/72 [==============================] - 0s 1ms/step - loss: nan - accuracy: 0.6238
Epoch 72/100
72/72 [==============================] - 0s 1ms/step - loss: nan - accuracy: 0.6214
Epoch 73/100
72/72 [==============================] - 0s 2ms/step - loss: nan - accuracy: 0.6147
Epoch 74/100
72/72 [==============================] - 0s 2ms/step - loss: nan - accuracy: 0.6046
Epoch 75/100
72/72 [==============================] - 0s 2ms/step - loss: nan - accuracy: 0.5876
Epoch 76/100
72/72 [==============================] - 0s 2ms/step - loss: nan - accuracy: 0.6085
Epoch 77/100
72/72 [==============================] - 0s 2ms/step - loss: nan - accuracy: 0.6265
Epoch 78/100
72/72 [==============================] - 0s 2ms/step - loss: nan - accuracy: 0.5964
Epoch 79/100
72/72 [==============================] - 0s 2ms/step - loss: nan - accuracy: 0.6177
Epoch 80/100
72/72 [==============================] - 0s 1ms/step - loss: nan - accuracy: 0.6143
Epoch 81/100
72/72 [==============================] - 0s 1ms/step - loss: nan - accuracy: 0.6093
Epoch 82/100
72/72 [==============================] - 0s 1ms/step - loss: nan - accuracy: 0.6162
Epoch 83/100
72/72 [==============================] - 0s 2ms/step - loss: nan - accuracy: 0.5974
Epoch 84/100
72/72 [==============================] - 0s 2ms/step - loss: nan - accuracy: 0.6151
Epoch 85/100
72/72 [==============================] - 0s 2ms/step - loss: nan - accuracy: 0.6304
Epoch 86/100
72/72 [==============================] - 0s 1ms/step - loss: nan - accuracy: 0.6148
Epoch 87/100
72/72 [==============================] - 0s 1ms/step - loss: nan - accuracy: 0.6224
Epoch 88/100
72/72 [==============================] - 0s 1ms/step - loss: nan - accuracy: 0.6076
Epoch 89/100
72/72 [==============================] - 0s 2ms/step - loss: nan - accuracy: 0.6228
Epoch 90/100
72/72 [==============================] - 0s 2ms/step - loss: nan - accuracy: 0.6203
Epoch 91/100
72/72 [==============================] - 0s 2ms/step - loss: nan - accuracy: 0.6431
Epoch 92/100
72/72 [==============================] - 0s 1ms/step - loss: nan - accuracy: 0.6258
Epoch 93/100
72/72 [==============================] - 0s 1ms/step - loss: nan - accuracy: 0.6425
Epoch 94/100
72/72 [==============================] - 0s 1ms/step - loss: nan - accuracy: 0.6436
Epoch 95/100
72/72 [==============================] - 0s 1ms/step - loss: nan - accuracy: 0.6318
Epoch 96/100
72/72 [==============================] - 0s 1ms/step - loss: nan - accuracy: 0.6273
Epoch 97/100
72/72 [==============================] - 0s 1ms/step - loss: nan - accuracy: 0.6230
Epoch 98/100
72/72 [==============================] - 0s 1ms/step - loss: nan - accuracy: 0.5846
Epoch 99/100
72/72 [==============================] - 0s 1ms/step - loss: nan - accuracy: 0.5866
Epoch 100/100
72/72 [==============================] - 0s 1ms/step - loss: nan - accuracy: 0.6027
[[110 0]
[ 69 0]]
The values of X_train are as follows,
X_train values
As discussed in the comments section, the issue is that your input dataset X_train contains NaNs. Since any mathematical operation involving a NaN value results in NaN (and the loss function directly depends on X_train) your loss also ends up being NaN.
To overcome this issue, you can impute the missing values. For example, replacing NaNs with 0 is a common way to tackle missing values (though not necessarily the best). Another typical choice is to impute NaNs with the mean or median value of the corresponding feature. In any case, you can always see what works best via the validation loss.

How do I update my model acc and val_acc?

I am currently working on a project to create a word prediction model. There are 800,000 datasets, but 0.5% is used separately as a prototype, and the training data size is as follows.
I want to know why loss and val_loss decrease during training, but acc and val_acc remain the same.
Train Data Set : 31471
my model's parameters
epochs=0
optimizer = tensorflow.keras.optimizers.SGD(lr=0.01)
loss_func = 'categorical_crossentropy'
hidden_1_neural = 128
hidden_2_neural = 64
hidden_1_dropout = 0.1
hidden_2_dropout = 0
activation = 'relu'
out_put_activation='softmax'
embedding_dim = 10
Training : optimizer=SGD
Epoch 1/100
1259/1259 [==============================] - 15s 12ms/step - loss: 8.6827 - accuracy: 0.1164 - val_loss: 8.3275 - val_accuracy: 0.1300
Epoch 2/100
1259/1259 [==============================] - 12s 10ms/step - loss: 8.3446 - accuracy: 0.1178 - val_loss: 8.1969 - val_accuracy: 0.1300
Epoch 3/100
1259/1259 [==============================] - 13s 10ms/step - loss: 8.2007 - accuracy: 0.1178 - val_loss: 8.0654 - val_accuracy: 0.1300
Epoch 4/100
1259/1259 [==============================] - 13s 10ms/step - loss: 8.0747 - accuracy: 0.1178 - val_loss: 7.9659 - val_accuracy: 0.1300
Epoch 5/100
1259/1259 [==============================] - 12s 10ms/step - loss: 7.9752 - accuracy: 0.1178 - val_loss: 7.8901 - val_accuracy: 0.1300
Epoch 6/100
1259/1259 [==============================] - 13s 10ms/step - loss: 7.8923 - accuracy: 0.1178 - val_loss: 7.8225 - val_accuracy: 0.1300
Epoch 7/100
1259/1259 [==============================] - 13s 11ms/step - loss: 7.8232 - accuracy: 0.1178 - val_loss: 7.7742 - val_accuracy: 0.1300
Epoch 8/100
1259/1259 [==============================] - 13s 10ms/step - loss: 7.7664 - accuracy: 0.1178 - val_loss: 7.7329 - val_accuracy: 0.1300
Epoch 9/100
1259/1259 [==============================] - 13s 10ms/step - loss: 7.7186 - accuracy: 0.1178 - val_loss: 7.7037 - val_accuracy: 0.1300
Epoch 10/100
1259/1259 [==============================] - 14s 11ms/step - loss: 7.6785 - accuracy: 0.1178 - val_loss: 7.6797 - val_accuracy: 0.1300
Epoch 11/100
1259/1259 [==============================] - 13s 10ms/step - loss: 7.6450 - accuracy: 0.1178 - val_loss: 7.6598 - val_accuracy: 0.1300
Epoch 12/100
1259/1259 [==============================] - 13s 10ms/step - loss: 7.6165 - accuracy: 0.1178 - val_loss: 7.6524 - val_accuracy: 0.1300
Epoch 13/100
1259/1259 [==============================] - 13s 10ms/step - loss: 7.5922 - accuracy: 0.1178 - val_loss: 7.6367 - val_accuracy: 0.1300
Epoch 14/100
1259/1259 [==============================] - 13s 10ms/step - loss: 7.5712 - accuracy: 0.1178 - val_loss: 7.6332 - val_accuracy: 0.1300
Epoch 15/100
1259/1259 [==============================] - 13s 10ms/step - loss: 7.5534 - accuracy: 0.1178 - val_loss: 7.6280 - val_accuracy: 0.1300
Epoch 16/100
1259/1259 [==============================] - 14s 11ms/step - loss: 7.5373 - accuracy: 0.1178 - val_loss: 7.6238 - val_accuracy: 0.1300
Epoch 17/100
1259/1259 [==============================] - 14s 11ms/step - loss: 7.5234 - accuracy: 0.1178 - val_loss: 7.6239 - val_accuracy: 0.1300
Epoch 18/100
1259/1259 [==============================] - 14s 11ms/step - loss: 7.5107 - accuracy: 0.1178 - val_loss: 7.6246 - val_accuracy: 0.1300
Epoch 19/100
1259/1259 [==============================] - 13s 10ms/step - loss: 7.4995 - accuracy: 0.1178 - val_loss: 7.6208 - val_accuracy: 0.1300
Epoch 20/100
1259/1259 [==============================] - 13s 10ms/step - loss: 7.4893 - accuracy: 0.1178 - val_loss: 7.6222 - val_accuracy: 0.1300
Epoch 21/100
1259/1259 [==============================] - 12s 10ms/step - loss: 7.4798 - accuracy: 0.1178 - val_loss: 7.6239 - val_accuracy: 0.1300
Epoch 22/100
1259/1259 [==============================] - 13s 10ms/step - loss: 7.4710 - accuracy: 0.1178 - val_loss: 7.6246 - val_accuracy: 0.1300
Epoch 23/100
1259/1259 [==============================] - 13s 10ms/step - loss: 7.4634 - accuracy: 0.1178 - val_loss: 7.6286 - val_accuracy: 0.1300
Epoch 24/100
1259/1259 [==============================] - 13s 10ms/step - loss: 7.4561 - accuracy: 0.1178 - val_loss: 7.6315 - val_accuracy: 0.1300
Epoch 25/100
1259/1259 [==============================] - 13s 10ms/step - loss: 7.4492 - accuracy: 0.1178 - val_loss: 7.6363 - val_accuracy: 0.1300
Epoch 26/100
1259/1259 [==============================] - 13s 11ms/step - loss: 7.4432 - accuracy: 0.1178 - val_loss: 7.6363 - val_accuracy: 0.1300
Epoch 27/100
1259/1259 [==============================] - 13s 10ms/step - loss: 7.4370 - accuracy: 0.1178 - val_loss: 7.6396 - val_accuracy: 0.1300
Epoch 28/100
1259/1259 [==============================] - 12s 10ms/step - loss: 7.4321 - accuracy: 0.1178 - val_loss: 7.6433 - val_accuracy: 0.1300
Epoch 29/100
1259/1259 [==============================] - 12s 10ms/step - loss: 7.4264 - accuracy: 0.1178 - val_loss: 7.6484 - val_accuracy: 0.1300
Epoch 30/100
1259/1259 [==============================] - 12s 10ms/step - loss: 7.4214 - accuracy: 0.1178 - val_loss: 7.6568 - val_accuracy: 0.1300
Epoch 31/100
1259/1259 [==============================] - 13s 10ms/step - loss: 7.4173 - accuracy: 0.1178 - val_loss: 7.6591 - val_accuracy: 0.1300
Epoch 32/100
1259/1259 [==============================] - 13s 10ms/step - loss: 7.4122 - accuracy: 0.1178 - val_loss: 7.6672 - val_accuracy: 0.1300
Epoch 33/100
1259/1259 [==============================] - 13s 10ms/step - loss: 7.4084 - accuracy: 0.1178 - val_loss: 7.6637 - val_accuracy: 0.1300
Epoch 34/100
1259/1259 [==============================] - 14s 11ms/step - loss: 7.4047 - accuracy: 0.1178 - val_loss: 7.6674 - val_accuracy: 0.1300
Epoch 35/100
1259/1259 [==============================] - 13s 11ms/step - loss: 7.4007 - accuracy: 0.1178 - val_loss: 7.6710 - val_accuracy: 0.1300
Epoch 36/100
1259/1259 [==============================] - 13s 10ms/step - loss: 7.3977 - accuracy: 0.1178 - val_loss: 7.6747 - val_accuracy: 0.1300
Epoch 37/100
1259/1259 [==============================] - 14s 11ms/step - loss: 7.3936 - accuracy: 0.1178 - val_loss: 7.6788 - val_accuracy: 0.1300
Epoch 38/100
1259/1259 [==============================] - 13s 11ms/step - loss: 7.3905 - accuracy: 0.1178 - val_loss: 7.6854 - val_accuracy: 0.1300
Epoch 39/100
1259/1259 [==============================] - 13s 10ms/step - loss: 7.3874 - accuracy: 0.1178 - val_loss: 7.6879 - val_accuracy: 0.1300
Epoch 40/100
1259/1259 [==============================] - 13s 11ms/step - loss: 7.3848 - accuracy: 0.1178 - val_loss: 7.6914 - val_accuracy: 0.1300
Epoch 41/100
1259/1259 [==============================] - 13s 11ms/step - loss: 7.3819 - accuracy: 0.1178 - val_loss: 7.6973 - val_accuracy: 0.1300
Epoch 42/100
1259/1259 [==============================] - 14s 11ms/step - loss: 7.3787 - accuracy: 0.1178 - val_loss: 7.6993 - val_accuracy: 0.1300
Epoch 43/100
1259/1259 [==============================] - 14s 11ms/step - loss: 7.3762 - accuracy: 0.1178 - val_loss: 7.7056 - val_accuracy: 0.1300
Epoch 44/100
1259/1259 [==============================] - 13s 10ms/step - loss: 7.3737 - accuracy: 0.1178 - val_loss: 7.7069 - val_accuracy: 0.1300
Epoch 45/100
1259/1259 [==============================] - 13s 10ms/step - loss: 7.3711 - accuracy: 0.1178 - val_loss: 7.7115 - val_accuracy: 0.1300
Epoch 46/100
1259/1259 [==============================] - 13s 10ms/step - loss: 7.3683 - accuracy: 0.1178 - val_loss: 7.7161 - val_accuracy: 0.1300
Epoch 47/100
1259/1259 [==============================] - 13s 11ms/step - loss: 7.3662 - accuracy: 0.1178 - val_loss: 7.7211 - val_accuracy: 0.1300
Epoch 48/100
1259/1259 [==============================] - 12s 10ms/step - loss: 7.3643 - accuracy: 0.1178 - val_loss: 7.7230 - val_accuracy: 0.1300
Epoch 49/100
1259/1259 [==============================] - 12s 10ms/step - loss: 7.3619 - accuracy: 0.1178 - val_loss: 7.7278 - val_accuracy: 0.1300
Epoch 50/100
1259/1259 [==============================] - 14s 11ms/step - loss: 7.3597 - accuracy: 0.1178 - val_loss: 7.7334 - val_accuracy: 0.1300
Epoch 51/100
1259/1259 [==============================] - 13s 11ms/step - loss: 7.3579 - accuracy: 0.1178 - val_loss: 7.7357 - val_accuracy: 0.1300
Epoch 52/100
1259/1259 [==============================] - 13s 11ms/step - loss: 7.3560 - accuracy: 0.1178 - val_loss: 7.7445 - val_accuracy: 0.1300
Epoch 53/100
1259/1259 [==============================] - 13s 10ms/step - loss: 7.3541 - accuracy: 0.1178 - val_loss: 7.7450 - val_accuracy: 0.1300
Epoch 54/100
1259/1259 [==============================] - 13s 11ms/step - loss: 7.3518 - accuracy: 0.1178 - val_loss: 7.7577 - val_accuracy: 0.1300
Epoch 55/100
1259/1259 [==============================] - 13s 11ms/step - loss: 7.3504 - accuracy: 0.1178 - val_loss: 7.7527 - val_accuracy: 0.1300
Epoch 56/100
1259/1259 [==============================] - 13s 10ms/step - loss: 7.3485 - accuracy: 0.1178 - val_loss: 7.7569 - val_accuracy: 0.1300
Epoch 57/100
1259/1259 [==============================] - 13s 11ms/step - loss: 7.3472 - accuracy: 0.1178 - val_loss: 7.7567 - val_accuracy: 0.1300
Epoch 58/100
1259/1259 [==============================] - 13s 10ms/step - loss: 7.3462 - accuracy: 0.1178 - val_loss: 7.7610 - val_accuracy: 0.1300
Epoch 59/100
1259/1259 [==============================] - 13s 10ms/step - loss: 7.3444 - accuracy: 0.1178 - val_loss: 7.7650 - val_accuracy: 0.1300
Epoch 60/100
1259/1259 [==============================] - 13s 10ms/step - loss: 7.3426 - accuracy: 0.1178 - val_loss: 7.7676 - val_accuracy: 0.1300
Epoch 61/100
1259/1259 [==============================] - 13s 10ms/step - loss: 7.3406 - accuracy: 0.1178 - val_loss: 7.7711 - val_accuracy: 0.1300
Epoch 62/100
1259/1259 [==============================] - 13s 10ms/step - loss: 7.3398 - accuracy: 0.1178 - val_loss: 7.7753 - val_accuracy: 0.1300
Epoch 63/100
1259/1259 [==============================] - 13s 10ms/step - loss: 7.3381 - accuracy: 0.1178 - val_loss: 7.7841 - val_accuracy: 0.1300
Epoch 64/100
1259/1259 [==============================] - 13s 10ms/step - loss: 7.3375 - accuracy: 0.1178 - val_loss: 7.7857 - val_accuracy: 0.1300
Epoch 65/100
1259/1259 [==============================] - 13s 10ms/step - loss: 7.3359 - accuracy: 0.1178 - val_loss: 7.7862 - val_accuracy: 0.1300
Epoch 66/100
1259/1259 [==============================] - 13s 10ms/step - loss: 7.3345 - accuracy: 0.1178 - val_loss: 7.7889 - val_accuracy: 0.1300
Epoch 67/100
1259/1259 [==============================] - 13s 10ms/step - loss: 7.3336 - accuracy: 0.1178 - val_loss: 7.7951 - val_accuracy: 0.1300
Epoch 68/100
1259/1259 [==============================] - 13s 10ms/step - loss: 7.3321 - accuracy: 0.1178 - val_loss: 7.7976 - val_accuracy: 0.1300
Epoch 69/100
1259/1259 [==============================] - 13s 11ms/step - loss: 7.3309 - accuracy: 0.1178 - val_loss: 7.7996 - val_accuracy: 0.1300
Epoch 70/100
1259/1259 [==============================] - 14s 11ms/step - loss: 7.3297 - accuracy: 0.1178 - val_loss: 7.8092 - val_accuracy: 0.1300
Epoch 71/100
1259/1259 [==============================] - 14s 11ms/step - loss: 7.3286 - accuracy: 0.1178 - val_loss: 7.8060 - val_accuracy: 0.1300
Epoch 72/100
1259/1259 [==============================] - 14s 11ms/step - loss: 7.3279 - accuracy: 0.1178 - val_loss: 7.8098 - val_accuracy: 0.1300
Epoch 73/100
1259/1259 [==============================] - 14s 11ms/step - loss: 7.3261 - accuracy: 0.1178 - val_loss: 7.8125 - val_accuracy: 0.1300
Epoch 74/100
1259/1259 [==============================] - 14s 11ms/step - loss: 7.3249 - accuracy: 0.1178 - val_loss: 7.8165 - val_accuracy: 0.1300
Epoch 75/100
1259/1259 [==============================] - 15s 12ms/step - loss: 7.3244 - accuracy: 0.1178 - val_loss: 7.8197 - val_accuracy: 0.1300
Epoch 76/100
1259/1259 [==============================] - 14s 11ms/step - loss: 7.3239 - accuracy: 0.1178 - val_loss: 7.8224 - val_accuracy: 0.1300
Epoch 77/100
1259/1259 [==============================] - 14s 11ms/step - loss: 7.3226 - accuracy: 0.1178 - val_loss: 7.8259 - val_accuracy: 0.1300
Epoch 78/100
1259/1259 [==============================] - 14s 11ms/step - loss: 7.3217 - accuracy: 0.1178 - val_loss: 7.8311 - val_accuracy: 0.1300
Epoch 79/100
1259/1259 [==============================] - 13s 11ms/step - loss: 7.3206 - accuracy: 0.1178 - val_loss: 7.8353 - val_accuracy: 0.1300
Epoch 80/100
1259/1259 [==============================] - 14s 11ms/step - loss: 7.3197 - accuracy: 0.1178 - val_loss: 7.8423 - val_accuracy: 0.1300
Epoch 81/100
1259/1259 [==============================] - 14s 11ms/step - loss: 7.3193 - accuracy: 0.1178 - val_loss: 7.8391 - val_accuracy: 0.1300
Epoch 82/100
1259/1259 [==============================] - 14s 11ms/step - loss: 7.3180 - accuracy: 0.1178 - val_loss: 7.8399 - val_accuracy: 0.1300
Epoch 83/100
1259/1259 [==============================] - 13s 11ms/step - loss: 7.3172 - accuracy: 0.1178 - val_loss: 7.8495 - val_accuracy: 0.1300
Epoch 84/100
1259/1259 [==============================] - 13s 10ms/step - loss: 7.3165 - accuracy: 0.1178 - val_loss: 7.8492 - val_accuracy: 0.1300
Epoch 85/100
1259/1259 [==============================] - 13s 11ms/step - loss: 7.3151 - accuracy: 0.1178 - val_loss: 7.8505 - val_accuracy: 0.1300
Epoch 86/100
1259/1259 [==============================] - 14s 11ms/step - loss: 7.3150 - accuracy: 0.1178 - val_loss: 7.8527 - val_accuracy: 0.1300
Epoch 87/100
1259/1259 [==============================] - 14s 11ms/step - loss: 7.3143 - accuracy: 0.1178 - val_loss: 7.8555 - val_accuracy: 0.1300
Epoch 88/100
1259/1259 [==============================] - 16s 13ms/step - loss: 7.3132 - accuracy: 0.1178 - val_loss: 7.8578 - val_accuracy: 0.1300
Epoch 89/100
1259/1259 [==============================] - 15s 12ms/step - loss: 7.3128 - accuracy: 0.1178 - val_loss: 7.8605 - val_accuracy: 0.1300
Epoch 90/100
1259/1259 [==============================] - 13s 10ms/step - loss: 7.3125 - accuracy: 0.1178 - val_loss: 7.8639 - val_accuracy: 0.1300
Epoch 91/100
1259/1259 [==============================] - 12s 10ms/step - loss: 7.3114 - accuracy: 0.1178 - val_loss: 7.8733 - val_accuracy: 0.1300
Epoch 92/100
1259/1259 [==============================] - 12s 10ms/step - loss: 7.3108 - accuracy: 0.1178 - val_loss: 7.8717 - val_accuracy: 0.1300
Epoch 93/100
1259/1259 [==============================] - 12s 10ms/step - loss: 7.3097 - accuracy: 0.1178 - val_loss: 7.8742 - val_accuracy: 0.1300
Epoch 94/100
1259/1259 [==============================] - 12s 10ms/step - loss: 7.3095 - accuracy: 0.1178 - val_loss: 7.8750 - val_accuracy: 0.1300
Epoch 95/100
1259/1259 [==============================] - 12s 10ms/step - loss: 7.3086 - accuracy: 0.1178 - val_loss: 7.8805 - val_accuracy: 0.1300
Epoch 96/100
1259/1259 [==============================] - 12s 10ms/step - loss: 7.3083 - accuracy: 0.1178 - val_loss: 7.8804 - val_accuracy: 0.1300
Epoch 97/100
1259/1259 [==============================] - 12s 10ms/step - loss: 7.3077 - accuracy: 0.1178 - val_loss: 7.8858 - val_accuracy: 0.1300
Epoch 98/100
1259/1259 [==============================] - 14s 11ms/step - loss: 7.3070 - accuracy: 0.1178 - val_loss: 7.8868 - val_accuracy: 0.1300
Epoch 99/100
1259/1259 [==============================] - 13s 10ms/step - loss: 7.3062 - accuracy: 0.1178 - val_loss: 7.8913 - val_accuracy: 0.1300
Epoch 100/100
1259/1259 [==============================] - 13s 10ms/step - loss: 7.3059 - accuracy: 0.1178 - val_loss: 7.8924 - val_accuracy: 0.1300
If I use adam as the optimizer, loss decreases and acc increases, but val_loss and val_acc increase.
#1 add context
model summary :
Model: "functional_23"
_________________________________________________________________
Layer (type) Output Shape Param #
=================================================================
input_12 (InputLayer) [(None, 2)] 0
_________________________________________________________________
embedding_11 (Embedding) (None, 2, 10) 104270
_________________________________________________________________
lstm_22 (LSTM) (None, 2, 128) 71168
_________________________________________________________________
lstm_23 (LSTM) (None, 64) 49408
_________________________________________________________________
dense_11 (Dense) (None, 10427) 677755
=================================================================
Total params: 902,601
Trainable params: 902,601
Non-trainable params: 0
_________________________________________________________________
training : optimizer = Adam
Epoch 1/100
1259/1259 [==============================] - 11s 9ms/step - loss: 8.0481 - accuracy: 0.1177 - val_loss: 7.6862 - val_accuracy: 0.1300
Epoch 2/100
1259/1259 [==============================] - 10s 8ms/step - loss: 7.2587 - accuracy: 0.1178 - val_loss: 8.0457 - val_accuracy: 0.1300
Epoch 3/100
1259/1259 [==============================] - 12s 9ms/step - loss: 7.0693 - accuracy: 0.1178 - val_loss: 8.3413 - val_accuracy: 0.1300
Epoch 4/100
1259/1259 [==============================] - 10s 8ms/step - loss: 6.9767 - accuracy: 0.1178 - val_loss: 8.4930 - val_accuracy: 0.1300
Epoch 5/100
1259/1259 [==============================] - 10s 8ms/step - loss: 6.8866 - accuracy: 0.1178 - val_loss: 9.0810 - val_accuracy: 0.1300
Epoch 6/100
1259/1259 [==============================] - 10s 8ms/step - loss: 6.7718 - accuracy: 0.1178 - val_loss: 9.5166 - val_accuracy: 0.1303
Epoch 7/100
1259/1259 [==============================] - 12s 9ms/step - loss: 6.6101 - accuracy: 0.1204 - val_loss: 10.2690 - val_accuracy: 0.1385
Epoch 8/100
1259/1259 [==============================] - 11s 9ms/step - loss: 6.4294 - accuracy: 0.1291 - val_loss: 10.5882 - val_accuracy: 0.1405
Epoch 9/100
1259/1259 [==============================] - 11s 9ms/step - loss: 6.2603 - accuracy: 0.1316 - val_loss: 10.7328 - val_accuracy: 0.1395
Epoch 10/100
1259/1259 [==============================] - 12s 9ms/step - loss: 6.1231 - accuracy: 0.1351 - val_loss: 11.0442 - val_accuracy: 0.1405
Epoch 11/100
1259/1259 [==============================] - 11s 9ms/step - loss: 6.0100 - accuracy: 0.1366 - val_loss: 11.2861 - val_accuracy: 0.1401
Epoch 12/100
1259/1259 [==============================] - 11s 9ms/step - loss: 5.8962 - accuracy: 0.1378 - val_loss: 11.4858 - val_accuracy: 0.1366
Epoch 13/100
1259/1259 [==============================] - 12s 9ms/step - loss: 5.7899 - accuracy: 0.1389 - val_loss: 11.5724 - val_accuracy: 0.1379
Epoch 14/100
1259/1259 [==============================] - 11s 9ms/step - loss: 5.6857 - accuracy: 0.1397 - val_loss: 12.1945 - val_accuracy: 0.1392
Epoch 15/100
1259/1259 [==============================] - 12s 9ms/step - loss: 5.5770 - accuracy: 0.1416 - val_loss: 12.4677 - val_accuracy: 0.1389
Epoch 16/100
1259/1259 [==============================] - 11s 9ms/step - loss: 5.4650 - accuracy: 0.1436 - val_loss: 13.1879 - val_accuracy: 0.1398
Epoch 17/100
1259/1259 [==============================] - 12s 10ms/step - loss: 5.3608 - accuracy: 0.1448 - val_loss: 13.3614 - val_accuracy: 0.1392
Epoch 18/100
1259/1259 [==============================] - 11s 9ms/step - loss: 5.2428 - accuracy: 0.1468 - val_loss: 13.8756 - val_accuracy: 0.1373
Epoch 19/100
1259/1259 [==============================] - 11s 9ms/step - loss: 5.1173 - accuracy: 0.1506 - val_loss: 14.5616 - val_accuracy: 0.1344
Epoch 20/100
1259/1259 [==============================] - 10s 8ms/step - loss: 4.9850 - accuracy: 0.1519 - val_loss: 15.1821 - val_accuracy: 0.1322
Epoch 21/100
1259/1259 [==============================] - 11s 9ms/step - loss: 4.8699 - accuracy: 0.1563 - val_loss: 15.8595 - val_accuracy: 0.1246
Epoch 22/100
1259/1259 [==============================] - 10s 8ms/step - loss: 4.7625 - accuracy: 0.1609 - val_loss: 16.9606 - val_accuracy: 0.1274
Epoch 23/100
1259/1259 [==============================] - 11s 9ms/step - loss: 4.6529 - accuracy: 0.1648 - val_loss: 17.2735 - val_accuracy: 0.1255
Epoch 24/100
1259/1259 [==============================] - 11s 8ms/step - loss: 4.5586 - accuracy: 0.1665 - val_loss: 17.6336 - val_accuracy: 0.1268
Epoch 25/100
1259/1259 [==============================] - 11s 8ms/step - loss: 4.4696 - accuracy: 0.1719 - val_loss: 18.8503 - val_accuracy: 0.1239
Epoch 26/100
1259/1259 [==============================] - 11s 9ms/step - loss: 4.3908 - accuracy: 0.1768 - val_loss: 18.8996 - val_accuracy: 0.1271
Epoch 27/100
1259/1259 [==============================] - 15s 12ms/step - loss: 4.3114 - accuracy: 0.1809 - val_loss: 20.1614 - val_accuracy: 0.1271
Epoch 28/100
1259/1259 [==============================] - 11s 9ms/step - loss: 4.2313 - accuracy: 0.1856 - val_loss: 19.8104 - val_accuracy: 0.1239
Epoch 29/100
1259/1259 [==============================] - 15s 12ms/step - loss: 4.1639 - accuracy: 0.1898 - val_loss: 21.2305 - val_accuracy: 0.1268
Epoch 30/100
1259/1259 [==============================] - 11s 9ms/step - loss: 4.0977 - accuracy: 0.1964 - val_loss: 22.0776 - val_accuracy: 0.1290
Epoch 31/100
1259/1259 [==============================] - 12s 9ms/step - loss: 4.0339 - accuracy: 0.2020 - val_loss: 22.2132 - val_accuracy: 0.1284
Epoch 32/100
1259/1259 [==============================] - 11s 9ms/step - loss: 3.9690 - accuracy: 0.2041 - val_loss: 22.7188 - val_accuracy: 0.1303
Epoch 33/100
1259/1259 [==============================] - 11s 9ms/step - loss: 3.9047 - accuracy: 0.2060 - val_loss: 23.6534 - val_accuracy: 0.1277
Epoch 34/100
1259/1259 [==============================] - 12s 10ms/step - loss: 3.8326 - accuracy: 0.2119 - val_loss: 24.6426 - val_accuracy: 0.1255
Epoch 35/100
1259/1259 [==============================] - 11s 9ms/step - loss: 3.7886 - accuracy: 0.2203 - val_loss: 23.4429 - val_accuracy: 0.1214
Epoch 36/100
1259/1259 [==============================] - 11s 9ms/step - loss: 3.7441 - accuracy: 0.2277 - val_loss: 23.9890 - val_accuracy: 0.1246
Epoch 37/100
1259/1259 [==============================] - 11s 9ms/step - loss: 3.6865 - accuracy: 0.2305 - val_loss: 25.8336 - val_accuracy: 0.1262
Epoch 38/100
1259/1259 [==============================] - 11s 9ms/step - loss: 3.6346 - accuracy: 0.2368 - val_loss: 26.5063 - val_accuracy: 0.1195
Epoch 39/100
1259/1259 [==============================] - 11s 9ms/step - loss: 3.5873 - accuracy: 0.2434 - val_loss: 26.5917 - val_accuracy: 0.1249
Epoch 40/100
1259/1259 [==============================] - 11s 9ms/step - loss: 3.5522 - accuracy: 0.2452 - val_loss: 26.5287 - val_accuracy: 0.1214
Epoch 41/100
1259/1259 [==============================] - 11s 9ms/step - loss: 3.4908 - accuracy: 0.2509 - val_loss: 27.0090 - val_accuracy: 0.1255
Epoch 42/100
1259/1259 [==============================] - 11s 9ms/step - loss: 3.4511 - accuracy: 0.2560 - val_loss: 27.7853 - val_accuracy: 0.1201
Epoch 43/100
1259/1259 [==============================] - 11s 9ms/step - loss: 3.4017 - accuracy: 0.2629 - val_loss: 27.8698 - val_accuracy: 0.1169
Epoch 44/100
1259/1259 [==============================] - 11s 9ms/step - loss: 3.3732 - accuracy: 0.2718 - val_loss: 28.2814 - val_accuracy: 0.1230
Epoch 45/100
1259/1259 [==============================] - 11s 9ms/step - loss: 3.3030 - accuracy: 0.2763 - val_loss: 29.2292 - val_accuracy: 0.1227
Epoch 46/100
1259/1259 [==============================] - 11s 9ms/step - loss: 3.2584 - accuracy: 0.2841 - val_loss: 28.8271 - val_accuracy: 0.1211
Epoch 47/100
1259/1259 [==============================] - 11s 9ms/step - loss: 3.2145 - accuracy: 0.2907 - val_loss: 30.1880 - val_accuracy: 0.1220
Epoch 48/100
1259/1259 [==============================] - 11s 9ms/step - loss: 3.1666 - accuracy: 0.3000 - val_loss: 29.0877 - val_accuracy: 0.1150
Epoch 49/100
1259/1259 [==============================] - 12s 9ms/step - loss: 3.1291 - accuracy: 0.3031 - val_loss: 30.4579 - val_accuracy: 0.1265
Epoch 50/100
1259/1259 [==============================] - 11s 9ms/step - loss: 3.0989 - accuracy: 0.3113 - val_loss: 30.1047 - val_accuracy: 0.1109
Epoch 51/100
1259/1259 [==============================] - 11s 9ms/step - loss: 3.0430 - accuracy: 0.3180 - val_loss: 30.4653 - val_accuracy: 0.1207
Epoch 52/100
1259/1259 [==============================] - 12s 9ms/step - loss: 3.0016 - accuracy: 0.3242 - val_loss: 29.9269 - val_accuracy: 0.1207
Epoch 53/100
1259/1259 [==============================] - 12s 9ms/step - loss: 2.9472 - accuracy: 0.3358 - val_loss: 30.7540 - val_accuracy: 0.1115
Epoch 54/100
1259/1259 [==============================] - 12s 9ms/step - loss: 2.9289 - accuracy: 0.3397 - val_loss: 31.4299 - val_accuracy: 0.1147
Epoch 55/100
1259/1259 [==============================] - 12s 9ms/step - loss: 2.8597 - accuracy: 0.3513 - val_loss: 31.6839 - val_accuracy: 0.1195
Epoch 56/100
1259/1259 [==============================] - 12s 9ms/step - loss: 2.8454 - accuracy: 0.3586 - val_loss: 32.0642 - val_accuracy: 0.1192
Epoch 57/100
1259/1259 [==============================] - 11s 9ms/step - loss: 2.8153 - accuracy: 0.3668 - val_loss: 32.8230 - val_accuracy: 0.1099
Epoch 58/100
1259/1259 [==============================] - 11s 9ms/step - loss: 2.7687 - accuracy: 0.3722 - val_loss: 33.0815 - val_accuracy: 0.1052
Epoch 59/100
1259/1259 [==============================] - 12s 10ms/step - loss: 2.7297 - accuracy: 0.3837 - val_loss: 32.4366 - val_accuracy: 0.1071
Epoch 60/100
1259/1259 [==============================] - 12s 9ms/step - loss: 2.7077 - accuracy: 0.3884 - val_loss: 32.3653 - val_accuracy: 0.1182
Epoch 61/100
1259/1259 [==============================] - 12s 9ms/step - loss: 2.6574 - accuracy: 0.3970 - val_loss: 32.7342 - val_accuracy: 0.1153
Epoch 62/100
1259/1259 [==============================] - 11s 9ms/step - loss: 2.6173 - accuracy: 0.4048 - val_loss: 33.3435 - val_accuracy: 0.1106
Epoch 63/100
1259/1259 [==============================] - 11s 9ms/step - loss: 2.6145 - accuracy: 0.4094 - val_loss: 32.7989 - val_accuracy: 0.1119
Epoch 64/100
1259/1259 [==============================] - 12s 9ms/step - loss: 2.5724 - accuracy: 0.4115 - val_loss: 32.9530 - val_accuracy: 0.1080
Epoch 65/100
1259/1259 [==============================] - 11s 9ms/step - loss: 2.5247 - accuracy: 0.4273 - val_loss: 33.1921 - val_accuracy: 0.1020
Epoch 66/100
1259/1259 [==============================] - 11s 9ms/step - loss: 2.4935 - accuracy: 0.4287 - val_loss: 33.1907 - val_accuracy: 0.1131
Epoch 67/100
1259/1259 [==============================] - 12s 9ms/step - loss: 2.4738 - accuracy: 0.4344 - val_loss: 33.8599 - val_accuracy: 0.1099
Epoch 68/100
1259/1259 [==============================] - 12s 9ms/step - loss: 2.4751 - accuracy: 0.4383 - val_loss: 34.0607 - val_accuracy: 0.1065
Epoch 69/100
1259/1259 [==============================] - 12s 9ms/step - loss: 2.4106 - accuracy: 0.4451 - val_loss: 33.5866 - val_accuracy: 0.1144
Epoch 70/100
1259/1259 [==============================] - 12s 9ms/step - loss: 2.3821 - accuracy: 0.4553 - val_loss: 33.7491 - val_accuracy: 0.1163
Epoch 71/100
1259/1259 [==============================] - 12s 9ms/step - loss: 2.3647 - accuracy: 0.4584 - val_loss: 34.5417 - val_accuracy: 0.1084
Epoch 72/100
1259/1259 [==============================] - 12s 9ms/step - loss: 2.3422 - accuracy: 0.4631 - val_loss: 34.1619 - val_accuracy: 0.1109
Epoch 73/100
1259/1259 [==============================] - 12s 9ms/step - loss: 2.3076 - accuracy: 0.4702 - val_loss: 34.0050 - val_accuracy: 0.1084
Epoch 74/100
1259/1259 [==============================] - 12s 9ms/step - loss: 2.3071 - accuracy: 0.4740 - val_loss: 34.2133 - val_accuracy: 0.1147
Epoch 75/100
1259/1259 [==============================] - 12s 9ms/step - loss: 2.2503 - accuracy: 0.4759 - val_loss: 33.9111 - val_accuracy: 0.1058
Epoch 76/100
1259/1259 [==============================] - 12s 9ms/step - loss: 2.2167 - accuracy: 0.4925 - val_loss: 35.0675 - val_accuracy: 0.1125
Epoch 77/100
1259/1259 [==============================] - 12s 10ms/step - loss: 2.2121 - accuracy: 0.4908 - val_loss: 35.0796 - val_accuracy: 0.1071
Epoch 78/100
1259/1259 [==============================] - 11s 9ms/step - loss: 2.1943 - accuracy: 0.4936 - val_loss: 34.2224 - val_accuracy: 0.1084
Epoch 79/100
1259/1259 [==============================] - 13s 11ms/step - loss: 2.1579 - accuracy: 0.5009 - val_loss: 34.5191 - val_accuracy: 0.1077
Epoch 80/100
1259/1259 [==============================] - 13s 10ms/step - loss: 2.1489 - accuracy: 0.5049 - val_loss: 35.8632 - val_accuracy: 0.1090
Epoch 81/100
1259/1259 [==============================] - 12s 10ms/step - loss: 2.1266 - accuracy: 0.5052 - val_loss: 34.8432 - val_accuracy: 0.1074
Epoch 82/100
1259/1259 [==============================] - 12s 9ms/step - loss: 2.0830 - accuracy: 0.5130 - val_loss: 35.7247 - val_accuracy: 0.1033
Epoch 83/100
1259/1259 [==============================] - 13s 11ms/step - loss: 2.0682 - accuracy: 0.5209 - val_loss: 35.3208 - val_accuracy: 0.1065
Epoch 84/100
1259/1259 [==============================] - 12s 9ms/step - loss: 2.0702 - accuracy: 0.5256 - val_loss: 35.3447 - val_accuracy: 0.1061
Epoch 85/100
1259/1259 [==============================] - 11s 9ms/step - loss: 2.0445 - accuracy: 0.5174 - val_loss: 34.5911 - val_accuracy: 0.1077

How to know my neural network model's accuracy?

I have trained my neural network model. I want to know my model's accuracy from this training epoch. Do I have to get the average or just the last one?
here's my output
25/25 - 12s - loss: 1.3415 - accuracy: 0.3800 - val_loss: 1.0626 - val_accuracy: 0.5000
Epoch 2/20
25/25 - 12s - loss: 1.0254 - accuracy: 0.5000 - val_loss: 1.1129 - val_accuracy: 0.4000
Epoch 3/20
25/25 - 12s - loss: 0.9160 - accuracy: 0.6500 - val_loss: 0.8640 - val_accuracy: 0.7000
Epoch 4/20
25/25 - 12s - loss: 0.8237 - accuracy: 0.6300 - val_loss: 0.8494 - val_accuracy: 0.6000
Epoch 5/20
25/25 - 11s - loss: 0.7411 - accuracy: 0.7320 - val_loss: 0.7320 - val_accuracy: 0.8000
Epoch 6/20
25/25 - 12s - loss: 0.7625 - accuracy: 0.6600 - val_loss: 1.0259 - val_accuracy: 0.6000
Epoch 7/20
25/25 - 12s - loss: 0.8317 - accuracy: 0.6800 - val_loss: 0.5907 - val_accuracy: 0.7500
Epoch 8/20
25/25 - 12s - loss: 0.5557 - accuracy: 0.8100 - val_loss: 0.4630 - val_accuracy: 0.9000
Epoch 9/20
25/25 - 11s - loss: 0.6640 - accuracy: 0.7629 - val_loss: 0.3308 - val_accuracy: 0.9500
Epoch 10/20
25/25 - 12s - loss: 0.5674 - accuracy: 0.8200 - val_loss: 0.5039 - val_accuracy: 0.8000
Epoch 11/20
25/25 - 12s - loss: 0.5566 - accuracy: 0.8200 - val_loss: 0.2161 - val_accuracy: 0.9500
Epoch 12/20
25/25 - 16s - loss: 0.5190 - accuracy: 0.8400 - val_loss: 0.3210 - val_accuracy: 0.8500
Epoch 13/20
25/25 - 12s - loss: 0.5437 - accuracy: 0.7800 - val_loss: 0.7253 - val_accuracy: 0.6500
Epoch 14/20
25/25 - 12s - loss: 0.5035 - accuracy: 0.8300 - val_loss: 0.4291 - val_accuracy: 0.8500
Epoch 15/20
25/25 - 11s - loss: 0.4276 - accuracy: 0.8600 - val_loss: 0.2902 - val_accuracy: 0.8500
Epoch 16/20
25/25 - 11s - loss: 0.4913 - accuracy: 0.8000 - val_loss: 0.3027 - val_accuracy: 0.9000
Epoch 17/20
25/25 - 11s - loss: 0.2931 - accuracy: 0.9100 - val_loss: 0.2718 - val_accuracy: 0.9000
Epoch 18/20
25/25 - 11s - loss: 0.4554 - accuracy: 0.8500 - val_loss: 0.4412 - val_accuracy: 0.8000
Epoch 19/20
25/25 - 11s - loss: 0.3803 - accuracy: 0.8400 - val_loss: 0.2479 - val_accuracy: 1.0000
Epoch 20/20
25/25 - 12s - loss: 0.2692 - accuracy: 0.9200 - val_loss: 0.1805 - val_accuracy: 1.0000
<tensorflow.python.keras.callbacks.History at 0x7f64eec7ada0>```
Assuming you train your model like this:
history = model.fit(...)
you can access accuracy through history.history['acc']. Other useful metrics:
loss - loss
val_acc - validation accuracy
val_loss - validation loss
Last two are present only if you have validation set.

Loss doesn't decrease on Google App Engine, but it does on Jupyter Notebook

I am running the same lines of code w/ the same source files on both Google App Engine and Jupyter notebook:
model = load_model("test.h5")
model.compile(optimizer=Adam(lr=1e-2, decay=0), loss="binary_crossentropy", metrics=['accuracy'])
with open("data.json", 'r') as f:
data = json.load(f)
X = data[0]
y = data[1]
history = model.fit(X, y, validation_split=0, epochs=50, batch_size=10)
The output of GAE is as follows:
Epoch 1/50
2/2 [==============================] - 1s 316ms/step - loss: 8.0590 - acc: 0.5000
Epoch 2/50
2/2 [==============================] - 0s 50ms/step - loss: 8.0590 - acc: 0.5000
Epoch 3/50
2/2 [==============================] - 0s 40ms/step - loss: 8.0590 - acc: 0.5000
Epoch 4/50
2/2 [==============================] - 0s 37ms/step - loss: 8.0590 - acc: 0.5000
Epoch 5/50
2/2 [==============================] - 0s 34ms/step - loss: 8.0590 - acc: 0.5000
Epoch 6/50
2/2 [==============================] - 0s 40ms/step - loss: 8.0590 - acc: 0.5000
Epoch 7/50
2/2 [==============================] - 0s 44ms/step - loss: 8.0590 - acc: 0.5000
Epoch 8/50
2/2 [==============================] - 0s 40ms/step - loss: 8.0590 - acc: 0.5000
Epoch 9/50
2/2 [==============================] - 0s 31ms/step - loss: 8.0590 - acc: 0.5000
Epoch 10/50
2/2 [==============================] - 0s 40ms/step - loss: 8.0590 - acc: 0.5000
...
Epoch 50/50
2/2 [==============================] - 0s 45ms/step - loss: 8.0590 - acc: 0.5000
Whereas Jupyter Notebook is:
Epoch 1/50
2/2 [==============================] - 0s 164ms/step - loss: 952036.8125 - accuracy: 0.5000
Epoch 2/50
2/2 [==============================] - 0s 39ms/step - loss: 393826.0000 - accuracy: 0.5000
Epoch 3/50
2/2 [==============================] - 0s 38ms/step - loss: 99708.9375 - accuracy: 0.5000
Epoch 4/50
2/2 [==============================] - 0s 39ms/step - loss: 8989.7822 - accuracy: 0.5000
Epoch 5/50
2/2 [==============================] - 0s 39ms/step - loss: 8760.8223 - accuracy: 0.5000
Epoch 6/50
2/2 [==============================] - 0s 40ms/step - loss: 3034.8613 - accuracy: 0.5000
Epoch 7/50
2/2 [==============================] - 0s 40ms/step - loss: 167.2695 - accuracy: 0.0000e+00
Epoch 8/50
2/2 [==============================] - 0s 39ms/step - loss: 0.6670 - accuracy: 1.0000
Epoch 9/50
2/2 [==============================] - 0s 41ms/step - loss: 0.6619 - accuracy: 1.0000
Epoch 10/50
2/2 [==============================] - 0s 40ms/step - loss: 0.6551 - accuracy: 1.0000
...
Epoch 50/50
2/2 [==============================] - 0s 42ms/step - loss: 0.3493 - accuracy: 1.0000
Why might this be the case? I'm pretty lost at this point. Both machines have keras==2.2.4 and tensorflow==1.14.0 installed.

How to use weights from a specific epoc in Keras?

I am running 50 epocs for a Neural Network using Keras.
Here's my verbose response to it.
You can see that Epoc 47 (loss: 0.0065 - acc: 0.9980) has a much higher Accuracy and lesser cost that the last epoc and thus more suitable.
I am new to Keras, and would like to know if it is possible to use the instance of the model model in a particular epoc (in this case, epoc 47) instead of the final one?
EDIT: I do not want to run the training again with epoc = 47, that seems like a waste of time and resource.
Epoch 1/50
- 32s - loss: 0.4603 - acc: 0.8541
Epoch 2/50
- 31s - loss: 0.1140 - acc: 0.9655
Epoch 3/50
- 31s - loss: 0.0805 - acc: 0.9754
Epoch 4/50
- 38s - loss: 0.0663 - acc: 0.9792
Epoch 5/50
- 47s - loss: 0.0551 - acc: 0.9829
Epoch 6/50
- 39s - loss: 0.0487 - acc: 0.9846
Epoch 7/50
- 38s - loss: 0.0454 - acc: 0.9853
Epoch 8/50
- 37s - loss: 0.0399 - acc: 0.9873
Epoch 9/50
- 42s - loss: 0.0376 - acc: 0.9881
Epoch 10/50
- 42s - loss: 0.0332 - acc: 0.9896
Epoch 11/50
- 41s - loss: 0.0333 - acc: 0.9893
Epoch 12/50
- 39s - loss: 0.0286 - acc: 0.9911
Epoch 13/50
- 36s - loss: 0.0281 - acc: 0.9905
Epoch 14/50
- 35s - loss: 0.0258 - acc: 0.9918
Epoch 15/50
- 37s - loss: 0.0250 - acc: 0.9915
Epoch 16/50
- 35s - loss: 0.0236 - acc: 0.9920
Epoch 17/50
- 41s - loss: 0.0212 - acc: 0.9932
Epoch 18/50
- 33s - loss: 0.0219 - acc: 0.9928
Epoch 19/50
- 36s - loss: 0.0198 - acc: 0.9935
Epoch 20/50
- 37s - loss: 0.0172 - acc: 0.9941
Epoch 21/50
- 35s - loss: 0.0187 - acc: 0.9938
Epoch 22/50
- 38s - loss: 0.0182 - acc: 0.9939
Epoch 23/50
- 33s - loss: 0.0163 - acc: 0.9945
Epoch 24/50
- 35s - loss: 0.0148 - acc: 0.9949
Epoch 25/50
- 33s - loss: 0.0148 - acc: 0.9951
Epoch 26/50
- 37s - loss: 0.0143 - acc: 0.9951
Epoch 27/50
- 36s - loss: 0.0143 - acc: 0.9949
Epoch 28/50
- 34s - loss: 0.0129 - acc: 0.9958
Epoch 29/50
- 36s - loss: 0.0112 - acc: 0.9962
Epoch 30/50
- 34s - loss: 0.0112 - acc: 0.9961
Epoch 31/50
- 34s - loss: 0.0144 - acc: 0.9954
Epoch 32/50
- 40s - loss: 0.0132 - acc: 0.9952
Epoch 33/50
- 40s - loss: 0.0107 - acc: 0.9964
Epoch 34/50
- 43s - loss: 0.0118 - acc: 0.9958
Epoch 35/50
- 36s - loss: 0.0113 - acc: 0.9961
Epoch 36/50
- 34s - loss: 0.0101 - acc: 0.9963
Epoch 37/50
- 37s - loss: 0.0102 - acc: 0.9966
Epoch 38/50
- 37s - loss: 0.0098 - acc: 0.9965
Epoch 39/50
- 35s - loss: 0.0097 - acc: 0.9966
Epoch 40/50
- 35s - loss: 0.0102 - acc: 0.9963
Epoch 41/50
- 34s - loss: 0.0081 - acc: 0.9972
Epoch 42/50
- 36s - loss: 0.0075 - acc: 0.9976
Epoch 43/50
- 32s - loss: 0.0075 - acc: 0.9975
Epoch 44/50
- 32s - loss: 0.0088 - acc: 0.9971
Epoch 45/50
- 31s - loss: 0.0107 - acc: 0.9968
Epoch 46/50
- 32s - loss: 0.0089 - acc: 0.9970
Epoch 47/50
- 33s - loss: 0.0065 - acc: 0.9980
Epoch 48/50
- 30s - loss: 0.0076 - acc: 0.9975
Epoch 49/50
- 30s - loss: 0.0073 - acc: 0.9978
Epoch 50/50
- 30s - loss: 0.0090 - acc: 0.9971

Categories