python - Convolutional neural net testing accuracy stays constant after each epoch
I see improving training accuracy after each iteration, but the testing accuracy stays fixed at 0.7545 after each epoch. I understand that accuracy hits a ceiling at some point, but I don't understand why I don't at least see slight variations (up or down) in the testing accuracy. I'm training on 800 images total.
Things I've tried:
- Switching to the SGD optimizer.
- Starting with a learning rate of 0.01 and reducing it down to 0.00000001 (see the sketch below).
- Removing the regularization layers.
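For reference, a minimal sketch of the kind of optimizer/learning-rate sweep described above. It is illustrative only: build_model() is a hypothetical helper that rebuilds the untrained network so each run starts from fresh weights, and the loop itself is not part of the original code.

    from keras.optimizers import SGD

    # Sweep the learning rates mentioned above, from 0.01 down to 1e-8.
    for lr in [0.01, 0.001, 1e-4, 1e-5, 1e-6, 1e-7, 1e-8]:
        model = build_model()  # hypothetical helper: returns a fresh, uncompiled model
        model.compile(loss='categorical_crossentropy',
                      optimizer=SGD(lr=lr),
                      metrics=['accuracy'])
        history = model.fit(x_train, y_train_cat, batch_size=32, epochs=10,
                            verbose=0, validation_data=(x_test, y_test_cat))
        # the key is 'val_acc' in older Keras and 'val_accuracy' in newer versions
        print(lr, history.history['val_acc'][-1])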
    from keras.models import Sequential
    from keras.layers import Conv2D, MaxPooling2D, Dropout, Flatten, Dense
    from keras.optimizers import SGD
    from keras.utils import np_utils

    # params
    dropout_prob = 0.2
    activation_function = 'relu'
    loss_function = 'categorical_crossentropy'
    verbose_level = 1
    convolutional_batches = 32
    convolutional_epochs = 10
    inp_shape = x_train.shape[1:]
    num_classes = 3
    opt = SGD(lr=0.00001)
    opt2 = 'adam'

    def train_convolutional_neural():
        # one-hot encode the integer labels for the three classes
        y_train_cat = np_utils.to_categorical(y_train, 3)
        y_test_cat = np_utils.to_categorical(y_test, 3)

        model = Sequential()
        model.add(Conv2D(filters=16, kernel_size=(3, 3), input_shape=inp_shape))
        model.add(Conv2D(filters=32, kernel_size=(3, 3)))
        model.add(MaxPooling2D(pool_size=(2, 2)))
        model.add(Dropout(rate=dropout_prob))
        model.add(Flatten())
        #model.add(Dense(64, activation=activation_function))
        model.add(Dropout(rate=dropout_prob))
        model.add(Dense(32, activation=activation_function))
        model.add(Dense(num_classes, activation='softmax'))
        model.summary()

        model.compile(loss=loss_function, optimizer=opt, metrics=['accuracy'])
        history = model.fit(x_train, y_train_cat,
                            batch_size=convolutional_batches,
                            epochs=convolutional_epochs,
                            verbose=verbose_level,
                            validation_data=(x_test, y_test_cat))
        model.save('./models/convolutional_model.h5')
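As a side note, the per-epoch testing accuracy quoted above comes from the History object returned by model.fit. A minimal way to print it out, assuming train_convolutional_neural() is changed to return that history object (the dictionary key depends on the Keras version), would be something like:

    history = train_convolutional_neural()
    # older Keras stores validation accuracy under 'val_acc', newer tf.keras under 'val_accuracy'
    val_key = 'val_acc' if 'val_acc' in history.history else 'val_accuracy'
    for epoch, acc in enumerate(history.history[val_key], start=1):
        print('epoch %d: val accuracy %.4f' % (epoch, acc))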