From c336615d82fa18e28e5b9d7e66941c6536a648e6 Mon Sep 17 00:00:00 2001 From: Antonello Lobianco Date: Wed, 6 Sep 2023 11:59:38 +0200 Subject: [PATCH] NN bugfixes: multiple training of NeuralNetworkEstimator(), correct number of info(test)["loss_per_epoch"] 1. When `NeuralNetworkEstimator()` (i.e. with no layers specified) is trained multiple times, the default layer network was not saved in the hyperparameters, so the second training was raising an error 2. `info(test)["loss_per_epoch"]` and `info(test)["pars_per_epoch"]` were reporting the values before the first fitting, so they were nepochs+1 in length. This was a problem in particular for multiple trainings of 1 epoch --- src/Nn/Nn.jl | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/src/Nn/Nn.jl b/src/Nn/Nn.jl index ba3d0338..56d6fc9a 100644 --- a/src/Nn/Nn.jl +++ b/src/Nn/Nn.jl @@ -653,10 +653,10 @@ function train!(nn::NN,x,y; epochs=100, batch_size=min(size(x,1),32), sequential @showprogress showTime "Training the Neural Network..." for t in 1:epochs batches = batch(n,batch_size,sequential=sequential,rng=rng) n_batches = length(batches) - if t == 1 - if (verbosity >= STD) push!(ϵ_epochs,ϵ_epoch); end - if (verbosity > STD) push!(θ_epochs,θ_epoch); end - end + #if t == 1 # removed otherwise the array of losses/pars would be nepochs+1 + # if (verbosity >= STD) push!(ϵ_epochs,ϵ_epoch); end + # if (verbosity > STD) push!(θ_epochs,θ_epoch); end + #end for (i,batch) in enumerate(batches) xbatch = x[batch, :] ybatch = y[batch, :] @@ -1038,6 +1038,7 @@ function fit!(m::NeuralNetworkEstimator,X,Y) l4 = VectorFunctionLayer(nDy,f=softmax) layers = [l1,l2,l3,l4] end + m.hpar.layers = layers end # Check that the first layer has the dimensions of X and the last layer has the output dimensions of Y nn_isize_tuple = size(layers[1])[1]