diff --git a/src/cnn/backpropagation.c b/src/cnn/backpropagation.c
index 5950891..339482d 100644
--- a/src/cnn/backpropagation.c
+++ b/src/cnn/backpropagation.c
@@ -87,7 +87,7 @@ void backward_linearisation(Kernel_nn* ker, float*** input, float*** input_z, fl
         for (int k=0; k < dim_input; k++) {
             for (int l=0; l < dim_input; l++) {
                 for (int j=0; j < size_output; j++) {
-                    ker->d_weights[cpt][j] += input[i][k][l]*output[j]/(depth_input*dim_input*dim_input*size_output);
+                    ker->d_weights[cpt][j] += input[i][k][l]*output[j]/nb_elem;
                 }
                 cpt++;
             }
diff --git a/src/cnn/utils.c b/src/cnn/utils.c
index 30c3410..bf79217 100644
--- a/src/cnn/utils.c
+++ b/src/cnn/utils.c
@@ -124,12 +124,14 @@ Network* copy_network(Network* network) {
     for (int i=0; i < size-1; i++) {
         network_cp->kernel[i] = (Kernel*)nalloc(sizeof(Kernel));
 
         if (!network->kernel[i]->nn && !network->kernel[i]->cnn) { // Linearisation layer case
+            copyVar(kernel[i]->pooling);
             copyVar(kernel[i]->activation);
             copyVar(kernel[i]->linearisation); // 1
 
             network_cp->kernel[i]->cnn = NULL;
             network_cp->kernel[i]->nn = NULL;
         } else if (!network->kernel[i]->cnn) { // NN case
+            copyVar(kernel[i]->pooling);
             copyVar(kernel[i]->activation);
             copyVar(kernel[i]->linearisation); // 0
@@ -161,6 +163,7 @@ Network* copy_network(Network* network) {
             }
         } else { // CNN case
+            copyVar(kernel[i]->pooling);
             copyVar(kernel[i]->activation);
             copyVar(kernel[i]->linearisation); // 0
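
Note on the changes above: the backpropagation.c hunk replaces the loop-invariant divisor (depth_input*dim_input*dim_input*size_output), previously re-evaluated in the innermost loop, with a precomputed nb_elem, presumably introduced just above the lines shown in the hunk. The utils.c hunks make copy_network also copy the pooling field in all three layer cases; a plausible reading is that copyVar(x) is a macro expanding to network_cp->x = network->x;, so without the added line the copy's pooling type was left uninitialized. Below is a minimal, self-contained C sketch of the hoisting pattern from the first hunk. Only the d_weights field and the loop structure come from the diff; accumulate_d_weights, the reduced Kernel_nn struct, and the harness in main are hypothetical names and values used purely for illustration.

/* Sketch of the nb_elem hoisting in backward_linearisation, under the
 * assumptions stated above. */
#include <stdio.h>

typedef struct {
    float** d_weights;   // d_weights[input_index][output_index], as in the diff
} Kernel_nn;

static void accumulate_d_weights(Kernel_nn* ker, float*** input, float* output,
                                 int depth_input, int dim_input, int size_output) {
    // The divisor does not depend on i, k, l or j, so compute it once
    // instead of re-evaluating the product in the innermost loop.
    int nb_elem = depth_input * dim_input * dim_input * size_output;

    int cpt = 0;
    for (int i = 0; i < depth_input; i++) {
        for (int k = 0; k < dim_input; k++) {
            for (int l = 0; l < dim_input; l++) {
                for (int j = 0; j < size_output; j++) {
                    // Same averaged gradient as before, cheaper divisor.
                    ker->d_weights[cpt][j] += input[i][k][l] * output[j] / nb_elem;
                }
                cpt++;
            }
        }
    }
}

int main(void) {
    // Tiny hand-built tensors, purely for illustration.
    float plane[2][2] = {{1.f, 2.f}, {3.f, 4.f}};
    float* rows[2] = {plane[0], plane[1]};
    float** channel = rows;
    float*** input = &channel;            // depth_input = 1, dim_input = 2

    float output[3] = {0.5f, 1.f, 1.5f};  // size_output = 3

    float storage[4][3] = {{0}};          // 1*2*2 input positions x 3 outputs
    float* dw[4] = {storage[0], storage[1], storage[2], storage[3]};
    Kernel_nn ker = { dw };

    accumulate_d_weights(&ker, input, output, 1, 2, 3);

    printf("d_weights[0][0] = %f\n", ker.d_weights[0][0]); // 1.0*0.5/12
    return 0;
}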