diff --git a/src/cnn/backpropagation.c b/src/cnn/backpropagation.c
index f4ba882..199635d 100644
--- a/src/cnn/backpropagation.c
+++ b/src/cnn/backpropagation.c
@@ -86,7 +86,7 @@ void backward_linearisation(Kernel_nn* ker, float*** input, float*** input_z, fl
         for (int k=0; k < dim_input; k++) {
             for (int l=0; l < dim_input; l++) {
                 for (int j=0; j < size_output; j++) {
-                    ker->d_weights[cpt][j] += input[i][k][l]*output[j];
+                    ker->d_weights[cpt][j] += input[i][k][l]*output[j]/(depth_input*dim_input*dim_input*size_output);
                 }
                 cpt++;
             }
diff --git a/src/cnn/include/update.h b/src/cnn/include/update.h
index 4b50be9..ce6d635 100644
--- a/src/cnn/include/update.h
+++ b/src/cnn/include/update.h
@@ -3,6 +3,8 @@
 #ifndef DEF_UPDATE_H
 #define DEF_UPDATE_H
 
+#define MAX_RESEAU 100000000
+
 /*
 * Met à jours les poids à partir de données obtenus après plusieurs backpropagations
 * Puis met à 0 tous les d_weights
diff --git a/src/cnn/update.c b/src/cnn/update.c
index c2c61aa..439dea0 100644
--- a/src/cnn/update.c
+++ b/src/cnn/update.c
@@ -26,6 +26,11 @@ void update_weights(Network* network, Network* d_network) {
                 for (int d=0; d < [...]; d++) {
                     cnn->w[a][b][c][d] -= network->learning_rate * d_cnn->d_w[a][b][c][d];
                     d_cnn->d_w[a][b][c][d] = 0;
+
+                    if (cnn->w[a][b][c][d] > MAX_RESEAU)
+                        cnn->w[a][b][c][d] = MAX_RESEAU;
+                    else if (cnn->w[a][b][c][d] < -MAX_RESEAU)
+                        cnn->w[a][b][c][d] = -MAX_RESEAU;
                 }
             }
         }
@@ -48,6 +53,11 @@ void update_weights(Network* network, Network* d_network) {
             for (int b=0; b < [...]; b++) {
                 nn->weights[a][b] -= network->learning_rate * d_nn->d_weights[a][b];
                 d_nn->d_weights[a][b] = 0;
+
+                if (nn->weights[a][b] > MAX_RESEAU)
+                    nn->weights[a][b] = MAX_RESEAU;
+                else if (nn->weights[a][b] < -MAX_RESEAU)
+                    nn->weights[a][b] = -MAX_RESEAU;
             }
         }
     }
@@ -77,6 +87,11 @@ void update_bias(Network* network, Network* d_network) {
             for (int c=0; c < [...]; c++) {
                 cnn->bias[a][b][c] -= network->learning_rate * d_cnn->d_bias[a][b][c];
                 d_cnn->d_bias[a][b][c] = 0;
+
+                if (cnn->bias[a][b][c] > MAX_RESEAU)
+                    cnn->bias[a][b][c] = MAX_RESEAU;
+                else if (cnn->bias[a][b][c] < -MAX_RESEAU)
+                    cnn->bias[a][b][c] = -MAX_RESEAU;
             }
         }
     }
@@ -86,6 +101,11 @@ void update_bias(Network* network, Network* d_network) {
         for (int a=0; a < [...]; a++) {
             nn->bias[a] -= network->learning_rate * d_nn->d_bias[a];
             d_nn->d_bias[a] = 0;
+
+            if (nn->bias[a] > MAX_RESEAU)
+                nn->bias[a] = MAX_RESEAU;
+            else if (nn->bias[a] < -MAX_RESEAU)
+                nn->bias[a] = -MAX_RESEAU;
         }
     } else { // Pooling
         (void)0; // Ne rien faire pour la couche pooling