From 50af676027bdcd0a562c2827760b262028e87eca Mon Sep 17 00:00:00 2001
From: julienChemillier
Date: Tue, 15 Nov 2022 18:15:18 +0100
Subject: [PATCH] Learning_rate is a (NON NULL) float

---
 src/cnn/creation.c         |  4 ++--
 src/cnn/include/creation.h |  4 ++--
 src/cnn/include/struct.h   |  2 +-
 src/cnn/train.c            |  2 +-
 src/cnn/update.c           | 10 +++++-----
 5 files changed, 11 insertions(+), 11 deletions(-)

diff --git a/src/cnn/creation.c b/src/cnn/creation.c
index 26b258c..d068e5f 100644
--- a/src/cnn/creation.c
+++ b/src/cnn/creation.c
@@ -6,7 +6,7 @@
 #include "include/creation.h"
 
-Network* create_network(int max_size, int learning_rate, int dropout, int initialisation, int input_dim, int input_depth) {
+Network* create_network(int max_size, float learning_rate, int dropout, int initialisation, int input_dim, int input_depth) {
     if (dropout < 0 || dropout > 100) {
         printf("Erreur, la probabilité de dropout n'est pas respecté, elle doit être comprise entre 0 et 100\n");
     }
@@ -33,7 +33,7 @@ Network* create_network(int max_size, int learning_rate, int dropout, int initia
     return network;
 }
 
-Network* create_network_lenet5(int learning_rate, int dropout, int activation, int initialisation, int input_dim, int input_depth) {
+Network* create_network_lenet5(float learning_rate, int dropout, int activation, int initialisation, int input_dim, int input_depth) {
     Network* network = create_network(8, learning_rate, dropout, initialisation, input_dim, input_depth);
     network->kernel[0]->activation = activation;
     network->kernel[0]->linearisation = 0;
diff --git a/src/cnn/include/creation.h b/src/cnn/include/creation.h
index 081a8b2..31d69bd 100644
--- a/src/cnn/include/creation.h
+++ b/src/cnn/include/creation.h
@@ -7,12 +7,12 @@
 /*
 * Créé un réseau qui peut contenir max_size couche (dont celle d'input et d'output)
 */
-Network* create_network(int max_size, int learning_rate, int dropout, int initialisation, int input_dim, int input_depth);
+Network* create_network(int max_size, float learning_rate, int dropout, int initialisation, int input_dim, int input_depth);
 
 /*
 * Renvoie un réseau suivant l'architecture LeNet5
 */
-Network* create_network_lenet5(int learning_rate, int dropout, int activation, int initialisation, int input_dim, int input_depth);
+Network* create_network_lenet5(float learning_rate, int dropout, int activation, int initialisation, int input_dim, int input_depth);
 
 /*
 * Créé et alloue de la mémoire à une couche de type input cube
diff --git a/src/cnn/include/struct.h b/src/cnn/include/struct.h
index 267294e..3dfd529 100644
--- a/src/cnn/include/struct.h
+++ b/src/cnn/include/struct.h
@@ -30,7 +30,7 @@ typedef struct Kernel {
 typedef struct Network{
     int dropout; // Contient la probabilité d'abandon d'un neurone dans [0, 100] (entiers)
-    int learning_rate; // Taux d'apprentissage du réseau
+    float learning_rate; // Taux d'apprentissage du réseau
     int initialisation; // Contient le type d'initialisation
     int max_size; // Taille du tableau contenant le réseau
     int size; // Taille actuelle du réseau (size ≤ max_size)
diff --git a/src/cnn/train.c b/src/cnn/train.c
index 00067ed..7084191 100644
--- a/src/cnn/train.c
+++ b/src/cnn/train.c
@@ -105,7 +105,7 @@ void train(int dataset_type, char* images_file, char* labels_file, char* data_di
     }
 
     // Initialisation du réseau
-    Network* network = create_network_lenet5(0, 0, TANH, GLOROT, input_dim, input_depth);
+    Network* network = create_network_lenet5(0.01, 0, TANH, GLOROT, input_dim, input_depth);
 
 #ifdef USE_MULTITHREADING
     int nb_remaining_images; // Nombre d'images restantes à lancer pour une série de threads
diff --git a/src/cnn/update.c b/src/cnn/update.c
index 419bdb6..a251506 100644
--- a/src/cnn/update.c
+++ b/src/cnn/update.c
@@ -20,7 +20,7 @@ void update_weights(Network* network) {
 for (int b=0; b
-                        cnn->w[a][b][c][d] += network->learning_rate * cnn->d_w[a][b][c][d];
+                        cnn->w[a][b][c][d] -= network->learning_rate * cnn->d_w[a][b][c][d];
                         cnn->d_w[a][b][c][d] = 0;
                     }
                 }
@@ -31,7 +31,7 @@ void update_weights(Network* network) {
         Kernel_nn* nn = k_i->nn;
 for (int a=0; a
-                nn->weights[a][b] += network->learning_rate * nn->d_weights[a][b];
+                nn->weights[a][b] -= network->learning_rate * nn->d_weights[a][b];
                 nn->d_weights[a][b] = 0;
             }
         }
@@ -40,7 +40,7 @@ void update_weights(Network* network) {
         int input_size = input_width*input_width*input_depth;
 for (int a=0; a
-                nn->weights[a][b] += network->learning_rate * nn->d_weights[a][b];
+                nn->weights[a][b] -= network->learning_rate * nn->d_weights[a][b];
                 nn->d_weights[a][b] = 0;
             }
         }
@@ -65,7 +65,7 @@ void update_bias(Network* network) {
 for (int a=0; a
-                    cnn->bias[a][b][c] += network->learning_rate * cnn->d_bias[a][b][c];
+                    cnn->bias[a][b][c] -= network->learning_rate * cnn->d_bias[a][b][c];
                     cnn->d_bias[a][b][c] = 0;
                 }
             }
@@ -73,7 +73,7 @@ void update_bias(Network* network) {
     } else if (k_i->nn) { // Full connection
         Kernel_nn* nn = k_i->nn;
 for (int a=0; a
-            nn->bias[a] += network->learning_rate * nn->d_bias[a];
+            nn->bias[a] -= network->learning_rate * nn->d_bias[a];
             nn->d_bias[a] = 0;
         }
     } else { // Pooling