From 89402b9ee6d5a5a340b9de1381abb5f6ff427a55 Mon Sep 17 00:00:00 2001
From: julienChemillier
Date: Wed, 8 Mar 2023 20:48:34 +0100
Subject: [PATCH] Replace numbers by defines

---
 src/cnn/backpropagation.c         |  2 +-
 src/cnn/cnn.c                     | 14 +++++++-----
 src/cnn/creation.c                | 36 +++++++++++++++----------------
 src/cnn/free.c                    |  6 +++---
 src/cnn/include/backpropagation.h |  2 +-
 src/cnn/include/creation.h        |  4 ++--
 src/cnn/include/free.h            |  4 ++--
 src/cnn/include/struct.h          |  7 ++++++
 src/cnn/neuron_io.c               | 10 ++++++---
 src/cnn/print.c                   |  2 +-
 src/cnn/update.c                  |  4 ++--
 test/cnn_structure.c              |  2 +-
 12 files changed, 54 insertions(+), 39 deletions(-)

diff --git a/src/cnn/backpropagation.c b/src/cnn/backpropagation.c
index 0987f86..4a2045d 100644
--- a/src/cnn/backpropagation.c
+++ b/src/cnn/backpropagation.c
@@ -27,7 +27,7 @@ void softmax_backward_cross_entropy(float* input, float* output, int size) {
     }
 }
 
-void backward_2d_pooling(float*** input, float*** output, int input_width, int output_width, int depth) {
+void backward_average_pooling(float*** input, float*** output, int input_width, int output_width, int depth) {
     /* Input et output ont la même profondeur (depth) */
 
     //int size = output_width - input_width +1;
diff --git a/src/cnn/cnn.c b/src/cnn/cnn.c
index b6b7f99..a9cdd10 100644
--- a/src/cnn/cnn.c
+++ b/src/cnn/cnn.c
@@ -115,7 +115,7 @@ void forward_propagation(Network* network) {
             apply_function_to_matrix(activation, output, output_depth, output_width);
         }
         else if (k_i->nn) { // Full connection
-            if (k_i->linearisation == 0) { // Vecteur -> Vecteur
+            if (k_i->linearisation == DOESNT_LINEARISE) { // Vecteur -> Vecteur
                 make_dense(k_i->nn, input[0][0], output[0][0], input_width, output_width);
             } else { // Matrice -> Vecteur
                 make_dense_linearized(k_i->nn, input, output[0][0], input_depth, input_width, output_width);
@@ -128,9 +128,9 @@ void forward_propagation(Network* network) {
             printf_error("Le réseau ne peut pas finir par un pooling layer\n");
             return;
         } else { // Pooling sur une matrice
-            if (pooling == 1) {
+            if (pooling == AVG_POOLING) {
                 make_average_pooling(input, output, input_width/output_width, output_depth, output_width);
-            } else if (pooling == 2) {
+            } else if (pooling == MAX_POOLING) {
                 make_max_pooling(input, output, input_width/output_width, output_depth, output_width);
             } else {
                 printf_error("Impossible de reconnaître le type de couche de pooling: ");
@@ -178,13 +178,17 @@ void backward_propagation(Network* network, int wanted_number) {
             backward_convolution(k_i->cnn, input, input_z, output, input_depth, input_width, output_depth, output_width, d_f, i==0);
         } else if (k_i->nn) { // Full connection
             ptr d_f = get_activation_function(-activation);
-            if (k_i->linearisation == 0) { // Vecteur -> Vecteur
+            if (k_i->linearisation == DOESNT_LINEARISE) { // Vecteur -> Vecteur
                 backward_dense(k_i->nn, input[0][0], input_z[0][0], output[0][0], input_width, output_width, d_f, i==0);
             } else { // Matrice -> vecteur
                 backward_linearisation(k_i->nn, input, input_z, output[0][0], input_depth, input_width, output_width, d_f);
             }
         } else { // Pooling
-            backward_2d_pooling(input, output, input_width, output_width, input_depth); // Depth pour input et output a la même valeur
+            if (k_i->pooling == AVG_POOLING) {
+                backward_average_pooling(input, output, input_width, output_width, input_depth); // Depth pour input et output a la même valeur
+            } else {
+                printf_error("La backpropagation de ce pooling n'est pas encore implémentée\n");
+            }
         }
     }
 }
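The cnn.c hunks above are where the renaming pays off: forward_propagation() and backward_propagation() now dispatch on named pooling constants instead of the bare literals 1 and 2, and the backward pass explicitly rejects anything but average pooling, since that is the only pooling gradient implemented so far. A minimal, self-contained sketch of that dispatch pattern, with stub functions and simplified signatures standing in for the project's real ones:

#include <stdio.h>

#define NO_POOLING 0
#define AVG_POOLING 1
#define MAX_POOLING 2

static void make_average_pooling_stub(void) { printf("average pooling\n"); }
static void make_max_pooling_stub(void)     { printf("max pooling\n"); }

/* Same shape as the dispatch in forward_propagation(): compare the
 * layer's pooling tag against the named constants, and fail loudly
 * on anything unrecognised. */
static void forward_pooling(int pooling) {
    if (pooling == AVG_POOLING) {
        make_average_pooling_stub();
    } else if (pooling == MAX_POOLING) {
        make_max_pooling_stub();
    } else {
        fprintf(stderr, "unknown pooling type: %d\n", pooling);
    }
}

int main(void) {
    forward_pooling(AVG_POOLING); /* prints "average pooling" */
    forward_pooling(MAX_POOLING); /* prints "max pooling" */
    return 0;
}

Keeping the final error branch means a corrupted or not-yet-supported pooling tag fails visibly instead of silently skipping the layer.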
diff --git a/src/cnn/creation.c b/src/cnn/creation.c
index 2ff2559..12f32f4 100644
--- a/src/cnn/creation.c
+++ b/src/cnn/creation.c
@@ -11,7 +11,7 @@
 Network* create_network(int max_size, float learning_rate, int dropout, int initialisation, int input_dim, int input_depth) {
     if (dropout < 0 || dropout > 100) {
-        printf_error("la probabilité de dropout n'est pas respecté, elle doit être comprise entre 0 et 100\n");
+        printf_error("La probabilité de dropout n'est pas respecté, elle doit être comprise entre 0 et 100\n");
     }
     Network* network = (Network*)nalloc(1, sizeof(Network));
     network->learning_rate = learning_rate;
@@ -27,7 +27,7 @@
     for (int i=0; i < max_size-1; i++) {
         network->kernel[i] = (Kernel*)nalloc(1, sizeof(Kernel));
     }
-    network->kernel[0]->linearisation = 0;
+    network->kernel[0]->linearisation = DOESNT_LINEARISE;
     network->width[0] = input_dim;
     network->depth[0] = input_depth;
     network->kernel[0]->nn = NULL;
@@ -41,9 +41,9 @@ Network* create_network_lenet5(float learning_rate, int dropout, int activation,
     Network* network = create_network(8, learning_rate, dropout, initialisation, input_dim, input_depth);
     network->kernel[0]->activation = activation;
     add_convolution(network, 6, 28, activation);
-    add_2d_average_pooling(network, 14);
+    add_average_pooling(network, 14);
     add_convolution(network, 16, 10, activation);
-    add_2d_average_pooling(network, 5);
+    add_average_pooling(network, 5);
     add_dense_linearisation(network, 120, activation);
     add_dense(network, 84, activation);
     add_dense(network, 10, SOFTMAX);
@@ -98,7 +98,7 @@ void create_a_line_input_z_layer(Network* network, int pos, int dim) {
     network->depth[pos] = 1;
 }
 
-void add_2d_average_pooling(Network* network, int dim_output) {
+void add_average_pooling(Network* network, int dim_output) {
     int n = network->size;
     int k_pos = n-1;
     int dim_input = network->width[k_pos];
@@ -113,14 +113,14 @@
     network->kernel[k_pos]->cnn = NULL;
     network->kernel[k_pos]->nn = NULL;
     network->kernel[k_pos]->activation = IDENTITY; // Ne contient pas de fonction d'activation
-    network->kernel[k_pos]->linearisation = 0;
-    network->kernel[k_pos]->pooling = 1;
+    network->kernel[k_pos]->linearisation = DOESNT_LINEARISE;
+    network->kernel[k_pos]->pooling = AVG_POOLING;
     create_a_cube_input_layer(network, n, network->depth[n-1], network->width[n-1]/2);
     create_a_cube_input_z_layer(network, n, network->depth[n-1], network->width[n-1]/2); // Will it be used ?
     network->size++;
 }
 
-void add_2d_max_pooling(Network* network, int dim_output) {
+void add_max_pooling(Network* network, int dim_output) {
     int n = network->size;
     int k_pos = n-1;
     int dim_input = network->width[k_pos];
@@ -135,8 +135,8 @@
     network->kernel[k_pos]->cnn = NULL;
     network->kernel[k_pos]->nn = NULL;
     network->kernel[k_pos]->activation = IDENTITY; // Ne contient pas de fonction d'activation
-    network->kernel[k_pos]->linearisation = 0;
-    network->kernel[k_pos]->pooling = 2;
+    network->kernel[k_pos]->linearisation = DOESNT_LINEARISE;
+    network->kernel[k_pos]->pooling = MAX_POOLING;
     create_a_cube_input_layer(network, n, network->depth[n-1], network->width[n-1]/2);
     create_a_cube_input_z_layer(network, n, network->depth[n-1], network->width[n-1]/2); // Will it be used ?
     network->size++;
@@ -156,8 +156,8 @@ void add_convolution(Network* network, int depth_output, int dim_output, int act
     int kernel_size = dim_input - dim_output +1;
     network->kernel[k_pos]->nn = NULL;
     network->kernel[k_pos]->activation = activation;
-    network->kernel[k_pos]->linearisation = 0;
-    network->kernel[k_pos]->pooling = 0;
+    network->kernel[k_pos]->linearisation = DOESNT_LINEARISE;
+    network->kernel[k_pos]->pooling = NO_POOLING;
 
     network->kernel[k_pos]->cnn = (Kernel_cnn*)nalloc(1, sizeof(Kernel_cnn));
     Kernel_cnn* cnn = network->kernel[k_pos]->cnn;
@@ -215,8 +215,8 @@ void add_dense(Network* network, int size_output, int activation) {
     network->kernel[k_pos]->nn = (Kernel_nn*)nalloc(1, sizeof(Kernel_nn));
     Kernel_nn* nn = network->kernel[k_pos]->nn;
     network->kernel[k_pos]->activation = activation;
-    network->kernel[k_pos]->linearisation = 0;
-    network->kernel[k_pos]->pooling = 0;
+    network->kernel[k_pos]->linearisation = DOESNT_LINEARISE;
+    network->kernel[k_pos]->pooling = NO_POOLING;
     nn->size_input = size_input;
     nn->size_output = size_output;
     nn->bias = (float*)nalloc(size_output, sizeof(float));
@@ -235,7 +235,7 @@ void add_dense(Network* network, int size_output, int activation) {
         }
     }
 
-    initialisation_1d_matrix(network->initialisation, nn->bias, size_output, size_input);
+    initialisation_1d_matrix(network->initialisation, nn->bias, size_output, size_input, size_output);
     initialisation_2d_matrix(network->initialisation, nn->weights, size_input, size_output, size_input, size_output);
     create_a_line_input_layer(network, n, size_output);
     create_a_line_input_z_layer(network, n, size_output);
@@ -256,8 +256,8 @@ void add_dense_linearisation(Network* network, int size_output, int activation)
     network->kernel[k_pos]->nn = (Kernel_nn*)nalloc(1, sizeof(Kernel_nn));
     Kernel_nn* nn = network->kernel[k_pos]->nn;
     network->kernel[k_pos]->activation = activation;
-    network->kernel[k_pos]->linearisation = 1;
-    network->kernel[k_pos]->pooling = 0;
+    network->kernel[k_pos]->linearisation = DO_LINEARISE;
+    network->kernel[k_pos]->pooling = NO_POOLING;
 
     nn->size_input = size_input;
     nn->size_output = size_output;
@@ -275,7 +275,7 @@ void add_dense_linearisation(Network* network, int size_output, int activation)
             nn->d_weights[i][j] = 0.;
         }
     }
-    initialisation_1d_matrix(network->initialisation, nn->bias, size_output, size_input);
+    initialisation_1d_matrix(network->initialisation, nn->bias, size_output, size_input, size_output);
     initialisation_2d_matrix(network->initialisation, nn->weights, size_input, size_output, size_input, size_output);
     create_a_line_input_layer(network, n, size_output);
     create_a_line_input_z_layer(network, n, size_output);
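Every constructor in creation.c now stamps its kernel with named flags rather than raw numbers. A cut-down sketch of that stamping, assuming a MiniKernel reduced to the two flag fields (the real Kernel also carries the cnn/nn pointers and the activation):

#include <stdio.h>

#define NO_POOLING 0
#define AVG_POOLING 1
#define MAX_POOLING 2
#define DOESNT_LINEARISE 0
#define DO_LINEARISE 1

/* Illustrative stand-in for Kernel, reduced to the two flags. */
typedef struct {
    int pooling;       /* NO_POOLING, AVG_POOLING or MAX_POOLING */
    int linearisation; /* DOESNT_LINEARISE or DO_LINEARISE */
} MiniKernel;

/* Mirrors what add_average_pooling() now does to its kernel. */
static void tag_as_average_pooling(MiniKernel* k) {
    k->linearisation = DOESNT_LINEARISE; /* was: = 0 */
    k->pooling = AVG_POOLING;            /* was: = 1 */
}

int main(void) {
    MiniKernel k;
    tag_as_average_pooling(&k);
    printf("pooling=%d linearisation=%d\n", k.pooling, k.linearisation);
    return 0;
}

The reader of add_max_pooling() no longer has to know that 2 means max pooling; the constant carries the meaning.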
diff --git a/src/cnn/free.c b/src/cnn/free.c
index 7fd9b17..b9a4943 100644
--- a/src/cnn/free.c
+++ b/src/cnn/free.c
@@ -27,7 +27,7 @@ void free_a_line_input_layer(Network* network, int pos) {
     gree(network->input_z[pos]);
 }
 
-void free_2d_pooling(Network* network, int pos) {
+void free_pooling(Network* network, int pos) {
     free_a_cube_input_layer(network, pos+1, network->depth[pos+1], network->width[pos+1]);
 }
 
@@ -120,13 +120,13 @@ void free_network(Network* network) {
         if (network->kernel[i]->cnn != NULL) { // Convolution
             free_convolution(network, i);
         } else if (network->kernel[i]->nn != NULL) {
-            if (network->kernel[i]->linearisation == 0) { // Dense non linearized
+            if (network->kernel[i]->linearisation == DOESNT_LINEARISE) { // Dense non linearized
                 free_dense(network, i);
             } else { // Dense linearisation
                 free_dense_linearisation(network, i);
             }
         } else { // Pooling
-            free_2d_pooling(network, i);
+            free_pooling(network, i);
         }
     }
     free_network_creation(network);
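free_network() stores no explicit layer-type tag: it infers the kind from which sub-structure pointer is non-NULL and only then consults the linearisation flag to pick the dense variant. A sketch of that decision tree, where KernelView and LayerKind are illustrative stand-ins rather than repo types:

#include <stdio.h>

#define DOESNT_LINEARISE 0
#define DO_LINEARISE 1

typedef struct {
    void* cnn;         /* non-NULL for convolution layers */
    void* nn;          /* non-NULL for dense layers */
    int linearisation; /* DOESNT_LINEARISE or DO_LINEARISE */
} KernelView;

typedef enum {
    KIND_CONVOLUTION,
    KIND_DENSE,
    KIND_DENSE_LINEARISATION,
    KIND_POOLING
} LayerKind;

/* Same decision tree as free_network(): pointers first, flag second. */
static LayerKind classify(const KernelView* k) {
    if (k->cnn != NULL)
        return KIND_CONVOLUTION;
    if (k->nn != NULL)
        return (k->linearisation == DOESNT_LINEARISE)
                   ? KIND_DENSE
                   : KIND_DENSE_LINEARISATION;
    return KIND_POOLING; /* neither cnn nor nn set: pooling layer */
}

int main(void) {
    KernelView pooling_layer = { NULL, NULL, DOESNT_LINEARISE };
    printf("kind=%d\n", classify(&pooling_layer)); /* kind=3, KIND_POOLING */
    return 0;
}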
diff --git a/src/cnn/include/backpropagation.h b/src/cnn/include/backpropagation.h
index 3dbd6eb..15a6a85 100644
--- a/src/cnn/include/backpropagation.h
+++ b/src/cnn/include/backpropagation.h
@@ -29,7 +29,7 @@ void softmax_backward_cross_entropy(float* input, float* output, int size);
 * Transfert les informations d'erreur à travers une couche d'average pooling
 * en considérant cross_entropy comme fonction d'erreur
 */
-void backward_2d_pooling(float*** input, float*** output, int input_width, int output_width, int depth);
+void backward_average_pooling(float*** input, float*** output, int input_width, int output_width, int depth);
 
 /*
 * Transfert les informations d'erreur à travers une couche fully connected
diff --git a/src/cnn/include/creation.h b/src/cnn/include/creation.h
index 95d6a9e..6e51541 100644
--- a/src/cnn/include/creation.h
+++ b/src/cnn/include/creation.h
@@ -37,12 +37,12 @@ void create_a_line_input_layer(Network* network, int pos, int dim);
 /*
 * Ajoute au réseau une couche d'average pooling valide de dimension dim*dim
 */
-void add_2d_average_pooling(Network* network, int dim_output);
+void add_average_pooling(Network* network, int dim_output);
 
 /*
 * Ajoute au réseau une couche de max pooling valide de dimension dim*dim
 */
-void add_2d_max_pooling(Network* network, int dim_output);
+void add_max_pooling(Network* network, int dim_output);
 
 /*
 * Ajoute au réseau une couche de convolution dim*dim et initialise les kernels
diff --git a/src/cnn/include/free.h b/src/cnn/include/free.h
index 92a5c3a..6bffedb 100644
--- a/src/cnn/include/free.h
+++ b/src/cnn/include/free.h
@@ -16,9 +16,9 @@ void free_a_cube_input_layer(Network* network, int pos, int depth, int dim);
 void free_a_line_input_layer(Network* network, int pos);
 
 /*
-* Libère l'espace mémoire alloué dans 'add_2d_average_pooling' ou 'add_2d_max_pooling' (creation.c)
+* Libère l'espace mémoire alloué dans 'add_average_pooling' ou 'add_max_pooling' (creation.c)
 */
-void free_2d_pooling(Network* network, int pos);
+void free_pooling(Network* network, int pos);
 
 /*
 * Libère l'espace mémoire dans 'add_convolution' (creation.c)
diff --git a/src/cnn/include/struct.h b/src/cnn/include/struct.h
index 1837027..90df7eb 100644
--- a/src/cnn/include/struct.h
+++ b/src/cnn/include/struct.h
@@ -1,6 +1,13 @@
 #ifndef DEF_STRUCT_H
 #define DEF_STRUCT_H
 
+#define NO_POOLING 0
+#define AVG_POOLING 1
+#define MAX_POOLING 2
+
+#define DOESNT_LINEARISE 0
+#define DO_LINEARISE 1
+
 typedef struct Kernel_cnn {
     // Noyau ayant une couche matricielle en sortie
     int k_size; // k_size = dim_input - dim_output + 1
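The seven lines added to struct.h are the heart of the commit. The numeric values must stay exactly 0/1/2 because neuron_io.c serialises pooling and linearisation to disk and reads them back as uint32_t, so renaming must not renumber. As a sketch of an alternative (not what the patch does), the same pinned values could be grouped into enums, which would let compilers warn on non-exhaustive switch statements:

/* Hypothetical enum formulation; values pinned to match the on-disk
 * format that neuron_io.c already writes and reads. */
typedef enum {
    NO_POOLING = 0,
    AVG_POOLING = 1,
    MAX_POOLING = 2
} PoolingType;

typedef enum {
    DOESNT_LINEARISE = 0,
    DO_LINEARISE = 1
} LinearisationFlag;

Plain #defines keep the patch minimal, though: every existing int field and fread() call keeps compiling unchanged.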
diff --git a/src/cnn/neuron_io.c b/src/cnn/neuron_io.c
index 7a62470..c29fdc1 100644
--- a/src/cnn/neuron_io.c
+++ b/src/cnn/neuron_io.c
@@ -12,6 +12,10 @@
 
 #define MAGIC_NUMBER 1012
 
+#define CNN 0
+#define NN 1
+#define POOLING 2
+
 #define bufferAdd(val) {buffer[indice_buffer] = val; indice_buffer++;}
 
 void write_network(char* filename, Network* network) {
@@ -226,7 +230,7 @@ Network* read_network(char* filename) {
 
 Kernel* read_kernel(int type_couche, int output_dim, FILE* ptr) {
     Kernel* kernel = (Kernel*)nalloc(1, sizeof(Kernel));
-    if (type_couche == 0) { // Cas du CNN
+    if (type_couche == CNN) { // Cas du CNN
         // Lecture du "Pré-corps"
         kernel->cnn = (Kernel_cnn*)nalloc(1, sizeof(Kernel_cnn));
         kernel->nn = NULL;
@@ -278,7 +282,7 @@ Kernel* read_kernel(int type_couche, int output_dim, FILE* ptr) {
             }
         }
     }
-    } else if (type_couche == 1) { // Cas du NN
+    } else if (type_couche == NN) { // Cas du NN
        // Lecture du "Pré-corps"
        kernel->nn = (Kernel_nn*)nalloc(1, sizeof(Kernel_nn));
        kernel->cnn = NULL;
@@ -313,7 +317,7 @@ Kernel* read_kernel(int type_couche, int output_dim, FILE* ptr) {
                 nn->d_weights[i][j] = 0.;
             }
         }
-    } else if (type_couche == 2) { // Cas du Pooling Layer
+    } else if (type_couche == POOLING) { // Cas du Pooling Layer
         uint32_t pooling, linearisation;
         (void) !fread(&linearisation, sizeof(linearisation), 1, ptr);
         (void) !fread(&pooling, sizeof(pooling), 1, ptr);
diff --git a/src/cnn/print.c b/src/cnn/print.c
index 33b8f7a..4005218 100644
--- a/src/cnn/print.c
+++ b/src/cnn/print.c
@@ -50,7 +50,7 @@ void print_kernel_cnn(Kernel_cnn* ker, int depth_input, int dim_input, int depth
 void print_pooling(int size, int pooling) {
     print_bar;
     purple;
-    if (pooling == 1) {
+    if (pooling == AVG_POOLING) {
         printf("-------Average Pooling %dx%d-------\n", size ,size);
     } else {
         printf("-------Max Pooling %dx%d-------\n", size ,size);
diff --git a/src/cnn/update.c b/src/cnn/update.c
index 5a29755..8ec3807 100644
--- a/src/cnn/update.c
+++ b/src/cnn/update.c
@@ -43,7 +43,7 @@ void update_weights(Network* network, Network* d_network) {
                 }
             }
         } else if (k_i->nn) { // Full connection
-            if (k_i->linearisation == 0) { // Vecteur -> Vecteur
+            if (k_i->linearisation == DOESNT_LINEARISE) { // Vecteur -> Vecteur
                 Kernel_nn* nn = k_i->nn;
                 Kernel_nn* d_nn = dk_i->nn;
 
@@ -139,7 +139,7 @@ void reset_d_weights(Network* network) {
                 }
             }
         } else if (k_i->nn) { // Full connection
-            if (k_i->linearisation == 0) { // Vecteur -> Vecteur
+            if (k_i->linearisation == DOESNT_LINEARISE) { // Vecteur -> Vecteur
                 Kernel_nn* nn = k_i_1->nn;
 
                 for (int a=0; a < input_width; a++) {
diff --git a/test/cnn_structure.c b/test/cnn_structure.c
index 4b6b6da..4a8ec2b 100644
--- a/test/cnn_structure.c
+++ b/test/cnn_structure.c
@@ -20,7 +20,7 @@ int main() {
     for (int i=0; i < network->size-1; i++) {
         kernel = network->kernel[i];
         if ((!kernel->cnn)&&(!kernel->nn)) {
-            if (kernel->pooling == 1) {
+            if (kernel->pooling == AVG_POOLING) {
                 printf("\n==== Couche %d de type "YELLOW"Average Pooling"RESET" ====\n", i);
             } else {
                 printf("\n==== Couche %d de type "YELLOW"Max Pooling"RESET" ====\n", i);
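Finally, neuron_io.c gets its own CNN/NN/POOLING constants for the serialised layer-type tag that read_kernel() dispatches on. A compilable sketch of that dispatch; type_couche_name() is a hypothetical helper for illustration, not a repo function:

#include <inttypes.h>
#include <stdint.h>
#include <stdio.h>

/* Layer-type tags as defined in src/cnn/neuron_io.c by this patch. */
#define CNN 0
#define NN 1
#define POOLING 2

/* Hypothetical helper: maps the on-disk type_couche tag to a name. */
static const char* type_couche_name(uint32_t type_couche) {
    switch (type_couche) {
        case CNN:     return "convolutional kernel";
        case NN:      return "fully connected kernel";
        case POOLING: return "pooling layer";
        default:      return "unknown layer type";
    }
}

int main(void) {
    for (uint32_t t = 0; t <= 3; t++)
        printf("%" PRIu32 " -> %s\n", t, type_couche_name(t));
    return 0;
}

Defining these tags in neuron_io.c rather than struct.h keeps the on-disk layer-type namespace separate from the in-memory pooling and linearisation flags, even though both families happen to use the values 0, 1 and 2.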