diff --git a/src/cnn/creation.c b/src/cnn/creation.c
index 53728aa..c12e573 100644
--- a/src/cnn/creation.c
+++ b/src/cnn/creation.c
@@ -34,73 +34,6 @@ Network* create_network(int max_size, float learning_rate, int dropout, int init
     return network;
 }
 
-Network* create_network_lenet5(float learning_rate, int dropout, int activation, int initialisation, int input_width, int input_depth) {
-    Network* network = create_network(8, learning_rate, dropout, initialisation, input_width, input_depth);
-    add_convolution(network, 5, 6, 1, 0, activation);
-    add_average_pooling(network, 2, 2, 0);
-    add_convolution(network, 5, 16, 1, 0, activation);
-    add_average_pooling(network, 2, 2, 0);
-    add_dense_linearisation(network, 120, activation);
-    add_dense(network, 84, activation);
-    add_dense(network, 10, SOFTMAX);
-    return network;
-}
-
-Network* create_network_alexnet(float learning_rate, int dropout, int activation, int initialisation, int size_output) {
-    Network* network = create_network(12, learning_rate, dropout, initialisation, 227, 3);
-    add_convolution(network, 11, 96, 4, 0, activation);
-    add_average_pooling(network, 3, 2, 0);
-    add_convolution(network, 5, 256, 1, 2, activation);
-    add_average_pooling(network, 3, 2, 0);
-    add_convolution(network, 3, 384, 1, 1, activation);
-    add_convolution(network, 3, 384, 1, 1, activation);
-    add_convolution(network, 3, 256, 1, 1, activation);
-    add_average_pooling(network, 3, 2, 0);
-    add_dense_linearisation(network, 4096, activation);
-    add_dense(network, 4096, activation);
-    add_dense(network, size_output, SOFTMAX);
-    return network;
-}
-
-Network* create_network_VGG16(float learning_rate, int dropout, int activation, int initialisation, int size_output) {
-    Network* network = create_network(23, learning_rate, dropout, initialisation, 256, 3);
-    add_convolution(network, 3, 64, 1, 0, activation); // Conv3-64
-    add_convolution(network, 3, 64, 1, 0, activation); // Conv3-64
-    add_average_pooling(network, 2, 2, 0); // Max Pool
-
-    add_convolution(network, 3, 128, 1, 0, activation); // Conv3-128
-    add_convolution(network, 1, 128, 1, 0, activation); // Conv1-128
-    add_average_pooling(network, 2, 2, 0); // Max Pool
-
-    add_convolution(network, 3, 256, 1, 0, activation); // Conv3-256
-    add_convolution(network, 3, 256, 1, 0, activation); // Conv3-256
-    add_convolution(network, 1, 256, 1, 0, activation); // Conv1-256
-    add_average_pooling(network, 2, 2, 0); // Max Pool
-
-    add_convolution(network, 3, 512, 1, 0, activation); // Conv3-512
-    add_convolution(network, 3, 512, 1, 0, activation); // Conv3-512
-    add_convolution(network, 1, 512, 1, 0, activation); // Conv1-512
-    add_average_pooling(network, 2, 2, 0); // Max Pool
-
-    add_convolution(network, 3, 512, 1, 0, activation); // Conv3-512
-    add_convolution(network, 3, 512, 1, 0, activation); // Conv3-512
-    add_convolution(network, 1, 512, 1, 0, activation); // Conv1-512
-    add_average_pooling(network, 2, 2, 0); // Max Pool
-
-    add_dense_linearisation(network, 2048, activation);
-    add_dense(network, 2048, activation);
-    add_dense(network, 256, activation);
-    add_dense(network, size_output, SOFTMAX);
-    return network;
-}
-
-Network* create_simple_one(float learning_rate, int dropout, int activation, int initialisation, int input_width, int input_depth) {
-    Network* network = create_network(3, learning_rate, dropout, initialisation, input_width, input_depth);
-    add_dense_linearisation(network, 80, activation);
-    add_dense(network, 10, SOFTMAX);
-    return network;
-}
-
 void create_a_cube_input_layer(Network* network, int pos, int depth, int dim) {
     network->input[pos] = (float***)nalloc(depth, sizeof(float**));
     for (int i=0; i < depth; i++) {
diff --git a/src/cnn/include/creation.h b/src/cnn/include/creation.h
index 0c1bca1..07ce339 100644
--- a/src/cnn/include/creation.h
+++ b/src/cnn/include/creation.h
@@ -9,28 +9,6 @@
 */
 Network* create_network(int max_size, float learning_rate, int dropout, int initialisation, int input_width, int input_depth);
 
-/*
-* Renvoie un réseau suivant l'architecture LeNet5
-*/
-Network* create_network_lenet5(float learning_rate, int dropout, int activation, int initialisation, int input_width, int input_depth);
-
-/*
-* Renvoie un réseau suivant l'architecture AlexNet
-* C'est à dire en entrée 3x227x227 et une sortie de taille 'size_output'
-*/
-Network* create_network_alexnet(float learning_rate, int dropout, int activation, int initialisation, int size_output);
-
-/*
-* Renvoie un réseau suivant l'architecture VGG16 modifiée pour prendre en entrée 3x256x256
-* et une sortie de taille 'size_output'
-*/
-Network* create_network_VGG16(float learning_rate, int dropout, int activation, int initialisation, int size_output);
-
-/*
-* Renvoie un réseau sans convolution, similaire à celui utilisé dans src/dense
-*/
-Network* create_simple_one(float learning_rate, int dropout, int activation, int initialisation, int input_width, int input_depth);
-
 /*
 * Créé et alloue de la mémoire à une couche de type input cube
 */
diff --git a/src/cnn/include/models.h b/src/cnn/include/models.h
new file mode 100644
index 0000000..1c92596
--- /dev/null
+++ b/src/cnn/include/models.h
@@ -0,0 +1,29 @@
+#include <stdio.h>
+#include <stdlib.h>
+
+#include "struct.h"
+
+#ifndef DEF_MODELS_H
+#define DEF_MODELS_H
+/*
+* Renvoie un réseau suivant l'architecture LeNet5
+*/
+Network* create_network_lenet5(float learning_rate, int dropout, int activation, int initialisation, int input_width, int input_depth);
+
+/*
+* Renvoie un réseau suivant l'architecture AlexNet
+* C'est à dire en entrée 3x227x227 et une sortie de taille 'size_output'
+*/
+Network* create_network_alexnet(float learning_rate, int dropout, int activation, int initialisation, int size_output);
+
+/*
+* Renvoie un réseau suivant l'architecture VGG16 modifiée pour prendre en entrée 3x256x256
+* et une sortie de taille 'size_output'
+*/
+Network* create_network_VGG16(float learning_rate, int dropout, int activation, int initialisation, int size_output);
+
+/*
+* Renvoie un réseau sans convolution, similaire à celui utilisé dans src/dense
+*/
+Network* create_simple_one(float learning_rate, int dropout, int activation, int initialisation, int input_width, int input_depth);
+#endif
\ No newline at end of file
diff --git a/src/cnn/models.c b/src/cnn/models.c
new file mode 100644
index 0000000..00613b0
--- /dev/null
+++ b/src/cnn/models.c
@@ -0,0 +1,75 @@
+#include <stdio.h>
+#include <stdlib.h>
+
+#include "include/creation.h"
+#include "include/function.h"
+#include "include/struct.h"
+
+#include "include/models.h"
+
+Network* create_network_lenet5(float learning_rate, int dropout, int activation, int initialisation, int input_width, int input_depth) {
+    Network* network = create_network(8, learning_rate, dropout, initialisation, input_width, input_depth);
+    add_convolution(network, 5, 6, 1, 0, activation);
+    add_average_pooling(network, 2, 2, 0);
+    add_convolution(network, 5, 16, 1, 0, activation);
+    add_average_pooling(network, 2, 2, 0);
+    add_dense_linearisation(network, 120, activation);
+    add_dense(network, 84, activation);
+    add_dense(network, 10, SOFTMAX);
+    return network;
+}
+
+Network* create_network_alexnet(float learning_rate, int dropout, int activation, int initialisation, int size_output) {
+    Network* network = create_network(12, learning_rate, dropout, initialisation, 227, 3);
+    add_convolution(network, 11, 96, 4, 0, activation);
+    add_average_pooling(network, 3, 2, 0);
+    add_convolution(network, 5, 256, 1, 2, activation);
+    add_average_pooling(network, 3, 2, 0);
+    add_convolution(network, 3, 384, 1, 1, activation);
+    add_convolution(network, 3, 384, 1, 1, activation);
+    add_convolution(network, 3, 256, 1, 1, activation);
+    add_average_pooling(network, 3, 2, 0);
+    add_dense_linearisation(network, 4096, activation);
+    add_dense(network, 4096, activation);
+    add_dense(network, size_output, SOFTMAX);
+    return network;
+}
+
+Network* create_network_VGG16(float learning_rate, int dropout, int activation, int initialisation, int size_output) {
+    Network* network = create_network(23, learning_rate, dropout, initialisation, 256, 3);
+    add_convolution(network, 3, 64, 1, 0, activation); // Conv3-64
+    add_convolution(network, 3, 64, 1, 0, activation); // Conv3-64
+    add_average_pooling(network, 2, 2, 0); // Max Pool
+
+    add_convolution(network, 3, 128, 1, 0, activation); // Conv3-128
+    add_convolution(network, 1, 128, 1, 0, activation); // Conv1-128
+    add_average_pooling(network, 2, 2, 0); // Max Pool
+
+    add_convolution(network, 3, 256, 1, 0, activation); // Conv3-256
+    add_convolution(network, 3, 256, 1, 0, activation); // Conv3-256
+    add_convolution(network, 1, 256, 1, 0, activation); // Conv1-256
+    add_average_pooling(network, 2, 2, 0); // Max Pool
+
+    add_convolution(network, 3, 512, 1, 0, activation); // Conv3-512
+    add_convolution(network, 3, 512, 1, 0, activation); // Conv3-512
+    add_convolution(network, 1, 512, 1, 0, activation); // Conv1-512
+    add_average_pooling(network, 2, 2, 0); // Max Pool
+
+    add_convolution(network, 3, 512, 1, 0, activation); // Conv3-512
+    add_convolution(network, 3, 512, 1, 0, activation); // Conv3-512
+    add_convolution(network, 1, 512, 1, 0, activation); // Conv1-512
+    add_average_pooling(network, 2, 2, 0); // Max Pool
+
+    add_dense_linearisation(network, 2048, activation);
+    add_dense(network, 2048, activation);
+    add_dense(network, 256, activation);
+    add_dense(network, size_output, SOFTMAX);
+    return network;
+}
+
+Network* create_simple_one(float learning_rate, int dropout, int activation, int initialisation, int input_width, int input_depth) {
+    Network* network = create_network(3, learning_rate, dropout, initialisation, input_width, input_depth);
+    add_dense_linearisation(network, 80, activation);
+    add_dense(network, 10, SOFTMAX);
+    return network;
+}
\ No newline at end of file
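
Usage note (commentary, not part of the patch): with this split, the model constructors now live in src/cnn/models.c and are declared in include/models.h, so callers include that header rather than creation.h. Below is a minimal sketch of how a LeNet-5 network might be built after this change. The RELU and GLOROT constants, the free.h header and the free_network() cleanup helper are assumptions about the rest of the project, not something this diff defines.

    #include "include/models.h"      // create_network_lenet5() and the other constructors added by this patch
    #include "include/function.h"    // activation constants such as RELU (assumed)
    #include "include/free.h"        // free_network() cleanup helper (assumed)

    int main(void) {
        // 32x32 single-channel input, the classic LeNet-5 geometry;
        // the network ends with a 10-way SOFTMAX layer, as defined in models.c.
        Network* network = create_network_lenet5(0.01f, 0, RELU, GLOROT, 32, 1);

        // ... training / inference would go here ...

        free_network(network);
        return 0;
    }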