2022-07-05 08:13:25 +02:00
|
|
|
#include <stdio.h>
|
|
|
|
#include <stdlib.h>
|
2022-10-24 12:54:51 +02:00
|
|
|
|
2023-02-18 13:10:00 +01:00
|
|
|
#include "../include/memory_management.h"
|
2022-10-24 12:54:51 +02:00
|
|
|
#include "include/initialisation.h"
|
2022-09-16 14:53:35 +02:00
|
|
|
#include "include/function.h"
|
2023-01-28 22:04:38 +01:00
|
|
|
#include "../include/utils.h"
|
2022-10-24 12:54:51 +02:00
|
|
|
|
|
|
|
#include "include/creation.h"
|
2022-07-05 08:13:25 +02:00
|
|
|
|
2022-11-15 18:15:18 +01:00
|
|
|
Network* create_network(int max_size, float learning_rate, int dropout, int initialisation, int input_dim, int input_depth) {
|
2022-09-09 17:39:07 +02:00
|
|
|
if (dropout < 0 || dropout > 100) {
|
2022-07-05 08:13:25 +02:00
|
|
|
printf("Erreur, la probabilité de dropout n'est pas respecté, elle doit être comprise entre 0 et 100\n");
|
|
|
|
}
|
2023-01-28 22:04:38 +01:00
|
|
|
Network* network = (Network*)nalloc(sizeof(Network));
|
2022-10-03 10:04:11 +02:00
|
|
|
network->learning_rate = learning_rate;
|
2023-01-17 15:34:29 +01:00
|
|
|
network->max_size = max_size;
|
|
|
|
network->dropout = dropout;
|
|
|
|
network->initialisation = initialisation;
|
|
|
|
network->size = 1;
|
2023-01-28 22:04:38 +01:00
|
|
|
network->input = (float****)nalloc(sizeof(float***)*max_size);
|
|
|
|
network->input_z = (float****)nalloc(sizeof(float***)*max_size);
|
|
|
|
network->kernel = (Kernel**)nalloc(sizeof(Kernel*)*(max_size-1));
|
|
|
|
network->width = (int*)nalloc(sizeof(int*)*max_size);
|
|
|
|
network->depth = (int*)nalloc(sizeof(int*)*max_size);
|
2023-01-17 15:25:34 +01:00
|
|
|
for (int i=0; i < max_size-1; i++) {
|
2023-01-28 22:04:38 +01:00
|
|
|
network->kernel[i] = (Kernel*)nalloc(sizeof(Kernel));
|
2022-07-05 08:13:25 +02:00
|
|
|
}
|
2023-01-30 09:39:45 +01:00
|
|
|
network->kernel[0]->linearisation = 0;
|
2023-01-17 15:34:29 +01:00
|
|
|
network->width[0] = input_dim;
|
|
|
|
network->depth[0] = input_depth;
|
|
|
|
network->kernel[0]->nn = NULL;
|
|
|
|
network->kernel[0]->cnn = NULL;
|
2022-11-23 10:41:19 +01:00
|
|
|
create_a_cube_input_layer(network, 0, input_depth, input_dim);
|
2023-01-17 15:34:29 +01:00
|
|
|
create_a_cube_input_z_layer(network, 0, input_depth, input_dim);
|
2022-07-05 08:13:25 +02:00
|
|
|
return network;
|
|
|
|
}
|
|
|
|
|
2022-11-15 18:15:18 +01:00
|
|
|
Network* create_network_lenet5(float learning_rate, int dropout, int activation, int initialisation, int input_dim, int input_depth) {
|
2023-01-17 12:49:35 +01:00
|
|
|
Network* network = create_network(8, learning_rate, dropout, initialisation, input_dim, input_depth);
|
2023-01-17 15:34:29 +01:00
|
|
|
network->kernel[0]->activation = activation;
|
2022-10-03 10:22:12 +02:00
|
|
|
add_convolution(network, 6, 28, activation);
|
|
|
|
add_2d_average_pooling(network, 14);
|
|
|
|
add_convolution(network, 16, 10, activation);
|
|
|
|
add_2d_average_pooling(network, 5);
|
|
|
|
add_dense_linearisation(network, 120, activation);
|
|
|
|
add_dense(network, 84, activation);
|
|
|
|
add_dense(network, 10, SOFTMAX);
|
2022-07-05 08:13:25 +02:00
|
|
|
return network;
|
|
|
|
}
|
|
|
|
|
2023-01-21 18:59:59 +01:00
|
|
|
Network* create_simple_one(float learning_rate, int dropout, int activation, int initialisation, int input_dim, int input_depth) {
|
|
|
|
Network* network = create_network(3, learning_rate, dropout, initialisation, input_dim, input_depth);
|
|
|
|
network->kernel[0]->activation = activation;
|
|
|
|
add_dense_linearisation(network, 80, activation);
|
|
|
|
add_dense(network, 10, SOFTMAX);
|
|
|
|
return network;
|
|
|
|
}
|
|
|
|
|
2022-07-05 08:13:25 +02:00
|
|
|
void create_a_cube_input_layer(Network* network, int pos, int depth, int dim) {
    /*
    * Allocates the 3D activation cube network->input[pos] with shape
    * depth x dim x dim and records its dimensions in width[pos]/depth[pos].
    */
    float*** cube = (float***)nalloc(sizeof(float**)*depth);
    for (int d=0; d < depth; d++) {
        cube[d] = (float**)nalloc(sizeof(float*)*dim);
        for (int row=0; row < dim; row++) {
            cube[d][row] = (float*)nalloc(sizeof(float)*dim);
        }
    }
    network->input[pos] = cube;
    network->width[pos] = dim;
    network->depth[pos] = depth;
}
|
|
|
|
|
2022-10-31 20:08:42 +01:00
|
|
|
void create_a_cube_input_z_layer(Network* network, int pos, int depth, int dim) {
    /*
    * Allocates the 3D pre-activation cube network->input_z[pos] with shape
    * depth x dim x dim and records its dimensions in width[pos]/depth[pos].
    * Mirrors create_a_cube_input_layer for the z (pre-activation) buffers.
    */
    float*** cube = (float***)nalloc(sizeof(float**)*depth);
    for (int d=0; d < depth; d++) {
        cube[d] = (float**)nalloc(sizeof(float*)*dim);
        for (int row=0; row < dim; row++) {
            cube[d][row] = (float*)nalloc(sizeof(float)*dim);
        }
    }
    network->input_z[pos] = cube;
    network->width[pos] = dim;
    network->depth[pos] = depth;
}
|
|
|
|
|
2022-07-05 08:13:25 +02:00
|
|
|
void create_a_line_input_layer(Network* network, int pos, int dim) {
    /*
    * Allocates a flat activation layer (1 x 1 x dim) at network->input[pos],
    * kept as a degenerate 3D cube so dense layers share the cube layout.
    */
    float*** line = (float***)nalloc(sizeof(float**));
    line[0] = (float**)nalloc(sizeof(float*));
    line[0][0] = (float*)nalloc(sizeof(float)*dim);
    network->input[pos] = line;
    network->width[pos] = dim;
    network->depth[pos] = 1;
}
|
|
|
|
|
2022-10-31 20:08:42 +01:00
|
|
|
void create_a_line_input_z_layer(Network* network, int pos, int dim) {
    /*
    * Allocates a flat pre-activation layer (1 x 1 x dim) at
    * network->input_z[pos]. Mirrors create_a_line_input_layer.
    */
    float*** line = (float***)nalloc(sizeof(float**));
    line[0] = (float**)nalloc(sizeof(float*));
    line[0][0] = (float*)nalloc(sizeof(float)*dim);
    network->input_z[pos] = line;
    network->width[pos] = dim;
    network->depth[pos] = 1;
}
|
|
|
|
|
|
|
|
void add_2d_average_pooling(Network* network, int dim_output) {
    /*
    * Appends a 2D average-pooling layer. The output keeps the input depth
    * and has width dim_output; the input width must be a multiple of
    * dim_output (pooling factor = dim_input/dim_output).
    */
    int n = network->size;
    int k_pos = n-1;
    int dim_input = network->width[k_pos];
    if (network->max_size == n) {
        printf("Impossible de rajouter une couche d'average pooling, le réseau est déjà plein\n");
        return;
    }
    if (dim_input%dim_output != 0) {
        printf("Erreur de dimension dans l'average pooling\n");
        return;
    }
    network->kernel[k_pos]->cnn = NULL;
    network->kernel[k_pos]->nn = NULL;
    network->kernel[k_pos]->activation = IDENTITY; // Ne contient pas de fonction d'activation
    network->kernel[k_pos]->linearisation = 0;
    network->kernel[k_pos]->pooling = 1;
    // BUG FIX: the output width is dim_output, not width[n-1]/2 — the old code
    // ignored dim_output and only worked for a pooling factor of exactly 2.
    create_a_cube_input_layer(network, n, network->depth[n-1], dim_output);
    create_a_cube_input_z_layer(network, n, network->depth[n-1], dim_output); // Will it be used ?
    network->size++;
}
|
|
|
|
|
|
|
|
void add_2d_max_pooling(Network* network, int dim_output) {
    /*
    * Appends a 2D max-pooling layer. The output keeps the input depth and
    * has width dim_output; the input width must be a multiple of dim_output
    * (pooling factor = dim_input/dim_output).
    */
    int n = network->size;
    int k_pos = n-1;
    int dim_input = network->width[k_pos];
    if (network->max_size == n) {
        printf("Impossible de rajouter une couche de max pooling, le réseau est déjà plein\n");
        return;
    }
    if (dim_input%dim_output != 0) {
        printf("Erreur de dimension dans le max pooling\n");
        return;
    }
    network->kernel[k_pos]->cnn = NULL;
    network->kernel[k_pos]->nn = NULL;
    network->kernel[k_pos]->activation = IDENTITY; // Ne contient pas de fonction d'activation
    network->kernel[k_pos]->linearisation = 0;
    network->kernel[k_pos]->pooling = 2;
    // BUG FIX: the output width is dim_output, not width[n-1]/2 — the old code
    // ignored dim_output and only worked for a pooling factor of exactly 2.
    create_a_cube_input_layer(network, n, network->depth[n-1], dim_output);
    create_a_cube_input_z_layer(network, n, network->depth[n-1], dim_output); // Will it be used ?
    network->size++;
}
|
|
|
|
|
2022-10-03 10:22:12 +02:00
|
|
|
void add_convolution(Network* network, int depth_output, int dim_output, int activation) {
    /*
    * Appends a convolutional layer producing depth_output feature maps of
    * dim_output x dim_output. Kernel side = dim_input - dim_output + 1
    * (valid convolution, stride 1). Weights and biases are initialised with
    * the network's scheme; gradient buffers (d_*) start at zero.
    */
    int n = network->size;
    int k_pos = n-1;
    if (network->max_size == n) {
        printf("Impossible de rajouter une couche de convolution, le réseau est déjà plein \n");
        return;
    }
    int depth_input = network->depth[k_pos];
    int dim_input = network->width[k_pos];

    int bias_size = dim_output; // one bias per output pixel, per feature map
    int kernel_size = dim_input - dim_output +1;
    if (kernel_size < 1) { // Robustness fix: reject an output larger than the input
        printf("Erreur de dimension dans la convolution\n");
        return;
    }
    network->kernel[k_pos]->nn = NULL;
    network->kernel[k_pos]->activation = activation;
    network->kernel[k_pos]->linearisation = 0;
    network->kernel[k_pos]->pooling = 0;
    network->kernel[k_pos]->cnn = (Kernel_cnn*)nalloc(sizeof(Kernel_cnn));
    Kernel_cnn* cnn = network->kernel[k_pos]->cnn;

    cnn->k_size = kernel_size;
    cnn->rows = depth_input;
    cnn->columns = depth_output;

    // Weights: [depth_input][depth_output][kernel_size][kernel_size]
    cnn->weights = (float****)nalloc(sizeof(float***)*depth_input);
    cnn->d_weights = (float****)nalloc(sizeof(float***)*depth_input);
    for (int i=0; i < depth_input; i++) {
        cnn->weights[i] = (float***)nalloc(sizeof(float**)*depth_output);
        cnn->d_weights[i] = (float***)nalloc(sizeof(float**)*depth_output);
        for (int j=0; j < depth_output; j++) {
            cnn->weights[i][j] = (float**)nalloc(sizeof(float*)*kernel_size);
            cnn->d_weights[i][j] = (float**)nalloc(sizeof(float*)*kernel_size);
            for (int k=0; k < kernel_size; k++) {
                cnn->weights[i][j][k] = (float*)nalloc(sizeof(float)*kernel_size);
                cnn->d_weights[i][j][k] = (float*)nalloc(sizeof(float)*kernel_size);
                for (int l=0; l < kernel_size; l++) {
                    cnn->d_weights[i][j][k][l] = 0.;
                }
            }
        }
    }

    // Biases: [depth_output][bias_size][bias_size]
    cnn->bias = (float***)nalloc(sizeof(float**)*depth_output);
    cnn->d_bias = (float***)nalloc(sizeof(float**)*depth_output);
    for (int i=0; i < depth_output; i++) {
        cnn->bias[i] = (float**)nalloc(sizeof(float*)*bias_size);
        cnn->d_bias[i] = (float**)nalloc(sizeof(float*)*bias_size);
        for (int j=0; j < bias_size; j++) {
            cnn->bias[i][j] = (float*)nalloc(sizeof(float)*bias_size);
            cnn->d_bias[i][j] = (float*)nalloc(sizeof(float)*bias_size);
            for (int k=0; k < bias_size; k++) {
                cnn->d_bias[i][j][k] = 0.;
            }
        }
    }

    int n_in = dim_input*dim_input*depth_input;
    // BUG FIX: the old code computed n_out from network->width[n] and
    // network->depth[n], which are only set below by
    // create_a_cube_input_layer — i.e. it read uninitialised memory.
    int n_out = dim_output*dim_output*depth_output;
    initialisation_3d_matrix(network->initialisation, cnn->bias, depth_output, dim_output, dim_output, n_in, n_out);
    initialisation_4d_matrix(network->initialisation, cnn->weights, depth_input, depth_output, kernel_size, kernel_size, n_in, n_out);
    create_a_cube_input_layer(network, n, depth_output, bias_size);
    create_a_cube_input_z_layer(network, n, depth_output, bias_size);
    network->size++;
}
|
|
|
|
|
2023-02-19 12:53:08 +01:00
|
|
|
void add_dense(Network* network, int size_output, int activation) {
    /*
    * Appends a fully-connected layer of size_output neurons fed by the
    * previous layer's size_input values, initialises its weights/biases
    * and allocates the matching input/input_z line buffers.
    */
    int n = network->size;
    int k_pos = n-1;
    int size_input = network->width[k_pos];

    if (network->max_size == n) {
        printf("Impossible de rajouter une couche dense, le réseau est déjà plein\n");
        return;
    }

    network->kernel[k_pos]->cnn = NULL;
    network->kernel[k_pos]->nn = (Kernel_nn*)nalloc(sizeof(Kernel_nn));
    Kernel_nn* dense = network->kernel[k_pos]->nn;
    network->kernel[k_pos]->activation = activation;
    network->kernel[k_pos]->linearisation = 0;
    network->kernel[k_pos]->pooling = 0;

    dense->size_input = size_input;
    dense->size_output = size_output;

    // Biases: one per output neuron; gradient buffer starts at zero.
    dense->bias = (float*)nalloc(sizeof(float)*size_output);
    dense->d_bias = (float*)nalloc(sizeof(float)*size_output);
    for (int out=0; out < size_output; out++) {
        dense->d_bias[out] = 0.;
    }

    // Weights: size_input rows of size_output columns; gradients start at zero.
    dense->weights = (float**)nalloc(sizeof(float*)*size_input);
    dense->d_weights = (float**)nalloc(sizeof(float*)*size_input);
    for (int in=0; in < size_input; in++) {
        dense->weights[in] = (float*)nalloc(sizeof(float)*size_output);
        dense->d_weights[in] = (float*)nalloc(sizeof(float)*size_output);
        for (int out=0; out < size_output; out++) {
            dense->d_weights[in][out] = 0.;
        }
    }

    initialisation_1d_matrix(network->initialisation, dense->bias, size_output, size_input);
    initialisation_2d_matrix(network->initialisation, dense->weights, size_input, size_output, size_input, size_output);
    create_a_line_input_layer(network, n, size_output);
    create_a_line_input_z_layer(network, n, size_output);
    network->size++;
}
|
|
|
|
|
2023-02-19 12:53:08 +01:00
|
|
|
void add_dense_linearisation(Network* network, int size_output, int activation) {
    /*
    * Appends a fully-connected layer that flattens the previous 3D cube
    * (depth x width x width) into a line of size_output neurons
    * (kernel flagged with linearisation = 1).
    */
    // Can replace size_input by a research of this dim
    int n = network->size;
    int k_pos = n-1;
    int size_input = network->depth[k_pos]*network->width[k_pos]*network->width[k_pos];

    if (network->max_size == n) {
        printf("Impossible de rajouter une couche dense, le réseau est déjà plein\n");
        return;
    }

    network->kernel[k_pos]->cnn = NULL;
    network->kernel[k_pos]->nn = (Kernel_nn*)nalloc(sizeof(Kernel_nn));
    Kernel_nn* dense = network->kernel[k_pos]->nn;
    network->kernel[k_pos]->activation = activation;
    network->kernel[k_pos]->linearisation = 1;
    network->kernel[k_pos]->pooling = 0;

    dense->size_input = size_input;
    dense->size_output = size_output;

    // Biases: one per output neuron; gradient buffer starts at zero.
    dense->bias = (float*)nalloc(sizeof(float)*size_output);
    dense->d_bias = (float*)nalloc(sizeof(float)*size_output);
    for (int out=0; out < size_output; out++) {
        dense->d_bias[out] = 0.;
    }

    // Weights: size_input rows of size_output columns; gradients start at zero.
    dense->weights = (float**)nalloc(sizeof(float*)*size_input);
    dense->d_weights = (float**)nalloc(sizeof(float*)*size_input);
    for (int in=0; in < size_input; in++) {
        dense->weights[in] = (float*)nalloc(sizeof(float)*size_output);
        dense->d_weights[in] = (float*)nalloc(sizeof(float)*size_output);
        for (int out=0; out < size_output; out++) {
            dense->d_weights[in][out] = 0.;
        }
    }

    initialisation_1d_matrix(network->initialisation, dense->bias, size_output, size_input);
    initialisation_2d_matrix(network->initialisation, dense->weights, size_input, size_output, size_input, size_output);
    create_a_line_input_layer(network, n, size_output);
    create_a_line_input_z_layer(network, n, size_output);
    network->size++;
}
|