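// Functions freeing the memory allocated to a Network:
// input buffers, convolution / dense / pooling kernels, and the Network struct itself.
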
#include <stdbool.h>
#include <stdlib.h>
#include <stdio.h>

#include "../common/include/memory_management.h"
#include "include/free.h"
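
// Frees network->input[pos] and network->input_z[pos] for a three-dimensional layer:
// each innermost row, then each 2D slice, then the array of slices itself.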
void free_a_cube_input_layer(Network* network, int pos, int depth, int dim) {
    for (int i=0; i < depth; i++) {
        for (int j=0; j < dim; j++) {
            gree(network->input[pos][i][j], true);
            gree(network->input_z[pos][i][j], true);
        }
        gree(network->input[pos][i], true);
        gree(network->input_z[pos][i], true);
    }
    gree(network->input[pos], true);
    gree(network->input_z[pos], true);
}
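
// Counterpart of free_a_cube_input_layer for one-dimensional (dense) layers.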
void free_a_line_input_layer(Network* network, int pos) {
    // Frees the memory used by network->input[pos] and network->input_z[pos]
    // when these layers are dense (and therefore are one-dimensional matrices)
    gree(network->input[pos][0][0], true);
    gree(network->input_z[pos][0][0], true);
    gree(network->input[pos][0], true);
    gree(network->input_z[pos][0], true);
    gree(network->input[pos], true);
    gree(network->input_z[pos], true);
}
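
// Frees what the pooling layer at index pos allocated: only its output buffer (input[pos+1]).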
void free_pooling(Network* network, int pos) {
    // Pooling allocates nothing other than the input
    free_a_cube_input_layer(network, pos+1, network->depth[pos+1], network->width[pos+1]);
}
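
// Frees the convolutional kernel at index pos: its output buffer (input[pos+1]),
// its biases and weights, their gradients, and the Adam moment estimates when
// ADAM_CNN_BIAS / ADAM_CNN_WEIGHTS are defined.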
void free_convolution(Network* network, int pos) {
    Kernel_cnn* k_pos = network->kernel[pos]->cnn;
    int c = k_pos->columns;
    int k_size = k_pos->k_size;
    int r = k_pos->rows;
    int bias_size = network->width[pos+1];

    free_a_cube_input_layer(network, pos+1, network->depth[pos+1], network->width[pos+1]);

    // Biases (and their Adam moment estimates when enabled)
    for (int i=0; i < c; i++) {
        for (int j=0; j < bias_size; j++) {
            gree(k_pos->bias[i][j], true);
            gree(k_pos->d_bias[i][j], true);
            #ifdef ADAM_CNN_BIAS
            gree(k_pos->s_d_bias[i][j], true);
            gree(k_pos->v_d_bias[i][j], true);
            #endif
        }
        gree(k_pos->bias[i], true);
        gree(k_pos->d_bias[i], true);
        #ifdef ADAM_CNN_BIAS
        gree(k_pos->s_d_bias[i], true);
        gree(k_pos->v_d_bias[i], true);
        #endif
    }
    gree(k_pos->bias, true);
    gree(k_pos->d_bias, true);
    #ifdef ADAM_CNN_BIAS
    gree(k_pos->s_d_bias, true);
    gree(k_pos->v_d_bias, true);
    #endif

    // Weights (and their Adam moment estimates when enabled)
    for (int i=0; i < r; i++) {
        for (int j=0; j < c; j++) {
            for (int k=0; k < k_size; k++) {
                gree(k_pos->weights[i][j][k], true);
                gree(k_pos->d_weights[i][j][k], true);
                #ifdef ADAM_CNN_WEIGHTS
                gree(k_pos->s_d_weights[i][j][k], true);
                gree(k_pos->v_d_weights[i][j][k], true);
                #endif
            }
            gree(k_pos->weights[i][j], true);
            gree(k_pos->d_weights[i][j], true);
            #ifdef ADAM_CNN_WEIGHTS
            gree(k_pos->s_d_weights[i][j], true);
            gree(k_pos->v_d_weights[i][j], true);
            #endif
        }
        gree(k_pos->weights[i], true);
        gree(k_pos->d_weights[i], true);
        #ifdef ADAM_CNN_WEIGHTS
        gree(k_pos->s_d_weights[i], true);
        gree(k_pos->v_d_weights[i], true);
        #endif
    }
    gree(k_pos->weights, true);
    gree(k_pos->d_weights, true);
    #ifdef ADAM_CNN_WEIGHTS
    gree(k_pos->s_d_weights, true);
    gree(k_pos->v_d_weights, true);
    #endif

    gree(k_pos, true);
}
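
// Frees the fully-connected kernel at index pos: its output buffer (input[pos+1]),
// its weights, biases, gradients, and the Adam moment estimates when
// ADAM_DENSE_WEIGHTS / ADAM_DENSE_BIAS are defined.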
void free_dense(Network* network, int pos) {
    free_a_line_input_layer(network, pos+1);
    Kernel_nn* k_pos = network->kernel[pos]->nn;
    int dim = k_pos->size_input;

    for (int i=0; i < dim; i++) {
        gree(k_pos->weights[i], true);
        gree(k_pos->d_weights[i], true);
        #ifdef ADAM_DENSE_WEIGHTS
        gree(k_pos->s_d_weights[i], true);
        gree(k_pos->v_d_weights[i], true);
        #endif
    }
    gree(k_pos->weights, true);
    gree(k_pos->d_weights, true);
    #ifdef ADAM_DENSE_WEIGHTS
    gree(k_pos->s_d_weights, true);
    gree(k_pos->v_d_weights, true);
    #endif

    gree(k_pos->bias, true);
    gree(k_pos->d_bias, true);
    #ifdef ADAM_DENSE_BIAS
    gree(k_pos->s_d_bias, true);
    gree(k_pos->v_d_bias, true);
    #endif

    gree(k_pos, true);
}
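
// Same as free_dense, but for the dense kernel that linearises a 3D input;
// the allocations freed here have the same structure.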
void free_dense_linearisation(Network* network, int pos) {
    free_a_line_input_layer(network, pos+1);
    Kernel_nn* k_pos = network->kernel[pos]->nn;
    int dim = k_pos->size_input;

    for (int i=0; i < dim; i++) {
        gree(k_pos->weights[i], true);
        gree(k_pos->d_weights[i], true);
        #ifdef ADAM_DENSE_WEIGHTS
        gree(k_pos->s_d_weights[i], true);
        gree(k_pos->v_d_weights[i], true);
        #endif
    }
    gree(k_pos->weights, true);
    gree(k_pos->d_weights, true);
    #ifdef ADAM_DENSE_WEIGHTS
    gree(k_pos->s_d_weights, true);
    gree(k_pos->v_d_weights, true);
    #endif

    gree(k_pos->bias, true);
    gree(k_pos->d_bias, true);
    #ifdef ADAM_DENSE_BIAS
    gree(k_pos->s_d_bias, true);
    gree(k_pos->v_d_bias, true);
    #endif

    gree(k_pos, true);
}
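
// Frees what was allocated by the network creation itself: the image input buffer
// input[0], the kernel array, the width and depth arrays, and the Network struct.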
void free_network_creation(Network* network) {
    // Free the input corresponding to the image: input[0] (as it does not belong to any layer)
    free_a_cube_input_layer(network, 0, network->depth[0], network->width[0]);

    for (int i=0; i < network->max_size-1; i++) {
        gree(network->kernel[i], true);
    }
    gree(network->width, true);
    gree(network->depth, true);
    gree(network->kernel, true);
    gree(network->input, true);
    gree(network->input_z, true);

    gree(network, true);
}
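
// Frees the whole network, layer by layer, or in a single pass with free_all_memory()
// when every allocation went through nalloc (USE_CUDA or TEST_MEMORY_MANAGEMENT).
// Minimal usage sketch (the creation call is assumed to come from elsewhere in the project):
//     Network* network = /* one of the network creation routines */;
//     // ... training / inference ...
//     free_network(network);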
void free_network(Network* network) {
    #if defined(USE_CUDA) || defined(TEST_MEMORY_MANAGEMENT)
        // Directly free all of the memory allocated with nalloc.
        // There is then no need to walk through the whole network,
        // but ALL of the network's memory must have been allocated this way,
        // and this must be the case ONLY for the memory allocated to the network.
        free_all_memory();
    #else
        for (int i=network->size-2; i>=0; i--) {
            if (network->kernel[i]->cnn != NULL) {
                // Convolution
                free_convolution(network, i);
            } else if (network->kernel[i]->nn != NULL) {
                // Dense
                if (network->kernel[i]->linearisation == DOESNT_LINEARISE) {
                    // Regular dense layer
                    free_dense(network, i);
                } else {
                    // Dense layer that linearises its input
                    free_dense_linearisation(network, i);
                }
            } else {
                // Pooling
                free_pooling(network, i);
            }
        }
        free_network_creation(network);
    #endif
}