Fix various memory leaks in mnist NN

augustin64 2023-01-14 14:52:40 +01:00
parent 48dcded57a
commit dd6fb046c7
5 changed files with 47 additions and 30 deletions


@@ -248,6 +248,16 @@ void train(int epochs, int layers, int neurons, char* recovery, char* image_file
     for (int j=0; j < nb_threads; j++) {
         free(train_parameters[j]);
     }
+    for (int i=0; i < nb_images_total; i++) {
+        for (int j=0; j < height; j++) {
+            free(images[i][j]);
+        }
+        free(images[i]);
+    }
+    free(images);
+    free(labels);
     free(shuffle_indices);
     free(train_parameters);
     // Free the memory areas used specifically on the CPU
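The teardown added above has to mirror how the image buffer was built. As a point of reference, here is a minimal standalone sketch of that allocation/free pairing for a jagged images[n][height][width] array; the dimension and function names are illustrative, not taken from the project:

#include <stdlib.h>

// Sketch only: allocate and release an images[n][height][width] buffer in the
// same order as the hunk above frees it. Names are assumptions.
int*** alloc_images(int n, int height, int width) {
    int*** images = (int***)malloc(sizeof(int**)*n);
    for (int i=0; i < n; i++) {
        images[i] = (int**)malloc(sizeof(int*)*height);
        for (int j=0; j < height; j++) {
            images[i][j] = (int*)malloc(sizeof(int)*width);
        }
    }
    return images;
}

void free_images(int*** images, int n, int height) {
    for (int i=0; i < n; i++) {
        for (int j=0; j < height; j++) {
            free(images[i][j]);   // innermost rows first
        }
        free(images[i]);          // then each image's row-pointer array
    }
    free(images);                 // finally the top-level pointer array
}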


@@ -84,14 +84,15 @@ void deletion_of_network(Network* network) {
     for (int i=0; i < network->nb_layers; i++) {
         layer = network->layers[i];
+        if (i != network->nb_layers-1) { // Skip the last layer, whose neurons carry no outgoing weights
         for (int j=0; j < network->layers[i]->nb_neurons; j++) {
             neuron = layer->neurons[j];
-            if (i != network->nb_layers-1) { // Skip the last layer, whose neurons carry no outgoing weights
             free(neuron->weights);
             free(neuron->back_weights);
             free(neuron->last_back_weights);
+            free(neuron);
             }
-        free(neuron);
         }
         free(layer->neurons);
         free(network->layers[i]);
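For orientation, here is a self-contained sketch of the same teardown logic, with struct shapes assumed from the field names in this hunk (the real definitions live elsewhere in the repository). It frees weight arrays only where they were allocated and releases every neuron struct:

#include <stdlib.h>

// Assumed minimal struct shapes, inferred from the field names above.
typedef struct Neuron { float *weights, *back_weights, *last_back_weights; } Neuron;
typedef struct Layer { int nb_neurons; Neuron** neurons; } Layer;
typedef struct Network { int nb_layers; Layer** layers; } Network;

void free_network_sketch(Network* network) {
    for (int i=0; i < network->nb_layers; i++) {
        Layer* layer = network->layers[i];
        for (int j=0; j < layer->nb_neurons; j++) {
            Neuron* neuron = layer->neurons[j];
            if (i != network->nb_layers-1) { // last layer: no outgoing weight arrays
                free(neuron->weights);
                free(neuron->back_weights);
                free(neuron->last_back_weights);
            }
            free(neuron);                    // the struct itself is always heap-allocated
        }
        free(layer->neurons);
        free(layer);
    }
    free(network->layers); // assuming the layer array and network are heap-allocated too
    free(network);
}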


@@ -24,15 +24,17 @@ Neuron* read_neuron(uint32_t nb_weights, FILE *ptr) {
     neuron->last_back_bias = 0.0;
     neuron->back_bias = 0.0;
-    neuron->last_back_weights = (float*)malloc(sizeof(float)*nb_weights);
-    neuron->back_weights = (float*)malloc(sizeof(float)*nb_weights);
-    neuron->weights = (float*)malloc(sizeof(float)*nb_weights);
+    if (nb_weights != 0) {
+        neuron->last_back_weights = (float*)malloc(sizeof(float)*nb_weights);
+        neuron->back_weights = (float*)malloc(sizeof(float)*nb_weights);
+        neuron->weights = (float*)malloc(sizeof(float)*nb_weights);
         for (int i=0; i < (int)nb_weights; i++) {
             fread(&tmp, sizeof(float), 1, ptr);
             neuron->weights[i] = tmp;
             neuron->back_weights[i] = 0.0;
             neuron->last_back_weights[i] = 0.0;
+        }
     }
     return neuron;
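The new guard skips the weight allocations when nb_weights == 0 (output-layer neurons). Since malloc(0) may return either NULL or a non-NULL pointer that must still be freed, skipping the call keeps ownership unambiguous. A hedged sketch of the same read pattern with the fread return value checked; the helper name and error handling are illustrative, not part of the project:

#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>

// Illustrative helper: read n floats into a freshly allocated buffer.
// Returns NULL when n == 0, on allocation failure, or on a short read.
static float* read_float_array(uint32_t n, FILE* ptr) {
    if (n == 0) {
        return NULL;   // output neurons: nothing to allocate or read
    }
    float* buf = (float*)malloc(sizeof(float)*n);
    if (buf == NULL || fread(buf, sizeof(float), n, ptr) != n) {
        free(buf);     // free(NULL) is a no-op, so this covers both failure cases
        return NULL;
    }
    return buf;
}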


@@ -101,13 +101,15 @@ void create_network(char* filename, int sortie) {
     neuron->back_bias = 0.;
     neuron->last_back_bias = 0.;
-    neuron->weights = (float*)malloc(sizeof(float)*neurons_per_layer[i+1]);
-    neuron->back_weights = (float*)malloc(sizeof(float)*neurons_per_layer[i+1]);
-    neuron->last_back_weights = (float*)malloc(sizeof(float)*neurons_per_layer[i+1]);
-    for (int k=0; k < neurons_per_layer[i+1]; k++) {
-        neuron->weights[k] = 0.;
-        neuron->back_weights[k] = 0.;
-        neuron->last_back_weights[k] = 0.;
+    if (i != network->nb_layers-1) {
+        neuron->weights = (float*)malloc(sizeof(float)*neurons_per_layer[i+1]);
+        neuron->back_weights = (float*)malloc(sizeof(float)*neurons_per_layer[i+1]);
+        neuron->last_back_weights = (float*)malloc(sizeof(float)*neurons_per_layer[i+1]);
+        for (int k=0; k < neurons_per_layer[i+1]; k++) {
+            neuron->weights[k] = 0.;
+            neuron->back_weights[k] = 0.;
+            neuron->last_back_weights[k] = 0.;
+        }
     }
     layer->neurons[j] = neuron;
 }
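Since the arrays above are zeroed immediately after allocation, the same guarded initialization can also be written with calloc, which allocates and zero-fills in one call. A sketch under an assumed neuron shape (an equivalent formulation, not the committed code):

#include <stdlib.h>

// Assumed minimal shape of the weight-carrying fields.
typedef struct { float *weights, *back_weights, *last_back_weights; } NeuronWeights;

// nb_out stands for neurons_per_layer[i+1] in the hunk above.
void init_outgoing_weights(NeuronWeights* n, int nb_out) {
    if (nb_out != 0) {   // last layer: no outgoing weights to allocate
        n->weights = (float*)calloc(nb_out, sizeof(float));
        n->back_weights = (float*)calloc(nb_out, sizeof(float));
        n->last_back_weights = (float*)calloc(nb_out, sizeof(float));
    }
}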


@@ -11,19 +11,21 @@
 Neuron* creer_neuron(int nb_sortants) {
     Neuron* neuron = (Neuron*)malloc(sizeof(Neuron));
-    neuron->weights = (float*)malloc(sizeof(float)*nb_sortants);
-    neuron->back_weights = (float*)malloc(sizeof(float)*nb_sortants);
-    neuron->last_back_weights = (float*)malloc(sizeof(float)*nb_sortants);
+    if (nb_sortants != 0) {
+        neuron->weights = (float*)malloc(sizeof(float)*nb_sortants);
+        neuron->back_weights = (float*)malloc(sizeof(float)*nb_sortants);
+        neuron->last_back_weights = (float*)malloc(sizeof(float)*nb_sortants);
     for (int i=0; i < nb_sortants; i++) {
         neuron->weights[i] = 0.5;
         neuron->back_weights[i] = 0.0;
         neuron->last_back_weights[i] = 0.0;
+        }
+        neuron->z = 0.0;
+        neuron->bias = 0.0;
+        neuron->back_bias = 0.0;
+        neuron->last_back_bias = 0.0;
     }
-    neuron->z = 0.0;
-    neuron->bias = 0.0;
-    neuron->back_bias = 0.0;
-    neuron->last_back_bias = 0.0;
     return neuron;
 }
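With this guard, callers are expected to pass nb_sortants == 0 for output-layer neurons so that creation and deletion stay symmetric. A hypothetical usage sketch; the header name is a placeholder for wherever Neuron and creer_neuron are declared:

#include <stdlib.h>
// #include "neuron.h"   // hypothetical header name declaring Neuron and creer_neuron

int main(void) {
    Neuron* hidden = creer_neuron(16); // hidden layer: weight arrays allocated, weights set to 0.5
    Neuron* output = creer_neuron(0);  // output layer: only the struct itself is allocated

    free(output);                      // no weight arrays to release
    free(hidden->weights);
    free(hidden->back_weights);
    free(hidden->last_back_weights);
    free(hidden);
    return 0;
}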