mirror of https://github.com/augustin64/projet-tipe
synced 2025-01-24 07:36:24 +01:00

Add cnn knuth shuffle

This commit is contained in:
parent 4057982adc
commit 963a4afcff
@@ -7,13 +7,13 @@
 * Updates the weights from data accumulated over several backpropagations
 * Then resets all d_weights to 0
 */
-void update_weights(Network* network, Network* d_network);
+void update_weights(Network* network, Network* d_network, int nb_images);

/*
 * Updates the biases from data accumulated over several backpropagations
 * Then resets all d_bias to 0
 */
-void update_bias(Network* network, Network* d_network);
+void update_bias(Network* network, Network* d_network, int nb_images);

/*
 * Resets all weight backpropagation data to 0
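The new nb_images parameter means the accumulated gradients are averaged over the images of a batch rather than applied as a raw sum. A minimal sketch of that update rule on hypothetical flat arrays (w and d_w are stand-ins for the project's nested Network buffers, not its actual API):

    #include <stddef.h>

    // Each weight moves by (learning_rate / nb_images) times the gradient
    // accumulated over nb_images backpropagations; the accumulator is then
    // reset to 0 for the next batch, matching the comments above.
    void update_weights_flat(float* w, float* d_w, size_t len,
                             float learning_rate, int nb_images) {
        for (size_t i = 0; i < len; i++) {
            w[i] -= (learning_rate / nb_images) * d_w[i];
            d_w[i] = 0.0f;
        }
    }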
@@ -8,6 +8,16 @@
#ifndef DEF_UTILS_H
#define DEF_UTILS_H

+/*
+ * Swaps two elements of an array
+ */
+void swap(int* tab, int i, int j);
+
+/*
+ * Shuffles an array using the Knuth shuffle
+ */
+void knuth_shuffle(int* tab, int n);
+
/*
 * Checks whether two networks are equal
 */
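These two helpers let the training loop visit the dataset in a fresh random order each epoch without moving the images themselves: an index array is filled with the identity permutation once, reshuffled at the start of every epoch, and used as a level of indirection when reading images and labels. A usage sketch (only swap and knuth_shuffle come from this header; the rest is illustrative, and the program must be linked against the file that defines them):

    #include <stdlib.h>

    void swap(int* tab, int i, int j);      // declared in this header
    void knuth_shuffle(int* tab, int n);    // declared in this header

    void one_epoch_in_random_order(int nb_images) {
        // rand() is assumed to have been seeded once with srand() at program start.
        int* index = (int*)malloc(sizeof(int) * nb_images);
        for (int i = 0; i < nb_images; i++) {
            index[i] = i;                   // identity permutation
        }

        knuth_shuffle(index, nb_images);    // new visiting order for this epoch

        for (int i = 0; i < nb_images; i++) {
            int img = index[i];             // read images[img] and labels[img] here
            (void)img;
        }

        free(index);
    }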
@@ -32,6 +32,7 @@ void* train_thread(void* parameters) {

    int*** images = param->images;
    int* labels = (int*)param->labels;
+   int* index = param->index;

    int width = param->width;
    int height = param->height;
@@ -41,31 +42,31 @@ void* train_thread(void* parameters) {
    float accuracy = 0.;
    for (int i=start; i < start+nb_images; i++) {
        if (dataset_type == 0) {
-           write_image_in_network_32(images[i], height, width, network->input[0][0]);
+           write_image_in_network_32(images[index[i]], height, width, network->input[0][0]);
            forward_propagation(network);
            maxi = indice_max(network->input[network->size-1][0][0], 10);
            backward_propagation(network, labels[i]);

-           if (maxi == labels[i]) {
+           if (maxi == labels[index[i]]) {
                accuracy += 1.;
            }
        } else {
-           if (!param->dataset->images[i]) {
-               image = loadJpegImageFile(param->dataset->fileNames[i]);
-               param->dataset->images[i] = image->lpData;
+           if (!param->dataset->images[index[i]]) {
+               image = loadJpegImageFile(param->dataset->fileNames[index[i]]);
+               param->dataset->images[index[i]] = image->lpData;
                free(image);
            }
-           write_image_in_network_260(param->dataset->images[i], height, width, network->input[0]);
+           write_image_in_network_260(param->dataset->images[index[i]], height, width, network->input[0]);
            forward_propagation(network);
            maxi = indice_max(network->input[network->size-1][0][0], param->dataset->numCategories);
-           backward_propagation(network, param->dataset->labels[i]);
+           backward_propagation(network, param->dataset->labels[index[i]]);

-           if (maxi == (int)param->dataset->labels[i]) {
+           if (maxi == (int)param->dataset->labels[index[i]]) {
                accuracy += 1.;
            }

-           free(param->dataset->images[i]);
-           param->dataset->images[i] = NULL;
+           free(param->dataset->images[index[i]]);
+           param->dataset->images[index[i]] = NULL;
        }
    }
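Every access that used to go through i now goes through index[i], so the image, the label used for the accuracy check, and (in the JPEG branch) the label passed to backward_propagation all refer to the same shuffled element. In the MNIST branch, backward_propagation(network, labels[i]) appears to still be indexed with i, so once the order is shuffled the image fed forward (images[index[i]]) and the label used for backpropagation would no longer match; the JPEG branch uses index[i] throughout. A loop-body sketch of the fully consistent MNIST path, using the same names as the diff above:

    // Sketch only: the MNIST branch with every access going through the shuffled index.
    int img = index[i];
    write_image_in_network_32(images[img], height, width, network->input[0][0]);
    forward_propagation(network);
    maxi = indice_max(network->input[network->size-1][0][0], 10);
    backward_propagation(network, labels[img]);   // same element as the image fed forward

    if (maxi == labels[img]) {
        accuracy += 1.;
    }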
@@ -85,9 +86,10 @@ void train(int dataset_type, char* images_file, char* labels_file, char* data_di
    int nb_images_total_remaining; // Images remaining in a batch
    int batches_epoques; // Batches per epoch

-   int*** images;
-   unsigned int* labels;
-   jpegDataset* dataset;
+   int*** images; // Images as an array of arrays of arrays of pixels (grayscale, MNIST)
+   unsigned int* labels; // Labels associated with the images of the MNIST dataset
+   jpegDataset* dataset; // Data structure describing a JPEG image dataset
+   int* shuffle_index; // shuffle_index[i] contains the new index of the element that was at position i before shuffling

    if (dataset_type == 0) { // MNIST type
        // Loading the images of the MNIST dataset
@@ -109,7 +111,12 @@ void train(int dataset_type, char* images_file, char* labels_file, char* data_di
    }

    // Network initialisation
-   Network* network = create_network_lenet5(0.01, 0, TANH, GLOROT, input_dim, input_depth);
+   Network* network = create_network_lenet5(1, 0, TANH, GLOROT, input_dim, input_depth);
+
+   shuffle_index = (int*)malloc(sizeof(int)*nb_images_total);
+   for (int i=0; i < nb_images_total; i++) {
+       shuffle_index[i] = i;
+   }

    #ifdef USE_MULTITHREADING
    int nb_remaining_images; // Number of images left to dispatch for one set of threads
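Two related changes land here: the learning rate passed to create_network_lenet5 goes from 0.01 to 1, and, as in the hunks above, update_weights/update_bias now divide by nb_images. The per-batch step is therefore learning_rate / nb_images times the summed gradient, i.e. the learning rate is now applied to an averaged gradient instead of a raw sum. A toy calculation with purely hypothetical numbers, just to make the scaling concrete:

    #include <stdio.h>

    int main(void) {
        // Hypothetical numbers, only to illustrate the scaling.
        float sum_dw = 32.0f;   // gradient accumulated over one batch
        int nb_images = 64;     // batch size

        // Old convention: the learning rate (0.01) multiplies the summed gradient.
        float step_old = 0.01f * sum_dw;                // 0.32

        // New convention: the learning rate (1) multiplies the averaged gradient.
        float step_new = (1.0f / nb_images) * sum_dw;   // 0.5

        printf("old step %.2f, new step %.2f\n", step_old, step_new);
        return 0;
    }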
@@ -139,6 +146,7 @@ void train(int dataset_type, char* images_file, char* labels_file, char* data_di
            param->labels = NULL;
        }
        param->nb_images = BATCHES / nb_threads;
+       param->index = shuffle_index;
    }
    #else
    // Creating the parameters passed to the single
@@ -163,6 +171,7 @@ void train(int dataset_type, char* images_file, char* labels_file, char* data_di
        train_params->labels = NULL;
    }
    train_params->nb_images = BATCHES;
+   train_params->index = shuffle_index;
    #endif

    for (int i=0; i < epochs; i++) {
@@ -172,6 +181,7 @@ void train(int dataset_type, char* images_file, char* labels_file, char* data_di
        // of multi-threading, because each copy of the initial network will be slightly different
        // and will therefore give different results on the same images.
        accuracy = 0.;
+       knuth_shuffle(shuffle_index, nb_images_total);
        batches_epoques = div_up(nb_images_total, BATCHES);
        nb_images_total_remaining = nb_images_total;
        for (int j=0; j < batches_epoques; j++) {
@@ -201,14 +211,16 @@ void train(int dataset_type, char* images_file, char* labels_file, char* data_di
                pthread_join( tid[k], NULL );
                accuracy += train_parameters[k]->accuracy / (float) nb_images_total;

-               update_weights(network, train_parameters[k]->network);
-               update_bias(network, train_parameters[k]->network);
+               update_weights(network, train_parameters[k]->network, train_parameters[k]->nb_images);
+               update_bias(network, train_parameters[k]->network, train_parameters[k]->nb_images);
                free_network(train_parameters[k]->network);
            }
            current_accuracy = accuracy * nb_images_total/((j+1)*BATCHES);
            printf("\rThreads [%d]\tÉpoque [%d/%d]\tImage [%d/%d]\tAccuracy: "YELLOW"%0.1f%%"RESET" ", nb_threads, i, epochs, BATCHES*(j+1), nb_images_total, current_accuracy*100);
            fflush(stdout);
            #else
            (void)nb_images_total_remaining; // Just to silence a warning

            train_params->start = j*BATCHES;

            train_thread((void*)train_params);
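A note on how the running accuracy shown here is normalised: each thread's accuracy field is a count of correct predictions, so the accumulator collects correct / nb_images_total; multiplying back by nb_images_total and dividing by (j+1)*BATCHES, the number of images processed so far this epoch, gives the fraction of those images classified correctly. A quick self-contained check with made-up numbers:

    #include <stdio.h>

    int main(void) {
        // All values below are hypothetical and only illustrate the normalisation.
        int nb_images_total = 60000;   // size of the dataset
        int batch_size = 100;          // plays the role of BATCHES
        int j = 2;                     // the third batch of the epoch just finished
        int correct_so_far = 240;      // correct predictions over the 300 images seen

        float accuracy = correct_so_far / (float) nb_images_total;            // as accumulated in the loop
        float current_accuracy = accuracy * nb_images_total / ((j + 1) * batch_size);

        printf("%.0f%%\n", current_accuracy * 100);   // prints 80%
        return 0;
    }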
@@ -216,8 +228,8 @@ void train(int dataset_type, char* images_file, char* labels_file, char* data_di
            accuracy += train_params->accuracy / (float) nb_images_total;
            current_accuracy = accuracy * nb_images_total/((j+1)*BATCHES);

-           update_weights(network, network);
-           update_bias(network, network);
+           update_weights(network, network, train_params->nb_images);
+           update_bias(network, network, train_params->nb_images);

            printf("\rÉpoque [%d/%d]\tImage [%d/%d]\tAccuracy: "YELLOW"%0.1f%%"RESET" ", i, epochs, BATCHES*(j+1), nb_images_total, current_accuracy*100);
            fflush(stdout);
@@ -230,6 +242,7 @@ void train(int dataset_type, char* images_file, char* labels_file, char* data_di
    #endif
        write_network(out, network);
    }
+   free(shuffle_index);
    free_network(network);
    #ifdef USE_MULTITHREADING
    free(tid);
@@ -3,7 +3,7 @@
#include "include/update.h"
#include "include/struct.h"

-void update_weights(Network* network, Network* d_network) {
+void update_weights(Network* network, Network* d_network, int nb_images) {
    int n = network->size;
    int input_depth, input_width, output_depth, output_width, k_size;
    Kernel* k_i;
@@ -24,7 +24,7 @@ void update_weights(Network* network, Network* d_network) {
            for (int b=0; b<output_depth; b++) {
                for (int c=0; c<k_size; c++) {
                    for (int d=0; d<k_size; d++) {
-                       cnn->w[a][b][c][d] -= network->learning_rate * d_cnn->d_w[a][b][c][d];
+                       cnn->w[a][b][c][d] -= (network->learning_rate/nb_images) * d_cnn->d_w[a][b][c][d];
                        d_cnn->d_w[a][b][c][d] = 0;
                    }
                }
@@ -36,7 +36,7 @@ void update_weights(Network* network, Network* d_network) {
        Kernel_nn* d_nn = dk_i->nn;
        for (int a=0; a<input_width; a++) {
            for (int b=0; b<output_width; b++) {
-               nn->weights[a][b] -= network->learning_rate * d_nn->d_weights[a][b];
+               nn->weights[a][b] -= (network->learning_rate/nb_images) * d_nn->d_weights[a][b];
                d_nn->d_weights[a][b] = 0;
            }
        }
@@ -46,7 +46,7 @@ void update_weights(Network* network, Network* d_network) {
        int input_size = input_width*input_width*input_depth;
        for (int a=0; a<input_size; a++) {
            for (int b=0; b<output_width; b++) {
-               nn->weights[a][b] -= network->learning_rate * d_nn->d_weights[a][b];
+               nn->weights[a][b] -= (network->learning_rate/nb_images) * d_nn->d_weights[a][b];
                d_nn->d_weights[a][b] = 0;
            }
        }
@@ -57,7 +57,7 @@ void update_weights(Network* network, Network* d_network) {
    }
}

-void update_bias(Network* network, Network* d_network) {
+void update_bias(Network* network, Network* d_network, int nb_images) {

    int n = network->size;
    int output_width, output_depth;
@@ -75,7 +75,7 @@ void update_bias(Network* network, Network* d_network) {
            for (int a=0; a<output_depth; a++) {
                for (int b=0; b<output_width; b++) {
                    for (int c=0; c<output_width; c++) {
-                       cnn->bias[a][b][c] -= network->learning_rate * d_cnn->d_bias[a][b][c];
+                       cnn->bias[a][b][c] -= (network->learning_rate/nb_images) * d_cnn->d_bias[a][b][c];
                        d_cnn->d_bias[a][b][c] = 0;
                    }
                }
@@ -84,7 +84,7 @@ void update_bias(Network* network, Network* d_network) {
        Kernel_nn* nn = k_i->nn;
        Kernel_nn* d_nn = dk_i->nn;
        for (int a=0; a<output_width; a++) {
-           nn->bias[a] -= network->learning_rate * d_nn->d_bias[a];
+           nn->bias[a] -= (network->learning_rate/nb_images) * d_nn->d_bias[a];
            d_nn->d_bias[a] = 0;
        }
    } else { // Pooling
@@ -17,6 +17,18 @@ if (network1->var != network2->var) {
    return false; \
}

+void swap(int* tab, int i, int j) {
+    int tmp = tab[i];
+    tab[i] = tab[j];
+    tab[j] = tmp;
+}
+
+void knuth_shuffle(int* tab, int n) {
+    for(int i=1; i < n; i++) {
+        swap(tab, i, rand() %i);
+    }
+}
+
bool equals_networks(Network* network1, Network* network2) {
    int output_dim;
    checkEquals(size, "size", -1);
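One detail in the shuffle above: at step i it swaps with rand() % i, which picks j in 0..i-1, so the element at position i can never stay where it is at its own step. That is effectively Sattolo's variant, which only generates cyclic permutations; it still mixes the training order, but it is not uniform over all permutations. The textbook Fisher-Yates (Knuth) shuffle draws j in 0..i inclusive; a minimal uniform version for comparison (a sketch, not the project's code, and it assumes rand() has been seeded once with srand):

    #include <stdlib.h>

    // Uniform Fisher-Yates / Knuth shuffle: at each step the element may also stay in place.
    void knuth_shuffle_uniform(int* tab, int n) {
        for (int i = n - 1; i > 0; i--) {
            int j = rand() % (i + 1);   // j in 0..i, inclusive
            int tmp = tab[i];
            tab[i] = tab[j];
            tab[j] = tmp;
        }
    }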
@@ -245,6 +245,7 @@ void train(int epochs, int layers, int neurons, char* recovery, char* image_file
    for (int j=0; j < nb_threads; j++) {
        free(train_parameters[j]);
    }
+   free(shuffle_indices);
    free(train_parameters);
    // Free the memory areas used specifically on the CPU
    free(tid);