Add main.c & train.c

augustin64 2022-10-01 17:53:14 +02:00
parent e3099106c7
commit dfe7fc7731
12 changed files with 438 additions and 45 deletions

.gitignore

@@ -6,4 +6,5 @@
 .test-cache
 .vscode
 *.bin
 app-secret
+data/50States10K

.vscode/launch.json

@@ -55,11 +55,41 @@
             "preLaunchTask": "build-mnist"
         },
         {
-            "name": "cnn/main",
+            "name": "cnn/main train",
             "type": "cppdbg",
             "request": "launch",
             "program": "${workspaceFolder}/out/cnn_main",
-            "args": [],
+            "args": [
+                "train",
+                "--dataset", "mnist",
+                "--images","data/mnist/train-images-idx3-ubyte",
+                "--labels","data/mnist/train-labels-idx1-ubyte",
+                "--epochs", "10"
+            ],
+            "stopAtEntry": true,
+            "cwd": "${workspaceFolder}",
+            "environment": [],
+            "externalConsole": false,
+            "MIMode": "gdb",
+            "miDebuggerPath": "/usr/bin/gdb",
+            "setupCommands": [
+                {
+                    "description": "Enable pretty-printing for gdb",
+                    "text": "-enable-pretty-printing",
+                    "ignoreFailures": false
+                }
+            ],
+            "preLaunchTask": "build-cnn"
+        },
+        {
+            "name": "cnn/main dev-conv",
+            "type": "cppdbg",
+            "request": "launch",
+            "program": "${workspaceFolder}/out/cnn_main",
+            "args": [
+                "dev",
+                "--conv"
+            ],
             "stopAtEntry": true,
             "cwd": "${workspaceFolder}",
             "environment": [],


@@ -136,19 +136,4 @@ float* generate_wanted_output(float wanted_number) {
         }
     }
     return wanted_output;
-}
-
-int main() {
-    Network* network = create_network_lenet5(0, TANH, GLOROT_NORMAL);
-    for (int i=0; i<8; i++) {
-        printf("%d %d \n", network->depth[i], network->width[i]);
-    }
-    printf("Kernel:\n");
-    for (int i=0; i<7; i++) {
-        if (network->kernel[i]->cnn!=NULL) {
-            printf("%d -> %d %d\n", i, network->kernel[i]->cnn->rows, network->kernel[i]->cnn->k_size);
-        }
-    }
-    forward_propagation(network);
-    return 0;
 }


@@ -28,8 +28,8 @@ Network* create_network(int max_size, int dropout, int initialisation, int input
     return network;
 }

-Network* create_network_lenet5(int dropout, int activation, int initialisation) {
-    Network* network = create_network(8, dropout, initialisation, 32, 1);
+Network* create_network_lenet5(int dropout, int activation, int initialisation, int input_dim, int input_depth) {
+    Network* network = create_network(8, dropout, initialisation, input_dim, input_depth);
     network->kernel[0]->activation = activation;
     network->kernel[0]->linearisation = 0;
     add_convolution(network, 1, 32, 6, 28, activation);


@@ -12,7 +12,7 @@ Network* create_network(int max_size, int dropout, int initialisation, int input
 /*
 * Returns a network following the LeNet5 architecture
 */
-Network* create_network_lenet5(int dropout, int activation, int initialisation);
+Network* create_network_lenet5(int dropout, int activation, int initialisation, int input_dim, int input_depth);

 /*
 * Creates and allocates memory for a layer of type input cube

src/cnn/include/main.h (new file)

@@ -0,0 +1,14 @@
#ifndef DEF_MAIN_H
#define DEF_MAIN_H

/*
* Prints the available arguments for the user.
*/
void help(char* call);

/*
* Function used for development of the convolutional network
*/
void dev_conv();

#endif

src/cnn/include/train.h (new file)

@@ -0,0 +1,35 @@
#ifndef DEF_TRAIN_H
#define DEF_TRAIN_H

#define EPOCHS 10
#define BATCHES 100
#define USE_MULTITHREADING

/*
* Structure given as an argument to the 'train_thread' function
*/
typedef struct TrainParameters {
    Network* network;
    int*** images;
    unsigned int* labels;
    int width;
    int height;
    int dataset_type;
    char* data_dir;
    int start;
    int nb_images;
    float accuracy;
} TrainParameters;

/*
* Auxiliary training function intended to be run on several threads at once
*/
void* train_thread(void* parameters);

/*
* Main training function of the convolutional neural network
*/
void train(int dataset_type, char* images_file, char* labels_file, char* data_dir, int epochs, char* out);

#endif

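A rough worked example of what these constants imply (assuming MNIST's 60,000 training images and, say, an 8-thread machine, neither of which is fixed by this commit): each epoch of the training loop in train.c below runs 60000 / 100 = 600 batches, and with USE_MULTITHREADING each batch of BATCHES = 100 images is split into slices of 100 / 8 = 12 images per thread, with the last thread picking up the remaining 100 - 7*12 = 16 images.
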
src/cnn/main.c (new file)

@@ -0,0 +1,142 @@
#include <stdlib.h>
#include <stdio.h>
#include <string.h>
#include <math.h>
#include <float.h>

#include "train.c"
#include "include/cnn.h"
#include "../colors.h"
#include "include/main.h"


void help(char* call) {
    printf("Usage: %s ( train | dev ) [OPTIONS]\n\n", call);
    printf("OPTIONS:\n");
    printf("\tdev:\n");
    printf("\t\t--conv | -c\tTester la fonction dev_conv().\n");
    printf("\ttrain:\n");
    printf("\t\t--dataset | -d (mnist|jpg)\tFormat du set de données.\n");
    printf("\t(mnist)\t--images | -i [FILENAME]\tFichier contenant les images.\n");
    printf("\t(mnist)\t--labels | -l [FILENAME]\tFichier contenant les labels.\n");
    printf("\t (jpg) \t--datadir | -dd [FOLDER]\tDossier contenant les images.\n");
    printf("\t\t--epochs | -e [int]\t\tNombre d'époques.\n");
    printf("\t\t--out | -o [FILENAME]\tFichier où écrire le réseau de neurones.\n");
}

void dev_conv() {
    Network* network = create_network_lenet5(0, TANH, GLOROT_NORMAL, 32, 1);
    for (int i=0; i < 8; i++) {
        printf("%d %d \n", network->depth[i], network->width[i]);
    }
    printf("Kernel:\n");
    for (int i=0; i<7; i++) {
        if (network->kernel[i]->cnn!=NULL) {
            printf("%d -> %d %d\n", i, network->kernel[i]->cnn->rows, network->kernel[i]->cnn->k_size);
        }
    }
    forward_propagation(network);
}

int main(int argc, char* argv[]) {
    if (argc < 2) {
        printf("Pas d'action spécifiée\n");
        help(argv[0]);
        return 1;
    }
    if (! strcmp(argv[1], "dev")) {
        int option = 0;
        // 0 for the dev_conv() function
        int i = 2;
        while (i < argc) {
            // Using a switch would probably be more elegant
            if ((! strcmp(argv[i], "--conv"))||(! strcmp(argv[i], "-c"))) {
                option = 0;
                i++;
            } else {
                printf("Option choisie inconnue: %s\n", argv[i]);
                i++;
            }
        }
        if (option == 0) {
            dev_conv();
            return 0;
        }
        printf("Option choisie inconnue: dev %d\n", option);
        return 1;
    }
    if (! strcmp(argv[1], "train")) {
        char* dataset = NULL;
        char* images_file = NULL;
        char* labels_file = NULL;
        char* data_dir = NULL;
        int epochs = EPOCHS;
        int dataset_type = 0;
        char* out = NULL;

        int i = 2;
        while (i < argc) {
            if ((! strcmp(argv[i], "--dataset"))||(! strcmp(argv[i], "-d"))) {
                dataset = argv[i+1];
                i += 2;
            }
            else if ((! strcmp(argv[i], "--images"))||(! strcmp(argv[i], "-i"))) {
                images_file = argv[i+1];
                i += 2;
            }
            else if ((! strcmp(argv[i], "--labels"))||(! strcmp(argv[i], "-l"))) {
                labels_file = argv[i+1];
                i += 2;
            }
            else if ((! strcmp(argv[i], "--datadir"))||(! strcmp(argv[i], "-dd"))) {
                data_dir = argv[i+1];
                i += 2;
            }
            else if ((! strcmp(argv[i], "--epochs"))||(! strcmp(argv[i], "-e"))) {
                epochs = strtol(argv[i+1], NULL, 10);
                i += 2;
            }
            else if ((! strcmp(argv[i], "--out"))||(! strcmp(argv[i], "-o"))) {
                out = argv[i+1];
                i += 2;
            } else {
                printf("Option choisie inconnue: %s\n", argv[i]);
                i++;
            }
        }
        if ((dataset!=NULL) && !strcmp(dataset, "mnist")) {
            dataset_type = 0;
            if (!images_file) {
                printf("Pas de fichier d'images spécifié\n");
                return 1;
            }
            if (!labels_file) {
                printf("Pas de fichier de labels spécifié\n");
                return 1;
            }
        }
        else if ((dataset!=NULL) && !strcmp(dataset, "jpg")) {
            dataset_type = 1;
            if (!data_dir) {
                printf("Pas de dossier de données spécifié.\n");
                return 1;
            }
        }
        else {
            printf("Pas de type de dataset spécifié.\n");
            return 1;
        }
        if (!out) {
            printf("Pas de fichier de sortie spécifié, défaut: out.bin\n");
            out = "out.bin";
        }
        train(dataset_type, images_file, labels_file, data_dir, epochs, out);
        return 0;
    }
    printf("Option choisie non reconnue: %s\n", argv[1]);
    help(argv[0]);
    return 1;
}

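As a usage sketch (assuming the binary is built to out/cnn_main, as in the launch configuration above), training on MNIST would be started with:
out/cnn_main train --dataset mnist --images data/mnist/train-images-idx3-ubyte --labels data/mnist/train-labels-idx1-ubyte --epochs 10 --out out.bin
and the development entry point with: out/cnn_main dev --conv
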
src/cnn/train.c (new file)

@@ -0,0 +1,181 @@
#include <stdlib.h>
#include <stdio.h>
#include <float.h>
#include <pthread.h>
#include <sys/sysinfo.h>

#include "../mnist/mnist.c"
#include "../colors.h"
#include "neuron_io.c"
#include "cnn.c"
#include "include/train.h"


void* train_thread(void* parameters) {
    TrainParameters* param = (TrainParameters*)parameters;
    Network* network = param->network;
    int*** images = param->images;
    unsigned int* labels = param->labels;
    int width = param->width;
    int height = param->height;
    int dataset_type = param->dataset_type;
    int start = param->start;
    int nb_images = param->nb_images;
    float accuracy = 0.;

    for (int i=start; i < start+nb_images; i++) {
        if (dataset_type == 0) {
            // TODO write_image_in_network_32(images[i], height, width, network_input);
            //forward_propagation(network);
            //backward_propagation(network, labels[i]);
            // TODO get_indice_max(network last layer)
            // TODO if indice_max == labels[i] then accuracy += 1.
        } else {
            printf_error("Dataset de type JPG non implémenté\n");
            exit(1);
        }
    }

    param->accuracy = accuracy;
    return NULL;
}


void train(int dataset_type, char* images_file, char* labels_file, char* data_dir, int epochs, char* out) {
    int input_dim = -1;
    int input_depth = -1;
    float accuracy;
    int nb_images_total;
    int nb_remaining_images;
    int*** images;
    unsigned int* labels;

    if (dataset_type == 0) { // MNIST type
        // Load the images of the MNIST dataset
        int* parameters = read_mnist_images_parameters(images_file);
        nb_images_total = parameters[0];
        free(parameters);

        images = read_mnist_images(images_file);
        labels = read_mnist_labels(labels_file);

        input_dim = 32;
        input_depth = 1;
    } else { // TODO JPG type
        input_dim = 256;
        input_depth = 3;
        nb_images_total = 0;
        printf_error("Dataset de type jpg non-implémenté.\n");
        exit(1);
    }

    // Network initialisation
    Network* network = create_network_lenet5(0, TANH, GLOROT_NORMAL, input_dim, input_depth);

#ifdef USE_MULTITHREADING
    // Get the number of available threads
    int nb_threads = get_nprocs();
    pthread_t *tid = (pthread_t*)malloc(nb_threads * sizeof(pthread_t));

    // Create the parameters given to each thread when multi-threading is used
    TrainParameters** train_parameters = (TrainParameters**)malloc(sizeof(TrainParameters*)*nb_threads);
    TrainParameters* param;

    for (int k=0; k < nb_threads; k++) {
        train_parameters[k] = (TrainParameters*)malloc(sizeof(TrainParameters));
        param = train_parameters[k];
        param->dataset_type = dataset_type;
        if (dataset_type == 0) {
            param->images = images;
            param->labels = labels;
            param->data_dir = NULL;
            param->width = 28;
            param->height = 28;
        } else {
            param->data_dir = data_dir;
            param->images = NULL;
            param->labels = NULL;
        }
        param->nb_images = BATCHES / nb_threads;
    }
#else
    // Create the parameters given to the single thread when
    // multi-threading is not used.
    // This is mainly useful for debugging, where the use of threads
    // quickly makes things more complicated than they really are.
    TrainParameters* train_params = (TrainParameters*)malloc(sizeof(TrainParameters));

    train_params->network = network;
    train_params->dataset_type = dataset_type;
    if (dataset_type == 0) {
        train_params->images = images;
        train_params->labels = labels;
        train_params->data_dir = NULL;
    } else {
        train_params->data_dir = data_dir;
        train_params->images = NULL;
        train_params->labels = NULL;
    }
    train_params->nb_images = BATCHES;
#endif

    for (int i=0; i < epochs; i++) {
        // The accuracy variable only gives an ESTIMATE of the success rate
        // of the network's training: with multi-threading it is not an exact
        // value, because each copy of the initial network will be slightly
        // different and will therefore give different results on the same images.
        accuracy = 0.;
        for (int j=0; j < nb_images_total / BATCHES; j++) {
            nb_remaining_images = BATCHES;
#ifdef USE_MULTITHREADING
            for (int k=0; k < nb_threads; k++) {
                if (k == nb_threads-1) {
                    train_parameters[k]->nb_images = nb_remaining_images;
                    nb_remaining_images = 0;
                } else {
                    nb_remaining_images -= BATCHES / nb_threads;
                }
                // TODO train_parameters[k]->network = copy_network(network);
                train_parameters[k]->start = BATCHES*j + (BATCHES / nb_threads)*k;
                pthread_create( &tid[k], NULL, train_thread, (void*) train_parameters[k]);
            }
            for (int k=0; k < nb_threads; k++) {
                // TODO join the threads and display progress
                // Wait for each thread to terminate, one by one
                pthread_join( tid[k], NULL );
                accuracy += train_parameters[k]->accuracy / (float) nb_images_total;
                // TODO patch_network(network, train_parameters[k]->network, train_parameters[k]->nb_images);
                // TODO free_network(train_parameters[k]->network);
            }
            printf("\rThreads [%d]\tÉpoque [%d/%d]\tImage [%d/%d]\tAccuracy: %0.1f%%", nb_threads, i, epochs, BATCHES*(j+1), nb_images_total, accuracy*100);
#else
            train_params->start = j*BATCHES;
            train_thread((void*)train_params);
            accuracy += train_params->accuracy / (float) nb_images_total;
            printf("\rÉpoque [%d/%d]\tImage [%d/%d]\tAccuracy: %0.1f%%", i, epochs, BATCHES*(j+1), nb_images_total, accuracy*100);
#endif
        }
#ifdef USE_MULTITHREADING
        printf("\rThreads [%d]\tÉpoque [%d/%d]\tImage [%d/%d]\tAccuracy: %0.1f%%\n", nb_threads, i, epochs, nb_images_total, nb_images_total, accuracy*100);
#else
        printf("\rÉpoque [%d/%d]\tImage [%d/%d]\tAccuracy: %0.1f%%\n", i, epochs, nb_images_total, nb_images_total, accuracy*100);
#endif
        write_network(out, network);
    }

    // TODO free_network(network)
#ifdef USE_MULTITHREADING
    free(tid);
#else
    free(train_params);
#endif
}

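Since much of the per-image work in train_thread is still marked TODO, the thread-splitting skeleton is the substance of this file. The following standalone sketch isolates that pattern (the WorkerParameters struct, the worker body, and the reuse of BATCHES are illustrative stand-ins, not part of the commit): each worker handles a slice of a batch, writes its partial accuracy back into its parameter block, and the main thread aggregates the results after pthread_join.

/* Minimal standalone sketch of the thread-splitting pattern used by train().
 * Compile with -pthread; get_nprocs() is the same GNU/Linux call used above
 * to size the thread pool. */
#include <stdio.h>
#include <stdlib.h>
#include <pthread.h>
#include <sys/sysinfo.h>

#define BATCHES 100

typedef struct WorkerParameters {
    int start;      // first image index handled by this worker
    int nb_images;  // number of images handled by this worker
    float accuracy; // partial result written back by the worker
} WorkerParameters;

void* worker(void* parameters) {
    WorkerParameters* param = (WorkerParameters*)parameters;
    // Stand-in for the real work (forward/backward propagation per image)
    param->accuracy = (float)param->nb_images;
    return NULL;
}

int main() {
    int nb_threads = get_nprocs();
    pthread_t* tid = (pthread_t*)malloc(nb_threads * sizeof(pthread_t));
    WorkerParameters* params = (WorkerParameters*)malloc(nb_threads * sizeof(WorkerParameters));
    int nb_remaining_images = BATCHES;
    float accuracy = 0.;

    for (int k = 0; k < nb_threads; k++) {
        // Every thread gets an equal slice; the last one takes the remainder
        params[k].nb_images = (k == nb_threads - 1) ? nb_remaining_images : BATCHES / nb_threads;
        nb_remaining_images -= params[k].nb_images;
        params[k].start = (BATCHES / nb_threads) * k;
        pthread_create(&tid[k], NULL, worker, (void*)&params[k]);
    }
    for (int k = 0; k < nb_threads; k++) {
        // Wait for each worker, then fold its partial accuracy into the total
        pthread_join(tid[k], NULL);
        accuracy += params[k].accuracy / (float)BATCHES;
    }
    printf("Threads [%d]\tAccuracy: %0.1f%%\n", nb_threads, accuracy * 100);

    free(params);
    free(tid);
    return 0;
}
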

@@ -34,7 +34,7 @@ void write_image_in_network(int** image, Network* network, int height, int width
 * Sub-function of 'train' assigned to a thread
 * parameters: see the 'TrainParameters' structure
 */
-void* train_images(void* parameters);
+void* train_thread(void* parameters);

 /*
 * Training function for the network


@@ -20,7 +20,7 @@
 #endif

 /*
-* Structure given as an argument to the 'train_images' function
+* Structure given as an argument to the 'train_thread' function
 */
 typedef struct TrainParameters {
     Network* network;
@@ -95,7 +95,7 @@ void write_image_in_network(int** image, Network* network, int height, int width
     }
 }

-void* train_images(void* parameters) {
+void* train_thread(void* parameters) {
     TrainParameters* param = (TrainParameters*)parameters;
     Network* network = param->network;
     Layer* last_layer = network->layers[network->nb_layers-1];
@@ -187,6 +187,7 @@ void train(int epochs, int layers, int neurons, char* recovery, char* image_file
     int nb_remaining_images = 0; // Number of images remaining in a batch
     int height = parameters[1];
     int width = parameters[2];
+    free(parameters);

     int*** images = read_mnist_images(image_file);
     unsigned int* labels = read_mnist_labels(label_file);
@@ -201,20 +202,22 @@ void train(int epochs, int layers, int neurons, char* recovery, char* image_file
     }
     TrainParameters** train_parameters = (TrainParameters**)malloc(sizeof(TrainParameters*)*nb_threads);
+    for (int j=0; j < nb_threads; j++) {
+        train_parameters[j] = (TrainParameters*)malloc(sizeof(TrainParameters));
+        train_parameters[j]->images = (int***)images;
+        train_parameters[j]->labels = (int*)labels;
+        train_parameters[j]->height = height;
+        train_parameters[j]->width = width;
+        train_parameters[j]->nb_images = BATCHES / nb_threads;
+    }

     for (int i=0; i < epochs; i++) {
         accuracy = 0.;
         for (int k=0; k < nb_images_total / BATCHES; k++) {
             nb_remaining_images = BATCHES;
             for (int j=0; j < nb_threads; j++) {
-                train_parameters[j] = (TrainParameters*)malloc(sizeof(TrainParameters));
                 train_parameters[j]->network = copy_network(network);
-                train_parameters[j]->images = (int***)images;
-                train_parameters[j]->labels = (int*)labels;
-                train_parameters[j]->nb_images = BATCHES / nb_threads;
                 train_parameters[j]->start = nb_images_total - BATCHES*(nb_images_total / BATCHES - k -1) - nb_remaining_images + start;
-                train_parameters[j]->height = height;
-                train_parameters[j]->width = width;
                 if (j == nb_threads-1) {
                     train_parameters[j]->nb_images = nb_remaining_images;
@@ -225,7 +228,7 @@ void train(int epochs, int layers, int neurons, char* recovery, char* image_file
                 // Create the threads on the GPU
 #else
                 // Create the threads on the CPU
-                pthread_create( &tid[j], NULL, train_images, (void*) train_parameters[j]);
+                pthread_create( &tid[j], NULL, train_thread, (void*) train_parameters[j]);
 #endif
             }
             for(int j=0; j < nb_threads; j++ ) {
@@ -240,11 +243,10 @@ void train(int epochs, int layers, int neurons, char* recovery, char* image_file
                 patch_delta(delta_network, train_parameters[j]->network, train_parameters[j]->nb_images);
                 patch_network(network, train_parameters[j]->network, train_parameters[j]->nb_images);
                 deletion_of_network(train_parameters[j]->network);
-                free(train_parameters[j]);
             }
-            printf("\rThread [%d/%d]\tÉpoque [%d/%d]\tImage [%d/%d]\tAccuracy: %0.1f%%", nb_threads, nb_threads, i, epochs, BATCHES*(k+1), nb_images_total, accuracy*100);
+            printf("\rThreads [%d]\tÉpoque [%d/%d]\tImage [%d/%d]\tAccuracy: %0.1f%%", nb_threads, i, epochs, BATCHES*(k+1), nb_images_total, accuracy*100);
         }
-        printf("\rThread [%d/%d]\tÉpoque [%d/%d]\tImage [%d/%d]\tAccuracy: %0.1f%%\n", nb_threads, nb_threads, i, epochs, nb_images_total, nb_images_total, accuracy*100);
+        printf("\rThreads [%d]\tÉpoque [%d/%d]\tImage [%d/%d]\tAccuracy: %0.1f%%\n", nb_threads, i, epochs, nb_images_total, nb_images_total, accuracy*100);
         write_network(out, network);
         if (delta != NULL)
             write_delta_network(delta, delta_network);
@@ -254,6 +256,9 @@ void train(int epochs, int layers, int neurons, char* recovery, char* image_file
         deletion_of_network(delta_network);
     }
     deletion_of_network(network);
+    for (int j=0; j < nb_threads; j++) {
+        free(train_parameters[j]);
+    }
     free(train_parameters);
 #ifdef __CUDACC__
     // Free the memory used on the GPU
@@ -271,6 +276,7 @@ float** recognize(char* modele, char* entree) {
     int nb_images = parameters[0];
     int height = parameters[1];
     int width = parameters[2];
+    free(parameters);

     int*** images = read_mnist_images(entree);
     float** results = (float**)malloc(sizeof(float*)*nb_images);
@@ -286,7 +292,6 @@ float** recognize(char* modele, char* entree) {
         }
     }
     deletion_of_network(network);
-    free(parameters);

     return results;
 }
@@ -371,7 +376,7 @@ int main(int argc, char* argv[]) {
     if (argc < 2) {
         printf("Pas d'action spécifiée\n");
         help(argv[0]);
-        exit(1);
+        return 1;
     }
     if (! strcmp(argv[1], "train")) {
         int epochs = EPOCHS;
@@ -425,11 +430,11 @@ int main(int argc, char* argv[]) {
         }
         if (! images) {
             printf("Pas de fichier d'images spécifié\n");
-            exit(1);
+            return 1;
         }
         if (! labels) {
             printf("Pas de fichier de labels spécifié\n");
-            exit(1);
+            return 1;
         }
         if (! out) {
             printf("Pas de fichier de sortie spécifié, default: out.bin\n");
@@ -437,7 +442,7 @@ int main(int argc, char* argv[]) {
         }
         // Training by sourcing neural_network.c
         train(epochs, layers, neurons, recovery, images, labels, out, delta, nb_images, start);
-        exit(0);
+        return 0;
     }
     if (! strcmp(argv[1], "recognize")) {
         char* in = NULL;
@@ -461,18 +466,18 @@ int main(int argc, char* argv[]) {
         }
         if (! in) {
             printf("Pas d'entrée spécifiée\n");
-            exit(1);
+            return 1;
         }
         if (! modele) {
             printf("Pas de modèle spécifié\n");
-            exit(1);
+            return 1;
         }
         if (! out) {
             out = "text";
         }
         print_recognize(modele, in, out);
         // Recognition then display of the data in the specified format
-        exit(0);
+        return 0;
     }
     if (! strcmp(argv[1], "test")) {
         char* modele = NULL;
@@ -496,7 +501,7 @@ int main(int argc, char* argv[]) {
             }
         }
         test(modele, images, labels, preview_fails);
-        exit(0);
+        return 0;
     }
     printf("Option choisie non reconnue: %s\n", argv[1]);
     help(argv[0]);


@@ -12,7 +12,7 @@
 int main() {
     printf("Création du réseau\n");
-    Network* network = create_network_lenet5(0, 3, 2);
+    Network* network = create_network_lenet5(0, 3, 2, 32, 1);
     printf("OK\n");
     printf("Écriture du réseau\n");