Mirror of https://github.com/augustin64/projet-tipe (synced 2025-01-23 23:26:25 +01:00)
English translation of variables
This commit is contained in: parent 55fb2ecf75, commit 698e72f56e
@@ -3,10 +3,10 @@
### 22 Avril 2022 [b30bedd](https://github.com/julienChemillier/TIPE/commit/b30bedd375e23ec7c2e5b10acf397a10885d8b5e)
Le réseau minimise la fonction d'erreur (différence entre sortie voulue et obtenue).
Cela donne comme résultat une précision de 10.2% en moyenne soit à peine mieux qu'aléatoire.
Chaque image renvoie les mêmes poids sur la dernière couche.
Voici un tableau comparant la fréquence d'apparition de chaque chiffre et l'activation associée sur la dernière couche :
Chaque image renvoie les mêmes poids sur la dernière layer.
Voici un tableau comparant la fréquence d'apparition de chaque chiffre et l'activation associée sur la dernière layer :

| Chiffre | Nombre d'occurences dans le set d'entraînement | Activation du neurone sortant | Rapport |
| Chiffre | Nombre d'occurences dans le set d'entraînement | Activation du neuron sortant | Rapport |
| --- | --- | --- | --- |
| 0 | 23692 | 0.483112 | 49040 |
| 1 | 26968 | 0.508133 | 53072 |
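For reference, the « fonction d'erreur » mentioned above corresponds to the squared-error sum computed by `erreur_sortie` (renamed `loss_computing` later in this commit); a minimal sketch, assuming a one-hot desired output y:

```latex
% z_i = activation of output neuron i ; y_i = 1 for the wanted digit, 0 otherwise
E = \sum_{i} (y_i - z_i)^2
```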
make.sh | 10
@@ -54,12 +54,12 @@ fi
if [[ $1 == "train" ]]; then
[[ -f "$OUT/main" ]] || $0 build
[[ $2 ]] || set -- "$1" "train"
[[ $3 == "-r" || $3 == "--recover" ]] && RECOVER="-r .cache/reseau.bin"
[[ $3 == "-r" || $3 == "--recover" ]] && RECOVER="-r .cache/network.bin"
mkdir -p .cache
"$OUT/main" train \
--images "data/mnist/$2-images-idx3-ubyte" \
--labels "data/mnist/$2-labels-idx1-ubyte" \
--out ".cache/reseau.bin" \
--out ".cache/network.bin" \
$RECOVER
exit 0
fi
@@ -68,9 +68,9 @@ if [[ $1 == "recognize" ]]; then
if [[ $2 ]]; then
[[ $3 ]] || set -- "$1" "$2" "text"
[[ -f "$OUT/main" ]] || $0 build
[[ -f ".cache/reseau.bin" ]] || $0 train train
[[ -f ".cache/network.bin" ]] || $0 train train
"$OUT/main" recognize \
--modele ".cache/reseau.bin" \
--modele ".cache/network.bin" \
--in "$2" \
--out "$3"
exit 0
@@ -82,7 +82,7 @@ fi

if [[ $1 == "webserver" ]]; then
[[ -f "$OUT/main" ]] || $0 build
[[ -f ".cache/reseau.bin" ]] || $0 train train
[[ -f ".cache/network.bin" ]] || $0 train train
FLASK_APP="src/webserver/app.py" flask run
exit 0
fi
src/mnist/main.c | 104
@@ -26,59 +26,59 @@ void help(char* call) {
printf("OPTIONS:\n");
printf("\ttrain:\n");
printf("\t\t--batches | -b [int]\tNombre de batches.\n");
printf("\t\t--couches | -c [int]\tNombres de couches.\n");
printf("\t\t--neurons | -n [int]\tNombre de neurones sur la première couche.\n");
printf("\t\t--layers | -c [int]\tNombres de layers.\n");
printf("\t\t--neurons | -n [int]\tNombre de neurons sur la première layer.\n");
printf("\t\t--recover | -r [FILENAME]\tRécupérer depuis un modèle existant.\n");
printf("\t\t--images | -i [FILENAME]\tFichier contenant les images.\n");
printf("\t\t--labels | -l [FILENAME]\tFichier contenant les labels.\n");
printf("\t\t--out | -o [FILENAME]\tFichier où écrire le réseau de neurones.\n");
printf("\t\t--out | -o [FILENAME]\tFichier où écrire le réseau de neurons.\n");
printf("\trecognize:\n");
printf("\t\t--modele | -m [FILENAME]\tFichier contenant le réseau de neurones.\n");
printf("\t\t--modele | -m [FILENAME]\tFichier contenant le réseau de neurons.\n");
printf("\t\t--in | -i [FILENAME]\tFichier contenant les images à reconnaître.\n");
printf("\t\t--out | -o (text|json)\tFormat de sortie.\n");
printf("\ttest:\n");
printf("\t\t--images | -i [FILENAME]\tFichier contenant les images.\n");
printf("\t\t--labels | -l [FILENAME]\tFichier contenant les labels.\n");
printf("\t\t--modele | -m [FILENAME]\tFichier contenant le réseau de neurones.\n");
printf("\t\t--modele | -m [FILENAME]\tFichier contenant le réseau de neurons.\n");
}


void ecrire_image_dans_reseau(int** image, Reseau* reseau, int height, int width) {
void write_image_in_network(int** image, Network* network, int height, int width) {
for (int i=0; i < height; i++) {
for (int j=0; j < width; j++) {
reseau->couches[0]->neurones[i*height+j]->z = (float)image[i][j] / 255.0f;
network->layers[0]->neurons[i*height+j]->z = (float)image[i][j] / 255.0f;
}
}
}


void train(int batches, int couches, int neurons, char* recovery, char* image_file, char* label_file, char* out) {
void train(int batches, int layers, int neurons, char* recovery, char* image_file, char* label_file, char* out) {
// Entraînement du réseau sur le set de données MNIST
Reseau* reseau;
Network* network;

//int* repartition = malloc(sizeof(int)*couches);
int nb_neurones_der = 10;
int repartition[3] = {784, 32, nb_neurones_der};
//int* repartition = malloc(sizeof(int)*layers);
int nb_neurons_der = 10;
int repartition[3] = {784, 32, nb_neurons_der};

float* sortie = malloc(sizeof(float)*nb_neurones_der);
int* sortie_voulue;
float* sortie = malloc(sizeof(float)*nb_neurons_der);
int* desired_output;
float accuracy;
//generer_repartition(couches, repartition);
//generer_repartition(layers, repartition);

/*
* On repart d'un réseau déjà créée stocké dans un fichier
* ou on repart de zéro si aucune backup n'est fournie
* */
if (! recovery) {
reseau = malloc(sizeof(Reseau));
creation_du_reseau_neuronal(reseau, repartition, couches);
initialisation_du_reseau_neuronal(reseau);
network = malloc(sizeof(Network));
network_creation(network, repartition, layers);
network_initialisation(network);
} else {
reseau = lire_reseau(recovery);
network = read_network(recovery);
printf("Backup restaurée.\n");
}

Couche* der_couche = reseau->couches[reseau->nb_couches-1];
Layer* der_layer = network->layers[network->nb_layers-1];

// Chargement des images du set de données MNIST
int* parameters = read_mnist_images_parameters(image_file);
@@ -96,29 +96,29 @@ void train(int batches, int couches, int neurons, char* recovery, char* image_fi
for (int j=0; j < nb_images; j++) {
printf("\rBatch [%d/%d]\tImage [%d/%d]",i, batches, j, nb_images);

ecrire_image_dans_reseau(images[j], reseau, height, width);
sortie_voulue = creation_de_la_sortie_voulue(reseau, labels[j]);
forward_propagation(reseau);
backward_propagation(reseau, sortie_voulue);
write_image_in_network(images[j], network, height, width);
desired_output = desired_output_creation(network, labels[j]);
forward_propagation(network);
backward_propagation(network, desired_output);

for (int k=0; k < nb_neurones_der; k++) {
sortie[k] = der_couche->neurones[k]->z;
for (int k=0; k < nb_neurons_der; k++) {
sortie[k] = der_layer->neurons[k]->z;
}
if (indice_max(sortie, nb_neurones_der) == labels[j]) {
if (indice_max(sortie, nb_neurons_der) == labels[j]) {
accuracy += 1. / (float)nb_images;
}
free(sortie_voulue);
free(desired_output);
}
modification_du_reseau_neuronal(reseau, nb_images);
network_modification(network, nb_images);
printf("\rBatch [%d/%d]\tImage [%d/%d]\tAccuracy: %0.1f%%\n",i, batches, nb_images, nb_images, accuracy*100);
ecrire_reseau(out, reseau);
write_network(out, network);
}
suppression_du_reseau_neuronal(reseau);
deletion_of_network(network);
}

float** recognize(char* modele, char* entree) {
Reseau* reseau = lire_reseau(modele);
Couche* derniere_couche = reseau->couches[reseau->nb_couches-1];
Network* network = read_network(modele);
Layer* derniere_layer = network->layers[network->nb_layers-1];

int* parameters = read_mnist_images_parameters(entree);
int nb_images = parameters[0];
@@ -129,25 +129,25 @@ float** recognize(char* modele, char* entree) {
float** results = malloc(sizeof(float*)*nb_images);

for (int i=0; i < nb_images; i++) {
results[i] = malloc(sizeof(float)*derniere_couche->nb_neurones);
results[i] = malloc(sizeof(float)*derniere_layer->nb_neurons);

ecrire_image_dans_reseau(images[i], reseau, height, width);
forward_propagation(reseau);
write_image_in_network(images[i], network, height, width);
forward_propagation(network);

for (int j=0; j < derniere_couche->nb_neurones; j++) {
results[i][j] = derniere_couche->neurones[j]->z;
for (int j=0; j < derniere_layer->nb_neurons; j++) {
results[i][j] = derniere_layer->neurons[j]->z;
}
}
suppression_du_reseau_neuronal(reseau);
deletion_of_network(network);

return results;
}

void print_recognize(char* modele, char* entree, char* sortie) {
Reseau* reseau = lire_reseau(modele);
int nb_der_couche = reseau->couches[reseau->nb_couches-1]->nb_neurones;
Network* network = read_network(modele);
int nb_der_layer = network->layers[network->nb_layers-1]->nb_neurons;

suppression_du_reseau_neuronal(reseau);
deletion_of_network(network);

int* parameters = read_mnist_images_parameters(entree);
int nb_images = parameters[0];
@@ -163,11 +163,11 @@ void print_recognize(char* modele, char* entree, char* sortie) {
else
printf("\"%d\" : [", i);

for (int j=0; j < nb_der_couche; j++) {
for (int j=0; j < nb_der_layer; j++) {
if (! strcmp(sortie, "json")) {
printf("%f", resultats[i][j]);

if (j+1 < nb_der_couche) {
if (j+1 < nb_der_layer) {
printf(", ");
}
} else
@@ -187,10 +187,10 @@ void print_recognize(char* modele, char* entree, char* sortie) {
}

void test(char* modele, char* fichier_images, char* fichier_labels) {
Reseau* reseau = lire_reseau(modele);
int nb_der_couche = reseau->couches[reseau->nb_couches-1]->nb_neurones;
Network* network = read_network(modele);
int nb_der_layer = network->layers[network->nb_layers-1]->nb_neurons;

suppression_du_reseau_neuronal(reseau);
deletion_of_network(network);

int* parameters = read_mnist_images_parameters(fichier_images);
int nb_images = parameters[0];
@@ -200,7 +200,7 @@ void test(char* modele, char* fichier_images, char* fichier_labels) {
float accuracy;

for (int i=0; i < nb_images; i++) {
if (indice_max(resultats[i], nb_der_couche) == labels[i]) {
if (indice_max(resultats[i], nb_der_layer) == labels[i]) {
accuracy += 1. / (float)nb_images;
}
}
@@ -216,7 +216,7 @@ int main(int argc, char* argv[]) {
}
if (! strcmp(argv[1], "train")) {
int batches = 100;
int couches = 3;
int layers = 3;
int neurons = 784;
char* images = NULL;
char* labels = NULL;
@@ -229,8 +229,8 @@ int main(int argc, char* argv[]) {
batches = strtol(argv[i+1], NULL, 10);
i += 2;
} else
if ((! strcmp(argv[i], "--couches"))||(! strcmp(argv[i], "-c"))) {
couches = strtol(argv[i+1], NULL, 10);
if ((! strcmp(argv[i], "--layers"))||(! strcmp(argv[i], "-c"))) {
layers = strtol(argv[i+1], NULL, 10);
i += 2;
} else if ((! strcmp(argv[i], "--neurons"))||(! strcmp(argv[i], "-n"))) {
neurons = strtol(argv[i+1], NULL, 10);
@@ -265,7 +265,7 @@ int main(int argc, char* argv[]) {
out = "out.bin";
}
// Entraînement en sourçant neural_network.c
train(batches, couches, neurons, recovery, images, labels, out);
train(batches, layers, neurons, recovery, images, labels, out);
exit(0);
}
if (! strcmp(argv[1], "recognize")) {
@@ -9,8 +9,8 @@
#include "struct/neuron.h"

// Définit le taux d'apprentissage du réseau neuronal, donc la rapidité d'adaptation du modèle (compris entre 0 et 1)
//Cette valeur peut évoluer au fur et à mesure des époques (linéaire c'est mieux)
#define TAUX_APPRENTISSAGE 2.
// Cette valeur peut évoluer au fur et à mesure des époques (linéaire c'est mieux)
#define LEARNING_RATE 0.5
//Retourne un nombre aléatoire entre 0 et 1
#define RAND_DOUBLE() ((double)rand())/((double)RAND_MAX)
//Coefficient leaking ReLU
@@ -30,7 +30,7 @@ float sigmoid(float x){
return 1/(1 + exp(-x));
}

float sigmoid_derivee(float x){
float sigmoid_derivative(float x){
float tmp = exp(-x);
return tmp/((1+tmp)*(1+tmp));
}
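For reading convenience, the activation functions touched in this hunk can be summarised as follows; the sigmoid derivative matches `sigmoid_derivative` above, and the leaky ReLU form follows the comment in `forward_propagation` (negative slope `COEFF_LEAKY_RELU`, written a here), so this is a sketch rather than a formula copied from the file:

```latex
\sigma(x) = \frac{1}{1 + e^{-x}}, \qquad
\sigma'(x) = \frac{e^{-x}}{(1 + e^{-x})^2}, \qquad
\operatorname{LeakyReLU}(x) = \begin{cases} x & \text{if } x > 0 \\ a\,x & \text{otherwise} \end{cases}
```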
@@ -41,32 +41,32 @@ float leaky_ReLU(float x){
return COEFF_LEAKY_RELU;
}

float leaky_ReLU_derivee(float x){
float leaky_ReLU_derivative(float x){
if (x > 0)
return 1;
return COEFF_LEAKY_RELU;
}

void creation_du_reseau_neuronal(Reseau* reseau, int* neurones_par_couche, int nb_couches) {
void network_creation(Network* network, int* neurons_per_layer, int nb_layers) {
/* Créé et alloue de la mémoire aux différentes variables dans le réseau neuronal*/
Couche* couche;
Layer* layer;

reseau->nb_couches = nb_couches;
reseau->couches = (Couche**)malloc(sizeof(Couche*)*nb_couches);
network->nb_layers = nb_layers;
network->layers = (Layer**)malloc(sizeof(Layer*)*nb_layers);

for (int i=0; i < nb_couches; i++) {
reseau->couches[i] = (Couche*)malloc(sizeof(Couche));
couche = reseau->couches[i];
couche->nb_neurones = neurones_par_couche[i]; // nombre de neurones pour la couche
couche->neurones = (Neurone**)malloc(sizeof(Neurone*)*reseau->couches[i]->nb_neurones); // Création des différents neurones dans la couche
for (int i=0; i < nb_layers; i++) {
network->layers[i] = (Layer*)malloc(sizeof(Layer));
layer = network->layers[i];
layer->nb_neurons = neurons_per_layer[i]; // Nombre de neurones pour la layer
layer->neurons = (Neuron**)malloc(sizeof(Neuron*)*network->layers[i]->nb_neurons); // Création des différents neurones dans la couche

for (int j=0; j < couche->nb_neurones; j++) {
couche->neurones[j] = (Neurone*)malloc(sizeof(Neurone));
for (int j=0; j < layer->nb_neurons; j++) {
layer->neurons[j] = (Neuron*)malloc(sizeof(Neuron));

if (i != reseau->nb_couches-1) { // On exclut la dernière couche dont les neurones ne contiennent pas de poids sortants
couche->neurones[j]->poids_sortants = (float*)malloc(sizeof(float)*neurones_par_couche[i+1]);// Création des poids sortants du neurone
couche->neurones[j]->d_poids_sortants = (float*)malloc(sizeof(float)*neurones_par_couche[i+1]);
couche->neurones[j]->last_d_poids_sortants = (float*)malloc(sizeof(float)*neurones_par_couche[i+1]);
if (i != network->nb_layers-1) { // On exclut la dernière couche dont les neurones ne contiennent pas de poids sortants
layer->neurons[j]->weights = (float*)malloc(sizeof(float)*neurons_per_layer[i+1]);// Création des poids sortants du neurone
layer->neurons[j]->back_weights = (float*)malloc(sizeof(float)*neurons_per_layer[i+1]);
layer->neurons[j]->last_back_weights = (float*)malloc(sizeof(float)*neurons_per_layer[i+1]);
}
}
}
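As a reading aid for the allocation above and for the backpropagation loops later in this file: every `Neuron` owns its outgoing weights towards the next layer, so the weight from neuron `j` of layer `i` to neuron `k` of layer `i+1` is reached as sketched below (hypothetical helper, not part of the commit):

```c
/* Hypothetical helper, not part of the commit: weight of the edge going
 * from neuron j of layer i to neuron k of layer i+1 (requires neuron.h). */
float get_weight(Network* network, int i, int j, int k) {
    return network->layers[i]->neurons[j]->weights[k];
}
```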
@@ -75,120 +75,130 @@ void creation_du_reseau_neuronal(Reseau* reseau, int* neurones_par_couche, int n



void suppression_du_reseau_neuronal(Reseau* reseau) {
void deletion_of_network(Network* network) {
/* Libère l'espace mémoire alloué aux différentes variables dans la fonction
'creation_du_reseau' */
'creation_du_network' */
Layer* layer;
Neuron* neuron;

for (int i=0; i<reseau->nb_couches; i++) {
if (i!=reseau->nb_couches-1) { // On exclut la dernière couche dont les neurones ne contiennent pas de poids sortants
for (int j=0; j<reseau->couches[i]->nb_neurones; j++) {
free(reseau->couches[i]->neurones[j]->poids_sortants);
free(reseau->couches[i]->neurones[j]->d_poids_sortants);
for (int i=0; i<network->nb_layers; i++) {
layer = network->layers[i];
if (i!=network->nb_layers-1) { // On exclut la dernière couche dont les neurons ne contiennent pas de poids sortants
for (int j=0; j<network->layers[i]->nb_neurons; j++) {
neuron = layer->neurons[j];
free(neuron->weights);
free(neuron->back_weights);
}
}
free(reseau->couches[i]->neurones); // On libère enfin la liste des neurones de la couche
free(layer->neurons); // On libère enfin la liste des neurones de la couche
}
free(reseau); // Pour finir, on libère le réseau neronal contenant la liste des couches
free(network); // Pour finir, on libère le réseau neuronal contenant la liste des couches
}




void forward_propagation(Reseau* reseau) {
void forward_propagation(Network* network) {
/* Effectue une propagation en avant du réseau neuronal lorsque les données
on été insérées dans la première couche. Le résultat de la propagation se
trouve dans la dernière couche */
Couche* couche; // Couche actuelle
Couche* pre_couche; // Couche précédente
Layer* layer; // Couche actuelle
Layer* pre_layer; // Couche précédente
Neuron* neuron;
float sum;
float max_z;

for (int i=1; i < reseau->nb_couches; i++) { // La première couche contient déjà des valeurs
for (int i=1; i < network->nb_layers; i++) { // La première couche contient déjà des valeurs
sum = 0;
max_z = INT_MIN;
couche = reseau->couches[i];
pre_couche = reseau->couches[i-1];
layer = network->layers[i];
pre_layer = network->layers[i-1];

for (int j=0; j < couche->nb_neurones; j++) {
couche->neurones[j]->z = couche->neurones[j]->biais;
for (int j=0; j < layer->nb_neurons; j++) {
neuron = layer->neurons[j];
neuron->z = neuron->bias;

for (int k=0; k < pre_couche->nb_neurones; k++) {
couche->neurones[j]->z += pre_couche->neurones[k]->z * pre_couche->neurones[k]->poids_sortants[j];
for (int k=0; k < pre_layer->nb_neurons; k++) {
neuron->z += pre_layer->neurons[k]->z * pre_layer->neurons[k]->weights[j];
}

if (i < reseau->nb_couches-1) { // Pour toutes les couches sauf la dernière on utilise la fonction leaky_ReLU (a*z si z<0, z sinon)
couche->neurones[j]->z = leaky_ReLU(couche->neurones[j]->z);
} else { // Pour la dernière couche on utilise la fonction sigmoid permettant d'obtenir un résultat entre 0 et 1 à savoir une probabilité
max_z = max(max_z, couche->neurones[j]->z);
if (i < network->nb_layers-1) { // Pour toutes les couches sauf la dernière on utilise la fonction leaky_ReLU (a*z si z<0, z sinon)
neuron->z = leaky_ReLU(neuron->z);
} else { // Pour la dernière layer on utilise la fonction sigmoid permettant d'obtenir un résultat entre 0 et 1 à savoir une probabilité
max_z = max(max_z, neuron->z);
}
}
}
int last_layer = reseau->nb_couches-1;
int size_last_layer = reseau->couches[last_layer]->nb_neurones;
layer = network->layers[network->nb_layers-1];
int size_last_layer = layer->nb_neurons;

for (int j=0; j < size_last_layer; j++) {
reseau->couches[last_layer]->neurones[j]->z = exp(reseau->couches[last_layer]->neurones[j]->z - max_z);
sum += reseau->couches[last_layer]->neurones[j]->z;
neuron = layer->neurons[j];
neuron->z = exp(neuron->z - max_z);
sum += neuron->z;
}
for (int j=0; j < size_last_layer; j++) {
reseau->couches[last_layer]->neurones[j]->z = reseau->couches[last_layer]->neurones[j]->z / sum;
neuron = layer->neurons[j];
neuron->z = neuron->z / sum;
}
}
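The last two loops above apply a numerically stable softmax to the output layer: the maximum is subtracted before exponentiating, then the values are normalised. A standalone sketch of the same computation on plain float arrays, independent of the `Network` structures:

```c
#include <math.h>

/* Stable softmax: out[i] = exp(z[i] - max_z) / sum_j exp(z[j] - max_z).
 * Subtracting max_z avoids overflow in expf() without changing the result. */
void softmax(const float* z, float* out, int n) {
    float max_z = z[0];
    for (int i = 1; i < n; i++) {
        if (z[i] > max_z) max_z = z[i];
    }
    float sum = 0.0f;
    for (int i = 0; i < n; i++) {
        out[i] = expf(z[i] - max_z);
        sum += out[i];
    }
    for (int i = 0; i < n; i++) {
        out[i] /= sum;
    }
}
```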




int* creation_de_la_sortie_voulue(Reseau* reseau, int pos_nombre_voulu) {
int* desired_output_creation(Network* network, int wanted_number) {
/* Renvoie la liste des sorties voulues à partir du nombre
de couches, de la liste du nombre de neurones par couche et de la
position du résultat voulue, */
int nb_neurones = reseau->couches[reseau->nb_couches-1]->nb_neurones;
int nb_neurons = network->layers[network->nb_layers-1]->nb_neurons;

int* sortie_voulue = (int*)malloc(sizeof(int)*nb_neurones);
int* desired_output = (int*)malloc(sizeof(int)*nb_neurons);

for (int i=0; i < nb_neurones; i++) // On initialise toutes les sorties à 0 par défault
sortie_voulue[i] = 0;
for (int i=0; i < nb_neurons; i++) // On initialise toutes les sorties à 0 par défaut
desired_output[i] = 0;

sortie_voulue[pos_nombre_voulu] = 1; // Seule la sortie voulue vaut 1
return sortie_voulue;
desired_output[wanted_number] = 1; // Seule la sortie voulue vaut 1
return desired_output;
}



void backward_propagation(Reseau* reseau, int* sortie_voulue) {
void backward_propagation(Network* network, int* desired_output) {
/* Effectue une propagation en arrière du réseau neuronal */
Neurone* neurone;
Neurone* neurone2;
Neuron* neuron;
Neuron* neuron2;
float changes;
float tmp;

int i = reseau->nb_couches-2;
int i = network->nb_layers-2;
int neurons_nb = network->layers[i+1]->nb_neurons;
// On commence par parcourir tous les neurones de la couche finale
for (int j=0; j < reseau->couches[i+1]->nb_neurones; j++) {
neurone = reseau->couches[i+1]->neurones[j];
tmp = (sortie_voulue[j]==1) ? neurone->z - 1 : neurone->z;
for (int k=0; k < reseau->couches[i]->nb_neurones; k++) {
reseau->couches[i]->neurones[k]->d_poids_sortants[j] += reseau->couches[i]->neurones[k]->z*tmp;
reseau->couches[i]->neurones[k]->last_d_poids_sortants[j] = reseau->couches[i]->neurones[k]->z*tmp;
//if (k==0)printf("\n %f ->%f", reseau->couches[i+1]->neurones[j]->z*tmp, tmp);
for (int j=0; j < network->layers[i+1]->nb_neurons; j++) {
neuron = network->layers[i+1]->neurons[j];
tmp = (desired_output[j]==1) ? neuron->z - 1 : neuron->z;
for (int k=0; k < network->layers[i]->nb_neurons; k++) {
neuron2 = network->layers[i]->neurons[k];
neuron2->back_weights[j] += neuron2->z*tmp;
neuron2->last_back_weights[j] = neuron2->z*tmp;
}
neurone->d_biais += tmp;
//printf("\n%f", neurone->d_biais);
neuron->last_back_bias = tmp;
neuron->back_bias += tmp;
}
i--;
for (; i >= 0; i--) {
for (int j=0; j < reseau->couches[i+1]->nb_neurones; j++) {
for (i--; i >= 0; i--) {
neurons_nb = network->layers[i+1]->nb_neurons;
for (int j=0; j < neurons_nb; j++) {
neuron = network->layers[i+1]->neurons[j];
changes = 0;
for (int k=0; k < reseau->couches[i+2]->nb_neurones; k++) {
//printf("Couche %d Neurone %d Poids %f\n", i+1, j, reseau->couches[i+1]->neurones[j]->poids_sortants[k]);
changes += (reseau->couches[i+1]->neurones[j]->poids_sortants[k]*reseau->couches[i+1]->neurones[j]->last_d_poids_sortants[k])/reseau->couches[i+1]->nb_neurones;
for (int k=0; k < network->layers[i+2]->nb_neurons; k++) {
changes += (neuron->weights[k]*neuron->last_back_weights[k])/neurons_nb;
}
changes = changes*leaky_ReLU_derivee(reseau->couches[i+1]->neurones[j]->z);
reseau->couches[i+1]->neurones[j]->d_biais += changes;
reseau->couches[i+1]->neurones[j]->last_d_biais = changes;
for (int l=0; l < reseau->couches[i]->nb_neurones; l++){
//printf("%f\n", changes);
reseau->couches[i]->neurones[l]->d_poids_sortants[j] += reseau->couches[i]->neurones[l]->poids_sortants[j]*changes;
reseau->couches[i]->neurones[l]->last_d_poids_sortants[j] = reseau->couches[i]->neurones[l]->poids_sortants[j]*changes;
changes = changes*leaky_ReLU_derivative(neuron->z);
neuron->back_bias += changes;
neuron->last_back_bias = changes;
for (int l=0; l < network->layers[i]->nb_neurons; l++){
neuron2 = network->layers[i]->neurons[l];
neuron2->back_weights[j] += neuron2->weights[j]*changes;
neuron2->last_back_weights[j] = neuron2->weights[j]*changes;
}
}
}
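In the first loop of `backward_propagation`, `tmp` is the output-layer error term: the softmax output minus the one-hot target. Written out, with z_j the output activation, y_j = desired_output[j] and z_k the activation of neuron k in the previous layer, the accumulations performed by the code are:

```latex
\delta_j = z_j - y_j, \qquad
\Delta w_{kj} \mathrel{+}= z_k \, \delta_j, \qquad
\Delta b_j \mathrel{+}= \delta_j
```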
@@ -197,37 +207,37 @@ void backward_propagation(Reseau* reseau, int* sortie_voulue) {



void modification_du_reseau_neuronal(Reseau* reseau, uint32_t nb_modifs) {
void network_modification(Network* network, uint32_t nb_modifs) {
/* Modifie les poids et le biais des neurones du réseau neuronal à partir
du nombre de couches et de la liste du nombre de neurone par couche */
Neurone* neurone;
Neuron* neuron;

for (int i=0; i < reseau->nb_couches; i++) { // on exclut la dernière couche
for (int j=0; j < reseau->couches[i]->nb_neurones; j++) {
neurone = reseau->couches[i]->neurones[j];
if (neurone->biais != 0 && PRINT_BIAIS)
printf("C %d\tN %d\tb: %f \tDb: %f\n", i, j, neurone->biais, (TAUX_APPRENTISSAGE/nb_modifs) * neurone->d_biais);
neurone->biais -= (TAUX_APPRENTISSAGE/nb_modifs) * neurone->d_biais; // On modifie le biais du neurone à partir des données de la propagation en arrière
neurone->d_biais = 0;
for (int i=0; i < network->nb_layers; i++) { // on exclut la dernière couche
for (int j=0; j < network->layers[i]->nb_neurons; j++) {
neuron = network->layers[i]->neurons[j];
if (neuron->bias != 0 && PRINT_BIAIS)
printf("C %d\tN %d\tb: %f \tDb: %f\n", i, j, neuron->bias, (LEARNING_RATE/nb_modifs) * neuron->back_bias);
neuron->bias -= (LEARNING_RATE/nb_modifs) * neuron->back_bias; // On modifie le biais du neurone à partir des données de la propagation en arrière
neuron->back_bias = 0;

if (neurone->biais > MAX_RESEAU)
neurone->biais = MAX_RESEAU;
else if (neurone->biais < -MAX_RESEAU)
neurone->biais = -MAX_RESEAU;
if (neuron->bias > MAX_RESEAU)
neuron->bias = MAX_RESEAU;
else if (neuron->bias < -MAX_RESEAU)
neuron->bias = -MAX_RESEAU;

if (i!=reseau->nb_couches-1) {
for (int k=0; k < reseau->couches[i+1]->nb_neurones; k++) {
if (neurone->poids_sortants[k] != 0 && PRINT_POIDS)
printf("C %d\tN %d -> %d\tp: %f \tDp: %f\n", i, j, k, neurone->poids_sortants[k], (TAUX_APPRENTISSAGE/nb_modifs) * neurone->d_poids_sortants[k]);
neurone->poids_sortants[k] -= (TAUX_APPRENTISSAGE/nb_modifs) * neurone->d_poids_sortants[k]; // On modifie le poids du neurone à partir des données de la propagation en arrière
neurone->d_poids_sortants[k] = 0;
if (i!=network->nb_layers-1) {
for (int k=0; k < network->layers[i+1]->nb_neurons; k++) {
if (neuron->weights[k] != 0 && PRINT_POIDS)
printf("C %d\tN %d -> %d\tp: %f \tDp: %f\n", i, j, k, neuron->weights[k], (LEARNING_RATE/nb_modifs) * neuron->back_weights[k]);
neuron->weights[k] -= (LEARNING_RATE/nb_modifs) * neuron->back_weights[k]; // On modifie le poids du neurone à partir des données de la propagation en arrière
neuron->back_weights[k] = 0;

if (neurone->poids_sortants[k] > MAX_RESEAU) {
neurone->poids_sortants[k] = MAX_RESEAU;
if (neuron->weights[k] > MAX_RESEAU) {
neuron->weights[k] = MAX_RESEAU;
printf("Erreur, max du réseau atteint");
}
else if (neurone->poids_sortants[k] < -MAX_RESEAU) {
neurone->poids_sortants[k] = -MAX_RESEAU;
else if (neuron->weights[k] < -MAX_RESEAU) {
neuron->weights[k] = -MAX_RESEAU;
printf("Erreur, min du réseau atteint");
}
}
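The update applied by `network_modification` is batch gradient descent on the accumulated `back_*` terms, averaged over the batch and then clipped to ±MAX_RESEAU; with η = LEARNING_RATE and N = nb_modifs:

```latex
w \leftarrow w - \frac{\eta}{N}\,\Delta w, \qquad
b \leftarrow b - \frac{\eta}{N}\,\Delta b
```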
@@ -239,64 +249,59 @@ void modification_du_reseau_neuronal(Reseau* reseau, uint32_t nb_modifs) {



void initialisation_du_reseau_neuronal(Reseau* reseau) {
/* Initialise les variables du réseau neuronal (biais, poids, ...)
en suivant de la méthode de Xavier ...... à partir du nombre de couches et de la liste du nombre de neurone par couche */
Neurone* neurone;
double borne_superieure;
double borne_inferieure;
double ecart_bornes;
void network_initialisation(Network* network) {
/* Initialise les variables du réseau neuronal (bias, poids, ...)
en suivant de la méthode de Xavier ...... à partir du nombre de couches et de la liste du nombre de neurone par couches */
Neuron* neuron;
double upper_bound;
double lower_bound;
double bound_gap;

int nb_layers_loop = network->nb_layers -1;

upper_bound = 1/sqrt((double)network->layers[nb_layers_loop]->nb_neurons);
lower_bound = -upper_bound;
bound_gap = upper_bound - lower_bound;

srand(time(0));
for (int i=0; i < reseau->nb_couches-1; i++) { // On exclut la dernière couche
for (int j=0; j < reseau->couches[i]->nb_neurones; j++) {
for (int i=0; i < nb_layers_loop; i++) { // On exclut la dernière couche
for (int j=0; j < network->layers[i]->nb_neurons; j++) {

neurone = reseau->couches[i]->neurones[j];
neuron = network->layers[i]->neurons[j];
// Initialisation des bornes supérieure et inférieure
borne_superieure = 1/sqrt((double)reseau->couches[reseau->nb_couches-1]->nb_neurones);
borne_inferieure = -borne_superieure;
ecart_bornes = borne_superieure - borne_inferieure;

for (int k=0; k < reseau->couches[i+1]->nb_neurones; k++) { // Pour chaque neurone de la couche suivante auquel le neurone est relié
neurone->poids_sortants[k] = borne_inferieure + RAND_DOUBLE()*ecart_bornes;
neurone->d_poids_sortants[k] = 0;
neurone->last_d_poids_sortants[k] = 0;
if (i!=nb_layers_loop) {
for (int k=0; k < network->layers[i+1]->nb_neurons; k++) {
neuron->weights[k] = lower_bound + RAND_DOUBLE()*bound_gap;
neuron->back_weights[k] = 0;
neuron->last_back_weights[k] = 0;
}
}
if (i > 0) {// Pour tous les neurones n'étant pas dans la première couche
neurone->biais = borne_inferieure + RAND_DOUBLE()*ecart_bornes;
neurone->d_biais = 0;
neurone->last_d_biais = 0;
neuron->bias = lower_bound + RAND_DOUBLE()*bound_gap;
neuron->back_bias = 0;
neuron->last_back_bias = 0;
}
}
}
borne_superieure = 1/sqrt((double)reseau->couches[reseau->nb_couches-1]->nb_neurones);
borne_inferieure = -borne_superieure;
ecart_bornes = borne_superieure - borne_inferieure;

for (int j=0; j < reseau->couches[reseau->nb_couches-1]->nb_neurones; j++) {// Intialisation de la dernière couche exclue ci-dessus
neurone = reseau->couches[reseau->nb_couches-1]->neurones[j];
neurone->biais = borne_inferieure + RAND_DOUBLE()*ecart_bornes;
neurone->d_biais = 0;
neurone->last_d_biais = 0;
}
}
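The initialisation above draws weights and biases uniformly from a symmetric interval in the spirit of Xavier initialisation; in this version of the code the bound is derived once from the size of the last layer (n = number of output neurons):

```latex
w,\ b \sim \mathcal{U}\!\left(-\tfrac{1}{\sqrt{n}},\ \tfrac{1}{\sqrt{n}}\right)
```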




float erreur_sortie(Reseau* reseau, int numero_voulu){
float loss_computing(Network* network, int numero_voulu){
/* Renvoie l'erreur du réseau neuronal pour une sortie */
float erreur = 0;
float neurone_value;
float neuron_value;

for (int i=0; i < reseau->nb_couches-1; i++) {
neurone_value = reseau->couches[reseau->nb_couches-1]->neurones[i]->z;
for (int i=0; i < network->nb_layers-1; i++) {
neuron_value = network->layers[network->nb_layers-1]->neurons[i]->z;

if (i == numero_voulu) {
erreur += (1-neurone_value)*(1-neurone_value);
erreur += (1-neuron_value)*(1-neuron_value);
}
else {
erreur += neurone_value*neurone_value;
erreur += neuron_value*neuron_value;
}
}
@@ -12,16 +12,16 @@

float max(float a, float b);
float sigmoid(float x);
float sigmoid_derivee(float x);
float sigmoid_derivative(float x);
float leaky_ReLU(float x);
float leaky_ReLU_derivee(float x);
void creation_du_reseau_neuronal(Reseau* reseau_neuronal, int* neurones_par_couche, int nb_couches);
void suppression_du_reseau_neuronal(Reseau* reseau_neuronal);
void forward_propagation(Reseau* reseau_neuronal);
int* creation_de_la_sortie_voulue(Reseau* reseau_neuronal, int pos_nombre_voulu);
void backward_propagation(Reseau* reseau_neuronal, int* sortie_voulue);
void modification_du_reseau_neuronal(Reseau* reseau_neuronal, uint32_t nb_modifs);
void initialisation_du_reseau_neuronal(Reseau* reseau_neuronal);
float erreur_sortie(Reseau* reseau, int numero_voulu);
float leaky_ReLU_derivative(float x);
void network_creation(Network* network_neuronal, int* neurons_per_layer, int nb_layers);
void deletion_of_network(Network* network_neuronal);
void forward_propagation(Network* network_neuronal);
int* desired_output_creation(Network* network_neuronal, int wanted_number);
void backward_propagation(Network* network_neuronal, int* desired_output);
void network_modification(Network* network_neuronal, uint32_t nb_modifs);
void network_initialisation(Network* network_neuronal);
float loss_computing(Network* network, int numero_voulu);

#endif
@@ -8,57 +8,57 @@



Neurone* lire_neurone(uint32_t nb_poids_sortants, FILE *ptr) {
Neurone* neurone = malloc(sizeof(Neurone));
Neuron* read_neuron(uint32_t nb_weights, FILE *ptr) {
Neuron* neuron = malloc(sizeof(Neuron));
float activation;
float biais;
float bias;
float tmp;

fread(&activation, sizeof(float), 1, ptr);
fread(&biais, sizeof(float), 1, ptr);
fread(&bias, sizeof(float), 1, ptr);

neurone->biais = biais;
neuron->bias = bias;

neurone->z = 0.0;
neurone->last_d_biais = 0.0;
neurone->d_biais = 0.0;
neuron->z = 0.0;
neuron->last_back_bias = 0.0;
neuron->back_bias = 0.0;

float* poids_sortants = malloc(sizeof(float)*nb_poids_sortants);
float* weights = malloc(sizeof(float)*nb_weights);

neurone->last_d_poids_sortants = malloc(sizeof(float)*nb_poids_sortants);
neurone->d_poids_sortants = malloc(sizeof(float)*nb_poids_sortants);
neurone->poids_sortants = poids_sortants;
neuron->last_back_weights = malloc(sizeof(float)*nb_weights);
neuron->back_weights = malloc(sizeof(float)*nb_weights);
neuron->weights = weights;

for (int i=0; i < nb_poids_sortants; i++) {
for (int i=0; i < nb_weights; i++) {
fread(&tmp, sizeof(float), 1, ptr);
neurone->poids_sortants[i] = tmp;
neurone->d_poids_sortants[i] = 0.0;
neurone->last_d_poids_sortants[i] = 0.0;
neuron->weights[i] = tmp;
neuron->back_weights[i] = 0.0;
neuron->last_back_weights[i] = 0.0;
}

return neurone;
return neuron;
}


// Lit une couche de neurones
Neurone** lire_neurones(uint32_t nb_neurones, uint32_t nb_poids_sortants, FILE *ptr) {
Neurone** neurones = malloc(sizeof(Neurone*)*nb_neurones);
for (int i=0; i < nb_neurones; i++) {
neurones[i] = lire_neurone(nb_poids_sortants, ptr);
Neuron** read_neurons(uint32_t nb_neurons, uint32_t nb_weights, FILE *ptr) {
Neuron** neurons = malloc(sizeof(Neuron*)*nb_neurons);
for (int i=0; i < nb_neurons; i++) {
neurons[i] = read_neuron(nb_weights, ptr);
}
return neurones;
return neurons;
}


// Charge l'entièreté du réseau neuronal depuis un fichier binaire
Reseau* lire_reseau(char* filename) {
Network* read_network(char* filename) {
FILE *ptr;
Reseau* reseau = malloc(sizeof(Reseau));
Network* network = malloc(sizeof(Network));

ptr = fopen(filename, "rb");

uint32_t magic_number;
uint32_t nb_couches;
uint32_t nb_layers;
uint32_t tmp;

fread(&magic_number, sizeof(uint32_t), 1, ptr);
@@ -67,41 +67,41 @@ Reseau* lire_reseau(char* filename) {
exit(1);
}

fread(&nb_couches, sizeof(uint32_t), 1, ptr);
reseau->nb_couches = nb_couches;
fread(&nb_layers, sizeof(uint32_t), 1, ptr);
network->nb_layers = nb_layers;


Couche** couches = malloc(sizeof(Couche*)*nb_couches);
uint32_t nb_neurones_couche[nb_couches+1];
Layer** layers = malloc(sizeof(Layer*)*nb_layers);
uint32_t nb_neurons_layer[nb_layers+1];

reseau->couches = couches;
network->layers = layers;

for (int i=0; i < nb_couches; i++) {
couches[i] = malloc(sizeof(Couche));
for (int i=0; i < nb_layers; i++) {
layers[i] = malloc(sizeof(Layer));
fread(&tmp, sizeof(tmp), 1, ptr);
couches[i]->nb_neurones = tmp;
nb_neurones_couche[i] = tmp;
layers[i]->nb_neurons = tmp;
nb_neurons_layer[i] = tmp;
}
nb_neurones_couche[nb_couches] = 0;
nb_neurons_layer[nb_layers] = 0;

for (int i=0; i < nb_couches; i++) {
couches[i]->neurones = lire_neurones(couches[i]->nb_neurones, nb_neurones_couche[i+1], ptr);
for (int i=0; i < nb_layers; i++) {
layers[i]->neurons = read_neurons(layers[i]->nb_neurons, nb_neurons_layer[i+1], ptr);
}

fclose(ptr);
return reseau;
return network;
}




// Écrit un neurone dans le fichier pointé par *ptr
void ecrire_neurone(Neurone* neurone, int poids_sortants, FILE *ptr) {
float buffer[poids_sortants+2];
void ecrire_neuron(Neuron* neuron, int weights, FILE *ptr) {
float buffer[weights+2];

buffer[1] = neurone->biais;
for (int i=0; i < poids_sortants; i++) {
buffer[i+2] = neurone->poids_sortants[i];
buffer[1] = neuron->bias;
for (int i=0; i < weights; i++) {
buffer[i+2] = neuron->weights[i];
}

fwrite(buffer, sizeof(buffer), 1, ptr);
@@ -109,28 +109,28 @@ void ecrire_neurone(Neurone* neurone, int poids_sortants, FILE *ptr) {


// Stocke l'entièreté du réseau neuronal dans un fichier binaire
int ecrire_reseau(char* filename, Reseau* reseau) {
int write_network(char* filename, Network* network) {
FILE *ptr;
int nb_couches = reseau->nb_couches;
int nb_neurones[nb_couches+1];
int nb_layers = network->nb_layers;
int nb_neurons[nb_layers+1];

ptr = fopen(filename, "wb");

uint32_t buffer[nb_couches+2];
uint32_t buffer[nb_layers+2];

buffer[0] = MAGIC_NUMBER;
buffer[1] = nb_couches;
for (int i=0; i < nb_couches; i++) {
buffer[i+2] = reseau->couches[i]->nb_neurones;
nb_neurones[i] = reseau->couches[i]->nb_neurones;
buffer[1] = nb_layers;
for (int i=0; i < nb_layers; i++) {
buffer[i+2] = network->layers[i]->nb_neurons;
nb_neurons[i] = network->layers[i]->nb_neurons;
}
nb_neurones[nb_couches] = 0;
nb_neurons[nb_layers] = 0;

fwrite(buffer, sizeof(buffer), 1, ptr);

for (int i=0; i < nb_couches; i++) {
for (int j=0; j < nb_neurones[i]; j++) {
ecrire_neurone(reseau->couches[i]->neurones[j], nb_neurones[i+1], ptr);
for (int i=0; i < nb_layers; i++) {
for (int j=0; j < nb_neurons[i]; j++) {
ecrire_neuron(network->layers[i]->neurons[j], nb_neurons[i+1], ptr);
}
}
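Taken together, `write_network` / `ecrire_neuron` and `read_network` / `read_neuron` define the on-disk layout: a header of uint32 values (MAGIC_NUMBER, the number of layers, then one neuron count per layer), followed, layer by layer, by one float record per neuron (a leading float that `read_neuron` reads as `activation` and otherwise ignores, the bias, then the outgoing weights; the last layer has none). A minimal sketch that only reads the header back, assuming the same format:

```c
#include <stdio.h>
#include <stdint.h>

/* Hypothetical helper, not part of the commit: prints the header written by
 * write_network (magic number, number of layers, then one size per layer). */
void print_network_header(const char* filename) {
    FILE* ptr = fopen(filename, "rb");
    if (ptr == NULL) return;

    uint32_t magic, nb_layers, nb_neurons;
    fread(&magic, sizeof(uint32_t), 1, ptr);
    fread(&nb_layers, sizeof(uint32_t), 1, ptr);
    printf("magic: %u, layers: %u\n", magic, nb_layers);

    for (uint32_t i = 0; i < nb_layers; i++) {
        fread(&nb_neurons, sizeof(uint32_t), 1, ptr);
        printf("layer %u: %u neurons\n", i, nb_neurons);
    }
    fclose(ptr);
}
```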
@@ -7,11 +7,11 @@
#ifndef DEF_NEURON_IO_H
#define DEF_NEURON_IO_H

Neurone* lire_neurone(uint32_t nb_poids_sortants, FILE *ptr);
Neurone** lire_neurones(uint32_t nb_neurones, uint32_t nb_poids_sortants, FILE *ptr);
Reseau* lire_reseau(char* filename);
void ecrire_neurone(Neurone* neurone, int poids_sortants, FILE *ptr);
int ecrire_reseau(char* filename, Reseau* reseau);
Neuron* read_neuron(uint32_t nb_weights, FILE *ptr);
Neuron** read_neurons(uint32_t nb_neurons, uint32_t nb_weights, FILE *ptr);
Network* read_network(char* filename);
void ecrire_neuron(Neuron* neuron, int weights, FILE *ptr);
int write_network(char* filename, Network* network);


#endif
@@ -1,26 +1,26 @@
#ifndef DEF_NEURON_H
#define DEF_NEURON_H

typedef struct Neurone{
float* poids_sortants; // Liste de tous les poids des arêtes sortants du neurone
float biais; // Caractérise le biais du neurone
typedef struct Neuron{
float* weights; // Liste de tous les poids des arêtes sortants du neurone
float bias; // Caractérise le bias du neurone
float z; // Sauvegarde des calculs faits sur le neurone (programmation dynamique)

float *d_poids_sortants; // Changement des poids sortants lors de la backpropagation
float *last_d_poids_sortants; // Dernier changement de d_poid_sortants
float d_biais; // Changement du biais lors de la backpropagation
float last_d_biais; // Dernier changement de d_biais
} Neurone;
float *back_weights; // Changement des poids sortants lors de la backpropagation
float *last_back_weights; // Dernier changement de d_poid_sortants
float back_bias; // Changement du bias lors de la backpropagation
float last_back_bias; // Dernier changement de back_bias
} Neuron;


typedef struct Couche{
int nb_neurones; // Nombre de neurones dans la couche (longueur du tableau ci-dessous)
Neurone** neurones; // Tableau des neurones dans la couche
} Couche;
typedef struct Layer{
int nb_neurons; // Nombre de neurones dans la couche (longueur du tableau ci-dessous)
Neuron** neurons; // Tableau des neurones dans la couche
} Layer;

typedef struct Reseau{
int nb_couches; // Nombre de couches dans le réseau neuronal (longueur du tableau ci-dessous)
Couche** couches; // Tableau des couches dans le réseau neuronal
} Reseau;
typedef struct Network{
int nb_layers; // Nombre de couches dans le réseau neuronal (longueur du tableau ci-dessous)
Layer** layers; // Tableau des couches dans le réseau neuronal
} Network;

#endif
@@ -11,46 +11,46 @@
Contient un ensemble de fonctions utiles pour le débogage
*/
void help(char* call) {
printf("Usage: %s ( print-poids | print-biais | creer-reseau ) [OPTIONS]\n\n", call);
printf("Usage: %s ( print-poids | print-bias | creer-network ) [OPTIONS]\n\n", call);
printf("OPTIONS:\n");
printf("\tprint-poids:\n");
printf("\t\t--reseau | -r [FILENAME]\tFichier contenant le réseau de neurones.\n");
printf("\tprint-biais:\n");
printf("\t\t--reseau | -r [FILENAME]\tFichier contenant le réseau de neurones.\n");
printf("\t\t--network | -r [FILENAME]\tFichier contenant le réseau de neurons.\n");
printf("\tprint-bias:\n");
printf("\t\t--network | -r [FILENAME]\tFichier contenant le réseau de neurons.\n");
printf("\tcount-labels:\n");
printf("\t\t--labels | -l [FILENAME]\tFichier contenant les labels.\n");
printf("\tcreer-reseau:\n");
printf("\t\t--out | -o [FILENAME]\tFichier où écrire le réseau de neurones.\n");
printf("\tcreer-network:\n");
printf("\t\t--out | -o [FILENAME]\tFichier où écrire le réseau de neurons.\n");
printf("\t\t--number | -n [int]\tNuméro à privilégier\n");
}


void print_biais(char* filename) {
Reseau* reseau = lire_reseau(".cache/reseau.bin");
void print_bias(char* filename) {
Network* network = read_network(".cache/network.bin");

for (int i=1; i < reseau->nb_couches -1; i++) {
printf("Couche %d\n", i);
for (int j=0; j < reseau->couches[i]->nb_neurones; j++) {
printf("Couche %d\tNeurone %d\tBiais: %f\n", i, j, reseau->couches[i]->neurones[j]->biais);
for (int i=1; i < network->nb_layers -1; i++) {
printf("Layer %d\n", i);
for (int j=0; j < network->layers[i]->nb_neurons; j++) {
printf("Layer %d\tNeuron %d\tBiais: %f\n", i, j, network->layers[i]->neurons[j]->bias);
}
}
suppression_du_reseau_neuronal(reseau);
deletion_of_network(network);
}

void print_poids(char* filename) {
Reseau* reseau = lire_reseau(".cache/reseau.bin");
Network* network = read_network(".cache/network.bin");

for (int i=0; i < reseau->nb_couches -1; i++) {
printf("Couche %d\n", i);
for (int j=0; j < reseau->couches[i]->nb_neurones; j++) {
printf("Couche %d\tNeurone %d\tPoids: [", i, j);
for (int k=0; k < reseau->couches[i+1]->nb_neurones; k++) {
printf("%f, ", reseau->couches[i]->neurones[j]->poids_sortants[k]);
for (int i=0; i < network->nb_layers -1; i++) {
printf("Layer %d\n", i);
for (int j=0; j < network->layers[i]->nb_neurons; j++) {
printf("Layer %d\tNeuron %d\tPoids: [", i, j);
for (int k=0; k < network->layers[i+1]->nb_neurons; k++) {
printf("%f, ", network->layers[i]->neurons[j]->weights[k]);
}
printf("]\n");
}
}
suppression_du_reseau_neuronal(reseau);
deletion_of_network(network);
}

void count_labels(char* filename) {
@@ -74,46 +74,46 @@ void count_labels(char* filename) {
}
}

void creer_reseau(char* filename, int sortie) {
Reseau* reseau = malloc(sizeof(Reseau));
Couche* couche;
Neurone* neurone;
reseau->nb_couches = 3;
void create_network(char* filename, int sortie) {
Network* network = malloc(sizeof(Network));
Layer* layer;
Neuron* neuron;
network->nb_layers = 3;

reseau->couches = malloc(sizeof(Couche*)*reseau->nb_couches);
int neurones_par_couche[4] = {784, 1, 10, 0};
for (int i=0; i < reseau->nb_couches; i++) {
reseau->couches[i] = malloc(sizeof(Couche));
couche = reseau->couches[i];
couche->nb_neurones = neurones_par_couche[i];
couche->neurones = malloc(sizeof(Neurone*)*couche->nb_neurones);
for (int j=0; j < couche->nb_neurones; j++) {
couche->neurones[j] = malloc(sizeof(Neurone));
neurone = couche->neurones[j];
network->layers = malloc(sizeof(Layer*)*network->nb_layers);
int neurons_per_layer[4] = {784, 1, 10, 0};
for (int i=0; i < network->nb_layers; i++) {
network->layers[i] = malloc(sizeof(Layer));
layer = network->layers[i];
layer->nb_neurons = neurons_per_layer[i];
layer->neurons = malloc(sizeof(Neuron*)*layer->nb_neurons);
for (int j=0; j < layer->nb_neurons; j++) {
layer->neurons[j] = malloc(sizeof(Neuron));
neuron = layer->neurons[j];

neurone->biais = 0.;
neurone->z = 0.;
neuron->bias = 0.;
neuron->z = 0.;

neurone->d_biais = 0.;
neurone->last_d_biais = 0.;
neuron->back_bias = 0.;
neuron->last_back_bias = 0.;

neurone->poids_sortants = malloc(sizeof(float)*neurones_par_couche[i+1]);
neurone->d_poids_sortants = malloc(sizeof(float)*neurones_par_couche[i+1]);
neurone->last_d_poids_sortants = malloc(sizeof(float)*neurones_par_couche[i+1]);
for (int k=0; k < neurones_par_couche[i+1]; k++) {
neurone->poids_sortants[k] = 0.;
neurone->d_poids_sortants[k] = 0.;
neurone->last_d_poids_sortants[k] = 0.;
neuron->weights = malloc(sizeof(float)*neurons_per_layer[i+1]);
neuron->back_weights = malloc(sizeof(float)*neurons_per_layer[i+1]);
neuron->last_back_weights = malloc(sizeof(float)*neurons_per_layer[i+1]);
for (int k=0; k < neurons_per_layer[i+1]; k++) {
neuron->weights[k] = 0.;
neuron->back_weights[k] = 0.;
neuron->last_back_weights[k] = 0.;
}
}
}

for (int j=0; j < neurones_par_couche[0]; j++) {
reseau->couches[0]->neurones[j]->poids_sortants[0] = 1;
for (int j=0; j < neurons_per_layer[0]; j++) {
network->layers[0]->neurons[j]->weights[0] = 1;
}
reseau->couches[1]->neurones[0]->poids_sortants[sortie] = 1;
ecrire_reseau(filename, reseau);
suppression_du_reseau_neuronal(reseau);
network->layers[1]->neurons[0]->weights[sortie] = 1;
write_network(filename, network);
deletion_of_network(network);
}

@@ -128,7 +128,7 @@ int main(int argc, char* argv[]) {
char* filename = NULL;
int i = 2;
while (i < argc) {
if ((! strcmp(argv[i], "--reseau"))||(! strcmp(argv[i], "-r"))) {
if ((! strcmp(argv[i], "--network"))||(! strcmp(argv[i], "-r"))) {
filename = argv[i+1];
i += 2;
} else {
@@ -137,16 +137,16 @@ int main(int argc, char* argv[]) {
}
}
if (! filename) {
printf("Pas de fichier spécifié, utilisation de '.cache/reseau.bin'\n");
filename = ".cache/reseau.bin";
printf("Pas de fichier spécifié, utilisation de '.cache/network.bin'\n");
filename = ".cache/network.bin";
}
print_poids(filename);
exit(1);
} else if (! strcmp(argv[1], "print-biais")) {
} else if (! strcmp(argv[1], "print-bias")) {
char* filename = NULL;
int i = 2;
while (i < argc) {
if ((! strcmp(argv[i], "--reseau"))||(! strcmp(argv[i], "-r"))) {
if ((! strcmp(argv[i], "--network"))||(! strcmp(argv[i], "-r"))) {
filename = argv[i+1];
i += 2;
} else {
@@ -155,12 +155,12 @@ int main(int argc, char* argv[]) {
}
}
if (! filename) {
printf("Pas de fichier spécifié, utilisation de '.cache/reseau.bin'\n");
filename = ".cache/reseau.bin";
printf("Pas de fichier spécifié, utilisation de '.cache/network.bin'\n");
filename = ".cache/network.bin";
}
print_biais(filename);
print_bias(filename);
exit(1);
} else if (! strcmp(argv[1], "creer-reseau")) {
} else if (! strcmp(argv[1], "creer-network")) {
char* out = NULL;
int n = -1;
int i = 2;
@@ -40,7 +40,7 @@ def recognize_mnist(image):
output = subprocess.check_output([
'out/main',
'recognize',
'--modele', '.cache/reseau.bin',
'--modele', '.cache/network.bin',
'--in', '.cache/image.bin',
'--out', 'json'
]).decode("utf-8")
@@ -5,7 +5,7 @@
#include "../src/mnist/mnist.c"


void test_lecture(int nb_images, int width, int height, int*** images, unsigned int* labels) {
void read_test(int nb_images, int width, int height, int*** images, unsigned int* labels) {
printf("\tLecture des labels\n");
for (int i=0; i < nb_images; i++) {
(void)labels[i];
@@ -45,7 +45,7 @@ int main() {
printf("OK\n");
printf("Vérification de l'accès en lecture\n");

test_lecture(nb_images, width, height, images, labels);
read_test(nb_images, width, height, images, labels);

printf("OK\n");
return 1;
@@ -9,19 +9,19 @@
int main() {
printf("Création du réseau\n");

Reseau* reseau_neuronal = malloc(sizeof(Reseau));
Network* network_neuronal = malloc(sizeof(Network));
int tab[5] = {30, 25, 20, 15, 10};
creation_du_reseau_neuronal(reseau_neuronal, tab, 5);
network_creation(network_neuronal, tab, 5);

printf("OK\n");
printf("Initialisation du réseau\n");

initialisation_du_reseau_neuronal(reseau_neuronal);
network_initialisation(network_neuronal);

printf("OK\n");
printf("Enregistrement du réseau\n");

ecrire_reseau(".test-cache/random_reseau.bin", reseau_neuronal);
write_network(".test-cache/random_network.bin", network_neuronal);

printf("OK\n");
return 1;
@@ -6,61 +6,61 @@
#include "../src/mnist/neuron_io.c"


Neurone* creer_neurone(int nb_sortants) {
Neurone* neurone = malloc(2*sizeof(float*)+6*sizeof(float));
neurone->poids_sortants = malloc(sizeof(float)*nb_sortants);
neurone->d_poids_sortants = malloc(sizeof(float)*nb_sortants);
neurone->last_d_poids_sortants = malloc(sizeof(float)*nb_sortants);
Neuron* creer_neuron(int nb_sortants) {
Neuron* neuron = malloc(2*sizeof(float*)+6*sizeof(float));
neuron->weights = malloc(sizeof(float)*nb_sortants);
neuron->back_weights = malloc(sizeof(float)*nb_sortants);
neuron->last_back_weights = malloc(sizeof(float)*nb_sortants);

for (int i=0; i < nb_sortants; i++) {
neurone->poids_sortants[i] = 0.5;
neurone->d_poids_sortants[i] = 0.0;
neurone->last_d_poids_sortants[i] = 0.0;
neuron->weights[i] = 0.5;
neuron->back_weights[i] = 0.0;
neuron->last_back_weights[i] = 0.0;
}
neurone->z = 0.0;
neurone->biais = 0.0;
neurone->d_biais = 0.0;
neurone->last_d_biais = 0.0;
neuron->z = 0.0;
neuron->bias = 0.0;
neuron->back_bias = 0.0;
neuron->last_back_bias = 0.0;

return neurone;
return neuron;
}


Couche* creer_couche(int nb_neurones, int nb_sortants) {
Couche* couche = malloc(sizeof(int)+sizeof(Neurone**));
Neurone** tab = malloc(sizeof(Neurone*)*nb_neurones);
Layer* creer_layer(int nb_neurons, int nb_sortants) {
Layer* layer = malloc(sizeof(int)+sizeof(Neuron**));
Neuron** tab = malloc(sizeof(Neuron*)*nb_neurons);

couche->nb_neurones = nb_neurones;
couche->neurones = tab;
layer->nb_neurons = nb_neurons;
layer->neurons = tab;

for (int i=0; i<nb_neurones; i++) {
tab[i] = creer_neurone(nb_sortants);
for (int i=0; i<nb_neurons; i++) {
tab[i] = creer_neuron(nb_sortants);
}
return couche;
return layer;
};


Reseau* creer_reseau(int nb_couches, int nb_max_neurones, int nb_min_neurones) {
Reseau* reseau = malloc(sizeof(int)+sizeof(Couche**));
reseau->couches = malloc(sizeof(Couche*)*nb_couches);
int nb_neurones[nb_couches+1];
Network* create_network(int nb_layers, int nb_max_neurons, int nb_min_neurons) {
Network* network = malloc(sizeof(int)+sizeof(Layer**));
network->layers = malloc(sizeof(Layer*)*nb_layers);
int nb_neurons[nb_layers+1];

reseau->nb_couches = nb_couches;
network->nb_layers = nb_layers;

for (int i=0; i < nb_couches; i++) {
nb_neurones[i] = i*(nb_min_neurones-nb_max_neurones)/(nb_couches-1) + nb_max_neurones;
for (int i=0; i < nb_layers; i++) {
nb_neurons[i] = i*(nb_min_neurons-nb_max_neurons)/(nb_layers-1) + nb_max_neurons;
}
nb_neurones[nb_couches] = 0;
nb_neurons[nb_layers] = 0;

for (int i=0; i < nb_couches; i++) {
reseau->couches[i] = creer_couche(nb_neurones[i], nb_neurones[i+1]);
for (int i=0; i < nb_layers; i++) {
network->layers[i] = creer_layer(nb_neurons[i], nb_neurons[i+1]);
}
return reseau;
return network;
}

int main() {
Reseau* reseau = creer_reseau(5, 300, 10);
ecrire_reseau(".test-cache/neuron_io.bin", reseau);
Reseau* reseau2 = lire_reseau(".test-cache/neuron_io.bin");
Network* network = create_network(5, 300, 10);
write_network(".test-cache/neuron_io.bin", network);
Network* network2 = read_network(".test-cache/neuron_io.bin");
return 1;
}