Rename mnist folder to dense

This commit is contained in:
augustin64 2023-03-13 13:55:09 +01:00
parent 2a88621c34
commit 04087c3de4
20 changed files with 94 additions and 65 deletions

View File

@ -1,19 +1,19 @@
BUILDDIR := ./build
SRCDIR := ./src
CACHE_DIR := ./.cache
NVCC := nvcc
NVCC := nvcc-no
CUDA_INCLUDE := /opt/cuda/include # Default installation path for ArchLinux, may be different
NVCC_INSTALLED := $(shell command -v $(NVCC) 2> /dev/null)
MNIST_SRCDIR := $(SRCDIR)/mnist
DENSE_SRCDIR := $(SRCDIR)/dense
CNN_SRCDIR := $(SRCDIR)/cnn
MNIST_SRC := $(wildcard $(MNIST_SRCDIR)/*.c)
MNIST_SRC := $(wildcard $(DENSE_SRCDIR)/*.c)
CNN_SRC := $(wildcard $(CNN_SRCDIR)/*.c)
CNN_SRC_CUDA := $(wildcard $(CNN_SRCDIR)/*.cu)
MNIST_OBJ = $(filter-out $(BUILDDIR)/mnist_main.o $(BUILDDIR)/mnist_utils.o $(BUILDDIR)/mnist_preview.o, $(MNIST_SRC:$(MNIST_SRCDIR)/%.c=$(BUILDDIR)/mnist_%.o))
MNIST_OBJ = $(filter-out $(BUILDDIR)/dense_main.o $(BUILDDIR)/dense_utils.o $(BUILDDIR)/dense_preview.o, $(MNIST_SRC:$(DENSE_SRCDIR)/%.c=$(BUILDDIR)/dense_%.o))
CNN_OBJ = $(filter-out $(BUILDDIR)/cnn_main.o $(BUILDDIR)/cnn_preview.o $(BUILDDIR)/cnn_export.o, $(CNN_SRC:$(CNN_SRCDIR)/%.c=$(BUILDDIR)/cnn_%.o))
CNN_OBJ_CUDA = $(CNN_SRC:$(CNN_SRCDIR)/%.cu=$(BUILDDIR)/cnn_%.o)
@ -40,30 +40,23 @@ NVCCFLAGS = -g
# -fsanitize=address -lasan
#! WARNING: test/cnn-neuron_io fails with this option enabled
all: mnist cnn;
all: dense cnn;
#
# Build mnist
# Build dense
#
# Executables
mnist: $(BUILDDIR)/mnist-main $(BUILDDIR)/mnist-utils $(BUILDDIR)/mnist-preview;
dense: $(BUILDDIR)/dense-main $(BUILDDIR)/dense-utils $(BUILDDIR)/dense-preview;
$(BUILDDIR)/mnist-main: $(MNIST_SRCDIR)/main.c $(BUILDDIR)/mnist.o $(BUILDDIR)/mnist_neuron_io.o $(BUILDDIR)/mnist_neural_network.o
$(CC) $(MNIST_SRCDIR)/main.c $(BUILDDIR)/mnist.o $(BUILDDIR)/mnist_neuron_io.o $(BUILDDIR)/mnist_neural_network.o -o $(BUILDDIR)/mnist-main $(CFLAGS) $(LD_CFLAGS)
$(BUILDDIR)/mnist-utils: $(MNIST_SRCDIR)/utils.c $(BUILDDIR)/mnist_neural_network.o $(BUILDDIR)/mnist_neuron_io.o $(BUILDDIR)/mnist.o
$(BUILDDIR)/dense-main: $(DENSE_SRCDIR)/main.c $(BUILDDIR)/mnist.o $(BUILDDIR)/dense_neuron_io.o $(BUILDDIR)/dense_neural_network.o
$(CC) $^ -o $@ $(CFLAGS) $(LD_CFLAGS)
$(BUILDDIR)/mnist-preview: $(MNIST_SRCDIR)/preview.c $(BUILDDIR)/mnist.o
$(BUILDDIR)/dense-utils: $(DENSE_SRCDIR)/utils.c $(BUILDDIR)/dense_neural_network.o $(BUILDDIR)/dense_neuron_io.o $(BUILDDIR)/mnist.o
$(CC) $^ -o $@ $(CFLAGS) $(LD_CFLAGS)
# .o files
$(BUILDDIR)/mnist.o: $(MNIST_SRCDIR)/mnist.c $(MNIST_SRCDIR)/include/mnist.h
$(CC) -c $< -o $@ $(CFLAGS)
$(BUILDDIR)/dense-preview: $(DENSE_SRCDIR)/preview.c $(BUILDDIR)/mnist.o
$(CC) $^ -o $@ $(CFLAGS) $(LD_CFLAGS)
$(BUILDDIR)/mnist.cuda.o: $(MNIST_SRCDIR)/mnist.c $(MNIST_SRCDIR)/include/mnist.h
$(CC) -c $< -o $@ $(CFLAGS) -DUSE_CUDA -lcuda -I$(CUDA_INCLUDE)
$(BUILDDIR)/mnist_%.o: $(MNIST_SRCDIR)/%.c $(MNIST_SRCDIR)/include/%.h
$(BUILDDIR)/dense_%.o: $(DENSE_SRCDIR)/%.c $(DENSE_SRCDIR)/include/%.h
$(CC) -c $< -o $@ $(CFLAGS)
@ -171,8 +164,7 @@ prepare-tests:
$(BUILDDIR)/test-cnn_%: $(TEST_SRCDIR)/cnn_%.c $(CNN_OBJ) $(BUILDDIR)/colors.o $(BUILDDIR)/mnist.o $(BUILDDIR)/utils.o $(BUILDDIR)/memory_management.o
$(CC) $^ -o $@ $(CFLAGS) $(LD_CFLAGS)
# mnist.o est déjà inclus en tant que mnist_mnist.o
$(BUILDDIR)/test-mnist_%: $(TEST_SRCDIR)/mnist_%.c $(MNIST_OBJ) $(BUILDDIR)/colors.o
$(BUILDDIR)/test-dense_%: $(TEST_SRCDIR)/dense_%.c $(MNIST_OBJ) $(BUILDDIR)/colors.o $(BUILDDIR)/mnist.o
$(CC) $^ -o $@ $(CFLAGS) $(LD_CFLAGS)
$(BUILDDIR)/test-memory_management: $(TEST_SRCDIR)/memory_management.c $(BUILDDIR)/colors.o $(BUILDDIR)/utils.o $(BUILDDIR)/test_memory_management.o
@ -208,9 +200,9 @@ endif
webserver: $(CACHE_DIR)/mnist-reseau-fully-connected.bin $(CACHE_DIR)/mnist-reseau-cnn.bin
FLASK_APP="src/webserver/app.py" flask run
$(CACHE_DIR)/mnist-reseau-fully-connected.bin: $(BUILDDIR)/mnist-main
$(CACHE_DIR)/mnist-reseau-fully-connected.bin: $(BUILDDIR)/dense-main
@mkdir -p $(CACHE_DIR)
$(BUILDDIR)/mnist-main train \
$(BUILDDIR)/dense-main train \
--images "data/mnist/train-images-idx3-ubyte" \
--labels "data/mnist/train-labels-idx1-ubyte" \
--out "$(CACHE_DIR)/mnist-reseau-fully-connected.bin"

View File

@ -4,7 +4,7 @@
#include <string.h>
#include "../include/memory_management.h"
#include "../mnist/include/mnist.h"
#include "../include/mnist.h"
#include "include/neuron_io.h"
#include "include/struct.h"
#include "include/jpeg.h"

View File

@ -8,7 +8,7 @@
#include <omp.h>
#include "../include/memory_management.h"
#include "../mnist/include/mnist.h"
#include "../include/mnist.h"
#include "include/initialisation.h"
#include "include/test_network.h"
#include "include/neuron_io.h"
@ -107,6 +107,7 @@ void train(int dataset_type, char* images_file, char* labels_file, char* data_di
float loss;
float batch_loss; // May be redundant with loss, but gives more information
float accuracy;
float batch_accuracy;
float current_accuracy;
@ -257,6 +258,7 @@ void train(int dataset_type, char* images_file, char* labels_file, char* data_di
for (int j=0; j < batches_epoques; j++) {
batch_loss = 0.;
batch_accuracy = 0.;
#ifdef USE_MULTITHREADING
if (j == batches_epoques-1) {
nb_remaining_images = nb_images_total_remaining;
@ -293,6 +295,7 @@ void train(int dataset_type, char* images_file, char* labels_file, char* data_di
accuracy += train_parameters[k]->accuracy / (float) nb_images_total;
loss += train_parameters[k]->loss/nb_images_total;
batch_loss += train_parameters[k]->loss/BATCHES;
batch_accuracy += train_parameters[k]->accuracy / (float) BATCHES; // C'est faux pour le dernier batch mais on ne l'affiche pas pour lui (enfin très rapidement)
}
}
@ -304,7 +307,7 @@ void train(int dataset_type, char* images_file, char* labels_file, char* data_di
}
}
current_accuracy = accuracy * nb_images_total/((j+1)*BATCHES);
printf("\rThreads [%d]\tÉpoque [%d/%d]\tImage [%d/%d]\tAccuracy: " YELLOW "%0.2f%%" RESET, nb_threads, i, epochs, BATCHES*(j+1), nb_images_total, current_accuracy*100);
printf("\rThreads [%d]\tÉpoque [%d/%d]\tImage [%d/%d]\tAccuracy: " YELLOW "%0.2f%%" RESET " \tBatch Accuracy: " YELLOW "%0.2f%%" RESET, nb_threads, i, epochs, BATCHES*(j+1), nb_images_total, current_accuracy*100, batch_accuracy*100);
fflush(stdout);
#else
(void)nb_images_total_remaining; // Juste pour enlever un warning
@ -320,13 +323,14 @@ void train(int dataset_type, char* images_file, char* labels_file, char* data_di
accuracy += train_params->accuracy / (float) nb_images_total;
current_accuracy = accuracy * nb_images_total/((j+1)*BATCHES);
batch_accuracy += train_params->accuracy / (float)BATCHES;
loss += train_params->loss/nb_images_total;
batch_loss += train_params->loss/BATCHES;
update_weights(network, network);
update_bias(network, network);
printf("\rÉpoque [%d/%d]\tImage [%d/%d]\tAccuracy: " YELLOW "%0.4f%%" RESET, i, epochs, BATCHES*(j+1), nb_images_total, current_accuracy*100);
printf("\rÉpoque [%d/%d]\tImage [%d/%d]\tAccuracy: " YELLOW "%0.4f%%" RESET "\tBatch Accuracy: " YELLOW "%0.2f%%" RESET, i, epochs, BATCHES*(j+1), nb_images_total, current_accuracy*100, batch_accuracy*100);
fflush(stdout);
#endif
}

View File

@ -4,12 +4,29 @@
#include <string.h>
#include <math.h>
#include <time.h>
#include <stdbool.h>
#include "neuron.h"
#ifndef DEF_NEURAL_NETWORK_H
#define DEF_NEURAL_NETWORK_H
#define LEARNING_RATE 0.1
// Retourne un nombre aléatoire entre 0 et 1
#define RAND_DOUBLE() ((double)rand())/((double)RAND_MAX)
// Coefficient leaky ReLU
#define COEFF_LEAKY_RELU 0.2
#define MAX_RESEAU 100000
#define PRINT_POIDS false
#define PRINT_BIAIS false
// Mettre à 1 pour désactiver
#define DROPOUT 0.7
#define ENTRY_DROPOUT 0.85
bool drop(float prob);
/*
* Fonction max pour les floats
@ -41,7 +58,7 @@ void deletion_of_network(Network* network);
* les données ont été insérées dans la première couche. Le résultat
* de la propagation se trouve dans la dernière couche
*/
void forward_propagation(Network* network);
void forward_propagation(Network* network, bool is_training);
/*
* Renvoie la liste des sorties voulues à partir du nombre voulu

View File

@ -7,7 +7,7 @@
#include <sys/sysinfo.h>
#include "include/main.h"
#include "include/mnist.h"
#include "../include/mnist.h"
#include "../include/colors.h"
#include "include/neuron_io.h"
#include "include/neural_network.h"
@ -47,7 +47,7 @@ void print_image(unsigned int width, unsigned int height, int** image, float* pr
int indice_max(float* tab, int n) {
int indice = -1;
float maxi = FLT_MIN;
float maxi = -FLT_MAX;
for (int i=0; i < n; i++) {
if (tab[i] > maxi) {
@ -87,7 +87,11 @@ void help(char* call) {
void write_image_in_network(int** image, Network* network, int height, int width) {
for (int i=0; i < height; i++) {
for (int j=0; j < width; j++) {
network->layers[0]->neurons[i*height+j]->z = (float)image[i][j] / 255.0f;
if (!drop(ENTRY_DROPOUT)) {
network->layers[0]->neurons[i*height+j]->z = (float)image[i][j] / 255.0f;
} else {
network->layers[0]->neurons[i*height+j]->z = 0;
}
}
}
}
@ -113,7 +117,7 @@ void* train_thread(void* parameters) {
for (int i=start; i < start+nb_images; i++) {
write_image_in_network(images[shuffle[i]], network, height, width);
desired_output = desired_output_creation(network, labels[shuffle[i]]);
forward_propagation(network);
forward_propagation(network, true);
backward_propagation(network, desired_output);
for (int k=0; k < nb_neurons_last_layer; k++) {
@ -138,7 +142,7 @@ void train(int epochs, int layers, int neurons, char* recovery, char* image_file
//int* repartition = malloc(sizeof(int)*layers);
int nb_neurons_last_layer = 10;
int repartition[3] = {neurons, 42, nb_neurons_last_layer};
int repartition[2] = {neurons, nb_neurons_last_layer};
float accuracy;
float current_accuracy;
@ -239,6 +243,8 @@ void train(int epochs, int layers, int neurons, char* recovery, char* image_file
write_network(out, network);
if (delta != NULL)
write_delta_network(delta, delta_network);
test(out, "data/mnist/t10k-images-idx3-ubyte", "data/mnist/t10k-labels-idx1-ubyte", false);
}
write_network(out, network);
if (delta != NULL) {
@ -293,7 +299,7 @@ float** recognize(char* modele, char* entree) {
results[i] = (float*)malloc(sizeof(float)*last_layer->nb_neurons);
write_image_in_network(images[i], network, height, width);
forward_propagation(network);
forward_propagation(network, false);
for (int j=0; j < last_layer->nb_neurons; j++) {
results[i][j] = last_layer->neurons[j]->z;
@ -388,7 +394,7 @@ int main(int argc, char* argv[]) {
}
if (! strcmp(argv[1], "train")) {
int epochs = EPOCHS;
int layers = 3;
int layers = 2;
int neurons = 784;
int nb_images = -1;
int start = 0;

View File

@ -7,18 +7,9 @@
#include <time.h>
#include "include/neuron.h"
#include "include/neural_network.h"
// Définit le taux d'apprentissage du réseau neuronal, donc la rapidité d'adaptation du modèle (compris entre 0 et 1)
// Cette valeur peut évoluer au fur et à mesure des époques (linéaire c'est mieux)
#define LEARNING_RATE 0.1
// Retourne un nombre aléatoire entre 0 et 1
#define RAND_DOUBLE() ((double)rand())/((double)RAND_MAX)
//Coefficient leaking ReLU
#define COEFF_LEAKY_RELU 0.2
#define MAX_RESEAU 100000
#define PRINT_POIDS false
#define PRINT_BIAIS false
#ifndef __CUDACC__
// The functions and macros below are already defined when using NVCC
@ -30,6 +21,10 @@ float max(float a, float b){
#endif
bool drop(float prob) {
return (rand() % 100) > 100*prob;
}
float sigmoid(float x){
return 1/(1 + exp(-x));
}
@ -104,12 +99,12 @@ void deletion_of_network(Network* network) {
void forward_propagation(Network* network) {
void forward_propagation(Network* network, bool is_training) {
Layer* layer; // Couche actuelle
Layer* pre_layer; // Couche précédente
Neuron* neuron;
float sum;
float max_z;
float max_z = INT_MIN;
for (int i=1; i < network->nb_layers; i++) { // La première couche contient déjà des valeurs
sum = 0;
@ -126,7 +121,18 @@ void forward_propagation(Network* network) {
}
if (i < network->nb_layers-1) {
neuron->z = leaky_ReLU(neuron->z);
if (!is_training) {
if (j == 0) {
neuron->z = ENTRY_DROPOUT*leaky_ReLU(neuron->z);
} else {
neuron->z = DROPOUT*leaky_ReLU(neuron->z);
}
} else if (!drop(DROPOUT)) {
neuron->z = leaky_ReLU(neuron->z);
} else {
neuron->z = 0.;
}
} else { // Softmax seulement pour la dernière couche
max_z = max(max_z, neuron->z);
}
@ -190,12 +196,16 @@ void backward_propagation(Network* network, int* desired_output) {
changes += (neuron->weights[k]*neuron->last_back_weights[k])/neurons_nb;
}
changes = changes*leaky_ReLU_derivative(neuron->z);
neuron->back_bias += changes;
neuron->last_back_bias = changes;
if (neuron->z != 0) {
neuron->back_bias += changes;
neuron->last_back_bias = changes;
}
for (int l=0; l < network->layers[i]->nb_neurons; l++){
neuron2 = network->layers[i]->neurons[l];
neuron2->back_weights[j] += neuron2->weights[j]*changes;
neuron2->last_back_weights[j] = neuron2->weights[j]*changes;
if (neuron->z != 0) {
neuron2->back_weights[j] += neuron2->weights[j]*changes;
neuron2->last_back_weights[j] = neuron2->weights[j]*changes;
}
}
}
}

View File

@ -3,7 +3,7 @@
#include <stdint.h>
#include <inttypes.h>
#include "include/mnist.h"
#include "../include/mnist.h"
void print_image(unsigned int width, unsigned int height, int** image) {

View File

@ -5,7 +5,7 @@
#include "include/neural_network.h"
#include "include/neuron_io.h"
#include "include/mnist.h"
#include "../include/mnist.h"
/*
Contient un ensemble de fonctions utiles pour le débogage

View File

@ -48,7 +48,7 @@ def recognize_mnist(image):
try:
output = subprocess.check_output([
'build/mnist-main',
'build/dense-main',
'recognize',
'--modele', '.cache/mnist-reseau-fully-connected.bin',
'--in', '.cache/image-idx3-ubyte',

View File

@ -2,7 +2,7 @@
#include <stdio.h>
#include <inttypes.h>
#include "../src/mnist/include/mnist.h"
#include "../src/include/mnist.h"
#include "../src/include/colors.h"

View File

@ -3,8 +3,8 @@
#include <stdint.h>
#include <inttypes.h>
#include "../src/mnist/include/neural_network.h"
#include "../src/mnist/include/neuron_io.h"
#include "../src/dense/include/neural_network.h"
#include "../src/dense/include/neuron_io.h"
#include "../src/include/colors.h"
int main() {

View File

@ -4,8 +4,8 @@
#include <stdint.h>
#include <inttypes.h>
#include "../src/mnist/include/neuron_io.h"
#include "../src/mnist/include/neural_network.h"
#include "../src/dense/include/neuron_io.h"
#include "../src/dense/include/neural_network.h"
#include "../src/include/colors.h"

View File

@ -3,21 +3,21 @@
set -e
OUT="build"
make $OUT/mnist-utils
make $OUT/dense-utils
echo "Compte des labels"
"$OUT/mnist-utils" count-labels -l data/mnist/t10k-labels-idx1-ubyte
"$OUT/dense-utils" count-labels -l data/mnist/t10k-labels-idx1-ubyte
echo -e "\033[32mOK\033[0m"
echo "Création du réseau"
mkdir -p .test-cache
"$OUT/mnist-utils" creer-reseau -n 3 -o .test-cache/reseau.bin
"$OUT/dense-utils" creer-reseau -n 3 -o .test-cache/reseau.bin
echo -e "\033[32mOK\033[0m"
echo "Affichage poids"
"$OUT/mnist-utils" print-poids -r .test-cache/reseau.bin > /dev/null
"$OUT/dense-utils" print-poids -r .test-cache/reseau.bin > /dev/null
echo -e "\033[32mOK\033[0m"
echo "Affichage biais"
"$OUT/mnist-utils" print-biais -r .test-cache/reseau.bin > /dev/null
"$OUT/dense-utils" print-biais -r .test-cache/reseau.bin > /dev/null
echo -e "\033[32mOK\033[0m"