learning_rate is now a float (was int; the parenthetical "NON NULL" presumably meant non-integer)

This commit is contained in:
julienChemillier 2022-11-15 18:15:18 +01:00
parent d728530d1a
commit 50af676027
5 changed files with 11 additions and 11 deletions

View File

@ -6,7 +6,7 @@
#include "include/creation.h"
Network* create_network(int max_size, int learning_rate, int dropout, int initialisation, int input_dim, int input_depth) {
Network* create_network(int max_size, float learning_rate, int dropout, int initialisation, int input_dim, int input_depth) {
if (dropout < 0 || dropout > 100) {
printf("Erreur, la probabilité de dropout n'est pas respecté, elle doit être comprise entre 0 et 100\n");
}
@ -33,7 +33,7 @@ Network* create_network(int max_size, int learning_rate, int dropout, int initia
return network;
}
Network* create_network_lenet5(int learning_rate, int dropout, int activation, int initialisation, int input_dim, int input_depth) {
Network* create_network_lenet5(float learning_rate, int dropout, int activation, int initialisation, int input_dim, int input_depth) {
Network* network = create_network(8, learning_rate, dropout, initialisation, input_dim, input_depth);
network->kernel[0]->activation = activation;
network->kernel[0]->linearisation = 0;

View File

@ -7,12 +7,12 @@
/*
* Créé un réseau qui peut contenir max_size couche (dont celle d'input et d'output)
*/
Network* create_network(int max_size, int learning_rate, int dropout, int initialisation, int input_dim, int input_depth);
Network* create_network(int max_size, float learning_rate, int dropout, int initialisation, int input_dim, int input_depth);
/*
* Renvoie un réseau suivant l'architecture LeNet5
*/
Network* create_network_lenet5(int learning_rate, int dropout, int activation, int initialisation, int input_dim, int input_depth);
Network* create_network_lenet5(float learning_rate, int dropout, int activation, int initialisation, int input_dim, int input_depth);
/*
* Créé et alloue de la mémoire à une couche de type input cube

View File

@ -30,7 +30,7 @@ typedef struct Kernel {
typedef struct Network{
int dropout; // Contient la probabilité d'abandon d'un neurone dans [0, 100] (entiers)
int learning_rate; // Taux d'apprentissage du réseau
float learning_rate; // Taux d'apprentissage du réseau
int initialisation; // Contient le type d'initialisation
int max_size; // Taille du tableau contenant le réseau
int size; // Taille actuelle du réseau (size ≤ max_size)

View File

@ -105,7 +105,7 @@ void train(int dataset_type, char* images_file, char* labels_file, char* data_di
}
// Initialisation du réseau
Network* network = create_network_lenet5(0, 0, TANH, GLOROT, input_dim, input_depth);
Network* network = create_network_lenet5(0.01, 0, TANH, GLOROT, input_dim, input_depth);
#ifdef USE_MULTITHREADING
int nb_remaining_images; // Nombre d'images restantes à lancer pour une série de threads

View File

@ -20,7 +20,7 @@ void update_weights(Network* network) {
for (int b=0; b<output_depth; b++) {
for (int c=0; c<k_size; c++) {
for (int d=0; d<k_size; d++) {
cnn->w[a][b][c][d] += network->learning_rate * cnn->d_w[a][b][c][d];
cnn->w[a][b][c][d] -= network->learning_rate * cnn->d_w[a][b][c][d];
cnn->d_w[a][b][c][d] = 0;
}
}
@ -31,7 +31,7 @@ void update_weights(Network* network) {
Kernel_nn* nn = k_i->nn;
for (int a=0; a<input_width; a++) {
for (int b=0; b<output_width; b++) {
nn->weights[a][b] += network->learning_rate * nn->d_weights[a][b];
nn->weights[a][b] -= network->learning_rate * nn->d_weights[a][b];
nn->d_weights[a][b] = 0;
}
}
@ -40,7 +40,7 @@ void update_weights(Network* network) {
int input_size = input_width*input_width*input_depth;
for (int a=0; a<input_size; a++) {
for (int b=0; b<output_width; b++) {
nn->weights[a][b] += network->learning_rate * nn->d_weights[a][b];
nn->weights[a][b] -= network->learning_rate * nn->d_weights[a][b];
nn->d_weights[a][b] = 0;
}
}
@ -65,7 +65,7 @@ void update_bias(Network* network) {
for (int a=0; a<output_depth; a++) {
for (int b=0; b<output_width; b++) {
for (int c=0; c<output_width; c++) {
cnn->bias[a][b][c] += network->learning_rate * cnn->d_bias[a][b][c];
cnn->bias[a][b][c] -= network->learning_rate * cnn->d_bias[a][b][c];
cnn->d_bias[a][b][c] = 0;
}
}
@ -73,7 +73,7 @@ void update_bias(Network* network) {
} else if (k_i->nn) { // Full connection
Kernel_nn* nn = k_i->nn;
for (int a=0; a<output_width; a++) {
nn->bias[a] += network->learning_rate * nn->d_bias[a];
nn->bias[a] -= network->learning_rate * nn->d_bias[a];
nn->d_bias[a] = 0;
}
} else { // Pooling