Mirror of https://github.com/augustin64/projet-tipe, synced 2025-02-02 19:39:39 +01:00
Simplify code & typo

This commit is contained in:
parent 4720fb18e1
commit 3643f4c50e
@@ -7,7 +7,7 @@ typedef struct Neuron {
     float z; // Stores the computations performed on the neuron

     float *back_weights; // Change applied to the outgoing weights during backpropagation
-    float *last_back_weights; // Last change of d_poid_sortants
+    float *last_back_weights; // Last change of d_poids_sortants
     float back_bias; // Change applied to the bias during backpropagation
     float last_back_bias; // Last change of back_bias
 } Neuron;
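Note (not part of the commit): the fields above are the per-neuron backpropagation buffers. Below is a minimal allocation sketch, assuming the Neuron struct contains at least the members shown in this hunk and that each neuron has nb_sortants outgoing weights; the repository's real creer_neuron is not visible in this diff, so the helper name is purely illustrative.

    #include <stdlib.h>

    /* Illustrative only: fills just the backpropagation fields shown above,
     * assuming the Neuron definition from the hunk. */
    Neuron* sketch_creer_neuron(int nb_sortants) {
        Neuron* neuron = (Neuron*)malloc(sizeof(Neuron));
        neuron->z = 0.f;                                                         // saved forward computation
        neuron->back_weights = (float*)calloc(nb_sortants, sizeof(float));       // current weight updates
        neuron->last_back_weights = (float*)calloc(nb_sortants, sizeof(float));  // previous weight updates
        neuron->back_bias = 0.f;
        neuron->last_back_bias = 0.f;
        return neuron;
    }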
@@ -92,42 +92,44 @@ void add_convolution(Network* network, int nb_filter, int kernel_size, int activation) {
     int r = network->dim[n-1][1];
     int c = nb_filter;
     network->kernel[n]->nn = NULL;
-    network->kernel[n]->cnn = (Kernel_cnn*)malloc(sizeof(Kernel_cnn));
     network->kernel[n]->activation = activation;
-    network->kernel[n]->cnn->k_size = kernel_size;
-    network->kernel[n]->cnn->rows = r;
-    network->kernel[n]->cnn->columns = c;
-    network->kernel[n]->cnn->w = (float****)malloc(sizeof(float***)*r);
-    network->kernel[n]->cnn->d_w = (float****)malloc(sizeof(float***)*r);
+    network->kernel[n]->cnn = (Kernel_cnn*)malloc(sizeof(Kernel_cnn));
+    Kernel_cnn* cnn = network->kernel[n]->cnn;
+
+    cnn->k_size = kernel_size;
+    cnn->rows = r;
+    cnn->columns = c;
+    cnn->w = (float****)malloc(sizeof(float***)*r);
+    cnn->d_w = (float****)malloc(sizeof(float***)*r);
     for (int i=0; i < r; i++) {
-        network->kernel[n]->cnn->w[i] = (float***)malloc(sizeof(float**)*c);
-        network->kernel[n]->cnn->d_w[i] = (float***)malloc(sizeof(float**)*c);
+        cnn->w[i] = (float***)malloc(sizeof(float**)*c);
+        cnn->d_w[i] = (float***)malloc(sizeof(float**)*c);
         for (int j=0; j < c; j++) {
-            network->kernel[n]->cnn->w[i][j] = (float**)malloc(sizeof(float*)*kernel_size);
-            network->kernel[n]->cnn->d_w[i][j] = (float**)malloc(sizeof(float*)*kernel_size);
+            cnn->w[i][j] = (float**)malloc(sizeof(float*)*kernel_size);
+            cnn->d_w[i][j] = (float**)malloc(sizeof(float*)*kernel_size);
             for (int k=0; k < kernel_size; k++) {
-                network->kernel[n]->cnn->w[i][j][k] = (float*)malloc(sizeof(float)*kernel_size);
-                network->kernel[n]->cnn->d_w[i][j][k] = (float*)malloc(sizeof(float)*kernel_size);
+                cnn->w[i][j][k] = (float*)malloc(sizeof(float)*kernel_size);
+                cnn->d_w[i][j][k] = (float*)malloc(sizeof(float)*kernel_size);
             }
         }
     }
-    network->kernel[n]->cnn->bias = (float***)malloc(sizeof(float**)*c);
-    network->kernel[n]->cnn->d_bias = (float***)malloc(sizeof(float**)*c);
+    cnn->bias = (float***)malloc(sizeof(float**)*c);
+    cnn->d_bias = (float***)malloc(sizeof(float**)*c);
     for (int i=0; i < c; i++) {
-        network->kernel[n]->cnn->bias[i] = (float**)malloc(sizeof(float*)*kernel_size);
-        network->kernel[n]->cnn->d_bias[i] = (float**)malloc(sizeof(float*)*kernel_size);
+        cnn->bias[i] = (float**)malloc(sizeof(float*)*kernel_size);
+        cnn->d_bias[i] = (float**)malloc(sizeof(float*)*kernel_size);
         for (int j=0; j < kernel_size; j++) {
-            network->kernel[n]->cnn->bias[i][j] = (float*)malloc(sizeof(float)*kernel_size);
-            network->kernel[n]->cnn->d_bias[i][j] = (float*)malloc(sizeof(float)*kernel_size);
+            cnn->bias[i][j] = (float*)malloc(sizeof(float)*kernel_size);
+            cnn->d_bias[i][j] = (float*)malloc(sizeof(float)*kernel_size);
         }
     }
     create_a_cube_input_layer(network, n, c, network->dim[n-1][0] - 2*(kernel_size/2));
     int n_int = network->dim[n-1][0]*network->dim[n-1][0]*network->dim[n-1][1];
     int n_out = network->dim[n][0]*network->dim[n][0]*network->dim[n][1];
-    initialisation_3d_matrix(network->initialisation, network->kernel[n]->cnn->bias, c, kernel_size, kernel_size, n_int+n_out);
-    initialisation_3d_matrix(ZERO, network->kernel[n]->cnn->d_bias, c, kernel_size, kernel_size, n_int+n_out);
-    initialisation_4d_matrix(network->initialisation, network->kernel[n]->cnn->w, r, c, kernel_size, kernel_size, n_int+n_out);
-    initialisation_4d_matrix(ZERO, network->kernel[n]->cnn->d_w, r, c, kernel_size, kernel_size, n_int+n_out);
+    initialisation_3d_matrix(network->initialisation, cnn->bias, c, kernel_size, kernel_size, n_int+n_out);
+    initialisation_3d_matrix(ZERO, cnn->d_bias, c, kernel_size, kernel_size, n_int+n_out);
+    initialisation_4d_matrix(network->initialisation, cnn->w, r, c, kernel_size, kernel_size, n_int+n_out);
+    initialisation_4d_matrix(ZERO, cnn->d_w, r, c, kernel_size, kernel_size, n_int+n_out);
     network->size++;
 }
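Note (not part of the commit): w and d_w are allocated above as rows × columns × k_size × k_size arrays, and bias/d_bias as columns × k_size × k_size, so a matching teardown has to free them in the reverse nesting order. The sketch below assumes exactly those allocations; the repository's actual cleanup code is not shown in this diff and free_kernel_cnn is an illustrative name.

    #include <stdlib.h>

    /* Illustrative sketch: frees exactly what add_convolution allocates,
     * assuming the repository's Kernel_cnn definition. */
    void free_kernel_cnn(Kernel_cnn* cnn) {
        for (int i = 0; i < cnn->rows; i++) {
            for (int j = 0; j < cnn->columns; j++) {
                for (int k = 0; k < cnn->k_size; k++) {
                    free(cnn->w[i][j][k]);
                    free(cnn->d_w[i][j][k]);
                }
                free(cnn->w[i][j]);
                free(cnn->d_w[i][j]);
            }
            free(cnn->w[i]);
            free(cnn->d_w[i]);
        }
        free(cnn->w);
        free(cnn->d_w);
        for (int i = 0; i < cnn->columns; i++) {
            for (int j = 0; j < cnn->k_size; j++) {
                free(cnn->bias[i][j]);
                free(cnn->d_bias[i][j]);
            }
            free(cnn->bias[i]);
            free(cnn->d_bias[i]);
        }
        free(cnn->bias);
        free(cnn->d_bias);
        free(cnn);
    }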
@@ -139,21 +141,22 @@ void add_dense(Network* network, int input_units, int output_units, int activation) {
     }
     network->kernel[n]->cnn = NULL;
     network->kernel[n]->nn = (Kernel_nn*)malloc(sizeof(Kernel_nn));
+    Kernel_nn* nn = network->kernel[n]->nn;
     network->kernel[n]->activation = activation;
-    network->kernel[n]->nn->input_units = input_units;
-    network->kernel[n]->nn->output_units = output_units;
-    network->kernel[n]->nn->bias = (float*)malloc(sizeof(float)*output_units);
-    network->kernel[n]->nn->d_bias = (float*)malloc(sizeof(float)*output_units);
-    network->kernel[n]->nn->weights = (float**)malloc(sizeof(float*)*input_units);
-    network->kernel[n]->nn->d_weights = (float**)malloc(sizeof(float*)*input_units);
+    nn->input_units = input_units;
+    nn->output_units = output_units;
+    nn->bias = (float*)malloc(sizeof(float)*output_units);
+    nn->d_bias = (float*)malloc(sizeof(float)*output_units);
+    nn->weights = (float**)malloc(sizeof(float*)*input_units);
+    nn->d_weights = (float**)malloc(sizeof(float*)*input_units);
     for (int i=0; i < input_units; i++) {
-        network->kernel[n]->nn->weights[i] = (float*)malloc(sizeof(float)*output_units);
-        network->kernel[n]->nn->d_weights[i] = (float*)malloc(sizeof(float)*output_units);
+        nn->weights[i] = (float*)malloc(sizeof(float)*output_units);
+        nn->d_weights[i] = (float*)malloc(sizeof(float)*output_units);
     }
-    initialisation_1d_matrix(network->initialisation, network->kernel[n]->nn->bias, output_units, output_units+input_units);
-    initialisation_1d_matrix(ZERO, network->kernel[n]->nn->d_bias, output_units, output_units+input_units);
-    initialisation_2d_matrix(network->initialisation, network->kernel[n]->nn->weights, input_units, output_units, output_units+input_units);
-    initialisation_2d_matrix(ZERO, network->kernel[n]->nn->d_weights, input_units, output_units, output_units+input_units);
+    initialisation_1d_matrix(network->initialisation, nn->bias, output_units, output_units+input_units);
+    initialisation_1d_matrix(ZERO, nn->d_bias, output_units, output_units+input_units);
+    initialisation_2d_matrix(network->initialisation, nn->weights, input_units, output_units, output_units+input_units);
+    initialisation_2d_matrix(ZERO, nn->d_weights, input_units, output_units, output_units+input_units);
     create_a_line_input_layer(network, n, output_units);
     network->size++;
 }
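Note (not part of the commit): the dense kernel owns output_units bias entries and an input_units × output_units weight matrix, so the matching teardown is much simpler than the convolutional one. Again a sketch only, mirroring the allocations above; free_kernel_nn is an illustrative name, not a function from the repository.

    #include <stdlib.h>

    /* Illustrative sketch: frees exactly what add_dense allocates,
     * assuming the repository's Kernel_nn definition. */
    void free_kernel_nn(Kernel_nn* nn) {
        for (int i = 0; i < nn->input_units; i++) {
            free(nn->weights[i]);
            free(nn->d_weights[i]);
        }
        free(nn->weights);
        free(nn->d_weights);
        free(nn->bias);
        free(nn->d_bias);
        free(nn);
    }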
@@ -34,7 +34,7 @@ Layer* creer_layer(int nb_neurons, int nb_sortants) {
     layer->nb_neurons = nb_neurons;
     layer->neurons = tab;

-    for (int i=0; i<nb_neurons; i++) {
+    for (int i=0; i < nb_neurons; i++) {
         tab[i] = creer_neuron(nb_sortants);
     }
     return layer;
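Note (not part of the commit): creer_layer fills tab with one neuron per slot, so a symmetric cleanup walks the same array. The sketch below assumes layer->neurons is an array of Neuron* and that each neuron owns the two float buffers from the first hunk; both the Layer layout beyond the fields shown here and the sketch_free_layer name are assumptions.

    #include <stdlib.h>

    /* Illustrative sketch: releases a layer built like creer_layer above. */
    void sketch_free_layer(Layer* layer) {
        for (int i = 0; i < layer->nb_neurons; i++) {
            free(layer->neurons[i]->back_weights);
            free(layer->neurons[i]->last_back_weights);
            free(layer->neurons[i]);
        }
        free(layer->neurons);
        free(layer);
    }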