Mirror of https://github.com/augustin64/projet-tipe (synced 2025-02-02 19:39:39 +01:00)

Commit c67d2bf697: Change 'input_units' to 'size_input'
Parent: 3d1a5a5021
@@ -44,7 +44,7 @@ type | nom de la variable | commentaire
 :---:|:---:|:---:
 uint32_t|activation|
 uint32_t|linearisation|
-uint32_t|input_units|
+uint32_t|size_input|
 uint32_t|output_units|
 
 #### Si la couche est de type pooling:
@@ -79,4 +79,4 @@ float|...|
 float|bias[nn->output_units-1]|biais
 float|weights[0][0]|poids
 float|...|
-float|weights[nn->input_units-1][nn->output_units-1]|
+float|weights[nn->size_input-1][nn->output_units-1]|
@@ -205,7 +205,7 @@ void add_convolution(Network* network, int depth_output, int dim_output, int act
 void add_dense(Network* network, int output_units, int activation) {
     int n = network->size;
     int k_pos = n-1;
-    int input_units = network->width[k_pos];
+    int size_input = network->width[k_pos];
     if (network->max_size == n) {
         printf("Impossible de rajouter une couche dense, le réseau est déjà plein\n");
         return;
@@ -216,7 +216,7 @@ void add_dense(Network* network, int output_units, int activation) {
     network->kernel[k_pos]->activation = activation;
     network->kernel[k_pos]->linearisation = 0;
     network->kernel[k_pos]->pooling = 0;
-    nn->input_units = input_units;
+    nn->size_input = size_input;
     nn->output_units = output_units;
     nn->bias = (float*)nalloc(sizeof(float)*output_units);
     nn->d_bias = (float*)nalloc(sizeof(float)*output_units);
@@ -224,9 +224,9 @@ void add_dense(Network* network, int output_units, int activation) {
         nn->d_bias[i] = 0.;
     }
 
-    nn->weights = (float**)nalloc(sizeof(float*)*input_units);
-    nn->d_weights = (float**)nalloc(sizeof(float*)*input_units);
-    for (int i=0; i < input_units; i++) {
+    nn->weights = (float**)nalloc(sizeof(float*)*size_input);
+    nn->d_weights = (float**)nalloc(sizeof(float*)*size_input);
+    for (int i=0; i < size_input; i++) {
         nn->weights[i] = (float*)nalloc(sizeof(float)*output_units);
         nn->d_weights[i] = (float*)nalloc(sizeof(float)*output_units);
         for (int j=0; j < output_units; j++) {
@@ -234,19 +234,19 @@ void add_dense(Network* network, int output_units, int activation) {
         }
     }
 
-    initialisation_1d_matrix(network->initialisation, nn->bias, output_units, input_units);
-    initialisation_2d_matrix(network->initialisation, nn->weights, input_units, output_units, input_units, output_units);
+    initialisation_1d_matrix(network->initialisation, nn->bias, output_units, size_input);
+    initialisation_2d_matrix(network->initialisation, nn->weights, size_input, output_units, size_input, output_units);
     create_a_line_input_layer(network, n, output_units);
     create_a_line_input_z_layer(network, n, output_units);
     network->size++;
 }
 
 void add_dense_linearisation(Network* network, int output_units, int activation) {
-    // Can replace input_units by a research of this dim
+    // Can replace size_input by a research of this dim
 
     int n = network->size;
     int k_pos = n-1;
-    int input_units = network->depth[k_pos]*network->width[k_pos]*network->width[k_pos];
+    int size_input = network->depth[k_pos]*network->width[k_pos]*network->width[k_pos];
     if (network->max_size == n) {
         printf("Impossible de rajouter une couche dense, le réseau est déjà plein\n");
         return;
@@ -257,7 +257,7 @@ void add_dense_linearisation(Network* network, int output_units, int activation)
     network->kernel[k_pos]->activation = activation;
     network->kernel[k_pos]->linearisation = 1;
     network->kernel[k_pos]->pooling = 0;
-    nn->input_units = input_units;
+    nn->size_input = size_input;
     nn->output_units = output_units;
 
     nn->bias = (float*)nalloc(sizeof(float)*output_units);
@@ -265,17 +265,17 @@ void add_dense_linearisation(Network* network, int output_units, int activation)
     for (int i=0; i < output_units; i++) {
         nn->d_bias[i] = 0.;
     }
-    nn->weights = (float**)nalloc(sizeof(float*)*input_units);
-    nn->d_weights = (float**)nalloc(sizeof(float*)*input_units);
-    for (int i=0; i < input_units; i++) {
+    nn->weights = (float**)nalloc(sizeof(float*)*size_input);
+    nn->d_weights = (float**)nalloc(sizeof(float*)*size_input);
+    for (int i=0; i < size_input; i++) {
         nn->weights[i] = (float*)nalloc(sizeof(float)*output_units);
         nn->d_weights[i] = (float*)nalloc(sizeof(float)*output_units);
         for (int j=0; j < output_units; j++) {
             nn->d_weights[i][j] = 0.;
         }
     }
-    initialisation_1d_matrix(network->initialisation, nn->bias, output_units, input_units);
-    initialisation_2d_matrix(network->initialisation, nn->weights, input_units, output_units, input_units, output_units);
+    initialisation_1d_matrix(network->initialisation, nn->bias, output_units, size_input);
+    initialisation_2d_matrix(network->initialisation, nn->weights, size_input, output_units, size_input, output_units);
     create_a_line_input_layer(network, n, output_units);
     create_a_line_input_z_layer(network, n, output_units);
     network->size++;
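For context (not part of this diff): add_dense_linearisation derives size_input from the flattened depth*width*width of the previous layer, while add_dense derives it from the previous layer's width, so callers never pass size_input explicitly. A hypothetical call sequence, with layer sizes and the activation value chosen purely for illustration:

    // Hypothetical usage sketch; the sizes (120, 10) and the `activation` value are
    // illustrative and not taken from this repository.
    add_dense_linearisation(network, 120, activation);  // size_input = depth*width*width of the previous layer
    add_dense(network, 10, activation);                 // size_input = network->width of the previous layer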
@@ -70,7 +70,7 @@ void free_convolution(Network* network, int pos) {
 void free_dense(Network* network, int pos) {
     free_a_line_input_layer(network, pos+1);
     Kernel_nn* k_pos = network->kernel[pos]->nn;
-    int dim = k_pos->input_units;
+    int dim = k_pos->size_input;
     for (int i=0; i < dim; i++) {
         gree(k_pos->weights[i]);
         gree(k_pos->d_weights[i]);
@@ -87,7 +87,7 @@ void free_dense(Network* network, int pos) {
 void free_dense_linearisation(Network* network, int pos) {
     free_a_line_input_layer(network, pos+1);
     Kernel_nn* k_pos = network->kernel[pos]->nn;
-    int dim = k_pos->input_units;
+    int dim = k_pos->size_input;
     for (int i=0; i < dim; i++) {
         gree(k_pos->weights[i]);
         gree(k_pos->d_weights[i]);
@@ -12,12 +12,12 @@ typedef struct Kernel_cnn {
 } Kernel_cnn;
 
 typedef struct Kernel_nn {
-    int input_units; // Nombre d'éléments en entrée
+    int size_input; // Nombre d'éléments en entrée
     int output_units; // Nombre d'éléments en sortie
     float* bias; // bias[output_units]
     float* d_bias; // d_bias[output_units]
-    float** weights; // weight[input_units][output_units]
-    float** d_weights; // d_weights[input_units][output_units]
+    float** weights; // weight[size_input][output_units]
+    float** d_weights; // d_weights[size_input][output_units]
 } Kernel_nn;
 
 typedef struct Kernel {
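As a reading aid (not part of the commit): the struct stores the dense layer's weight matrix as weights[size_input][output_units], so a forward pass over a Kernel_nn would look roughly like the sketch below. Only the struct fields come from the code above; apply_dense and the input/output buffers are assumed names.

    // Sketch only: output[j] = bias[j] + sum over i of input[i] * weights[i][j].
    // `apply_dense`, `input` and `output` are hypothetical; the fields match Kernel_nn above.
    void apply_dense(Kernel_nn* nn, const float* input, float* output) {
        for (int j = 0; j < nn->output_units; j++) {
            output[j] = nn->bias[j];
        }
        for (int i = 0; i < nn->size_input; i++) {
            for (int j = 0; j < nn->output_units; j++) {
                output[j] += input[i] * nn->weights[i][j];
            }
        }
    }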
@@ -111,7 +111,7 @@ void write_couche(Network* network, int indice_couche, int type_couche, FILE* pt
     uint32_t pre_buffer[4];
     pre_buffer[0] = kernel->activation;
     pre_buffer[1] = kernel->linearisation;
-    pre_buffer[2] = nn->input_units;
+    pre_buffer[2] = nn->size_input;
     pre_buffer[3] = nn->output_units;
     fwrite(pre_buffer, sizeof(pre_buffer), 1, ptr);
 
@@ -122,7 +122,7 @@ void write_couche(Network* network, int indice_couche, int type_couche, FILE* pt
         }
         fwrite(buffer, sizeof(buffer), 1, ptr);
 
-        for (int i=0; i < nn->input_units; i++) {
+        for (int i=0; i < nn->size_input; i++) {
            indice_buffer = 0;
            float buffer[nn->output_units];
            for (int j=0; j < nn->output_units; j++) {
@@ -287,7 +287,7 @@ Kernel* read_kernel(int type_couche, int output_dim, FILE* ptr) {
 
     kernel->activation = buffer[0];
     kernel->linearisation = buffer[1];
-    kernel->nn->input_units = buffer[2];
+    kernel->nn->size_input = buffer[2];
     kernel->nn->output_units = buffer[3];
 
     // Lecture du corps
@@ -302,9 +302,9 @@ Kernel* read_kernel(int type_couche, int output_dim, FILE* ptr) {
         nn->d_bias[i] = 0.;
     }
 
-    nn->weights = (float**)nalloc(sizeof(float*)*nn->input_units);
-    nn->d_weights = (float**)nalloc(sizeof(float*)*nn->input_units);
-    for (int i=0; i < nn->input_units; i++) {
+    nn->weights = (float**)nalloc(sizeof(float*)*nn->size_input);
+    nn->d_weights = (float**)nalloc(sizeof(float*)*nn->size_input);
+    for (int i=0; i < nn->size_input; i++) {
         nn->weights[i] = (float*)nalloc(sizeof(float)*nn->output_units);
         nn->d_weights[i] = (float*)nalloc(sizeof(float)*nn->output_units);
         for (int j=0; j < nn->output_units; j++) {
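Note that the rename is source-only: write_couche still serialises a dense layer header as four uint32_t values in the order activation, linearisation, size_input (previously input_units), output_units, matching the table at the top of this diff, so files written before the commit remain readable. A minimal read sketch under that assumption (read_nn_header is a hypothetical helper, not part of the project):

    #include <stdint.h>
    #include <stdio.h>

    // Sketch only: reads the 4-field dense-layer header described above.
    // out[0]=activation, out[1]=linearisation, out[2]=size_input, out[3]=output_units
    static int read_nn_header(FILE* ptr, uint32_t out[4]) {
        return fread(out, sizeof(uint32_t), 4, ptr) == 4;
    }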
@@ -56,12 +56,12 @@ bool equals_networks(Network* network1, Network* network2) {
             checkEquals(kernel[i]->pooling, "kernel[i]->pooling pour un pooling", i);
         } else if (!network1->kernel[i]->cnn) {
             // Type NN
-            checkEquals(kernel[i]->nn->input_units, "kernel[i]->nn->input_units", i);
+            checkEquals(kernel[i]->nn->size_input, "kernel[i]->nn->size_input", i);
             checkEquals(kernel[i]->nn->output_units, "kernel[i]->nn->output_units", i);
             for (int j=0; j < network1->kernel[i]->nn->output_units; j++) {
                 checkEquals(kernel[i]->nn->bias[j], "kernel[i]->nn->bias[j]", j);
             }
-            for (int j=0; j < network1->kernel[i]->nn->input_units; j++) {
+            for (int j=0; j < network1->kernel[i]->nn->size_input; j++) {
                 for (int k=0; k < network1->kernel[i]->nn->output_units; k++) {
                     checkEquals(kernel[i]->nn->weights[j][k], "kernel[i]->nn->weights[j][k]", k);
                 }
@@ -100,7 +100,7 @@ Network* copy_network(Network* network) {
     // Paramètre du réseau
     int size = network->size;
     // Paramètres des couches NN
-    int input_units;
+    int size_input;
     int output_units;
     // Paramètres des couches CNN
     int rows;
@@ -137,13 +137,13 @@ Network* copy_network(Network* network) {
             copyVar(kernel[i]->activation);
             copyVar(kernel[i]->linearisation); // 0
 
-            input_units = network->kernel[i]->nn->input_units;
+            size_input = network->kernel[i]->nn->size_input;
             output_units = network->kernel[i]->nn->output_units;
 
             network_cp->kernel[i]->cnn = NULL;
             network_cp->kernel[i]->nn = (Kernel_nn*)nalloc(sizeof(Kernel_nn));
 
-            copyVar(kernel[i]->nn->input_units);
+            copyVar(kernel[i]->nn->size_input);
             copyVar(kernel[i]->nn->output_units);
 
             network_cp->kernel[i]->nn->bias = (float*)nalloc(sizeof(float)*output_units);
@@ -153,9 +153,9 @@ Network* copy_network(Network* network) {
                 network_cp->kernel[i]->nn->d_bias[j] = 0.;
             }
 
-            network_cp->kernel[i]->nn->weights = (float**)nalloc(sizeof(float*)*input_units);
-            network_cp->kernel[i]->nn->d_weights = (float**)nalloc(sizeof(float*)*input_units);
-            for (int j=0; j < input_units; j++) {
+            network_cp->kernel[i]->nn->weights = (float**)nalloc(sizeof(float*)*size_input);
+            network_cp->kernel[i]->nn->d_weights = (float**)nalloc(sizeof(float*)*size_input);
+            for (int j=0; j < size_input; j++) {
                 network_cp->kernel[i]->nn->weights[j] = (float*)nalloc(sizeof(float)*output_units);
                 network_cp->kernel[i]->nn->d_weights[j] = (float*)nalloc(sizeof(float)*output_units);
                 for (int k=0; k < output_units; k++) {
@@ -254,7 +254,7 @@ void copy_network_parameters(Network* network_src, Network* network_dest) {
     // Paramètre du réseau
     int size = network_src->size;
     // Paramètres des couches NN
-    int input_units;
+    int size_input;
     int output_units;
     // Paramètres des couches CNN
     int rows;
@@ -267,13 +267,13 @@ void copy_network_parameters(Network* network_src, Network* network_dest) {
     for (int i=0; i < size-1; i++) {
         if (!network_src->kernel[i]->cnn && network_src->kernel[i]->nn) { // Cas du NN
 
-            input_units = network_src->kernel[i]->nn->input_units;
+            size_input = network_src->kernel[i]->nn->size_input;
             output_units = network_src->kernel[i]->nn->output_units;
 
             for (int j=0; j < output_units; j++) {
                 copyVarParams(kernel[i]->nn->bias[j]);
             }
-            for (int j=0; j < input_units; j++) {
+            for (int j=0; j < size_input; j++) {
                 for (int k=0; k < output_units; k++) {
                     copyVarParams(kernel[i]->nn->weights[j][k]);
                 }
@@ -315,7 +315,7 @@ int count_null_weights(Network* network) {
 
     int size = network->size;
     // Paramètres des couches NN
-    int input_units;
+    int size_input;
     int output_units;
     // Paramètres des couches CNN
     int rows;
@@ -326,13 +326,13 @@ int count_null_weights(Network* network) {
     for (int i=0; i < size-1; i++) {
         if (!network->kernel[i]->cnn && network->kernel[i]->nn) { // Cas du NN
 
-            input_units = network->kernel[i]->nn->input_units;
+            size_input = network->kernel[i]->nn->size_input;
             output_units = network->kernel[i]->nn->output_units;
 
             for (int j=0; j < output_units; j++) {
                 null_bias += fabs(network->kernel[i]->nn->bias[j]) <= epsilon;
             }
-            for (int j=0; j < input_units; j++) {
+            for (int j=0; j < size_input; j++) {
                 for (int k=0; k < output_units; k++) {
                     null_weights += fabs(network->kernel[i]->nn->weights[j][k]) <= epsilon;
                 }
@@ -27,7 +27,7 @@ int main() {
             }
         } else if (!kernel->cnn) {
            printf("\n==== Couche %d de type "GREEN"NN"RESET" ====\n", i);
-            printf("input: %d\n", kernel->nn->input_units);
+            printf("input: %d\n", kernel->nn->size_input);
             printf("output: %d\n", kernel->nn->output_units);
         } else {
             printf("\n==== Couche %d de type "BLUE"CNN"RESET" ====\n", i);