// tipe/src/cnn/main.c

#include <stdlib.h>
#include <stdio.h>
#include <math.h>
#include <float.h>
#include "../colors.h"
#include "include/initialisation.h"
#include "function.c"
#include "creation.c"
#include "make.c"
#include "include/main.h"
// Increases the dimensions of the input image
#define PADDING_INPUT 2
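// Note (assumption for context): with PADDING_INPUT = 2, a 28x28 image such as an
// MNIST digit becomes the 32x32 input suggested by write_image_in_network_32 below
// (28 + 2*2 = 32).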
// Returns 1 with probability dropout_prob (given in percent), used to decide whether a value is dropped
int will_be_drop(int dropout_prob) {
    return (rand() % 100) < dropout_prob;
}
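// Illustrative only (dropout is not wired into this file yet): a value could be
// dropped with, e.g., a 50 % probability as follows, assuming `value` is a float:
//     if (will_be_drop(50)) value = 0.;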
// Writes a height x width image into the padded input layer of the network,
// normalising pixel values from [0, 255] to [0, 1]
void write_image_in_network_32(int** image, int height, int width, float** input) {
    for (int i=0; i < height+2*PADDING_INPUT; i++) {
        for (int j=0; j < width+2*PADDING_INPUT; j++) {
            if (i < PADDING_INPUT || i >= height+PADDING_INPUT || j < PADDING_INPUT || j >= width+PADDING_INPUT) {
                input[i][j] = 0.;
            }
            else {
                // Shift indices by PADDING_INPUT so reads stay within the image bounds
                input[i][j] = (float)image[i-PADDING_INPUT][j-PADDING_INPUT] / 255.0f;
            }
        }
    }
}
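// A minimal allocation sketch for the padded buffer that write_image_in_network_32
// expects. This helper is not part of the original project: its name and the plain
// malloc-based layout are assumptions for illustration only.
static float** allocate_padded_input(int height, int width) {
    int padded_height = height + 2*PADDING_INPUT;
    int padded_width = width + 2*PADDING_INPUT;
    float** input = (float**)malloc(sizeof(float*)*padded_height);
    for (int i=0; i < padded_height; i++) {
        input[i] = (float*)malloc(sizeof(float)*padded_width);
    }
    return input;
}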
void forward_propagation(Network* network) {
    int activation, input_width, input_depth, output_width, output_depth;
    int n = network->size;
    float*** input;
    float*** output;
    Kernel* k_i;
    for (int i=0; i < n-1; i++) {
        k_i = network->kernel[i];
        printf("\n i -> %d :: %d %d \n", i, k_i->cnn==NULL, k_i->nn==NULL);
        input_width = network->width[i];
        input_depth = network->depth[i];
        output_width = network->width[i+1];
        output_depth = network->depth[i+1];
        activation = k_i->activation;
        input = network->input[i];
        output = network->input[i+1];
        if (k_i->cnn!=NULL) { // Convolutional layer (CNN)
            printf("Convolution of cnn: %dx%dx%d -> %dx%dx%d\n", input_depth, input_width, input_width, output_depth, output_width, output_width);
            make_convolution(input, k_i->cnn, output, output_width);
            choose_apply_function_matrix(activation, output, output_depth, output_width);
        }
        else if (k_i->nn!=NULL) { // Fully connected layer (NN)
            printf("Densification of nn: %dx%dx%d -> %dx%dx%d\n", input_depth, input_width, input_width, output_depth, output_width, output_width);
            // Check whether it is an nn that linearises the input
            make_fully_connected(network->input[i][0][0], network->kernel[i]->nn, network->input[i+1][0][0], input_width, output_width);
            choose_apply_function_vector(activation, output, output_width);
        }
        else { // Pooling layer
            if (n-2==i) {
                printf("The network cannot end with a pooling layer");
                return;
            }
            if (1==1) { // Pooling over a matrix (always true for now)
                printf("Average pooling: %dx%dx%d -> %dx%dx%d\n", input_depth, input_width, input_width, output_depth, output_width, output_width);
                make_average_pooling(input, output, activation/100, output_depth, output_width); // the pooling size is encoded as activation/100
            }
            else { // Pooling over a vector
                printf("Error: pooling can only be done on a matrix\n");
                return;
            }
        }
    }
}
void backward_propagation(Network* network, float wanted_number) { // TODO
    printf_warning("Call to backward_propagation, which is still incomplete\n");
    float* wanted_output = generate_wanted_output(wanted_number);
    int n = network->size-1;
    float loss = compute_cross_entropy_loss(network->input[n][0][0], wanted_output, network->width[n]);
    for (int i=n; i >= 0; i--) {
        if (i==n) {
            if (network->kernel[i]->activation == SOFTMAX) {
                int l2 = network->width[i]; // Size of the last layer
                //int l1 = network->width[i-1];
                for (int j=0; j < l2; j++) {
                    // TODO: gradient of the last layer (see the sketch after this function)
                }
            }
            else {
                printf("Error: only the SOFTMAX function is implemented for the last layer");
                return;
            }
        }
        else {
            if (network->kernel[i]->activation == SIGMOID) {
                // TODO: backpropagate through a SIGMOID layer (f'(x) = f(x)*(1 - f(x)))
            }
            else if (network->kernel[i]->activation == TANH) {
                // TODO: backpropagate through a TANH layer (f'(x) = 1 - f(x)^2)
            }
            else if (network->kernel[i]->activation == RELU) {
                // TODO: backpropagate through a RELU layer (f'(x) = 1 if x > 0 else 0)
            }
        }
    }
    free(wanted_output);
}
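// Sketch only, not part of the original implementation: with a SOFTMAX last layer and
// the cross-entropy loss below, the gradient of the loss with respect to the last
// layer's pre-activations simplifies to output - wanted_output. The helper name is
// hypothetical; the TODO loop above could fill a gradient buffer this way.
static void softmax_cross_entropy_gradient(float* output, float* wanted_output, float* gradient, int len) {
    for (int j=0; j < len; j++) {
        gradient[j] = output[j] - wanted_output[j];
    }
}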
// Cross-entropy loss between the network output and the expected one-hot output.
// FLT_EPSILON is used as a floor to avoid computing log(0).
float compute_cross_entropy_loss(float* output, float* wanted_output, int len) {
    float loss=0.;
    for (int i=0; i < len ; i++) {
        if (wanted_output[i]==1) {
            if (output[i]==0.) {
                loss -= log(FLT_EPSILON);
            }
            else {
                loss -= log(output[i]);
            }
        }
    }
    return loss;
}
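// Example: for a one-hot wanted_output with class 3 and output[3] = 0.25, the loss is
// -log(0.25) ≈ 1.386; a perfect prediction (output[3] = 1) gives a loss of 0.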
// Generates the expected one-hot output vector (10 classes) for the wanted digit
float* generate_wanted_output(float wanted_number) {
    float* wanted_output = (float*)malloc(sizeof(float)*10);
    for (int i=0; i < 10; i++) {
        if (i==wanted_number) {
            wanted_output[i]=1;
        }
        else {
            wanted_output[i]=0;
        }
    }
    return wanted_output;
}
int main() {
    Network* network = create_network_lenet5(0, TANH, GLOROT_NORMAL);
    // Print the depth and width of each layer
    for (int i=0; i<8; i++) {
        printf("%d %d \n", network->depth[i], network->width[i]);
    }
    printf("Kernel:\n");
    // Print the dimensions of each convolution kernel
    for (int i=0; i<7; i++) {
        if (network->kernel[i]->cnn!=NULL) {
            printf("%d -> %d %d\n", i, network->kernel[i]->cnn->rows, network->kernel[i]->cnn->k_size);
        }
    }
    forward_propagation(network);
    return 0;
}
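// A possible build command, assuming the relative includes above resolve from this
// directory (the project may provide its own Makefile instead):
//     gcc main.c -lm -o cnn_main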