augustin64 2022-11-08 19:25:14 +01:00
commit c8b369f862
3 changed files with 18 additions and 15 deletions

View File

@@ -5,11 +5,12 @@
 - [Medium](https://medium.com/@14prakash/back-propagation-is-very-simple-who-made-it-complicated-97b794c97e5c)
 - [Simeon Kostadinov: Understanding Backpropagation](https://towardsdatascience.com/understanding-backpropagation-algorithm-7bb3aa2f95fd)
 - [Tobias Hill: Gradient Descent](https://towardsdatascience.com/part-2-gradient-descent-and-backpropagation-bf90932c066a)
+- [Adam Optimizer](https://arxiv.org/pdf/1412.6980.pdf)
 
 ## Convolutional Neural Network
 - [The Independent Code](https://www.youtube.com/watch?v=Lakz2MoHy6o)
 
 ## Jeux de données
 - [MNIST](http://yann.lecun.com/exdb/mnist/)
 - [ImageNet](https://www.image-net.org/index.php)

View File

@@ -1,3 +1,5 @@
+#include "struct.h"
+
 #ifndef DEF_UPDATE_H
 #define DEF_UPDATE_H

View File

@@ -11,14 +11,14 @@
 #define purple printf("\033[0;35m")
 #define reset_color printf("\033[0m")
 
-void print_kernel_cnn(Kernel_cnn* k, int depth_input, int dim_input, int depth_output, int dim_output) {
+void print_kernel_cnn(Kernel_cnn* ker, int depth_input, int dim_input, int depth_output, int dim_output) {
     int k_size = dim_input - dim_output + 1;
     // print bias
     green;
     for (int i=0; i<depth_output; i++) {
         for (int j=0; j<dim_output; j++) {
             for (int k=0; k<dim_output; k++) {
-                printf("%.2f", k->bias[i][j][k]);
+                printf("%.2f", ker->bias[i][j][k]);
             }
             print_space;
         }
@@ -29,12 +29,12 @@ void print_kernel_cnn(Kernel_cnn* k, int depth_input, int dim_input, int depth_o
 
     //print weights
     red;
-    for (int i=0; i<; i++) {
+    for (int i=0; i<depth_input; i++) {
         printf("------Line %d-----\n", i);
-        for (int j=0; j<; j++) {
-            for (int k=0; k<; k++) {
-                for (int l=0; l<; l++) {
-                    printf("%.2f", k->w[i][j][k][l]);
+        for (int j=0; j<depth_output; j++) {
+            for (int k=0; k<k_size; k++) {
+                for (int l=0; l<k_size; l++) {
+                    printf("%.2f", ker->w[i][j][k][l]);
                 }
                 print_space;
             }
@@ -56,11 +56,11 @@ void print_pooling(int size) {
     print_dspace;
 }
 
-void print_kernel_nn(Kernel_nn* k, int size_input, int size_output) {
+void print_kernel_nn(Kernel_nn* ker, int size_input, int size_output) {
     // print bias
     green;
     for (int i=0; i<size_output; i++) {
-        printf("%.2f ", k->bias[i]);
+        printf("%.2f ", ker->bias[i]);
     }
     print_dspace;
     reset_color;
@@ -69,7 +69,7 @@ void print_kernel_nn(Kernel_nn* k, int size_input, int size_output) {
     red;
     for (int i=0; i<size_output; i++) {
         for (int j=0; j<size_input; j++) {
-            printf("%.2f ", k->weights[j][i]);
+            printf("%.2f ", ker->weights[j][i]);
         }
         print_space;
     }
@@ -97,17 +97,18 @@ void print_input(float*** input, int depth, int dim) {
 void print_cnn(Network* network) {
     int n = network->size;
     int input_depth, input_width, output_depth, output_width;
+    //float*** output;
+    //float*** input;
     Kernel* k_i;
 
     for (int i=0; i<(n-1); i++) {
-        input = network->input[i];
+        //input = network->input[i];
         input_depth = network->depth[i];
         input_width = network->width[i];
-        output = network->input[i+1];
+        //output = network->input[i+1];
         output_depth = network->depth[i+1];
         output_width = network->width[i+1];
         k_i = network->kernel[i];
-        print_input(input, input_depth, input_width);
 
         if (k_i->cnn) { // Convolution
             print_kernel_cnn(k_i->cnn, input_depth, input_width, output_depth, output_width);
@@ -119,5 +120,4 @@ void print_cnn(Network* network) {
             print_pooling(input_width - output_width +1);
         }
     }
-    print_input(input[n-1], network->depth[n-1], network->width[n-1]);
 }