Reduce redundancy in function.c

augustin64 2023-03-03 21:56:42 +01:00
parent 7f88acf17f
commit 177aaa869c
2 changed files with 41 additions and 37 deletions

function.c

@@ -11,6 +11,7 @@ float max_float(float a, float b) {
     return a < b ? b:a;
 }
 
+
 float identity(float x) {
     return x;
 }
@@ -20,6 +21,7 @@ float identity_derivative(float x) {
     return 1;
 }
 
+
 float sigmoid(float x) {
     return 1/(1 + exp(-x));
 }
@@ -29,6 +31,7 @@ float sigmoid_derivative(float x) {
     return tmp/((1+tmp)*(1+tmp));
 }
 
+
 float relu(float x) {
     return max_float(0, x);
 }
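A side note on the sigmoid_derivative body shown above: `tmp/((1+tmp)*(1+tmp))` with `tmp = exp(-x)` is algebraically the usual identity sigma'(x) = sigma(x) * (1 - sigma(x)). A quick standalone check of that equivalence (illustrative code, not from the repository):

#include <math.h>
#include <stdio.h>

static float sigmoid(float x) {
    return 1/(1 + expf(-x));
}

int main(void) {
    // Compare the diff's closed form with sigma(x)*(1 - sigma(x)).
    for (float x = -4; x <= 4; x += 1) {
        float tmp = expf(-x);
        float from_diff  = tmp/((1+tmp)*(1+tmp));
        float from_sigma = sigmoid(x)*(1 - sigmoid(x));
        printf("x=%5.1f  diff-form=%.6f  sigma-form=%.6f\n", x, from_diff, from_sigma);
    }
    return 0;
}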
@@ -39,6 +42,7 @@ float relu_derivative(float x) {
     return 0;
 }
 
+
 float leaky_relu(float x) {
     if (x>0)
         return x;
@@ -51,6 +55,7 @@ float leaky_relu_derivative(float x) {
     return LEAKER;
 }
 
+
 float tanh_(float x) {
     return tanh(x);
 }
@@ -60,6 +65,7 @@ float tanh_derivative(float x) {
     return 1 - a*a;
 }
 
+
 void apply_softmax_input(float ***input, int depth, int rows, int columns) {
     float m = FLT_MIN;
     float sum=0;
@@ -87,6 +93,7 @@ void apply_softmax_input(float ***input, int depth, int rows, int columns) {
     }
 }
 
+
 void apply_function_input(float (*f)(float), float*** input, int depth, int rows, int columns) {
     for (int i=0; i < depth; i++) {
         for (int j=0; j < rows; j++) {
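For context, apply_function_input (whose outer loops are visible above; the innermost loop over columns is implied by the signature) applies a scalar function to every element of a depth x rows x columns tensor. A minimal self-contained sketch of the same pattern, with an illustrative tensor built on the stack:

#include <math.h>
#include <stdio.h>

// Sketch: apply f to every element of a depth x rows x columns tensor,
// following the loop structure of apply_function_input above.
static void apply_elementwise(float (*f)(float), float ***input,
                              int depth, int rows, int columns) {
    for (int i = 0; i < depth; i++)
        for (int j = 0; j < rows; j++)
            for (int k = 0; k < columns; k++)
                input[i][j][k] = f(input[i][j][k]);
}

static float sigmoid_example(float x) {
    return 1/(1 + expf(-x));
}

int main(void) {
    float row[3] = {-1.f, 0.f, 1.f};   // one row of three values
    float *row_ptrs[1] = {row};        // rows of the single plane
    float **planes[1] = {row_ptrs};    // depth-1 "tensor"
    apply_elementwise(sigmoid_example, planes, 1, 1, 3);
    printf("%f %f %f\n", row[0], row[1], row[2]);  // ~0.269 0.500 0.731
    return 0;
}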
@@ -97,59 +104,54 @@ void apply_function_input(float (*f)(float), float*** input, int depth, int rows
         }
     }
 }
 
-void choose_apply_function_matrix(int activation, float*** input, int depth, int dim) {
-    if (activation == RELU) {
-        apply_function_input(relu, input, depth, dim, dim);
-    } else if (activation == SIGMOID) {
-        apply_function_input(sigmoid, input, depth, dim, dim);
-    } else if (activation == SOFTMAX) {
-        apply_softmax_input(input, depth, dim, dim);
-    } else if (activation == TANH) {
-        apply_function_input(tanh_, input, depth, dim, dim);
-    } else if (activation == LEAKY_RELU) {
-        apply_function_input(leaky_relu, input, depth, dim, dim);
-    } else {
-        printf_error("fonction d'activation inconnue (apply_function_to_matrix): ");
-        printf("%d\n", activation);
-    }
+void apply_function_to_matrix(int activation, float*** input, int depth, int dim) {
+    if (activation == SOFTMAX) {
+        return apply_softmax_input(input, depth, dim, dim);
+    }
+    if (activation > 1) { // Exclude negative values (derivative) and 1 (identity)
+        ptr f = get_activation_function(activation);
+        return apply_function_input(f, input, depth, dim, dim);
+    }
+    printf_error("fonction d'activation inconnue (apply_function_to_matrix): ");
+    printf("%d\n", activation);
 }
 
-void choose_apply_function_vector(int activation, float*** input, int dim) {
-    if (activation == RELU) {
-        apply_function_input(relu, input, 1, 1, dim);
-    } else if (activation == SIGMOID) {
-        apply_function_input(sigmoid, input, 1, 1, dim);
-    } else if (activation == SOFTMAX) {
-        apply_softmax_input(input, 1, 1, dim);
-    } else if (activation == TANH) {
-        apply_function_input(tanh_, input, 1, 1, dim);
-    } else if (activation == LEAKY_RELU) {
-        apply_function_input(leaky_relu, input, 1, 1, dim);
-    } else {
-        printf_error("fonction d'activation inconnue (apply_function_to_vector): ");
-        printf("%d\n", activation);
-    }
+void apply_function_to_vector(int activation, float*** input, int dim) {
+    if (activation == SOFTMAX) {
+        return apply_softmax_input(input, 1, 1, dim);
+    }
+    if (activation > 1) { // Exclude negative values (derivative) and 1 (identity)
+        ptr f = get_activation_function(activation);
+        return apply_function_input(f, input, 1, 1, dim);
+    }
+    printf_error("fonction d'activation inconnue (apply_function_to_vector): ");
+    printf("%d\n", activation);
 }
 
-ptr get_function_activation(int activation) {
+ptr get_activation_function(int activation) {
     if (activation == RELU) {
         return &relu;
     }
     if (activation == -RELU) {
         return &relu_derivative;
     }
-    if (activation == -IDENTITY) {
-        return &identity_derivative;
-    }
     if (activation == IDENTITY) {
         return &identity;
     }
+    if (activation == -IDENTITY) {
+        return &identity_derivative;
+    }
     if (activation == SIGMOID) {
         return &sigmoid;
     }
     if (activation == -SIGMOID) {
         return &sigmoid_derivative;
     }
     if (activation == SOFTMAX) {
         printf_error("impossible de renvoyer la fonction softmax\n");
         return NULL;
@@ -158,12 +160,14 @@ ptr get_function_activation(int activation) {
         printf_error("impossible de renvoyer la dérivée de la fonction softmax\n");
         return NULL;
     }
+
     if (activation == TANH) {
         return &tanh_;
     }
     if (activation == -TANH) {
         return &tanh_derivative;
     }
+
     if (activation == LEAKY_RELU) {
         return &leaky_relu;
     }
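Taken together, the hunks above replace two per-activation if/else chains with a single lookup through get_activation_function, relying on the identifier convention visible in the diff: each activation has a positive identifier, its derivative is the negated identifier (-RELU maps to relu_derivative), IDENTITY is 1 (hence the `activation > 1` guard), and SOFTMAX is special-cased because there is no pointwise function to return for it. A minimal sketch of that convention; the concrete identifier values and the `ptr` typedef are defined elsewhere in the repository, so the ones below are assumptions:

#include <stdio.h>
#include <stddef.h>
#include <math.h>

typedef float (*ptr)(float);  // assumed shape of the repo's `ptr` typedef

// Assumed identifier values: IDENTITY must be 1 and real activations > 1
// for the `activation > 1` guard to work; a derivative is the negated id.
enum { IDENTITY = 1, SIGMOID = 2, RELU = 3, SOFTMAX = 4 };

static float sigmoid(float x)            { return 1/(1 + expf(-x)); }
static float sigmoid_derivative(float x) { float t = expf(-x); return t/((1+t)*(1+t)); }
static float relu(float x)               { return x > 0 ? x : 0; }
static float relu_derivative(float x)    { return x > 0 ? 1 : 0; }

// Sketch of the lookup that the refactored dispatchers route through.
static ptr get_activation_function(int activation) {
    switch (activation) {
        case RELU:     return &relu;
        case -RELU:    return &relu_derivative;
        case SIGMOID:  return &sigmoid;
        case -SIGMOID: return &sigmoid_derivative;
        default:       return NULL;  // SOFTMAX and unknown ids: no pointwise function
    }
}

int main(void) {
    ptr f  = get_activation_function(SIGMOID);   // forward activation
    ptr df = get_activation_function(-SIGMOID);  // negated id -> derivative
    printf("sigmoid(0)=%f  sigmoid'(0)=%f\n", f(0), df(0));  // 0.5 and 0.25
    return 0;
}

The payoff of the refactor is that adding a new pointwise activation only requires extending this one lookup, instead of editing every dispatcher.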

function.h

@@ -52,18 +52,18 @@ void apply_softmax_input(float ***input, int depth, int rows, int columns);
 void apply_function_input(float (*f)(float), float*** input, int depth, int rows, int columns);
 
 /*
-* Redirige vers la fonction à appliquer sur une matrice
+* Applique une fonction d'activation (repérée par son identifiant) à une matrice
 */
-void choose_apply_function_matrix(int activation, float*** input, int depth, int dim);
+void apply_function_to_matrix(int activation, float*** input, int depth, int dim);
 
 /*
-* Redirige vers la fonction à appliquer sur un vecteur
+* Applique une fonction d'activation (repérée par son identifiant) à un vecteur
 */
-void choose_apply_function_vector(int activation, float*** input, int dim);
+void apply_function_to_vector(int activation, float*** input, int dim);
 
 /*
 * Renvoie la fonction d'activation correspondant à son identifiant (activation)
 */
-ptr get_function_activation(int activation);
+ptr get_activation_function(int activation);
 
 #endif
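Why SOFTMAX is special-cased throughout: unlike the other activations it is not a pointwise float -> float map, so get_activation_function cannot return it (hence the NULL plus error message in the diff). It needs a maximum and a normalizing sum over the whole input, as the `float m = FLT_MIN; float sum=0;` locals of apply_softmax_input suggest. A minimal, numerically stable 1-D sketch of the idea (illustrative, not the repository's implementation):

#include <stdio.h>
#include <math.h>

// Stable softmax over a flat vector: subtract the max before exponentiating
// so expf() cannot overflow, then normalize by the sum.
static void softmax_1d(float *x, int n) {
    float m = x[0];
    for (int i = 1; i < n; i++)
        if (x[i] > m) m = x[i];
    float sum = 0;
    for (int i = 0; i < n; i++) {
        x[i] = expf(x[i] - m);
        sum += x[i];
    }
    for (int i = 0; i < n; i++)
        x[i] /= sum;
}

int main(void) {
    float v[3] = {1.f, 2.f, 3.f};
    softmax_1d(v, 3);
    printf("%f %f %f\n", v[0], v[1], v[2]);  // ~0.090 0.245 0.665
    return 0;
}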