This commit is contained in:
julienChemillier 2023-01-17 15:18:50 +01:00
commit 00797ab3a8
12 changed files with 50 additions and 25 deletions

2
.vscode/launch.json vendored
View File

@@ -99,7 +99,7 @@
"--labels", "--labels",
"data/mnist/t10k-labels-idx1-ubyte", "data/mnist/t10k-labels-idx1-ubyte",
"--epochs", "--epochs",
"3" "10"
], ],
"stopAtEntry": true, "stopAtEntry": true,
"cwd": "${workspaceFolder}", "cwd": "${workspaceFolder}",

View File

@@ -107,7 +107,7 @@ endif
# #
run-tests: build-tests run-tests: build-tests
$(foreach file, $(wildcard $(TEST_SRCDIR)/*.sh), $(file);) $(foreach file, $(wildcard $(TEST_SRCDIR)/*.sh), $(file);)
@echo "$$(for file in build/test-*; do echo -e \\033[33m#####\\033[0m $$file \\033[33m#####\\033[0m; $$file || echo "Erreur sur $$file"; done)" @echo "$$(for file in build/test-*; do echo -e \\033[33m#####\\033[0m $$file \\033[33m#####\\033[0m; $$file || echo -e "\\033[1m\\033[31mErreur sur $$file\\033[0m"; done)"
build-tests: prepare-tests $(TESTS_OBJ) $(BUILDDIR)/test-cnn_matrix_multiplication $(BUILDDIR)/test-cnn_convolution build-tests: prepare-tests $(TESTS_OBJ) $(BUILDDIR)/test-cnn_matrix_multiplication $(BUILDDIR)/test-cnn_convolution

View File

@@ -106,7 +106,7 @@ void forward_propagation(Network* network) {
choose_apply_function_matrix(activation, output, output_depth, output_width); choose_apply_function_matrix(activation, output, output_depth, output_width);
} }
else if (k_i->nn) { // Full connection else if (k_i->nn) { // Full connection
if (input_depth==1) { // Vecteur -> Vecteur if (k_i->linearisation == 0) { // Vecteur -> Vecteur
make_dense(k_i->nn, input[0][0], output[0][0], input_width, output_width); make_dense(k_i->nn, input[0][0], output[0][0], input_width, output_width);
} else { // Matrice -> Vecteur } else { // Matrice -> Vecteur
make_dense_linearised(k_i->nn, input, output[0][0], input_depth, input_width, output_width); make_dense_linearised(k_i->nn, input, output[0][0], input_depth, input_width, output_width);
@@ -155,7 +155,7 @@ void backward_propagation(Network* network, float wanted_number) {
backward_convolution(k_i->cnn, input, input_z, output, input_depth, input_width, output_depth, output_width, d_f, i==0); backward_convolution(k_i->cnn, input, input_z, output, input_depth, input_width, output_depth, output_width, d_f, i==0);
} else if (k_i->nn) { // Full connection } else if (k_i->nn) { // Full connection
ptr d_f = get_function_activation(activation); ptr d_f = get_function_activation(activation);
if (input_depth==1) { // Vecteur -> Vecteur if (k_i->linearisation == 0) { // Vecteur -> Vecteur
backward_fully_connected(k_i->nn, input[0][0], input_z[0][0], output[0][0], input_width, output_width, d_f, i==0); backward_fully_connected(k_i->nn, input[0][0], input_z[0][0], output[0][0], input_width, output_width, d_f, i==0);
} else { // Matrice -> vecteur } else { // Matrice -> vecteur
backward_linearisation(k_i->nn, input, input_z, output[0][0], input_depth, input_width, output_width, d_f); backward_linearisation(k_i->nn, input, input_z, output[0][0], input_depth, input_width, output_width, d_f);

View File

@@ -34,7 +34,7 @@ Network* create_network(int max_size, float learning_rate, int dropout, int init
} }
Network* create_network_lenet5(float learning_rate, int dropout, int activation, int initialisation, int input_dim, int input_depth) { Network* create_network_lenet5(float learning_rate, int dropout, int activation, int initialisation, int input_dim, int input_depth) {
Network* network = create_network(8, learning_rate, dropout, initialisation, input_dim, input_depth); Network* network = create_network(8, learning_rate, dropout, initialisation, input_dim, input_depth);
network->kernel[0]->activation = activation; network->kernel[0]->activation = activation;
network->kernel[0]->linearisation = 0; network->kernel[0]->linearisation = 0;
add_convolution(network, 6, 28, activation); add_convolution(network, 6, 28, activation);
@@ -95,7 +95,6 @@ void add_2d_average_pooling(Network* network, int dim_output) {
printf("Impossible de rajouter une couche d'average pooling, le réseau est déjà plein\n"); printf("Impossible de rajouter une couche d'average pooling, le réseau est déjà plein\n");
return; return;
} }
int kernel_size = dim_input/dim_output;
if (dim_input%dim_output != 0) { if (dim_input%dim_output != 0) {
printf("Erreur de dimension dans l'average pooling\n"); printf("Erreur de dimension dans l'average pooling\n");
return; return;

View File

@@ -117,7 +117,7 @@ void free_network(Network* network) {
if (network->kernel[i]->cnn != NULL) { // Convolution if (network->kernel[i]->cnn != NULL) { // Convolution
free_convolution(network, i); free_convolution(network, i);
} else if (network->kernel[i]->nn != NULL) { } else if (network->kernel[i]->nn != NULL) {
if (network->depth[i]==1) { // Dense non linearised if (network->kernel[i]->linearisation == 0) { // Dense non linearised
free_dense(network, i); free_dense(network, i);
} else { // Dense lineariation } else { // Dense lineariation
free_dense_linearisation(network, i); free_dense_linearisation(network, i);

View File

@@ -30,7 +30,7 @@ void backward_2d_pooling(float*** input, float*** output, int input_width, int o
void backward_fully_connected(Kernel_nn* ker, float* input, float* input_z, float* output, int size_input, int size_output, ptr d_function, int is_first); void backward_fully_connected(Kernel_nn* ker, float* input, float* input_z, float* output, int size_input, int size_output, ptr d_function, int is_first);
/* /*
* Transfert les informatiosn d'erreur à travers une couche de linéarisation * Transfert les informations d'erreur à travers une couche de linéarisation
*/ */
void backward_linearisation(Kernel_nn* ker, float*** input, float*** input_z, float* output, int depth_input, int dim_input, int size_output, ptr d_function); void backward_linearisation(Kernel_nn* ker, float*** input, float*** input_z, float* output, int depth_input, int dim_input, int size_output, ptr d_function);

View File

@@ -32,7 +32,7 @@ void write_network(char* filename, Network* network) {
bufferAdd(network->depth[i]); bufferAdd(network->depth[i]);
} }
for (int i=0; i < size; i++) { for (int i=0; i < size-1; i++) {
if ((!network->kernel[i]->cnn)&&(!network->kernel[i]->nn)) { if ((!network->kernel[i]->cnn)&&(!network->kernel[i]->nn)) {
type_couche[i] = 2; type_couche[i] = 2;
} else if (!network->kernel[i]->cnn) { } else if (!network->kernel[i]->cnn) {
@@ -46,7 +46,7 @@ void write_network(char* filename, Network* network) {
fwrite(buffer, sizeof(buffer), 1, ptr); fwrite(buffer, sizeof(buffer), 1, ptr);
// Écriture du pré-corps et corps // Écriture du pré-corps et corps
for (int i=0; i < size; i++) { for (int i=0; i < size-1; i++) {
write_couche(network, i, type_couche[i], ptr); write_couche(network, i, type_couche[i], ptr);
} }
@@ -168,12 +168,11 @@ Network* read_network(char* filename) {
} }
// Lecture de chaque couche // Lecture de chaque couche
network->kernel = (Kernel**)malloc(sizeof(Kernel*)*size); network->kernel = (Kernel**)malloc(sizeof(Kernel*)*(size-1));
for (int i=0; i < (int)size-1; i++) { for (int i=0; i < (int)size-1; i++) {
network->kernel[i] = read_kernel(type_couche[i], network->width[i+1], ptr); network->kernel[i] = read_kernel(type_couche[i], network->width[i+1], ptr);
} }
network->kernel[(int)size-1] = read_kernel(type_couche[(int)size-1], -1, ptr);
network->input = (float****)malloc(sizeof(float***)*size); network->input = (float****)malloc(sizeof(float***)*size);
for (int i=0; i < (int)size; i++) { // input[size][couche->depth][couche->dim][couche->dim] for (int i=0; i < (int)size; i++) { // input[size][couche->depth][couche->dim][couche->dim]

View File

@@ -46,7 +46,7 @@ void* train_thread(void* parameters) {
write_image_in_network_32(images[index[i]], height, width, network->input[0][0]); write_image_in_network_32(images[index[i]], height, width, network->input[0][0]);
forward_propagation(network); forward_propagation(network);
maxi = indice_max(network->input[network->size-1][0][0], 10); maxi = indice_max(network->input[network->size-1][0][0], 10);
backward_propagation(network, labels[i]); backward_propagation(network, labels[index[i]]);
if (maxi == labels[index[i]]) { if (maxi == labels[index[i]]) {
accuracy += 1.; accuracy += 1.;
@@ -294,7 +294,7 @@ void train(int dataset_type, char* images_file, char* labels_file, char* data_di
#else #else
free(train_params); free(train_params);
#endif #endif
if (dataset_type == 0) { if (dataset_type == 0) {
for (int i=0; i < nb_images_total; i++) { for (int i=0; i < nb_images_total; i++) {
for (int j=0; j < 28; j++) { for (int j=0; j < 28; j++) {

View File

@@ -31,7 +31,7 @@ void update_weights(Network* network, Network* d_network, int nb_images) {
} }
} }
} else if (k_i->nn) { // Full connection } else if (k_i->nn) { // Full connection
if (input_depth==1) { // Vecteur -> Vecteur if (k_i->linearisation == 0) { // Vecteur -> Vecteur
Kernel_nn* nn = k_i->nn; Kernel_nn* nn = k_i->nn;
Kernel_nn* d_nn = dk_i->nn; Kernel_nn* d_nn = dk_i->nn;
for (int a=0; a<input_width; a++) { for (int a=0; a<input_width; a++) {
@@ -119,7 +119,7 @@ void reset_d_weights(Network* network) {
} }
} }
} else if (k_i->nn) { // Full connection } else if (k_i->nn) { // Full connection
if (input_depth==1) { // Vecteur -> Vecteur if (k_i->linearisation == 0) { // Vecteur -> Vecteur
Kernel_nn* nn = k_i_1->nn; Kernel_nn* nn = k_i_1->nn;
for (int a=0; a<input_width; a++) { for (int a=0; a<input_width; a++) {
for (int b=0; b<output_width; b++) { for (int b=0; b<output_width; b++) {

View File

@@ -40,7 +40,7 @@ bool equals_networks(Network* network1, Network* network2) {
checkEquals(depth[i], "input_depth", i); checkEquals(depth[i], "input_depth", i);
} }
for (int i=0; i < network1->size; i++) { for (int i=0; i < network1->size-1; i++) {
checkEquals(kernel[i]->activation, "kernel[i]->activation", i); checkEquals(kernel[i]->activation, "kernel[i]->activation", i);
if ((!network1->kernel[i]->cnn ^ !network2->kernel[i]->cnn) || (!network1->kernel[i]->nn ^ !network2->kernel[i]->nn)) { if ((!network1->kernel[i]->cnn ^ !network2->kernel[i]->cnn) || (!network1->kernel[i]->nn ^ !network2->kernel[i]->nn)) {
printf(BOLDRED "[ ERROR ]" RESET "network1->kernel[%d] et network1->kernel[%d] diffèrent de type\n", i, i); printf(BOLDRED "[ ERROR ]" RESET "network1->kernel[%d] et network1->kernel[%d] diffèrent de type\n", i, i);
@@ -118,8 +118,8 @@ Network* copy_network(Network* network) {
copyVar(depth[i]); copyVar(depth[i]);
} }
network_cp->kernel = (Kernel**)malloc(sizeof(Kernel*)*size); network_cp->kernel = (Kernel**)malloc(sizeof(Kernel*)*(size-1));
for (int i=0; i < size; i++) { for (int i=0; i < size-1; i++) {
network_cp->kernel[i] = (Kernel*)malloc(sizeof(Kernel)); network_cp->kernel[i] = (Kernel*)malloc(sizeof(Kernel));
if (!network->kernel[i]->nn && !network->kernel[i]->cnn) { // Cas de la couche de linéarisation if (!network->kernel[i]->nn && !network->kernel[i]->cnn) { // Cas de la couche de linéarisation
copyVar(kernel[i]->activation); copyVar(kernel[i]->activation);

View File

@@ -70,8 +70,21 @@ def plot_temps_exec(data):
GPUtime = [i["GPUtime"] for i in data] GPUtime = [i["GPUtime"] for i in data]
CPUtime = [i["CPUtime"] for i in data] CPUtime = [i["CPUtime"] for i in data]
plt.plot(x, GPUtime) fig, ax = plt.subplots()
plt.plot(x, CPUtime)
#ax.set_yscale("log")
gputime, = ax.plot(x, GPUtime)
cputime, = ax.plot(x, CPUtime)
gputime.set_label("Temps GPU")
cputime.set_label("Temps CPU")
ax.set_ylabel("Temps d'exécution (secondes)")
ax.set_xlabel("Taille de la matrice d'entrée")
ax.legend()
plt.grid(True)
plt.show() plt.show()
def plot_erreur(data): def plot_erreur(data):

View File

@@ -28,7 +28,7 @@ def image_from_file(filepath, dest="./images/"):
png.from_array(data[i], 'L').save(os.path.join(dest, f"{i}.png")) png.from_array(data[i], 'L').save(os.path.join(dest, f"{i}.png"))
def image_from_list(filepath, dest="data.png", exp=False): def image_from_list(filepath, exp=False):
""" """
Enregistre une liste de poids sous forme d'une image Enregistre une liste de poids sous forme d'une image
exp sert à spécifier si il faut passer à une forme exponentielle exp sert à spécifier si il faut passer à une forme exponentielle
@@ -54,13 +54,13 @@ def image_from_list(filepath, dest="data.png", exp=False):
for j in range(IMAGE_HEIGHT): for j in range(IMAGE_HEIGHT):
new_data[i][j] = data[i*IMAGE_HEIGHT+j] new_data[i][j] = data[i*IMAGE_HEIGHT+j]
png.from_array(new_data, 'L').save(dest) return new_data
def graph_from_test_reseau(erreurs, reussites): def graph_from_test_reseau(erreurs, reussites):
""" """
Affiche un graphique à partir d'un fichier contenant les Affiche un graphique à partir d'un fichier contenant les
réussites et d'un autre contenant les erreurs (sortie brutes de out/main) réussites et d'un autre contenant les erreurs (sortie brutes de out/mnist_main)
""" """
with open(erreurs, "r", encoding="utf8") as f: with open(erreurs, "r", encoding="utf8") as f:
data = f.read() data = f.read()
@@ -113,7 +113,21 @@ def images_neurons(neurons, dest="neurons", exp=False):
Afin de créer un ensemble d'image visualisant les poids Afin de créer un ensemble d'image visualisant les poids
""" """
os.makedirs(dest, exist_ok=True) os.makedirs(dest, exist_ok=True)
data = []
for i in neurons: for i in neurons:
os.system(f"./make.sh utils print-poids-neurone --reseau \ os.system(f"./make.sh utils print-poids-neurone --reseau \
.cache/reseau.bin --neurone {i} > .cache/poids.txt") .cache/reseau.bin --neurone {i} > .cache/poids.txt")
image_from_list(".cache/poids.txt", os.path.join(dest, f"{i}.png"), exp=exp) data.append(image_from_list(".cache/poids.txt", exp=exp))
new_data = data.copy()
for i, _ in enumerate(new_data):
for j, _ in enumerate(new_data[i]):
for k, _ in enumerate(new_data[i][j]):
for l, _ in enumerate(new_data):
if i != l:
new_data[i][j][k] -= data[l][j][k] * 0.11
new_data[i][j][k] = max(int(new_data[i][j][k]), 0)
for i in neurons:
png.from_array(data[i], 'L').save(os.path.join(dest, f"{i}.png"))