diff --git a/sobek/network.py b/sobek/network.py
index 84edaf2..dc6091c 100755
--- a/sobek/network.py
+++ b/sobek/network.py
@@ -51,11 +51,8 @@ class network:
             self.activations.append(_input)

             #activation function application
-            #for i in range(len(_input)):
-            #    _input[i] = network.__sigmoid(_input)
             _input = network.__sigmoid(_input)
-            #On peut comparer la performance si on recalcul plus tard

             if (__storeValues):
                 self.outputs.append(_input)

@@ -110,18 +107,14 @@ class network:
         errorSumsBiases = [np.zeros(layer.shape) for layer in self.biases]
         self.__errors = [np.zeros(len(layer)) for layer in self.weights]

-        #rempli self.activations et self.outputs
+        #Rempli self.activations et self.outputs
         self.process(_input, True)
+
         self.__desiredOutput = desiredOutput

-        #A optimiser
         for layerNumber in range(len(errorSumsWeights)-1, -1, -1):
             for neuronNumber in range(len(errorSumsWeights[layerNumber])):
                 errorSumsBiases[layerNumber][neuronNumber] += self.__Error(layerNumber, neuronNumber)
-                #for weightNumber in range(len(errorSumsWeights[layerNumber][neuronNumber])):
-                    #print("layer : " + str(layerNumber) + " neuron : " + str(neuronNumber) + " weight : " + str(weightNumber))
-                    #errorSumsWeights[layerNumber][neuronNumber][weightNumber] += self.__PartialDerivative(layerNumber, neuronNumber, weightNumber)
-                    #errorSumsWeights[layerNumber][neuronNumber][weightNumber] = errorSumsBiases[layerNumber][neuronNumber] * self.outputs[layerNumber][weightNumber]
                 errorSumsWeights[layerNumber][neuronNumber] = np.dot(errorSumsBiases[layerNumber][neuronNumber],self.outputs[layerNumber])

         total = 0
@@ -133,27 +126,6 @@ class network:
             errorSumsBiases[layerNumber] = np.multiply(errorSumsBiases[layerNumber], -(learningRate/len(inputBatch)))
             self.biases[layerNumber] = np.add(self.biases[layerNumber], errorSumsBiases[layerNumber])
-            #print(self.__biases)
-        """
-
-        for layerNumber in range(len(errorSumsWeights)):
-            for neuronNumber in range(len(errorSumsWeights[layerNumber])):
-
-                errorSumsBiases[layerNumber][neuronNumber] = errorSumsBiases[layerNumber][neuronNumber] / len(inputBatch)
-                total += errorSumsBiases[layerNumber][neuronNumber]
-                self.biases[layerNumber][neuronNumber] -= learningRate * errorSumsBiases[layerNumber][neuronNumber]
-
-                for weightNumber in range(len(errorSumsWeights[layerNumber][neuronNumber])):
-
-                    #Probablement faisable avec une multiplication de matrices
-                    errorSumsWeights[layerNumber][neuronNumber][weightNumber] = errorSumsWeights[layerNumber][neuronNumber][weightNumber] / len(inputBatch)
-
-                    #total += errorSumsWeights[layerNumber][neuronNumber][weightNumber]
-
-                    #Probablement faisable avec une somme de matrices
-                    self.weights[layerNumber][neuronNumber][weightNumber] -= learningRate * errorSumsWeights[layerNumber][neuronNumber][weightNumber]
-
-        #print("Error : " + str(total))"""

         if (visualize):
             ani = animation.ArtistAnimation(fig, vizualisationData, interval=100)
             plt.show()
@@ -172,9 +144,6 @@ class network:
                 upperLayerLinksSum += self.weights[layer+1][upperLayerNeuron][neuron] * self.__errors[layer+1][upperLayerNeuron]
         return network.__sigmoid(self.activations[layer][neuron], derivative=True) * upperLayerLinksSum

-    #def __PartialDerivative(self, layer, neuron, weight):
-    #    return self.__Error(layer, neuron) * self.outputs[layer][weight]
-
     def accuracy(self, inputs, desiredOutputs):
         if (type(inputs) != list or type(desiredOutputs) != list):
             raise TypeError("The inputs and desired outputs must be lists of numpy arrays !")
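A side note on the np.dot line that replaces the commented-out per-weight loop in the hunk above: each row of the weight gradient is that neuron's accumulated error scaled by the previous layer's outputs, so the whole layer could equally be written as one outer product. A minimal, self-contained sketch of that identity; the names delta_layer and prev_outputs are illustrative and do not come from network.py:

import numpy as np

# Hypothetical example values: 3 neurons in the current layer, 4 in the previous one.
delta_layer = np.array([0.1, -0.2, 0.05])      # per-neuron errors (the bias gradients)
prev_outputs = np.array([0.9, 0.3, 0.7, 0.1])  # outputs of the previous layer

# Per-neuron form, as in the diff: row n = error_n * prev_outputs
# (np.dot of a scalar and a vector is an elementwise scaling).
grad_rows = np.array([np.dot(delta_layer[n], prev_outputs) for n in range(len(delta_layer))])

# Equivalent whole-layer form: a single outer product.
grad_outer = np.outer(delta_layer, prev_outputs)

assert np.allclose(grad_rows, grad_outer)
print(grad_outer.shape)  # (3, 4): one row of weight gradients per neuron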
diff --git a/test.py b/test.py
deleted file mode 100644
index 7f7e5dd..0000000
--- a/test.py
+++ /dev/null
@@ -1,14 +0,0 @@
-import numpy as np
-from sobek.network import network
-
-test = network(16, 16, 8, 4)
-"""
-for y in test.weights:
-    print(y, end="\n\n")
-
-for y in test.biases:
-    print(y, end="\n\n")"""
-
-#print(network.__reLu(8))
-
-print(test.process(np.random.default_rng(42).random((16))))
\ No newline at end of file
diff --git a/MNIST30epoch b/tests/MNIST30epoch
similarity index 100%
rename from MNIST30epoch
rename to tests/MNIST30epoch
diff --git a/MNISTDrawingPrediction.py b/tests/MNISTDrawingPrediction.py
similarity index 97%
rename from MNISTDrawingPrediction.py
rename to tests/MNISTDrawingPrediction.py
index e5659b8..1d9e8b6 100644
--- a/MNISTDrawingPrediction.py
+++ b/tests/MNISTDrawingPrediction.py
@@ -1,7 +1,11 @@
 import tkinter
 from PIL import Image, ImageDraw
-from sobek.network import network
 import numpy as np
+from sys import path
+path.insert(1, "..")
+from sobek.network import network
+
+

 class Sketchpad(tkinter.Canvas):
     def __init__(self, parent, predictionLabel, **kwargs, ):
diff --git a/MNISTLearning.py b/tests/MNISTLearning.py
similarity index 97%
rename from MNISTLearning.py
rename to tests/MNISTLearning.py
index 6ed386c..f18bc7c 100644
--- a/MNISTLearning.py
+++ b/tests/MNISTLearning.py
@@ -1,7 +1,10 @@
 import numpy as np
-from sobek.network import network
 import gzip
 import time
+from sys import path
+path.insert(1, "..")
+from sobek.network import network
+


 print("--- Data loading ---")
diff --git a/MNISTLoadTest.py b/tests/MNISTLoadTest.py
similarity index 95%
rename from MNISTLoadTest.py
rename to tests/MNISTLoadTest.py
index d929474..1e35602 100644
--- a/MNISTLoadTest.py
+++ b/tests/MNISTLoadTest.py
@@ -1,6 +1,8 @@
 import numpy as np
-from sobek.network import network
 import gzip
+from sys import path
+path.insert(1, "..")
+from sobek.network import network


 print("--- Data loading ---")
diff --git a/testLearning.py b/tests/testLearning.py
similarity index 95%
rename from testLearning.py
rename to tests/testLearning.py
index 8f25fd4..0823ca2 100644
--- a/testLearning.py
+++ b/tests/testLearning.py
@@ -1,5 +1,7 @@
 import numpy as np
 import random
+from sys import path
+path.insert(1, "..")
 from sobek.network import network

 random.seed()
diff --git a/testLearningNAND.py b/tests/testLearningNAND.py
similarity index 87%
rename from testLearningNAND.py
rename to tests/testLearningNAND.py
index 5d65b65..f6795f6 100644
--- a/testLearningNAND.py
+++ b/tests/testLearningNAND.py
@@ -1,7 +1,9 @@
 import numpy as np
 import random
-from sobek.network import network
 import time
+from sys import path
+path.insert(1, "..")
+from sobek.network import network


 random.seed()
@@ -33,20 +35,12 @@ for i in range(nbRep):
     startTime = time.perf_counter()

-    #for j in range(10000):
-    #    inputs = []
-    #    desiredOutputs = []
-
-        #if (j%1000 == 0):
-        #    print(j)
-
-    #    myNetwork.train(test, result, learningRate)
-
     myNetwork.train(test, result, learningRate, len(test), 10000, visualize=False)

     endTime = time.perf_counter()
     learningTime += endTime - startTime

 learningTime = learningTime / nbRep
+
 test = []
 result = []
 test.append(np.zeros(2))
diff --git a/testNAND.py b/tests/testNAND.py
similarity index 93%
rename from testNAND.py
rename to tests/testNAND.py
index 6465536..11bd4d6 100644
--- a/testNAND.py
+++ b/tests/testNAND.py
@@ -1,5 +1,6 @@
 import numpy as np
-import random
+from sys import path
+path.insert(1, "..")
 from sobek.network import network

 myNetwork = network(2, 1)
diff --git a/timeTest.py b/tests/timeTest.py
similarity index 100%
rename from timeTest.py
rename to tests/timeTest.py
diff --git a/timeTest2.py b/tests/timeTest2.py
similarity index 100%
rename from timeTest2.py
rename to tests/timeTest2.py
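A note on the path.insert(1, "..") lines added to the moved test scripts: a relative entry on sys.path is resolved against the current working directory, so these imports assume the scripts are launched from inside tests/. Below is a sketch of a working-directory-independent variant; it is not part of the diff above and assumes the script sits one directory below the repository root, like the files in tests/:

import sys
from pathlib import Path

# Resolve the repository root from this file's location instead of the
# current working directory, then make the local sobek package importable.
sys.path.insert(1, str(Path(__file__).resolve().parent.parent))

from sobek.network import network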