import numpy as np


class network:
    """A minimal fully connected feed-forward neural network (multilayer perceptron).

    Each layer is an affine transform (weights + bias) followed by a ReLU
    activation. Weights are initialised uniformly in [0, 1); biases start
    at zero.
    """

    def __init__(self, inputLayerSize, *layerSizes, seed=42):
        """Build the network topology.

        Args:
            inputLayerSize: number of input features.
            *layerSizes: size of each subsequent layer (hidden and output).
            seed: RNG seed for weight initialisation (default 42, matching
                the original behaviour of seeding with 42).
        """
        # One generator for the whole network: creating a fresh
        # default_rng(42) per layer would make every layer start from the
        # identical random stream.
        rng = np.random.default_rng(seed)
        self.inputLayerSize = inputLayerSize
        # Kept as plain lists: layers have different shapes, and
        # np.array() on ragged nested sequences raises on NumPy >= 1.24.
        self.weights = []
        self.biases = []
        self.oldLayerSize = inputLayerSize
        for layerSize in layerSizes:
            self.weights.append(rng.random((self.oldLayerSize, layerSize)))
            # Float zeros (the original used Python int zeros).
            self.biases.append(np.zeros(layerSize))
            self.oldLayerSize = layerSize

    @staticmethod
    def reLu(value):
        """Rectified linear unit for a scalar: max(0, value)."""
        return max(0, value)

    def process(self, input):
        """Propagate *input* through every layer and return the activations.

        Args:
            input: 1-D numpy array of ``inputLayerSize`` values.

        Returns:
            1-D numpy array of the last layer's activations (all >= 0).

        Raises:
            TypeError: if *input* is not a numpy ndarray.
            ValueError: if *input* has the wrong number of elements.
        """
        if not isinstance(input, np.ndarray):
            raise TypeError("input must be a numpy ndarray")
        if input.size != self.inputLayerSize:
            raise ValueError(
                f"expected {self.inputLayerSize} input values, got {input.size}"
            )
        if input.dtype != np.float64:
            # Convert rather than reject: matmul needs floating point.
            input = input.astype(np.float64)
        for layer, bias in zip(self.weights, self.biases):
            # Affine transform followed by element-wise ReLU. The original
            # nditer loop rebound a local name instead of writing back, so
            # the activation was silently never applied.
            input = np.maximum(np.matmul(input, layer) + bias, 0)
        return input


if __name__ == "__main__":
    # Demo / smoke test: guard so importing this module has no side effects.
    test = network(16, 16, 8, 4)

    for y in test.weights:
        print(y, end="\n\n")

    for y in test.biases:
        print(y, end="\n\n")

    print(network.reLu(8))

    print(test.process(np.random.default_rng(42).random(16)))