diff --git a/sobek/network.py b/sobek/network.py
index 75d906a..d081fc8 100755
--- a/sobek/network.py
+++ b/sobek/network.py
@@ -6,33 +6,36 @@ class network:
         if type(inputLayerSize) != int: raise TypeError("The input layer size must be an int!")
 
-        self.weights = []
-        self.inputLayerSize = inputLayerSize
-        self.oldLayerSize = inputLayerSize
+        self.__weights = []
+        self.__inputLayerSize = inputLayerSize
+        oldLayerSize = inputLayerSize
 
         for layerSize in layerSizes:
-            self.weights.append( np.random.default_rng(42).random((self.oldLayerSize, layerSize)) )
-            self.oldLayerSize = layerSize
-        self.biases = [[0]*layerSize for layerSize in layerSizes]
-        self.weights = np.array(self.weights, dtype=object)
-        self.biases = np.array(self.biases, dtype=object)
+            self.__weights.append( np.random.default_rng(42).random((oldLayerSize, layerSize)) )
+            oldLayerSize = layerSize
+        self.__biases = [[0]*layerSize for layerSize in layerSizes]
+        self.__weights = np.array(self.__weights, dtype=object)
+        self.__biases = np.array(self.__biases, dtype=object)
 
-    def reLu(value):
+    def __reLu(value):
         return max(0, value)
 
     def process(self, input):
         if type(input) != np.ndarray: raise TypeError("The input must be a vector!")
-        if input.size != self.inputLayerSize: raise ValueError("The input vector has the wrong size!")
+        if input.size != self.__inputLayerSize: raise ValueError("The input vector has the wrong size!")
         if input.dtype != np.float64: raise TypeError("The input vector must contain floats!")
 
-        for layerWeights, bias in zip(self.weights, self.biases):
+        for layerWeights, bias in zip(self.__weights, self.__biases):
             input = np.matmul(input, layerWeights)
             input = np.add(input, bias)
             #reLu application
             with np.nditer(input, op_flags=['readwrite']) as layer:
                 for neuron in layer:
-                    neuron = network.reLu(neuron)
+                    neuron = network.__reLu(neuron)
 
-        return input
\ No newline at end of file
+        return input
+
+    def train(self, inputs, results):
+        
\ No newline at end of file
diff --git a/test.py b/test.py
index 96d04bd..7f7e5dd 100644
--- a/test.py
+++ b/test.py
@@ -9,6 +9,6 @@ for y in test.weights:
 for y in test.biases:
     print(y, end="\n\n")"""
 
-print(network.reLu(8))
+#print(network.__reLu(8))
 
 print(test.process(np.random.default_rng(42).random((16))))
\ No newline at end of file
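
The renames above rely on Python name mangling: identifiers with a leading double underscore are rewritten to _ClassName__name inside the class body, which is why the direct network.reLu(8) call in test.py is commented out (and why the string-quoted loops over test.weights and test.biases would also stop working). Below is a minimal, standalone sketch of that effect; it is not part of either file in the diff, and it assumes a constructor of the form network(inputLayerSize, *layerSizes), which is not shown in this hunk.

    import numpy as np
    from sobek.network import network

    # Hypothetical construction: 16 inputs feeding layers of 8 and 4 neurons
    # (the real constructor signature lies outside this diff).
    test = network(16, 8, 4)

    # The public entry point is unaffected: forward-pass a random float64 vector.
    print(test.process(np.random.default_rng(42).random(16)))

    # The renamed members are no longer reachable under their declared names:
    #   test.__weights      -> AttributeError
    #   network.__reLu(8)   -> AttributeError (hence the commented-out line in test.py)
    # They remain visible only through the mangled names, e.g.:
    print(test._network__weights[0].shape)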