very beginning of training

eynard 2021-12-14 10:44:48 +01:00
parent 75bc43d48f
commit a96908daf4


@@ -16,10 +16,17 @@ class network:
         self.__weights = np.array(self.__weights, dtype=object)
         self.__biases = np.array(self.__biases, dtype=object)
 
-    def __reLu(value):
+    def __reLu(value, derivative=False):
+        if (derivative):
+            return 0 if (value <= 0) else 1
         return max(0, value)
 
-    def process(self, input):
+    def __sigmoid(value, derivative=False):
+        if (derivative):
+            return __sigmoid(value) * (1 - __sigmoid(value))
+        return 1/(1+np.exp(-value))
+
+    def process(self, input, storeValues=False):
         if type(input) != np.ndarray:
             raise TypeError("The input must be a vector!")
         if input.size != self.__inputLayerSize:
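
The hunk above extends both activation functions with a derivative mode. As a rough standalone sketch (plain module-level functions with illustrative names relu/sigmoid, rather than the class's name-mangled private methods), the ReLU derivative is 0 for non-positive inputs and 1 otherwise, and the sigmoid derivative follows the identity sigmoid'(x) = sigmoid(x) * (1 - sigmoid(x)):

    import numpy as np

    def relu(value, derivative=False):
        # derivative of max(0, x): 0 on the non-positive side, 1 on the positive side
        if derivative:
            return 0 if value <= 0 else 1
        return max(0, value)

    def sigmoid(value, derivative=False):
        # sigmoid'(x) = sigmoid(x) * (1 - sigmoid(x))
        if derivative:
            s = sigmoid(value)
            return s * (1 - s)
        return 1 / (1 + np.exp(-value))

    # quick check of the derivative identities
    print(relu(-2.0), relu(-2.0, derivative=True))      # 0 0
    print(relu(3.0), relu(3.0, derivative=True))        # 3.0 1
    print(sigmoid(0.0), sigmoid(0.0, derivative=True))  # 0.5 0.25
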
@@ -38,4 +45,12 @@ class network:
         return input
 
     def train(self, inputs, results):
+        self.__outputs = 1
+        #for j in range(1,):
+            #partialDerivatives
+
+    def __Error(layer, output, desiredOutput):
+        return __ErrorFinalLayerFromValue() if (layer == 1)
+
+    def __ErrorFinalLayer(self, neuron):
+        return __reLu(value, True) * (output - desiredOutput)
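
The second hunk stubs out the error terms for backpropagation. __ErrorFinalLayer appears to compute the usual output-layer delta for a squared-error cost, delta = f'(z) * (output - desired). A minimal vectorised sketch of that step, assuming sigmoid activations and hypothetical names z, desired, output_layer_delta (none of which come from the commit itself):

    import numpy as np

    def sigmoid(z):
        return 1 / (1 + np.exp(-z))

    def sigmoid_prime(z):
        s = sigmoid(z)
        return s * (1 - s)

    def output_layer_delta(z, desired):
        # delta = f'(z) * (output - desired), the error term of the final layer
        # under a squared-error cost
        output = sigmoid(z)
        return sigmoid_prime(z) * (output - desired)

    # example: a 3-neuron output layer
    z = np.array([0.2, -1.0, 0.7])       # pre-activations of the final layer
    desired = np.array([0.0, 1.0, 0.0])  # target vector
    print(output_layer_delta(z, desired))
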