Compare commits

...

2 Commits

Author SHA1 Message Date
eynard 7506b3756b start of new model 2022-01-13 16:00:08 +01:00
eynard c66c0ae87a start of model 2022-01-11 10:35:01 +01:00
3 changed files with 69 additions and 1 deletion


@@ -0,0 +1,23 @@
import numpy as np

class activationFunction:
    # common interface: applyTo computes f(value), applyDerivateTo computes f'(value)
    @staticmethod
    def applyTo(value):
        pass
    @staticmethod
    def applyDerivateTo(value):
        pass

class sigmoid(activationFunction):
    @staticmethod
    def applyTo(value):
        return 1.0/(1.0+np.exp(-value))
    @staticmethod
    def applyDerivateTo(value):
        # sigmoid'(x) = sigmoid(x) * (1 - sigmoid(x))
        return sigmoid.applyTo(value) * (1 - sigmoid.applyTo(value))

class reLu(activationFunction):
    @staticmethod
    def applyTo(value):
        return max(0, value)
    @staticmethod
    def applyDerivateTo(value):
        return 0 if (value < 0) else 1

class softMax(activationFunction):
    pass  # not implemented yet in this commit
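A quick usage sketch (not part of the diff; it assumes the classes above are in scope, e.g. pasted after the definitions):

    # hypothetical check of the activation functions above
    print(sigmoid.applyTo(0.0))           # -> 0.5
    print(sigmoid.applyDerivateTo(0.0))   # -> 0.25
    print(reLu.applyTo(-3))               # -> 0, negative inputs are clipped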

sobek/layers.py Normal file

@@ -0,0 +1,30 @@
import numpy as np

class layer:
    def __init__(self, neurons, activationFunction):
        self.neurons = neurons
        self.activationFunction = activationFunction
    def process(self, _input, __storeValues=False):
        pass

class dense(layer):
    def process(self, _input, __storeValues=False):
        # weighted sum plus bias, then activation;
        # layerWeights/layerBias are assumed to be set on the instance elsewhere
        _input = np.dot(self.layerWeights, _input)
        _input = np.add(_input, self.layerBias)
        if __storeValues:
            self.activation = _input
        _input = self.activationFunction.applyTo(_input)
        if __storeValues:
            self.output = _input
        return _input

class convolution(layer):
    pass  # not implemented yet in this commit

class flatten(layer):
    pass  # not implemented yet in this commit
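A minimal sketch of driving a dense layer by hand. The diff never shows where layerWeights and layerBias get initialized, so attaching them manually here is an assumption:

    # hypothetical usage of the dense layer above, with hand-set parameters
    import numpy as np
    d = dense(2, sigmoid)
    d.layerWeights = np.array([[0.5, -0.5], [0.25, 0.75]])
    d.layerBias = np.array([0.1, -0.1])
    print(d.process(np.array([1.0, 2.0])))  # two sigmoid outputs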


@@ -115,6 +115,7 @@ class network:
for layerNumber in range(len(errorSumsWeights)-1, -1, -1):
    for neuronNumber in range(len(errorSumsWeights[layerNumber])):
        errorSumsBiases[layerNumber][neuronNumber] += self.__Error(layerNumber, neuronNumber)
        # possibly move this out of the loop
        errorSumsWeights[layerNumber][neuronNumber] = np.dot(errorSumsBiases[layerNumber][neuronNumber], self.outputs[layerNumber])
total = 0
@@ -179,4 +180,18 @@ class network:
def networkFromFile(fileName):
    with open(fileName, "rb") as file:
        return pickle.load(file)
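Loading a saved network might then look like this (assumed usage: networkFromFile takes no self, so it can be called on the class itself; the file name is illustrative and the matching save method is outside this hunk):

    # hypothetical usage of networkFromFile
    net = network.networkFromFile("trained.pickle")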
class model:
    def __init__(self, inputWidth, inputHeight, inputChannels):
        self.inputWidth = inputWidth
        self.inputHeight = inputHeight
        self.inputChannels = inputChannels
        self.layers = []
    def add(self, layerType, activation):
        # the original call was incomplete (self.layers.add());
        # storing the (layerType, activation) spec is an assumption
        self.layers.append((layerType, activation))
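A hypothetical sketch of building a model with the placeholder add() above (input dimensions are illustrative):

    # hypothetical usage of the work-in-progress model class
    m = model(28, 28, 1)      # e.g. a 28x28 single-channel input
    m.add(dense, sigmoid)     # stores the (layerType, activation) spec
    print(m.layers)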