Compare commits

master...convolutio
2 commits: 7506b3756b, c66c0ae87a
sobek/activationFunctions.py (new file)
import numpy as np


class activationFunction:
    @staticmethod
    def applyTo(value):
        pass

    @staticmethod
    def applyDerivateTo(value):
        pass


class sigmoid(activationFunction):
    @staticmethod
    def applyTo(value):
        return 1.0 / (1.0 + np.exp(-value))

    @staticmethod
    def applyDerivateTo(value):
        # sigmoid'(x) = sigmoid(x) * (1 - sigmoid(x))
        return sigmoid.applyTo(value) * (1 - sigmoid.applyTo(value))


class reLu(activationFunction):
    @staticmethod
    def applyTo(value):
        return max(0, value)

    @staticmethod
    def applyDerivateTo(value):
        return 0 if (value < 0) else 1


class softMax(activationFunction):
    pass
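A quick way to sanity-check these classes is to compare applyDerivateTo against a finite-difference estimate. A minimal sketch, assuming the fixed-up file above is importable as sobek.activationFunctions (the test point 1.0 and the step h are arbitrary):

import numpy as np
from sobek.activationFunctions import sigmoid  # assumes the sobek package is on the path

x = np.linspace(-3.0, 3.0, 7)
print(sigmoid.applyTo(x))  # element-wise, since np.exp broadcasts over arrays

# finite-difference check of the derivative at a single point
h = 1e-6
approx = (sigmoid.applyTo(1.0 + h) - sigmoid.applyTo(1.0 - h)) / (2.0 * h)
print(abs(approx - sigmoid.applyDerivateTo(1.0)) < 1e-8)  # True

Note that reLu.applyTo uses the built-in max, so unlike sigmoid it only accepts scalars, not NumPy arrays.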
sobek/layers.py (new file)
import numpy as np


class layer:
    def __init__(self, neurons, activationFunction):
        self.neurons = neurons
        self.activationFunction = activationFunction

    def process(self, _input, __storeValues=False):
        raise NotImplementedError


class dense(layer):
    def process(self, _input, __storeValues=False):
        # weighted sum plus bias; layerWeights and layerBias are assumed
        # to be attached to the instance elsewhere, since neither file
        # shown here initializes them
        _input = np.dot(self.layerWeights, _input)
        _input = np.add(_input, self.layerBias)

        if (__storeValues):
            # pre-activation value, kept for backpropagation
            self.activation = _input

        _input = self.activationFunction.applyTo(_input)

        if (__storeValues):
            self.output = _input

        return _input


class convolution(layer):
    pass


class flatten(layer):
    pass
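A rough usage sketch for the dense layer, assuming the corrected file above. The shapes, the random weights, and the idea that layerWeights/layerBias are attached by the caller are all assumptions, since no code shown here initializes them:

import numpy as np
from sobek.layers import dense                # assumes the sobek package is on the path
from sobek.activationFunctions import sigmoid

rng = np.random.default_rng(0)

d = dense(neurons=3, activationFunction=sigmoid)
d.layerWeights = rng.standard_normal((3, 4))  # hypothetical: 3 neurons, 4 inputs
d.layerBias = rng.standard_normal(3)

out = d.process(np.ones(4), True)             # True -> keep activation/output for backprop
print(out.shape)                              # (3,)

The double-underscore parameter name is mangled inside the class, which is why __storeValues is passed positionally here.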
@@ -115,6 +115,7 @@ class network:
         for layerNumber in range(len(errorSumsWeights)-1, -1, -1):
             for neuronNumber in range(len(errorSumsWeights[layerNumber])):
                 errorSumsBiases[layerNumber][neuronNumber] += self.__Error(layerNumber, neuronNumber)
+                # possibly move this out of the loop
                 errorSumsWeights[layerNumber][neuronNumber] = np.dot(errorSumsBiases[layerNumber][neuronNumber], self.outputs[layerNumber])
 
         total = 0
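For readability: errorSumsBiases[layerNumber][neuronNumber] appears to hold a single neuron's accumulated error, in which case the np.dot on the following line just scales the previous layer's output vector by that scalar, producing the weight-error row for that neuron. A standalone illustration with made-up numbers (all values here are hypothetical):

import numpy as np

neuronError = 0.37                           # hypothetical accumulated bias error (scalar)
previousOutputs = np.array([0.1, 0.8, 0.5])  # hypothetical outputs feeding this neuron

# np.dot with a 0-d operand is plain scalar multiplication
weightError = np.dot(neuronError, previousOutputs)
print(weightError)                           # [0.037 0.296 0.185]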
@@ -179,4 +180,18 @@ class network:
 
 def networkFromFile(fileName):
     with open(fileName, "rb") as file:
         return pickle.load(file)
+
+
+class model:
+
+    def __init__(self, inputWidth, inputHeight, inputChannels):
+        self.inputWidth = inputWidth
+        self.inputHeight = inputHeight
+        self.inputChannels = inputChannels
+        self.layers = []
+
+    def add(self, layerType, activation):
+        # store the layer type and its activation for later construction
+        self.layers.append((layerType, activation))