Skip to content

Commit b998ae1

Browse files
committed
Add activation functions and break them out into their own static object
1 parent 453b7b9 commit b998ae1

1 file changed

Lines changed: 28 additions & 20 deletions

File tree

neuralNet.scala

Lines changed: 28 additions & 20 deletions
Original file line numberDiff line numberDiff line change
@@ -28,6 +28,32 @@ object NeuralNetUtilities {
2828
}
2929
}
3030

31+
/** Collection of neuron activation functions and their derivatives.
  *
  * Note on conventions: `sigmoidPrime` expects the *output* of `sigmoid`
  * as its argument (callers pass the already-activated value), while the
  * other `*Prime` functions take the raw pre-activation input.
  */
object ActivationFunctions {

  /** Derivative of the bipolar sigmoid, taking the raw input.
    * For f(x) = 2/(1+e^-x) - 1, f'(x) = (1 + f(x)) * (1 - f(x)) / 2.
    * Bug fix: the identity only holds for the bipolar form, so this must
    * call bipolarSigmoid — the original called the unipolar sigmoid,
    * yielding (1+σ)(1-σ)/2 instead of the correct 2σ(1-σ).
    */
  def bipolarSigmoidPrime(input: Double) : Double = {
    (1 + bipolarSigmoid(input)) * (1 - bipolarSigmoid(input)) / 2
  }

  /** Bipolar sigmoid: maps the reals onto (-1, 1). */
  def bipolarSigmoid(input : Double) : Double = {
    2 / (1 + Math.exp(-input)) - 1
  }

  /** Derivative of the logistic sigmoid, expressed in terms of the
    * sigmoid's *output*: if y = sigmoid(x) then dy/dx = y * (1 - y).
    */
  def sigmoidPrime(input : Double) : Double = {
    input * (1 - input)
  }

  /** Derivative of the scaled tanh defined below:
    * d/dx [1.7159 * tanh(2x/3)] = 1.7159 * (2/3) * sech^2(2x/3).
    * Bug fix: the original returned 3.4318 * sech^2(2x) — the derivative
    * of 1.7159 * tanh(2x) — which did not match `tanh`.
    */
  def tanhPrime(input : Double) : Double = {
    1.7159 * (2.0 / 3.0) * scala.math.pow(1 / scala.math.cosh(2.0 / 3.0 * input), 2)
  }

  /** Scaled hyperbolic tangent activation: 1.7159 * tanh(2x/3)
    * (LeCun's recommended form).
    * Bug fix: `2/3` was Int division (= 0), so this previously evaluated
    * tanh(0) and returned 0.0 for every input.
    */
  def tanh(input : Double) : Double = {
    1.7159 * scala.math.tanh(2.0 / 3.0 * input)
  }

  /** Logistic sigmoid: maps the reals onto (0, 1). */
  def sigmoid(input : Double) : Double = {
    1.0 / (1.0 + Math.exp(-input))
  }
}
3157
/** A simple neural network with a single input neuron and a single output neuron and a given number of hidden neurons. */
3258
class NeuralNet(numberInputNeurons : Int , numberHiddenNeurons : Int, learningRate : Double, initialBias : Double) {
3359
private val _outputNeuron = new Neuron(false, initialBias)
@@ -56,19 +82,11 @@ object NeuralNetUtilities {
5682
new Connection(hiddenNeuron, _outputNeuron) // Connect the hidden neuron to the output neuron
5783
}
5884

59-
def sigmoidPrime(input : Double) : Double = {
60-
return input * (1 - input)
61-
}
62-
63-
def tanhPrime(input : Double) : Double = {
64-
return 3.4318*scala.math.pow((1/scala.math.cosh(2*input)), 2)
65-
}
66-
6785
/** Take a supervised output value and backpropagate the error through the neural net. */
6886
def train(input : Array[Double], actual : Double) : Double = {
6987
val result = feedForward(input)
7088
val error = actual - result
71-
val deltaOutput = sigmoidPrime(result) * error // Derivative of the sigmoid function
89+
val deltaOutput = ActivationFunctions.sigmoidPrime(result) * error // Derivative of the sigmoid function
7290
backpropogate(deltaOutput)
7391
return result
7492
}
@@ -159,24 +177,14 @@ object NeuralNetUtilities {
159177
}
160178
}
161179
if (hasInputConnection == true) {
162-
_sum = sigmoid(sum + bias)
180+
_sum = ActivationFunctions.sigmoid(sum + bias)
163181
}
164182
else { // This is an input neuron
165183
_sum = input
166184
}
167185
}
168186
}
169187

170-
/** tanh activation function */
171-
def tanh(input : Double) : Double = {
172-
return 1.7159 * scala.math.tanh(2/3*input)
173-
}
174-
175-
/** Sigmoid activation function */
176-
def sigmoid(input : Double) : Double = {
177-
return 1.0 / (1.0 + Math.exp(-input))
178-
}
179-
180188
/** Return the most recently calculated value */
181189
def output() : Double = {
182190
return _sum

0 commit comments

Comments
 (0)