Skip to content

Commit e778372

Browse files
committed
Separated the backpropagation algorithm from the neural network. Added a supervised trainer abstraction.
There's no logical reason for the backpropagation algorithm to reside within the neural network; you should be able to use different learning algorithms on the same neural network. I believe first-year-me originally intended to use inheritance to override the backpropagation in NeuralNetwork subclasses. Bad idea.
1 parent 1fcdcb9 commit e778372

9 files changed

+126
-59
lines changed

GeNeural/GeNeural.csproj

+4-1
Original file line numberDiff line numberDiff line change
@@ -36,6 +36,7 @@
3636
<ItemGroup>
3737
<Compile Include="Activations\IActivator.cs" />
3838
<Compile Include="Activations\LogisticActivator.cs" />
39+
<Compile Include="Training\Backpropagation\StandardBackpropagationTrainer.cs" />
3940
<Compile Include="GeneticNeuralNetworkFacilitator.cs" />
4041
<Compile Include="Genetic\DisimilarityFunctions.cs" />
4142
<Compile Include="Genetic\EfficiencyErrorFunction.cs" />
@@ -49,12 +50,14 @@
4950
<Compile Include="IDeepCloneable.cs" />
5051
<Compile Include="IGenetic.cs" />
5152
<Compile Include="IMutatable.cs" />
53+
<Compile Include="INeuralNetwork.cs" />
54+
<Compile Include="Training\ISupervisedTrainer.cs" />
5255
<Compile Include="NeuralNetwork.cs" />
5356
<Compile Include="Helpers.cs" />
5457
<Compile Include="Neuron.cs" />
5558
<Compile Include="Properties\AssemblyInfo.cs" />
5659
<Compile Include="Sorter.cs" />
57-
<Compile Include="TopologyTrainer.cs" />
60+
<Compile Include="Training\Topology\TopologyTrainer.cs" />
5861
</ItemGroup>
5962
<Import Project="$(MSBuildExtensionsPath32)\Microsoft\Portable\$(TargetFrameworkVersion)\Microsoft.Portable.CSharp.targets" />
6063
<!-- To modify your build process, add your task inside one of the targets below and uncomment it.

GeNeural/GeneticNeuralNetworkFacilitator.cs

+1-1
Original file line numberDiff line numberDiff line change
@@ -18,7 +18,7 @@ public class GeneticNeuralNetworkFacilitator : IMutatable, IDeepCloneable<Geneti
1818

1919
private double neuronMutationFactorVarianceFactor = 0.01;
2020
private double neuronMutationFactor = 0.50; // Adds round(-x to x) neurons per layer
21-
NeuralNetwork network;
21+
private NeuralNetwork network;
2222
public GeneticNeuralNetworkFacilitator(NeuralNetwork network)
2323
{
2424
this.network = network;

GeNeural/INeuralNetwork.cs

+18
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,18 @@
1+
using System;
2+
using System.Collections.Generic;
3+
using System.Linq;
4+
using System.Text;
5+
using System.Threading.Tasks;
6+
7+
namespace GeNeural
{
    // TODO: There is likely a better name for this interface. Not all neural networks
    // naturally uphold this interface and many other machine learning algorithms do.
    /// <summary>
    /// A model that maps an input vector to an output vector via a forward pass.
    /// </summary>
    public interface INeuralNetwork
    {
        /// <summary>
        /// Computes the model's output vector for the given input vector.
        /// </summary>
        /// <param name="inputs">The input vector.</param>
        /// <returns>The output vector produced by the model.</returns>
        double[] CalculateOutputs(double[] inputs);
    }
}

GeNeural/NeuralNetwork.cs

+2-47
Original file line numberDiff line numberDiff line change
@@ -13,7 +13,7 @@ namespace GeNeural
1313
using System.Text;
1414
using System.Threading.Tasks;
1515
public class NotEnoughLayersException : Exception { }
16-
public class NeuralNetwork : IDeepCloneable<NeuralNetwork>
16+
public class NeuralNetwork : IDeepCloneable<NeuralNetwork>, INeuralNetwork
1717
{
1818
private const int INPUT_NEURON_WEIGHTS_COUNT = 2;
1919
private Neuron[][] neurons;
@@ -97,7 +97,7 @@ public double[] CalculateOutputs(double[] inputs)
9797
double[][] outputs = CalculateAllOutputs(inputs);
9898
return outputs[outputs.Length - 1];
9999
}
100-
private double[][] CalculateAllOutputs(double[] inputs)
100+
public double[][] CalculateAllOutputs(double[] inputs)
101101
{
102102
double[][] outputs = new double[neurons.Length][];
103103
outputs[0] = new double[neurons[0].Length];
@@ -204,51 +204,6 @@ public void ResetMomentum()
204204
foreach (Neuron neuron in layer)
205205
neuron.ResetMomentum();
206206
}
207-
public void BackPropagate(double[] inputs, double[] desiredOutputs)
208-
{
209-
double[][] outputs = CalculateAllOutputs(inputs);
210-
double[][] weirdDThing = new double[outputs.Length][];
211-
for (int l = 0; l < outputs.Length; l++)
212-
weirdDThing[l] = new double[outputs[l].Length];
213-
for (int n = 0; n < outputs[outputs.Length - 1].Length; n++)
214-
{
215-
double neuronOutput = outputs[outputs.Length - 1][n];
216-
weirdDThing[weirdDThing.Length - 1][n] = (neuronOutput - desiredOutputs[n]) * neuronOutput * (1 - neuronOutput);
217-
}
218-
for (int l = outputs.Length - 2; l >= 0; l--)
219-
{
220-
for (int n = 0; n < neurons[l].Length; n++)
221-
{
222-
double neuronOutput = outputs[l][n];
223-
double sumThing = 0;
224-
for (int n2 = 0; n2 < neurons[l + 1].Length; n2++)
225-
{
226-
sumThing += weirdDThing[l + 1][n2] * neurons[l + 1][n2].Weights[n];
227-
}
228-
weirdDThing[l][n] = sumThing * neuronOutput * (1 - neuronOutput);
229-
}
230-
}
231-
const double learningFactor = 0.1;
232-
for (int n = 0; n < neurons[0].Length; n++)
233-
{
234-
neurons[0][n].Weights[0] -= learningFactor * weirdDThing[0][n] * -1;
235-
for (int n2 = 0; n2 < inputs.Length; n2++)
236-
{
237-
neurons[0][n].Weights[n2 + 1] -= learningFactor * weirdDThing[0][n] * inputs[n2];
238-
}
239-
}
240-
for (int l = 1; l < neurons.Length; l++)
241-
{
242-
for (int n = 0; n < neurons[l].Length; n++)
243-
{
244-
neurons[l][n].Weights[0] -= learningFactor * weirdDThing[l][n] * -1;
245-
for (int n2 = 0; n2 < neurons[l - 1].Length; n2++)
246-
{
247-
neurons[l][n].Weights[n2 + 1] -= learningFactor * weirdDThing[l][n] * outputs[l - 1][n2];
248-
}
249-
}
250-
}
251-
}
252207
public void AddOutputNeuron(Neuron neuron)
253208
{
254209
int layerIndex = neurons.Length - 1;

GeNeural/Sorter.cs

+1-1
Original file line numberDiff line numberDiff line change
@@ -6,7 +6,7 @@
66

77
namespace GeNeural
88
{
9-
static class Sorter
9+
public static class Sorter
1010
{
1111
private static void Swap<T>(T[] array, int i1, int i2)
1212
{
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,74 @@
1+
using System;
2+
using System.Collections.Generic;
3+
using System.Linq;
4+
using System.Text;
5+
using System.Threading.Tasks;
6+
7+
namespace GeNeural.Training.Backpropagation
8+
{
9+
public sealed class StandardBackpropagationTrainer : ISupervisedTrainer<NeuralNetwork>
10+
{
11+
public void Backpropagation(NeuralNetwork neuralNetwork, double[] inputs, double[] desiredOutputs)
12+
{
13+
// We need to calculate the current outputs, given a set of inputs in order to do backpropagation.
14+
double[][] outputs = neuralNetwork.CalculateAllOutputs(inputs);
15+
// TODO: Revisit 'weirdDThing'.
16+
double[][] weirdDThing = new double[outputs.Length][];
17+
for (int l = 0; l < outputs.Length; l++)
18+
{
19+
weirdDThing[l] = new double[outputs[l].Length];
20+
}
21+
for (int n = 0; n < outputs[outputs.Length - 1].Length; n++)
22+
{
23+
double neuronOutput = outputs[outputs.Length - 1][n];
24+
weirdDThing[weirdDThing.Length - 1][n] = (neuronOutput - desiredOutputs[n]) * neuronOutput * (1 - neuronOutput);
25+
}
26+
for (int l = outputs.Length - 2; l >= 0; l--)
27+
{
28+
int inputLength = neuralNetwork.GetLayer(l).Length;
29+
Neuron[] currentLayer = neuralNetwork.GetLayer(l + 1);
30+
for (int n = 0; n < inputLength; n++)
31+
{
32+
double neuronOutput = outputs[l][n];
33+
double sumThing = 0;
34+
for (int n2 = 0; n2 < currentLayer.Length; n2++)
35+
{
36+
sumThing += weirdDThing[l + 1][n2] * currentLayer[n2].Weights[n];
37+
}
38+
weirdDThing[l][n] = sumThing * neuronOutput * (1 - neuronOutput);
39+
}
40+
}
41+
const double learningFactor = 0.1;
42+
// Now we actually modify the the weights of the neurons.
43+
// The first layer is a special case it doesn't have any previous layers to deal with.
44+
Neuron[] firstLayer = neuralNetwork.GetLayer(0);
45+
for (int n = 0; n < firstLayer.Length; n++)
46+
{
47+
firstLayer[n].Weights[0] -= learningFactor * weirdDThing[0][n] * -1;
48+
for (int n2 = 0; n2 < inputs.Length; n2++)
49+
{
50+
firstLayer[n].Weights[n2 + 1] -= learningFactor * weirdDThing[0][n] * inputs[n2];
51+
}
52+
}
53+
// Now modify the weights for the other neural networks.
54+
for (int l = 1; l < neuralNetwork.LayerCount; l++)
55+
{
56+
Neuron[] currentLayer = neuralNetwork.GetLayer(l);
57+
Neuron[] previousLayer = neuralNetwork.GetLayer(l - 1);
58+
for (int n = 0; n < currentLayer.Length; n++)
59+
{
60+
currentLayer[n].Weights[0] -= learningFactor * weirdDThing[l][n] * -1;
61+
for (int n2 = 0; n2 < previousLayer.Length; n2++)
62+
{
63+
currentLayer[n].Weights[n2 + 1] -= learningFactor * weirdDThing[l][n] * outputs[l - 1][n2];
64+
}
65+
}
66+
}
67+
}
68+
69+
public void Train(NeuralNetwork trainable, double[] trainingInputs, double[] trainingOutputs)
70+
{
71+
Backpropagation(trainable, trainingInputs, trainingOutputs);
72+
}
73+
}
74+
}
+13
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,13 @@
1+
namespace GeNeural.Training
2+
{
3+
public interface ISupervisedTrainer<T>
4+
{
5+
/// <summary>
6+
/// Trains a given trainable entity to output a certain set of outputs, given a certain set of inputs.
7+
/// </summary>
8+
/// <param name="trainable"></param>
9+
/// <param name="trainingInputs"></param>
10+
/// <param name="trainingOutputs"></param>
11+
void Train(T trainable, double[] trainingInputs, double[] trainingOutputs);
12+
}
13+
}

GeNeural/TopologyTrainer.cs renamed to GeNeural/Training/Topology/TopologyTrainer.cs

+8-9
Original file line numberDiff line numberDiff line change
@@ -4,30 +4,29 @@
44
using System.Linq;
55
using System.Text;
66
using System.Threading.Tasks;
7-
8-
namespace GeNeural.Trainers
7+
namespace GeNeural.Training.Topology
98
{
10-
public class TopologyTrainer
9+
public sealed class TopologyTrainer
1110
{
12-
public static NeuralNetwork ConfigureTopology(double[][] trainingInputs, double [][] trainingDesiredOutputs, double[][] testInputs, double[][] testDesiredOutputs)
11+
public static NeuralNetwork ConfigureTopology(ISupervisedTrainer<NeuralNetwork> trainer, double[][] trainingInputs, double [][] trainingDesiredOutputs, double[][] testInputs, double[][] testDesiredOutputs)
1312
{
1413
NeuralNetwork untrainedStubNetwork = new NeuralNetwork(trainingInputs[0].Length,new int[] { trainingInputs[0].Length, trainingDesiredOutputs[0].Length});
1514
NeuralNetwork trainedStubNetwork = untrainedStubNetwork.DeepClone();
16-
Train(trainedStubNetwork, trainingInputs, trainingDesiredOutputs);
15+
Train(trainer, trainedStubNetwork, trainingInputs, trainingDesiredOutputs);
1716
double stubNetworkError = GetTotalError(trainedStubNetwork, trainingInputs, trainingDesiredOutputs) + GetTotalError(trainedStubNetwork, testInputs, testDesiredOutputs);
1817
while(true)
1918
{
2019
NeuralNetwork untrainedLayerNetwork = untrainedStubNetwork.DeepClone();
2120
untrainedLayerNetwork.InsertAfterLayer(untrainedLayerNetwork.LayerCount - 1);
2221
NeuralNetwork trainedLayerNetwork = untrainedStubNetwork.DeepClone();
23-
Train(trainedLayerNetwork, trainingInputs, trainingDesiredOutputs);
22+
Train(trainer, trainedLayerNetwork, trainingInputs, trainingDesiredOutputs);
2423
double layerNetworkError = GetTotalError(trainedLayerNetwork, trainingInputs, trainingDesiredOutputs) + GetTotalError(trainedLayerNetwork, testInputs, testDesiredOutputs);
2524
while(true)
2625
{
2726
NeuralNetwork untrainedNeuronNetwork = untrainedLayerNetwork.DeepClone();
2827
untrainedNeuronNetwork.AddNeuronNonDestructive(untrainedNeuronNetwork.LayerCount - 2);
2928
NeuralNetwork trainedNeuronNetwork = untrainedNeuronNetwork.DeepClone();
30-
Train(trainedNeuronNetwork, trainingInputs, trainingDesiredOutputs);
29+
Train(trainer, trainedNeuronNetwork, trainingInputs, trainingDesiredOutputs);
3130
double neuronNetworkError = GetTotalError(trainedNeuronNetwork, trainingInputs, trainingDesiredOutputs);
3231
if(neuronNetworkError < layerNetworkError)
3332
{
@@ -69,7 +68,7 @@ public static double GetError(double desiredOutput, double actualOutput)
6968
double difference = desiredOutput - actualOutput;
7069
return difference * difference;
7170
}
72-
public static void Train(NeuralNetwork network, double[][] inputs, double[][] desiredOutputs)
71+
public static void Train<T>(ISupervisedTrainer<T> trainer, T network, double[][] inputs, double[][] desiredOutputs) where T : NeuralNetwork
7372
{
7473

7574
for (int _ = 0; _ < 100; _++)
@@ -78,7 +77,7 @@ public static void Train(NeuralNetwork network, double[][] inputs, double[][] de
7877
{
7978
for (int i = 0; i < 50; i++)
8079
{
81-
network.BackPropagate(inputs[t], desiredOutputs[t]);
80+
trainer.Train(network, inputs[t], desiredOutputs[t]);
8281
}
8382
}
8483
}

GeNeural/obj/Debug/GeNeural.csproj.FileListAbsolute.txt

+5
Original file line numberDiff line numberDiff line change
@@ -7,3 +7,8 @@ C:\Users\eastd\Documents\Visual Studio 2015\Projects\GeNeural\GeNeural\bin\Debug
77
C:\Users\eastd\Documents\Visual Studio 2015\Projects\GeNeural\GeNeural\obj\Debug\GeNeural.dll
88
C:\Users\eastd\Documents\Visual Studio 2015\Projects\GeNeural\GeNeural\obj\Debug\GeNeural.pdb
99
C:\Users\eastd\Documents\Visual Studio 2015\Projects\GeNeural\GeNeural\obj\Debug\GeNeural.csprojResolveAssemblyReference.cache
10+
C:\Users\eastd\Code\GeNeural\GeNeural\obj\Debug\GeNeural.csprojResolveAssemblyReference.cache
11+
C:\Users\eastd\Code\GeNeural\GeNeural\bin\Debug\GeNeural.dll
12+
C:\Users\eastd\Code\GeNeural\GeNeural\bin\Debug\GeNeural.pdb
13+
C:\Users\eastd\Code\GeNeural\GeNeural\obj\Debug\GeNeural.dll
14+
C:\Users\eastd\Code\GeNeural\GeNeural\obj\Debug\GeNeural.pdb

0 commit comments

Comments
 (0)