
Commit f866e5c

Remove the need for RandomHelper.rnd
The code was not following dependency injection: classes depended on a static Random variable instead of receiving one. This commit removes the need for that static dependency.
1 parent (d431ba1), commit f866e5c

15 files changed (+109, -83 lines)
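The gist of the change: instead of every class reaching into the static RandomHelper.rnd field, the Random source is now passed in (constructor injection for GeneticNeuralNetworkFacilitator, an extra parameter for the static Preset helpers). Below is a minimal, self-contained sketch of that pattern; the Mutator class, the Program wrapper, and the seed value are illustrative stand-ins rather than code from this repository, and only the GetRandomCount body mirrors the diff.

using System;

// Hypothetical stand-in for the classes touched by this commit (e.g. GeneticNeuralNetworkFacilitator).
class Mutator {
    private readonly Random rnd;

    // The Random source is injected instead of being read from a static RandomHelper.rnd.
    public Mutator(Random random) {
        this.rnd = random;
    }

    public int GetRandomCount(double mutationFactor) {
        // Same shape as GeneticNeuralNetworkFacilitator.GetRandomCount in the diff below.
        return (int)Math.Round(this.rnd.NextDouble() * mutationFactor);
    }
}

class Program {
    static void Main() {
        var shared = new Random(42);        // callers now decide which Random to share; a seed makes runs repeatable
        var mutator = new Mutator(shared);
        Console.WriteLine(mutator.GetRandomCount(3.0));
    }
}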

GeNeural/Activations/LogisticActivator.cs (+2, -1)

@@ -11,7 +11,8 @@ public double ActivationFunction(double x) {
         }

         public double GetThresholdThatResultsInZeroOutput() {
-            return 6; // Not really but close enough to zero
+            // Not really but close enough to zero
+            return 6;
         }

         public double[] GetInactiveNeuronWeights(int weightCount) {

GeNeural/GeNeural.csproj (+12, -12)

@@ -37,19 +37,19 @@
     <Compile Include="Activations\IActivator.cs" />
     <Compile Include="Activations\LogisticActivator.cs" />
     <Compile Include="Training\Backpropagation\StandardBackpropagationTrainer.cs" />
-    <Compile Include="GeneticNeuralNetworkFacilitator.cs" />
-    <Compile Include="Genetic\DisimilarityFunctions.cs" />
-    <Compile Include="Genetic\EfficiencyErrorFunction.cs" />
-    <Compile Include="Genetic\GeneTrainer.cs" />
-    <Compile Include="Genetic\NeuralNetworkGeneTrainer.cs" />
-    <Compile Include="Genetic\NewGenerationFunctions.cs" />
-    <Compile Include="Genetic\OutputAccuracyErrorFunctions.cs" />
-    <Compile Include="Genetic\PartnerSelectionFunction.cs" />
-    <Compile Include="Genetic\ReproductionFunctions.cs" />
-    <Compile Include="Genetic\Trainers.cs" />
+    <Compile Include="Genetics\GeneticNeuralNetworkFacilitator.cs" />
+    <Compile Include="Genetics\DisimilarityFunctions.cs" />
+    <Compile Include="Genetics\EfficiencyErrorFunction.cs" />
+    <Compile Include="Genetics\GeneTrainer.cs" />
+    <Compile Include="Genetics\NeuralNetworkGeneTrainer.cs" />
+    <Compile Include="Genetics\NewGenerationFunctions.cs" />
+    <Compile Include="Genetics\OutputAccuracyErrorFunctions.cs" />
+    <Compile Include="Genetics\PartnerSelectionFunction.cs" />
+    <Compile Include="Genetics\ReproductionFunctions.cs" />
+    <Compile Include="Genetics\Trainers.cs" />
     <Compile Include="IDeepCloneable.cs" />
-    <Compile Include="IGenetic.cs" />
-    <Compile Include="IMutatable.cs" />
+    <Compile Include="Genetics\IGenetic.cs" />
+    <Compile Include="Genetics\IMutatable.cs" />
     <Compile Include="INeuralNetwork.cs" />
     <Compile Include="Training\ISupervisedTrainer.cs" />
     <Compile Include="NeuralNetwork.cs" />

GeNeural/Genetics/EfficiencyErrorFunction.cs (+1, -1)

@@ -4,7 +4,7 @@
 using System.Text;
 using System.Threading.Tasks;

-namespace GeNeural.Genetic {
+namespace GeNeural.Genetics {
     public static partial class Preset {
         public static class EfficiencyError {
             public static double Ignore(long ticks) {

GeNeural/Genetics/GeneTrainer.cs (+1, -1)

@@ -5,7 +5,7 @@
 using System.Text;
 using System.Threading.Tasks;

-namespace GeNeural.Genetic {
+namespace GeNeural.Genetics {
     public delegate T ReproductionFunction<T>(T parent1, T parent2);
     public delegate double AttributeDisimilarityFunction(double value1, double value2);
     public delegate double GeneticDisimilarityFunction<T>(

GeNeural/Genetics/GeneticNeuralNetworkFacilitator.cs (+16, -16)

@@ -4,7 +4,7 @@
 using System.Linq;
 using System.Text;
 using System.Threading.Tasks;
-namespace GeNeural {
+namespace GeNeural.Genetics {
     public class GeneticNeuralNetworkFacilitator : IMutatable, IDeepCloneable<GeneticNeuralNetworkFacilitator> {
         private const double VARIANCE_FACTOR = 0.01;

@@ -17,8 +17,10 @@ public class GeneticNeuralNetworkFacilitator : IMutatable, IDeepCloneable<Geneti
         private double neuronMutationFactorVarianceFactor = 0.01;
         private double neuronMutationFactor = 0.50; // Adds round(-x to x) neurons per layer
         private NeuralNetwork network;
-        public GeneticNeuralNetworkFacilitator(NeuralNetwork network) {
+        private Random rnd;
+        public GeneticNeuralNetworkFacilitator(NeuralNetwork network, Random random) {
             this.network = network;
+            this.rnd = random;
         }
         protected GeneticNeuralNetworkFacilitator(GeneticNeuralNetworkFacilitator parent) {
             network = parent.network.DeepClone();

@@ -76,38 +78,38 @@ public void Mutate(double mutationFactor = 1) {
         public void MutateHiddenLayerCount() {
             int numberOfLayersToClone = GetRandomCount(layerMutationFactor);
             //Debug.WriteLine("Creating {0} more layers.", numberOfLayersToClone);
-            if (RandomHelper.rnd.Next(0, 2) == 1) {
+            if (this.rnd.Next(0, 2) == 1) {
                 for (int _ = 0; _ < numberOfLayersToClone; _++) {
-                    int layerIndex = RandomHelper.rnd.Next(0, network.LayerCount - 1);
+                    int layerIndex = this.rnd.Next(0, network.LayerCount - 1);
                     network.InsertAfterLayer(layerIndex);
                 }
             } else {
                 for (int _ = 0; _ < numberOfLayersToClone; _++) {
                     if (network.LayerCount <= 1) { break; }
-                    int layerIndex = RandomHelper.rnd.Next(0, network.LayerCount - 1);
+                    int layerIndex = this.rnd.Next(0, network.LayerCount - 1);
                     network.RemoveLayer(layerIndex);
                 }
             }
         }
         public void MutateHiddenNeuronCount() {
             int numberOfNeuronsToClone = GetRandomCount(neuronMutationFactor);
             //Debug.WriteLine("Creating {0} more neurons", numberOfNeuronsToClone);
-            if (RandomHelper.rnd.Next(0, 2) == 1) {
+            if (this.rnd.Next(0, 2) == 1) {
                 for (int _ = 0; _ < numberOfNeuronsToClone; _++) {
                     if (network.LayerCount <= 1) {
                         break;
                     }
-                    int layerIndex = RandomHelper.rnd.Next(0, network.LayerCount - 1);
+                    int layerIndex = this.rnd.Next(0, network.LayerCount - 1);
                     //Debug.WriteLine("New neuron at layer: {0}", layerIndex);
-                    int neuronIndex = RandomHelper.rnd.Next(0, network.GetLayer(layerIndex).Length);
+                    int neuronIndex = this.rnd.Next(0, network.GetLayer(layerIndex).Length);
                     network.SplitNeuronNonDestructive(layerIndex, neuronIndex);
                 }
             } else {
                 for (int _ = 0; _ < numberOfNeuronsToClone; _++) {
                     if (network.LayerCount <= 1) { break; }
-                    int layerIndex = RandomHelper.rnd.Next(0, network.LayerCount - 1);
+                    int layerIndex = this.rnd.Next(0, network.LayerCount - 1);
                     //Debug.WriteLine("New neuron at layer: {0}", layerIndex);
-                    int neuronIndex = RandomHelper.rnd.Next(0, network.GetLayer(layerIndex).Length);
+                    int neuronIndex = this.rnd.Next(0, network.GetLayer(layerIndex).Length);
                     network.RemoveNeuron(layerIndex, neuronIndex);
                 }
             }

@@ -122,22 +124,20 @@ public void MutateWeights() {
                         double delta = GetDeltaMutatableValue(weightMutationFactor);
                         weight += delta;
                         //Debug.WriteLine("Changing weight by: {0}", delta);
-                        //weight += 0.05 + 0.1 * RandomHelper.rnd.NextDouble();//delta;
-                        //weight += RandomHelper.rnd.NextDouble();
                         neuron.SetWeight(w, weight);
                     }
                 }
             }
         }
         public double GetMultiplicativeMutableFactor(double mutableFactor) {
-            return 1 + (mutableFactor - RandomHelper.rnd.NextDouble() * mutableFactor * 2.0);
+            return 1 + (mutableFactor - this.rnd.NextDouble() * mutableFactor * 2.0);
         }
         public int GetRandomCount(double mutationFactor) {
-            return (int)Math.Round(RandomHelper.rnd.NextDouble() * mutationFactor);
+            return (int)Math.Round(this.rnd.NextDouble() * mutationFactor);
         }
         public double GetDeltaMutatableValue(double mutationFactor) {
-            double delta = RandomHelper.rnd.NextDouble() * mutationFactor;
-            if (RandomHelper.rnd.Next(0, 2) == 1)
+            double delta = this.rnd.NextDouble() * mutationFactor;
+            if (this.rnd.Next(0, 2) == 1)
                 return delta;
             else
                 return -delta;
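For readers skimming the diff above: because every draw now goes through the injected this.rnd field, a facilitator built with a seeded Random produces a repeatable mutation stream, which is awkward to arrange with a single shared static instance. The snippet below is a hedged, self-contained illustration of GetDeltaMutatableValue's behaviour; the Random is passed as a parameter purely so the snippet compiles on its own (in the class it is the field set by the constructor), and the DeltaDemo wrapper and seed are invented for the example.

using System;

class DeltaDemo {
    // Same logic as GetDeltaMutatableValue in the diff above, with the Random made explicit.
    static double GetDeltaMutatableValue(Random rnd, double mutationFactor) {
        double delta = rnd.NextDouble() * mutationFactor;
        if (rnd.Next(0, 2) == 1)
            return delta;
        else
            return -delta;
    }

    static void Main() {
        var rnd = new Random(1); // fixed seed => the same sequence of deltas every run
        for (int i = 0; i < 3; i++)
            Console.WriteLine(GetDeltaMutatableValue(rnd, 0.5)); // values in (-0.5, 0.5)
    }
}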

GeNeural/Genetics/IGenetic.cs (+1, -1)

@@ -6,7 +6,7 @@
 using System.Text;
 using System.Threading.Tasks;

-namespace GeNeural {
+namespace GeNeural.Genetics {
     public interface IGeneticVarianceFunction {
         double GetGeneticVariance(double value1, double value2);
     }

GeNeural/Genetics/IMutatable.cs (+1, -1)

@@ -4,7 +4,7 @@
 using System.Text;
 using System.Threading.Tasks;

-namespace GeNeural {
+namespace GeNeural.Genetics {
     public interface IMutatable {
         void Mutate(double mutationFactor = 1);
     }

GeNeural/Genetics/NeuralNetworkGeneTrainer.cs (+2, -2)

@@ -5,7 +5,7 @@
 using System.Text;
 using System.Threading.Tasks;

-namespace GeNeural.Genetic {
+namespace GeNeural.Genetics {
     public class NeuralNetworkGeneTrainer : GeneTrainer<GeneticNeuralNetworkFacilitator, NeuralNetwork> {
         public NeuralNetworkGeneTrainer(GeneticNeuralNetworkFacilitator[] initialPopulation, ReproductionFunction<GeneticNeuralNetworkFacilitator> reproductionFunction, GeneticDisimilarityFunction<GeneticNeuralNetworkFacilitator> geneticDisimilarityFunction, AttributeDisimilarityFunction attributeDisimilarityFunction, ReproduceNewGeneration<GeneticNeuralNetworkFacilitator> newGenerationFunction, OutputAccuracyErrorFunction getOutputAccuracyError, SelectPartnerFunction<GeneticNeuralNetworkFacilitator> selectPartnerFunction, EfficiencyErrorFunction efficiencyErrorFunction)
             : base(initialPopulation, reproductionFunction, geneticDisimilarityFunction, attributeDisimilarityFunction, newGenerationFunction, getOutputAccuracyError, selectPartnerFunction, efficiencyErrorFunction) { }

@@ -30,7 +30,7 @@ public override double[] UnfitnessOfPopulation(double[][] inputs, double[][] des
                 unfitnessOfPopulation[p] = unfitness;
                 averageUnfitness += unfitness;
             }
-            averageUnfitness /= (double)population.Length;
+            averageUnfitness /= population.Length;
             Debug.WriteLine(averageUnfitness);
            return unfitnessOfPopulation;
         }
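The second hunk also drops a redundant (double) cast: averageUnfitness is already a double, so C# implicitly widens population.Length for the division and no integer division can occur. A small stand-alone check of that rule (the AverageDemo wrapper and the numbers are illustrative only):

using System;

class AverageDemo {
    static void Main() {
        double averageUnfitness = 9.0;
        var population = new object[4]; // only Length matters here

        // The int Length is implicitly converted to double, so the removed cast changed nothing.
        averageUnfitness /= population.Length;

        Console.WriteLine(averageUnfitness); // 2.25, not integer division
    }
}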

GeNeural/Genetics/NewGenerationFunctions.cs (+4, -3)

@@ -5,11 +5,12 @@
 using System.Text;
 using System.Threading.Tasks;

-namespace GeNeural.Genetic {
+namespace GeNeural.Genetics {
     public static partial class Preset {
         public static class Generation {
             public static T[] SimpleProbabalisticNewGeneration<T>
             (
+                Random rnd,
                 T[] oldGeneration,
                 double[] unfitnessOfPopulation,
                 int newPopulationCount,

@@ -26,7 +27,7 @@ AttributeDisimilarityFunction attributeDisimilarityFunction
                 int i = 0;
                 // NOTE: When can definately optimize this algoirthm as it is effectively a recurrence equation
                 while (count < newPopulationCount) {
-                    if (RandomHelper.rnd.NextDouble() < 1.0 / (double)newPopulationCount) {
+                    if (rnd.NextDouble() < 1.0 / newPopulationCount) {
                         T chosenOne = oldGeneration[i];
                         double[] geneticDifference = new double[oldGeneration.Length];
                         for (int p = 0; p < oldGeneration.Length; p++) {

@@ -69,7 +70,7 @@ AttributeDisimilarityFunction attributeDisimilarityFunction
                     }
                     T chosenOne = oldGeneration[fittestIndex];
                     Debug.WriteLine("Fittest index: {0} | {1}", fittestIndex, unfitnessOfPopulation[fittestIndex]);
-                    Debug.WriteLine("Unfitness index: {0} | {1}", unfittestIndex, unfitnessOfPopulation[unfittestIndex]);
+                    Debug.WriteLine("Unfittest index: {0} | {1}", unfittestIndex, unfitnessOfPopulation[unfittestIndex]);
                     double[] geneticDifference = new double[oldGeneration.Length];
                     for (int p = 0; p < oldGeneration.Length; p++) {
                         geneticDifference[p] = geneticDisimilarityFunction(oldGeneration[p], chosenOne, attributeDisimilarityFunction);

GeNeural/Genetics/OutputAccuracyErrorFunctions.cs (+1, -1)

@@ -4,7 +4,7 @@
 using System.Text;
 using System.Threading.Tasks;

-namespace GeNeural.Genetic {
+namespace GeNeural.Genetics {
     public static partial class Preset {
         public static class AccuracyError {
             /// <summary>

GeNeural/Genetics/PartnerSelectionFunction.cs (+5, -5)

@@ -4,25 +4,25 @@
 using System.Text;
 using System.Threading.Tasks;

-namespace GeNeural.Genetic {
+namespace GeNeural.Genetics {
     public static partial class Preset {
         public static class PartnerSelection {
             /// <summary>
             /// Mainly used for quick test purposes.
             /// Literally going to select a random entity from the population to reproduce with. Yeah...
             /// </summary>
-            public static T RandomPartnerSelection<T>(T[] population, double[] fitness, double[] geneticDifference) {
-                int mateIndex = RandomHelper.rnd.Next(0, population.Length);
+            public static T RandomPartnerSelection<T>(Random rnd, T[] population, double[] fitness, double[] geneticDifference) {
+                int mateIndex = rnd.Next(0, population.Length);
                 return population[mateIndex];
             }
-            public static T Probabalistic<T>(T[] population, double[] fitness, double[] geneticDifference) {
+            public static T Probabalistic<T>(Random rnd, T[] population, double[] fitness, double[] geneticDifference) {
                 double[] attaction = new double[population.Length];
                 for (int i = 0; i < geneticDifference.Length; i++) {
                     attaction[i] = fitness[i] * (1.0 / (geneticDifference[i] + 1));
                 }
                 Sorter.QuickSort(geneticDifference, attaction);
                 for (int p = 0; true; p = (1 + p) % population.Length) {
-                    if (RandomHelper.rnd.NextDouble() < 1 / (double)population.Length) {
+                    if (rnd.NextDouble() < 1 / (double)population.Length) {
                         return population[p];
                     }
                 }

GeNeural/Genetics/ReproductionFunctions.cs (+13, -13)

@@ -3,7 +3,7 @@
 using System.Linq;
 using System.Text;
 using System.Threading.Tasks;
-namespace GeNeural.Genetic {
+namespace GeNeural.Genetics {
     public static partial class Preset {
         public static class Reproduction {
             public static T AlwaysCloneFirstEntity<T>(T network1, T network2) where T : IDeepCloneable<T> {

@@ -12,14 +12,14 @@ public static T AlwaysCloneFirstEntity<T>(T network1, T network2) where T : IDee
             /// <summary>
             /// Really just a testing function. 50% chance to clone network1, 50% chance to clone network2.
             /// </summary>
-            public static T CloningCoinToss<T>(T network1, T network2) where T : IDeepCloneable<T> {
-                return PickAttributeCoinToss(network1.DeepClone(), network2.DeepClone());
+            public static T CloningCoinToss<T>(Random random, T network1, T network2) where T : IDeepCloneable<T> {
+                return PickAttributeCoinToss(random, network1.DeepClone(), network2.DeepClone());
             }
-            public static GeneticNeuralNetworkFacilitator DefaultReproduction(GeneticNeuralNetworkFacilitator network1, GeneticNeuralNetworkFacilitator network2) {
+            public static GeneticNeuralNetworkFacilitator DefaultReproduction(Random random, GeneticNeuralNetworkFacilitator network1, GeneticNeuralNetworkFacilitator network2) {
                 // Pick a parent to clone
                 GeneticNeuralNetworkFacilitator chosenClonerParent;
                 GeneticNeuralNetworkFacilitator mergerParent;
-                if (RandomHelper.rnd.Next(0, 2) == 1) {
+                if (random.Next(0, 2) == 1) {
                     chosenClonerParent = network1;
                     mergerParent = network2;
                 } else {

@@ -28,14 +28,14 @@ public static GeneticNeuralNetworkFacilitator DefaultReproduction(GeneticNeuralN
                 }
                 GeneticNeuralNetworkFacilitator baby = chosenClonerParent.DeepClone();

-                baby.LayerMutationFactor = PickAttributeCoinToss(network1.LayerMutationFactor, network2.LayerMutationFactor);
-                baby.LayerMutationFactorVarianceFactor = PickAttributeCoinToss(network1.LayerMutationFactorVarianceFactor, network2.LayerMutationFactorVarianceFactor);
+                baby.LayerMutationFactor = PickAttributeCoinToss(random, network1.LayerMutationFactor, network2.LayerMutationFactor);
+                baby.LayerMutationFactorVarianceFactor = PickAttributeCoinToss(random, network1.LayerMutationFactorVarianceFactor, network2.LayerMutationFactorVarianceFactor);

-                baby.WeightMutationFactor = PickAttributeCoinToss(network1.WeightMutationFactor, network2.WeightMutationFactor);
-                baby.WeightMutationFactorVarianceFactor = PickAttributeCoinToss(network1.WeightMutationFactorVarianceFactor, network2.WeightMutationFactorVarianceFactor);
+                baby.WeightMutationFactor = PickAttributeCoinToss(random, network1.WeightMutationFactor, network2.WeightMutationFactor);
+                baby.WeightMutationFactorVarianceFactor = PickAttributeCoinToss(random, network1.WeightMutationFactorVarianceFactor, network2.WeightMutationFactorVarianceFactor);

-                baby.NeuronMutationFactor = PickAttributeCoinToss(network1.NeuronMutationFactor, network2.NeuronMutationFactor);
-                baby.NeuronMutationFactorVarianceFactor = PickAttributeCoinToss(network1.NeuronMutationFactorVarianceFactor, network2.NeuronMutationFactorVarianceFactor);
+                baby.NeuronMutationFactor = PickAttributeCoinToss(random, network1.NeuronMutationFactor, network2.NeuronMutationFactor);
+                baby.NeuronMutationFactorVarianceFactor = PickAttributeCoinToss(random, network1.NeuronMutationFactorVarianceFactor, network2.NeuronMutationFactorVarianceFactor);

                 NeuralNetwork babyNetwork = baby.Network;
                 NeuralNetwork mergingNetwork = mergerParent.Network;

@@ -60,8 +60,8 @@ public static GeneticNeuralNetworkFacilitator DefaultReproduction(GeneticNeuralN
                 }
                 return baby;
             }
-            private static T PickAttributeCoinToss<T>(T heads, T tails) {
-                return RandomHelper.rnd.Next(0, 2) == 1 ? heads : tails;
+            private static T PickAttributeCoinToss<T>(Random random, T heads, T tails) {
+                return random.Next(0, 2) == 1 ? heads : tails;
             }
         }
     }
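Since PickAttributeCoinToss now takes the Random explicitly, the same injected instance drives both the parent choice and each per-attribute coin toss in DefaultReproduction. A self-contained sketch of that coin-toss crossover follows; the CoinTossDemo wrapper, the seed, and the sample values are invented for illustration, and only the helper's body comes from the diff above.

using System;

static class CoinTossDemo {
    // Mirrors the new private PickAttributeCoinToss<T> shape in the diff above.
    static T PickAttributeCoinToss<T>(Random random, T heads, T tails) {
        return random.Next(0, 2) == 1 ? heads : tails;
    }

    static void Main() {
        var rnd = new Random(7); // one shared, seeded source for every toss
        // The child "inherits" each hyperparameter from one parent or the other.
        double parent1LayerMutationFactor = 0.25, parent2LayerMutationFactor = 0.75;
        double childLayerMutationFactor =
            PickAttributeCoinToss(rnd, parent1LayerMutationFactor, parent2LayerMutationFactor);
        Console.WriteLine(childLayerMutationFactor);
    }
}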
