Skip to content

Commit 19e8bfa

Browse files
committed
Fixed a bug where push_back(...) was used instead of at(...) when initialising a vector<double>, which appended values after the pre-sized elements instead of overwriting them.
1 parent 72804e3 commit 19e8bfa

File tree

5 files changed

+51
-35
lines changed

5 files changed

+51
-35
lines changed

GeNeural/GeNeural/Genetics/GeneticNeuralNetworkFacilitator.cs

+4-4
Original file line numberDiff line numberDiff line change
@@ -84,18 +84,18 @@ public GeneticNeuralNetworkFacilitator DeepClone() {
8484

8585
public void Mutate(double mutationFactor = 1) {
8686
weightMutationFactor *= GetMultiplicativeMutableFactor(weightMutationVariance) + GetDeltaMutatableValue(0.000000000000001);
87-
// layerMutationFactor *= GetMultiplicativeMutableFactor(layerMutationVariance) + GetDeltaMutatableValue(0.000000000000001);
87+
layerMutationFactor *= GetMultiplicativeMutableFactor(layerMutationVariance) + GetDeltaMutatableValue(0.000000000000001);
8888
neuronMutationFactor *= GetMultiplicativeMutableFactor(neuronMutationVariance) + GetDeltaMutatableValue(0.000000000000001);
8989

9090
MutateWeights();
9191
// Mutate layers count
92-
MutateHiddenLayerCount();
92+
// MutateHiddenLayerCount();
9393
// Mutate neuron count
94-
MutateHiddenNeuronCount();
94+
// MutateHiddenNeuronCount();
9595
}
9696
public void MutateHiddenLayerCount() {
9797
int numberOfLayersToClone = GetRandomCount(layerMutationFactor);
98-
//Debug.WriteLine("Creating {0} more layers.", numberOfLayersToClone);
98+
// Debug.WriteLine("Creating {0} more layers.", numberOfLayersToClone);
9999
if (this.rnd.Next(0, 2) == 1) {
100100
for (int _ = 0; _ < numberOfLayersToClone; _++) {
101101
int layerIndex = this.rnd.Next(0, network.LayerCount - 1);

GeNeural/GeNeural/NeuralNetwork.cs

+15-11
Original file line numberDiff line numberDiff line change
@@ -14,29 +14,32 @@ protected NeuralNetwork(NeuralNetwork network) {
1414
for (int l = 0; l < neurons.Length; l++) {
1515
neurons[l] = new Neuron[network.neurons[l].Length];
1616
for (int n = 0; n < neurons[l].Length; n++) {
17-
neurons[l][n] = new Neuron(network.neurons[l][n].CloneWeights());
17+
double[] clonedWeights = network.neurons[l][n].CloneWeights();
18+
// Debug.WriteLine("Cloned weight count: {0} vs {1}", network.neurons[l][n].GetWeightSize(), clonedWeights.Length);
19+
neurons[l][n] = new Neuron(clonedWeights);
1820
}
1921
}
2022
}
2123

2224
public NeuralNetwork(int inputCountTempShim, int[] neuralCounts) {
23-
ulong inputCount = (ulong)inputCountTempShim;
25+
ulong inputCount = (ulong)(inputCountTempShim);
26+
// Debug.WriteLine("inputCount = {0}", inputCount);
2427
if (neuralCounts.Length < 1) { throw new Exception(); }
2528
neurons = new Neuron[neuralCounts.Length][];
2629
neurons[0] = new Neuron[neuralCounts[0]];
27-
Debug.WriteLine("Creating input neurons...");
30+
// Debug.WriteLine("Creating input neurons...");
2831
for (int n = 0; n < neurons[0].Length; n++) {
2932
neurons[0][n] = new Neuron(GetInactiveNeuronWeights(inputCount + 1));
3033
}
31-
Debug.WriteLine("Creating other neurons...");
34+
// Debug.WriteLine("Creating other neurons...");
3235
for (int l = 1; l < neuralCounts.Length; l++) {
3336
neurons[l] = new Neuron[neuralCounts[l]];
3437
for (int n = 0; n < neurons[l].Length; n++) {
35-
double[] weights = GetInactiveNeuronWeights((ulong)neurons[l - 1].Length + 1);
38+
double[] weights = GetInactiveNeuronWeights((ulong)(neurons[l - 1].Length) + 1);
3639
neurons[l][n] = new Neuron(weights);
3740
}
3841
}
39-
Debug.WriteLine("Created neurons.");
42+
// Debug.WriteLine("Created neurons.");
4043
}
4144

4245
public int LayerCount {
@@ -54,9 +57,9 @@ public double GetInactiveNeuronInputWeight() {
5457
public void RandomizeWeights(Random random, double min = 0, double max = 1) {
5558
for (int l = 0; l < neurons.Length; l++) {
5659
for (int n = 0; n < neurons[l].Length; n++) {
57-
Debug.WriteLine("Randomizing weight (layer: {0}, neuron: {1})", l, n);
60+
// Debug.WriteLine("Randomizing weight (layer: {0}, neuron: {1})", l, n);
5861
for (ulong w = 0; w < neurons[l][n].GetWeightSize(); w++) {
59-
Debug.WriteLine("Randomizing weight (layer: {0}, neuron: {1}, weight: {2})", l, n, w);
62+
// Debug.WriteLine("Randomizing weight (layer: {0}, neuron: {1}, weight: {2})", l, n, w);
6063
neurons[l][n].SetWeight(w, min + random.NextDouble() * (max - min));
6164
}
6265
}
@@ -81,6 +84,7 @@ public double[] Classify(double[] inputs) {
8184
public double[][] CalculateAllOutputs(double[] inputs) {
8285
double[][] outputs = new double[neurons.Length][];
8386
outputs[0] = new double[neurons[0].Length];
87+
// Debug.WriteLine("[" + string.Join(", ", inputs) + "]");
8488
for (int n = 0; n < neurons[0].Length; n++) {
8589
outputs[0][n] = neurons[0][n].GetOutput(inputs);
8690
}
@@ -102,7 +106,7 @@ public int GetNeuronCount(int layerIndex) {
102106
/// </summary>
103107
public void InsertAfterLayer(int layerIndex) {
104108
Neuron[] layer = new Neuron[neurons[layerIndex].Length];
105-
for (ulong n = 0; n < (ulong)layer.Length; n++) {
109+
for (ulong n = 0; n < (ulong)(layer.Length); n++) {
106110
double[] inputWeights = new double[neurons[layerIndex].Length + 1];
107111

108112
Neuron newNeuron = new Neuron(inputWeights);
@@ -184,13 +188,13 @@ public void AddNonOutputNeuron(int layerIndex, Neuron neuron, double[] outputWei
184188
/// This effectively adds a neuron without causing the network's behaviour/outputs to change.
185189
/// </summary>
186190
public void SplitNeuronNonDestructive(int layerIndex, int neuronIndexCSharp) {
187-
ulong neuronIndex = (ulong)neuronIndexCSharp;
191+
ulong neuronIndex = (ulong)(neuronIndexCSharp);
188192
Neuron duplicatedNeuron = new Neuron(neurons[layerIndex][neuronIndex].CloneWeights());
189193
if (layerIndex == neurons.Length - 1) {
190194
AddOutputNeuron(duplicatedNeuron);
191195
} else {
192196
double[] outputWeights = new double[neurons[layerIndex + 1].Length];
193-
for (ulong n2 = 0; n2 < (ulong)neurons[layerIndex + 1].Length; n2++) {
197+
for (ulong n2 = 0; n2 < (ulong)(neurons[layerIndex + 1].Length); n2++) {
194198
double halvedWeight = neurons[layerIndex + 1][n2].GetNeuronWeight(neuronIndex);
195199
outputWeights[n2] = halvedWeight;
196200
neurons[layerIndex + 1][n2].SetNeuronWeight(neuronIndex, halvedWeight);

GeNeural/GeNeural/Training/Backpropagation/StandardBackpropagationTrainer.cs

+6-6
Original file line numberDiff line numberDiff line change
@@ -20,12 +20,12 @@ public void Backpropagation(NeuralNetwork neuralNetwork, double[] inputs, double
2020
weirdDThing[weirdDThing.Length - 1][n] = (neuronOutput - desiredOutputs[n]) * neuronOutput * (1 - neuronOutput);
2121
}
2222
for (int l = outputs.Length - 2; l >= 0; l--) {
23-
ulong inputLength = (ulong)neuralNetwork.GetLayer(l).Length;
23+
ulong inputLength = (ulong)(neuralNetwork.GetLayer(l).Length);
2424
Neuron[] currentLayer = neuralNetwork.GetLayer(l + 1);
2525
for (ulong n = 0; n < inputLength; n++) {
2626
double neuronOutput = outputs[l][n];
2727
double sumThing = 0;
28-
for (ulong n2 = 0; n2 < (ulong)currentLayer.Length; n2++) {
28+
for (ulong n2 = 0; n2 < (ulong)(currentLayer.Length); n2++) {
2929
sumThing += weirdDThing[l + 1][n2] * currentLayer[n2].GetWeight(n);
3030
}
3131
weirdDThing[l][n] = sumThing * neuronOutput * (1 - neuronOutput);
@@ -42,7 +42,7 @@ public void Backpropagation(NeuralNetwork neuralNetwork, double[] inputs, double
4242
newThresholdWeight -= learningRateFactor * weirdDThing[0][n] * -1;
4343
neuron.SetWeight(0, newThresholdWeight);
4444
// Modify the neuron to input weights
45-
for (ulong n2 = 0; n2 < (ulong)inputs.Length; n2++) {
45+
for (ulong n2 = 0; n2 < (ulong)(inputs.Length); n2++) {
4646
ulong weightIndex = n2 + 1;
4747
double newNeuronToInputWeight = neuron.GetWeight(weightIndex);
4848
newNeuronToInputWeight -= learningRateFactor * weirdDThing[0][n] * inputs[n2];
@@ -53,12 +53,12 @@ public void Backpropagation(NeuralNetwork neuralNetwork, double[] inputs, double
5353
for (int l = 1; l < neuralNetwork.LayerCount; l++) {
5454
Neuron[] currentLayer = neuralNetwork.GetLayer(l);
5555
Neuron[] previousLayer = neuralNetwork.GetLayer(l - 1);
56-
for (ulong n = 0; n < (ulong)currentLayer.Length; n++) {
56+
for (ulong n = 0; n < (ulong)(currentLayer.Length); n++) {
5757
Neuron neuron1 = currentLayer[n];
58-
double newThresholdWeight = neuron1.GetWeight(n);
58+
double newThresholdWeight = neuron1.GetWeight(0);
5959
newThresholdWeight -= learningRateFactor * weirdDThing[l][n] * -1;
6060
neuron1.SetWeight(0, newThresholdWeight);
61-
for (ulong n2 = 0; n2 < (ulong)previousLayer.Length; n2++) {
61+
for (ulong n2 = 0; n2 < (ulong)(previousLayer.Length); n2++) {
6262
Neuron neuron2 = previousLayer[n2];
6363
ulong weightIndex = n2 + 1;
6464
double newNeuronToNeuronWeight = neuron2.GetWeight(weightIndex);

NeuralCLI/NeuralCLI/NeuralCLI.vcxproj

+2-1
Original file line numberDiff line numberDiff line change
@@ -90,7 +90,8 @@
9090
<WarningLevel>Level3</WarningLevel>
9191
<Optimization>Disabled</Optimization>
9292
<PreprocessorDefinitions>WIN32;_DEBUG;%(PreprocessorDefinitions)</PreprocessorDefinitions>
93-
<PrecompiledHeader>Use</PrecompiledHeader>
93+
<PrecompiledHeader>NotUsing</PrecompiledHeader>
94+
<PrecompiledHeaderFile />
9495
</ClCompile>
9596
<Link>
9697
<AdditionalDependencies />

NeuralCLI/NeuralCLI/NeuronCLI.cpp

+24-13
Original file line numberDiff line numberDiff line change
@@ -2,12 +2,14 @@
22
#include <memory>
33
namespace NeuralCLI {
44
Neuron::Neuron(array<double>^ weights) {
5-
System::Diagnostics::Debug::WriteLine("Creating neuron...");
6-
std::vector<double> nativeWeights(weights->Length);
7-
for each (double weight in weights) {
8-
nativeWeights.push_back(weight);
5+
// System::Diagnostics::Debug::WriteLine("Creating neuron...");
6+
std::shared_ptr<std::vector<double>> stdWeights = std::make_shared<std::vector<double>>(weights->Length);
7+
for (size_t w = 0; w < stdWeights->size(); w++) {
8+
stdWeights->at(w) = weights[w];
99
}
10-
this->neuron = new Neural::NeuronC(nativeWeights);
10+
// System::Diagnostics::Debug::WriteLine("Weight count: {0}", stdWeights->size());
11+
this->neuron = new Neural::NeuronC(stdWeights);
12+
// System::Diagnostics::Debug::WriteLine("Actual Weight count: {0}", neuron->GetWeightSize());
1113
}
1214

1315
Neuron::~Neuron() {
@@ -31,9 +33,9 @@ namespace NeuralCLI {
3133
}
3234

3335
void Neuron::SetWeights(array<double>^ weights) {
34-
std::vector<double> stdWeights(weights->Length);
35-
for each (double weight in weights) {
36-
stdWeights.push_back(weight);
36+
std::shared_ptr<std::vector<double>> stdWeights = std::make_shared<std::vector<double>>(weights->Length);
37+
for (size_t w = 0; w < stdWeights->size(); w++) {
38+
stdWeights->at(w) = weights[w];
3739
}
3840
this->neuron->SetWeights(stdWeights);
3941
}
@@ -47,7 +49,10 @@ namespace NeuralCLI {
4749
}
4850

4951
void Neuron::SetWeight(size_t weightIndex, double weight) {
52+
// System::Diagnostics::Debug::WriteLine("Original weight: {0}", this->neuron->GetWeight(weightIndex));
53+
// System::Diagnostics::Debug::WriteLine("New weight: {0}", weight);
5054
this->neuron->SetWeight(weightIndex, weight);
55+
// System::Diagnostics::Debug::WriteLine("Actual weight: {0}", this->neuron->GetWeight(weightIndex));
5156
}
5257

5358
void Neuron::SetThresholdWeight(double weight) {
@@ -60,10 +65,15 @@ namespace NeuralCLI {
6065

6166
double Neuron::GetOutput(array<double>^ inputs) {
6267
std::vector<double> stdInputs(inputs->Length);
63-
for each (double input in inputs) {
64-
stdInputs.push_back(input);
68+
for (int i = 0; i < stdInputs.size(); i++) {
69+
stdInputs.at(i) = inputs[i];
6570
}
66-
return this->neuron->GetOutput(stdInputs);
71+
double output = this->neuron->GetOutput(stdInputs);
72+
/*for (size_t w = 0; w < this->neuron->GetWeightSize(); w++) {
73+
System::Diagnostics::Debug::WriteLine("Actual neuron weight {0}: {1}", w, this->neuron->GetWeight(w));
74+
}
75+
System::Diagnostics::Debug::WriteLine("Received output: {0}", output);*/
76+
return output;
6777
}
6878

6979
size_t Neuron::GetWeightSize() {
@@ -73,8 +83,9 @@ namespace NeuralCLI {
7383
array<double>^ Neuron::CloneWeights() {
7484
size_t weightLength = this->neuron->GetWeightSize();
7585
array<double>^ cliWeights = gcnew array<double>(weightLength);
76-
for (int w = 0; w < weightLength; w++) {
77-
cliWeights[w] = neuron->GetWeight(w);
86+
for (size_t w = 0; w < weightLength; w++) {
87+
cliWeights[w] = this->neuron->GetWeight(w);
88+
// System::Diagnostics::Debug::WriteLine("Cloned weight {0}: {1}", w, cliWeights[w]);
7889
}
7990
return cliWeights;
8091
}

0 commit comments

Comments
 (0)