Continuous.php · 152 lines (132 loc) · 3.37 KB
<?php

namespace Rubix\ML\NeuralNet\Layers;

use Tensor\Matrix;
use Rubix\ML\Deferred;
use Rubix\ML\NeuralNet\Optimizers\Optimizer;
use Rubix\ML\NeuralNet\CostFunctions\LeastSquares;
use Rubix\ML\NeuralNet\CostFunctions\RegressionLoss;
use Rubix\ML\Exceptions\InvalidArgumentException;
use Rubix\ML\Exceptions\RuntimeException;

/**
 * Continuous
 *
 * The Continuous output layer consists of a single linear neuron that outputs a scalar value.
 *
 * @internal
 *
 * @category Machine Learning
 * @package Rubix/ML
 * @author Andrew DalPino
 */
class Continuous implements Output
{
    /**
     * The function that computes the loss of erroneous activations.
     *
     * @var RegressionLoss
     */
    protected RegressionLoss $costFn;

    /**
     * The memorized input matrix.
     *
     * @var Matrix|null
     */
    protected ?Matrix $input = null;

    /**
     * @param RegressionLoss|null $costFn
     */
    public function __construct(?RegressionLoss $costFn = null)
    {
        $this->costFn = $costFn ?? new LeastSquares();
    }
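
    // Any RegressionLoss implementation can be supplied at construction
    // time; for example, Rubix ML also ships a HuberLoss cost function
    // that is less sensitive to outliers than the default LeastSquares.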

    /**
     * Return the width of the layer.
     *
     * @return positive-int
     */
    public function width() : int
    {
        return 1;
    }

    /**
     * Initialize the layer with the fan in from the previous layer and return
     * the fan out for this layer.
     *
     * @param positive-int $fanIn
     * @throws InvalidArgumentException
     * @return positive-int
     */
    public function initialize(int $fanIn) : int
    {
        if ($fanIn !== 1) {
            throw new InvalidArgumentException('Fan in must be'
                . " equal to 1, $fanIn given.");
        }

        return 1;
    }

    /**
     * Compute a forward pass through the layer.
     *
     * @param Matrix $input
     * @return Matrix
     */
    public function forward(Matrix $input) : Matrix
    {
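        // The activation is the identity function, so the input passes
        // through unchanged; it is memorized here for the backward pass.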
        $this->input = $input;

        return $input;
    }

    /**
     * Compute an inferential pass through the layer.
     *
     * @param Matrix $input
     * @return Matrix
     */
    public function infer(Matrix $input) : Matrix
    {
        return $input;
    }

    /**
     * Compute the gradient and loss at the output.
     *
     * @param (int|float)[] $labels
     * @param Optimizer $optimizer
     * @throws RuntimeException
     * @return (Deferred|float)[]
     */
    public function back(array $labels, Optimizer $optimizer) : array
    {
        if (!$this->input) {
            throw new RuntimeException('Must perform forward pass'
                . ' before backpropagating.');
        }

        $expected = Matrix::quick([$labels]);

        $input = $this->input;
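
        // Wrapping the gradient in a Deferred postpones the computation
        // until a preceding layer actually invokes it during backpropagation.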
        $gradient = new Deferred([$this, 'gradient'], [$input, $expected]);

        $loss = $this->costFn->compute($input, $expected);

        $this->input = null;

        return [$gradient, $loss];
    }

    /**
     * Calculate the gradient for the previous layer.
     *
     * @param Matrix $input
     * @param Matrix $expected
     * @return Matrix
     */
    public function gradient(Matrix $input, Matrix $expected) : Matrix
    {
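        // Chain rule at the output: differentiate the cost function with
        // respect to the activations, then average over the batch (n() is
        // the number of columns, i.e. samples, of the 1 x n input matrix).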
        return $this->costFn->differentiate($input, $expected)
            ->divide($input->n());
    }

    /**
     * Return the string representation of the object.
     *
     * @internal
     *
     * @return string
     */
    public function __toString() : string
    {
        return "Continuous (cost function: {$this->costFn})";
    }
}
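
// --- Illustrative usage sketch (not part of the library source) ---
// A minimal forward/backward round trip, assuming the rubix/ml and
// rubix/tensor packages are autoloaded. The Stochastic optimizer and the
// sample values below are arbitrary choices for demonstration only.
//
//     $layer = new Continuous();
//     $layer->initialize(1);
//
//     // 1 x n activation matrix: one row for the single output neuron,
//     // one column per sample in the batch.
//     $output = $layer->forward(Matrix::quick([[2.5, 0.0, 1.8]]));
//
//     [$gradient, $loss] = $layer->back(
//         [2.0, 0.5, 2.0],
//         new \Rubix\ML\NeuralNet\Optimizers\Stochastic()
//     );
//
//     echo $loss;                 // mean LeastSquares loss over the batch
//     $dA = $gradient->compute(); // evaluate the deferred gradient matrix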