diff --git a/docs/machine-learning/neural-network/multilayer-perceptron-classifier.md b/docs/machine-learning/neural-network/multilayer-perceptron-classifier.md
index d2f746d..a6b060a 100644
--- a/docs/machine-learning/neural-network/multilayer-perceptron-classifier.md
+++ b/docs/machine-learning/neural-network/multilayer-perceptron-classifier.md
@@ -8,7 +8,7 @@ A multilayer perceptron (MLP) is a feedforward artificial neural network model t
 * $hiddenLayers (array) - array with the hidden layers configuration, each value represent number of neurons in each layers
 * $classes (array) - array with the different training set classes (array keys are ignored)
 * $iterations (int) - number of training iterations
-* $theta (int) - network theta parameter
+* $learningRate (float) - the learning rate
 * $activationFunction (ActivationFunction) - neuron activation function
 ```
 
diff --git a/src/Phpml/NeuralNetwork/Network/MultilayerPerceptron.php b/src/Phpml/NeuralNetwork/Network/MultilayerPerceptron.php
index 9ef3f73..94a8423 100644
--- a/src/Phpml/NeuralNetwork/Network/MultilayerPerceptron.php
+++ b/src/Phpml/NeuralNetwork/Network/MultilayerPerceptron.php
@@ -46,9 +46,9 @@ abstract class MultilayerPerceptron extends LayeredNetwork implements Estimator,
     protected $activationFunction;
 
     /**
-     * @var int
+     * @var float
      */
-    private $theta;
+    private $learningRate;
 
     /**
      * @var Backpropagation
@@ -58,7 +58,7 @@ abstract class MultilayerPerceptron extends LayeredNetwork implements Estimator,
     /**
      * @throws InvalidArgumentException
      */
-    public function __construct(int $inputLayerFeatures, array $hiddenLayers, array $classes, int $iterations = 10000, ?ActivationFunction $activationFunction = null, int $theta = 1)
+    public function __construct(int $inputLayerFeatures, array $hiddenLayers, array $classes, int $iterations = 10000, ?ActivationFunction $activationFunction = null, float $learningRate = 1)
     {
         if (empty($hiddenLayers)) {
             throw InvalidArgumentException::invalidLayersNumber();
@@ -73,7 +73,7 @@ abstract class MultilayerPerceptron extends LayeredNetwork implements Estimator,
         $this->inputLayerFeatures = $inputLayerFeatures;
         $this->hiddenLayers = $hiddenLayers;
         $this->activationFunction = $activationFunction;
-        $this->theta = $theta;
+        $this->learningRate = $learningRate;
 
         $this->initNetwork();
     }
@@ -87,7 +87,7 @@ abstract class MultilayerPerceptron extends LayeredNetwork implements Estimator,
         $this->addBiasNodes();
         $this->generateSynapses();
 
-        $this->backpropagation = new Backpropagation($this->theta);
+        $this->backpropagation = new Backpropagation($this->learningRate);
     }
 
     public function train(array $samples, array $targets): void
diff --git a/src/Phpml/NeuralNetwork/Training/Backpropagation.php b/src/Phpml/NeuralNetwork/Training/Backpropagation.php
index 98683ab..6722bd1 100644
--- a/src/Phpml/NeuralNetwork/Training/Backpropagation.php
+++ b/src/Phpml/NeuralNetwork/Training/Backpropagation.php
@@ -10,9 +10,9 @@ use Phpml\NeuralNetwork\Training\Backpropagation\Sigma;
 class Backpropagation
 {
     /**
-     * @var int
+     * @var float
      */
-    private $theta;
+    private $learningRate;
 
     /**
      * @var array
@@ -24,9 +24,9 @@ class Backpropagation
      */
     private $prevSigmas = null;
 
-    public function __construct(int $theta)
+    public function __construct(float $learningRate)
     {
-        $this->theta = $theta;
+        $this->learningRate = $learningRate;
     }
 
     /**
@@ -43,7 +43,7 @@ class Backpropagation
             if ($neuron instanceof Neuron) {
                 $sigma = $this->getSigma($neuron, $targetClass, $key, $i == $layersNumber);
                 foreach ($neuron->getSynapses() as $synapse) {
-                    $synapse->changeWeight($this->theta * $sigma * $synapse->getNode()->getOutput());
+                    $synapse->changeWeight($this->learningRate * $sigma * $synapse->getNode()->getOutput());
                 }
             }
         }