Change from theta to learning rate var name in NN (#159)
parent 333598b472
commit b1d40bfa30
@@ -8,7 +8,7 @@ A multilayer perceptron (MLP) is a feedforward artificial neural network model t
 * $hiddenLayers (array) - array with the hidden layers configuration, each value represent number of neurons in each layers
 * $classes (array) - array with the different training set classes (array keys are ignored)
 * $iterations (int) - number of training iterations
-* $theta (int) - network theta parameter
+* $learningRate (float) - the learning rate
 * $activationFunction (ActivationFunction) - neuron activation function
 
 ```
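For readers following the docs change: below is a minimal usage sketch of the renamed parameter. It assumes the concrete Phpml\Classification\MLPClassifier subclass exposes the same constructor signature as MultilayerPerceptron and that a sigmoid activation class lives at Phpml\NeuralNetwork\ActivationFunction\Sigmoid; both are assumptions, not part of this diff.

```php
<?php

use Phpml\Classification\MLPClassifier;
use Phpml\NeuralNetwork\ActivationFunction\Sigmoid;

// 4 input features, one hidden layer of 2 neurons, two target classes,
// 10000 training iterations, an explicit sigmoid activation, and the
// renamed last argument: $learningRate (float) instead of $theta (int).
$mlp = new MLPClassifier(4, [2], ['a', 'b'], 10000, new Sigmoid(), 0.5);
```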
@@ -46,9 +46,9 @@ abstract class MultilayerPerceptron extends LayeredNetwork implements Estimator,
     protected $activationFunction;
 
     /**
-     * @var int
+     * @var float
      */
-    private $theta;
+    private $learningRate;
 
     /**
      * @var Backpropagation
@@ -58,7 +58,7 @@ abstract class MultilayerPerceptron extends LayeredNetwork implements Estimator,
     /**
      * @throws InvalidArgumentException
      */
-    public function __construct(int $inputLayerFeatures, array $hiddenLayers, array $classes, int $iterations = 10000, ?ActivationFunction $activationFunction = null, int $theta = 1)
+    public function __construct(int $inputLayerFeatures, array $hiddenLayers, array $classes, int $iterations = 10000, ?ActivationFunction $activationFunction = null, float $learningRate = 1)
     {
         if (empty($hiddenLayers)) {
             throw InvalidArgumentException::invalidLayersNumber();
@@ -73,7 +73,7 @@ abstract class MultilayerPerceptron extends LayeredNetwork implements Estimator,
         $this->inputLayerFeatures = $inputLayerFeatures;
         $this->hiddenLayers = $hiddenLayers;
         $this->activationFunction = $activationFunction;
-        $this->theta = $theta;
+        $this->learningRate = $learningRate;
 
         $this->initNetwork();
     }
@@ -87,7 +87,7 @@ abstract class MultilayerPerceptron extends LayeredNetwork implements Estimator,
         $this->addBiasNodes();
         $this->generateSynapses();
 
-        $this->backpropagation = new Backpropagation($this->theta);
+        $this->backpropagation = new Backpropagation($this->learningRate);
     }
 
     public function train(array $samples, array $targets): void
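One practical note on the constructor change above: widening the last parameter from int $theta = 1 to float $learningRate = 1 does not break existing callers that pass an integer, because PHP permits int-to-float widening even under strict_types. A self-contained illustration (the function name here is made up for the example):

```php
<?php

declare(strict_types=1);

// Stand-in with the same float type hint as the new $learningRate parameter.
function acceptLearningRate(float $learningRate): float
{
    return $learningRate;
}

var_dump(acceptLearningRate(1));   // float(1)   - old-style int argument still accepted
var_dump(acceptLearningRate(0.3)); // float(0.3) - fractional rates now read naturally
```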
@@ -10,9 +10,9 @@ use Phpml\NeuralNetwork\Training\Backpropagation\Sigma;
 class Backpropagation
 {
     /**
-     * @var int
+     * @var float
      */
-    private $theta;
+    private $learningRate;
 
     /**
      * @var array
@@ -24,9 +24,9 @@ class Backpropagation
      */
     private $prevSigmas = null;
 
-    public function __construct(int $theta)
+    public function __construct(float $learningRate)
     {
-        $this->theta = $theta;
+        $this->learningRate = $learningRate;
     }
 
     /**
@@ -43,7 +43,7 @@ class Backpropagation
             if ($neuron instanceof Neuron) {
                 $sigma = $this->getSigma($neuron, $targetClass, $key, $i == $layersNumber);
                 foreach ($neuron->getSynapses() as $synapse) {
-                    $synapse->changeWeight($this->theta * $sigma * $synapse->getNode()->getOutput());
+                    $synapse->changeWeight($this->learningRate * $sigma * $synapse->getNode()->getOutput());
                 }
             }
         }
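In the hunk above the renamed value plays the usual gradient-descent role: each synapse weight is adjusted by learningRate * sigma * (output of the node feeding the synapse). Below is a standalone sketch of that arithmetic, assuming Synapse::changeWeight() accumulates the passed delta into the current weight, which the surrounding code suggests but this diff does not show.

```php
<?php

// Illustrative values only - not taken from the library.
$learningRate = 0.1;   // step size, formerly called $theta
$sigma        = -0.25; // backpropagated error term for the neuron
$nodeOutput   = 0.8;   // output of the node on the other end of the synapse
$weight       = 0.5;   // current synapse weight

// Equivalent of $synapse->changeWeight($learningRate * $sigma * $nodeOutput);
$weight += $learningRate * $sigma * $nodeOutput;

echo $weight, PHP_EOL; // 0.48
```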