Mirror of https://github.com/Llewellynvdm/php-ml.git
* travis: move coveralls here, decouple from package
* composer: use PSR4
* phpunit: simpler config
* travis: add ecs run
* composer: add ecs dev
* use standard vendor/bin directory for dependency bins, confuses with local bins and require gitignore handling
* ecs: add PSR2
* [cs] PSR2 spacing fixes
* [cs] PSR2 class name fix
* [cs] PHP7 fixes - return semicolon spaces, old rand functions, typehints
* [cs] fix less strict typehints
* fix typehints to make tests pass
* ecs: ignore typehint-less elements
* [cs] standardize arrays
* [cs] standardize docblock, remove unused comments
* [cs] use self where possible
* [cs] sort class elements, from public to private
* [cs] do not use yoda (found less yoda-cases, than non-yoda)
* space
* [cs] do not assign in condition
* [cs] use namespace imports if possible
* [cs] use ::class over strings
* [cs] fix defaults for arrays properties, properties and constants single spacing
* cleanup ecs comments
* [cs] use item per line in multi-items array
* missing line
* misc
* rebase
91 lines
2.2 KiB
PHP
<?php

declare(strict_types=1);

namespace Phpml\NeuralNetwork\Training;

use Phpml\NeuralNetwork\Node\Neuron;
use Phpml\NeuralNetwork\Training\Backpropagation\Sigma;

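/**
 * Backpropagation trainer: after a sample has been fed forward through the
 * network, backpropagate() walks the layers from the output back towards the
 * input, computes an error term for each neuron and adjusts the weights of
 * its incoming synapses using the configured learning rate.
 */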
class Backpropagation
{
    /**
     * @var float
     */
    private $learningRate;

    /**
     * @var Sigma[]|null
     */
    private $sigmas = null;

    /**
     * @var Sigma[]|null
     */
    private $prevSigmas = null;

    public function __construct(float $learningRate)
    {
        $this->learningRate = $learningRate;
    }

    /**
     * @param int $targetClass index of the expected output neuron in the last layer
     */
    public function backpropagate(array $layers, $targetClass): void
    {
        $layersNumber = count($layers);

        // Walk the layers from the output layer back to the first hidden layer,
        // computing each neuron's error term (sigma) and nudging its incoming
        // synapse weights accordingly.
        for ($i = $layersNumber; $i > 1; --$i) {
            $this->sigmas = [];
            foreach ($layers[$i - 1]->getNodes() as $key => $neuron) {
                if ($neuron instanceof Neuron) {
                    $sigma = $this->getSigma($neuron, $targetClass, $key, $i === $layersNumber);
                    foreach ($neuron->getSynapses() as $synapse) {
                        // Delta rule: weight change = learning rate * error term * input signal.
                        $synapse->changeWeight($this->learningRate * $sigma * $synapse->getNode()->getOutput());
                    }
                }
            }

            $this->prevSigmas = $this->sigmas;
        }

        // Free the per-run buffers (this also keeps MLP persistence and child classes easier to maintain).
        $this->sigmas = null;
        $this->prevSigmas = null;
    }

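    /**
     * Computes the error term ("sigma", often called delta) for one sigmoid neuron.
     *
     * Last layer:    sigma = output * (1 - output) * (target - output),
     *                where target is 1 for the neuron at the target class index
     *                and 0 otherwise.
     * Hidden layers: sigma = output * (1 - output) * (sum of error terms
     *                propagated back from the following layer, see getPrevSigma()).
     */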
    private function getSigma(Neuron $neuron, int $targetClass, int $key, bool $lastLayer): float
    {
        $neuronOutput = $neuron->getOutput();
        $sigma = $neuronOutput * (1 - $neuronOutput);

        if ($lastLayer) {
            $value = 0;
            if ($targetClass === $key) {
                $value = 1;
            }

            $sigma *= ($value - $neuronOutput);
        } else {
            $sigma *= $this->getPrevSigma($neuron);
        }

        $this->sigmas[] = new Sigma($neuron, $sigma);

        return $sigma;
    }

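    /**
     * Aggregates the error terms recorded for the layer handled in the previous
     * iteration (the layer closer to the output): each stored Sigma reports,
     * via getSigmaForNeuron(), its contribution for the given neuron.
     */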
    private function getPrevSigma(Neuron $neuron): float
    {
        $sigma = 0.0;

        foreach ($this->prevSigmas as $neuronSigma) {
            $sigma += $neuronSigma->getSigmaForNeuron($neuron);
        }

        return $sigma;
    }
}
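
For orientation, a minimal usage sketch follows. This class is not normally instantiated directly; it is driven by the library's multilayer perceptron during training, roughly as below. The MLPClassifier constructor shape shown here (input feature count, hidden layer sizes, class labels) follows the php-ml documentation and is an assumption about this particular commit, so treat it as illustrative rather than the exact API.

<?php

declare(strict_types=1);

use Phpml\Classification\MLPClassifier;

// Assumed constructor shape: (input features, hidden layer sizes, class labels).
$mlp = new MLPClassifier(2, [2], ['a', 'b']);

// train() feeds each sample forward through the network and then invokes
// Backpropagation::backpropagate() with the network's layers and the sample's
// target class, applying the weight updates implemented above.
$mlp->train(
    [[1, 0], [0, 1], [1, 1], [0, 0]],
    ['a', 'b', 'a', 'b']
);

$mlp->predict([1, 1]); // returns one of the configured class labels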