Mirror of https://github.com/Llewellynvdm/php-ml.git (synced 2024-11-25 06:17:34 +00:00)
refactor Backpropagation methods and simplify things

parent 66d029e94f
commit c506a84164
@@ -21,6 +21,11 @@ class Backpropagation implements Training
      */
     private $theta;
 
+    /**
+     * @var array
+     */
+    private $sigmas;
+
     /**
      * @param Network $network
      * @param int $theta
@@ -71,20 +76,22 @@ class Backpropagation implements Training
         return $resultsWithinError;
     }
 
+    /**
+     * @param array $sample
+     * @param array $target
+     */
     private function trainSample(array $sample, array $target)
     {
         $this->network->setInput($sample)->getOutput();
+        $this->sigmas = [];
 
-        $sigmas = [];
         $layers = $this->network->getLayers();
         $layersNumber = count($layers);
 
         for ($i = $layersNumber; $i > 1; --$i) {
             foreach ($layers[$i - 1]->getNodes() as $key => $neuron) {
                 if ($neuron instanceof Neuron) {
-                    $neuronOutput = $neuron->getOutput();
-                    $sigma = $neuronOutput * (1 - $neuronOutput) * ($i == $layersNumber ? ($target[$key] - $neuronOutput) : $this->getPrevSigma($sigmas, $neuron));
-                    $sigmas[] = new Sigma($neuron, $sigma);
+                    $sigma = $this->getSigma($neuron, $target, $key, $i == $layersNumber);
                     foreach ($neuron->getSynapses() as $synapse) {
                         $synapse->changeWeight($this->theta * $sigma * $synapse->getNode()->getOutput());
                     }
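Note on the update inside the synapse loop above: changeWeight() is fed theta * sigma * (output of the node feeding the synapse), i.e. the usual delta-rule step delta_w = theta * sigma * o_input. A minimal standalone sketch with made-up numbers (plain variables only, no php-ml classes; changeWeight() is assumed to add the passed delta to the stored weight):

    $theta       = 1;     // learning rate, as passed to the Backpropagation constructor
    $sigma       = 0.12;  // error term of the current neuron (example value)
    $inputOutput = 0.73;  // output of the node on the other end of the synapse (example value)

    $deltaWeight = $theta * $sigma * $inputOutput;  // 0.0876
    $weight      = 0.5;                             // current synapse weight (example value)
    $weight     += $deltaWeight;                    // 0.5876 -- the effect changeWeight() is assumed to have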
@@ -94,21 +101,40 @@ class Backpropagation implements Training
     }
 
     /**
-     * @param Sigma[] $sigmas
-     * @param Neuron $forNeuron
+     * @param Neuron $neuron
+     * @param array $target
+     * @param int $key
+     * @param bool $lastLayer
+     *
+     * @return float
+     */
+    private function getSigma(Neuron $neuron, array $target, int $key, bool $lastLayer): float
+    {
+        $neuronOutput = $neuron->getOutput();
+        $sigma = $neuronOutput * (1 - $neuronOutput);
+
+        if ($lastLayer) {
+            $sigma *= ($target[$key] - $neuronOutput);
+        } else {
+            $sigma *= $this->getPrevSigma($neuron);
+        }
+
+        $this->sigmas[] = new Sigma($neuron, $sigma);
+
+        return $sigma;
+    }
+
+    /**
+    * @param Neuron $neuron
     *
     * @return float
     */
-    private function getPrevSigma(array $sigmas, Neuron $forNeuron): float
+    private function getPrevSigma(Neuron $neuron): float
    {
        $sigma = 0.0;

-        foreach ($sigmas as $neuronSigma) {
-            foreach ($neuronSigma->getNeuron()->getSynapses() as $synapse) {
-                if ($synapse->getNode() == $forNeuron) {
-                    $sigma += $synapse->getWeight() * $neuronSigma->getSigma();
-                }
-            }
+        foreach ($this->sigmas as $neuronSigma) {
+            $sigma += $neuronSigma->getSigmaForNeuron($neuron);
        }

        return $sigma;
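The extracted getSigma() above computes the standard backpropagation error term for a sigmoid unit: sigma = o * (1 - o) * (t - o) on the last layer, and sigma = o * (1 - o) * sum(weight * downstream sigma) over outgoing synapses otherwise, with getPrevSigma() delegating that sum to Sigma::getSigmaForNeuron(). A small standalone sketch with made-up numbers (no php-ml classes involved):

    // Output-layer neuron: sigma = o * (1 - o) * (t - o)
    $o = 0.8;                                   // neuron output (example value)
    $t = 1.0;                                   // target value (example value)
    $sigmaOutput = $o * (1 - $o) * ($t - $o);   // 0.8 * 0.2 * 0.2 = 0.032

    // Hidden-layer neuron: sigma = o * (1 - o) * sum(weight * downstream sigma)
    $oHidden  = 0.6;                                 // hidden neuron output (example value)
    $outgoing = [[0.4, $sigmaOutput], [0.7, 0.05]];  // [weight, downstream sigma] pairs (example values)

    $sum = 0.0;
    foreach ($outgoing as $pair) {
        $sum += $pair[0] * $pair[1];                 // the per-synapse contribution getSigmaForNeuron() returns
    }
    $sigmaHidden = $oHidden * (1 - $oHidden) * $sum; // 0.6 * 0.4 * 0.0478 = 0.011472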
@@ -43,4 +43,22 @@ class Sigma
     {
         return $this->sigma;
     }
+
+    /**
+     * @param Neuron $neuron
+     *
+     * @return float
+     */
+    public function getSigmaForNeuron(Neuron $neuron): float
+    {
+        $sigma = 0.0;
+
+        foreach ($this->neuron->getSynapses() as $synapse) {
+            if ($synapse->getNode() == $neuron) {
+                $sigma += $synapse->getWeight() * $this->getSigma();
+            }
+        }
+
+        return $sigma;
+    }
 }
@@ -18,7 +18,7 @@ class BackpropagationTest extends \PHPUnit_Framework_TestCase
             [[1, 0], [0, 1], [1, 1], [0, 0]],
             [[1], [1], [0], [0]],
             $desiredError = 0.2,
-            10000
+            30000
         );
 
         $this->assertEquals(0, $network->setInput([1, 1])->getOutput()[0], '', $desiredError);