Fix the implementation of conjugate gradient method (#184)
* Add unit tests for optimizers
* Fix ConjugateGradient
* Fix coding style
* Fix namespace

This commit is contained in:
parent e83f7b95d5
commit d953ef6bfc
@@ -31,7 +31,7 @@ class ConjugateGradient extends GD
         for ($i = 0; $i < $this->maxIterations; ++$i) {
             // Obtain α that minimizes f(θ + α.d)
-            $alpha = $this->getAlpha(array_sum($d));
+            $alpha = $this->getAlpha($d);

             // θ(k+1) = θ(k) + α.d
             $thetaNew = $this->getNewTheta($alpha, $d);
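Context for the change above: the line search must receive the whole direction vector, because collapsing it with array_sum($d) loses the direction (components of opposite sign can cancel). Restating the code comments in standard notation:

\[
\alpha_k = \arg\min_{\alpha}\, f(\theta_k + \alpha\, d_k), \qquad
\theta_{k+1} = \theta_k + \alpha_k\, d_k
\]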
@@ -63,7 +63,23 @@ class ConjugateGradient extends GD
      */
     protected function gradient(array $theta): array
     {
-        [, $gradient] = parent::gradient($theta);
+        [, $updates, $penalty] = parent::gradient($theta);
+
+        // Calculate gradient for each dimension
+        $gradient = [];
+        for ($i = 0; $i <= $this->dimensions; ++$i) {
+            if ($i === 0) {
+                $gradient[$i] = array_sum($updates);
+            } else {
+                $col = array_column($this->samples, $i - 1);
+                $error = 0;
+                foreach ($col as $index => $val) {
+                    $error += $val * $updates[$index];
+                }
+
+                $gradient[$i] = $error + $penalty * $theta[$i];
+            }
+        }

         return $gradient;
     }
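This rewritten gradient() is the substance of the fix. The old version reused the per-sample values from parent::gradient() directly, i.e. a vector indexed by sample, where CG needs a vector indexed by parameter dimension. Assuming, as the destructuring suggests, that parent::gradient() yields per-sample errors $updates (u_j) and a regularization coefficient $penalty (λ), the loop computes

\[
\frac{\partial J}{\partial \theta_0} = \sum_j u_j, \qquad
\frac{\partial J}{\partial \theta_i} = \sum_j x_{j,\,i-1}\, u_j + \lambda\, \theta_i \quad (1 \le i \le n),
\]

where x_{j,i-1} is feature i-1 of sample j; the bias dimension i = 0 has no feature column and is left unpenalized.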
@@ -92,14 +108,14 @@ class ConjugateGradient extends GD
      * b-1) If cost function decreases, continue enlarging alpha
      * b-2) If cost function increases, take the midpoint and try again
      */
-    protected function getAlpha(float $d): float
+    protected function getAlpha(array $d): float
     {
-        $small = 0.0001 * $d;
-        $large = 0.01 * $d;
+        $small = MP::muls($d, 0.0001);
+        $large = MP::muls($d, 0.01);

         // Obtain θ + α.d for two initial values, x0 and x1
-        $x0 = MP::adds($this->theta, $small);
-        $x1 = MP::adds($this->theta, $large);
+        $x0 = MP::add($this->theta, $small);
+        $x1 = MP::add($this->theta, $large);

         $epsilon = 0.0001;
         $iteration = 0;
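MP itself is not shown in this diff; the calls read as element-wise vector helpers (muls: vector times scalar, add: vector plus vector, and the replaced adds: vector plus scalar). The old code used adds where a vector-vector add was needed, which shifted every θ component by the same amount instead of moving along d. A minimal sketch of the two operations the new code relies on, under that assumption:

<?php

// Hypothetical sketch of the vector helpers assumed above; php-ml's real
// MP class may differ in signatures and carries more operations.
class MP
{
    // muls: multiply every element of vector $m1 by scalar $m2,
    // e.g. MP::muls($d, 0.01) scales the direction vector by 0.01.
    public static function muls(array $m1, float $m2): array
    {
        $result = [];
        foreach ($m1 as $i => $val) {
            $result[$i] = $val * $m2;
        }

        return $result;
    }

    // add: element-wise sum of two equally sized vectors,
    // e.g. MP::add($theta, $small)[$i] == $theta[$i] + $small[$i].
    public static function add(array $m1, array $m2): array
    {
        $result = [];
        foreach ($m1 as $i => $val) {
            $result[$i] = $val + $m2[$i];
        }

        return $result;
    }
}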
@@ -123,12 +139,20 @@ class ConjugateGradient extends GD
             $error = $fx1 / $this->dimensions;
         } while ($error <= $epsilon || $iteration++ < 10);

-        // Return α = θ / d
-        if ($d == 0) {
-            return $x1[0] - $this->theta[0];
+        // Return α = θ / d
+        // For accuracy, choose a dimension which maximize |d[i]|
+        $imax = 0;
+        for ($i = 1; $i <= $this->dimensions; ++$i) {
+            if (abs($d[$i]) > abs($d[$imax])) {
+                $imax = $i;
+            }
+        }
+
+        if ($d[$imax] == 0) {
+            return $x1[$imax] - $this->theta[$imax];
         }

-        return ($x1[0] - $this->theta[0]) / $d;
+        return ($x1[$imax] - $this->theta[$imax]) / $d[$imax];
     }

     /**
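Recovering α from the bisection result: the loop converges on x1 ≈ θ + α·d, so α can be read off any coordinate with a nonzero direction component, and choosing the i that maximizes |d[i]| keeps the division well conditioned:

\[
x_1 = \theta + \alpha\, d \;\Longrightarrow\; \alpha = \frac{x_1[i] - \theta[i]}{d[i]}
\quad \text{for any } i \text{ with } d[i] \neq 0.
\]

The old scalar version could only use dimension 0, which fails or loses precision whenever that component is zero or tiny.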
@@ -139,22 +163,7 @@ class ConjugateGradient extends GD
      */
     protected function getNewTheta(float $alpha, array $d): array
     {
-        $theta = $this->theta;
-
-        for ($i = 0; $i < $this->dimensions + 1; ++$i) {
-            if ($i === 0) {
-                $theta[$i] += $alpha * array_sum($d);
-            } else {
-                $sum = 0.0;
-                foreach ($this->samples as $si => $sample) {
-                    $sum += $sample[$i - 1] * $d[$si] * $alpha;
-                }
-
-                $theta[$i] += $sum;
-            }
-        }
-
-        return $theta;
+        return MP::add($this->theta, MP::muls($d, $alpha));
     }

     /**
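The old getNewTheta() indexed the direction by sample ($d[$si]) and folded the training data into the parameter step, which is not the CG update rule; after the fix it is the plain vector step θ(k+1) = θ(k) + α·d from the comment in the first hunk. Expanded with made-up numbers purely for illustration:

<?php

// Illustrative values only; in the optimizer these come from the
// line search ($alpha) and from the gradient/beta machinery ($d).
$alpha = 0.05;
$theta = [0.0, 1.0, -2.0];
$d     = [0.5, -1.0, 2.0];

// Equivalent expansion of MP::add($theta, MP::muls($d, $alpha)):
$thetaNew = [];
foreach ($theta as $i => $t) {
    $thetaNew[$i] = $t + $alpha * $d[$i]; // θ(k+1) = θ(k) + α.d, per dimension
}

var_dump($thetaNew); // [0.025, 0.95, -1.9]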
@@ -168,10 +177,16 @@ class ConjugateGradient extends GD
      */
     protected function getBeta(array $newTheta): float
     {
-        $dNew = array_sum($this->gradient($newTheta));
-        $dOld = array_sum($this->gradient($this->theta)) + 1e-100;
+        $gNew = $this->gradient($newTheta);
+        $gOld = $this->gradient($this->theta);
+        $dNew = 0;
+        $dOld = 1e-100;
+        for ($i = 0; $i <= $this->dimensions; ++$i) {
+            $dNew += $gNew[$i] ** 2;
+            $dOld += $gOld[$i] ** 2;
+        }

-        return $dNew ** 2 / $dOld ** 2;
+        return $dNew / $dOld;
     }

     /**
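getBeta() now computes the Fletcher-Reeves coefficient from genuine squared gradient norms: components are squared before summing, instead of summing signed components (which can cancel) and squaring the sums. That is,

\[
\beta_k \;=\; \frac{\lVert \nabla f(\theta_{k+1}) \rVert^{2}}{\lVert \nabla f(\theta_k) \rVert^{2}}
\;=\; \frac{\sum_i \left(g^{\mathrm{new}}_i\right)^{2}}{\sum_i \left(g^{\mathrm{old}}_i\right)^{2}},
\]

with the 1e-100 seed in $dOld guarding against division by zero.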
tests/Phpml/Helper/Optimizer/ConjugateGradientTest.php (new file, 65 lines)
@@ -0,0 +1,65 @@
<?php

declare(strict_types=1);

namespace Phpml\Tests\Helper\Optimizer;

use Phpml\Helper\Optimizer\ConjugateGradient;
use PHPUnit\Framework\TestCase;

class ConjugateGradientTest extends TestCase
{
    public function testRunOptimization(): void
    {
        // 200 samples from y = -1 + 2x (i.e. theta = [-1, 2])
        $samples = [];
        $targets = [];
        for ($i = -100; $i <= 100; ++$i) {
            $x = $i / 100;
            $samples[] = [$x];
            $targets[] = -1 + 2 * $x;
        }

        $callback = function ($theta, $sample, $target) {
            $y = $theta[0] + $theta[1] * $sample[0];
            $cost = ($y - $target) ** 2 / 2;
            $grad = $y - $target;

            return [$cost, $grad];
        };

        $optimizer = new ConjugateGradient(1);

        $theta = $optimizer->runOptimization($samples, $targets, $callback);

        $this->assertEquals([-1, 2], $theta, '', 0.1);
    }

    public function testRunOptimization2Dim(): void
    {
        // 100 samples from y = -1 + 2x0 - 3x1 (i.e. theta = [-1, 2, -3])
        $samples = [];
        $targets = [];
        for ($i = 0; $i < 100; ++$i) {
            $x0 = intval($i / 10) / 10;
            $x1 = ($i % 10) / 10;
            $samples[] = [$x0, $x1];
            $targets[] = -1 + 2 * $x0 - 3 * $x1;
        }

        $callback = function ($theta, $sample, $target) {
            $y = $theta[0] + $theta[1] * $sample[0] + $theta[2] * $sample[1];
            $cost = ($y - $target) ** 2 / 2;
            $grad = $y - $target;

            return [$cost, $grad];
        };

        $optimizer = new ConjugateGradient(2);
        $optimizer->setChangeThreshold(1e-6);

        $theta = $optimizer->runOptimization($samples, $targets, $callback);

        $this->assertEquals([-1, 2, -3], $theta, '', 0.1);
    }
}
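All three new tests exercise the same contract: runOptimization($samples, $targets, $callback) evaluates the callback once per sample, and the callback returns a [cost, gradient] pair for that sample; for the squared-error cost (y − t)²/2 the per-sample gradient with respect to the prediction is y − t. A minimal standalone sketch of the same fit outside PHPUnit, assuming php-ml autoloading (the require path is hypothetical):

<?php

require 'vendor/autoload.php'; // hypothetical autoload path

use Phpml\Helper\Optimizer\ConjugateGradient;

// Same data as testRunOptimization: samples of y = -1 + 2x on [-1, 1]
$samples = [];
$targets = [];
for ($i = -100; $i <= 100; ++$i) {
    $samples[] = [$i / 100];
    $targets[] = -1 + 2 * ($i / 100);
}

// Per-sample squared-error cost and its derivative w.r.t. the prediction
$callback = function ($theta, $sample, $target) {
    $y = $theta[0] + $theta[1] * $sample[0];

    return [($y - $target) ** 2 / 2, $y - $target];
};

$theta = (new ConjugateGradient(1))->runOptimization($samples, $targets, $callback);
// $theta should land close to [-1, 2]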
tests/Phpml/Helper/Optimizer/GDTest.php (new file, 65 lines)
@@ -0,0 +1,65 @@
<?php

declare(strict_types=1);

namespace Phpml\Tests\Helper\Optimizer;

use Phpml\Helper\Optimizer\GD;
use PHPUnit\Framework\TestCase;

class GDTest extends TestCase
{
    public function testRunOptimization(): void
    {
        // 200 samples from y = -1 + 2x (i.e. theta = [-1, 2])
        $samples = [];
        $targets = [];
        for ($i = -100; $i <= 100; ++$i) {
            $x = $i / 100;
            $samples[] = [$x];
            $targets[] = -1 + 2 * $x;
        }

        $callback = function ($theta, $sample, $target) {
            $y = $theta[0] + $theta[1] * $sample[0];
            $cost = ($y - $target) ** 2 / 2;
            $grad = $y - $target;

            return [$cost, $grad];
        };

        $optimizer = new GD(1);

        $theta = $optimizer->runOptimization($samples, $targets, $callback);

        $this->assertEquals([-1, 2], $theta, '', 0.1);
    }

    public function testRunOptimization2Dim(): void
    {
        // 100 samples from y = -1 + 2x0 - 3x1 (i.e. theta = [-1, 2, -3])
        $samples = [];
        $targets = [];
        for ($i = 0; $i < 100; ++$i) {
            $x0 = intval($i / 10) / 10;
            $x1 = ($i % 10) / 10;
            $samples[] = [$x0, $x1];
            $targets[] = -1 + 2 * $x0 - 3 * $x1;
        }

        $callback = function ($theta, $sample, $target) {
            $y = $theta[0] + $theta[1] * $sample[0] + $theta[2] * $sample[1];
            $cost = ($y - $target) ** 2 / 2;
            $grad = $y - $target;

            return [$cost, $grad];
        };

        $optimizer = new GD(2);
        $optimizer->setChangeThreshold(1e-6);

        $theta = $optimizer->runOptimization($samples, $targets, $callback);

        $this->assertEquals([-1, 2, -3], $theta, '', 0.1);
    }
}
tests/Phpml/Helper/Optimizer/StochasticGDTest.php (new file, 65 lines)
@@ -0,0 +1,65 @@
<?php

declare(strict_types=1);

namespace Phpml\Tests\Helper\Optimizer;

use Phpml\Helper\Optimizer\StochasticGD;
use PHPUnit\Framework\TestCase;

class StochasticGDTest extends TestCase
{
    public function testRunOptimization(): void
    {
        // 200 samples from y = -1 + 2x (i.e. theta = [-1, 2])
        $samples = [];
        $targets = [];
        for ($i = -100; $i <= 100; ++$i) {
            $x = $i / 100;
            $samples[] = [$x];
            $targets[] = -1 + 2 * $x;
        }

        $callback = function ($theta, $sample, $target) {
            $y = $theta[0] + $theta[1] * $sample[0];
            $cost = ($y - $target) ** 2 / 2;
            $grad = $y - $target;

            return [$cost, $grad];
        };

        $optimizer = new StochasticGD(1);

        $theta = $optimizer->runOptimization($samples, $targets, $callback);

        $this->assertEquals([-1, 2], $theta, '', 0.1);
    }

    public function testRunOptimization2Dim(): void
    {
        // 100 samples from y = -1 + 2x0 - 3x1 (i.e. theta = [-1, 2, -3])
        $samples = [];
        $targets = [];
        for ($i = 0; $i < 100; ++$i) {
            $x0 = intval($i / 10) / 10;
            $x1 = ($i % 10) / 10;
            $samples[] = [$x0, $x1];
            $targets[] = -1 + 2 * $x0 - 3 * $x1;
        }

        $callback = function ($theta, $sample, $target) {
            $y = $theta[0] + $theta[1] * $sample[0] + $theta[2] * $sample[1];
            $cost = ($y - $target) ** 2 / 2;
            $grad = $y - $target;

            return [$cost, $grad];
        };

        $optimizer = new StochasticGD(2);
        $optimizer->setChangeThreshold(1e-6);

        $theta = $optimizer->runOptimization($samples, $targets, $callback);

        $this->assertEquals([-1, 2, -3], $theta, '', 0.1);
    }
}