<?php

declare(strict_types=1);

namespace Phpml\Tests\Helper\Optimizer;

use Phpml\Exception\InvalidArgumentException;
use Phpml\Helper\Optimizer\ConjugateGradient;
use PHPUnit\Framework\TestCase;

class ConjugateGradientTest extends TestCase
{
    public function testRunOptimization(): void
    {
        // 201 samples from y = -1 + 2x (i.e. theta = [-1, 2])
        $samples = [];
        $targets = [];
        for ($i = -100; $i <= 100; ++$i) {
            $x = $i / 100;
            $samples[] = [$x];
            $targets[] = -1 + 2 * $x;
        }
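
        // The callback scores one sample: cost is the halved squared error of
        // the prediction $y, and grad is its derivative with respect to $y.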
        $callback = function ($theta, $sample, $target) {
            $y = $theta[0] + $theta[1] * $sample[0];
            $cost = (($y - $target) ** 2) / 2;
            $grad = $y - $target;

            return [$cost, $grad];
        };
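
        // ConjugateGradient(1): one input feature; the fitted theta also
        // carries the intercept term, so the result below has two entries.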
        $optimizer = new ConjugateGradient(1);

        $theta = $optimizer->runOptimization($samples, $targets, $callback);

        self::assertEqualsWithDelta([-1, 2], $theta, 0.1);
    }

    public function testRunOptimizationWithCustomInitialTheta(): void
    {
        // 201 samples from y = -1 + 2x (i.e. theta = [-1, 2])
        $samples = [];
        $targets = [];
        for ($i = -100; $i <= 100; ++$i) {
            $x = $i / 100;
            $samples[] = [$x];
            $targets[] = -1 + 2 * $x;
        }

        $callback = function ($theta, $sample, $target) {
            $y = $theta[0] + $theta[1] * $sample[0];
            $cost = (($y - $target) ** 2) / 2;
            $grad = $y - $target;

            return [$cost, $grad];
        };

        $optimizer = new ConjugateGradient(1);

        // Start from a deliberately weak, near-zero initial theta so the
        // optimizer settles on a measurably worse solution than the default.
        $optimizer->setTheta([0.0000001, 0.0000001]);

        $theta = $optimizer->runOptimization($samples, $targets, $callback);

        self::assertEqualsWithDelta([-1.087708, 2.212034], $theta, 0.000001);
    }

    public function testRunOptimization2Dim(): void
    {
        // 100 samples from y = -1 + 2x0 - 3x1 (i.e. theta = [-1, 2, -3])
        $samples = [];
        $targets = [];
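        // Lay the samples out on a 10x10 grid: x0 steps through 0.0-0.9 once
        // per block of ten samples, while x1 cycles 0.0-0.9 within each block.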
        for ($i = 0; $i < 100; ++$i) {
            $x0 = intval($i / 10) / 10;
            $x1 = ($i % 10) / 10;
            $samples[] = [$x0, $x1];
            $targets[] = -1 + 2 * $x0 - 3 * $x1;
        }

        $callback = function ($theta, $sample, $target) {
            $y = $theta[0] + $theta[1] * $sample[0] + $theta[2] * $sample[1];
            $cost = (($y - $target) ** 2) / 2;
            $grad = $y - $target;

            return [$cost, $grad];
        };

        $optimizer = new ConjugateGradient(2);
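        // Tighten the optimizer's convergence threshold so the two-feature fit
        // is accurate enough for the 0.1 tolerance asserted below.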
        $optimizer->setChangeThreshold(1e-6);

        $theta = $optimizer->runOptimization($samples, $targets, $callback);

        self::assertEqualsWithDelta([-1, 2, -3], $theta, 0.1);
    }

    public function testThrowExceptionOnInvalidTheta(): void
    {
        $optimizer = new ConjugateGradient(2);

        $this->expectException(InvalidArgumentException::class);
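        // A two-feature optimizer expects three theta entries (two weights
        // plus the intercept), so this single-element theta must be rejected.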
        $optimizer->setTheta([0.15]);
    }
}