php-ml/tests/Phpml/NeuralNetwork/ActivationFunction/SigmoidTest.php
David Monllaó e83f7b95d5 Fix activation functions support (#163)
- Backpropagation now uses each neuron's activation function derivative
  instead of a hardcoded sigmoid derivative (see the Sigmoid sketch below)
- Added missing activation function derivatives
- Sigmoid forced for the output layer
- Updated ThresholdedReLU default threshold to 0 (acts as a ReLU)
- Unit tests for derivatives
- Unit tests for classifiers using different activation functions
- Added missing docs
2018-01-09 11:09:59 +01:00
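
For context, the class under test computes the logistic function 1 / (1 + e^(-beta * x)). Below is a minimal sketch of `Phpml\NeuralNetwork\ActivationFunction\Sigmoid`, consistent with the values the tests assert; the interface name, constructor signature, and default beta are assumptions inferred from the test usage, not copied from this commit:

<?php

namespace Phpml\NeuralNetwork\ActivationFunction;

use Phpml\NeuralNetwork\ActivationFunction;

class Sigmoid implements ActivationFunction
{
    /** @var float */
    private $beta;

    public function __construct(float $beta = 1.0)
    {
        $this->beta = $beta;
    }

    public function compute($value): float
    {
        // Logistic function: 1 / (1 + e^(-beta * x)).
        return 1 / (1 + exp(-$this->beta * $value));
    }

    public function differentiate($value, $computedvalue): float
    {
        // Expressed via the precomputed activation: sigma * (1 - sigma).
        // The derivative provider below expects 0.25 at value 0 for both
        // beta = 1.0 and beta = 2.0, so no beta factor appears here.
        return $computedvalue * (1 - $computedvalue);
    }
}

Per the commit message above, backpropagation would call this during the backward pass, e.g. something like $delta = $error * $sigmoid->differentiate($net, $output); (illustrative only).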


<?php

declare(strict_types=1);

namespace Phpml\Tests\NeuralNetwork\ActivationFunction;

use Phpml\NeuralNetwork\ActivationFunction\Sigmoid;
use PHPUnit\Framework\TestCase;

class SigmoidTest extends TestCase
{
    /**
     * @dataProvider sigmoidProvider
     */
    public function testSigmoidActivationFunction($beta, $expected, $value): void
    {
        $sigmoid = new Sigmoid($beta);

        // Delta-style assertEquals (PHPUnit < 8): pass if within 0.001 of expected.
        $this->assertEquals($expected, $sigmoid->compute($value), '', 0.001);
    }

    public function sigmoidProvider(): array
    {
        // Each case: [beta, expected sigmoid output, input value].
        return [
            [1.0, 1, 7.25],
            [2.0, 1, 3.75],
            [1.0, 0.5, 0],
            [0.5, 0.5, 0],
            [1.0, 0, -7.25],
            [2.0, 0, -3.75],
        ];
    }
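
    // Sanity check of the saturation cases above: with beta = 1 and value = 7.25,
    // e^(-7.25) ≈ 0.00071, so sigmoid(7.25) ≈ 0.99929, which is within the 0.001
    // delta of the expected 1. The negative inputs mirror this toward 0.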

    /**
     * @dataProvider sigmoidDerivativeProvider
     */
    public function testSigmoidDerivative($beta, $expected, $value): void
    {
        $sigmoid = new Sigmoid($beta);
        $activatedValue = $sigmoid->compute($value);

        // differentiate() receives both the raw input and the precomputed activation.
        $this->assertEquals($expected, $sigmoid->differentiate($value, $activatedValue), '', 0.001);
    }

    public function sigmoidDerivativeProvider(): array
    {
        // Each case: [beta, expected derivative, input value]. Note that the
        // beta = 1.0 and beta = 2.0 cases both expect 0.25 at value 0: the
        // derivative is computed from the activated value alone.
        return [
            [1.0, 0, -10],
            [1.0, 0.006, -5],
            [1.0, 0.25, 0],
            [1.0, 0.006, 5],
            [1.0, 0, 10],
            [2.0, 0.25, 0],
            [0.5, 0.246, 0.5],
            [0.5, 0.241, 0.75],
        ];
    }
}
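
To run just this file with the project's PHPUnit (assuming the standard Composer setup, so the exact binary path is an assumption): vendor/bin/phpunit tests/Phpml/NeuralNetwork/ActivationFunction/SigmoidTest.php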