php-ml/tests/Phpml/NeuralNetwork/ActivationFunction/BinaryStepTest.php
David Monllaó e83f7b95d5 Fix activation functions support (#163)
- Backpropagation now uses each neuron's activation function derivative
- instead of the hardcoded sigmoid derivative (see the sketch below)
- Added missing activation function derivatives
- Sigmoid forced for the output layer
- Updated ThresholdedReLU default threshold to 0 (acts as a ReLU)
- Unit tests for derivatives
- Unit tests for classifiers using different activation functions
- Added missing docs
2018-01-09 11:09:59 +01:00
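
The core of the change described above is that the backpropagated error term uses whatever derivative the neuron's activation function provides, rather than always applying the sigmoid derivative. The following is a minimal, self-contained sketch of that idea only; outputDelta, the closures, and the squared-error delta formula are illustrative assumptions and not php-ml's internal API.

<?php

declare(strict_types=1);

// Hypothetical illustration (not php-ml source): the output delta is computed
// with the derivative supplied by the neuron's activation function, instead of
// always using the sigmoid derivative o * (1 - o).
function outputDelta(float $net, float $output, float $target, callable $differentiate): float
{
    // delta = dE/dnet = (output - target) * f'(net)
    return ($output - $target) * $differentiate($net, $output);
}

// Sigmoid derivative, expressed from the already-activated value.
$sigmoidDerivative = fn (float $net, float $activated): float => $activated * (1 - $activated);

// Binary-step derivative: 0 everywhere except at the step (matches the test data below).
$binaryStepDerivative = fn (float $net, float $activated): float => $net == 0.0 ? 1.0 : 0.0;

echo outputDelta(0.3, 0.57, 1.0, $sigmoidDerivative), PHP_EOL;   // sigmoid-specific gradient
echo outputDelta(0.3, 1.0, 1.0, $binaryStepDerivative), PHP_EOL; // binary-step gradient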

<?php

declare(strict_types=1);

namespace Phpml\Tests\NeuralNetwork\ActivationFunction;

use Phpml\NeuralNetwork\ActivationFunction\BinaryStep;
use PHPUnit\Framework\TestCase;

class BinaryStepTest extends TestCase
{
    /**
     * @dataProvider binaryStepProvider
     */
    public function testBinaryStepActivationFunction($expected, $value): void
    {
        $binaryStep = new BinaryStep();

        $this->assertEquals($expected, $binaryStep->compute($value));
    }

    public function binaryStepProvider(): array
    {
        return [
            [1, 1],
            [1, 0],
            [0, -0.1],
        ];
    }

    /**
     * @dataProvider binaryStepDerivativeProvider
     */
    public function testBinaryStepDerivative($expected, $value): void
    {
        $binaryStep = new BinaryStep();
        $activatedValue = $binaryStep->compute($value);

        $this->assertEquals($expected, $binaryStep->differentiate($value, $activatedValue));
    }

    public function binaryStepDerivativeProvider(): array
    {
        return [
            [0, -1],
            [1, 0],
            [0, 1],
        ];
    }
}
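
The BinaryStep class itself is not shown on this page. A minimal sketch consistent with the expectations above could look like the following; the class name BinaryStepSketch and the method bodies are inferred from the test data, not copied from php-ml, whose actual implementation may differ in detail.

<?php

declare(strict_types=1);

// Hypothetical sketch of the class under test, inferred from the test providers above.
class BinaryStepSketch
{
    public function compute($value): float
    {
        // Step function: 1 for inputs >= 0, otherwise 0.
        return $value >= 0 ? 1.0 : 0.0;
    }

    public function differentiate($value, $computedValue): float
    {
        // The derivative is 0 everywhere except at the discontinuity, which the
        // test data treats as 1 (see the [1, 0] case in binaryStepDerivativeProvider).
        return $value == 0 ? 1.0 : 0.0;
    }
}

$step = new BinaryStepSketch();
var_dump($step->compute(-0.1));          // float(0)
var_dump($step->differentiate(0, 1.0));  // float(1)

Treating the point at 0 as having derivative 1 presumably keeps a binary-step unit from having an identically zero gradient during backpropagation; the tests only pin down the three sampled points, so that rationale is an inference rather than documented behaviour.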