mirror of
https://github.com/Llewellynvdm/php-ml.git
synced 2024-11-29 16:24:05 +00:00
e83f7b95d5
- Backpropagation using the neuron activation functions derivative - instead of hardcoded sigmoid derivative - Added missing activation functions derivatives - Sigmoid forced for the output layer - Updated ThresholdedReLU default threshold to 0 (acts as a ReLU) - Unit tests for derivatives - Unit tests for classifiers using different activation functions - Added missing docs
58 lines
1.4 KiB
PHP
58 lines
1.4 KiB
PHP
<?php

declare(strict_types=1);

namespace Phpml\Tests\NeuralNetwork\ActivationFunction;

use Phpml\NeuralNetwork\ActivationFunction\HyperbolicTangent;
use PHPUnit\Framework\TestCase;
/**
 * Unit tests for the HyperbolicTangent activation function: verifies both
 * compute() and differentiate() against pre-computed values with a 0.001
 * tolerance.
 *
 * NOTE(review): the class name is missing a "c" ("HyperboliTangent" vs
 * "HyperbolicTangent"). Left unchanged because renaming it would break the
 * class-to-file autoload mapping and any explicit test-suite references.
 */
class HyperboliTangentTest extends TestCase
{
    /**
     * compute($value) should match the expected activation within 0.001.
     *
     * @dataProvider tanhProvider
     *
     * @param float $beta     steepness coefficient passed to the constructor
     * @param float $expected expected activation output
     * @param float $value    raw input fed to compute()
     */
    public function testHyperbolicTangentActivationFunction(float $beta, float $expected, float $value): void
    {
        $tanh = new HyperbolicTangent($beta);

        // assertEqualsWithDelta() replaces the deprecated fourth "delta"
        // argument of assertEquals(), which modern PHPUnit removed.
        $this->assertEqualsWithDelta($expected, $tanh->compute($value), 0.001);
    }

    /**
     * Data sets as [beta, expected tanh(beta * x), x].
     * Int entries are widened to float by the typed test method
     * (int -> float widening is allowed even under strict_types).
     */
    public function tanhProvider(): array
    {
        return [
            [1.0, 0.761, 1],
            [1.0, 0, 0],
            [1.0, 1, 4],
            [1.0, -1, -4],
            [0.5, 0.462, 1],
            [0.3, 0, 0],
        ];
    }

    /**
     * differentiate($value, compute($value)) should match the expected
     * derivative within 0.001.
     *
     * @dataProvider tanhDerivativeProvider
     *
     * @param float $beta     steepness coefficient passed to the constructor
     * @param float $expected expected derivative value
     * @param float $value    raw input the derivative is evaluated at
     */
    public function testHyperbolicTangentDerivative(float $beta, float $expected, float $value): void
    {
        $tanh = new HyperbolicTangent($beta);
        $activatedValue = $tanh->compute($value);

        $this->assertEqualsWithDelta($expected, $tanh->differentiate($value, $activatedValue), 0.001);
    }

    /**
     * Data sets as [beta, expected derivative, x].
     * The expected values correspond to 1 - tanh^2(beta * x)
     * (e.g. beta=0.5, x=1: 1 - 0.462^2 = 0.786).
     */
    public function tanhDerivativeProvider(): array
    {
        return [
            [1.0, 0, -6],
            [1.0, 0.419, -1],
            [1.0, 1, 0],
            [1.0, 0.419, 1],
            [1.0, 0, 6],
            [0.5, 0.786, 1],
            [0.5, 0.786, -1],
            [0.3, 1, 0],
        ];
    }
}
|