Add PReLU activation function (#128)

* Implement PReLU activation function

* Add PReLUTest
Marcin Michalski 2017-09-02 21:31:14 +02:00 committed by Arkadiusz Kondas
parent 0e59cfb174
commit b1be0574d8
2 changed files with 72 additions and 0 deletions

@ -0,0 +1,33 @@
<?php

declare(strict_types=1);

namespace Phpml\NeuralNetwork\ActivationFunction;

use Phpml\NeuralNetwork\ActivationFunction;

class PReLU implements ActivationFunction
{
    /**
     * @var float
     */
    private $beta;

    /**
     * @param float $beta
     */
    public function __construct(float $beta = 0.01)
    {
        $this->beta = $beta;
    }

    /**
     * @param float|int $value
     *
     * @return float
     */
    public function compute($value): float
    {
        // Identity for non-negative input; negative input is scaled by beta.
        return $value >= 0 ? $value : $this->beta * $value;
    }
}
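
For reference, PReLU (parametric rectified linear unit) computes f(x) = x for x >= 0 and f(x) = beta * x otherwise; with the default beta = 0.01 it behaves like a leaky ReLU. A minimal usage sketch, assuming the class above is autoloadable through the project's Composer setup (the autoload path is illustrative):

<?php

declare(strict_types=1);

use Phpml\NeuralNetwork\ActivationFunction\PReLU;

require __DIR__ . '/vendor/autoload.php'; // hypothetical autoload path

$prelu = new PReLU(0.3);

var_dump($prelu->compute(2.0));  // float(2): non-negative inputs pass through unchanged
var_dump($prelu->compute(-1.0)); // float(-0.3): negative inputs are scaled by beta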

@ -0,0 +1,39 @@
<?php

declare(strict_types=1);

namespace tests\Phpml\NeuralNetwork\ActivationFunction;

use Phpml\NeuralNetwork\ActivationFunction\PReLU;
use PHPUnit\Framework\TestCase;

class PReLUTest extends TestCase
{
    /**
     * @param float $beta
     * @param float $expected
     * @param float|int $value
     *
     * @dataProvider preluProvider
     */
    public function testPReLUActivationFunction($beta, $expected, $value)
    {
        $prelu = new PReLU($beta);

        $this->assertEquals($expected, $prelu->compute($value), '', 0.001);
    }

    /**
     * @return array
     */
    public function preluProvider()
    {
        return [
            [0.01, 0.367, 0.367],
            [0.0, 1, 1],
            [0.3, -0.3, -1],
            [0.9, 3, 3],
            [0.02, -0.06, -3],
        ];
    }
}
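
A note on the assertion above: the four-argument assertEquals($expected, $actual, $message, $delta) form was current for PHPUnit when this commit landed in 2017, but it has since been removed (PHPUnit 9 dropped the delta parameter). On modern PHPUnit the same tolerance-based comparison would be written with assertEqualsWithDelta:

$this->assertEqualsWithDelta($expected, $prelu->compute($value), 0.001);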