Add ThresholdedReLU activation function (#129)

This commit is contained in:
Marcin Michalski 2017-09-02 21:30:35 +02:00 committed by Arkadiusz Kondas
parent cacfd64a6f
commit 0e59cfb174
2 changed files with 71 additions and 0 deletions

View File

@ -0,0 +1,33 @@
<?php
declare(strict_types=1);
namespace Phpml\NeuralNetwork\ActivationFunction;
use Phpml\NeuralNetwork\ActivationFunction;
class ThresholdedReLU implements ActivationFunction
{
    /**
     * Threshold: inputs less than or equal to this value are zeroed.
     *
     * @var float
     */
    private $theta;

    /**
     * @param float $theta activation threshold (defaults to 1.0);
     *                     native type hint enforces the contract under strict_types
     */
    public function __construct(float $theta = 1.0)
    {
        $this->theta = $theta;
    }

    /**
     * Computes f(x) = x when x > theta, otherwise 0.0.
     *
     * @param float|int $value
     *
     * @return float
     */
    public function compute($value): float
    {
        // Strictly-greater comparison: a value exactly equal to theta yields 0.0.
        return $value > $this->theta ? $value : 0.0;
    }
}

View File

@ -0,0 +1,38 @@
<?php
declare(strict_types=1);
namespace tests\Phpml\NeuralNetwork\ActivationFunction;
use Phpml\NeuralNetwork\ActivationFunction\ThresholdedReLU;
use PHPUnit\Framework\TestCase;
class ThresholdedReLUTest extends TestCase
{
    /**
     * Verifies compute() against representative (theta, expected, input) triples.
     *
     * @param float $theta    activation threshold passed to the constructor
     * @param float $expected expected activation output
     * @param float $value    input fed to compute()
     *
     * @dataProvider thresholdProvider
     */
    public function testThresholdedReLUActivationFunction(float $theta, float $expected, float $value): void
    {
        $thresholdedReLU = new ThresholdedReLU($theta);

        $this->assertEquals($expected, $thresholdedReLU->compute($value));
    }

    /**
     * Datasets: [theta, expected, value].
     *
     * @return array
     */
    public function thresholdProvider(): array
    {
        return [
            [1.0, 0, 1.0],    // value equal to theta is zeroed (strict > comparison)
            [0.5, 3.75, 3.75], // value above theta passes through unchanged
            [0.0, 0.5, 0.5],
            [0.9, 0, 0.1],    // value below theta is zeroed
        ];
    }
}