diff --git a/docs/machine-learning/neural-network/multilayer-perceptron-classifier.md b/docs/machine-learning/neural-network/multilayer-perceptron-classifier.md
index 72d0b4b..5acf093 100644
--- a/docs/machine-learning/neural-network/multilayer-perceptron-classifier.md
+++ b/docs/machine-learning/neural-network/multilayer-perceptron-classifier.md
@@ -66,4 +66,6 @@ $mlp->predict([[1, 1, 1, 1], [0, 0, 0, 0]]);
 * BinaryStep
 * Gaussian
 * HyperbolicTangent
+* Parametric Rectified Linear Unit
 * Sigmoid (default)
+* Thresholded Rectified Linear Unit
diff --git a/src/Phpml/NeuralNetwork/ActivationFunction.php b/src/Phpml/NeuralNetwork/ActivationFunction.php
index 5b91425..30adf4d 100644
--- a/src/Phpml/NeuralNetwork/ActivationFunction.php
+++ b/src/Phpml/NeuralNetwork/ActivationFunction.php
@@ -10,4 +10,10 @@ interface ActivationFunction
      * @param float|int $value
      */
     public function compute($value): float;
+
+    /**
+     * @param float|int $value
+     * @param float|int $computedvalue
+     */
+    public function differentiate($value, $computedvalue): float;
 }
diff --git a/src/Phpml/NeuralNetwork/ActivationFunction/BinaryStep.php b/src/Phpml/NeuralNetwork/ActivationFunction/BinaryStep.php
index 764bc4e..56ea7eb 100644
--- a/src/Phpml/NeuralNetwork/ActivationFunction/BinaryStep.php
+++ b/src/Phpml/NeuralNetwork/ActivationFunction/BinaryStep.php
@@ -15,4 +15,17 @@ class BinaryStep implements ActivationFunction
     {
         return $value >= 0 ? 1.0 : 0.0;
     }
+
+    /**
+     * @param float|int $value
+     * @param float|int $computedvalue
+     */
+    public function differentiate($value, $computedvalue): float
+    {
+        if ($value === 0 || $value === 0.0) {
+            return 1.0;
+        }
+
+        return 0.0;
+    }
 }
diff --git a/src/Phpml/NeuralNetwork/ActivationFunction/Gaussian.php b/src/Phpml/NeuralNetwork/ActivationFunction/Gaussian.php
index da428a4..8871b58 100644
--- a/src/Phpml/NeuralNetwork/ActivationFunction/Gaussian.php
+++ b/src/Phpml/NeuralNetwork/ActivationFunction/Gaussian.php
@@ -15,4 +15,13 @@ class Gaussian implements ActivationFunction
     {
         return exp(-pow($value, 2));
     }
+
+    /**
+     * @param float|int $value
+     * @param float|int $computedvalue
+     */
+    public function differentiate($value, $computedvalue): float
+    {
+        return -2 * $value * $computedvalue;
+    }
 }
diff --git a/src/Phpml/NeuralNetwork/ActivationFunction/HyperbolicTangent.php b/src/Phpml/NeuralNetwork/ActivationFunction/HyperbolicTangent.php
index 6378606..7aa9614 100644
--- a/src/Phpml/NeuralNetwork/ActivationFunction/HyperbolicTangent.php
+++ b/src/Phpml/NeuralNetwork/ActivationFunction/HyperbolicTangent.php
@@ -25,4 +25,13 @@ class HyperbolicTangent implements ActivationFunction
     {
         return tanh($this->beta * $value);
     }
+
+    /**
+     * @param float|int $value
+     * @param float|int $computedvalue
+     */
+    public function differentiate($value, $computedvalue): float
+    {
+        return 1 - pow($computedvalue, 2);
+    }
 }
diff --git a/src/Phpml/NeuralNetwork/ActivationFunction/PReLU.php b/src/Phpml/NeuralNetwork/ActivationFunction/PReLU.php
index fc7ff62..88212d1 100644
--- a/src/Phpml/NeuralNetwork/ActivationFunction/PReLU.php
+++ b/src/Phpml/NeuralNetwork/ActivationFunction/PReLU.php
@@ -25,4 +25,13 @@ class PReLU implements ActivationFunction
     {
         return $value >= 0 ? $value : $this->beta * $value;
     }
+
+    /**
+     * @param float|int $value
+     * @param float|int $computedvalue
+     */
+    public function differentiate($value, $computedvalue): float
+    {
+        return $computedvalue >= 0 ? 1.0 : $this->beta;
+    }
 }
diff --git a/src/Phpml/NeuralNetwork/ActivationFunction/Sigmoid.php b/src/Phpml/NeuralNetwork/ActivationFunction/Sigmoid.php
index 4ae9603..edad3d6 100644
--- a/src/Phpml/NeuralNetwork/ActivationFunction/Sigmoid.php
+++ b/src/Phpml/NeuralNetwork/ActivationFunction/Sigmoid.php
@@ -25,4 +25,13 @@ class Sigmoid implements ActivationFunction
     {
         return 1 / (1 + exp(-$this->beta * $value));
     }
+
+    /**
+     * @param float|int $value
+     * @param float|int $computedvalue
+     */
+    public function differentiate($value, $computedvalue): float
+    {
+        return $computedvalue * (1 - $computedvalue);
+    }
 }
diff --git a/src/Phpml/NeuralNetwork/ActivationFunction/ThresholdedReLU.php b/src/Phpml/NeuralNetwork/ActivationFunction/ThresholdedReLU.php
index 2bb1cc7..f8f8247 100644
--- a/src/Phpml/NeuralNetwork/ActivationFunction/ThresholdedReLU.php
+++ b/src/Phpml/NeuralNetwork/ActivationFunction/ThresholdedReLU.php
@@ -13,7 +13,7 @@ class ThresholdedReLU implements ActivationFunction
      */
     private $theta;
 
-    public function __construct(float $theta = 1.0)
+    public function __construct(float $theta = 0.0)
     {
         $this->theta = $theta;
     }
@@ -25,4 +25,13 @@ class ThresholdedReLU implements ActivationFunction
     {
         return $value > $this->theta ? $value : 0.0;
     }
+
+    /**
+     * @param float|int $value
+     * @param float|int $computedvalue
+     */
+    public function differentiate($value, $computedvalue): float
+    {
+        return $computedvalue >= $this->theta ? 1.0 : 0.0;
+    }
 }
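Each differentiate() implementation above returns the derivative of the activation with respect to the neuron's input, expressed through the already-computed activation wherever an identity allows it: tanh'(z) = 1 - tanh²(z), σ'(z) = σ(z)(1 - σ(z)), and d/dz e^(-z²) = -2z·e^(-z²). For the β-parameterised functions the formulas are stated for the default β = 1. A quick way to sanity-check any of these implementations is to compare them against a central finite difference; the following standalone sketch is illustrative only (it assumes the php-ml Composer autoloader and is not part of the patch):

    <?php
    require 'vendor/autoload.php';

    use Phpml\NeuralNetwork\ActivationFunction\Gaussian;

    $f = new Gaussian();
    $eps = 1e-6;

    foreach ([-1.0, -0.5, 0.5, 1.0] as $z) {
        // The patch always passes the cached activation as the second argument.
        $analytic = $f->differentiate($z, $f->compute($z));

        // Central finite difference: (f(z + eps) - f(z - eps)) / (2 * eps).
        $numeric = ($f->compute($z + $eps) - $f->compute($z - $eps)) / (2 * $eps);

        printf("z = %5.2f  analytic = %.6f  numeric = %.6f\n", $z, $analytic, $numeric);
    }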
diff --git a/src/Phpml/NeuralNetwork/Network/MultilayerPerceptron.php b/src/Phpml/NeuralNetwork/Network/MultilayerPerceptron.php
index 1a997be..bfec929 100644
--- a/src/Phpml/NeuralNetwork/Network/MultilayerPerceptron.php
+++ b/src/Phpml/NeuralNetwork/Network/MultilayerPerceptron.php
@@ -9,6 +9,7 @@ use Phpml\Exception\InvalidArgumentException;
 use Phpml\Helper\Predictable;
 use Phpml\IncrementalEstimator;
 use Phpml\NeuralNetwork\ActivationFunction;
+use Phpml\NeuralNetwork\ActivationFunction\Sigmoid;
 use Phpml\NeuralNetwork\Layer;
 use Phpml\NeuralNetwork\Node\Bias;
 use Phpml\NeuralNetwork\Node\Input;
@@ -125,7 +126,10 @@ abstract class MultilayerPerceptron extends LayeredNetwork implements Estimator,
     {
         $this->addInputLayer($this->inputLayerFeatures);
         $this->addNeuronLayers($this->hiddenLayers, $this->activationFunction);
-        $this->addNeuronLayers([count($this->classes)], $this->activationFunction);
+
+        // Use the sigmoid function for the output layer, as we want a value from 0 to 1.
+        $sigmoid = new Sigmoid();
+        $this->addNeuronLayers([count($this->classes)], $sigmoid);
 
         $this->addBiasNodes();
         $this->generateSynapses();
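With this change, the activation function passed to the classifier only shapes the hidden layers; the output layer is always sigmoid, so the backpropagation error term keeps working against outputs in the 0..1 range. A usage sketch, mirroring the constructor signature exercised by the tests further down (4 features, one hidden layer of 2 neurons, 2 classes, 1000 iterations; the numbers are arbitrary):

    use Phpml\Classification\MLPClassifier;
    use Phpml\NeuralNetwork\ActivationFunction\PReLU;

    // PReLU drives the hidden layer; the output layer stays Sigmoid.
    $mlp = new MLPClassifier(4, [2], ['a', 'b'], 1000, new PReLU());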
diff --git a/src/Phpml/NeuralNetwork/Node/Neuron.php b/src/Phpml/NeuralNetwork/Node/Neuron.php
index 2dff600..47d606d 100644
--- a/src/Phpml/NeuralNetwork/Node/Neuron.php
+++ b/src/Phpml/NeuralNetwork/Node/Neuron.php
@@ -26,6 +26,11 @@ class Neuron implements Node
      */
     protected $output = 0.0;
 
+    /**
+     * @var float
+     */
+    protected $z = 0.0;
+
     public function __construct(?ActivationFunction $activationFunction = null)
     {
         $this->activationFunction = $activationFunction ?: new Sigmoid();
@@ -47,19 +52,25 @@ class Neuron implements Node
     public function getOutput(): float
     {
         if ($this->output === 0.0) {
-            $sum = 0.0;
+            $this->z = 0.0;
             foreach ($this->synapses as $synapse) {
-                $sum += $synapse->getOutput();
+                $this->z += $synapse->getOutput();
             }
 
-            $this->output = $this->activationFunction->compute($sum);
+            $this->output = $this->activationFunction->compute($this->z);
         }
 
         return $this->output;
     }
 
+    public function getDerivative(): float
+    {
+        return $this->activationFunction->differentiate($this->z, $this->output);
+    }
+
     public function reset(): void
     {
         $this->output = 0.0;
+        $this->z = 0.0;
     }
 }
diff --git a/src/Phpml/NeuralNetwork/Training/Backpropagation.php b/src/Phpml/NeuralNetwork/Training/Backpropagation.php
index fd09d95..6c9af98 100644
--- a/src/Phpml/NeuralNetwork/Training/Backpropagation.php
+++ b/src/Phpml/NeuralNetwork/Training/Backpropagation.php
@@ -64,7 +64,7 @@ class Backpropagation
     private function getSigma(Neuron $neuron, int $targetClass, int $key, bool $lastLayer): float
     {
         $neuronOutput = $neuron->getOutput();
-        $sigma = $neuronOutput * (1 - $neuronOutput);
+        $sigma = $neuron->getDerivative();
 
         if ($lastLayer) {
             $value = 0;
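Caching the weighted input sum in $z is what enables getDerivative(): the neuron can hand the activation function both the original input and the cached output, and getSigma() no longer hard-codes the sigmoid derivative $neuronOutput * (1 - $neuronOutput). In conventional notation the local gradient is now sigma_j = f'(z_j) * e_j, where e_j is (output - target) on the output layer and the weighted sum of downstream sigmas on hidden layers. A condensed sketch of that relationship ($errorTerm is hypothetical shorthand, not a variable in the patch):

    // Output layer:  $errorTerm = $neuronOutput - $target;
    // Hidden layers: $errorTerm = weighted sum of the next layer's sigmas;
    $sigma = $neuron->getDerivative() * $errorTerm;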
diff --git a/tests/Phpml/Classification/MLPClassifierTest.php b/tests/Phpml/Classification/MLPClassifierTest.php
index ef40618..c46e297 100644
--- a/tests/Phpml/Classification/MLPClassifierTest.php
+++ b/tests/Phpml/Classification/MLPClassifierTest.php
@@ -7,6 +7,11 @@ namespace Phpml\Tests\Classification;
 use Phpml\Classification\MLPClassifier;
 use Phpml\Exception\InvalidArgumentException;
 use Phpml\ModelManager;
+use Phpml\NeuralNetwork\ActivationFunction;
+use Phpml\NeuralNetwork\ActivationFunction\HyperbolicTangent;
+use Phpml\NeuralNetwork\ActivationFunction\PReLU;
+use Phpml\NeuralNetwork\ActivationFunction\Sigmoid;
+use Phpml\NeuralNetwork\ActivationFunction\ThresholdedReLU;
 use Phpml\NeuralNetwork\Node\Neuron;
 use PHPUnit\Framework\TestCase;
 
@@ -141,6 +146,33 @@ class MLPClassifierTest extends TestCase
         $this->assertEquals(4, $network->predict([0, 0, 0, 0, 0]));
     }
 
+    /**
+     * @dataProvider activationFunctionsProvider
+     */
+    public function testBackpropagationActivationFunctions(ActivationFunction $activationFunction): void
+    {
+        $network = new MLPClassifier(5, [3], ['a', 'b'], 10000, $activationFunction);
+        $network->train(
+            [[1, 0, 0, 0, 0], [0, 1, 0, 0, 0], [0, 0, 1, 1, 0], [1, 1, 1, 1, 1]],
+            ['a', 'b', 'a', 'a']
+        );
+
+        $this->assertEquals('a', $network->predict([1, 0, 0, 0, 0]));
+        $this->assertEquals('b', $network->predict([0, 1, 0, 0, 0]));
+        $this->assertEquals('a', $network->predict([0, 0, 1, 1, 0]));
+        $this->assertEquals('a', $network->predict([1, 1, 1, 1, 1]));
+    }
+
+    public function activationFunctionsProvider(): array
+    {
+        return [
+            [new Sigmoid()],
+            [new HyperbolicTangent()],
+            [new PReLU()],
+            [new ThresholdedReLU()],
+        ];
+    }
+
     public function testSaveAndRestore(): void
     {
         // Instantiate new Perceptron trained for OR problem
diff --git a/tests/Phpml/NeuralNetwork/ActivationFunction/BinaryStepTest.php b/tests/Phpml/NeuralNetwork/ActivationFunction/BinaryStepTest.php
index acc7977..4e85478 100644
--- a/tests/Phpml/NeuralNetwork/ActivationFunction/BinaryStepTest.php
+++ b/tests/Phpml/NeuralNetwork/ActivationFunction/BinaryStepTest.php
@@ -27,4 +27,23 @@ class BinaryStepTest extends TestCase
             [0, -0.1],
         ];
     }
+
+    /**
+     * @dataProvider binaryStepDerivativeProvider
+     */
+    public function testBinaryStepDerivative($expected, $value): void
+    {
+        $binaryStep = new BinaryStep();
+        $activatedValue = $binaryStep->compute($value);
+        $this->assertEquals($expected, $binaryStep->differentiate($value, $activatedValue));
+    }
+
+    public function binaryStepDerivativeProvider(): array
+    {
+        return [
+            [0, -1],
+            [1, 0],
+            [0, 1],
+        ];
+    }
 }
diff --git a/tests/Phpml/NeuralNetwork/ActivationFunction/GaussianTest.php b/tests/Phpml/NeuralNetwork/ActivationFunction/GaussianTest.php
index 2b08793..aace8bc 100644
--- a/tests/Phpml/NeuralNetwork/ActivationFunction/GaussianTest.php
+++ b/tests/Phpml/NeuralNetwork/ActivationFunction/GaussianTest.php
@@ -29,4 +29,27 @@ class GaussianTest extends TestCase
             [0, -3],
         ];
     }
+
+    /**
+     * @dataProvider gaussianDerivativeProvider
+     */
+    public function testGaussianDerivative($expected, $value): void
+    {
+        $gaussian = new Gaussian();
+        $activatedValue = $gaussian->compute($value);
+        $this->assertEquals($expected, $gaussian->differentiate($value, $activatedValue), '', 0.001);
+    }
+
+    public function gaussianDerivativeProvider(): array
+    {
+        return [
+            [0, -5],
+            [0.735, -1],
+            [0.779, -0.5],
+            [0, 0],
+            [-0.779, 0.5],
+            [-0.735, 1],
+            [0, 5],
+        ];
+    }
 }
diff --git a/tests/Phpml/NeuralNetwork/ActivationFunction/HyperboliTangentTest.php b/tests/Phpml/NeuralNetwork/ActivationFunction/HyperboliTangentTest.php
index 91e7eba..629200e 100644
--- a/tests/Phpml/NeuralNetwork/ActivationFunction/HyperboliTangentTest.php
+++ b/tests/Phpml/NeuralNetwork/ActivationFunction/HyperboliTangentTest.php
@@ -30,4 +30,28 @@ class HyperboliTangentTest extends TestCase
             [0.3, 0, 0],
         ];
     }
+
+    /**
+     * @dataProvider tanhDerivativeProvider
+     */
+    public function testHyperbolicTangentDerivative($beta, $expected, $value): void
+    {
+        $tanh = new HyperbolicTangent($beta);
+        $activatedValue = $tanh->compute($value);
+        $this->assertEquals($expected, $tanh->differentiate($value, $activatedValue), '', 0.001);
+    }
+
+    public function tanhDerivativeProvider(): array
+    {
+        return [
+            [1.0, 0, -6],
+            [1.0, 0.419, -1],
+            [1.0, 1, 0],
+            [1.0, 0.419, 1],
+            [1.0, 0, 6],
+            [0.5, 0.786, 1],
+            [0.5, 0.786, -1],
+            [0.3, 1, 0],
+        ];
+    }
 }
diff --git a/tests/Phpml/NeuralNetwork/ActivationFunction/PReLUTest.php b/tests/Phpml/NeuralNetwork/ActivationFunction/PReLUTest.php
index f407060..c9f565d 100644
--- a/tests/Phpml/NeuralNetwork/ActivationFunction/PReLUTest.php
+++ b/tests/Phpml/NeuralNetwork/ActivationFunction/PReLUTest.php
@@ -29,4 +29,27 @@ class PReLUTest extends TestCase
             [0.02, -0.06, -3],
         ];
     }
+
+    /**
+     * @dataProvider preluDerivativeProvider
+     */
+    public function testPReLUDerivative($beta, $expected, $value): void
+    {
+        $prelu = new PReLU($beta);
+        $activatedValue = $prelu->compute($value);
+        $this->assertEquals($expected, $prelu->differentiate($value, $activatedValue));
+    }
+
+    public function preluDerivativeProvider(): array
+    {
+        return [
+            [0.5, 0.5, -3],
+            [0.5, 1, 0],
+            [0.5, 1, 1],
+            [0.01, 1, 1],
+            [1, 1, 1],
+            [0.3, 1, 0.1],
+            [0.1, 0.1, -0.1],
+        ];
+    }
 }
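The expected values in the derivative providers above follow directly from the closed forms. For example, the Gaussian row [0.735, -1] is -2 · (-1) · e^(-1) ≈ 0.7358 and the tanh row [1.0, 0.419, 1] is 1 - tanh²(1) ≈ 0.4200, both within the 0.001 delta the assertions allow. They can be reproduced in two lines of PHP (β = 1):

    $g = -2 * -1 * exp(-1);   // Gaussian derivative at x = -1  -> 0.7358
    $t = 1 - tanh(1) ** 2;    // tanh derivative at x = 1       -> 0.4200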
diff --git a/tests/Phpml/NeuralNetwork/ActivationFunction/SigmoidTest.php b/tests/Phpml/NeuralNetwork/ActivationFunction/SigmoidTest.php
index 30b50f8..1028fb3 100644
--- a/tests/Phpml/NeuralNetwork/ActivationFunction/SigmoidTest.php
+++ b/tests/Phpml/NeuralNetwork/ActivationFunction/SigmoidTest.php
@@ -30,4 +30,28 @@ class SigmoidTest extends TestCase
             [2.0, 0, -3.75],
         ];
     }
+
+    /**
+     * @dataProvider sigmoidDerivativeProvider
+     */
+    public function testSigmoidDerivative($beta, $expected, $value): void
+    {
+        $sigmoid = new Sigmoid($beta);
+        $activatedValue = $sigmoid->compute($value);
+        $this->assertEquals($expected, $sigmoid->differentiate($value, $activatedValue), '', 0.001);
+    }
+
+    public function sigmoidDerivativeProvider(): array
+    {
+        return [
+            [1.0, 0, -10],
+            [1, 0.006, -5],
+            [1.0, 0.25, 0],
+            [1, 0.006, 5],
+            [1.0, 0, 10],
+            [2.0, 0.25, 0],
+            [0.5, 0.246, 0.5],
+            [0.5, 0.241, 0.75],
+        ];
+    }
 }
diff --git a/tests/Phpml/NeuralNetwork/ActivationFunction/ThresholdedReLUTest.php b/tests/Phpml/NeuralNetwork/ActivationFunction/ThresholdedReLUTest.php
index f46ff02..4db0418 100644
--- a/tests/Phpml/NeuralNetwork/ActivationFunction/ThresholdedReLUTest.php
+++ b/tests/Phpml/NeuralNetwork/ActivationFunction/ThresholdedReLUTest.php
@@ -28,4 +28,26 @@ class ThresholdedReLUTest extends TestCase
             [0.9, 0, 0.1],
         ];
     }
+
+    /**
+     * @dataProvider thresholdDerivativeProvider
+     */
+    public function testThresholdedReLUDerivative($theta, $expected, $value): void
+    {
+        $thresholdedReLU = new ThresholdedReLU($theta);
+        $activatedValue = $thresholdedReLU->compute($value);
+        $this->assertEquals($expected, $thresholdedReLU->differentiate($value, $activatedValue));
+    }
+
+    public function thresholdDerivativeProvider(): array
+    {
+        return [
+            [0, 1, 1],
+            [0, 1, 0],
+            [0.5, 1, 1],
+            [0.5, 1, 0.6],
+            [0.5, 0, 0],
+            [2, 0, -1],
+        ];
+    }
 }
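The repeated 0.25 in the sigmoid provider is the maximum of the logistic derivative: σ(0) = 0.5, so σ'(0) = 0.5 · (1 - 0.5) = 0.25. It shows up for both β = 1.0 and β = 2.0 because differentiate() works purely off the computed output and applies no β scaling. A minimal check (again assuming the php-ml autoloader):

    use Phpml\NeuralNetwork\ActivationFunction\Sigmoid;

    $sigmoid = new Sigmoid();
    $out = $sigmoid->compute(0);            // 0.5
    echo $sigmoid->differentiate(0, $out);  // 0.25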