mirror of
https://github.com/Llewellynvdm/php-ml.git
synced 2024-11-21 20:45:10 +00:00
Configure an Activation Function per hidden layer (#208)
* ability to specify a per-layer activation function
* some tests for the new addition to Layer
* appease style CI whitespace issue
* more flexible addition of layers; a developer can also pass a Layer object in manually
* new test for a Layer object in the MLP constructor
* documentation for the added MLP functionality
This commit is contained in:
parent
8daed2484d
commit
c32bf3fe2b
@ -19,6 +19,24 @@ $mlp = new MLPClassifier(4, [2], ['a', 'b', 'c']);
|
||||
|
||||
```
|
||||
|
||||
An Activation Function may also be passed in with each individual hidden layer. Example:
|
||||
|
||||
```
|
||||
use Phpml\NeuralNetwork\ActivationFunction\PReLU;
|
||||
use Phpml\NeuralNetwork\ActivationFunction\Sigmoid;
|
||||
$mlp = new MLPClassifier(4, [[2, new PReLU], [2, new Sigmoid]], ['a', 'b', 'c']);
|
||||
```
|
||||
|
||||
Instead of configuring each hidden layer as an array, the hidden layers may also be configured with Layer objects. Example:
|
||||
|
||||
```
|
||||
use Phpml\NeuralNetwork\Layer;
|
||||
use Phpml\NeuralNetwork\Node\Neuron;
|
||||
$layer1 = new Layer(2, Neuron::class, new PReLU);
|
||||
$layer2 = new Layer(2, Neuron::class, new Sigmoid);
|
||||
$mlp = new MLPClassifier(4, [$layer1, $layer2], ['a', 'b', 'c']);
|
||||
```
|
||||
|
||||
## Train
|
||||
|
||||
To train a MLP simply provide train samples and labels (as array). Example:
|
||||
|
@ -142,10 +142,17 @@ abstract class MultilayerPerceptron extends LayeredNetwork implements Estimator,
|
||||
$this->addLayer(new Layer($nodes, Input::class));
|
||||
}
|
||||
|
||||
/**
 * Adds the hidden/output neuron layers to the network.
 *
 * Each element of $layers may be one of:
 *  - int: neuron count; the layer uses the default activation function;
 *  - array: [int $neurons, ActivationFunction $fn] — neuron count with a
 *    per-layer activation function (falls back to the default when the
 *    second element is absent or not an ActivationFunction);
 *  - Layer: a pre-built layer object, added as-is.
 *
 * @param array                   $layers                    layer definitions (int|array|Layer each)
 * @param ActivationFunction|null $defaultActivationFunction fallback activation function
 */
private function addNeuronLayers(array $layers, ?ActivationFunction $defaultActivationFunction = null): void
{
    foreach ($layers as $layer) {
        if ($layer instanceof Layer) {
            $this->addLayer($layer);
        } elseif (is_array($layer)) {
            // isset() guard: without it, a one-element array such as [3]
            // would raise an "Undefined array key 1" warning in PHP 8.
            $function = isset($layer[1]) && $layer[1] instanceof ActivationFunction
                ? $layer[1]
                : $defaultActivationFunction;
            $this->addLayer(new Layer($layer[0], Neuron::class, $function));
        } else {
            $this->addLayer(new Layer($layer, Neuron::class, $defaultActivationFunction));
        }
    }
}
|
||||
|
||||
|
@ -4,6 +4,7 @@ declare(strict_types=1);
|
||||
|
||||
namespace Phpml\Tests\NeuralNetwork\Network;
|
||||
|
||||
use Phpml\NeuralNetwork\ActivationFunction;
|
||||
use Phpml\NeuralNetwork\Layer;
|
||||
use Phpml\NeuralNetwork\Network\LayeredNetwork;
|
||||
use Phpml\NeuralNetwork\Node\Input;
|
||||
@ -45,6 +46,15 @@ class LayeredNetworkTest extends TestCase
|
||||
$this->assertEquals([0.5], $network->getOutput());
|
||||
}
|
||||
|
||||
public function testSetInputAndGetOutputWithCustomActivationFunctions(): void
{
    // Input nodes forward their values unchanged, even when the input
    // layer is constructed with a custom activation function.
    $activation = $this->getActivationFunctionMock();
    $network = $this->getLayeredNetworkMock();
    $network->addLayer(new Layer(2, Input::class, $activation));

    $input = [34, 43];
    $network->setInput($input);

    $this->assertEquals($input, $network->getOutput());
}
|
||||
|
||||
/**
|
||||
* @return LayeredNetwork|PHPUnit_Framework_MockObject_MockObject
|
||||
*/
|
||||
@ -52,4 +62,12 @@ class LayeredNetworkTest extends TestCase
|
||||
{
|
||||
return $this->getMockForAbstractClass(LayeredNetwork::class);
|
||||
}
|
||||
|
||||
/**
 * Builds a stub ActivationFunction for wiring into test layers.
 *
 * @return ActivationFunction|PHPUnit_Framework_MockObject_MockObject
 */
private function getActivationFunctionMock()
{
    $activationFunction = $this->getMockForAbstractClass(ActivationFunction::class);

    return $activationFunction;
}
|
||||
}
|
||||
|
@ -4,8 +4,12 @@ declare(strict_types=1);
|
||||
|
||||
namespace Phpml\Tests\NeuralNetwork\Network;
|
||||
|
||||
use Phpml\NeuralNetwork\ActivationFunction;
|
||||
use Phpml\NeuralNetwork\Layer;
|
||||
use Phpml\NeuralNetwork\Network\MultilayerPerceptron;
|
||||
use Phpml\NeuralNetwork\Node\Neuron;
|
||||
use PHPUnit\Framework\TestCase;
|
||||
use PHPUnit_Framework_MockObject_MockObject;
|
||||
|
||||
class MultilayerPerceptronTest extends TestCase
|
||||
{
|
||||
@ -26,4 +30,52 @@ class MultilayerPerceptronTest extends TestCase
|
||||
$backprop = $this->readAttribute($mlp, 'backpropagation');
|
||||
$this->assertEquals(0.24, $this->readAttribute($backprop, 'learningRate'));
|
||||
}
|
||||
|
||||
public function testLearningRateSetterWithCustomActivationFunctions(): void
{
    // Hidden layers supplied as [neuronCount, ActivationFunction] pairs.
    $activationFunction = $this->getActivationFunctionMock();
    $hiddenLayers = [[3, $activationFunction], [5, $activationFunction]];

    /** @var MultilayerPerceptron $mlp */
    $mlp = $this->getMockForAbstractClass(
        MultilayerPerceptron::class,
        [5, $hiddenLayers, [0, 1], 1000, null, 0.42]
    );

    // Constructor-supplied rate reaches both the MLP and backpropagation.
    $this->assertEquals(0.42, $this->readAttribute($mlp, 'learningRate'));
    $backpropagation = $this->readAttribute($mlp, 'backpropagation');
    $this->assertEquals(0.42, $this->readAttribute($backpropagation, 'learningRate'));

    // The setter propagates a new rate to both as well.
    $mlp->setLearningRate(0.24);
    $this->assertEquals(0.24, $this->readAttribute($mlp, 'learningRate'));
    $backpropagation = $this->readAttribute($mlp, 'backpropagation');
    $this->assertEquals(0.24, $this->readAttribute($backpropagation, 'learningRate'));
}
|
||||
|
||||
public function testLearningRateSetterWithLayerObject(): void
{
    // Hidden layers supplied as pre-built Layer objects.
    $activationFunction = $this->getActivationFunctionMock();
    $hiddenLayers = [
        new Layer(3, Neuron::class, $activationFunction),
        new Layer(5, Neuron::class, $activationFunction),
    ];

    /** @var MultilayerPerceptron $mlp */
    $mlp = $this->getMockForAbstractClass(
        MultilayerPerceptron::class,
        [5, $hiddenLayers, [0, 1], 1000, null, 0.42]
    );

    // Constructor-supplied rate reaches both the MLP and backpropagation.
    $this->assertEquals(0.42, $this->readAttribute($mlp, 'learningRate'));
    $backpropagation = $this->readAttribute($mlp, 'backpropagation');
    $this->assertEquals(0.42, $this->readAttribute($backpropagation, 'learningRate'));

    // The setter propagates a new rate to both as well.
    $mlp->setLearningRate(0.24);
    $this->assertEquals(0.24, $this->readAttribute($mlp, 'learningRate'));
    $backpropagation = $this->readAttribute($mlp, 'backpropagation');
    $this->assertEquals(0.24, $this->readAttribute($backpropagation, 'learningRate'));
}
|
||||
|
||||
/**
 * Builds a stub ActivationFunction for the per-layer constructor tests.
 *
 * @return ActivationFunction|PHPUnit_Framework_MockObject_MockObject
 */
private function getActivationFunctionMock()
{
    $activationFunction = $this->getMockForAbstractClass(ActivationFunction::class);

    return $activationFunction;
}
|
||||
}
|
||||
|
Loading…
Reference in New Issue
Block a user