diff --git a/ecs.yml b/ecs.yml
index b19571e..21b30e9 100644
--- a/ecs.yml
+++ b/ecs.yml
@@ -18,7 +18,8 @@ services:
     PhpCsFixer\Fixer\Operator\BinaryOperatorSpacesFixer:
         align_double_arrow: false
         align_equals: false
-
+    PhpCsFixer\Fixer\PhpUnit\PhpUnitTestCaseStaticMethodCallsFixer:
+        call_type: 'self'
     # phpdoc
     PhpCsFixer\Fixer\Phpdoc\PhpdocSeparationFixer: ~
     PhpCsFixer\Fixer\Phpdoc\PhpdocAlignFixer: ~
diff --git a/src/Helper/Optimizer/Optimizer.php b/src/Helper/Optimizer/Optimizer.php
index 99a82ab..54331e9 100644
--- a/src/Helper/Optimizer/Optimizer.php
+++ b/src/Helper/Optimizer/Optimizer.php
@@ -48,6 +48,11 @@ abstract class Optimizer
         return $this;
     }
 
+    public function theta(): array
+    {
+        return $this->theta;
+    }
+
     /**
      * Executes the optimization with the given samples & targets
      * and returns the weights
diff --git a/src/NeuralNetwork/Network/MultilayerPerceptron.php b/src/NeuralNetwork/Network/MultilayerPerceptron.php
index e9e6b51..beefb1e 100644
--- a/src/NeuralNetwork/Network/MultilayerPerceptron.php
+++ b/src/NeuralNetwork/Network/MultilayerPerceptron.php
@@ -129,6 +129,16 @@ abstract class MultilayerPerceptron extends LayeredNetwork implements Estimator,
         return $result;
     }
 
+    public function getLearningRate(): float
+    {
+        return $this->learningRate;
+    }
+
+    public function getBackpropagation(): Backpropagation
+    {
+        return $this->backpropagation;
+    }
+
     /**
      * @param mixed $target
      */
diff --git a/src/NeuralNetwork/Training/Backpropagation.php b/src/NeuralNetwork/Training/Backpropagation.php
index 6c9af98..69a3e2a 100644
--- a/src/NeuralNetwork/Training/Backpropagation.php
+++ b/src/NeuralNetwork/Training/Backpropagation.php
@@ -34,6 +34,11 @@ class Backpropagation
         $this->learningRate = $learningRate;
     }
 
+    public function getLearningRate(): float
+    {
+        return $this->learningRate;
+    }
+
     /**
      * @param mixed $targetClass
      */
diff --git a/tests/Helper/Optimizer/OptimizerTest.php b/tests/Helper/Optimizer/OptimizerTest.php
index 97af2d2..184f6c7 100644
--- a/tests/Helper/Optimizer/OptimizerTest.php
+++ b/tests/Helper/Optimizer/OptimizerTest.php
@@ -26,9 +26,7 @@ class OptimizerTest extends TestCase
         $optimizer = $this->getMockForAbstractClass(Optimizer::class, [2]);
         $object = $optimizer->setTheta([0.3, 1]);
 
-        $theta = self::getObjectAttribute($optimizer, 'theta');
-
         self::assertSame($object, $optimizer);
-        self::assertSame([0.3, 1], $theta);
+        self::assertSame([0.3, 1], $object->theta());
     }
 }
diff --git a/tests/NeuralNetwork/Network/MultilayerPerceptronTest.php b/tests/NeuralNetwork/Network/MultilayerPerceptronTest.php
index 1495136..6123f9b 100644
--- a/tests/NeuralNetwork/Network/MultilayerPerceptronTest.php
+++ b/tests/NeuralNetwork/Network/MultilayerPerceptronTest.php
@@ -55,14 +55,12 @@ class MultilayerPerceptronTest extends TestCase
             [5, [3], [0, 1], 1000, null, 0.42]
         );
 
-        self::assertEquals(0.42, self::readAttribute($mlp, 'learningRate'));
-        $backprop = self::readAttribute($mlp, 'backpropagation');
-        self::assertEquals(0.42, self::readAttribute($backprop, 'learningRate'));
+        self::assertEquals(0.42, $mlp->getLearningRate());
+        self::assertEquals(0.42, $mlp->getBackpropagation()->getLearningRate());
 
         $mlp->setLearningRate(0.24);
-        self::assertEquals(0.24, self::readAttribute($mlp, 'learningRate'));
-        $backprop = self::readAttribute($mlp, 'backpropagation');
-        self::assertEquals(0.24, self::readAttribute($backprop, 'learningRate'));
+        self::assertEquals(0.24, $mlp->getLearningRate());
+        self::assertEquals(0.24, $mlp->getBackpropagation()->getLearningRate());
     }
 
     public function testLearningRateSetterWithCustomActivationFunctions(): void
@@ -75,14 +73,12 @@ class MultilayerPerceptronTest extends TestCase
             [5, [[3, $activation_function], [5, $activation_function]], [0, 1], 1000, null, 0.42]
         );
 
-        self::assertEquals(0.42, self::readAttribute($mlp, 'learningRate'));
-        $backprop = self::readAttribute($mlp, 'backpropagation');
-        self::assertEquals(0.42, self::readAttribute($backprop, 'learningRate'));
+        self::assertEquals(0.42, $mlp->getLearningRate());
+        self::assertEquals(0.42, $mlp->getBackpropagation()->getLearningRate());
 
         $mlp->setLearningRate(0.24);
-        self::assertEquals(0.24, self::readAttribute($mlp, 'learningRate'));
-        $backprop = self::readAttribute($mlp, 'backpropagation');
-        self::assertEquals(0.24, self::readAttribute($backprop, 'learningRate'));
+        self::assertEquals(0.24, $mlp->getLearningRate());
+        self::assertEquals(0.24, $mlp->getBackpropagation()->getLearningRate());
     }
 
     public function testLearningRateSetterWithLayerObject(): void
@@ -95,14 +91,12 @@ class MultilayerPerceptronTest extends TestCase
             [5, [new Layer(3, Neuron::class, $activation_function), new Layer(5, Neuron::class, $activation_function)], [0, 1], 1000, null, 0.42]
        );
 
-        self::assertEquals(0.42, self::readAttribute($mlp, 'learningRate'));
-        $backprop = self::readAttribute($mlp, 'backpropagation');
-        self::assertEquals(0.42, self::readAttribute($backprop, 'learningRate'));
+        self::assertEquals(0.42, $mlp->getLearningRate());
+        self::assertEquals(0.42, $mlp->getBackpropagation()->getLearningRate());
 
         $mlp->setLearningRate(0.24);
-        self::assertEquals(0.24, self::readAttribute($mlp, 'learningRate'));
-        $backprop = self::readAttribute($mlp, 'backpropagation');
-        self::assertEquals(0.24, self::readAttribute($backprop, 'learningRate'));
+        self::assertEquals(0.24, $mlp->getLearningRate());
+        self::assertEquals(0.24, $mlp->getBackpropagation()->getLearningRate());
     }
 
     /**
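Context for the change, with a sketch that is not part of the patch: PHPUnit deprecated the reflection helpers readAttribute() and getObjectAttribute() in PHPUnit 8 and removed them in PHPUnit 9, so the tests above assert through public getters instead. The standalone PHP below shows the resulting pattern, with the Backpropagation class trimmed to the one property exercised here; the constructor signature is simplified for illustration.

<?php

declare(strict_types=1);

class Backpropagation
{
    /** @var float */
    private $learningRate;

    public function __construct(float $learningRate)
    {
        $this->learningRate = $learningRate;
    }

    // The public getter replaces self::readAttribute($backprop, 'learningRate').
    public function getLearningRate(): float
    {
        return $this->learningRate;
    }
}

$backprop = new Backpropagation(0.42);
var_dump($backprop->getLearningRate()); // float(0.42)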