Remove deprecated PHPUnit readAttribute/getObjectAttribute method calls (#372)

This commit is contained in:
Arkadiusz Kondas 2019-04-12 07:49:30 +02:00 committed by GitHub
parent db82afa263
commit f6aa1a59b0
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
6 changed files with 35 additions and 22 deletions

View File

@ -18,7 +18,8 @@ services:
PhpCsFixer\Fixer\Operator\BinaryOperatorSpacesFixer:
align_double_arrow: false
align_equals: false
PhpCsFixer\Fixer\PhpUnit\PhpUnitTestCaseStaticMethodCallsFixer:
call_type: 'self'
# phpdoc
PhpCsFixer\Fixer\Phpdoc\PhpdocSeparationFixer: ~
PhpCsFixer\Fixer\Phpdoc\PhpdocAlignFixer: ~

View File

@ -48,6 +48,11 @@ abstract class Optimizer
return $this;
}
/**
 * Returns the current weight (theta) vector maintained by the optimizer.
 *
 * @return array the weights as last set via setTheta() or computed by runOptimization()
 */
public function theta(): array
{
    $weights = $this->theta;

    return $weights;
}
/**
* Executes the optimization with the given samples & targets
* and returns the weights

View File

@ -129,6 +129,16 @@ abstract class MultilayerPerceptron extends LayeredNetwork implements Estimator,
return $result;
}
/**
 * Exposes the network's current learning rate.
 *
 * @return float the learning rate stored on this perceptron
 */
public function getLearningRate(): float
{
    $rate = $this->learningRate;

    return $rate;
}
/**
 * Exposes the backpropagation trainer used by this network.
 *
 * @return Backpropagation the trainer instance held by this perceptron
 */
public function getBackpropagation(): Backpropagation
{
    $trainer = $this->backpropagation;

    return $trainer;
}
/**
* @param mixed $target
*/

View File

@ -34,6 +34,11 @@ class Backpropagation
$this->learningRate = $learningRate;
}
/**
 * Returns the learning rate this trainer applies during weight updates.
 *
 * @return float the learning rate as last set via setLearningRate()
 */
public function getLearningRate(): float
{
    $rate = $this->learningRate;

    return $rate;
}
/**
* @param mixed $targetClass
*/

View File

@ -26,9 +26,7 @@ class OptimizerTest extends TestCase
$optimizer = $this->getMockForAbstractClass(Optimizer::class, [2]);
$object = $optimizer->setTheta([0.3, 1]);
$theta = self::getObjectAttribute($optimizer, 'theta');
self::assertSame($object, $optimizer);
self::assertSame([0.3, 1], $theta);
self::assertSame([0.3, 1], $object->theta());
}
}

View File

@ -55,14 +55,12 @@ class MultilayerPerceptronTest extends TestCase
[5, [3], [0, 1], 1000, null, 0.42]
);
self::assertEquals(0.42, self::readAttribute($mlp, 'learningRate'));
$backprop = self::readAttribute($mlp, 'backpropagation');
self::assertEquals(0.42, self::readAttribute($backprop, 'learningRate'));
self::assertEquals(0.42, $mlp->getLearningRate());
self::assertEquals(0.42, $mlp->getBackpropagation()->getLearningRate());
$mlp->setLearningRate(0.24);
self::assertEquals(0.24, self::readAttribute($mlp, 'learningRate'));
$backprop = self::readAttribute($mlp, 'backpropagation');
self::assertEquals(0.24, self::readAttribute($backprop, 'learningRate'));
self::assertEquals(0.24, $mlp->getLearningRate());
self::assertEquals(0.24, $mlp->getBackpropagation()->getLearningRate());
}
public function testLearningRateSetterWithCustomActivationFunctions(): void
@ -75,14 +73,12 @@ class MultilayerPerceptronTest extends TestCase
[5, [[3, $activation_function], [5, $activation_function]], [0, 1], 1000, null, 0.42]
);
self::assertEquals(0.42, self::readAttribute($mlp, 'learningRate'));
$backprop = self::readAttribute($mlp, 'backpropagation');
self::assertEquals(0.42, self::readAttribute($backprop, 'learningRate'));
self::assertEquals(0.42, $mlp->getLearningRate());
self::assertEquals(0.42, $mlp->getBackpropagation()->getLearningRate());
$mlp->setLearningRate(0.24);
self::assertEquals(0.24, self::readAttribute($mlp, 'learningRate'));
$backprop = self::readAttribute($mlp, 'backpropagation');
self::assertEquals(0.24, self::readAttribute($backprop, 'learningRate'));
self::assertEquals(0.24, $mlp->getLearningRate());
self::assertEquals(0.24, $mlp->getBackpropagation()->getLearningRate());
}
public function testLearningRateSetterWithLayerObject(): void
@ -95,14 +91,12 @@ class MultilayerPerceptronTest extends TestCase
[5, [new Layer(3, Neuron::class, $activation_function), new Layer(5, Neuron::class, $activation_function)], [0, 1], 1000, null, 0.42]
);
self::assertEquals(0.42, self::readAttribute($mlp, 'learningRate'));
$backprop = self::readAttribute($mlp, 'backpropagation');
self::assertEquals(0.42, self::readAttribute($backprop, 'learningRate'));
self::assertEquals(0.42, $mlp->getLearningRate());
self::assertEquals(0.42, $mlp->getBackpropagation()->getLearningRate());
$mlp->setLearningRate(0.24);
self::assertEquals(0.24, self::readAttribute($mlp, 'learningRate'));
$backprop = self::readAttribute($mlp, 'backpropagation');
self::assertEquals(0.24, self::readAttribute($backprop, 'learningRate'));
self::assertEquals(0.24, $mlp->getLearningRate());
self::assertEquals(0.24, $mlp->getBackpropagation()->getLearningRate());
}
/**