Update phpstan & easy coding standard (#366)

Marcin Michalski 2019-03-25 14:55:14 +01:00 committed by Arkadiusz Kondas
parent 5e02b893e9
commit d3888efa7a
16 changed files with 652 additions and 495 deletions

composer.json

@@ -24,9 +24,9 @@
     },
     "require-dev": {
         "phpbench/phpbench": "^0.14.0",
-        "phpstan/phpstan-phpunit": "^0.10",
-        "phpstan/phpstan-shim": "^0.10",
-        "phpstan/phpstan-strict-rules": "^0.10",
+        "phpstan/phpstan-phpunit": "^0.11",
+        "phpstan/phpstan-shim": "^0.11",
+        "phpstan/phpstan-strict-rules": "^0.11",
         "phpunit/phpunit": "^7.0.0",
         "symplify/coding-standard": "^5.1",
         "symplify/easy-coding-standard": "^5.1"

composer.lock (generated file): 1048 lines changed; diff suppressed because it is too large.

ecs.yml: 16 lines changed

@@ -30,14 +30,14 @@ services:
     Symplify\CodingStandard\Fixer\ArrayNotation\StandaloneLineInMultilineArrayFixer: ~

 parameters:
-    exclude_checkers:
-        # from strict.neon
-        - 'PhpCsFixer\Fixer\PhpUnit\PhpUnitStrictFixer'
-        - 'PhpCsFixer\Fixer\Strict\StrictComparisonFixer'
-        # personal preference
-        - 'PhpCsFixer\Fixer\Operator\NotOperatorWithSuccessorSpaceFixer'
     skip:
+        # from strict.neon
+        PhpCsFixer\Fixer\PhpUnit\PhpUnitStrictFixer: ~
+        PhpCsFixer\Fixer\Strict\StrictComparisonFixer: ~
+        # personal preference
+        PhpCsFixer\Fixer\Operator\NotOperatorWithSuccessorSpaceFixer: ~
         PhpCsFixer\Fixer\Alias\RandomApiMigrationFixer:
             # random_int() breaks code
             - 'src/CrossValidation/RandomSplit.php'
@@ -65,4 +65,4 @@ parameters:
         SlevomatCodingStandard\Sniffs\TypeHints\TypeHintDeclarationSniff.MissingTraversablePropertyTypeHintSpecification: ~
         # assignment in "while ($var = ...)" are ok
         PHP_CodeSniffer\Standards\Generic\Sniffs\CodeAnalysis\AssignmentInConditionSniff.FoundInWhileCondition:

phpstan.neon

@@ -7,9 +7,8 @@ parameters:
     ignoreErrors:
         - '#Property Phpml\\Clustering\\KMeans\\Cluster\:\:\$points \(iterable\<Phpml\\Clustering\\KMeans\\Point\>\&SplObjectStorage\) does not accept SplObjectStorage#'
         - '#Phpml\\Dataset\\(.*)Dataset::__construct\(\) does not call parent constructor from Phpml\\Dataset\\ArrayDataset#'
-
-        # wide range cases
-        - '#Parameter \#1 \$coordinates of class Phpml\\Clustering\\KMeans\\Point constructor expects array, array<int>\|Phpml\\Clustering\\KMeans\\Point given#'
+        - '#Variable property access on .+#'
+        - '#Variable method call on .+#'

         # probably known value
         - '#Method Phpml\\Classification\\DecisionTree::getBestSplit\(\) should return Phpml\\Classification\\DecisionTree\\DecisionTreeLeaf but returns Phpml\\Classification\\DecisionTree\\DecisionTreeLeaf\|null#'
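The two new ignore patterns cover dynamic property and method access, which phpstan's strict rules report. A minimal standalone illustration of the kind of code they match (hypothetical class, not taken from the library):

```php
<?php

declare(strict_types=1);

class Config
{
    /** @var float */
    public $learningRate = 0.01;

    public function reset(): void
    {
        $this->learningRate = 0.01;
    }
}

$config = new Config();
$property = 'learningRate';
$method = 'reset';

// "Variable property access on ...": the property name is only known at runtime
$value = $config->{$property};
var_dump($value);

// "Variable method call on ...": the method name is only known at runtime
$config->{$method}();
```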


@@ -41,7 +41,7 @@ class RandomForest extends Bagging
      * Default value for the ratio is 'log' which results in log(numFeatures, 2) + 1
      * features to be taken into consideration while selecting subspace of features
      *
-     * @param string|float $ratio
+     * @param mixed $ratio
      */
     public function setFeatureSubsetRatio($ratio): self
     {
@@ -73,7 +73,9 @@ class RandomForest extends Bagging
             throw new InvalidArgumentException('RandomForest can only use DecisionTree as base classifier');
         }

-        return parent::setClassifer($classifier, $classifierOptions);
+        parent::setClassifer($classifier, $classifierOptions);
+
+        return $this;
     }

     /**
@@ -122,12 +124,16 @@ class RandomForest extends Bagging
     }

     /**
-     * @param DecisionTree $classifier
-     *
      * @return DecisionTree
      */
     protected function initSingleClassifier(Classifier $classifier): Classifier
     {
+        if (!$classifier instanceof DecisionTree) {
+            throw new InvalidArgumentException(
+                sprintf('Classifier %s expected, got %s', DecisionTree::class, get_class($classifier))
+            );
+        }
+
         if (is_float($this->featureSubsetRatio)) {
             $featureCount = (int) ($this->featureSubsetRatio * $this->featureCount);
         } elseif ($this->featureSubsetRatio === 'sqrt') {
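Taken together, these hunks keep the fluent setter contract (setClassifer() still returns the forest) while moving the DecisionTree check into initSingleClassifier(). A minimal usage sketch of the touched API, with illustrative values that are not taken from the commit:

```php
<?php

declare(strict_types=1);

use Phpml\Classification\DecisionTree;
use Phpml\Classification\Ensemble\RandomForest;

$forest = new RandomForest(50); // 50 trees in the ensemble

// 'sqrt', 'log' or a float ratio are accepted; chaining still works because
// setFeatureSubsetRatio() and setClassifer() both return $this.
$forest
    ->setFeatureSubsetRatio('sqrt')
    ->setClassifer(DecisionTree::class);

$samples = [[1, 20], [2, 30], [3, 40], [4, 50]];
$targets = ['a', 'a', 'b', 'b'];

$forest->train($samples, $targets);
echo $forest->predict([2, 25]); // expected 'a' for data this simple
```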


@@ -226,7 +226,7 @@ class LogisticRegression extends Adaline
         $y = $y < 0 ? 0 : 1;

-        $error = ($y - $hX) ** 2;
+        $error = (($y - $hX) ** 2);
         $gradient = -($y - $hX) * $hX * (1 - $hX);

         return [$error, $gradient, $penalty];
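Only the parentheses around ** change here; the values are identical. For readers checking the math: when $hX is a sigmoid output, $gradient is the derivative of the squared error with respect to the pre-activation, up to a constant factor that the learning rate absorbs. A quick finite-difference sanity check (standalone sketch, not library code):

```php
<?php

declare(strict_types=1);

$sigmoid = static function (float $z): float {
    return 1.0 / (1.0 + exp(-$z));
};

$y = 1.0;
$z = 0.3;
$eps = 1e-6;

$h = $sigmoid($z);

// Same expression as in the diff: -(y - h) * h * (1 - h)
$analytic = -($y - $h) * $h * (1 - $h);

// Numerical derivative of ((y - sigmoid(z)) ** 2) / 2; the diff's $error omits
// the 1/2, so its exact derivative would be twice this value, a constant
// factor that folds into the learning rate.
$errorAt = static function (float $z) use ($sigmoid, $y): float {
    return (($y - $sigmoid($z)) ** 2) / 2;
};
$numeric = ($errorAt($z + $eps) - $errorAt($z - $eps)) / (2 * $eps);

printf("analytic: %.8f  numeric: %.8f\n", $analytic, $numeric);
```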


@@ -77,9 +77,6 @@ class FuzzyCMeans implements Clusterer
         return $this->membership;
     }

-    /**
-     * @param Point[]|int[][] $samples
-     */
     public function cluster(array $samples): array
     {
         // Initialize variables, clusters and membership matrix
@@ -210,7 +207,7 @@ class FuzzyCMeans implements Clusterer
                     $this->samples[$col]
                 );
-                $val = ($dist1 / $dist2) ** 2.0 / ($this->fuzziness - 1);
+                $val = (($dist1 / $dist2) ** 2.0) / ($this->fuzziness - 1);
                 $sum += $val;
             }
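The parentheses added here (and in the similar $cost lines in the optimizer tests further down) only spell out PHP's existing precedence: ** binds tighter than /, so the value is unchanged. A standalone check with illustrative numbers:

```php
<?php

declare(strict_types=1);

$dist1 = 2.0;
$dist2 = 4.0;
$fuzziness = 3.0;

// '**' already binds tighter than '/', so both spellings evaluate identically.
$implicit = ($dist1 / $dist2) ** 2.0 / ($fuzziness - 1);
$explicit = (($dist1 / $dist2) ** 2.0) / ($fuzziness - 1);

var_dump($implicit === $explicit); // bool(true); both are 0.125
```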


@@ -24,7 +24,7 @@ class SvmDataset extends ArrayDataset
         $targets = [];
         $maxIndex = 0;
         while (false !== $line = fgets($handle)) {
-            [$sample, $target, $maxIndex] = self::processLine((string) $line, $maxIndex);
+            [$sample, $target, $maxIndex] = self::processLine($line, $maxIndex);
             $samples[] = $sample;
             $targets[] = $target;
         }
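The (string) cast can go because fgets() returns string|false and the false !== guard in the loop condition already narrows $line to string, so the cast was redundant. The same pattern in isolation (in-memory stream, illustrative data):

```php
<?php

declare(strict_types=1);

$handle = fopen('php://memory', 'r+b');
fwrite($handle, "1 1:2.5 2:-1.0\n0 1:0.5\n");
rewind($handle);

while (false !== $line = fgets($handle)) {
    // Inside the loop $line is guaranteed to be a string, so no cast is needed.
    echo trim($line), PHP_EOL;
}

fclose($handle);
```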


@@ -46,7 +46,7 @@ final class UnivariateLinearRegression implements ScoringFunction
         foreach (array_keys($samples[0]) as $index) {
             $featureColumn = array_column($samples, $index);
             $correlations[$index] =
-                (Matrix::dot($targets, $featureColumn)[0] / (new Matrix($featureColumn, false))->transpose()->frobeniusNorm())
+                Matrix::dot($targets, $featureColumn)[0] / (new Matrix($featureColumn, false))->transpose()->frobeniusNorm()
                 / (new Matrix($targets, false))->frobeniusNorm();
         }
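With or without the removed outer parentheses, the expression divides a dot product by the two Euclidean norms (the Frobenius norm of a single-column matrix). If the feature column and the targets are mean-centered this is the Pearson correlation; otherwise it is their cosine similarity:

\[ r_j = \frac{\langle t,\, x_j \rangle}{\lVert x_j \rVert_2 \,\lVert t \rVert_2} \]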


@@ -4,6 +4,7 @@ declare(strict_types=1);

 namespace Phpml\Math\Kernel;

+use Phpml\Exception\InvalidArgumentException;
 use Phpml\Math\Kernel;
 use Phpml\Math\Product;
@@ -19,12 +20,12 @@ class RBF implements Kernel
         $this->gamma = $gamma;
     }

-    /**
-     * @param array $a
-     * @param array $b
-     */
     public function compute($a, $b): float
     {
+        if (!is_array($a) || !is_array($b)) {
+            throw new InvalidArgumentException(sprintf('Arguments of %s must be arrays', __METHOD__));
+        }
+
         $score = 2 * Product::scalar($a, $b);
         $squares = Product::scalar($a, $a) + Product::scalar($b, $b);
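$score and $squares come from expanding the squared Euclidean distance into dot products, which is what Product::scalar() provides; the kernel value is then presumably exp(-gamma * ($squares - $score)), the standard RBF form. The identity, checked standalone with plain arrays:

```php
<?php

declare(strict_types=1);

$a = [1.0, 2.0, 3.0];
$b = [4.0, 5.0, 6.0];

$dot = static function (array $x, array $y): float {
    return array_sum(array_map(static function (float $xi, float $yi): float {
        return $xi * $yi;
    }, $x, $y));
};

// ||a - b||^2 computed directly ...
$direct = array_sum(array_map(static function (float $ai, float $bi): float {
    return ($ai - $bi) ** 2;
}, $a, $b));

// ... equals <a,a> + <b,b> - 2<a,b>, i.e. the "$squares - $score" used above.
$expanded = $dot($a, $a) + $dot($b, $b) - 2 * $dot($a, $b);

var_dump($direct, $expanded); // both float(27)
```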


@@ -502,7 +502,8 @@ class EigenvalueDecomposition
                 }

                 // Double division avoids possible underflow
-                $g = ($g / $this->ort[$m]) / $this->H[$m][$m - 1];
+                $g /= $this->ort[$m];
+                $g /= $this->H[$m][$m - 1];
                 for ($i = $m; $i <= $high; ++$i) {
                     $this->V[$i][$j] += $g * $this->ort[$i];
                 }
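Both the old and the new form already divide twice; the change is purely cosmetic. The comment refers to the alternative of dividing by the product ort[m] * H[m][m - 1], which can underflow to zero even when sequential division stays representable. A small standalone demonstration with illustrative magnitudes:

```php
<?php

declare(strict_types=1);

$g = 1.0e-200;
$ort = 1.0e-180;
$h = 1.0e-180;

// The product underflows to 0.0 (the smallest positive double is ~4.9e-324),
// so dividing by it would fail even though the true quotient is representable.
var_dump($ort * $h); // float(0)

// Sequential division keeps every intermediate value in range.
var_dump($g / $ort / $h); // float(1.0E+160)
```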
@@ -734,7 +735,7 @@ class EigenvalueDecomposition
             // Double QR step involving rows l:n and columns m:n
             for ($k = $m; $k <= $n - 1; ++$k) {
-                $notlast = ($k != $n - 1);
+                $notlast = $k != $n - 1;
                 if ($k != $m) {
                     $p = $this->H[$k][$k - 1];
                     $q = $this->H[$k + 1][$k - 1];


@@ -59,8 +59,14 @@ abstract class MultilayerPerceptron extends LayeredNetwork implements Estimator,
     /**
      * @throws InvalidArgumentException
      */
-    public function __construct(int $inputLayerFeatures, array $hiddenLayers, array $classes, int $iterations = 10000, ?ActivationFunction $activationFunction = null, float $learningRate = 1)
-    {
+    public function __construct(
+        int $inputLayerFeatures,
+        array $hiddenLayers,
+        array $classes,
+        int $iterations = 10000,
+        ?ActivationFunction $activationFunction = null,
+        float $learningRate = 1.
+    ) {
         if (count($hiddenLayers) === 0) {
             throw new InvalidArgumentException('Provide at least 1 hidden layer');
         }
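The constructor is only re-wrapped to one parameter per line (and 1 becomes the float literal 1.); the signature itself is unchanged. For orientation, a minimal sketch of how a concrete subclass is constructed, assuming the MLPClassifier that ships with php-ml:

```php
<?php

declare(strict_types=1);

use Phpml\Classification\MLPClassifier;

// 4 input features, one hidden layer of 2 neurons, three classes,
// 1000 iterations; activation function and learning rate keep their defaults.
$network = new MLPClassifier(4, [2], ['a', 'b', 'c'], 1000);

$network->train(
    [[1, 0, 0, 0], [0, 1, 1, 0], [1, 1, 1, 1], [0, 0, 0, 0]],
    ['a', 'b', 'c', 'a']
);

echo $network->predict([1, 0, 0, 0]);
```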


@@ -23,7 +23,7 @@ class ConjugateGradientTest extends TestCase
         $callback = function ($theta, $sample, $target) {
             $y = $theta[0] + $theta[1] * $sample[0];

-            $cost = ($y - $target) ** 2 / 2;
+            $cost = (($y - $target) ** 2) / 2;
             $grad = $y - $target;

             return [$cost, $grad];
@@ -49,7 +49,7 @@ class ConjugateGradientTest extends TestCase
         $callback = function ($theta, $sample, $target) {
             $y = $theta[0] + $theta[1] * $sample[0];

-            $cost = ($y - $target) ** 2 / 2;
+            $cost = (($y - $target) ** 2) / 2;
             $grad = $y - $target;

             return [$cost, $grad];
@@ -78,7 +78,7 @@ class ConjugateGradientTest extends TestCase
         $callback = function ($theta, $sample, $target) {
             $y = $theta[0] + $theta[1] * $sample[0] + $theta[2] * $sample[1];

-            $cost = ($y - $target) ** 2 / 2;
+            $cost = (($y - $target) ** 2) / 2;
             $grad = $y - $target;

             return [$cost, $grad];


@@ -22,7 +22,7 @@ class GDTest extends TestCase
         $callback = function ($theta, $sample, $target) {
             $y = $theta[0] + $theta[1] * $sample[0];

-            $cost = ($y - $target) ** 2 / 2;
+            $cost = (($y - $target) ** 2) / 2;
             $grad = $y - $target;

             return [$cost, $grad];
@@ -49,7 +49,7 @@ class GDTest extends TestCase
         $callback = function ($theta, $sample, $target) {
             $y = $theta[0] + $theta[1] * $sample[0] + $theta[2] * $sample[1];

-            $cost = ($y - $target) ** 2 / 2;
+            $cost = (($y - $target) ** 2) / 2;
             $grad = $y - $target;

             return [$cost, $grad];


@@ -22,7 +22,7 @@ class StochasticGDTest extends TestCase
         $callback = function ($theta, $sample, $target) {
             $y = $theta[0] + $theta[1] * $sample[0];

-            $cost = ($y - $target) ** 2 / 2;
+            $cost = (($y - $target) ** 2) / 2;
             $grad = $y - $target;

             return [$cost, $grad];
@@ -49,7 +49,7 @@ class StochasticGDTest extends TestCase
         $callback = function ($theta, $sample, $target) {
             $y = $theta[0] + $theta[1] * $sample[0] + $theta[2] * $sample[1];

-            $cost = ($y - $target) ** 2 / 2;
+            $cost = (($y - $target) ** 2) / 2;
             $grad = $y - $target;

             return [$cost, $grad];


@@ -4,6 +4,7 @@ declare(strict_types=1);

 namespace Phpml\Tests\Math\Kernel;

+use Phpml\Exception\InvalidArgumentException;
 use Phpml\Math\Kernel\RBF;
 use PHPUnit\Framework\TestCase;
@@ -23,4 +24,12 @@ class RBFTest extends TestCase
         self::assertEquals(0.00451, $rbf->compute([1, 2, 3], [4, 5, 6]), '', $delta = 0.0001);
         self::assertEquals(0, $rbf->compute([4, 5], [1, 100]));
     }
+
+    public function testThrowExceptionWhenComputeArgumentIsNotAnArray(): void
+    {
+        $this->expectException(InvalidArgumentException::class);
+        $this->expectExceptionMessage('Arguments of Phpml\\Math\\Kernel\\RBF::compute must be arrays');
+
+        (new RBF(0.1))->compute([0], 1.0);
+    }
 }