Fix static analysis errors from phpstan upgrade to 0.12 (#426)

Author: Marcin Michalski (committed via GitHub)
Date: 2020-03-03 18:52:29 +01:00
parent deefbb36f2
commit 2ee0d373eb
27 changed files with 278 additions and 279 deletions

composer.lock (generated): 449 changed lines; diff suppressed because it is too large.


@@ -4,12 +4,18 @@ includes:
     - vendor/phpstan/phpstan-phpunit/rules.neon
 
 parameters:
+    checkGenericClassInNonGenericObjectType: false
+    checkMissingIterableValueType: false
     ignoreErrors:
         - '#Property Phpml\\Clustering\\KMeans\\Cluster\:\:\$points \(iterable\<Phpml\\Clustering\\KMeans\\Point\>\&SplObjectStorage\) does not accept SplObjectStorage#'
         - '#Phpml\\Dataset\\(.*)Dataset::__construct\(\) does not call parent constructor from Phpml\\Dataset\\ArrayDataset#'
         - '#Variable property access on .+#'
         - '#Variable method call on .+#'
+        - message: '#ReflectionClass#'
+          paths:
+            - src/Classification/Ensemble/AdaBoost.php
+            - src/Classification/Ensemble/Bagging.php
 
         # probably known value
         - '#Method Phpml\\Classification\\DecisionTree::getBestSplit\(\) should return Phpml\\Classification\\DecisionTree\\DecisionTreeLeaf but returns Phpml\\Classification\\DecisionTree\\DecisionTreeLeaf\|null#'
         - '#Call to an undefined method Phpml\\Helper\\Optimizer\\Optimizer::getCostValues\(\)#'
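The "Variable property access" and "Variable method call" ignores above cover dynamic member access that phpstan 0.12 cannot resolve statically, and the ReflectionClass entry scopes a similar ignore to the two ensemble classifiers. A minimal sketch of the kind of code that triggers such reports; the class and property names here are made up for illustration and are not taken from the library:

<?php

class HyperParams
{
    public $learningRate = 0.01;
    public $iterations = 100;

    public function set(string $name, $value): void
    {
        // phpstan flags this as "Variable property access on ..." because
        // the property name is only known at run time.
        $this->{$name} = $value;
    }
}

(new HyperParams())->set('learningRate', 0.05);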


@@ -104,11 +104,11 @@ class Apriori implements Associator
      */
     protected function predictSample(array $sample): array
     {
-        $predicts = array_values(array_filter($this->getRules(), function ($rule) use ($sample) {
+        $predicts = array_values(array_filter($this->getRules(), function ($rule) use ($sample): bool {
             return $this->equals($rule[self::ARRAY_KEY_ANTECEDENT], $sample);
         }));
 
-        return array_map(function ($rule) {
+        return array_map(static function ($rule) {
             return $rule[self::ARRAY_KEY_CONSEQUENT];
         }, $predicts);
     }
@@ -177,7 +177,7 @@ class Apriori implements Associator
         $cardinality = count($sample);
         $antecedents = $this->powerSet($sample);
 
-        return array_filter($antecedents, function ($antecedent) use ($cardinality) {
+        return array_filter($antecedents, static function ($antecedent) use ($cardinality): bool {
             return (count($antecedent) != $cardinality) && ($antecedent != []);
         });
     }
@@ -199,7 +199,7 @@ class Apriori implements Associator
             }
         }
 
-        return array_map(function ($entry) {
+        return array_map(static function ($entry): array {
             return [$entry];
         }, $items);
     }
@@ -213,7 +213,7 @@ class Apriori implements Associator
      */
     private function frequent(array $samples): array
     {
-        return array_values(array_filter($samples, function ($entry) {
+        return array_values(array_filter($samples, function ($entry): bool {
             return $this->support($entry) >= $this->support;
         }));
     }
@@ -288,7 +288,7 @@ class Apriori implements Associator
      */
     private function frequency(array $sample): int
     {
-        return count(array_filter($this->samples, function ($entry) use ($sample) {
+        return count(array_filter($this->samples, function ($entry) use ($sample): bool {
             return $this->subset($entry, $sample);
         }));
     }
@@ -303,7 +303,7 @@ class Apriori implements Associator
      */
     private function contains(array $system, array $set): bool
     {
-        return (bool) array_filter($system, function ($entry) use ($set) {
+        return (bool) array_filter($system, function ($entry) use ($set): bool {
             return $this->equals($entry, $set);
         });
     }
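The Apriori hunks above are representative of most closure edits in this commit: callbacks gain a native return type so phpstan 0.12 can check what they return, and closures that never touch $this are declared static so no object gets bound to them. A small self-contained sketch of the pattern (illustrative data, not library code):

<?php

$samples = [[1, 2], [3, 4, 5], [6]];

// ': int' lets phpstan verify every return path of the callback;
// 'static' is only valid because the closure never uses $this.
$sizes = array_map(static function (array $sample): int {
    return count($sample);
}, $samples);

var_dump($sizes); // 2, 3, 1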


@@ -58,7 +58,7 @@ class Adaline extends Perceptron
     protected function runTraining(array $samples, array $targets): void
     {
         // The cost function is the sum of squares
-        $callback = function ($weights, $sample, $target) {
+        $callback = function ($weights, $sample, $target): array {
             $this->weights = $weights;
 
             $output = $this->output($sample);


@@ -188,7 +188,7 @@ class LogisticRegression extends Adaline
          * The gradient of the cost function to be used with gradient descent:
          * ∇J(x) = -(y - h(x)) = (h(x) - y)
          */
-        return function ($weights, $sample, $y) use ($penalty) {
+        return function ($weights, $sample, $y) use ($penalty): array {
             $this->weights = $weights;
             $hX = $this->output($sample);
@@ -220,7 +220,7 @@ class LogisticRegression extends Adaline
          * The gradient of the cost function:
          * ∇J(x) = -(h(x) - y) . h(x) . (1 - h(x))
          */
-        return function ($weights, $sample, $y) use ($penalty) {
+        return function ($weights, $sample, $y) use ($penalty): array {
             $this->weights = $weights;
             $hX = $this->output($sample);


@@ -154,7 +154,7 @@ class Perceptron implements Classifier, IncrementalEstimator
     protected function runTraining(array $samples, array $targets): void
     {
         // The cost function is the sum of squares
-        $callback = function ($weights, $sample, $target) {
+        $callback = function ($weights, $sample, $target): array {
             $this->weights = $weights;
 
             $prediction = $this->outputClass($sample);


@@ -139,7 +139,7 @@ class FuzzyCMeans implements Clusterer
                 $total += $val;
             }
 
-            $this->membership[] = array_map(function ($val) use ($total) {
+            $this->membership[] = array_map(static function ($val) use ($total): float {
                 return $val / $total;
             }, $row);
         }


@@ -88,7 +88,7 @@ class Space extends SplObjectStorage
         $min = $this->newPoint(array_fill(0, $this->dimension, null));
         $max = $this->newPoint(array_fill(0, $this->dimension, null));
 
-        /** @var self $point */
+        /** @var Point $point */
         foreach ($this as $point) {
             for ($n = 0; $n < $this->dimension; ++$n) {
                 if ($min[$n] === null || $min[$n] > $point[$n]) {


@@ -35,8 +35,8 @@ class CsvDataset extends ArrayDataset
         }
 
         $samples = $targets = [];
 
-        while (($data = fgetcsv($handle, $maxLineLength, $delimiter)) !== false) {
-            $samples[] = array_slice((array) $data, 0, $features);
+        while ($data = fgetcsv($handle, $maxLineLength, $delimiter)) {
+            $samples[] = array_slice($data, 0, $features);
             $targets[] = $data[$features];
         }
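The CsvDataset hunk above swaps the '!== false' loop condition and the now-redundant '(array)' cast for a plain truthy check: fgetcsv() returns a non-empty array for every data row and false at end of file, so the loop body only ever sees arrays. A standalone sketch of the new loop shape (hypothetical file name and column count, not the library's reader):

<?php

// Hypothetical CSV layout: three feature columns followed by one target column.
$handle = fopen('dataset.csv', 'rb');
if ($handle === false) {
    throw new RuntimeException('Could not open dataset.csv');
}

$samples = $targets = [];
while ($data = fgetcsv($handle)) {
    // $data is guaranteed to be an array here, so no (array) cast is needed.
    $samples[] = array_slice($data, 0, 3);
    $targets[] = $data[3];
}

fclose($handle);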


@@ -179,13 +179,13 @@ class KernelPCA extends PCA
                 // k(x,y)=exp(-γ.|x-y|) where |..| is Euclidean distance
                 $dist = new Euclidean();
 
-                return function ($x, $y) use ($dist) {
+                return function ($x, $y) use ($dist): float {
                     return exp(-$this->gamma * $dist->sqDistance($x, $y));
                 };
 
             case self::KERNEL_SIGMOID:
                 // k(x,y)=tanh(γ.xT.y+c0) where c0=1
-                return function ($x, $y) {
+                return function ($x, $y): float {
                     $res = Matrix::dot($x, $y)[0] + 1.0;
 
                     return tanh((float) $this->gamma * $res);
@@ -195,7 +195,7 @@ class KernelPCA extends PCA
                 // k(x,y)=exp(-γ.|x-y|) where |..| is Manhattan distance
                 $dist = new Manhattan();
 
-                return function ($x, $y) use ($dist) {
+                return function ($x, $y) use ($dist): float {
                     return exp(-$this->gamma * $dist->distance($x, $y));
                 };


@@ -37,7 +37,7 @@ final class VarianceThreshold implements Transformer
     public function fit(array $samples, ?array $targets = null): void
     {
-        $this->variances = array_map(function (array $column) {
+        $this->variances = array_map(static function (array $column): float {
             return Variance::population($column);
         }, Matrix::transposeArray($samples));


@@ -38,7 +38,7 @@ class GD extends StochasticGD
             $this->updateWeightsWithUpdates($updates, $totalPenalty);
 
-            $this->costValues[] = array_sum($errors) / $this->sampleCount;
+            $this->costValues[] = array_sum($errors) / (int) $this->sampleCount;
 
             if ($this->earlyStop($theta)) {
                 break;


@@ -126,7 +126,7 @@ class Matrix
     public function transpose(): self
     {
         if ($this->rows === 1) {
-            $matrix = array_map(function ($el) {
+            $matrix = array_map(static function ($el): array {
                 return [$el];
             }, $this->matrix[0]);
         } else {


@@ -28,7 +28,7 @@ final class ANOVA
             throw new InvalidArgumentException('The array must have at least 2 elements');
         }
 
-        $samplesPerClass = array_map(function (array $class): int {
+        $samplesPerClass = array_map(static function (array $class): int {
             return count($class);
         }, $samples);
         $allSamples = (int) array_sum($samplesPerClass);
@@ -41,10 +41,10 @@ final class ANOVA
         $dfbn = $classes - 1;
         $dfwn = $allSamples - $classes;
 
-        $msb = array_map(function ($s) use ($dfbn) {
+        $msb = array_map(static function ($s) use ($dfbn) {
             return $s / $dfbn;
         }, $ssbn);
-        $msw = array_map(function ($s) use ($dfwn) {
+        $msw = array_map(static function ($s) use ($dfwn) {
             if ($dfwn === 0) {
                 return 1;
             }
@@ -76,7 +76,7 @@ final class ANOVA
     private static function sumOfFeaturesPerClass(array $samples): array
     {
-        return array_map(function (array $class) {
+        return array_map(static function (array $class): array {
             $sum = array_fill(0, count($class[0]), 0);
 
             foreach ($class as $sample) {
                 foreach ($sample as $index => $feature) {
@@ -97,7 +97,7 @@ final class ANOVA
             }
         }
 
-        return array_map(function ($sum) {
+        return array_map(static function ($sum) {
             return $sum ** 2;
         }, $squares);
     }


@@ -50,7 +50,7 @@ class StandardDeviation
         $mean = Mean::arithmetic($numbers);
 
         return array_sum(array_map(
-            function ($val) use ($mean) {
+            static function ($val) use ($mean): float {
                 return ($val - $mean) ** 2;
             },
             $numbers


@@ -148,7 +148,7 @@ class ClassificationReport
         $precision = $this->computePrecision($truePositive, $falsePositive);
         $recall = $this->computeRecall($truePositive, $falseNegative);
-        $f1score = $this->computeF1Score((float) $precision, (float) $recall);
+        $f1score = $this->computeF1Score($precision, $recall);
 
         $this->average = compact('precision', 'recall', 'f1score');
     }
@@ -186,10 +186,7 @@ class ClassificationReport
         }
     }
 
-    /**
-     * @return float|string
-     */
-    private function computePrecision(int $truePositive, int $falsePositive)
+    private function computePrecision(int $truePositive, int $falsePositive): float
     {
         $divider = $truePositive + $falsePositive;
         if ($divider == 0) {
@@ -199,10 +196,7 @@ class ClassificationReport
         return $truePositive / $divider;
     }
 
-    /**
-     * @return float|string
-     */
-    private function computeRecall(int $truePositive, int $falseNegative)
+    private function computeRecall(int $truePositive, int $falseNegative): float
     {
         $divider = $truePositive + $falseNegative;
         if ($divider == 0) {
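Replacing the '@return float|string' docblocks with a real ': float' return type works because the zero-divider guard can return a float instead of a mixed-type placeholder, keeping every code path on one type (which is also why the '(float)' casts around computeF1Score() could be dropped). A reduced sketch of the typed guard; the exact fallback value the library uses is not visible in this hunk, so 0.0 below is an assumption:

<?php

function computePrecision(int $truePositive, int $falsePositive): float
{
    $divider = $truePositive + $falsePositive;
    if ($divider === 0) {
        // A float fallback keeps the declared return type honest,
        // where the old code was documented as float|string.
        return 0.0;
    }

    return $truePositive / $divider;
}

var_dump(computePrecision(8, 2)); // float(0.8)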


@@ -33,7 +33,7 @@ class Neuron implements Node
     public function __construct(?ActivationFunction $activationFunction = null)
     {
-        $this->activationFunction = $activationFunction ?: new Sigmoid();
+        $this->activationFunction = $activationFunction ?? new Sigmoid();
     }
 
     public function addSynapse(Synapse $synapse): void


@@ -24,7 +24,7 @@ class Synapse
     public function __construct(Node $node, ?float $weight = null)
     {
         $this->node = $node;
-        $this->weight = $weight ?: $this->generateRandomWeight();
+        $this->weight = $weight ?? $this->generateRandomWeight();
     }
 
     public function getOutput(): float
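The Neuron and Synapse constructor changes above are behavioural, not just stylistic: the elvis operator '?:' falls back on any falsy value, so an explicitly passed weight of 0.0 would be silently replaced by a random one, while '??' only falls back on null, which is what the nullable parameter types actually express. A quick illustration:

<?php

$weight = 0.0;

// Elvis operator: 0.0 is falsy, so the fallback wins
// even though a weight was explicitly provided.
var_dump($weight ?: 0.42); // float(0.42)

// Null coalescing: only null triggers the fallback.
var_dump($weight ?? 0.42); // float(0)

$weight = null;
var_dump($weight ?? 0.42); // float(0.42)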


@@ -61,12 +61,12 @@ class Pipeline implements Estimator, Transformer
      */
     public function predict(array $samples)
     {
-        $this->transform($samples);
-
         if ($this->estimator === null) {
             throw new InvalidOperationException('Pipeline without estimator can\'t use predict method');
         }
 
+        $this->transform($samples);
+
         return $this->estimator->predict($samples);
     }
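Moving $this->transform($samples) below the estimator guard in Pipeline::predict() means a pipeline without an estimator fails immediately instead of running every transformer first and throwing afterwards, and it lets phpstan see that $this->estimator is non-null where it is used. A reduced sketch of the guard-first shape with simplified types (callable transformers and a generic LogicException stand in for the real Phpml interfaces and exception):

<?php

final class TinyPipeline
{
    /** @var callable[] */
    private $transformers;

    /** @var object|null */
    private $estimator;

    public function __construct(array $transformers, ?object $estimator = null)
    {
        $this->transformers = $transformers;
        $this->estimator = $estimator;
    }

    public function predict(array $samples): array
    {
        // Guard first: nothing is transformed for a pipeline that cannot
        // predict anyway, and the estimator is known to be non-null below.
        if ($this->estimator === null) {
            throw new LogicException('Pipeline without estimator cannot predict');
        }

        foreach ($this->transformers as $transform) {
            $samples = $transform($samples);
        }

        return $this->estimator->predict($samples);
    }
}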


@@ -121,7 +121,7 @@ final class DecisionTreeRegressor extends CART implements Regression
     protected function splitImpurity(array $groups): float
     {
-        $samplesCount = (int) array_sum(array_map(static function (array $group) {
+        $samplesCount = (int) array_sum(array_map(static function (array $group): int {
             return count($group[0]);
         }, $groups));


@@ -50,7 +50,7 @@ class DecisionNode extends BinaryNode implements PurityNode
         $this->value = $value;
         $this->groups = $groups;
         $this->impurity = $impurity;
-        $this->samplesCount = (int) array_sum(array_map(function (array $group) {
+        $this->samplesCount = (int) array_sum(array_map(static function (array $group): int {
             return count($group[0]);
         }, $groups));
     }


@@ -40,7 +40,7 @@ class KernelPCATest extends TestCase
         // during the calculation of eigenValues, we have to compare
         // absolute value of the values
         array_map(function ($val1, $val2) use ($epsilon): void {
-            self::assertEqualsWithDelta(abs($val1), abs($val2), $epsilon);
+            self::assertEqualsWithDelta(abs($val1[0]), abs($val2[0]), $epsilon);
         }, $transformed, $reducedData);
 
         // Fitted KernelPCA object can also transform an arbitrary sample of the


@@ -42,7 +42,7 @@ class PCATest extends TestCase
         // during the calculation of eigenValues, we have to compare
         // absolute value of the values
         array_map(function ($val1, $val2) use ($epsilon): void {
-            self::assertEqualsWithDelta(abs($val1), abs($val2), $epsilon);
+            self::assertEqualsWithDelta(abs($val1[0]), abs($val2[0]), $epsilon);
         }, $transformed, $reducedData);
 
         // Test fitted PCA object to transform an arbitrary sample of the
@@ -52,7 +52,7 @@ class PCATest extends TestCase
         $newRow2 = $pca->transform($row);
 
         array_map(function ($val1, $val2) use ($epsilon): void {
-            self::assertEqualsWithDelta(abs($val1), abs($val2), $epsilon);
+            self::assertEqualsWithDelta(abs($val1[0][0]), abs($val2[0]), $epsilon);
         }, $newRow, $newRow2);
     }
 }


@@ -21,7 +21,7 @@ class ConjugateGradientTest extends TestCase
             $targets[] = -1 + 2 * $x;
         }
 
-        $callback = function ($theta, $sample, $target) {
+        $callback = static function ($theta, $sample, $target): array {
             $y = $theta[0] + $theta[1] * $sample[0];
             $cost = (($y - $target) ** 2) / 2;
             $grad = $y - $target;
@@ -47,7 +47,7 @@ class ConjugateGradientTest extends TestCase
             $targets[] = -1 + 2 * $x;
         }
 
-        $callback = function ($theta, $sample, $target) {
+        $callback = static function ($theta, $sample, $target): array {
             $y = $theta[0] + $theta[1] * $sample[0];
             $cost = (($y - $target) ** 2) / 2;
             $grad = $y - $target;
@@ -76,7 +76,7 @@ class ConjugateGradientTest extends TestCase
             $targets[] = -1 + 2 * $x0 - 3 * $x1;
         }
 
-        $callback = function ($theta, $sample, $target) {
+        $callback = static function ($theta, $sample, $target): array {
             $y = $theta[0] + $theta[1] * $sample[0] + $theta[2] * $sample[1];
             $cost = (($y - $target) ** 2) / 2;
             $grad = $y - $target;


@@ -20,7 +20,7 @@ class GDTest extends TestCase
             $targets[] = -1 + 2 * $x;
         }
 
-        $callback = function ($theta, $sample, $target) {
+        $callback = static function ($theta, $sample, $target): array {
             $y = $theta[0] + $theta[1] * $sample[0];
             $cost = (($y - $target) ** 2) / 2;
             $grad = $y - $target;
@@ -47,7 +47,7 @@ class GDTest extends TestCase
             $targets[] = -1 + 2 * $x0 - 3 * $x1;
         }
 
-        $callback = function ($theta, $sample, $target) {
+        $callback = static function ($theta, $sample, $target): array {
             $y = $theta[0] + $theta[1] * $sample[0] + $theta[2] * $sample[1];
             $cost = (($y - $target) ** 2) / 2;
             $grad = $y - $target;


@@ -20,7 +20,7 @@ class StochasticGDTest extends TestCase
             $targets[] = -1 + 2 * $x;
         }
 
-        $callback = function ($theta, $sample, $target) {
+        $callback = static function ($theta, $sample, $target): array {
             $y = $theta[0] + $theta[1] * $sample[0];
             $cost = (($y - $target) ** 2) / 2;
             $grad = $y - $target;
@@ -47,7 +47,7 @@ class StochasticGDTest extends TestCase
             $targets[] = -1 + 2 * $x0 - 3 * $x1;
         }
 
-        $callback = function ($theta, $sample, $target) {
+        $callback = static function ($theta, $sample, $target): array {
             $y = $theta[0] + $theta[1] * $sample[0] + $theta[2] * $sample[1];
             $cost = (($y - $target) ** 2) / 2;
             $grad = $y - $target;


@@ -126,7 +126,7 @@ class NormalizerTest extends TestCase
         foreach ($samples as $sample) {
             $errors = array_filter(
                 $sample,
-                function ($element) {
+                function ($element): bool {
                     return $element < -3 || $element > 3;
                 }
             );