Mirror of https://github.com/Llewellynvdm/php-ml.git (synced 2024-11-21 12:35:10 +00:00)

Update phpstan & easy coding standard (#366)

This commit is contained in:
parent 5e02b893e9
commit d3888efa7a

composer.json
@@ -24,9 +24,9 @@
     },
     "require-dev": {
         "phpbench/phpbench": "^0.14.0",
-        "phpstan/phpstan-phpunit": "^0.10",
-        "phpstan/phpstan-shim": "^0.10",
-        "phpstan/phpstan-strict-rules": "^0.10",
+        "phpstan/phpstan-phpunit": "^0.11",
+        "phpstan/phpstan-shim": "^0.11",
+        "phpstan/phpstan-strict-rules": "^0.11",
         "phpunit/phpunit": "^7.0.0",
         "symplify/coding-standard": "^5.1",
         "symplify/easy-coding-standard": "^5.1"

composer.lock: 1048 lines changed (generated file; diff suppressed because it is too large)

ecs.yml: 16 lines changed
@@ -30,14 +30,14 @@ services:
     Symplify\CodingStandard\Fixer\ArrayNotation\StandaloneLineInMultilineArrayFixer: ~

 parameters:
-    exclude_checkers:
-        # from strict.neon
-        - 'PhpCsFixer\Fixer\PhpUnit\PhpUnitStrictFixer'
-        - 'PhpCsFixer\Fixer\Strict\StrictComparisonFixer'
-        # personal prefference
-        - 'PhpCsFixer\Fixer\Operator\NotOperatorWithSuccessorSpaceFixer'
-
+    skip:
+        # from strict.neon
+        PhpCsFixer\Fixer\PhpUnit\PhpUnitStrictFixer: ~
+        PhpCsFixer\Fixer\Strict\StrictComparisonFixer: ~
+
+        # personal prefference
+        PhpCsFixer\Fixer\Operator\NotOperatorWithSuccessorSpaceFixer: ~

         PhpCsFixer\Fixer\Alias\RandomApiMigrationFixer:
             # random_int() breaks code
             - 'src/CrossValidation/RandomSplit.php'

@@ -65,4 +65,4 @@ parameters:
         SlevomatCodingStandard\Sniffs\TypeHints\TypeHintDeclarationSniff.MissingTraversablePropertyTypeHintSpecification: ~

         # assignment in "while ($var = ...)" are ok
-        PHP_CodeSniffer\Standards\Generic\Sniffs\CodeAnalysis\AssignmentInConditionSniff.FoundInWhileCondition:
+        PHP_CodeSniffer\Standards\Generic\Sniffs\CodeAnalysis\AssignmentInConditionSniff.FoundInWhileCondition:

phpstan.neon
@@ -7,9 +7,8 @@ parameters:
    ignoreErrors:
        - '#Property Phpml\\Clustering\\KMeans\\Cluster\:\:\$points \(iterable\<Phpml\\Clustering\\KMeans\\Point\>\&SplObjectStorage\) does not accept SplObjectStorage#'
        - '#Phpml\\Dataset\\(.*)Dataset::__construct\(\) does not call parent constructor from Phpml\\Dataset\\ArrayDataset#'

        # wide range cases
        - '#Parameter \#1 \$coordinates of class Phpml\\Clustering\\KMeans\\Point constructor expects array, array<int>\|Phpml\\Clustering\\KMeans\\Point given#'
        - '#Variable property access on .+#'
        - '#Variable method call on .+#'

        # probably known value
        - '#Method Phpml\\Classification\\DecisionTree::getBestSplit\(\) should return Phpml\\Classification\\DecisionTree\\DecisionTreeLeaf but returns Phpml\\Classification\\DecisionTree\\DecisionTreeLeaf\|null#'

@@ -41,7 +41,7 @@ class RandomForest extends Bagging
      * Default value for the ratio is 'log' which results in log(numFeatures, 2) + 1
      * features to be taken into consideration while selecting subspace of features
      *
-     * @param string|float $ratio
+     * @param mixed $ratio
      */
    public function setFeatureSubsetRatio($ratio): self
    {

@@ -73,7 +73,9 @@ class RandomForest extends Bagging
            throw new InvalidArgumentException('RandomForest can only use DecisionTree as base classifier');
        }

-        return parent::setClassifer($classifier, $classifierOptions);
+        parent::setClassifer($classifier, $classifierOptions);
+
+        return $this;
    }

    /**

@@ -122,12 +124,16 @@ class RandomForest extends Bagging
    }

    /**
     * @param DecisionTree $classifier
-     *
-     * @return DecisionTree
     */
    protected function initSingleClassifier(Classifier $classifier): Classifier
    {
+        if (!$classifier instanceof DecisionTree) {
+            throw new InvalidArgumentException(
+                sprintf('Classifier %s expected, got %s', DecisionTree::class, get_class($classifier))
+            );
+        }
+
        if (is_float($this->featureSubsetRatio)) {
            $featureCount = (int) ($this->featureSubsetRatio * $this->featureCount);
        } elseif ($this->featureSubsetRatio === 'sqrt') {
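
For orientation, a minimal usage sketch of the API touched above (illustrative only: the sample data is invented, the namespaces are the library's usual Phpml\Classification ones, and setClassifer() is the method's actual spelling in the library):

    use Phpml\Classification\DecisionTree;
    use Phpml\Classification\Ensemble\RandomForest;

    $samples = [[1, 20], [2, 30], [3, 25], [8, 5], [9, 8], [10, 3]];
    $targets = ['a', 'a', 'a', 'b', 'b', 'b'];

    $forest = new RandomForest(50);                    // number of trees in the ensemble
    $forest->setFeatureSubsetRatio('log');             // 'log', 'sqrt', or a float ratio of features per tree
    $forest->setClassifer(DecisionTree::class, []);    // only DecisionTree is accepted, per the guards above
    $forest->train($samples, $targets);
    $forest->predict([[2, 28]]);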

@@ -226,7 +226,7 @@ class LogisticRegression extends Adaline

        $y = $y < 0 ? 0 : 1;

-        $error = ($y - $hX) ** 2;
+        $error = (($y - $hX) ** 2);
        $gradient = -($y - $hX) * $hX * (1 - $hX);

        return [$error, $gradient, $penalty];
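
A note on the unchanged $gradient line: with $hX being the model's sigmoid output, the identity sigma'(z) = sigma(z) * (1 - sigma(z)) makes -($y - $hX) * $hX * (1 - $hX) the derivative of the halved squared error (y - sigma(z))^2 / 2 with respect to the pre-activation z (the constant factor is dropped relative to $error). A standalone check in plain PHP, not library code:

    $sigmoid = function (float $z): float {
        return 1.0 / (1.0 + exp(-$z));
    };

    $y = 1.0;
    $z = 0.3;
    $hX = $sigmoid($z);

    // analytic form used above
    $gradient = -($y - $hX) * $hX * (1 - $hX);

    // numeric derivative of the halved squared error with respect to z
    $costAt = function (float $z) use ($sigmoid, $y): float {
        return (($y - $sigmoid($z)) ** 2) / 2;
    };
    $eps = 1e-6;
    $numeric = ($costAt($z + $eps) - $costAt($z - $eps)) / (2 * $eps);

    // $gradient and $numeric agree closely (difference on the order of 1e-9)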

@@ -77,9 +77,6 @@ class FuzzyCMeans implements Clusterer
        return $this->membership;
    }

-    /**
-     * @param Point[]|int[][] $samples
-     */
    public function cluster(array $samples): array
    {
        // Initialize variables, clusters and membership matrix

@@ -210,7 +207,7 @@ class FuzzyCMeans implements Clusterer
                $this->samples[$col]
            );

-            $val = ($dist1 / $dist2) ** 2.0 / ($this->fuzziness - 1);
+            $val = (($dist1 / $dist2) ** 2.0) / ($this->fuzziness - 1);
            $sum += $val;
        }

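
For reference, cluster() is the clusterer's public entry point. A minimal usage sketch (illustrative: the two-dimensional sample points are invented, and the optional fuzziness and iteration arguments keep their defaults):

    use Phpml\Clustering\FuzzyCMeans;

    $fcm = new FuzzyCMeans(2);   // partition the samples into 2 clusters
    $clusters = $fcm->cluster([[1, 1], [8, 7], [1, 2], [7, 8], [2, 1], [8, 9]]);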

@@ -24,7 +24,7 @@ class SvmDataset extends ArrayDataset
        $targets = [];
        $maxIndex = 0;
        while (false !== $line = fgets($handle)) {
-            [$sample, $target, $maxIndex] = self::processLine((string) $line, $maxIndex);
+            [$sample, $target, $maxIndex] = self::processLine($line, $maxIndex);
            $samples[] = $sample;
            $targets[] = $target;
        }
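
For context, processLine() is called while streaming a file in SVM-Light/LIBSVM format (one "<target> <index>:<value> ..." record per line). A minimal usage sketch (the file path is hypothetical):

    use Phpml\Dataset\SvmDataset;

    $dataset = new SvmDataset('data/iris.svm');   // hypothetical path
    $samples = $dataset->getSamples();
    $targets = $dataset->getTargets();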

@@ -46,7 +46,7 @@ final class UnivariateLinearRegression implements ScoringFunction
        foreach (array_keys($samples[0]) as $index) {
            $featureColumn = array_column($samples, $index);
            $correlations[$index] =
-                (Matrix::dot($targets, $featureColumn)[0] / (new Matrix($featureColumn, false))->transpose()->frobeniusNorm())
+                Matrix::dot($targets, $featureColumn)[0] / (new Matrix($featureColumn, false))->transpose()->frobeniusNorm()
                / (new Matrix($targets, false))->frobeniusNorm();
        }


@@ -4,6 +4,7 @@ declare(strict_types=1);

 namespace Phpml\Math\Kernel;

+use Phpml\Exception\InvalidArgumentException;
 use Phpml\Math\Kernel;
 use Phpml\Math\Product;

@@ -19,12 +20,12 @@ class RBF implements Kernel
        $this->gamma = $gamma;
    }

-    /**
-     * @param array $a
-     * @param array $b
-     */
    public function compute($a, $b): float
    {
+        if (!is_array($a) || !is_array($b)) {
+            throw new InvalidArgumentException(sprintf('Arguments of %s must be arrays', __METHOD__));
+        }
+
        $score = 2 * Product::scalar($a, $b);
        $squares = Product::scalar($a, $a) + Product::scalar($b, $b);

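
The added guard makes compute() reject non-array arguments explicitly with an InvalidArgumentException. A short illustrative sketch (the exception message mirrors the updated RBFTest near the end of this diff):

    use Phpml\Exception\InvalidArgumentException;
    use Phpml\Math\Kernel\RBF;

    $rbf = new RBF(0.1);                     // gamma parameter of the kernel
    $rbf->compute([1, 2, 3], [4, 5, 6]);     // float similarity, approaching 1.0 as the points get closer

    try {
        $rbf->compute([0], 1.0);             // second argument is not an array
    } catch (InvalidArgumentException $e) {
        // "Arguments of Phpml\Math\Kernel\RBF::compute must be arrays"
    }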

@@ -502,7 +502,8 @@ class EigenvalueDecomposition
                }

                // Double division avoids possible underflow
-                $g = ($g / $this->ort[$m]) / $this->H[$m][$m - 1];
+                $g /= $this->ort[$m];
+                $g /= $this->H[$m][$m - 1];
                for ($i = $m; $i <= $high; ++$i) {
                    $this->V[$i][$j] += $g * $this->ort[$i];
                }

@@ -734,7 +735,7 @@ class EigenvalueDecomposition

            // Double QR step involving rows l:n and columns m:n
            for ($k = $m; $k <= $n - 1; ++$k) {
-                $notlast = ($k != $n - 1);
+                $notlast = $k != $n - 1;
                if ($k != $m) {
                    $p = $this->H[$k][$k - 1];
                    $q = $this->H[$k + 1][$k - 1];

@@ -59,8 +59,14 @@ abstract class MultilayerPerceptron extends LayeredNetwork implements Estimator,
    /**
     * @throws InvalidArgumentException
     */
-    public function __construct(int $inputLayerFeatures, array $hiddenLayers, array $classes, int $iterations = 10000, ?ActivationFunction $activationFunction = null, float $learningRate = 1)
-    {
+    public function __construct(
+        int $inputLayerFeatures,
+        array $hiddenLayers,
+        array $classes,
+        int $iterations = 10000,
+        ?ActivationFunction $activationFunction = null,
+        float $learningRate = 1.
+    ) {
        if (count($hiddenLayers) === 0) {
            throw new InvalidArgumentException('Provide at least 1 hidden layer');
        }
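
The reformatted constructor is the one inherited by the concrete networks, e.g. the MLPClassifier subclass. A minimal sketch (the layer sizes, class labels and training data are arbitrary):

    use Phpml\Classification\MLPClassifier;

    // 4 input features, one hidden layer of 2 neurons, two output classes,
    // with the default iterations, activation function and learning rate
    $mlp = new MLPClassifier(4, [2], ['a', 'b']);
    $mlp->train(
        [[1, 0, 0, 0], [0, 1, 1, 0], [1, 1, 1, 1], [0, 0, 0, 0]],
        ['a', 'a', 'b', 'b']
    );
    $mlp->predict([[1, 0, 0, 0]]);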

@@ -23,7 +23,7 @@ class ConjugateGradientTest extends TestCase

        $callback = function ($theta, $sample, $target) {
            $y = $theta[0] + $theta[1] * $sample[0];
-            $cost = ($y - $target) ** 2 / 2;
+            $cost = (($y - $target) ** 2) / 2;
            $grad = $y - $target;

            return [$cost, $grad];
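
The same parenthesisation fix repeats in the remaining optimizer tests below (ConjugateGradientTest, GDTest, StochasticGDTest). For orientation, these closures are the per-sample cost/gradient callbacks the optimizers consume; a rough usage sketch follows, where the one-argument constructor and the runOptimization(array $samples, array $targets, Closure $gradientCb) signature are assumptions inferred from how these tests read, not verified API:

    use Phpml\Helper\Optimizer\ConjugateGradient;

    $samples = [[1], [2], [3], [4]];
    $targets = [2, 4, 6, 8];          // roughly y = 2x, so theta should approach [0, 2]

    $callback = function ($theta, $sample, $target) {
        $y = $theta[0] + $theta[1] * $sample[0];
        $cost = (($y - $target) ** 2) / 2;   // halved squared error
        $grad = $y - $target;                // its derivative with respect to the prediction

        return [$cost, $grad];
    };

    $optimizer = new ConjugateGradient(1);                               // assumed: number of features
    $theta = $optimizer->runOptimization($samples, $targets, $callback); // assumed signature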

@@ -49,7 +49,7 @@ class ConjugateGradientTest extends TestCase

        $callback = function ($theta, $sample, $target) {
            $y = $theta[0] + $theta[1] * $sample[0];
-            $cost = ($y - $target) ** 2 / 2;
+            $cost = (($y - $target) ** 2) / 2;
            $grad = $y - $target;

            return [$cost, $grad];

@@ -78,7 +78,7 @@ class ConjugateGradientTest extends TestCase

        $callback = function ($theta, $sample, $target) {
            $y = $theta[0] + $theta[1] * $sample[0] + $theta[2] * $sample[1];
-            $cost = ($y - $target) ** 2 / 2;
+            $cost = (($y - $target) ** 2) / 2;
            $grad = $y - $target;

            return [$cost, $grad];

@@ -22,7 +22,7 @@ class GDTest extends TestCase

        $callback = function ($theta, $sample, $target) {
            $y = $theta[0] + $theta[1] * $sample[0];
-            $cost = ($y - $target) ** 2 / 2;
+            $cost = (($y - $target) ** 2) / 2;
            $grad = $y - $target;

            return [$cost, $grad];

@@ -49,7 +49,7 @@ class GDTest extends TestCase

        $callback = function ($theta, $sample, $target) {
            $y = $theta[0] + $theta[1] * $sample[0] + $theta[2] * $sample[1];
-            $cost = ($y - $target) ** 2 / 2;
+            $cost = (($y - $target) ** 2) / 2;
            $grad = $y - $target;

            return [$cost, $grad];

@@ -22,7 +22,7 @@ class StochasticGDTest extends TestCase

        $callback = function ($theta, $sample, $target) {
            $y = $theta[0] + $theta[1] * $sample[0];
-            $cost = ($y - $target) ** 2 / 2;
+            $cost = (($y - $target) ** 2) / 2;
            $grad = $y - $target;

            return [$cost, $grad];

@@ -49,7 +49,7 @@ class StochasticGDTest extends TestCase

        $callback = function ($theta, $sample, $target) {
            $y = $theta[0] + $theta[1] * $sample[0] + $theta[2] * $sample[1];
-            $cost = ($y - $target) ** 2 / 2;
+            $cost = (($y - $target) ** 2) / 2;
            $grad = $y - $target;

            return [$cost, $grad];

@@ -4,6 +4,7 @@ declare(strict_types=1);

 namespace Phpml\Tests\Math\Kernel;

+use Phpml\Exception\InvalidArgumentException;
 use Phpml\Math\Kernel\RBF;
 use PHPUnit\Framework\TestCase;


@@ -23,4 +24,12 @@ class RBFTest extends TestCase
        self::assertEquals(0.00451, $rbf->compute([1, 2, 3], [4, 5, 6]), '', $delta = 0.0001);
        self::assertEquals(0, $rbf->compute([4, 5], [1, 100]));
    }
+
+    public function testThrowExceptionWhenComputeArgumentIsNotAnArray(): void
+    {
+        $this->expectException(InvalidArgumentException::class);
+        $this->expectExceptionMessage('Arguments of Phpml\\Math\\Kernel\\RBF::compute must be arrays');
+
+        (new RBF(0.1))->compute([0], 1.0);
+    }
 }