Code Style (#86)

* Code Style

* Code Review fixes
Marcin Michalski 2017-05-17 09:03:25 +02:00 committed by Arkadiusz Kondas
parent 43f15d2f7e
commit 7ab80b6e56
40 changed files with 535 additions and 388 deletions


@ -102,19 +102,21 @@ class DecisionTree implements Classifier
$this->columnNames = array_slice($this->columnNames, 0, $this->featureCount); $this->columnNames = array_slice($this->columnNames, 0, $this->featureCount);
} elseif (count($this->columnNames) < $this->featureCount) { } elseif (count($this->columnNames) < $this->featureCount) {
$this->columnNames = array_merge($this->columnNames, $this->columnNames = array_merge($this->columnNames,
range(count($this->columnNames), $this->featureCount - 1)); range(count($this->columnNames), $this->featureCount - 1)
);
} }
} }
/** /**
* @param array $samples * @param array $samples
*
* @return array * @return array
*/ */
public static function getColumnTypes(array $samples) : array public static function getColumnTypes(array $samples) : array
{ {
$types = []; $types = [];
$featureCount = count($samples[0]); $featureCount = count($samples[0]);
for ($i=0; $i < $featureCount; $i++) { for ($i = 0; $i < $featureCount; ++$i) {
$values = array_column($samples, $i); $values = array_column($samples, $i);
$isCategorical = self::isCategoricalColumn($values); $isCategorical = self::isCategoricalColumn($values);
$types[] = $isCategorical ? self::NOMINAL : self::CONTINUOUS; $types[] = $isCategorical ? self::NOMINAL : self::CONTINUOUS;
@ -126,6 +128,7 @@ class DecisionTree implements Classifier
/** /**
* @param array $records * @param array $records
* @param int $depth * @param int $depth
*
* @return DecisionTreeLeaf * @return DecisionTreeLeaf
*/ */
protected function getSplitLeaf(array $records, int $depth = 0) : DecisionTreeLeaf protected function getSplitLeaf(array $records, int $depth = 0) : DecisionTreeLeaf
@ -166,7 +169,7 @@ class DecisionTree implements Classifier
if (!array_key_exists($target, $remainingTargets)) { if (!array_key_exists($target, $remainingTargets)) {
$remainingTargets[$target] = 1; $remainingTargets[$target] = 1;
} else { } else {
$remainingTargets[$target]++; ++$remainingTargets[$target];
} }
} }
@ -188,6 +191,7 @@ class DecisionTree implements Classifier
/** /**
* @param array $records * @param array $records
*
* @return DecisionTreeLeaf * @return DecisionTreeLeaf
*/ */
protected function getBestSplit(array $records) : DecisionTreeLeaf protected function getBestSplit(array $records) : DecisionTreeLeaf
@ -271,9 +275,10 @@ class DecisionTree implements Classifier
} }
/** /**
* @param $baseValue * @param mixed $baseValue
* @param array $colValues * @param array $colValues
* @param array $targets * @param array $targets
*
* @return float * @return float
*/ */
public function getGiniIndex($baseValue, array $colValues, array $targets) : float public function getGiniIndex($baseValue, array $colValues, array $targets) : float
@ -282,13 +287,15 @@ class DecisionTree implements Classifier
foreach ($this->labels as $label) { foreach ($this->labels as $label) {
$countMatrix[$label] = [0, 0]; $countMatrix[$label] = [0, 0];
} }
foreach ($colValues as $index => $value) { foreach ($colValues as $index => $value) {
$label = $targets[$index]; $label = $targets[$index];
$rowIndex = $value === $baseValue ? 0 : 1; $rowIndex = $value === $baseValue ? 0 : 1;
$countMatrix[$label][$rowIndex]++; ++$countMatrix[$label][$rowIndex];
} }
$giniParts = [0, 0]; $giniParts = [0, 0];
for ($i=0; $i<=1; $i++) { for ($i = 0; $i <= 1; ++$i) {
$part = 0; $part = 0;
$sum = array_sum(array_column($countMatrix, $i)); $sum = array_sum(array_column($countMatrix, $i));
if ($sum > 0) { if ($sum > 0) {
@ -296,6 +303,7 @@ class DecisionTree implements Classifier
$part += pow($countMatrix[$label][$i] / floatval($sum), 2); $part += pow($countMatrix[$label][$i] / floatval($sum), 2);
} }
} }
$giniParts[$i] = (1 - $part) * $sum; $giniParts[$i] = (1 - $part) * $sum;
} }
@ -304,6 +312,7 @@ class DecisionTree implements Classifier
/** /**
* @param array $samples * @param array $samples
*
* @return array * @return array
*/ */
protected function preprocess(array $samples) : array protected function preprocess(array $samples) : array
@ -311,7 +320,7 @@ class DecisionTree implements Classifier
// Detect and convert continuous data column values into // Detect and convert continuous data column values into
// discrete values by using the median as a threshold value // discrete values by using the median as a threshold value
$columns = []; $columns = [];
for ($i=0; $i<$this->featureCount; $i++) { for ($i = 0; $i < $this->featureCount; ++$i) {
$values = array_column($samples, $i); $values = array_column($samples, $i);
if ($this->columnTypes[$i] == self::CONTINUOUS) { if ($this->columnTypes[$i] == self::CONTINUOUS) {
$median = Mean::median($values); $median = Mean::median($values);
@ -332,6 +341,7 @@ class DecisionTree implements Classifier
/** /**
* @param array $columnValues * @param array $columnValues
*
* @return bool * @return bool
*/ */
protected static function isCategoricalColumn(array $columnValues) : bool protected static function isCategoricalColumn(array $columnValues) : bool
@ -348,6 +358,7 @@ class DecisionTree implements Classifier
if ($floatValues) { if ($floatValues) {
return false; return false;
} }
if (count($numericValues) !== $count) { if (count($numericValues) !== $count) {
return true; return true;
} }
@ -365,7 +376,9 @@ class DecisionTree implements Classifier
* randomly selected for each split operation. * randomly selected for each split operation.
* *
* @param int $numFeatures * @param int $numFeatures
*
* @return $this * @return $this
*
* @throws InvalidArgumentException * @throws InvalidArgumentException
*/ */
public function setNumFeatures(int $numFeatures) public function setNumFeatures(int $numFeatures)
@ -394,7 +407,9 @@ class DecisionTree implements Classifier
* column importances are desired to be inspected. * column importances are desired to be inspected.
* *
* @param array $names * @param array $names
*
* @return $this * @return $this
*
* @throws InvalidArgumentException * @throws InvalidArgumentException
*/ */
public function setColumnNames(array $names) public function setColumnNames(array $names)
@ -460,6 +475,7 @@ class DecisionTree implements Classifier
* *
* @param int $column * @param int $column
* @param DecisionTreeLeaf $node * @param DecisionTreeLeaf $node
*
* @return array * @return array
*/ */
protected function getSplitNodesByColumn(int $column, DecisionTreeLeaf $node) : array protected function getSplitNodesByColumn(int $column, DecisionTreeLeaf $node) : array
@ -478,9 +494,11 @@ class DecisionTree implements Classifier
if ($node->leftLeaf) { if ($node->leftLeaf) {
$lNodes = $this->getSplitNodesByColumn($column, $node->leftLeaf); $lNodes = $this->getSplitNodesByColumn($column, $node->leftLeaf);
} }
if ($node->rightLeaf) { if ($node->rightLeaf) {
$rNodes = $this->getSplitNodesByColumn($column, $node->rightLeaf); $rNodes = $this->getSplitNodesByColumn($column, $node->rightLeaf);
} }
$nodes = array_merge($nodes, $lNodes, $rNodes); $nodes = array_merge($nodes, $lNodes, $rNodes);
return $nodes; return $nodes;
@ -488,6 +506,7 @@ class DecisionTree implements Classifier
/** /**
* @param array $sample * @param array $sample
*
* @return mixed * @return mixed
*/ */
protected function predictSample(array $sample) protected function predictSample(array $sample)
@ -497,6 +516,7 @@ class DecisionTree implements Classifier
if ($node->isTerminal) { if ($node->isTerminal) {
break; break;
} }
if ($node->evaluate($sample)) { if ($node->evaluate($sample)) {
$node = $node->leftLeaf; $node = $node->leftLeaf;
} else { } else {
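
For reference, getColumnTypes() touched above is a public static helper, so it can be exercised on its own; a minimal sketch (the weather-style rows are made up for illustration):

use Phpml\Classification\DecisionTree;

// First column is categorical, second is numeric.
$samples = [
    ['sunny', 85.0],
    ['rainy', 70.5],
    ['sunny', 62.0],
];

// Each column is reported as DecisionTree::NOMINAL or DecisionTree::CONTINUOUS,
// based on the isCategoricalColumn() check from the hunk above.
$types = DecisionTree::getColumnTypes($samples);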


@ -92,6 +92,8 @@ class DecisionTreeLeaf
* Returns Mean Decrease Impurity (MDI) in the node. * Returns Mean Decrease Impurity (MDI) in the node.
* For terminal nodes, this value is equal to 0 * For terminal nodes, this value is equal to 0
* *
* @param int $parentRecordCount
*
* @return float * @return float
*/ */
public function getNodeImpurityDecrease(int $parentRecordCount) public function getNodeImpurityDecrease(int $parentRecordCount)


@ -75,6 +75,7 @@ class AdaBoost implements Classifier
* improve classification performance of 'weak' classifiers such as * improve classification performance of 'weak' classifiers such as
* DecisionStump (default base classifier of AdaBoost). * DecisionStump (default base classifier of AdaBoost).
* *
* @param int $maxIterations
*/ */
public function __construct(int $maxIterations = 50) public function __construct(int $maxIterations = 50)
{ {
@ -96,6 +97,8 @@ class AdaBoost implements Classifier
/** /**
* @param array $samples * @param array $samples
* @param array $targets * @param array $targets
*
* @throws \Exception
*/ */
public function train(array $samples, array $targets) public function train(array $samples, array $targets)
{ {
@ -123,7 +126,6 @@ class AdaBoost implements Classifier
// Execute the algorithm for a maximum number of iterations // Execute the algorithm for a maximum number of iterations
$currIter = 0; $currIter = 0;
while ($this->maxIterations > $currIter++) { while ($this->maxIterations > $currIter++) {
// Determine the best 'weak' classifier based on current weights // Determine the best 'weak' classifier based on current weights
$classifier = $this->getBestClassifier(); $classifier = $this->getBestClassifier();
$errorRate = $this->evaluateClassifier($classifier); $errorRate = $this->evaluateClassifier($classifier);
@ -181,7 +183,7 @@ class AdaBoost implements Classifier
$targets = []; $targets = [];
foreach ($weights as $index => $weight) { foreach ($weights as $index => $weight) {
$z = (int)round(($weight - $mean) / $std) - $minZ + 1; $z = (int)round(($weight - $mean) / $std) - $minZ + 1;
for ($i=0; $i < $z; $i++) { for ($i = 0; $i < $z; ++$i) {
if (rand(0, 1) == 0) { if (rand(0, 1) == 0) {
continue; continue;
} }
@ -197,6 +199,8 @@ class AdaBoost implements Classifier
* Evaluates the classifier and returns the classification error rate * Evaluates the classifier and returns the classification error rate
* *
* @param Classifier $classifier * @param Classifier $classifier
*
* @return float
*/ */
protected function evaluateClassifier(Classifier $classifier) protected function evaluateClassifier(Classifier $classifier)
{ {
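
A rough usage sketch for the constructor documented above (the tiny two-class dataset is invented; predict() comes from the shared Predictable trait shown further down in this commit):

use Phpml\Classification\Ensemble\AdaBoost;

$samples = [[1.0, 2.0], [1.5, 1.8], [5.0, 8.0], [6.0, 9.0]];
$targets = [0, 0, 1, 1];

$classifier = new AdaBoost(50);   // 50 iterations is the default shown above
$classifier->train($samples, $targets);

$predicted = $classifier->predict([[1.2, 1.9]]);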


@ -59,13 +59,13 @@ class Bagging implements Classifier
private $samples = []; private $samples = [];
/** /**
* Creates an ensemble classifier with given number of base classifiers<br> * Creates an ensemble classifier with given number of base classifiers
* Default number of base classifiers is 100. * Default number of base classifiers is 50.
* The more number of base classifiers, the better performance but at the cost of procesing time * The more number of base classifiers, the better performance but at the cost of procesing time
* *
* @param int $numClassifier * @param int $numClassifier
*/ */
public function __construct($numClassifier = 50) public function __construct(int $numClassifier = 50)
{ {
$this->numClassifier = $numClassifier; $this->numClassifier = $numClassifier;
} }
@ -76,14 +76,17 @@ class Bagging implements Classifier
* to train each base classifier. * to train each base classifier.
* *
* @param float $ratio * @param float $ratio
*
* @return $this * @return $this
* @throws Exception *
* @throws \Exception
*/ */
public function setSubsetRatio(float $ratio) public function setSubsetRatio(float $ratio)
{ {
if ($ratio < 0.1 || $ratio > 1.0) { if ($ratio < 0.1 || $ratio > 1.0) {
throw new \Exception("Subset ratio should be between 0.1 and 1.0"); throw new \Exception("Subset ratio should be between 0.1 and 1.0");
} }
$this->subsetRatio = $ratio; $this->subsetRatio = $ratio;
return $this; return $this;
} }
@ -98,12 +101,14 @@ class Bagging implements Classifier
* *
* @param string $classifier * @param string $classifier
* @param array $classifierOptions * @param array $classifierOptions
*
* @return $this * @return $this
*/ */
public function setClassifer(string $classifier, array $classifierOptions = []) public function setClassifer(string $classifier, array $classifierOptions = [])
{ {
$this->classifier = $classifier; $this->classifier = $classifier;
$this->classifierOptions = $classifierOptions; $this->classifierOptions = $classifierOptions;
return $this; return $this;
} }
@ -138,11 +143,12 @@ class Bagging implements Classifier
$targets = []; $targets = [];
srand($index); srand($index);
$bootstrapSize = $this->subsetRatio * $this->numSamples; $bootstrapSize = $this->subsetRatio * $this->numSamples;
for ($i=0; $i < $bootstrapSize; $i++) { for ($i = 0; $i < $bootstrapSize; ++$i) {
$rand = rand(0, $this->numSamples - 1); $rand = rand(0, $this->numSamples - 1);
$samples[] = $this->samples[$rand]; $samples[] = $this->samples[$rand];
$targets[] = $this->targets[$rand]; $targets[] = $this->targets[$rand];
} }
return [$samples, $targets]; return [$samples, $targets];
} }
@ -152,24 +158,25 @@ class Bagging implements Classifier
protected function initClassifiers() protected function initClassifiers()
{ {
$classifiers = []; $classifiers = [];
for ($i=0; $i<$this->numClassifier; $i++) { for ($i = 0; $i < $this->numClassifier; ++$i) {
$ref = new \ReflectionClass($this->classifier); $ref = new \ReflectionClass($this->classifier);
if ($this->classifierOptions) { if ($this->classifierOptions) {
$obj = $ref->newInstanceArgs($this->classifierOptions); $obj = $ref->newInstanceArgs($this->classifierOptions);
} else { } else {
$obj = $ref->newInstance(); $obj = $ref->newInstance();
} }
$classifiers[] = $this->initSingleClassifier($obj, $i);
$classifiers[] = $this->initSingleClassifier($obj);
} }
return $classifiers; return $classifiers;
} }
/** /**
* @param Classifier $classifier * @param Classifier $classifier
* @param int $index *
* @return Classifier * @return Classifier
*/ */
protected function initSingleClassifier($classifier, $index) protected function initSingleClassifier($classifier)
{ {
return $classifier; return $classifier;
} }
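
To show how the fluent setters above chain together, a sketch under the assumption that DecisionTree's constructor argument is a maximum depth:

use Phpml\Classification\DecisionTree;
use Phpml\Classification\Ensemble\Bagging;

$samples = [[1.0, 2.0], [1.5, 1.8], [5.0, 8.0], [6.0, 9.0]];
$targets = ['a', 'a', 'b', 'b'];

$classifier = new Bagging(50);                 // 50 base classifiers (the default)
$classifier->setSubsetRatio(0.7)               // must stay within 0.1 .. 1.0
    ->setClassifer(DecisionTree::class, [5]);  // options go to the base classifier's constructor
                                               // (the method name is spelled "setClassifer" in the library)
$classifier->train($samples, $targets);
$predicted = $classifier->predict([[1.2, 1.9]]);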


@ -5,7 +5,6 @@ declare(strict_types=1);
namespace Phpml\Classification\Ensemble; namespace Phpml\Classification\Ensemble;
use Phpml\Classification\DecisionTree; use Phpml\Classification\DecisionTree;
use Phpml\Classification\Classifier;
class RandomForest extends Bagging class RandomForest extends Bagging
{ {
@ -24,9 +23,9 @@ class RandomForest extends Bagging
* may increase the prediction performance while it will also substantially * may increase the prediction performance while it will also substantially
* increase the processing time and the required memory * increase the processing time and the required memory
* *
* @param type $numClassifier * @param int $numClassifier
*/ */
public function __construct($numClassifier = 50) public function __construct(int $numClassifier = 50)
{ {
parent::__construct($numClassifier); parent::__construct($numClassifier);
@ -43,17 +42,21 @@ class RandomForest extends Bagging
* features to be taken into consideration while selecting subspace of features * features to be taken into consideration while selecting subspace of features
* *
* @param mixed $ratio string or float should be given * @param mixed $ratio string or float should be given
*
* @return $this * @return $this
* @throws Exception *
* @throws \Exception
*/ */
public function setFeatureSubsetRatio($ratio) public function setFeatureSubsetRatio($ratio)
{ {
if (is_float($ratio) && ($ratio < 0.1 || $ratio > 1.0)) { if (is_float($ratio) && ($ratio < 0.1 || $ratio > 1.0)) {
throw new \Exception("When a float given, feature subset ratio should be between 0.1 and 1.0"); throw new \Exception("When a float given, feature subset ratio should be between 0.1 and 1.0");
} }
if (is_string($ratio) && $ratio != 'sqrt' && $ratio != 'log') { if (is_string($ratio) && $ratio != 'sqrt' && $ratio != 'log') {
throw new \Exception("When a string given, feature subset ratio can only be 'sqrt' or 'log' "); throw new \Exception("When a string given, feature subset ratio can only be 'sqrt' or 'log' ");
} }
$this->featureSubsetRatio = $ratio; $this->featureSubsetRatio = $ratio;
return $this; return $this;
} }
@ -63,7 +66,10 @@ class RandomForest extends Bagging
* *
* @param string $classifier * @param string $classifier
* @param array $classifierOptions * @param array $classifierOptions
*
* @return $this * @return $this
*
* @throws \Exception
*/ */
public function setClassifer(string $classifier, array $classifierOptions = []) public function setClassifer(string $classifier, array $classifierOptions = [])
{ {
@ -125,10 +131,10 @@ class RandomForest extends Bagging
/** /**
* @param DecisionTree $classifier * @param DecisionTree $classifier
* @param int $index *
* @return DecisionTree * @return DecisionTree
*/ */
protected function initSingleClassifier($classifier, $index) protected function initSingleClassifier($classifier)
{ {
if (is_float($this->featureSubsetRatio)) { if (is_float($this->featureSubsetRatio)) {
$featureCount = (int)($this->featureSubsetRatio * $this->featureCount); $featureCount = (int)($this->featureSubsetRatio * $this->featureCount);
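
A short sketch of the feature-subset setter documented above; a float must stay within 0.1 and 1.0, and 'sqrt' or 'log' are the only accepted strings:

use Phpml\Classification\Ensemble\RandomForest;

$forest = new RandomForest(50);

// Either a float ratio of the total feature count ...
$forest->setFeatureSubsetRatio(0.6);

// ... or one of the two string modes.
$forest->setFeatureSubsetRatio('sqrt');

// Training and prediction then proceed exactly as with Bagging (see the sketch above).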


@ -4,11 +4,8 @@ declare(strict_types=1);
namespace Phpml\Classification\Linear; namespace Phpml\Classification\Linear;
use Phpml\Classification\Classifier;
class Adaline extends Perceptron class Adaline extends Perceptron
{ {
/** /**
* Batch training is the default Adaline training algorithm * Batch training is the default Adaline training algorithm
*/ */
@ -35,8 +32,12 @@ class Adaline extends Perceptron
* If normalizeInputs is set to true, then every input given to the algorithm will be standardized * If normalizeInputs is set to true, then every input given to the algorithm will be standardized
* by use of standard deviation and mean calculation * by use of standard deviation and mean calculation
* *
* @param int $learningRate * @param float $learningRate
* @param int $maxIterations * @param int $maxIterations
* @param bool $normalizeInputs
* @param int $trainingType
*
* @throws \Exception
*/ */
public function __construct(float $learningRate = 0.001, int $maxIterations = 1000, public function __construct(float $learningRate = 0.001, int $maxIterations = 1000,
bool $normalizeInputs = true, int $trainingType = self::BATCH_TRAINING) bool $normalizeInputs = true, int $trainingType = self::BATCH_TRAINING)


@ -87,6 +87,8 @@ class DecisionStump extends WeightedClassifier
/** /**
* @param array $samples * @param array $samples
* @param array $targets * @param array $targets
* @param array $labels
*
* @throws \Exception * @throws \Exception
*/ */
protected function trainBinary(array $samples, array $targets, array $labels) protected function trainBinary(array $samples, array $targets, array $labels)
@ -237,13 +239,13 @@ class DecisionStump extends WeightedClassifier
/** /**
* *
* @param type $leftValue * @param mixed $leftValue
* @param type $operator * @param string $operator
* @param type $rightValue * @param mixed $rightValue
* *
* @return boolean * @return boolean
*/ */
protected function evaluate($leftValue, $operator, $rightValue) protected function evaluate($leftValue, string $operator, $rightValue)
{ {
switch ($operator) { switch ($operator) {
case '>': return $leftValue > $rightValue; case '>': return $leftValue > $rightValue;
@ -291,7 +293,7 @@ class DecisionStump extends WeightedClassifier
if (!isset($prob[$predicted][$target])) { if (!isset($prob[$predicted][$target])) {
$prob[$predicted][$target] = 0; $prob[$predicted][$target] = 0;
} }
$prob[$predicted][$target]++; ++$prob[$predicted][$target];
} }
// Calculate probabilities: Proportion of labels in each leaf // Calculate probabilities: Proportion of labels in each leaf


@ -4,12 +4,10 @@ declare(strict_types=1);
namespace Phpml\Classification\Linear; namespace Phpml\Classification\Linear;
use Phpml\Classification\Classifier;
use Phpml\Helper\Optimizer\ConjugateGradient; use Phpml\Helper\Optimizer\ConjugateGradient;
class LogisticRegression extends Adaline class LogisticRegression extends Adaline
{ {
/** /**
* Batch training: Gradient descent algorithm (default) * Batch training: Gradient descent algorithm (default)
*/ */
@ -126,6 +124,8 @@ class LogisticRegression extends Adaline
* *
* @param array $samples * @param array $samples
* @param array $targets * @param array $targets
*
* @throws \Exception
*/ */
protected function runTraining(array $samples, array $targets) protected function runTraining(array $samples, array $targets)
{ {
@ -140,12 +140,18 @@ class LogisticRegression extends Adaline
case self::CONJUGATE_GRAD_TRAINING: case self::CONJUGATE_GRAD_TRAINING:
return $this->runConjugateGradient($samples, $targets, $callback); return $this->runConjugateGradient($samples, $targets, $callback);
default:
throw new \Exception('Logistic regression has invalid training type: %s.', $this->trainingType);
} }
} }
/** /**
* Executes Conjugate Gradient method to optimize the * Executes Conjugate Gradient method to optimize the weights of the LogReg model
* weights of the LogReg model *
* @param array $samples
* @param array $targets
* @param \Closure $gradientFunc
*/ */
protected function runConjugateGradient(array $samples, array $targets, \Closure $gradientFunc) protected function runConjugateGradient(array $samples, array $targets, \Closure $gradientFunc)
{ {
@ -162,6 +168,8 @@ class LogisticRegression extends Adaline
* Returns the appropriate callback function for the selected cost function * Returns the appropriate callback function for the selected cost function
* *
* @return \Closure * @return \Closure
*
* @throws \Exception
*/ */
protected function getCostFunction() protected function getCostFunction()
{ {
@ -203,7 +211,7 @@ class LogisticRegression extends Adaline
return $callback; return $callback;
case 'sse': case 'sse':
/** /*
* Sum of squared errors or least squared errors cost function: * Sum of squared errors or least squared errors cost function:
* J(x) = (y - h(x))^2 * J(x) = (y - h(x))^2
* *
@ -224,6 +232,9 @@ class LogisticRegression extends Adaline
}; };
return $callback; return $callback;
default:
throw new \Exception(sprintf('Logistic regression has invalid cost function: %s.', $this->costFunction));
} }
} }
@ -245,6 +256,7 @@ class LogisticRegression extends Adaline
* Returns the class value (either -1 or 1) for the given input * Returns the class value (either -1 or 1) for the given input
* *
* @param array $sample * @param array $sample
*
* @return int * @return int
*/ */
protected function outputClass(array $sample) protected function outputClass(array $sample)
@ -266,6 +278,8 @@ class LogisticRegression extends Adaline
* *
* @param array $sample * @param array $sample
* @param mixed $label * @param mixed $label
*
* @return float
*/ */
protected function predictProbability(array $sample, $label) protected function predictProbability(array $sample, $label)
{ {


@ -63,22 +63,22 @@ class Perceptron implements Classifier, IncrementalEstimator
/** /**
* Initalize a perceptron classifier with given learning rate and maximum * Initalize a perceptron classifier with given learning rate and maximum
* number of iterations used while training the perceptron <br> * number of iterations used while training the perceptron
* *
* Learning rate should be a float value between 0.0(exclusive) and 1.0(inclusive) <br> * @param float $learningRate Value between 0.0(exclusive) and 1.0(inclusive)
* Maximum number of iterations can be an integer value greater than 0 * @param int $maxIterations Must be at least 1
* @param int $learningRate * @param bool $normalizeInputs
* @param int $maxIterations *
* @throws \Exception
*/ */
public function __construct(float $learningRate = 0.001, int $maxIterations = 1000, public function __construct(float $learningRate = 0.001, int $maxIterations = 1000, bool $normalizeInputs = true)
bool $normalizeInputs = true)
{ {
if ($learningRate <= 0.0 || $learningRate > 1.0) { if ($learningRate <= 0.0 || $learningRate > 1.0) {
throw new \Exception("Learning rate should be a float value between 0.0(exclusive) and 1.0(inclusive)"); throw new \Exception("Learning rate should be a float value between 0.0(exclusive) and 1.0(inclusive)");
} }
if ($maxIterations <= 0) { if ($maxIterations <= 0) {
throw new \Exception("Maximum number of iterations should be an integer greater than 0"); throw new \Exception("Maximum number of iterations must be an integer greater than 0");
} }
if ($normalizeInputs) { if ($normalizeInputs) {
@ -96,7 +96,7 @@ class Perceptron implements Classifier, IncrementalEstimator
*/ */
public function partialTrain(array $samples, array $targets, array $labels = []) public function partialTrain(array $samples, array $targets, array $labels = [])
{ {
return $this->trainByLabel($samples, $targets, $labels); $this->trainByLabel($samples, $targets, $labels);
} }
/** /**
@ -140,6 +140,8 @@ class Perceptron implements Classifier, IncrementalEstimator
* for $maxIterations times * for $maxIterations times
* *
* @param bool $enable * @param bool $enable
*
* @return $this
*/ */
public function setEarlyStop(bool $enable = true) public function setEarlyStop(bool $enable = true)
{ {
@ -187,6 +189,8 @@ class Perceptron implements Classifier, IncrementalEstimator
* *
* @param array $samples * @param array $samples
* @param array $targets * @param array $targets
* @param \Closure $gradientFunc
* @param bool $isBatch
*/ */
protected function runGradientDescent(array $samples, array $targets, \Closure $gradientFunc, bool $isBatch = false) protected function runGradientDescent(array $samples, array $targets, \Closure $gradientFunc, bool $isBatch = false)
{ {
@ -262,6 +266,8 @@ class Perceptron implements Classifier, IncrementalEstimator
* *
* @param array $sample * @param array $sample
* @param mixed $label * @param mixed $label
*
* @return float
*/ */
protected function predictProbability(array $sample, $label) protected function predictProbability(array $sample, $label)
{ {
@ -277,6 +283,7 @@ class Perceptron implements Classifier, IncrementalEstimator
/** /**
* @param array $sample * @param array $sample
*
* @return mixed * @return mixed
*/ */
protected function predictSampleBinary(array $sample) protected function predictSampleBinary(array $sample)
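
The reworked constructor docblock above corresponds to the following sketch; the second call is only there to show the validation path:

use Phpml\Classification\Linear\Perceptron;

// Learning rate in (0.0, 1.0], at least one iteration, inputs standardized.
$perceptron = new Perceptron(0.001, 1000, true);

try {
    new Perceptron(1.5, 1000);  // learning rate out of range
} catch (\Exception $e) {
    // "Learning rate should be a float value between 0.0(exclusive) and 1.0(inclusive)"
}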


@ -89,7 +89,7 @@ class NaiveBayes implements Classifier
$this->mean[$label]= array_fill(0, $this->featureCount, 0); $this->mean[$label]= array_fill(0, $this->featureCount, 0);
$this->dataType[$label] = array_fill(0, $this->featureCount, self::CONTINUOS); $this->dataType[$label] = array_fill(0, $this->featureCount, self::CONTINUOS);
$this->discreteProb[$label] = array_fill(0, $this->featureCount, self::CONTINUOS); $this->discreteProb[$label] = array_fill(0, $this->featureCount, self::CONTINUOS);
for ($i=0; $i<$this->featureCount; $i++) { for ($i = 0; $i < $this->featureCount; ++$i) {
// Get the values of nth column in the samples array // Get the values of nth column in the samples array
// Mean::arithmetic is called twice, can be optimized // Mean::arithmetic is called twice, can be optimized
$values = array_column($samples, $i); $values = array_column($samples, $i);
@ -117,6 +117,7 @@ class NaiveBayes implements Classifier
* @param array $sample * @param array $sample
* @param int $feature * @param int $feature
* @param string $label * @param string $label
*
* @return float * @return float
*/ */
private function sampleProbability($sample, $feature, $label) private function sampleProbability($sample, $feature, $label)
@ -145,13 +146,15 @@ class NaiveBayes implements Classifier
/** /**
* Return samples belonging to specific label * Return samples belonging to specific label
*
* @param string $label * @param string $label
*
* @return array * @return array
*/ */
private function getSamplesByLabel($label) private function getSamplesByLabel($label)
{ {
$samples = []; $samples = [];
for ($i=0; $i<$this->sampleCount; $i++) { for ($i = 0; $i < $this->sampleCount; ++$i) {
if ($this->targets[$i] == $label) { if ($this->targets[$i] == $label) {
$samples[] = $this->samples[$i]; $samples[] = $this->samples[$i];
} }
@ -171,12 +174,13 @@ class NaiveBayes implements Classifier
$predictions = []; $predictions = [];
foreach ($this->labels as $label) { foreach ($this->labels as $label) {
$p = $this->p[$label]; $p = $this->p[$label];
for ($i=0; $i<$this->featureCount; $i++) { for ($i = 0; $i<$this->featureCount; ++$i) {
$Plf = $this->sampleProbability($sample, $i, $label); $Plf = $this->sampleProbability($sample, $i, $label);
$p += $Plf; $p += $Plf;
} }
$predictions[$label] = $p; $predictions[$label] = $p;
} }
arsort($predictions, SORT_NUMERIC); arsort($predictions, SORT_NUMERIC);
reset($predictions); reset($predictions);
return key($predictions); return key($predictions);


@ -7,6 +7,7 @@ namespace Phpml\Clustering;
use Phpml\Clustering\KMeans\Point; use Phpml\Clustering\KMeans\Point;
use Phpml\Clustering\KMeans\Cluster; use Phpml\Clustering\KMeans\Cluster;
use Phpml\Clustering\KMeans\Space; use Phpml\Clustering\KMeans\Space;
use Phpml\Exception\InvalidArgumentException;
use Phpml\Math\Distance\Euclidean; use Phpml\Math\Distance\Euclidean;
class FuzzyCMeans implements Clusterer class FuzzyCMeans implements Clusterer
@ -25,10 +26,12 @@ class FuzzyCMeans implements Clusterer
* @var Space * @var Space
*/ */
private $space; private $space;
/** /**
* @var array|float[][] * @var array|float[][]
*/ */
private $membership; private $membership;
/** /**
* @var float * @var float
*/ */
@ -56,6 +59,9 @@ class FuzzyCMeans implements Clusterer
/** /**
* @param int $clustersNumber * @param int $clustersNumber
* @param float $fuzziness
* @param float $epsilon
* @param int $maxIterations
* *
* @throws InvalidArgumentException * @throws InvalidArgumentException
*/ */
@ -86,14 +92,15 @@ class FuzzyCMeans implements Clusterer
protected function generateRandomMembership(int $rows, int $cols) protected function generateRandomMembership(int $rows, int $cols)
{ {
$this->membership = []; $this->membership = [];
for ($i=0; $i < $rows; $i++) { for ($i = 0; $i < $rows; ++$i) {
$row = []; $row = [];
$total = 0.0; $total = 0.0;
for ($k=0; $k < $cols; $k++) { for ($k = 0; $k < $cols; ++$k) {
$val = rand(1, 5) / 10.0; $val = rand(1, 5) / 10.0;
$row[] = $val; $row[] = $val;
$total += $val; $total += $val;
} }
$this->membership[] = array_map(function ($val) use ($total) { $this->membership[] = array_map(function ($val) use ($total) {
return $val / $total; return $val / $total;
}, $row); }, $row);
@ -105,19 +112,20 @@ class FuzzyCMeans implements Clusterer
$dim = $this->space->getDimension(); $dim = $this->space->getDimension();
if (!$this->clusters) { if (!$this->clusters) {
$this->clusters = []; $this->clusters = [];
for ($i=0; $i<$this->clustersNumber; $i++) { for ($i = 0; $i < $this->clustersNumber; ++$i) {
$this->clusters[] = new Cluster($this->space, array_fill(0, $dim, 0.0)); $this->clusters[] = new Cluster($this->space, array_fill(0, $dim, 0.0));
} }
} }
for ($i=0; $i<$this->clustersNumber; $i++) { for ($i = 0; $i < $this->clustersNumber; ++$i) {
$cluster = $this->clusters[$i]; $cluster = $this->clusters[$i];
$center = $cluster->getCoordinates(); $center = $cluster->getCoordinates();
for ($k=0; $k<$dim; $k++) { for ($k = 0; $k < $dim; ++$k) {
$a = $this->getMembershipRowTotal($i, $k, true); $a = $this->getMembershipRowTotal($i, $k, true);
$b = $this->getMembershipRowTotal($i, $k, false); $b = $this->getMembershipRowTotal($i, $k, false);
$center[$k] = $a / $b; $center[$k] = $a / $b;
} }
$cluster->setCoordinates($center); $cluster->setCoordinates($center);
} }
} }
@ -125,20 +133,22 @@ class FuzzyCMeans implements Clusterer
protected function getMembershipRowTotal(int $row, int $col, bool $multiply) protected function getMembershipRowTotal(int $row, int $col, bool $multiply)
{ {
$sum = 0.0; $sum = 0.0;
for ($k = 0; $k < $this->sampleCount; $k++) { for ($k = 0; $k < $this->sampleCount; ++$k) {
$val = pow($this->membership[$row][$k], $this->fuzziness); $val = pow($this->membership[$row][$k], $this->fuzziness);
if ($multiply) { if ($multiply) {
$val *= $this->samples[$k][$col]; $val *= $this->samples[$k][$col];
} }
$sum += $val; $sum += $val;
} }
return $sum; return $sum;
} }
protected function updateMembershipMatrix() protected function updateMembershipMatrix()
{ {
for ($i = 0; $i < $this->clustersNumber; $i++) { for ($i = 0; $i < $this->clustersNumber; ++$i) {
for ($k = 0; $k < $this->sampleCount; $k++) { for ($k = 0; $k < $this->sampleCount; ++$k) {
$distCalc = $this->getDistanceCalc($i, $k); $distCalc = $this->getDistanceCalc($i, $k);
$this->membership[$i][$k] = 1.0 / $distCalc; $this->membership[$i][$k] = 1.0 / $distCalc;
} }
@ -157,11 +167,15 @@ class FuzzyCMeans implements Clusterer
$distance = new Euclidean(); $distance = new Euclidean();
$dist1 = $distance->distance( $dist1 = $distance->distance(
$this->clusters[$row]->getCoordinates(), $this->clusters[$row]->getCoordinates(),
$this->samples[$col]); $this->samples[$col]
for ($j = 0; $j < $this->clustersNumber; $j++) { );
for ($j = 0; $j < $this->clustersNumber; ++$j) {
$dist2 = $distance->distance( $dist2 = $distance->distance(
$this->clusters[$j]->getCoordinates(), $this->clusters[$j]->getCoordinates(),
$this->samples[$col]); $this->samples[$col]
);
$val = pow($dist1 / $dist2, 2.0 / ($this->fuzziness - 1)); $val = pow($dist1 / $dist2, 2.0 / ($this->fuzziness - 1));
$sum += $val; $sum += $val;
} }
@ -177,13 +191,14 @@ class FuzzyCMeans implements Clusterer
{ {
$sum = 0.0; $sum = 0.0;
$distance = new Euclidean(); $distance = new Euclidean();
for ($i = 0; $i < $this->clustersNumber; $i++) { for ($i = 0; $i < $this->clustersNumber; ++$i) {
$clust = $this->clusters[$i]->getCoordinates(); $clust = $this->clusters[$i]->getCoordinates();
for ($k = 0; $k < $this->sampleCount; $k++) { for ($k = 0; $k < $this->sampleCount; ++$k) {
$point = $this->samples[$k]; $point = $this->samples[$k];
$sum += $distance->distance($clust, $point); $sum += $distance->distance($clust, $point);
} }
} }
return $sum; return $sum;
} }
@ -210,7 +225,6 @@ class FuzzyCMeans implements Clusterer
// Our goal is minimizing the objective value while // Our goal is minimizing the objective value while
// executing the clustering steps at a maximum number of iterations // executing the clustering steps at a maximum number of iterations
$lastObjective = 0.0; $lastObjective = 0.0;
$difference = 0.0;
$iterations = 0; $iterations = 0;
do { do {
// Update the membership matrix and cluster centers, respectively // Update the membership matrix and cluster centers, respectively
@ -224,7 +238,7 @@ class FuzzyCMeans implements Clusterer
} while ($difference > $this->epsilon && $iterations++ <= $this->maxIterations); } while ($difference > $this->epsilon && $iterations++ <= $this->maxIterations);
// Attach (hard cluster) each data point to the nearest cluster // Attach (hard cluster) each data point to the nearest cluster
for ($k=0; $k<$this->sampleCount; $k++) { for ($k = 0; $k < $this->sampleCount; ++$k) {
$column = array_column($this->membership, $k); $column = array_column($this->membership, $k);
arsort($column); arsort($column);
reset($column); reset($column);
@ -238,6 +252,7 @@ class FuzzyCMeans implements Clusterer
foreach ($this->clusters as $cluster) { foreach ($this->clusters as $cluster) {
$grouped[] = $cluster->getPoints(); $grouped[] = $cluster->getPoints();
} }
return $grouped; return $grouped;
} }
} }
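
For orientation, a minimal clustering sketch built from the constructor parameters listed above (the sample points are made up, and the cluster() entry point of the Clusterer interface is an assumption here):

use Phpml\Clustering\FuzzyCMeans;

// Two clusters, fuzziness 2.0, epsilon 1e-2, at most 100 iterations,
// in the order given by the constructor docblock above.
$fcm = new FuzzyCMeans(2, 2.0, 1e-2, 100);

// Returns one array of points per cluster, matching the grouped return at the end of the hunk above.
$groups = $fcm->cluster([[1, 1], [1, 2], [8, 8], [9, 8]]);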


@ -156,7 +156,11 @@ class Space extends SplObjectStorage
case KMeans::INIT_KMEANS_PLUS_PLUS: case KMeans::INIT_KMEANS_PLUS_PLUS:
$clusters = $this->initializeKMPPClusters($clustersNumber); $clusters = $this->initializeKMPPClusters($clustersNumber);
break; break;
default:
return [];
} }
$clusters[0]->attachAll($this); $clusters[0]->attachAll($this);
return $clusters; return $clusters;


@ -17,6 +17,7 @@ class CsvDataset extends ArrayDataset
* @param string $filepath * @param string $filepath
* @param int $features * @param int $features
* @param bool $headingRow * @param bool $headingRow
* @param string $delimiter
* *
* @throws FileException * @throws FileException
*/ */
@ -37,11 +38,15 @@ class CsvDataset extends ArrayDataset
$this->columnNames = range(0, $features - 1); $this->columnNames = range(0, $features - 1);
} }
$samples = $targets = [];
while (($data = fgetcsv($handle, 1000, $delimiter)) !== false) { while (($data = fgetcsv($handle, 1000, $delimiter)) !== false) {
$this->samples[] = array_slice($data, 0, $features); $samples[] = array_slice($data, 0, $features);
$this->targets[] = $data[$features]; $targets[] = $data[$features];
} }
fclose($handle); fclose($handle);
parent::__construct($samples, $targets);
} }
/** /**

View File

@ -1,4 +1,6 @@
<?php declare(strict_types=1); <?php
declare(strict_types=1);
namespace Phpml\DimensionReduction; namespace Phpml\DimensionReduction;
@ -37,7 +39,7 @@ abstract class EigenTransformerBase
/** /**
* Top eigenValues of the matrix * Top eigenValues of the matrix
* *
* @var type * @var array
*/ */
protected $eigValues = []; protected $eigValues = [];


@ -86,7 +86,7 @@ class KernelPCA extends PCA
$matrix = $this->calculateKernelMatrix($this->data, $numRows); $matrix = $this->calculateKernelMatrix($this->data, $numRows);
$matrix = $this->centerMatrix($matrix, $numRows); $matrix = $this->centerMatrix($matrix, $numRows);
$this->eigenDecomposition($matrix, $numRows); $this->eigenDecomposition($matrix);
$this->fit = true; $this->fit = true;
@ -107,8 +107,8 @@ class KernelPCA extends PCA
$kernelFunc = $this->getKernel(); $kernelFunc = $this->getKernel();
$matrix = []; $matrix = [];
for ($i=0; $i < $numRows; $i++) { for ($i = 0; $i < $numRows; ++$i) {
for ($k=0; $k < $numRows; $k++) { for ($k = 0; $k < $numRows; ++$k) {
if ($i <= $k) { if ($i <= $k) {
$matrix[$i][$k] = $kernelFunc($data[$i], $data[$k]); $matrix[$i][$k] = $kernelFunc($data[$i], $data[$k]);
} else { } else {
@ -128,6 +128,8 @@ class KernelPCA extends PCA
* *
* @param array $matrix * @param array $matrix
* @param int $n * @param int $n
*
* @return array
*/ */
protected function centerMatrix(array $matrix, int $n) protected function centerMatrix(array $matrix, int $n)
{ {
@ -152,6 +154,8 @@ class KernelPCA extends PCA
* Returns the callable kernel function * Returns the callable kernel function
* *
* @return \Closure * @return \Closure
*
* @throws \Exception
*/ */
protected function getKernel() protected function getKernel()
{ {
@ -181,6 +185,9 @@ class KernelPCA extends PCA
return function ($x, $y) use ($dist) { return function ($x, $y) use ($dist) {
return exp(-$this->gamma * $dist->distance($x, $y)); return exp(-$this->gamma * $dist->distance($x, $y));
}; };
default:
throw new \Exception(sprintf('KernelPCA initialized with invalid kernel: %d', $this->kernel));
} }
} }
@ -228,6 +235,8 @@ class KernelPCA extends PCA
* @param array $sample * @param array $sample
* *
* @return array * @return array
*
* @throws \Exception
*/ */
public function transform(array $sample) public function transform(array $sample)
{ {


@ -4,7 +4,6 @@ declare(strict_types=1);
namespace Phpml\DimensionReduction; namespace Phpml\DimensionReduction;
use Phpml\Math\Statistic\Mean;
use Phpml\Math\Matrix; use Phpml\Math\Matrix;
class LDA extends EigenTransformerBase class LDA extends EigenTransformerBase
@ -30,7 +29,7 @@ class LDA extends EigenTransformerBase
public $counts; public $counts;
/** /**
* @var float * @var float[]
*/ */
public $overallMean; public $overallMean;
@ -111,12 +110,12 @@ class LDA extends EigenTransformerBase
* Calculates mean of each column for each class and returns * Calculates mean of each column for each class and returns
* n by m matrix where n is number of labels and m is number of columns * n by m matrix where n is number of labels and m is number of columns
* *
* @param type $data * @param array $data
* @param type $classes * @param array $classes
* *
* @return array * @return array
*/ */
protected function calculateMeans($data, $classes) : array protected function calculateMeans(array $data, array $classes) : array
{ {
$means = []; $means = [];
$counts= []; $counts= [];
@ -136,7 +135,8 @@ class LDA extends EigenTransformerBase
if (!isset($counts[$label])) { if (!isset($counts[$label])) {
$counts[$label] = 0; $counts[$label] = 0;
} }
$counts[$label]++;
++$counts[$label];
} }
foreach ($means as $index => $row) { foreach ($means as $index => $row) {
@ -231,6 +231,8 @@ class LDA extends EigenTransformerBase
* @param array $sample * @param array $sample
* *
* @return array * @return array
*
* @throws \Exception
*/ */
public function transform(array $sample) public function transform(array $sample)
{ {


@ -6,7 +6,6 @@ namespace Phpml\DimensionReduction;
use Phpml\Math\Statistic\Covariance; use Phpml\Math\Statistic\Covariance;
use Phpml\Math\Statistic\Mean; use Phpml\Math\Statistic\Mean;
use Phpml\Math\Matrix;
class PCA extends EigenTransformerBase class PCA extends EigenTransformerBase
{ {
@ -86,7 +85,7 @@ class PCA extends EigenTransformerBase
{ {
// Calculate means for each dimension // Calculate means for each dimension
$this->means = []; $this->means = [];
for ($i=0; $i < $n; $i++) { for ($i = 0; $i < $n; ++$i) {
$column = array_column($data, $i); $column = array_column($data, $i);
$this->means[] = Mean::arithmetic($column); $this->means[] = Mean::arithmetic($column);
} }
@ -109,7 +108,7 @@ class PCA extends EigenTransformerBase
// Normalize data // Normalize data
foreach ($data as $i => $row) { foreach ($data as $i => $row) {
for ($k=0; $k < $n; $k++) { for ($k = 0; $k < $n; ++$k) {
$data[$i][$k] -= $this->means[$k]; $data[$i][$k] -= $this->means[$k];
} }
} }
@ -124,6 +123,8 @@ class PCA extends EigenTransformerBase
* @param array $sample * @param array $sample
* *
* @return array * @return array
*
* @throws \Exception
*/ */
public function transform(array $sample) public function transform(array $sample)
{ {


@ -6,7 +6,6 @@ namespace Phpml\Exception;
class DatasetException extends \Exception class DatasetException extends \Exception
{ {
/** /**
* @param string $path * @param string $path
* *


@ -6,7 +6,6 @@ namespace Phpml\Exception;
class FileException extends \Exception class FileException extends \Exception
{ {
/** /**
* @param string $filepath * @param string $filepath
* *


@ -11,7 +11,7 @@ class InvalidArgumentException extends \Exception
*/ */
public static function arraySizeNotMatch() public static function arraySizeNotMatch()
{ {
return new self('Size of given arrays not match'); return new self('Size of given arrays does not match');
} }
/** /**
@ -55,7 +55,7 @@ class InvalidArgumentException extends \Exception
*/ */
public static function inconsistentMatrixSupplied() public static function inconsistentMatrixSupplied()
{ {
return new self('Inconsistent matrix applied'); return new self('Inconsistent matrix supplied');
} }
/** /**


@ -6,7 +6,6 @@ namespace Phpml\Exception;
class SerializeException extends \Exception class SerializeException extends \Exception
{ {
/** /**
* @param string $filepath * @param string $filepath
* *


@ -6,7 +6,6 @@ namespace Phpml\Helper;
trait OneVsRest trait OneVsRest
{ {
/** /**
* @var array * @var array
*/ */
@ -35,18 +34,18 @@ trait OneVsRest
// Clears previous stuff. // Clears previous stuff.
$this->reset(); $this->reset();
return $this->trainBylabel($samples, $targets); $this->trainBylabel($samples, $targets);
} }
/** /**
* @param array $samples * @param array $samples
* @param array $targets * @param array $targets
* @param array $allLabels All training set labels * @param array $allLabels All training set labels
*
* @return void * @return void
*/ */
protected function trainByLabel(array $samples, array $targets, array $allLabels = []) protected function trainByLabel(array $samples, array $targets, array $allLabels = [])
{ {
// Overwrites the current value if it exist. $allLabels must be provided for each partialTrain run. // Overwrites the current value if it exist. $allLabels must be provided for each partialTrain run.
if (!empty($allLabels)) { if (!empty($allLabels)) {
$this->allLabels = $allLabels; $this->allLabels = $allLabels;
@ -57,7 +56,6 @@ trait OneVsRest
// If there are only two targets, then there is no need to perform OvR // If there are only two targets, then there is no need to perform OvR
if (count($this->allLabels) == 2) { if (count($this->allLabels) == 2) {
// Init classifier if required. // Init classifier if required.
if (empty($this->classifiers)) { if (empty($this->classifiers)) {
$this->classifiers[0] = $this->getClassifierCopy(); $this->classifiers[0] = $this->getClassifierCopy();
@ -68,7 +66,6 @@ trait OneVsRest
// Train a separate classifier for each label and memorize them // Train a separate classifier for each label and memorize them
foreach ($this->allLabels as $label) { foreach ($this->allLabels as $label) {
// Init classifier if required. // Init classifier if required.
if (empty($this->classifiers[$label])) { if (empty($this->classifiers[$label])) {
$this->classifiers[$label] = $this->getClassifierCopy(); $this->classifiers[$label] = $this->getClassifierCopy();
@ -107,7 +104,6 @@ trait OneVsRest
*/ */
protected function getClassifierCopy() protected function getClassifierCopy()
{ {
// Clone the current classifier, so that // Clone the current classifier, so that
// we don't mess up its variables while training // we don't mess up its variables while training
// multiple instances of this classifier // multiple instances of this classifier
@ -181,6 +177,7 @@ trait OneVsRest
* return a probability for a sample to belong to the given label. * return a probability for a sample to belong to the given label.
* *
* @param array $sample * @param array $sample
* @param string $label
* *
* @return mixed * @return mixed
*/ */


@ -34,7 +34,7 @@ class ConjugateGradient extends GD
$d = mp::muls($this->gradient($this->theta), -1); $d = mp::muls($this->gradient($this->theta), -1);
for ($i=0; $i < $this->maxIterations; $i++) { for ($i = 0; $i < $this->maxIterations; ++$i) {
// Obtain α that minimizes f(θ + α.d) // Obtain α that minimizes f(θ + α.d)
$alpha = $this->getAlpha(array_sum($d)); $alpha = $this->getAlpha(array_sum($d));
@ -68,11 +68,11 @@ class ConjugateGradient extends GD
* *
* @param array $theta * @param array $theta
* *
* @return float * @return array
*/ */
protected function gradient(array $theta) protected function gradient(array $theta)
{ {
list($_, $gradient, $_) = parent::gradient($theta); list(, $gradient) = parent::gradient($theta);
return $gradient; return $gradient;
} }
@ -86,7 +86,7 @@ class ConjugateGradient extends GD
*/ */
protected function cost(array $theta) protected function cost(array $theta)
{ {
list($cost, $_, $_) = parent::gradient($theta); list($cost) = parent::gradient($theta);
return array_sum($cost) / $this->sampleCount; return array_sum($cost) / $this->sampleCount;
} }
@ -107,7 +107,7 @@ class ConjugateGradient extends GD
* *
* @param float $d * @param float $d
* *
* @return array * @return float
*/ */
protected function getAlpha(float $d) protected function getAlpha(float $d)
{ {
@ -157,14 +157,14 @@ class ConjugateGradient extends GD
* @param float $alpha * @param float $alpha
* @param array $d * @param array $d
* *
* return array * @return array
*/ */
protected function getNewTheta(float $alpha, array $d) protected function getNewTheta(float $alpha, array $d)
{ {
$theta = $this->theta; $theta = $this->theta;
for ($i=0; $i < $this->dimensions + 1; $i++) { for ($i = 0; $i < $this->dimensions + 1; ++$i) {
if ($i == 0) { if ($i === 0) {
$theta[$i] += $alpha * array_sum($d); $theta[$i] += $alpha * array_sum($d);
} else { } else {
$sum = 0.0; $sum = 0.0;
@ -266,10 +266,11 @@ class mp
* *
* @param array $m1 * @param array $m1
* @param array $m2 * @param array $m2
* @param int $mag
* *
* @return array * @return array
*/ */
public static function add(array $m1, array $m2, $mag = 1) public static function add(array $m1, array $m2, int $mag = 1)
{ {
$res = []; $res = [];
foreach ($m1 as $i => $val) { foreach ($m1 as $i => $val) {
@ -333,10 +334,11 @@ class mp
* *
* @param array $m1 * @param array $m1
* @param float $m2 * @param float $m2
* @param int $mag
* *
* @return array * @return array
*/ */
public static function adds(array $m1, float $m2, $mag = 1) public static function adds(array $m1, float $m2, int $mag = 1)
{ {
$res = []; $res = [];
foreach ($m1 as $val) { foreach ($m1 as $val) {
@ -350,7 +352,7 @@ class mp
* Element-wise <b>subtraction</b> of a vector with a scalar * Element-wise <b>subtraction</b> of a vector with a scalar
* *
* @param array $m1 * @param array $m1
* @param float $m2 * @param array $m2
* *
* @return array * @return array
*/ */


@ -91,8 +91,8 @@ class GD extends StochasticGD
protected function updateWeightsWithUpdates(array $updates, float $penalty) protected function updateWeightsWithUpdates(array $updates, float $penalty)
{ {
// Updates all weights at once // Updates all weights at once
for ($i=0; $i <= $this->dimensions; $i++) { for ($i = 0; $i <= $this->dimensions; ++$i) {
if ($i == 0) { if ($i === 0) {
$this->theta[0] -= $this->learningRate * array_sum($updates); $this->theta[0] -= $this->learningRate * array_sum($updates);
} else { } else {
$col = array_column($this->samples, $i - 1); $col = array_column($this->samples, $i - 1);


@ -31,7 +31,7 @@ abstract class Optimizer
// Inits the weights randomly // Inits the weights randomly
$this->theta = []; $this->theta = [];
for ($i=0; $i < $this->dimensions; $i++) { for ($i = 0; $i < $this->dimensions; ++$i) {
$this->theta[] = rand() / (float) getrandmax(); $this->theta[] = rand() / (float) getrandmax();
} }
} }
@ -40,6 +40,10 @@ abstract class Optimizer
* Sets the weights manually * Sets the weights manually
* *
* @param array $theta * @param array $theta
*
* @return $this
*
* @throws \Exception
*/ */
public function setInitialTheta(array $theta) public function setInitialTheta(array $theta)
{ {
@ -56,6 +60,9 @@ abstract class Optimizer
* Executes the optimization with the given samples & targets * Executes the optimization with the given samples & targets
* and returns the weights * and returns the weights
* *
* @param array $samples
* @param array $targets
* @param \Closure $gradientCb
*/ */
abstract protected function runOptimization(array $samples, array $targets, \Closure $gradientCb); abstract protected function runOptimization(array $samples, array $targets, \Closure $gradientCb);
} }


@ -166,7 +166,6 @@ class StochasticGD extends Optimizer
$currIter = 0; $currIter = 0;
$bestTheta = null; $bestTheta = null;
$bestScore = 0.0; $bestScore = 0.0;
$bestWeightIter = 0;
$this->costValues = []; $this->costValues = [];
while ($this->maxIterations > $currIter++) { while ($this->maxIterations > $currIter++) {
@ -180,7 +179,6 @@ class StochasticGD extends Optimizer
if ($bestTheta == null || $cost <= $bestScore) { if ($bestTheta == null || $cost <= $bestScore) {
$bestTheta = $theta; $bestTheta = $theta;
$bestScore = $cost; $bestScore = $cost;
$bestWeightIter = $currIter;
} }
// Add the cost value for this iteration to the list // Add the cost value for this iteration to the list
@ -218,7 +216,7 @@ class StochasticGD extends Optimizer
$this->theta[0] -= $this->learningRate * $gradient; $this->theta[0] -= $this->learningRate * $gradient;
// Update other values // Update other values
for ($i=1; $i <= $this->dimensions; $i++) { for ($i = 1; $i <= $this->dimensions; ++$i) {
$this->theta[$i] -= $this->learningRate * $this->theta[$i] -= $this->learningRate *
($gradient * $sample[$i - 1] + $penalty * $this->theta[$i]); ($gradient * $sample[$i - 1] + $penalty * $this->theta[$i]);
} }


@ -14,13 +14,13 @@ trait Predictable
public function predict(array $samples) public function predict(array $samples)
{ {
if (!is_array($samples[0])) { if (!is_array($samples[0])) {
$predicted = $this->predictSample($samples); return $this->predictSample($samples);
} else { }
$predicted = []; $predicted = [];
foreach ($samples as $index => $sample) { foreach ($samples as $index => $sample) {
$predicted[$index] = $this->predictSample($sample); $predicted[$index] = $this->predictSample($sample);
} }
}
return $predicted; return $predicted;
} }
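
The simplified predict() above keeps its two calling modes; a sketch using another classifier from this commit (NaiveBayes, which mixes in this trait):

use Phpml\Classification\NaiveBayes;

$classifier = new NaiveBayes();
$classifier->train([[1, 20], [2, 30], [8, 10]], ['a', 'a', 'b']);

// A single flat sample goes straight to predictSample() and returns one label ...
$one = $classifier->predict([1, 25]);

// ... while an array of samples returns one label per sample.
$many = $classifier->predict([[1, 25], [9, 9]]);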


@ -6,7 +6,6 @@ namespace Phpml;
interface IncrementalEstimator interface IncrementalEstimator
{ {
/** /**
* @param array $samples * @param array $samples
* @param array $targets * @param array $targets


@ -23,8 +23,8 @@ class RBF implements Kernel
} }
/** /**
* @param float $a * @param array $a
* @param float $b * @param array $b
* *
* @return float * @return float
*/ */


@ -33,7 +33,6 @@ use Phpml\Math\Matrix;
class EigenvalueDecomposition class EigenvalueDecomposition
{ {
/** /**
* Row and column dimension (square matrix). * Row and column dimension (square matrix).
* @var int * @var int
@ -42,9 +41,9 @@ class EigenvalueDecomposition
/** /**
* Internal symmetry flag. * Internal symmetry flag.
* @var int * @var bool
*/ */
private $issymmetric; private $symmetric;
/** /**
* Arrays for internal storage of eigenvalues. * Arrays for internal storage of eigenvalues.
@ -78,6 +77,38 @@ class EigenvalueDecomposition
private $cdivr; private $cdivr;
private $cdivi; private $cdivi;
/**
* Constructor: Check for symmetry, then construct the eigenvalue decomposition
*
* @param array $Arg
*/
public function __construct(array $Arg)
{
$this->A = $Arg;
$this->n = count($Arg[0]);
$this->symmetric = true;
for ($j = 0; ($j < $this->n) && $this->symmetric; ++$j) {
for ($i = 0; ($i < $this->n) & $this->symmetric; ++$i) {
$this->symmetric = ($this->A[$i][$j] == $this->A[$j][$i]);
}
}
if ($this->symmetric) {
$this->V = $this->A;
// Tridiagonalize.
$this->tred2();
// Diagonalize.
$this->tql2();
} else {
$this->H = $this->A;
$this->ort = [];
// Reduce to Hessenberg form.
$this->orthes();
// Reduce Hessenberg to real Schur form.
$this->hqr2();
}
}
/** /**
* Symmetric Householder reduction to tridiagonal form. * Symmetric Householder reduction to tridiagonal form.
@ -107,14 +138,17 @@ class EigenvalueDecomposition
$this->d[$k] /= $scale; $this->d[$k] /= $scale;
$h += pow($this->d[$k], 2); $h += pow($this->d[$k], 2);
} }
$f = $this->d[$i_]; $f = $this->d[$i_];
$g = sqrt($h); $g = sqrt($h);
if ($f > 0) { if ($f > 0) {
$g = -$g; $g = -$g;
} }
$this->e[$i] = $scale * $g; $this->e[$i] = $scale * $g;
$h = $h - $f * $g; $h = $h - $f * $g;
$this->d[$i_] = $f - $g; $this->d[$i_] = $f - $g;
for ($j = 0; $j < $i; ++$j) { for ($j = 0; $j < $i; ++$j) {
$this->e[$j] = 0.0; $this->e[$j] = 0.0;
} }
@ -123,20 +157,24 @@ class EigenvalueDecomposition
$f = $this->d[$j]; $f = $this->d[$j];
$this->V[$j][$i] = $f; $this->V[$j][$i] = $f;
$g = $this->e[$j] + $this->V[$j][$j] * $f; $g = $this->e[$j] + $this->V[$j][$j] * $f;
for ($k = $j + 1; $k <= $i_; ++$k) { for ($k = $j + 1; $k <= $i_; ++$k) {
$g += $this->V[$k][$j] * $this->d[$k]; $g += $this->V[$k][$j] * $this->d[$k];
$this->e[$k] += $this->V[$k][$j] * $f; $this->e[$k] += $this->V[$k][$j] * $f;
} }
$this->e[$j] = $g; $this->e[$j] = $g;
} }
$f = 0.0; $f = 0.0;
if ($h === 0 || $h < 1e-32) { if ($h === 0 || $h < 1e-32) {
$h = 1e-32; $h = 1e-32;
} }
for ($j = 0; $j < $i; ++$j) { for ($j = 0; $j < $i; ++$j) {
$this->e[$j] /= $h; $this->e[$j] /= $h;
$f += $this->e[$j] * $this->d[$j]; $f += $this->e[$j] * $this->d[$j];
} }
$hh = $f / (2 * $h); $hh = $f / (2 * $h);
for ($j = 0; $j < $i; ++$j) { for ($j = 0; $j < $i; ++$j) {
$this->e[$j] -= $hh * $this->d[$j]; $this->e[$j] -= $hh * $this->d[$j];
@ -378,9 +416,13 @@ class EigenvalueDecomposition
} }
} }
/** /**
* Performs complex division. * Performs complex division.
*
* @param int|float $xr
* @param int|float $xi
* @param int|float $yr
* @param int|float $yi
*/ */
private function cdiv($xr, $xi, $yr, $yi) private function cdiv($xr, $xi, $yr, $yi)
{ {
@ -397,7 +439,6 @@ class EigenvalueDecomposition
} }
} }
/** /**
* Nonsymmetric reduction from Hessenberg to real Schur form. * Nonsymmetric reduction from Hessenberg to real Schur form.
* *
@ -662,6 +703,7 @@ class EigenvalueDecomposition
for ($j = $l; $j <= $n; ++$j) { for ($j = $l; $j <= $n; ++$j) {
$r = $r + $this->H[$i][$j] * $this->H[$j][$n]; $r = $r + $this->H[$i][$j] * $this->H[$j][$n];
} }
if ($this->e[$i] < 0.0) { if ($this->e[$i] < 0.0) {
$z = $w; $z = $w;
$s = $r; $s = $r;
@ -783,44 +825,11 @@ class EigenvalueDecomposition
} }
} // end hqr2 } // end hqr2
/**
* Constructor: Check for symmetry, then construct the eigenvalue decomposition
*
* @param array $Arg
*/
public function __construct(array $Arg)
{
$this->A = $Arg;
$this->n = count($Arg[0]);
$issymmetric = true;
for ($j = 0; ($j < $this->n) & $issymmetric; ++$j) {
for ($i = 0; ($i < $this->n) & $issymmetric; ++$i) {
$issymmetric = ($this->A[$i][$j] == $this->A[$j][$i]);
}
}
if ($issymmetric) {
$this->V = $this->A;
// Tridiagonalize.
$this->tred2();
// Diagonalize.
$this->tql2();
} else {
$this->H = $this->A;
$this->ort = [];
// Reduce to Hessenberg form.
$this->orthes();
// Reduce Hessenberg to real Schur form.
$this->hqr2();
}
}
/** /**
* Return the eigenvector matrix * Return the eigenvector matrix
* *
* @access public * @access public
*
* @return array * @return array
*/ */
public function getEigenvectors() public function getEigenvectors()
@ -831,20 +840,21 @@ class EigenvalueDecomposition
$vectors = new Matrix($vectors); $vectors = new Matrix($vectors);
$vectors = array_map(function ($vect) { $vectors = array_map(function ($vect) {
$sum = 0; $sum = 0;
for ($i=0; $i<count($vect); $i++) { for ($i = 0; $i < count($vect); ++$i) {
$sum += $vect[$i] ** 2; $sum += $vect[$i] ** 2;
} }
$sum = sqrt($sum); $sum = sqrt($sum);
for ($i=0; $i<count($vect); $i++) { for ($i = 0; $i < count($vect); ++$i) {
$vect[$i] /= $sum; $vect[$i] /= $sum;
} }
return $vect; return $vect;
}, $vectors->transpose()->toArray()); }, $vectors->transpose()->toArray());
return $vectors; return $vectors;
} }
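The closure above rescales each eigenvector to unit Euclidean length; in isolation that step is simply:

// v[i] / sqrt(v[0]^2 + ... + v[n-1]^2), e.g. [3, 4] becomes [0.6, 0.8]
$vect = [3.0, 4.0];
$norm = sqrt(3.0 ** 2 + 4.0 ** 2);   // 5.0
$unit = [3.0 / $norm, 4.0 / $norm];  // [0.6, 0.8]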
/** /**
* Return the real parts of the eigenvalues<br> * Return the real parts of the eigenvalues<br>
* d = real(diag(D)); * d = real(diag(D));
@ -856,7 +866,6 @@ class EigenvalueDecomposition
return $this->d; return $this->d;
} }
/** /**
* Return the imaginary parts of the eigenvalues <br> * Return the imaginary parts of the eigenvalues <br>
* d = imag(diag(D)) * d = imag(diag(D))
@ -868,7 +877,6 @@ class EigenvalueDecomposition
return $this->e; return $this->e;
} }
/** /**
* Return the block diagonal eigenvalue matrix * Return the block diagonal eigenvalue matrix
* *
@ -876,15 +884,19 @@ class EigenvalueDecomposition
*/ */
public function getDiagonalEigenvalues() public function getDiagonalEigenvalues()
{ {
$D = [];
for ($i = 0; $i < $this->n; ++$i) { for ($i = 0; $i < $this->n; ++$i) {
$D[$i] = array_fill(0, $this->n, 0.0); $D[$i] = array_fill(0, $this->n, 0.0);
$D[$i][$i] = $this->d[$i]; $D[$i][$i] = $this->d[$i];
if ($this->e[$i] == 0) { if ($this->e[$i] == 0) {
continue; continue;
} }
$o = ($this->e[$i] > 0) ? $i + 1 : $i - 1; $o = ($this->e[$i] > 0) ? $i + 1 : $i - 1;
$D[$i][$o] = $this->e[$i]; $D[$i][$o] = $this->e[$i];
} }
return $D; return $D;
} }
} // class EigenvalueDecomposition } // class EigenvalueDecomposition
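For a complex conjugate pair a ± bi, getDiagonalEigenvalues() above emits a 2x2 block instead of two plain diagonal entries (illustrative values):

// Real eigenvalues 3 and 5 plus a complex pair 2 ± 1i produce:
// [  3  0  0  0 ]
// [  0  5  0  0 ]
// [  0  0  2  1 ]
// [  0  0 -1  2 ]
// because $D[$i][$i] = d[i] and the imaginary part e[i] is written one column
// to the right of the diagonal when e[i] > 0, or one to the left when e[i] < 0.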

View File

@ -1,4 +1,6 @@
<?php declare(strict_types=1); <?php
declare(strict_types=1);
/** /**
* @package JAMA * @package JAMA
* *
@ -62,10 +64,11 @@ class LUDecomposition
/** /**
* LU Decomposition constructor. * Constructs Structure to access L, U and piv.
* *
* @param $A Rectangular matrix * @param Matrix $A Rectangular matrix
* @return Structure to access L, U and piv. *
* @throws MatrixException
*/ */
public function __construct(Matrix $A) public function __construct(Matrix $A)
{ {
@ -81,7 +84,7 @@ class LUDecomposition
$this->piv[$i] = $i; $this->piv[$i] = $i;
} }
$this->pivsign = 1; $this->pivsign = 1;
$LUrowi = $LUcolj = []; $LUcolj = [];
// Outer loop. // Outer loop.
for ($j = 0; $j < $this->n; ++$j) { for ($j = 0; $j < $this->n; ++$j) {
@ -131,7 +134,7 @@ class LUDecomposition
/** /**
* Get lower triangular factor. * Get lower triangular factor.
* *
* @return array Lower triangular factor * @return Matrix Lower triangular factor
*/ */
public function getL() public function getL()
{ {
@ -154,7 +157,7 @@ class LUDecomposition
/** /**
* Get upper triangular factor. * Get upper triangular factor.
* *
* @return array Upper triangular factor * @return Matrix Upper triangular factor
*/ */
public function getU() public function getU()
{ {
@ -206,6 +209,7 @@ class LUDecomposition
return false; return false;
} }
} }
return true; return true;
} // function isNonsingular() } // function isNonsingular()
@ -213,19 +217,22 @@ class LUDecomposition
/** /**
* Count determinants * Count determinants
* *
* @return array d matrix deterninat * @return float|int d matrix determinant
*
* @throws MatrixException
*/ */
public function det() public function det()
{ {
if ($this->m == $this->n) { if ($this->m !== $this->n) {
throw MatrixException::notSquareMatrix();
}
$d = $this->pivsign; $d = $this->pivsign;
for ($j = 0; $j < $this->n; ++$j) { for ($j = 0; $j < $this->n; ++$j) {
$d *= $this->LU[$j][$j]; $d *= $this->LU[$j][$j];
} }
return $d; return $d;
} else {
throw MatrixException::notSquareMatrix();
}
} // function det() } // function det()
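After the change above, det() rejects non-square input up front and otherwise returns the pivot sign times the product of the LU diagonal. A usage sketch (namespaces assumed, they are not visible in this diff):

use Phpml\Math\Matrix;                        // assumed namespace
use Phpml\Math\LinearAlgebra\LUDecomposition; // assumed namespace

$lu = new LUDecomposition(new Matrix([
    [4, 3],
    [6, 3],
]));

echo $lu->det(); // -6, i.e. 4*3 - 3*6, computed as pivsign * prod(diag(LU))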
@ -274,7 +281,8 @@ class LUDecomposition
} // function solve() } // function solve()
/** /**
* @param Matrix $matrix * @param array $matrix
* @param array $RL
* @param int $j0 * @param int $j0
* @param int $jF * @param int $jF
* *

View File

@ -139,6 +139,7 @@ class Matrix
} }
$lu = new LUDecomposition($this); $lu = new LUDecomposition($this);
return $this->determinant = $lu->det(); return $this->determinant = $lu->det();
} }
@ -232,6 +233,8 @@ class Matrix
* Element-wise addition of the matrix with another one * Element-wise addition of the matrix with another one
* *
* @param Matrix $other * @param Matrix $other
*
* @return Matrix
*/ */
public function add(Matrix $other) public function add(Matrix $other)
{ {
@ -242,6 +245,8 @@ class Matrix
* Element-wise subtracting of another matrix from this one * Element-wise subtracting of another matrix from this one
* *
* @param Matrix $other * @param Matrix $other
*
* @return Matrix
*/ */
public function subtract(Matrix $other) public function subtract(Matrix $other)
{ {
@ -252,7 +257,9 @@ class Matrix
* Element-wise addition or substraction depending on the given sign parameter * Element-wise addition or substraction depending on the given sign parameter
* *
* @param Matrix $other * @param Matrix $other
* @param type $sign * @param int $sign
*
* @return Matrix
*/ */
protected function _add(Matrix $other, $sign = 1) protected function _add(Matrix $other, $sign = 1)
{ {
@ -260,13 +267,13 @@ class Matrix
$a2 = $other->toArray(); $a2 = $other->toArray();
$newMatrix = []; $newMatrix = [];
for ($i=0; $i < $this->rows; $i++) { for ($i = 0; $i < $this->rows; ++$i) {
for ($k=0; $k < $this->columns; $k++) { for ($k = 0; $k < $this->columns; ++$k) {
$newMatrix[$i][$k] = $a1[$i][$k] + $sign * $a2[$i][$k]; $newMatrix[$i][$k] = $a1[$i][$k] + $sign * $a2[$i][$k];
} }
} }
return new Matrix($newMatrix, false); return new self($newMatrix, false);
} }
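_add() above is the shared element-wise kernel behind both public methods; the $sign parameter flips it between addition and subtraction:

$a = new Matrix([[1, 2], [3, 4]]);
$b = new Matrix([[5, 6], [7, 8]]);

$sum  = $a->add($b)->toArray();      // [[6, 8], [10, 12]]   ($sign = 1)
$diff = $a->subtract($b)->toArray(); // [[-4, -4], [-4, -4]] ($sign = -1)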
/** /**
@ -295,7 +302,7 @@ class Matrix
protected function getIdentity() protected function getIdentity()
{ {
$array = array_fill(0, $this->rows, array_fill(0, $this->columns, 0)); $array = array_fill(0, $this->rows, array_fill(0, $this->columns, 0));
for ($i=0; $i < $this->rows; $i++) { for ($i = 0; $i < $this->rows; ++$i) {
$array[$i][$i] = 1; $array[$i][$i] = 1;
} }
@ -345,7 +352,7 @@ class Matrix
*/ */
public static function transposeArray(array $array) public static function transposeArray(array $array)
{ {
return (new Matrix($array, false))->transpose()->toArray(); return (new self($array, false))->transpose()->toArray();
} }
/** /**
@ -359,8 +366,8 @@ class Matrix
*/ */
public static function dot(array $array1, array $array2) public static function dot(array $array1, array $array2)
{ {
$m1 = new Matrix($array1, false); $m1 = new self($array1, false);
$m2 = new Matrix($array2, false); $m2 = new self($array2, false);
return $m1->multiply($m2->transpose())->toArray()[0]; return $m1->multiply($m2->transpose())->toArray()[0];
} }
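dot() above multiplies the first array (as a row vector) by the transpose of the second and returns the first row of the product, so for two plain vectors that row holds the single scalar product (assuming, as elsewhere in the library, that the constructor wraps a flat array as one row):

Matrix::dot([1, 2, 3], [4, 5, 6]); // [32], i.e. 1*4 + 2*5 + 3*6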

View File

@ -59,12 +59,16 @@ class Covariance
* @param array $data * @param array $data
* @param int $i * @param int $i
* @param int $k * @param int $k
* @param type $sample * @param bool $sample
* @param int $n
* @param float $meanX * @param float $meanX
* @param float $meanY * @param float $meanY
*
* @return float
*
* @throws InvalidArgumentException
* @throws \Exception
*/ */
public static function fromDataset(array $data, int $i, int $k, $sample = true, float $meanX = null, float $meanY = null) public static function fromDataset(array $data, int $i, int $k, bool $sample = true, float $meanX = null, float $meanY = null)
{ {
if (empty($data)) { if (empty($data)) {
throw InvalidArgumentException::arrayCantBeEmpty(); throw InvalidArgumentException::arrayCantBeEmpty();
@ -124,6 +128,7 @@ class Covariance
* Returns the covariance matrix of n-dimensional data * Returns the covariance matrix of n-dimensional data
* *
* @param array $data * @param array $data
* @param array|null $means
* *
* @return array * @return array
*/ */
@ -133,19 +138,20 @@ class Covariance
if ($means === null) { if ($means === null) {
$means = []; $means = [];
for ($i=0; $i < $n; $i++) { for ($i = 0; $i < $n; ++$i) {
$means[] = Mean::arithmetic(array_column($data, $i)); $means[] = Mean::arithmetic(array_column($data, $i));
} }
} }
$cov = []; $cov = [];
for ($i=0; $i < $n; $i++) { for ($i = 0; $i < $n; ++$i) {
for ($k=0; $k < $n; $k++) { for ($k = 0; $k < $n; ++$k) {
if ($i > $k) { if ($i > $k) {
$cov[$i][$k] = $cov[$k][$i]; $cov[$i][$k] = $cov[$k][$i];
} else { } else {
$cov[$i][$k] = Covariance::fromDataset( $cov[$i][$k] = self::fromDataset(
$data, $i, $k, true, $means[$i], $means[$k]); $data, $i, $k, true, $means[$i], $means[$k]
);
} }
} }
} }
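The nested loops above compute only the upper triangle with fromDataset() and mirror it, since covariance is symmetric. A small sketch, assuming the public entry point is named covarianceMatrix() (the method signature is outside this hunk):

use Phpml\Math\Statistic\Covariance; // assumed namespace

$data = [
    [1, 10],
    [2, 11],
    [3, 13],
];

// Sample covariance, matching the $sample = true passed to fromDataset() above.
$cov = Covariance::covarianceMatrix($data); // assumed method name
// roughly [[1.0, 1.5], [1.5, 2.333]]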

View File

@ -31,7 +31,7 @@ class Gaussian
* *
* @param float $value * @param float $value
* *
* @return type * @return float|int
*/ */
public function pdf(float $value) public function pdf(float $value)
{ {
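pdf() above evaluates the normal density for a single value. A standalone sketch of the formula it is expected to implement, with mean and standard deviation as explicit parameters:

// Normal density: 1 / (sigma * sqrt(2*pi)) * exp(-(x - mean)^2 / (2 * sigma^2))
function gaussianPdf(float $value, float $mean, float $std): float
{
    return (1 / ($std * sqrt(2 * M_PI)))
        * exp(-(($value - $mean) ** 2) / (2 * $std ** 2));
}

gaussianPdf(0.0, 0.0, 1.0); // ~0.3989, the peak of the standard normal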

View File

@ -68,7 +68,7 @@ class Mean
*/ */
private static function checkArrayLength(array $array) private static function checkArrayLength(array $array)
{ {
if (0 == count($array)) { if (empty($array)) {
throw InvalidArgumentException::arrayCantBeEmpty(); throw InvalidArgumentException::arrayCantBeEmpty();
} }
} }

View File

@ -112,8 +112,8 @@ class ClassificationReport
private function computeAverage() private function computeAverage()
{ {
foreach (['precision', 'recall', 'f1score'] as $metric) { foreach (['precision', 'recall', 'f1score'] as $metric) {
$values = array_filter($this->$metric); $values = array_filter($this->{$metric});
if (0 == count($values)) { if (empty($values)) {
$this->average[$metric] = 0.0; $this->average[$metric] = 0.0;
continue; continue;
} }

View File

@ -12,6 +12,7 @@ class ModelManager
/** /**
* @param Estimator $estimator * @param Estimator $estimator
* @param string $filepath * @param string $filepath
*
* @throws FileException * @throws FileException
* @throws SerializeException * @throws SerializeException
*/ */
@ -23,7 +24,7 @@ class ModelManager
$serialized = serialize($estimator); $serialized = serialize($estimator);
if (empty($serialized)) { if (empty($serialized)) {
throw SerializeException::cantSerialize(get_type($estimator)); throw SerializeException::cantSerialize(gettype($estimator));
} }
$result = file_put_contents($filepath, $serialized, LOCK_EX); $result = file_put_contents($filepath, $serialized, LOCK_EX);
@ -34,7 +35,9 @@ class ModelManager
/** /**
* @param string $filepath * @param string $filepath
*
* @return Estimator * @return Estimator
*
* @throws FileException * @throws FileException
* @throws SerializeException * @throws SerializeException
*/ */

View File

@ -44,10 +44,12 @@ class Backpropagation implements Training
*/ */
public function train(array $samples, array $targets, float $desiredError = 0.001, int $maxIterations = 10000) public function train(array $samples, array $targets, float $desiredError = 0.001, int $maxIterations = 10000)
{ {
$samplesCount = count($samples);
for ($i = 0; $i < $maxIterations; ++$i) { for ($i = 0; $i < $maxIterations; ++$i) {
$resultsWithinError = $this->trainSamples($samples, $targets, $desiredError); $resultsWithinError = $this->trainSamples($samples, $targets, $desiredError);
if ($resultsWithinError == count($samples)) { if ($resultsWithinError === $samplesCount) {
break; break;
} }
} }
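Hoisting count($samples) out of the loop and comparing strictly does not change behaviour; train() is still called as before. An illustrative call against the signature above (toy XOR data; constructing the trainer itself is outside this diff):

$samples = [[0, 0], [0, 1], [1, 0], [1, 1]];
$targets = [0, 1, 1, 0];

$backpropagation->train($samples, $targets, 0.001, 10000);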

View File

@ -84,7 +84,7 @@ class Normalizer implements Preprocessor
$this->fit($samples); $this->fit($samples);
foreach ($samples as &$sample) { foreach ($samples as &$sample) {
$this->$method($sample); $this->{$method}($sample);
} }
} }

View File

@ -130,6 +130,8 @@ class SupportVectorMachine
/** /**
* @param string $binPath * @param string $binPath
*
* @return $this
*/ */
public function setBinPath(string $binPath) public function setBinPath(string $binPath)
{ {
@ -140,6 +142,8 @@ class SupportVectorMachine
/** /**
* @param string $varPath * @param string $varPath
*
* @return $this
*/ */
public function setVarPath(string $varPath) public function setVarPath(string $varPath)
{ {
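With @return $this documented on both setters, and assuming they do return $this as documented, the two paths can be configured fluently (values below are purely illustrative):

$svm->setBinPath('/usr/local/libsvm/')     // illustrative path
    ->setVarPath(sys_get_temp_dir() . '/');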
@ -230,8 +234,8 @@ class SupportVectorMachine
} }
/** /**
* @param $trainingSetFileName * @param string $trainingSetFileName
* @param $modelFileName * @param string $modelFileName
* *
* @return string * @return string
*/ */