php-cs-fixer - more rules (#118)

* Add new cs-fixer rules and run them
* Do not align double arrows/equals

parent ed5fc8996c
commit 3ac658c397

Changed file: .php_cs (18 lines changed)
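
The second bullet corresponds to the 'binary_operator_spaces' options added to .php_cs below. As an illustrative sketch only (not code from this commit), with 'align_double_arrow' and 'align_equals' disabled the fixer keeps a single space around => and = instead of padding them into aligned columns:

    <?php
    // Illustrative only: keys and assignments of different lengths stay
    // unaligned under ['align_double_arrow' => false, 'align_equals' => false].
    $name = 'RandomForest';
    $options = [
        'depth' => 10,
        'numClassifiers' => 50,
    ];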
@@ -3,11 +3,25 @@
 return PhpCsFixer\Config::create()
     ->setRules([
         '@PSR2' => true,
-        'declare_strict_types' => true,
         'array_syntax' => ['syntax' => 'short'],
+        'binary_operator_spaces' => ['align_double_arrow' => false, 'align_equals' => false],
         'blank_line_after_opening_tag' => true,
+        'blank_line_before_return' => true,
+        'cast_spaces' => true,
+        'concat_space' => ['spacing' => 'none'],
+        'declare_strict_types' => true,
+        'method_separation' => true,
+        'no_blank_lines_after_class_opening' => true,
+        'no_spaces_around_offset' => ['positions' => ['inside', 'outside']],
+        'no_unneeded_control_parentheses' => true,
+        'no_unused_imports' => true,
+        'phpdoc_align' => true,
+        'phpdoc_no_access' => true,
+        'phpdoc_separation' => true,
+        'pre_increment' => true,
+        'single_quote' => true,
+        'trim_array_spaces' => true,
         'single_blank_line_before_namespace' => true,
-        'no_unused_imports' => true
     ])
     ->setFinder(
         PhpCsFixer\Finder::create()
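
Taken together, the rule set above codifies the style that the remaining hunks of this commit apply across the source and test classes: single-quoted strings, no spaces around the concatenation dot, short array syntax, pre-increment in loops, and a blank line before return. A minimal, hypothetical example of code that already satisfies these rules (assumed function and variable names, not taken from php-ml):

    <?php

    declare(strict_types=1);

    // Hypothetical sketch: 'single_quote', 'concat_space' => 'none',
    // short array syntax, 'pre_increment' and 'blank_line_before_return'.
    function describeSamples(array $samples): string
    {
        $total = 0;
        for ($i = 0; $i < count($samples); ++$i) {
            $total += $samples[$i];
        }

        return 'Sum of '.count($samples).' samples is '.$total;
    }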
@@ -71,6 +71,7 @@ class DecisionTreeLeaf
 
     /**
      * @param array $record
+     *
      * @return bool
      */
     public function evaluate($record)
@@ -82,6 +83,7 @@ class DecisionTreeLeaf
         $value = $this->numericValue;
         $recordField = strval($recordField);
         eval("\$result = $recordField $op $value;");
+
         return $result;
     }
 
@@ -122,6 +124,7 @@ class DecisionTreeLeaf
      * Returns HTML representation of the node including children nodes
      *
      * @param $columnNames
+     *
      * @return string
      */
     public function getHTML($columnNames = null)
@@ -135,29 +138,34 @@ class DecisionTreeLeaf
             } else {
                 $col = "col_$this->columnIndex";
             }
-            if (!preg_match("/^[<>=]{1,2}/", $value)) {
+            if (!preg_match('/^[<>=]{1,2}/', $value)) {
                 $value = "=$value";
             }
             $value = "<b>$col $value</b><br>Gini: ".number_format($this->giniIndex, 2);
         }
-        $str = "<table ><tr><td colspan=3 align=center style='border:1px solid;'>
-$value</td></tr>";
+        $str = "<table ><tr><td colspan=3 align=center style='border:1px solid;'>$value</td></tr>";
+
         if ($this->leftLeaf || $this->rightLeaf) {
             $str .= '<tr>';
             if ($this->leftLeaf) {
-                $str .="<td valign=top><b>| Yes</b><br>" . $this->leftLeaf->getHTML($columnNames) . "</td>";
+                $str .= '<td valign=top><b>| Yes</b><br>'.$this->leftLeaf->getHTML($columnNames).'</td>';
             } else {
                 $str .= '<td></td>';
             }
+
             $str .= '<td> </td>';
             if ($this->rightLeaf) {
-                $str .="<td valign=top align=right><b>No |</b><br>" . $this->rightLeaf->getHTML($columnNames) . "</td>";
+                $str .= '<td valign=top align=right><b>No |</b><br>'.$this->rightLeaf->getHTML($columnNames).'</td>';
            } else {
                 $str .= '<td></td>';
             }
+
             $str .= '</tr>';
         }
+
         $str .= '</table>';
+
         return $str;
     }
 
@@ -18,6 +18,7 @@ class AdaBoost implements Classifier
 
     /**
      * Actual labels given in the targets array
+     *
      * @var array
      */
     protected $labels = [];
@@ -105,7 +106,7 @@ class AdaBoost implements Classifier
         // Initialize usual variables
         $this->labels = array_keys(array_count_values($targets));
         if (count($this->labels) != 2) {
-            throw new \Exception("AdaBoost is a binary classifier and can classify between two classes only");
+            throw new \Exception('AdaBoost is a binary classifier and can classify between two classes only');
         }
 
         // Set all target values to either -1 or 1
@@ -220,6 +221,7 @@ class AdaBoost implements Classifier
      * Calculates alpha of a classifier
      *
      * @param float $errorRate
+     *
      * @return float
      */
     protected function calculateAlpha(float $errorRate)
@@ -227,6 +229,7 @@ class AdaBoost implements Classifier
         if ($errorRate == 0) {
             $errorRate = 1e-10;
         }
+
         return 0.5 * log((1 - $errorRate) / $errorRate);
     }
 
@@ -254,6 +257,7 @@ class AdaBoost implements Classifier
 
     /**
      * @param array $sample
+     *
      * @return mixed
      */
     public function predictSample(array $sample)
@@ -84,10 +84,11 @@ class Bagging implements Classifier
     public function setSubsetRatio(float $ratio)
     {
         if ($ratio < 0.1 || $ratio > 1.0) {
-            throw new \Exception("Subset ratio should be between 0.1 and 1.0");
+            throw new \Exception('Subset ratio should be between 0.1 and 1.0');
         }
 
         $this->subsetRatio = $ratio;
+
         return $this;
     }
 
@@ -135,6 +136,7 @@ class Bagging implements Classifier
 
     /**
      * @param int $index
+     *
      * @return array
      */
     protected function getRandomSubset(int $index)
@@ -168,6 +170,7 @@ class Bagging implements Classifier
 
             $classifiers[] = $this->initSingleClassifier($obj);
         }
+
         return $classifiers;
     }
 
@@ -183,6 +186,7 @@ class Bagging implements Classifier
 
     /**
      * @param array $sample
+     *
      * @return mixed
      */
     protected function predictSample(array $sample)
@@ -196,6 +200,7 @@ class Bagging implements Classifier
         $counts = array_count_values($predictions);
         arsort($counts);
         reset($counts);
+
         return key($counts);
     }
 }
@@ -50,7 +50,7 @@ class RandomForest extends Bagging
     public function setFeatureSubsetRatio($ratio)
     {
         if (is_float($ratio) && ($ratio < 0.1 || $ratio > 1.0)) {
-            throw new \Exception("When a float given, feature subset ratio should be between 0.1 and 1.0");
+            throw new \Exception('When a float given, feature subset ratio should be between 0.1 and 1.0');
         }
 
         if (is_string($ratio) && $ratio != 'sqrt' && $ratio != 'log') {
@@ -58,6 +58,7 @@ class RandomForest extends Bagging
         }
 
         $this->featureSubsetRatio = $ratio;
+
         return $this;
     }
 
@@ -74,7 +75,7 @@ class RandomForest extends Bagging
     public function setClassifer(string $classifier, array $classifierOptions = [])
     {
         if ($classifier != DecisionTree::class) {
-            throw new \Exception("RandomForest can only use DecisionTree as base classifier");
+            throw new \Exception('RandomForest can only use DecisionTree as base classifier');
         }
 
         return parent::setClassifer($classifier, $classifierOptions);
@@ -120,6 +121,7 @@ class RandomForest extends Bagging
      * when trying to print some information about the trees such as feature importances
      *
      * @param array $names
+     *
      * @return $this
      */
     public function setColumnNames(array $names)
@@ -46,7 +46,7 @@ class Adaline extends Perceptron
         int $trainingType = self::BATCH_TRAINING
     ) {
         if (!in_array($trainingType, [self::BATCH_TRAINING, self::ONLINE_TRAINING])) {
-            throw new \Exception("Adaline can only be trained with batch and online/stochastic gradient descent algorithm");
+            throw new \Exception('Adaline can only be trained with batch and online/stochastic gradient descent algorithm');
         }
 
         $this->trainingType = $trainingType;
@@ -106,7 +106,7 @@ class DecisionStump extends WeightedClassifier
         if ($this->weights) {
             $numWeights = count($this->weights);
             if ($numWeights != count($samples)) {
-                throw new \Exception("Number of sample weights does not match with number of samples");
+                throw new \Exception('Number of sample weights does not match with number of samples');
             }
         } else {
             $this->weights = array_fill(0, count($samples), 1);
@@ -236,7 +236,6 @@ class DecisionStump extends WeightedClassifier
         return $split;
     }
 
-
     /**
      *
      * @param mixed $leftValue
@@ -358,7 +357,7 @@ class DecisionStump extends WeightedClassifier
     public function __toString()
     {
         return "IF $this->column $this->operator $this->value ".
-            "THEN " . $this->binaryLabels[0] . " ".
-            "ELSE " . $this->binaryLabels[1];
+            'THEN '.$this->binaryLabels[0].' '.
+            'ELSE '.$this->binaryLabels[1];
     }
 }
@@ -76,9 +76,9 @@ class LogisticRegression extends Adaline
     ) {
         $trainingTypes = range(self::BATCH_TRAINING, self::CONJUGATE_GRAD_TRAINING);
         if (!in_array($trainingType, $trainingTypes)) {
-            throw new \Exception("Logistic regression can only be trained with " .
-                "batch (gradient descent), online (stochastic gradient descent) " .
-                "or conjugate batch (conjugate gradients) algorithms");
+            throw new \Exception('Logistic regression can only be trained with '.
+                'batch (gradient descent), online (stochastic gradient descent) '.
+                'or conjugate batch (conjugate gradients) algorithms');
         }
 
         if (!in_array($cost, ['log', 'sse'])) {
@@ -290,6 +290,7 @@ class LogisticRegression extends Adaline
 
         if (strval($predicted) == strval($label)) {
             $sample = $this->checkNormalizedSample($sample);
+
             return abs($this->output($sample) - 0.5);
         }
 
@@ -74,11 +74,11 @@ class Perceptron implements Classifier, IncrementalEstimator
     public function __construct(float $learningRate = 0.001, int $maxIterations = 1000, bool $normalizeInputs = true)
     {
         if ($learningRate <= 0.0 || $learningRate > 1.0) {
-            throw new \Exception("Learning rate should be a float value between 0.0(exclusive) and 1.0(inclusive)");
+            throw new \Exception('Learning rate should be a float value between 0.0(exclusive) and 1.0(inclusive)');
         }
 
         if ($maxIterations <= 0) {
-            throw new \Exception("Maximum number of iterations must be an integer greater than 0");
+            throw new \Exception('Maximum number of iterations must be an integer greater than 0');
         }
 
         if ($normalizeInputs) {
@@ -231,6 +231,7 @@ class Perceptron implements Classifier, IncrementalEstimator
      * Calculates net output of the network as a float value for the given input
      *
      * @param array $sample
+     *
      * @return int
      */
     protected function output(array $sample)
@@ -251,6 +252,7 @@ class Perceptron implements Classifier, IncrementalEstimator
      * Returns the class value (either -1 or 1) for the given input
      *
      * @param array $sample
+     *
      * @return int
      */
     protected function outputClass(array $sample)
@@ -275,6 +277,7 @@ class Perceptron implements Classifier, IncrementalEstimator
 
         if (strval($predicted) == strval($label)) {
             $sample = $this->checkNormalizedSample($sample);
+
             return abs($this->output($sample));
         }
 
@@ -9,7 +9,6 @@ use Phpml\NeuralNetwork\Network\MultilayerPerceptron;
 
 class MLPClassifier extends MultilayerPerceptron implements Classifier
 {
-
     /**
      * @param mixed $target
      *
@@ -22,6 +21,7 @@ class MLPClassifier extends MultilayerPerceptron implements Classifier
         if (!in_array($target, $this->classes)) {
             throw InvalidArgumentException::invalidTarget($target);
         }
+
         return array_search($target, $this->classes);
     }
 
@@ -42,6 +42,7 @@ class MLPClassifier extends MultilayerPerceptron implements Classifier
                 $max = $value;
             }
         }
+
         return $this->classes[$predictedClass];
     }
 
@@ -80,6 +80,7 @@ class NaiveBayes implements Classifier
     /**
      * Calculates vital statistics for each label & feature. Stores these
      * values in private array in order to avoid repeated calculation
+     *
      * @param string $label
      * @param array $samples
      */
@@ -128,6 +129,7 @@ class NaiveBayes implements Classifier
             $this->discreteProb[$label][$feature][$value] == 0) {
                 return self::EPSILON;
             }
+
             return $this->discreteProb[$label][$feature][$value];
         }
         $std = $this->std[$label][$feature] ;
@@ -141,6 +143,7 @@ class NaiveBayes implements Classifier
         // (See : https://github.com/scikit-learn/scikit-learn/blob/master/sklearn/naive_bayes.py)
         $pdf = -0.5 * log(2.0 * pi() * $std * $std);
         $pdf -= 0.5 * pow($value - $mean, 2) / ($std * $std);
+
         return $pdf;
     }
 
@@ -159,11 +162,13 @@ class NaiveBayes implements Classifier
                 $samples[] = $this->samples[$i];
             }
         }
+
         return $samples;
     }
 
     /**
      * @param array $sample
+     *
      * @return mixed
      */
     protected function predictSample(array $sample)
@@ -183,6 +188,7 @@ class NaiveBayes implements Classifier
 
         arsort($predictions, SORT_NUMERIC);
         reset($predictions);
+
         return key($predictions);
     }
 }
@@ -159,6 +159,7 @@ class FuzzyCMeans implements Clusterer
      *
      * @param int $row
      * @param int $col
+     *
      * @return float
      */
     protected function getDistanceCalc(int $row, int $col)
@@ -179,6 +180,7 @@ class FuzzyCMeans implements Clusterer
             $val = pow($dist1 / $dist2, 2.0 / ($this->fuzziness - 1));
             $sum += $val;
         }
+
         return $sum;
     }
 
@@ -212,6 +214,7 @@ class FuzzyCMeans implements Clusterer
 
     /**
      * @param array|Point[] $samples
+     *
      * @return array
      */
     public function cluster(array $samples)
@@ -55,7 +55,7 @@ class KernelPCA extends PCA
     {
         $availableKernels = [self::KERNEL_RBF, self::KERNEL_SIGMOID, self::KERNEL_LAPLACIAN, self::KERNEL_LINEAR];
         if (!in_array($kernel, $availableKernels)) {
-            throw new \Exception("KernelPCA can be initialized with the following kernels only: Linear, RBF, Sigmoid and Laplacian");
+            throw new \Exception('KernelPCA can be initialized with the following kernels only: Linear, RBF, Sigmoid and Laplacian');
         }
 
         parent::__construct($totalVariance, $numFeatures);
@@ -168,6 +168,7 @@ class KernelPCA extends PCA
             case self::KERNEL_RBF:
                 // k(x,y)=exp(-γ.|x-y|) where |..| is Euclidean distance
                 $dist = new Euclidean();
+
                 return function ($x, $y) use ($dist) {
                     return exp(-$this->gamma * $dist->sqDistance($x, $y));
                 };
@@ -176,12 +177,14 @@ class KernelPCA extends PCA
                 // k(x,y)=tanh(γ.xT.y+c0) where c0=1
                 return function ($x, $y) {
                     $res = Matrix::dot($x, $y)[0] + 1.0;
+
                     return tanh($this->gamma * $res);
                 };
 
             case self::KERNEL_LAPLACIAN:
                 // k(x,y)=exp(-γ.|x-y|) where |..| is Manhattan distance
                 $dist = new Manhattan();
+
                 return function ($x, $y) use ($dist) {
                     return exp(-$this->gamma * $dist->distance($x, $y));
                 };
@@ -241,11 +244,11 @@ class KernelPCA extends PCA
     public function transform(array $sample)
     {
         if (!$this->fit) {
-            throw new \Exception("KernelPCA has not been fitted with respect to original dataset, please run KernelPCA::fit() first");
+            throw new \Exception('KernelPCA has not been fitted with respect to original dataset, please run KernelPCA::fit() first');
         }
 
         if (is_array($sample[0])) {
-            throw new \Exception("KernelPCA::transform() accepts only one-dimensional arrays");
+            throw new \Exception('KernelPCA::transform() accepts only one-dimensional arrays');
         }
 
         $pairs = $this->getDistancePairs($sample);
@@ -50,13 +50,13 @@ class LDA extends EigenTransformerBase
     public function __construct($totalVariance = null, $numFeatures = null)
     {
         if ($totalVariance !== null && ($totalVariance < 0.1 || $totalVariance > 0.99)) {
-            throw new \Exception("Total variance can be a value between 0.1 and 0.99");
+            throw new \Exception('Total variance can be a value between 0.1 and 0.99');
         }
         if ($numFeatures !== null && $numFeatures <= 0) {
-            throw new \Exception("Number of features to be preserved should be greater than 0");
+            throw new \Exception('Number of features to be preserved should be greater than 0');
         }
         if ($totalVariance !== null && $numFeatures !== null) {
-            throw new \Exception("Either totalVariance or numFeatures should be specified in order to run the algorithm");
+            throw new \Exception('Either totalVariance or numFeatures should be specified in order to run the algorithm');
         }
 
         if ($numFeatures !== null) {
@@ -105,7 +105,6 @@ class LDA extends EigenTransformerBase
         return array_keys($counts);
     }
 
-
     /**
      * Calculates mean of each column for each class and returns
      * n by m matrix where n is number of labels and m is number of columns
@@ -156,7 +155,6 @@ class LDA extends EigenTransformerBase
         return $means;
     }
 
-
     /**
      * Returns in-class scatter matrix for each class, which
      * is a n by m matrix where n is number of classes and
@@ -237,7 +235,7 @@ class LDA extends EigenTransformerBase
     public function transform(array $sample)
     {
         if (!$this->fit) {
-            throw new \Exception("LDA has not been fitted with respect to original dataset, please run LDA::fit() first");
+            throw new \Exception('LDA has not been fitted with respect to original dataset, please run LDA::fit() first');
         }
 
         if (!is_array($sample[0])) {
@@ -35,13 +35,13 @@ class PCA extends EigenTransformerBase
     public function __construct($totalVariance = null, $numFeatures = null)
    {
         if ($totalVariance !== null && ($totalVariance < 0.1 || $totalVariance > 0.99)) {
-            throw new \Exception("Total variance can be a value between 0.1 and 0.99");
+            throw new \Exception('Total variance can be a value between 0.1 and 0.99');
         }
         if ($numFeatures !== null && $numFeatures <= 0) {
-            throw new \Exception("Number of features to be preserved should be greater than 0");
+            throw new \Exception('Number of features to be preserved should be greater than 0');
         }
         if ($totalVariance !== null && $numFeatures !== null) {
-            throw new \Exception("Either totalVariance or numFeatures should be specified in order to run the algorithm");
+            throw new \Exception('Either totalVariance or numFeatures should be specified in order to run the algorithm');
         }
 
         if ($numFeatures !== null) {
@@ -129,7 +129,7 @@ class PCA extends EigenTransformerBase
     public function transform(array $sample)
     {
         if (!$this->fit) {
-            throw new \Exception("PCA has not been fitted with respect to original dataset, please run PCA::fit() first");
+            throw new \Exception('PCA has not been fitted with respect to original dataset, please run PCA::fit() first');
         }
 
         if (!is_array($sample[0])) {
@@ -109,6 +109,7 @@ trait OneVsRest
             // multiple instances of this classifier
             $classifier = clone $this;
             $classifier->reset();
+
             return $classifier;
         }
 
@@ -121,6 +122,7 @@ trait OneVsRest
      *
      * @param array $targets
      * @param mixed $label
+     *
      * @return array Binarized targets and target's labels
      */
     private function binarizeTargets($targets, $label)
@@ -131,10 +133,10 @@ trait OneVsRest
         }
 
         $labels = [$label, $notLabel];
+
         return [$targets, $labels];
     }
 
-
     /**
      * @param array $sample
      *
@@ -153,6 +155,7 @@ trait OneVsRest
         }
 
         arsort($probs, SORT_NUMERIC);
+
         return key($probs);
     }
 
@@ -20,10 +20,12 @@ declare(strict_types=1);
  *
  * @author Paul Meagher
  * @license PHP v3.0
+ *
  * @version 1.1
  *
  * Slightly changed to adapt the original code to PHP-ML library
  * @date 2017/04/11
+ *
  * @author Mustafa Karabulut
  */
 
@@ -35,18 +37,21 @@ class EigenvalueDecomposition
 {
     /**
      * Row and column dimension (square matrix).
+     *
      * @var int
      */
     private $n;
 
     /**
      * Internal symmetry flag.
+     *
      * @var bool
      */
     private $symmetric;
 
     /**
      * Arrays for internal storage of eigenvalues.
+     *
      * @var array
      */
     private $d = [];
@@ -54,24 +59,28 @@ class EigenvalueDecomposition
 
     /**
      * Array for internal storage of eigenvectors.
+     *
      * @var array
      */
     private $V = [];
 
     /**
      * Array for internal storage of nonsymmetric Hessenberg form.
+     *
      * @var array
      */
     private $H = [];
 
     /**
      * Working storage for nonsymmetric algorithm.
+     *
      * @var array
      */
     private $ort;
 
     /**
      * Used for complex scalar division.
+     *
      * @var float
      */
     private $cdivr;
@@ -222,7 +231,6 @@ class EigenvalueDecomposition
         $this->e[0] = 0.0;
     }
 
-
     /**
      * Symmetric tridiagonal QL algorithm.
      *
@@ -330,7 +338,6 @@ class EigenvalueDecomposition
         }
     }
 
-
     /**
      * Nonsymmetric reduction to Hessenberg form.
      *
@@ -823,12 +830,11 @@ class EigenvalueDecomposition
                 $this->V[$i][$j] = $z;
             }
         }
-    } // end hqr2
+    }
 
     /**
      * Return the eigenvector matrix
      *
-     * @access public
      *
      * @return array
      */
@@ -899,4 +905,4 @@ class EigenvalueDecomposition
 
         return $D;
     }
-} // class EigenvalueDecomposition
+}
@@ -17,11 +17,14 @@ declare(strict_types=1);
  * @author Paul Meagher
  * @author Bartosz Matosiuk
  * @author Michael Bommarito
+ *
  * @version 1.1
+ *
  * @license PHP v3.0
  *
  * Slightly changed to adapt the original code to PHP-ML library
  * @date 2017/04/24
+ *
  * @author Mustafa Karabulut
 */
 
@@ -34,35 +37,39 @@ class LUDecomposition
 {
     /**
      * Decomposition storage
+     *
      * @var array
      */
     private $LU = [];
 
     /**
      * Row dimension.
+     *
      * @var int
      */
     private $m;
 
     /**
      * Column dimension.
+     *
      * @var int
      */
     private $n;
 
     /**
      * Pivot sign.
+     *
      * @var int
      */
     private $pivsign;
 
     /**
      * Internal storage of pivot vector.
+     *
      * @var array
      */
     private $piv = [];
 
-
     /**
      * Constructs Structure to access L, U and piv.
      *
@@ -128,8 +135,7 @@ class LUDecomposition
                 }
             }
         }
-    } // function __construct()
+    }
 
-
     /**
     * Get lower triangular factor.
@@ -150,9 +156,9 @@ class LUDecomposition
                 }
             }
         }
-        return new Matrix($L);
-    } // function getL()
+
+        return new Matrix($L);
+    }
 
-
     /**
      * Get upper triangular factor.
@@ -171,9 +177,9 @@ class LUDecomposition
                 }
             }
         }
-        return new Matrix($U);
-    } // function getU()
+
+        return new Matrix($U);
+    }
 
-
     /**
      * Return pivot permutation vector.
@@ -183,8 +189,7 @@ class LUDecomposition
     public function getPivot()
     {
         return $this->piv;
-    } // function getPivot()
+    }
 
-
     /**
      * Alias for getPivot
@@ -194,8 +199,7 @@ class LUDecomposition
     public function getDoublePivot()
     {
         return $this->getPivot();
-    } // function getDoublePivot()
+    }
 
-
     /**
      * Is the matrix nonsingular?
@@ -211,8 +215,7 @@ class LUDecomposition
         }
 
         return true;
-    } // function isNonsingular()
+    }
 
-
     /**
      * Count determinants
@@ -233,8 +236,7 @@ class LUDecomposition
         }
 
         return $d;
-    } // function det()
+    }
 
-
     /**
      * Solve A*X = B
@@ -277,8 +279,9 @@ class LUDecomposition
                 }
             }
         }
+
         return $X;
-    } // function solve()
+    }
 
     /**
      * @param array $matrix
@@ -302,4 +305,4 @@ class LUDecomposition
 
         return $R;
     }
-} // class LUDecomposition
+}
@@ -122,7 +122,6 @@ class Matrix
         return array_column($this->matrix, $column);
     }
 
-
     /**
      * @return float|int
      *
@@ -80,7 +80,7 @@ class Covariance
         }
 
         if ($i < 0 || $k < 0 || $i >= $n || $k >= $n) {
-            throw new \Exception("Given indices i and k do not match with the dimensionality of data");
+            throw new \Exception('Given indices i and k do not match with the dimensionality of data');
         }
 
         if ($meanX === null || $meanY === null) {
@@ -39,6 +39,7 @@ class Gaussian
         // Ref: https://en.wikipedia.org/wiki/Normal_distribution
         $std2 = $this->std ** 2;
         $mean = $this->mean;
+
         return exp(-(($value - $mean) ** 2) / (2 * $std2)) / sqrt(2 * $std2 * pi());
     }
 
@@ -55,6 +56,7 @@ class Gaussian
     public static function distributionPdf(float $mean, float $std, float $value)
     {
         $normal = new self($mean, $std);
+
         return $normal->pdf($value);
     }
 }
@@ -138,6 +138,7 @@ abstract class MultilayerPerceptron extends LayeredNetwork implements Estimator,
 
     /**
      * @param array $sample
+     *
      * @return mixed
      */
     abstract protected function predictSample(array $sample);
@@ -38,6 +38,7 @@ class DecisionTreeTest extends TestCase
         array_walk($input, function (&$v) {
             array_splice($v, 4, 1);
         });
+
         return [$input, $targets];
     }
 
@@ -54,6 +55,7 @@ class DecisionTreeTest extends TestCase
         $classifier->train($data, $targets);
         $this->assertEquals('Dont_play', $classifier->predict(['scorching', 95, 90, 'true']));
         $this->assertEquals('Play', $classifier->predict(['overcast', 60, 60, 'false']));
+
         return $classifier;
     }
 
@@ -97,6 +97,7 @@ class BaggingTest extends TestCase
         $classifier = new Bagging($numBaseClassifiers);
         $classifier->setSubsetRatio(1.0);
         $classifier->setClassifer(DecisionTree::class, ['depth' => 10]);
+
         return $classifier;
     }
 
@@ -113,7 +114,7 @@ class BaggingTest extends TestCase
         // Populating input data to a size large enough
         // for base classifiers that they can work with a subset of it
         $populated = [];
-        for ($i=0; $i<20; $i++) {
+        for ($i = 0; $i < 20; ++$i) {
             $populated = array_merge($populated, $input);
         }
         shuffle($populated);
@@ -121,6 +122,7 @@ class BaggingTest extends TestCase
         array_walk($populated, function (&$v) {
             array_splice($v, 4, 1);
         });
+
         return [$populated, $targets];
     }
 }
@@ -14,6 +14,7 @@ class RandomForestTest extends BaggingTest
     {
         $classifier = new RandomForest($numBaseClassifiers);
         $classifier->setFeatureSubsetRatio('log');
+
         return $classifier;
     }
 
@@ -180,6 +180,7 @@ class MLPClassifierTest extends TestCase
             [0, 1, 2]
         );
     }
+
     /**
      * @expectedException \Phpml\Exception\InvalidArgumentException
      */
@@ -21,6 +21,7 @@ class FuzzyCMeansTest extends TestCase
             }
         }
         $this->assertCount(0, $samples);
+
         return $fcm;
     }
 
@@ -37,8 +37,8 @@ class EigenDecompositionTest extends TestCase
         $len = 3;
         $A = array_fill(0, $len, array_fill(0, $len, 0.0));
         srand(intval(microtime(true) * 1000));
-        for ($i=0; $i < $len; $i++) {
-            for ($k=0; $k < $len; $k++) {
+        for ($i = 0; $i < $len; ++$i) {
+            for ($k = 0; $k < $len; ++$k) {
                 if ($i > $k) {
                     $A[$i][$k] = $A[$k][$i];
                 } else {
@@ -106,9 +106,9 @@ class NormalizerTest extends TestCase
         // Generate 10 random vectors of length 3
         $samples = [];
         srand(time());
-        for ($i=0; $i<10; $i++) {
+        for ($i = 0; $i < 10; ++$i) {
             $sample = array_fill(0, 3, 0);
-            for ($k=0; $k<3; $k++) {
+            for ($k = 0; $k < 3; ++$k) {
                 $sample[$k] = rand(1, 100);
             }
             // Last feature's value shared across samples.