Mirror of https://github.com/Llewellynvdm/php-ml.git (synced 2025-01-10 00:37:55 +00:00)
Commit 7ab80b6e56, parent 43f15d2f7e
@@ -102,19 +102,21 @@ class DecisionTree implements Classifier
 $this->columnNames = array_slice($this->columnNames, 0, $this->featureCount);
 } elseif (count($this->columnNames) < $this->featureCount) {
 $this->columnNames = array_merge($this->columnNames,
-range(count($this->columnNames), $this->featureCount - 1));
+range(count($this->columnNames), $this->featureCount - 1)
+);
 }
 }

 /**
 * @param array $samples
+*
 * @return array
 */
 public static function getColumnTypes(array $samples) : array
 {
 $types = [];
 $featureCount = count($samples[0]);
-for ($i=0; $i < $featureCount; $i++) {
+for ($i = 0; $i < $featureCount; ++$i) {
 $values = array_column($samples, $i);
 $isCategorical = self::isCategoricalColumn($values);
 $types[] = $isCategorical ? self::NOMINAL : self::CONTINUOUS;
@@ -125,7 +127,8 @@ class DecisionTree implements Classifier

 /**
 * @param array $records
 * @param int $depth
+*
 * @return DecisionTreeLeaf
 */
 protected function getSplitLeaf(array $records, int $depth = 0) : DecisionTreeLeaf
@@ -163,10 +166,10 @@ class DecisionTree implements Classifier

 // Group remaining targets
 $target = $this->targets[$recordNo];
-if (! array_key_exists($target, $remainingTargets)) {
+if (!array_key_exists($target, $remainingTargets)) {
 $remainingTargets[$target] = 1;
 } else {
-$remainingTargets[$target]++;
+++$remainingTargets[$target];
 }
 }

@@ -188,6 +191,7 @@ class DecisionTree implements Classifier

 /**
 * @param array $records
+*
 * @return DecisionTreeLeaf
 */
 protected function getBestSplit(array $records) : DecisionTreeLeaf
@@ -251,7 +255,7 @@ class DecisionTree implements Classifier
 protected function getSelectedFeatures() : array
 {
 $allFeatures = range(0, $this->featureCount - 1);
-if ($this->numUsableFeatures === 0 && ! $this->selectedFeatures) {
+if ($this->numUsableFeatures === 0 && !$this->selectedFeatures) {
 return $allFeatures;
 }

@@ -271,9 +275,10 @@ class DecisionTree implements Classifier
 }

 /**
-* @param $baseValue
+* @param mixed $baseValue
 * @param array $colValues
 * @param array $targets
+*
 * @return float
 */
 public function getGiniIndex($baseValue, array $colValues, array $targets) : float
@@ -282,13 +287,15 @@ class DecisionTree implements Classifier
 foreach ($this->labels as $label) {
 $countMatrix[$label] = [0, 0];
 }
+
 foreach ($colValues as $index => $value) {
 $label = $targets[$index];
 $rowIndex = $value === $baseValue ? 0 : 1;
-$countMatrix[$label][$rowIndex]++;
+++$countMatrix[$label][$rowIndex];
 }
+
 $giniParts = [0, 0];
-for ($i=0; $i<=1; $i++) {
+for ($i = 0; $i <= 1; ++$i) {
 $part = 0;
 $sum = array_sum(array_column($countMatrix, $i));
 if ($sum > 0) {
@@ -296,6 +303,7 @@ class DecisionTree implements Classifier
 $part += pow($countMatrix[$label][$i] / floatval($sum), 2);
 }
 }
+
 $giniParts[$i] = (1 - $part) * $sum;
 }

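For reference, the Gini computation in getGiniIndex() above reduces to the following standalone sketch (illustrative only, not the library's exact code): each label's occurrences are split into "matches the base value" (column 0) and "does not match" (column 1), and the two columns' impurities are weighted by their sample counts.

    // Illustrative sketch: weighted Gini impurity of a two-way split,
    // mirroring the $countMatrix bookkeeping shown in the hunk above.
    function weightedGiniSketch(array $countMatrix, int $totalSamples): float
    {
        $giniParts = [0, 0];
        for ($i = 0; $i <= 1; ++$i) {
            $sum = array_sum(array_column($countMatrix, $i));
            $part = 0.0;
            if ($sum > 0) {
                foreach ($countMatrix as $labelCounts) {
                    $part += ($labelCounts[$i] / $sum) ** 2;
                }
            }
            $giniParts[$i] = (1 - $part) * $sum;
        }

        return array_sum($giniParts) / $totalSamples;
    }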
@@ -304,6 +312,7 @@ class DecisionTree implements Classifier

 /**
 * @param array $samples
+*
 * @return array
 */
 protected function preprocess(array $samples) : array
@@ -311,7 +320,7 @@ class DecisionTree implements Classifier
 // Detect and convert continuous data column values into
 // discrete values by using the median as a threshold value
 $columns = [];
-for ($i=0; $i<$this->featureCount; $i++) {
+for ($i = 0; $i < $this->featureCount; ++$i) {
 $values = array_column($samples, $i);
 if ($this->columnTypes[$i] == self::CONTINUOUS) {
 $median = Mean::median($values);
@@ -332,6 +341,7 @@ class DecisionTree implements Classifier

 /**
 * @param array $columnValues
+*
 * @return bool
 */
 protected static function isCategoricalColumn(array $columnValues) : bool
@@ -348,6 +358,7 @@ class DecisionTree implements Classifier
 if ($floatValues) {
 return false;
 }
+
 if (count($numericValues) !== $count) {
 return true;
 }
@@ -365,7 +376,9 @@ class DecisionTree implements Classifier
 * randomly selected for each split operation.
 *
 * @param int $numFeatures
+*
 * @return $this
+*
 * @throws InvalidArgumentException
 */
 public function setNumFeatures(int $numFeatures)
@@ -394,7 +407,9 @@ class DecisionTree implements Classifier
 * column importances are desired to be inspected.
 *
 * @param array $names
+*
 * @return $this
+*
 * @throws InvalidArgumentException
 */
 public function setColumnNames(array $names)
@@ -458,8 +473,9 @@ class DecisionTree implements Classifier
 * Collects and returns an array of internal nodes that use the given
 * column as a split criterion
 *
 * @param int $column
 * @param DecisionTreeLeaf $node
+*
 * @return array
 */
 protected function getSplitNodesByColumn(int $column, DecisionTreeLeaf $node) : array
@@ -478,9 +494,11 @@ class DecisionTree implements Classifier
 if ($node->leftLeaf) {
 $lNodes = $this->getSplitNodesByColumn($column, $node->leftLeaf);
 }
+
 if ($node->rightLeaf) {
 $rNodes = $this->getSplitNodesByColumn($column, $node->rightLeaf);
 }
+
 $nodes = array_merge($nodes, $lNodes, $rNodes);

 return $nodes;
@@ -488,6 +506,7 @@ class DecisionTree implements Classifier

 /**
 * @param array $sample
+*
 * @return mixed
 */
 protected function predictSample(array $sample)
@@ -497,6 +516,7 @@ class DecisionTree implements Classifier
 if ($node->isTerminal) {
 break;
 }
+
 if ($node->evaluate($sample)) {
 $node = $node->leftLeaf;
 } else {

@@ -92,6 +92,8 @@ class DecisionTreeLeaf
 * Returns Mean Decrease Impurity (MDI) in the node.
 * For terminal nodes, this value is equal to 0
 *
+* @param int $parentRecordCount
+*
 * @return float
 */
 public function getNodeImpurityDecrease(int $parentRecordCount)
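The docblock above only names the inputs; as a point of reference, the impurity decrease of a split node is commonly defined as the node's impurity minus the weighted impurity of its children, scaled by the node's share of the parent's records. A hedged sketch of that definition (not claimed to be the library's exact implementation):

    // Illustrative definition only; argument names follow the surrounding diff.
    function impurityDecreaseSketch(
        float $nodeGini, int $nodeCount,
        float $leftGini, int $leftCount,
        float $rightGini, int $rightCount,
        int $parentRecordCount
    ): float {
        $weightedChildren = ($leftCount / $nodeCount) * $leftGini
                          + ($rightCount / $nodeCount) * $rightGini;

        return ($nodeCount / $parentRecordCount) * ($nodeGini - $weightedChildren);
    }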
@@ -133,7 +135,7 @@ class DecisionTreeLeaf
 } else {
 $col = "col_$this->columnIndex";
 }
-if (! preg_match("/^[<>=]{1,2}/", $value)) {
+if (!preg_match("/^[<>=]{1,2}/", $value)) {
 $value = "=$value";
 }
 $value = "<b>$col $value</b><br>Gini: ". number_format($this->giniIndex, 2);

@@ -75,6 +75,7 @@ class AdaBoost implements Classifier
 * improve classification performance of 'weak' classifiers such as
 * DecisionStump (default base classifier of AdaBoost).
 *
+* @param int $maxIterations
 */
 public function __construct(int $maxIterations = 50)
 {
@@ -96,6 +97,8 @@ class AdaBoost implements Classifier
 /**
 * @param array $samples
 * @param array $targets
+*
+* @throws \Exception
 */
 public function train(array $samples, array $targets)
 {
@@ -123,7 +126,6 @@ class AdaBoost implements Classifier
 // Execute the algorithm for a maximum number of iterations
 $currIter = 0;
 while ($this->maxIterations > $currIter++) {
-
 // Determine the best 'weak' classifier based on current weights
 $classifier = $this->getBestClassifier();
 $errorRate = $this->evaluateClassifier($classifier);
@@ -181,7 +183,7 @@ class AdaBoost implements Classifier
 $targets = [];
 foreach ($weights as $index => $weight) {
 $z = (int)round(($weight - $mean) / $std) - $minZ + 1;
-for ($i=0; $i < $z; $i++) {
+for ($i = 0; $i < $z; ++$i) {
 if (rand(0, 1) == 0) {
 continue;
 }
@@ -197,6 +199,8 @@ class AdaBoost implements Classifier
 * Evaluates the classifier and returns the classification error rate
 *
 * @param Classifier $classifier
+*
+* @return float
 */
 protected function evaluateClassifier(Classifier $classifier)
 {
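Typical usage of the boosting classifier touched above, as a minimal sketch (it assumes the php-ml AdaBoost class with the constructor shown in this diff, the usual train()/predict() methods, and a two-label dataset):

    use Phpml\Classification\Ensemble\AdaBoost;

    $samples = [[0.1, 0.2], [0.2, 0.1], [0.9, 0.8], [0.8, 0.9]];
    $targets = [0, 0, 1, 1];

    $classifier = new AdaBoost(50);   // 50 boosting iterations, the default shown above
    $classifier->train($samples, $targets);

    $predicted = $classifier->predict([[0.85, 0.95]]);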

@@ -59,13 +59,13 @@ class Bagging implements Classifier
 private $samples = [];

 /**
-* Creates an ensemble classifier with given number of base classifiers<br>
-* Default number of base classifiers is 100.
+* Creates an ensemble classifier with given number of base classifiers
+* Default number of base classifiers is 50.
 * The more number of base classifiers, the better performance but at the cost of procesing time
 *
 * @param int $numClassifier
 */
-public function __construct($numClassifier = 50)
+public function __construct(int $numClassifier = 50)
 {
 $this->numClassifier = $numClassifier;
 }
@@ -76,14 +76,17 @@ class Bagging implements Classifier
 * to train each base classifier.
 *
 * @param float $ratio
+*
 * @return $this
-* @throws Exception
+*
+* @throws \Exception
 */
 public function setSubsetRatio(float $ratio)
 {
 if ($ratio < 0.1 || $ratio > 1.0) {
 throw new \Exception("Subset ratio should be between 0.1 and 1.0");
 }

 $this->subsetRatio = $ratio;
+
 return $this;
 }
@@ -98,12 +101,14 @@ class Bagging implements Classifier
 *
 * @param string $classifier
 * @param array $classifierOptions
+*
 * @return $this
 */
 public function setClassifer(string $classifier, array $classifierOptions = [])
 {
 $this->classifier = $classifier;
 $this->classifierOptions = $classifierOptions;
+
 return $this;
 }

@@ -138,11 +143,12 @@ class Bagging implements Classifier
 $targets = [];
 srand($index);
 $bootstrapSize = $this->subsetRatio * $this->numSamples;
-for ($i=0; $i < $bootstrapSize; $i++) {
+for ($i = 0; $i < $bootstrapSize; ++$i) {
 $rand = rand(0, $this->numSamples - 1);
 $samples[] = $this->samples[$rand];
 $targets[] = $this->targets[$rand];
 }
+
 return [$samples, $targets];
 }

@@ -152,24 +158,25 @@ class Bagging implements Classifier
 protected function initClassifiers()
 {
 $classifiers = [];
-for ($i=0; $i<$this->numClassifier; $i++) {
+for ($i = 0; $i < $this->numClassifier; ++$i) {
 $ref = new \ReflectionClass($this->classifier);
 if ($this->classifierOptions) {
 $obj = $ref->newInstanceArgs($this->classifierOptions);
 } else {
 $obj = $ref->newInstance();
 }
-$classifiers[] = $this->initSingleClassifier($obj, $i);
+
+$classifiers[] = $this->initSingleClassifier($obj);
 }
 return $classifiers;
 }

 /**
 * @param Classifier $classifier
-* @param int $index
+*
 * @return Classifier
 */
-protected function initSingleClassifier($classifier, $index)
+protected function initSingleClassifier($classifier)
 {
 return $classifier;
 }
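A minimal usage sketch for the Bagging ensemble shown above (method names are those visible in the diff; the assumption that DecisionTree takes a maximum depth as its constructor argument is illustrative):

    use Phpml\Classification\Ensemble\Bagging;
    use Phpml\Classification\DecisionTree;

    $ensemble = new Bagging(50);                         // 50 base classifiers
    $ensemble->setSubsetRatio(0.7);                      // each classifier trains on a 70% bootstrap sample
    $ensemble->setClassifer(DecisionTree::class, [5]);   // assumed: max depth 5 passed to DecisionTree
    $ensemble->train($samples, $targets);
    $predictions = $ensemble->predict($testSamples);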

@@ -5,7 +5,6 @@ declare(strict_types=1);
 namespace Phpml\Classification\Ensemble;

 use Phpml\Classification\DecisionTree;
-use Phpml\Classification\Classifier;

 class RandomForest extends Bagging
 {
@@ -24,9 +23,9 @@ class RandomForest extends Bagging
 * may increase the prediction performance while it will also substantially
 * increase the processing time and the required memory
 *
-* @param type $numClassifier
+* @param int $numClassifier
 */
-public function __construct($numClassifier = 50)
+public function __construct(int $numClassifier = 50)
 {
 parent::__construct($numClassifier);

@@ -43,17 +42,21 @@ class RandomForest extends Bagging
 * features to be taken into consideration while selecting subspace of features
 *
 * @param mixed $ratio string or float should be given
+*
 * @return $this
-* @throws Exception
+*
+* @throws \Exception
 */
 public function setFeatureSubsetRatio($ratio)
 {
 if (is_float($ratio) && ($ratio < 0.1 || $ratio > 1.0)) {
 throw new \Exception("When a float given, feature subset ratio should be between 0.1 and 1.0");
 }
+
 if (is_string($ratio) && $ratio != 'sqrt' && $ratio != 'log') {
 throw new \Exception("When a string given, feature subset ratio can only be 'sqrt' or 'log' ");
 }
+
 $this->featureSubsetRatio = $ratio;
 return $this;
 }
@@ -62,8 +65,11 @@ class RandomForest extends Bagging
 * RandomForest algorithm is usable *only* with DecisionTree
 *
 * @param string $classifier
 * @param array $classifierOptions
+*
 * @return $this
+*
+* @throws \Exception
 */
 public function setClassifer(string $classifier, array $classifierOptions = [])
 {
@@ -125,10 +131,10 @@ class RandomForest extends Bagging

 /**
 * @param DecisionTree $classifier
-* @param int $index
+*
 * @return DecisionTree
 */
-protected function initSingleClassifier($classifier, $index)
+protected function initSingleClassifier($classifier)
 {
 if (is_float($this->featureSubsetRatio)) {
 $featureCount = (int)($this->featureSubsetRatio * $this->featureCount);
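The string ratios accepted above ('sqrt' or 'log') select how many features each tree may consider per split. A minimal sketch using only the methods visible in this diff:

    use Phpml\Classification\Ensemble\RandomForest;

    $forest = new RandomForest(100);           // 100 trees
    $forest->setFeatureSubsetRatio('sqrt');    // roughly sqrt(featureCount) features per split
    $forest->train($samples, $targets);
    $predictions = $forest->predict($testSamples);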

@@ -4,11 +4,8 @@ declare(strict_types=1);

 namespace Phpml\Classification\Linear;

-use Phpml\Classification\Classifier;
-
 class Adaline extends Perceptron
 {
-
 /**
 * Batch training is the default Adaline training algorithm
 */
@@ -35,13 +32,17 @@ class Adaline extends Perceptron
 * If normalizeInputs is set to true, then every input given to the algorithm will be standardized
 * by use of standard deviation and mean calculation
 *
-* @param int $learningRate
+* @param float $learningRate
 * @param int $maxIterations
+* @param bool $normalizeInputs
+* @param int $trainingType
+*
+* @throws \Exception
 */
 public function __construct(float $learningRate = 0.001, int $maxIterations = 1000,
 bool $normalizeInputs = true, int $trainingType = self::BATCH_TRAINING)
 {
-if (! in_array($trainingType, [self::BATCH_TRAINING, self::ONLINE_TRAINING])) {
+if (!in_array($trainingType, [self::BATCH_TRAINING, self::ONLINE_TRAINING])) {
 throw new \Exception("Adaline can only be trained with batch and online/stochastic gradient descent algorithm");
 }

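Given the constructor signature and the validation above, constructing an online-trained Adaline looks like this (a sketch; the constants are the ones referenced in the diff):

    use Phpml\Classification\Linear\Adaline;

    // learning rate 0.001, up to 1000 iterations, standardized inputs,
    // stochastic (online) gradient descent instead of the default batch training
    $adaline = new Adaline(0.001, 1000, true, Adaline::ONLINE_TRAINING);
    $adaline->train($samples, $targets);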

@@ -87,6 +87,8 @@ class DecisionStump extends WeightedClassifier
 /**
 * @param array $samples
 * @param array $targets
+* @param array $labels
+*
 * @throws \Exception
 */
 protected function trainBinary(array $samples, array $targets, array $labels)
@@ -237,13 +239,13 @@ class DecisionStump extends WeightedClassifier

 /**
 *
-* @param type $leftValue
-* @param type $operator
-* @param type $rightValue
+* @param mixed $leftValue
+* @param string $operator
+* @param mixed $rightValue
 *
 * @return boolean
 */
-protected function evaluate($leftValue, $operator, $rightValue)
+protected function evaluate($leftValue, string $operator, $rightValue)
 {
 switch ($operator) {
 case '>': return $leftValue > $rightValue;
@@ -288,10 +290,10 @@ class DecisionStump extends WeightedClassifier
 $wrong += $this->weights[$index];
 }

-if (! isset($prob[$predicted][$target])) {
+if (!isset($prob[$predicted][$target])) {
 $prob[$predicted][$target] = 0;
 }
-$prob[$predicted][$target]++;
+++$prob[$predicted][$target];
 }

 // Calculate probabilities: Proportion of labels in each leaf

@@ -4,21 +4,19 @@ declare(strict_types=1);

 namespace Phpml\Classification\Linear;

-use Phpml\Classification\Classifier;
 use Phpml\Helper\Optimizer\ConjugateGradient;

 class LogisticRegression extends Adaline
 {
-
 /**
 * Batch training: Gradient descent algorithm (default)
 */
 const BATCH_TRAINING = 1;

 /**
 * Online training: Stochastic gradient descent learning
 */
 const ONLINE_TRAINING = 2;

 /**
 * Conjugate Batch: Conjugate Gradient algorithm
@@ -74,13 +72,13 @@ class LogisticRegression extends Adaline
 string $penalty = 'L2')
 {
 $trainingTypes = range(self::BATCH_TRAINING, self::CONJUGATE_GRAD_TRAINING);
-if (! in_array($trainingType, $trainingTypes)) {
+if (!in_array($trainingType, $trainingTypes)) {
 throw new \Exception("Logistic regression can only be trained with " .
 "batch (gradient descent), online (stochastic gradient descent) " .
 "or conjugate batch (conjugate gradients) algorithms");
 }

-if (! in_array($cost, ['log', 'sse'])) {
+if (!in_array($cost, ['log', 'sse'])) {
 throw new \Exception("Logistic regression cost function can be one of the following: \n" .
 "'log' for log-likelihood and 'sse' for sum of squared errors");
 }
@@ -126,6 +124,8 @@ class LogisticRegression extends Adaline
 *
 * @param array $samples
 * @param array $targets
+*
+* @throws \Exception
 */
 protected function runTraining(array $samples, array $targets)
 {
@@ -140,12 +140,18 @@ class LogisticRegression extends Adaline

 case self::CONJUGATE_GRAD_TRAINING:
 return $this->runConjugateGradient($samples, $targets, $callback);
+
+default:
+throw new \Exception('Logistic regression has invalid training type: %s.', $this->trainingType);
 }
 }

 /**
-* Executes Conjugate Gradient method to optimize the
-* weights of the LogReg model
+* Executes Conjugate Gradient method to optimize the weights of the LogReg model
+*
+* @param array $samples
+* @param array $targets
+* @param \Closure $gradientFunc
 */
 protected function runConjugateGradient(array $samples, array $targets, \Closure $gradientFunc)
 {
@@ -162,6 +168,8 @@ class LogisticRegression extends Adaline
 * Returns the appropriate callback function for the selected cost function
 *
 * @return \Closure
+*
+* @throws \Exception
 */
 protected function getCostFunction()
 {
@@ -203,7 +211,7 @@ class LogisticRegression extends Adaline
 return $callback;

 case 'sse':
-/**
+/*
 * Sum of squared errors or least squared errors cost function:
 * J(x) = ∑ (y - h(x))^2
 *
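The 'sse' branch above documents the cost J(x) = ∑ (y - h(x))^2, a plain sum of squared errors. As a worked sketch of that formula, independent of the optimizer plumbing in the class:

    // Illustrative only: sum-of-squared-errors cost for targets $y and model outputs $h
    $sse = function (array $y, array $h): float {
        $cost = 0.0;
        foreach ($y as $i => $target) {
            $cost += ($target - $h[$i]) ** 2;
        }

        return $cost;
    };

    // e.g. $sse([1, -1, 1], [0.8, -0.6, 0.9]) yields 0.04 + 0.16 + 0.01 = 0.21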
@@ -224,6 +232,9 @@ class LogisticRegression extends Adaline
 };

 return $callback;
+
+default:
+throw new \Exception(sprintf('Logistic regression has invalid cost function: %s.', $this->costFunction));
 }
 }

@@ -245,6 +256,7 @@ class LogisticRegression extends Adaline
 * Returns the class value (either -1 or 1) for the given input
 *
 * @param array $sample
+*
 * @return int
 */
 protected function outputClass(array $sample)
@@ -266,6 +278,8 @@ class LogisticRegression extends Adaline
 *
 * @param array $sample
 * @param mixed $label
+*
+* @return float
 */
 protected function predictProbability(array $sample, $label)
 {

@@ -63,22 +63,22 @@ class Perceptron implements Classifier, IncrementalEstimator

 /**
 * Initalize a perceptron classifier with given learning rate and maximum
-* number of iterations used while training the perceptron <br>
+* number of iterations used while training the perceptron
 *
-* Learning rate should be a float value between 0.0(exclusive) and 1.0(inclusive) <br>
-* Maximum number of iterations can be an integer value greater than 0
-* @param int $learningRate
-* @param int $maxIterations
+* @param float $learningRate Value between 0.0(exclusive) and 1.0(inclusive)
+* @param int $maxIterations Must be at least 1
+* @param bool $normalizeInputs
+*
+* @throws \Exception
 */
-public function __construct(float $learningRate = 0.001, int $maxIterations = 1000,
-bool $normalizeInputs = true)
+public function __construct(float $learningRate = 0.001, int $maxIterations = 1000, bool $normalizeInputs = true)
 {
 if ($learningRate <= 0.0 || $learningRate > 1.0) {
 throw new \Exception("Learning rate should be a float value between 0.0(exclusive) and 1.0(inclusive)");
 }

 if ($maxIterations <= 0) {
-throw new \Exception("Maximum number of iterations should be an integer greater than 0");
+throw new \Exception("Maximum number of iterations must be an integer greater than 0");
 }

 if ($normalizeInputs) {
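The validation above means a learning rate outside (0.0, 1.0] or a non-positive iteration count throws immediately. A minimal construction sketch:

    use Phpml\Classification\Linear\Perceptron;

    $perceptron = new Perceptron(0.01, 500, true);   // valid: 0.0 < rate <= 1.0, iterations > 0
    // new Perceptron(1.5, 500);                     // would throw \Exception: rate out of range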
@@ -96,7 +96,7 @@ class Perceptron implements Classifier, IncrementalEstimator
 */
 public function partialTrain(array $samples, array $targets, array $labels = [])
 {
-return $this->trainByLabel($samples, $targets, $labels);
+$this->trainByLabel($samples, $targets, $labels);
 }

 /**
@@ -140,6 +140,8 @@ class Perceptron implements Classifier, IncrementalEstimator
 * for $maxIterations times
 *
 * @param bool $enable
+*
+* @return $this
 */
 public function setEarlyStop(bool $enable = true)
 {
@@ -185,12 +187,14 @@ class Perceptron implements Classifier, IncrementalEstimator
 * Executes a Gradient Descent algorithm for
 * the given cost function
 *
 * @param array $samples
 * @param array $targets
+* @param \Closure $gradientFunc
+* @param bool $isBatch
 */
 protected function runGradientDescent(array $samples, array $targets, \Closure $gradientFunc, bool $isBatch = false)
 {
 $class = $isBatch ? GD::class : StochasticGD::class;

 if (empty($this->optimizer)) {
 $this->optimizer = (new $class($this->featureCount))
@@ -262,6 +266,8 @@ class Perceptron implements Classifier, IncrementalEstimator
 *
 * @param array $sample
 * @param mixed $label
+*
+* @return float
 */
 protected function predictProbability(array $sample, $label)
 {
@@ -277,6 +283,7 @@ class Perceptron implements Classifier, IncrementalEstimator

 /**
 * @param array $sample
+*
 * @return mixed
 */
 protected function predictSampleBinary(array $sample)
@@ -285,6 +292,6 @@ class Perceptron implements Classifier, IncrementalEstimator

 $predictedClass = $this->outputClass($sample);

-return $this->labels[ $predictedClass ];
+return $this->labels[$predictedClass];
 }
 }

@@ -89,7 +89,7 @@ class NaiveBayes implements Classifier
 $this->mean[$label]= array_fill(0, $this->featureCount, 0);
 $this->dataType[$label] = array_fill(0, $this->featureCount, self::CONTINUOS);
 $this->discreteProb[$label] = array_fill(0, $this->featureCount, self::CONTINUOS);
-for ($i=0; $i<$this->featureCount; $i++) {
+for ($i = 0; $i < $this->featureCount; ++$i) {
 // Get the values of nth column in the samples array
 // Mean::arithmetic is called twice, can be optimized
 $values = array_column($samples, $i);
@@ -114,16 +114,17 @@ class NaiveBayes implements Classifier
 /**
 * Calculates the probability P(label|sample_n)
 *
 * @param array $sample
 * @param int $feature
 * @param string $label
+*
 * @return float
 */
 private function sampleProbability($sample, $feature, $label)
 {
 $value = $sample[$feature];
 if ($this->dataType[$label][$feature] == self::NOMINAL) {
-if (! isset($this->discreteProb[$label][$feature][$value]) ||
+if (!isset($this->discreteProb[$label][$feature][$value]) ||
 $this->discreteProb[$label][$feature][$value] == 0) {
 return self::EPSILON;
 }
@@ -145,13 +146,15 @@ class NaiveBayes implements Classifier

 /**
 * Return samples belonging to specific label
+*
 * @param string $label
+*
 * @return array
 */
 private function getSamplesByLabel($label)
 {
 $samples = [];
-for ($i=0; $i<$this->sampleCount; $i++) {
+for ($i = 0; $i < $this->sampleCount; ++$i) {
 if ($this->targets[$i] == $label) {
 $samples[] = $this->samples[$i];
 }
@@ -171,12 +174,13 @@ class NaiveBayes implements Classifier
 $predictions = [];
 foreach ($this->labels as $label) {
 $p = $this->p[$label];
-for ($i=0; $i<$this->featureCount; $i++) {
+for ($i = 0; $i<$this->featureCount; ++$i) {
 $Plf = $this->sampleProbability($sample, $i, $label);
 $p += $Plf;
 }
 $predictions[$label] = $p;
 }

 arsort($predictions, SORT_NUMERIC);
 reset($predictions);
+
 return key($predictions);
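The prediction loop above scores each label by starting from a per-label term and adding one per-feature term, then keeps the label with the largest score. Conceptually (a hedged sketch; $labelScore and $featureTerm are illustrative names, not the class's actual members):

    // Illustrative scoring sketch: accumulate a score per label, then pick the maximum
    $scores = [];
    foreach ($labels as $label) {
        $score = $labelScore[$label];                        // assumed: per-label prior term
        foreach ($sample as $feature => $value) {
            $score += $featureTerm($label, $feature, $value); // assumed: per-feature likelihood term
        }
        $scores[$label] = $score;
    }
    arsort($scores, SORT_NUMERIC);
    $prediction = key($scores);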

@@ -7,6 +7,7 @@ namespace Phpml\Clustering;
 use Phpml\Clustering\KMeans\Point;
 use Phpml\Clustering\KMeans\Cluster;
 use Phpml\Clustering\KMeans\Space;
+use Phpml\Exception\InvalidArgumentException;
 use Phpml\Math\Distance\Euclidean;

 class FuzzyCMeans implements Clusterer
@@ -25,10 +26,12 @@ class FuzzyCMeans implements Clusterer
 * @var Space
 */
 private $space;
+
 /**
 * @var array|float[][]
 */
 private $membership;
+
 /**
 * @var float
 */
@@ -56,6 +59,9 @@ class FuzzyCMeans implements Clusterer

 /**
 * @param int $clustersNumber
+* @param float $fuzziness
+* @param float $epsilon
+* @param int $maxIterations
 *
 * @throws InvalidArgumentException
 */
@@ -86,14 +92,15 @@ class FuzzyCMeans implements Clusterer
 protected function generateRandomMembership(int $rows, int $cols)
 {
 $this->membership = [];
-for ($i=0; $i < $rows; $i++) {
+for ($i = 0; $i < $rows; ++$i) {
 $row = [];
 $total = 0.0;
-for ($k=0; $k < $cols; $k++) {
+for ($k = 0; $k < $cols; ++$k) {
 $val = rand(1, 5) / 10.0;
 $row[] = $val;
 $total += $val;
 }
+
 $this->membership[] = array_map(function ($val) use ($total) {
 return $val / $total;
 }, $row);
@@ -103,21 +110,22 @@ class FuzzyCMeans implements Clusterer
 protected function updateClusters()
 {
 $dim = $this->space->getDimension();
-if (! $this->clusters) {
+if (!$this->clusters) {
 $this->clusters = [];
-for ($i=0; $i<$this->clustersNumber; $i++) {
+for ($i = 0; $i < $this->clustersNumber; ++$i) {
 $this->clusters[] = new Cluster($this->space, array_fill(0, $dim, 0.0));
 }
 }

-for ($i=0; $i<$this->clustersNumber; $i++) {
+for ($i = 0; $i < $this->clustersNumber; ++$i) {
 $cluster = $this->clusters[$i];
 $center = $cluster->getCoordinates();
-for ($k=0; $k<$dim; $k++) {
+for ($k = 0; $k < $dim; ++$k) {
 $a = $this->getMembershipRowTotal($i, $k, true);
 $b = $this->getMembershipRowTotal($i, $k, false);
 $center[$k] = $a / $b;
 }
+
 $cluster->setCoordinates($center);
 }
 }
@@ -125,20 +133,22 @@ class FuzzyCMeans implements Clusterer
 protected function getMembershipRowTotal(int $row, int $col, bool $multiply)
 {
 $sum = 0.0;
-for ($k = 0; $k < $this->sampleCount; $k++) {
+for ($k = 0; $k < $this->sampleCount; ++$k) {
 $val = pow($this->membership[$row][$k], $this->fuzziness);
 if ($multiply) {
 $val *= $this->samples[$k][$col];
 }
+
 $sum += $val;
 }
+
 return $sum;
 }

 protected function updateMembershipMatrix()
 {
-for ($i = 0; $i < $this->clustersNumber; $i++) {
-for ($k = 0; $k < $this->sampleCount; $k++) {
+for ($i = 0; $i < $this->clustersNumber; ++$i) {
+for ($k = 0; $k < $this->sampleCount; ++$k) {
 $distCalc = $this->getDistanceCalc($i, $k);
 $this->membership[$i][$k] = 1.0 / $distCalc;
 }
@@ -157,11 +167,15 @@ class FuzzyCMeans implements Clusterer
 $distance = new Euclidean();
 $dist1 = $distance->distance(
 $this->clusters[$row]->getCoordinates(),
-$this->samples[$col]);
-for ($j = 0; $j < $this->clustersNumber; $j++) {
+$this->samples[$col]
+);
+
+for ($j = 0; $j < $this->clustersNumber; ++$j) {
 $dist2 = $distance->distance(
 $this->clusters[$j]->getCoordinates(),
-$this->samples[$col]);
+$this->samples[$col]
+);
+
 $val = pow($dist1 / $dist2, 2.0 / ($this->fuzziness - 1));
 $sum += $val;
 }
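getDistanceCalc() above builds the denominator of the standard fuzzy c-means membership update. Written out, with m the fuzziness exponent, c_i the i-th cluster centre and x_k the k-th sample, the loop computes

    u[i][k] = 1 / Σ_j ( d(c_i, x_k) / d(c_j, x_k) )^(2 / (m - 1))

so the `1.0 / $distCalc` assignment in updateMembershipMatrix() is exactly this ratio sum inverted (with Euclidean d, as in the code).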
@@ -177,13 +191,14 @@ class FuzzyCMeans implements Clusterer
 {
 $sum = 0.0;
 $distance = new Euclidean();
-for ($i = 0; $i < $this->clustersNumber; $i++) {
+for ($i = 0; $i < $this->clustersNumber; ++$i) {
 $clust = $this->clusters[$i]->getCoordinates();
-for ($k = 0; $k < $this->sampleCount; $k++) {
+for ($k = 0; $k < $this->sampleCount; ++$k) {
 $point = $this->samples[$k];
 $sum += $distance->distance($clust, $point);
 }
 }
+
 return $sum;
 }

@@ -210,7 +225,6 @@ class FuzzyCMeans implements Clusterer
 // Our goal is minimizing the objective value while
 // executing the clustering steps at a maximum number of iterations
 $lastObjective = 0.0;
-$difference = 0.0;
 $iterations = 0;
 do {
 // Update the membership matrix and cluster centers, respectively
@@ -224,7 +238,7 @@ class FuzzyCMeans implements Clusterer
 } while ($difference > $this->epsilon && $iterations++ <= $this->maxIterations);

 // Attach (hard cluster) each data point to the nearest cluster
-for ($k=0; $k<$this->sampleCount; $k++) {
+for ($k = 0; $k < $this->sampleCount; ++$k) {
 $column = array_column($this->membership, $k);
 arsort($column);
 reset($column);
@@ -238,6 +252,7 @@ class FuzzyCMeans implements Clusterer
 foreach ($this->clusters as $cluster) {
 $grouped[] = $cluster->getPoints();
 }
+
 return $grouped;
 }
 }

@@ -156,7 +156,11 @@ class Space extends SplObjectStorage
 case KMeans::INIT_KMEANS_PLUS_PLUS:
 $clusters = $this->initializeKMPPClusters($clustersNumber);
 break;
+
+default:
+return [];
 }

 $clusters[0]->attachAll($this);
+
 return $clusters;

@@ -17,6 +17,7 @@ class CsvDataset extends ArrayDataset
 * @param string $filepath
 * @param int $features
 * @param bool $headingRow
+* @param string $delimiter
 *
 * @throws FileException
 */
@@ -37,11 +38,15 @@ class CsvDataset extends ArrayDataset
 $this->columnNames = range(0, $features - 1);
 }

+$samples = $targets = [];
 while (($data = fgetcsv($handle, 1000, $delimiter)) !== false) {
-$this->samples[] = array_slice($data, 0, $features);
-$this->targets[] = $data[$features];
+$samples[] = array_slice($data, 0, $features);
+$targets[] = $data[$features];
 }

 fclose($handle);

+parent::__construct($samples, $targets);
 }

 /**
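With the change above, the parsed rows are handed to the parent ArrayDataset constructor instead of being written to the properties directly; loading a file stays the same from the caller's side. A sketch, assuming the usual getter methods:

    use Phpml\Dataset\CsvDataset;

    // file path, number of feature columns, first row is a heading, comma-delimited
    $dataset = new CsvDataset('data.csv', 4, true, ',');
    $samples = $dataset->getSamples();
    $targets = $dataset->getTargets();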
@@ -1,4 +1,6 @@
-<?php declare(strict_types=1);
+<?php
+
+declare(strict_types=1);

 namespace Phpml\DimensionReduction;

@@ -37,7 +39,7 @@ abstract class EigenTransformerBase
 /**
 * Top eigenValues of the matrix
 *
-* @var type
+* @var array
 */
 protected $eigValues = [];


@@ -54,7 +54,7 @@ class KernelPCA extends PCA
 public function __construct(int $kernel = self::KERNEL_RBF, $totalVariance = null, $numFeatures = null, $gamma = null)
 {
 $availableKernels = [self::KERNEL_RBF, self::KERNEL_SIGMOID, self::KERNEL_LAPLACIAN, self::KERNEL_LINEAR];
-if (! in_array($kernel, $availableKernels)) {
+if (!in_array($kernel, $availableKernels)) {
 throw new \Exception("KernelPCA can be initialized with the following kernels only: Linear, RBF, Sigmoid and Laplacian");
 }

@@ -86,7 +86,7 @@ class KernelPCA extends PCA
 $matrix = $this->calculateKernelMatrix($this->data, $numRows);
 $matrix = $this->centerMatrix($matrix, $numRows);

-$this->eigenDecomposition($matrix, $numRows);
+$this->eigenDecomposition($matrix);

 $this->fit = true;

@@ -98,7 +98,7 @@ class KernelPCA extends PCA
 * An n-by-m matrix is given and an n-by-n matrix is returned
 *
 * @param array $data
 * @param int $numRows
 *
 * @return array
 */
@@ -107,8 +107,8 @@ class KernelPCA extends PCA
 $kernelFunc = $this->getKernel();

 $matrix = [];
-for ($i=0; $i < $numRows; $i++) {
-for ($k=0; $k < $numRows; $k++) {
+for ($i = 0; $i < $numRows; ++$i) {
+for ($k = 0; $k < $numRows; ++$k) {
 if ($i <= $k) {
 $matrix[$i][$k] = $kernelFunc($data[$i], $data[$k]);
 } else {
@@ -127,7 +127,9 @@ class KernelPCA extends PCA
 * K′ = K − N.K − K.N + N.K.N where N is n-by-n matrix filled with 1/n
 *
 * @param array $matrix
 * @param int $n
+*
+* @return array
 */
 protected function centerMatrix(array $matrix, int $n)
 {
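The formula documented above, K′ = K − N.K − K.N + N.K.N with N filled with 1/n, is the usual double-centering of a symmetric kernel matrix: element-wise it subtracts each row and column mean and adds back the grand mean. A plain-loop sketch of that identity (illustrative; the class's own implementation may differ):

    // K'[i][j] = K[i][j] - rowMean[i] - colMean[j] + grandMean
    function centerKernelSketch(array $K, int $n): array
    {
        $rowMean = [];
        foreach ($K as $i => $row) {
            $rowMean[$i] = array_sum($row) / $n;
        }
        $grandMean = array_sum($rowMean) / $n;

        $centered = [];
        for ($i = 0; $i < $n; ++$i) {
            for ($j = 0; $j < $n; ++$j) {
                // for a symmetric K, the column mean of column j equals rowMean[j]
                $centered[$i][$j] = $K[$i][$j] - $rowMean[$i] - $rowMean[$j] + $grandMean;
            }
        }

        return $centered;
    }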
@ -152,6 +154,8 @@ class KernelPCA extends PCA
|
|||||||
* Returns the callable kernel function
|
* Returns the callable kernel function
|
||||||
*
|
*
|
||||||
* @return \Closure
|
* @return \Closure
|
||||||
|
*
|
||||||
|
* @throws \Exception
|
||||||
*/
|
*/
|
||||||
protected function getKernel()
|
protected function getKernel()
|
||||||
{
|
{
|
||||||
@ -181,6 +185,9 @@ class KernelPCA extends PCA
|
|||||||
return function ($x, $y) use ($dist) {
|
return function ($x, $y) use ($dist) {
|
||||||
return exp(-$this->gamma * $dist->distance($x, $y));
|
return exp(-$this->gamma * $dist->distance($x, $y));
|
||||||
};
|
};
|
||||||
|
|
||||||
|
default:
|
||||||
|
throw new \Exception(sprintf('KernelPCA initialized with invalid kernel: %d', $this->kernel));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -228,6 +235,8 @@ class KernelPCA extends PCA
* @param array $sample
*
* @return array
*
* @throws \Exception
*/
public function transform(array $sample)
{
@ -4,7 +4,6 @@ declare(strict_types=1);

namespace Phpml\DimensionReduction;

use Phpml\Math\Statistic\Mean;
use Phpml\Math\Matrix;

class LDA extends EigenTransformerBase
@ -30,7 +29,7 @@ class LDA extends EigenTransformerBase
public $counts;

/**
* @var float
* @var float[]
*/
public $overallMean;

@ -111,12 +110,12 @@ class LDA extends EigenTransformerBase
* Calculates mean of each column for each class and returns
* n by m matrix where n is number of labels and m is number of columns
*
* @param type $data
* @param array $data
* @param type $classes
* @param array $classes
*
* @return array
*/
protected function calculateMeans($data, $classes) : array
protected function calculateMeans(array $data, array $classes) : array
{
$means = [];
$counts= [];
@ -126,17 +125,18 @@ class LDA extends EigenTransformerBase
$label = array_search($classes[$index], $this->labels);

foreach ($row as $col => $val) {
if (! isset($means[$label][$col])) {
if (!isset($means[$label][$col])) {
$means[$label][$col] = 0.0;
}
$means[$label][$col] += $val;
$overallMean[$col] += $val;
}

if (! isset($counts[$label])) {
if (!isset($counts[$label])) {
$counts[$label] = 0;
}
$counts[$label]++;
++$counts[$label];
}

foreach ($means as $index => $row) {
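The loop above accumulates per-class column sums and counts before dividing to get the class means. A small standalone sketch of the same accumulation on a toy dataset follows; the variable names are illustrative, not the library's.

<?php
// Illustrative sketch (not part of this commit): per-class column means,
// the quantity calculateMeans() builds up before normalizing by the counts.
$data    = [[1.0, 2.0], [3.0, 4.0], [5.0, 6.0]];
$classes = ['a', 'b', 'a'];

$sums = [];
$counts = [];
foreach ($data as $index => $row) {
    $label = $classes[$index];
    foreach ($row as $col => $val) {
        $sums[$label][$col] = ($sums[$label][$col] ?? 0.0) + $val;
    }
    $counts[$label] = ($counts[$label] ?? 0) + 1;
}

$means = [];
foreach ($sums as $label => $row) {
    foreach ($row as $col => $sum) {
        $means[$label][$col] = $sum / $counts[$label];
    }
}
// $means === ['a' => [3.0, 4.0], 'b' => [3.0, 4.0]]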
@ -231,6 +231,8 @@ class LDA extends EigenTransformerBase
* @param array $sample
*
* @return array
*
* @throws \Exception
*/
public function transform(array $sample)
{
@ -238,7 +240,7 @@ class LDA extends EigenTransformerBase
throw new \Exception("LDA has not been fitted with respect to original dataset, please run LDA::fit() first");
}

if (! is_array($sample[0])) {
if (!is_array($sample[0])) {
$sample = [$sample];
}

@ -6,7 +6,6 @@ namespace Phpml\DimensionReduction;

use Phpml\Math\Statistic\Covariance;
use Phpml\Math\Statistic\Mean;
use Phpml\Math\Matrix;

class PCA extends EigenTransformerBase
{
@ -86,7 +85,7 @@ class PCA extends EigenTransformerBase
{
// Calculate means for each dimension
$this->means = [];
for ($i=0; $i < $n; $i++) {
for ($i = 0; $i < $n; ++$i) {
$column = array_column($data, $i);
$this->means[] = Mean::arithmetic($column);
}
@ -97,7 +96,7 @@ class PCA extends EigenTransformerBase
* each dimension therefore dimensions will be centered to zero
*
* @param array $data
* @param int $n
*
* @return array
*/
@ -109,7 +108,7 @@ class PCA extends EigenTransformerBase

// Normalize data
foreach ($data as $i => $row) {
for ($k=0; $k < $n; $k++) {
for ($k = 0; $k < $n; ++$k) {
$data[$i][$k] -= $this->means[$k];
}
}
@ -124,6 +123,8 @@ class PCA extends EigenTransformerBase
* @param array $sample
*
* @return array
*
* @throws \Exception
*/
public function transform(array $sample)
{
@ -131,7 +132,7 @@ class PCA extends EigenTransformerBase
throw new \Exception("PCA has not been fitted with respect to original dataset, please run PCA::fit() first");
}

if (! is_array($sample[0])) {
if (!is_array($sample[0])) {
$sample = [$sample];
}

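Both transform() guards above require fit() to be called before any projection. The usage sketch below is illustrative only; the PCA constructor argument shown (a target total variance) is an assumption about the API and may not match the exact signature.

<?php
// Illustrative usage sketch (not part of this commit).
use Phpml\DimensionReduction\PCA;

$samples = [[1.0, 2.0, 3.0], [2.0, 3.0, 4.0], [3.0, 4.0, 6.0]];

$pca = new PCA(0.95);            // assumed: keep components covering ~95% of variance
$reduced = $pca->fit($samples);  // fit() must run first

// Calling transform() on an unfitted PCA would raise the exception shown above.
$projected = $pca->transform([1.5, 2.5, 3.5]);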
@ -6,7 +6,6 @@ namespace Phpml\Exception;

class DatasetException extends \Exception
{

/**
* @param string $path
*
@ -6,7 +6,6 @@ namespace Phpml\Exception;

class FileException extends \Exception
{

/**
* @param string $filepath
*
@ -11,7 +11,7 @@ class InvalidArgumentException extends \Exception
*/
public static function arraySizeNotMatch()
{
return new self('Size of given arrays not match');
return new self('Size of given arrays does not match');
}

/**
@ -55,7 +55,7 @@ class InvalidArgumentException extends \Exception
*/
public static function inconsistentMatrixSupplied()
{
return new self('Inconsistent matrix applied');
return new self('Inconsistent matrix supplied');
}

/**
@ -6,7 +6,6 @@ namespace Phpml\Exception;

class SerializeException extends \Exception
{

/**
* @param string $filepath
*
@ -6,7 +6,6 @@ namespace Phpml\Helper;

trait OneVsRest
{

/**
* @var array
*/
@ -35,18 +34,18 @@ trait OneVsRest
// Clears previous stuff.
$this->reset();

return $this->trainBylabel($samples, $targets);
$this->trainBylabel($samples, $targets);
}

/**
* @param array $samples
* @param array $targets
* @param array $allLabels All training set labels
*
* @return void
*/
protected function trainByLabel(array $samples, array $targets, array $allLabels = [])
{

// Overwrites the current value if it exist. $allLabels must be provided for each partialTrain run.
if (!empty($allLabels)) {
$this->allLabels = $allLabels;
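trainByLabel() goes on to train one classifier copy per label against "the rest". A minimal sketch of that relabelling step follows; the variable names are hypothetical and the snippet is not part of this commit.

<?php
// Illustrative sketch: one-vs-rest relabelling, one binary target vector per label.
$targets   = ['cat', 'dog', 'cat', 'bird'];
$allLabels = array_values(array_unique($targets));

$binaryTargets = [];
foreach ($allLabels as $label) {
    foreach ($targets as $index => $target) {
        // 1 means "belongs to $label", 0 means "rest"
        $binaryTargets[$label][$index] = ($target === $label) ? 1 : 0;
    }
}
// $binaryTargets['cat'] === [1, 0, 1, 0]; each such vector would be used to
// train its own classifier copy, as the trait does per label.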
@ -57,7 +56,6 @@ trait OneVsRest
|
|||||||
|
|
||||||
// If there are only two targets, then there is no need to perform OvR
|
// If there are only two targets, then there is no need to perform OvR
|
||||||
if (count($this->allLabels) == 2) {
|
if (count($this->allLabels) == 2) {
|
||||||
|
|
||||||
// Init classifier if required.
|
// Init classifier if required.
|
||||||
if (empty($this->classifiers)) {
|
if (empty($this->classifiers)) {
|
||||||
$this->classifiers[0] = $this->getClassifierCopy();
|
$this->classifiers[0] = $this->getClassifierCopy();
|
||||||
@ -68,7 +66,6 @@ trait OneVsRest
|
|||||||
// Train a separate classifier for each label and memorize them
|
// Train a separate classifier for each label and memorize them
|
||||||
|
|
||||||
foreach ($this->allLabels as $label) {
|
foreach ($this->allLabels as $label) {
|
||||||
|
|
||||||
// Init classifier if required.
|
// Init classifier if required.
|
||||||
if (empty($this->classifiers[$label])) {
|
if (empty($this->classifiers[$label])) {
|
||||||
$this->classifiers[$label] = $this->getClassifierCopy();
|
$this->classifiers[$label] = $this->getClassifierCopy();
|
||||||
@ -107,7 +104,6 @@ trait OneVsRest
|
|||||||
*/
|
*/
|
||||||
protected function getClassifierCopy()
|
protected function getClassifierCopy()
|
||||||
{
|
{
|
||||||
|
|
||||||
// Clone the current classifier, so that
|
// Clone the current classifier, so that
|
||||||
// we don't mess up its variables while training
|
// we don't mess up its variables while training
|
||||||
// multiple instances of this classifier
|
// multiple instances of this classifier
|
||||||
@ -180,7 +176,8 @@ trait OneVsRest
|
|||||||
* Each classifier that make use of OvR approach should be able to
|
* Each classifier that make use of OvR approach should be able to
|
||||||
* return a probability for a sample to belong to the given label.
|
* return a probability for a sample to belong to the given label.
|
||||||
*
|
*
|
||||||
* @param array $sample
|
* @param array $sample
|
||||||
|
* @param string $label
|
||||||
*
|
*
|
||||||
* @return mixed
|
* @return mixed
|
||||||
*/
|
*/
|
||||||
|
@ -18,8 +18,8 @@ namespace Phpml\Helper\Optimizer;
|
|||||||
class ConjugateGradient extends GD
|
class ConjugateGradient extends GD
|
||||||
{
|
{
|
||||||
/**
|
/**
|
||||||
* @param array $samples
|
* @param array $samples
|
||||||
* @param array $targets
|
* @param array $targets
|
||||||
* @param \Closure $gradientCb
|
* @param \Closure $gradientCb
|
||||||
*
|
*
|
||||||
* @return array
|
* @return array
|
||||||
@ -34,7 +34,7 @@ class ConjugateGradient extends GD
|
|||||||
|
|
||||||
$d = mp::muls($this->gradient($this->theta), -1);
|
$d = mp::muls($this->gradient($this->theta), -1);
|
||||||
|
|
||||||
for ($i=0; $i < $this->maxIterations; $i++) {
|
for ($i = 0; $i < $this->maxIterations; ++$i) {
|
||||||
// Obtain α that minimizes f(θ + α.d)
|
// Obtain α that minimizes f(θ + α.d)
|
||||||
$alpha = $this->getAlpha(array_sum($d));
|
$alpha = $this->getAlpha(array_sum($d));
|
||||||
|
|
||||||
@ -68,11 +68,11 @@ class ConjugateGradient extends GD
*
* @param array $theta
*
* @return float
* @return array
*/
protected function gradient(array $theta)
{
list($_, $gradient, $_) = parent::gradient($theta);
list(, $gradient) = parent::gradient($theta);

return $gradient;
}
@ -86,7 +86,7 @@ class ConjugateGradient extends GD
*/
protected function cost(array $theta)
{
list($cost, $_, $_) = parent::gradient($theta);
list($cost) = parent::gradient($theta);

return array_sum($cost) / $this->sampleCount;
}
@ -107,7 +107,7 @@ class ConjugateGradient extends GD
*
* @param float $d
*
* @return array
* @return float
*/
protected function getAlpha(float $d)
{
@ -157,14 +157,14 @@ class ConjugateGradient extends GD
|
|||||||
* @param float $alpha
|
* @param float $alpha
|
||||||
* @param array $d
|
* @param array $d
|
||||||
*
|
*
|
||||||
* return array
|
* @return array
|
||||||
*/
|
*/
|
||||||
protected function getNewTheta(float $alpha, array $d)
|
protected function getNewTheta(float $alpha, array $d)
|
||||||
{
|
{
|
||||||
$theta = $this->theta;
|
$theta = $this->theta;
|
||||||
|
|
||||||
for ($i=0; $i < $this->dimensions + 1; $i++) {
|
for ($i = 0; $i < $this->dimensions + 1; ++$i) {
|
||||||
if ($i == 0) {
|
if ($i === 0) {
|
||||||
$theta[$i] += $alpha * array_sum($d);
|
$theta[$i] += $alpha * array_sum($d);
|
||||||
} else {
|
} else {
|
||||||
$sum = 0.0;
|
$sum = 0.0;
|
||||||
@ -266,10 +266,11 @@ class mp
|
|||||||
*
|
*
|
||||||
* @param array $m1
|
* @param array $m1
|
||||||
* @param array $m2
|
* @param array $m2
|
||||||
|
* @param int $mag
|
||||||
*
|
*
|
||||||
* @return array
|
* @return array
|
||||||
*/
|
*/
|
||||||
public static function add(array $m1, array $m2, $mag = 1)
|
public static function add(array $m1, array $m2, int $mag = 1)
|
||||||
{
|
{
|
||||||
$res = [];
|
$res = [];
|
||||||
foreach ($m1 as $i => $val) {
|
foreach ($m1 as $i => $val) {
|
||||||
@ -333,10 +334,11 @@ class mp
|
|||||||
*
|
*
|
||||||
* @param array $m1
|
* @param array $m1
|
||||||
* @param float $m2
|
* @param float $m2
|
||||||
|
* @param int $mag
|
||||||
*
|
*
|
||||||
* @return array
|
* @return array
|
||||||
*/
|
*/
|
||||||
public static function adds(array $m1, float $m2, $mag = 1)
|
public static function adds(array $m1, float $m2, int $mag = 1)
|
||||||
{
|
{
|
||||||
$res = [];
|
$res = [];
|
||||||
foreach ($m1 as $val) {
|
foreach ($m1 as $val) {
|
||||||
@ -350,7 +352,7 @@ class mp
|
|||||||
* Element-wise <b>subtraction</b> of a vector with a scalar
|
* Element-wise <b>subtraction</b> of a vector with a scalar
|
||||||
*
|
*
|
||||||
* @param array $m1
|
* @param array $m1
|
||||||
* @param float $m2
|
* @param array $m2
|
||||||
*
|
*
|
||||||
* @return array
|
* @return array
|
||||||
*/
|
*/
|
||||||
|
@ -18,8 +18,8 @@ class GD extends StochasticGD
|
|||||||
protected $sampleCount = null;
|
protected $sampleCount = null;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* @param array $samples
|
* @param array $samples
|
||||||
* @param array $targets
|
* @param array $targets
|
||||||
* @param \Closure $gradientCb
|
* @param \Closure $gradientCb
|
||||||
*
|
*
|
||||||
* @return array
|
* @return array
|
||||||
@ -75,7 +75,7 @@ class GD extends StochasticGD
|
|||||||
list($cost, $grad, $penalty) = array_pad($result, 3, 0);
|
list($cost, $grad, $penalty) = array_pad($result, 3, 0);
|
||||||
|
|
||||||
$costs[] = $cost;
|
$costs[] = $cost;
|
||||||
$gradient[]= $grad;
|
$gradient[] = $grad;
|
||||||
$totalPenalty += $penalty;
|
$totalPenalty += $penalty;
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -91,8 +91,8 @@ class GD extends StochasticGD
|
|||||||
protected function updateWeightsWithUpdates(array $updates, float $penalty)
|
protected function updateWeightsWithUpdates(array $updates, float $penalty)
|
||||||
{
|
{
|
||||||
// Updates all weights at once
|
// Updates all weights at once
|
||||||
for ($i=0; $i <= $this->dimensions; $i++) {
|
for ($i = 0; $i <= $this->dimensions; ++$i) {
|
||||||
if ($i == 0) {
|
if ($i === 0) {
|
||||||
$this->theta[0] -= $this->learningRate * array_sum($updates);
|
$this->theta[0] -= $this->learningRate * array_sum($updates);
|
||||||
} else {
|
} else {
|
||||||
$col = array_column($this->samples, $i - 1);
|
$col = array_column($this->samples, $i - 1);
|
||||||
|
@ -31,7 +31,7 @@ abstract class Optimizer
|
|||||||
|
|
||||||
// Inits the weights randomly
|
// Inits the weights randomly
|
||||||
$this->theta = [];
|
$this->theta = [];
|
||||||
for ($i=0; $i < $this->dimensions; $i++) {
|
for ($i = 0; $i < $this->dimensions; ++$i) {
|
||||||
$this->theta[] = rand() / (float) getrandmax();
|
$this->theta[] = rand() / (float) getrandmax();
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -40,6 +40,10 @@ abstract class Optimizer
|
|||||||
* Sets the weights manually
|
* Sets the weights manually
|
||||||
*
|
*
|
||||||
* @param array $theta
|
* @param array $theta
|
||||||
|
*
|
||||||
|
* @return $this
|
||||||
|
*
|
||||||
|
* @throws \Exception
|
||||||
*/
|
*/
|
||||||
public function setInitialTheta(array $theta)
|
public function setInitialTheta(array $theta)
|
||||||
{
|
{
|
||||||
@ -56,6 +60,9 @@ abstract class Optimizer
|
|||||||
* Executes the optimization with the given samples & targets
|
* Executes the optimization with the given samples & targets
|
||||||
* and returns the weights
|
* and returns the weights
|
||||||
*
|
*
|
||||||
|
* @param array $samples
|
||||||
|
* @param array $targets
|
||||||
|
* @param \Closure $gradientCb
|
||||||
*/
|
*/
|
||||||
abstract protected function runOptimization(array $samples, array $targets, \Closure $gradientCb);
|
abstract protected function runOptimization(array $samples, array $targets, \Closure $gradientCb);
|
||||||
}
|
}
|
||||||
|
@ -166,7 +166,6 @@ class StochasticGD extends Optimizer
|
|||||||
$currIter = 0;
|
$currIter = 0;
|
||||||
$bestTheta = null;
|
$bestTheta = null;
|
||||||
$bestScore = 0.0;
|
$bestScore = 0.0;
|
||||||
$bestWeightIter = 0;
|
|
||||||
$this->costValues = [];
|
$this->costValues = [];
|
||||||
|
|
||||||
while ($this->maxIterations > $currIter++) {
|
while ($this->maxIterations > $currIter++) {
|
||||||
@ -180,7 +179,6 @@ class StochasticGD extends Optimizer
|
|||||||
if ($bestTheta == null || $cost <= $bestScore) {
|
if ($bestTheta == null || $cost <= $bestScore) {
|
||||||
$bestTheta = $theta;
|
$bestTheta = $theta;
|
||||||
$bestScore = $cost;
|
$bestScore = $cost;
|
||||||
$bestWeightIter = $currIter;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// Add the cost value for this iteration to the list
|
// Add the cost value for this iteration to the list
|
||||||
@ -218,7 +216,7 @@ class StochasticGD extends Optimizer
|
|||||||
$this->theta[0] -= $this->learningRate * $gradient;
|
$this->theta[0] -= $this->learningRate * $gradient;
|
||||||
|
|
||||||
// Update other values
|
// Update other values
|
||||||
for ($i=1; $i <= $this->dimensions; $i++) {
|
for ($i = 1; $i <= $this->dimensions; ++$i) {
|
||||||
$this->theta[$i] -= $this->learningRate *
|
$this->theta[$i] -= $this->learningRate *
|
||||||
($gradient * $sample[$i - 1] + $penalty * $this->theta[$i]);
|
($gradient * $sample[$i - 1] + $penalty * $this->theta[$i]);
|
||||||
}
|
}
|
||||||
|
@ -14,12 +14,12 @@ trait Predictable
public function predict(array $samples)
{
if (!is_array($samples[0])) {
$predicted = $this->predictSample($samples);
} else {
$predicted = [];
foreach ($samples as $index => $sample) {
$predicted[$index] = $this->predictSample($sample);
}
return $this->predictSample($samples);
}

$predicted = [];
foreach ($samples as $index => $sample) {
$predicted[$index] = $this->predictSample($sample);
}

return $predicted;
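The refactor above keeps predict()'s two calling styles: a flat sample yields a single prediction, while a list of samples yields an index-aligned array. A hedged usage sketch, assuming a KNearestNeighbors classifier (or any estimator using this trait) is available:

<?php
// Illustrative usage sketch (not part of this commit).
use Phpml\Classification\KNearestNeighbors;

$classifier = new KNearestNeighbors();
$classifier->train([[1, 1], [2, 2], [8, 8], [9, 9]], ['small', 'small', 'big', 'big']);

// A single flat sample returns one label ...
$one = $classifier->predict([1.5, 1.5]);                 // 'small'

// ... while an array of samples returns an array of labels.
$many = $classifier->predict([[1.5, 1.5], [8.5, 8.5]]);  // ['small', 'big']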
@ -6,7 +6,6 @@ namespace Phpml;

interface IncrementalEstimator
{

/**
* @param array $samples
* @param array $targets
@ -23,8 +23,8 @@ class RBF implements Kernel
}

/**
* @param float $a
* @param array $a
* @param float $b
* @param array $b
*
* @return float
*/
@ -33,7 +33,6 @@ use Phpml\Math\Matrix;

class EigenvalueDecomposition
{

/**
* Row and column dimension (square matrix).
* @var int
@ -42,9 +41,9 @@ class EigenvalueDecomposition

/**
* Internal symmetry flag.
* @var int
* @var bool
*/
private $issymmetric;
private $symmetric;

/**
* Arrays for internal storage of eigenvalues.
@ -78,6 +77,38 @@ class EigenvalueDecomposition
private $cdivr;
private $cdivi;

/**
* Constructor: Check for symmetry, then construct the eigenvalue decomposition
*
* @param array $Arg
*/
public function __construct(array $Arg)
{
$this->A = $Arg;
$this->n = count($Arg[0]);
$this->symmetric = true;

for ($j = 0; ($j < $this->n) && $this->symmetric; ++$j) {
for ($i = 0; ($i < $this->n) & $this->symmetric; ++$i) {
$this->symmetric = ($this->A[$i][$j] == $this->A[$j][$i]);
}
}

if ($this->symmetric) {
$this->V = $this->A;
// Tridiagonalize.
$this->tred2();
// Diagonalize.
$this->tql2();
} else {
$this->H = $this->A;
$this->ort = [];
// Reduce to Hessenberg form.
$this->orthes();
// Reduce Hessenberg to real Schur form.
$this->hqr2();
}
}

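A brief usage sketch for the relocated constructor: a symmetric input takes the tred2()/tql2() branch shown above. The namespace import and the getRealEigenvalues() accessor name below are assumptions; the class name and getEigenvectors() appear in this diff.

<?php
// Illustrative usage sketch (not part of this commit).
use Phpml\Math\LinearAlgebra\EigenvalueDecomposition; // assumed namespace

// A small symmetric matrix, so the symmetric branch (tred2 + tql2) is used.
$matrix = [
    [2.0, 1.0],
    [1.0, 2.0],
];

$decomposition = new EigenvalueDecomposition($matrix);

$eigenvalues  = $decomposition->getRealEigenvalues(); // approximately [1.0, 3.0]
$eigenvectors = $decomposition->getEigenvectors();    // normalized, one vector per row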
/**
|
/**
|
||||||
* Symmetric Householder reduction to tridiagonal form.
|
* Symmetric Householder reduction to tridiagonal form.
|
||||||
@ -88,10 +119,10 @@ class EigenvalueDecomposition
|
|||||||
// Bowdler, Martin, Reinsch, and Wilkinson, Handbook for
|
// Bowdler, Martin, Reinsch, and Wilkinson, Handbook for
|
||||||
// Auto. Comp., Vol.ii-Linear Algebra, and the corresponding
|
// Auto. Comp., Vol.ii-Linear Algebra, and the corresponding
|
||||||
// Fortran subroutine in EISPACK.
|
// Fortran subroutine in EISPACK.
|
||||||
$this->d = $this->V[$this->n-1];
|
$this->d = $this->V[$this->n - 1];
|
||||||
// Householder reduction to tridiagonal form.
|
// Householder reduction to tridiagonal form.
|
||||||
for ($i = $this->n-1; $i > 0; --$i) {
|
for ($i = $this->n - 1; $i > 0; --$i) {
|
||||||
$i_ = $i -1;
|
$i_ = $i - 1;
|
||||||
// Scale to avoid under/overflow.
|
// Scale to avoid under/overflow.
|
||||||
$h = $scale = 0.0;
|
$h = $scale = 0.0;
|
||||||
$scale += array_sum(array_map('abs', $this->d));
|
$scale += array_sum(array_map('abs', $this->d));
|
||||||
@ -107,14 +138,17 @@ class EigenvalueDecomposition
|
|||||||
$this->d[$k] /= $scale;
|
$this->d[$k] /= $scale;
|
||||||
$h += pow($this->d[$k], 2);
|
$h += pow($this->d[$k], 2);
|
||||||
}
|
}
|
||||||
|
|
||||||
$f = $this->d[$i_];
|
$f = $this->d[$i_];
|
||||||
$g = sqrt($h);
|
$g = sqrt($h);
|
||||||
if ($f > 0) {
|
if ($f > 0) {
|
||||||
$g = -$g;
|
$g = -$g;
|
||||||
}
|
}
|
||||||
|
|
||||||
$this->e[$i] = $scale * $g;
|
$this->e[$i] = $scale * $g;
|
||||||
$h = $h - $f * $g;
|
$h = $h - $f * $g;
|
||||||
$this->d[$i_] = $f - $g;
|
$this->d[$i_] = $f - $g;
|
||||||
|
|
||||||
for ($j = 0; $j < $i; ++$j) {
|
for ($j = 0; $j < $i; ++$j) {
|
||||||
$this->e[$j] = 0.0;
|
$this->e[$j] = 0.0;
|
||||||
}
|
}
|
||||||
@ -123,22 +157,26 @@ class EigenvalueDecomposition
|
|||||||
$f = $this->d[$j];
|
$f = $this->d[$j];
|
||||||
$this->V[$j][$i] = $f;
|
$this->V[$j][$i] = $f;
|
||||||
$g = $this->e[$j] + $this->V[$j][$j] * $f;
|
$g = $this->e[$j] + $this->V[$j][$j] * $f;
|
||||||
for ($k = $j+1; $k <= $i_; ++$k) {
|
|
||||||
|
for ($k = $j + 1; $k <= $i_; ++$k) {
|
||||||
$g += $this->V[$k][$j] * $this->d[$k];
|
$g += $this->V[$k][$j] * $this->d[$k];
|
||||||
$this->e[$k] += $this->V[$k][$j] * $f;
|
$this->e[$k] += $this->V[$k][$j] * $f;
|
||||||
}
|
}
|
||||||
$this->e[$j] = $g;
|
$this->e[$j] = $g;
|
||||||
}
|
}
|
||||||
|
|
||||||
$f = 0.0;
|
$f = 0.0;
|
||||||
if ($h === 0 || $h < 1e-32) {
|
if ($h === 0 || $h < 1e-32) {
|
||||||
$h = 1e-32;
|
$h = 1e-32;
|
||||||
}
|
}
|
||||||
|
|
||||||
for ($j = 0; $j < $i; ++$j) {
|
for ($j = 0; $j < $i; ++$j) {
|
||||||
$this->e[$j] /= $h;
|
$this->e[$j] /= $h;
|
||||||
$f += $this->e[$j] * $this->d[$j];
|
$f += $this->e[$j] * $this->d[$j];
|
||||||
}
|
}
|
||||||
|
|
||||||
$hh = $f / (2 * $h);
|
$hh = $f / (2 * $h);
|
||||||
for ($j=0; $j < $i; ++$j) {
|
for ($j = 0; $j < $i; ++$j) {
|
||||||
$this->e[$j] -= $hh * $this->d[$j];
|
$this->e[$j] -= $hh * $this->d[$j];
|
||||||
}
|
}
|
||||||
for ($j = 0; $j < $i; ++$j) {
|
for ($j = 0; $j < $i; ++$j) {
|
||||||
@ -147,7 +185,7 @@ class EigenvalueDecomposition
|
|||||||
for ($k = $j; $k <= $i_; ++$k) {
|
for ($k = $j; $k <= $i_; ++$k) {
|
||||||
$this->V[$k][$j] -= ($f * $this->e[$k] + $g * $this->d[$k]);
|
$this->V[$k][$j] -= ($f * $this->e[$k] + $g * $this->d[$k]);
|
||||||
}
|
}
|
||||||
$this->d[$j] = $this->V[$i-1][$j];
|
$this->d[$j] = $this->V[$i - 1][$j];
|
||||||
$this->V[$i][$j] = 0.0;
|
$this->V[$i][$j] = 0.0;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -155,18 +193,18 @@ class EigenvalueDecomposition
|
|||||||
}
|
}
|
||||||
|
|
||||||
// Accumulate transformations.
|
// Accumulate transformations.
|
||||||
for ($i = 0; $i < $this->n-1; ++$i) {
|
for ($i = 0; $i < $this->n - 1; ++$i) {
|
||||||
$this->V[$this->n-1][$i] = $this->V[$i][$i];
|
$this->V[$this->n - 1][$i] = $this->V[$i][$i];
|
||||||
$this->V[$i][$i] = 1.0;
|
$this->V[$i][$i] = 1.0;
|
||||||
$h = $this->d[$i+1];
|
$h = $this->d[$i + 1];
|
||||||
if ($h != 0.0) {
|
if ($h != 0.0) {
|
||||||
for ($k = 0; $k <= $i; ++$k) {
|
for ($k = 0; $k <= $i; ++$k) {
|
||||||
$this->d[$k] = $this->V[$k][$i+1] / $h;
|
$this->d[$k] = $this->V[$k][$i + 1] / $h;
|
||||||
}
|
}
|
||||||
for ($j = 0; $j <= $i; ++$j) {
|
for ($j = 0; $j <= $i; ++$j) {
|
||||||
$g = 0.0;
|
$g = 0.0;
|
||||||
for ($k = 0; $k <= $i; ++$k) {
|
for ($k = 0; $k <= $i; ++$k) {
|
||||||
$g += $this->V[$k][$i+1] * $this->V[$k][$j];
|
$g += $this->V[$k][$i + 1] * $this->V[$k][$j];
|
||||||
}
|
}
|
||||||
for ($k = 0; $k <= $i; ++$k) {
|
for ($k = 0; $k <= $i; ++$k) {
|
||||||
$this->V[$k][$j] -= $g * $this->d[$k];
|
$this->V[$k][$j] -= $g * $this->d[$k];
|
||||||
@ -174,13 +212,13 @@ class EigenvalueDecomposition
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
for ($k = 0; $k <= $i; ++$k) {
|
for ($k = 0; $k <= $i; ++$k) {
|
||||||
$this->V[$k][$i+1] = 0.0;
|
$this->V[$k][$i + 1] = 0.0;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
$this->d = $this->V[$this->n-1];
|
$this->d = $this->V[$this->n - 1];
|
||||||
$this->V[$this->n-1] = array_fill(0, $j, 0.0);
|
$this->V[$this->n - 1] = array_fill(0, $j, 0.0);
|
||||||
$this->V[$this->n-1][$this->n-1] = 1.0;
|
$this->V[$this->n - 1][$this->n - 1] = 1.0;
|
||||||
$this->e[0] = 0.0;
|
$this->e[0] = 0.0;
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -196,9 +234,9 @@ class EigenvalueDecomposition
|
|||||||
private function tql2()
|
private function tql2()
|
||||||
{
|
{
|
||||||
for ($i = 1; $i < $this->n; ++$i) {
|
for ($i = 1; $i < $this->n; ++$i) {
|
||||||
$this->e[$i-1] = $this->e[$i];
|
$this->e[$i - 1] = $this->e[$i];
|
||||||
}
|
}
|
||||||
$this->e[$this->n-1] = 0.0;
|
$this->e[$this->n - 1] = 0.0;
|
||||||
$f = 0.0;
|
$f = 0.0;
|
||||||
$tst1 = 0.0;
|
$tst1 = 0.0;
|
||||||
$eps = pow(2.0, -52.0);
|
$eps = pow(2.0, -52.0);
|
||||||
@ -222,14 +260,14 @@ class EigenvalueDecomposition
|
|||||||
$iter += 1;
|
$iter += 1;
|
||||||
// Compute implicit shift
|
// Compute implicit shift
|
||||||
$g = $this->d[$l];
|
$g = $this->d[$l];
|
||||||
$p = ($this->d[$l+1] - $g) / (2.0 * $this->e[$l]);
|
$p = ($this->d[$l + 1] - $g) / (2.0 * $this->e[$l]);
|
||||||
$r = hypot($p, 1.0);
|
$r = hypot($p, 1.0);
|
||||||
if ($p < 0) {
|
if ($p < 0) {
|
||||||
$r *= -1;
|
$r *= -1;
|
||||||
}
|
}
|
||||||
$this->d[$l] = $this->e[$l] / ($p + $r);
|
$this->d[$l] = $this->e[$l] / ($p + $r);
|
||||||
$this->d[$l+1] = $this->e[$l] * ($p + $r);
|
$this->d[$l + 1] = $this->e[$l] * ($p + $r);
|
||||||
$dl1 = $this->d[$l+1];
|
$dl1 = $this->d[$l + 1];
|
||||||
$h = $g - $this->d[$l];
|
$h = $g - $this->d[$l];
|
||||||
for ($i = $l + 2; $i < $this->n; ++$i) {
|
for ($i = $l + 2; $i < $this->n; ++$i) {
|
||||||
$this->d[$i] -= $h;
|
$this->d[$i] -= $h;
|
||||||
@ -241,23 +279,23 @@ class EigenvalueDecomposition
|
|||||||
$c2 = $c3 = $c;
|
$c2 = $c3 = $c;
|
||||||
$el1 = $this->e[$l + 1];
|
$el1 = $this->e[$l + 1];
|
||||||
$s = $s2 = 0.0;
|
$s = $s2 = 0.0;
|
||||||
for ($i = $m-1; $i >= $l; --$i) {
|
for ($i = $m - 1; $i >= $l; --$i) {
|
||||||
$c3 = $c2;
|
$c3 = $c2;
|
||||||
$c2 = $c;
|
$c2 = $c;
|
||||||
$s2 = $s;
|
$s2 = $s;
|
||||||
$g = $c * $this->e[$i];
|
$g = $c * $this->e[$i];
|
||||||
$h = $c * $p;
|
$h = $c * $p;
|
||||||
$r = hypot($p, $this->e[$i]);
|
$r = hypot($p, $this->e[$i]);
|
||||||
$this->e[$i+1] = $s * $r;
|
$this->e[$i + 1] = $s * $r;
|
||||||
$s = $this->e[$i] / $r;
|
$s = $this->e[$i] / $r;
|
||||||
$c = $p / $r;
|
$c = $p / $r;
|
||||||
$p = $c * $this->d[$i] - $s * $g;
|
$p = $c * $this->d[$i] - $s * $g;
|
||||||
$this->d[$i+1] = $h + $s * ($c * $g + $s * $this->d[$i]);
|
$this->d[$i + 1] = $h + $s * ($c * $g + $s * $this->d[$i]);
|
||||||
// Accumulate transformation.
|
// Accumulate transformation.
|
||||||
for ($k = 0; $k < $this->n; ++$k) {
|
for ($k = 0; $k < $this->n; ++$k) {
|
||||||
$h = $this->V[$k][$i+1];
|
$h = $this->V[$k][$i + 1];
|
||||||
$this->V[$k][$i+1] = $s * $this->V[$k][$i] + $c * $h;
|
$this->V[$k][$i + 1] = $s * $this->V[$k][$i] + $c * $h;
|
||||||
$this->V[$k][$i] = $c * $this->V[$k][$i] - $s * $h;
|
$this->V[$k][$i] = $c * $this->V[$k][$i] - $s * $h;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
$p = -$s * $s2 * $c3 * $el1 * $this->e[$l] / $dl1;
|
$p = -$s * $s2 * $c3 * $el1 * $this->e[$l] / $dl1;
|
||||||
@ -274,7 +312,7 @@ class EigenvalueDecomposition
|
|||||||
for ($i = 0; $i < $this->n - 1; ++$i) {
|
for ($i = 0; $i < $this->n - 1; ++$i) {
|
||||||
$k = $i;
|
$k = $i;
|
||||||
$p = $this->d[$i];
|
$p = $this->d[$i];
|
||||||
for ($j = $i+1; $j < $this->n; ++$j) {
|
for ($j = $i + 1; $j < $this->n; ++$j) {
|
||||||
if ($this->d[$j] < $p) {
|
if ($this->d[$j] < $p) {
|
||||||
$k = $j;
|
$k = $j;
|
||||||
$p = $this->d[$j];
|
$p = $this->d[$j];
|
||||||
@ -304,19 +342,19 @@ class EigenvalueDecomposition
|
|||||||
private function orthes()
|
private function orthes()
|
||||||
{
|
{
|
||||||
$low = 0;
|
$low = 0;
|
||||||
$high = $this->n-1;
|
$high = $this->n - 1;
|
||||||
|
|
||||||
for ($m = $low+1; $m <= $high-1; ++$m) {
|
for ($m = $low + 1; $m <= $high - 1; ++$m) {
|
||||||
// Scale column.
|
// Scale column.
|
||||||
$scale = 0.0;
|
$scale = 0.0;
|
||||||
for ($i = $m; $i <= $high; ++$i) {
|
for ($i = $m; $i <= $high; ++$i) {
|
||||||
$scale = $scale + abs($this->H[$i][$m-1]);
|
$scale = $scale + abs($this->H[$i][$m - 1]);
|
||||||
}
|
}
|
||||||
if ($scale != 0.0) {
|
if ($scale != 0.0) {
|
||||||
// Compute Householder transformation.
|
// Compute Householder transformation.
|
||||||
$h = 0.0;
|
$h = 0.0;
|
||||||
for ($i = $high; $i >= $m; --$i) {
|
for ($i = $high; $i >= $m; --$i) {
|
||||||
$this->ort[$i] = $this->H[$i][$m-1] / $scale;
|
$this->ort[$i] = $this->H[$i][$m - 1] / $scale;
|
||||||
$h += $this->ort[$i] * $this->ort[$i];
|
$h += $this->ort[$i] * $this->ort[$i];
|
||||||
}
|
}
|
||||||
$g = sqrt($h);
|
$g = sqrt($h);
|
||||||
@ -348,7 +386,7 @@ class EigenvalueDecomposition
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
$this->ort[$m] = $scale * $this->ort[$m];
|
$this->ort[$m] = $scale * $this->ort[$m];
|
||||||
$this->H[$m][$m-1] = $scale * $g;
|
$this->H[$m][$m - 1] = $scale * $g;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -358,10 +396,10 @@ class EigenvalueDecomposition
|
|||||||
$this->V[$i][$j] = ($i == $j ? 1.0 : 0.0);
|
$this->V[$i][$j] = ($i == $j ? 1.0 : 0.0);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
for ($m = $high-1; $m >= $low+1; --$m) {
|
for ($m = $high - 1; $m >= $low + 1; --$m) {
|
||||||
if ($this->H[$m][$m-1] != 0.0) {
|
if ($this->H[$m][$m - 1] != 0.0) {
|
||||||
for ($i = $m+1; $i <= $high; ++$i) {
|
for ($i = $m + 1; $i <= $high; ++$i) {
|
||||||
$this->ort[$i] = $this->H[$i][$m-1];
|
$this->ort[$i] = $this->H[$i][$m - 1];
|
||||||
}
|
}
|
||||||
for ($j = $m; $j <= $high; ++$j) {
|
for ($j = $m; $j <= $high; ++$j) {
|
||||||
$g = 0.0;
|
$g = 0.0;
|
||||||
@ -369,7 +407,7 @@ class EigenvalueDecomposition
|
|||||||
$g += $this->ort[$i] * $this->V[$i][$j];
|
$g += $this->ort[$i] * $this->V[$i][$j];
|
||||||
}
|
}
|
||||||
// Double division avoids possible underflow
|
// Double division avoids possible underflow
|
||||||
$g = ($g / $this->ort[$m]) / $this->H[$m][$m-1];
|
$g = ($g / $this->ort[$m]) / $this->H[$m][$m - 1];
|
||||||
for ($i = $m; $i <= $high; ++$i) {
|
for ($i = $m; $i <= $high; ++$i) {
|
||||||
$this->V[$i][$j] += $g * $this->ort[$i];
|
$this->V[$i][$j] += $g * $this->ort[$i];
|
||||||
}
|
}
|
||||||
@ -378,9 +416,13 @@ class EigenvalueDecomposition
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Performs complex division.
|
* Performs complex division.
|
||||||
|
*
|
||||||
|
* @param int|float $xr
|
||||||
|
* @param int|float $xi
|
||||||
|
* @param int|float $yr
|
||||||
|
* @param int|float $yi
|
||||||
*/
|
*/
|
||||||
private function cdiv($xr, $xi, $yr, $yi)
|
private function cdiv($xr, $xi, $yr, $yi)
|
||||||
{
|
{
|
||||||
@ -397,7 +439,6 @@ class EigenvalueDecomposition
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Nonsymmetric reduction from Hessenberg to real Schur form.
|
* Nonsymmetric reduction from Hessenberg to real Schur form.
|
||||||
*
|
*
|
||||||
@ -424,7 +465,7 @@ class EigenvalueDecomposition
|
|||||||
$this->d[$i] = $this->H[$i][$i];
|
$this->d[$i] = $this->H[$i][$i];
|
||||||
$this->e[$i] = 0.0;
|
$this->e[$i] = 0.0;
|
||||||
}
|
}
|
||||||
for ($j = max($i-1, 0); $j < $nn; ++$j) {
|
for ($j = max($i - 1, 0); $j < $nn; ++$j) {
|
||||||
$norm = $norm + abs($this->H[$i][$j]);
|
$norm = $norm + abs($this->H[$i][$j]);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -435,11 +476,11 @@ class EigenvalueDecomposition
|
|||||||
// Look for single small sub-diagonal element
|
// Look for single small sub-diagonal element
|
||||||
$l = $n;
|
$l = $n;
|
||||||
while ($l > $low) {
|
while ($l > $low) {
|
||||||
$s = abs($this->H[$l-1][$l-1]) + abs($this->H[$l][$l]);
|
$s = abs($this->H[$l - 1][$l - 1]) + abs($this->H[$l][$l]);
|
||||||
if ($s == 0.0) {
|
if ($s == 0.0) {
|
||||||
$s = $norm;
|
$s = $norm;
|
||||||
}
|
}
|
||||||
if (abs($this->H[$l][$l-1]) < $eps * $s) {
|
if (abs($this->H[$l][$l - 1]) < $eps * $s) {
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
--$l;
|
--$l;
|
||||||
@ -453,13 +494,13 @@ class EigenvalueDecomposition
|
|||||||
--$n;
|
--$n;
|
||||||
$iter = 0;
|
$iter = 0;
|
||||||
// Two roots found
|
// Two roots found
|
||||||
} elseif ($l == $n-1) {
|
} elseif ($l == $n - 1) {
|
||||||
$w = $this->H[$n][$n-1] * $this->H[$n-1][$n];
|
$w = $this->H[$n][$n - 1] * $this->H[$n - 1][$n];
|
||||||
$p = ($this->H[$n-1][$n-1] - $this->H[$n][$n]) / 2.0;
|
$p = ($this->H[$n - 1][$n - 1] - $this->H[$n][$n]) / 2.0;
|
||||||
$q = $p * $p + $w;
|
$q = $p * $p + $w;
|
||||||
$z = sqrt(abs($q));
|
$z = sqrt(abs($q));
|
||||||
$this->H[$n][$n] = $this->H[$n][$n] + $exshift;
|
$this->H[$n][$n] = $this->H[$n][$n] + $exshift;
|
||||||
$this->H[$n-1][$n-1] = $this->H[$n-1][$n-1] + $exshift;
|
$this->H[$n - 1][$n - 1] = $this->H[$n - 1][$n - 1] + $exshift;
|
||||||
$x = $this->H[$n][$n];
|
$x = $this->H[$n][$n];
|
||||||
// Real pair
|
// Real pair
|
||||||
if ($q >= 0) {
|
if ($q >= 0) {
|
||||||
@ -468,14 +509,14 @@ class EigenvalueDecomposition
|
|||||||
} else {
|
} else {
|
||||||
$z = $p - $z;
|
$z = $p - $z;
|
||||||
}
|
}
|
||||||
$this->d[$n-1] = $x + $z;
|
$this->d[$n - 1] = $x + $z;
|
||||||
$this->d[$n] = $this->d[$n-1];
|
$this->d[$n] = $this->d[$n - 1];
|
||||||
if ($z != 0.0) {
|
if ($z != 0.0) {
|
||||||
$this->d[$n] = $x - $w / $z;
|
$this->d[$n] = $x - $w / $z;
|
||||||
}
|
}
|
||||||
$this->e[$n-1] = 0.0;
|
$this->e[$n - 1] = 0.0;
|
||||||
$this->e[$n] = 0.0;
|
$this->e[$n] = 0.0;
|
||||||
$x = $this->H[$n][$n-1];
|
$x = $this->H[$n][$n - 1];
|
||||||
$s = abs($x) + abs($z);
|
$s = abs($x) + abs($z);
|
||||||
$p = $x / $s;
|
$p = $x / $s;
|
||||||
$q = $z / $s;
|
$q = $z / $s;
|
||||||
@ -483,29 +524,29 @@ class EigenvalueDecomposition
|
|||||||
$p = $p / $r;
|
$p = $p / $r;
|
||||||
$q = $q / $r;
|
$q = $q / $r;
|
||||||
// Row modification
|
// Row modification
|
||||||
for ($j = $n-1; $j < $nn; ++$j) {
|
for ($j = $n - 1; $j < $nn; ++$j) {
|
||||||
$z = $this->H[$n-1][$j];
|
$z = $this->H[$n - 1][$j];
|
||||||
$this->H[$n-1][$j] = $q * $z + $p * $this->H[$n][$j];
|
$this->H[$n - 1][$j] = $q * $z + $p * $this->H[$n][$j];
|
||||||
$this->H[$n][$j] = $q * $this->H[$n][$j] - $p * $z;
|
$this->H[$n][$j] = $q * $this->H[$n][$j] - $p * $z;
|
||||||
}
|
}
|
||||||
// Column modification
|
// Column modification
|
||||||
for ($i = 0; $i <= $n; ++$i) {
|
for ($i = 0; $i <= $n; ++$i) {
|
||||||
$z = $this->H[$i][$n-1];
|
$z = $this->H[$i][$n - 1];
|
||||||
$this->H[$i][$n-1] = $q * $z + $p * $this->H[$i][$n];
|
$this->H[$i][$n - 1] = $q * $z + $p * $this->H[$i][$n];
|
||||||
$this->H[$i][$n] = $q * $this->H[$i][$n] - $p * $z;
|
$this->H[$i][$n] = $q * $this->H[$i][$n] - $p * $z;
|
||||||
}
|
}
|
||||||
// Accumulate transformations
|
// Accumulate transformations
|
||||||
for ($i = $low; $i <= $high; ++$i) {
|
for ($i = $low; $i <= $high; ++$i) {
|
||||||
$z = $this->V[$i][$n-1];
|
$z = $this->V[$i][$n - 1];
|
||||||
$this->V[$i][$n-1] = $q * $z + $p * $this->V[$i][$n];
|
$this->V[$i][$n - 1] = $q * $z + $p * $this->V[$i][$n];
|
||||||
$this->V[$i][$n] = $q * $this->V[$i][$n] - $p * $z;
|
$this->V[$i][$n] = $q * $this->V[$i][$n] - $p * $z;
|
||||||
}
|
}
|
||||||
// Complex pair
|
// Complex pair
|
||||||
} else {
|
} else {
|
||||||
$this->d[$n-1] = $x + $p;
|
$this->d[$n - 1] = $x + $p;
|
||||||
$this->d[$n] = $x + $p;
|
$this->d[$n] = $x + $p;
|
||||||
$this->e[$n-1] = $z;
|
$this->e[$n - 1] = $z;
|
||||||
$this->e[$n] = -$z;
|
$this->e[$n] = -$z;
|
||||||
}
|
}
|
||||||
$n = $n - 2;
|
$n = $n - 2;
|
||||||
$iter = 0;
|
$iter = 0;
|
||||||
@ -516,8 +557,8 @@ class EigenvalueDecomposition
|
|||||||
$y = 0.0;
|
$y = 0.0;
|
||||||
$w = 0.0;
|
$w = 0.0;
|
||||||
if ($l < $n) {
|
if ($l < $n) {
|
||||||
$y = $this->H[$n-1][$n-1];
|
$y = $this->H[$n - 1][$n - 1];
|
||||||
$w = $this->H[$n][$n-1] * $this->H[$n-1][$n];
|
$w = $this->H[$n][$n - 1] * $this->H[$n - 1][$n];
|
||||||
}
|
}
|
||||||
// Wilkinson's original ad hoc shift
|
// Wilkinson's original ad hoc shift
|
||||||
if ($iter == 10) {
|
if ($iter == 10) {
|
||||||
@ -525,7 +566,7 @@ class EigenvalueDecomposition
|
|||||||
for ($i = $low; $i <= $n; ++$i) {
|
for ($i = $low; $i <= $n; ++$i) {
|
||||||
$this->H[$i][$i] -= $x;
|
$this->H[$i][$i] -= $x;
|
||||||
}
|
}
|
||||||
$s = abs($this->H[$n][$n-1]) + abs($this->H[$n-1][$n-2]);
|
$s = abs($this->H[$n][$n - 1]) + abs($this->H[$n - 1][$n - 2]);
|
||||||
$x = $y = 0.75 * $s;
|
$x = $y = 0.75 * $s;
|
||||||
$w = -0.4375 * $s * $s;
|
$w = -0.4375 * $s * $s;
|
||||||
}
|
}
|
||||||
@ -554,9 +595,9 @@ class EigenvalueDecomposition
|
|||||||
$z = $this->H[$m][$m];
|
$z = $this->H[$m][$m];
|
||||||
$r = $x - $z;
|
$r = $x - $z;
|
||||||
$s = $y - $z;
|
$s = $y - $z;
|
||||||
$p = ($r * $s - $w) / $this->H[$m+1][$m] + $this->H[$m][$m+1];
|
$p = ($r * $s - $w) / $this->H[$m + 1][$m] + $this->H[$m][$m + 1];
|
||||||
$q = $this->H[$m+1][$m+1] - $z - $r - $s;
|
$q = $this->H[$m + 1][$m + 1] - $z - $r - $s;
|
||||||
$r = $this->H[$m+2][$m+1];
|
$r = $this->H[$m + 2][$m + 1];
|
||||||
$s = abs($p) + abs($q) + abs($r);
|
$s = abs($p) + abs($q) + abs($r);
|
||||||
$p = $p / $s;
|
$p = $p / $s;
|
||||||
$q = $q / $s;
|
$q = $q / $s;
|
||||||
@ -564,25 +605,25 @@ class EigenvalueDecomposition
|
|||||||
if ($m == $l) {
|
if ($m == $l) {
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
if (abs($this->H[$m][$m-1]) * (abs($q) + abs($r)) <
|
if (abs($this->H[$m][$m - 1]) * (abs($q) + abs($r)) <
|
||||||
$eps * (abs($p) * (abs($this->H[$m-1][$m-1]) + abs($z) + abs($this->H[$m+1][$m+1])))) {
|
$eps * (abs($p) * (abs($this->H[$m - 1][$m - 1]) + abs($z) + abs($this->H[$m + 1][$m + 1])))) {
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
--$m;
|
--$m;
|
||||||
}
|
}
|
||||||
for ($i = $m + 2; $i <= $n; ++$i) {
|
for ($i = $m + 2; $i <= $n; ++$i) {
|
||||||
$this->H[$i][$i-2] = 0.0;
|
$this->H[$i][$i - 2] = 0.0;
|
||||||
if ($i > $m+2) {
|
if ($i > $m + 2) {
|
||||||
$this->H[$i][$i-3] = 0.0;
|
$this->H[$i][$i - 3] = 0.0;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
// Double QR step involving rows l:n and columns m:n
|
// Double QR step involving rows l:n and columns m:n
|
||||||
for ($k = $m; $k <= $n-1; ++$k) {
|
for ($k = $m; $k <= $n - 1; ++$k) {
|
||||||
$notlast = ($k != $n-1);
|
$notlast = ($k != $n - 1);
|
||||||
if ($k != $m) {
|
if ($k != $m) {
|
||||||
$p = $this->H[$k][$k-1];
|
$p = $this->H[$k][$k - 1];
|
||||||
$q = $this->H[$k+1][$k-1];
|
$q = $this->H[$k + 1][$k - 1];
|
||||||
$r = ($notlast ? $this->H[$k+2][$k-1] : 0.0);
|
$r = ($notlast ? $this->H[$k + 2][$k - 1] : 0.0);
|
||||||
$x = abs($p) + abs($q) + abs($r);
|
$x = abs($p) + abs($q) + abs($r);
|
||||||
if ($x != 0.0) {
|
if ($x != 0.0) {
|
||||||
$p = $p / $x;
|
$p = $p / $x;
|
||||||
@ -599,9 +640,9 @@ class EigenvalueDecomposition
|
|||||||
}
|
}
|
||||||
if ($s != 0) {
|
if ($s != 0) {
|
||||||
if ($k != $m) {
|
if ($k != $m) {
|
||||||
$this->H[$k][$k-1] = -$s * $x;
|
$this->H[$k][$k - 1] = -$s * $x;
|
||||||
} elseif ($l != $m) {
|
} elseif ($l != $m) {
|
||||||
$this->H[$k][$k-1] = -$this->H[$k][$k-1];
|
$this->H[$k][$k - 1] = -$this->H[$k][$k - 1];
|
||||||
}
|
}
|
||||||
$p = $p + $s;
|
$p = $p + $s;
|
||||||
$x = $p / $s;
|
$x = $p / $s;
|
||||||
@ -611,33 +652,33 @@ class EigenvalueDecomposition
|
|||||||
$r = $r / $p;
|
$r = $r / $p;
|
||||||
// Row modification
|
// Row modification
|
||||||
for ($j = $k; $j < $nn; ++$j) {
|
for ($j = $k; $j < $nn; ++$j) {
|
||||||
$p = $this->H[$k][$j] + $q * $this->H[$k+1][$j];
|
$p = $this->H[$k][$j] + $q * $this->H[$k + 1][$j];
|
||||||
if ($notlast) {
|
if ($notlast) {
|
||||||
$p = $p + $r * $this->H[$k+2][$j];
|
$p = $p + $r * $this->H[$k + 2][$j];
|
||||||
$this->H[$k+2][$j] = $this->H[$k+2][$j] - $p * $z;
|
$this->H[$k + 2][$j] = $this->H[$k + 2][$j] - $p * $z;
|
||||||
}
|
}
|
||||||
$this->H[$k][$j] = $this->H[$k][$j] - $p * $x;
|
$this->H[$k][$j] = $this->H[$k][$j] - $p * $x;
|
||||||
$this->H[$k+1][$j] = $this->H[$k+1][$j] - $p * $y;
|
$this->H[$k + 1][$j] = $this->H[$k + 1][$j] - $p * $y;
|
||||||
}
|
}
|
||||||
// Column modification
|
// Column modification
|
||||||
for ($i = 0; $i <= min($n, $k+3); ++$i) {
|
for ($i = 0; $i <= min($n, $k + 3); ++$i) {
|
||||||
$p = $x * $this->H[$i][$k] + $y * $this->H[$i][$k+1];
|
$p = $x * $this->H[$i][$k] + $y * $this->H[$i][$k + 1];
|
||||||
if ($notlast) {
|
if ($notlast) {
|
||||||
$p = $p + $z * $this->H[$i][$k+2];
|
$p = $p + $z * $this->H[$i][$k + 2];
|
||||||
$this->H[$i][$k+2] = $this->H[$i][$k+2] - $p * $r;
|
$this->H[$i][$k + 2] = $this->H[$i][$k + 2] - $p * $r;
|
||||||
}
|
}
|
||||||
$this->H[$i][$k] = $this->H[$i][$k] - $p;
|
$this->H[$i][$k] = $this->H[$i][$k] - $p;
|
||||||
$this->H[$i][$k+1] = $this->H[$i][$k+1] - $p * $q;
|
$this->H[$i][$k + 1] = $this->H[$i][$k + 1] - $p * $q;
|
||||||
}
|
}
|
||||||
// Accumulate transformations
|
// Accumulate transformations
|
||||||
for ($i = $low; $i <= $high; ++$i) {
|
for ($i = $low; $i <= $high; ++$i) {
|
||||||
$p = $x * $this->V[$i][$k] + $y * $this->V[$i][$k+1];
|
$p = $x * $this->V[$i][$k] + $y * $this->V[$i][$k + 1];
|
||||||
if ($notlast) {
|
if ($notlast) {
|
||||||
$p = $p + $z * $this->V[$i][$k+2];
|
$p = $p + $z * $this->V[$i][$k + 2];
|
||||||
$this->V[$i][$k+2] = $this->V[$i][$k+2] - $p * $r;
|
$this->V[$i][$k + 2] = $this->V[$i][$k + 2] - $p * $r;
|
||||||
}
|
}
|
||||||
$this->V[$i][$k] = $this->V[$i][$k] - $p;
|
$this->V[$i][$k] = $this->V[$i][$k] - $p;
|
||||||
$this->V[$i][$k+1] = $this->V[$i][$k+1] - $p * $q;
|
$this->V[$i][$k + 1] = $this->V[$i][$k + 1] - $p * $q;
|
||||||
}
|
}
|
||||||
} // ($s != 0)
|
} // ($s != 0)
|
||||||
} // k loop
|
} // k loop
|
||||||
@ -649,19 +690,20 @@ class EigenvalueDecomposition
|
|||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
for ($n = $nn-1; $n >= 0; --$n) {
|
for ($n = $nn - 1; $n >= 0; --$n) {
|
||||||
$p = $this->d[$n];
|
$p = $this->d[$n];
|
||||||
$q = $this->e[$n];
|
$q = $this->e[$n];
|
||||||
// Real vector
|
// Real vector
|
||||||
if ($q == 0) {
|
if ($q == 0) {
|
||||||
$l = $n;
|
$l = $n;
|
||||||
$this->H[$n][$n] = 1.0;
|
$this->H[$n][$n] = 1.0;
|
||||||
for ($i = $n-1; $i >= 0; --$i) {
|
for ($i = $n - 1; $i >= 0; --$i) {
|
||||||
$w = $this->H[$i][$i] - $p;
|
$w = $this->H[$i][$i] - $p;
|
||||||
$r = 0.0;
|
$r = 0.0;
|
||||||
for ($j = $l; $j <= $n; ++$j) {
|
for ($j = $l; $j <= $n; ++$j) {
|
||||||
$r = $r + $this->H[$i][$j] * $this->H[$j][$n];
|
$r = $r + $this->H[$i][$j] * $this->H[$j][$n];
|
||||||
}
|
}
|
||||||
|
|
||||||
if ($this->e[$i] < 0.0) {
|
if ($this->e[$i] < 0.0) {
|
||||||
$z = $w;
|
$z = $w;
|
||||||
$s = $r;
|
$s = $r;
|
||||||
@ -675,15 +717,15 @@ class EigenvalueDecomposition
|
|||||||
}
|
}
|
||||||
// Solve real equations
|
// Solve real equations
|
||||||
} else {
|
} else {
|
||||||
$x = $this->H[$i][$i+1];
|
$x = $this->H[$i][$i + 1];
|
||||||
$y = $this->H[$i+1][$i];
|
$y = $this->H[$i + 1][$i];
|
||||||
$q = ($this->d[$i] - $p) * ($this->d[$i] - $p) + $this->e[$i] * $this->e[$i];
|
$q = ($this->d[$i] - $p) * ($this->d[$i] - $p) + $this->e[$i] * $this->e[$i];
|
||||||
$t = ($x * $s - $z * $r) / $q;
|
$t = ($x * $s - $z * $r) / $q;
|
||||||
$this->H[$i][$n] = $t;
|
$this->H[$i][$n] = $t;
|
||||||
if (abs($x) > abs($z)) {
|
if (abs($x) > abs($z)) {
|
||||||
$this->H[$i+1][$n] = (-$r - $w * $t) / $x;
|
$this->H[$i + 1][$n] = (-$r - $w * $t) / $x;
|
||||||
} else {
|
} else {
|
||||||
$this->H[$i+1][$n] = (-$s - $y * $t) / $z;
|
$this->H[$i + 1][$n] = (-$s - $y * $t) / $z;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
// Overflow control
|
// Overflow control
|
||||||
@ -697,24 +739,24 @@ class EigenvalueDecomposition
|
|||||||
}
|
}
|
||||||
// Complex vector
|
// Complex vector
|
||||||
} elseif ($q < 0) {
|
} elseif ($q < 0) {
|
||||||
$l = $n-1;
|
$l = $n - 1;
|
||||||
// Last vector component imaginary so matrix is triangular
|
// Last vector component imaginary so matrix is triangular
|
||||||
if (abs($this->H[$n][$n-1]) > abs($this->H[$n-1][$n])) {
|
if (abs($this->H[$n][$n - 1]) > abs($this->H[$n - 1][$n])) {
|
||||||
$this->H[$n-1][$n-1] = $q / $this->H[$n][$n-1];
|
$this->H[$n - 1][$n - 1] = $q / $this->H[$n][$n - 1];
|
||||||
$this->H[$n-1][$n] = -($this->H[$n][$n] - $p) / $this->H[$n][$n-1];
|
$this->H[$n - 1][$n] = -($this->H[$n][$n] - $p) / $this->H[$n][$n - 1];
|
||||||
} else {
|
} else {
|
||||||
$this->cdiv(0.0, -$this->H[$n-1][$n], $this->H[$n-1][$n-1] - $p, $q);
|
$this->cdiv(0.0, -$this->H[$n - 1][$n], $this->H[$n - 1][$n - 1] - $p, $q);
|
||||||
$this->H[$n-1][$n-1] = $this->cdivr;
|
$this->H[$n - 1][$n - 1] = $this->cdivr;
|
||||||
$this->H[$n-1][$n] = $this->cdivi;
|
$this->H[$n - 1][$n] = $this->cdivi;
|
||||||
}
|
}
|
||||||
$this->H[$n][$n-1] = 0.0;
|
$this->H[$n][$n - 1] = 0.0;
|
||||||
$this->H[$n][$n] = 1.0;
|
$this->H[$n][$n] = 1.0;
|
||||||
for ($i = $n-2; $i >= 0; --$i) {
|
for ($i = $n - 2; $i >= 0; --$i) {
|
||||||
// double ra,sa,vr,vi;
|
// double ra,sa,vr,vi;
|
||||||
$ra = 0.0;
|
$ra = 0.0;
|
||||||
$sa = 0.0;
|
$sa = 0.0;
|
||||||
for ($j = $l; $j <= $n; ++$j) {
|
for ($j = $l; $j <= $n; ++$j) {
|
||||||
$ra = $ra + $this->H[$i][$j] * $this->H[$j][$n-1];
|
$ra = $ra + $this->H[$i][$j] * $this->H[$j][$n - 1];
|
||||||
$sa = $sa + $this->H[$i][$j] * $this->H[$j][$n];
|
$sa = $sa + $this->H[$i][$j] * $this->H[$j][$n];
|
||||||
}
|
}
|
||||||
$w = $this->H[$i][$i] - $p;
|
$w = $this->H[$i][$i] - $p;
|
||||||
@ -726,35 +768,35 @@ class EigenvalueDecomposition
|
|||||||
$l = $i;
|
$l = $i;
|
||||||
if ($this->e[$i] == 0) {
|
if ($this->e[$i] == 0) {
|
||||||
$this->cdiv(-$ra, -$sa, $w, $q);
|
$this->cdiv(-$ra, -$sa, $w, $q);
|
||||||
$this->H[$i][$n-1] = $this->cdivr;
|
$this->H[$i][$n - 1] = $this->cdivr;
|
||||||
$this->H[$i][$n] = $this->cdivi;
|
$this->H[$i][$n] = $this->cdivi;
|
||||||
} else {
|
} else {
|
||||||
// Solve complex equations
|
// Solve complex equations
|
||||||
$x = $this->H[$i][$i+1];
|
$x = $this->H[$i][$i + 1];
|
||||||
$y = $this->H[$i+1][$i];
|
$y = $this->H[$i + 1][$i];
|
||||||
$vr = ($this->d[$i] - $p) * ($this->d[$i] - $p) + $this->e[$i] * $this->e[$i] - $q * $q;
|
$vr = ($this->d[$i] - $p) * ($this->d[$i] - $p) + $this->e[$i] * $this->e[$i] - $q * $q;
|
||||||
$vi = ($this->d[$i] - $p) * 2.0 * $q;
|
$vi = ($this->d[$i] - $p) * 2.0 * $q;
|
||||||
if ($vr == 0.0 & $vi == 0.0) {
|
if ($vr == 0.0 & $vi == 0.0) {
|
||||||
$vr = $eps * $norm * (abs($w) + abs($q) + abs($x) + abs($y) + abs($z));
|
$vr = $eps * $norm * (abs($w) + abs($q) + abs($x) + abs($y) + abs($z));
|
||||||
}
|
}
|
||||||
$this->cdiv($x * $r - $z * $ra + $q * $sa, $x * $s - $z * $sa - $q * $ra, $vr, $vi);
|
$this->cdiv($x * $r - $z * $ra + $q * $sa, $x * $s - $z * $sa - $q * $ra, $vr, $vi);
|
||||||
$this->H[$i][$n-1] = $this->cdivr;
|
$this->H[$i][$n - 1] = $this->cdivr;
|
||||||
$this->H[$i][$n] = $this->cdivi;
|
$this->H[$i][$n] = $this->cdivi;
|
||||||
if (abs($x) > (abs($z) + abs($q))) {
|
if (abs($x) > (abs($z) + abs($q))) {
|
||||||
$this->H[$i+1][$n-1] = (-$ra - $w * $this->H[$i][$n-1] + $q * $this->H[$i][$n]) / $x;
|
$this->H[$i + 1][$n - 1] = (-$ra - $w * $this->H[$i][$n - 1] + $q * $this->H[$i][$n]) / $x;
|
||||||
$this->H[$i+1][$n] = (-$sa - $w * $this->H[$i][$n] - $q * $this->H[$i][$n-1]) / $x;
|
$this->H[$i + 1][$n] = (-$sa - $w * $this->H[$i][$n] - $q * $this->H[$i][$n - 1]) / $x;
|
||||||
} else {
|
} else {
|
||||||
$this->cdiv(-$r - $y * $this->H[$i][$n-1], -$s - $y * $this->H[$i][$n], $z, $q);
|
$this->cdiv(-$r - $y * $this->H[$i][$n - 1], -$s - $y * $this->H[$i][$n], $z, $q);
|
||||||
$this->H[$i+1][$n-1] = $this->cdivr;
|
$this->H[$i + 1][$n - 1] = $this->cdivr;
|
||||||
$this->H[$i+1][$n] = $this->cdivi;
|
$this->H[$i + 1][$n] = $this->cdivi;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
// Overflow control
|
// Overflow control
|
||||||
$t = max(abs($this->H[$i][$n-1]), abs($this->H[$i][$n]));
|
$t = max(abs($this->H[$i][$n - 1]), abs($this->H[$i][$n]));
|
||||||
if (($eps * $t) * $t > 1) {
|
if (($eps * $t) * $t > 1) {
|
||||||
for ($j = $i; $j <= $n; ++$j) {
|
for ($j = $i; $j <= $n; ++$j) {
|
||||||
$this->H[$j][$n-1] = $this->H[$j][$n-1] / $t;
|
$this->H[$j][$n - 1] = $this->H[$j][$n - 1] / $t;
|
||||||
$this->H[$j][$n] = $this->H[$j][$n] / $t;
|
$this->H[$j][$n] = $this->H[$j][$n] / $t;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
} // end else
|
} // end else
|
||||||
@ -772,7 +814,7 @@ class EigenvalueDecomposition
|
|||||||
}
|
}
|
||||||
|
|
||||||
// Back transformation to get eigenvectors of original matrix
|
// Back transformation to get eigenvectors of original matrix
|
||||||
for ($j = $nn-1; $j >= $low; --$j) {
|
for ($j = $nn - 1; $j >= $low; --$j) {
|
||||||
for ($i = $low; $i <= $high; ++$i) {
|
for ($i = $low; $i <= $high; ++$i) {
|
||||||
$z = 0.0;
|
$z = 0.0;
|
||||||
for ($k = $low; $k <= min($j, $high); ++$k) {
|
for ($k = $low; $k <= min($j, $high); ++$k) {
|
||||||
@ -783,45 +825,12 @@ class EigenvalueDecomposition
|
|||||||
}
|
}
|
||||||
} // end hqr2
|
} // end hqr2
|
||||||
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Constructor: Check for symmetry, then construct the eigenvalue decomposition
|
* Return the eigenvector matrix
|
||||||
*
|
*
|
||||||
* @param array $Arg
|
* @access public
|
||||||
*/
|
|
||||||
public function __construct(array $Arg)
|
|
||||||
{
|
|
||||||
$this->A = $Arg;
|
|
||||||
$this->n = count($Arg[0]);
|
|
||||||
|
|
||||||
$issymmetric = true;
|
|
||||||
for ($j = 0; ($j < $this->n) & $issymmetric; ++$j) {
|
|
||||||
for ($i = 0; ($i < $this->n) & $issymmetric; ++$i) {
|
|
||||||
$issymmetric = ($this->A[$i][$j] == $this->A[$j][$i]);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if ($issymmetric) {
|
|
||||||
$this->V = $this->A;
|
|
||||||
// Tridiagonalize.
|
|
||||||
$this->tred2();
|
|
||||||
// Diagonalize.
|
|
||||||
$this->tql2();
|
|
||||||
} else {
|
|
||||||
$this->H = $this->A;
|
|
||||||
$this->ort = [];
|
|
||||||
// Reduce to Hessenberg form.
|
|
||||||
$this->orthes();
|
|
||||||
// Reduce Hessenberg to real Schur form.
|
|
||||||
$this->hqr2();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Return the eigenvector matrix
|
|
||||||
*
|
*
|
||||||
* @access public
|
* @return array
|
||||||
* @return array
|
|
||||||
*/
|
*/
|
||||||
public function getEigenvectors()
|
public function getEigenvectors()
|
||||||
{
|
{
|
||||||
@ -831,20 +840,21 @@ class EigenvalueDecomposition
|
|||||||
$vectors = new Matrix($vectors);
|
$vectors = new Matrix($vectors);
|
||||||
$vectors = array_map(function ($vect) {
|
$vectors = array_map(function ($vect) {
|
||||||
$sum = 0;
|
$sum = 0;
|
||||||
for ($i=0; $i<count($vect); $i++) {
|
for ($i = 0; $i < count($vect); ++$i) {
|
||||||
$sum += $vect[$i] ** 2;
|
$sum += $vect[$i] ** 2;
|
||||||
}
|
}
|
||||||
|
|
||||||
$sum = sqrt($sum);
|
$sum = sqrt($sum);
|
||||||
for ($i=0; $i<count($vect); $i++) {
|
for ($i = 0; $i < count($vect); ++$i) {
|
||||||
$vect[$i] /= $sum;
|
$vect[$i] /= $sum;
|
||||||
}
|
}
|
||||||
|
|
||||||
return $vect;
|
return $vect;
|
||||||
}, $vectors->transpose()->toArray());
|
}, $vectors->transpose()->toArray());
|
||||||
|
|
||||||
return $vectors;
|
return $vectors;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Return the real parts of the eigenvalues<br>
|
* Return the real parts of the eigenvalues<br>
|
||||||
* d = real(diag(D));
|
* d = real(diag(D));
|
||||||
@ -856,7 +866,6 @@ class EigenvalueDecomposition
|
|||||||
return $this->d;
|
return $this->d;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Return the imaginary parts of the eigenvalues <br>
|
* Return the imaginary parts of the eigenvalues <br>
|
||||||
* d = imag(diag(D))
|
* d = imag(diag(D))
|
||||||
@ -868,7 +877,6 @@ class EigenvalueDecomposition
|
|||||||
return $this->e;
|
return $this->e;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Return the block diagonal eigenvalue matrix
|
* Return the block diagonal eigenvalue matrix
|
||||||
*
|
*
|
||||||
@ -876,15 +884,19 @@ class EigenvalueDecomposition
|
|||||||
*/
|
*/
|
||||||
public function getDiagonalEigenvalues()
|
public function getDiagonalEigenvalues()
|
||||||
{
|
{
|
||||||
|
$D = [];
|
||||||
|
|
||||||
for ($i = 0; $i < $this->n; ++$i) {
|
for ($i = 0; $i < $this->n; ++$i) {
|
||||||
$D[$i] = array_fill(0, $this->n, 0.0);
|
$D[$i] = array_fill(0, $this->n, 0.0);
|
||||||
$D[$i][$i] = $this->d[$i];
|
$D[$i][$i] = $this->d[$i];
|
||||||
if ($this->e[$i] == 0) {
|
if ($this->e[$i] == 0) {
|
||||||
continue;
|
continue;
|
||||||
}
|
}
|
||||||
|
|
||||||
$o = ($this->e[$i] > 0) ? $i + 1 : $i - 1;
|
$o = ($this->e[$i] > 0) ? $i + 1 : $i - 1;
|
||||||
$D[$i][$o] = $this->e[$i];
|
$D[$i][$o] = $this->e[$i];
|
||||||
}
|
}
|
||||||
|
|
||||||
return $D;
|
return $D;
|
||||||
}
|
}
|
||||||
} // class EigenvalueDecomposition
|
} // class EigenvalueDecomposition
|
||||||
|
@ -1,4 +1,6 @@
|
|||||||
<?php declare(strict_types=1);
|
<?php
|
||||||
|
|
||||||
|
declare(strict_types=1);
|
||||||
/**
|
/**
|
||||||
* @package JAMA
|
* @package JAMA
|
||||||
*
|
*
|
||||||
@ -62,10 +64,11 @@ class LUDecomposition
|
|||||||
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* LU Decomposition constructor.
|
* Constructs Structure to access L, U and piv.
|
||||||
*
|
*
|
||||||
* @param $A Rectangular matrix
|
* @param Matrix $A Rectangular matrix
|
||||||
* @return Structure to access L, U and piv.
|
*
|
||||||
|
* @throws MatrixException
|
||||||
*/
|
*/
|
||||||
public function __construct(Matrix $A)
|
public function __construct(Matrix $A)
|
||||||
{
|
{
|
||||||
@ -81,7 +84,7 @@ class LUDecomposition
|
|||||||
$this->piv[$i] = $i;
|
$this->piv[$i] = $i;
|
||||||
}
|
}
|
||||||
$this->pivsign = 1;
|
$this->pivsign = 1;
|
||||||
$LUrowi = $LUcolj = [];
|
$LUcolj = [];
|
||||||
|
|
||||||
// Outer loop.
|
// Outer loop.
|
||||||
for ($j = 0; $j < $this->n; ++$j) {
|
for ($j = 0; $j < $this->n; ++$j) {
|
||||||
@ -102,7 +105,7 @@ class LUDecomposition
|
|||||||
}
|
}
|
||||||
// Find pivot and exchange if necessary.
|
// Find pivot and exchange if necessary.
|
||||||
$p = $j;
|
$p = $j;
|
||||||
for ($i = $j+1; $i < $this->m; ++$i) {
|
for ($i = $j + 1; $i < $this->m; ++$i) {
|
||||||
if (abs($LUcolj[$i]) > abs($LUcolj[$p])) {
|
if (abs($LUcolj[$i]) > abs($LUcolj[$p])) {
|
||||||
$p = $i;
|
$p = $i;
|
||||||
}
|
}
|
||||||
@ -120,7 +123,7 @@ class LUDecomposition
|
|||||||
}
|
}
|
||||||
// Compute multipliers.
|
// Compute multipliers.
|
||||||
if (($j < $this->m) && ($this->LU[$j][$j] != 0.0)) {
|
if (($j < $this->m) && ($this->LU[$j][$j] != 0.0)) {
|
||||||
for ($i = $j+1; $i < $this->m; ++$i) {
|
for ($i = $j + 1; $i < $this->m; ++$i) {
|
||||||
$this->LU[$i][$j] /= $this->LU[$j][$j];
|
$this->LU[$i][$j] /= $this->LU[$j][$j];
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -129,9 +132,9 @@ class LUDecomposition
 
 
     /**
      * Get lower triangular factor.
      *
-     * @return array Lower triangular factor
+     * @return Matrix Lower triangular factor
      */
     public function getL()
     {
@@ -152,9 +155,9 @@ class LUDecomposition
 
 
     /**
      * Get upper triangular factor.
      *
-     * @return array Upper triangular factor
+     * @return Matrix Upper triangular factor
      */
     public function getU()
     {
@@ -173,9 +176,9 @@ class LUDecomposition
 
 
     /**
      * Return pivot permutation vector.
      *
      * @return array Pivot vector
      */
     public function getPivot()
     {
@@ -184,9 +187,9 @@ class LUDecomposition
 
 
     /**
      * Alias for getPivot
      *
      * @see getPivot
      */
     public function getDoublePivot()
     {
@@ -195,9 +198,9 @@ class LUDecomposition
 
 
     /**
      * Is the matrix nonsingular?
      *
      * @return true if U, and hence A, is nonsingular.
      */
     public function isNonsingular()
     {
@@ -206,31 +209,35 @@ class LUDecomposition
                 return false;
             }
         }
+
         return true;
     } // function isNonsingular()
 
 
     /**
      * Count determinants
      *
-     * @return array d matrix deterninat
+     * @return float|int d matrix determinant
+     *
+     * @throws MatrixException
      */
     public function det()
     {
-        if ($this->m == $this->n) {
-            $d = $this->pivsign;
-            for ($j = 0; $j < $this->n; ++$j) {
-                $d *= $this->LU[$j][$j];
-            }
-            return $d;
-        } else {
+        if ($this->m !== $this->n) {
             throw MatrixException::notSquareMatrix();
         }
+
+        $d = $this->pivsign;
+        for ($j = 0; $j < $this->n; ++$j) {
+            $d *= $this->LU[$j][$j];
+        }
+
+        return $d;
     } // function det()
 
 
     /**
      * Solve A*X = B
      *
      * @param Matrix $B A Matrix with as many rows as A and any number of columns.
      *
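
A minimal sketch of the formula the rewritten det() implements: the determinant is the pivot sign times the product of the diagonal of the packed LU factor. The 2x2 values below are made up for illustration, not taken from the patch.

<?php
declare(strict_types=1);

// For A = [[4, 3], [6, 3]], partial pivoting swaps the rows once (pivsign = -1)
// and the packed LU factor stores U on and above the diagonal.
$LU = [
    [6.0, 3.0],
    [2.0 / 3.0, 1.0],
];
$pivsign = -1;

$d = $pivsign;
for ($j = 0; $j < count($LU); ++$j) {
    $d *= $LU[$j][$j];
}

echo $d; // -6, which matches det([[4, 3], [6, 3]]) = 4*3 - 3*6
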
@@ -244,23 +251,23 @@ class LUDecomposition
             throw MatrixException::notSquareMatrix();
         }
 
-        if (! $this->isNonsingular()) {
+        if (!$this->isNonsingular()) {
             throw MatrixException::singularMatrix();
         }
 
         // Copy right hand side with pivoting
         $nx = $B->getColumns();
-        $X = $this->getSubMatrix($B->toArray(), $this->piv, 0, $nx-1);
+        $X = $this->getSubMatrix($B->toArray(), $this->piv, 0, $nx - 1);
         // Solve L*Y = B(piv,:)
         for ($k = 0; $k < $this->n; ++$k) {
-            for ($i = $k+1; $i < $this->n; ++$i) {
+            for ($i = $k + 1; $i < $this->n; ++$i) {
                 for ($j = 0; $j < $nx; ++$j) {
                     $X[$i][$j] -= $X[$k][$j] * $this->LU[$i][$k];
                 }
             }
         }
         // Solve U*X = Y;
-        for ($k = $this->n-1; $k >= 0; --$k) {
+        for ($k = $this->n - 1; $k >= 0; --$k) {
             for ($j = 0; $j < $nx; ++$j) {
                 $X[$k][$j] /= $this->LU[$k][$k];
             }
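
A possible usage sketch for the solve() routine touched above, assuming Composer autoloading and the Phpml\Math namespaces used in this repository; print_r is used so the sketch does not depend on the exact return shape.

<?php
declare(strict_types=1);

use Phpml\Math\LinearAlgebra\LUDecomposition;
use Phpml\Math\Matrix;

// Solves A * X = B: forward substitution on L, then back substitution on U,
// the two loops reformatted in the hunk above.
$A = new Matrix([
    [4, 3],
    [6, 3],
]);
$B = new Matrix([[10], [12]]);

$lu = new LUDecomposition($A);
$X = $lu->solve($B);

print_r($X); // expected solution: x1 = 1, x2 = 2
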
@@ -274,9 +281,10 @@ class LUDecomposition
     } // function solve()
 
     /**
-     * @param Matrix $matrix
-     * @param int $j0
-     * @param int $jF
+     * @param array $matrix
+     * @param array $RL
+     * @param int $j0
+     * @param int $jF
      *
      * @return array
      */
@@ -284,11 +292,11 @@ class LUDecomposition
     {
         $m = count($RL);
         $n = $jF - $j0;
-        $R = array_fill(0, $m, array_fill(0, $n+1, 0.0));
+        $R = array_fill(0, $m, array_fill(0, $n + 1, 0.0));
 
         for ($i = 0; $i < $m; ++$i) {
             for ($j = $j0; $j <= $jF; ++$j) {
-                $R[$i][$j - $j0]= $matrix[ $RL[$i] ][$j];
+                $R[$i][$j - $j0] = $matrix[$RL[$i]][$j];
             }
         }
 
@@ -139,6 +139,7 @@ class Matrix
         }
 
         $lu = new LUDecomposition($this);
+
         return $this->determinant = $lu->det();
     }
 
@@ -232,6 +233,8 @@ class Matrix
      * Element-wise addition of the matrix with another one
      *
      * @param Matrix $other
+     *
+     * @return Matrix
      */
     public function add(Matrix $other)
     {
@@ -242,6 +245,8 @@ class Matrix
      * Element-wise subtracting of another matrix from this one
      *
      * @param Matrix $other
+     *
+     * @return Matrix
      */
     public function subtract(Matrix $other)
     {
@@ -252,7 +257,9 @@ class Matrix
      * Element-wise addition or substraction depending on the given sign parameter
      *
      * @param Matrix $other
-     * @param type $sign
+     * @param int $sign
+     *
+     * @return Matrix
      */
     protected function _add(Matrix $other, $sign = 1)
     {
@@ -260,13 +267,13 @@ class Matrix
         $a2 = $other->toArray();
 
         $newMatrix = [];
-        for ($i=0; $i < $this->rows; $i++) {
-            for ($k=0; $k < $this->columns; $k++) {
+        for ($i = 0; $i < $this->rows; ++$i) {
+            for ($k = 0; $k < $this->columns; ++$k) {
                 $newMatrix[$i][$k] = $a1[$i][$k] + $sign * $a2[$i][$k];
             }
         }
 
-        return new Matrix($newMatrix, false);
+        return new self($newMatrix, false);
     }
 
     /**
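
To show what _add() produces through its public wrappers, a small usage sketch assuming Composer autoloading; add() and subtract() keep the operand dimensions and now advertise their Matrix return type.

<?php
declare(strict_types=1);

use Phpml\Math\Matrix;

$a = new Matrix([[1, 2], [3, 4]]);
$b = new Matrix([[10, 20], [30, 40]]);

// Element-wise: each cell of $b is added to (or subtracted from) the matching cell of $a.
print_r($a->add($b)->toArray());      // [[11, 22], [33, 44]]
print_r($a->subtract($b)->toArray()); // [[-9, -18], [-27, -36]]
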
@@ -295,7 +302,7 @@ class Matrix
     protected function getIdentity()
     {
         $array = array_fill(0, $this->rows, array_fill(0, $this->columns, 0));
-        for ($i=0; $i < $this->rows; $i++) {
+        for ($i = 0; $i < $this->rows; ++$i) {
             $array[$i][$i] = 1;
         }
 
@@ -345,7 +352,7 @@ class Matrix
      */
     public static function transposeArray(array $array)
     {
-        return (new Matrix($array, false))->transpose()->toArray();
+        return (new self($array, false))->transpose()->toArray();
     }
 
     /**
@@ -359,8 +366,8 @@ class Matrix
      */
     public static function dot(array $array1, array $array2)
     {
-        $m1 = new Matrix($array1, false);
-        $m2 = new Matrix($array2, false);
+        $m1 = new self($array1, false);
+        $m2 = new self($array2, false);
 
         return $m1->multiply($m2->transpose())->toArray()[0];
    }
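
transposeArray() only changes internally (new self instead of new Matrix); a quick sketch of its unchanged behaviour, assuming Composer autoloading.

<?php
declare(strict_types=1);

use Phpml\Math\Matrix;

// Rows become columns and vice versa.
print_r(Matrix::transposeArray([
    [1, 2],
    [3, 4],
])); // [[1, 3], [2, 4]]
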
@@ -13,7 +13,7 @@ class Covariance
      *
      * @param array $x
      * @param array $y
      * @param bool $sample
      * @param float $meanX
      * @param float $meanY
      *
@@ -57,14 +57,18 @@ class Covariance
      * Calculates covariance of two dimensions, i and k in the given data.
      *
      * @param array $data
      * @param int $i
      * @param int $k
-     * @param type $sample
-     * @param int $n
+     * @param bool $sample
      * @param float $meanX
      * @param float $meanY
+     *
+     * @return float
+     *
+     * @throws InvalidArgumentException
+     * @throws \Exception
      */
-    public static function fromDataset(array $data, int $i, int $k, $sample = true, float $meanX = null, float $meanY = null)
+    public static function fromDataset(array $data, int $i, int $k, bool $sample = true, float $meanX = null, float $meanY = null)
     {
         if (empty($data)) {
             throw InvalidArgumentException::arrayCantBeEmpty();
@@ -123,7 +127,8 @@ class Covariance
     /**
      * Returns the covariance matrix of n-dimensional data
      *
      * @param array $data
+     * @param array|null $means
      *
      * @return array
      */
@@ -133,19 +138,20 @@ class Covariance
 
         if ($means === null) {
             $means = [];
-            for ($i=0; $i < $n; $i++) {
+            for ($i = 0; $i < $n; ++$i) {
                 $means[] = Mean::arithmetic(array_column($data, $i));
             }
         }
 
         $cov = [];
-        for ($i=0; $i < $n; $i++) {
-            for ($k=0; $k < $n; $k++) {
+        for ($i = 0; $i < $n; ++$i) {
+            for ($k = 0; $k < $n; ++$k) {
                 if ($i > $k) {
                     $cov[$i][$k] = $cov[$k][$i];
                 } else {
-                    $cov[$i][$k] = Covariance::fromDataset(
-                        $data, $i, $k, true, $means[$i], $means[$k]);
+                    $cov[$i][$k] = self::fromDataset(
+                        $data, $i, $k, true, $means[$i], $means[$k]
+                    );
                 }
             }
         }
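
A usage sketch for the covariance-matrix helper documented above; the method name covarianceMatrix() is assumed from the surrounding class, and Composer autoloading is assumed. The loop fills the upper triangle via fromDataset() and mirrors it into the lower triangle.

<?php
declare(strict_types=1);

use Phpml\Math\Statistic\Covariance;

// Two perfectly correlated columns: y = 2x.
$data = [
    [1.0, 2.0],
    [2.0, 4.0],
    [3.0, 6.0],
];

print_r(Covariance::covarianceMatrix($data));
// Sample covariance (n - 1 denominator): [[1, 2], [2, 4]]
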
@@ -31,7 +31,7 @@ class Gaussian
      *
      * @param float $value
      *
-     * @return type
+     * @return float|int
      */
     public function pdf(float $value)
     {
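
The corrected @return reflects that pdf() yields a numeric density. A plain-PHP sketch of the normal density formula, independent of the class (the helper name is made up):

<?php
declare(strict_types=1);

// Illustrative only: density of the normal distribution N($mean, $std^2) at $value.
function normalPdf(float $value, float $mean, float $std): float
{
    return exp(-(($value - $mean) ** 2) / (2 * $std ** 2)) / ($std * sqrt(2 * M_PI));
}

echo normalPdf(1.0, 0.0, 1.0); // ~0.24197
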
@@ -68,7 +68,7 @@ class Mean
      */
     private static function checkArrayLength(array $array)
     {
-        if (0 == count($array)) {
+        if (empty($array)) {
             throw InvalidArgumentException::arrayCantBeEmpty();
         }
     }
@@ -112,8 +112,8 @@ class ClassificationReport
     private function computeAverage()
     {
         foreach (['precision', 'recall', 'f1score'] as $metric) {
-            $values = array_filter($this->$metric);
-            if (0 == count($values)) {
+            $values = array_filter($this->{$metric});
+            if (empty($values)) {
                 $this->average[$metric] = 0.0;
                 continue;
             }
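
The braces in $this->{$metric} only make the dynamic property access explicit; behaviour is unchanged, as this self-contained sketch shows.

<?php
declare(strict_types=1);

$report = new class {
    public $precision = [0.5, 0.0, 1.0];
};
$metric = 'precision';

// Both spellings resolve to the same property.
var_dump($report->{$metric} === $report->$metric); // bool(true)
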
@@ -11,7 +11,8 @@ class ModelManager
 {
     /**
      * @param Estimator $estimator
      * @param string $filepath
+     *
      * @throws FileException
      * @throws SerializeException
      */
@@ -23,7 +24,7 @@ class ModelManager
 
         $serialized = serialize($estimator);
         if (empty($serialized)) {
-            throw SerializeException::cantSerialize(get_type($estimator));
+            throw SerializeException::cantSerialize(gettype($estimator));
         }
 
         $result = file_put_contents($filepath, $serialized, LOCK_EX);
@@ -34,7 +35,9 @@ class ModelManager
 
     /**
      * @param string $filepath
+     *
      * @return Estimator
+     *
      * @throws FileException
      * @throws SerializeException
      */
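
A usage sketch for ModelManager, assuming the saveToFile()/restoreFromFile() method names from the library's public API, a KNearestNeighbors estimator, and a placeholder file path.

<?php
declare(strict_types=1);

use Phpml\Classification\KNearestNeighbors;
use Phpml\ModelManager;

$classifier = new KNearestNeighbors();
$classifier->train(
    [[1, 3], [1, 4], [2, 4], [3, 1], [4, 1], [4, 2]],
    ['a', 'a', 'a', 'b', 'b', 'b']
);

// saveToFile() serializes the estimator; restoreFromFile() unserializes it.
// Both throw the FileException/SerializeException documented above on failure.
$manager = new ModelManager();
$manager->saveToFile($classifier, '/tmp/knn.model');

$restored = $manager->restoreFromFile('/tmp/knn.model');
echo $restored->predict([3, 2]); // b
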
@@ -44,10 +44,12 @@ class Backpropagation implements Training
      */
     public function train(array $samples, array $targets, float $desiredError = 0.001, int $maxIterations = 10000)
     {
+        $samplesCount = count($samples);
+
         for ($i = 0; $i < $maxIterations; ++$i) {
             $resultsWithinError = $this->trainSamples($samples, $targets, $desiredError);
 
-            if ($resultsWithinError == count($samples)) {
+            if ($resultsWithinError === $samplesCount) {
                 break;
             }
         }
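
The new $samplesCount variable hoists count() out of the loop so the strict === comparison has a stable integer operand; a trivial standalone sketch of the pattern (all values made up).

<?php
declare(strict_types=1);

$samples = [[0, 0], [0, 1], [1, 0], [1, 1]];
$samplesCount = count($samples); // counted once, before the loop

for ($i = 0; $i < 10; ++$i) {
    $resultsWithinError = 4; // stand-in for trainSamples(): pretend every sample fits

    if ($resultsWithinError === $samplesCount) {
        break; // training stops early once every sample is within the desired error
    }
}

echo $i; // 0
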
@@ -84,7 +84,7 @@ class Normalizer implements Preprocessor
         $this->fit($samples);
 
         foreach ($samples as &$sample) {
-            $this->$method($sample);
+            $this->{$method}($sample);
         }
     }
 
@@ -130,6 +130,8 @@ class SupportVectorMachine
 
     /**
      * @param string $binPath
+     *
+     * @return $this
      */
     public function setBinPath(string $binPath)
     {
@@ -140,6 +142,8 @@ class SupportVectorMachine
 
     /**
      * @param string $varPath
+     *
+     * @return $this
      */
     public function setVarPath(string $varPath)
     {
@@ -230,8 +234,8 @@ class SupportVectorMachine
     }
 
     /**
-     * @param $trainingSetFileName
-     * @param $modelFileName
+     * @param string $trainingSetFileName
+     * @param string $modelFileName
      *
      * @return string
      */
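
The added @return $this annotations document that the setters are chainable; a usage sketch assuming the constructor's Type and Kernel constants and placeholder paths for the libsvm binaries and the scratch directory.

<?php
declare(strict_types=1);

use Phpml\SupportVectorMachine\Kernel;
use Phpml\SupportVectorMachine\SupportVectorMachine;
use Phpml\SupportVectorMachine\Type;

$svm = new SupportVectorMachine(Type::C_SVC, Kernel::RBF);

// Fluent configuration, now reflected by the @return $this docblocks.
$svm->setBinPath('/usr/local/libsvm/bin/')
    ->setVarPath('/tmp/');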