From 38deaaeb2ed2ab2d5b3e7fe3e69fcce3c174c8ce Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Pablo=20Jo=C3=A1n=20Iglesias?=
Date: Tue, 26 Jul 2016 02:13:52 -0400
Subject: [PATCH 01/19] testScalarProduct check for non numeric values (#13)

* testScalarProduct check for non numeric values

test for non numeric values.

* updating pr #13 using global namespace for stdClass
---
 tests/Phpml/Math/ProductTest.php | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/tests/Phpml/Math/ProductTest.php b/tests/Phpml/Math/ProductTest.php
index aba0ff2..05ca5b5 100644
--- a/tests/Phpml/Math/ProductTest.php
+++ b/tests/Phpml/Math/ProductTest.php
@@ -13,5 +13,8 @@ class ProductTest extends \PHPUnit_Framework_TestCase
         $this->assertEquals(10, Product::scalar([2, 3], [-1, 4]));
         $this->assertEquals(-0.1, Product::scalar([1, 4, 1], [-2, 0.5, -0.1]));
         $this->assertEquals(8, Product::scalar([2], [4]));
+
+        //test for non numeric values
+        $this->assertEquals(0, Product::scalar(['', null, [], new \stdClass()], [null]));
     }
 }

From bbbf5cfc9dc46f58776954a18c0f60e342410a17 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Pablo=20Jo=C3=A1n=20Iglesias?=
Date: Tue, 26 Jul 2016 02:14:57 -0400
Subject: [PATCH 02/19] Foreach body should be wrapped in an if statement (#14)

unit test to go with commit
---
 src/Phpml/Math/Product.php | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/src/Phpml/Math/Product.php b/src/Phpml/Math/Product.php
index 70accb9..678dd71 100644
--- a/src/Phpml/Math/Product.php
+++ b/src/Phpml/Math/Product.php
@@ -16,7 +16,9 @@ class Product
     {
         $product = 0;
         foreach ($a as $index => $value) {
-            $product += $value * $b[$index];
+            if (is_numeric($value) && is_numeric($b[$index])) {
+                $product += $value * $b[$index];
+            }
         }

         return $product;

From 2f5b09018870753130a7c5a8089157d401c1e348 Mon Sep 17 00:00:00 2001
From: Arkadiusz Kondas
Date: Tue, 26 Jul 2016 21:57:15 +0200
Subject: [PATCH 03/19] create contributing guide

---
 CONTRIBUTING.md | 43 +++++++++++++++++++++++++++++++++++++++++++
 README.md       |  7 +------
 docs/index.md   |  6 +-----
 3 files changed, 45 insertions(+), 11 deletions(-)
 create mode 100644 CONTRIBUTING.md

diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
new file mode 100644
index 0000000..8084dc8
--- /dev/null
+++ b/CONTRIBUTING.md
@@ -0,0 +1,43 @@
+# Contributing to PHP-ML
+
+PHP-ML is an open source project. If you'd like to contribute, please read the following text. Before I can merge your
+Pull-Request, here are some guidelines that you need to follow. These guidelines exist not to annoy you, but to keep the
+code base clean, unified and future-proof.
+
+## Branch
+
+You should only open pull requests against the develop branch.
+
+## Unit-Tests
+
+Please try to add a test for your pull-request. You can run the unit-tests by calling:
+
+```
+bin/phpunit
+```
+
+## Travis
+
+GitHub automatically runs your pull request through Travis CI against PHP 7.
+If you break the tests, I cannot merge your code, so please make sure that your code is working
+before opening up a Pull-Request.
+
+## Merge
+
+Please allow me time to review your pull requests. I will do my best to review everything as fast as possible, but I cannot always live up to my own expectations.
+
+## Coding Standards
+
+When contributing code to PHP-ML, you must follow its coding standards. To make a long story short, here is the golden tool:
+
+```
+tools/php-cs-fixer.sh
+```
+
+This script runs PHP Coding Standards Fixer with the `--level=symfony` param.
+ +More about PHP-CS-Fixer: [http://cs.sensiolabs.org/](http://cs.sensiolabs.org/) + +--- + +Thank you very much again for your contribution! diff --git a/README.md b/README.md index e34a69c..07ce099 100644 --- a/README.md +++ b/README.md @@ -84,17 +84,12 @@ Example scripts are available in a separate repository [php-ai/php-ml-examples]( * [Matrix](http://php-ml.readthedocs.io/en/latest/math/matrix/) * [Statistic](http://php-ml.readthedocs.io/en/latest/math/statistic/) - ## Contribute - Issue Tracker: github.com/php-ai/php-ml/issues - Source Code: github.com/php-ai/php-ml -After installation, you can launch the test suite in project root directory (you will need to install dev requirements with Composer) - -``` -bin/phpunit -``` +You can find more about contributing in [CONTRIBUTING.md](CONTRIBUTING.md). ## License diff --git a/docs/index.md b/docs/index.md index 38eca65..6b0ce8c 100644 --- a/docs/index.md +++ b/docs/index.md @@ -90,11 +90,7 @@ Example scripts are available in a separate repository [php-ai/php-ml-examples]( - Issue Tracker: github.com/php-ai/php-ml/issues - Source Code: github.com/php-ai/php-ml -After installation, you can launch the test suite in project root directory (you will need to install dev requirements with Composer) - -``` -bin/phpunit -``` +You can find more about contributing in [CONTRIBUTING.md](CONTRIBUTING.md). ## License From 637fd613b84675d0a8cc12449971bf51273c4432 Mon Sep 17 00:00:00 2001 From: Arkadiusz Kondas Date: Tue, 2 Aug 2016 13:07:47 +0200 Subject: [PATCH 04/19] implement activation function for neural network --- .../NeuralNetwork/ActivationFunction.php | 15 +++++++ .../ActivationFunction/BinaryStep.php | 20 ++++++++++ .../ActivationFunction/Gaussian.php | 20 ++++++++++ .../ActivationFunction/HyperbolicTangent.php | 33 ++++++++++++++++ .../ActivationFunction/Sigmoid.php | 33 ++++++++++++++++ src/Phpml/NeuralNetwork/Node/Neuron.php | 9 +++++ tests/Phpml/Math/ProductTest.php | 2 +- .../ActivationFunction/BinaryStepTest.php | 35 +++++++++++++++++ .../ActivationFunction/GaussianTest.php | 37 ++++++++++++++++++ .../HyperboliTangentTest.php | 39 +++++++++++++++++++ .../ActivationFunction/SigmoidTest.php | 39 +++++++++++++++++++ 11 files changed, 281 insertions(+), 1 deletion(-) create mode 100644 src/Phpml/NeuralNetwork/ActivationFunction.php create mode 100644 src/Phpml/NeuralNetwork/ActivationFunction/BinaryStep.php create mode 100644 src/Phpml/NeuralNetwork/ActivationFunction/Gaussian.php create mode 100644 src/Phpml/NeuralNetwork/ActivationFunction/HyperbolicTangent.php create mode 100644 src/Phpml/NeuralNetwork/ActivationFunction/Sigmoid.php create mode 100644 src/Phpml/NeuralNetwork/Node/Neuron.php create mode 100644 tests/Phpml/NeuralNetwork/ActivationFunction/BinaryStepTest.php create mode 100644 tests/Phpml/NeuralNetwork/ActivationFunction/GaussianTest.php create mode 100644 tests/Phpml/NeuralNetwork/ActivationFunction/HyperboliTangentTest.php create mode 100644 tests/Phpml/NeuralNetwork/ActivationFunction/SigmoidTest.php diff --git a/src/Phpml/NeuralNetwork/ActivationFunction.php b/src/Phpml/NeuralNetwork/ActivationFunction.php new file mode 100644 index 0000000..9b7d984 --- /dev/null +++ b/src/Phpml/NeuralNetwork/ActivationFunction.php @@ -0,0 +1,15 @@ += 0 ? 
1.0 : 0.0; + } +} diff --git a/src/Phpml/NeuralNetwork/ActivationFunction/Gaussian.php b/src/Phpml/NeuralNetwork/ActivationFunction/Gaussian.php new file mode 100644 index 0000000..cdbe4ae --- /dev/null +++ b/src/Phpml/NeuralNetwork/ActivationFunction/Gaussian.php @@ -0,0 +1,20 @@ +beta = $beta; + } + + /** + * @param float|int $value + * + * @return float + */ + public function compute($value): float + { + return tanh($this->beta * $value); + } +} diff --git a/src/Phpml/NeuralNetwork/ActivationFunction/Sigmoid.php b/src/Phpml/NeuralNetwork/ActivationFunction/Sigmoid.php new file mode 100644 index 0000000..ee7b7be --- /dev/null +++ b/src/Phpml/NeuralNetwork/ActivationFunction/Sigmoid.php @@ -0,0 +1,33 @@ +beta = $beta; + } + + /** + * @param float|int $value + * + * @return float + */ + public function compute($value): float + { + return 1 / (1 + exp(-$this->beta * $value)); + } +} diff --git a/src/Phpml/NeuralNetwork/Node/Neuron.php b/src/Phpml/NeuralNetwork/Node/Neuron.php new file mode 100644 index 0000000..52b38e7 --- /dev/null +++ b/src/Phpml/NeuralNetwork/Node/Neuron.php @@ -0,0 +1,9 @@ +assertEquals(10, Product::scalar([2, 3], [-1, 4])); $this->assertEquals(-0.1, Product::scalar([1, 4, 1], [-2, 0.5, -0.1])); $this->assertEquals(8, Product::scalar([2], [4])); - + //test for non numeric values $this->assertEquals(0, Product::scalar(['', null, [], new \stdClass()], [null])); } diff --git a/tests/Phpml/NeuralNetwork/ActivationFunction/BinaryStepTest.php b/tests/Phpml/NeuralNetwork/ActivationFunction/BinaryStepTest.php new file mode 100644 index 0000000..c074955 --- /dev/null +++ b/tests/Phpml/NeuralNetwork/ActivationFunction/BinaryStepTest.php @@ -0,0 +1,35 @@ +assertEquals($expected, $binaryStep->compute($value)); + } + + /** + * @return array + */ + public function binaryStepProvider() + { + return [ + [1, 1], + [1, 0], + [0, -0.1], + ]; + } +} diff --git a/tests/Phpml/NeuralNetwork/ActivationFunction/GaussianTest.php b/tests/Phpml/NeuralNetwork/ActivationFunction/GaussianTest.php new file mode 100644 index 0000000..4780a53 --- /dev/null +++ b/tests/Phpml/NeuralNetwork/ActivationFunction/GaussianTest.php @@ -0,0 +1,37 @@ +assertEquals($expected, $gaussian->compute($value), '', 0.001); + } + + /** + * @return array + */ + public function gaussianProvider() + { + return [ + [0.367, 1], + [1, 0], + [0.367, -1], + [0, 3], + [0, -3], + ]; + } +} diff --git a/tests/Phpml/NeuralNetwork/ActivationFunction/HyperboliTangentTest.php b/tests/Phpml/NeuralNetwork/ActivationFunction/HyperboliTangentTest.php new file mode 100644 index 0000000..92f4b97 --- /dev/null +++ b/tests/Phpml/NeuralNetwork/ActivationFunction/HyperboliTangentTest.php @@ -0,0 +1,39 @@ +assertEquals($expected, $tanh->compute($value), '', 0.001); + } + + /** + * @return array + */ + public function tanhProvider() + { + return [ + [1.0, 0.761, 1], + [1.0, 0, 0], + [1.0, 1, 4], + [1.0, -1, -4], + [0.5, 0.462, 1], + [0.3, 0, 0], + ]; + } +} diff --git a/tests/Phpml/NeuralNetwork/ActivationFunction/SigmoidTest.php b/tests/Phpml/NeuralNetwork/ActivationFunction/SigmoidTest.php new file mode 100644 index 0000000..c84a20b --- /dev/null +++ b/tests/Phpml/NeuralNetwork/ActivationFunction/SigmoidTest.php @@ -0,0 +1,39 @@ +assertEquals($expected, $sigmoid->compute($value), '', 0.001); + } + + /** + * @return array + */ + public function sigmoidProvider() + { + return [ + [1.0, 1, 7.25], + [2.0, 1, 3.75], + [1.0, 0.5, 0], + [0.5, 0.5, 0], + [1.0, 0, -7.25], + [2.0, 0, -3.75], + ]; + } +} From f186aa9c0bb9a46b619f7141d805c2da27959b00 Mon Sep 
17 00:00:00 2001 From: Arkadiusz Kondas Date: Tue, 2 Aug 2016 13:23:58 +0200 Subject: [PATCH 05/19] extract functions from loops and remove unused code --- src/Phpml/Clustering/KMeans/Space.php | 8 ++++---- src/Phpml/Math/Matrix.php | 3 ++- 2 files changed, 6 insertions(+), 5 deletions(-) diff --git a/src/Phpml/Clustering/KMeans/Space.php b/src/Phpml/Clustering/KMeans/Space.php index 2904e2f..89a0d09 100644 --- a/src/Phpml/Clustering/KMeans/Space.php +++ b/src/Phpml/Clustering/KMeans/Space.php @@ -61,7 +61,7 @@ class Space extends SplObjectStorage */ public function addPoint(array $coordinates, $data = null) { - return $this->attach($this->newPoint($coordinates), $data); + $this->attach($this->newPoint($coordinates), $data); } /** @@ -74,7 +74,7 @@ class Space extends SplObjectStorage throw new InvalidArgumentException('can only attach points to spaces'); } - return parent::attach($point, $data); + parent::attach($point, $data); } /** @@ -230,8 +230,8 @@ class Space extends SplObjectStorage protected function initializeKMPPClusters(int $clustersNumber) { $clusters = []; - $position = rand(1, count($this)); - for ($i = 1, $this->rewind(); $i < $position && $this->valid(); $i++, $this->next()); + $this->rewind(); + $clusters[] = new Cluster($this, $this->current()->getCoordinates()); $distances = new SplObjectStorage(); diff --git a/src/Phpml/Math/Matrix.php b/src/Phpml/Math/Matrix.php index 208b10d..808472c 100644 --- a/src/Phpml/Math/Matrix.php +++ b/src/Phpml/Math/Matrix.php @@ -193,7 +193,8 @@ class Matrix $product = []; $multiplier = $matrix->toArray(); for ($i = 0; $i < $this->rows; ++$i) { - for ($j = 0; $j < $matrix->getColumns(); ++$j) { + $columns = $matrix->getColumns(); + for ($j = 0; $j < $columns; ++$j) { $product[$i][$j] = 0; for ($k = 0; $k < $this->columns; ++$k) { $product[$i][$j] += $this->matrix[$i][$k] * $multiplier[$k][$j]; From 7062ee29e14b9f2571b29f79947f27c4172d21c8 Mon Sep 17 00:00:00 2001 From: Arkadiusz Kondas Date: Tue, 2 Aug 2016 20:30:20 +0200 Subject: [PATCH 06/19] add Neuron and Synapse classes --- src/Phpml/NeuralNetwork/Node.php | 13 ++++ src/Phpml/NeuralNetwork/Node/Neuron.php | 67 +++++++++++++++++- src/Phpml/NeuralNetwork/Node/Synapse.php | 70 +++++++++++++++++++ tests/Phpml/NeuralNetwork/Node/NeuronTest.php | 65 +++++++++++++++++ .../Phpml/NeuralNetwork/Node/SynapseTest.php | 52 ++++++++++++++ 5 files changed, 266 insertions(+), 1 deletion(-) create mode 100644 src/Phpml/NeuralNetwork/Node.php create mode 100644 src/Phpml/NeuralNetwork/Node/Synapse.php create mode 100644 tests/Phpml/NeuralNetwork/Node/NeuronTest.php create mode 100644 tests/Phpml/NeuralNetwork/Node/SynapseTest.php diff --git a/src/Phpml/NeuralNetwork/Node.php b/src/Phpml/NeuralNetwork/Node.php new file mode 100644 index 0000000..77e0c00 --- /dev/null +++ b/src/Phpml/NeuralNetwork/Node.php @@ -0,0 +1,13 @@ +activationFunction = $activationFunction ?: new ActivationFunction\Sigmoid(); + $this->synapses = []; + $this->output = 0; + } + + /** + * @param Synapse $synapse + */ + public function addSynapse(Synapse $synapse) + { + $this->synapses[] = $synapse; + } + + /** + * @return Synapse[] + */ + public function getSynapses() + { + return $this->synapses; + } + + /** + * @return float + */ + public function getOutput(): float + { + if (0 === $this->output) { + $sum = 0; + foreach ($this->synapses as $synapse) { + $sum += $synapse->getOutput(); + } + + $this->output = $this->activationFunction->compute($sum); + } + + return $this->output; + } + + public function refresh() + { + $this->output 
= 0; + } } diff --git a/src/Phpml/NeuralNetwork/Node/Synapse.php b/src/Phpml/NeuralNetwork/Node/Synapse.php new file mode 100644 index 0000000..923c4ff --- /dev/null +++ b/src/Phpml/NeuralNetwork/Node/Synapse.php @@ -0,0 +1,70 @@ +node = $node; + $this->weight = $weight ?: $this->generateRandomWeight(); + } + + /** + * @return float + */ + protected function generateRandomWeight(): float + { + return 1 / rand(5, 25) * (rand(0, 1) ? -1 : 1); + } + + /** + * @return float + */ + public function getOutput(): float + { + return $this->weight * $this->node->getOutput(); + } + + /** + * @param float $delta + */ + public function changeWeight($delta) + { + $this->weight += $delta; + } + + /** + * @return float + */ + public function getWeight() + { + return $this->weight; + } + + /** + * @return Node + */ + public function getNode() + { + return $this->node; + } +} diff --git a/tests/Phpml/NeuralNetwork/Node/NeuronTest.php b/tests/Phpml/NeuralNetwork/Node/NeuronTest.php new file mode 100644 index 0000000..526041b --- /dev/null +++ b/tests/Phpml/NeuralNetwork/Node/NeuronTest.php @@ -0,0 +1,65 @@ +assertEquals([], $neuron->getSynapses()); + $this->assertEquals(0.5, $neuron->getOutput()); + } + + public function testNeuronActivationFunction() + { + $activationFunction = $this->getMock(BinaryStep::class); + $activationFunction->method('compute')->with(0)->willReturn($output = 0.69); + + $neuron = new Neuron($activationFunction); + + $this->assertEquals($output, $neuron->getOutput()); + } + + public function testNeuronWithSynapse() + { + $neuron = new Neuron(); + $neuron->addSynapse($synapse = $this->getSynapseMock()); + + $this->assertEquals([$synapse], $neuron->getSynapses()); + $this->assertEquals(0.88, $neuron->getOutput(), '', 0.01); + } + + public function testNeuronRefresh() + { + $neuron = new Neuron(); + $neuron->getOutput(); + $neuron->addSynapse($this->getSynapseMock()); + + $this->assertEquals(0.5, $neuron->getOutput(), '', 0.01); + + $neuron->refresh(); + + $this->assertEquals(0.88, $neuron->getOutput(), '', 0.01); + } + + /** + * @param int $output + * + * @return \PHPUnit_Framework_MockObject_MockObject + */ + private function getSynapseMock($output = 2) + { + $synapse = $this->getMock(Synapse::class, [], [], '', false); + $synapse->method('getOutput')->willReturn($output); + + return $synapse; + } +} diff --git a/tests/Phpml/NeuralNetwork/Node/SynapseTest.php b/tests/Phpml/NeuralNetwork/Node/SynapseTest.php new file mode 100644 index 0000000..41fc937 --- /dev/null +++ b/tests/Phpml/NeuralNetwork/Node/SynapseTest.php @@ -0,0 +1,52 @@ +getNodeMock($nodeOutput = 0.5); + + $synapse = new Synapse($node, $weight = 0.75); + + $this->assertEquals($node, $synapse->getNode()); + $this->assertEquals($weight, $synapse->getWeight()); + $this->assertEquals($weight * $nodeOutput, $synapse->getOutput()); + + $synapse = new Synapse($node); + + $this->assertInternalType('float', $synapse->getWeight()); + } + + public function testSynapseWeightChange() + { + $node = $this->getNodeMock(); + $synapse = new Synapse($node, $weight = 0.75); + $synapse->changeWeight(1.0); + + $this->assertEquals(1.75, $synapse->getWeight()); + + $synapse->changeWeight(-2.0); + + $this->assertEquals(-0.25, $synapse->getWeight()); + } + + /** + * @param int $output + * + * @return \PHPUnit_Framework_MockObject_MockObject + */ + private function getNodeMock($output = 1) + { + $node = $this->getMock(Neuron::class); + $node->method('getOutput')->willReturn($nodeOutput = 0.5); + + return $node; + } +} From 
95b29d40b1322c9add7e3aa219ee71291dfeb4e4 Mon Sep 17 00:00:00 2001 From: Arkadiusz Kondas Date: Fri, 5 Aug 2016 10:20:31 +0200 Subject: [PATCH 07/19] add Layer, Input and Bias for neutal network --- .../Exception/InvalidArgumentException.php | 8 +++ src/Phpml/NeuralNetwork/Layer.php | 49 ++++++++++++++++ src/Phpml/NeuralNetwork/Node/Bias.php | 18 ++++++ src/Phpml/NeuralNetwork/Node/Input.php | 39 +++++++++++++ src/Phpml/NeuralNetwork/Node/Neuron.php | 1 + .../Node/{ => Neuron}/Synapse.php | 4 +- tests/Phpml/NeuralNetwork/LayerTest.php | 56 +++++++++++++++++++ tests/Phpml/NeuralNetwork/Node/BiasTest.php | 17 ++++++ tests/Phpml/NeuralNetwork/Node/InputTest.php | 27 +++++++++ .../Node/{ => Neuron}/SynapseTest.php | 4 +- tests/Phpml/NeuralNetwork/Node/NeuronTest.php | 4 +- 11 files changed, 221 insertions(+), 6 deletions(-) create mode 100644 src/Phpml/NeuralNetwork/Layer.php create mode 100644 src/Phpml/NeuralNetwork/Node/Bias.php create mode 100644 src/Phpml/NeuralNetwork/Node/Input.php rename src/Phpml/NeuralNetwork/Node/{ => Neuron}/Synapse.php (94%) create mode 100644 tests/Phpml/NeuralNetwork/LayerTest.php create mode 100644 tests/Phpml/NeuralNetwork/Node/BiasTest.php create mode 100644 tests/Phpml/NeuralNetwork/Node/InputTest.php rename tests/Phpml/NeuralNetwork/Node/{ => Neuron}/SynapseTest.php (93%) diff --git a/src/Phpml/Exception/InvalidArgumentException.php b/src/Phpml/Exception/InvalidArgumentException.php index 798532d..d280296 100644 --- a/src/Phpml/Exception/InvalidArgumentException.php +++ b/src/Phpml/Exception/InvalidArgumentException.php @@ -73,4 +73,12 @@ class InvalidArgumentException extends \Exception { return new self(sprintf('Can\'t find %s language for StopWords', $language)); } + + /** + * @return InvalidArgumentException + */ + public static function invalidLayerNodeClass() + { + return new self('Layer node class must implement Node interface'); + } } diff --git a/src/Phpml/NeuralNetwork/Layer.php b/src/Phpml/NeuralNetwork/Layer.php new file mode 100644 index 0000000..6700164 --- /dev/null +++ b/src/Phpml/NeuralNetwork/Layer.php @@ -0,0 +1,49 @@ +nodes[] = new $nodeClass(); + } + } + + /** + * @param Node $node + */ + public function addNode(Node $node) + { + $this->nodes[] = $node; + } + + /** + * @return Node[] + */ + public function getNodes() + { + return $this->nodes; + } +} diff --git a/src/Phpml/NeuralNetwork/Node/Bias.php b/src/Phpml/NeuralNetwork/Node/Bias.php new file mode 100644 index 0000000..f19dcb6 --- /dev/null +++ b/src/Phpml/NeuralNetwork/Node/Bias.php @@ -0,0 +1,18 @@ +input = $input; + } + + /** + * @return float + */ + public function getOutput(): float + { + return $this->input; + } + + /** + * @param float $input + */ + public function setInput(float $input) + { + $this->input = $input; + } +} diff --git a/src/Phpml/NeuralNetwork/Node/Neuron.php b/src/Phpml/NeuralNetwork/Node/Neuron.php index 8d2fb4a..677831e 100644 --- a/src/Phpml/NeuralNetwork/Node/Neuron.php +++ b/src/Phpml/NeuralNetwork/Node/Neuron.php @@ -5,6 +5,7 @@ declare (strict_types = 1); namespace Phpml\NeuralNetwork\Node; use Phpml\NeuralNetwork\ActivationFunction; +use Phpml\NeuralNetwork\Node\Neuron\Synapse; use Phpml\NeuralNetwork\Node; class Neuron implements Node diff --git a/src/Phpml/NeuralNetwork/Node/Synapse.php b/src/Phpml/NeuralNetwork/Node/Neuron/Synapse.php similarity index 94% rename from src/Phpml/NeuralNetwork/Node/Synapse.php rename to src/Phpml/NeuralNetwork/Node/Neuron/Synapse.php index 923c4ff..3813d71 100644 --- a/src/Phpml/NeuralNetwork/Node/Synapse.php +++ 
b/src/Phpml/NeuralNetwork/Node/Neuron/Synapse.php @@ -2,11 +2,11 @@ declare (strict_types = 1); -namespace Phpml\NeuralNetwork\Node; +namespace Phpml\NeuralNetwork\Node\Neuron; use Phpml\NeuralNetwork\Node; -class Synapse implements Node +class Synapse { /** * @var float diff --git a/tests/Phpml/NeuralNetwork/LayerTest.php b/tests/Phpml/NeuralNetwork/LayerTest.php new file mode 100644 index 0000000..5706ab4 --- /dev/null +++ b/tests/Phpml/NeuralNetwork/LayerTest.php @@ -0,0 +1,56 @@ +assertEquals([], $layer->getNodes()); + } + + public function testLayerInitializationWithDefaultNodesType() + { + $layer = new Layer($number = 5); + + $this->assertCount($number, $layer->getNodes()); + foreach ($layer->getNodes() as $node) { + $this->assertInstanceOf(Neuron::class, $node); + } + } + + public function testLayerInitializationWithExplicitNodesType() + { + $layer = new Layer($number = 5, $class = Bias::class); + + $this->assertCount($number, $layer->getNodes()); + foreach ($layer->getNodes() as $node) { + $this->assertInstanceOf($class, $node); + } + } + + /** + * @expectedException \Phpml\Exception\InvalidArgumentException + */ + public function testThrowExceptionOnInvalidNodeClass() + { + new Layer(1, \stdClass::class); + } + + public function testAddNodesToLayer() + { + $layer = new Layer(); + $layer->addNode($node1 = new Neuron()); + $layer->addNode($node2 = new Neuron()); + + $this->assertEquals([$node1, $node2], $layer->getNodes()); + } +} diff --git a/tests/Phpml/NeuralNetwork/Node/BiasTest.php b/tests/Phpml/NeuralNetwork/Node/BiasTest.php new file mode 100644 index 0000000..c0ece3f --- /dev/null +++ b/tests/Phpml/NeuralNetwork/Node/BiasTest.php @@ -0,0 +1,17 @@ +assertEquals(1.0, $bias->getOutput()); + } +} diff --git a/tests/Phpml/NeuralNetwork/Node/InputTest.php b/tests/Phpml/NeuralNetwork/Node/InputTest.php new file mode 100644 index 0000000..b0abdcc --- /dev/null +++ b/tests/Phpml/NeuralNetwork/Node/InputTest.php @@ -0,0 +1,27 @@ +assertEquals(0.0, $input->getOutput()); + + $input = new Input($value = 9.6); + $this->assertEquals($value, $input->getOutput()); + } + + public function testSetInput() + { + $input = new Input(); + $input->setInput($value = 6.9); + + $this->assertEquals($value, $input->getOutput()); + } +} diff --git a/tests/Phpml/NeuralNetwork/Node/SynapseTest.php b/tests/Phpml/NeuralNetwork/Node/Neuron/SynapseTest.php similarity index 93% rename from tests/Phpml/NeuralNetwork/Node/SynapseTest.php rename to tests/Phpml/NeuralNetwork/Node/Neuron/SynapseTest.php index 41fc937..9ad733d 100644 --- a/tests/Phpml/NeuralNetwork/Node/SynapseTest.php +++ b/tests/Phpml/NeuralNetwork/Node/Neuron/SynapseTest.php @@ -2,10 +2,10 @@ declare (strict_types = 1); -namespace tests\Phpml\NeuralNetwork\Node; +namespace tests\Phpml\NeuralNetwork\Node\Neuron; +use Phpml\NeuralNetwork\Node\Neuron\Synapse; use Phpml\NeuralNetwork\Node\Neuron; -use Phpml\NeuralNetwork\Node\Synapse; class SynapseTest extends \PHPUnit_Framework_TestCase { diff --git a/tests/Phpml/NeuralNetwork/Node/NeuronTest.php b/tests/Phpml/NeuralNetwork/Node/NeuronTest.php index 526041b..c416ffd 100644 --- a/tests/Phpml/NeuralNetwork/Node/NeuronTest.php +++ b/tests/Phpml/NeuralNetwork/Node/NeuronTest.php @@ -6,7 +6,7 @@ namespace tests\Phpml\NeuralNetwork\Node; use Phpml\NeuralNetwork\ActivationFunction\BinaryStep; use Phpml\NeuralNetwork\Node\Neuron; -use Phpml\NeuralNetwork\Node\Synapse; +use Phpml\NeuralNetwork\Node\Neuron\Synapse; class NeuronTest extends \PHPUnit_Framework_TestCase { @@ -53,7 +53,7 @@ class NeuronTest extends 
\PHPUnit_Framework_TestCase /** * @param int $output * - * @return \PHPUnit_Framework_MockObject_MockObject + * @return Synapse|\PHPUnit_Framework_MockObject_MockObject */ private function getSynapseMock($output = 2) { From 12ee62bbca4acc3ad29e0eb1867a0683cf965402 Mon Sep 17 00:00:00 2001 From: Arkadiusz Kondas Date: Fri, 5 Aug 2016 16:12:39 +0200 Subject: [PATCH 08/19] create Network and Training contracts --- src/Phpml/NeuralNetwork/Network.php | 20 +++++++++++ .../NeuralNetwork/Network/LayeredNetwork.php | 36 +++++++++++++++++++ .../Network/MultilayerPerceptron.php | 10 ++++++ src/Phpml/NeuralNetwork/Training.php | 16 +++++++++ .../Training/Backpropagation.php | 23 ++++++++++++ 5 files changed, 105 insertions(+) create mode 100644 src/Phpml/NeuralNetwork/Network.php create mode 100644 src/Phpml/NeuralNetwork/Network/LayeredNetwork.php create mode 100644 src/Phpml/NeuralNetwork/Network/MultilayerPerceptron.php create mode 100644 src/Phpml/NeuralNetwork/Training.php create mode 100644 src/Phpml/NeuralNetwork/Training/Backpropagation.php diff --git a/src/Phpml/NeuralNetwork/Network.php b/src/Phpml/NeuralNetwork/Network.php new file mode 100644 index 0000000..be59b34 --- /dev/null +++ b/src/Phpml/NeuralNetwork/Network.php @@ -0,0 +1,20 @@ + Date: Sun, 7 Aug 2016 23:41:02 +0200 Subject: [PATCH 09/19] test abstraction from LayeredNetwork --- src/Phpml/NeuralNetwork/Network.php | 14 ++++-- .../NeuralNetwork/Network/LayeredNetwork.php | 49 +++++++++++++++---- .../Network/MultilayerPerceptron.php | 1 - src/Phpml/NeuralNetwork/Training.php | 2 +- .../Training/Backpropagation.php | 5 +- 5 files changed, 52 insertions(+), 19 deletions(-) diff --git a/src/Phpml/NeuralNetwork/Network.php b/src/Phpml/NeuralNetwork/Network.php index be59b34..269351f 100644 --- a/src/Phpml/NeuralNetwork/Network.php +++ b/src/Phpml/NeuralNetwork/Network.php @@ -4,9 +4,8 @@ declare (strict_types = 1); namespace Phpml\NeuralNetwork; -interface Network extends Node +interface Network { - /** * @param mixed $input */ @@ -15,6 +14,15 @@ interface Network extends Node /** * @return array */ - public function getLayers(): array; + public function getOutput(): array; + /** + * @param Layer $layer + */ + public function addLayer(Layer $layer); + + /** + * @return Layer[] + */ + public function getLayers(): array; } diff --git a/src/Phpml/NeuralNetwork/Network/LayeredNetwork.php b/src/Phpml/NeuralNetwork/Network/LayeredNetwork.php index a46b267..699c4d4 100644 --- a/src/Phpml/NeuralNetwork/Network/LayeredNetwork.php +++ b/src/Phpml/NeuralNetwork/Network/LayeredNetwork.php @@ -4,25 +4,51 @@ declare (strict_types = 1); namespace Phpml\NeuralNetwork\Network; +use Phpml\NeuralNetwork\Layer; use Phpml\NeuralNetwork\Network; abstract class LayeredNetwork implements Network { + /** + * @var Layer[] + */ + protected $layers; + + /** + * @param Layer $layer + */ + public function addLayer(Layer $layer) + { + $this->layers[] = $layer; + } + + /** + * @return Layer[] + */ + public function getLayers(): array + { + return $this->layers; + } + + /** + * @return Layer + */ + public function getOutputLayer(): Layer + { + return $this->layers[count($this->layers) - 1]; + } /** * @return array */ - public function getLayers(): array - { - - } - - /** - * @return float - */ - public function getOutput(): float + public function getOutput(): array { + $result = []; + foreach ($this->getOutputLayer()->getNodes() as $neuron) { + $result[] = $neuron->getOutput(); + } + return $result; } /** @@ -30,7 +56,10 @@ abstract class LayeredNetwork implements 
Network */ public function setInput($input) { + $firstLayer = $this->layers[0]; + foreach ($firstLayer->getNodes() as $key => $neuron) { + $neuron->setInput($input[$key]); + } } - } diff --git a/src/Phpml/NeuralNetwork/Network/MultilayerPerceptron.php b/src/Phpml/NeuralNetwork/Network/MultilayerPerceptron.php index ce5a615..c0f7df3 100644 --- a/src/Phpml/NeuralNetwork/Network/MultilayerPerceptron.php +++ b/src/Phpml/NeuralNetwork/Network/MultilayerPerceptron.php @@ -6,5 +6,4 @@ namespace Phpml\NeuralNetwork\Network; class MultilayerPerceptron extends LayeredNetwork { - } diff --git a/src/Phpml/NeuralNetwork/Training.php b/src/Phpml/NeuralNetwork/Training.php index 9870c11..932f189 100644 --- a/src/Phpml/NeuralNetwork/Training.php +++ b/src/Phpml/NeuralNetwork/Training.php @@ -10,7 +10,7 @@ interface Training * @param array $samples * @param array $targets * @param float $desiredError - * @param int $maxIterations + * @param int $maxIterations */ public function train(array $samples, array $targets, float $desiredError = 0.001, int $maxIterations = 10000); } diff --git a/src/Phpml/NeuralNetwork/Training/Backpropagation.php b/src/Phpml/NeuralNetwork/Training/Backpropagation.php index 17ca44c..ce9f5e6 100644 --- a/src/Phpml/NeuralNetwork/Training/Backpropagation.php +++ b/src/Phpml/NeuralNetwork/Training/Backpropagation.php @@ -8,16 +8,13 @@ use Phpml\NeuralNetwork\Training; class Backpropagation implements Training { - /** * @param array $samples * @param array $targets * @param float $desiredError - * @param int $maxIterations + * @param int $maxIterations */ public function train(array $samples, array $targets, float $desiredError = 0.001, int $maxIterations = 10000) { - } - } From 64859f263f378e2c2f73423eaf6d106bf23e2899 Mon Sep 17 00:00:00 2001 From: Arkadiusz Kondas Date: Sun, 7 Aug 2016 23:41:08 +0200 Subject: [PATCH 10/19] test abstraction from LayeredNetwork --- .../Network/LayeredNetworkTest.php | 53 +++++++++++++++++++ 1 file changed, 53 insertions(+) create mode 100644 tests/Phpml/NeuralNetwork/Network/LayeredNetworkTest.php diff --git a/tests/Phpml/NeuralNetwork/Network/LayeredNetworkTest.php b/tests/Phpml/NeuralNetwork/Network/LayeredNetworkTest.php new file mode 100644 index 0000000..11fe914 --- /dev/null +++ b/tests/Phpml/NeuralNetwork/Network/LayeredNetworkTest.php @@ -0,0 +1,53 @@ +getLayeredNetworkMock(); + + $network->addLayer($layer1 = new Layer()); + $network->addLayer($layer2 = new Layer()); + + $this->assertEquals([$layer1, $layer2], $network->getLayers()); + } + + public function testGetLastLayerAsOutputLayer() + { + $network = $this->getLayeredNetworkMock(); + $network->addLayer($layer1 = new Layer()); + + $this->assertEquals($layer1, $network->getOutputLayer()); + + $network->addLayer($layer2 = new Layer()); + $this->assertEquals($layer2, $network->getOutputLayer()); + } + + public function testSetInputAndGetOutput() + { + $network = $this->getLayeredNetworkMock(); + $network->addLayer(new Layer(2, Input::class)); + + $network->setInput($input = [34, 43]); + $this->assertEquals($input, $network->getOutput()); + + $network->addLayer(new Layer(1)); + $this->assertEquals([0.5], $network->getOutput()); + } + + /** + * @return LayeredNetwork + */ + private function getLayeredNetworkMock() + { + return $this->getMockForAbstractClass(LayeredNetwork::class); + } +} From 72afeb7040104bd9825d0555637fb735eb8c9e5e Mon Sep 17 00:00:00 2001 From: Arkadiusz Kondas Date: Tue, 9 Aug 2016 13:27:43 +0200 Subject: [PATCH 11/19] implements and test multilayer perceptron methods --- 
.../Exception/InvalidArgumentException.php | 8 ++ .../Network/MultilayerPerceptron.php | 83 +++++++++++++++++++ 2 files changed, 91 insertions(+) diff --git a/src/Phpml/Exception/InvalidArgumentException.php b/src/Phpml/Exception/InvalidArgumentException.php index d280296..86cfd86 100644 --- a/src/Phpml/Exception/InvalidArgumentException.php +++ b/src/Phpml/Exception/InvalidArgumentException.php @@ -81,4 +81,12 @@ class InvalidArgumentException extends \Exception { return new self('Layer node class must implement Node interface'); } + + /** + * @return InvalidArgumentException + */ + public static function invalidLayersNumber() + { + return new self('Provide at least 2 layers: 1 input and 1 output'); + } } diff --git a/src/Phpml/NeuralNetwork/Network/MultilayerPerceptron.php b/src/Phpml/NeuralNetwork/Network/MultilayerPerceptron.php index c0f7df3..4079822 100644 --- a/src/Phpml/NeuralNetwork/Network/MultilayerPerceptron.php +++ b/src/Phpml/NeuralNetwork/Network/MultilayerPerceptron.php @@ -4,6 +4,89 @@ declare (strict_types = 1); namespace Phpml\NeuralNetwork\Network; +use Phpml\Exception\InvalidArgumentException; +use Phpml\NeuralNetwork\Layer; +use Phpml\NeuralNetwork\Node\Bias; +use Phpml\NeuralNetwork\Node\Input; +use Phpml\NeuralNetwork\Node\Neuron; +use Phpml\NeuralNetwork\Node\Neuron\Synapse; + class MultilayerPerceptron extends LayeredNetwork { + /** + * @param array $layers + * + * @throws InvalidArgumentException + */ + public function __construct(array $layers) + { + if (count($layers) < 2) { + throw InvalidArgumentException::invalidLayersNumber(); + } + + $this->addInputLayer(array_shift($layers)); + $this->addNeuronLayers($layers); + $this->addBiasNodes(); + $this->generateSynapses(); + } + + /** + * @param int $nodes + */ + private function addInputLayer(int $nodes) + { + $this->addLayer(new Layer($nodes, Input::class)); + } + + /** + * @param array $layers + */ + private function addNeuronLayers(array $layers) + { + foreach ($layers as $neurons) { + $this->addLayer(new Layer($neurons, Neuron::class)); + } + } + + private function generateSynapses() + { + $layersNumber = count($this->layers) - 1; + for ($i = 0; $i < $layersNumber; ++$i) { + $currentLayer = $this->layers[$i]; + $nextLayer = $this->layers[$i + 1]; + $this->generateLayerSynapses($nextLayer, $currentLayer); + } + } + + private function addBiasNodes() + { + $biasLayers = count($this->layers) - 1; + for ($i = 0;$i < $biasLayers;++$i) { + $this->layers[$i]->addNode(new Bias()); + } + } + + /** + * @param Layer $nextLayer + * @param Layer $currentLayer + */ + private function generateLayerSynapses(Layer $nextLayer, Layer $currentLayer) + { + foreach ($nextLayer->getNodes() as $nextNeuron) { + if ($nextNeuron instanceof Neuron) { + $this->generateNeuronSynapses($currentLayer, $nextNeuron); + } + } + } + + /** + * @param Layer $currentLayer + * @param Neuron $nextNeuron + */ + private function generateNeuronSynapses(Layer $currentLayer, Neuron $nextNeuron) + { + foreach ($currentLayer->getNodes() as $currentNeuron) { + $nextNeuron->addSynapse(new Synapse($currentNeuron)); + } + } } From e5d39ee18a5154df4173121cc89c357a952d6078 Mon Sep 17 00:00:00 2001 From: Arkadiusz Kondas Date: Tue, 9 Aug 2016 13:27:48 +0200 Subject: [PATCH 12/19] implements and test multilayer perceptron methods --- .../Network/MultilayerPerceptronTest.php | 73 +++++++++++++++++++ 1 file changed, 73 insertions(+) create mode 100644 tests/Phpml/NeuralNetwork/Network/MultilayerPerceptronTest.php diff --git 
a/tests/Phpml/NeuralNetwork/Network/MultilayerPerceptronTest.php b/tests/Phpml/NeuralNetwork/Network/MultilayerPerceptronTest.php new file mode 100644 index 0000000..1ac1621 --- /dev/null +++ b/tests/Phpml/NeuralNetwork/Network/MultilayerPerceptronTest.php @@ -0,0 +1,73 @@ +assertCount(3, $mlp->getLayers()); + + $layers = $mlp->getLayers(); + + // input layer + $this->assertCount(3, $layers[0]->getNodes()); + $this->assertNotContainsOnly(Neuron::class, $layers[0]->getNodes()); + + // hidden layer + $this->assertCount(3, $layers[1]->getNodes()); + $this->assertNotContainsOnly(Neuron::class, $layers[0]->getNodes()); + + // output layer + $this->assertCount(1, $layers[2]->getNodes()); + $this->assertContainsOnly(Neuron::class, $layers[2]->getNodes()); + } + + public function testSynapsesGeneration() + { + $mlp = new MultilayerPerceptron([2, 2, 1]); + $layers = $mlp->getLayers(); + + foreach ($layers[1]->getNodes() as $node) { + if ($node instanceof Neuron) { + $synapses = $node->getSynapses(); + $this->assertCount(3, $synapses); + + $synapsesNodes = $this->getSynapsesNodes($synapses); + foreach ($layers[0]->getNodes() as $prevNode) { + $this->assertContains($prevNode, $synapsesNodes); + } + } + } + } + + /** + * @param array $synapses + * + * @return array + */ + private function getSynapsesNodes(array $synapses): array + { + $nodes = []; + foreach ($synapses as $synapse) { + $nodes[] = $synapse->getNode(); + } + + return $nodes; + } + + /** + * @expectedException \Phpml\Exception\InvalidArgumentException + */ + public function testThrowExceptionOnInvalidLayersNumber() + { + new MultilayerPerceptron([2]); + } +} From 66d029e94fb6bb024dee5781504be0d32011e651 Mon Sep 17 00:00:00 2001 From: Arkadiusz Kondas Date: Wed, 10 Aug 2016 22:43:47 +0200 Subject: [PATCH 13/19] implement and test Backpropagation training --- src/Phpml/NeuralNetwork/Network.php | 2 + .../NeuralNetwork/Network/LayeredNetwork.php | 18 ++- .../Training/Backpropagation.php | 114 ++++++++++++++++++ .../Training/Backpropagation/Sigma.php | 46 +++++++ .../Training/BackpropagationTest.php | 29 +++++ 5 files changed, 208 insertions(+), 1 deletion(-) create mode 100644 src/Phpml/NeuralNetwork/Training/Backpropagation/Sigma.php create mode 100644 tests/Phpml/NeuralNetwork/Training/BackpropagationTest.php diff --git a/src/Phpml/NeuralNetwork/Network.php b/src/Phpml/NeuralNetwork/Network.php index 269351f..a03b8b6 100644 --- a/src/Phpml/NeuralNetwork/Network.php +++ b/src/Phpml/NeuralNetwork/Network.php @@ -8,6 +8,8 @@ interface Network { /** * @param mixed $input + * + * @return self */ public function setInput($input); diff --git a/src/Phpml/NeuralNetwork/Network/LayeredNetwork.php b/src/Phpml/NeuralNetwork/Network/LayeredNetwork.php index 699c4d4..4413403 100644 --- a/src/Phpml/NeuralNetwork/Network/LayeredNetwork.php +++ b/src/Phpml/NeuralNetwork/Network/LayeredNetwork.php @@ -6,6 +6,8 @@ namespace Phpml\NeuralNetwork\Network; use Phpml\NeuralNetwork\Layer; use Phpml\NeuralNetwork\Network; +use Phpml\NeuralNetwork\Node\Input; +use Phpml\NeuralNetwork\Node\Neuron; abstract class LayeredNetwork implements Network { @@ -53,13 +55,27 @@ abstract class LayeredNetwork implements Network /** * @param mixed $input + * + * @return $this */ public function setInput($input) { $firstLayer = $this->layers[0]; foreach ($firstLayer->getNodes() as $key => $neuron) { - $neuron->setInput($input[$key]); + if ($neuron instanceof Input) { + $neuron->setInput($input[$key]); + } } + + foreach ($this->getLayers() as $layer) { + foreach ($layer->getNodes() 
as $node) { + if ($node instanceof Neuron) { + $node->refresh(); + } + } + } + + return $this; } } diff --git a/src/Phpml/NeuralNetwork/Training/Backpropagation.php b/src/Phpml/NeuralNetwork/Training/Backpropagation.php index ce9f5e6..e6691e2 100644 --- a/src/Phpml/NeuralNetwork/Training/Backpropagation.php +++ b/src/Phpml/NeuralNetwork/Training/Backpropagation.php @@ -4,10 +4,33 @@ declare (strict_types = 1); namespace Phpml\NeuralNetwork\Training; +use Phpml\NeuralNetwork\Network; +use Phpml\NeuralNetwork\Node\Neuron; use Phpml\NeuralNetwork\Training; +use Phpml\NeuralNetwork\Training\Backpropagation\Sigma; class Backpropagation implements Training { + /** + * @var Network + */ + private $network; + + /** + * @var int + */ + private $theta; + + /** + * @param Network $network + * @param int $theta + */ + public function __construct(Network $network, int $theta = 1) + { + $this->network = $network; + $this->theta = $theta; + } + /** * @param array $samples * @param array $targets @@ -16,5 +39,96 @@ class Backpropagation implements Training */ public function train(array $samples, array $targets, float $desiredError = 0.001, int $maxIterations = 10000) { + for ($i = 0; $i < $maxIterations; ++$i) { + $resultsWithinError = $this->trainSamples($samples, $targets, $desiredError); + + if ($resultsWithinError == count($samples)) { + break; + } + } + } + + /** + * @param array $samples + * @param array $targets + * @param float $desiredError + * + * @return int + */ + private function trainSamples(array $samples, array $targets, float $desiredError): int + { + $resultsWithinError = 0; + foreach ($targets as $key => $target) { + $result = $this->network->setInput($samples[$key])->getOutput(); + + if ($this->isResultWithinError($result, $target, $desiredError)) { + ++$resultsWithinError; + } else { + $this->trainSample($samples[$key], $target); + } + } + + return $resultsWithinError; + } + + private function trainSample(array $sample, array $target) + { + $this->network->setInput($sample)->getOutput(); + + $sigmas = []; + $layers = $this->network->getLayers(); + $layersNumber = count($layers); + + for ($i = $layersNumber; $i > 1; --$i) { + foreach ($layers[$i - 1]->getNodes() as $key => $neuron) { + if ($neuron instanceof Neuron) { + $neuronOutput = $neuron->getOutput(); + $sigma = $neuronOutput * (1 - $neuronOutput) * ($i == $layersNumber ? 
($target[$key] - $neuronOutput) : $this->getPrevSigma($sigmas, $neuron)); + $sigmas[] = new Sigma($neuron, $sigma); + foreach ($neuron->getSynapses() as $synapse) { + $synapse->changeWeight($this->theta * $sigma * $synapse->getNode()->getOutput()); + } + } + } + } + } + + /** + * @param Sigma[] $sigmas + * @param Neuron $forNeuron + * + * @return float + */ + private function getPrevSigma(array $sigmas, Neuron $forNeuron): float + { + $sigma = 0.0; + + foreach ($sigmas as $neuronSigma) { + foreach ($neuronSigma->getNeuron()->getSynapses() as $synapse) { + if ($synapse->getNode() == $forNeuron) { + $sigma += $synapse->getWeight() * $neuronSigma->getSigma(); + } + } + } + + return $sigma; + } + + /** + * @param array $result + * @param array $target + * @param float $desiredError + * + * @return bool + */ + private function isResultWithinError(array $result, array $target, float $desiredError) + { + foreach ($target as $key => $value) { + if ($result[$key] > $value + $desiredError || $result[$key] < $value - $desiredError) { + return false; + } + } + + return true; } } diff --git a/src/Phpml/NeuralNetwork/Training/Backpropagation/Sigma.php b/src/Phpml/NeuralNetwork/Training/Backpropagation/Sigma.php new file mode 100644 index 0000000..8ce397b --- /dev/null +++ b/src/Phpml/NeuralNetwork/Training/Backpropagation/Sigma.php @@ -0,0 +1,46 @@ +neuron = $neuron; + $this->sigma = $sigma; + } + + /** + * @return Neuron + */ + public function getNeuron() + { + return $this->neuron; + } + + /** + * @return float + */ + public function getSigma() + { + return $this->sigma; + } +} diff --git a/tests/Phpml/NeuralNetwork/Training/BackpropagationTest.php b/tests/Phpml/NeuralNetwork/Training/BackpropagationTest.php new file mode 100644 index 0000000..a44c1d5 --- /dev/null +++ b/tests/Phpml/NeuralNetwork/Training/BackpropagationTest.php @@ -0,0 +1,29 @@ +train( + [[1, 0], [0, 1], [1, 1], [0, 0]], + [[1], [1], [0], [0]], + $desiredError = 0.2, + 10000 + ); + + $this->assertEquals(0, $network->setInput([1, 1])->getOutput()[0], '', $desiredError); + $this->assertEquals(0, $network->setInput([0, 0])->getOutput()[0], '', $desiredError); + $this->assertEquals(1, $network->setInput([1, 0])->getOutput()[0], '', $desiredError); + $this->assertEquals(1, $network->setInput([0, 1])->getOutput()[0], '', $desiredError); + } +} From c506a84164c203634f5d5e3a92b89aced51cc9a5 Mon Sep 17 00:00:00 2001 From: Arkadiusz Kondas Date: Wed, 10 Aug 2016 23:03:02 +0200 Subject: [PATCH 14/19] refactor Backpropagation methods and simplify things --- .../Training/Backpropagation.php | 52 ++++++++++++++----- .../Training/Backpropagation/Sigma.php | 18 +++++++ .../Training/BackpropagationTest.php | 2 +- 3 files changed, 58 insertions(+), 14 deletions(-) diff --git a/src/Phpml/NeuralNetwork/Training/Backpropagation.php b/src/Phpml/NeuralNetwork/Training/Backpropagation.php index e6691e2..3f80bf5 100644 --- a/src/Phpml/NeuralNetwork/Training/Backpropagation.php +++ b/src/Phpml/NeuralNetwork/Training/Backpropagation.php @@ -21,6 +21,11 @@ class Backpropagation implements Training */ private $theta; + /** + * @var array + */ + private $sigmas; + /** * @param Network $network * @param int $theta @@ -71,20 +76,22 @@ class Backpropagation implements Training return $resultsWithinError; } + /** + * @param array $sample + * @param array $target + */ private function trainSample(array $sample, array $target) { $this->network->setInput($sample)->getOutput(); + $this->sigmas = []; - $sigmas = []; $layers = $this->network->getLayers(); $layersNumber = 
count($layers); for ($i = $layersNumber; $i > 1; --$i) { foreach ($layers[$i - 1]->getNodes() as $key => $neuron) { if ($neuron instanceof Neuron) { - $neuronOutput = $neuron->getOutput(); - $sigma = $neuronOutput * (1 - $neuronOutput) * ($i == $layersNumber ? ($target[$key] - $neuronOutput) : $this->getPrevSigma($sigmas, $neuron)); - $sigmas[] = new Sigma($neuron, $sigma); + $sigma = $this->getSigma($neuron, $target, $key, $i == $layersNumber); foreach ($neuron->getSynapses() as $synapse) { $synapse->changeWeight($this->theta * $sigma * $synapse->getNode()->getOutput()); } @@ -94,21 +101,40 @@ class Backpropagation implements Training } /** - * @param Sigma[] $sigmas - * @param Neuron $forNeuron + * @param Neuron $neuron + * @param array $target + * @param int $key + * @param bool $lastLayer + * + * @return float + */ + private function getSigma(Neuron $neuron, array $target, int $key, bool $lastLayer): float + { + $neuronOutput = $neuron->getOutput(); + $sigma = $neuronOutput * (1 - $neuronOutput); + + if ($lastLayer) { + $sigma *= ($target[$key] - $neuronOutput); + } else { + $sigma *= $this->getPrevSigma($neuron); + } + + $this->sigmas[] = new Sigma($neuron, $sigma); + + return $sigma; + } + + /** + * @param Neuron $neuron * * @return float */ - private function getPrevSigma(array $sigmas, Neuron $forNeuron): float + private function getPrevSigma(Neuron $neuron): float { $sigma = 0.0; - foreach ($sigmas as $neuronSigma) { - foreach ($neuronSigma->getNeuron()->getSynapses() as $synapse) { - if ($synapse->getNode() == $forNeuron) { - $sigma += $synapse->getWeight() * $neuronSigma->getSigma(); - } - } + foreach ($this->sigmas as $neuronSigma) { + $sigma += $neuronSigma->getSigmaForNeuron($neuron); } return $sigma; diff --git a/src/Phpml/NeuralNetwork/Training/Backpropagation/Sigma.php b/src/Phpml/NeuralNetwork/Training/Backpropagation/Sigma.php index 8ce397b..8520354 100644 --- a/src/Phpml/NeuralNetwork/Training/Backpropagation/Sigma.php +++ b/src/Phpml/NeuralNetwork/Training/Backpropagation/Sigma.php @@ -43,4 +43,22 @@ class Sigma { return $this->sigma; } + + /** + * @param Neuron $neuron + * + * @return float + */ + public function getSigmaForNeuron(Neuron $neuron): float + { + $sigma = 0.0; + + foreach ($this->neuron->getSynapses() as $synapse) { + if ($synapse->getNode() == $neuron) { + $sigma += $synapse->getWeight() * $this->getSigma(); + } + } + + return $sigma; + } } diff --git a/tests/Phpml/NeuralNetwork/Training/BackpropagationTest.php b/tests/Phpml/NeuralNetwork/Training/BackpropagationTest.php index a44c1d5..265d936 100644 --- a/tests/Phpml/NeuralNetwork/Training/BackpropagationTest.php +++ b/tests/Phpml/NeuralNetwork/Training/BackpropagationTest.php @@ -18,7 +18,7 @@ class BackpropagationTest extends \PHPUnit_Framework_TestCase [[1, 0], [0, 1], [1, 1], [0, 0]], [[1], [1], [0], [0]], $desiredError = 0.2, - 10000 + 30000 ); $this->assertEquals(0, $network->setInput([1, 1])->getOutput()[0], '', $desiredError); From 2412f15923514d3c14b1b1ad09d3c3f6c5c34558 Mon Sep 17 00:00:00 2001 From: Arkadiusz Kondas Date: Thu, 11 Aug 2016 13:21:22 +0200 Subject: [PATCH 15/19] Add activationFunction parameter for Perceptron and Layer --- src/Phpml/NeuralNetwork/Layer.php | 24 +++++++++++++++---- .../Network/MultilayerPerceptron.php | 15 +++++++----- 2 files changed, 29 insertions(+), 10 deletions(-) diff --git a/src/Phpml/NeuralNetwork/Layer.php b/src/Phpml/NeuralNetwork/Layer.php index 6700164..b94da21 100644 --- a/src/Phpml/NeuralNetwork/Layer.php +++ b/src/Phpml/NeuralNetwork/Layer.php @@ 
-15,22 +15,38 @@ class Layer private $nodes = []; /** - * @param int $nodesNumber - * @param string $nodeClass + * @param int $nodesNumber + * @param string $nodeClass + * @param ActivationFunction|null $activationFunction * * @throws InvalidArgumentException */ - public function __construct(int $nodesNumber = 0, string $nodeClass = Neuron::class) + public function __construct(int $nodesNumber = 0, string $nodeClass = Neuron::class, ActivationFunction $activationFunction = null) { if (!in_array(Node::class, class_implements($nodeClass))) { throw InvalidArgumentException::invalidLayerNodeClass(); } for ($i = 0; $i < $nodesNumber; ++$i) { - $this->nodes[] = new $nodeClass(); + $this->nodes[] = $this->createNode($nodeClass, $activationFunction); } } + /** + * @param string $nodeClass + * @param ActivationFunction|null $activationFunction + * + * @return Neuron + */ + private function createNode(string $nodeClass, ActivationFunction $activationFunction = null) + { + if (Neuron::class == $nodeClass) { + return new Neuron($activationFunction); + } + + return new $nodeClass(); + } + /** * @param Node $node */ diff --git a/src/Phpml/NeuralNetwork/Network/MultilayerPerceptron.php b/src/Phpml/NeuralNetwork/Network/MultilayerPerceptron.php index 4079822..e97e045 100644 --- a/src/Phpml/NeuralNetwork/Network/MultilayerPerceptron.php +++ b/src/Phpml/NeuralNetwork/Network/MultilayerPerceptron.php @@ -5,6 +5,7 @@ declare (strict_types = 1); namespace Phpml\NeuralNetwork\Network; use Phpml\Exception\InvalidArgumentException; +use Phpml\NeuralNetwork\ActivationFunction; use Phpml\NeuralNetwork\Layer; use Phpml\NeuralNetwork\Node\Bias; use Phpml\NeuralNetwork\Node\Input; @@ -14,18 +15,19 @@ use Phpml\NeuralNetwork\Node\Neuron\Synapse; class MultilayerPerceptron extends LayeredNetwork { /** - * @param array $layers + * @param array $layers + * @param ActivationFunction|null $activationFunction * * @throws InvalidArgumentException */ - public function __construct(array $layers) + public function __construct(array $layers, ActivationFunction $activationFunction = null) { if (count($layers) < 2) { throw InvalidArgumentException::invalidLayersNumber(); } $this->addInputLayer(array_shift($layers)); - $this->addNeuronLayers($layers); + $this->addNeuronLayers($layers, $activationFunction); $this->addBiasNodes(); $this->generateSynapses(); } @@ -39,12 +41,13 @@ class MultilayerPerceptron extends LayeredNetwork } /** - * @param array $layers + * @param array $layers + * @param ActivationFunction|null $activationFunction */ - private function addNeuronLayers(array $layers) + private function addNeuronLayers(array $layers, ActivationFunction $activationFunction = null) { foreach ($layers as $neurons) { - $this->addLayer(new Layer($neurons, Neuron::class)); + $this->addLayer(new Layer($neurons, Neuron::class, $activationFunction)); } } From f0bd5ae4244a5c3b8e71400bbb9727d7a68ab35f Mon Sep 17 00:00:00 2001 From: Arkadiusz Kondas Date: Fri, 12 Aug 2016 16:29:50 +0200 Subject: [PATCH 16/19] Create MLP Regressor draft --- src/Phpml/Regression/MLPRegressor.php | 81 +++++++++++++++++++++++++++ 1 file changed, 81 insertions(+) create mode 100644 src/Phpml/Regression/MLPRegressor.php diff --git a/src/Phpml/Regression/MLPRegressor.php b/src/Phpml/Regression/MLPRegressor.php new file mode 100644 index 0000000..b00b4d1 --- /dev/null +++ b/src/Phpml/Regression/MLPRegressor.php @@ -0,0 +1,81 @@ +hiddenLayers = $hiddenLayers; + $this->desiredError = $desiredError; + $this->maxIterations = $maxIterations; + $this->activationFunction = 
$activationFunction; + } + + + /** + * @param array $samples + * @param array $targets + */ + public function train(array $samples, array $targets) + { + $layers = [count($samples[0])] + $this->hiddenLayers + [count($targets[0])]; + + $this->perceptron = new MultilayerPerceptron($layers, $this->activationFunction); + + $trainer = new Backpropagation($this->perceptron); + $trainer->train($samples, $targets, $this->desiredError, $this->maxIterations); + } + + /** + * @param array $sample + * + * @return array + */ + protected function predictSample(array $sample) + { + return $this->perceptron->setInput($sample)->getOutput(); + } + +} From 638119fc986f86ffc9e315c58cc9d1b67beab708 Mon Sep 17 00:00:00 2001 From: Arkadiusz Kondas Date: Sun, 14 Aug 2016 18:27:08 +0200 Subject: [PATCH 17/19] code style fixes --- src/Phpml/Regression/MLPRegressor.php | 15 +++++++-------- 1 file changed, 7 insertions(+), 8 deletions(-) diff --git a/src/Phpml/Regression/MLPRegressor.php b/src/Phpml/Regression/MLPRegressor.php index b00b4d1..9a84214 100644 --- a/src/Phpml/Regression/MLPRegressor.php +++ b/src/Phpml/Regression/MLPRegressor.php @@ -4,7 +4,6 @@ declare (strict_types = 1); namespace Phpml\Regression; - use Phpml\Helper\Predictable; use Phpml\NeuralNetwork\ActivationFunction; use Phpml\NeuralNetwork\Network\MultilayerPerceptron; @@ -40,12 +39,12 @@ class MLPRegressor implements Regression private $activationFunction; /** - * @param array $hiddenLayers - * @param float $desiredError - * @param int $maxIterations + * @param array $hiddenLayers + * @param float $desiredError + * @param int $maxIterations * @param ActivationFunction $activationFunction */ - public function __construct(array $hiddenLayers = [100], float $desiredError, int $maxIterations, ActivationFunction $activationFunction = null) + public function __construct(array $hiddenLayers = [10], float $desiredError = 0.01, int $maxIterations = 10000, ActivationFunction $activationFunction = null) { $this->hiddenLayers = $hiddenLayers; $this->desiredError = $desiredError; @@ -53,14 +52,15 @@ class MLPRegressor implements Regression $this->activationFunction = $activationFunction; } - /** * @param array $samples * @param array $targets */ public function train(array $samples, array $targets) { - $layers = [count($samples[0])] + $this->hiddenLayers + [count($targets[0])]; + $layers = $this->hiddenLayers; + array_unshift($layers, count($samples[0])); + $layers[] = count($targets[0]); $this->perceptron = new MultilayerPerceptron($layers, $this->activationFunction); @@ -77,5 +77,4 @@ class MLPRegressor implements Regression { return $this->perceptron->setInput($sample)->getOutput(); } - } From b1978cf5ca0d0bd2361c4d9fbe64e0d6718fac9e Mon Sep 17 00:00:00 2001 From: Arkadiusz Kondas Date: Sun, 14 Aug 2016 18:35:17 +0200 Subject: [PATCH 18/19] update changelog --- CHANGELOG.md | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index bef3451..3b636d2 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -3,9 +3,11 @@ CHANGELOG This changelog references the relevant changes done in PHP-ML library. 
-* 0.1.3 (in plan/progress) - * SSE, SSTo, SSR [Regression] - sum of the squared - * +* 0.2.1 (in plan/progress) + * feature [Regression] - SSE, SSTo, SSR - sum of the squared + +* 0.2.0 (2016-08-14) + * feature [NeuralNetwork] - MultilayerPerceptron and Backpropagation training * 0.1.2 (2016-07-24) * feature [Dataset] - FilesDataset - load dataset from files (folder names as targets) From 3599367ce8673876c276f1cfa3bfc2af265e68bb Mon Sep 17 00:00:00 2001 From: Arkadiusz Kondas Date: Sun, 14 Aug 2016 19:14:56 +0200 Subject: [PATCH 19/19] Add docs for neural network --- README.md | 5 +++- composer.json | 2 +- docs/index.md | 3 ++ .../classification/k-nearest-neighbors.md | 6 ++-- .../neural-network/backpropagation.md | 29 +++++++++++++++++++ .../neural-network/multilayer-perceptron.md | 29 +++++++++++++++++++ mkdocs.yml | 3 ++ 7 files changed, 72 insertions(+), 5 deletions(-) create mode 100644 docs/machine-learning/neural-network/backpropagation.md create mode 100644 docs/machine-learning/neural-network/multilayer-perceptron.md diff --git a/README.md b/README.md index 07ce099..ea1ff4f 100644 --- a/README.md +++ b/README.md @@ -10,7 +10,7 @@ ![PHP-ML - Machine Learning library for PHP](docs/assets/php-ml-logo.png) -Fresh approach to Machine Learning in PHP. Algorithms, Cross Validation, Preprocessing, Feature Extraction and much more in one library. +Fresh approach to Machine Learning in PHP. Algorithms, Cross Validation, Neural Network, Preprocessing, Feature Extraction and much more in one library. PHP-ML requires PHP >= 7.0. @@ -62,6 +62,9 @@ Example scripts are available in a separate repository [php-ai/php-ml-examples]( * [Classification Report](http://php-ml.readthedocs.io/en/latest/machine-learning/metric/classification-report/) * Workflow * [Pipeline](http://php-ml.readthedocs.io/en/latest/machine-learning/workflow/pipeline) +* Neural Network + * [Multilayer Perceptron](http://php-ml.readthedocs.io/en/latest/machine-learning/neural-network/multilayer-perceptron/) + * [Backpropagation training](http://php-ml.readthedocs.io/en/latest/machine-learning/neural-network/backpropagation/) * Cross Validation * [Random Split](http://php-ml.readthedocs.io/en/latest/machine-learning/cross-validation/random-split/) * [Stratified Random Split](http://php-ml.readthedocs.io/en/latest/machine-learning/cross-validation/stratified-random-split/) diff --git a/composer.json b/composer.json index 041f818..eeccc53 100644 --- a/composer.json +++ b/composer.json @@ -3,7 +3,7 @@ "type": "library", "description": "PHP-ML - Machine Learning library for PHP", "license": "MIT", - "keywords": ["machine learning","pattern recognition","computational learning theory","artificial intelligence"], + "keywords": ["machine learning","pattern recognition","neural network","computational learning theory","artificial intelligence"], "homepage": "https://github.com/php-ai/php-ml", "authors": [ { diff --git a/docs/index.md b/docs/index.md index 6b0ce8c..1a38642 100644 --- a/docs/index.md +++ b/docs/index.md @@ -62,6 +62,9 @@ Example scripts are available in a separate repository [php-ai/php-ml-examples]( * [Classification Report](machine-learning/metric/classification-report/) * Workflow * [Pipeline](machine-learning/workflow/pipeline) +* Neural Network + * [Multilayer Perceptron](machine-learning/neural-network/multilayer-perceptron/) + * [Backpropagation training](machine-learning/neural-network/backpropagation/) * Cross Validation * [Random Split](machine-learning/cross-validation/random-split/) * [Stratified Random 
Split](machine-learning/cross-validation/stratified-random-split/)
diff --git a/docs/machine-learning/classification/k-nearest-neighbors.md b/docs/machine-learning/classification/k-nearest-neighbors.md
index 3d5aa27..2de597c 100644
--- a/docs/machine-learning/classification/k-nearest-neighbors.md
+++ b/docs/machine-learning/classification/k-nearest-neighbors.md
@@ -2,7 +2,7 @@

 Classifier implementing the k-nearest neighbors algorithm.

-### Constructor Parameters
+## Constructor Parameters

 * $k - number of nearest neighbors to scan (default: 3)
 * $distanceMetric - Distance object, default Euclidean (see [distance documentation](math/distance/))
@@ -12,7 +12,7 @@
 $classifier = new KNearestNeighbors($k=4);
 $classifier = new KNearestNeighbors($k=3, new Minkowski($lambda=4));
 ```

-### Train
+## Train

 To train a classifier simply provide train samples and labels (as `array`). Example:
@@ -24,7 +24,7 @@
 $classifier = new KNearestNeighbors();
 $classifier->train($samples, $labels);
 ```

-### Predict
+## Predict

 To predict sample label use `predict` method. You can provide one sample or array of samples:
diff --git a/docs/machine-learning/neural-network/backpropagation.md b/docs/machine-learning/neural-network/backpropagation.md
new file mode 100644
index 0000000..8c9b560
--- /dev/null
+++ b/docs/machine-learning/neural-network/backpropagation.md
@@ -0,0 +1,29 @@
+# Backpropagation
+
+Backpropagation, an abbreviation for "backward propagation of errors", is a common method of training artificial neural networks, used in conjunction with an optimization method such as gradient descent.
+
+## Constructor Parameters
+
+* $network (Network) - network to train (for example a MultilayerPerceptron instance)
+* $theta (int) - network theta parameter
+
+```
+use Phpml\NeuralNetwork\Network\MultilayerPerceptron;
+use Phpml\NeuralNetwork\Training\Backpropagation;
+
+$network = new MultilayerPerceptron([2, 2, 1]);
+$training = new Backpropagation($network);
+```
+
+## Training
+
+Example of XOR training:
+
+```
+$training->train(
+    $samples = [[1, 0], [0, 1], [1, 1], [0, 0]],
+    $targets = [[1], [1], [0], [0]],
+    $desiredError = 0.2,
+    $maxIterations = 30000
+);
+```
diff --git a/docs/machine-learning/neural-network/multilayer-perceptron.md b/docs/machine-learning/neural-network/multilayer-perceptron.md
new file mode 100644
index 0000000..c1c0eef
--- /dev/null
+++ b/docs/machine-learning/neural-network/multilayer-perceptron.md
@@ -0,0 +1,29 @@
+# MultilayerPerceptron
+
+A multilayer perceptron (MLP) is a feedforward artificial neural network model that maps sets of input data onto a set of appropriate outputs.
+
+## Constructor Parameters
+
+* $layers (array) - array with the layer configuration; each value represents the number of neurons in that layer
+* $activationFunction (ActivationFunction) - neuron activation function
+
+```
+use Phpml\NeuralNetwork\Network\MultilayerPerceptron;
+$mlp = new MultilayerPerceptron([2, 2, 1]);
+
+// 2 nodes in input layer, 2 nodes in first hidden layer and 1 node in output layer
+```
+
+## Methods
+
+* setInput(array $input)
+* getOutput()
+* getLayers()
+* addLayer(Layer $layer)
+
+## Activation Functions
+
+* BinaryStep
+* Gaussian
+* HyperbolicTangent
+* Sigmoid (default)
diff --git a/mkdocs.yml b/mkdocs.yml
index 057a1a1..4fa6c21 100644
--- a/mkdocs.yml
+++ b/mkdocs.yml
@@ -18,6 +18,9 @@ pages:
         - Classification Report: machine-learning/metric/classification-report.md
     - Workflow:
         - Pipeline: machine-learning/workflow/pipeline.md
+    - Neural Network:
+        - Multilayer Perceptron: machine-learning/neural-network/multilayer-perceptron.md
+        - Backpropagation training: machine-learning/neural-network/backpropagation.md
    - Cross Validation:
        - RandomSplit: machine-learning/cross-validation/random-split.md
        - Stratified Random Split: machine-learning/cross-validation/stratified-random-split.md
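
Patches 16 and 17 introduce `Phpml\Regression\MLPRegressor`, the piece that ties the MultilayerPerceptron network and the Backpropagation trainer together, but the series does not include a usage example for it. The sketch below is a minimal illustration and is not part of the patches: it relies on the constructor defaults from patch 17 and assumes that the `Phpml\Helper\Predictable` helper used by the class exposes a public `predict()` method that delegates to `predictSample()`; the sample data is invented for the example.

```
<?php

use Phpml\Regression\MLPRegressor;

// Two hidden layers with 5 and 3 neurons; desiredError = 0.01 and
// maxIterations = 10000 are the defaults introduced in patch 17.
$regressor = new MLPRegressor([5, 3]);

// Samples are the network inputs, targets are the expected outputs
// (sigmoid activation by default, so target values stay within (0, 1)).
$regressor->train(
    $samples = [[0.1, 0.2], [0.5, 0.4], [0.9, 0.8]],
    $targets = [[0.2], [0.5], [0.9]]
);

// Assumes Predictable::predict() forwards a single sample to predictSample().
echo $regressor->predict([0.3, 0.2])[0];
```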