From 2f4db079184e55e84e967e56552f9f2b800d681c Mon Sep 17 00:00:00 2001 From: Matthias Guillitte Date: Sat, 4 Apr 2026 16:45:04 +0200 Subject: [PATCH] MonoLayer Perceptron --- app/Http/Controllers/PerceptronController.php | 12 + .../ADALINEPerceptronTraining.php | 4 +- .../GradientDescentPerceptronTraining.php | 2 +- .../MonoLayerPerceptronTraining.php | 157 +++++++++ .../NetworksTraining/NetworkTraining.php | 2 + .../SimpleBinaryPerceptronTraining.php | 2 +- app/Models/Perceptrons/InputNeuron.php | 26 ++ app/Models/Perceptrons/NetworkPerceptron.php | 76 +++++ app/Models/Perceptrons/Perceptron.php | 8 +- .../Perceptrons/SimpleBinaryPerceptron2.php | 18 ++ app/Services/DatasetReader/IDataSetReader.php | 4 + .../LinearOrderDataSetReader.php | 13 + .../RandomOrderDataSetReader.php | 13 + .../INetworkSynapticWeightsProvider.php | 8 + .../SimpleNetworkWeightsProvider.php | 35 ++ public/data_sets/table_3_1.csv | 300 +++++++++--------- public/data_sets/table_3_5.csv | 8 +- resources/js/components/LinkHeader.vue | 10 + .../js/components/PerceptronDecisionGraph.vue | 162 ++++++---- .../PerceptronIterationsErrorsGraph.vue | 3 + tests/Unit/Training/TrainingTestCase.php | 4 +- 21 files changed, 641 insertions(+), 226 deletions(-) create mode 100644 app/Models/NetworksTraining/MonoLayerPerceptronTraining.php create mode 100644 app/Models/Perceptrons/InputNeuron.php create mode 100644 app/Models/Perceptrons/NetworkPerceptron.php create mode 100644 app/Models/Perceptrons/SimpleBinaryPerceptron2.php create mode 100644 app/Services/SynapticWeightsProvider/INetworkSynapticWeightsProvider.php create mode 100644 app/Services/SynapticWeightsProvider/SimpleNetworkWeightsProvider.php diff --git a/app/Http/Controllers/PerceptronController.php b/app/Http/Controllers/PerceptronController.php index dc38716..16e5bcb 100644 --- a/app/Http/Controllers/PerceptronController.php +++ b/app/Http/Controllers/PerceptronController.php @@ -5,6 +5,7 @@ namespace App\Http\Controllers; use 
App\Events\PerceptronInitialization; use App\Models\NetworksTraining\ADALINEPerceptronTraining; use App\Models\NetworksTraining\GradientDescentPerceptronTraining; +use App\Models\NetworksTraining\MonoLayerPerceptronTraining; use App\Models\NetworksTraining\SimpleBinaryPerceptronTraining; use App\Services\DatasetReader\IDataSetReader; use App\Services\DatasetReader\LinearOrderDataSetReader; @@ -13,7 +14,11 @@ use App\Services\IterationEventBuffer\PerceptronIterationEventBuffer; use App\Services\IterationEventBuffer\PerceptronLimitedEpochEventBuffer; use App\Services\SynapticWeightsProvider\ISynapticWeightsProvider; use App\Services\SynapticWeightsProvider\ZeroSynapticWeights; +use Illuminate\Contracts\Queue\Job; use Illuminate\Http\Request; +use Illuminate\Support\Facades\DB; +use Symfony\Contracts\EventDispatcher\Event; +use Tests\Services\IterationEventBuffer\DullIterationEventBuffer; class PerceptronController extends Controller { @@ -126,6 +131,8 @@ class PerceptronController extends Controller public function run(Request $request, ISynapticWeightsProvider $synapticWeightsProvider) { + $startTime = microtime(true); + $perceptronType = $request->input('type'); $minError = $request->input('min_error', 0.01); $weightInitMethod = $request->input('weight_init_method', 'random'); @@ -135,6 +142,9 @@ class PerceptronController extends Controller $sessionId = $request->input('session_id', session()->getId()); $trainingId = $request->input('training_id'); + // Remove the jobs for the sessionId + DB::table('jobs')->where('payload', 'like', '%s:9:\"sessionId\";s:40:\"'. 
$sessionId .'\";%')->delete(); + if ($weightInitMethod === 'zeros') { $synapticWeightsProvider = new ZeroSynapticWeights; } @@ -151,6 +161,7 @@ class PerceptronController extends Controller 'simple' => new SimpleBinaryPerceptronTraining($datasetReader, $learningRate, $maxEpochs, $synapticWeightsProvider, $iterationEventBuffer, $sessionId, $trainingId), 'gradientdescent' => new GradientDescentPerceptronTraining($datasetReader, $learningRate, $maxEpochs, $synapticWeightsProvider, $iterationEventBuffer, $sessionId, $trainingId, $minError), 'adaline' => new ADALINEPerceptronTraining($datasetReader, $learningRate, $maxEpochs, $synapticWeightsProvider, $iterationEventBuffer, $sessionId, $trainingId, $minError), + 'monolayer' => new MonoLayerPerceptronTraining($datasetReader, $learningRate, $maxEpochs, $synapticWeightsProvider, $iterationEventBuffer, $sessionId, $trainingId, $minError), default => null, }; @@ -160,6 +171,7 @@ class PerceptronController extends Controller return response()->json([ 'message' => 'Training completed', + 'execution_time' => microtime(true) - $startTime, ]); } } diff --git a/app/Models/NetworksTraining/ADALINEPerceptronTraining.php b/app/Models/NetworksTraining/ADALINEPerceptronTraining.php index 5462a31..f294bfb 100644 --- a/app/Models/NetworksTraining/ADALINEPerceptronTraining.php +++ b/app/Models/NetworksTraining/ADALINEPerceptronTraining.php @@ -66,7 +66,7 @@ class ADALINEPerceptronTraining extends NetworkTraining foreach ($inputsForCurrentEpoch as $inputsWithLabel) { $inputs = array_slice($inputsWithLabel, 0, -1); $correctOutput = (float) end($inputsWithLabel); - $output = $this->perceptron->test($inputs); + $output = $this->perceptron->test($inputs)[0]; $iterationError = $correctOutput - $output; $this->epochError += ($iterationError ** 2) / 2; // Squared error for the example } @@ -92,7 +92,7 @@ class ADALINEPerceptronTraining extends NetworkTraining private function iterationFunction(array $inputs, float $correctOutput): float { - 
$output = $this->perceptron->test($inputs); + $output = $this->perceptron->test($inputs)[0]; $error = $correctOutput - $output; diff --git a/app/Models/NetworksTraining/GradientDescentPerceptronTraining.php b/app/Models/NetworksTraining/GradientDescentPerceptronTraining.php index 3859318..fdc8a60 100644 --- a/app/Models/NetworksTraining/GradientDescentPerceptronTraining.php +++ b/app/Models/NetworksTraining/GradientDescentPerceptronTraining.php @@ -88,7 +88,7 @@ class GradientDescentPerceptronTraining extends NetworkTraining private function iterationFunction(array $inputs, float $correctOutput): float { - $output = $this->perceptron->test($inputs); + $output = $this->perceptron->test($inputs)[0]; $error = $correctOutput - $output; diff --git a/app/Models/NetworksTraining/MonoLayerPerceptronTraining.php b/app/Models/NetworksTraining/MonoLayerPerceptronTraining.php new file mode 100644 index 0000000..e8bfb13 --- /dev/null +++ b/app/Models/NetworksTraining/MonoLayerPerceptronTraining.php @@ -0,0 +1,157 @@ +network = new NetworkPerceptron( + $networkWeightsProvider->generate( + $datasetReader->getInputSize(), + $datasetReader->getOutputSize(), + 0, // No hidden layer + 0, // No hidden layer neurons + ), + $datasetReader->getInputSize(), + GradientDescentPerceptron::class, // No hidden layer + SimpleBinaryPerceptron2::class, + ); + $this->labels = $datasetReader->getLabels(); + } + + public function start(): void + { + $this->epoch = 0; + do { + $this->epochError = 0; + $this->epoch++; + + $inputsForCurrentEpoch = []; + + while ($nextRow = $this->datasetReader->getNextLine()) { + $inputsForCurrentEpoch[] = $nextRow; + $inputs = array_slice($nextRow, 0, -1); + $correctOutput = (int) end($nextRow); + + $iterationError = $this->iterationFunction($inputs, $correctOutput); + + // Synaptic weights correction after each example + $synaptic_weights = $this->network->getSynapticWeights(); + $inputs_with_bias = array_merge([1], $inputs); // Add bias input + + // Updates the 
weights + $this->network->setSynapticWeights( + $this->getUpdatedSynapticWeights($synaptic_weights, $iterationError, $inputs_with_bias) + ); + + // Broadcast the training iteration event + $this->addIterationToBuffer(array_sum($iterationError), $this->network->getSynapticWeights()); + } + + // Calculte the average error for the epoch with the last synaptic weights + foreach ($inputsForCurrentEpoch as $inputsWithLabel) { + $inputs = array_slice($inputsWithLabel, 0, -1); + $correctOutput = (float) end($inputsWithLabel); + $iterationError = $this->iterationFunction($inputs, $correctOutput); + foreach ($iterationError as $error) { + $this->epochError += ($error ** 2) / 2; // Squared error for the example + } + } + $this->epochError /= $this->datasetReader->getEpochExamplesCount(); // Average error for the epoch + + $this->datasetReader->reset(); // Reset the dataset for the next iteration + } while ($this->epoch < $this->maxEpochs && ! $this->stopCondition()); + + $this->iterationEventBuffer->flush(); // Ensure all iterations are sent to the frontend + + $this->checkPassedMaxIterations($this->epochError); + } + + protected function stopCondition(): bool + { + $condition = $this->epochError <= $this->minError; + if ($condition === true) { + event(new PerceptronTrainingEnded('Le perceptron à atteint l\'erreur minimale', $this->sessionId, $this->trainingId)); + } + + return $condition; + } + + private function iterationFunction(array $inputs, int $correctOutput): array + { + $outputs = $this->network->test($inputs); + $desiredOutput = $this->getDesiredOutputFromCorrectOutput($correctOutput); + + $errors = []; + foreach ($outputs as $index => $output) { + $error = $desiredOutput[$index] - $output; + $errors[] = $error; + } + + return $errors; + } + + private function getUpdatedSynapticWeights(array $synaptic_weights, array $iterationError, array $inputs): array + { + $updatedWeights = []; + foreach ($synaptic_weights[0] as $neuronIndex => $neuronWeights) { // There is only 
one layer of weights + $updatedNeuronWeights = []; + foreach ($neuronWeights as $weightIndex => $weight) { + $updatedWeight = $weight + ($this->learningRate * $iterationError[$neuronIndex] * $inputs[$weightIndex]); + $updatedNeuronWeights[] = $updatedWeight; + } + $updatedWeights[] = $updatedNeuronWeights; + } + + return [$updatedWeights]; + } + + private function getDesiredOutputFromCorrectOutput(int $correctOutput): array + { + $desiredOutput = array_fill(0, count($this->labels), -1); + $labelIndex = Arr::first(array_keys($this->labels), fn($key) => $this->labels[$key] == $correctOutput); + if ($labelIndex !== null) { + $desiredOutput[$labelIndex] = 1; + } + + return $desiredOutput; + } + + public function getSynapticWeights(): array + { + return [[$this->network->getSynapticWeights()]]; + } +} diff --git a/app/Models/NetworksTraining/NetworkTraining.php b/app/Models/NetworksTraining/NetworkTraining.php index 859e7fa..050e0c5 100644 --- a/app/Models/NetworksTraining/NetworkTraining.php +++ b/app/Models/NetworksTraining/NetworkTraining.php @@ -16,6 +16,8 @@ abstract class NetworkTraining */ public ActivationsFunctions $activationFunction; + public ?ActivationsFunctions $presentationLayerActivationFunction = null; + public function __construct( protected IDataSetReader $datasetReader, protected int $maxEpochs, diff --git a/app/Models/NetworksTraining/SimpleBinaryPerceptronTraining.php b/app/Models/NetworksTraining/SimpleBinaryPerceptronTraining.php index 72e2dd8..4118c59 100644 --- a/app/Models/NetworksTraining/SimpleBinaryPerceptronTraining.php +++ b/app/Models/NetworksTraining/SimpleBinaryPerceptronTraining.php @@ -71,7 +71,7 @@ class SimpleBinaryPerceptronTraining extends NetworkTraining private function iterationFunction(array $inputs, int $correctOutput) { - $output = $this->perceptron->test($inputs); + $output = $this->perceptron->test($inputs)[0]; $error = $correctOutput - $output; if (abs($error) > $this::MIN_ERROR) { diff --git 
/**
 * A neuron of the input layer: it performs no computation and simply
 * forwards the raw input value it was given via setInput().
 */
class InputNeuron
{
    // Current raw input value; set before each forward pass.
    private float $input = 0.0;

    public function setInput(float $input): void
    {
        $this->input = $input;
    }

    /**
     * Returns the stored input as a one-element array; the $inputs argument
     * is ignored (input neurons have no predecessors).
     */
    public function test(array $inputs): array
    {
        return [$this->input];
    }

    /** Identity function — input neurons do not transform their value. */
    public function activationFunction(float $input): float
    {
        return $input;
    }
}

/**
 * A feed-forward network of perceptrons: one input layer, zero or more
 * hidden layers, and one output layer, built from a layered weight matrix
 * $synaptic_weights[layer][neuron][weight] where weight index 0 is the bias.
 *
 * NOTE(review): the constructor signature was garbled in this chunk and is
 * reconstructed from the call site in MonoLayerPerceptronTraining — confirm
 * against the repository.
 */
class NetworkPerceptron extends Perceptron
{
    /** @var array<int, array> Neurons indexed by layer (0 = input layer). */
    private array $network = [];

    public function __construct(
        array $synaptic_weights,
        private int $inputLayerNeuronsCount,
        private string $hiddenLayerNeuronClass,
        private string $outputLayerNeuronClass,
    ) {
        parent::__construct($synaptic_weights);
        $this->initializeNetwork($synaptic_weights);
    }

    /**
     * Builds the neuron layers from the layered weight matrix.
     *
     * Fix: the original hidden-layer loop ran while
     * $layerIndex < count($synaptic_weights) - 2, which skipped the last
     * hidden layer and left a hole in $this->network for any architecture
     * with at least one hidden layer (only the no-hidden-layer case worked).
     * Every weight layer except the last is a hidden layer, so the bound
     * must be count($synaptic_weights) - 1.
     */
    private function initializeNetwork(array $synaptic_weights): void
    {
        // Input layer: one pass-through neuron per input feature.
        $this->network[0] = [];
        for ($i = 0; $i < $this->inputLayerNeuronsCount; $i++) {
            $this->network[0][] = new InputNeuron();
        }

        $layerCount = count($synaptic_weights);

        // Hidden layers: every weight layer except the last one.
        for ($layerIndex = 0; $layerIndex < $layerCount - 1; $layerIndex++) {
            $this->network[$layerIndex + 1] = [];
            foreach ($synaptic_weights[$layerIndex] as $neuronWeights) {
                $this->network[$layerIndex + 1][] = new $this->hiddenLayerNeuronClass($neuronWeights);
            }
        }

        // Output layer: built from the last weight layer.
        $this->network[$layerCount] = [];
        foreach ($synaptic_weights[$layerCount - 1] as $neuronWeights) {
            $this->network[$layerCount][] = new $this->outputLayerNeuronClass($neuronWeights);
        }
    }

    /**
     * Feeds $inputs forward through every layer and returns the outputs of
     * the output layer, one value per output neuron.
     */
    public function test(array $inputs): array
    {
        // Prime the input layer with the raw feature values.
        foreach ($this->network[0] as $index => $inputNeuron) {
            $inputNeuron->setInput($inputs[$index]);
        }

        $output = [];
        for ($layerIndex = 0; $layerIndex < count($this->network); $layerIndex++) {
            $previousLayerOutput = $output;
            $output = [];
            foreach ($this->network[$layerIndex] as $neuron) {
                // Each neuron prepends its own bias input internally.
                $output[] = $neuron->test($previousLayerOutput)[0];
            }
        }

        return $output;
    }

    /** Identity — the network itself applies no extra activation. */
    public function activationFunction(float $weighted_sum): float
    {
        return $weighted_sum;
    }

    /** Replaces the weights and rebuilds all neuron layers accordingly. */
    public function setSynapticWeights(array $synaptic_weights): void
    {
        parent::setSynapticWeights($synaptic_weights);
        $this->network = [];
        $this->initializeNetwork($synaptic_weights);
    }
}
1.0 : -1.0; + return $weighted_sum; + } +} diff --git a/app/Services/DatasetReader/IDataSetReader.php b/app/Services/DatasetReader/IDataSetReader.php index 52bc0cd..42c294d 100644 --- a/app/Services/DatasetReader/IDataSetReader.php +++ b/app/Services/DatasetReader/IDataSetReader.php @@ -8,6 +8,10 @@ interface IDataSetReader public function getInputSize(): int; + public function getOutputSize(): int; + + public function getLabels(): array; + public function reset(): void; public function getLastReadLineIndex(): int; diff --git a/app/Services/DatasetReader/LinearOrderDataSetReader.php b/app/Services/DatasetReader/LinearOrderDataSetReader.php index 548ecf2..219396e 100644 --- a/app/Services/DatasetReader/LinearOrderDataSetReader.php +++ b/app/Services/DatasetReader/LinearOrderDataSetReader.php @@ -49,6 +49,19 @@ class LinearOrderDataSetReader implements IDataSetReader return count($this->lines[0]) - 1; // Don't count the label } + public function getOutputSize(): int + { + // Count the number of unique labels in the dataset + $labels = array_map(fn ($line) => end($line), $this->lines); + return count(array_unique($labels)); + } + + public function getLabels(): array + { + $labels = array_map(fn ($line) => end($line), $this->lines); + return array_values(array_unique($labels)); + } + public function reset(): void { $this->currentLines = $this->lines; diff --git a/app/Services/DatasetReader/RandomOrderDataSetReader.php b/app/Services/DatasetReader/RandomOrderDataSetReader.php index d8e4354..558a7f5 100644 --- a/app/Services/DatasetReader/RandomOrderDataSetReader.php +++ b/app/Services/DatasetReader/RandomOrderDataSetReader.php @@ -55,6 +55,19 @@ class RandomOrderDataSetReader implements IDataSetReader return count($this->lines[0]) - 1; // Don't count the label } + public function getOutputSize(): int + { + // Count the number of unique labels in the dataset + $labels = array_map(fn ($line) => end($line), $this->lines); + return count(array_unique($labels)); + } + + 
public function getLabels(): array + { + $labels = array_map(fn ($line) => end($line), $this->lines); + return array_values(array_unique($labels)); + } + public function reset(): void { $this->currentLines = $this->lines; diff --git a/app/Services/SynapticWeightsProvider/INetworkSynapticWeightsProvider.php b/app/Services/SynapticWeightsProvider/INetworkSynapticWeightsProvider.php new file mode 100644 index 0000000..802140f --- /dev/null +++ b/app/Services/SynapticWeightsProvider/INetworkSynapticWeightsProvider.php @@ -0,0 +1,8 @@ +synapticWeightsProvider->generate($lastLayerSize); + } + $lastLayerSize = $hidden_layers_neurons_count; + } + + // Generate Output Layer weights + $synaptic_weights[] = []; + for ($outputNeuronIndex = 0; $outputNeuronIndex < $output_size; $outputNeuronIndex++) { + $synaptic_weights[count($synaptic_weights) -1][] = $this->synapticWeightsProvider->generate($lastLayerSize); + } + + return $synaptic_weights; + } +} diff --git a/public/data_sets/table_3_1.csv b/public/data_sets/table_3_1.csv index 6ad9115..b9fa34d 100644 --- a/public/data_sets/table_3_1.csv +++ b/public/data_sets/table_3_1.csv @@ -1,150 +1,150 @@ -2.8,1.9,-1,-1,1 -2.9,1.8,-1,-1,1 -2.2,1.5,-1,-1,1 -1.3,5.6,-1,1,-1 --1.2,1.6,1,-1,-1 -2.5,1.7,-1,-1,1 -3.1,2.3,-1,-1,1 -2.8,2.4,-1,-1,1 --1.2,1.5,1,-1,-1 -1.4,6.1,-1,1,-1 --1.2,1.3,1,-1,-1 -3.1,1.8,-1,-1,1 -3.3,2.5,-1,-1,1 -3.4,2.3,-1,-1,1 -1.5,5.9,-1,1,-1 -1.3,5.6,-1,1,-1 --1.2,1,1,-1,-1 -3,2.1,-1,-1,1 -1.4,5.2,-1,1,-1 -1.2,5.5,-1,1,-1 -1.4,6.7,-1,1,-1 --1.3,1.7,1,-1,-1 -1,5.7,-1,1,-1 -2.6,1.4,-1,-1,1 -1.6,6,-1,1,-1 -1.2,5.8,-1,1,-1 -1.4,6.1,-1,1,-1 -1.3,5.7,-1,1,-1 --1.2,1.4,1,-1,-1 -1,5.5,-1,1,-1 -3.1,2.1,-1,-1,1 --1.4,1.7,1,-1,-1 -1.6,6.3,-1,1,-1 --1.2,1.4,1,-1,-1 -2.7,1.9,-1,-1,1 -1.1,5.5,-1,1,-1 -3,1.8,-1,-1,1 -3.8,2,-1,-1,1 -2.8,2.1,-1,-1,1 -1,5,-1,1,-1 -2.8,2,-1,-1,1 --1.2,1.6,1,-1,-1 -1.3,6.6,-1,1,-1 -1.6,6,-1,1,-1 -3,1.6,-1,-1,1 --1.1,1.1,1,-1,-1 -2.9,1.8,-1,-1,1 -3,1.8,-1,-1,1 --1.2,1.4,1,-1,-1 -1,5.8,-1,1,-1 
-1.2,5.7,-1,1,-1 -2.8,1.5,-1,-1,1 --1.4,1.6,1,-1,-1 --1.2,1.4,1,-1,-1 -2.8,2.2,-1,-1,1 --1.5,1.7,1,-1,-1 -1,4.9,-1,1,-1 -1.3,5.7,-1,1,-1 -3.2,2.3,-1,-1,1 --1.3,1.5,1,-1,-1 --1.2,1.2,1,-1,-1 --1.2,1.6,1,-1,-1 -3,2.1,-1,-1,1 --1.4,1.5,1,-1,-1 -1.3,5.5,-1,1,-1 -3.3,2.5,-1,-1,1 -3.1,2.4,-1,-1,1 -1.8,5.9,-1,1,-1 -2.7,1.9,-1,-1,1 -1.5,6.3,-1,1,-1 --1.3,1.4,1,-1,-1 -1,5,-1,1,-1 -3.2,2.3,-1,-1,1 --1.3,1.4,1,-1,-1 -3,2.1,-1,-1,1 -3.2,2,-1,-1,1 -1.3,6.3,-1,1,-1 -1.4,7,-1,1,-1 --1.2,1.7,1,-1,-1 -1.4,6.6,-1,1,-1 -1.2,5.8,-1,1,-1 -3,2,-1,-1,1 --1.2,1.6,1,-1,-1 -1.5,6.2,-1,1,-1 -1.5,6.4,-1,1,-1 --1.2,1.9,1,-1,-1 -3.2,2.3,-1,-1,1 --1.2,1.5,1,-1,-1 -1.5,5.4,-1,1,-1 -1.5,6,-1,1,-1 -1.5,6.9,-1,1,-1 --1.4,1.5,1,-1,-1 -1.3,6.4,-1,1,-1 -2.8,2,-1,-1,1 --1.2,1.5,1,-1,-1 --1.1,1.5,1,-1,-1 -3.6,2.5,-1,-1,1 -1.1,5.1,-1,1,-1 --1.1,1.5,1,-1,-1 --1.2,1.5,1,-1,-1 -3.4,2.4,-1,-1,1 --1.3,1.3,1,-1,-1 -3,2.3,-1,-1,1 --1.1,1.5,1,-1,-1 -1.1,5.6,-1,1,-1 -1.3,5.6,-1,1,-1 --1.2,1.5,1,-1,-1 --1.2,1.3,1,-1,-1 -1.7,6.7,-1,1,-1 --1.2,1.3,1,-1,-1 --1.4,1.3,1,-1,-1 -2.5,1.8,-1,-1,1 --1.4,1.5,1,-1,-1 -1.5,6.5,-1,1,-1 -2.6,2.3,-1,-1,1 --1.2,1.4,1,-1,-1 -1.5,5.6,-1,1,-1 -1.3,5.7,-1,1,-1 -1.2,6.1,-1,1,-1 -3,2.2,-1,-1,1 -3,1.8,-1,-1,1 --1.1,1.4,1,-1,-1 -1.4,6.8,-1,1,-1 --1.6,1.6,1,-1,-1 --1.2,1.4,1,-1,-1 -3.2,1.8,-1,-1,1 --1.2,1.5,1,-1,-1 -2.7,1.8,-1,-1,1 --1.3,1.4,1,-1,-1 -1.3,6.2,-1,1,-1 --1.3,1.3,1,-1,-1 -3,1.8,-1,-1,1 -2.7,1.9,-1,-1,1 --1.2,1.4,1,-1,-1 --1.2,1.2,1,-1,-1 -1.5,6.7,-1,1,-1 -2.5,1.9,-1,-1,1 -3.3,2.1,-1,-1,1 -2.8,1.8,-1,-1,1 --1.2,1.3,1,-1,-1 -3.8,2.2,-1,-1,1 -2.5,2,-1,-1,1 -1.3,6.1,-1,1,-1 --1.2,1.4,1,-1,-1 --1.1,1.5,1,-1,-1 -3,2.3,-1,-1,1 --1.4,1.9,1,-1,-1 --1.2,1.6,1,-1,-1 -1,6,-1,1,-1 -1.3,5.5,-1,1,-1 +2.8,1.9,1 +2.9,1.8,1 +2.2,1.5,1 +1.3,5.6,0 +-1.2,1.6,-1 +2.5,1.7,1 +3.1,2.3,1 +2.8,2.4,1 +-1.2,1.5,-1 +1.4,6.1,0 +-1.2,1.3,-1 +3.1,1.8,1 +3.3,2.5,1 +3.4,2.3,1 +1.5,5.9,0 +1.3,5.6,0 +-1.2,1,-1 +3,2.1,1 +1.4,5.2,0 +1.2,5.5,0 +1.4,6.7,0 +-1.3,1.7,-1 +1,5.7,0 +2.6,1.4,1 +1.6,6,0 +1.2,5.8,0 +1.4,6.1,0 
+1.3,5.7,0 +-1.2,1.4,-1 +1,5.5,0 +3.1,2.1,1 +-1.4,1.7,-1 +1.6,6.3,0 +-1.2,1.4,-1 +2.7,1.9,1 +1.1,5.5,0 +3,1.8,1 +3.8,2,1 +2.8,2.1,1 +1,5,0 +2.8,2,1 +-1.2,1.6,-1 +1.3,6.6,0 +1.6,6,0 +3,1.6,1 +-1.1,1.1,-1 +2.9,1.8,1 +3,1.8,1 +-1.2,1.4,-1 +1,5.8,0 +1.2,5.7,0 +2.8,1.5,1 +-1.4,1.6,-1 +-1.2,1.4,-1 +2.8,2.2,1 +-1.5,1.7,-1 +1,4.9,0 +1.3,5.7,0 +3.2,2.3,1 +-1.3,1.5,-1 +-1.2,1.2,-1 +-1.2,1.6,-1 +3,2.1,1 +-1.4,1.5,-1 +1.3,5.5,0 +3.3,2.5,1 +3.1,2.4,1 +1.8,5.9,0 +2.7,1.9,1 +1.5,6.3,0 +-1.3,1.4,-1 +1,5,0 +3.2,2.3,1 +-1.3,1.4,-1 +3,2.1,1 +3.2,2,1 +1.3,6.3,0 +1.4,7,0 +-1.2,1.7,-1 +1.4,6.6,0 +1.2,5.8,0 +3,2,1 +-1.2,1.6,-1 +1.5,6.2,0 +1.5,6.4,0 +-1.2,1.9,-1 +3.2,2.3,1 +-1.2,1.5,-1 +1.5,5.4,0 +1.5,6,0 +1.5,6.9,0 +-1.4,1.5,-1 +1.3,6.4,0 +2.8,2,1 +-1.2,1.5,-1 +-1.1,1.5,-1 +3.6,2.5,1 +1.1,5.1,0 +-1.1,1.5,-1 +-1.2,1.5,-1 +3.4,2.4,1 +-1.3,1.3,-1 +3,2.3,1 +-1.1,1.5,-1 +1.1,5.6,0 +1.3,5.6,0 +-1.2,1.5,-1 +-1.2,1.3,-1 +1.7,6.7,0 +-1.2,1.3,-1 +-1.4,1.3,-1 +2.5,1.8,1 +-1.4,1.5,-1 +1.5,6.5,0 +2.6,2.3,1 +-1.2,1.4,-1 +1.5,5.6,0 +1.3,5.7,0 +1.2,6.1,0 +3,2.2,1 +3,1.8,1 +-1.1,1.4,-1 +1.4,6.8,0 +-1.6,1.6,-1 +-1.2,1.4,-1 +3.2,1.8,1 +-1.2,1.5,-1 +2.7,1.8,1 +-1.3,1.4,-1 +1.3,6.2,0 +-1.3,1.3,-1 +3,1.8,1 +2.7,1.9,1 +-1.2,1.4,-1 +-1.2,1.2,-1 +1.5,6.7,0 +2.5,1.9,1 +3.3,2.1,1 +2.8,1.8,1 +-1.2,1.3,-1 +3.8,2.2,1 +2.5,2,1 +1.3,6.1,0 +-1.2,1.4,-1 +-1.1,1.5,-1 +3,2.3,1 +-1.4,1.9,-1 +-1.2,1.6,-1 +1,6,0 +1.3,5.5,0 diff --git a/public/data_sets/table_3_5.csv b/public/data_sets/table_3_5.csv index 7616d23..611e04b 100644 --- a/public/data_sets/table_3_5.csv +++ b/public/data_sets/table_3_5.csv @@ -1,4 +1,4 @@ -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,-1,-1,-1 -0,0,1,0,0,0,0,1,0,0,1,1,1,1,1,0,0,1,0,0,0,0,1,0,0,-1,1,-1,-1 -1,0,0,0,1,0,1,0,1,0,0,0,1,0,0,0,1,0,1,0,1,0,0,0,1,-1,-1,1,-1 -0,0,0,0,0,0,1,1,1,0,0,1,0,1,0,0,1,1,1,0,0,0,0,0,0,-1,-1,-1,1 +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 +0,0,1,0,0,0,0,1,0,0,1,1,1,1,1,0,0,1,0,0,0,0,1,0,0,1 +1,0,0,0,1,0,1,0,1,0,0,0,1,0,0,0,1,0,1,0,1,0,0,0,1,2 
+0,0,0,0,0,0,1,1,1,0,0,1,0,1,0,0,1,1,1,0,0,0,0,0,0,3 diff --git a/resources/js/components/LinkHeader.vue b/resources/js/components/LinkHeader.vue index dfded11..0f1cba3 100644 --- a/resources/js/components/LinkHeader.vue +++ b/resources/js/components/LinkHeader.vue @@ -20,6 +20,16 @@ const links = [ href: '/perceptron', data: { type: 'adaline' }, }, + { + name: 'Mono-couche', + href: '/perceptron', + data: { type: 'monolayer' }, + }, + { + name: 'Multi-couche', + href: '/perceptron', + data: { type: 'multilayer' }, + }, ]; const isActiveLink = (link: any) => { diff --git a/resources/js/components/PerceptronDecisionGraph.vue b/resources/js/components/PerceptronDecisionGraph.vue index 70d1db7..ed5eec6 100644 --- a/resources/js/components/PerceptronDecisionGraph.vue +++ b/resources/js/components/PerceptronDecisionGraph.vue @@ -5,7 +5,7 @@ import type { BubbleDataPoint, Point, } from 'chart.js'; -import { computed } from 'vue'; +import { computed, ref } from 'vue'; import { Chart } from 'vue-chartjs'; import { colors, gridColor, gridColorBold } from '@/types/graphs'; import type { Iteration } from '@/types/perceptron'; @@ -16,6 +16,10 @@ const props = defineProps<{ activationFunction: (x: number) => number; }>(); +const examplesNumber = computed(() => { + return props.cleanedDataset.reduce((sum, dataset) => sum + dataset.data.length, 0); +}); + const farLeftDataPointX = computed(() => { if (props.cleanedDataset.length === 0) { return 0; @@ -25,6 +29,15 @@ const farLeftDataPointX = computed(() => { ); return minX; }); +const farBottomDataPointY = computed(() => { + if (props.cleanedDataset.length === 0) { + return 0; + } + const minY = Math.min( + ...props.cleanedDataset.flatMap((d) => d.data.map((point) => point.y)), + ); + return minY; +}); const farRightDataPointX = computed(() => { if (props.cleanedDataset.length === 0) { return 0; @@ -34,8 +47,20 @@ const farRightDataPointX = computed(() => { ); return maxX; }); +const farTopDataPointY = computed(() => { + if 
(props.cleanedDataset.length === 0) { + return 0; + } + const maxY = Math.max( + ...props.cleanedDataset.flatMap((d) => d.data.map((point) => point.y)), + ); + return maxY; +}); -function getPerceptronOutput(weightsNetwork: number[][], inputs: number[]): number[] { +function getPerceptronOutput( + weightsNetwork: number[][][], + inputs: number[], +): number[] { for (const layer of weightsNetwork) { const nextInputs: number[] = []; @@ -59,13 +84,14 @@ function getPerceptronOutput(weightsNetwork: number[][], inputs: number[]): numb return inputs; } +const nonLinearGraph = ref(false); function getPerceptronDecisionBoundaryDataset( networkWeights: number[][][], activationFunction: (x: number) => number = (x) => x, ): ChartDataset< keyof ChartTypeRegistry, number | Point | [number, number] | BubbleDataPoint | null -> { +>[] { const label = 'Ligne de décision du Perceptron'; console.log('Calculating decision boundary with weights:', networkWeights); @@ -74,6 +100,7 @@ function getPerceptronDecisionBoundaryDataset( networkWeights[0].length == 1 && networkWeights[0][0].length <= 3 ) { + nonLinearGraph.value = false; // Unique, 3 weights perceptron const perceptronWeights = [...networkWeights[0][0]]; // Copy of the unique perceptron weights @@ -89,76 +116,84 @@ function getPerceptronDecisionBoundaryDataset( } // Simple line - return { - type: 'line', - label: label, - data: [ - { - x: farLeftDataPointX.value - 1, - y: perceptronLine(farLeftDataPointX.value - 1), - }, - { - x: farRightDataPointX.value + 1, - y: perceptronLine(farRightDataPointX.value + 1), - }, - ], - borderColor: '#FFF', - borderWidth: 2, - pointRadius: 0, - }; + return [ + { + type: 'line', + label: label, + data: [ + { + x: farLeftDataPointX.value - 1, + y: perceptronLine(farLeftDataPointX.value - 1), + }, + { + x: farRightDataPointX.value + 1, + y: perceptronLine(farRightDataPointX.value + 1), + }, + ], + borderColor: '#FFF', + borderWidth: 2, + pointRadius: 0, + }, + ]; } else { - function forward(x1: 
number, x2: number): number { - let activations: number[] = [x1, x2]; + nonLinearGraph.value = true; - for (const layer of networkWeights) { - const nextActivations: number[] = []; + const bubbleTransparency = '30'; + const isInDataThreshold = 0.0; - for (const neuron of layer) { - const bias = neuron[0]; - const weights = neuron.slice(1); - let sum = bias; - - for (let i = 0; i < weights.length; i++) { - sum += weights[i] * activations[i]; - } - - const activated = activationFunction(sum); - - nextActivations.push(activated); - } - - activations = nextActivations; - } - - return activations[0]; // on suppose sortie unique + // -------- 1️⃣ Construction des datasets -------- + const datasets: { + type: string; + label: string; + data: Point[]; + backgroundColor: string; + pointRadius: number; + borderWidth: number; + order: number; + }[] = []; + // For the number of neuron in the last layer + const lastLayer = networkWeights[networkWeights.length - 1]; + for (let i = 0; i < lastLayer.length; i++) { + const dataset = { + type: 'scatter', + label: label, + data: [], // Will be filled with the decision boundary points + backgroundColor: colors[i] + bubbleTransparency || '#AAA', + pointRadius: 15, + borderWidth: 0, + order: -1, + }; + datasets.push(dataset); } // -------- 2️⃣ Échantillonnage grille -------- - const decisionBoundary: Point[] = []; - const min = -2; - const max = 2; - const step = 0.03; - const epsilon = 0.01; + const step = + Math.abs( + farRightDataPointX.value + 1 - (farLeftDataPointX.value - 1), + ) / 50; - for (let x = min; x <= max; x += step) { - for (let y = min; y <= max; y += step) { - const value = forward(x, y); - - if (Math.abs(value) < epsilon) { - decisionBoundary.push({ x, y }); - } + for ( + let x = farLeftDataPointX.value - 1; + x <= farRightDataPointX.value + 1; + x += step + ) { + for ( + let y = farBottomDataPointY.value - 1; + y <= farTopDataPointY.value + 1; + y += step + ) { + const values = getPerceptronOutput(networkWeights, [x, 
y]); + values.forEach((v, i) => { + if (v > isInDataThreshold) { + datasets[i].data.push({ x, y }); + } + }); } } // -------- 3️⃣ Dataset ChartJS -------- - return { - type: 'scatter', - label: label, - data: decisionBoundary, - backgroundColor: '#FFFFFF', - pointRadius: 1, - }; + return datasets; } } @@ -180,6 +215,9 @@ function getPerceptronDecisionBoundaryDataset( text: 'Ligne de décision du Perceptron', }, }, + animation: { + duration: nonLinearGraph || examplesNumber > 10 ? 0 : 1000, // Disable animations for instant updates + }, layout: { padding: { left: 10, @@ -228,7 +266,7 @@ function getPerceptronDecisionBoundaryDataset( })), // Perceptron decision boundary - getPerceptronDecisionBoundaryDataset( + ...getPerceptronDecisionBoundaryDataset( props.iterations.length > 0 ? props.iterations[props.iterations.length - 1].weights : [[[0, 0, 0]]], diff --git a/resources/js/components/PerceptronIterationsErrorsGraph.vue b/resources/js/components/PerceptronIterationsErrorsGraph.vue index 849bb25..89147c3 100644 --- a/resources/js/components/PerceptronIterationsErrorsGraph.vue +++ b/resources/js/components/PerceptronIterationsErrorsGraph.vue @@ -94,6 +94,9 @@ const datasets = computed< text: 'Nombre d\'erreurs par epoch', }, }, + animation: { + duration: iterations.length > 100 ? 
0 : 1000, // Disable animations for instant updates + }, scales: { x: { stacked: true, diff --git a/tests/Unit/Training/TrainingTestCase.php b/tests/Unit/Training/TrainingTestCase.php index 8d418ce..854f4c9 100644 --- a/tests/Unit/Training/TrainingTestCase.php +++ b/tests/Unit/Training/TrainingTestCase.php @@ -14,8 +14,8 @@ class TrainingTestCase extends TestCase $training->start(); // Assert that the final synaptic weights are as expected withing the margin of error - // $finalWeights = $training->getSynapticWeights(); - // $this->assertEqualsWithDelta($expectedWeights, $finalWeights, $marginOfError, "Final synaptic weights do not match expected values."); + $finalWeights = $training->getSynapticWeights(); + $this->assertEqualsWithDelta($expectedWeights, $finalWeights, $marginOfError, "Final synaptic weights do not match expected values."); // Assert that the number of epochs taken is as expected $this->assertEquals($expectedEpochs, $training->getEpoch(), "Expected training to take $expectedEpochs epochs, but it took {$training->getEpoch()} epochs.");