Compare commits

...

1 Commits

Author SHA1 Message Date
2f4db07918 MonoLayer Perceptron
All checks were successful
linter / quality (push) Successful in 6m16s
tests / ci (8.4) (push) Successful in 4m10s
tests / ci (8.5) (push) Successful in 4m29s
2026-04-04 16:45:04 +02:00
21 changed files with 641 additions and 226 deletions

View File

@@ -5,6 +5,7 @@ namespace App\Http\Controllers;
use App\Events\PerceptronInitialization;
use App\Models\NetworksTraining\ADALINEPerceptronTraining;
use App\Models\NetworksTraining\GradientDescentPerceptronTraining;
use App\Models\NetworksTraining\MonoLayerPerceptronTraining;
use App\Models\NetworksTraining\SimpleBinaryPerceptronTraining;
use App\Services\DatasetReader\IDataSetReader;
use App\Services\DatasetReader\LinearOrderDataSetReader;
@@ -13,7 +14,11 @@ use App\Services\IterationEventBuffer\PerceptronIterationEventBuffer;
use App\Services\IterationEventBuffer\PerceptronLimitedEpochEventBuffer;
use App\Services\SynapticWeightsProvider\ISynapticWeightsProvider;
use App\Services\SynapticWeightsProvider\ZeroSynapticWeights;
use Illuminate\Contracts\Queue\Job;
use Illuminate\Http\Request;
use Illuminate\Support\Facades\DB;
use Symfony\Contracts\EventDispatcher\Event;
use Tests\Services\IterationEventBuffer\DullIterationEventBuffer;
class PerceptronController extends Controller
{
@@ -126,6 +131,8 @@ class PerceptronController extends Controller
public function run(Request $request, ISynapticWeightsProvider $synapticWeightsProvider)
{
$startTime = microtime(true);
$perceptronType = $request->input('type');
$minError = $request->input('min_error', 0.01);
$weightInitMethod = $request->input('weight_init_method', 'random');
@@ -135,6 +142,9 @@ class PerceptronController extends Controller
$sessionId = $request->input('session_id', session()->getId());
$trainingId = $request->input('training_id');
// Remove the jobs for the sessionId
DB::table('jobs')->where('payload', 'like', '%s:9:\"sessionId\";s:40:\"'. $sessionId .'\";%')->delete();
if ($weightInitMethod === 'zeros') {
$synapticWeightsProvider = new ZeroSynapticWeights;
}
@@ -151,6 +161,7 @@ class PerceptronController extends Controller
'simple' => new SimpleBinaryPerceptronTraining($datasetReader, $learningRate, $maxEpochs, $synapticWeightsProvider, $iterationEventBuffer, $sessionId, $trainingId),
'gradientdescent' => new GradientDescentPerceptronTraining($datasetReader, $learningRate, $maxEpochs, $synapticWeightsProvider, $iterationEventBuffer, $sessionId, $trainingId, $minError),
'adaline' => new ADALINEPerceptronTraining($datasetReader, $learningRate, $maxEpochs, $synapticWeightsProvider, $iterationEventBuffer, $sessionId, $trainingId, $minError),
'monolayer' => new MonoLayerPerceptronTraining($datasetReader, $learningRate, $maxEpochs, $synapticWeightsProvider, $iterationEventBuffer, $sessionId, $trainingId, $minError),
default => null,
};
@@ -160,6 +171,7 @@ class PerceptronController extends Controller
return response()->json([
'message' => 'Training completed',
'execution_time' => microtime(true) - $startTime,
]);
}
}

View File

@@ -66,7 +66,7 @@ class ADALINEPerceptronTraining extends NetworkTraining
foreach ($inputsForCurrentEpoch as $inputsWithLabel) {
$inputs = array_slice($inputsWithLabel, 0, -1);
$correctOutput = (float) end($inputsWithLabel);
$output = $this->perceptron->test($inputs);
$output = $this->perceptron->test($inputs)[0];
$iterationError = $correctOutput - $output;
$this->epochError += ($iterationError ** 2) / 2; // Squared error for the example
}
@@ -92,7 +92,7 @@ class ADALINEPerceptronTraining extends NetworkTraining
private function iterationFunction(array $inputs, float $correctOutput): float
{
$output = $this->perceptron->test($inputs);
$output = $this->perceptron->test($inputs)[0];
$error = $correctOutput - $output;

View File

@@ -88,7 +88,7 @@ class GradientDescentPerceptronTraining extends NetworkTraining
private function iterationFunction(array $inputs, float $correctOutput): float
{
$output = $this->perceptron->test($inputs);
$output = $this->perceptron->test($inputs)[0];
$error = $correctOutput - $output;

View File

@@ -0,0 +1,157 @@
<?php
namespace App\Models\NetworksTraining;
use App\Events\PerceptronTrainingEnded;
use App\Models\ActivationsFunctions;
use App\Models\Perceptrons\GradientDescentPerceptron;
use App\Models\Perceptrons\NetworkPerceptron;
use App\Models\Perceptrons\Perceptron;
use App\Models\Perceptrons\SimpleBinaryPerceptron2;
use App\Models\Perceptrons\SimpleBinaryPerceptron;
use App\Services\DatasetReader\IDataSetReader;
use App\Services\IterationEventBuffer\IPerceptronIterationEventBuffer;
use App\Services\SynapticWeightsProvider\ISynapticWeightsProvider;
use App\Services\SynapticWeightsProvider\SimpleNetworkWeightsProvider;
use Illuminate\Support\Arr;
class MonoLayerPerceptronTraining extends NetworkTraining
{
    /** Single-layer network of perceptrons: one output neuron per distinct label. */
    private Perceptron $network;

    /** Distinct label values from the dataset, in the order used for the output neurons. */
    private array $labels;

    public ActivationsFunctions $activationFunction = ActivationsFunctions::LINEAR;

    public ?ActivationsFunctions $presentationLayerActivationFunction = ActivationsFunctions::STEP;

    /** Mean squared error of the last completed epoch. */
    private float $epochError;

    /**
     * @param IDataSetReader                  $datasetReader           Source of training examples.
     * @param float                           $learningRate            Weight-update step size.
     * @param int                             $maxEpochs               Hard cap on training epochs.
     * @param ISynapticWeightsProvider        $synapticWeightsProvider Initial weight generator for a single neuron.
     * @param IPerceptronIterationEventBuffer $iterationEventBuffer    Buffer broadcasting iterations to the frontend.
     * @param string                          $sessionId               Session that owns this training run.
     * @param string                          $trainingId              Identifier of this training run.
     * @param float                           $minError                Epoch error threshold that stops training.
     */
    public function __construct(
        IDataSetReader $datasetReader,
        protected float $learningRate,
        int $maxEpochs,
        ISynapticWeightsProvider $synapticWeightsProvider,
        IPerceptronIterationEventBuffer $iterationEventBuffer,
        string $sessionId,
        string $trainingId,
        private float $minError,
    ) {
        parent::__construct($datasetReader, $maxEpochs, $iterationEventBuffer, $sessionId, $trainingId);
        $networkWeightsProvider = new SimpleNetworkWeightsProvider($synapticWeightsProvider);
        $this->network = new NetworkPerceptron(
            $networkWeightsProvider->generate(
                $datasetReader->getInputSize(),
                $datasetReader->getOutputSize(),
                0, // No hidden layer
                0, // No hidden layer neurons
            ),
            $datasetReader->getInputSize(),
            GradientDescentPerceptron::class, // Unused here: no hidden layer is built
            SimpleBinaryPerceptron2::class,
        );
        $this->labels = $datasetReader->getLabels();
    }

    /**
     * Runs the training loop: one weight update per example (delta rule),
     * then an epoch-error pass, until the error drops below $minError or
     * $maxEpochs is reached.
     */
    public function start(): void
    {
        $this->epoch = 0;
        do {
            $this->epochError = 0;
            $this->epoch++;
            $inputsForCurrentEpoch = [];
            while ($nextRow = $this->datasetReader->getNextLine()) {
                $inputsForCurrentEpoch[] = $nextRow;
                $inputs = array_slice($nextRow, 0, -1);
                $correctOutput = (int) end($nextRow); // Last column is the label
                $iterationError = $this->iterationFunction($inputs, $correctOutput);
                // Synaptic weights correction after each example
                $synaptic_weights = $this->network->getSynapticWeights();
                $inputs_with_bias = array_merge([1], $inputs); // Add bias input
                // Updates the weights
                $this->network->setSynapticWeights(
                    $this->getUpdatedSynapticWeights($synaptic_weights, $iterationError, $inputs_with_bias)
                );
                // Broadcast the training iteration event.
                // NOTE(review): summing signed per-neuron errors lets opposite
                // errors cancel out — confirm this aggregate is intentional.
                $this->addIterationToBuffer(array_sum($iterationError), $this->network->getSynapticWeights());
            }
            // Calculate the average error for the epoch with the final synaptic weights
            foreach ($inputsForCurrentEpoch as $inputsWithLabel) {
                $inputs = array_slice($inputsWithLabel, 0, -1);
                // BUGFIX: cast to int (was float) to match iterationFunction()'s
                // int parameter and the cast used in the training loop above;
                // implicit lossy float-to-int coercion is deprecated in PHP 8.1+.
                $correctOutput = (int) end($inputsWithLabel);
                $iterationError = $this->iterationFunction($inputs, $correctOutput);
                foreach ($iterationError as $error) {
                    $this->epochError += ($error ** 2) / 2; // Squared error for the example
                }
            }
            $this->epochError /= $this->datasetReader->getEpochExamplesCount(); // Average error for the epoch
            $this->datasetReader->reset(); // Reset the dataset for the next iteration
        } while ($this->epoch < $this->maxEpochs && ! $this->stopCondition());
        $this->iterationEventBuffer->flush(); // Ensure all iterations are sent to the frontend
        $this->checkPassedMaxIterations($this->epochError);
    }

    /** True when the epoch error reached the requested minimum; fires the end event once it does. */
    protected function stopCondition(): bool
    {
        $condition = $this->epochError <= $this->minError;
        if ($condition === true) {
            event(new PerceptronTrainingEnded('Le perceptron à atteint l\'erreur minimale', $this->sessionId, $this->trainingId));
        }

        return $condition;
    }

    /**
     * Computes the per-output-neuron error for one example.
     *
     * @return float[] One signed error (desired - actual) per output neuron.
     */
    private function iterationFunction(array $inputs, int $correctOutput): array
    {
        $outputs = $this->network->test($inputs);
        $desiredOutput = $this->getDesiredOutputFromCorrectOutput($correctOutput);
        $errors = [];
        foreach ($outputs as $index => $output) {
            $errors[] = $desiredOutput[$index] - $output;
        }

        return $errors;
    }

    /**
     * Applies the delta rule to the single weight layer.
     *
     * @param array   $synaptic_weights Full network weights ([layer][neuron][weight]).
     * @param float[] $iterationError   Per-neuron errors from iterationFunction().
     * @param array   $inputs           Example inputs, bias already prepended.
     * @return array Updated weights, re-wrapped as a one-layer structure.
     */
    private function getUpdatedSynapticWeights(array $synaptic_weights, array $iterationError, array $inputs): array
    {
        $updatedWeights = [];
        foreach ($synaptic_weights[0] as $neuronIndex => $neuronWeights) { // There is only one layer of weights
            $updatedNeuronWeights = [];
            foreach ($neuronWeights as $weightIndex => $weight) {
                $updatedNeuronWeights[] = $weight + ($this->learningRate * $iterationError[$neuronIndex] * $inputs[$weightIndex]);
            }
            $updatedWeights[] = $updatedNeuronWeights;
        }

        return [$updatedWeights];
    }

    /**
     * One-vs-rest target encoding: the neuron whose label matches
     * $correctOutput should output 1, every other neuron -1.
     */
    private function getDesiredOutputFromCorrectOutput(int $correctOutput): array
    {
        $desiredOutput = array_fill(0, count($this->labels), -1);
        $labelIndex = Arr::first(array_keys($this->labels), fn ($key) => $this->labels[$key] == $correctOutput);
        if ($labelIndex !== null) {
            $desiredOutput[$labelIndex] = 1;
        }

        return $desiredOutput;
    }

    /**
     * NOTE(review): this wraps the already-layered network weights in two extra
     * arrays; verify consumers expect this depth rather than the raw
     * [layer][neuron][weight] structure returned by the network itself.
     */
    public function getSynapticWeights(): array
    {
        return [[$this->network->getSynapticWeights()]];
    }
}

View File

@@ -16,6 +16,8 @@ abstract class NetworkTraining
*/
public ActivationsFunctions $activationFunction;
public ?ActivationsFunctions $presentationLayerActivationFunction = null;
public function __construct(
protected IDataSetReader $datasetReader,
protected int $maxEpochs,

View File

@@ -71,7 +71,7 @@ class SimpleBinaryPerceptronTraining extends NetworkTraining
private function iterationFunction(array $inputs, int $correctOutput)
{
$output = $this->perceptron->test($inputs);
$output = $this->perceptron->test($inputs)[0];
$error = $correctOutput - $output;
if (abs($error) > $this::MIN_ERROR) {

View File

@@ -0,0 +1,26 @@
<?php
namespace App\Models\Perceptrons;
class InputNeuron extends Perceptron
{
    // Current input value fed into the network; written via setInput() before
    // each forward pass. BUGFIX: declared explicitly — the original relied on a
    // dynamic property, which is deprecated since PHP 8.2.
    private float $input = 0.0;

    public function __construct()
    {
        // An input neuron has no incoming synaptic weights.
        parent::__construct([]);
    }

    /** Stores the raw input value this neuron will emit on the next test() call. */
    public function setInput(float $input): void
    {
        $this->input = $input;
    }

    /**
     * Returns the stored input value; $inputs is intentionally ignored because
     * input-layer neurons simply forward the value set via setInput().
     */
    public function test(array $inputs): array
    {
        return [$this->input];
    }

    /** Identity function for input neurons: values pass through unchanged. */
    public function activationFunction(float $input): float
    {
        return $input;
    }
}

View File

@@ -0,0 +1,76 @@
<?php
namespace App\Models\Perceptrons;
class NetworkPerceptron extends Perceptron
{
    /**
     * Layered neuron structure: index 0 is the input layer, the highest index
     * is the output layer, anything in between is hidden.
     * @var array<int, array<int, Perceptron>>
     */
    public array $network = [];

    /**
     * @param array  $synaptic_weights        One entry per weighted layer (hidden layers
     *                                        first, output layer last); each entry is a
     *                                        list of per-neuron weight vectors.
     * @param int    $inputLayerNeuronsCount  Number of input neurons.
     * @param string $hiddenLayerNeuronClass  Perceptron subclass used for hidden neurons.
     * @param string $outputLayerNeuronClass  Perceptron subclass used for output neurons.
     */
    public function __construct(
        private array $synaptic_weights,
        private int $inputLayerNeuronsCount,
        private string $hiddenLayerNeuronClass,
        private string $outputLayerNeuronClass,
    ) {
        parent::__construct($synaptic_weights);
        $this->initializeNetwork($synaptic_weights);
    }

    /** Builds the neuron objects for every layer from the weight matrices. */
    private function initializeNetwork(array $synaptic_weights): void
    {
        // Input layer: pass-through neurons, no weights.
        // (Plain for-loop instead of range(): range(0, -1) would yield two
        // values and wrongly create neurons for an empty input layer.)
        $this->network[0] = [];
        for ($i = 0; $i < $this->inputLayerNeuronsCount; $i++) {
            $this->network[0][] = new InputNeuron();
        }

        // Hidden layers: every weight layer except the last one.
        // BUGFIX: the loop bound was `count - 2`, which skipped one hidden
        // layer and left a hole in $this->network for any multi-layer
        // configuration (harmless only when there is a single weight layer).
        $layersCount = count($synaptic_weights);
        for ($layerIndex = 0; $layerIndex < $layersCount - 1; $layerIndex++) {
            $this->network[$layerIndex + 1] = [];
            foreach ($synaptic_weights[$layerIndex] as $neuronWeights) {
                $this->network[$layerIndex + 1][] = new $this->hiddenLayerNeuronClass($neuronWeights);
            }
        }

        // Output layer: built from the last weight layer.
        $outputLayer = $synaptic_weights[$layersCount - 1];
        $this->network[$layersCount] = [];
        foreach ($outputLayer as $neuronWeights) {
            $this->network[$layersCount][] = new $this->outputLayerNeuronClass($neuronWeights);
        }
    }

    /**
     * Forward pass: feeds $inputs into the input layer, then propagates each
     * layer's outputs into the next.
     *
     * @return array One activation value per output-layer neuron.
     */
    public function test(array $inputs): array
    {
        // Set the inputs for the input layer
        foreach ($this->network[0] as $index => $inputNeuron) {
            $inputNeuron->setInput($inputs[$index]);
        }
        // Pass through the hidden and output layers; the input layer's
        // neurons ignore $lastLayerOutput (it starts empty).
        $output = [];
        for ($layerIndex = 0; $layerIndex < count($this->network); $layerIndex++) {
            $lastLayerOutput = $output;
            $output = [];
            foreach ($this->network[$layerIndex] as $neuron) {
                $output[] = $neuron->test($lastLayerOutput)[0];
            }
        }

        return $output;
    }

    /** Identity — per-neuron activation is delegated to the layer neuron classes. */
    public function activationFunction(float $weighted_sum): float
    {
        return $weighted_sum;
    }

    /** Replaces all weights and rebuilds the neuron structure from scratch. */
    public function setSynapticWeights(array $synaptic_weights): void
    {
        parent::setSynapticWeights($synaptic_weights);
        $this->network = [];
        $this->initializeNetwork($synaptic_weights);
    }
}

View File

@@ -2,9 +2,9 @@
namespace App\Models\Perceptrons;
use Illuminate\Database\Eloquent\Model;
// use Illuminate\Database\Eloquent\Model;
abstract class Perceptron extends Model
abstract class Perceptron
{
public function __construct(
private array $synaptic_weights,
@@ -12,7 +12,7 @@ abstract class Perceptron extends Model
$this->synaptic_weights = $synaptic_weights;
}
public function test(array $inputs): float
public function test(array $inputs): array
{
$inputs = array_merge([1], $inputs); // Add bias input
@@ -22,7 +22,7 @@ abstract class Perceptron extends Model
$weighted_sum = array_sum(array_map(fn ($input, $weight) => $input * $weight, $inputs, $this->synaptic_weights));
return $this->activationFunction($weighted_sum);
return [$this->activationFunction($weighted_sum)];
}
abstract public function activationFunction(float $weighted_sum): float;

View File

@@ -0,0 +1,18 @@
<?php
namespace App\Models\Perceptrons;
class SimpleBinaryPerceptron2 extends Perceptron
{
    // The redundant constructor (which only forwarded $synaptic_weights to the
    // parent) was removed; the inherited Perceptron constructor is identical.

    /**
     * Linear (identity) activation.
     *
     * NOTE(review): despite the "binary" name, the step activation
     * (`$weighted_sum >= 0.0 ? 1.0 : -1.0`) is commented out, so this class
     * currently behaves as a linear perceptron — confirm the name/behavior.
     */
    public function activationFunction(float $weighted_sum): float
    {
        return $weighted_sum;
    }
}

View File

@@ -8,6 +8,10 @@ interface IDataSetReader
public function getInputSize(): int;
public function getOutputSize(): int;
public function getLabels(): array;
public function reset(): void;
public function getLastReadLineIndex(): int;

View File

@@ -49,6 +49,19 @@ class LinearOrderDataSetReader implements IDataSetReader
return count($this->lines[0]) - 1; // Don't count the label
}
/**
 * Number of output neurons required: one per distinct label value
 * (the label is the last column of each line).
 */
public function getOutputSize(): int
{
    $distinctLabels = [];
    foreach ($this->lines as $line) {
        // String keys mirror array_unique's string-based comparison.
        $distinctLabels[(string) end($line)] = true;
    }

    return count($distinctLabels);
}
/**
 * Distinct label values in first-appearance order, reindexed from 0
 * (the label is the last column of each line).
 */
public function getLabels(): array
{
    $seen = [];
    foreach ($this->lines as $line) {
        $label = end($line);
        // String keys reproduce array_unique's string-comparison semantics
        // while keeping the first occurrence of each value.
        $key = (string) $label;
        if (! array_key_exists($key, $seen)) {
            $seen[$key] = $label;
        }
    }

    return array_values($seen);
}
public function reset(): void
{
$this->currentLines = $this->lines;

View File

@@ -55,6 +55,19 @@ class RandomOrderDataSetReader implements IDataSetReader
return count($this->lines[0]) - 1; // Don't count the label
}
/**
 * Number of output neurons required: one per distinct label value
 * (the label is the last column of each line).
 */
public function getOutputSize(): int
{
    $distinctLabels = [];
    foreach ($this->lines as $line) {
        // String keys mirror array_unique's string-based comparison.
        $distinctLabels[(string) end($line)] = true;
    }

    return count($distinctLabels);
}
/**
 * Distinct label values in first-appearance order, reindexed from 0
 * (the label is the last column of each line).
 */
public function getLabels(): array
{
    $seen = [];
    foreach ($this->lines as $line) {
        $label = end($line);
        // String keys reproduce array_unique's string-comparison semantics
        // while keeping the first occurrence of each value.
        $key = (string) $label;
        if (! array_key_exists($key, $seen)) {
            $seen[$key] = $label;
        }
    }

    return array_values($seen);
}
public function reset(): void
{
$this->currentLines = $this->lines;

View File

@@ -0,0 +1,8 @@
<?php
namespace App\Services\SynapticWeightsProvider;
/**
 * Produces initial synaptic weights for a multi-layer perceptron network.
 */
interface INetworkSynapticWeightsProvider
{
    /**
     * Generates weights for every weighted layer of a network.
     *
     * @param int $input_size                  Number of input neurons.
     * @param int $output_size                 Number of output neurons.
     * @param int $hidden_layers_count         Number of hidden layers (0 for a mono-layer network).
     * @param int $hidden_layers_neurons_count Neurons per hidden layer.
     * @return array Nested weight structure, one entry per weighted layer.
     */
    public function generate(int $input_size, int $output_size, int $hidden_layers_count, int $hidden_layers_neurons_count): array;
}

View File

@@ -0,0 +1,35 @@
<?php
namespace App\Services\SynapticWeightsProvider;
use App\Services\SynapticWeightsProvider\INetworkSynapticWeightsProvider;
class SimpleNetworkWeightsProvider implements INetworkSynapticWeightsProvider
{
    public function __construct(
        private ISynapticWeightsProvider $synapticWeightsProvider,
    ) {
    }

    /**
     * Generates initial weights for a fully-connected network.
     *
     * @return array One entry per weighted layer (hidden layers first, output
     *               layer last); each entry is a list of per-neuron weight
     *               vectors, i.e. [layer][neuron][weight].
     */
    public function generate(int $input_size, int $output_size, int $hidden_layers_count, int $hidden_layers_neurons_count): array
    {
        $synaptic_weights = [];
        $lastLayerSize = $input_size;

        // Generate hidden layer weights.
        // BUGFIX: hidden-neuron weight vectors were appended directly to the
        // top-level array (one pseudo-layer per neuron) instead of being
        // grouped per layer like the output layer below; consumers such as
        // NetworkPerceptron::initializeNetwork expect [layer][neuron][weight].
        for ($layerIndex = 0; $layerIndex < $hidden_layers_count; $layerIndex++) {
            $layerWeights = [];
            for ($neuronIndex = 0; $neuronIndex < $hidden_layers_neurons_count; $neuronIndex++) {
                // assumes the provider accounts for the bias weight itself — TODO confirm
                $layerWeights[] = $this->synapticWeightsProvider->generate($lastLayerSize);
            }
            $synaptic_weights[] = $layerWeights;
            $lastLayerSize = $hidden_layers_neurons_count;
        }

        // Generate output layer weights (always the last layer).
        $outputLayerWeights = [];
        for ($outputNeuronIndex = 0; $outputNeuronIndex < $output_size; $outputNeuronIndex++) {
            $outputLayerWeights[] = $this->synapticWeightsProvider->generate($lastLayerSize);
        }
        $synaptic_weights[] = $outputLayerWeights;

        return $synaptic_weights;
    }
}

View File

@@ -1,150 +1,150 @@
2.8,1.9,-1,-1,1
2.9,1.8,-1,-1,1
2.2,1.5,-1,-1,1
1.3,5.6,-1,1,-1
-1.2,1.6,1,-1,-1
2.5,1.7,-1,-1,1
3.1,2.3,-1,-1,1
2.8,2.4,-1,-1,1
-1.2,1.5,1,-1,-1
1.4,6.1,-1,1,-1
-1.2,1.3,1,-1,-1
3.1,1.8,-1,-1,1
3.3,2.5,-1,-1,1
3.4,2.3,-1,-1,1
1.5,5.9,-1,1,-1
1.3,5.6,-1,1,-1
-1.2,1,1,-1,-1
3,2.1,-1,-1,1
1.4,5.2,-1,1,-1
1.2,5.5,-1,1,-1
1.4,6.7,-1,1,-1
-1.3,1.7,1,-1,-1
1,5.7,-1,1,-1
2.6,1.4,-1,-1,1
1.6,6,-1,1,-1
1.2,5.8,-1,1,-1
1.4,6.1,-1,1,-1
1.3,5.7,-1,1,-1
-1.2,1.4,1,-1,-1
1,5.5,-1,1,-1
3.1,2.1,-1,-1,1
-1.4,1.7,1,-1,-1
1.6,6.3,-1,1,-1
-1.2,1.4,1,-1,-1
2.7,1.9,-1,-1,1
1.1,5.5,-1,1,-1
3,1.8,-1,-1,1
3.8,2,-1,-1,1
2.8,2.1,-1,-1,1
1,5,-1,1,-1
2.8,2,-1,-1,1
-1.2,1.6,1,-1,-1
1.3,6.6,-1,1,-1
1.6,6,-1,1,-1
3,1.6,-1,-1,1
-1.1,1.1,1,-1,-1
2.9,1.8,-1,-1,1
3,1.8,-1,-1,1
-1.2,1.4,1,-1,-1
1,5.8,-1,1,-1
1.2,5.7,-1,1,-1
2.8,1.5,-1,-1,1
-1.4,1.6,1,-1,-1
-1.2,1.4,1,-1,-1
2.8,2.2,-1,-1,1
-1.5,1.7,1,-1,-1
1,4.9,-1,1,-1
1.3,5.7,-1,1,-1
3.2,2.3,-1,-1,1
-1.3,1.5,1,-1,-1
-1.2,1.2,1,-1,-1
-1.2,1.6,1,-1,-1
3,2.1,-1,-1,1
-1.4,1.5,1,-1,-1
1.3,5.5,-1,1,-1
3.3,2.5,-1,-1,1
3.1,2.4,-1,-1,1
1.8,5.9,-1,1,-1
2.7,1.9,-1,-1,1
1.5,6.3,-1,1,-1
-1.3,1.4,1,-1,-1
1,5,-1,1,-1
3.2,2.3,-1,-1,1
-1.3,1.4,1,-1,-1
3,2.1,-1,-1,1
3.2,2,-1,-1,1
1.3,6.3,-1,1,-1
1.4,7,-1,1,-1
-1.2,1.7,1,-1,-1
1.4,6.6,-1,1,-1
1.2,5.8,-1,1,-1
3,2,-1,-1,1
-1.2,1.6,1,-1,-1
1.5,6.2,-1,1,-1
1.5,6.4,-1,1,-1
-1.2,1.9,1,-1,-1
3.2,2.3,-1,-1,1
-1.2,1.5,1,-1,-1
1.5,5.4,-1,1,-1
1.5,6,-1,1,-1
1.5,6.9,-1,1,-1
-1.4,1.5,1,-1,-1
1.3,6.4,-1,1,-1
2.8,2,-1,-1,1
-1.2,1.5,1,-1,-1
-1.1,1.5,1,-1,-1
3.6,2.5,-1,-1,1
1.1,5.1,-1,1,-1
-1.1,1.5,1,-1,-1
-1.2,1.5,1,-1,-1
3.4,2.4,-1,-1,1
-1.3,1.3,1,-1,-1
3,2.3,-1,-1,1
-1.1,1.5,1,-1,-1
1.1,5.6,-1,1,-1
1.3,5.6,-1,1,-1
-1.2,1.5,1,-1,-1
-1.2,1.3,1,-1,-1
1.7,6.7,-1,1,-1
-1.2,1.3,1,-1,-1
-1.4,1.3,1,-1,-1
2.5,1.8,-1,-1,1
-1.4,1.5,1,-1,-1
1.5,6.5,-1,1,-1
2.6,2.3,-1,-1,1
-1.2,1.4,1,-1,-1
1.5,5.6,-1,1,-1
1.3,5.7,-1,1,-1
1.2,6.1,-1,1,-1
3,2.2,-1,-1,1
3,1.8,-1,-1,1
-1.1,1.4,1,-1,-1
1.4,6.8,-1,1,-1
-1.6,1.6,1,-1,-1
-1.2,1.4,1,-1,-1
3.2,1.8,-1,-1,1
-1.2,1.5,1,-1,-1
2.7,1.8,-1,-1,1
-1.3,1.4,1,-1,-1
1.3,6.2,-1,1,-1
-1.3,1.3,1,-1,-1
3,1.8,-1,-1,1
2.7,1.9,-1,-1,1
-1.2,1.4,1,-1,-1
-1.2,1.2,1,-1,-1
1.5,6.7,-1,1,-1
2.5,1.9,-1,-1,1
3.3,2.1,-1,-1,1
2.8,1.8,-1,-1,1
-1.2,1.3,1,-1,-1
3.8,2.2,-1,-1,1
2.5,2,-1,-1,1
1.3,6.1,-1,1,-1
-1.2,1.4,1,-1,-1
-1.1,1.5,1,-1,-1
3,2.3,-1,-1,1
-1.4,1.9,1,-1,-1
-1.2,1.6,1,-1,-1
1,6,-1,1,-1
1.3,5.5,-1,1,-1
2.8,1.9,1
2.9,1.8,1
2.2,1.5,1
1.3,5.6,0
-1.2,1.6,-1
2.5,1.7,1
3.1,2.3,1
2.8,2.4,1
-1.2,1.5,-1
1.4,6.1,0
-1.2,1.3,-1
3.1,1.8,1
3.3,2.5,1
3.4,2.3,1
1.5,5.9,0
1.3,5.6,0
-1.2,1,-1
3,2.1,1
1.4,5.2,0
1.2,5.5,0
1.4,6.7,0
-1.3,1.7,-1
1,5.7,0
2.6,1.4,1
1.6,6,0
1.2,5.8,0
1.4,6.1,0
1.3,5.7,0
-1.2,1.4,-1
1,5.5,0
3.1,2.1,1
-1.4,1.7,-1
1.6,6.3,0
-1.2,1.4,-1
2.7,1.9,1
1.1,5.5,0
3,1.8,1
3.8,2,1
2.8,2.1,1
1,5,0
2.8,2,1
-1.2,1.6,-1
1.3,6.6,0
1.6,6,0
3,1.6,1
-1.1,1.1,-1
2.9,1.8,1
3,1.8,1
-1.2,1.4,-1
1,5.8,0
1.2,5.7,0
2.8,1.5,1
-1.4,1.6,-1
-1.2,1.4,-1
2.8,2.2,1
-1.5,1.7,-1
1,4.9,0
1.3,5.7,0
3.2,2.3,1
-1.3,1.5,-1
-1.2,1.2,-1
-1.2,1.6,-1
3,2.1,1
-1.4,1.5,-1
1.3,5.5,0
3.3,2.5,1
3.1,2.4,1
1.8,5.9,0
2.7,1.9,1
1.5,6.3,0
-1.3,1.4,-1
1,5,0
3.2,2.3,1
-1.3,1.4,-1
3,2.1,1
3.2,2,1
1.3,6.3,0
1.4,7,0
-1.2,1.7,-1
1.4,6.6,0
1.2,5.8,0
3,2,1
-1.2,1.6,-1
1.5,6.2,0
1.5,6.4,0
-1.2,1.9,-1
3.2,2.3,1
-1.2,1.5,-1
1.5,5.4,0
1.5,6,0
1.5,6.9,0
-1.4,1.5,-1
1.3,6.4,0
2.8,2,1
-1.2,1.5,-1
-1.1,1.5,-1
3.6,2.5,1
1.1,5.1,0
-1.1,1.5,-1
-1.2,1.5,-1
3.4,2.4,1
-1.3,1.3,-1
3,2.3,1
-1.1,1.5,-1
1.1,5.6,0
1.3,5.6,0
-1.2,1.5,-1
-1.2,1.3,-1
1.7,6.7,0
-1.2,1.3,-1
-1.4,1.3,-1
2.5,1.8,1
-1.4,1.5,-1
1.5,6.5,0
2.6,2.3,1
-1.2,1.4,-1
1.5,5.6,0
1.3,5.7,0
1.2,6.1,0
3,2.2,1
3,1.8,1
-1.1,1.4,-1
1.4,6.8,0
-1.6,1.6,-1
-1.2,1.4,-1
3.2,1.8,1
-1.2,1.5,-1
2.7,1.8,1
-1.3,1.4,-1
1.3,6.2,0
-1.3,1.3,-1
3,1.8,1
2.7,1.9,1
-1.2,1.4,-1
-1.2,1.2,-1
1.5,6.7,0
2.5,1.9,1
3.3,2.1,1
2.8,1.8,1
-1.2,1.3,-1
3.8,2.2,1
2.5,2,1
1.3,6.1,0
-1.2,1.4,-1
-1.1,1.5,-1
3,2.3,1
-1.4,1.9,-1
-1.2,1.6,-1
1,6,0
1.3,5.5,0
1 2.8 1.9 -1 1 -1
2 2.9 1.8 -1 1 -1
3 2.2 1.5 -1 1 -1
4 1.3 5.6 -1 -1 0 1
5 -1.2 1.6 1 -1 -1
6 2.5 1.7 -1 1 -1
7 3.1 2.3 -1 1 -1
8 2.8 2.4 -1 1 -1
9 -1.2 1.5 1 -1 -1
10 1.4 6.1 -1 -1 0 1
11 -1.2 1.3 1 -1 -1
12 3.1 1.8 -1 1 -1
13 3.3 2.5 -1 1 -1
14 3.4 2.3 -1 1 -1
15 1.5 5.9 -1 -1 0 1
16 1.3 5.6 -1 -1 0 1
17 -1.2 1 1 -1 -1
18 3 2.1 -1 1 -1
19 1.4 5.2 -1 -1 0 1
20 1.2 5.5 -1 -1 0 1
21 1.4 6.7 -1 -1 0 1
22 -1.3 1.7 1 -1 -1
23 1 5.7 -1 -1 0 1
24 2.6 1.4 -1 1 -1
25 1.6 6 -1 -1 0 1
26 1.2 5.8 -1 -1 0 1
27 1.4 6.1 -1 -1 0 1
28 1.3 5.7 -1 -1 0 1
29 -1.2 1.4 1 -1 -1
30 1 5.5 -1 -1 0 1
31 3.1 2.1 -1 1 -1
32 -1.4 1.7 1 -1 -1
33 1.6 6.3 -1 -1 0 1
34 -1.2 1.4 1 -1 -1
35 2.7 1.9 -1 1 -1
36 1.1 5.5 -1 -1 0 1
37 3 1.8 -1 1 -1
38 3.8 2 -1 1 -1
39 2.8 2.1 -1 1 -1
40 1 5 -1 -1 0 1
41 2.8 2 -1 1 -1
42 -1.2 1.6 1 -1 -1
43 1.3 6.6 -1 -1 0 1
44 1.6 6 -1 -1 0 1
45 3 1.6 -1 1 -1
46 -1.1 1.1 1 -1 -1
47 2.9 1.8 -1 1 -1
48 3 1.8 -1 1 -1
49 -1.2 1.4 1 -1 -1
50 1 5.8 -1 -1 0 1
51 1.2 5.7 -1 -1 0 1
52 2.8 1.5 -1 1 -1
53 -1.4 1.6 1 -1 -1
54 -1.2 1.4 1 -1 -1
55 2.8 2.2 -1 1 -1
56 -1.5 1.7 1 -1 -1
57 1 4.9 -1 -1 0 1
58 1.3 5.7 -1 -1 0 1
59 3.2 2.3 -1 1 -1
60 -1.3 1.5 1 -1 -1
61 -1.2 1.2 1 -1 -1
62 -1.2 1.6 1 -1 -1
63 3 2.1 -1 1 -1
64 -1.4 1.5 1 -1 -1
65 1.3 5.5 -1 -1 0 1
66 3.3 2.5 -1 1 -1
67 3.1 2.4 -1 1 -1
68 1.8 5.9 -1 -1 0 1
69 2.7 1.9 -1 1 -1
70 1.5 6.3 -1 -1 0 1
71 -1.3 1.4 1 -1 -1
72 1 5 -1 -1 0 1
73 3.2 2.3 -1 1 -1
74 -1.3 1.4 1 -1 -1
75 3 2.1 -1 1 -1
76 3.2 2 -1 1 -1
77 1.3 6.3 -1 -1 0 1
78 1.4 7 -1 -1 0 1
79 -1.2 1.7 1 -1 -1
80 1.4 6.6 -1 -1 0 1
81 1.2 5.8 -1 -1 0 1
82 3 2 -1 1 -1
83 -1.2 1.6 1 -1 -1
84 1.5 6.2 -1 -1 0 1
85 1.5 6.4 -1 -1 0 1
86 -1.2 1.9 1 -1 -1
87 3.2 2.3 -1 1 -1
88 -1.2 1.5 1 -1 -1
89 1.5 5.4 -1 -1 0 1
90 1.5 6 -1 -1 0 1
91 1.5 6.9 -1 -1 0 1
92 -1.4 1.5 1 -1 -1
93 1.3 6.4 -1 -1 0 1
94 2.8 2 -1 1 -1
95 -1.2 1.5 1 -1 -1
96 -1.1 1.5 1 -1 -1
97 3.6 2.5 -1 1 -1
98 1.1 5.1 -1 -1 0 1
99 -1.1 1.5 1 -1 -1
100 -1.2 1.5 1 -1 -1
101 3.4 2.4 -1 1 -1
102 -1.3 1.3 1 -1 -1
103 3 2.3 -1 1 -1
104 -1.1 1.5 1 -1 -1
105 1.1 5.6 -1 -1 0 1
106 1.3 5.6 -1 -1 0 1
107 -1.2 1.5 1 -1 -1
108 -1.2 1.3 1 -1 -1
109 1.7 6.7 -1 -1 0 1
110 -1.2 1.3 1 -1 -1
111 -1.4 1.3 1 -1 -1
112 2.5 1.8 -1 1 -1
113 -1.4 1.5 1 -1 -1
114 1.5 6.5 -1 -1 0 1
115 2.6 2.3 -1 1 -1
116 -1.2 1.4 1 -1 -1
117 1.5 5.6 -1 -1 0 1
118 1.3 5.7 -1 -1 0 1
119 1.2 6.1 -1 -1 0 1
120 3 2.2 -1 1 -1
121 3 1.8 -1 1 -1
122 -1.1 1.4 1 -1 -1
123 1.4 6.8 -1 -1 0 1
124 -1.6 1.6 1 -1 -1
125 -1.2 1.4 1 -1 -1
126 3.2 1.8 -1 1 -1
127 -1.2 1.5 1 -1 -1
128 2.7 1.8 -1 1 -1
129 -1.3 1.4 1 -1 -1
130 1.3 6.2 -1 -1 0 1
131 -1.3 1.3 1 -1 -1
132 3 1.8 -1 1 -1
133 2.7 1.9 -1 1 -1
134 -1.2 1.4 1 -1 -1
135 -1.2 1.2 1 -1 -1
136 1.5 6.7 -1 -1 0 1
137 2.5 1.9 -1 1 -1
138 3.3 2.1 -1 1 -1
139 2.8 1.8 -1 1 -1
140 -1.2 1.3 1 -1 -1
141 3.8 2.2 -1 1 -1
142 2.5 2 -1 1 -1
143 1.3 6.1 -1 -1 0 1
144 -1.2 1.4 1 -1 -1
145 -1.1 1.5 1 -1 -1
146 3 2.3 -1 1 -1
147 -1.4 1.9 1 -1 -1
148 -1.2 1.6 1 -1 -1
149 1 6 -1 -1 0 1
150 1.3 5.5 -1 -1 0 1

View File

@@ -1,4 +1,4 @@
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,-1,-1,-1
0,0,1,0,0,0,0,1,0,0,1,1,1,1,1,0,0,1,0,0,0,0,1,0,0,-1,1,-1,-1
1,0,0,0,1,0,1,0,1,0,0,0,1,0,0,0,1,0,1,0,1,0,0,0,1,-1,-1,1,-1
0,0,0,0,0,0,1,1,1,0,0,1,0,1,0,0,1,1,1,0,0,0,0,0,0,-1,-1,-1,1
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
0,0,1,0,0,0,0,1,0,0,1,1,1,1,1,0,0,1,0,0,0,0,1,0,0,1
1,0,0,0,1,0,1,0,1,0,0,0,1,0,0,0,1,0,1,0,1,0,0,0,1,2
0,0,0,0,0,0,1,1,1,0,0,1,0,1,0,0,1,1,1,0,0,0,0,0,0,3
1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 -1 -1 -1
2 0 0 1 0 0 0 0 1 0 0 1 1 1 1 1 0 0 1 0 0 0 0 1 0 0 -1 1 1 -1 -1
3 1 0 0 0 1 0 1 0 1 0 0 0 1 0 0 0 1 0 1 0 1 0 0 0 1 -1 2 -1 1 -1
4 0 0 0 0 0 0 1 1 1 0 0 1 0 1 0 0 1 1 1 0 0 0 0 0 0 -1 3 -1 -1 1

View File

@@ -20,6 +20,16 @@ const links = [
href: '/perceptron',
data: { type: 'adaline' },
},
{
name: 'Mono-couche',
href: '/perceptron',
data: { type: 'monolayer' },
},
{
name: 'Multi-couche',
href: '/perceptron',
data: { type: 'multilayer' },
},
];
const isActiveLink = (link: any) => {

View File

@@ -5,7 +5,7 @@ import type {
BubbleDataPoint,
Point,
} from 'chart.js';
import { computed } from 'vue';
import { computed, ref } from 'vue';
import { Chart } from 'vue-chartjs';
import { colors, gridColor, gridColorBold } from '@/types/graphs';
import type { Iteration } from '@/types/perceptron';
@@ -16,6 +16,10 @@ const props = defineProps<{
activationFunction: (x: number) => number;
}>();
const examplesNumber = computed(() => {
return props.cleanedDataset.reduce((sum, dataset) => sum + dataset.data.length, 0);
});
const farLeftDataPointX = computed(() => {
if (props.cleanedDataset.length === 0) {
return 0;
@@ -25,6 +29,15 @@ const farLeftDataPointX = computed(() => {
);
return minX;
});
const farBottomDataPointY = computed(() => {
if (props.cleanedDataset.length === 0) {
return 0;
}
const minY = Math.min(
...props.cleanedDataset.flatMap((d) => d.data.map((point) => point.y)),
);
return minY;
});
const farRightDataPointX = computed(() => {
if (props.cleanedDataset.length === 0) {
return 0;
@@ -34,8 +47,20 @@ const farRightDataPointX = computed(() => {
);
return maxX;
});
const farTopDataPointY = computed(() => {
if (props.cleanedDataset.length === 0) {
return 0;
}
const maxY = Math.max(
...props.cleanedDataset.flatMap((d) => d.data.map((point) => point.y)),
);
return maxY;
});
function getPerceptronOutput(weightsNetwork: number[][], inputs: number[]): number[] {
function getPerceptronOutput(
weightsNetwork: number[][][],
inputs: number[],
): number[] {
for (const layer of weightsNetwork) {
const nextInputs: number[] = [];
@@ -59,13 +84,14 @@ function getPerceptronOutput(weightsNetwork: number[][], inputs: number[]): numb
return inputs;
}
const nonLinearGraph = ref<boolean>(false);
function getPerceptronDecisionBoundaryDataset(
networkWeights: number[][][],
activationFunction: (x: number) => number = (x) => x,
): ChartDataset<
keyof ChartTypeRegistry,
number | Point | [number, number] | BubbleDataPoint | null
> {
>[] {
const label = 'Ligne de décision du Perceptron';
console.log('Calculating decision boundary with weights:', networkWeights);
@@ -74,6 +100,7 @@ function getPerceptronDecisionBoundaryDataset(
networkWeights[0].length == 1 &&
networkWeights[0][0].length <= 3
) {
nonLinearGraph.value = false;
// Unique, 3 weights perceptron
const perceptronWeights = [...networkWeights[0][0]]; // Copy of the unique perceptron weights
@@ -89,7 +116,8 @@ function getPerceptronDecisionBoundaryDataset(
}
// Simple line
return {
return [
{
type: 'line',
label: label,
data: [
@@ -105,60 +133,67 @@ function getPerceptronDecisionBoundaryDataset(
borderColor: '#FFF',
borderWidth: 2,
pointRadius: 0,
};
},
];
} else {
function forward(x1: number, x2: number): number {
let activations: number[] = [x1, x2];
nonLinearGraph.value = true;
for (const layer of networkWeights) {
const nextActivations: number[] = [];
const bubbleTransparency = '30';
const isInDataThreshold = 0.0;
for (const neuron of layer) {
const bias = neuron[0];
const weights = neuron.slice(1);
let sum = bias;
for (let i = 0; i < weights.length; i++) {
sum += weights[i] * activations[i];
}
const activated = activationFunction(sum);
nextActivations.push(activated);
}
activations = nextActivations;
}
return activations[0]; // on suppose sortie unique
// -------- 1⃣ Construction des datasets --------
const datasets: {
type: string;
label: string;
data: Point[];
backgroundColor: string;
pointRadius: number;
borderWidth: number;
order: number;
}[] = [];
// For the number of neuron in the last layer
const lastLayer = networkWeights[networkWeights.length - 1];
for (let i = 0; i < lastLayer.length; i++) {
const dataset = {
type: 'scatter',
label: label,
data: [], // Will be filled with the decision boundary points
backgroundColor: colors[i] + bubbleTransparency || '#AAA',
pointRadius: 15,
borderWidth: 0,
order: -1,
};
datasets.push(dataset);
}
// -------- 2⃣ Échantillonnage grille --------
const decisionBoundary: Point[] = [];
const min = -2;
const max = 2;
const step = 0.03;
const epsilon = 0.01;
const step =
Math.abs(
farRightDataPointX.value + 1 - (farLeftDataPointX.value - 1),
) / 50;
for (let x = min; x <= max; x += step) {
for (let y = min; y <= max; y += step) {
const value = forward(x, y);
if (Math.abs(value) < epsilon) {
decisionBoundary.push({ x, y });
for (
let x = farLeftDataPointX.value - 1;
x <= farRightDataPointX.value + 1;
x += step
) {
for (
let y = farBottomDataPointY.value - 1;
y <= farTopDataPointY.value + 1;
y += step
) {
const values = getPerceptronOutput(networkWeights, [x, y]);
values.forEach((v, i) => {
if (v > isInDataThreshold) {
datasets[i].data.push({ x, y });
}
});
}
}
// -------- 3⃣ Dataset ChartJS --------
return {
type: 'scatter',
label: label,
data: decisionBoundary,
backgroundColor: '#FFFFFF',
pointRadius: 1,
};
return datasets;
}
}
</script>
@@ -180,6 +215,9 @@ function getPerceptronDecisionBoundaryDataset(
text: 'Ligne de décision du Perceptron',
},
},
animation: {
duration: nonLinearGraph || examplesNumber > 10 ? 0 : 1000, // Disable animations for instant updates
},
layout: {
padding: {
left: 10,
@@ -228,7 +266,7 @@ function getPerceptronDecisionBoundaryDataset(
})),
// Perceptron decision boundary
getPerceptronDecisionBoundaryDataset(
...getPerceptronDecisionBoundaryDataset(
props.iterations.length > 0
? props.iterations[props.iterations.length - 1].weights
: [[[0, 0, 0]]],

View File

@@ -94,6 +94,9 @@ const datasets = computed<
text: 'Nombre d\'erreurs par epoch',
},
},
animation: {
duration: iterations.length > 100 ? 0 : 1000, // Disable animations for instant updates
},
scales: {
x: {
stacked: true,

View File

@@ -14,8 +14,8 @@ class TrainingTestCase extends TestCase
$training->start();
// Assert that the final synaptic weights are as expected withing the margin of error
// $finalWeights = $training->getSynapticWeights();
// $this->assertEqualsWithDelta($expectedWeights, $finalWeights, $marginOfError, "Final synaptic weights do not match expected values.");
$finalWeights = $training->getSynapticWeights();
$this->assertEqualsWithDelta($expectedWeights, $finalWeights, $marginOfError, "Final synaptic weights do not match expected values.");
// Assert that the number of epochs taken is as expected
$this->assertEquals($expectedEpochs, $training->getEpoch(), "Expected training to take $expectedEpochs epochs, but it took {$training->getEpoch()} epochs.");