Compare commits

...

12 Commits

Author SHA1 Message Date
2f4db07918 MonoLayer Perceptron
All checks were successful
linter / quality (push) Successful in 6m16s
tests / ci (8.4) (push) Successful in 4m10s
tests / ci (8.5) (push) Successful in 4m29s
2026-04-04 16:45:04 +02:00
f6620c2eca Fixed graph showing wrong decision border for regression
All checks were successful
linter / quality (push) Successful in 8m11s
tests / ci (8.4) (push) Successful in 10m8s
tests / ci (8.5) (push) Successful in 12m59s
2026-03-24 17:33:58 +01:00
ca9c0dc511 Misc Fixes for ADALINE 2026-03-24 17:12:15 +01:00
0876588550 Changed Favicon
All checks were successful
linter / quality (push) Successful in 5m21s
tests / ci (8.4) (push) Successful in 6m49s
tests / ci (8.5) (push) Successful in 7m14s
2026-03-24 10:48:50 +01:00
a37afcec07 Added brianium/paratest to run test in parallel
All checks were successful
linter / quality (push) Successful in 8m4s
tests / ci (8.4) (push) Successful in 8m27s
tests / ci (8.5) (push) Successful in 8m26s
2026-03-23 16:33:58 +01:00
b052d792f8 Added Cache to CICD
Some checks failed
linter / quality (push) Successful in 11m58s
tests / ci (8.4) (push) Failing after 7m52s
tests / ci (8.5) (push) Has been cancelled
2026-03-23 16:13:12 +01:00
5880024933 Fix linting
Some checks failed
linter / quality (push) Successful in 7m44s
tests / ci (8.5) (push) Has been cancelled
tests / ci (8.4) (push) Has been cancelled
2026-03-23 16:02:07 +01:00
a92a47288c Fixed Regression datasets
Some checks failed
linter / quality (push) Has been cancelled
tests / ci (8.4) (push) Has been cancelled
tests / ci (8.5) (push) Has been cancelled
2026-03-23 16:01:22 +01:00
bcaf334380 Added pointer cursor to selects 2026-03-23 16:01:10 +01:00
dea908c63e Modified limitedEventBuffer to send in linear delay 2026-03-23 16:00:51 +01:00
236fa503fb Fix setup command
Some checks failed
linter / quality (push) Failing after 9m48s
tests / ci (8.4) (push) Successful in 9m2s
tests / ci (8.5) (push) Successful in 8m50s
2026-03-23 13:57:02 +01:00
0f92af4a1e Added Reverb terminal color 2026-03-23 13:56:53 +01:00
34 changed files with 995 additions and 292 deletions

View File

@@ -20,30 +20,59 @@ permissions:
jobs:
quality:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v6
- name: Checkout code
uses: actions/checkout@v6
# -------------------------
# Cache Composer
# -------------------------
- name: Cache Composer dependencies
uses: actions/cache@v4
with:
path: ~/.composer/cache
key: composer-${{ runner.os }}-${{ hashFiles('**/composer.lock') }}
restore-keys: |
composer-${{ runner.os }}-
# -------------------------
# Cache Node
# -------------------------
- name: Cache Node dependencies
uses: actions/cache@v4
with:
path: ~/.npm
key: node-${{ runner.os }}-${{ hashFiles('**/package-lock.json') }}
restore-keys: |
node-${{ runner.os }}-
- name: Setup PHP
uses: shivammathur/setup-php@v2
with:
php-version: '8.4'
coverage: none
- name: Setup Node
uses: actions/setup-node@v4
with:
node-version: '22'
cache: 'npm'
# -------------------------
# Install dependencies
# -------------------------
- name: Install Dependencies
run: |
composer install -q --no-ansi --no-interaction --no-scripts --no-progress --prefer-dist
npm install
composer install --no-interaction --prefer-dist --no-progress --no-scripts
npm ci
- name: Run Pint
run: composer lint
- name: Format Frontend
run: npm run format
- name: Lint Frontend
run: npm run lint
# - name: Commit Changes
# uses: stefanzweifel/git-auto-commit-action@v7
# with:
# commit_message: fix code style
# commit_options: '--no-verify'
# -------------------------
# Run linters in parallel
# -------------------------
- name: Run linters
run: |
composer lint &
npm run format &
npm run lint &
wait

View File

@@ -25,32 +25,66 @@ jobs:
- name: Checkout code
uses: actions/checkout@v6
# -------------------------
# Cache Composer
# -------------------------
- name: Cache Composer dependencies
uses: actions/cache@v4
with:
path: ~/.composer/cache
key: composer-${{ runner.os }}-${{ matrix.php-version }}-${{ hashFiles('**/composer.lock') }}
restore-keys: |
composer-${{ runner.os }}-${{ matrix.php-version }}-
# -------------------------
# Cache Node
# -------------------------
- name: Cache Node dependencies
uses: actions/cache@v4
with:
path: ~/.npm
key: node-${{ runner.os }}-${{ hashFiles('**/package-lock.json') }}
restore-keys: |
node-${{ runner.os }}-
- name: Setup PHP
uses: shivammathur/setup-php@v2
with:
php-version: ${{ matrix.php-version }}
tools: composer:v2
coverage: xdebug
coverage: none
- name: Setup Node
uses: actions/setup-node@v4
with:
node-version: '22'
cache: 'npm'
# -------------------------
# Install dependencies
# -------------------------
- name: Install Node Dependencies
run: npm i
run: npm ci
- name: Install Dependencies
run: composer install --no-interaction --prefer-dist --optimize-autoloader
- name: Install PHP Dependencies
run: composer install --no-interaction --prefer-dist --optimize-autoloader --no-progress
- name: Copy Environment File
run: cp .env.example .env
- name: Generate Application Key
run: php artisan key:generate
# -------------------------
# Laravel setup
# -------------------------
- name: Prepare environment
run: |
cp .env.example .env
php artisan key:generate
# -------------------------
# Build (optional — remove if not needed for tests)
# -------------------------
- name: Build Assets
run: npm run build
- name: Tests
run: ./vendor/bin/phpunit
# -------------------------
# Run tests (parallel)
# -------------------------
- name: Run Tests
run: php artisan test --parallel

View File

@@ -23,10 +23,10 @@ Using Laravel and Vue JS
3. NodeJs (Node + NPM)
<https://nodejs.org/en/download>
2. Install dependencies
2. Setup project and install dependencies
```shell
composer install
composer run setup
```
## Running the project

View File

@@ -5,6 +5,7 @@ namespace App\Http\Controllers;
use App\Events\PerceptronInitialization;
use App\Models\NetworksTraining\ADALINEPerceptronTraining;
use App\Models\NetworksTraining\GradientDescentPerceptronTraining;
use App\Models\NetworksTraining\MonoLayerPerceptronTraining;
use App\Models\NetworksTraining\SimpleBinaryPerceptronTraining;
use App\Services\DatasetReader\IDataSetReader;
use App\Services\DatasetReader\LinearOrderDataSetReader;
@@ -13,7 +14,11 @@ use App\Services\IterationEventBuffer\PerceptronIterationEventBuffer;
use App\Services\IterationEventBuffer\PerceptronLimitedEpochEventBuffer;
use App\Services\SynapticWeightsProvider\ISynapticWeightsProvider;
use App\Services\SynapticWeightsProvider\ZeroSynapticWeights;
use Illuminate\Contracts\Queue\Job;
use Illuminate\Http\Request;
use Illuminate\Support\Facades\DB;
use Symfony\Contracts\EventDispatcher\Event;
use Tests\Services\IterationEventBuffer\DullIterationEventBuffer;
class PerceptronController extends Controller
{
@@ -126,6 +131,8 @@ class PerceptronController extends Controller
public function run(Request $request, ISynapticWeightsProvider $synapticWeightsProvider)
{
$startTime = microtime(true);
$perceptronType = $request->input('type');
$minError = $request->input('min_error', 0.01);
$weightInitMethod = $request->input('weight_init_method', 'random');
@@ -135,6 +142,9 @@ class PerceptronController extends Controller
$sessionId = $request->input('session_id', session()->getId());
$trainingId = $request->input('training_id');
// Remove the jobs for the sessionId
DB::table('jobs')->where('payload', 'like', '%s:9:\"sessionId\";s:40:\"'. $sessionId .'\";%')->delete();
if ($weightInitMethod === 'zeros') {
$synapticWeightsProvider = new ZeroSynapticWeights;
}
@@ -151,6 +161,7 @@ class PerceptronController extends Controller
'simple' => new SimpleBinaryPerceptronTraining($datasetReader, $learningRate, $maxEpochs, $synapticWeightsProvider, $iterationEventBuffer, $sessionId, $trainingId),
'gradientdescent' => new GradientDescentPerceptronTraining($datasetReader, $learningRate, $maxEpochs, $synapticWeightsProvider, $iterationEventBuffer, $sessionId, $trainingId, $minError),
'adaline' => new ADALINEPerceptronTraining($datasetReader, $learningRate, $maxEpochs, $synapticWeightsProvider, $iterationEventBuffer, $sessionId, $trainingId, $minError),
'monolayer' => new MonoLayerPerceptronTraining($datasetReader, $learningRate, $maxEpochs, $synapticWeightsProvider, $iterationEventBuffer, $sessionId, $trainingId, $minError),
default => null,
};
@@ -160,6 +171,7 @@ class PerceptronController extends Controller
return response()->json([
'message' => 'Training completed',
'execution_time' => microtime(true) - $startTime,
]);
}
}

View File

@@ -66,7 +66,7 @@ class ADALINEPerceptronTraining extends NetworkTraining
foreach ($inputsForCurrentEpoch as $inputsWithLabel) {
$inputs = array_slice($inputsWithLabel, 0, -1);
$correctOutput = (float) end($inputsWithLabel);
$output = $this->perceptron->test($inputs);
$output = $this->perceptron->test($inputs)[0];
$iterationError = $correctOutput - $output;
$this->epochError += ($iterationError ** 2) / 2; // Squared error for the example
}
@@ -90,9 +90,9 @@ class ADALINEPerceptronTraining extends NetworkTraining
return $condition;
}
private function iterationFunction(array $inputs, int $correctOutput)
private function iterationFunction(array $inputs, float $correctOutput): float
{
$output = $this->perceptron->test($inputs);
$output = $this->perceptron->test($inputs)[0];
$error = $correctOutput - $output;

View File

@@ -86,9 +86,9 @@ class GradientDescentPerceptronTraining extends NetworkTraining
return $condition;
}
private function iterationFunction(array $inputs, int $correctOutput)
private function iterationFunction(array $inputs, float $correctOutput): float
{
$output = $this->perceptron->test($inputs);
$output = $this->perceptron->test($inputs)[0];
$error = $correctOutput - $output;

View File

@@ -0,0 +1,157 @@
<?php
namespace App\Models\NetworksTraining;
use App\Events\PerceptronTrainingEnded;
use App\Models\ActivationsFunctions;
use App\Models\Perceptrons\GradientDescentPerceptron;
use App\Models\Perceptrons\NetworkPerceptron;
use App\Models\Perceptrons\Perceptron;
use App\Models\Perceptrons\SimpleBinaryPerceptron2;
use App\Models\Perceptrons\SimpleBinaryPerceptron;
use App\Services\DatasetReader\IDataSetReader;
use App\Services\IterationEventBuffer\IPerceptronIterationEventBuffer;
use App\Services\SynapticWeightsProvider\ISynapticWeightsProvider;
use App\Services\SynapticWeightsProvider\SimpleNetworkWeightsProvider;
use Illuminate\Support\Arr;
/**
 * Trains a single-layer ("monolayer") perceptron network: one output neuron
 * per class label and no hidden layer. Weights are updated after every
 * example (online learning); the stop condition compares the average squared
 * epoch error against a minimum error threshold.
 */
class MonoLayerPerceptronTraining extends NetworkTraining
{
    // The single-layer network; one output neuron per dataset label.
    private Perceptron $network;

    // Distinct labels read from the dataset; output neuron i is trained to
    // answer +1 for labels[i] and -1 otherwise (see getDesiredOutputFromCorrectOutput).
    private array $labels;

    public ActivationsFunctions $activationFunction = ActivationsFunctions::LINEAR;
    public ?ActivationsFunctions $presentationLayerActivationFunction = ActivationsFunctions::STEP;

    // Average squared error of the most recently completed epoch.
    private float $epochError;

    public function __construct(
        IDataSetReader $datasetReader,
        protected float $learningRate,
        int $maxEpochs,
        ISynapticWeightsProvider $synapticWeightsProvider,
        IPerceptronIterationEventBuffer $iterationEventBuffer,
        string $sessionId,
        string $trainingId,
        private float $minError,
    ) {
        parent::__construct($datasetReader, $maxEpochs, $iterationEventBuffer, $sessionId, $trainingId);
        // Wrap the per-neuron weights provider so it can emit a whole
        // network's weight structure (here: just the output layer).
        $networkWeightsProvider = new SimpleNetworkWeightsProvider($synapticWeightsProvider);
        $this->network = new NetworkPerceptron(
            $networkWeightsProvider->generate(
                $datasetReader->getInputSize(),
                $datasetReader->getOutputSize(),
                0, // No hidden layer
                0, // No hidden layer neurons
            ),
            $datasetReader->getInputSize(),
            GradientDescentPerceptron::class, // Unused here since there is no hidden layer
            SimpleBinaryPerceptron2::class,
        );
        $this->labels = $datasetReader->getLabels();
    }

    /**
     * Runs the training loop until the epoch error drops below the minimum
     * error or the maximum number of epochs is reached.
     */
    public function start(): void
    {
        $this->epoch = 0;
        do {
            $this->epochError = 0;
            $this->epoch++;
            $inputsForCurrentEpoch = [];
            while ($nextRow = $this->datasetReader->getNextLine()) {
                $inputsForCurrentEpoch[] = $nextRow;
                // Last column is the label; the rest are the inputs.
                $inputs = array_slice($nextRow, 0, -1);
                // NOTE(review): cast to (int) here but (float) in the epoch-error
                // loop below, while iterationFunction declares int — confirm the
                // labels are always integral.
                $correctOutput = (int) end($nextRow);
                $iterationError = $this->iterationFunction($inputs, $correctOutput);
                // Synaptic weights correction after each example
                $synaptic_weights = $this->network->getSynapticWeights();
                $inputs_with_bias = array_merge([1], $inputs); // Add bias input
                // Updates the weights
                $this->network->setSynapticWeights(
                    $this->getUpdatedSynapticWeights($synaptic_weights, $iterationError, $inputs_with_bias)
                );
                // Broadcast the training iteration event
                // NOTE(review): array_sum over signed per-neuron errors can cancel
                // (+e and -e sum to 0) — confirm this is the intended metric.
                $this->addIterationToBuffer(array_sum($iterationError), $this->network->getSynapticWeights());
            }
            // Calculate the average error for the epoch with the last synaptic weights
            foreach ($inputsForCurrentEpoch as $inputsWithLabel) {
                $inputs = array_slice($inputsWithLabel, 0, -1);
                $correctOutput = (float) end($inputsWithLabel);
                $iterationError = $this->iterationFunction($inputs, $correctOutput);
                foreach ($iterationError as $error) {
                    $this->epochError += ($error ** 2) / 2; // Squared error for the example
                }
            }
            $this->epochError /= $this->datasetReader->getEpochExamplesCount(); // Average error for the epoch
            $this->datasetReader->reset(); // Reset the dataset for the next iteration
        } while ($this->epoch < $this->maxEpochs && ! $this->stopCondition());
        $this->iterationEventBuffer->flush(); // Ensure all iterations are sent to the frontend
        $this->checkPassedMaxIterations($this->epochError);
    }

    /**
     * True when the average epoch error has reached the configured minimum;
     * also broadcasts the training-ended event in that case.
     */
    protected function stopCondition(): bool
    {
        $condition = $this->epochError <= $this->minError;
        if ($condition === true) {
            event(new PerceptronTrainingEnded('Le perceptron à atteint l\'erreur minimale', $this->sessionId, $this->trainingId));
        }
        return $condition;
    }

    /**
     * Feeds one example through the network and returns the per-output-neuron
     * errors (desired minus actual), in output-neuron order.
     */
    private function iterationFunction(array $inputs, int $correctOutput): array
    {
        $outputs = $this->network->test($inputs);
        $desiredOutput = $this->getDesiredOutputFromCorrectOutput($correctOutput);
        $errors = [];
        foreach ($outputs as $index => $output) {
            $error = $desiredOutput[$index] - $output;
            $errors[] = $error;
        }
        return $errors;
    }

    /**
     * Delta-rule update: w += learningRate * error_neuron * input, applied to
     * the single layer of weights. $inputs already includes the bias term.
     */
    private function getUpdatedSynapticWeights(array $synaptic_weights, array $iterationError, array $inputs): array
    {
        $updatedWeights = [];
        foreach ($synaptic_weights[0] as $neuronIndex => $neuronWeights) { // There is only one layer of weights
            $updatedNeuronWeights = [];
            foreach ($neuronWeights as $weightIndex => $weight) {
                $updatedWeight = $weight + ($this->learningRate * $iterationError[$neuronIndex] * $inputs[$weightIndex]);
                $updatedNeuronWeights[] = $updatedWeight;
            }
            $updatedWeights[] = $updatedNeuronWeights;
        }
        return [$updatedWeights];
    }

    /**
     * One-vs-all target vector: -1 everywhere except +1 at the index of the
     * neuron matching $correctOutput in $this->labels.
     */
    private function getDesiredOutputFromCorrectOutput(int $correctOutput): array
    {
        $desiredOutput = array_fill(0, count($this->labels), -1);
        $labelIndex = Arr::first(array_keys($this->labels), fn($key) => $this->labels[$key] == $correctOutput);
        if ($labelIndex !== null) {
            $desiredOutput[$labelIndex] = 1;
        }
        return $desiredOutput;
    }

    public function getSynapticWeights(): array
    {
        // NOTE(review): double-wrapped to match the shape the frontend/buffer
        // expects — confirm consumers rely on this exact nesting.
        return [[$this->network->getSynapticWeights()]];
    }
}

View File

@@ -16,6 +16,8 @@ abstract class NetworkTraining
*/
public ActivationsFunctions $activationFunction;
public ?ActivationsFunctions $presentationLayerActivationFunction = null;
public function __construct(
protected IDataSetReader $datasetReader,
protected int $maxEpochs,

View File

@@ -71,7 +71,7 @@ class SimpleBinaryPerceptronTraining extends NetworkTraining
private function iterationFunction(array $inputs, int $correctOutput)
{
$output = $this->perceptron->test($inputs);
$output = $this->perceptron->test($inputs)[0];
$error = $correctOutput - $output;
if (abs($error) > $this::MIN_ERROR) {

View File

@@ -0,0 +1,26 @@
<?php
namespace App\Models\Perceptrons;
/**
 * Pass-through neuron used as the input layer of a NetworkPerceptron.
 * It carries no synaptic weights; test() simply re-emits the value set
 * via setInput(), ignoring its argument.
 */
class InputNeuron extends Perceptron
{
    // Declared explicitly: assigning to an undeclared property creates a
    // dynamic property, which is deprecated since PHP 8.2.
    private float $input = 0.0;

    public function __construct()
    {
        // Input neurons have no incoming weights.
        parent::__construct([]);
    }

    public function setInput(float $input): void
    {
        $this->input = $input;
    }

    public function test(array $inputs): array
    {
        // $inputs is intentionally ignored; the stored value is the output.
        return [$this->input];
    }

    public function activationFunction(float $input): float
    {
        return $input; // Identity function for input neurons
    }
}

View File

@@ -0,0 +1,76 @@
<?php
namespace App\Models\Perceptrons;
/**
 * A feed-forward network of perceptrons built from per-layer synaptic weights.
 *
 * $synaptic_weights contains one entry per trainable layer (hidden layers
 * first, output layer last); each entry is an array of per-neuron weight
 * arrays. The input layer is made of InputNeuron pass-through nodes.
 */
class NetworkPerceptron extends Perceptron
{
    public array $network = [];

    public function __construct(
        private array $synaptic_weights,
        private int $inputLayerNeuronsCount,
        private string $hiddenLayerNeuronClass,
        private string $outputLayerNeuronClass,
    ) {
        parent::__construct($synaptic_weights);
        $this->initializeNetwork($synaptic_weights);
    }

    /**
     * (Re)builds the neuron objects from the given per-layer weights.
     */
    private function initializeNetwork(array $synaptic_weights): void
    {
        // Input Layer — a plain for-loop avoids range(0, -1) yielding
        // [0, -1] (two iterations) when the input layer is empty.
        $this->network[0] = [];
        for ($i = 0; $i < $this->inputLayerNeuronsCount; $i++) {
            $this->network[0][] = new InputNeuron();
        }

        // Hidden Layers — every weight entry except the last is a hidden
        // layer. The previous bound (count - 2) skipped the last hidden
        // layer and left a gap in $this->network whenever hidden layers were
        // present; the no-hidden-layer case (count == 1) is unaffected.
        for ($layerIndex = 0; $layerIndex < count($synaptic_weights) - 1; $layerIndex++) {
            $this->network[$layerIndex + 1] = [];
            foreach ($synaptic_weights[$layerIndex] as $neuronWeights) {
                $this->network[$layerIndex + 1][] = new $this->hiddenLayerNeuronClass($neuronWeights);
            }
        }

        // Output Layer — built from the last weight entry.
        $outputLayer = $synaptic_weights[count($synaptic_weights) - 1];
        $this->network[count($synaptic_weights)] = [];
        foreach ($outputLayer as $neuronWeights) {
            $this->network[count($synaptic_weights)][] = new $this->outputLayerNeuronClass($neuronWeights);
        }
    }

    /**
     * Feeds $inputs through the network and returns the output-layer values.
     */
    public function test(array $inputs): array
    {
        // Set the inputs for the input layer
        foreach ($this->network[0] as $index => $inputNeuron) {
            $inputNeuron->setInput($inputs[$index]);
        }

        // Propagate layer by layer; each neuron consumes the previous
        // layer's outputs (the input layer ignores its argument).
        $output = [];
        for ($layerIndex = 0; $layerIndex < count($this->network); $layerIndex++) {
            $lastLayerOutput = $output;
            $output = [];
            foreach ($this->network[$layerIndex] as $neuron) {
                $output[] = $neuron->test($lastLayerOutput)[0];
            }
        }

        return $output;
    }

    public function activationFunction(float $weighted_sum): float
    {
        // Identity: the network itself does not post-process its outputs.
        return $weighted_sum;
    }

    public function setSynapticWeights(array $synaptic_weights): void
    {
        parent::setSynapticWeights($synaptic_weights);
        // NOTE(review): this class' private $synaptic_weights copy is not
        // refreshed here; it is only read at construction time.
        $this->network = [];
        $this->initializeNetwork($synaptic_weights);
    }
}

View File

@@ -2,9 +2,9 @@
namespace App\Models\Perceptrons;
use Illuminate\Database\Eloquent\Model;
// use Illuminate\Database\Eloquent\Model;
abstract class Perceptron extends Model
abstract class Perceptron
{
public function __construct(
private array $synaptic_weights,
@@ -12,7 +12,7 @@ abstract class Perceptron extends Model
$this->synaptic_weights = $synaptic_weights;
}
public function test(array $inputs): float
public function test(array $inputs): array
{
$inputs = array_merge([1], $inputs); // Add bias input
@@ -22,7 +22,7 @@ abstract class Perceptron extends Model
$weighted_sum = array_sum(array_map(fn ($input, $weight) => $input * $weight, $inputs, $this->synaptic_weights));
return $this->activationFunction($weighted_sum);
return [$this->activationFunction($weighted_sum)];
}
abstract public function activationFunction(float $weighted_sum): float;

View File

@@ -0,0 +1,18 @@
<?php
namespace App\Models\Perceptrons;
/**
 * Output-layer neuron used by the monolayer network.
 *
 * NOTE(review): despite the "Binary" name, the activation is currently the
 * identity (linear) function; the thresholded step output is kept below,
 * commented out, presumably for experimentation — confirm intent.
 *
 * The explicit constructor that only forwarded to the parent was removed:
 * the inherited Perceptron constructor is identical, so `new
 * SimpleBinaryPerceptron2($weights)` behaves exactly as before.
 */
class SimpleBinaryPerceptron2 extends Perceptron
{
    public function activationFunction(float $weighted_sum): float
    {
        // return $weighted_sum >= 0.0 ? 1.0 : -1.0;
        return $weighted_sum;
    }
}

View File

@@ -8,6 +8,10 @@ interface IDataSetReader
public function getInputSize(): int;
public function getOutputSize(): int;
public function getLabels(): array;
public function reset(): void;
public function getLastReadLineIndex(): int;

View File

@@ -29,11 +29,6 @@ class LinearOrderDataSetReader implements IDataSetReader
$newLine[] = (float) $value;
}
// if the dataset is for regression, we add a fake label of 0
if (count($newLine) === 2) {
$newLine[] = 0.0;
}
$this->lines[] = $newLine;
}
}
@@ -54,6 +49,19 @@ class LinearOrderDataSetReader implements IDataSetReader
return count($this->lines[0]) - 1; // Don't count the label
}
public function getOutputSize(): int
{
// Count the number of unique labels in the dataset
$labels = array_map(fn ($line) => end($line), $this->lines);
return count(array_unique($labels));
}
public function getLabels(): array
{
$labels = array_map(fn ($line) => end($line), $this->lines);
return array_values(array_unique($labels));
}
public function reset(): void
{
$this->currentLines = $this->lines;

View File

@@ -29,11 +29,6 @@ class RandomOrderDataSetReader implements IDataSetReader
$newLine[] = (float) $value;
}
// if the dataset is for regression, we add a fake label of 0
if (count($newLine) === 2) {
$newLine[] = 0.0;
}
$this->lines[] = $newLine;
}
}
@@ -60,6 +55,19 @@ class RandomOrderDataSetReader implements IDataSetReader
return count($this->lines[0]) - 1; // Don't count the label
}
public function getOutputSize(): int
{
// Count the number of unique labels in the dataset
$labels = array_map(fn ($line) => end($line), $this->lines);
return count(array_unique($labels));
}
public function getLabels(): array
{
$labels = array_map(fn ($line) => end($line), $this->lines);
return array_values(array_unique($labels));
}
public function reset(): void
{
$this->currentLines = $this->lines;

View File

@@ -32,20 +32,12 @@ class PerceptronLimitedEpochEventBuffer implements IPerceptronIterationEventBuff
'weights' => $synaptic_weights,
];
if ($this->underSizeIncreaseCount <= $this->sizeIncreaseStart) { // Special case where we need to send each iteration separately
$this->underSizeIncreaseCount++;
$this->data[] = $newData;
$this->flush();
return;
}
$lastEpoch = $this->data[0]['epoch'] ?? null;
if ($this->data && $lastEpoch !== $epoch) { // Current Epoch has changed from the last one
if ($lastEpoch % $this->epochInterval === 0) { // The last epoch need to be sent
if ($lastEpoch == 1 || $lastEpoch % $this->epochInterval === 0) { // The last saved epoch need to be sent
$this->flush(); // Flush all data from the previous epoch
} else {
$this->data = [];
$this->data = []; // We clear the data without sending it as we are saving the next epoch data
}
$lastEpoch = $epoch;

View File

@@ -0,0 +1,8 @@
<?php
namespace App\Services\SynapticWeightsProvider;
/**
 * Generates the initial synaptic weights for a whole feed-forward network.
 *
 * Implementations return one entry per trainable layer (hidden layers first,
 * then the output layer); each entry holds the weight arrays of that layer's
 * neurons.
 */
interface INetworkSynapticWeightsProvider
{
    public function generate(int $input_size, int $output_size, int $hidden_layers_count, int $hidden_layers_neurons_count): array;
}

View File

@@ -0,0 +1,35 @@
<?php
namespace App\Services\SynapticWeightsProvider;
use App\Services\SynapticWeightsProvider\INetworkSynapticWeightsProvider;
/**
 * Builds a network's initial weights by delegating each neuron's weight
 * vector to a per-neuron ISynapticWeightsProvider.
 *
 * Returns one entry per trainable layer (hidden layers first, output layer
 * last); each entry is an array of per-neuron weight arrays, which is the
 * structure NetworkPerceptron iterates over.
 */
class SimpleNetworkWeightsProvider implements INetworkSynapticWeightsProvider
{
    public function __construct(
        private ISynapticWeightsProvider $synapticWeightsProvider,
    ) {
    }

    public function generate(int $input_size, int $output_size, int $hidden_layers_count, int $hidden_layers_neurons_count): array
    {
        $synaptic_weights = [];
        $lastLayerSize = $input_size;

        // Generate Hidden Layer weights, grouped per layer.
        // (Previously each hidden neuron was appended as its own top-level
        // entry, which did not match the layer-per-entry grouping used for
        // the output layer below and expected by NetworkPerceptron. The
        // no-hidden-layer case is unaffected.)
        for ($hiddenLayerIndex = 0; $hiddenLayerIndex < $hidden_layers_count; $hiddenLayerIndex++) {
            $layerWeights = [];
            for ($neuronIndex = 0; $neuronIndex < $hidden_layers_neurons_count; $neuronIndex++) {
                $layerWeights[] = $this->synapticWeightsProvider->generate($lastLayerSize);
            }
            $synaptic_weights[] = $layerWeights;
            // Following layers receive one input per neuron of this layer.
            $lastLayerSize = $hidden_layers_neurons_count;
        }

        // Generate Output Layer weights.
        $outputLayerWeights = [];
        for ($outputNeuronIndex = 0; $outputNeuronIndex < $output_size; $outputNeuronIndex++) {
            $outputLayerWeights[] = $this->synapticWeightsProvider->generate($lastLayerSize);
        }
        $synaptic_weights[] = $outputLayerWeights;

        return $synaptic_weights;
    }
}

View File

@@ -19,6 +19,7 @@
"laravel/wayfinder": "^0.1.9"
},
"require-dev": {
"brianium/paratest": "^7.8",
"fakerphp/faker": "^1.23",
"laravel/pail": "^1.2.2",
"laravel/pint": "^1.24",
@@ -50,7 +51,7 @@
],
"dev": [
"Composer\\Config::disableProcessTimeout",
"npx concurrently -c \"#93c5fd,#c4b5fd,#fb7185,#fdba74\" \"php artisan serve\" \"php artisan queue:listen --tries=1 --timeout=0\" \"php artisan pail --timeout=0\" \"npm run dev\" \"php artisan reverb:start --debug\" --names=server,queue,logs,vite,reverb --kill-others"
"npx concurrently -c \"#93c5fd,#c4b5fd,#fb7185,#fdba74,#79dff0\" \"php artisan serve\" \"php artisan queue:listen --tries=1 --timeout=0\" \"php artisan pail --timeout=0\" \"npm run dev\" \"php artisan reverb:start --debug\" --names=server,queue,logs,vite,reverb --kill-others"
],
"dev:ssr": [
"npm run build:ssr",

216
composer.lock generated
View File

@@ -4,7 +4,7 @@
"Read more about it at https://getcomposer.org/doc/01-basic-usage.md#installing-dependencies",
"This file is @generated automatically"
],
"content-hash": "a72ab6feeee69457d0085c4a5e4580f7",
"content-hash": "93a44ad3435bb0cb19a8bd3b2b700b4f",
"packages": [
{
"name": "bacon/bacon-qr-code",
@@ -7544,6 +7544,99 @@
}
],
"packages-dev": [
{
"name": "brianium/paratest",
"version": "v7.8.5",
"source": {
"type": "git",
"url": "https://github.com/paratestphp/paratest.git",
"reference": "9b324c8fc319cf9728b581c7a90e1c8f6361c5e5"
},
"dist": {
"type": "zip",
"url": "https://api.github.com/repos/paratestphp/paratest/zipball/9b324c8fc319cf9728b581c7a90e1c8f6361c5e5",
"reference": "9b324c8fc319cf9728b581c7a90e1c8f6361c5e5",
"shasum": ""
},
"require": {
"ext-dom": "*",
"ext-pcre": "*",
"ext-reflection": "*",
"ext-simplexml": "*",
"fidry/cpu-core-counter": "^1.3.0",
"jean85/pretty-package-versions": "^2.1.1",
"php": "~8.2.0 || ~8.3.0 || ~8.4.0 || ~8.5.0",
"phpunit/php-code-coverage": "^11.0.12",
"phpunit/php-file-iterator": "^5.1.0",
"phpunit/php-timer": "^7.0.1",
"phpunit/phpunit": "^11.5.46",
"sebastian/environment": "^7.2.1",
"symfony/console": "^6.4.22 || ^7.3.4 || ^8.0.3",
"symfony/process": "^6.4.20 || ^7.3.4 || ^8.0.3"
},
"require-dev": {
"doctrine/coding-standard": "^12.0.0",
"ext-pcov": "*",
"ext-posix": "*",
"phpstan/phpstan": "^2.1.33",
"phpstan/phpstan-deprecation-rules": "^2.0.3",
"phpstan/phpstan-phpunit": "^2.0.11",
"phpstan/phpstan-strict-rules": "^2.0.7",
"squizlabs/php_codesniffer": "^3.13.5",
"symfony/filesystem": "^6.4.13 || ^7.3.2 || ^8.0.1"
},
"bin": [
"bin/paratest",
"bin/paratest_for_phpstorm"
],
"type": "library",
"autoload": {
"psr-4": {
"ParaTest\\": [
"src/"
]
}
},
"notification-url": "https://packagist.org/downloads/",
"license": [
"MIT"
],
"authors": [
{
"name": "Brian Scaturro",
"email": "scaturrob@gmail.com",
"role": "Developer"
},
{
"name": "Filippo Tessarotto",
"email": "zoeslam@gmail.com",
"role": "Developer"
}
],
"description": "Parallel testing for PHP",
"homepage": "https://github.com/paratestphp/paratest",
"keywords": [
"concurrent",
"parallel",
"phpunit",
"testing"
],
"support": {
"issues": "https://github.com/paratestphp/paratest/issues",
"source": "https://github.com/paratestphp/paratest/tree/v7.8.5"
},
"funding": [
{
"url": "https://github.com/sponsors/Slamdunk",
"type": "github"
},
{
"url": "https://paypal.me/filippotessarotto",
"type": "paypal"
}
],
"time": "2026-01-08T08:02:38+00:00"
},
{
"name": "fakerphp/faker",
"version": "v1.24.1",
@@ -7607,6 +7700,67 @@
},
"time": "2024-11-21T13:46:39+00:00"
},
{
"name": "fidry/cpu-core-counter",
"version": "1.3.0",
"source": {
"type": "git",
"url": "https://github.com/theofidry/cpu-core-counter.git",
"reference": "db9508f7b1474469d9d3c53b86f817e344732678"
},
"dist": {
"type": "zip",
"url": "https://api.github.com/repos/theofidry/cpu-core-counter/zipball/db9508f7b1474469d9d3c53b86f817e344732678",
"reference": "db9508f7b1474469d9d3c53b86f817e344732678",
"shasum": ""
},
"require": {
"php": "^7.2 || ^8.0"
},
"require-dev": {
"fidry/makefile": "^0.2.0",
"fidry/php-cs-fixer-config": "^1.1.2",
"phpstan/extension-installer": "^1.2.0",
"phpstan/phpstan": "^2.0",
"phpstan/phpstan-deprecation-rules": "^2.0.0",
"phpstan/phpstan-phpunit": "^2.0",
"phpstan/phpstan-strict-rules": "^2.0",
"phpunit/phpunit": "^8.5.31 || ^9.5.26",
"webmozarts/strict-phpunit": "^7.5"
},
"type": "library",
"autoload": {
"psr-4": {
"Fidry\\CpuCoreCounter\\": "src/"
}
},
"notification-url": "https://packagist.org/downloads/",
"license": [
"MIT"
],
"authors": [
{
"name": "Théo FIDRY",
"email": "theo.fidry@gmail.com"
}
],
"description": "Tiny utility to get the number of CPU cores.",
"keywords": [
"CPU",
"core"
],
"support": {
"issues": "https://github.com/theofidry/cpu-core-counter/issues",
"source": "https://github.com/theofidry/cpu-core-counter/tree/1.3.0"
},
"funding": [
{
"url": "https://github.com/theofidry",
"type": "github"
}
],
"time": "2025-08-14T07:29:31+00:00"
},
{
"name": "filp/whoops",
"version": "2.18.4",
@@ -7729,6 +7883,66 @@
},
"time": "2025-04-30T06:54:44+00:00"
},
{
"name": "jean85/pretty-package-versions",
"version": "2.1.1",
"source": {
"type": "git",
"url": "https://github.com/Jean85/pretty-package-versions.git",
"reference": "4d7aa5dab42e2a76d99559706022885de0e18e1a"
},
"dist": {
"type": "zip",
"url": "https://api.github.com/repos/Jean85/pretty-package-versions/zipball/4d7aa5dab42e2a76d99559706022885de0e18e1a",
"reference": "4d7aa5dab42e2a76d99559706022885de0e18e1a",
"shasum": ""
},
"require": {
"composer-runtime-api": "^2.1.0",
"php": "^7.4|^8.0"
},
"require-dev": {
"friendsofphp/php-cs-fixer": "^3.2",
"jean85/composer-provided-replaced-stub-package": "^1.0",
"phpstan/phpstan": "^2.0",
"phpunit/phpunit": "^7.5|^8.5|^9.6",
"rector/rector": "^2.0",
"vimeo/psalm": "^4.3 || ^5.0"
},
"type": "library",
"extra": {
"branch-alias": {
"dev-master": "1.x-dev"
}
},
"autoload": {
"psr-4": {
"Jean85\\": "src/"
}
},
"notification-url": "https://packagist.org/downloads/",
"license": [
"MIT"
],
"authors": [
{
"name": "Alessandro Lai",
"email": "alessandro.lai85@gmail.com"
}
],
"description": "A library to get pretty versions strings of installed dependencies",
"keywords": [
"composer",
"package",
"release",
"versions"
],
"support": {
"issues": "https://github.com/Jean85/pretty-package-versions/issues",
"source": "https://github.com/Jean85/pretty-package-versions/tree/2.1.1"
},
"time": "2025-03-19T14:43:43+00:00"
},
{
"name": "laravel/pail",
"version": "v1.2.6",

View File

@@ -7,12 +7,12 @@ return [
* Beyond this number of iterations, the broadcast will be split every x iterations,
* x is limited_broadcast_number
*/
'limited_broadcast_iterations' => 200,
'limited_broadcast_iterations' => 100,
/**
* How many broadcasts are sent when in limited broadcast mode
*/
'limited_broadcast_number' => 200,
'limited_broadcast_number' => 100,
'broadcast_iteration_size' => 75,

Binary file not shown.

Before

Width:  |  Height:  |  Size: 1.6 KiB

After

Width:  |  Height:  |  Size: 46 KiB

View File

@@ -1,150 +1,150 @@
2.8,1.9,-1,-1,1
2.9,1.8,-1,-1,1
2.2,1.5,-1,-1,1
1.3,5.6,-1,1,-1
-1.2,1.6,1,-1,-1
2.5,1.7,-1,-1,1
3.1,2.3,-1,-1,1
2.8,2.4,-1,-1,1
-1.2,1.5,1,-1,-1
1.4,6.1,-1,1,-1
-1.2,1.3,1,-1,-1
3.1,1.8,-1,-1,1
3.3,2.5,-1,-1,1
3.4,2.3,-1,-1,1
1.5,5.9,-1,1,-1
1.3,5.6,-1,1,-1
-1.2,1,1,-1,-1
3,2.1,-1,-1,1
1.4,5.2,-1,1,-1
1.2,5.5,-1,1,-1
1.4,6.7,-1,1,-1
-1.3,1.7,1,-1,-1
1,5.7,-1,1,-1
2.6,1.4,-1,-1,1
1.6,6,-1,1,-1
1.2,5.8,-1,1,-1
1.4,6.1,-1,1,-1
1.3,5.7,-1,1,-1
-1.2,1.4,1,-1,-1
1,5.5,-1,1,-1
3.1,2.1,-1,-1,1
-1.4,1.7,1,-1,-1
1.6,6.3,-1,1,-1
-1.2,1.4,1,-1,-1
2.7,1.9,-1,-1,1
1.1,5.5,-1,1,-1
3,1.8,-1,-1,1
3.8,2,-1,-1,1
2.8,2.1,-1,-1,1
1,5,-1,1,-1
2.8,2,-1,-1,1
-1.2,1.6,1,-1,-1
1.3,6.6,-1,1,-1
1.6,6,-1,1,-1
3,1.6,-1,-1,1
-1.1,1.1,1,-1,-1
2.9,1.8,-1,-1,1
3,1.8,-1,-1,1
-1.2,1.4,1,-1,-1
1,5.8,-1,1,-1
1.2,5.7,-1,1,-1
2.8,1.5,-1,-1,1
-1.4,1.6,1,-1,-1
-1.2,1.4,1,-1,-1
2.8,2.2,-1,-1,1
-1.5,1.7,1,-1,-1
1,4.9,-1,1,-1
1.3,5.7,-1,1,-1
3.2,2.3,-1,-1,1
-1.3,1.5,1,-1,-1
-1.2,1.2,1,-1,-1
-1.2,1.6,1,-1,-1
3,2.1,-1,-1,1
-1.4,1.5,1,-1,-1
1.3,5.5,-1,1,-1
3.3,2.5,-1,-1,1
3.1,2.4,-1,-1,1
1.8,5.9,-1,1,-1
2.7,1.9,-1,-1,1
1.5,6.3,-1,1,-1
-1.3,1.4,1,-1,-1
1,5,-1,1,-1
3.2,2.3,-1,-1,1
-1.3,1.4,1,-1,-1
3,2.1,-1,-1,1
3.2,2,-1,-1,1
1.3,6.3,-1,1,-1
1.4,7,-1,1,-1
-1.2,1.7,1,-1,-1
1.4,6.6,-1,1,-1
1.2,5.8,-1,1,-1
3,2,-1,-1,1
-1.2,1.6,1,-1,-1
1.5,6.2,-1,1,-1
1.5,6.4,-1,1,-1
-1.2,1.9,1,-1,-1
3.2,2.3,-1,-1,1
-1.2,1.5,1,-1,-1
1.5,5.4,-1,1,-1
1.5,6,-1,1,-1
1.5,6.9,-1,1,-1
-1.4,1.5,1,-1,-1
1.3,6.4,-1,1,-1
2.8,2,-1,-1,1
-1.2,1.5,1,-1,-1
-1.1,1.5,1,-1,-1
3.6,2.5,-1,-1,1
1.1,5.1,-1,1,-1
-1.1,1.5,1,-1,-1
-1.2,1.5,1,-1,-1
3.4,2.4,-1,-1,1
-1.3,1.3,1,-1,-1
3,2.3,-1,-1,1
-1.1,1.5,1,-1,-1
1.1,5.6,-1,1,-1
1.3,5.6,-1,1,-1
-1.2,1.5,1,-1,-1
-1.2,1.3,1,-1,-1
1.7,6.7,-1,1,-1
-1.2,1.3,1,-1,-1
-1.4,1.3,1,-1,-1
2.5,1.8,-1,-1,1
-1.4,1.5,1,-1,-1
1.5,6.5,-1,1,-1
2.6,2.3,-1,-1,1
-1.2,1.4,1,-1,-1
1.5,5.6,-1,1,-1
1.3,5.7,-1,1,-1
1.2,6.1,-1,1,-1
3,2.2,-1,-1,1
3,1.8,-1,-1,1
-1.1,1.4,1,-1,-1
1.4,6.8,-1,1,-1
-1.6,1.6,1,-1,-1
-1.2,1.4,1,-1,-1
3.2,1.8,-1,-1,1
-1.2,1.5,1,-1,-1
2.7,1.8,-1,-1,1
-1.3,1.4,1,-1,-1
1.3,6.2,-1,1,-1
-1.3,1.3,1,-1,-1
3,1.8,-1,-1,1
2.7,1.9,-1,-1,1
-1.2,1.4,1,-1,-1
-1.2,1.2,1,-1,-1
1.5,6.7,-1,1,-1
2.5,1.9,-1,-1,1
3.3,2.1,-1,-1,1
2.8,1.8,-1,-1,1
-1.2,1.3,1,-1,-1
3.8,2.2,-1,-1,1
2.5,2,-1,-1,1
1.3,6.1,-1,1,-1
-1.2,1.4,1,-1,-1
-1.1,1.5,1,-1,-1
3,2.3,-1,-1,1
-1.4,1.9,1,-1,-1
-1.2,1.6,1,-1,-1
1,6,-1,1,-1
1.3,5.5,-1,1,-1
2.8,1.9,1
2.9,1.8,1
2.2,1.5,1
1.3,5.6,0
-1.2,1.6,-1
2.5,1.7,1
3.1,2.3,1
2.8,2.4,1
-1.2,1.5,-1
1.4,6.1,0
-1.2,1.3,-1
3.1,1.8,1
3.3,2.5,1
3.4,2.3,1
1.5,5.9,0
1.3,5.6,0
-1.2,1,-1
3,2.1,1
1.4,5.2,0
1.2,5.5,0
1.4,6.7,0
-1.3,1.7,-1
1,5.7,0
2.6,1.4,1
1.6,6,0
1.2,5.8,0
1.4,6.1,0
1.3,5.7,0
-1.2,1.4,-1
1,5.5,0
3.1,2.1,1
-1.4,1.7,-1
1.6,6.3,0
-1.2,1.4,-1
2.7,1.9,1
1.1,5.5,0
3,1.8,1
3.8,2,1
2.8,2.1,1
1,5,0
2.8,2,1
-1.2,1.6,-1
1.3,6.6,0
1.6,6,0
3,1.6,1
-1.1,1.1,-1
2.9,1.8,1
3,1.8,1
-1.2,1.4,-1
1,5.8,0
1.2,5.7,0
2.8,1.5,1
-1.4,1.6,-1
-1.2,1.4,-1
2.8,2.2,1
-1.5,1.7,-1
1,4.9,0
1.3,5.7,0
3.2,2.3,1
-1.3,1.5,-1
-1.2,1.2,-1
-1.2,1.6,-1
3,2.1,1
-1.4,1.5,-1
1.3,5.5,0
3.3,2.5,1
3.1,2.4,1
1.8,5.9,0
2.7,1.9,1
1.5,6.3,0
-1.3,1.4,-1
1,5,0
3.2,2.3,1
-1.3,1.4,-1
3,2.1,1
3.2,2,1
1.3,6.3,0
1.4,7,0
-1.2,1.7,-1
1.4,6.6,0
1.2,5.8,0
3,2,1
-1.2,1.6,-1
1.5,6.2,0
1.5,6.4,0
-1.2,1.9,-1
3.2,2.3,1
-1.2,1.5,-1
1.5,5.4,0
1.5,6,0
1.5,6.9,0
-1.4,1.5,-1
1.3,6.4,0
2.8,2,1
-1.2,1.5,-1
-1.1,1.5,-1
3.6,2.5,1
1.1,5.1,0
-1.1,1.5,-1
-1.2,1.5,-1
3.4,2.4,1
-1.3,1.3,-1
3,2.3,1
-1.1,1.5,-1
1.1,5.6,0
1.3,5.6,0
-1.2,1.5,-1
-1.2,1.3,-1
1.7,6.7,0
-1.2,1.3,-1
-1.4,1.3,-1
2.5,1.8,1
-1.4,1.5,-1
1.5,6.5,0
2.6,2.3,1
-1.2,1.4,-1
1.5,5.6,0
1.3,5.7,0
1.2,6.1,0
3,2.2,1
3,1.8,1
-1.1,1.4,-1
1.4,6.8,0
-1.6,1.6,-1
-1.2,1.4,-1
3.2,1.8,1
-1.2,1.5,-1
2.7,1.8,1
-1.3,1.4,-1
1.3,6.2,0
-1.3,1.3,-1
3,1.8,1
2.7,1.9,1
-1.2,1.4,-1
-1.2,1.2,-1
1.5,6.7,0
2.5,1.9,1
3.3,2.1,1
2.8,1.8,1
-1.2,1.3,-1
3.8,2.2,1
2.5,2,1
1.3,6.1,0
-1.2,1.4,-1
-1.1,1.5,-1
3,2.3,1
-1.4,1.9,-1
-1.2,1.6,-1
1,6,0
1.3,5.5,0
1 2.8 1.9 -1 1 -1
2 2.9 1.8 -1 1 -1
3 2.2 1.5 -1 1 -1
4 1.3 5.6 -1 -1 0 1
5 -1.2 1.6 1 -1 -1
6 2.5 1.7 -1 1 -1
7 3.1 2.3 -1 1 -1
8 2.8 2.4 -1 1 -1
9 -1.2 1.5 1 -1 -1
10 1.4 6.1 -1 -1 0 1
11 -1.2 1.3 1 -1 -1
12 3.1 1.8 -1 1 -1
13 3.3 2.5 -1 1 -1
14 3.4 2.3 -1 1 -1
15 1.5 5.9 -1 -1 0 1
16 1.3 5.6 -1 -1 0 1
17 -1.2 1 1 -1 -1
18 3 2.1 -1 1 -1
19 1.4 5.2 -1 -1 0 1
20 1.2 5.5 -1 -1 0 1
21 1.4 6.7 -1 -1 0 1
22 -1.3 1.7 1 -1 -1
23 1 5.7 -1 -1 0 1
24 2.6 1.4 -1 1 -1
25 1.6 6 -1 -1 0 1
26 1.2 5.8 -1 -1 0 1
27 1.4 6.1 -1 -1 0 1
28 1.3 5.7 -1 -1 0 1
29 -1.2 1.4 1 -1 -1
30 1 5.5 -1 -1 0 1
31 3.1 2.1 -1 1 -1
32 -1.4 1.7 1 -1 -1
33 1.6 6.3 -1 -1 0 1
34 -1.2 1.4 1 -1 -1
35 2.7 1.9 -1 1 -1
36 1.1 5.5 -1 -1 0 1
37 3 1.8 -1 1 -1
38 3.8 2 -1 1 -1
39 2.8 2.1 -1 1 -1
40 1 5 -1 -1 0 1
41 2.8 2 -1 1 -1
42 -1.2 1.6 1 -1 -1
43 1.3 6.6 -1 -1 0 1
44 1.6 6 -1 -1 0 1
45 3 1.6 -1 1 -1
46 -1.1 1.1 1 -1 -1
47 2.9 1.8 -1 1 -1
48 3 1.8 -1 1 -1
49 -1.2 1.4 1 -1 -1
50 1 5.8 -1 -1 0 1
51 1.2 5.7 -1 -1 0 1
52 2.8 1.5 -1 1 -1
53 -1.4 1.6 1 -1 -1
54 -1.2 1.4 1 -1 -1
55 2.8 2.2 -1 1 -1
56 -1.5 1.7 1 -1 -1
57 1 4.9 -1 -1 0 1
58 1.3 5.7 -1 -1 0 1
59 3.2 2.3 -1 1 -1
60 -1.3 1.5 1 -1 -1
61 -1.2 1.2 1 -1 -1
62 -1.2 1.6 1 -1 -1
63 3 2.1 -1 1 -1
64 -1.4 1.5 1 -1 -1
65 1.3 5.5 -1 -1 0 1
66 3.3 2.5 -1 1 -1
67 3.1 2.4 -1 1 -1
68 1.8 5.9 -1 -1 0 1
69 2.7 1.9 -1 1 -1
70 1.5 6.3 -1 -1 0 1
71 -1.3 1.4 1 -1 -1
72 1 5 -1 -1 0 1
73 3.2 2.3 -1 1 -1
74 -1.3 1.4 1 -1 -1
75 3 2.1 -1 1 -1
76 3.2 2 -1 1 -1
77 1.3 6.3 -1 -1 0 1
78 1.4 7 -1 -1 0 1
79 -1.2 1.7 1 -1 -1
80 1.4 6.6 -1 -1 0 1
81 1.2 5.8 -1 -1 0 1
82 3 2 -1 1 -1
83 -1.2 1.6 1 -1 -1
84 1.5 6.2 -1 -1 0 1
85 1.5 6.4 -1 -1 0 1
86 -1.2 1.9 1 -1 -1
87 3.2 2.3 -1 1 -1
88 -1.2 1.5 1 -1 -1
89 1.5 5.4 -1 -1 0 1
90 1.5 6 -1 -1 0 1
91 1.5 6.9 -1 -1 0 1
92 -1.4 1.5 1 -1 -1
93 1.3 6.4 -1 -1 0 1
94 2.8 2 -1 1 -1
95 -1.2 1.5 1 -1 -1
96 -1.1 1.5 1 -1 -1
97 3.6 2.5 -1 1 -1
98 1.1 5.1 -1 -1 0 1
99 -1.1 1.5 1 -1 -1
100 -1.2 1.5 1 -1 -1
101 3.4 2.4 -1 1 -1
102 -1.3 1.3 1 -1 -1
103 3 2.3 -1 1 -1
104 -1.1 1.5 1 -1 -1
105 1.1 5.6 -1 -1 0 1
106 1.3 5.6 -1 -1 0 1
107 -1.2 1.5 1 -1 -1
108 -1.2 1.3 1 -1 -1
109 1.7 6.7 -1 -1 0 1
110 -1.2 1.3 1 -1 -1
111 -1.4 1.3 1 -1 -1
112 2.5 1.8 -1 1 -1
113 -1.4 1.5 1 -1 -1
114 1.5 6.5 -1 -1 0 1
115 2.6 2.3 -1 1 -1
116 -1.2 1.4 1 -1 -1
117 1.5 5.6 -1 -1 0 1
118 1.3 5.7 -1 -1 0 1
119 1.2 6.1 -1 -1 0 1
120 3 2.2 -1 1 -1
121 3 1.8 -1 1 -1
122 -1.1 1.4 1 -1 -1
123 1.4 6.8 -1 -1 0 1
124 -1.6 1.6 1 -1 -1
125 -1.2 1.4 1 -1 -1
126 3.2 1.8 -1 1 -1
127 -1.2 1.5 1 -1 -1
128 2.7 1.8 -1 1 -1
129 -1.3 1.4 1 -1 -1
130 1.3 6.2 -1 -1 0 1
131 -1.3 1.3 1 -1 -1
132 3 1.8 -1 1 -1
133 2.7 1.9 -1 1 -1
134 -1.2 1.4 1 -1 -1
135 -1.2 1.2 1 -1 -1
136 1.5 6.7 -1 -1 0 1
137 2.5 1.9 -1 1 -1
138 3.3 2.1 -1 1 -1
139 2.8 1.8 -1 1 -1
140 -1.2 1.3 1 -1 -1
141 3.8 2.2 -1 1 -1
142 2.5 2 -1 1 -1
143 1.3 6.1 -1 -1 0 1
144 -1.2 1.4 1 -1 -1
145 -1.1 1.5 1 -1 -1
146 3 2.3 -1 1 -1
147 -1.4 1.9 1 -1 -1
148 -1.2 1.6 1 -1 -1
149 1 6 -1 -1 0 1
150 1.3 5.5 -1 -1 0 1

View File

@@ -1,4 +1,4 @@
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,-1,-1,-1
0,0,1,0,0,0,0,1,0,0,1,1,1,1,1,0,0,1,0,0,0,0,1,0,0,-1,1,-1,-1
1,0,0,0,1,0,1,0,1,0,0,0,1,0,0,0,1,0,1,0,1,0,0,0,1,-1,-1,1,-1
0,0,0,0,0,0,1,1,1,0,0,1,0,1,0,0,1,1,1,0,0,0,0,0,0,-1,-1,-1,1
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
0,0,1,0,0,0,0,1,0,0,1,1,1,1,1,0,0,1,0,0,0,0,1,0,0,1
1,0,0,0,1,0,1,0,1,0,0,0,1,0,0,0,1,0,1,0,1,0,0,0,1,2
0,0,0,0,0,0,1,1,1,0,0,1,0,1,0,0,1,1,1,0,0,0,0,0,0,3
1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 -1 -1 -1
2 0 0 1 0 0 0 0 1 0 0 1 1 1 1 1 0 0 1 0 0 0 0 1 0 0 -1 1 1 -1 -1
3 1 0 0 0 1 0 1 0 1 0 0 0 1 0 0 0 1 0 1 0 1 0 0 0 1 -1 2 -1 1 -1
4 0 0 0 0 0 0 1 1 1 0 0 1 0 1 0 0 1 1 1 0 0 0 0 0 0 -1 3 -1 -1 1

Binary file not shown.

Before

Width:  |  Height:  |  Size: 4.2 KiB

After

Width:  |  Height:  |  Size: 24 KiB

View File

@@ -1,3 +0,0 @@
<svg width="166" height="166" viewBox="0 0 166 166" fill="none" xmlns="http://www.w3.org/2000/svg">
<path fill-rule="evenodd" clip-rule="evenodd" d="M162.041 38.7592C162.099 38.9767 162.129 39.201 162.13 39.4264V74.4524C162.13 74.9019 162.011 75.3435 161.786 75.7325C161.561 76.1216 161.237 76.4442 160.847 76.6678L131.462 93.5935V127.141C131.462 128.054 130.977 128.897 130.186 129.357L68.8474 164.683C68.707 164.763 68.5538 164.814 68.4007 164.868C68.3432 164.887 68.289 164.922 68.2284 164.938C67.7996 165.051 67.3489 165.051 66.9201 164.938C66.8499 164.919 66.7861 164.881 66.7191 164.855C66.5787 164.804 66.4319 164.76 66.2979 164.683L4.97219 129.357C4.58261 129.133 4.2589 128.81 4.0337 128.421C3.8085 128.032 3.68976 127.591 3.68945 127.141L3.68945 22.0634C3.68945 21.8336 3.72136 21.6101 3.7788 21.393C3.79794 21.3196 3.84262 21.2526 3.86814 21.1791C3.91601 21.0451 3.96068 20.9078 4.03088 20.7833C4.07874 20.7003 4.14894 20.6333 4.20638 20.5566C4.27977 20.4545 4.34678 20.3491 4.43293 20.2598C4.50632 20.1863 4.60205 20.1321 4.68501 20.0682C4.77755 19.9916 4.86051 19.9086 4.96581 19.848L35.6334 2.18492C36.0217 1.96139 36.4618 1.84375 36.9098 1.84375C37.3578 1.84375 37.7979 1.96139 38.1862 2.18492L68.8506 19.848H68.857C68.9591 19.9118 69.0452 19.9916 69.1378 20.065C69.2207 20.1289 69.3133 20.1863 69.3867 20.2566C69.476 20.3491 69.5398 20.4545 69.6164 20.5566C69.6707 20.6333 69.7441 20.7003 69.7887 20.7833C69.8621 20.911 69.9036 21.0451 69.9546 21.1791C69.9802 21.2526 70.0248 21.3196 70.044 21.3962C70.1027 21.6138 70.1328 21.8381 70.1333 22.0634V87.6941L95.686 72.9743V39.4232C95.686 39.1997 95.7179 38.9731 95.7753 38.7592C95.7977 38.6826 95.8391 38.6155 95.8647 38.5421C95.9157 38.408 95.9604 38.2708 96.0306 38.1463C96.0785 38.0633 96.1487 37.9962 96.2029 37.9196C96.2795 37.8175 96.3433 37.7121 96.4326 37.6227C96.506 37.5493 96.5986 37.495 96.6815 37.4312C96.7773 37.3546 96.8602 37.2716 96.9623 37.2109L127.633 19.5479C128.021 19.324 128.461 19.2062 128.91 19.2062C129.358 19.2062 129.798 19.324 130.186 19.5479L160.85 37.2109C160.959 37.2748 161.042 37.3546 161.137 
37.428C161.217 37.4918 161.31 37.5493 161.383 37.6195C161.473 37.7121 161.536 37.8175 161.613 37.9196C161.67 37.9962 161.741 38.0633 161.785 38.1463C161.859 38.2708 161.9 38.408 161.951 38.5421C161.98 38.6155 162.021 38.6826 162.041 38.7592ZM157.018 72.9743V43.8477L146.287 50.028L131.462 58.5675V87.6941L157.021 72.9743H157.018ZM126.354 125.663V96.5176L111.771 104.85L70.1301 128.626V158.046L126.354 125.663ZM8.80126 26.4848V125.663L65.0183 158.043V128.629L35.6494 112L35.6398 111.994L35.6271 111.988C35.5281 111.93 35.4452 111.847 35.3526 111.777C35.2729 111.713 35.1803 111.662 35.1101 111.592L35.1038 111.582C35.0208 111.502 34.9634 111.403 34.8932 111.314C34.8293 111.228 34.7528 111.154 34.7017 111.065L34.6985 111.055C34.6411 110.96 34.606 110.845 34.5645 110.736C34.523 110.64 34.4688 110.551 34.4432 110.449C34.4113 110.328 34.4049 110.197 34.3922 110.072C34.3794 109.976 34.3539 109.881 34.3539 109.785V109.778V41.2045L19.5322 32.6619L8.80126 26.4848ZM36.913 7.35007L11.3635 22.0634L36.9066 36.7768L62.4529 22.0602L36.9066 7.35007H36.913ZM50.1999 99.1736L65.0215 90.6374V26.4848L54.2906 32.6651L39.4657 41.2045V105.357L50.1999 99.1736ZM128.91 24.713L103.363 39.4264L128.91 54.1397L154.453 39.4232L128.91 24.713ZM126.354 58.5675L111.529 50.028L100.798 43.8477V72.9743L115.619 81.5106L126.354 87.6941V58.5675ZM67.5711 124.205L105.042 102.803L123.772 92.109L98.2451 77.4053L68.8538 94.3341L42.0663 109.762L67.5711 124.205Z" fill="#FF2D20"/>
</svg>

Before

Width:  |  Height:  |  Size: 3.5 KiB

View File

@@ -33,9 +33,7 @@ const rowBgDark = computed(() => {
<th>Époch</th>
<th>Exemple</th>
<th
v-for="(weight, index) in allWeightPerIteration[
allWeightPerIteration.length - 1
]"
v-for="(weight, index) in allWeightPerIteration[0]"
v-bind:key="index"
>
X<sub>{{ index }}</sub>
@@ -60,7 +58,10 @@ const rowBgDark = computed(() => {
<td>{{ iteration.error.toFixed(2) }}</td>
</tr>
<tr v-if="props.trainingEnded" class="bg-red-400 dark:bg-red-900 text-center">
<tr
v-if="props.trainingEnded"
class="bg-red-400 text-center dark:bg-red-900"
>
<td colspan="100%">
<strong>Entraînement terminé :</strong>
{{ props.trainingEndReason }}

View File

@@ -20,6 +20,16 @@ const links = [
href: '/perceptron',
data: { type: 'adaline' },
},
{
name: 'Mono-couche',
href: '/perceptron',
data: { type: 'monolayer' },
},
{
name: 'Multi-couche',
href: '/perceptron',
data: { type: 'multilayer' },
},
];
const isActiveLink = (link: any) => {

View File

@@ -5,7 +5,7 @@ import type {
BubbleDataPoint,
Point,
} from 'chart.js';
import { computed } from 'vue';
import { computed, ref } from 'vue';
import { Chart } from 'vue-chartjs';
import { colors, gridColor, gridColorBold } from '@/types/graphs';
import type { Iteration } from '@/types/perceptron';
@@ -16,6 +16,10 @@ const props = defineProps<{
activationFunction: (x: number) => number;
}>();
const examplesNumber = computed(() => {
return props.cleanedDataset.reduce((sum, dataset) => sum + dataset.data.length, 0);
});
const farLeftDataPointX = computed(() => {
if (props.cleanedDataset.length === 0) {
return 0;
@@ -25,6 +29,15 @@ const farLeftDataPointX = computed(() => {
);
return minX;
});
const farBottomDataPointY = computed(() => {
if (props.cleanedDataset.length === 0) {
return 0;
}
const minY = Math.min(
...props.cleanedDataset.flatMap((d) => d.data.map((point) => point.y)),
);
return minY;
});
const farRightDataPointX = computed(() => {
if (props.cleanedDataset.length === 0) {
return 0;
@@ -34,33 +47,77 @@ const farRightDataPointX = computed(() => {
);
return maxX;
});
const farTopDataPointY = computed(() => {
if (props.cleanedDataset.length === 0) {
return 0;
}
const maxY = Math.max(
...props.cleanedDataset.flatMap((d) => d.data.map((point) => point.y)),
);
return maxY;
});
function getPerceptronOutput(
weightsNetwork: number[][][],
inputs: number[],
): number[] {
for (const layer of weightsNetwork) {
const nextInputs: number[] = [];
for (const neuron of layer) {
const bias = neuron[0];
const weights = neuron.slice(1);
let sum = bias;
for (let i = 0; i < weights.length; i++) {
sum += weights[i] * inputs[i];
}
const activated = props.activationFunction(sum);
nextInputs.push(activated);
}
inputs = nextInputs;
}
return inputs;
}
const nonLinearGraph = ref<boolean>(false);
function getPerceptronDecisionBoundaryDataset(
networkWeights: number[][][],
activationFunction: (x: number) => number = (x) => x,
): ChartDataset<
keyof ChartTypeRegistry,
number | Point | [number, number] | BubbleDataPoint | null
> {
>[] {
const label = 'Ligne de décision du Perceptron';
console.log('Calculating decision boundary with weights:', networkWeights);
if (
networkWeights.length == 1 &&
networkWeights[0].length == 1 &&
networkWeights[0][0].length == 3
networkWeights[0][0].length <= 3
) {
nonLinearGraph.value = false;
// Unique, 3 weights perceptron
const perceptronWeights = networkWeights[0][0]; // We take the unique perceptron
const perceptronWeights = [...networkWeights[0][0]]; // Copy of the unique perceptron weights
function perceptronLine(x: number): number {
if (perceptronWeights.length < 3) {
// If we have less than 3 weights, we assume missing weights are zero
return getPerceptronOutput(networkWeights, [x])[0];
}
// w0 + w1*x + w2*y = 0 => y = -(w1/w2)*x - w0/w2
const w2 = perceptronWeights[2] == 0 ? 1e-6 : perceptronWeights[2]; // Avoid division by zero
return -(perceptronWeights[1] / w2) * x - perceptronWeights[0] / w2;
}
// Simple line
return {
return [
{
type: 'line',
label: label,
data: [
@@ -76,60 +133,67 @@ function getPerceptronDecisionBoundaryDataset(
borderColor: '#FFF',
borderWidth: 2,
pointRadius: 0,
};
},
];
} else {
function forward(x1: number, x2: number): number {
let activations: number[] = [x1, x2];
nonLinearGraph.value = true;
for (const layer of networkWeights) {
const nextActivations: number[] = [];
const bubbleTransparency = '30';
const isInDataThreshold = 0.0;
for (const neuron of layer) {
const bias = neuron[0];
const weights = neuron.slice(1);
let sum = bias;
for (let i = 0; i < weights.length; i++) {
sum += weights[i] * activations[i];
}
const activated = activationFunction(sum);
nextActivations.push(activated);
}
activations = nextActivations;
}
return activations[0]; // on suppose sortie unique
// -------- 1⃣ Construction des datasets --------
const datasets: {
type: string;
label: string;
data: Point[];
backgroundColor: string;
pointRadius: number;
borderWidth: number;
order: number;
}[] = [];
// For the number of neuron in the last layer
const lastLayer = networkWeights[networkWeights.length - 1];
for (let i = 0; i < lastLayer.length; i++) {
const dataset = {
type: 'scatter',
label: label,
data: [], // Will be filled with the decision boundary points
backgroundColor: colors[i] + bubbleTransparency || '#AAA',
pointRadius: 15,
borderWidth: 0,
order: -1,
};
datasets.push(dataset);
}
// -------- 2⃣ Échantillonnage grille --------
const decisionBoundary: Point[] = [];
const min = -2;
const max = 2;
const step = 0.03;
const epsilon = 0.01;
const step =
Math.abs(
farRightDataPointX.value + 1 - (farLeftDataPointX.value - 1),
) / 50;
for (let x = min; x <= max; x += step) {
for (let y = min; y <= max; y += step) {
const value = forward(x, y);
if (Math.abs(value) < epsilon) {
decisionBoundary.push({ x, y });
for (
let x = farLeftDataPointX.value - 1;
x <= farRightDataPointX.value + 1;
x += step
) {
for (
let y = farBottomDataPointY.value - 1;
y <= farTopDataPointY.value + 1;
y += step
) {
const values = getPerceptronOutput(networkWeights, [x, y]);
values.forEach((v, i) => {
if (v > isInDataThreshold) {
datasets[i].data.push({ x, y });
}
});
}
}
// -------- 3⃣ Dataset ChartJS --------
return {
type: 'scatter',
label: label,
data: decisionBoundary,
backgroundColor: '#FFFFFF',
pointRadius: 1,
};
return datasets;
}
}
</script>
@@ -138,6 +202,7 @@ function getPerceptronDecisionBoundaryDataset(
<Chart
v-if="props.cleanedDataset.length > 0 || props.iterations.length > 0"
class="flex bg-primary dark:bg-transparent!"
type="scatter"
:options="{
responsive: true,
maintainAspectRatio: true,
@@ -150,6 +215,9 @@ function getPerceptronDecisionBoundaryDataset(
text: 'Ligne de décision du Perceptron',
},
},
animation: {
duration: nonLinearGraph || examplesNumber > 10 ? 0 : 1000, // Disable animations for instant updates
},
layout: {
padding: {
left: 10,
@@ -198,7 +266,7 @@ function getPerceptronDecisionBoundaryDataset(
})),
// Perceptron decision boundary
getPerceptronDecisionBoundaryDataset(
...getPerceptronDecisionBoundaryDataset(
props.iterations.length > 0
? props.iterations[props.iterations.length - 1].weights
: [[[0, 0, 0]]],

View File

@@ -5,7 +5,6 @@ import { Bar } from 'vue-chartjs';
import { colors, gridColor, gridColorBold } from '@/types/graphs';
import type { Iteration } from '@/types/perceptron';
import Toggle from './ui/toggle/Toggle.vue';
import { usePage } from '@inertiajs/vue3';
const props = defineProps<{
iterations: Iteration[];
@@ -95,6 +94,9 @@ const datasets = computed<
text: 'Nombre d\'erreurs par epoch',
},
},
animation: {
duration: iterations.length > 100 ? 0 : 1000, // Disable animations for instant updates
},
scales: {
x: {
stacked: true,

View File

@@ -127,6 +127,7 @@ watch(selectedDatasetCopy, (newValue) => {
name="dataset"
id="dataset-select"
v-model="selectedDatasetCopy"
class="cursor-pointer"
>
<NativeSelectOption value="" disabled
>Sélectionnez un dataset</NativeSelectOption
@@ -154,6 +155,7 @@ watch(selectedDatasetCopy, (newValue) => {
name="weight_init_method"
id="weight_init_method"
v-model="selectedMethod"
class="cursor-pointer"
>
<NativeSelectOption
v-for="method in ['zeros', 'random']"

View File

@@ -33,7 +33,6 @@
<title inertia>{{ config('app.name', 'Laravel') }}</title>
<link rel="icon" href="/favicon.ico" sizes="any">
<link rel="icon" href="/favicon.svg" type="image/svg+xml">
<link rel="apple-touch-icon" href="/apple-touch-icon.png">
<link rel="preconnect" href="https://fonts.bunny.net">

View File

@@ -14,8 +14,8 @@ class TrainingTestCase extends TestCase
$training->start();
// Assert that the final synaptic weights are as expected withing the margin of error
// $finalWeights = $training->getSynapticWeights();
// $this->assertEqualsWithDelta($expectedWeights, $finalWeights, $marginOfError, "Final synaptic weights do not match expected values.");
$finalWeights = $training->getSynapticWeights();
$this->assertEqualsWithDelta($expectedWeights, $finalWeights, $marginOfError, "Final synaptic weights do not match expected values.");
// Assert that the number of epochs taken is as expected
$this->assertEquals($expectedEpochs, $training->getEpoch(), "Expected training to take $expectedEpochs epochs, but it took {$training->getEpoch()} epochs.");