diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 092650111..295cff531 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -8,14 +8,8 @@ jobs:
runs-on: ${{ matrix.operating-system }}
strategy:
matrix:
-<<<<<<< HEAD
operating-system: [windows-latest, ubuntu-latest, macos-latest]
php-versions: ['8.4']
-=======
- operating-system: [ubuntu-latest, macos-latest]
- php-versions: ['8.0', '8.1', '8.2']
->>>>>>> master
-
steps:
- name: Checkout
uses: actions/checkout@v3
diff --git a/.gitignore b/.gitignore
index 08239f13d..bb704a7df 100644
--- a/.gitignore
+++ b/.gitignore
@@ -13,3 +13,12 @@ pyvenv.cfg
.idea
.vscode
.vs
+
+# Cache files
+/runtime/
+
+# Docker related files
+Dockerfile
+/docker/
+docker-compose.yml
+Makefile
diff --git a/.php-cs-fixer.dist.php b/.php-cs-fixer.dist.php
index 9a95a997e..bf912673a 100644
--- a/.php-cs-fixer.dist.php
+++ b/.php-cs-fixer.dist.php
@@ -3,115 +3,128 @@
use PhpCsFixer\Finder;
use PhpCsFixer\Config;
-$finder = Finder::create()->in(__DIR__)
- ->exclude('docs');
+$finder = Finder::create()
+ ->exclude([
+ 'docs',
+ 'vendor',
+ ])
+ ->in(__DIR__)
+ ->append([
+ __FILE__,
+ ]);
$config = new Config();
+$config
+ ->setCacheFile(__DIR__ . '/runtime/.php-cs-fixer.cache')
+ ->setRules(
+ [
+ '@PSR2' => true,
+ '@PHP84Migration' => true,
+ 'align_multiline_comment' => true,
+ 'array_syntax' => ['syntax' => 'short'],
+ 'backtick_to_shell_exec' => true,
+ 'binary_operator_spaces' => true,
+ 'blank_lines_before_namespace' => true,
+ 'blank_line_after_namespace' => true,
+ 'blank_line_after_opening_tag' => true,
+ 'blank_line_before_statement' => [
+ 'statements' => [
+ 'break', 'case', 'continue', 'declare', 'default', 'do', 'for',
+ 'if', 'foreach', 'return', 'switch', 'try', 'while',
+ ],
+ ],
+ 'cast_spaces' => ['space' => 'single'],
+ 'class_attributes_separation' => true,
+ 'combine_consecutive_issets' => true,
+ 'combine_consecutive_unsets' => true,
+ 'compact_nullable_type_declaration' => true,
+ 'concat_space' => ['spacing' => 'one'],
+ 'fully_qualified_strict_types' => true,
+ 'increment_style' => ['style' => 'pre'],
+ 'linebreak_after_opening_tag' => true,
+ 'list_syntax' => ['syntax' => 'short'],
+ 'lowercase_cast' => true,
+ 'lowercase_static_reference' => true,
+ 'magic_constant_casing' => true,
+ 'magic_method_casing' => true,
+ 'multiline_comment_opening_closing' => true,
+ 'multiline_whitespace_before_semicolons' => [
+ 'strategy' => 'no_multi_line',
+ ],
+ 'native_function_casing' => true,
+ 'native_type_declaration_casing' => true,
+ 'new_with_parentheses' => true,
+ 'no_alternative_syntax' => true,
+ 'no_blank_lines_after_class_opening' => true,
+ 'no_blank_lines_after_phpdoc' => true,
+ 'no_empty_statement' => true,
+ 'no_extra_blank_lines' => true,
+ 'no_leading_import_slash' => true,
+ 'no_leading_namespace_whitespace' => true,
+ 'no_mixed_echo_print' => ['use' => 'echo'],
+ 'no_null_property_initialization' => true,
+ 'no_short_bool_cast' => true,
+ 'no_singleline_whitespace_before_semicolons' => true,
+ 'no_spaces_around_offset' => true,
+ 'no_superfluous_phpdoc_tags' => false,
+ 'no_superfluous_elseif' => true,
+ 'no_trailing_comma_in_singleline' => true,
+ 'no_unneeded_control_parentheses' => true,
+ 'no_unneeded_braces' => true,
+ 'no_unset_cast' => true,
+ 'no_unused_imports' => true,
+ 'no_useless_else' => true,
+ 'no_useless_return' => true,
+ 'no_whitespace_before_comma_in_array' => true,
+ 'no_whitespace_in_blank_line' => true,
+ 'normalize_index_brace' => true,
+ 'nullable_type_declaration_for_default_null_value' => true,
+ 'object_operator_without_whitespace' => true,
+ 'ordered_class_elements' => [
+ 'order' => [
+ 'use_trait', 'constant_public', 'constant_protected',
+ 'constant_private', 'property_public_static', 'property_protected_static',
+ 'property_private_static', 'property_public', 'property_protected',
+ 'property_private', 'method_public_static', 'method_protected_static',
+ 'method_private_static', 'construct', 'destruct', 'phpunit',
+ 'method_public', 'method_protected', 'method_private', 'magic',
+ ],
+ 'sort_algorithm' => 'none',
+ ],
+ 'php_unit_fqcn_annotation' => true,
+ 'php_unit_method_casing' => ['case' => 'camel_case'],
+ 'phpdoc_add_missing_param_annotation' => ['only_untyped' => false],
+ 'phpdoc_align' => ['align' => 'left'],
+ 'phpdoc_line_span' => [
+ 'const' => 'multi',
+ 'method' => 'multi',
+ 'property' => 'multi',
+ ],
+ 'phpdoc_no_access' => true,
+ 'phpdoc_no_empty_return' => true,
+ 'phpdoc_no_useless_inheritdoc' => true,
+ 'phpdoc_order' => true,
+ 'phpdoc_scalar' => true,
+ 'phpdoc_single_line_var_spacing' => true,
+ 'phpdoc_to_comment' => false,
+ 'phpdoc_trim' => true,
+ 'phpdoc_trim_consecutive_blank_line_separation' => true,
+ 'phpdoc_var_without_name' => true,
+ 'protected_to_private' => true,
+ 'return_assignment' => false,
+ 'return_type_declaration' => ['space_before' => 'one'],
+ 'semicolon_after_instruction' => true,
+ 'short_scalar_cast' => true,
+ 'simplified_null_return' => true,
+ 'single_quote' => true,
+ 'single_line_comment_style' => true,
+ 'ternary_operator_spaces' => true,
+ 'ternary_to_null_coalescing' => true,
+ 'type_declaration_spaces' => true,
+ 'trim_array_spaces' => true,
+ 'unary_operator_spaces' => true,
+ 'whitespace_after_comma_in_array' => true,
+ ]
+ )->setFinder($finder);
-return $config->setRules([
- '@PSR2' => true,
- 'align_multiline_comment' => true,
- 'array_syntax' => ['syntax' => 'short'],
- 'backtick_to_shell_exec' => true,
- 'binary_operator_spaces' => true,
- 'blank_lines_before_namespace' => true,
- 'blank_line_after_namespace' => true,
- 'blank_line_after_opening_tag' => true,
- 'blank_line_before_statement' => [
- 'statements' => [
- 'break', 'case', 'continue', 'declare', 'default', 'do', 'for',
- 'if', 'foreach', 'return', 'switch', 'try', 'while',
- ],
- ],
- 'cast_spaces' => ['space' => 'single'],
- 'class_attributes_separation' => true,
- 'combine_consecutive_issets' => true,
- 'combine_consecutive_unsets' => true,
- 'compact_nullable_type_declaration' => true,
- 'concat_space' => ['spacing' => 'one'],
- 'fully_qualified_strict_types' => true,
- 'increment_style' => ['style' => 'pre'],
- 'linebreak_after_opening_tag' => true,
- 'list_syntax' => ['syntax' => 'short'],
- 'lowercase_cast' => true,
- 'lowercase_static_reference' => true,
- 'magic_constant_casing' => true,
- 'magic_method_casing' => true,
- 'multiline_comment_opening_closing' => true,
- 'multiline_whitespace_before_semicolons' => [
- 'strategy' => 'no_multi_line',
- ],
- 'native_function_casing' => true,
- 'native_type_declaration_casing' => true,
- 'new_with_parentheses' => true,
- 'no_alternative_syntax' => true,
- 'no_blank_lines_after_class_opening' => true,
- 'no_blank_lines_after_phpdoc' => true,
- 'no_empty_statement' => true,
- 'no_extra_blank_lines' => true,
- 'no_leading_import_slash' => true,
- 'no_leading_namespace_whitespace' => true,
- 'no_mixed_echo_print' => ['use' => 'echo'],
- 'no_null_property_initialization' => true,
- 'no_short_bool_cast' => true,
- 'no_singleline_whitespace_before_semicolons' => true,
- 'no_spaces_around_offset' => true,
- 'no_superfluous_phpdoc_tags' => false,
- 'no_superfluous_elseif' => true,
- 'no_trailing_comma_in_singleline' => true,
- 'no_unneeded_control_parentheses' => true,
- 'no_unneeded_braces' => true,
- 'no_unset_cast' => true,
- 'no_unused_imports' => true,
- 'no_useless_else' => true,
- 'no_useless_return' => true,
- 'no_whitespace_before_comma_in_array' => true,
- 'no_whitespace_in_blank_line' => true,
- 'normalize_index_brace' => true,
- 'nullable_type_declaration_for_default_null_value' => true,
- 'object_operator_without_whitespace' => true,
- 'ordered_class_elements' => [
- 'order' => [
- 'use_trait', 'constant_public', 'constant_protected',
- 'constant_private', 'property_public_static', 'property_protected_static',
- 'property_private_static', 'property_public', 'property_protected',
- 'property_private', 'method_public_static', 'method_protected_static',
- 'method_private_static', 'construct', 'destruct', 'phpunit',
- 'method_public', 'method_protected', 'method_private', 'magic',
- ],
- 'sort_algorithm' => 'none',
- ],
- 'php_unit_fqcn_annotation' => true,
- 'php_unit_method_casing' => ['case' => 'camel_case'],
- 'phpdoc_add_missing_param_annotation' => ['only_untyped' => false],
- 'phpdoc_align' => ['align' => 'left'],
- 'phpdoc_line_span' => [
- 'const' => 'multi',
- 'method' => 'multi',
- 'property' => 'multi',
- ],
- 'phpdoc_no_access' => true,
- 'phpdoc_no_empty_return' => true,
- 'phpdoc_no_useless_inheritdoc' => true,
- 'phpdoc_order' => true,
- 'phpdoc_scalar' => true,
- 'phpdoc_single_line_var_spacing' => true,
- 'phpdoc_to_comment' => false,
- 'phpdoc_trim' => true,
- 'phpdoc_trim_consecutive_blank_line_separation' => true,
- 'phpdoc_var_without_name' => true,
- 'protected_to_private' => true,
- 'return_assignment' => false,
- 'return_type_declaration' => ['space_before' => 'one'],
- 'semicolon_after_instruction' => true,
- 'short_scalar_cast' => true,
- 'simplified_null_return' => true,
- 'single_quote' => true,
- 'single_line_comment_style' => true,
- 'ternary_operator_spaces' => true,
- 'ternary_to_null_coalescing' => true,
- 'type_declaration_spaces' => true,
- 'trim_array_spaces' => true,
- 'unary_operator_spaces' => true,
- 'whitespace_after_comma_in_array' => true,
-])->setFinder($finder);
+return $config;
diff --git a/.phplint.yml b/.phplint.yml
new file mode 100644
index 000000000..531cc831f
--- /dev/null
+++ b/.phplint.yml
@@ -0,0 +1,8 @@
+path: ./
+jobs: 10
+cache-dir: runtime/.phplint.cache/
+extensions:
+ - php
+exclude:
+ - vendor/
+ - runtime/
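+
+# Note: with the "phplint" composer script added in this change, the linter can be
+# run as `composer phplint` (which typically resolves to `vendor/bin/phplint`) and
+# will pick up this configuration file by default.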
diff --git a/composer.json b/composer.json
index dad3b6843..2bdce584d 100644
--- a/composer.json
+++ b/composer.json
@@ -44,6 +44,7 @@
"require-dev": {
"friendsofphp/php-cs-fixer": "^3.73",
"phpbench/phpbench": "^1.0",
+ "overtrue/phplint": "^9.6.2",
"phpstan/extension-installer": "^1.0",
"phpstan/phpstan": "^2.0",
"phpstan/phpstan-phpunit": "^2.0",
@@ -89,6 +90,7 @@
"@putenv PHP_CS_FIXER_IGNORE_ENV=1",
"php-cs-fixer fix --config=.php-cs-fixer.dist.php"
],
+ "phplint": "phplint",
"test": "phpunit"
},
"config": {
diff --git a/docs/images/activation-functions/elu-derivative.png b/docs/images/activation-functions/elu-derivative.png
new file mode 100644
index 000000000..d035d8d09
Binary files /dev/null and b/docs/images/activation-functions/elu-derivative.png differ
diff --git a/docs/images/activation-functions/elu.png b/docs/images/activation-functions/elu.png
new file mode 100644
index 000000000..9924e1c9a
Binary files /dev/null and b/docs/images/activation-functions/elu.png differ
diff --git a/docs/images/activation-functions/gelu-derivative.png b/docs/images/activation-functions/gelu-derivative.png
new file mode 100644
index 000000000..6ae21f55b
Binary files /dev/null and b/docs/images/activation-functions/gelu-derivative.png differ
diff --git a/docs/images/activation-functions/gelu.png b/docs/images/activation-functions/gelu.png
new file mode 100644
index 000000000..4f22603b5
Binary files /dev/null and b/docs/images/activation-functions/gelu.png differ
diff --git a/docs/images/activation-functions/hard-sigmoid-derivative.png b/docs/images/activation-functions/hard-sigmoid-derivative.png
new file mode 100644
index 000000000..663ba8465
Binary files /dev/null and b/docs/images/activation-functions/hard-sigmoid-derivative.png differ
diff --git a/docs/images/activation-functions/hard-sigmoid.png b/docs/images/activation-functions/hard-sigmoid.png
new file mode 100644
index 000000000..9b7338f52
Binary files /dev/null and b/docs/images/activation-functions/hard-sigmoid.png differ
diff --git a/docs/images/activation-functions/hard-silu-derivative.png b/docs/images/activation-functions/hard-silu-derivative.png
new file mode 100644
index 000000000..4cb2a0320
Binary files /dev/null and b/docs/images/activation-functions/hard-silu-derivative.png differ
diff --git a/docs/images/activation-functions/hard-silu.png b/docs/images/activation-functions/hard-silu.png
new file mode 100644
index 000000000..db84ce40e
Binary files /dev/null and b/docs/images/activation-functions/hard-silu.png differ
diff --git a/docs/images/activation-functions/hyperbolic-tangent-derivative.png b/docs/images/activation-functions/hyperbolic-tangent-derivative.png
new file mode 100644
index 000000000..0a4cd3cac
Binary files /dev/null and b/docs/images/activation-functions/hyperbolic-tangent-derivative.png differ
diff --git a/docs/images/activation-functions/hyperbolic-tangent.png b/docs/images/activation-functions/hyperbolic-tangent.png
new file mode 100644
index 000000000..ebde9e9b5
Binary files /dev/null and b/docs/images/activation-functions/hyperbolic-tangent.png differ
diff --git a/docs/images/activation-functions/leaky-relu-derivative.png b/docs/images/activation-functions/leaky-relu-derivative.png
new file mode 100644
index 000000000..3d1322d1d
Binary files /dev/null and b/docs/images/activation-functions/leaky-relu-derivative.png differ
diff --git a/docs/images/activation-functions/leaky-relu.png b/docs/images/activation-functions/leaky-relu.png
new file mode 100644
index 000000000..82beb0348
Binary files /dev/null and b/docs/images/activation-functions/leaky-relu.png differ
diff --git a/docs/images/activation-functions/relu-derivative.png b/docs/images/activation-functions/relu-derivative.png
new file mode 100644
index 000000000..6e7b64149
Binary files /dev/null and b/docs/images/activation-functions/relu-derivative.png differ
diff --git a/docs/images/activation-functions/relu.png b/docs/images/activation-functions/relu.png
new file mode 100644
index 000000000..b95fd257b
Binary files /dev/null and b/docs/images/activation-functions/relu.png differ
diff --git a/docs/images/activation-functions/relu6-derivative.png b/docs/images/activation-functions/relu6-derivative.png
new file mode 100644
index 000000000..8063b381a
Binary files /dev/null and b/docs/images/activation-functions/relu6-derivative.png differ
diff --git a/docs/images/activation-functions/relu6.png b/docs/images/activation-functions/relu6.png
new file mode 100644
index 000000000..802165ca2
Binary files /dev/null and b/docs/images/activation-functions/relu6.png differ
diff --git a/docs/images/activation-functions/selu-derivative.png b/docs/images/activation-functions/selu-derivative.png
new file mode 100644
index 000000000..6ed8f6140
Binary files /dev/null and b/docs/images/activation-functions/selu-derivative.png differ
diff --git a/docs/images/activation-functions/selu.png b/docs/images/activation-functions/selu.png
new file mode 100644
index 000000000..0dc6b5c89
Binary files /dev/null and b/docs/images/activation-functions/selu.png differ
diff --git a/docs/images/activation-functions/sigmoid-derivative.png b/docs/images/activation-functions/sigmoid-derivative.png
new file mode 100644
index 000000000..bcb833eb3
Binary files /dev/null and b/docs/images/activation-functions/sigmoid-derivative.png differ
diff --git a/docs/images/activation-functions/sigmoid.png b/docs/images/activation-functions/sigmoid.png
new file mode 100644
index 000000000..f38415a14
Binary files /dev/null and b/docs/images/activation-functions/sigmoid.png differ
diff --git a/docs/images/activation-functions/silu-derivative.png b/docs/images/activation-functions/silu-derivative.png
new file mode 100644
index 000000000..7f265a75b
Binary files /dev/null and b/docs/images/activation-functions/silu-derivative.png differ
diff --git a/docs/images/activation-functions/silu.png b/docs/images/activation-functions/silu.png
new file mode 100644
index 000000000..9e66343ba
Binary files /dev/null and b/docs/images/activation-functions/silu.png differ
diff --git a/docs/images/activation-functions/softmax-derivative.png b/docs/images/activation-functions/softmax-derivative.png
new file mode 100644
index 000000000..8870f9921
Binary files /dev/null and b/docs/images/activation-functions/softmax-derivative.png differ
diff --git a/docs/images/activation-functions/softmax.png b/docs/images/activation-functions/softmax.png
new file mode 100644
index 000000000..f8a8da8d4
Binary files /dev/null and b/docs/images/activation-functions/softmax.png differ
diff --git a/docs/images/activation-functions/softplus-derivative.png b/docs/images/activation-functions/softplus-derivative.png
new file mode 100644
index 000000000..162757082
Binary files /dev/null and b/docs/images/activation-functions/softplus-derivative.png differ
diff --git a/docs/images/activation-functions/softplus.png b/docs/images/activation-functions/softplus.png
new file mode 100644
index 000000000..c11ac9b0d
Binary files /dev/null and b/docs/images/activation-functions/softplus.png differ
diff --git a/docs/images/activation-functions/softsign-derivative.png b/docs/images/activation-functions/softsign-derivative.png
new file mode 100644
index 000000000..b37d616ac
Binary files /dev/null and b/docs/images/activation-functions/softsign-derivative.png differ
diff --git a/docs/images/activation-functions/softsign.png b/docs/images/activation-functions/softsign.png
new file mode 100644
index 000000000..897ff2e9d
Binary files /dev/null and b/docs/images/activation-functions/softsign.png differ
diff --git a/docs/images/activation-functions/thresholded-relu-derivative.png b/docs/images/activation-functions/thresholded-relu-derivative.png
new file mode 100644
index 000000000..ec8339d24
Binary files /dev/null and b/docs/images/activation-functions/thresholded-relu-derivative.png differ
diff --git a/docs/images/activation-functions/thresholded-relu.png b/docs/images/activation-functions/thresholded-relu.png
new file mode 100644
index 000000000..4b98df8e6
Binary files /dev/null and b/docs/images/activation-functions/thresholded-relu.png differ
diff --git a/docs/neural-network/activation-functions/elu.md b/docs/neural-network/activation-functions/elu.md
index 94dd43a14..fc7b1f326 100644
--- a/docs/neural-network/activation-functions/elu.md
+++ b/docs/neural-network/activation-functions/elu.md
@@ -1,10 +1,14 @@
-[source]
+[source]
# ELU
*Exponential Linear Units* are a type of rectifier that soften the transition from non-activated to activated using the exponential function. As such, ELU produces smoother gradients than the piecewise linear [ReLU](relu.md) function.
$$
-{\displaystyle ELU = {\begin{cases}\alpha \left(e^{x}-1\right)&{\text{if }}x\leq 0\\x&{\text{if }}x>0\end{cases}}}
+\text{ELU}(x) =
+\begin{cases}
+\alpha \left(e^{x}-1\right) & \text{if } x \leq 0 \\
+x & \text{if } x > 0
+\end{cases}
$$
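+
+For example, with the default alpha of 1.0, an input of 2 passes through unchanged while an input of -2 gives e^-2 - 1 ≈ -0.86.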
## Parameters
@@ -12,12 +16,17 @@ $$
|---|---|---|---|---|
| 1 | alpha | 1.0 | float | The value at which leakage will begin to saturate. Ex. alpha = 1.0 means that the output will never be less than -1.0 when inactivated. |
+## Plots
+
+
+
+
## Example
```php
-use Rubix\ML\NeuralNet\ActivationFunctions\ELU;
+use Rubix\ML\NeuralNet\ActivationFunctions\ELU\ELU;
$activationFunction = new ELU(2.5);
```
## References
-[^1]: D. A. Clevert et al. (2016). Fast and Accurate Deep Network Learning by Exponential Linear Units.
+[1]: D. A. Clevert et al. (2016). Fast and Accurate Deep Network Learning by Exponential Linear Units.
diff --git a/docs/neural-network/activation-functions/gelu.md b/docs/neural-network/activation-functions/gelu.md
index 0fcc7eb6d..ab88f6bf2 100644
--- a/docs/neural-network/activation-functions/gelu.md
+++ b/docs/neural-network/activation-functions/gelu.md
@@ -1,14 +1,23 @@
-[source]
+[source]
# GELU
Gaussian Error Linear Units (GELUs) are rectifiers that are gated by the magnitude of their input rather than the sign of their input as with ReLU variants. Their output can be interpreted as the expected value of a neuron with random dropout regularization applied.
+$$
+\text{GELU}(x) = 0.5 \cdot x \left(1 + \tanh\left(\sqrt{\frac{2}{\pi}} \left(x + 0.044715 \cdot x^3\right)\right)\right)
+$$
+
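+For example, an input of 0 maps to 0, an input of 1 maps to roughly 0.84, and an input of -1 maps to roughly -0.16.
+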
## Parameters
This activation function does not have any parameters.
+## Plots
+
+
+
+
## Example
```php
-use Rubix\ML\NeuralNet\ActivationFunctions\GELU;
+use Rubix\ML\NeuralNet\ActivationFunctions\GELU\GELU;
$activationFunction = new GELU();
```
diff --git a/docs/neural-network/activation-functions/hard-sigmoid.md b/docs/neural-network/activation-functions/hard-sigmoid.md
new file mode 100644
index 000000000..3d27c7d6e
--- /dev/null
+++ b/docs/neural-network/activation-functions/hard-sigmoid.md
@@ -0,0 +1,26 @@
+[source]
+
+# Hard Sigmoid
+A piecewise linear approximation of the sigmoid function that is computationally more efficient. The Hard Sigmoid function has an output value between 0 and 1, making it useful for binary classification problems.
+
+$$
+\text{HardSigmoid}(x) = \max\left(0,\min\left(1, 0.2x + 0.5\right)\right)
+$$
+
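+For example, an input of -3 gives max(0, min(1, -0.1)) = 0, an input of 0 gives 0.5, and any input of 2.5 or greater gives 1.
+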
+## Parameters
+This activation function does not have any parameters.
+
+## Plots
+
+
+
+
+## Example
+```php
+use Rubix\ML\NeuralNet\ActivationFunctions\HardSigmoid\HardSigmoid;
+
+$activationFunction = new HardSigmoid();
+```
+
+## References
+[1]: https://en.wikipedia.org/wiki/Hard_sigmoid
diff --git a/docs/neural-network/activation-functions/hard-silu.md b/docs/neural-network/activation-functions/hard-silu.md
new file mode 100644
index 000000000..7f6161669
--- /dev/null
+++ b/docs/neural-network/activation-functions/hard-silu.md
@@ -0,0 +1,25 @@
+[source]
+
+# Hard SiLU
+
+Hard Sigmoid Linear Units (Hard SiLU) are a computationally efficient variant of the SiLU activation function.
+
+$$
+\displaystyle
+\text{Hard SiLU}(x) = x \cdot \text{Hard Sigmoid}(x) = x \cdot \max(0, \min(1, 0.2x + 0.5))
+$$
+
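+For example, an input of 1 gives 1 * 0.7 = 0.7, while inputs of 2.5 or greater pass through unchanged because the Hard Sigmoid factor saturates at 1.
+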
+## Parameters
+This activation function does not have any parameters.
+
+## Plots
+
+
+
+
+## Example
+```php
+use Rubix\ML\NeuralNet\ActivationFunctions\HardSiLU\HardSiLU;
+
+$activationFunction = new HardSiLU();
+```
diff --git a/docs/neural-network/activation-functions/hyperbolic-tangent.md b/docs/neural-network/activation-functions/hyperbolic-tangent.md
index aab3cf2d6..6a06cd0de 100644
--- a/docs/neural-network/activation-functions/hyperbolic-tangent.md
+++ b/docs/neural-network/activation-functions/hyperbolic-tangent.md
@@ -1,4 +1,4 @@
-[source]
+[source]
# Hyperbolic Tangent
An S-shaped function that squeezes the input value into an output space between -1 and 1. Hyperbolic Tangent (or *tanh*) has the advantage of being zero centered, however is known to *saturate* with highly positive or negative input values which can slow down training if the activations become too intense.
@@ -10,9 +10,14 @@ $$
## Parameters
This activation function does not have any parameters.
+## Plots
+
+
+
+
## Example
```php
-use Rubix\ML\NeuralNet\ActivationFunctions\HyperbolicTangent;
+use Rubix\ML\NeuralNet\ActivationFunctions\HyperbolicTangent\HyperbolicTangent;
$activationFunction = new HyperbolicTangent();
```
diff --git a/docs/neural-network/activation-functions/leaky-relu.md b/docs/neural-network/activation-functions/leaky-relu.md
index 36b2493ea..dcea6dee4 100644
--- a/docs/neural-network/activation-functions/leaky-relu.md
+++ b/docs/neural-network/activation-functions/leaky-relu.md
@@ -1,10 +1,14 @@
-[source]
+[source]
# Leaky ReLU
Leaky Rectified Linear Units are activation functions that output `x` when x is greater or equal to 0 or `x` scaled by a small *leakage* coefficient when the input is less than 0. Leaky rectifiers have the benefit of allowing a small gradient to flow through during backpropagation even though they might not have activated during the forward pass.
$$
-{\displaystyle LeakyReLU = {\begin{cases}\lambda x&{\text{if }}x<0\\x&{\text{if }}x\geq 0\end{cases}}}
+\text{LeakyReLU}(x) =
+\begin{cases}
+x & \text{if } x \geq 0 \\
+\alpha x & \text{if } x < 0
+\end{cases}
$$
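+
+For example, with the default leakage of 0.1, an input of 3 passes through unchanged while an input of -3 becomes -0.3.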
## Parameters
@@ -12,12 +16,17 @@ $$
|---|---|---|---|---|
-| 1 | leakage | 0.1 | float | The amount of leakage as a proportion of the input value to allow to pass through when not inactivated. |
+| 1 | leakage | 0.1 | float | The amount of leakage, as a proportion of the input value, allowed to pass through when the neuron is not activated. |
+## Plots
+
+
+
+
## Example
```php
-use Rubix\ML\NeuralNet\ActivationFunctions\LeakyReLU;
+use Rubix\ML\NeuralNet\ActivationFunctions\LeakyReLU\LeakyReLU;
$activationFunction = new LeakyReLU(0.3);
```
## References
-[^1]: A. L. Maas et al. (2013). Rectifier Nonlinearities Improve Neural Network Acoustic Models.
+[1]: A. L. Maas et al. (2013). Rectifier Nonlinearities Improve Neural Network Acoustic Models.
diff --git a/docs/neural-network/activation-functions/relu.md b/docs/neural-network/activation-functions/relu.md
index 7ec13c8a8..6044dfb5e 100644
--- a/docs/neural-network/activation-functions/relu.md
+++ b/docs/neural-network/activation-functions/relu.md
@@ -1,22 +1,31 @@
-[source]
+[source]
# ReLU
Rectified Linear Units (ReLU) only output the positive signal of the input. They have the benefit of having a monotonic derivative and are cheap to compute.
$$
-{\displaystyle ReLU = {\begin{aligned}&{\begin{cases}0&{\text{if }}x\leq 0\\x&{\text{if }}x>0\end{cases}}=&\max\{0,x\}\end{aligned}}}
+\text{ReLU}(x) =
+\begin{cases}
+x & \text{if } x \geq 0 \\
+0 & \text{if } x < 0
+\end{cases}
$$
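+
+For example, an input of 3 passes through unchanged while an input of -2 is set to 0.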
## Parameters
This activation function does not have any parameters.
+## Plots
+
+
+
+
## Example
```php
-use Rubix\ML\NeuralNet\ActivationFunctions\ReLU;
+use Rubix\ML\NeuralNet\ActivationFunctions\ReLU\ReLU;
-$activationFunction = new ReLU(0.1);
+$activationFunction = new ReLU();
```
## References
-[^1]: A. L. Maas et al. (2013). Rectifier Nonlinearities Improve Neural Network Acoustic Models.
-[^2]: K. Konda et al. (2015). Zero-bias Autoencoders and the Benefits of Co-adapting Features.
\ No newline at end of file
+[1]: A. L. Maas et al. (2013). Rectifier Nonlinearities Improve Neural Network Acoustic Models.
+[2]: K. Konda et al. (2015). Zero-bias Autoencoders and the Benefits of Co-adapting Features.
diff --git a/docs/neural-network/activation-functions/relu6.md b/docs/neural-network/activation-functions/relu6.md
new file mode 100644
index 000000000..f9c616e8f
--- /dev/null
+++ b/docs/neural-network/activation-functions/relu6.md
@@ -0,0 +1,24 @@
+[source]
+
+# ReLU6
+ReLU6 is a variant of the standard Rectified Linear Unit (ReLU) that caps the maximum output value at 6. This bounded ReLU function is commonly used in mobile and quantized neural networks, where restricting the activation range can improve numerical stability and quantization efficiency.
+
+$$
+\text{ReLU6}(x) = \min\left(\max(0, x), 6\right)
+$$
+
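+For example, an input of -2 maps to 0, an input of 4 passes through unchanged, and an input of 9 is capped at 6.
+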
+## Parameters
+This activation function does not have any parameters.
+
+## Plots
+
+
+
+
+## Example
+```php
+use Rubix\ML\NeuralNet\ActivationFunctions\ReLU6\ReLU6;
+
+$activationFunction = new ReLU6();
+```
+
diff --git a/docs/neural-network/activation-functions/selu.md b/docs/neural-network/activation-functions/selu.md
index 841c34189..adbeef3ff 100644
--- a/docs/neural-network/activation-functions/selu.md
+++ b/docs/neural-network/activation-functions/selu.md
@@ -1,21 +1,34 @@
-[source]
+[source]
# SELU
Scaled Exponential Linear Units (SELU) are a self-normalizing activation function based on the [ELU](#elu) activation function. Neuronal activations of SELU networks automatically converge toward zero mean and unit variance, unlike explicitly normalized networks such as those with [Batch Norm](#batch-norm) hidden layers.
$$
-{\displaystyle SELU = 1.0507 {\begin{cases}1.67326 (e^{x}-1)&{\text{if }}x<0\\x&{\text{if }}x\geq 0\end{cases}}}
+\text{SELU}(x) =
+\begin{cases}
+\lambda x & \text{if } x > 0 \\
+\lambda \alpha (e^x - 1) & \text{if } x \leq 0
+\end{cases}
$$
+Where the constants are typically:
+- λ ≈ 1.0507
+- α ≈ 1.67326
+
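+For example, an input of 1 is scaled to about 1.05, while an input of -1 gives λα(e^-1 - 1) ≈ -1.11.
+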
## Parameters
-This actvation function does not have any parameters.
+This activation function does not have any parameters.
+## Plots
+
+
+
+
## Example
```php
-use Rubix\ML\NeuralNet\ActivationFunctions\SELU;
+use Rubix\ML\NeuralNet\ActivationFunctions\SELU\SELU;
$activationFunction = new SELU();
```
## References
-[^1]: G. Klambauer et al. (2017). Self-Normalizing Neural Networks.
+[1]: G. Klambauer et al. (2017). Self-Normalizing Neural Networks.
diff --git a/docs/neural-network/activation-functions/sigmoid.md b/docs/neural-network/activation-functions/sigmoid.md
index 1b0756e1a..7625f67bd 100644
--- a/docs/neural-network/activation-functions/sigmoid.md
+++ b/docs/neural-network/activation-functions/sigmoid.md
@@ -1,4 +1,4 @@
-[source]
+[source]
# Sigmoid
A bounded S-shaped function (sometimes called the *Logistic* function) with an output value between 0 and 1. The output of the sigmoid function has the advantage of being interpretable as a probability, however it is not zero-centered and tends to saturate if inputs become large.
@@ -10,9 +10,14 @@ $$
## Parameters
This activation function does not have any parameters.
+## Plots
+
+
+
+
## Example
```php
-use Rubix\ML\NeuralNet\ActivationFunctions\Sigmoid;
+use Rubix\ML\NeuralNet\ActivationFunctions\Sigmoid\Sigmoid;
$activationFunction = new Sigmoid();
-```
\ No newline at end of file
+```
diff --git a/docs/neural-network/activation-functions/silu.md b/docs/neural-network/activation-functions/silu.md
index e5fa5c1ac..02f898745 100644
--- a/docs/neural-network/activation-functions/silu.md
+++ b/docs/neural-network/activation-functions/silu.md
@@ -1,17 +1,29 @@
-[source]
+[source]
# SiLU
Sigmoid Linear Units are smooth and non-monotonic rectified activation functions. Their inputs are weighted by the [Sigmoid](sigmoid.md) activation function acting as a self-gating mechanism.
+$$
+\text{SiLU}(x) = x \cdot \sigma(x) = \frac{x}{1 + e^{-x}}
+$$
+
+Where:
+- σ(x) is the sigmoid function.
+
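+For example, SiLU(2) ≈ 2 * 0.881 ≈ 1.76 and SiLU(-2) ≈ -0.24, illustrating the smooth, non-monotonic behavior for negative inputs.
+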
## Parameters
This activation function does not have any parameters.
+## Plots
+
+
+
+
## Example
```php
-use Rubix\ML\NeuralNet\ActivationFunctions\SiLU;
+use Rubix\ML\NeuralNet\ActivationFunctions\SiLU\SiLU;
$activationFunction = new SiLU();
```
-### References
-[^1]: S. Elwing et al. (2017). Sigmoid-Weighted Linear Units for Neural Network Function Approximation in Reinforcement Learning.
+## References
+[1]: S. Elwing et al. (2017). Sigmoid-Weighted Linear Units for Neural Network Function Approximation in Reinforcement Learning.
diff --git a/docs/neural-network/activation-functions/soft-plus.md b/docs/neural-network/activation-functions/soft-plus.md
deleted file mode 100644
index 743e44901..000000000
--- a/docs/neural-network/activation-functions/soft-plus.md
+++ /dev/null
@@ -1,21 +0,0 @@
-[source]
-
-# Soft Plus
-A smooth approximation of the piecewise linear [ReLU](relu.md) activation function.
-
-$$
-{\displaystyle Soft-Plus = \log \left(1+e^{x}\right)}
-$$
-
-## Parameters
-This activation function does not have any parameters.
-
-## Example
-```php
-use Rubix\ML\NeuralNet\ActivationFunctions\SoftPlus;
-
-$activationFunction = new SoftPlus();
-```
-
-## References
-[^1]: X. Glorot et al. (2011). Deep Sparse Rectifier Neural Networks.
\ No newline at end of file
diff --git a/docs/neural-network/activation-functions/softmax.md b/docs/neural-network/activation-functions/softmax.md
index c1109e601..368ae7ba7 100644
--- a/docs/neural-network/activation-functions/softmax.md
+++ b/docs/neural-network/activation-functions/softmax.md
@@ -1,18 +1,29 @@
-[source]
+[source]
# Softmax
The Softmax function is a generalization of the [Sigmoid](sigmoid.md) function that squashes each activation between 0 and 1 with the addition that all activations add up to 1. Together, these properties allow the output of the Softmax function to be interpretable as a *joint* probability distribution.
$$
-{\displaystyle Softmax = {\frac {e^{x_{i}}}{\sum _{j=1}^{J}e^{x_{j}}}}}
+\text{Softmax}(x_i) = \frac{e^{x_i}}{\sum_{j=1}^{n} e^{x_j}}
$$
+Where:
+
+- xᵢ is the i-th element of the input vector
+- n is the number of elements in the vector
+- The denominator ensures the outputs sum to 1
+
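+For example, the input vector [1, 2, 3] produces approximately [0.09, 0.24, 0.67], which sums to 1.
+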
## Parameters
This activation function does not have any parameters.
+## Plots
+
+
+
+
## Example
```php
-use Rubix\ML\NeuralNet\ActivationFunctions\Softmax;
+use Rubix\ML\NeuralNet\ActivationFunctions\Softmax\Softmax;
$activationFunction = new Softmax();
-```
\ No newline at end of file
+```
diff --git a/docs/neural-network/activation-functions/softplus.md b/docs/neural-network/activation-functions/softplus.md
new file mode 100644
index 000000000..99fb9dd48
--- /dev/null
+++ b/docs/neural-network/activation-functions/softplus.md
@@ -0,0 +1,26 @@
+[source]
+
+# Softplus
+A smooth approximation of the piecewise linear [ReLU](relu.md) activation function.
+
+$$
+\text{Softplus}(x) = \log\left(1 + e^{x}\right)
+$$
+
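+For example, an input of 0 gives log(2) ≈ 0.693, while large positive inputs approach the identity line and large negative inputs approach 0.
+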
+## Parameters
+This activation function does not have any parameters.
+
+## Plots
+
+
+
+
+## Example
+```php
+use Rubix\ML\NeuralNet\ActivationFunctions\Softplus\Softplus;
+
+$activationFunction = new Softplus();
+```
+
+## References
+[1]: X. Glorot et al. (2011). Deep Sparse Rectifier Neural Networks.
diff --git a/docs/neural-network/activation-functions/softsign.md b/docs/neural-network/activation-functions/softsign.md
index e69d4ca7e..b9e40dc68 100644
--- a/docs/neural-network/activation-functions/softsign.md
+++ b/docs/neural-network/activation-functions/softsign.md
@@ -1,21 +1,26 @@
-[source]
+[source]
# Softsign
A smooth sigmoid-shaped function that squashes the input between -1 and 1.
$$
-{\displaystyle Softsign = {\frac {x}{1+|x|}}}
+\text{Softsign}(x) = \frac{x}{1 + |x|}
$$
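+
+For example, an input of 3 maps to 3 / 4 = 0.75 and an input of -3 maps to -0.75, with the output approaching ±1 only asymptotically.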
## Parameters
This activation function does not have any parameters.
+## Plots
+
+
+
+
## Example
```php
-use Rubix\ML\NeuralNet\ActivationFunctions\Softsign;
+use Rubix\ML\NeuralNet\ActivationFunctions\Softsign\Softsign;
$activationFunction = new Softsign();
```
## References
-[^1]: X. Glorot et al. (2010). Understanding the Difficulty of Training Deep Feedforward Neural Networks.
+[1]: X. Glorot et al. (2010). Understanding the Difficulty of Training Deep Feedforward Neural Networks.
diff --git a/docs/neural-network/activation-functions/thresholded-relu.md b/docs/neural-network/activation-functions/thresholded-relu.md
index f05250e8e..5a4cf2553 100644
--- a/docs/neural-network/activation-functions/thresholded-relu.md
+++ b/docs/neural-network/activation-functions/thresholded-relu.md
@@ -1,10 +1,14 @@
-[source]
+[source]
# Thresholded ReLU
A version of the [ReLU](relu.md) function that activates only if the input is above some user-specified threshold level.
$$
-{\displaystyle ThresholdedReLU = {\begin{aligned}&{\begin{cases}0&{\text{if }}x\leq \theta \\x&{\text{if }}x>\theta\end{cases}}\end{aligned}}}
+\text{ThresholdedReLU}(x) =
+\begin{cases}
+x & \text{if } x > \theta \\
+0 & \text{if } x \leq \theta
+\end{cases}
$$
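+
+For example, with the default threshold of 1.0, an input of 2 passes through unchanged while an input of 0.5 is set to 0.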
## Parameters
@@ -12,12 +16,17 @@ $$
|---|---|---|---|---|
| 1 | threshold | 1.0 | float | The threshold at which the neuron is activated. |
+## Plots
+
+
+
+
## Example
```php
-use Rubix\ML\NeuralNet\ActivationFunctions\ThresholdedReLU;
+use Rubix\ML\NeuralNet\ActivationFunctions\ThresholdedReLU\ThresholdedReLU;
-$activationFunction = new ThresholdedReLU(0.5);
+$activationFunction = new ThresholdedReLU(2.0);
```
## References
-[^1]: K. Konda et al. (2015). Zero-bias autoencoders and the benefits of co-adapting features.
+[1]: K. Konda et al. (2015). Zero-bias autoencoders and the benefits of co-adapting features.
diff --git a/docs/transformers/regex-filter.md b/docs/transformers/regex-filter.md
index 94540a0e2..8283b22a2 100644
--- a/docs/transformers/regex-filter.md
+++ b/docs/transformers/regex-filter.md
@@ -28,7 +28,6 @@ $transformer = new RegexFilter([
```
## Predefined Regex Patterns
-<<<<<<< HEAD
| Class Constant | Description |
|---|----------------------------------------------------------------------------------------------------------|
| EMAIL | A pattern to match any email address. |
@@ -41,20 +40,6 @@ $transformer = new RegexFilter([
| EXTRA_WHITESPACE | Matches consecutively repeated whitespace characters. |
| MENTION | A pattern that matches Twitter-style mentions (@example). |
| HASHTAG | Matches Twitter-style hashtags (#example). |
-=======
-| Class Constant | Description |
-|---|---|
-| EMAIL | A pattern to match any email address. |
-| URL | An alias for the default (Gruber 1) URL matching pattern. |
-| GRUBER_1 | The original Gruber URL matching pattern. |
-| GRUBER_2 | The improved Gruber URL matching pattern. |
-| EXTRA_CHARACTERS | Matches consecutively repeated non word or number characters such as punctuation and special characters. |
-| EXTRA_WORDS | Matches consecutively repeated words. |
-| EXTRA_WHITESPACE | Matches consecutively repeated whitespace characters. |
-| EMOJIS | A pattern to match unicode emojis. |
-| MENTION | A pattern that matches Twitter-style mentions (@example). |
-| HASHTAG | Matches Twitter-style hashtags (#example). |
->>>>>>> 2.4
## Additional Methods
This transformer does not have any additional methods.
diff --git a/phpstan-baseline.neon b/phpstan-baseline.neon
index 205d03a26..8516df10b 100644
--- a/phpstan-baseline.neon
+++ b/phpstan-baseline.neon
@@ -1577,3 +1577,4 @@ parameters:
identifier: missingType.iterableValue
count: 1
path: tests/Helpers/ParamsTest.php
+
diff --git a/phpstan.neon b/phpstan.neon
index 991aa6d0b..bc464a8ea 100644
--- a/phpstan.neon
+++ b/phpstan.neon
@@ -2,6 +2,10 @@ includes:
- phpstan-baseline.neon
parameters:
level: 8
+ phpVersion: 80400
+ fileExtensions:
+ - php
+ tmpDir: ./runtime/.phpstan/
paths:
- 'src'
- 'benchmarks'
diff --git a/src/AnomalyDetectors/OneClassSVM.php b/src/AnomalyDetectors/OneClassSVM.php
index bdd19775f..ab229ac6c 100644
--- a/src/AnomalyDetectors/OneClassSVM.php
+++ b/src/AnomalyDetectors/OneClassSVM.php
@@ -85,7 +85,7 @@ public function __construct(
. "0 and 1, $nu given.");
}
- $kernel = $kernel ?? new RBF();
+ $kernel ??= new RBF();
if ($tolerance < 0.0) {
throw new InvalidArgumentException('Tolerance must be,'
diff --git a/src/Backends/Amp.php b/src/Backends/Amp.php
index 49299ad8a..02527123e 100644
--- a/src/Backends/Amp.php
+++ b/src/Backends/Amp.php
@@ -63,7 +63,7 @@ public function __construct(?int $workers = null)
. " must be greater than 0, $workers given.");
}
- $workers = $workers ?? CPU::cores();
+ $workers ??= CPU::cores();
$this->pool = new DefaultPool($workers);
}
diff --git a/src/Classifiers/SVC.php b/src/Classifiers/SVC.php
index 509ed11f8..b063057cb 100644
--- a/src/Classifiers/SVC.php
+++ b/src/Classifiers/SVC.php
@@ -96,7 +96,7 @@ public function __construct(
. " than 0, $c given.");
}
- $kernel = $kernel ?? new RBF();
+ $kernel ??= new RBF();
if ($tolerance < 0.0) {
throw new InvalidArgumentException('Tolerance must be'
diff --git a/src/Clusterers/MeanShift.php b/src/Clusterers/MeanShift.php
index e1adaa5b8..419d1f5d9 100644
--- a/src/Clusterers/MeanShift.php
+++ b/src/Clusterers/MeanShift.php
@@ -153,7 +153,7 @@ public static function estimateRadius(
. " between 0 and 100, $percentile given.");
}
- $kernel = $kernel ?? new Euclidean();
+ $kernel ??= new Euclidean();
$samples = $dataset->samples();
diff --git a/src/Helpers/Stats.php b/src/Helpers/Stats.php
index bc78c50f1..f090feab1 100644
--- a/src/Helpers/Stats.php
+++ b/src/Helpers/Stats.php
@@ -105,13 +105,9 @@ public static function median(array $values) : float
*
* @param mixed[] $values
* @param float $q
- * <<<<<<< HEAD
* @throws InvalidArgumentException
* @throws InvalidArgumentException
* @return int|float
- * =======
- * @return float
- * >>>>>>> 2.5
*/
public static function quantile(array $values, float $q) : int|float
{
@@ -173,7 +169,7 @@ public static function variance(array $values, ?float $mean = null) : float
throw new InvalidArgumentException('Variance is undefined for empty set.');
}
- $mean = $mean ?? self::mean($values);
+ $mean ??= self::mean($values);
$ssd = 0.0;
@@ -199,7 +195,7 @@ public static function mad(array $values, ?float $median = null) : float
. ' is undefined for empty set.');
}
- $median = $median ?? self::median($values);
+ $median ??= self::median($values);
$deviations = [];
@@ -224,7 +220,7 @@ public static function skewness(array $values, ?float $mean = null) : float
throw new InvalidArgumentException('Skewness is undefined for empty set.');
}
- $mean = $mean ?? self::mean($values);
+ $mean ??= self::mean($values);
$numerator = self::centralMoment($values, 3, $mean);
$denominator = self::centralMoment($values, 2, $mean) ** 1.5;
@@ -246,7 +242,7 @@ public static function kurtosis(array $values, ?float $mean = null) : float
throw new InvalidArgumentException('Kurtosis is undefined for empty set.');
}
- $mean = $mean ?? self::mean($values);
+ $mean ??= self::mean($values);
$numerator = self::centralMoment($values, 4, $mean);
$denominator = self::centralMoment($values, 2, $mean) ** 2;
@@ -273,7 +269,7 @@ public static function centralMoment(array $values, int $moment, ?float $mean =
throw new InvalidArgumentException('Moment cannot be less than 1.');
}
- $mean = $mean ?? self::mean($values);
+ $mean ??= self::mean($values);
$sigma = 0.0;
diff --git a/src/NeuralNet/ActivationFunctions/Base/Contracts/ActivationFunction.php b/src/NeuralNet/ActivationFunctions/Base/Contracts/ActivationFunction.php
new file mode 100644
index 000000000..1ce189e22
--- /dev/null
+++ b/src/NeuralNet/ActivationFunctions/Base/Contracts/ActivationFunction.php
@@ -0,0 +1,27 @@
+
+ */
+interface ActivationFunction extends Stringable
+{
+ /**
+ * Compute the activation.
+ *
+ * @param NDArray $input Input matrix
+ * @return NDArray Output matrix
+ */
+ public function activate(NDArray $input) : NDArray;
+}
diff --git a/src/NeuralNet/ActivationFunctions/Base/Contracts/Derivative.php b/src/NeuralNet/ActivationFunctions/Base/Contracts/Derivative.php
new file mode 100644
index 000000000..90dfcf40c
--- /dev/null
+++ b/src/NeuralNet/ActivationFunctions/Base/Contracts/Derivative.php
@@ -0,0 +1,17 @@
+
+ */
+interface Derivative
+{
+}
diff --git a/src/NeuralNet/ActivationFunctions/Base/Contracts/IBufferDerivative.php b/src/NeuralNet/ActivationFunctions/Base/Contracts/IBufferDerivative.php
new file mode 100644
index 000000000..b29011bc8
--- /dev/null
+++ b/src/NeuralNet/ActivationFunctions/Base/Contracts/IBufferDerivative.php
@@ -0,0 +1,27 @@
+
+ * @author Samuel Akopyan
+ */
+interface IBufferDerivative extends Derivative
+{
+ /**
+ * Calculate the derivative of the single parameter.
+ *
+ * @param NDArray $input Input matrix
+ * @return NDArray Derivative matrix
+ */
+ public function differentiate(NDArray $input) : NDArray;
+}
diff --git a/src/NeuralNet/ActivationFunctions/Base/Contracts/IOBufferDerivative.php b/src/NeuralNet/ActivationFunctions/Base/Contracts/IOBufferDerivative.php
new file mode 100644
index 000000000..4263fcea9
--- /dev/null
+++ b/src/NeuralNet/ActivationFunctions/Base/Contracts/IOBufferDerivative.php
@@ -0,0 +1,26 @@
+
+ * @author Samuel Akopyan
+ */
+interface IOBufferDerivative
+{
+ /**
+ * Calculate the derivative of the activation.
+ *
+ * @param NDArray $input Input matrix
+ * @param NDArray $output Output matrix
+ * @return NDArray Derivative matrix
+ */
+ public function differentiate(NDArray $input, NDArray $output) : NDArray;
+}
diff --git a/src/NeuralNet/ActivationFunctions/Base/Contracts/OBufferDerivative.php b/src/NeuralNet/ActivationFunctions/Base/Contracts/OBufferDerivative.php
new file mode 100644
index 000000000..760dc4593
--- /dev/null
+++ b/src/NeuralNet/ActivationFunctions/Base/Contracts/OBufferDerivative.php
@@ -0,0 +1,25 @@
+
+ * @author Samuel Akopyan
+ */
+interface OBufferDerivative
+{
+ /**
+ * Calculate the derivative of the activation.
+ *
+ * @param NDArray $output Output matrix
+ * @return NDArray Derivative matrix
+ */
+ public function differentiate(NDArray $output) : NDArray;
+}
diff --git a/src/NeuralNet/ActivationFunctions/ELU/ELU.php b/src/NeuralNet/ActivationFunctions/ELU/ELU.php
new file mode 100644
index 000000000..354aa61e7
--- /dev/null
+++ b/src/NeuralNet/ActivationFunctions/ELU/ELU.php
@@ -0,0 +1,102 @@
+
+ * @author Samuel Akopyan
+ */
+class ELU implements ActivationFunction, IOBufferDerivative
+{
+ /**
+ * Class constructor.
+ *
+ * @param float $alpha The value at which the ELU saturates for negative inputs. For example, if alpha
+ * equals 1.0, the output will never be less than -1.0 when inactivated.
+ *
+ * @throws InvalidAlphaException
+ */
+ public function __construct(protected float $alpha = 1.0)
+ {
+ if ($this->alpha < 0.0) {
+ throw new InvalidAlphaException(
+ message: "Alpha must be greater than 0, {$this->alpha} given."
+ );
+ }
+ }
+
+ /**
+ * Apply the ELU activation function to the input.
+ *
+ * f(x) = x if x > 0
+ * f(x) = α * (e^x - 1) if x ≤ 0
+ *
+ * @param NDArray $input The input values
+ * @return NDArray The activated values
+ */
+ public function activate(NDArray $input) : NDArray
+ {
+ $positiveActivation = NumPower::maximum($input, 0);
+
+ $negativeMask = NumPower::minimum($input, 0);
+ $negativeActivation = NumPower::multiply(
+ NumPower::expm1($negativeMask),
+ $this->alpha
+ );
+
+ return NumPower::add($positiveActivation, $negativeActivation);
+ }
+
+ /**
+ * Calculate the derivative of the ELU activation function using input and output.
+ *
+ * f'(x) = 1 if x > 0
+ * f'(x) = f(x) + α if x ≤ 0, where f(x) is the ELU output
+ *
+ * @param NDArray $input Input matrix (used to determine x > 0 mask)
+ * @param NDArray $output Output from the ELU activation function
+ * @return NDArray Derivative matrix
+ */
+ public function differentiate(NDArray $input, NDArray $output) : NDArray
+ {
+ $positiveMask = NumPower::greater($input, 0);
+
+ $negativeMask = NumPower::lessEqual($input, 0);
+ $negativePart = NumPower::multiply(
+ NumPower::add($output, $this->alpha),
+ $negativeMask
+ );
+
+ return NumPower::add($positiveMask, $negativePart);
+ }
+
+ /**
+ * Return the string representation of the activation function.
+ *
+ * @return string String representation
+ */
+ public function __toString() : string
+ {
+ return "ELU (alpha: {$this->alpha})";
+ }
+}
diff --git a/src/NeuralNet/ActivationFunctions/ELU/Exceptions/InvalidAlphaException.php b/src/NeuralNet/ActivationFunctions/ELU/Exceptions/InvalidAlphaException.php
new file mode 100644
index 000000000..423c62596
--- /dev/null
+++ b/src/NeuralNet/ActivationFunctions/ELU/Exceptions/InvalidAlphaException.php
@@ -0,0 +1,14 @@
+
+ * @author Samuel Akopyan
+ */
+class GELU implements ActivationFunction, IBufferDerivative
+{
+ /**
+ * The square root of two over pi constant sqrt(2/π).
+ *
+ * @var float
+ */
+ protected const ALPHA = 0.7978845608;
+ /** @var float 0.5 * ALPHA */
+ protected const HALF_ALPHA = 0.3989422804;
+
+ /**
+ * Gaussian error function approximation term.
+ *
+ * @var float
+ */
+ protected const BETA = 0.044715;
+ /** @var float 3 * BETA */
+ protected const TRIPLE_BETA = 0.134145;
+
+ /**
+ * Apply the GeLU activation function to the input.
+ *
+ * f(x) = 0.5 * x * (1 + tanh(sqrt(2/π) * (x + 0.044715 * x^3)))
+ *
+ * @param NDArray $input The input values
+ * @return NDArray The activated values
+ */
+ public function activate(NDArray $input) : NDArray
+ {
+ $cubed = NumPower::pow($input, 3);
+ $innerTerm = NumPower::add($input, NumPower::multiply($cubed, self::BETA));
+ $tanhTerm = NumPower::tanh(NumPower::multiply($innerTerm, self::ALPHA));
+ $onePlusTanh = NumPower::add(1.0, $tanhTerm);
+
+ return NumPower::multiply(
+ NumPower::multiply($input, $onePlusTanh),
+ 0.5
+ );
+ }
+
+ /**
+ * Calculate the derivative of the activation function.
+ *
+ * The derivative of GeLU is:
+ * f'(x) = 0.5 * (1 + tanh(α * (x + β * x^3))) +
+ * 0.5 * x * sech^2(α * (x + β * x^3)) * α * (1 + 3β * x^2)
+ *
+ * Where:
+ * - α = sqrt(2/π) ≈ 0.7978845608
+ * - β = 0.044715
+ * - sech^2(z) = (1/cosh(z))^2
+ *
+ * @param NDArray $input Input matrix
+ * @return NDArray Derivative matrix
+ */
+ public function differentiate(NDArray $input) : NDArray
+ {
+ $cubed = NumPower::pow($input, 3);
+
+ $innerTerm = NumPower::multiply(
+ NumPower::add(
+ $input,
+ NumPower::multiply($cubed, self::BETA)
+ ),
+ self::ALPHA
+ );
+
+ $cosh = NumPower::cosh($innerTerm);
+ $sech2 = NumPower::pow(
+ NumPower::divide(1.0, $cosh),
+ 2
+ );
+
+ $firstTerm = NumPower::multiply(
+ NumPower::add(1.0, NumPower::tanh($innerTerm)),
+ 0.5
+ );
+
+ $secondTerm = NumPower::multiply(
+ NumPower::multiply(
+ NumPower::multiply(
+ $input,
+ self::HALF_ALPHA
+ ),
+ $sech2
+ ),
+ NumPower::add(
+ 1.0,
+ NumPower::multiply(
+ NumPower::pow($input, 2),
+ self::TRIPLE_BETA
+ )
+ )
+ );
+
+ return NumPower::add($firstTerm, $secondTerm);
+ }
+
+ /**
+ * Return the string representation of the activation function.
+ *
+ * @return string String representation
+ */
+ public function __toString() : string
+ {
+ return 'GELU';
+ }
+}
diff --git a/src/NeuralNet/ActivationFunctions/HardSiLU/HardSiLU.php b/src/NeuralNet/ActivationFunctions/HardSiLU/HardSiLU.php
new file mode 100644
index 000000000..6ab81eac1
--- /dev/null
+++ b/src/NeuralNet/ActivationFunctions/HardSiLU/HardSiLU.php
@@ -0,0 +1,85 @@
+
+ */
+class HardSiLU implements ActivationFunction, IBufferDerivative
+{
+ /**
+ * The Hard Sigmoid activation function.
+ *
+ * @var HardSigmoid
+ */
+ protected HardSigmoid $hardSigmoid;
+
+ /**
+ * Class constructor.
+ */
+ public function __construct()
+ {
+ $this->hardSigmoid = new HardSigmoid();
+ }
+
+ /**
+ * Apply the HardSiLU activation function to the input.
+ *
+ * f(x) = x * HardSigmoid(x)
+ *
+ * @param NDArray $input The input values
+ * @return NDArray The activated values
+ */
+ public function activate(NDArray $input) : NDArray
+ {
+ $hardSigmoid = $this->hardSigmoid->activate($input);
+
+ return NumPower::multiply($input, $hardSigmoid);
+ }
+
+ /**
+ * Calculate the derivative of the activation function.
+ *
+ * f'(x) = HardSigmoid(x) + x * HardSigmoid'(x)
+ *
+ * @param NDArray $input Input matrix
+ * @return NDArray Derivative matrix
+ */
+ public function differentiate(NDArray $input) : NDArray
+ {
+ $hardSigmoid = $this->hardSigmoid->activate($input);
+ $hardSigmoidDerivative = $this->hardSigmoid->differentiate($input);
+ $xTimesDerivative = NumPower::multiply($input, $hardSigmoidDerivative);
+
+ return NumPower::add($hardSigmoid, $xTimesDerivative);
+ }
+
+ /**
+ * Return the string representation of the activation function.
+ *
+ * @return string String representation
+ */
+ public function __toString() : string
+ {
+ return 'HardSiLU';
+ }
+}
diff --git a/src/NeuralNet/ActivationFunctions/HardSigmoid/HardSigmoid.php b/src/NeuralNet/ActivationFunctions/HardSigmoid/HardSigmoid.php
new file mode 100644
index 000000000..659e2c07d
--- /dev/null
+++ b/src/NeuralNet/ActivationFunctions/HardSigmoid/HardSigmoid.php
@@ -0,0 +1,102 @@
+
+ */
+class HardSigmoid implements ActivationFunction, IBufferDerivative
+{
+ /**
+ * The slope of the linear region.
+ *
+ * @var float
+ */
+ protected const SLOPE = 0.2;
+
+ /**
+ * The y-intercept of the linear region.
+ *
+ * @var float
+ */
+ protected const INTERCEPT = 0.5;
+
+ /**
+ * The lower bound of the linear region.
+ *
+ * @var float
+ */
+ protected const LOWER_BOUND = -2.5;
+
+ /**
+ * The upper bound of the linear region.
+ *
+ * @var float
+ */
+ protected const UPPER_BOUND = 2.5;
+
+ /**
+ * Apply the HardSigmoid activation function to the input.
+ *
+ * f(x) = max(0, min(1, 0.2 * x + 0.5))
+ *
+ * @param NDArray $input The input values
+ * @return NDArray The activated values
+ */
+ public function activate(NDArray $input) : NDArray
+ {
+ $linear = NumPower::add(
+ NumPower::multiply($input, self::SLOPE),
+ self::INTERCEPT
+ );
+
+ return NumPower::clip($linear, 0.0, 1.0);
+ }
+
+ /**
+ * Calculate the derivative of the activation function.
+ *
+ * f'(x) = 0.2 if -2.5 <= x <= 2.5
+ * f'(x) = 0 otherwise
+ *
+ * @param NDArray $input Input matrix
+ * @return NDArray Derivative matrix
+ */
+ public function differentiate(NDArray $input) : NDArray
+ {
+ // For values in the linear region (-2.5 <= x <= 2.5): SLOPE
+ $inLinearRegion = NumPower::greaterEqual($input, self::LOWER_BOUND);
+ $inLinearRegion = NumPower::multiply($inLinearRegion, NumPower::lessEqual($input, self::UPPER_BOUND));
+ $linearPart = NumPower::multiply($inLinearRegion, self::SLOPE);
+
+ return $linearPart;
+ }
+
+ /**
+ * Return the string representation of the activation function.
+ *
+ * @return string String representation
+ */
+ public function __toString() : string
+ {
+ return 'HardSigmoid';
+ }
+}
diff --git a/src/NeuralNet/ActivationFunctions/HyperbolicTangent/HyperbolicTangent.php b/src/NeuralNet/ActivationFunctions/HyperbolicTangent/HyperbolicTangent.php
new file mode 100644
index 000000000..1723db90d
--- /dev/null
+++ b/src/NeuralNet/ActivationFunctions/HyperbolicTangent/HyperbolicTangent.php
@@ -0,0 +1,62 @@
+
+ */
+class HyperbolicTangent implements ActivationFunction, OBufferDerivative
+{
+ /**
+ * Apply the Hyperbolic Tangent activation function to the input.
+ *
+ * f(x) = tanh(x)
+ *
+ * @param NDArray $input The input values
+ * @return NDArray The activated values
+ */
+ public function activate(NDArray $input) : NDArray
+ {
+ return NumPower::tanh($input);
+ }
+
+ /**
+ * Calculate the derivative of the activation function.
+ *
+ * f'(x) = 1 - tanh^2(x)
+ *
+ * @param NDArray $x Output matrix
+ * @return NDArray Derivative matrix
+ */
+ public function differentiate(NDArray $x) : NDArray
+ {
+ $squared = NumPower::pow($x, 2);
+
+ return NumPower::subtract(1.0, $squared);
+ }
+
+ /**
+ * Return the string representation of the activation function.
+ *
+ * @return string String representation
+ */
+ public function __toString() : string
+ {
+ return 'Hyperbolic Tangent';
+ }
+}
diff --git a/src/NeuralNet/ActivationFunctions/LeakyReLU/Exceptions/InvalidLeakageException.php b/src/NeuralNet/ActivationFunctions/LeakyReLU/Exceptions/InvalidLeakageException.php
new file mode 100644
index 000000000..c8f081b85
--- /dev/null
+++ b/src/NeuralNet/ActivationFunctions/LeakyReLU/Exceptions/InvalidLeakageException.php
@@ -0,0 +1,14 @@
+ * Leaky Rectified Linear Units are activation functions that output x when x > 0 or a
+ * small leakage value when x < 0. The amount of leakage is controlled by the
+ * user-specified parameter.
+ *
+ * References:
+ * [1] A. L. Maas et al. (2013). Rectifier Nonlinearities Improve Neural Network
+ * Acoustic Models.
+ *
+ * @category Machine Learning
+ * @package Rubix/ML
+ * @author Andrew DalPino
+ * @author Samuel Akopyan
+ */
+class LeakyReLU implements ActivationFunction, IBufferDerivative
+{
+ /**
+ * The amount of leakage, as a ratio of the input value, allowed to pass through when the unit is not activated.
+ *
+ * @var float
+ */
+ protected float $leakage;
+
+ /**
+ * Class constructor.
+ *
+ * @param float $leakage The amount of leakage, as a ratio of the input value, allowed to pass through when the unit is not activated.
+ * @throws InvalidLeakageException
+ */
+ public function __construct(float $leakage = 0.1)
+ {
+ if ($leakage <= 0.0 || $leakage >= 1.0) {
+ throw new InvalidLeakageException(
+ message: "Leakage must be between 0 and 1, $leakage given."
+ );
+ }
+
+ $this->leakage = $leakage;
+ }
+
+ /**
+ * Apply the Leaky ReLU activation function to the input.
+ *
+ * f(x) = x if x > 0
+ * f(x) = leakage * x if x ≤ 0
+ *
+ * @param NDArray $input The input values
+ * @return NDArray The activated values
+ */
+ public function activate(NDArray $input) : NDArray
+ {
+ $positiveActivation = NumPower::maximum($input, 0);
+
+ $negativeActivation = NumPower::multiply(
+ NumPower::minimum($input, 0),
+ $this->leakage
+ );
+
+ return NumPower::add($positiveActivation, $negativeActivation);
+ }
+
+ /**
+ * Calculate the derivative of the activation function.
+ *
+ * f'(x) = 1 if x > 0
+ * f'(x) = leakage if x ≤ 0
+ *
+ * @param NDArray $input Input matrix
+ * @return NDArray Derivative matrix
+ */
+ public function differentiate(NDArray $input) : NDArray
+ {
+ $positivePart = NumPower::greater($input, 0);
+
+ $negativePart = NumPower::multiply(
+ NumPower::lessEqual($input, 0),
+ $this->leakage
+ );
+
+ return NumPower::add($positivePart, $negativePart);
+ }
+
+ /**
+ * Return the string representation of the activation function.
+ *
+ * @return string String representation
+ */
+ public function __toString() : string
+ {
+ return "Leaky ReLU (leakage: {$this->leakage})";
+ }
+}
diff --git a/src/NeuralNet/ActivationFunctions/ReLU/ReLU.php b/src/NeuralNet/ActivationFunctions/ReLU/ReLU.php
new file mode 100644
index 000000000..e1b1a22b1
--- /dev/null
+++ b/src/NeuralNet/ActivationFunctions/ReLU/ReLU.php
@@ -0,0 +1,64 @@
+
+ */
+class ReLU implements ActivationFunction, IBufferDerivative
+{
+ /**
+ * Compute the activation.
+ *
+ * f(x) = max(0, x)
+ *
+ * @param NDArray $input The input values
+ * @return NDArray The activated values
+ */
+ public function activate(NDArray $input) : NDArray
+ {
+ return NumPower::maximum($input, 0.0);
+ }
+
+ /**
+ * Calculate the derivative of the activation function.
+ *
+ * f'(x) = 1 if x > 0, else 0
+ *
+ * @param NDArray $input Input matrix
+ * @return NDArray Derivative matrix
+ */
+ public function differentiate(NDArray $input) : NDArray
+ {
+ return NumPower::greater($input, 0.0);
+ }
+
+ /**
+ * Return the string representation of the activation function.
+ *
+ * @return string String representation
+ */
+ public function __toString() : string
+ {
+ return 'ReLU';
+ }
+}
diff --git a/src/NeuralNet/ActivationFunctions/ReLU6/ReLU6.php b/src/NeuralNet/ActivationFunctions/ReLU6/ReLU6.php
new file mode 100644
index 000000000..89daf2faa
--- /dev/null
+++ b/src/NeuralNet/ActivationFunctions/ReLU6/ReLU6.php
@@ -0,0 +1,70 @@
+
+ */
+class ReLU6 implements ActivationFunction, IBufferDerivative
+{
+ /**
+ * Compute the activation.
+ *
+ * f(x) = min(max(0, x), 6)
+ *
+ * @param NDArray $input The input values
+ * @return NDArray The activated values
+ */
+ public function activate(NDArray $input) : NDArray
+ {
+ $reluActivation = NumPower::maximum($input, 0.0);
+
+ return NumPower::minimum($reluActivation, 6.0);
+ }
+
+ /**
+ * Calculate the derivative of the activation function.
+ *
+ * f'(x) = 1 if 0 < x < 6, else 0
+ *
+ * @param NDArray $input Input matrix
+ * @return NDArray Derivative matrix
+ */
+ public function differentiate(NDArray $input) : NDArray
+ {
+ $greaterThanZero = NumPower::greater($input, 0.0);
+ $lessThanSix = NumPower::less($input, 6.0);
+
+ return NumPower::multiply($greaterThanZero, $lessThanSix);
+ }
+
+ /**
+ * Return the string representation of the activation function.
+ *
+ * @return string String representation
+ */
+ public function __toString() : string
+ {
+ return 'ReLU6';
+ }
+}
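
For reference, a scalar plain-PHP sketch of the clamp and its derivative window (not part of the patch; sample values mirror the ReLU6 test fixtures later in this diff):

    // Scalar ReLU6: clamp to [0, 6]; the derivative is 1 only inside the open interval (0, 6).
    $relu6 = fn (float $x): float => min(max($x, 0.0), 6.0);
    $relu6Prime = fn (float $x): float => ($x > 0.0 && $x < 6.0) ? 1.0 : 0.0;

    var_dump($relu6(7.0));      // float(6)
    var_dump($relu6Prime(5.9)); // float(1)
    var_dump($relu6Prime(6.0)); // float(0)
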
diff --git a/src/NeuralNet/ActivationFunctions/SELU/SELU.php b/src/NeuralNet/ActivationFunctions/SELU/SELU.php
new file mode 100644
index 000000000..d96216d59
--- /dev/null
+++ b/src/NeuralNet/ActivationFunctions/SELU/SELU.php
@@ -0,0 +1,113 @@
+
+ */
+class SELU implements ActivationFunction, IBufferDerivative
+{
+ /**
+ * The value at which leakage starts to saturate.
+ *
+ * @var float
+ */
+ public const ALPHA = 1.6732632;
+
+ /**
+ * The scaling coefficient.
+ *
+ * @var float
+ */
+ public const LAMBDA = 1.0507009;
+
+ /**
+ * The scaling coefficient multiplied by alpha.
+ *
+ * @var float
+ */
+ protected const BETA = self::LAMBDA * self::ALPHA;
+
+ /**
+ * Compute the activation.
+ *
+ * f(x) = λ * x if x > 0
+ * f(x) = λ * α * (e^x - 1) if x ≤ 0
+ *
+ * @param NDArray $input The input values
+ * @return NDArray The activated values
+ */
+ public function activate(NDArray $input) : NDArray
+ {
+ $positive = NumPower::multiply(
+ NumPower::maximum($input, 0),
+ self::LAMBDA
+ );
+
+ $negativeMask = NumPower::minimum($input, 0);
+ $negative = NumPower::multiply(
+ NumPower::expm1($negativeMask),
+ self::BETA
+ );
+
+ return NumPower::add($positive, $negative);
+ }
+
+ /**
+ * Calculate the derivative of the SELU activation function.
+ *
+ * f'(x) = λ if x > 0
+ * f'(x) = λ * α * e^x if x ≤ 0
+ *
+ * @param NDArray $input Input matrix
+ * @return NDArray Derivative matrix
+ */
+ public function differentiate(NDArray $input) : NDArray
+ {
+ $positiveMask = NumPower::greater($input, 0);
+ $positivePart = NumPower::multiply($positiveMask, self::LAMBDA);
+
+ $negativeMask = NumPower::lessEqual($input, 0);
+ $negativePart = NumPower::multiply(
+ NumPower::multiply(
+ NumPower::exp(
+ NumPower::multiply($negativeMask, $input)
+ ),
+ self::BETA
+ ),
+ $negativeMask
+ );
+
+ return NumPower::add($positivePart, $negativePart);
+ }
+
+ /**
+ * Return the string representation of the activation function.
+ *
+ * @return string String representation
+ */
+ public function __toString() : string
+ {
+ return 'SELU';
+ }
+}
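
The class folds the λ · α product into the BETA constant so the negative branch needs a single multiply. A scalar plain-PHP sketch of the same piecewise formula (not part of the patch; the constants are copied from the class and the sample values mirror the test fixtures):

    $alpha = 1.6732632;
    $lambda = 1.0507009;
    $beta = $lambda * $alpha; // same product as the BETA class constant

    $selu = fn (float $x): float => $x > 0.0 ? $lambda * $x : $beta * (exp($x) - 1.0);
    $seluPrime = fn (float $x): float => $x > 0.0 ? $lambda : $beta * exp($x);

    var_dump($selu(-0.5));     // ≈ -0.6917581
    var_dump($seluPrime(0.0)); // ≈ 1.7580992 (x = 0 takes the x ≤ 0 branch)
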
diff --git a/src/NeuralNet/ActivationFunctions/SiLU/SiLU.php b/src/NeuralNet/ActivationFunctions/SiLU/SiLU.php
new file mode 100644
index 000000000..89985e831
--- /dev/null
+++ b/src/NeuralNet/ActivationFunctions/SiLU/SiLU.php
@@ -0,0 +1,87 @@
+
+ */
+class SiLU implements ActivationFunction, IBufferDerivative
+{
+ /**
+ * The Sigmoid activation function.
+ *
+ * @var Sigmoid
+ */
+ protected Sigmoid $sigmoid;
+
+ /**
+ * Class constructor.
+ */
+ public function __construct()
+ {
+ $this->sigmoid = new Sigmoid();
+ }
+
+ /**
+ * Compute the activation.
+ *
+ * f(x) = x * sigmoid(x) = x / (1 + e^(-x))
+ *
+ * @param NDArray $input
+ * @return NDArray
+ */
+ public function activate(NDArray $input) : NDArray
+ {
+ $sigmoid = $this->sigmoid->activate($input);
+
+ return NumPower::multiply($input, $sigmoid);
+ }
+
+ /**
+ * Calculate the derivative of the activation.
+ *
+ * f'(x) = sigmoid(x) + x * sigmoid(x) * (1 - sigmoid(x))
+ * = sigmoid(x) + x * sigmoid'(x)
+ *
+ * @param NDArray $input Input matrix
+ * @return NDArray Derivative matrix
+ */
+ public function differentiate(NDArray $input) : NDArray
+ {
+ $sigmoid = $this->sigmoid->activate($input);
+ $sigmoidDerivative = $this->sigmoid->differentiate($sigmoid);
+ $xTimesSigmoidDerivative = NumPower::multiply($input, $sigmoidDerivative);
+
+ return NumPower::add($sigmoid, $xTimesSigmoidDerivative);
+ }
+
+ /**
+ * Return the string representation of the object.
+ *
+ * @return string
+ */
+ public function __toString() : string
+ {
+ return 'SiLU';
+ }
+}
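
differentiate() reuses the Sigmoid helper: with s = sigmoid(x) and sigmoid'(x) = s * (1 - s), the product rule gives f'(x) = s + x * s * (1 - s), which is exactly what the method assembles. A scalar plain-PHP check (not part of the patch; sample values mirror the test fixtures):

    $sigmoid = fn (float $x): float => 1.0 / (1.0 + exp(-$x));

    $silu = fn (float $x): float => $x * $sigmoid($x);
    $siluPrime = function (float $x) use ($sigmoid): float {
        $s = $sigmoid($x);

        return $s + $x * $s * (1.0 - $s); // sigmoid(x) + x * sigmoid'(x)
    };

    var_dump($silu(1.0));      // ≈ 0.7310586
    var_dump($siluPrime(2.0)); // ≈ 1.0907843
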
diff --git a/src/NeuralNet/ActivationFunctions/Sigmoid/Sigmoid.php b/src/NeuralNet/ActivationFunctions/Sigmoid/Sigmoid.php
new file mode 100644
index 000000000..282170bb3
--- /dev/null
+++ b/src/NeuralNet/ActivationFunctions/Sigmoid/Sigmoid.php
@@ -0,0 +1,68 @@
+
+ */
+class Sigmoid implements ActivationFunction, OBufferDerivative
+{
+ /**
+ * Compute the activation.
+ *
+ * f(x) = 1 / (1 + e^(-x))
+ *
+ * @param NDArray $input
+ * @return NDArray
+ */
+ public function activate(NDArray $input) : NDArray
+ {
+ $negExp = NumPower::exp(NumPower::multiply($input, -1.0));
+ $denominator = NumPower::add(1.0, $negExp);
+
+ return NumPower::divide(1.0, $denominator);
+ }
+
+ /**
+ * Calculate the derivative of the activation.
+ *
+ * For Sigmoid, the derivative can be calculated using only the output:
+ * f'(x) = f(x) * (1 - f(x))
+ * where f(x) is the output of the sigmoid function
+ *
+ * @param NDArray $output
+ * @return NDArray
+ */
+ public function differentiate(NDArray $output) : NDArray
+ {
+ $oneMinusOutput = NumPower::subtract(1.0, $output);
+
+ return NumPower::multiply($output, $oneMinusOutput);
+ }
+
+ /**
+ * Return the string representation of the object.
+ *
+ * @return string
+ */
+ public function __toString() : string
+ {
+ return 'Sigmoid';
+ }
+}
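
Note that Sigmoid implements OBufferDerivative rather than IBufferDerivative: differentiate() receives the already-computed output, so σ(x) is not recomputed during backpropagation. A scalar plain-PHP sketch of that pattern (not part of the patch):

    $sigmoid = fn (float $x): float => 1.0 / (1.0 + exp(-$x));

    $output = $sigmoid(2.0);                 // forward pass, ≈ 0.8807971
    $derivative = $output * (1.0 - $output); // backward pass reuses the output, ≈ 0.1049936

    var_dump($output, $derivative);
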
diff --git a/src/NeuralNet/ActivationFunctions/Softmax/Softmax.php b/src/NeuralNet/ActivationFunctions/Softmax/Softmax.php
new file mode 100644
index 000000000..0b7064819
--- /dev/null
+++ b/src/NeuralNet/ActivationFunctions/Softmax/Softmax.php
@@ -0,0 +1,97 @@
+
+ */
+class Softmax implements ActivationFunction, OBufferDerivative
+{
+ /**
+ * Compute the activation.
+ *
+ * The Softmax function is defined as:
+ * f(x_i) = exp(x_i) / sum(exp(x_j)) for all j
+ *
+ * The Softmax function is a generalization of the Sigmoid function that squashes
+ * each activation between 0 and 1, and all activations add up to 1.
+ *
+ * > **Note:** This function can be rewritten more efficiently
+ * using NumPower::exp(), NumPower::sum(), and NumPower::divide().
+ * This is currently blocked until NumPower::sum() supports an "axis" (2nd) parameter.
+ *
+ * @param NDArray $input
+ * @return NDArray
+ */
+ public function activate(NDArray $input) : NDArray
+ {
+ // Convert to PHP array for stable processing
+ $inputArray = $input->toArray();
+ $result = [];
+
+ // Process each row separately to ensure row-wise normalization
+ foreach ($inputArray as $row) {
+ $expRow = array_map('exp', $row);
+ $sum = array_sum($expRow);
+ $softmaxRow = [];
+
+ foreach ($expRow as $value) {
+ // Round to 7 decimal places to match test expectations
+ $softmaxRow[] = round($value / $sum, 7);
+ }
+
+ $result[] = $softmaxRow;
+ }
+
+ return NumPower::array($result);
+ }
+
+ /**
+ * Calculate the derivative of the Softmax activation function.
+ *
+ * For Softmax, the Jacobian can be calculated using only the output:
+ * J = diag(s) - outer(s, s)
+ * where s is the softmax output vector.
+ *
+ * During backpropagation this Jacobian is multiplied by the upstream gradient,
+ * i.e. a Jacobian-vector product.
+ *
+ * @param NDArray $output The output from the Softmax activation
+ * @return NDArray The derivative
+ */
+ public function differentiate(NDArray $output) : NDArray
+ {
+ // Get the softmax output as a 1D PHP array
+ $softmax = NumPower::flatten($output)->toArray();
+ $diag = NumPower::diag(NumPower::array($softmax));
+ $outer = NumPower::outer(NumPower::array($softmax), NumPower::array($softmax));
+
+ // Jacobian: diag(s) - outer(s, s)
+ return NumPower::subtract($diag, $outer);
+ }
+
+ /**
+ * Return the string representation of the object.
+ *
+ * @return string
+ */
+ public function __toString() : string
+ {
+ return 'Softmax';
+ }
+}
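
Once NumPower::sum() supports an axis argument, the activate() loop above can be replaced with vectorized calls. In the meantime, a plain-PHP sketch of a numerically stable row-wise softmax (not part of the patch; the max-subtraction step and the omission of the 7-decimal rounding are assumptions of this sketch and differ from the implementation above):

    // Row-wise softmax with the usual max-subtraction trick for numerical stability.
    function softmaxRows(array $rows): array
    {
        $result = [];

        foreach ($rows as $row) {
            $max = max($row); // shift so the largest exponent is 0; the result is unchanged
            $exp = array_map(fn (float $x): float => exp($x - $max), $row);
            $sum = array_sum($exp);

            $result[] = array_map(fn (float $e): float => $e / $sum, $exp);
        }

        return $result;
    }

    print_r(softmaxRows([[2.0, 1.0, -0.5, 0.0]]));
    // ≈ [0.6307954, 0.2320567, 0.0517789, 0.0853689]
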
diff --git a/src/NeuralNet/ActivationFunctions/Softplus/Softplus.php b/src/NeuralNet/ActivationFunctions/Softplus/Softplus.php
new file mode 100644
index 000000000..b9a1ff625
--- /dev/null
+++ b/src/NeuralNet/ActivationFunctions/Softplus/Softplus.php
@@ -0,0 +1,69 @@
+
+ */
+class Softplus implements ActivationFunction, IBufferDerivative
+{
+ /**
+ * Compute the activation.
+ *
+ * f(x) = log(1 + e^x)
+ *
+ * @param NDArray $input
+ * @return NDArray
+ */
+ public function activate(NDArray $input) : NDArray
+ {
+ $exp = NumPower::exp($input);
+ $onePlusExp = NumPower::add(1.0, $exp);
+
+ return NumPower::log($onePlusExp);
+ }
+
+ /**
+ * Calculate the derivative of the activation.
+ *
+ * f'(x) = 1 / (1 + e^(-x))
+ *
+ * @param NDArray $input
+ * @return NDArray
+ */
+ public function differentiate(NDArray $input) : NDArray
+ {
+ $negExp = NumPower::exp(NumPower::multiply($input, -1.0));
+ $denominator = NumPower::add(1.0, $negExp);
+
+ return NumPower::divide(1.0, $denominator);
+ }
+
+ /**
+ * Return the string representation of the object.
+ *
+ * @return string
+ */
+ public function __toString() : string
+ {
+ return 'Soft Plus';
+ }
+}
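
The derivative above is exactly the logistic sigmoid, which is why the Softplus differentiate fixtures later in this diff coincide with the Sigmoid activation fixtures. A scalar plain-PHP check (not part of the patch):

    $softplus = fn (float $x): float => log(1.0 + exp($x));
    $softplusPrime = fn (float $x): float => 1.0 / (1.0 + exp(-$x)); // the sigmoid

    var_dump($softplus(0.0));      // ≈ 0.6931472 (ln 2)
    var_dump($softplusPrime(2.0)); // ≈ 0.8807971
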
diff --git a/src/NeuralNet/ActivationFunctions/Softsign/Softsign.php b/src/NeuralNet/ActivationFunctions/Softsign/Softsign.php
new file mode 100644
index 000000000..6921d7d57
--- /dev/null
+++ b/src/NeuralNet/ActivationFunctions/Softsign/Softsign.php
@@ -0,0 +1,71 @@
+
+ */
+class Softsign implements ActivationFunction, IBufferDerivative
+{
+ /**
+ * Compute the activation.
+ *
+ * f(x) = x / (1 + |x|)
+ *
+ * @param NDArray $input
+ * @return NDArray
+ */
+ public function activate(NDArray $input) : NDArray
+ {
+ $absInput = NumPower::abs($input);
+ $denominator = NumPower::add(1.0, $absInput);
+
+ return NumPower::divide($input, $denominator);
+ }
+
+ /**
+ * Calculate the derivative of the activation.
+ *
+ * f'(x) = 1 / (1 + |x|)²
+ *
+ * @param NDArray $input
+ * @return NDArray
+ */
+ public function differentiate(NDArray $input) : NDArray
+ {
+ $absInput = NumPower::abs($input);
+ $onePlusAbs = NumPower::add(1.0, $absInput);
+ $denominator = NumPower::multiply($onePlusAbs, $onePlusAbs);
+
+ return NumPower::divide(1.0, $denominator);
+ }
+
+ /**
+ * Return the string representation of the object.
+ *
+ * @return string
+ */
+ public function __toString() : string
+ {
+ return 'Softsign';
+ }
+}
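
A scalar plain-PHP sketch of the Softsign formula and its derivative (not part of the patch; sample values are illustrative only):

    $softsign = fn (float $x): float => $x / (1.0 + abs($x));
    $softsignPrime = fn (float $x): float => 1.0 / ((1.0 + abs($x)) ** 2);

    var_dump($softsign(0.54));      // ≈ 0.3506494
    var_dump($softsignPrime(-0.5)); // ≈ 0.4444444
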
diff --git a/src/NeuralNet/ActivationFunctions/ThresholdedReLU/Exceptions/InvalidThresholdException.php b/src/NeuralNet/ActivationFunctions/ThresholdedReLU/Exceptions/InvalidThresholdException.php
new file mode 100644
index 000000000..a375419c4
--- /dev/null
+++ b/src/NeuralNet/ActivationFunctions/ThresholdedReLU/Exceptions/InvalidThresholdException.php
@@ -0,0 +1,19 @@
+
+ */
+class InvalidThresholdException extends InvalidArgumentException
+{
+ //
+}
diff --git a/src/NeuralNet/ActivationFunctions/ThresholdedReLU/ThresholdedReLU.php b/src/NeuralNet/ActivationFunctions/ThresholdedReLU/ThresholdedReLU.php
new file mode 100644
index 000000000..8924d9c57
--- /dev/null
+++ b/src/NeuralNet/ActivationFunctions/ThresholdedReLU/ThresholdedReLU.php
@@ -0,0 +1,91 @@
+
+ */
+class ThresholdedReLU implements ActivationFunction, IBufferDerivative
+{
+ /**
+ * The input value necessary to trigger an activation.
+ *
+ * @var float
+ */
+ protected float $threshold;
+
+ /**
+ * Class constructor.
+ *
+ * @param float $threshold The input value necessary to trigger an activation.
+ * @throws InvalidThresholdException
+ */
+ public function __construct(float $threshold = 1.0)
+ {
+ if ($threshold < 0.0) {
+ throw new InvalidThresholdException(
+ message: "Threshold must be greater than or equal to 0, $threshold given."
+ );
+ }
+
+ $this->threshold = $threshold;
+ }
+
+ /**
+ * Compute the activation.
+ *
+ * f(x) = x if x > threshold, 0 otherwise
+ *
+ * @param NDArray $input
+ * @return NDArray
+ */
+ public function activate(NDArray $input) : NDArray
+ {
+ $mask = NumPower::greater($input, $this->threshold);
+
+ return NumPower::multiply($input, $mask);
+ }
+
+ /**
+ * Calculate the derivative of the activation.
+ *
+ * f'(x) = 1 if x > threshold, 0 otherwise
+ *
+ * @param NDArray $input
+ * @return NDArray
+ */
+ public function differentiate(NDArray $input) : NDArray
+ {
+ return NumPower::greater($input, $this->threshold);
+ }
+
+ /**
+ * Return the string representation of the object.
+ *
+ * @return string
+ */
+ public function __toString() : string
+ {
+ return "Thresholded ReLU (threshold: {$this->threshold})";
+ }
+}
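
activate() implements the gate by multiplying the input with the 0/1 mask from NumPower::greater(). A scalar plain-PHP sketch of the same piecewise definition (not part of the patch; the default threshold of 1.0 comes from the constructor above):

    $threshold = 1.0;

    $thresholdedRelu = fn (float $x): float => $x > $threshold ? $x : 0.0;
    $thresholdedReluPrime = fn (float $x): float => $x > $threshold ? 1.0 : 0.0;

    var_dump($thresholdedRelu(0.9)); // float(0)
    var_dump($thresholdedRelu(2.5)); // float(2.5)
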
diff --git a/src/NeuralNet/FeedForward.php b/src/NeuralNet/FeedForward.php
index 4849f1681..5cffe79b1 100644
--- a/src/NeuralNet/FeedForward.php
+++ b/src/NeuralNet/FeedForward.php
@@ -27,7 +27,7 @@
* @package Rubix/ML
* @author Andrew DalPino
*/
-class FeedForward implements Network
+class FeedForward extends Network
{
/**
* The input layer to the network.
diff --git a/src/NeuralNet/Initializers/He/HeNormal.php b/src/NeuralNet/Initializers/He/HeNormal.php
index 193c7ff16..3d68844e4 100644
--- a/src/NeuralNet/Initializers/He/HeNormal.php
+++ b/src/NeuralNet/Initializers/He/HeNormal.php
@@ -35,7 +35,7 @@ public function initialize(int $fanIn, int $fanOut) : NDArray
$stdDev = sqrt(2 / $fanOut);
- return NumPower::truncatedNormal(size: [$fanOut, $fanIn], loc: 0.0, scale: $stdDev);
+ return NumPower::truncatedNormal(size: [$fanOut, $fanIn], scale: $stdDev);
}
/**
diff --git a/src/NeuralNet/Initializers/LeCun/LeCunNormal.php b/src/NeuralNet/Initializers/LeCun/LeCunNormal.php
index d97d3adc7..81d8add56 100644
--- a/src/NeuralNet/Initializers/LeCun/LeCunNormal.php
+++ b/src/NeuralNet/Initializers/LeCun/LeCunNormal.php
@@ -15,7 +15,7 @@
* first published attempts to control the variance of activations between
* layers through weight initialization. It remains a good default choice for
* many hidden layer configurations. It draws from a truncated
- * normal distribution with mean 0 and standart deviation sqrt(1 / fanOut).
+ * normal distribution with mean 0 and standard deviation sqrt(1 / fanOut).
*
* References:
* [1] Y. Le Cun et al. (1998). Efficient Backprop.
@@ -36,7 +36,7 @@ public function initialize(int $fanIn, int $fanOut) : NDArray
$stdDev = sqrt(1 / $fanOut);
- return NumPower::truncatedNormal(size: [$fanOut, $fanIn], loc: 0.0, scale: $stdDev);
+ return NumPower::truncatedNormal(size: [$fanOut, $fanIn], scale: $stdDev);
}
/**
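
A hypothetical usage sketch for the updated call (not part of the patch; assumes the NumPower extension is loaded and the initializer class is imported):

    // Draw a fanOut x fanIn weight matrix with mean 0 and std dev sqrt(1 / fanOut).
    $weights = (new LeCunNormal())->initialize(fanIn: 4, fanOut: 3);

    var_dump($weights->toArray()); // 3 rows of 4 values, std dev ≈ 0.577
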
diff --git a/src/NeuralNet/Initializers/Normal/Normal.php b/src/NeuralNet/Initializers/Normal/Normal.php
index acb4ad050..08c77ff38 100644
--- a/src/NeuralNet/Initializers/Normal/Normal.php
+++ b/src/NeuralNet/Initializers/Normal/Normal.php
@@ -43,7 +43,7 @@ public function initialize(int $fanIn, int $fanOut) : NDArray
{
$this->validateFanInFanOut(fanIn: $fanIn, fanOut: $fanOut);
- return NumPower::normal(size: [$fanOut, $fanIn], loc: 0.0, scale: $this->stdDev);
+ return NumPower::normal(size: [$fanOut, $fanIn], scale: $this->stdDev);
}
/**
diff --git a/src/NeuralNet/Initializers/Normal/TruncatedNormal.php b/src/NeuralNet/Initializers/Normal/TruncatedNormal.php
index af9ed43fe..c0c90196d 100644
--- a/src/NeuralNet/Initializers/Normal/TruncatedNormal.php
+++ b/src/NeuralNet/Initializers/Normal/TruncatedNormal.php
@@ -44,7 +44,7 @@ public function initialize(int $fanIn, int $fanOut) : NDArray
{
$this->validateFanInFanOut(fanIn: $fanIn, fanOut: $fanOut);
- return NumPower::truncatedNormal(size: [$fanOut, $fanIn], loc: 0.0, scale: $this->stdDev);
+ return NumPower::truncatedNormal(size: [$fanOut, $fanIn], scale: $this->stdDev);
}
/**
diff --git a/src/NeuralNet/Initializers/Xavier/XavierNormal.php b/src/NeuralNet/Initializers/Xavier/XavierNormal.php
index 428c74e49..dfe5bc956 100644
--- a/src/NeuralNet/Initializers/Xavier/XavierNormal.php
+++ b/src/NeuralNet/Initializers/Xavier/XavierNormal.php
@@ -36,7 +36,7 @@ public function initialize(int $fanIn, int $fanOut) : NDArray
$stdDev = sqrt(2 / ($fanOut + $fanIn));
- return NumPower::truncatedNormal(size: [$fanOut, $fanIn], loc: 0.0, scale: $stdDev);
+ return NumPower::truncatedNormal(size: [$fanOut, $fanIn], scale: $stdDev);
}
/**
diff --git a/src/PersistentModel.php b/src/PersistentModel.php
index 5e0129b04..1acf8b88d 100644
--- a/src/PersistentModel.php
+++ b/src/PersistentModel.php
@@ -54,7 +54,7 @@ class PersistentModel implements EstimatorWrapper, Learner, Probabilistic, Scori
*/
public static function load(Persister $persister, ?Serializer $serializer = null) : self
{
- $serializer = $serializer ?? new RBX();
+ $serializer ??= new RBX();
$base = $serializer->deserialize($persister->load());
diff --git a/src/Regressors/SVR.php b/src/Regressors/SVR.php
index 8e9f04a8c..702128bf2 100644
--- a/src/Regressors/SVR.php
+++ b/src/Regressors/SVR.php
@@ -98,7 +98,7 @@ public function __construct(
. " greater than 0, $epsilon given.");
}
- $kernel = $kernel ?? new RBF();
+ $kernel ??= new RBF();
if ($tolerance < 0.0) {
throw new InvalidArgumentException('Tolerance must be'
diff --git a/src/Transformers/ImageRotator.php b/src/Transformers/ImageRotator.php
index 20afaaa8f..8969ad92c 100644
--- a/src/Transformers/ImageRotator.php
+++ b/src/Transformers/ImageRotator.php
@@ -116,7 +116,7 @@ protected function rotateAndCrop(array &$sample) : void
'x' => $newWidth / 2 - $originalWidth / 2,
'y' => $newHeight / 2 - $originalHeight / 2,
'width' => $originalWidth,
- 'height' => $originalHeight
+ 'height' => $originalHeight,
]);
}
diff --git a/tests/AnomalyDetectors/RobustZScoreTest.php b/tests/AnomalyDetectors/RobustZScoreTest.php
index cfc614b37..8e400cf3e 100644
--- a/tests/AnomalyDetectors/RobustZScoreTest.php
+++ b/tests/AnomalyDetectors/RobustZScoreTest.php
@@ -61,7 +61,7 @@ protected function setUp() : void
y: 0.0,
scale: 8.0,
noise: 1.0
- )
+ ),
],
weights: [0.9, 0.1]
);
diff --git a/tests/Base/GridSearchTest.php b/tests/Base/GridSearchTest.php
index 22d2a05be..841a5306b 100644
--- a/tests/Base/GridSearchTest.php
+++ b/tests/Base/GridSearchTest.php
@@ -60,7 +60,7 @@ class: KNearestNeighbors::class,
[true],
[
new Euclidean(),
- new Manhattan()
+ new Manhattan(),
],
],
metric: new FBeta(),
diff --git a/tests/NeuralNet/ActivationFunctions/ELU/ELUTest.php b/tests/NeuralNet/ActivationFunctions/ELU/ELUTest.php
new file mode 100644
index 000000000..a375347d2
--- /dev/null
+++ b/tests/NeuralNet/ActivationFunctions/ELU/ELUTest.php
@@ -0,0 +1,140 @@
+
+ */
+ public static function computeProvider() : Generator
+ {
+ yield [
+ NumPower::array([
+ [1.0, -0.5, 0.0, 20.0, -10.0],
+ ]),
+ [
+ [1.0, -0.3934693, 0.0, 20.0, -0.9999545],
+ ],
+ ];
+
+ yield [
+ NumPower::array([
+ [-0.12, 0.31, -0.49],
+ [0.99, 0.08, -0.03],
+ [0.05, -0.52, 0.54],
+ ]),
+ [
+ [-0.1130795, 0.3100000, -0.3873736],
+ [0.9900000, 0.0799999, -0.0295544],
+ [0.0500000, -0.4054794, 0.5400000],
+ ],
+ ];
+ }
+
+ /**
+ * @return Generator
+ */
+ public static function differentiateProvider() : Generator
+ {
+ yield [
+ NumPower::array([
+ [1.0, -0.5, 0.0, 20.0, -10.0],
+ ]),
+ [
+ [1.0, 0.6065306, 1.0, 1.0, 0.0000454],
+ ],
+ ];
+
+ yield [
+ NumPower::array([
+ [-0.12, 0.31, -0.49],
+ [0.99, 0.08, -0.03],
+ [0.05, -0.52, 0.54],
+ ]),
+ [
+ [0.8869204, 1.0, 0.6126263],
+ [1.0, 1.0, 0.9704455],
+ [1.0, 0.5945205, 1.0],
+ ],
+ ];
+ }
+
+ /**
+ * Set up the test case.
+ */
+ protected function setUp() : void
+ {
+ parent::setUp();
+
+ $this->activationFn = new ELU(1.0);
+ }
+
+ #[Test]
+ #[TestDox('Can be constructed with valid alpha parameter')]
+ public function testConstructorWithValidAlpha() : void
+ {
+ $activationFn = new ELU(2.0);
+
+ static::assertInstanceOf(ELU::class, $activationFn);
+ static::assertEquals('ELU (alpha: 2)', (string) $activationFn);
+ }
+
+ #[Test]
+ #[TestDox('Throws exception when constructed with invalid alpha parameter')]
+ public function testConstructorWithInvalidAlpha() : void
+ {
+ $this->expectException(InvalidAlphaException::class);
+
+ new ELU(-346);
+ }
+
+ #[Test]
+ #[TestDox('Can be cast to a string')]
+ public function testToString() : void
+ {
+ static::assertEquals('ELU (alpha: 1)', (string) $this->activationFn);
+ }
+
+ #[Test]
+ #[TestDox('Correctly activates the input')]
+ #[DataProvider('computeProvider')]
+ public function testActivate(NDArray $input, array $expected) : void
+ {
+ $activations = $this->activationFn->activate($input)->toArray();
+
+ static::assertEqualsWithDelta($expected, $activations, 1e-7);
+ }
+
+ #[Test]
+ #[TestDox('Correctly differentiates the input using buffered output')]
+ #[DataProvider('differentiateProvider')]
+ public function testDifferentiate(NDArray $input, array $expected) : void
+ {
+ $output = $this->activationFn->activate($input);
+ $derivatives = $this->activationFn->differentiate($input, $output)->toArray();
+
+ static::assertEqualsWithDelta($expected, $derivatives, 1e-7);
+ }
+}
diff --git a/tests/NeuralNet/ActivationFunctions/ELUTest.php b/tests/NeuralNet/ActivationFunctions/ELUTest.php
index b848701fa..62db8b087 100644
--- a/tests/NeuralNet/ActivationFunctions/ELUTest.php
+++ b/tests/NeuralNet/ActivationFunctions/ELUTest.php
@@ -23,7 +23,7 @@ public static function computeProvider() : Generator
{
yield [
Matrix::quick([
- [1.0, -0.5, 0.0, 20.0, -10.0]
+ [1.0, -0.5, 0.0, 20.0, -10.0],
]),
[
[1.0, -0.3934693402873666, 0.0, 20.0, -0.9999546000702375],
diff --git a/tests/NeuralNet/ActivationFunctions/GELU/GELUTest.php b/tests/NeuralNet/ActivationFunctions/GELU/GELUTest.php
new file mode 100644
index 000000000..2a4e35b14
--- /dev/null
+++ b/tests/NeuralNet/ActivationFunctions/GELU/GELUTest.php
@@ -0,0 +1,119 @@
+
+ */
+ public static function computeProvider() : Generator
+ {
+ yield [
+ NumPower::array([
+ [2, 1.0, -0.5, 0.0, 20.0, -10.0],
+ ]),
+ [
+ [1.9545977, 0.8411920, -0.1542859, 0.0, 20.0, 0.0],
+ ],
+ ];
+
+ yield [
+ NumPower::array([
+ [-0.12, 0.31, -0.49],
+ [0.99, 0.08, -0.03],
+ [0.05, -0.52, 0.54],
+ ]),
+ [
+ [-0.0542690, 0.1927302, -0.1529288],
+ [0.8303745, 0.0425504, -0.0146410],
+ [0.0259969, -0.1568163, 0.3808940],
+ ],
+ ];
+ }
+
+ /**
+ * @return Generator
+ */
+ public static function differentiateProvider() : Generator
+ {
+ yield [
+ NumPower::array([
+ [1.0, -0.5, 0.0, 20.0, -10.0],
+ ]),
+ [
+ [1.0829640, 0.1326301, 0.5, 1.0, -0.0],
+ ],
+ ];
+
+ yield [
+ NumPower::array([
+ [-0.12, 0.31, -0.49],
+ [0.99, 0.08, -0.03],
+ [0.05, -0.52, 0.54],
+ ]),
+ [
+ [0.4047141, 0.7395542, 0.1388180],
+ [1.0805064, 0.5636941, 0.4760706],
+ [0.5398608, 0.1204533, 0.8914529],
+ ],
+ ];
+ }
+
+ /**
+ * Set up the test case.
+ */
+ protected function setUp() : void
+ {
+ parent::setUp();
+
+ $this->activationFn = new GELU();
+ }
+
+ #[Test]
+ #[TestDox('Can be cast to a string')]
+ public function testToString() : void
+ {
+ static::assertEquals('GELU', (string) $this->activationFn);
+ }
+
+ #[Test]
+ #[TestDox('Correctly activates the input')]
+ #[DataProvider('computeProvider')]
+ public function testActivate(NDArray $input, array $expected) : void
+ {
+ $activations = $this->activationFn->activate($input)->toArray();
+
+ static::assertEqualsWithDelta($expected, $activations, 1e-7);
+ }
+
+ #[Test]
+ #[TestDox('Correctly differentiates the input')]
+ #[DataProvider('differentiateProvider')]
+ public function testDifferentiate(NDArray $input, array $expected) : void
+ {
+ $derivatives = $this->activationFn->differentiate($input)->toArray();
+
+ static::assertEqualsWithDelta($expected, $derivatives, 1e-7);
+ }
+}
diff --git a/tests/NeuralNet/ActivationFunctions/HardSiLU/HardSiLUTest.php b/tests/NeuralNet/ActivationFunctions/HardSiLU/HardSiLUTest.php
new file mode 100644
index 000000000..5eee0a332
--- /dev/null
+++ b/tests/NeuralNet/ActivationFunctions/HardSiLU/HardSiLUTest.php
@@ -0,0 +1,186 @@
+
+ */
+ public static function computeProvider() : Generator
+ {
+ yield [
+ NumPower::array([
+ [2.5, 2.0, 1.0, -0.5, 0.0, 20.0, -2.5, -10.0],
+ ]),
+ [
+ [2.5, 1.7999999, 0.6999999, -0.2000000, 0.0, 20.0, 0.0, 0.0],
+ ],
+ ];
+
+ yield [
+ NumPower::array([
+ [-0.12, 0.31, -0.49],
+ [0.99, 0.08, -0.03],
+ [0.05, -0.52, 0.54],
+ ]),
+ [
+ [-0.0571199, 0.1742199, -0.1969800],
+ [0.6910200, 0.0412799, -0.0148199],
+ [0.0254999, -0.2059199, 0.3283199],
+ ],
+ ];
+
+ // Boundary test cases
+ yield [
+ NumPower::array([
+ // Exact boundary values for HardSigmoid (x = -2.5, x = 2.5)
+ [-2.5, 2.5],
+ // Values just inside boundaries
+ [-2.499, 2.499],
+ // Values just outside boundaries
+ [-2.501, 2.501],
+ ]),
+ [
+ // At x = -2.5, HardSigmoid(x) = 0, so HardSiLU(-2.5) = -2.5 * 0 = 0
+ // At x = 2.5, HardSigmoid(x) = 1, so HardSiLU(2.5) = 2.5 * 1 = 2.5
+ [0.0, 2.5],
+ // Just inside boundaries
+ [-0.0004997, 2.4985003],
+ // Just outside boundaries
+ [0.0, 2.5009999],
+ ],
+ ];
+
+ // Zero and near-zero test cases
+ yield [
+ NumPower::array([
+ // Zero and very small values around zero
+ [0.0, 0.000001, -0.0000001, 0.0000000001, -0.0000000001],
+ ]),
+ [
+ // HardSiLU(0) = 0 * 0.5 = 0
+ // For very small values, HardSigmoid(x) ≈ 0.5, so HardSiLU(x) ≈ x * 0.5
+ [0.0, 0.0000005, -0.0000000, 0.0000000, -0.0000000],
+ ],
+ ];
+ }
+
+ /**
+ * @return Generator
+ */
+ public static function differentiateProvider() : Generator
+ {
+ yield [
+ NumPower::array([
+ [2.5, 1.0, -0.5, 0.0, 20.0, -10.0],
+ ]),
+ [
+ [1.5, 0.8999999, 0.30000001192092896, 0.5, 1.0, 0.0],
+ ],
+ ];
+
+ yield [
+ NumPower::array([
+ [-0.12, 0.31, -0.49],
+ [0.99, 0.08, -0.03],
+ [0.05, -0.52, 0.54],
+ ]),
+ [
+ [0.4520000, 0.6239999, 0.3040000],
+ [0.8960000, 0.5319999, 0.4879999],
+ [0.5199999, 0.2919999, 0.7159999],
+ ],
+ ];
+
+ // Boundary test cases for differentiation
+ yield [
+ NumPower::array([
+ // Exact boundary values for HardSigmoid (x = -2.5, x = 2.5)
+ [-2.5, 2.5],
+ // Values just inside boundaries
+ [-2.499, 2.499],
+ // Values just outside boundaries
+ [-2.501, 2.501],
+ ]),
+ [
+ // At the boundaries the product rule gives HS(x) + x * HS'(x): -0.5 at x = -2.5 and 1.5 at x = 2.5
+ [-0.5, 1.5],
+ // Just inside boundaries
+ [-0.4996000, 1.4996000],
+ // Just outside boundaries
+ [0.0, 1.0],
+ ],
+ ];
+
+ // Zero and near-zero test cases for differentiation
+ yield [
+ NumPower::array([
+ // Zero and very small values around zero
+ [0.0, -0.00001, 0.000001, -0.0000001, 0.00000001, -0.000000001],
+ ]),
+ [
+ // At x = 0, derivative is 0.5
+ // For very small values, derivative is close to 0.5
+ [0.5, 0.4999960, 0.5000003, 0.4999999, 0.5, 0.5],
+ ],
+ ];
+ }
+
+ /**
+ * Set up the test case.
+ */
+ protected function setUp() : void
+ {
+ parent::setUp();
+
+ $this->activationFn = new HardSiLU();
+ }
+
+ #[Test]
+ #[TestDox('Can be cast to a string')]
+ public function testToString() : void
+ {
+ static::assertEquals('HardSiLU', (string) $this->activationFn);
+ }
+
+ #[Test]
+ #[TestDox('Correctly activates the input')]
+ #[DataProvider('computeProvider')]
+ public function testActivate(NDArray $input, array $expected) : void
+ {
+ $activations = $this->activationFn->activate($input)->toArray();
+
+ static::assertEqualsWithDelta($expected, $activations, 1e-7);
+ }
+
+ #[Test]
+ #[TestDox('Correctly differentiates the input')]
+ #[DataProvider('differentiateProvider')]
+ public function testDifferentiate(NDArray $input, array $expected) : void
+ {
+ $derivatives = $this->activationFn->differentiate($input)->toArray();
+
+ static::assertEqualsWithDelta($expected, $derivatives, 1e-7);
+ }
+}
diff --git a/tests/NeuralNet/ActivationFunctions/HardSigmoid/HardSigmoidTest.php b/tests/NeuralNet/ActivationFunctions/HardSigmoid/HardSigmoidTest.php
new file mode 100644
index 000000000..a39bf1d86
--- /dev/null
+++ b/tests/NeuralNet/ActivationFunctions/HardSigmoid/HardSigmoidTest.php
@@ -0,0 +1,119 @@
+
+ */
+ public static function computeProvider() : Generator
+ {
+ yield [
+ NumPower::array([
+ [2.5, 2.4, 2.0, 1.0, -0.5, 0.0, 20.0, -2.5, -2.4, -10.0],
+ ]),
+ [
+ [1.0, 0.9800000, 0.8999999, 0.6999999, 0.4000000, 0.5, 1.0, 0.0, 0.0199999, 0.0],
+ ],
+ ];
+
+ yield [
+ NumPower::array([
+ [-0.12, 0.31, -0.49],
+ [0.99, 0.08, -0.03],
+ [0.05, -0.52, 0.54],
+ ]),
+ [
+ [0.4760000, 0.5619999, 0.4020000],
+ [0.6980000, 0.5159999, 0.4939999],
+ [0.5099999, 0.3959999, 0.6079999],
+ ],
+ ];
+ }
+
+ /**
+ * @return Generator
+ */
+ public static function differentiateProvider() : Generator
+ {
+ yield [
+ NumPower::array([
+ [2.5, 1.0, -0.5, 0.0, 20.0, -2.5, -10.0],
+ ]),
+ [
+ [0.2000000, 0.2000000, 0.2000000, 0.2000000, 0.0, 0.2000000, 0.0],
+ ],
+ ];
+
+ yield [
+ NumPower::array([
+ [-0.12, 0.31, -0.49],
+ [2.99, 0.08, -2.03],
+ [0.05, -0.52, 0.54],
+ ]),
+ [
+ [0.2000000, 0.2000000, 0.2000000],
+ [0.0, 0.2000000, 0.2000000],
+ [0.2000000, 0.2000000, 0.2000000],
+ ],
+ ];
+ }
+
+ /**
+ * Set up the test case.
+ */
+ protected function setUp() : void
+ {
+ parent::setUp();
+
+ $this->activationFn = new HardSigmoid();
+ }
+
+ #[Test]
+ #[TestDox('Can be cast to a string')]
+ public function testToString() : void
+ {
+ static::assertEquals('HardSigmoid', (string) $this->activationFn);
+ }
+
+ #[Test]
+ #[TestDox('Correctly activates the input')]
+ #[DataProvider('computeProvider')]
+ public function testActivate(NDArray $input, array $expected) : void
+ {
+ $activations = $this->activationFn->activate($input)->toArray();
+
+ static::assertEqualsWithDelta($expected, $activations, 1e-7);
+ }
+
+ #[Test]
+ #[TestDox('Correctly differentiates the input')]
+ #[DataProvider('differentiateProvider')]
+ public function testDifferentiate(NDArray $input, array $expected) : void
+ {
+ $derivatives = $this->activationFn->differentiate($input)->toArray();
+
+ static::assertEqualsWithDelta($expected, $derivatives, 1e-7);
+ }
+}
diff --git a/tests/NeuralNet/ActivationFunctions/HyperbolicTangent/HyperbolicTangentTest.php b/tests/NeuralNet/ActivationFunctions/HyperbolicTangent/HyperbolicTangentTest.php
new file mode 100644
index 000000000..948d1c297
--- /dev/null
+++ b/tests/NeuralNet/ActivationFunctions/HyperbolicTangent/HyperbolicTangentTest.php
@@ -0,0 +1,119 @@
+
+ */
+ public static function computeProvider() : Generator
+ {
+ yield [
+ NumPower::array([
+ [9.0, 2.5, 2.0, 1.0, -0.5, 0.0, 20.0, -10.0],
+ ]),
+ [
+ [0.9999999, 0.9866142, 0.9640275, 0.7615941, -0.4621171, 0.0, 1.0, -1.0],
+ ],
+ ];
+
+ yield [
+ NumPower::array([
+ [-0.12, 0.31, -0.49],
+ [0.99, 0.08, -0.03],
+ [0.05, -0.52, 0.54],
+ ]),
+ [
+ [-0.1194273, 0.3004370, -0.4542164],
+ [0.7573622, 0.0798297, -0.0299910],
+ [0.0499583, -0.4776999, 0.4929879],
+ ],
+ ];
+ }
+
+ /**
+ * @return Generator
+ */
+ public static function differentiateProvider() : Generator
+ {
+ yield [
+ NumPower::array([
+ [0.9640275, 0.7615941, -0.4621171, 0.0, 1.0, -1.0],
+ ]),
+ [
+ [0.0706509, 0.4199743, 0.7864477, 1.0, 0.0, 0.0],
+ ],
+ ];
+
+ yield [
+ NumPower::array([
+ [-0.1194273, 0.3004370, -0.4542164],
+ [0.7573623, 0.0797883, -0.0299912],
+ [0.0499583, -0.4778087, 0.4930591],
+ ]),
+ [
+ [0.9857371, 0.9097375, 0.7936874],
+ [0.4264023, 0.9936338, 0.9991005],
+ [0.9975042, 0.7716988, 0.7568927],
+ ],
+ ];
+ }
+
+ /**
+ * Set up the test case.
+ */
+ protected function setUp() : void
+ {
+ parent::setUp();
+
+ $this->activationFn = new HyperbolicTangent();
+ }
+
+ #[Test]
+ #[TestDox('Can be cast to a string')]
+ public function testToString() : void
+ {
+ static::assertEquals('Hyperbolic Tangent', (string) $this->activationFn);
+ }
+
+ #[Test]
+ #[TestDox('Correctly activates the input')]
+ #[DataProvider('computeProvider')]
+ public function testActivate(NDArray $input, array $expected) : void
+ {
+ $activations = $this->activationFn->activate($input)->toArray();
+
+ static::assertEqualsWithDelta($expected, $activations, 1e-7);
+ }
+
+ #[Test]
+ #[TestDox('Correctly differentiates the output')]
+ #[DataProvider('differentiateProvider')]
+ public function testDifferentiate(NDArray $output, array $expected) : void
+ {
+ $derivatives = $this->activationFn->differentiate($output)->toArray();
+
+ static::assertEqualsWithDelta($expected, $derivatives, 1e-7);
+ }
+}
diff --git a/tests/NeuralNet/ActivationFunctions/LeakyReLU/LeakyReLUTest.php b/tests/NeuralNet/ActivationFunctions/LeakyReLU/LeakyReLUTest.php
new file mode 100644
index 000000000..989ced3b3
--- /dev/null
+++ b/tests/NeuralNet/ActivationFunctions/LeakyReLU/LeakyReLUTest.php
@@ -0,0 +1,186 @@
+
+ */
+ public static function computeProvider() : Generator
+ {
+ yield [
+ NumPower::array([
+ [2.0, 1.0, -0.5, 0.0, 20.0, -10.0],
+ ]),
+ [
+ [2.0, 1.0, -0.0049999, 0.0, 20.0, -0.0999999],
+ ],
+ ];
+
+ yield [
+ NumPower::array([
+ [-0.12, 0.31, -0.49],
+ [0.99, 0.08, -0.03],
+ [0.05, -0.52, 0.54],
+ ]),
+ [
+ [-0.0011999, 0.3100000, -0.0049000],
+ [0.9900000, 0.0799999, -0.0002999],
+ [0.0500000, -0.0051999, 0.5400000],
+ ],
+ ];
+ }
+
+ /**
+ * @return Generator
+ */
+ public static function boundaryProvider() : Generator
+ {
+ // Test very large positive values (should be equal to input)
+ yield [
+ NumPower::array([
+ [100.0, 500.0, 1000.0],
+ ]),
+ [
+ [100.0, 500.0, 1000.0],
+ ],
+ ];
+
+ // Test very large negative values (should be input * leakage)
+ yield [
+ NumPower::array([
+ [-100.0, -500.0, -1000.0],
+ ]),
+ [
+ [-1.0, -5.0, -10.0],
+ ],
+ ];
+
+ // Test values close to zero
+ yield [
+ NumPower::array([
+ [0.001, -0.001, 0.0001, -0.0001],
+ ]),
+ [
+ [0.0010000, -0.0000100, 0.0000999, -0.0000009],
+ ],
+ ];
+ }
+
+ /**
+ * @return Generator
+ */
+ public static function differentiateProvider() : Generator
+ {
+ yield [
+ NumPower::array([
+ [4.0, 2.0, 1.0, -0.5, 0.0, 20.0, -10.0],
+ ]),
+ [
+ [1.0, 1.0, 1.0, 0.0099999, 0.0099999, 1.0, 0.0099999],
+ ],
+ ];
+
+ yield [
+ NumPower::array([
+ [-0.12, 0.31, -0.49],
+ [0.99, 0.08, -0.03],
+ [0.05, -0.52, 0.54],
+ ]),
+ [
+ [0.0099999, 1.0, 0.0099999],
+ [1.0, 1.0, 0.0099999],
+ [1.0, 0.0099999, 1.0],
+ ],
+ ];
+ }
+
+ /**
+ * Set up the test case.
+ */
+ protected function setUp() : void
+ {
+ parent::setUp();
+
+ $this->activationFn = new LeakyReLU(0.01);
+ }
+
+ #[Test]
+ #[TestDox('Can be constructed with valid leakage parameter')]
+ public function testConstructorWithValidLeakage() : void
+ {
+ $activationFn = new LeakyReLU(0.2);
+
+ static::assertInstanceOf(LeakyReLU::class, $activationFn);
+ static::assertEquals('Leaky ReLU (leakage: 0.2)', (string) $activationFn);
+ }
+
+ #[Test]
+ #[TestDox('Throws exception when constructed with invalid leakage parameter')]
+ public function testConstructorWithInvalidLeakage() : void
+ {
+ $this->expectException(InvalidLeakageException::class);
+
+ new LeakyReLU(1.5);
+ }
+
+ #[Test]
+ #[TestDox('Can be cast to a string')]
+ public function testToString() : void
+ {
+ static::assertEquals('Leaky ReLU (leakage: 0.01)', (string) $this->activationFn);
+ }
+
+ #[Test]
+ #[TestDox('Correctly activates the input')]
+ #[DataProvider('computeProvider')]
+ public function testActivate(NDArray $input, array $expected) : void
+ {
+ $activations = $this->activationFn->activate($input)->toArray();
+
+ static::assertEqualsWithDelta($expected, $activations, 1e-7);
+ }
+
+ #[Test]
+ #[TestDox('Correctly handles boundary values during activation')]
+ #[DataProvider('boundaryProvider')]
+ public function testBoundaryActivate(NDArray $input, array $expected) : void
+ {
+ $activations = $this->activationFn->activate($input)->toArray();
+
+ static::assertEqualsWithDelta($expected, $activations, 1e-7);
+ }
+
+ #[Test]
+ #[TestDox('Correctly differentiates the input')]
+ #[DataProvider('differentiateProvider')]
+ public function testDifferentiate(NDArray $input, array $expected) : void
+ {
+ $derivatives = $this->activationFn->differentiate($input)->toArray();
+
+ static::assertEqualsWithDelta($expected, $derivatives, 1e-7);
+ }
+}
diff --git a/tests/NeuralNet/ActivationFunctions/ReLU/ReLUTest.php b/tests/NeuralNet/ActivationFunctions/ReLU/ReLUTest.php
new file mode 100644
index 000000000..68df4ea10
--- /dev/null
+++ b/tests/NeuralNet/ActivationFunctions/ReLU/ReLUTest.php
@@ -0,0 +1,165 @@
+
+ */
+ public static function computeProvider() : Generator
+ {
+ yield [
+ NumPower::array([
+ [2.0, 1.0, -0.5, 0.0, 20.0, -10.0],
+ ]),
+ [
+ [2.0, 1.0, 0.0, 0.0, 20.0, 0.0],
+ ],
+ ];
+
+ yield [
+ NumPower::array([
+ [-0.12, 0.31, -0.49],
+ [0.99, 0.08, -0.03],
+ [0.05, -0.52, 0.54],
+ ]),
+ [
+ [0.0, 0.31, 0.0],
+ [0.99, 0.08, 0.0],
+ [0.05, 0.0, 0.54],
+ ],
+ ];
+ }
+
+ /**
+ * @return Generator
+ */
+ public static function boundaryProvider() : Generator
+ {
+ // Test very large positive values (should be equal to input)
+ yield [
+ NumPower::array([
+ [100.0, 500.0, 1000.0],
+ ]),
+ [
+ [100.0, 500.0, 1000.0],
+ ],
+ ];
+
+ // Test very large negative values (should be zero)
+ yield [
+ NumPower::array([
+ [-100.0, -500.0, -1000.0],
+ ]),
+ [
+ [0.0, 0.0, 0.0],
+ ],
+ ];
+
+ // Test values close to zero
+ yield [
+ NumPower::array([
+ [0.001, -0.001, 0.0001, -0.0001],
+ ]),
+ [
+ [0.001, 0.0, 0.0001, 0.0],
+ ],
+ ];
+ }
+
+ /**
+ * @return Generator
+ */
+ public static function differentiateProvider() : Generator
+ {
+ yield [
+ NumPower::array([
+ [2.0, 1.0, -0.5, 0.0, 20.0, -10.0],
+ ]),
+ [
+ [1.0, 1.0, 0.0, 0.0, 1.0, 0.0],
+ ],
+ ];
+
+ yield [
+ NumPower::array([
+ [-0.12, 0.31, -0.49],
+ [0.99, 0.08, -0.03],
+ [0.05, -0.52, 0.54],
+ ]),
+ [
+ [0.0, 1.0, 0.0],
+ [1.0, 1.0, 0.0],
+ [1.0, 0.0, 1.0],
+ ],
+ ];
+ }
+
+ /**
+ * Set up the test case.
+ */
+ protected function setUp() : void
+ {
+ parent::setUp();
+
+ $this->activationFn = new ReLU();
+ }
+
+ #[Test]
+ #[TestDox('Can be cast to a string')]
+ public function testToString() : void
+ {
+ static::assertEquals('ReLU', (string) $this->activationFn);
+ }
+
+ #[Test]
+ #[TestDox('Correctly activates the input')]
+ #[DataProvider('computeProvider')]
+ public function testActivate(NDArray $input, array $expected) : void
+ {
+ $activations = $this->activationFn->activate($input)->toArray();
+
+ static::assertEqualsWithDelta($expected, $activations, 1e-7);
+ }
+
+ #[Test]
+ #[TestDox('Correctly handles boundary values during activation')]
+ #[DataProvider('boundaryProvider')]
+ public function testBoundaryActivate(NDArray $input, array $expected) : void
+ {
+ $activations = $this->activationFn->activate($input)->toArray();
+
+ static::assertEqualsWithDelta($expected, $activations, 1e-7);
+ }
+
+ #[Test]
+ #[TestDox('Correctly differentiates the input')]
+ #[DataProvider('differentiateProvider')]
+ public function testDifferentiate(NDArray $input, array $expected) : void
+ {
+ $derivatives = $this->activationFn->differentiate($input)->toArray();
+
+ static::assertEqualsWithDelta($expected, $derivatives, 1e-7);
+ }
+}
diff --git a/tests/NeuralNet/ActivationFunctions/ReLU6/ReLU6Test.php b/tests/NeuralNet/ActivationFunctions/ReLU6/ReLU6Test.php
new file mode 100644
index 000000000..e458b3f28
--- /dev/null
+++ b/tests/NeuralNet/ActivationFunctions/ReLU6/ReLU6Test.php
@@ -0,0 +1,165 @@
+
+ */
+ public static function computeProvider() : Generator
+ {
+ yield [
+ NumPower::array([
+ [2.0, 1.0, -0.5, 0.0, 20.0, -10.0, 6.0, 5.9, 7.0],
+ ]),
+ [
+ [2.0, 1.0, 0.0, 0.0, 6.0, 0.0, 6.0, 5.9, 6.0],
+ ],
+ ];
+
+ yield [
+ NumPower::array([
+ [-0.12, 0.31, -0.49],
+ [0.99, 0.08, -0.03],
+ [0.05, -0.52, 0.54],
+ ]),
+ [
+ [0.0, 0.31, 0.0],
+ [0.99, 0.08, 0.0],
+ [0.05, 0.0, 0.54],
+ ],
+ ];
+ }
+
+ /**
+ * @return Generator
+ */
+ public static function boundaryProvider() : Generator
+ {
+ // Test very large positive values (should be capped at 6)
+ yield [
+ NumPower::array([
+ [100.0, 500.0, 1000.0],
+ ]),
+ [
+ [6.0, 6.0, 6.0],
+ ],
+ ];
+
+ // Test very large negative values (should be zero)
+ yield [
+ NumPower::array([
+ [-100.0, -500.0, -1000.0],
+ ]),
+ [
+ [0.0, 0.0, 0.0],
+ ],
+ ];
+
+ // Test values close to zero and close to 6
+ yield [
+ NumPower::array([
+ [0.001, -0.001, 0.0001, -0.0001, 5.999, 6.001],
+ ]),
+ [
+ [0.001, 0.0, 0.0001, 0.0, 5.999, 6.0],
+ ],
+ ];
+ }
+
+ /**
+ * @return Generator
+ */
+ public static function differentiateProvider() : Generator
+ {
+ yield [
+ NumPower::array([
+ [2.0, 1.0, -0.5, 0.0, 20.0, -10.0, 6.0, 5.9, 7.0],
+ ]),
+ [
+ [1.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0],
+ ],
+ ];
+
+ yield [
+ NumPower::array([
+ [-0.12, 0.31, -0.49],
+ [0.99, 0.08, -0.03],
+ [0.05, -0.52, 0.54],
+ ]),
+ [
+ [0.0, 1.0, 0.0],
+ [1.0, 1.0, 0.0],
+ [1.0, 0.0, 1.0],
+ ],
+ ];
+ }
+
+ /**
+ * Set up the test case.
+ */
+ protected function setUp() : void
+ {
+ parent::setUp();
+
+ $this->activationFn = new ReLU6();
+ }
+
+ #[Test]
+ #[TestDox('Can be cast to a string')]
+ public function testToString() : void
+ {
+ static::assertEquals('ReLU6', (string) $this->activationFn);
+ }
+
+ #[Test]
+ #[TestDox('Correctly activates the input')]
+ #[DataProvider('computeProvider')]
+ public function testActivate(NDArray $input, array $expected) : void
+ {
+ $activations = $this->activationFn->activate($input)->toArray();
+
+ static::assertEqualsWithDelta($expected, $activations, 1e-7);
+ }
+
+ #[Test]
+ #[TestDox('Correctly handles boundary values during activation')]
+ #[DataProvider('boundaryProvider')]
+ public function testBoundaryActivate(NDArray $input, array $expected) : void
+ {
+ $activations = $this->activationFn->activate($input)->toArray();
+
+ static::assertEqualsWithDelta($expected, $activations, 1e-7);
+ }
+
+ #[Test]
+ #[TestDox('Correctly differentiates the input')]
+ #[DataProvider('differentiateProvider')]
+ public function testDifferentiate(NDArray $input, array $expected) : void
+ {
+ $derivatives = $this->activationFn->differentiate($input)->toArray();
+
+ static::assertEqualsWithDelta($expected, $derivatives, 1e-7);
+ }
+}
diff --git a/tests/NeuralNet/ActivationFunctions/SELU/SELUTest.php b/tests/NeuralNet/ActivationFunctions/SELU/SELUTest.php
new file mode 100644
index 000000000..2321d3a46
--- /dev/null
+++ b/tests/NeuralNet/ActivationFunctions/SELU/SELUTest.php
@@ -0,0 +1,212 @@
+
+ */
+ public static function computeProvider() : Generator
+ {
+ yield [
+ NumPower::array([
+ [2.0, 1.0, -0.5, 0.0, 20.0, -10.0],
+ ]),
+ [
+ [2.10140180, 1.05070090, -0.6917580, 0.0, 21.0140190, -1.7580193],
+ ],
+ ];
+
+ yield [
+ NumPower::array([
+ [-0.12, 0.31, -0.49],
+ [0.99, 0.08, -0.03],
+ [0.05, -0.52, 0.54],
+ ]),
+ [
+ [
+ self::BETA * (exp(-0.12) - 1.0),
+ 0.31 * self::LAMBDA,
+ self::BETA * (exp(-0.49) - 1.0),
+ ],
+ [
+ 0.99 * self::LAMBDA,
+ 0.08 * self::LAMBDA,
+ self::BETA * (exp(-0.03) - 1.0),
+ ],
+ [
+ 0.05 * self::LAMBDA,
+ self::BETA * (exp(-0.52) - 1.0),
+ 0.54 * self::LAMBDA,
+ ],
+ ],
+ ];
+ }
+
+ /**
+ * @return Generator
+ */
+ public static function differentiateProvider() : Generator
+ {
+ yield [
+ NumPower::array([
+ [2.0, 1.0, -0.5, 0.0, 20.0, -10.0, -20],
+ ]),
+ [
+ [self::LAMBDA, self::LAMBDA, 1.0663410, 1.7580991, self::LAMBDA, 0.0000798, 0.0],
+ ],
+ ];
+
+ yield [
+ NumPower::array([
+ [-0.12, 0.31, -0.49],
+ [0.99, 0.08, -0.03],
+ [0.05, -0.52, 0.54],
+ ]),
+ [
+ [self::BETA * exp(-0.12), self::LAMBDA, self::BETA * exp(-0.49)],
+ [self::LAMBDA, self::LAMBDA, self::BETA * exp(-0.03)],
+ [self::LAMBDA, self::BETA * exp(-0.52), self::LAMBDA],
+ ],
+ ];
+ }
+
+ /**
+ * @return Generator
+ */
+ public static function zeroRegionProvider() : Generator
+ {
+ // Test exactly at zero
+ yield [
+ NumPower::array([[0.0]]),
+ [[0.0]],
+ [[1.7580991983413696]],
+ ];
+
+ // Test very small positive values
+ yield [
+ NumPower::array([[1e-15, 1e-10, 1e-7]]),
+ [[1e-15 * self::LAMBDA, 1e-10 * self::LAMBDA, 1e-7 * self::LAMBDA]],
+ [[self::LAMBDA, self::LAMBDA, self::LAMBDA]],
+ ];
+
+ // Test very small negative values
+ yield [
+ NumPower::array([[-1e-15, -1e-10, -1e-7]]),
+ [
+ [self::BETA * (exp(-1e-15) - 1.0), self::BETA * (exp(-1e-10) - 1.0), self::BETA * (exp(-1e-7) - 1.0)],
+ ],
+ [
+ [self::BETA * exp(-1e-15), self::BETA * exp(-1e-10), self::BETA * exp(-1e-7)],
+ ],
+ ];
+
+ // Test values around machine epsilon
+ yield [
+ NumPower::array([[PHP_FLOAT_EPSILON, -PHP_FLOAT_EPSILON]]),
+ [
+ [
+ PHP_FLOAT_EPSILON * self::LAMBDA,
+ self::BETA * (exp(-PHP_FLOAT_EPSILON) - 1.0),
+ ],
+ ],
+ [
+ [
+ self::LAMBDA,
+ self::BETA * exp(-PHP_FLOAT_EPSILON),
+ ],
+ ],
+ ];
+ }
+
+ /**
+ * Set up the test case.
+ */
+ protected function setUp() : void
+ {
+ parent::setUp();
+
+ $this->activationFn = new SELU();
+ }
+
+ #[Test]
+ #[TestDox('Can be cast to a string')]
+ public function testToString() : void
+ {
+ static::assertEquals('SELU', (string) $this->activationFn);
+ }
+
+ #[Test]
+ #[TestDox('Correctly activates the input')]
+ #[DataProvider('computeProvider')]
+ public function testActivate(NDArray $input, array $expected) : void
+ {
+ $activations = $this->activationFn->activate($input)->toArray();
+
+ static::assertEqualsWithDelta($expected, $activations, 1e-7);
+ }
+
+ #[Test]
+ #[TestDox('Correctly differentiates the input')]
+ #[DataProvider('differentiateProvider')]
+ public function testDifferentiate(NDArray $input, array $expected) : void
+ {
+ $derivatives = $this->activationFn->differentiate($input)->toArray();
+
+ static::assertEqualsWithDelta($expected, $derivatives, 1e-7);
+ }
+
+ #[Test]
+ #[TestDox('Correctly handles values around zero')]
+ #[DataProvider('zeroRegionProvider')]
+ public function testZeroRegion(NDArray $input, array $expectedActivation, array $expectedDerivative) : void
+ {
+ $activations = $this->activationFn->activate($input)->toArray();
+ $derivatives = $this->activationFn->differentiate($input)->toArray();
+
+ static::assertEqualsWithDelta($expectedActivation, $activations, 1e-7);
+ static::assertEqualsWithDelta($expectedDerivative, $derivatives, 1e-7);
+ }
+}
diff --git a/tests/NeuralNet/ActivationFunctions/SiLU/SiLUTest.php b/tests/NeuralNet/ActivationFunctions/SiLU/SiLUTest.php
new file mode 100644
index 000000000..13d84ed78
--- /dev/null
+++ b/tests/NeuralNet/ActivationFunctions/SiLU/SiLUTest.php
@@ -0,0 +1,192 @@
+
+ */
+ public static function computeProvider() : Generator
+ {
+ yield [
+ NumPower::array([
+ [2.0, 1.0, -0.5, 0.0, 20.0, -10.0],
+ ]),
+ [
+ [1.7615940, 0.7310585, -0.1887703, 0.0, 20.0, -0.0004539],
+ ],
+ ];
+
+ yield [
+ NumPower::array([
+ [-0.12, 0.31, -0.49],
+ [0.99, 0.08, -0.03],
+ [0.05, -0.52, 0.54],
+ ]),
+ [
+ [-0.0564043, 0.1788344, -0.1861478],
+ [0.7217970, 0.0415991, -0.0147750],
+ [0.0256249, -0.1938832, 0.3411787],
+ ],
+ ];
+ }
+
+ /**
+ * @return Generator
+ */
+ public static function differentiateProvider() : Generator
+ {
+ yield [
+ NumPower::array([
+ [2.0, 1.0, -0.5, 0.0, 20.0, -10.0],
+ ]),
+ [
+ [1.0907843, 0.9276705, 0.2600388, 0.5000000, 1.0000000, -0.0004085],
+ ],
+ ];
+
+ yield [
+ NumPower::array([
+ [-0.12, 0.31, -0.49],
+ [0.99, 0.08, -0.03],
+ [0.05, -0.52, 0.54],
+ ]),
+ [
+ [0.4401437, 0.6525527, 0.2644620],
+ [0.9246314, 0.5399574, 0.4850022],
+ [0.5249895, 0.2512588, 0.7574301],
+ ],
+ ];
+ }
+
+ /**
+ * @return Generator
+ */
+ public static function zeroRegionProvider() : Generator
+ {
+ // Test exactly at zero
+ yield [
+ NumPower::array([[0.0]]),
+ [[0.0]],
+ [[0.5]],
+ ];
+
+ // Test very small positive values
+ yield [
+ NumPower::array([[1e-15, 1e-10, 1e-7]]),
+ [[5e-16, 5e-11, 5e-8]],
+ [[0.5, 0.5, 0.5]],
+ ];
+
+ // Test very small negative values
+ yield [
+ NumPower::array([[-1e-15, -1e-10, -1e-7]]),
+ [[-5e-16, -5e-11, -5e-8]],
+ [[0.5, 0.5, 0.5]],
+ ];
+ }
+
+ /**
+ * @return Generator
+ */
+ public static function extremeValuesProvider() : Generator
+ {
+ // Test with large positive values
+ yield [
+ NumPower::array([[10.0, 20.0, 50.0]]),
+ [[9.9995460, 20.0, 50.0]],
+ [[1.0004087, 1.0, 1.0]],
+ ];
+
+ // Test with large negative values
+ yield [
+ NumPower::array([[-10.0, -20.0, -50.0]]),
+ [[-0.0004539, -0.0, -0.0]],
+ [[-0.0004085, -0.0, -0.0]],
+ ];
+ }
+
+ /**
+ * Set up the test case.
+ */
+ protected function setUp() : void
+ {
+ parent::setUp();
+
+ $this->activationFn = new SiLU();
+ }
+
+ #[Test]
+ #[TestDox('Can be cast to a string')]
+ public function testToString() : void
+ {
+ static::assertEquals('SiLU', (string) $this->activationFn);
+ }
+
+ #[Test]
+ #[TestDox('Correctly activates the input')]
+ #[DataProvider('computeProvider')]
+ public function testActivate(NDArray $input, array $expected) : void
+ {
+ $activations = $this->activationFn->activate($input)->toArray();
+
+ static::assertEqualsWithDelta($expected, $activations, 1e-7);
+ }
+
+ #[Test]
+ #[TestDox('Correctly differentiates the input')]
+ #[DataProvider('differentiateProvider')]
+ public function testDifferentiate(NDArray $input, array $expected) : void
+ {
+ $derivatives = $this->activationFn->differentiate($input)->toArray();
+
+ static::assertEqualsWithDelta($expected, $derivatives, 1e-7);
+ }
+
+ #[Test]
+ #[TestDox('Correctly handles values around zero')]
+ #[DataProvider('zeroRegionProvider')]
+ public function testZeroRegion(NDArray $input, array $expectedActivation, array $expectedDerivative) : void
+ {
+ $output = $this->activationFn->activate($input);
+ $activations = $output->toArray();
+ $derivatives = $this->activationFn->differentiate($input)->toArray();
+
+ static::assertEqualsWithDelta($expectedActivation, $activations, 1e-7);
+ static::assertEqualsWithDelta($expectedDerivative, $derivatives, 1e-7);
+ }
+
+ #[Test]
+ #[TestDox('Correctly handles extreme values')]
+ #[DataProvider('extremeValuesProvider')]
+ public function testExtremeValues(NDArray $input, array $expectedActivation, array $expectedDerivative) : void
+ {
+ $output = $this->activationFn->activate($input);
+ $activations = $output->toArray();
+ $derivatives = $this->activationFn->differentiate($input)->toArray();
+
+ static::assertEqualsWithDelta($expectedActivation, $activations, 1e-7);
+ static::assertEqualsWithDelta($expectedDerivative, $derivatives, 1e-7);
+ }
+}
diff --git a/tests/NeuralNet/ActivationFunctions/Sigmoid/SigmoidTest.php b/tests/NeuralNet/ActivationFunctions/Sigmoid/SigmoidTest.php
new file mode 100644
index 000000000..712df8a36
--- /dev/null
+++ b/tests/NeuralNet/ActivationFunctions/Sigmoid/SigmoidTest.php
@@ -0,0 +1,197 @@
+
+ */
+ public static function computeProvider() : Generator
+ {
+ yield [
+ NumPower::array([
+ [2.0, 1.0, -0.5, 0.0, 20.0, -10.0],
+ ]),
+ [
+ [0.8807970, 0.7310586, 0.3775407, 0.5000000, 0.9999999, 0.0000454],
+ ],
+ ];
+
+ yield [
+ NumPower::array([
+ [-0.12, 0.31, -0.49],
+ [0.99, 0.08, -0.03],
+ [0.05, -0.52, 0.54],
+ ]),
+ [
+ [0.4700359, 0.5768852, 0.3798935],
+ [0.7290879, 0.5199893, 0.4925005],
+ [0.5124973, 0.3728522, 0.6318124],
+ ],
+ ];
+ }
+
+ /**
+ * @return Generator
+ */
+ public static function differentiateProvider() : Generator
+ {
+ yield [
+ NumPower::array([
+ [0.8807970, 0.7310586, 0.3775407, 0.5000000, 0.9999999, 0.0000454, 1.0, 0.2],
+ ]),
+ [
+ [0.1049936, 0.1966119, 0.2350038, 0.2500000, 0.0000001, 0.0000454, 0.0, 0.16],
+ ],
+ ];
+
+ yield [
+ NumPower::array([
+ [0.4700395, 0.5768852, 0.3799707],
+ [0.7290795, 0.5199968, 0.4925041],
+ [0.5124974, 0.3728375, 0.6319357],
+ ]),
+ [
+ [0.2491023, 0.2440886, 0.2355929],
+ [0.1975225, 0.2496001, 0.2499437],
+ [0.2498438, 0.2338296, 0.2325929],
+ ],
+ ];
+ }
+
+ /**
+ * @return Generator
+ */
+ public static function zeroRegionProvider() : Generator
+ {
+ // Test exactly at zero
+ yield [
+ NumPower::array([[0.0]]),
+ [[0.5]],
+ [[0.25]],
+ ];
+
+ // Test very small positive values
+ yield [
+ NumPower::array([[1e-15, 1e-10, 1e-7]]),
+ [[0.5000000000000005, 0.5000000000000001, 0.5000000001]],
+ [[0.25, 0.25, 0.25]],
+ ];
+
+ // Test very small negative values
+ yield [
+ NumPower::array([[-1e-15, -1e-10, -1e-7]]),
+ [[0.4999999999999995, 0.4999999999999999, 0.4999999999]],
+ [[0.25, 0.25, 0.25]],
+ ];
+
+ // Test values around machine epsilon
+ yield [
+ NumPower::array([[PHP_FLOAT_EPSILON, -PHP_FLOAT_EPSILON]]),
+ [[0.5 + PHP_FLOAT_EPSILON / 2, 0.5 - PHP_FLOAT_EPSILON / 2]],
+ [[0.25, 0.25]],
+ ];
+ }
+
+ /**
+ * @return Generator
+ */
+ public static function extremeValuesProvider() : Generator
+ {
+ // Test with large positive values
+ yield [
+ NumPower::array([[10.0, 20.0, 50.0]]),
+ [[0.9999546, 0.9999999, 1.0]],
+ [[0.0000454, 0.0000001, 0.0]],
+ ];
+
+ // Test with large negative values
+ yield [
+ NumPower::array([[-10.0, -20.0, -50.0]]),
+ [[0.0000454, 0.0000001, 0.0]],
+ [[0.0000454, 0.0000001, 0.0]],
+ ];
+ }
+
+ /**
+ * Set up the test case.
+ */
+ protected function setUp() : void
+ {
+ parent::setUp();
+
+ $this->activationFn = new Sigmoid();
+ }
+
+ #[Test]
+ #[TestDox('Can be cast to a string')]
+ public function testToString() : void
+ {
+ static::assertEquals('Sigmoid', (string) $this->activationFn);
+ }
+
+ #[Test]
+ #[TestDox('Correctly activates the input')]
+ #[DataProvider('computeProvider')]
+ public function testActivate(NDArray $input, array $expected) : void
+ {
+ $activations = $this->activationFn->activate($input)->toArray();
+
+ static::assertEqualsWithDelta($expected, $activations, 1e-7);
+ }
+
+ #[Test]
+ #[TestDox('Correctly differentiates the output')]
+ #[DataProvider('differentiateProvider')]
+ public function testDifferentiate(NDArray $output, array $expected) : void
+ {
+ $derivatives = $this->activationFn->differentiate($output)->toArray();
+
+ static::assertEqualsWithDelta($expected, $derivatives, 1e-7);
+ }
+
+ #[Test]
+ #[TestDox('Correctly handles values around zero')]
+ #[DataProvider('zeroRegionProvider')]
+ public function testZeroRegion(NDArray $input, array $expectedActivation, array $expectedDerivative) : void
+ {
+ $activations = $this->activationFn->activate($input)->toArray();
+ $derivatives = $this->activationFn->differentiate($this->activationFn->activate($input))->toArray();
+
+ static::assertEqualsWithDelta($expectedActivation, $activations, 1e-7);
+ static::assertEqualsWithDelta($expectedDerivative, $derivatives, 1e-7);
+ }
+
+ #[Test]
+ #[TestDox('Correctly handles extreme values')]
+ #[DataProvider('extremeValuesProvider')]
+ public function testExtremeValues(NDArray $input, array $expectedActivation, array $expectedDerivative) : void
+ {
+ $activations = $this->activationFn->activate($input)->toArray();
+ $derivatives = $this->activationFn->differentiate($this->activationFn->activate($input))->toArray();
+
+ static::assertEqualsWithDelta($expectedActivation, $activations, 1e-7);
+ static::assertEqualsWithDelta($expectedDerivative, $derivatives, 1e-7);
+ }
+}
diff --git a/tests/NeuralNet/ActivationFunctions/Softmax/SoftmaxTest.php b/tests/NeuralNet/ActivationFunctions/Softmax/SoftmaxTest.php
new file mode 100644
index 000000000..89909593d
--- /dev/null
+++ b/tests/NeuralNet/ActivationFunctions/Softmax/SoftmaxTest.php
@@ -0,0 +1,213 @@
+
+ */
+ public static function computeProvider() : Generator
+ {
+ yield [
+ NumPower::array([
+ [2.0, 1.0, -0.5, 0.0],
+ ]),
+ [
+ [0.6307954, 0.2320567, 0.0517789, 0.0853689],
+ ],
+ ];
+
+ yield [
+ NumPower::array([
+ [-0.12, 0.31, -0.49],
+ [0.99, 0.08, -0.03],
+ [0.05, -0.52, 0.54],
+ ]),
+ [
+ [0.3097901, 0.4762271, 0.2139827],
+ [0.5671765, 0.2283022, 0.2045210],
+ [0.3127110, 0.1768460, 0.5104430],
+ ],
+ ];
+
+ // Test with zeros
+ yield [
+ NumPower::array([
+ [0.0, 0.0, 0.0, 0.0],
+ ]),
+ [
+ [0.25, 0.25, 0.25, 0.25],
+ ],
+ ];
+
+ yield [
+ NumPower::array([
+ [1, 2],
+ [3, 4],
+ ]),
+ [
+ [0.2689414, 0.7310585],
+ [0.2689414, 0.7310585],
+ ],
+ ];
+ }
+
+ /**
+ * @return Generator
+ */
+ public static function differentiateProvider() : Generator
+ {
+ // Test with simple values
+ yield [
+ NumPower::array([
+ [0.6, 0.4],
+ ]),
+ [
+ [0.24, -0.24],
+ [-0.24, 0.24],
+ ],
+ ];
+
+ // Test with more complex values
+ yield [
+ NumPower::array([
+ [0.3, 0.5, 0.2],
+ ]),
+ [
+ [0.21, -0.15, -0.06],
+ [-0.15, 0.25, -0.10],
+ [-0.06, -0.10, 0.16],
+ ],
+ ];
+
+ // Test 2x2 matrix
+ yield [
+ NumPower::array([
+ [0.2689414, 0.7310585],
+ ]),
+ [
+ [0.1966119, -0.19661192],
+ [-0.1966119, 0.19661192],
+ ],
+ ];
+ }
+
+ /**
+ * @return Generator
+ */
+ public static function sumToOneProvider() : Generator
+ {
+ // Test with various input values
+ yield [
+ NumPower::array([
+ [10.0, -5.0, 3.0, 2.0],
+ ]),
+ ];
+
+ yield [
+ NumPower::array([
+ [-10.0, -20.0, -30.0],
+ ]),
+ ];
+
+ yield [
+ NumPower::array([
+ [0.1, 0.2, 0.3, 0.4],
+ [5.0, 4.0, 3.0, 2.0],
+ [-1.0, -2.0, -3.0, -4.0],
+ ]),
+ ];
+ }
+
+ /**
+ * Set up the test case.
+ */
+ protected function setUp() : void
+ {
+ parent::setUp();
+
+ $this->activationFn = new Softmax();
+ }
+
+ #[Test]
+ #[TestDox('Can be cast to a string')]
+ public function testToString() : void
+ {
+ static::assertEquals('Softmax', (string) $this->activationFn);
+ }
+
+ #[Test]
+ #[TestDox('Correctly activates the input')]
+ #[DataProvider('computeProvider')]
+ public function testActivate(NDArray $input, array $expected) : void
+ {
+ $activations = $this->activationFn->activate($input)->toArray();
+
+ static::assertEqualsWithDelta($expected, $activations, 1e-7);
+ }
+
+ #[Test]
+ #[TestDox('Correctly differentiates the input')]
+ #[DataProvider('differentiateProvider')]
+ public function testDifferentiate(NDArray $output, array $expected) : void
+ {
+ $derivatives = $this->activationFn->differentiate($output)->toArray();
+
+ static::assertEqualsWithDelta($expected, $derivatives, 1e-7);
+ }
+
+ #[Test]
+ #[TestDox('Output values always sum to 1')]
+ #[DataProvider('sumToOneProvider')]
+ public function testSumToOne(NDArray $input) : void
+ {
+ $activations = $this->activationFn->activate($input);
+
+ // Convert to array for easier processing
+ $activationsArray = $activations->toArray();
+
+ // Check that each row sums to 1
+ foreach ($activationsArray as $row) {
+ $sum = array_sum($row);
+ // Allow a small tolerance for floating-point rounding in the sum
+ static::assertEqualsWithDelta(1.0, $sum, 1e-7);
+ }
+ }
+
+ #[Test]
+ #[TestDox('Output values are always between 0 and 1')]
+ #[DataProvider('sumToOneProvider')]
+ public function testOutputRange(NDArray $input) : void
+ {
+ $activations = $this->activationFn->activate($input)->toArray();
+
+ foreach ($activations as $row) {
+ foreach ($row as $value) {
+ static::assertGreaterThanOrEqual(0.0, $value);
+ static::assertLessThanOrEqual(1.0, $value);
+ }
+ }
+ }
+}
diff --git a/tests/NeuralNet/ActivationFunctions/Softplus/SoftplusTest.php b/tests/NeuralNet/ActivationFunctions/Softplus/SoftplusTest.php
new file mode 100644
index 000000000..3c2a129f4
--- /dev/null
+++ b/tests/NeuralNet/ActivationFunctions/Softplus/SoftplusTest.php
@@ -0,0 +1,190 @@
+
+ */
+ public static function computeProvider() : Generator
+ {
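+ // Expected values follow softplus(x) = log(1 + exp(x)), applied element-wise.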
+ yield [
+ NumPower::array([
+ [2.0, 1.0, -0.5, 0.0, 20.0, -10.0],
+ ]),
+ [
+ [2.1269280, 1.3132617, 0.4740769, 0.6931472, 20.0000000, 0.0000454],
+ ],
+ ];
+
+ yield [
+ NumPower::array([
+ [-0.12, 0.31, -0.49],
+ [0.99, 0.08, -0.03],
+ [0.05, -0.52, 0.54],
+ ]),
+ [
+ [0.6349461, 0.8601119, 0.4778640],
+ [1.3059610, 0.7339470, 0.6782596],
+ [0.7184596, 0.4665731, 0.9991626],
+ ],
+ ];
+ }
+
+ /**
+ * @return Generator
+ */
+ public static function differentiateProvider() : Generator
+ {
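+ // The softplus derivative is the logistic sigmoid 1 / (1 + exp(-x)), applied element-wise.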
+ yield [
+ NumPower::array([
+ [2.0, 1.0, -0.5, 0.0, 20.0, -10.0],
+ ]),
+ [
+ [0.8807971, 0.7310586, 0.3775407, 0.5000000, 1.0000000, 0.0000454],
+ ],
+ ];
+
+ yield [
+ NumPower::array([
+ [-0.12, 0.31, -0.49],
+ [0.99, 0.08, -0.03],
+ [0.05, -0.52, 0.54],
+ ]),
+ [
+ [0.4700359, 0.5768852, 0.3798935],
+ [0.7290879, 0.5199893, 0.4925005],
+ [0.5124973, 0.3728522, 0.6318124],
+ ],
+ ];
+ }
+
+ /**
+ * @return Generator
+ */
+ public static function zeroRegionProvider() : Generator
+ {
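+ // At zero, softplus(0) = log(2) = 0.6931472 and its derivative sigmoid(0) = 0.5.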
+ // Test exactly at zero
+ yield [
+ NumPower::array([[0.0]]),
+ [[0.6931472]],
+ [[0.5000000]],
+ ];
+
+ // Test very small positive values
+ yield [
+ NumPower::array([[1e-15, 1e-10, 1e-7]]),
+ [[0.6931471, 0.6931471, 0.6931471]],
+ [[0.5000000, 0.5000000, 0.5000001]],
+ ];
+
+ // Test very small negative values
+ yield [
+ NumPower::array([[-1e-15, -1e-10, -1e-7]]),
+ [[0.6931472, 0.6931472, 0.6931471]],
+ [[0.5000000, 0.5000000, 0.5000000]],
+ ];
+ }
+
+ /**
+ * @return Generator
+ */
+ public static function extremeValuesProvider() : Generator
+ {
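+ // For large positive inputs softplus(x) ~ x and the derivative approaches 1; for large negative inputs both approach 0.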
+ // Test with large positive values
+ yield [
+ NumPower::array([[10.0, 20.0, 50.0]]),
+ [[10.0000457, 20.0000000, 50.0000000]],
+ [[0.9999546, 1.0000000, 1.0000000]],
+ ];
+
+ // Test with large negative values
+ yield [
+ NumPower::array([[-10.0, -20.0, -50.0]]),
+ [[0.0000454, 0.0000000, 0.0000000]],
+ [[0.0000454, 0.0000000, 0.0000000]],
+ ];
+ }
+
+ /**
+ * Set up the test case.
+ */
+ protected function setUp() : void
+ {
+ parent::setUp();
+
+ $this->activationFn = new Softplus();
+ }
+
+ #[Test]
+ #[TestDox('Can be cast to a string')]
+ public function testToString() : void
+ {
+ static::assertEquals('Soft Plus', (string) $this->activationFn);
+ }
+
+ #[Test]
+ #[TestDox('Correctly activates the input')]
+ #[DataProvider('computeProvider')]
+ public function testActivate(NDArray $input, array $expected) : void
+ {
+ $activations = $this->activationFn->activate($input)->toArray();
+
+ static::assertEqualsWithDelta($expected, $activations, 1e-7);
+ }
+
+ #[Test]
+ #[TestDox('Correctly differentiates the input')]
+ #[DataProvider('differentiateProvider')]
+ public function testDifferentiate(NDArray $input, array $expected) : void
+ {
+ $derivatives = $this->activationFn->differentiate($input)->toArray();
+
+ static::assertEqualsWithDelta($expected, $derivatives, 1e-7);
+ }
+
+ #[Test]
+ #[TestDox('Correctly handles values around zero')]
+ #[DataProvider('zeroRegionProvider')]
+ public function testZeroRegion(NDArray $input, array $expectedActivation, array $expectedDerivative) : void
+ {
+ $activations = $this->activationFn->activate($input)->toArray();
+ $derivatives = $this->activationFn->differentiate($input)->toArray();
+
+ static::assertEqualsWithDelta($expectedActivation, $activations, 1e-7);
+ static::assertEqualsWithDelta($expectedDerivative, $derivatives, 1e-7);
+ }
+
+ #[Test]
+ #[TestDox('Correctly handles extreme values')]
+ #[DataProvider('extremeValuesProvider')]
+ public function testExtremeValues(NDArray $input, array $expectedActivation, array $expectedDerivative) : void
+ {
+ $activations = $this->activationFn->activate($input)->toArray();
+ $derivatives = $this->activationFn->differentiate($input)->toArray();
+
+ static::assertEqualsWithDelta($expectedActivation, $activations, 1e-7);
+ static::assertEqualsWithDelta($expectedDerivative, $derivatives, 1e-7);
+ }
+}
diff --git a/tests/NeuralNet/ActivationFunctions/Softsign/SoftsignTest.php b/tests/NeuralNet/ActivationFunctions/Softsign/SoftsignTest.php
new file mode 100644
index 000000000..ab1d5d9d3
--- /dev/null
+++ b/tests/NeuralNet/ActivationFunctions/Softsign/SoftsignTest.php
@@ -0,0 +1,190 @@
+
+ */
+ public static function computeProvider() : Generator
+ {
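+ // Expected values follow softsign(x) = x / (1 + |x|), applied element-wise.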
+ yield [
+ NumPower::array([
+ [2.0, 1.0, -0.5, 0.0, 20.0, -10.0],
+ ]),
+ [
+ [0.6666667, 0.5000000, -0.3333333, 0.0000000, 0.9523810, -0.9090909],
+ ],
+ ];
+
+ yield [
+ NumPower::array([
+ [-0.12, 0.31, -0.49],
+ [0.99, 0.08, -0.03],
+ [0.05, -0.52, 0.54],
+ ]),
+ [
+ [-0.1071429, 0.2366412, -0.3288591],
+ [0.4974874, 0.0740741, -0.0291262],
+ [0.0476190, -0.3421053, 0.3506494],
+ ],
+ ];
+ }
+
+ /**
+ * @return Generator
+ */
+ public static function differentiateProvider() : Generator
+ {
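+ // The softsign derivative is 1 / (1 + |x|)^2, applied element-wise.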
+ yield [
+ NumPower::array([
+ [2.0, 1.0, -0.5, 0.0, 20.0, -10.0],
+ ]),
+ [
+ [0.1111111, 0.2500000, 0.4444444, 1.0000000, 0.0022676, 0.0082645],
+ ],
+ ];
+
+ yield [
+ NumPower::array([
+ [-0.12, 0.31, -0.49],
+ [0.99, 0.08, -0.03],
+ [0.05, -0.52, 0.54],
+ ]),
+ [
+ [0.7971938, 0.5827166, 0.4504301],
+ [0.2525188, 0.8573387, 0.9425959],
+ [0.9070296, 0.4328254, 0.4216562],
+ ],
+ ];
+ }
+
+ /**
+ * @return Generator
+ */
+ public static function zeroRegionProvider() : Generator
+ {
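+ // Near zero, softsign(x) ~ x and the derivative 1 / (1 + |x|)^2 ~ 1.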
+ // Test exactly at zero
+ yield [
+ NumPower::array([[0.0]]),
+ [[0.0]],
+ [[1.0]],
+ ];
+
+ // Test very small values
+ yield [
+ NumPower::array([[0.0000001, -0.0000001]]),
+ [[0.000000099999999, -0.000000099999999]],
+ [[0.9999998, 0.9999998]],
+ ];
+
+ // Test values around machine epsilon
+ yield [
+ NumPower::array([[PHP_FLOAT_EPSILON, -PHP_FLOAT_EPSILON]]),
+ [[PHP_FLOAT_EPSILON / (1 + PHP_FLOAT_EPSILON), -PHP_FLOAT_EPSILON / (1 + PHP_FLOAT_EPSILON)]],
+ [[1 / (1 + PHP_FLOAT_EPSILON) ** 2, 1 / (1 + PHP_FLOAT_EPSILON) ** 2]],
+ ];
+ }
+
+ /**
+ * @return Generator
+ */
+ public static function extremeValuesProvider() : Generator
+ {
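+ // As |x| grows, softsign(x) approaches +/-1 and the derivative decays towards 0.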
+ // Test with large positive values
+ yield [
+ NumPower::array([[10.0, 100.0, 1000.0]]),
+ [[0.9090909, 0.9900990, 0.9990010]],
+ [[0.00826446, 0.0000980, 0.0000009]],
+ ];
+
+ // Test with large negative values
+ yield [
+ NumPower::array([[-10.0, -100.0, -1000.0]]),
+ [[-0.9090909, -0.9900990, -0.9990010]],
+ [[0.00826446, 0.0000980, 0.0000009]],
+ ];
+ }
+
+ /**
+ * Set up the test case.
+ */
+ protected function setUp() : void
+ {
+ parent::setUp();
+
+ $this->activationFn = new Softsign();
+ }
+
+ #[Test]
+ #[TestDox('Can be cast to a string')]
+ public function testToString() : void
+ {
+ static::assertEquals('Softsign', (string) $this->activationFn);
+ }
+
+ #[Test]
+ #[TestDox('Correctly activates the input')]
+ #[DataProvider('computeProvider')]
+ public function testActivate(NDArray $input, array $expected) : void
+ {
+ $activations = $this->activationFn->activate($input)->toArray();
+
+ static::assertEqualsWithDelta($expected, $activations, 1e-7);
+ }
+
+ #[Test]
+ #[TestDox('Correctly differentiates the input')]
+ #[DataProvider('differentiateProvider')]
+ public function testDifferentiate(NDArray $input, array $expected) : void
+ {
+ $derivatives = $this->activationFn->differentiate($input)->toArray();
+
+ static::assertEqualsWithDelta($expected, $derivatives, 1e-7);
+ }
+
+ #[Test]
+ #[TestDox('Correctly handles values around zero')]
+ #[DataProvider('zeroRegionProvider')]
+ public function testZeroRegion(NDArray $input, array $expectedActivation, array $expectedDerivative) : void
+ {
+ $activations = $this->activationFn->activate($input)->toArray();
+ $derivatives = $this->activationFn->differentiate($input)->toArray();
+
+ static::assertEqualsWithDelta($expectedActivation, $activations, 1e-7);
+ static::assertEqualsWithDelta($expectedDerivative, $derivatives, 1e-7);
+ }
+
+ #[Test]
+ #[TestDox('Correctly handles extreme values')]
+ #[DataProvider('extremeValuesProvider')]
+ public function testExtremeValues(NDArray $input, array $expectedActivation, array $expectedDerivative) : void
+ {
+ $activations = $this->activationFn->activate($input)->toArray();
+ $derivatives = $this->activationFn->differentiate($input)->toArray();
+
+ static::assertEqualsWithDelta($expectedActivation, $activations, 1e-7);
+ static::assertEqualsWithDelta($expectedDerivative, $derivatives, 1e-7);
+ }
+}
diff --git a/tests/NeuralNet/ActivationFunctions/ThresholdedReLU/ThresholdedReLUTest.php b/tests/NeuralNet/ActivationFunctions/ThresholdedReLU/ThresholdedReLUTest.php
new file mode 100644
index 000000000..e6d89ee95
--- /dev/null
+++ b/tests/NeuralNet/ActivationFunctions/ThresholdedReLU/ThresholdedReLUTest.php
@@ -0,0 +1,240 @@
+
+ */
+ public static function computeProvider() : Generator
+ {
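+ // With the threshold of 1.0 used by this test, only inputs strictly greater than the threshold pass through; everything else (including 1.0 itself) maps to 0.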
+ yield [
+ NumPower::array([
+ [2.0, 1.0, 0.5, 0.0, -1.0, 1.5, -0.5],
+ ]),
+ [
+ [2.0, 0.0, 0.0, 0.0, 0.0, 1.5, 0.0],
+ ],
+ ];
+
+ yield [
+ NumPower::array([
+ [1.2, 0.31, 1.49],
+ [0.99, 1.08, 0.03],
+ [1.05, 0.52, 1.54],
+ ]),
+ [
+ [1.2, 0.0, 1.49],
+ [0.0, 1.08, 0.0],
+ [1.05, 0.0, 1.54],
+ ],
+ ];
+ }
+
+ /**
+ * @return Generator
+ */
+ public static function differentiateProvider() : Generator
+ {
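+ // The derivative is 1 where the input exceeds the threshold and 0 elsewhere.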
+ yield [
+ NumPower::array([
+ [2.0, 1.0, 0.5, 0.0, -1.0, 1.5, -0.5],
+ ]),
+ [
+ [1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0],
+ ],
+ ];
+
+ yield [
+ NumPower::array([
+ [1.2, 0.31, 1.49],
+ [0.99, 1.08, 0.03],
+ [1.05, 0.52, 1.54],
+ ]),
+ [
+ [1.0, 0.0, 1.0],
+ [0.0, 1.0, 0.0],
+ [1.0, 0.0, 1.0],
+ ],
+ ];
+ }
+
+ /**
+ * @return Generator
+ */
+ public static function thresholdValuesProvider() : Generator
+ {
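+ // Each case yields: threshold, input, expected activation, expected derivative.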
+ yield [
+ 0.5,
+ NumPower::array([
+ [2.0, 1.0, 0.5, 0.0, -1.0],
+ ]),
+ [
+ [2.0, 1.0, 0.0, 0.0, 0.0],
+ ],
+ [
+ [1.0, 1.0, 0.0, 0.0, 0.0],
+ ],
+ ];
+
+ yield [
+ 2.0,
+ NumPower::array([
+ [2.0, 1.0, 3.0, 0.0, 2.5],
+ ]),
+ [
+ [0.0, 0.0, 3.0, 0.0, 2.5],
+ ],
+ [
+ [0.0, 0.0, 1.0, 0.0, 1.0],
+ ],
+ ];
+ }
+
+ /**
+ * @return Generator
+ */
+ public static function zeroRegionProvider() : Generator
+ {
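+ // Inputs at or below the 1.0 threshold map to 0 in both the activation and the derivative; only 1.01 passes through.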
+ yield [
+ NumPower::array([[0.0]]),
+ [[0.0]],
+ [[0.0]],
+ ];
+
+ yield [
+ NumPower::array([[0.5, 0.9, 0.99, 1.0, 1.01]]),
+ [[0.0, 0.0, 0.0, 0.0, 1.01]],
+ [[0.0, 0.0, 0.0, 0.0, 1.0]],
+ ];
+ }
+
+ /**
+ * @return Generator
+ */
+ public static function extremeValuesProvider() : Generator
+ {
+ yield [
+ NumPower::array([[10.0, 100.0, 1000.0]]),
+ [[10.0, 100.0, 1000.0]],
+ [[1.0, 1.0, 1.0]],
+ ];
+
+ yield [
+ NumPower::array([[-10.0, -100.0, -1000.0]]),
+ [[0.0, 0.0, 0.0]],
+ [[0.0, 0.0, 0.0]],
+ ];
+ }
+
+ /**
+ * Set up the test case.
+ */
+ protected function setUp() : void
+ {
+ parent::setUp();
+
+ $this->activationFn = new ThresholdedReLU($this->threshold);
+ }
+
+ #[Test]
+ #[TestDox('Can be cast to a string')]
+ public function testToString() : void
+ {
+ static::assertEquals('Thresholded ReLU (threshold: 1)', (string) $this->activationFn);
+ }
+
+ #[Test]
+ #[TestDox('It throws an exception when threshold is negative')]
+ public function testInvalidThresholdException() : void
+ {
+ $this->expectException(InvalidThresholdException::class);
+
+ new ThresholdedReLU(-1.0);
+ }
+
+ #[Test]
+ #[TestDox('Correctly activates the input')]
+ #[DataProvider('computeProvider')]
+ public function testActivate(NDArray $input, array $expected) : void
+ {
+ $activations = $this->activationFn->activate($input)->toArray();
+
+ static::assertEqualsWithDelta($expected, $activations, 1e-7);
+ }
+
+ #[Test]
+ #[TestDox('Correctly differentiates the input')]
+ #[DataProvider('differentiateProvider')]
+ public function testDifferentiate(NDArray $input, array $expected) : void
+ {
+ $derivatives = $this->activationFn->differentiate($input)->toArray();
+
+ static::assertEqualsWithDelta($expected, $derivatives, 1e-7);
+ }
+
+ #[Test]
+ #[TestDox('Correctly handles different threshold values')]
+ #[DataProvider('thresholdValuesProvider')]
+ public function testThresholdValues(float $threshold, NDArray $input, array $expectedActivation, array $expectedDerivative) : void
+ {
+ $activationFn = new ThresholdedReLU($threshold);
+
+ $activations = $activationFn->activate($input)->toArray();
+ $derivatives = $activationFn->differentiate($input)->toArray();
+
+ static::assertEqualsWithDelta($expectedActivation, $activations, 1e-7);
+ static::assertEqualsWithDelta($expectedDerivative, $derivatives, 1e-7);
+ }
+
+ #[Test]
+ #[TestDox('Correctly handles values around zero')]
+ #[DataProvider('zeroRegionProvider')]
+ public function testZeroRegion(NDArray $input, array $expectedActivation, array $expectedDerivative) : void
+ {
+ $activations = $this->activationFn->activate($input)->toArray();
+ $derivatives = $this->activationFn->differentiate($input)->toArray();
+
+ static::assertEqualsWithDelta($expectedActivation, $activations, 1e-7);
+ static::assertEqualsWithDelta($expectedDerivative, $derivatives, 1e-7);
+ }
+
+ #[Test]
+ #[TestDox('Correctly handles extreme values')]
+ #[DataProvider('extremeValuesProvider')]
+ public function testExtremeValues(NDArray $input, array $expectedActivation, array $expectedDerivative) : void
+ {
+ $activations = $this->activationFn->activate($input)->toArray();
+ $derivatives = $this->activationFn->differentiate($input)->toArray();
+
+ static::assertEqualsWithDelta($expectedActivation, $activations, 1e-7);
+ static::assertEqualsWithDelta($expectedDerivative, $derivatives, 1e-7);
+ }
+}
diff --git a/tests/NeuralNet/FeedForwardTest.php b/tests/NeuralNet/FeedForwardTest.php
index d10148f79..c68ae47be 100644
--- a/tests/NeuralNet/FeedForwardTest.php
+++ b/tests/NeuralNet/FeedForwardTest.php
@@ -108,7 +108,7 @@ public function hidden() : void
/**
* @test
*/
- public function output() : void
+ public function networkOutput() : void
{
$this->assertInstanceOf(Output::class, $this->network->output());
}
diff --git a/tests/NeuralNet/Initializers/He/HeUniformTest.php b/tests/NeuralNet/Initializers/He/HeUniformTest.php
index 0a2ffa4e5..0582dfffe 100644
--- a/tests/NeuralNet/Initializers/He/HeUniformTest.php
+++ b/tests/NeuralNet/Initializers/He/HeUniformTest.php
@@ -37,7 +37,7 @@ public static function validShapeDimensionsProvider() : array
'fanIn less than fanOut' => [
'fanIn' => 3,
'fanOut' => 4,
- ]
+ ],
];
}
@@ -60,7 +60,7 @@ public static function heUniformDistributionValidationProvider() : array
'big numbers' => [
'fanIn' => 200,
'fanOut' => 300,
- ]
+ ],
];
}
diff --git a/tests/NeuralNet/Initializers/LeCun/LeCunNormalTest.php b/tests/NeuralNet/Initializers/LeCun/LeCunNormalTest.php
index 202ef6a15..d9aefe12b 100644
--- a/tests/NeuralNet/Initializers/LeCun/LeCunNormalTest.php
+++ b/tests/NeuralNet/Initializers/LeCun/LeCunNormalTest.php
@@ -37,7 +37,7 @@ public static function validShapeDimensionsProvider() : array
'fanIn less than fanOut' => [
'fanIn' => 3,
'fanOut' => 4,
- ]
+ ],
];
}
@@ -60,7 +60,7 @@ public static function leCunNormalDistributionValidationProvider() : array
'big numbers' => [
'fanIn' => 3000,
'fanOut' => 1000,
- ]
+ ],
];
}
diff --git a/tests/NeuralNet/Initializers/LeCun/LeCunUniformTest.php b/tests/NeuralNet/Initializers/LeCun/LeCunUniformTest.php
index 515aa49f2..2064edda8 100644
--- a/tests/NeuralNet/Initializers/LeCun/LeCunUniformTest.php
+++ b/tests/NeuralNet/Initializers/LeCun/LeCunUniformTest.php
@@ -37,7 +37,7 @@ public static function validShapeDimensionsProvider() : array
'fanIn less than fanOut' => [
'fanIn' => 3,
'fanOut' => 4,
- ]
+ ],
];
}
@@ -60,7 +60,7 @@ public static function leCunUniformDistributionValidationProvider() : array
'big numbers' => [
'fanIn' => 200,
'fanOut' => 300,
- ]
+ ],
];
}
diff --git a/tests/NeuralNet/Initializers/Normal/NormalTest.php b/tests/NeuralNet/Initializers/Normal/NormalTest.php
index 7c676ec97..9d6641966 100644
--- a/tests/NeuralNet/Initializers/Normal/NormalTest.php
+++ b/tests/NeuralNet/Initializers/Normal/NormalTest.php
@@ -32,7 +32,7 @@ public static function invalidStandardDeviationProvider() : array
],
'zero stdDev' => [
'stdDev' => 0,
- ]
+ ],
];
}
@@ -55,7 +55,7 @@ public static function validFanInFanOutCombinationsProvider() : array
'fanIn less than fanOut' => [
'fanIn' => 3,
'fanOut' => 4,
- ]
+ ],
];
}
@@ -70,7 +70,7 @@ public static function normalDistributionInitializationProvider() : array
'small matrix' => [
'fanIn' => 80,
'fanOut' => 50,
- 'stdDev' => 0.25
+ 'stdDev' => 0.25,
],
'medium matrix' => [
'fanIn' => 300,
@@ -80,8 +80,8 @@ public static function normalDistributionInitializationProvider() : array
'large matrix' => [
'fanIn' => 3000,
'fanOut' => 1000,
- 'stdDev' => 1.75
- ]
+ 'stdDev' => 1.75,
+ ],
];
}
diff --git a/tests/NeuralNet/Initializers/Normal/TruncatedNormalTest.php b/tests/NeuralNet/Initializers/Normal/TruncatedNormalTest.php
index 02ab90d72..82f4e88aa 100644
--- a/tests/NeuralNet/Initializers/Normal/TruncatedNormalTest.php
+++ b/tests/NeuralNet/Initializers/Normal/TruncatedNormalTest.php
@@ -32,7 +32,7 @@ public static function invalidStandardDeviationProvider() : array
],
'zero stdDev' => [
'stdDev' => 0,
- ]
+ ],
];
}
@@ -55,7 +55,7 @@ public static function validFanInFanOutCombinationsProvider() : array
'fanIn less than fanOut' => [
'fanIn' => 3,
'fanOut' => 4,
- ]
+ ],
];
}
@@ -70,7 +70,7 @@ public static function truncatedNormalDistributionInitializationProvider() : arr
'small numbers' => [
'fanIn' => 30,
'fanOut' => 10,
- 'stdDev' => 0.25
+ 'stdDev' => 0.25,
],
'medium numbers' => [
'fanIn' => 300,
@@ -80,8 +80,8 @@ public static function truncatedNormalDistributionInitializationProvider() : arr
'big numbers' => [
'fanIn' => 3000,
'fanOut' => 1000,
- 'stdDev' => 1.75
- ]
+ 'stdDev' => 1.75,
+ ],
];
}
diff --git a/tests/NeuralNet/Initializers/Uniform/UniformTest.php b/tests/NeuralNet/Initializers/Uniform/UniformTest.php
index 966c0042a..a22d70a47 100644
--- a/tests/NeuralNet/Initializers/Uniform/UniformTest.php
+++ b/tests/NeuralNet/Initializers/Uniform/UniformTest.php
@@ -32,7 +32,7 @@ public static function betaProvider() : array
],
'zero beta' => [
'beta' => 0,
- ]
+ ],
];
}
@@ -55,7 +55,7 @@ public static function validShapeDimensionsProvider() : array
'fanIn less than fanOut' => [
'fanIn' => 3,
'fanOut' => 4,
- ]
+ ],
];
}
@@ -81,7 +81,7 @@ public static function uniformDistributionValidationProvider() : array
'fanIn' => 200,
'fanOut' => 300,
'beta' => 0.3,
- ]
+ ],
];
}
diff --git a/tests/NeuralNet/Initializers/Xavier/XavierNormalTest.php b/tests/NeuralNet/Initializers/Xavier/XavierNormalTest.php
index f2d3f20d7..95ed3e6f0 100644
--- a/tests/NeuralNet/Initializers/Xavier/XavierNormalTest.php
+++ b/tests/NeuralNet/Initializers/Xavier/XavierNormalTest.php
@@ -37,7 +37,7 @@ public static function validShapeDimensionsProvider() : array
'fanIn less than fanOut' => [
'fanIn' => 3,
'fanOut' => 4,
- ]
+ ],
];
}
@@ -60,7 +60,7 @@ public static function xavierNormalDistributionValidationProvider() : array
'big numbers' => [
'fanIn' => 3000,
'fanOut' => 1000,
- ]
+ ],
];
}
diff --git a/tests/NeuralNet/Initializers/Xavier/XavierUniformTest.php b/tests/NeuralNet/Initializers/Xavier/XavierUniformTest.php
index 22479df2a..236d69b80 100644
--- a/tests/NeuralNet/Initializers/Xavier/XavierUniformTest.php
+++ b/tests/NeuralNet/Initializers/Xavier/XavierUniformTest.php
@@ -37,7 +37,7 @@ public static function validShapeDimensionsProvider() : array
'fanIn less than fanOut' => [
'fanIn' => 3,
'fanOut' => 4,
- ]
+ ],
];
}
@@ -60,7 +60,7 @@ public static function xavierUniformDistributionValidationProvider() : array
'big numbers' => [
'fanIn' => 200,
'fanOut' => 300,
- ]
+ ],
];
}
diff --git a/tests/Specifications/SamplesAreCompatibleWithTransformerTest.php b/tests/Specifications/SamplesAreCompatibleWithTransformerTest.php
index f89a2b50c..06ffcb300 100644
--- a/tests/Specifications/SamplesAreCompatibleWithTransformerTest.php
+++ b/tests/Specifications/SamplesAreCompatibleWithTransformerTest.php
@@ -37,7 +37,7 @@ public static function passesProvider() : Generator
yield [
SamplesAreCompatibleWithTransformer::with(
Unlabeled::quick([
- [1, 2, 3, 4, 5]
+ [1, 2, 3, 4, 5],
]),
new L1Normalizer()
),
diff --git a/tests/Tokenizers/SentenceTest.php b/tests/Tokenizers/SentenceTest.php
index f086706df..16b85a1a4 100644
--- a/tests/Tokenizers/SentenceTest.php
+++ b/tests/Tokenizers/SentenceTest.php
@@ -61,7 +61,7 @@ public static function tokenizeProvider() : Generator
'Porque cambian las cosas.',
'Empujan a la raza humana hacia adelante.',
'Y mientras que algunos pueden verlos como los locos, nosotros vemos genio.',
- 'Porque las personas que están lo suficientemente locas como para pensar que pueden cambiar el mundo, son las que lo hacen.'
+ 'Porque las personas que están lo suficientemente locas como para pensar que pueden cambiar el mundo, son las que lo hacen.',
],
];
@@ -166,7 +166,7 @@ public static function tokenizeProvider() : Generator
'آیا این برای من خوب خواهد بود؟',
'آیا توانستی به من کمک کنی؟',
'این کتاب بسیار جالب است!',
- '"با توجه به شرایطی که الان داریم، آیا میتوانیم به یک قرار ملاقات برسیم"؟'
+ '"با توجه به شرایطی که الان داریم، آیا میتوانیم به یک قرار ملاقات برسیم"؟',
],
];
diff --git a/tests/Transformers/MaxAbsoluteScalerTest.php b/tests/Transformers/MaxAbsoluteScalerTest.php
index 941d320de..08d9faf1a 100644
--- a/tests/Transformers/MaxAbsoluteScalerTest.php
+++ b/tests/Transformers/MaxAbsoluteScalerTest.php
@@ -83,7 +83,7 @@ public function testReverseTransformUnfitted() : void
public function testSkipsNonFinite() : void
{
$samples = Unlabeled::build(samples: [
- [0.0, 3000.0, NAN, -6.0], [1.0, 30.0, NAN, 0.001]
+ [0.0, 3000.0, NAN, -6.0], [1.0, 30.0, NAN, 0.001],
]);
$this->transformer->fit($samples);
$this->assertNan($samples[0][2]);
diff --git a/tests/Transformers/MinMaxNormalizerTest.php b/tests/Transformers/MinMaxNormalizerTest.php
index d427a3485..4e52109f1 100644
--- a/tests/Transformers/MinMaxNormalizerTest.php
+++ b/tests/Transformers/MinMaxNormalizerTest.php
@@ -79,7 +79,7 @@ public function testTransformUnfitted() : void
public function testSkipsNonFinite() : void
{
$samples = Unlabeled::build(samples: [
- [0.0, 3000.0, NAN, -6.0], [1.0, 30.0, NAN, 0.001]
+ [0.0, 3000.0, NAN, -6.0], [1.0, 30.0, NAN, 0.001],
]);
$this->transformer->fit($samples);
$this->assertNan($samples[0][2]);