diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 092650111..295cff531 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -8,14 +8,8 @@ jobs: runs-on: ${{ matrix.operating-system }} strategy: matrix: -<<<<<<< HEAD operating-system: [windows-latest, ubuntu-latest, macos-latest] php-versions: ['8.4'] -======= - operating-system: [ubuntu-latest, macos-latest] - php-versions: ['8.0', '8.1', '8.2'] ->>>>>>> master - steps: - name: Checkout uses: actions/checkout@v3 diff --git a/docs/images/activation-functions/hard-silu-derivative.png b/docs/images/activation-functions/hard-silu-derivative.png index 089d0eac5..4cb2a0320 100644 Binary files a/docs/images/activation-functions/hard-silu-derivative.png and b/docs/images/activation-functions/hard-silu-derivative.png differ diff --git a/docs/images/activation-functions/hard-silu.png b/docs/images/activation-functions/hard-silu.png index de9fd2a8b..db84ce40e 100644 Binary files a/docs/images/activation-functions/hard-silu.png and b/docs/images/activation-functions/hard-silu.png differ diff --git a/docs/transformers/regex-filter.md b/docs/transformers/regex-filter.md index 94540a0e2..8283b22a2 100644 --- a/docs/transformers/regex-filter.md +++ b/docs/transformers/regex-filter.md @@ -28,7 +28,6 @@ $transformer = new RegexFilter([ ``` ## Predefined Regex Patterns -<<<<<<< HEAD | Class Constant | Description | |---|----------------------------------------------------------------------------------------------------------| | EMAIL | A pattern to match any email address. | @@ -41,20 +40,6 @@ $transformer = new RegexFilter([ | EXTRA_WHITESPACE | Matches consecutively repeated whitespace characters. | | MENTION | A pattern that matches Twitter-style mentions (@example). | | HASHTAG | Matches Twitter-style hashtags (#example). | -======= -| Class Constant | Description | -|---|---| -| EMAIL | A pattern to match any email address. | -| URL | An alias for the default (Gruber 1) URL matching pattern. | -| GRUBER_1 | The original Gruber URL matching pattern. | -| GRUBER_2 | The improved Gruber URL matching pattern. | -| EXTRA_CHARACTERS | Matches consecutively repeated non word or number characters such as punctuation and special characters. | -| EXTRA_WORDS | Matches consecutively repeated words. | -| EXTRA_WHITESPACE | Matches consecutively repeated whitespace characters. | -| EMOJIS | A pattern to match unicode emojis. | -| MENTION | A pattern that matches Twitter-style mentions (@example). | -| HASHTAG | Matches Twitter-style hashtags (#example). | ->>>>>>> 2.4 ## Additional Methods This transformer does not have any additional methods. diff --git a/src/NeuralNet/ActivationFunctions/Base/Contracts/IBufferDerivative.php b/src/NeuralNet/ActivationFunctions/Base/Contracts/IBufferDerivative.php index 239cfc0f6..b29011bc8 100644 --- a/src/NeuralNet/ActivationFunctions/Base/Contracts/IBufferDerivative.php +++ b/src/NeuralNet/ActivationFunctions/Base/Contracts/IBufferDerivative.php @@ -13,14 +13,15 @@ * @package Rubix/ML * @author Andrew DalPino * @author Aleksei Nechaev + * @author Samuel Akopyan */ interface IBufferDerivative extends Derivative { /** * Calculate the derivative of the single parameter. 
* - * @param NDArray $x Input matrix + * @param NDArray $input Input matrix * @return NDArray Derivative matrix */ - public function differentiate(NDArray $x) : NDArray; + public function differentiate(NDArray $input) : NDArray; } diff --git a/src/NeuralNet/ActivationFunctions/Base/Contracts/IOBufferDerivative.php b/src/NeuralNet/ActivationFunctions/Base/Contracts/IOBufferDerivative.php index c77ff3e04..4263fcea9 100644 --- a/src/NeuralNet/ActivationFunctions/Base/Contracts/IOBufferDerivative.php +++ b/src/NeuralNet/ActivationFunctions/Base/Contracts/IOBufferDerivative.php @@ -2,6 +2,25 @@ namespace Rubix\ML\NeuralNet\ActivationFunctions\Base\Contracts; +use NDArray; + +/** + * Derivative based on input / output buffer + * + * @category Machine Learning + * @package Rubix/ML + * @author Andrew DalPino + * @author Aleksei Nechaev + * @author Samuel Akopyan + */ interface IOBufferDerivative { + /** + * Calculate the derivative of the activation. + * + * @param NDArray $input Input matrix + * @param NDArray $output Output matrix + * @return NDArray Derivative matrix + */ + public function differentiate(NDArray $input, NDArray $output) : NDArray; } diff --git a/src/NeuralNet/ActivationFunctions/Base/Contracts/OBufferDerivative.php b/src/NeuralNet/ActivationFunctions/Base/Contracts/OBufferDerivative.php index 0ae53eeb8..760dc4593 100644 --- a/src/NeuralNet/ActivationFunctions/Base/Contracts/OBufferDerivative.php +++ b/src/NeuralNet/ActivationFunctions/Base/Contracts/OBufferDerivative.php @@ -2,6 +2,24 @@ namespace Rubix\ML\NeuralNet\ActivationFunctions\Base\Contracts; +use NDArray; + +/** + * Derivative based on output buffer + * + * @category Machine Learning + * @package Rubix/ML + * @author Andrew DalPino + * @author Aleksei Nechaev + * @author Samuel Akopyan + */ interface OBufferDerivative { + /** + * Calculate the derivative of the activation. + * + * @param NDArray $output Output matrix + * @return NDArray Derivative matrix + */ + public function differentiate(NDArray $output) : NDArray; } diff --git a/src/NeuralNet/ActivationFunctions/ELU/ELU.php b/src/NeuralNet/ActivationFunctions/ELU/ELU.php index ff165dbc7..a0ca1fe36 100644 --- a/src/NeuralNet/ActivationFunctions/ELU/ELU.php +++ b/src/NeuralNet/ActivationFunctions/ELU/ELU.php @@ -7,7 +7,7 @@ use NumPower; use NDArray; use Rubix\ML\NeuralNet\ActivationFunctions\Base\Contracts\ActivationFunction; -use Rubix\ML\NeuralNet\ActivationFunctions\Base\Contracts\IBufferDerivative; +use Rubix\ML\NeuralNet\ActivationFunctions\Base\Contracts\IOBufferDerivative; use Rubix\ML\NeuralNet\ActivationFunctions\ELU\Exceptions\InvalidAlphaException; /** @@ -26,7 +26,7 @@ * @author Aleksei Nechaev * @author Samuel Akopyan */ -class ELU implements ActivationFunction, IBufferDerivative +class ELU implements ActivationFunction, IOBufferDerivative { /** * Class constructor. @@ -40,7 +40,7 @@ public function __construct(protected float $alpha = 1.0) { if ($this->alpha < 0.0) { throw new InvalidAlphaException( - message: "Alpha must be greater than 0, $alpha given." + message: "Alpha must be greater than 0, {$this->alpha} given." ); } } @@ -71,28 +71,29 @@ public function activate(NDArray $input) : NDArray } /** - * Calculate the derivative of the activation function. + * Calculate the derivative of the ELU activation function using input and output. 
* - * f'(x) = 1 if x > 0 - * f'(x) = α * e^x if x ≤ 0 + * f'(x) = 1 if x > 0 + * f'(x) = f(x) + α if x ≤ 0, where f(x) is the ELU output * - * @param NDArray $x Output matrix - * @return NDArray Derivative matrix + * @param NDArray $input Input matrix (used to determine x > 0 mask) + * @param NDArray $output Output from the ELU activation function + * @return NDArray Derivative matrix */ - public function differentiate(NDArray $x) : NDArray + public function differentiate(NDArray $input, NDArray $output): NDArray { // For x > 0: 1 - $positivePart = NumPower::greater($x, 0); + $positiveMask = NumPower::greater($input, 0); - // For x <= 0: α * e^x - $negativeMask = NumPower::lessEqual($x, 0); + // For x <= 0: output + α + $negativeMask = NumPower::lessEqual($input, 0); $negativePart = NumPower::multiply( - NumPower::multiply($negativeMask, NumPower::exp($x)), - $this->alpha + NumPower::add($output, $this->alpha), + $negativeMask ); // Combine both parts - return NumPower::add($positivePart, $negativePart); + return NumPower::add($positiveMask, $negativePart); } /** diff --git a/src/NeuralNet/ActivationFunctions/GELU/GELU.php b/src/NeuralNet/ActivationFunctions/GELU/GELU.php index 1667b1131..008219469 100644 --- a/src/NeuralNet/ActivationFunctions/GELU/GELU.php +++ b/src/NeuralNet/ActivationFunctions/GELU/GELU.php @@ -87,19 +87,19 @@ public function activate(NDArray $input) : NDArray * - β = 0.044715 * - sech^2(z) = (1/cosh(z))^2 * - * @param NDArray $x Output matrix + * @param NDArray $input Input matrix * @return NDArray Derivative matrix */ - public function differentiate(NDArray $x) : NDArray + public function differentiate(NDArray $input) : NDArray { // Calculate x^3 - $cubed = NumPower::pow($x, 3); + $cubed = NumPower::pow($input, 3); // Calculate inner term: ALPHA * (x + BETA * x^3) $innerTerm = NumPower::multiply( self::ALPHA, NumPower::add( - $x, + $input, NumPower::multiply(self::BETA, $cubed) ) ); @@ -122,7 +122,7 @@ public function differentiate(NDArray $x) : NDArray NumPower::multiply( NumPower::multiply( 0.5 * self::ALPHA, - $x + $input ), $sech2 ), @@ -130,7 +130,7 @@ public function differentiate(NDArray $x) : NDArray 1.0, NumPower::multiply( 3.0 * self::BETA, - NumPower::pow($x, 2) + NumPower::pow($input, 2) ) ) ); diff --git a/src/NeuralNet/ActivationFunctions/HardSiLU/HardSiLU.php b/src/NeuralNet/ActivationFunctions/HardSiLU/HardSiLU.php index 8fbc39e83..6ed54631a 100644 --- a/src/NeuralNet/ActivationFunctions/HardSiLU/HardSiLU.php +++ b/src/NeuralNet/ActivationFunctions/HardSiLU/HardSiLU.php @@ -63,19 +63,19 @@ public function activate(NDArray $input) : NDArray * * f'(x) = HardSigmoid(x) + x * HardSigmoid'(x) * - * @param NDArray $x Input matrix + * @param NDArray $input Input matrix * @return NDArray Derivative matrix */ - public function differentiate(NDArray $x) : NDArray + public function differentiate(NDArray $input) : NDArray { // Calculate HardSigmoid(x) - $hardSigmoid = $this->hardSigmoid->activate($x); + $hardSigmoid = $this->hardSigmoid->activate($input); // Calculate HardSigmoid'(x) - $hardSigmoidDerivative = $this->hardSigmoid->differentiate($x); + $hardSigmoidDerivative = $this->hardSigmoid->differentiate($input); // Calculate x * HardSigmoid'(x) - $xTimesDerivative = NumPower::multiply($x, $hardSigmoidDerivative); + $xTimesDerivative = NumPower::multiply($input, $hardSigmoidDerivative); // Calculate HardSigmoid(x) + x * HardSigmoid'(x) return NumPower::add($hardSigmoid, $xTimesDerivative); diff --git 
a/src/NeuralNet/ActivationFunctions/HardSigmoid/HardSigmoid.php b/src/NeuralNet/ActivationFunctions/HardSigmoid/HardSigmoid.php index 1c82ade35..40039d4cc 100644 --- a/src/NeuralNet/ActivationFunctions/HardSigmoid/HardSigmoid.php +++ b/src/NeuralNet/ActivationFunctions/HardSigmoid/HardSigmoid.php @@ -76,17 +76,17 @@ public function activate(NDArray $input) : NDArray /** * Calculate the derivative of the activation function. * - * f'(x) = 0.2 if -2.5 < x < 2.5 + * f'(x) = 0.2 if -2.5 <= x <= 2.5 * f'(x) = 0 otherwise * - * @param NDArray $x Input matrix + * @param NDArray $input Input matrix * @return NDArray Derivative matrix */ - public function differentiate(NDArray $x) : NDArray + public function differentiate(NDArray $input) : NDArray { - // For values in the linear region (-2.5 < x < 2.5): SLOPE - $inLinearRegion = NumPower::greater($x, self::LOWER_BOUND); - $inLinearRegion = NumPower::multiply($inLinearRegion, NumPower::less($x, self::UPPER_BOUND)); + // For values in the linear region (-2.5 <= x <= 2.5): SLOPE + $inLinearRegion = NumPower::greaterEqual($input, self::LOWER_BOUND); + $inLinearRegion = NumPower::multiply($inLinearRegion, NumPower::lessEqual($input, self::UPPER_BOUND)); $linearPart = NumPower::multiply($inLinearRegion, self::SLOPE); // For values outside the linear region: 0 diff --git a/src/NeuralNet/ActivationFunctions/HyperbolicTangent/HyperbolicTangent.php b/src/NeuralNet/ActivationFunctions/HyperbolicTangent/HyperbolicTangent.php index 830d0becd..629422c86 100644 --- a/src/NeuralNet/ActivationFunctions/HyperbolicTangent/HyperbolicTangent.php +++ b/src/NeuralNet/ActivationFunctions/HyperbolicTangent/HyperbolicTangent.php @@ -6,6 +6,8 @@ use NumPower; use NDArray; +use Rubix\ML\NeuralNet\ActivationFunctions\Base\Contracts\ActivationFunction; +use Rubix\ML\NeuralNet\ActivationFunctions\Base\Contracts\OBufferDerivative; /** * Hyperbolic Tangent @@ -16,11 +18,17 @@ * @category Machine Learning * @package Rubix/ML * @author Andrew DalPino + * @author Samuel Akopyan */ -class HyperbolicTangent implements ActivationFunction +class HyperbolicTangent implements ActivationFunction, OBufferDerivative { /** - * @inheritdoc + * Apply the Hyperbolic Tangent activation function to the input. + * + * f(x) = tanh(x) + * + * @param NDArray $input The input values + * @return NDArray The activated values */ public function activate(NDArray $input) : NDArray { @@ -28,19 +36,26 @@ public function activate(NDArray $input) : NDArray } /** - * @inheritdoc + * Calculate the derivative of the activation function. + * + * f'(x) = 1 - tanh^2(x) + * + * @param NDArray $output Output matrix + * @return NDArray Derivative matrix */ public function differentiate(NDArray $output) : NDArray { - return 1 - ($output ** 2); + // Calculate tanh^2(x) + $squared = NumPower::pow($output, 2); + + // Calculate 1 - tanh^2(x) + return NumPower::subtract(1.0, $squared); } /** * Return the string representation of the activation function. 
* - * @internal - * - * @return string + * @return string String representation */ public function __toString() : string { diff --git a/src/NeuralNet/ActivationFunctions/LeakyReLU/Exceptions/InvalidLeakageException.php b/src/NeuralNet/ActivationFunctions/LeakyReLU/Exceptions/InvalidLeakageException.php new file mode 100644 index 000000000..c8f081b85 --- /dev/null +++ b/src/NeuralNet/ActivationFunctions/LeakyReLU/Exceptions/InvalidLeakageException.php @@ -0,0 +1,14 @@ + */ -class LeakyReLU implements ActivationFunction +class LeakyReLU implements ActivationFunction, IBufferDerivative { /** * The amount of leakage as a ratio of the input value to allow to pass through when inactivated. @@ -30,73 +36,74 @@ class LeakyReLU implements ActivationFunction protected float $leakage; /** - * @param float $leakage - * @throws InvalidArgumentException + * Class constructor. + * + * @param float $leakage The amount of leakage as a ratio of the input value to allow to pass through when inactivated. + * @throws InvalidLeakageException */ public function __construct(float $leakage = 0.1) { - if ($leakage <= 0.0 or $leakage >= 1.0) { - throw new InvalidArgumentException('Leakage must be between' - . " 0 and 1, $leakage given."); + if ($leakage <= 0.0 || $leakage >= 1.0) { + throw new InvalidLeakageException( + message: "Leakage must be between 0 and 1, $leakage given." + ); } $this->leakage = $leakage; } /** - * Compute the activation. + * Apply the Leaky ReLU activation function to the input. * - * @internal + * f(x) = x if x > 0 + * f(x) = leakage * x if x ≤ 0 * - * @param Matrix $input - * @return Matrix + * @param NDArray $input The input values + * @return NDArray The activated values */ public function activate(NDArray $input) : NDArray { - $positive = NumPower::maximum($input, 0); + // Calculate positive part: x for x > 0 + $positiveActivation = NumPower::maximum($input, 0); - $negative = NumPower::minimum($input, 0); - $negativeLeakage = $negative * $this->leakage; + // Calculate negative part: leakage * x for x <= 0 + $negativeActivation = NumPower::multiply( + NumPower::minimum($input, 0), + $this->leakage + ); - return $positive + $negativeLeakage; + // Combine both parts + return NumPower::add($positiveActivation, $negativeActivation); } /** - * Calculate the derivative of the activation. + * Calculate the derivative of the activation function. * - * @internal + * f'(x) = 1 if x > 0 + * f'(x) = leakage if x ≤ 0 * - * @param Matrix $input - * @param Matrix $output - * @return Matrix + * @param NDArray $input Input matrix + * @return NDArray Derivative matrix */ - public function differentiate(Matrix $input, Matrix $output) : Matrix + public function differentiate(NDArray $input) : NDArray { - $positive = NumPower::greater($input, 0); - $negative = NumPower::lessEqual($input, 0) * $this->leakage; + // For x > 0: 1 + $positivePart = NumPower::greater($input, 0); - return $positive + $negative; - } + // For x <= 0: leakage + $negativePart = NumPower::multiply( + NumPower::lessEqual($input, 0), + $this->leakage + ); - /** - * @internal - * - * @param float $input - * @return float - */ - public function _differentiate(float $input) : float - { - return $input > 0.0 - ? 1.0 - : $this->leakage; + // Combine both parts + return NumPower::add($positivePart, $negativePart); } /** - * Return the string representation of the object. - * - * @internal + * Return the string representation of the activation function. 
* - * @return string + * @return string String representation */ public function __toString() : string { diff --git a/tests/NeuralNet/ActivationFunctions/ELU/ELUTest.php b/tests/NeuralNet/ActivationFunctions/ELU/ELUTest.php index 657bab1be..a375347d2 100644 --- a/tests/NeuralNet/ActivationFunctions/ELU/ELUTest.php +++ b/tests/NeuralNet/ActivationFunctions/ELU/ELUTest.php @@ -4,16 +4,16 @@ namespace Rubix\ML\Tests\NeuralNet\ActivationFunctions\ELU; +use Generator; +use NDArray; +use NumPower; use PHPUnit\Framework\Attributes\CoversClass; use PHPUnit\Framework\Attributes\DataProvider; use PHPUnit\Framework\Attributes\Group; use PHPUnit\Framework\Attributes\Test; use PHPUnit\Framework\Attributes\TestDox; -use NumPower; -use NDArray; -use Rubix\ML\NeuralNet\ActivationFunctions\ELU\ELU; use PHPUnit\Framework\TestCase; -use Generator; +use Rubix\ML\NeuralNet\ActivationFunctions\ELU\ELU; use Rubix\ML\NeuralNet\ActivationFunctions\ELU\Exceptions\InvalidAlphaException; #[Group('ActivationFunctions')] @@ -35,7 +35,7 @@ public static function computeProvider() : Generator [1.0, -0.5, 0.0, 20.0, -10.0], ]), [ - [1.0, -0.39346933364868164, 0.0, 20.0, -0.9999545812606812], + [1.0, -0.3934693, 0.0, 20.0, -0.9999545], ], ]; @@ -46,9 +46,9 @@ public static function computeProvider() : Generator [0.05, -0.52, 0.54], ]), [ - [-0.11307956278324127, 0.3100000023841858, -0.3873736262321472], - [0.9900000095367432, 0.07999999821186066, -0.029554465785622597], - [0.05000000074505806, -0.40547943115234375, 0.5400000214576721], + [-0.1130795, 0.3100000, -0.3873736], + [0.9900000, 0.0799999, -0.0295544], + [0.0500000, -0.4054794, 0.5400000], ], ]; } @@ -63,7 +63,7 @@ public static function differentiateProvider() : Generator [1.0, -0.5, 0.0, 20.0, -10.0], ]), [ - [1.0, 0.6065306663513184, 1.0, 1.0, 4.539993096841499E-5], + [1.0, 0.6065306, 1.0, 1.0, 0.0000454], ], ]; @@ -74,9 +74,9 @@ public static function differentiateProvider() : Generator [0.05, -0.52, 0.54], ]), [ - [0.8869204521179199, 1.0, 0.6126263737678528], - [1.0, 1.0, 0.9704455137252808], - [1.0, 0.5945205688476562, 1.0], + [0.8869204, 1.0, 0.6126263], + [1.0, 1.0, 0.9704455], + [1.0, 0.5945205, 1.0], ], ]; } @@ -124,16 +124,17 @@ public function testActivate(NDArray $input, array $expected) : void { $activations = $this->activationFn->activate($input)->toArray(); - static::assertEqualsWithDelta($expected, $activations, 1e-16); + static::assertEqualsWithDelta($expected, $activations, 1e-7); } #[Test] - #[TestDox('Correctly differentiates the input')] + #[TestDox('Correctly differentiates the input using buffered output')] #[DataProvider('differentiateProvider')] public function testDifferentiate(NDArray $input, array $expected) : void { - $derivatives = $this->activationFn->differentiate($input)->toArray(); + $output = $this->activationFn->activate($input); + $derivatives = $this->activationFn->differentiate($input, $output)->toArray(); - static::assertEqualsWithDelta($expected, $derivatives, 1e-16); + static::assertEqualsWithDelta($expected, $derivatives, 1e-7); } } diff --git a/tests/NeuralNet/ActivationFunctions/GELU/GELUTest.php b/tests/NeuralNet/ActivationFunctions/GELU/GELUTest.php index 6d3177015..2a4e35b14 100644 --- a/tests/NeuralNet/ActivationFunctions/GELU/GELUTest.php +++ b/tests/NeuralNet/ActivationFunctions/GELU/GELUTest.php @@ -4,16 +4,16 @@ namespace Rubix\ML\Tests\NeuralNet\ActivationFunctions\GELU; +use Generator; +use NDArray; +use NumPower; use PHPUnit\Framework\Attributes\CoversClass; use PHPUnit\Framework\Attributes\DataProvider; 
use PHPUnit\Framework\Attributes\Group; use PHPUnit\Framework\Attributes\Test; use PHPUnit\Framework\Attributes\TestDox; -use NumPower; -use NDArray; -use Rubix\ML\NeuralNet\ActivationFunctions\GELU\GELU; use PHPUnit\Framework\TestCase; -use Generator; +use Rubix\ML\NeuralNet\ActivationFunctions\GELU\GELU; #[Group('ActivationFunctions')] #[CoversClass(GELU::class)] @@ -34,7 +34,7 @@ public static function computeProvider() : Generator [2, 1.0, -0.5, 0.0, 20.0, -10.0], ]), [ - [1.9545977115631104, 0.8411920070648193, -0.1542859971523285, 0.0, 20.0, 0.0], + [1.9545977, 0.8411920, -0.1542859, 0.0, 20.0, 0.0], ], ]; @@ -45,9 +45,9 @@ public static function computeProvider() : Generator [0.05, -0.52, 0.54], ]), [ - [-0.054269056767225266, 0.19273021817207336, -0.15292881429195404], - [0.830374538898468, 0.04255049675703049, -0.014641005545854568], - [0.025996938347816467, -0.15681639313697815, 0.38089409470558167], + [-0.0542690, 0.1927302, -0.1529288], + [0.8303745, 0.0425504, -0.0146410], + [0.0259969, -0.1568163, 0.3808940], ], ]; } @@ -62,7 +62,7 @@ public static function differentiateProvider() : Generator [1.0, -0.5, 0.0, 20.0, -10.0], ]), [ - [1.0829640626907349, 0.1326301246881485, 0.5, 1.0, -0.0], + [1.0829640, 0.1326301, 0.5, 1.0, -0.0], ], ]; @@ -73,9 +73,9 @@ public static function differentiateProvider() : Generator [0.05, -0.52, 0.54], ]), [ - [0.4047141969203949, 0.7395542860031128, 0.13881805539131165], - [1.080506443977356, 0.5636941194534302, 0.47607067227363586], - [0.5398608446121216, 0.12045331299304962, 0.8914529085159302], + [0.4047141, 0.7395542, 0.1388180], + [1.0805064, 0.5636941, 0.4760706], + [0.5398608, 0.1204533, 0.8914529], ], ]; } @@ -104,7 +104,7 @@ public function testActivate(NDArray $input, array $expected) : void { $activations = $this->activationFn->activate($input)->toArray(); - static::assertEqualsWithDelta($expected, $activations, 1e-16); + static::assertEqualsWithDelta($expected, $activations, 1e-7); } #[Test] @@ -114,6 +114,6 @@ public function testDifferentiate(NDArray $input, array $expected) : void { $derivatives = $this->activationFn->differentiate($input)->toArray(); - static::assertEqualsWithDelta($expected, $derivatives, 1e-16); + static::assertEqualsWithDelta($expected, $derivatives, 1e-7); } } diff --git a/tests/NeuralNet/ActivationFunctions/HardSiLU/HardSiLUTest.php b/tests/NeuralNet/ActivationFunctions/HardSiLU/HardSiLUTest.php index 22ef0bdea..5eee0a332 100644 --- a/tests/NeuralNet/ActivationFunctions/HardSiLU/HardSiLUTest.php +++ b/tests/NeuralNet/ActivationFunctions/HardSiLU/HardSiLUTest.php @@ -4,16 +4,16 @@ namespace Rubix\ML\Tests\NeuralNet\ActivationFunctions\HardSiLU; +use Generator; +use NDArray; +use NumPower; use PHPUnit\Framework\Attributes\CoversClass; use PHPUnit\Framework\Attributes\DataProvider; use PHPUnit\Framework\Attributes\Group; use PHPUnit\Framework\Attributes\Test; use PHPUnit\Framework\Attributes\TestDox; -use NumPower; -use NDArray; -use Rubix\ML\NeuralNet\ActivationFunctions\HardSiLU\HardSiLU; use PHPUnit\Framework\TestCase; -use Generator; +use Rubix\ML\NeuralNet\ActivationFunctions\HardSiLU\HardSiLU; #[Group('ActivationFunctions')] #[CoversClass(HardSiLU::class)] @@ -34,7 +34,7 @@ public static function computeProvider() : Generator [2.5, 2.0, 1.0, -0.5, 0.0, 20.0, -2.5, -10.0], ]), [ - [2.5, 1.7999999523162842, 0.699999988079071, -0.20000000298023224, 0.0, 20.0, 0.0, 0.0], + [2.5, 1.7999999, 0.6999999, -0.2000000, 0.0, 20.0, 0.0, 0.0], ], ]; @@ -45,9 +45,9 @@ public static function computeProvider() : Generator 
[0.05, -0.52, 0.54], ]), [ - [-0.05711999908089638, 0.1742199957370758, -0.19698001444339752], - [0.6910200119018555, 0.04127999767661095, -0.014819999225437641], - [0.025499999523162842, -0.2059199959039688, 0.3283199965953827], + [-0.0571199, 0.1742199, -0.1969800], + [0.6910200, 0.0412799, -0.0148199], + [0.0254999, -0.2059199, 0.3283199], ], ]; @@ -66,9 +66,9 @@ public static function computeProvider() : Generator // At x = 2.5, HardSigmoid(x) = 1, so HardSiLU(2.5) = 2.5 * 1 = 2.5 [0.0, 2.5], // Just inside boundaries - [-0.0004997340147383511, 2.498500347137451], + [-0.0004997, 2.4985003], // Just outside boundaries - [0.0, 2.500999927520752], + [0.0, 2.5009999], ], ]; @@ -76,12 +76,12 @@ public static function computeProvider() : Generator yield [ NumPower::array([ // Zero and very small values around zero - [0.0, 0.0000001, -0.0000001, 0.0000000001, -0.0000000001], + [0.0, 0.000001, -0.0000001, 0.0000000001, -0.0000000001], ]), [ // HardSiLU(0) = 0 * 0.5 = 0 // For very small values, HardSigmoid(x) ≈ 0.5, so HardSiLU(x) ≈ x * 0.5 - [0.0, 0.00000005000000058430487, -0.00000004999999703159119, 0.0000000000500000006675716, -0.0000000000500000006675716], + [0.0, 0.0000005, -0.0000000, 0.0000000, -0.0000000], ], ]; } @@ -96,7 +96,7 @@ public static function differentiateProvider() : Generator [2.5, 1.0, -0.5, 0.0, 20.0, -10.0], ]), [ - [1.0, 0.8999999761581421, 0.30000001192092896, 0.5, 1.0, 0.0], + [1.5, 0.8999999, 0.30000001192092896, 0.5, 1.0, 0.0], ], ]; @@ -107,9 +107,9 @@ public static function differentiateProvider() : Generator [0.05, -0.52, 0.54], ]), [ - [0.45200002193450928, 0.6239999532699585, 0.30400002002716064], - [0.8960000276565552, 0.531999945640564, 0.48799997568130493], - [0.5199999809265137, 0.2919999957084656, 0.715999960899353], + [0.4520000, 0.6239999, 0.3040000], + [0.8960000, 0.5319999, 0.4879999], + [0.5199999, 0.2919999, 0.7159999], ], ]; @@ -125,9 +125,9 @@ public static function differentiateProvider() : Generator ]), [ // At boundaries: derivative is 0 at x = -2.5 and 1 at x = 2.5 - [0.0, 1.0], + [-0.5, 1.5], // Just inside boundaries - [-0.49960005283355713, 1.4996000528335571], + [-0.4996000, 1.4996000], // Just outside boundaries [0.0, 1.0], ], @@ -137,12 +137,12 @@ public static function differentiateProvider() : Generator yield [ NumPower::array([ // Zero and very small values around zero - [0.0, 0.0000001, -0.0000001, 0.0000000001, -0.0000000001], + [0.0, -0.00001, 0.000001, -0.0000001, 0.00000001, -0.000000001], ]), [ // At x = 0, derivative is 0.5 // For very small values, derivative is close to 0.5 - [0.5, 0.5, 0.4999999403953552, 0.5, 0.5], + [0.5, 0.4999960, 0.5000003, 0.4999999, 0.5, 0.5], ], ]; } @@ -171,7 +171,7 @@ public function testActivate(NDArray $input, array $expected) : void { $activations = $this->activationFn->activate($input)->toArray(); - static::assertEqualsWithDelta($expected, $activations, 1e-16); + static::assertEqualsWithDelta($expected, $activations, 1e-7); } #[Test] @@ -181,7 +181,6 @@ public function testDifferentiate(NDArray $input, array $expected) : void { $derivatives = $this->activationFn->differentiate($input)->toArray(); - static::assertEqualsWithDelta($expected, $derivatives, 1e-16); - + static::assertEqualsWithDelta($expected, $derivatives, 1e-7); } } diff --git a/tests/NeuralNet/ActivationFunctions/HardSigmoid/HardSigmoidTest.php b/tests/NeuralNet/ActivationFunctions/HardSigmoid/HardSigmoidTest.php index 5320e796b..a39bf1d86 100644 --- a/tests/NeuralNet/ActivationFunctions/HardSigmoid/HardSigmoidTest.php +++ 
b/tests/NeuralNet/ActivationFunctions/HardSigmoid/HardSigmoidTest.php @@ -4,16 +4,16 @@ namespace Rubix\ML\Tests\NeuralNet\ActivationFunctions\HardSigmoid; +use Generator; +use NDArray; +use NumPower; use PHPUnit\Framework\Attributes\CoversClass; use PHPUnit\Framework\Attributes\DataProvider; use PHPUnit\Framework\Attributes\Group; use PHPUnit\Framework\Attributes\Test; use PHPUnit\Framework\Attributes\TestDox; -use NumPower; -use NDArray; -use Rubix\ML\NeuralNet\ActivationFunctions\HardSigmoid\HardSigmoid; use PHPUnit\Framework\TestCase; -use Generator; +use Rubix\ML\NeuralNet\ActivationFunctions\HardSigmoid\HardSigmoid; #[Group('ActivationFunctions')] #[CoversClass(HardSigmoid::class)] @@ -34,7 +34,7 @@ public static function computeProvider() : Generator [2.5, 2.4, 2.0, 1.0, -0.5, 0.0, 20.0, -2.5, -2.4, -10.0], ]), [ - [1.0, 0.9800000190734863, 0.8999999761581421, 0.699999988079071, 0.4000000059604645, 0.5, 1.0, 0.0, 0.019999980926513672, 0.0], + [1.0, 0.9800000, 0.8999999, 0.6999999, 0.4000000, 0.5, 1.0, 0.0, 0.0199999, 0.0], ], ]; @@ -45,9 +45,9 @@ public static function computeProvider() : Generator [0.05, -0.52, 0.54], ]), [ - [0.47600001096725464, 0.5619999766349792, 0.4020000100135803], - [0.6980000138282776, 0.515999972820282, 0.49399998784065247], - [0.5099999904632568, 0.3959999978542328, 0.6079999804496765], + [0.4760000, 0.5619999, 0.4020000], + [0.6980000, 0.5159999, 0.4939999], + [0.5099999, 0.3959999, 0.6079999], ], ]; } @@ -59,10 +59,10 @@ public static function differentiateProvider() : Generator { yield [ NumPower::array([ - [2.5, 1.0, -0.5, 0.0, 20.0, -10.0], + [2.5, 1.0, -0.5, 0.0, 20.0, -2.5, -10.0], ]), [ - [0.0, 0.20000000298023224, 0.20000000298023224, 0.20000000298023224, 0.0, 0.0], + [0.2000000, 0.2000000, 0.2000000, 0.2000000, 0.0, 0.2000000, 0.0], ], ]; @@ -73,9 +73,9 @@ public static function differentiateProvider() : Generator [0.05, -0.52, 0.54], ]), [ - [0.20000000298023224, 0.20000000298023224, 0.20000000298023224], - [0.0, 0.20000000298023224, 0.20000000298023224], - [0.20000000298023224, 0.20000000298023224, 0.20000000298023224], + [0.2000000, 0.2000000, 0.2000000], + [0.0, 0.2000000, 0.2000000], + [0.2000000, 0.2000000, 0.2000000], ], ]; } @@ -104,7 +104,7 @@ public function testActivate(NDArray $input, array $expected) : void { $activations = $this->activationFn->activate($input)->toArray(); - static::assertEqualsWithDelta($expected, $activations, 1e-16); + static::assertEqualsWithDelta($expected, $activations, 1e-7); } #[Test] @@ -114,6 +114,6 @@ public function testDifferentiate(NDArray $input, array $expected) : void { $derivatives = $this->activationFn->differentiate($input)->toArray(); - static::assertEqualsWithDelta($expected, $derivatives, 1e-16); + static::assertEqualsWithDelta($expected, $derivatives, 1e-7); } } diff --git a/tests/NeuralNet/ActivationFunctions/HyperbolicTangent/HyperbolicTangentTest.php b/tests/NeuralNet/ActivationFunctions/HyperbolicTangent/HyperbolicTangentTest.php new file mode 100644 index 000000000..948d1c297 --- /dev/null +++ b/tests/NeuralNet/ActivationFunctions/HyperbolicTangent/HyperbolicTangentTest.php @@ -0,0 +1,119 @@ + + */ + public static function computeProvider() : Generator + { + yield [ + NumPower::array([ + [9.0, 2.5, 2.0, 1.0, -0.5, 0.0, 20.0, -10.0], + ]), + [ + [0.9999999, 0.9866142, 0.9640275, 0.7615941, -0.4621171, 0.0, 1.0, -1.0], + ], + ]; + + yield [ + NumPower::array([ + [-0.12, 0.31, -0.49], + [0.99, 0.08, -0.03], + [0.05, -0.52, 0.54], + ]), + [ + [-0.1194273, 0.3004370, -0.4542164], + 
[0.7573622, 0.0798297, -0.0299910], + [0.0499583, -0.4776999, 0.4929879], + ], + ]; + } + + /** + * @return Generator + */ + public static function differentiateProvider() : Generator + { + yield [ + NumPower::array([ + [0.9640275, 0.7615941, -0.4621171, 0.0, 1.0, -1.0], + ]), + [ + [0.0706509, 0.4199743, 0.7864477, 1.0, 0.0, 0.0], + ], + ]; + + yield [ + NumPower::array([ + [-0.1194273, 0.3004370, -0.4542164], + [0.7573623, 0.0797883, -0.0299912], + [0.0499583, -0.4778087, 0.4930591], + ]), + [ + [0.9857371, 0.9097375, 0.7936874], + [0.4264023, 0.9936338, 0.9991005], + [0.9975042, 0.7716988, 0.7568927], + ], + ]; + } + + /** + * Set up the test case. + */ + protected function setUp() : void + { + parent::setUp(); + + $this->activationFn = new HyperbolicTangent(); + } + + #[Test] + #[TestDox('Can be cast to a string')] + public function testToString() : void + { + static::assertEquals('Hyperbolic Tangent', (string) $this->activationFn); + } + + #[Test] + #[TestDox('Correctly activates the input')] + #[DataProvider('computeProvider')] + public function testActivate(NDArray $input, array $expected) : void + { + $activations = $this->activationFn->activate($input)->toArray(); + + static::assertEqualsWithDelta($expected, $activations, 1e-7); + } + + #[Test] + #[TestDox('Correctly differentiates the output')] + #[DataProvider('differentiateProvider')] + public function testDifferentiate(NDArray $output, array $expected) : void + { + $derivatives = $this->activationFn->differentiate($output)->toArray(); + + static::assertEqualsWithDelta($expected, $derivatives, 1e-7); + } +} diff --git a/tests/NeuralNet/ActivationFunctions/LeakyReLU/LeakyReLUTest.php b/tests/NeuralNet/ActivationFunctions/LeakyReLU/LeakyReLUTest.php new file mode 100644 index 000000000..179ea5bd9 --- /dev/null +++ b/tests/NeuralNet/ActivationFunctions/LeakyReLU/LeakyReLUTest.php @@ -0,0 +1,186 @@ + + */ + public static function computeProvider() : Generator + { + yield [ + NumPower::array([ + [2.0, 1.0, -0.5, 0.0, 20.0, -10.0], + ]), + [ + [2.0, 1.0, -0.0049999, 0.0, 20.0, -0.0999999], + ], + ]; + + yield [ + NumPower::array([ + [-0.12, 0.31, -0.49], + [0.99, 0.08, -0.03], + [0.05, -0.52, 0.54], + ]), + [ + [-0.0011999, 0.3100000, -0.0049000], + [0.9900000, 0.0799999, -0.0002999], + [0.0500000, -0.0051999, 0.5400000], + ], + ]; + } + + /** + * @return Generator + */ + public static function differentiateProvider() : Generator + { + yield [ + NumPower::array([ + [4.0, 2.0, 1.0, -0.5, 0.0, 20.0, -10.0], + ]), + [ + [1.0, 1.0, 1.0, 0.0099999, 0.0099999, 1.0, 0.0099999], + ], + ]; + + yield [ + NumPower::array([ + [-0.12, 0.31, -0.49], + [0.99, 0.08, -0.03], + [0.05, -0.52, 0.54], + ]), + [ + [0.0099999, 1.0, 0.0099999], + [1.0, 1.0, 0.0099999], + [1.0, 0.0099999, 1.0], + ], + ]; + } + + /** + * @return Generator + */ + public static function boundaryProvider() : Generator + { + // Test very large positive values (should be equal to input) + yield [ + NumPower::array([ + [100.0, 500.0, 1000.0], + ]), + [ + [100.0, 500.0, 1000.0], + ], + ]; + + // Test very large negative values (should be input * leakage) + yield [ + NumPower::array([ + [-100.0, -500.0, -1000.0], + ]), + [ + [-1.0, -5.0, -10.0], + ], + ]; + + // Test values close to zero + yield [ + NumPower::array([ + [0.001, -0.001, 0.0001, -0.0001], + ]), + [ + + [0.0010000, -0.0000100, 0.0000999, -0.0000009], + ], + ]; + } + + /** + * Set up the test case. 
+ */ + protected function setUp() : void + { + parent::setUp(); + + $this->activationFn = new LeakyReLU(0.01); + } + + #[Test] + #[TestDox('Can be constructed with valid leakage parameter')] + public function testConstructorWithValidLeakage() : void + { + $activationFn = new LeakyReLU(0.2); + + static::assertInstanceOf(LeakyReLU::class, $activationFn); + static::assertEquals('Leaky ReLU (leakage: 0.2)', (string) $activationFn); + } + + #[Test] + #[TestDox('Throws exception when constructed with invalid leakage parameter')] + public function testConstructorWithInvalidLeakage() : void + { + $this->expectException(InvalidLeakageException::class); + + new LeakyReLU(1.5); + } + + #[Test] + #[TestDox('Can be cast to a string')] + public function testToString() : void + { + static::assertEquals('Leaky ReLU (leakage: 0.01)', (string) $this->activationFn); + } + + #[Test] + #[TestDox('Correctly activates the input')] + #[DataProvider('computeProvider')] + public function testActivate(NDArray $input, array $expected) : void + { + $activations = $this->activationFn->activate($input)->toArray(); + + static::assertEqualsWithDelta($expected, $activations, 1e-7); + } + + #[Test] + #[TestDox('Correctly handles boundary values during activation')] + #[DataProvider('boundaryProvider')] + public function testBoundaryActivate(NDArray $input, array $expected) : void + { + $activations = $this->activationFn->activate($input)->toArray(); + + static::assertEqualsWithDelta($expected, $activations, 1e-7); + } + + #[Test] + #[TestDox('Correctly differentiates the input')] + #[DataProvider('differentiateProvider')] + public function testDifferentiate(NDArray $input, array $expected) : void + { + $derivatives = $this->activationFn->differentiate($input)->toArray(); + + static::assertEqualsWithDelta($expected, $derivatives, 1e-7); + } +}
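
Taken together, the three contracts above split derivatives by which buffers the backward pass consumes. The stand-alone script below is a minimal illustration, not part of the diff, assuming the NumPower extension and the classes touched here are autoloadable; the commented values mirror the data providers in the new tests.

<?php

use Rubix\ML\NeuralNet\ActivationFunctions\ELU\ELU;
use Rubix\ML\NeuralNet\ActivationFunctions\HyperbolicTangent\HyperbolicTangent;
use Rubix\ML\NeuralNet\ActivationFunctions\LeakyReLU\LeakyReLU;

$input = NumPower::array([[1.0, -0.5, 0.0, -10.0]]);

// IBufferDerivative: LeakyReLU differentiates from the input alone,
// f'(x) = 1 for x > 0 and leakage for x <= 0.
$leakyReLU = new LeakyReLU(0.01);
$dLeaky = $leakyReLU->differentiate($input);        // [1.0, ~0.01, ~0.01, ~0.01]

// IOBufferDerivative: ELU reuses the forward-pass output, since
// f'(x) = f(x) + alpha for x <= 0 and 1 for x > 0, so exp(x) is not recomputed.
$elu = new ELU(1.0);
$output = $elu->activate($input);                   // [1.0, -0.3934693, 0.0, -0.9999546]
$dElu = $elu->differentiate($input, $output);       // [1.0, 0.6065306, 1.0, ~0.0000454]

// OBufferDerivative: tanh needs only its own output, since f'(x) = 1 - tanh^2(x).
$tanh = new HyperbolicTangent();
$activated = $tanh->activate($input);               // [0.7615941, -0.4621171, 0.0, -1.0]
$dTanh = $tanh->differentiate($activated);          // [0.4199743, 0.7864477, 1.0, ~0.0]

print_r($dElu->toArray());
print_r($dTanh->toArray());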