From 9b80d1f943a1835d075f5ddae69c169bab4e503e Mon Sep 17 00:00:00 2001
From: kausthub-kannan
Date: Mon, 14 Aug 2023 23:23:13 +0530
Subject: [PATCH] Added Leaky ReLU activation function

---
 .../activation_functions/leaky_rectified_linear_unit.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/neural_network/activation_functions/leaky_rectified_linear_unit.py b/neural_network/activation_functions/leaky_rectified_linear_unit.py
index 1e99fed4e..2ccfc20ee 100644
--- a/neural_network/activation_functions/leaky_rectified_linear_unit.py
+++ b/neural_network/activation_functions/leaky_rectified_linear_unit.py
@@ -20,10 +20,10 @@ def leaky_rectified_linear_unit(vector: np.ndarray, alpha: float) -> np.ndarray:
     """
     Implements the LeakyReLU activation function.
 
     Parameters:
-        vector: the array containing input of elu activation
+        vector: the array containing input of leakyReLu activation
         alpha: hyperparameter
 
     return:
-    leaky_relu (np.array): The input numpy array after applying leaky-relu.
+    leaky_relu (np.array): The input numpy array after applying leakyReLu.
     Formula : f(x) = x if x > 0 else f(x) = alpha * x
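
For reference, a minimal sketch of the function this docstring documents, assuming the body simply applies the stated formula f(x) = x if x > 0 else alpha * x element-wise via np.where (the implementation itself lies outside this hunk, so the body and example values below are an assumption, not the file's actual code):

    import numpy as np


    def leaky_rectified_linear_unit(vector: np.ndarray, alpha: float) -> np.ndarray:
        # Element-wise LeakyReLU: keep positive inputs, scale negative inputs by alpha.
        return np.where(vector > 0, vector, alpha * vector)


    # Hypothetical usage:
    # leaky_rectified_linear_unit(np.array([2.3, 0.6, -2.0, -3.8]), alpha=0.3)
    # -> [2.3, 0.6, -0.6, -1.14]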