From 1f84b1c07ece37a841e852779362e6d89bd40151 Mon Sep 17 00:00:00 2001
From: kausthub-kannan
Date: Thu, 17 Aug 2023 00:48:49 +0530
Subject: [PATCH] Formatting and spelling fixes done

---
 .../leaky_rectified_linear_unit.py           | 16 ++++------------
 1 file changed, 4 insertions(+), 12 deletions(-)

diff --git a/neural_network/activation_functions/leaky_rectified_linear_unit.py b/neural_network/activation_functions/leaky_rectified_linear_unit.py
index 5226c86ea..019086fd9 100644
--- a/neural_network/activation_functions/leaky_rectified_linear_unit.py
+++ b/neural_network/activation_functions/leaky_rectified_linear_unit.py
@@ -1,16 +1,9 @@
 """
-Leaky Rectified Linear Unit (LeakyReLU)
+Leaky Rectified Linear Unit (Leaky ReLU)
 
-Input: vector (type: np.ndarray) , alpha (type: float)
-Output: vector (type: np.ndarray)
-
-UseCase: LeakyReLU solves the issue of dead neurons or vanishing gradient problem.
-Refer the below link for more information:
+Use Case: Leaky ReLU addresses the problem of the vanishing gradient.
+For more detailed information, you can refer to the following link:
 https://en.wikipedia.org/wiki/Rectifier_(neural_networks)#Leaky_ReLU
-
-Applications:
-Generative Adversarial Networks (GANs)
-Object Detection and Image Segmentation
 """
 
 import numpy as np
@@ -33,8 +26,7 @@ def leaky_rectified_linear_unit(vector: np.ndarray, alpha: float) -> np.ndarray:
 
     >>> leaky_rectified_linear_unit(vector=np.array([2.3,0.6,-2,-3.8]), alpha=0.3)
     array([ 2.3 ,  0.6 , -0.6 , -1.14])
-    >>> leaky_rectified_linear_unit(vector=np.array([-9.2,-0.3,0.45,-4.56]), \
-        alpha=0.067)
+    >>> leaky_rectified_linear_unit(np.array([-9.2, -0.3, 0.45, -4.56]), alpha=0.067)
     array([-0.6164 , -0.0201 ,  0.45   , -0.30552])
     """
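
A note on context: the hunks above touch only the module docstring and a doctest,
so the function body itself never appears in this patch. For readers who want to
run the doctests, here is a minimal self-contained sketch consistent with them.
The np.where formulation is an assumption made for illustration, not code quoted
from this file.

    import numpy as np

    def leaky_rectified_linear_unit(vector: np.ndarray, alpha: float) -> np.ndarray:
        """Apply Leaky ReLU element-wise: f(x) = x if x > 0 else alpha * x."""
        # Unlike plain ReLU, negative inputs are scaled by alpha rather than
        # zeroed, so a small gradient keeps flowing for negative activations.
        return np.where(vector > 0, vector, alpha * vector)

    if __name__ == "__main__":
        out = leaky_rectified_linear_unit(np.array([2.3, 0.6, -2, -3.8]), alpha=0.3)
        print(out)  # elementwise: 2.3, 0.6, -0.6, -1.14 (matches the first doctest)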