diff --git a/neural_network/activation_functions/mish_activation.py b/neural_network/activation_functions/mish_activation.py
index 59a395163..c292f76f1 100644
--- a/neural_network/activation_functions/mish_activation.py
+++ b/neural_network/activation_functions/mish_activation.py
@@ -17,17 +17,17 @@ from maths.tanh import tangent_hyperbolic as tanh
 def mish_activation(vector: np.ndarray) -> np.ndarray:
     """
-    Implements the Mish function
+    Implements the Mish activation function

-    Parameters:
-        vector: np.array
+    Parameters:
+        vector (np.ndarray): The input array.

-    Returns:
-        Mish (np.array): The input numpy array after applying tanh.
+    Returns:
+        Mish (np.ndarray): The input array after applying mish.

-    mathematically, mish = x * tanh(softplus(x) where
-    softplus = ln(1+e^(x)) and tanh = (e^x - e^(-x))/(e^x + e^(-x))
-    so, mish can be written as x * (2/(1+e^(-2 * softplus))-1
+    Mathematically, mish(x) = x * tanh(softplus(x)), where
+    softplus(x) = ln(1 + e^x) and tanh(x) = (e^x - e^(-x)) / (e^x + e^(-x)),
+    so mish can be written as x * (2 / (1 + e^(-2 * softplus(x))) - 1).
     """
     soft_plus = np.log(np.exp(vector) + 1)
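
For reviewers who want to sanity-check the docstring's formula outside the repository, here is a minimal, self-contained sketch of the same computation using only NumPy. The function name `mish` and the sample inputs are illustrative, not part of this PR:

```python
import numpy as np


def mish(vector: np.ndarray) -> np.ndarray:
    """Mish: x * tanh(softplus(x)), with softplus(x) = ln(1 + e^x)."""
    soft_plus = np.log1p(np.exp(vector))  # log1p(e^x) == ln(1 + e^x)
    return vector * np.tanh(soft_plus)


# Mish is smooth and non-monotonic, with a minimum value around -0.31.
print(mish(np.array([-2.0, 0.0, 2.0])))  # -> approx [-0.2525, 0.0, 1.9440]
```

The alternative form in the docstring, x * (2 / (1 + e^(-2 * softplus(x))) - 1), produces the same values, since tanh(y) = 2 / (1 + e^(-2y)) - 1.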