From 30bd9c2c61c5b7728a8d88c712f5b1b184f20363 Mon Sep 17 00:00:00 2001
From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com>
Date: Tue, 20 Jun 2023 14:37:16 +0000
Subject: [PATCH] [pre-commit.ci] auto fixes from pre-commit.com hooks

for more information, see https://pre-commit.ci
---
 .../activation_functions/mish_activation.py | 16 ++++++++--------
 1 file changed, 8 insertions(+), 8 deletions(-)

diff --git a/neural_network/activation_functions/mish_activation.py b/neural_network/activation_functions/mish_activation.py
index 59a395163..c292f76f1 100644
--- a/neural_network/activation_functions/mish_activation.py
+++ b/neural_network/activation_functions/mish_activation.py
@@ -17,17 +17,17 @@ from maths.tanh import tangent_hyperbolic as tanh
 
 def mish_activation(vector: np.ndarray) -> np.ndarray:
     """
-    Implements the Mish function
-    Parameters:
-    vector: np.array
-    Returns:
-    Mish (np.array): The input numpy array after applying tanh.
-    mathematically, mish = x * tanh(softplus(x) where
-    softplus = ln(1+e^(x)) and tanh = (e^x - e^(-x))/(e^x + e^(-x))
-    so, mish can be written as x * (2/(1+e^(-2 * softplus))-1
+    Implements the Mish function
+    Parameters:
+        vector: np.array
+    Returns:
+        Mish (np.array): The input numpy array after applying the Mish function.
+    mathematically, mish = x * tanh(softplus(x)) where
+    softplus = ln(1+e^(x)) and tanh = (e^x - e^(-x))/(e^x + e^(-x)),
+    so mish can be written as x * (2/(1+e^(-2 * softplus)) - 1)
     """
     soft_plus = np.log(np.exp(vector) + 1)
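
Aside (not part of the patch): the docstring's closing identity, mish = x * (2/(1+e^(-2 * softplus)) - 1), follows from tanh(z) = 2/(1+e^(-2z)) - 1 with z = softplus(x). The following is a minimal NumPy sketch, assuming nothing beyond numpy itself, that checks the two formulations agree; the sample points and helper names here are illustrative only.

import numpy as np

x = np.linspace(-5.0, 5.0, 11)
soft_plus = np.log1p(np.exp(x))  # softplus(x) = ln(1 + e^x)

# mish via the tanh(softplus(x)) form
mish_tanh = x * np.tanh(soft_plus)

# equivalent form, using tanh(z) = 2 / (1 + e^(-2z)) - 1
mish_expanded = x * (2.0 / (1.0 + np.exp(-2.0 * soft_plus)) - 1.0)

assert np.allclose(mish_tanh, mish_expanded)

Using np.log1p for the softplus term is a small numerical nicety for inputs where e^x is close to zero; the patched file itself computes np.log(np.exp(vector) + 1) directly, which is equivalent in exact arithmetic.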