From 88f05ec5a42787a05eac74b2d86fa7e745d995aa Mon Sep 17 00:00:00 2001 From: Mitra-babu Date: Tue, 20 Jun 2023 20:05:39 +0530 Subject: [PATCH] mish activation added --- neural_network/activation_functions/mish_activation.py | 9 --------- 1 file changed, 9 deletions(-) diff --git a/neural_network/activation_functions/mish_activation.py b/neural_network/activation_functions/mish_activation.py index 8dfd28f01..59a395163 100644 --- a/neural_network/activation_functions/mish_activation.py +++ b/neural_network/activation_functions/mish_activation.py @@ -29,15 +29,6 @@ def mish_activation(vector: np.ndarray) -> np.ndarray: softplus = ln(1+e^(x)) and tanh = (e^x - e^(-x))/(e^x + e^(-x)) so, mish can be written as x * (2/(1+e^(-2 * softplus))-1 - Examples: - >>> mish_activation(np.array([1,5,6,-0.67])) - array([ 0.86509839, 8.99955208, 10.99992663, -1.93211787]) - - - >>> mish_activation(np.array([8,2,-0.98,13])) - array([14.9999982 , 2.94395896, -2.28214659, 25. ]) - - """ soft_plus = np.log(np.exp(vector) + 1) return vector * tanh(soft_plus)