From c6ec99d57140cbf8b54077d379dfffeb6c7ad280 Mon Sep 17 00:00:00 2001
From: Kausthub Kannan
Date: Sat, 7 Oct 2023 00:53:05 +0530
Subject: [PATCH] Added Mish Activation Function (#9942)

* Added Mish Activation Function

* Apply suggestions from code review

---------

Co-authored-by: Tianyi Zheng
---
 neural_network/activation_functions/mish.py | 39 +++++++++++++++++++++
 1 file changed, 39 insertions(+)
 create mode 100644 neural_network/activation_functions/mish.py

diff --git a/neural_network/activation_functions/mish.py b/neural_network/activation_functions/mish.py
new file mode 100644
index 000000000..e4f98307f
--- /dev/null
+++ b/neural_network/activation_functions/mish.py
@@ -0,0 +1,39 @@
+"""
+Mish Activation Function
+
+Use Case: Improved version of the ReLU activation function used in Computer Vision.
+For more detailed information, you can refer to the following link:
+https://en.wikipedia.org/wiki/Rectifier_(neural_networks)#Mish
+"""
+
+import numpy as np
+
+
+def mish(vector: np.ndarray) -> np.ndarray:
+    """
+    Implements the Mish activation function.
+
+    Parameters:
+        vector (np.ndarray): The input array for Mish activation.
+
+    Returns:
+        np.ndarray: The input array after applying the Mish activation.
+
+    Formula:
+        f(x) = x * tanh(softplus(x)) = x * tanh(ln(1 + e^x))
+
+    Examples:
+    >>> mish(vector=np.array([2.3,0.6,-2,-3.8]))
+    array([ 2.26211893,  0.46613649, -0.25250148, -0.08405831])
+
+    >>> mish(np.array([-9.2, -0.3, 0.45, -4.56]))
+    array([-0.00092952, -0.15113318,  0.33152014, -0.04745745])
+
+    """
+    return vector * np.tanh(np.log(1 + np.exp(vector)))
+
+
+if __name__ == "__main__":
+    import doctest
+
+    doctest.testmod()
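
Follow-up note (not part of the patch): the softplus term np.log(1 + np.exp(vector)) overflows for large positive inputs, raising a RuntimeWarning even though the mathematical limit of x * tanh(softplus(x)) is simply x. A minimal sketch of a numerically stable variant is below; the function name mish_stable is hypothetical and only illustrates the idea, using np.logaddexp(0, x), which computes ln(e^0 + e^x) = ln(1 + e^x) without ever forming e^x directly.

import numpy as np


def mish_stable(vector: np.ndarray) -> np.ndarray:
    # softplus(x) = ln(1 + e^x), computed via logaddexp to avoid overflow
    # for large positive x: np.logaddexp(0, x) = ln(e^0 + e^x) = ln(1 + e^x)
    softplus = np.logaddexp(0, vector)
    return vector * np.tanh(softplus)


# Agrees with the patch's doctest values, e.g.:
# mish_stable(np.array([2.3, 0.6, -2, -3.8]))
# -> array([ 2.26211893,  0.46613649, -0.25250148, -0.08405831])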