Mirror of https://github.com/TheAlgorithms/Python.git, synced 2025-02-25 18:38:39 +00:00
[pre-commit.ci] auto fixes from pre-commit.com hooks
for more information, see https://pre-commit.ci
This commit is contained in:
parent 88f05ec5a4
commit 30bd9c2c61
@@ -17,17 +17,17 @@ from maths.tanh import tangent_hyperbolic as tanh
def mish_activation(vector: np.ndarray) -> np.ndarray:
    """
    Implements the Mish activation function.

    Parameters:
        vector: np.array

    Returns:
        Mish (np.array): The input numpy array after applying the Mish activation.

    Mathematically, mish(x) = x * tanh(softplus(x)), where
    softplus(x) = ln(1 + e^x) and tanh(x) = (e^x - e^(-x)) / (e^x + e^(-x)),
    so mish can be written as x * (2 / (1 + e^(-2 * softplus(x))) - 1).
    """
    soft_plus = np.log(np.exp(vector) + 1)
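The diff above is cut off after the softplus line, so the return statement is not shown. Below is a minimal runnable sketch of the same idea using only NumPy, assuming the function finishes by returning vector * tanh(soft_plus) as the docstring's formula implies; the name mish_sketch is hypothetical, and np.tanh is used in place of the repository's tangent_hyperbolic helper.

import numpy as np


def mish_sketch(vector: np.ndarray) -> np.ndarray:
    """Minimal Mish sketch: mish(x) = x * tanh(softplus(x)), softplus(x) = ln(1 + e^x)."""
    soft_plus = np.log1p(np.exp(vector))  # ln(1 + e^x); log1p is slightly more accurate near 0
    return vector * np.tanh(soft_plus)    # assumed return line, inferred from the docstring


if __name__ == "__main__":
    print(mish_sketch(np.array([-1.0, 0.0, 1.0])))
    # roughly [-0.3034, 0.0, 0.8651]

The docstring's alternative form x * (2 / (1 + e^(-2 * softplus(x))) - 1) is simply tanh rewritten via the identity tanh(s) = 2 / (1 + e^(-2s)) - 1, so it evaluates to the same result.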