Created folder for losses in Machine_Learning (#9969)

* Created folder for losses in Machine_Learning

* Update binary_cross_entropy.py

* [pre-commit.ci] auto fixes from pre-commit.com hooks

for more information, see https://pre-commit.ci

* Update mean_squared_error.py

* Update binary_cross_entropy.py

* Update mean_squared_error.py

* Update binary_cross_entropy.py

* [pre-commit.ci] auto fixes from pre-commit.com hooks

for more information, see https://pre-commit.ci

* Update mean_squared_error.py

* Update binary_cross_entropy.py

* [pre-commit.ci] auto fixes from pre-commit.com hooks

for more information, see https://pre-commit.ci

* Update mean_squared_error.py

* [pre-commit.ci] auto fixes from pre-commit.com hooks

for more information, see https://pre-commit.ci

* Update binary_cross_entropy.py

* Update mean_squared_error.py

* Update binary_cross_entropy.py

* Update mean_squared_error.py

* Update machine_learning/losses/binary_cross_entropy.py

Co-authored-by: Christian Clauss <cclauss@me.com>

* Update machine_learning/losses/mean_squared_error.py

Co-authored-by: Christian Clauss <cclauss@me.com>

* Update machine_learning/losses/binary_cross_entropy.py

Co-authored-by: Christian Clauss <cclauss@me.com>

* Update mean_squared_error.py

* Update machine_learning/losses/mean_squared_error.py

Co-authored-by: Tianyi Zheng <tianyizheng02@gmail.com>

* Update binary_cross_entropy.py

* Update mean_squared_error.py

* Update binary_cross_entropy.py

* Update mean_squared_error.py

* Update mean_squared_error.py

* Update binary_cross_entropy.py

* renamed: losses -> loss_functions

* updated 2 files

* [pre-commit.ci] auto fixes from pre-commit.com hooks

for more information, see https://pre-commit.ci

* Update mean_squared_error.py

* Update mean_squared_error.py

* Update binary_cross_entropy.py

* Update mean_squared_error.py

---------

Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
Co-authored-by: Christian Clauss <cclauss@me.com>
Co-authored-by: Tianyi Zheng <tianyizheng02@gmail.com>
Arnav Kohli 2023-10-08 21:34:43 +05:30 committed by GitHub
parent 6860daea60
commit 81b29066d2
2 changed files with 110 additions and 0 deletions

machine_learning/loss_functions/binary_cross_entropy.py

@@ -0,0 +1,59 @@
"""
Binary Cross-Entropy (BCE) Loss Function

Description:
Quantifies dissimilarity between true labels (0 or 1) and predicted probabilities.
It's widely used in binary classification tasks.

Formula:
BCE = -(1/n) * Σ(y_true * log(y_pred) + (1 - y_true) * log(1 - y_pred))

Source:
[Wikipedia - Cross entropy](https://en.wikipedia.org/wiki/Cross_entropy)
"""

import numpy as np


def binary_cross_entropy(
    y_true: np.ndarray, y_pred: np.ndarray, epsilon: float = 1e-15
) -> float:
    """
    Calculate the BCE Loss between true labels and predicted probabilities.

    Parameters:
    - y_true: True binary labels (0 or 1).
    - y_pred: Predicted probabilities for class 1.
    - epsilon: Small constant to avoid numerical instability.

    Returns:
    - bce_loss: Binary Cross-Entropy Loss.

    Example Usage:
    >>> true_labels = np.array([0, 1, 1, 0, 1])
    >>> predicted_probs = np.array([0.2, 0.7, 0.9, 0.3, 0.8])
    >>> binary_cross_entropy(true_labels, predicted_probs)
    0.2529995012327421
    >>> true_labels = np.array([0, 1, 1, 0, 1])
    >>> predicted_probs = np.array([0.3, 0.8, 0.9, 0.2])
    >>> binary_cross_entropy(true_labels, predicted_probs)
    Traceback (most recent call last):
        ...
    ValueError: Input arrays must have the same length.
    """
    if len(y_true) != len(y_pred):
        raise ValueError("Input arrays must have the same length.")

    # Clip predicted probabilities to avoid log(0) and log(1)
    y_pred = np.clip(y_pred, epsilon, 1 - epsilon)

    # Calculate binary cross-entropy loss
    bce_loss = -(y_true * np.log(y_pred) + (1 - y_true) * np.log(1 - y_pred))

    # Take the mean over all samples
    return np.mean(bce_loss)


if __name__ == "__main__":
    import doctest

    doctest.testmod()
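
A quick sanity check of the 0.2529995012327421 doctest value above, written as a standalone sketch rather than part of the committed file: it expands the averaged BCE formula term by term. The commented-out cross-check assumes scikit-learn is installed; it is not a dependency of the file above.

import numpy as np

# Same inputs as the doctest above
true_labels = np.array([0, 1, 1, 0, 1])
predicted_probs = np.array([0.2, 0.7, 0.9, 0.3, 0.8])

# -(1/n) * Σ(y * log(p) + (1 - y) * log(1 - p)), evaluated per sample
per_sample = -(
    true_labels * np.log(predicted_probs)
    + (1 - true_labels) * np.log(1 - predicted_probs)
)
print(per_sample.mean())  # 0.2529995012327421

# Optional cross-check against scikit-learn (assumed to be available):
# from sklearn.metrics import log_loss
# print(log_loss(true_labels, predicted_probs))  # same result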

machine_learning/loss_functions/mean_squared_error.py

@@ -0,0 +1,51 @@
"""
Mean Squared Error (MSE) Loss Function

Description:
MSE measures the mean squared difference between true values and predicted values.
It serves as a measure of the model's accuracy in regression tasks.

Formula:
MSE = (1/n) * Σ(y_true - y_pred)^2

Source:
[Wikipedia - Mean squared error](https://en.wikipedia.org/wiki/Mean_squared_error)
"""

import numpy as np


def mean_squared_error(y_true: np.ndarray, y_pred: np.ndarray) -> float:
    """
    Calculate the Mean Squared Error (MSE) between two arrays.

    Parameters:
    - y_true: The true values (ground truth).
    - y_pred: The predicted values.

    Returns:
    - mse: The Mean Squared Error between y_true and y_pred.

    Example usage:
    >>> true_values = np.array([1.0, 2.0, 3.0, 4.0, 5.0])
    >>> predicted_values = np.array([0.8, 2.1, 2.9, 4.2, 5.2])
    >>> mean_squared_error(true_values, predicted_values)
    0.028000000000000032
    >>> true_labels = np.array([1.0, 2.0, 3.0, 4.0, 5.0])
    >>> predicted_probs = np.array([0.3, 0.8, 0.9, 0.2])
    >>> mean_squared_error(true_labels, predicted_probs)
    Traceback (most recent call last):
        ...
    ValueError: Input arrays must have the same length.
    """
    if len(y_true) != len(y_pred):
        raise ValueError("Input arrays must have the same length.")

    # Squared difference for each sample
    squared_errors = (y_true - y_pred) ** 2

    # Average over all samples
    return np.mean(squared_errors)


if __name__ == "__main__":
    import doctest

    doctest.testmod()
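
A hand check of the 0.028000000000000032 doctest value above, again as a standalone sketch rather than part of the committed file: the per-sample differences are [0.2, -0.1, 0.1, -0.2, -0.2], so the squared errors sum to 0.14 and the mean is 0.14 / 5 = 0.028.

import numpy as np

# Same inputs as the doctest above
true_values = np.array([1.0, 2.0, 3.0, 4.0, 5.0])
predicted_values = np.array([0.8, 2.1, 2.9, 4.2, 5.2])

# Per-sample squared errors: approximately [0.04, 0.01, 0.01, 0.04, 0.04]
squared_errors = (true_values - predicted_values) ** 2
print(squared_errors)

# Mean: 0.14 / 5 = 0.028 (printed with floating-point rounding noise)
print(squared_errors.mean())  # 0.028000000000000032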