Compare commits

...

4 Commits

Author            SHA1        Message                                                         Date
Julien Richard    55f5601154  Merge a33e39ae2c into e3bd7721c8                                2024-11-15 15:19:14 +01:00
Christian Clauss  e3bd7721c8  validate_filenames.py Shebang python for Windows (#12371)      2024-11-15 14:59:14 +01:00
Julien RICHARD    a33e39ae2c  Refactor sum_of_square_error function in linear_regression.py  2024-10-01 14:55:03 +02:00
Julien RICHARD    a09a72816e  add tests for linear regression                                 2024-10-01 14:43:44 +02:00
2 changed files with 19 additions and 3 deletions

View File

@@ -40,7 +40,16 @@ def run_steep_gradient_descent(data_x, data_y, len_data, alpha, theta):
:param alpha : Learning rate of the model
:param theta : Feature vector (weights for our model)
:return : Updated features, using
          curr_features - alpha * gradient(w.r.t. feature)
>>> data_x = np.array([[1, 2], [1, 3], [1, 4]])
>>> data_y = np.array([[2], [2], [2]])
>>> theta = np.array([[0.0, 0.0]])
>>> alpha = 0.01
>>> len_data = len(data_x)
>>> new_theta = run_steep_gradient_descent(data_x, data_y, len_data, alpha, theta)
>>> new_theta.round(2)
array([[0.02, 0.06]])
"""
n = len_data
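
The rest of the function body is not visible in this hunk, so here is a minimal, self-contained sketch of the kind of batch gradient-descent step the new doctest exercises. The gradient computation below is an assumption based on the standard least-squares update, not a quote of the file; only the signature and the doctest values come from the diff.

import numpy as np

def run_steep_gradient_descent(data_x, data_y, len_data, alpha, theta):
    # One batch gradient-descent step: theta <- theta - (alpha / n) * (theta X^T - y^T) X
    prod = np.dot(theta, data_x.transpose()) - data_y.transpose()
    gradient = np.dot(prod, data_x)
    return theta - (alpha / len_data) * gradient

# Reproduces the values used in the doctest above.
data_x = np.array([[1, 2], [1, 3], [1, 4]])
data_y = np.array([[2], [2], [2]])
theta = np.array([[0.0, 0.0]])
print(run_steep_gradient_descent(data_x, data_y, len(data_x), 0.01, theta).round(2))
# -> [[0.02 0.06]]
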
@@ -89,10 +98,17 @@ def run_linear_regression(data_x, data_y):
def mean_absolute_error(predicted_y, original_y):
"""Return sum of square error for error calculation
"""
Return sum of square error for error calculation
:param predicted_y : contains the output of prediction (result vector)
:param original_y : contains values of expected outcome
:return : mean absolute error computed from the given vectors
>>> mean_absolute_error([3.0, 2.0, 1.0], [2.5, 2.0, 1.0])
0.16666666666666666
>>> mean_absolute_error([5.0, 6.0], [5.0, 7.0])
0.5
"""
total = sum(abs(y - predicted_y[i]) for i, y in enumerate(original_y))
return total / len(original_y)
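
As a quick cross-check of the two doctest values, an equivalent vectorized formulation gives the same results. This NumPy version (and the helper name mae) is an illustration, not the code added by the commit:

import numpy as np

def mae(predicted_y, original_y):
    # Mean absolute error: average of |expected - predicted| over all samples.
    return float(np.mean(np.abs(np.asarray(original_y) - np.asarray(predicted_y))))

print(mae([3.0, 2.0, 1.0], [2.5, 2.0, 1.0]))  # 0.16666666666666666
print(mae([5.0, 6.0], [5.0, 7.0]))            # 0.5
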

View File

@@ -1,4 +1,4 @@
-#!/usr/bin/env python3
+#!python
import os
try:
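
For context on the shebang swap in validate_filenames.py: the commit message suggests it targets the Windows py launcher, which recognizes a bare `#!python` as one of its virtual commands, so e.g. `py validate_filenames.py` resolves an interpreter from the shebang itself. On POSIX systems the script is typically invoked through an explicit interpreter (e.g. `python3 validate_filenames.py`), in which case the shebang line is not consulted at all.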