2018-10-24 19:20:28 +00:00
|
|
|
#!/usr/bin/python
|
2018-10-16 18:52:44 +00:00
|
|
|
|
2020-05-01 21:36:35 +00:00
|
|
|
# Logistic Regression from scratch
|
2018-10-16 18:52:44 +00:00
|
|
|
|
|
|
|
# In[62]:
|
|
|
|
|
2018-10-24 19:20:28 +00:00
|
|
|
# In[63]:
|
|
|
|
|
|
|
|
# importing all the required libraries
|
2018-10-16 18:52:44 +00:00
|
|
|
|
2020-05-01 21:36:35 +00:00
|
|
|
"""
|
|
|
|
Implementing logistic regression for classification problem
|
|
|
|
Helpful resources:
|
|
|
|
Coursera ML course
|
|
|
|
https://medium.com/@martinpella/logistic-regression-from-scratch-in-python-124c5636b8ac
|
|
|
|
"""
|
2018-10-16 18:52:44 +00:00
|
|
|
import numpy as np
|
2020-07-06 07:44:19 +00:00
|
|
|
from matplotlib import pyplot as plt
|
|
|
|
from sklearn import datasets
|
2018-10-24 19:20:28 +00:00
|
|
|
|
|
|
|
# get_ipython().run_line_magic('matplotlib', 'inline')
|
|
|
|
|
2018-10-16 18:52:44 +00:00
|
|
|
|
|
|
|
# In[67]:
|
|
|
|
|
2020-05-01 21:36:35 +00:00
|
|
|
# sigmoid function or logistic function is used as a hypothesis function in
|
|
|
|
# classification problems
|
2018-10-16 18:52:44 +00:00
|
|
|
|
2019-10-05 05:14:13 +00:00
|
|
|
|
2018-10-16 18:52:44 +00:00
|
|
|
def sigmoid_function(z):
    """
    Also known as the Logistic Function.

                1
    f(x) =   -------
             1 + e⁻ˣ

    The sigmoid maps any real input into the open interval (0, 1):
    it approaches 1 for increasingly positive inputs and 0 for
    increasingly negative ones.

    Reference: https://en.wikipedia.org/wiki/Sigmoid_function

    @param z: input to the function (scalar or numpy array)
    @returns: returns value in the range 0 to 1
    """
    # np.exp broadcasts, so this works elementwise on arrays as well.
    exp_negative = np.exp(-z)
    return 1 / (1 + exp_negative)
|
|
|
|
|
2018-10-16 18:52:44 +00:00
|
|
|
|
2018-10-24 19:20:28 +00:00
|
|
|
def cost_function(h, y):
    """
    Binary cross-entropy (log loss), averaged over all samples.

    @param h: predicted probabilities (output of the sigmoid)
    @param y: ground-truth binary labels (0 or 1), same shape as h
    @returns: mean negative log-likelihood of the predictions
    """
    # Per-sample contributions of the positive and negative labels.
    positive_term = y * np.log(h)
    negative_term = (1 - y) * np.log(1 - h)
    return np.mean(-positive_term - negative_term)
|
2018-10-16 18:52:44 +00:00
|
|
|
|
2019-10-05 05:14:13 +00:00
|
|
|
|
2022-10-12 22:54:20 +00:00
|
|
|
def log_likelihood(x, y, weights):
    """
    Log-likelihood of binary labels under a logistic model.

    @param x: feature matrix, shape (n_samples, n_features)
    @param y: binary label vector, shape (n_samples,)
    @param weights: model weight vector, shape (n_features,)
    @returns: scalar log-likelihood summed over all samples
    """
    linear_scores = x @ weights
    # Standard logistic log-likelihood: sum_i [ y_i * s_i - log(1 + e^{s_i}) ]
    return np.sum(y * linear_scores - np.log(1 + np.exp(linear_scores)))
|
2019-10-05 05:14:13 +00:00
|
|
|
|
2018-10-16 18:52:44 +00:00
|
|
|
|
2018-10-16 19:22:32 +00:00
|
|
|
# here alpha is the learning rate, x is the feature matrix, and y is the target vector
|
2022-10-12 22:54:20 +00:00
|
|
|
def logistic_reg(alpha, x, y, max_iterations=70000):
    """
    Train logistic-regression weights with batch gradient descent.

    @param alpha: learning rate
    @param x: feature matrix, shape (n_samples, n_features)
    @param y: binary target vector, shape (n_samples,)
    @param max_iterations: number of gradient-descent steps to run
    @returns: learned weight vector, shape (n_features,)
    """
    weights = np.zeros(x.shape[1])
    for step in range(max_iterations):
        # Forward pass: predicted probabilities under the current weights.
        probabilities = sigmoid_function(x @ weights)
        # Mean gradient of the log-loss with respect to the weights.
        grad = x.T @ (probabilities - y) / y.size
        weights = weights - alpha * grad  # updating the weights
        # Evaluate the loss with the freshly updated weights.
        loss = cost_function(sigmoid_function(x @ weights), y)
        if step % 100 == 0:
            print(f"loss: {loss} \t")  # printing the loss after every 100 iterations
    return weights
|
2018-10-16 18:52:44 +00:00
|
|
|
|
2019-10-05 05:14:13 +00:00
|
|
|
|
2018-10-24 19:20:28 +00:00
|
|
|
# In[68]:
|
2018-10-16 18:52:44 +00:00
|
|
|
|
2019-10-05 05:14:13 +00:00
|
|
|
if __name__ == "__main__":
    # Demo: binary classification on the first two iris features
    # (class "setosa" vs. everything else).
    iris = datasets.load_iris()
    x = iris.data[:, :2]  # keep only the first two features so we can plot in 2-D
    y = (iris.target != 0) * 1  # 1 for non-setosa samples, 0 otherwise

    alpha = 0.1  # learning rate for gradient descent
    theta = logistic_reg(alpha, x, y, max_iterations=70000)
    print("theta: ", theta)  # printing the theta i.e our weights vector

    def predict_prob(x):
        """Predicted probability of class 1 under the trained model."""
        return sigmoid_function(np.dot(x, theta))

    # Scatter both classes and overlay the 0.5-probability decision boundary.
    plt.figure(figsize=(10, 6))
    negatives = x[y == 0]
    positives = x[y == 1]
    plt.scatter(negatives[:, 0], negatives[:, 1], color="b", label="0")
    plt.scatter(positives[:, 0], positives[:, 1], color="r", label="1")

    x1_min, x1_max = x[:, 0].min(), x[:, 0].max()
    x2_min, x2_max = x[:, 1].min(), x[:, 1].max()
    xx1, xx2 = np.meshgrid(np.linspace(x1_min, x1_max), np.linspace(x2_min, x2_max))
    grid = np.c_[xx1.ravel(), xx2.ravel()]
    probs = predict_prob(grid).reshape(xx1.shape)
    # The contour at probability 0.5 is the model's decision boundary.
    plt.contour(xx1, xx2, probs, [0.5], linewidths=1, colors="black")

    plt.legend()
    plt.show()
|