Table of contents
No headings in the article.
🌟How to build a gradient-descent-based linear regression model? 🤖
💫 Define a prediction function using the weights.
💫 Update the weights through gradient descent.
💫 Compute the model cost J(w).
💫 Train the model over 100 iterations.
💫 Re-apply the prediction function to a new testing set.
import numpy as np
class LinearRegression:
    """Linear regression fitted by batch gradient descent on the MSE cost.

    Attributes:
        X: (m, n) training feature matrix (an intercept column of ones may
           be prepended by ``train(..., fit_intercept=True)``).
        y: (m,) training targets.
        weights: (n,) parameter vector, ordered to match the columns of X.
    """

    def __init__(self, X, y, weights):
        """Store the training data and the initial parameter vector."""
        self.X = X
        self.y = y
        self.weights = weights

    def predict(self, X):
        """Return the linear predictions ``X @ weights``.

        If X has exactly one column fewer than there are weights, a column
        of ones is prepended so the first weight acts as the intercept —
        this lets callers pass raw features after training with
        ``fit_intercept=True``.
        """
        if X.shape[1] == self.weights.shape[0] - 1:
            intercept = np.ones((X.shape[0], 1))
            X = np.hstack((intercept, X))
        return X.dot(self.weights)

    def update_weights(self, learning_rate):
        """Perform one batch gradient-descent step on the training data.

        The gradient of the (halved) MSE cost w.r.t. w is
        ``-X.T @ (y - y_pred) / m``, so descending it means *adding*
        ``learning_rate * X.T @ (y - y_pred) / m`` to the weights.
        """
        y_pred = self.predict(self.X)
        delta_w = np.dot(self.X.T, self.y - y_pred)
        m = self.X.shape[0]  # number of training examples
        self.weights += learning_rate * delta_w / float(m)

    def compute_cost(self):
        """Return J(w): the mean squared error on the training data."""
        y_pred = self.predict(self.X)
        mse = np.mean((self.y - y_pred) ** 2)
        return mse

    def train(self, learning_rate, epochs, fit_intercept=False):
        """Run ``epochs`` gradient-descent steps and return the weights.

        When ``fit_intercept`` is True, a column of ones is prepended to X
        and the weights are re-initialized to zeros of the new width.
        NOTE(review): calling train(fit_intercept=True) twice stacks a
        second ones column — call it at most once per instance.
        """
        if fit_intercept:
            intercept = np.ones((self.X.shape[0], 1))
            self.X = np.hstack((intercept, self.X))
            self.weights = np.zeros(self.X.shape[1])
        for _ in range(epochs):
            self.update_weights(learning_rate)
        return self.weights

    # Plain accessors/mutators, kept for interface compatibility.
    def get_X(self):
        return self.X

    def get_y(self):
        return self.y

    def get_weights(self):
        return self.weights

    def set_X(self, X):
        self.X = X

    def set_y(self, y):
        self.y = y

    def set_weights(self, weights):
        self.weights = weights
💡
Connect with me on LinkedIn🔗 👉Here
💡
Follow my GitHub 💻for fascinating projects and opportunities for collabs 💫
💡
💫 Follow my Quora ✍️for daily writing updates on tech, French, and world history 🏛️