Machine-Learning-From-Scratch
Machine-Learning-From-Scratch (copied to clipboard)
Missing `2` factor in derivatives computation
import numpy as np
class LinearRegression:
    """Ordinary least-squares linear regression trained by batch gradient descent.

    Minimizes the mean squared error J(w, b) = (1/n) * sum((Xw + b - y)^2).
    The gradients therefore carry the factor 2 from differentiating the
    squared term: dJ/dw = (2/n) * X^T (Xw + b - y), dJ/db = (2/n) * sum(Xw + b - y).

    Attributes:
        weights: learned coefficient vector of shape (n_features,), or None before fit.
        bias: learned intercept scalar, or None before fit.
    """

    def __init__(self, lr=0.001, n_iters=1000):
        """Configure the optimizer.

        Args:
            lr: gradient-descent step size (learning rate).
            n_iters: number of full-batch gradient-descent iterations.
        """
        self.lr = lr
        self.n_iters = n_iters
        self.weights = None
        self.bias = None

    def fit(self, X, y):
        """Fit the model to training data.

        Args:
            X: feature matrix of shape (n_samples, n_features).
            y: target vector of shape (n_samples,).
        """
        n_samples, n_features = X.shape
        # Start from the zero model; gradient descent refines it below.
        self.weights = np.zeros(n_features)
        self.bias = 0

        for _ in range(self.n_iters):
            residual = np.dot(X, self.weights) + self.bias - y
            # MSE gradients — note the factor 2 from d/dw of the squared error.
            grad_w = (2 / n_samples) * np.dot(X.T, residual)
            grad_b = (2 / n_samples) * np.sum(residual)
            self.weights -= self.lr * grad_w
            self.bias -= self.lr * grad_b

    def predict(self, X):
        """Return model predictions X @ weights + bias for each row of X."""
        return np.dot(X, self.weights) + self.bias
The fix is to include the previously missing factor of `2` (from differentiating the squared error) when computing both `dw` and `db`, as shown in the code above.