Calculate the loss = h - y, where h = X * theta is the hypothesis, and optionally the squared cost (loss^2) / (2m)
Calculate the gradient = X' * loss / m (a short derivation sketch follows these steps)
Update the parameters theta = theta - alpha * gradient
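As a quick sanity check on the gradient step (this derivation is my addition, using the same X, y, theta, and m as in the code below), differentiating the averaged squared cost gives:

```latex
J(\theta) = \frac{1}{2m}\,\lVert X\theta - y\rVert^{2},
\qquad
\nabla_{\theta} J(\theta) = \frac{1}{m}\,X^{\top}(X\theta - y) = \frac{X^{\top}\,\mathrm{loss}}{m}
```

which is exactly the X' * loss / m term used in the update.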
```python
import numpy as np

# m denotes the number of examples here, not the number of features
def gradientDescent(x, y, theta, alpha, m, numIterations):
    xTrans = x.transpose()
    for i in range(0, numIterations):
        hypothesis = np.dot(x, theta)
        loss = hypothesis - y
        # avg cost per example (the 2 in 2*m doesn't really matter here.
        # But to be consistent with the gradient, I include it)
        cost = np.sum(loss ** 2) / (2 * m)
        print("Iteration %d | Cost: %f" % (i, cost))
        # avg gradient per example
        gradient = np.dot(xTrans, loss) / m
        # update
        theta = theta - alpha * gradient
    return theta
```
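To see it converge, here is a minimal usage sketch. The synthetic data, the learning rate of 0.05, and the 1000 iterations are my own illustrative choices, not part of the original answer; the only assumption is the gradientDescent function defined above.

```python
import numpy as np

# Hypothetical example data: y = 4 + 3 * x plus a little noise.
rng = np.random.default_rng(0)
m = 100                                   # number of examples
x_raw = 2 * rng.random(m)
y = 4 + 3 * x_raw + rng.normal(0, 0.5, m)

# Add a column of ones so theta[0] acts as the intercept.
X = np.column_stack((np.ones(m), x_raw))
theta = np.zeros(2)

# Note: the function prints the cost once per iteration.
theta = gradientDescent(X, y, theta, alpha=0.05, m=m, numIterations=1000)
print(theta)  # should end up near [4, 3]
```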