Gradient Descent

Fit a straight line by looping: on each iteration, update m and b along the negative gradient so the difference between y_predict and y keeps shrinking, and stop once the cost no longer changes.
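Concretely, the cost being minimized is the mean squared error, cost = (1/n) * Σ(y - y_predict)², and each step moves m and b against its partial derivatives: ∂cost/∂m = -(2/n) * Σ x·(y - y_predict) and ∂cost/∂b = -(2/n) * Σ (y - y_predict). These are exactly the md and bd terms computed in the code below.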

import numpy as np
import matplotlib.pyplot as plt
%matplotlib inline
import math
def gradient_descent(x, y):
    m_curr = b_curr = 0          # start slope and intercept at zero
    iterations = 10000000        # upper bound; the loop breaks early once the cost stops changing
    learning_rate = 0.0002
    cost_new = cost_old = 0
    n = len(x)
    plt.scatter(x, y, color='red', marker='*')   # plot the raw data points
    
    for i in range(iterations):
        y_predict = m_curr * x + b_curr
        cost_new = (1/n) * sum([val**2 for val in (y - y_predict)])   # mean squared error
        # math.isclose returns True once the old and new cost agree to within rel_tol,
        # i.e. the cost has effectively stopped improving and we can stop iterating
        if not math.isclose(cost_old, cost_new, rel_tol=1e-20):
            cost_old = cost_new
            md = -(2/n) * sum(x*(y-y_predict))   # partial derivative of cost w.r.t. m
            bd = -(2/n) * sum(y-y_predict)       # partial derivative of cost w.r.t. b
            m_curr = m_curr - learning_rate*md   # step both parameters against the gradient
            b_curr = b_curr - learning_rate*bd
            print("m: {}, b: {}, cost: {}, iteration: {}".format(m_curr, b_curr, cost_new, i))
            plt.plot(x, y_predict, color='blue')   # draw the intermediate fit for this iteration
        else:
            break
    
    plt.plot(x, y_predict, color='orange')   # draw the final fitted line
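
As a quick check, the function can be called with a small synthetic dataset (the arrays below are illustrative, not from the original post). On data generated from y = 2x + 3, the loop should settle near m ≈ 2 and b ≈ 3:

# Illustrative example (assumed data, not from the original post): x and y must be
# numpy arrays so that m_curr * x + b_curr works elementwise inside gradient_descent.
x = np.array([1, 2, 3, 4, 5])
y = np.array([5, 7, 9, 11, 13])   # y = 2x + 3
gradient_descent(x, y)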
