
Implementing Linear Regression by Hand in Python

Implementing the linear regression model

Step 1: compute the MSE loss
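
The loss computed in this step is the mean squared error of the line's predictions over the N samples:

$$
\mathrm{MSE}(b, w) = \frac{1}{N}\sum_{i=1}^{N}\bigl(y_i - (w x_i + b)\bigr)^2
$$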

# Compute the current loss for the model y = w * x + b
# points: the data, an array of shape [100, 2] (columns: x, y)
# b, w: the current parameter values
def compute_error_for_line_given_points(b, w, points):
    N = len(points)
    total_error = 0
    for sample in points:
        x = sample[0]
        y = sample[1]
        # Accumulate the squared error for the mean squared error (MSE)
        total_error += (y - (w * x + b)) ** 2
    return total_error / N
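
Since points is loaded as a NumPy array later in the post, the same loss can also be computed in vectorized form without the Python-level loop. A minimal sketch (compute_error_vectorized is an illustrative name, not part of the original code):

import numpy as np

def compute_error_vectorized(b, w, points):
    # Column 0 holds x, column 1 holds y
    x, y = points[:, 0], points[:, 1]
    # Mean of the squared residuals
    return np.mean((y - (w * x + b)) ** 2)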
           

Step 2: compute the gradients
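
The (2/N) factors in the code below come from differentiating the MSE with respect to each parameter:

$$
\frac{\partial \mathrm{MSE}}{\partial b} = \frac{2}{N}\sum_{i=1}^{N}\bigl((w x_i + b) - y_i\bigr),
\qquad
\frac{\partial \mathrm{MSE}}{\partial w} = \frac{2}{N}\sum_{i=1}^{N} x_i\bigl((w x_i + b) - y_i\bigr)
$$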

# Compute the current gradients and take one update step
def step_gradient(b_current, w_current, points, learning_rate):
    b_gradient = 0
    w_gradient = 0
    N = len(points)
    for sample in points:
        x = sample[0]
        y = sample[1]
        # grad_b: gradient of the loss with respect to b
        b_gradient += (2/N) * (w_current * x + b_current - y)
        # grad_w: gradient of the loss with respect to w
        w_gradient += (2/N) * x * (w_current * x + b_current - y)
    # Update the parameters with the gradients
    new_b = b_current - learning_rate * b_gradient
    new_w = w_current - learning_rate * w_gradient
    return [new_b, new_w]
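
The gradient step can be vectorized in the same way. A minimal sketch of an equivalent update (step_gradient_vectorized is an illustrative name, assuming points is an [N, 2] NumPy array):

import numpy as np

def step_gradient_vectorized(b_current, w_current, points, learning_rate):
    x, y = points[:, 0], points[:, 1]
    residual = w_current * x + b_current - y
    # Taking the mean over samples replaces the explicit (2/N) accumulation
    b_gradient = 2 * np.mean(residual)
    w_gradient = 2 * np.mean(x * residual)
    return [b_current - learning_rate * b_gradient,
            w_current - learning_rate * w_gradient]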
           

Step 3: iterate in a loop

def gradient_descent_runner(points, starting_b, starting_w, learning_rate, num_iterations):
    b = starting_b
    w = starting_w
    # Update the parameters for a fixed number of iterations
    for _ in range(num_iterations):
        b, w = step_gradient(b, w, points, learning_rate)
    return [b, w]
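
To monitor convergence (for example, when tuning learning_rate), the runner can also record the loss after each update. A hypothetical variant, reusing the two functions defined above:

def gradient_descent_with_history(points, starting_b, starting_w, learning_rate, num_iterations):
    b, w = starting_b, starting_w
    loss_history = []
    for _ in range(num_iterations):
        b, w = step_gradient(b, w, points, learning_rate)
        # Record the MSE after each step so the training curve can be inspected
        loss_history.append(compute_error_for_line_given_points(b, w, points))
    return b, w, loss_history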
           

Step 4: run

# numpy is used to load the CSV data; it was not imported in the snippets above
import numpy as np

def run():
    data_path = "data/lession4_data.csv"
    points = np.genfromtxt(data_path, delimiter=",")
    learning_rate = 0.0001
    initial_b = 0
    initial_w = 0
    num_iterations = 1000
    print("Starting gradient descent at b = {0}, w = {1}, error = {2}"
          .format(initial_b, initial_w, compute_error_for_line_given_points(initial_b, initial_w, points)))
    print("Running...")
    b, w = gradient_descent_runner(points, initial_b, initial_w, learning_rate, num_iterations)
    print("After {0} iterations b = {1}, w = {2}, error = {3}"
          .format(num_iterations, b, w, compute_error_for_line_given_points(b, w, points)))

run()
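
The CSV file above is not included with the post. To try the code without it, one can generate synthetic data with known parameters and check that gradient descent approaches them. A minimal sketch (the function name, the true values w = 1.5 and b = 0.8, and the noise level are all illustrative assumptions):

import numpy as np

def synthetic_run():
    rng = np.random.default_rng(0)
    x = rng.uniform(0, 10, size=100)
    # Ground truth line y = 1.5 * x + 0.8 plus Gaussian noise
    y = 1.5 * x + 0.8 + rng.normal(0, 0.5, size=100)
    points = np.stack([x, y], axis=1)
    b, w = gradient_descent_runner(points, 0, 0, 0.0001, 10000)
    # With this small learning rate the intercept converges slowly,
    # so w should be close to 1.5 while b may still be drifting toward 0.8
    print("Recovered b = {0:.3f}, w = {1:.3f}".format(b, w))

synthetic_run()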
           