#### Feasibility Analysis

sklearn.linear_model.LinearRegression()

Some of its parameters:

  • fit_intercept: boolean; whether the model should compute an intercept term
  • normalize: boolean; if True, X is normalized before the regression; defaults to False (a minimal construction sketch follows this list)
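A minimal sketch of how these parameters are passed. Note this assumes an older scikit-learn release: `normalize` has been removed in recent versions, where you are expected to scale X yourself (e.g. with `sklearn.preprocessing.StandardScaler`).

```python
from sklearn.linear_model import LinearRegression

# fit_intercept=True is the default: the model learns an intercept term.
# normalize (older scikit-learn only) defaulted to False; in newer releases,
# rescale X explicitly (e.g. with StandardScaler) instead of passing it here.
linear = LinearRegression(fit_intercept=True)
```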

### Viewing the fitted coefficients

```python
linear.coef_        # the fitted coefficient(s), i.e. the slope term(s)
linear.intercept_   # the fitted intercept
```
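A small self-contained example (toy data invented here for illustration) showing where these attributes come from:

```python
import numpy as np
from sklearn.linear_model import LinearRegression

# Toy data generated from y = 2*x + 1, so after fitting we expect
# coef_ close to [2.] and intercept_ close to 1.0
X = np.array([[1], [2], [3], [4]])
y = np.array([3, 5, 7, 9])

linear = LinearRegression(fit_intercept=True).fit(X, y)
print(linear.coef_)       # -> [2.]
print(linear.intercept_)  # -> 1.0
```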

```python
# -*- coding: utf-8 -*-
"""
Created on Sat May 27 12:04:03 2017

@author: sfzyk
"""

import numpy as np
import matplotlib.pyplot as plt
import os
from sklearn.linear_model import LinearRegression
os.chdir(r"d:\mechine_learning\mooc_data")
with open("prices.txt",'r') as f:
    lines=f.readlines()

data_x=[]
data_y=[]

for line in lines:
    #print(line)
    items=line.strip()
    strs=items.split(',')
    data_x.append(int(strs[0]))
    data_y.append(int(strs[1]))

leng=len(data_x)
#data_x and data_y are both one-dimensional, but scikit-learn expects different
#shapes: X must be 2-D with shape (n_samples, n_features), while y can stay 1-D
data_x=np.array(data_x).reshape((leng,1))
data_y=np.array(data_y)

#use .min()/.max() to get scalar bounds, so linspace yields a 1-D grid for plotting
minx=data_x.min()
maxx=data_x.max()
x=np.linspace(minx,maxx,100)

linear=LinearRegression()

linear.fit(data_x,data_y)

#scatter's third positional argument is the marker size, so the colour must be passed by keyword
plt.scatter(data_x,data_y,color='red')

plt.plot(x,linear.predict(x.reshape(-1,1)),'-b')
plt.show()
```
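With the script above just run (so `linear` and `np` are still in scope), the fitted model can be queried directly; the size value 150 below is purely illustrative:

```python
print("coef_:", linear.coef_)             # slope of the fitted line
print("intercept_:", linear.intercept_)   # intercept of the fitted line
print(linear.predict(np.array([[150]])))  # predicted price for a house size of 150
```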

### Adding higher-order (polynomial) features

```python
# -*- coding: utf-8 -*-
"""
Created on Sat May 27 12:59:12 2017

@author: sfzyk
"""

from sklearn.preprocessing import PolynomialFeatures
import matplotlib.pyplot as plt
import os
import numpy as np
from sklearn.linear_model import LinearRegression
os.chdir(r"d:\mechine_learning\mooc_data")
with open("prices.txt",'r') as f:
    lines=f.readlines()

data_x=[]
data_y=[]

for line in lines:
    #print(line)
    items=line.strip()
    strs=items.split(',')
    data_x.append(int(strs[0]))
    data_y.append(int(strs[1]))
leng=len(data_x)
data_x=np.array(data_x).reshape((leng,1))

minx=data_x.min()
maxx=data_x.max()
x=np.linspace(minx,maxx,100)


poly_reg=PolynomialFeatures(degree=2)
x_poly=poly_reg.fit_transform(data_x)

linear=LinearRegression()
linear.fit(x_poly,data_y)

plt.scatter(data_x,data_y,color='red')

#poly_reg has already been fitted above, so transform() is enough here
plt.plot(x,linear.predict(poly_reg.transform(x.reshape((-1,1)))),'-b')
plt.show()
```
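To make explicit what the degree-2 expansion feeds into the linear model, here is a tiny self-contained sketch (the input values are made up for illustration):

```python
import numpy as np
from sklearn.preprocessing import PolynomialFeatures

X = np.array([[2], [3]])
poly = PolynomialFeatures(degree=2)
print(poly.fit_transform(X))
# [[1. 2. 4.]
#  [1. 3. 9.]]
# Each row becomes [1, x, x**2]; LinearRegression then learns one weight per
# column, so the "linear" fit in these features is a quadratic curve in x.
```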