Logistic Regression
The code below applies batch gradient descent to fit a simple linear hypothesis to synthetic data:

import numpy as np
import random

def genData(numPoints, bias, variance):
    # Build numPoints rows of [1, i] (intercept term plus one feature)
    # and targets y = i + bias plus uniform noise scaled by variance.
    x = np.zeros(shape=(numPoints, 2))
    y = np.zeros(shape=numPoints)
    for i in range(numPoints):  # 0 -> numPoints-1
        x[i][0] = 1
        x[i][1] = i
        y[i] = (i + bias) + random.uniform(0, 1) * variance  # variance scales the noise, it is not an offset
    return x, y

def gradientDescent(x, y, theta, alpha, m, numIterations):
    # Batch gradient descent on the squared-error cost
    # J(theta) = sum((x.theta - y)^2) / (2m)
    xTran = np.transpose(x)
    for i in range(numIterations):
        hypothesis = np.dot(x, theta)        # predictions for all m points
        loss = hypothesis - y                # residuals
        cost = np.sum(loss ** 2) / (2 * m)
        gradient = np.dot(xTran, loss) / m   # dJ/dtheta
        theta = theta - alpha * gradient     # step against the gradient
        print("Iteration %d | cost: %f" % (i, cost))
    return theta

x, y = genData(100, 25, 10)
print("x:")
print(x)
print("y:")
print(y)
m, n = np.shape(x)
n_y = np.shape(y)
print("m:" + str(m) + " n:" + str(n) + " n_y:" + str(n_y))
numIterations = 1000
alpha = 0.0005
theta = np.ones(n)
theta = gradientDescent(x, y, theta, alpha, m, numIterations)
print(theta)
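The code above minimizes a squared-error cost over a linear hypothesis. For logistic regression itself, the hypothesis is passed through the sigmoid and the same batch-gradient loop applies, with the log loss (cross-entropy) as the cost. The following is a minimal sketch, not part of the original notes; logisticGradientDescent and the toy binary labels are illustrative:

import numpy as np

def sigmoid(z):
    # Logistic function: maps any real number into (0, 1)
    return 1.0 / (1.0 + np.exp(-z))

def logisticGradientDescent(x, y, theta, alpha, m, numIterations):
    # Same update rule as above, but the hypothesis is sigmoid(x . theta)
    # and the cost is the log loss instead of the squared error.
    xTran = np.transpose(x)
    for i in range(numIterations):
        hypothesis = sigmoid(np.dot(x, theta))
        loss = hypothesis - y
        eps = 1e-12  # guards against log(0)
        cost = -np.sum(y * np.log(hypothesis + eps)
                       + (1 - y) * np.log(1 - hypothesis + eps)) / m
        gradient = np.dot(xTran, loss) / m   # gradient of the log loss
        theta = theta - alpha * gradient
        if i % 1000 == 0:
            print("Iteration %d | cost: %f" % (i, cost))
    return theta

feature = np.arange(100) / 100.0              # scaled to [0, 1) so a fixed alpha behaves well
x = np.column_stack((np.ones(100), feature))  # intercept column plus one feature
y = (feature >= 0.5).astype(float)            # toy binary labels
theta = logisticGradientDescent(x, y, np.zeros(2), 0.5, 100, 5000)
print(theta)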
Correlation (Pearson correlation coefficient): a measure of the strength and direction of the linear relationship between two variables, ranging from -1 to +1.
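Concretely, r = Σ(xᵢ − x̄)(yᵢ − ȳ) / √(Σ(xᵢ − x̄)² · Σ(yᵢ − ȳ)²). A small sketch of that definition; pearson_r is an illustrative helper (np.corrcoef computes the same quantity):

import numpy as np

def pearson_r(x, y):
    # r = sum((x - x_mean)(y - y_mean)) / sqrt(sum((x - x_mean)^2) * sum((y - y_mean)^2))
    x = np.asarray(x, dtype=float)
    y = np.asarray(y, dtype=float)
    dx = x - x.mean()
    dy = y - y.mean()
    return np.sum(dx * dy) / np.sqrt(np.sum(dx ** 2) * np.sum(dy ** 2))

print(pearson_r([1, 2, 3, 4], [2.0, 4.1, 5.9, 8.2]))  # near +1: strong positive linear relation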
R-squared (coefficient of determination): the proportion of the total variation in the dependent variable that is explained by the independent variable(s) through the regression relationship.
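Equivalently, R² = 1 − SS_res / SS_tot, where SS_res is the residual sum of squares and SS_tot is the total variation of y around its mean; for simple linear regression it equals the square of the Pearson coefficient. A minimal sketch (r_squared and the sample predictions are illustrative):

import numpy as np

def r_squared(y_true, y_pred):
    # R^2 = 1 - SS_res / SS_tot
    y_true = np.asarray(y_true, dtype=float)
    y_pred = np.asarray(y_pred, dtype=float)
    ss_res = np.sum((y_true - y_pred) ** 2)          # variation left unexplained by the model
    ss_tot = np.sum((y_true - y_true.mean()) ** 2)   # total variation around the mean
    return 1 - ss_res / ss_tot

y = np.array([2.0, 4.0, 6.1, 8.0])
y_hat = np.array([2.1, 4.0, 6.0, 7.9])  # predictions from some fitted model (hypothetical)
print(r_squared(y, y_hat))              # close to 1: most of the variation is explained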