完整版见fenghaotong

本实现参考了网上的代码。

LinearRegression.h

/*******************************************************************
*《周志华 机器学习》C++代码
*
* htfeng
* 2018.09.28
*
* 第三章:线性模型
* 定义一个线性回归的类
*******************************************************************/
#ifndef ML_LINEARREGRESSION_H // include guard: skip if already defined
#define ML_LINEARREGRESSION_H

// __declspec(dllexport) is MSVC-specific; expand to nothing on other
// compilers so the header stays portable. On MSVC the exported
// interface is unchanged.
#if defined(_MSC_VER)
#define ML_LR_API __declspec(dllexport)
#else
#define ML_LR_API
#endif

// Simple univariate linear regression (y = theta[0] + theta[1] * x),
// trained by batch gradient descent.
class LinearRegression {
public:
	double *x = nullptr;      // training inputs (non-owning, supplied by caller)
	double *y = nullptr;      // training targets (non-owning, supplied by caller)
	int m = 0;                // number of training samples
	double *theta = nullptr;  // fitted parameters [intercept, slope]; set by train().
	                          // Initialized to nullptr so use-before-train is detectable
	                          // rather than an indeterminate pointer.
	// Store the training set; does not copy or take ownership of the arrays.
	ML_LR_API LinearRegression(double x[], double y[], int m);
	// Run gradient descent with learning rate `alpha` for `iterations` steps.
	ML_LR_API void train(double alpha, int iterations);
	// Predict the target for a single input; requires train() to have run.
	ML_LR_API double predict(double x);
private:
	// Mean squared error cost J(theta) over the m samples.
	ML_LR_API static double compute_cost(double x[], double y[], double theta[], int m);
	// Hypothesis for one sample: theta[0] + theta[1] * x.
	ML_LR_API static double h(double x, double theta[]);
	// Predictions for all m samples; returns a new[] array owned by the caller.
	ML_LR_API static double *calculate_predictions(double x[], double theta[], int m);
	// Batch gradient descent; writes per-iteration cost into J and
	// returns a new[] theta array owned by the caller.
	ML_LR_API static double *gradient_descent(double x[], double y[], double alpha, int iter, double *J, int m);
};

#endif

LinearRegression.cpp

/*******************************************************************
*《周志华 机器学习》C++代码
*
* htfeng
* 2018.09.28
*
* 第三章:线性模型
* 线性回归
*******************************************************************/
#include <iostream>
#include <fstream>
#include "include/LinearRegression.h"
#include "include/Utils.h"


using namespace std;

//初始化
// Constructor: keep non-owning pointers to the caller's training data
// and remember the sample count. No copies are made.
LinearRegression::LinearRegression(double x[], double y[], int m)
	: x(x), y(y), m(m) {
}

//梯度下降
// Batch gradient descent for univariate linear regression.
// x, y : training samples (m of each)
// alpha: learning rate
// iters: number of full-batch update steps
// J    : out-array of length iters; receives the cost after each step
// Returns a new[] array {intercept, slope} owned by the caller.
double *LinearRegression::gradient_descent(double x[], double y[], double alpha, int iters, double *J, int m) {
	double *theta = new double[2];
	theta[0] = 1;
	theta[1] = 1;
	for (int i = 0; i < iters; i++) {
		double *predictions = calculate_predictions(x, theta, m);
		double *diff = Utils::array_diff(predictions, y, m);
		// Gradient w.r.t. the slope needs the residuals scaled by x.
		double *error_x2 = Utils::array_multiplication(diff, x, m);
		// Simultaneous update of both parameters from the same residuals
		// (diff doubles as the intercept's error term).
		theta[0] = theta[0] - alpha * (1.0 / m) * Utils::array_sum(diff, m);
		theta[1] = theta[1] - alpha * (1.0 / m) * Utils::array_sum(error_x2, m);
		J[i] = compute_cost(x, y, theta, m);
		// Fix: the per-iteration temporaries were leaked in the original.
		// calculate_predictions returns new[]; the Utils helpers are
		// presumed to as well — TODO confirm against Utils.h.
		delete[] predictions;
		delete[] diff;
		delete[] error_x2;
	}
	return theta;
}

// 训练函数
// Train the model: run gradient descent for `iterations` steps with
// learning rate `alpha`, print the cost history and the fitted
// parameters, and store the result in this->theta.
void LinearRegression::train(double alpha, int iterations) {
	double *J = new double[iterations];
	this->theta = gradient_descent(x, y, alpha, iterations, J, m);
	cout << "J = ";
	for (int i = 0; i < iterations; ++i) {
		cout << J[i] << " ";
	}
	cout << endl << "Theta: " << theta[0] << " " << theta[1] << endl;
	// Fix: J was allocated above but never released in the original.
	delete[] J;
}

//预测
double LinearRegression::predict(double x){
	return h(x, theta);
}

//计算误差
// Mean squared error cost: J(theta) = (1 / 2m) * sum((h(x_i) - y_i)^2).
double LinearRegression::compute_cost(double x[], double y[], double theta[], int m) {
	double *predictions = calculate_predictions(x, theta, m);
	double *diff = Utils::array_diff(predictions, y, m);
	double *sq_errors = Utils::array_pow(diff, m, 2);
	double cost = (1.0 / (2 * m)) * Utils::array_sum(sq_errors, m);
	// Fix: the three temporaries were leaked in the original.
	// calculate_predictions returns new[]; the Utils helpers are
	// presumed to as well — TODO confirm against Utils.h.
	delete[] predictions;
	delete[] diff;
	delete[] sq_errors;
	return cost;
}

//预测单个值
// Hypothesis for a single sample: intercept plus slope times x.
double LinearRegression::h(double x, double theta[]) {
	double intercept = theta[0];
	double slope = theta[1];
	return slope * x + intercept;
}

//预测
// Evaluate the hypothesis on all m inputs.
// Returns a new[] array of predictions; the caller owns and must free it.
double *LinearRegression::calculate_predictions(double x[], double theta[], int m) {
	double *out = new double[m];
	int idx = 0;
	while (idx < m) {
		out[idx] = h(x[idx], theta);
		++idx;
	}
	return out;
}


posted on 2018-09-28 16:54  一小白  阅读(1509)  评论(0编辑  收藏  举报