// 神经网络/BP(反向传播)代码示例 — neural network backpropagation (BP) example on MNIST
#include <cmath>
#include <cstdio>
#include <cstdlib>
#include <iostream>
#include <vector>
using namespace std;
const int NX = 784, NB = 500, NY = 10;// layer sizes: NX input nodes (28*28 pixels), NB hidden nodes, NY output nodes (digits 0-9)
const double eta = 0.06;// learning rate for the gradient-descent updates in back()
// One network node: current activation, bias (used as a threshold, see forward()),
// and the weights of its outgoing edges to the next layer.
struct Node {
double val{};
double bias{};
vector<double> weight;
} x[NX], b[NB], y[NY];// input layer X, hidden layer B, output layer Y
double g[NY], e[NB];// backprop scratch: output-layer gradients g, hidden-layer errors e
double trainx[NX], trainy[NY];// current training sample: scaled pixels and one-hot label
// Logistic activation: maps any real input into the open interval (0, 1).
double sigmoid(double x) {
    const double ez = exp(-x);
    return 1.0 / (1.0 + ez);
}
// Uniform pseudo-random initial weight in the open interval (-1, 1).
// The original `rand() % 10 / 5.0 - 1` produced only the 10 discrete values
// {-1.0, -0.8, ..., 0.8} and included the endpoint -1, contradicting its
// own "(-1,1)" comment; this draws from a fine grid strictly inside (-1, 1).
double get_rand_weight() { return (rand() + 0.5) / (RAND_MAX + 1.0) * 2.0 - 1.0; }
// Uniform pseudo-random initial bias in the open interval (-0.01, 0.01).
// The original `rand() % 10 / 500.0 - 0.01` only covered [-0.01, 0.008]
// in steps of 0.002; this matches the documented symmetric range.
double get_rand_bias() { return ((rand() + 0.5) / (RAND_MAX + 1.0) * 2.0 - 1.0) * 0.01; }
// Network initialization: random weights on every X->B and B->Y edge,
// random biases on the hidden and output layers. Input nodes carry no bias.
// Fixes: removed the stray ';' after the function body and reserve() the
// weight vectors up front to avoid repeated reallocation during push_back.
void init() {
    for (int i = 0; i < NX; i++) {
        // input nodes: only outgoing weights (no bias, matching the BP formulation)
        x[i].weight.reserve(NB);
        for (int j = 0; j < NB; j++) {
            x[i].weight.push_back(get_rand_weight());
        }
    }
    for (int i = 0; i < NB; i++) {
        b[i].bias = get_rand_bias();
        b[i].weight.reserve(NY);
        for (int j = 0; j < NY; j++) {
            b[i].weight.push_back(get_rand_weight());
        }
    }
    for (int i = 0; i < NY; i++) {
        y[i].bias = get_rand_bias();
    }
}
// Forward pass: load trainx into the input layer, then propagate
// activations X -> B -> Y, applying sigmoid(sum - bias) at every
// hidden and output node (the bias acts as a threshold, hence the minus).
void forward() {
    // Clear accumulators left over from the previous pass.
    for (auto &h : b) h.val = 0;
    for (auto &o : y) o.val = 0;
    // Load the current sample into the input layer.
    for (int k = 0; k < NX; k++) x[k].val = trainx[k];
    // Weighted sum into each hidden node.
    for (int j = 0; j < NB; j++)
        for (int i = 0; i < NX; i++)
            b[j].val += x[i].val * x[i].weight[j];
    // Hidden activations.
    for (auto &h : b) h.val = sigmoid(h.val - h.bias);
    // Weighted sum into each output node.
    for (int j = 0; j < NY; j++)
        for (int i = 0; i < NB; i++)
            y[j].val += b[i].val * b[i].weight[j];
    // Output activations.
    for (auto &o : y) o.val = sigmoid(o.val - o.bias);
}
// Backward pass: one stochastic-gradient-descent step.
// IMPORTANT: statement order is behavior-critical — the hidden-layer error
// e[] must be computed from the OLD hidden->output weights, so all gradients
// are evaluated before any parameter is updated.
void back() {
// Output-layer gradient g[i] = y*(1-y)*(target-y): sigmoid derivative times prediction error.
for (int i = 0; i < NY; i++) {
g[i] = y[i].val * (1 - y[i].val) * (trainy[i] - y[i].val);
}
// Hidden-layer error e[i], backpropagated through the not-yet-updated weights.
for (int i = 0; i < NB; i++) {
double res = 0;
for (int j = 0; j < NY; j++) {
res += b[i].weight[j] * g[j];
}
e[i] = b[i].val * (1 - b[i].val) * res;
}
// Update hidden->output weights (w), output biases (theta),
// input->hidden weights (v), hidden biases (gamma).
// Biases move opposite to weights because forward() subtracts them.
for (int i = 0; i < NB; i++)
for (int j = 0; j < NY; j++)
b[i].weight[j] += eta * b[i].val * g[j];
for (int i = 0; i < NY; i++)
y[i].bias -= eta * g[i];
for (int i = 0; i < NX; i++)
for (int j = 0; j < NB; j++)
x[i].weight[j] += eta * x[i].val * e[j];
for (int i = 0; i < NB; i++)
b[i].bias -= eta * e[i];
}
// MNIST data streams, opened (and IDX headers skipped) in main().
FILE *fImg, *fAns;
int result[1000000] = {0}; // per-case outcome: 1 = prediction matched label, 0 = miss (only 60000 entries used)
// Train on one MNIST sample: read the next image and label, run the forward
// pass, record/print the prediction, then take one backprop step.
// Case is 1-based; every 100 cases the hit count of the last 100 is printed.
// Fixes: both fread() return values are now checked (previously a short read
// silently trained on stale buffer contents), and the argmax loop uses NY
// instead of a hard-coded 9.
void train(int Case) {
    // Each image is 28*28 = 784 = NX bytes, one unsigned byte (0..255) per
    // pixel; the 16-byte image-file header was already skipped in main().
    unsigned char img[NX], ans;
    if (fread(img, 1, NX, fImg) != (size_t)NX) {
        fprintf(stderr, "Case %d: short read on image file\n", Case);
        return;
    }
    // Scale pixel intensities to [0, 1].
    for (int i = 0; i < NX; i++) trainx[i] = (double)img[i] / 255.0;
    // One label byte per image; the 8-byte label-file header was skipped in main().
    if (fread(&ans, 1, 1, fAns) != 1) {
        fprintf(stderr, "Case %d: short read on label file\n", Case);
        return;
    }
    // One-hot encode the target digit.
    for (int i = 0; i < NY; i++) trainy[i] = (i == ans) ? 1 : 0;
    // Forward pass; prediction is the argmax over the output layer.
    forward();
    int res = 0;
    for (int i = 1; i < NY; i++)
        if (y[i].val > y[res].val)
            res = i;
    result[Case] = (res == ans) ? 1 : 0;
    // ASCII dump of the input image: non-zero pixels rendered as 'X'.
    for (int i = 0; i < 28; i++) {
        for (int j = 0; j < 28; j++) {
            if (trainx[i * 28 + j] != 0) cout << 'X';
            else cout << ' ';
        }
        cout << endl;
    }
    cout << "Test Case #" << Case <<", result is " << res << ", answer is " << (int)ans << endl;
    // One gradient-descent step.
    back();
    // Every P cases, report how many of the most recent P were correct.
    int P = 100, cnt = 0;
    if (Case % P == 0) {
        for (int i = 0; i < P; i++)
            cnt += result[Case - i];
        cout << Case << " " << cnt << endl;
    }
}
// Entry point: open the MNIST training files, skip their IDX headers,
// redirect stdout to result.txt, then run one pass over all 60000 samples.
// Fixes: fopen() results are now checked (previously a missing file caused
// fread on a null FILE*), and both files are closed before exit.
int main() {
    fImg = fopen("train-images.idx3-ubyte", "rb");
    if (!fImg) {
        fprintf(stderr, "cannot open train-images.idx3-ubyte\n");
        return 1;
    }
    fseek(fImg, 16, SEEK_SET); // skip the 16-byte IDX image-file header
    fAns = fopen("train-labels.idx1-ubyte", "rb");
    if (!fAns) {
        fprintf(stderr, "cannot open train-labels.idx1-ubyte\n");
        fclose(fImg);
        return 1;
    }
    fseek(fAns, 8, SEEK_SET); // skip the 8-byte IDX label-file header
    freopen("result.txt", "w", stdout);
    init();
    // The MNIST training set holds 60000 samples; one SGD epoch over all of them.
    for (int Case = 1; Case <= 60000; Case++) {
        train(Case);
    }
    fclose(fImg);
    fclose(fAns);
    return 0;
}
/*
 * 本文来自博客园，作者：wyl123ly
 * 原文链接：https://www.cnblogs.com/wyl123ly/p/18710199
 * (Source attribution from the original blog post; unrelated site-navigation
 *  and advertisement text from the scraped page removed.)
 */