import numpy
import math
import scipy.special  # special-functions module (provides expit, the logistic sigmoid)
import matplotlib.pyplot as plt
# Neural-network class, so it can be instantiated with different configurations
class BP_mnist:
    def __init__(self, input_nodes, hidden_nodes, output_nodes, learning_rate):
        # Initialise the number of input, hidden and output nodes, and the learning rate
self.inodes = input_nodes
self.hnodes = hidden_nodes
self.onodes = output_nodes
self.learning_rate = learning_rate
        # self.w_input_hidden = numpy.random.normal(0, pow(self.hnodes, -0.5), (self.hnodes, self.inodes))
        # self.w_hidden_output = numpy.random.normal(0, pow(self.onodes, -0.5), (self.onodes, self.hnodes))
        # Initial weights: small pseudo-random numbers drawn from a Gaussian distribution
        # w_input_hidden has one row per hidden neuron and one column per input neuron
        self.w_input_hidden = numpy.random.normal(0, 1, (self.hnodes, self.inodes))
        self.w_hidden_output = numpy.random.normal(0, 1, (self.onodes, self.hnodes))
        # Activation function: applies the sigmoid 1 / (1 + exp(-x)) element-wise to a whole array
        self.sigmoid = lambda x: scipy.special.expit(x)
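    # Forward pass implemented by train() and test() below:
    #   hidden_outputs = sigmoid(w_input_hidden . inputs)
    #   final_outputs  = sigmoid(w_hidden_output . hidden_outputs)
    # where inputs is a column vector of shape (inodes, 1).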
    def train(self, input_list, target_list):
        # inputs = numpy.array(input_list, ndmin=2).T  # force at least 2 dimensions, i.e. promote the 1-D array
        inputs = input_list[:, numpy.newaxis]  # add an axis: shape (inodes,) -> (inodes, 1)
        # targets = numpy.array(target_list, ndmin=2).T
        targets = target_list[:, numpy.newaxis]
        hidden_inputs = numpy.dot(self.w_input_hidden, inputs)  # weighted sums into the hidden layer (matrix-vector product)
        hidden_outputs = self.sigmoid(hidden_inputs)            # hidden-layer activations
        final_inputs = numpy.dot(self.w_hidden_output, hidden_outputs)
        final_outputs = self.sigmoid(final_inputs)
        output_errors = targets - final_outputs  # error at the output layer
        hidden_errors = numpy.dot(self.w_hidden_output.T, output_errors)  # propagate the output errors back to the hidden layer
        sum_errors = round(sum(0.5 * output_errors.T[0, :] ** 2), 4)  # total squared error for this sample
        # Update the weights by steepest descent (back-propagation)
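        # Update rule used below: gradient descent on the squared error E = 0.5 * sum(e^2),
        # with sigmoid'(x) = y * (1 - y) where y = sigmoid(x). Each weight matrix moves by
        #   delta_W = learning_rate * (layer_error * layer_output * (1 - layer_output)) . layer_input^T
        # where the hidden-layer error is taken as w_hidden_output.T . output_errors,
        # i.e. the output errors split back in proportion to the connecting weights.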
        self.w_input_hidden += self.learning_rate * numpy.dot((hidden_errors * hidden_outputs * (1 - hidden_outputs)), inputs.T)
        self.w_hidden_output += self.learning_rate * numpy.dot((output_errors * final_outputs * (1 - final_outputs)), hidden_outputs.T)
        return sum_errors / len(input_list)  # error for this sample, scaled by the number of input values
    def test(self, input_list):
        # inputs = numpy.array(input_list, ndmin=2).T
        inputs = input_list[:, numpy.newaxis]  # add an axis: shape (inodes,) -> (inodes, 1)
        hidden_inputs = numpy.dot(self.w_input_hidden, inputs)
        hidden_outputs = self.sigmoid(hidden_inputs)
        final_inputs = numpy.dot(self.w_hidden_output, hidden_outputs)
        final_outputs = self.sigmoid(final_inputs)
        result = numpy.argmax(final_outputs)  # index of the largest output, i.e. the predicted digit
return result
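# Minimal usage sketch (illustrative only, not part of the original pipeline): `pixels` is
# assumed to be a NumPy array of 784 raw pixel values in [0, 255] and `label` its true digit.
#   net = BP_mnist(784, 50, 10, 0.1)
#   targets = numpy.zeros(10)
#   targets[label] = 1
#   net.train(pixels / 255, targets)
#   predicted_digit = net.test(pixels / 255)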
def main(hidden_nodes, learning_rate, path, epochs, sequence=0, sample_numbers=None):
    input_nodes = 784  # input layer: 28 x 28 pixels
    output_nodes = 10  # output layer: digits 0-9
    mnist = BP_mnist(input_nodes, hidden_nodes, output_nodes, learning_rate)
    # Load the training data
    training_data_file = open(path, 'r')
    training_data_list = training_data_file.readlines()
    training_data_file.close()
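    # Each line of the CSV files is assumed to hold one sample: the true digit (label)
    # followed by 784 comma-separated pixel values in 0-255, as parsed below.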
    # Optionally limit the number of training samples used
    if sample_numbers is not None and sample_numbers <= len(training_data_list):
        training_data_list = training_data_list[:sample_numbers]
    if sequence:
        training_data_list.reverse()  # optionally train on the samples in reverse order
    # Load the test data
    test_data_file = open('test.csv', 'r')
    test_data_list = test_data_file.readlines()
    test_data_file.close()
    error_min = 0.01  # training stops early once the epoch error falls below this threshold
"""训练"""
#print("*********************training*************************")
    for e in range(epochs):
        error = 0
        for record in training_data_list:
            all_values = record.split(',')  # split one sample into its values (column 0 is the true label)
            inputs = numpy.asarray(all_values[1:], dtype=float) / 255  # preprocessing: normalise the pixel values
            targets = numpy.zeros(output_nodes)   # target vector of all zeros ...
            targets[int(all_values[0])] = 1       # ... except a 1 at the index of the true digit
            # Train the network (update the weights) and accumulate the error over the sample set
            error += mnist.train(inputs, targets)
        print("epoch=%d, error=%f" % (e + 1, error))
        if error < error_min:  # stop early once the total error is small enough
            break
"""测试"""
#print("**********************testing*************************")
correct = 0
for record in test_data_list:
all_values = record.split(',')
correct_number = int(all_values[0])
        inputs = numpy.asarray(all_values[1:], dtype=float) / 255
        result = mnist.test(inputs)
        if result == correct_number:  # count correct predictions
            correct = correct + 1
    print("Number of epochs: %d, accuracy: %.2f%%" % (epochs, correct * 100 / len(test_data_list)))
    print("Hidden-layer neurons: %d, learning rate: %.2f, training samples: %d, epochs: %d" % (hidden_nodes, learning_rate, len(training_data_list), epochs))
    print("Out of %d test samples, %d were classified correctly, accuracy: %.2f%%" % (len(test_data_list), correct, correct * 100 / len(test_data_list)))
print("***************************************************************")
return round(correct / len(test_data_list), 2)
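# main() returns the test accuracy as a fraction rounded to two decimals;
# for example, main(50, 0.1, 'train.csv', 10) trains a 784-50-10 network for up to 10 epochs.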
if __name__ == "__main__":
    # main(hidden_nodes, learning_rate, path, epochs, sequence=0, sample_numbers=None)
    k = 4  # select which of the experiments below to run
    if k == 1:
        '''Effect of the number of hidden-layer neurons on the prediction accuracy'''
bp_list = []
accuracy_list = []
        for i in range(1, 15):  # number of hidden neurons: 10, 20, ..., 140
            result = main(i * 10, 0.1, 'train.csv', 1000, 100)  # the fifth argument (sequence) reverses the training order
            bp_list.append(i * 10)
            accuracy_list.append(result)
plt.plot(bp_list,accuracy_list)
plt.xlabel('nodes_numbers')
plt.ylabel('accuracy')
plt.title('The effect of the number of neurons in the hidden layer on the accuracy')
    elif k == 2:
        '''Effect of the learning rate on the prediction accuracy'''
bp_list = []
accuracy_list = []
        for i in range(0, 11):  # learning rates 0.01, 0.03, ..., 0.21
            result = main(50, i * 0.02 + 0.01, 'train.csv', 100)
            bp_list.append(i * 0.02 + 0.01)
            accuracy_list.append(result)
plt.plot(bp_list,accuracy_list)
plt.xlabel('learning_rate')
plt.ylabel('accuracy')
plt.title('The effect of the learning_rate on the accuracy')
    elif k == 3:
        '''Effect of the number of training samples on the prediction accuracy'''
bp_list = []
accuracy_list = []
        for i in range(1, 11):  # number of training samples: 1000, 2000, ..., 10000
            result = main(50, 0.1, 'train-14000+.csv', 100, sample_numbers=1000 * i)
            bp_list.append(1000 * i)
            accuracy_list.append(result)
plt.plot(bp_list,accuracy_list)
plt.xlabel('sample_numbers')
plt.ylabel('accuracy')
plt.title('The effect of the sample_numbers on the accuracy')
    elif k == 4:
        '''Effect of the number of training epochs on the prediction accuracy'''
bp_list = []
accuracy_list = []
        for i in range(1, 12):  # number of epochs: 10, 20, ..., 110
            result = main(50, 0.2, 'train.csv', i * 10)
            bp_list.append(10 * i)
accuracy_list.append(result)
plt.plot(bp_list,accuracy_list)
plt.xlabel('epochs_number')
plt.ylabel('accuracy')
plt.title('The effect of the number of epochs on the accuracy')
plt.show()