Neural Networks
import math
import numpy as np
import pandas as pd
from pandas import DataFrame, Series

def sigmoid(x):  # activation function, maps any real input into (0, 1)
    return 1 / (1 + math.exp(-x))
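One caveat: math.exp(-x) overflows for x below roughly -709, so sigmoid() raises OverflowError on large negative inputs. The inputs in this post stay small, but a numerically stable variant is easy to substitute if needed (a sketch; stable_sigmoid is a hypothetical name, not used below):

def stable_sigmoid(x):
    # Branch on the sign of x so exp() is only ever called on a
    # non-positive argument, which cannot overflow
    if x >= 0:
        return 1 / (1 + math.exp(-x))
    z = math.exp(x)
    return z / (1 + z)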
# Training samples: two inputs per sample and one target output
x1 = [0.29, 0.50, 0.00, 0.21, 0.10, 0.06, 0.13, 0.24, 0.28]
x2 = [0.23, 0.62, 0.53, 0.53, 0.33, 0.15, 0.03, 0.23, 0.03]
y = [0.14, 0.64, 0.28, 0.33, 0.12, 0.03, 0.02, 0.11, 0.08]
yita = 0.1  # learning rate

# Initialize the weights once, outside the training loop, so updates
# accumulate across samples (re-creating them per sample would undo
# all learning and the network would never train)
W_mid = DataFrame(0.7, index=['input1', 'input2', 'theta'],
                  columns=['mid1', 'mid2', 'mid3', 'mid4'])
W_out = DataFrame(0.7, index=['input1', 'input2', 'input3', 'input4', 'theta'],
                  columns=['a'])
W_mid_delta = DataFrame(0.0, index=['input1', 'input2', 'theta'],
                        columns=['mid1', 'mid2', 'mid3', 'mid4'])
W_out_delta = DataFrame(0.0, index=['input1', 'input2', 'input3', 'input4', 'theta'],
                        columns=['a'])

for i in range(9):
    # Network input: two features plus a bias node fixed at -1
    Net_in = DataFrame(0.6, index=['input1', 'input2', 'theta'], columns=['a'])
    Out_in = DataFrame(0.0, index=['input1', 'input2', 'input3', 'input4', 'theta'],
                       columns=['a'])
    Net_in.loc['input1'] = x1[i]
    Net_in.loc['input2'] = x2[i]
    Net_in.loc['theta'] = -1
    Out_in.loc['theta'] = -1
    real = y[i]
    # Forward pass: hidden-layer outputs (inner index j avoids
    # shadowing the sample index i)
    for j in range(4):
        Out_in.iloc[j, 0] = sigmoid(sum(W_mid.iloc[:, j] * Net_in.iloc[:, 0]))
    # Forward pass: network output
    res = sigmoid(sum(Out_in.iloc[:, 0] * W_out.iloc[:, 0]))
    error = abs(res - real)  # absolute error for this sample
    # Output-layer weight update (delta rule)
    W_out_delta.iloc[:, 0] = yita * res * (1 - res) * (real - res) * Out_in.iloc[:, 0]
    W_out_delta.iloc[4, 0] = -(yita * res * (1 - res) * (real - res))  # bias weight
    W_out = W_out + W_out_delta
    # Hidden-layer weight update (error backpropagated through W_out)
    for j in range(4):
        W_mid_delta.iloc[:, j] = (yita * Out_in.iloc[j, 0] * (1 - Out_in.iloc[j, 0])
                                  * W_out.iloc[j, 0] * res * (1 - res) * (real - res)
                                  * Net_in.iloc[:, 0])
        W_mid_delta.iloc[2, j] = -(yita * Out_in.iloc[j, 0] * (1 - Out_in.iloc[j, 0])
                                   * W_out.iloc[j, 0] * res * (1 - res) * (real - res))
    W_mid = W_mid + W_mid_delta
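The updates above are one step of gradient descent on the squared error E = (real - res)^2 / 2 with sigmoid units. Differentiating through the output sigmoid gives the output delta d_out = res*(1-res)*(real-res); each hidden-to-output weight then moves by yita * d_out * (that hidden node's output). For the hidden layer the error is first carried back through the corresponding output weight, giving d_j = Out_in_j*(1-Out_in_j)*W_out_j*d_out, and each input-to-hidden weight moves by yita * d_j * (that input). The 'theta' rows take the opposite sign because the bias input is fixed at -1, so the delta-times-input product flips sign.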
# Test samples: run the trained network forward on two new points
testx1 = [0.38, 0.29]
testx2 = [0.49, 0.47]
for i in range(2):
    Net_in = DataFrame(0.6, index=['input1', 'input2', 'theta'], columns=['a'])
    Out_in = DataFrame(0.0, index=['input1', 'input2', 'input3', 'input4', 'theta'],
                       columns=['a'])
    Net_in.loc['input1'] = testx1[i]
    Net_in.loc['input2'] = testx2[i]
    Net_in.loc['theta'] = -1
    Out_in.loc['theta'] = -1
    for j in range(4):
        Out_in.iloc[j, 0] = sigmoid(sum(W_mid.iloc[:, j] * Net_in.iloc[:, 0]))  # hidden-layer outputs
    res = sigmoid(sum(Out_in.iloc[:, 0] * W_out.iloc[:, 0]))  # network output
    print(res)
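The training and test loops repeat the same forward pass by hand. A small helper keeps the two consistent (a sketch, assuming the trained W_mid and W_out above are in scope; forward is a hypothetical name):

def forward(a, b):
    # One forward pass through the trained two-input network
    Net_in = DataFrame([[a], [b], [-1]], index=['input1', 'input2', 'theta'], columns=['a'])
    Out_in = DataFrame(0.0, index=['input1', 'input2', 'input3', 'input4', 'theta'], columns=['a'])
    Out_in.loc['theta'] = -1
    for j in range(4):
        Out_in.iloc[j, 0] = sigmoid(sum(W_mid.iloc[:, j] * Net_in.iloc[:, 0]))
    return sigmoid(sum(Out_in.iloc[:, 0] * W_out.iloc[:, 0]))

for a, b in zip(testx1, testx2):
    print(forward(a, b))

The second listing below builds the same ideas into a reusable class, using NumPy matrices for an MNIST-style digit-recognition task.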
import numpy
import scipy.special
import scipy.misc
import matplotlib.pyplot
import scipy.ndimage
import math
import pandas as pd
from pandas import DataFrame,Series
# Neural network class definition
class NeuralNetwork():
    def __init__(self, inputnodes, hiddennodes, outputnodes, learningrate):
        self.inodes = inputnodes
        self.hnodes = hiddennodes
        self.onodes = outputnodes
        self.lr = learningrate
        # Weights drawn from a normal distribution with standard
        # deviation 1/sqrt(number of nodes in the next layer)
        self.wih = numpy.random.normal(0.0, pow(self.hnodes, -0.5), (self.hnodes, self.inodes))
        self.who = numpy.random.normal(0.0, pow(self.onodes, -0.5), (self.onodes, self.hnodes))
        # Activation function: the logistic sigmoid
        self.activation_function = lambda x: scipy.special.expit(x)

    def train(self, input_list, target_list):  # train the network on one sample
        # Convert the input/target lists to 2-D column vectors
        inputs = numpy.array(input_list, ndmin=2).T
        targets = numpy.array(target_list, ndmin=2).T
        hidden_inputs = numpy.dot(self.wih, inputs)                # signals into the hidden layer
        hidden_outputs = self.activation_function(hidden_inputs)   # signals out of the hidden layer
        final_inputs = numpy.dot(self.who, hidden_outputs)         # signals into the output layer
        final_outputs = self.activation_function(final_inputs)
        output_errors = targets - final_outputs
        # Hidden-layer error: output errors propagated back through the weights
        hidden_errors = numpy.dot(self.who.T, output_errors)
        # Update the hidden-to-output weights
        self.who += self.lr * numpy.dot(output_errors * final_outputs * (1.0 - final_outputs),
                                        numpy.transpose(hidden_outputs))
        # Update the input-to-hidden weights
        self.wih += self.lr * numpy.dot(hidden_errors * hidden_outputs * (1.0 - hidden_outputs),
                                        numpy.transpose(inputs))

    def query(self, input_list):  # run a forward pass
        inputs = numpy.array(input_list, ndmin=2).T  # convert the input list to a 2-D column vector
        hidden_inputs = numpy.dot(self.wih, inputs)  # signals into the hidden layer
        hidden_outputs = self.activation_function(hidden_inputs)  # signals out of the hidden layer
        final_inputs = numpy.dot(self.who, hidden_outputs)  # signals into the output layer
        final_outputs = self.activation_function(final_inputs)
        return final_outputs
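Before the MNIST run, the class can be smoke-tested on toy data (a sketch; the layer sizes here are arbitrary, not the ones used below):

net = NeuralNetwork(3, 5, 2, 0.3)          # 3 inputs, 5 hidden nodes, 2 outputs
net.train([0.9, 0.1, 0.5], [0.99, 0.01])   # one gradient step toward the target
print(net.query([0.9, 0.1, 0.5]))          # a (2, 1) column vector of outputs in (0, 1)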
# Set the number of nodes in each layer
input_nodes = 784
hidden_nodes = 200
output_nodes = 10
learning_rate = 0.1  # set the learning rate to 0.1

n = NeuralNetwork(input_nodes, hidden_nodes, output_nodes, learning_rate)  # create the network

# Read the training data set into a list of lines
training_data_file = open(r'D:\人工智能\3.3 data_tr.txt')
training_data_list = training_data_file.readlines()
training_data_file.close()
print(training_data_list[0])  # inspect the first record
# Train the network, one record at a time
for record in training_data_list:
    all_values = record.split(',')  # split the CSV text on commas
    # Convert the pixel text strings to floats scaled into [0.01, 1.00];
    # the +0.01 offset avoids zero inputs, which kill weight updates
    inputs = (numpy.asfarray(all_values[1:]) / 255.0 * 0.99) + 0.01
    # Target vector of length output_nodes: all 0.01 except 0.99
    # at the index of the correct label
    targets = numpy.zeros(output_nodes) + 0.01
    targets[int(all_values[0])] = 0.99
    n.train(inputs, targets)
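Accuracy typically improves if the training set is passed through more than once. A sketch of the usual extension (epochs = 5 is an assumed value, not from the original):

epochs = 5  # assumed value; tune against the test accuracy below
for e in range(epochs):
    for record in training_data_list:
        all_values = record.split(',')
        inputs = (numpy.asfarray(all_values[1:]) / 255.0 * 0.99) + 0.01
        targets = numpy.zeros(output_nodes) + 0.01
        targets[int(all_values[0])] = 0.99
        n.train(inputs, targets)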
# Read the test file; readlines() loads every line of the file into a
# list, one element per line, which can be memory-hungry for large files
test_data_file = open(r'D:\人工智能\3.3 data_te.txt')
test_data_list = test_data_file.readlines()
test_data_file.close()

scorecard = []  # per-record results on the test set
total = 0
correct = 0
for record in test_data_list:
    total += 1
    all_values = record.split(',')  # split the record on commas
    correct_label = int(all_values[0])  # the true digit
    # Format the input list: all_values[1:] skips the label, and
    # numpy.asfarray() converts the text strings into a float array
    inputs = (numpy.asfarray(all_values[1:]) / 255.0 * 0.99) + 0.01
    outputs = n.query(inputs)
    label = numpy.argmax(outputs)  # index of the highest output = predicted digit
    if label == correct_label:
        scorecard.append(1)
        correct += 1
    else:
        scorecard.append(0)
print(scorecard)
print('Accuracy:', (correct / total) * 100, '%')
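matplotlib.pyplot and scipy.ndimage are imported at the top but never used in this listing; they are typically used to view a record as a 28x28 image and to augment the training set with small rotations. A sketch, assuming test_data_list is still in scope:

all_values = test_data_list[0].split(',')
image = numpy.asfarray(all_values[1:]).reshape((28, 28))
matplotlib.pyplot.imshow(image, cmap='Greys', interpolation='None')
matplotlib.pyplot.show()

# A rotated copy (+10 degrees) can serve as an extra training sample
rotated = scipy.ndimage.rotate(image, 10.0, cval=0.01, reshape=False)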