Recap
In [35]:
import numpy as np
#scipy.special for the sigmoid function expit()
import scipy.special
Plotting an array
In [36]:
import numpy as np
import matplotlib.pyplot as plt
a = np.zeros([3,2])
a[0,1]=2
plt.imshow(a,interpolation="nearest")
# plt.imshow(a)
Out[36]:
<matplotlib.image.AxesImage at 0x195185612e0>

Making a Neural Network with Python
1. The network framework:
- Initialisation: set the number of input-layer, hidden-layer, and output-layer nodes
- Train: refine the weights after learning from the given training-set examples
- Query: given an input, return an answer from the output nodes
In [37]:
# code skeleton for the neural network
class neuralNetwork:
    # initialise the neural network
    def __init__(self):
        pass
    # train the neural network
    def train(self):
        pass
    # query the neural network
    def query(self):
        pass
2. Initialising the network
- Set the number of input-layer, hidden-layer, and output-layer nodes
- These node-count parameters define the shape and size of the neural network
- Set the learning rate
In [38]:
# initialise the neural network
def __init__(self, inputnodes, hiddennodes, outputnodes, learningrate):
    # set number of nodes in each input, hidden, output layer
    self.inodes = inputnodes
    self.hnodes = hiddennodes
    self.onodes = outputnodes
    # learning rate
    self.lr = learningrate
    pass
Putting it together
In [39]:
# code skeleton for the neural network
class neuralNetwork:
    # initialise the neural network
    def __init__(self, inputnodes, hiddennodes, outputnodes, learningrate):
        # set number of nodes in each input, hidden, output layer
        self.inodes = inputnodes
        self.hnodes = hiddennodes
        self.onodes = outputnodes
        # learning rate
        self.lr = learningrate
        pass
    # train the neural network
    def train(self):
        pass
    # query the neural network
    def query(self):
        pass
In [40]:
# example parameters: create a small neural network object with 3 nodes in each layer and a learning rate of 0.5
# number of input, hidden and output nodes
input_nodes = 3
hidden_nodes = 3
output_nodes = 3
# learning rate is 0.5
learning_rate = 0.5
# create instance of neural network:
n = neuralNetwork(input_nodes, hidden_nodes, output_nodes, learning_rate)
# the instance doesn't do much yet, because the methods haven't been written
3. Weights: the heart of the network
- Create the network's nodes and the links between them, with a weight on each link. The network uses the weights to compute the forward pass and to propagate errors backwards, and then improves the weights
- Link weight matrix between the input and hidden layers: wih, of shape (hnodes, inodes)
- Link weight matrix between the hidden and output layers: who, of shape (onodes, hnodes)
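The w11 w21 / w12 w22 layout mentioned in the code comments below corresponds to this matrix, where $w_{i,j}$ is the weight on the link from node $i$ to node $j$ in the next layer (shown here for two nodes per layer):
$$
W = \begin{pmatrix} w_{1,1} & w_{2,1} \\ w_{1,2} & w_{2,2} \end{pmatrix}
$$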
In [41]:
import numpy as np
# np.random.rand(rows, columns)
# np.random.rand(3,3)  # a 3x3 matrix of values between 0 and 1
np.random.rand(3,3) - 0.5  # a 3x3 matrix of values between -0.5 and 0.5
# usable for initialising the weight matrices (uniform distribution):
# link weight matrices, wih and who
# weights inside the arrays are w_i_j, where link is from node i to node j in the next layer
# w11 w21
# w12 w22 etc
# commented out here; these lines go into __init__ later
# self.wih = (np.random.rand(self.hnodes, self.inodes) - 0.5)
# self.who = (np.random.rand(self.onodes, self.hnodes) - 0.5)
Out[41]:
array([[-0.12651293,  0.11044255,  0.35225612],
       [-0.37125033,  0.09278935,  0.05095061],
       [-0.14624907,  0.17131378,  0.39858423]])
In [42]:
# another way to initialise the weights (normal distribution):
# pow(number of nodes in the next layer, -0.5) --> inverse square root
# self.wih = np.random.normal(0.0, pow(self.hnodes, -0.5), (self.hnodes, self.inodes))
# self.who = np.random.normal(0.0, pow(self.onodes, -0.5), (self.onodes, self.hnodes))
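As a quick sanity check (an addition, not in the original notebook), the two initialisation schemes can be compared side by side; a 3-3-3 network is assumed:

import numpy as np

inodes, hnodes = 3, 3
# uniform scheme: values in [-0.5, 0.5)
wih_uniform = np.random.rand(hnodes, inodes) - 0.5
# normal scheme: mean 0.0, standard deviation 1/sqrt(number of nodes in the next layer)
wih_normal = np.random.normal(0.0, pow(hnodes, -0.5), (hnodes, inodes))
print(wih_uniform.shape, wih_normal.shape)  # (3, 3) (3, 3)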
Putting it together
In [43]:
# code skeleton for the neural network
class neuralNetwork:
    # initialise the neural network
    def __init__(self, inputnodes, hiddennodes, outputnodes, learningrate):
        # set number of nodes in each input, hidden, output layer
        self.inodes = inputnodes
        self.hnodes = hiddennodes
        self.onodes = outputnodes
        # link weight matrices, wih and who
        # weights inside the arrays are w_i_j, where link is from node i to node j in the next layer
        # w11 w21
        # w12 w22 etc
        self.wih = (np.random.rand(self.hnodes, self.inodes) - 0.5)
        self.who = (np.random.rand(self.onodes, self.hnodes) - 0.5)
        # learning rate
        self.lr = learningrate
        pass
    # train the neural network
    def train(self):
        pass
    # query the neural network
    def query(self):
        pass
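A quick shape check (an addition, not in the original) makes the corrected who shape visible; a 3-4-2 network is assumed here:

nn = neuralNetwork(3, 4, 2, 0.3)
print(nn.wih.shape)  # (4, 3): hidden rows x input columns
print(nn.who.shape)  # (2, 4): output rows x hidden columns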
4. Querying the network
- Write the query() function: it takes the network's input and returns the network's output
- Signal flow: input -> input layer nodes -> hidden layer nodes -> output layer nodes -> output (note: this is not training!)
- Apply the link weights, e.g.:
$$
X_{hidden} = W_{input\_hidden} \cdot I_{input}
$$
  In numpy: hidden_inputs = np.dot(self.wih, inputs)
- Activation function, e.g. the sigmoid. The scipy library provides the sigmoid as expit(). The activation function must be chosen at initialisation time:

  # scipy.special for the sigmoid function expit()
  import scipy.special
  # activation function is the sigmoid function
  # named self.activation_function()
  self.activation_function = lambda x: scipy.special.expit(x)  # anonymous function, placed in __init__
  # apply it to the signals about to enter the hidden layer nodes
  # calculate the signals emerging from hidden layer
  hidden_outputs = self.activation_function(hidden_inputs)

- Putting together the code for the query part:

  # input layer to hidden layer
  # calculate signals into hidden layer
  hidden_inputs = np.dot(self.wih, inputs)
  # calculate the signals emerging from hidden layer
  hidden_outputs = self.activation_function(hidden_inputs)
  # hidden layer to output layer, final output
  # calculate signals into final output layer
  final_inputs = np.dot(self.who, hidden_outputs)
  # calculate the signals emerging from final output layer
  final_outputs = self.activation_function(final_inputs)
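For reference, the whole forward pass in equation form; the sigmoid definition is the standard one, and the layer equations mirror the code above:
$$
\sigma(x) = \frac{1}{1+e^{-x}}, \quad
O_{hidden} = \sigma(X_{hidden}), \quad
X_{output} = W_{hidden\_output} \cdot O_{hidden}, \quad
O_{output} = \sigma(X_{output})
$$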
Putting it together
In [44]:
# code skeleton for the neural network
class neuralNetwork:
    # initialise the neural network
    def __init__(self, inputnodes, hiddennodes, outputnodes, learningrate):
        # set number of nodes in each input, hidden, output layer
        self.inodes = inputnodes
        self.hnodes = hiddennodes
        self.onodes = outputnodes
        # link weight matrices, wih and who
        # weights inside the arrays are w_i_j, where link is from node i to node j in the next layer
        # w11 w21
        # w12 w22 etc
        self.wih = (np.random.rand(self.hnodes, self.inodes) - 0.5)
        self.who = (np.random.rand(self.onodes, self.hnodes) - 0.5)
        # learning rate
        self.lr = learningrate
        # activation function is the sigmoid function
        # named self.activation_function()
        self.activation_function = lambda x: scipy.special.expit(x)  # anonymous function, defined in __init__
        pass
    # train the neural network
    def train(self):
        pass
    # query the neural network
    def query(self, inputs_list):
        # convert inputs list to 2d array
        inputs = np.array(inputs_list, ndmin=2).T
        # input layer to hidden layer
        # calculate signals into hidden layer
        hidden_inputs = np.dot(self.wih, inputs)
        # calculate the signals emerging from hidden layer
        hidden_outputs = self.activation_function(hidden_inputs)
        # hidden layer to output layer, final output
        # calculate signals into final output layer
        final_inputs = np.dot(self.who, hidden_outputs)
        # calculate the signals emerging from final output layer
        final_outputs = self.activation_function(final_inputs)
        return final_outputs
In [45]:
# test
# example parameters: create a small neural network object with 3 nodes in each layer and a learning rate of 0.3
# number of input, hidden and output nodes
input_nodes = 3
hidden_nodes = 3
output_nodes = 3
# learning rate is 0.3
learning_rate = 0.3
# create instance of neural network:
n = neuralNetwork(input_nodes, hidden_nodes, output_nodes, learning_rate)
# the instance still can't do much, because the train method hasn't been written yet
In [46]:
n.query([1.0, 0.5, -1.5])
# querying isn't meaningful yet: the network is untrained, so this output only reflects the initial random weights
Out[46]:
array([[0.44087252],
       [0.45725042],
       [0.4933395 ]])
5. Training the network
- First, just as in query(): feed in a training example and compute the output
- Then compare the computed output with the desired output, and use the difference (the error) to guide the updating of the network's weights
Part 1:
In [47]:
# modelled on the query() function: part 1 of train()
# train the neural network
def train(self, inputs_list, targets_list):
    # convert inputs list to 2d array
    inputs = np.array(inputs_list, ndmin=2).T  # convert the list to a numpy array
    # the true y values from the training set
    targets = np.array(targets_list, ndmin=2).T  # convert the list to a numpy array
    # input layer to hidden layer
    # calculate signals into hidden layer
    hidden_inputs = np.dot(self.wih, inputs)
    # calculate the signals emerging from hidden layer
    hidden_outputs = self.activation_function(hidden_inputs)
    # hidden layer to output layer, final output
    # calculate signals into final output layer
    final_inputs = np.dot(self.who, hidden_outputs)
    # calculate the signals emerging from final output layer
    final_outputs = self.activation_function(final_inputs)
    pass
Part 2: compute the error.
# output layer error is the (target - actual)
output_errors = targets - final_outputs
The error recombined at each hidden-layer node:
# hidden layer error is the output_errors, split by weights, recombined at hidden nodes
hidden_errors = np.dot(self.who.T, output_errors)
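In matrix form, splitting the output error back across the links is a single multiplication by the transposed weight matrix:
$$
errors_{hidden} = W_{hidden\_output}^{T} \cdot errors_{output}
$$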
The matrix expression for updating the link weights between node j and node k in the next layer:
# update the weights for the links between the hidden and output layers
self.who += self.lr * np.dot((output_errors * final_outputs * (1.0 - final_outputs)), np.transpose(hidden_outputs))
# and likewise
# update the weights for the links between the input and hidden layers
self.wih += self.lr * np.dot((hidden_errors * hidden_outputs * (1.0 - hidden_outputs)), np.transpose(inputs))
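Written out as an equation, with $\alpha$ the learning rate self.lr and all products except the final dot taken element-wise, the update mirrors the code exactly:
$$
\Delta W_{hidden\_output} = \alpha \cdot \big( E_{output} * O_{output} * (1 - O_{output}) \big) \cdot O_{hidden}^{T}
$$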
Putting it together: the complete neural network
In [48]:
# the complete neural network
class neuralNetwork:
    # initialise the neural network
    def __init__(self, inputnodes, hiddennodes, outputnodes, learningrate):
        # set number of nodes in each input, hidden, output layer
        self.inodes = inputnodes
        self.hnodes = hiddennodes
        self.onodes = outputnodes
        # link weight matrices, wih and who
        # weights inside the arrays are w_i_j, where link is from node i to node j in the next layer
        # w11 w21
        # w12 w22 etc
        self.wih = (np.random.rand(self.hnodes, self.inodes) - 0.5)
        self.who = (np.random.rand(self.onodes, self.hnodes) - 0.5)
        # learning rate
        self.lr = learningrate
        # activation function is the sigmoid function
        # named self.activation_function()
        self.activation_function = lambda x: scipy.special.expit(x)  # anonymous function, defined in __init__
        pass
    # train the neural network
    def train(self, inputs_list, targets_list):
        # convert inputs list to 2d array
        inputs = np.array(inputs_list, ndmin=2).T  # convert the list to a numpy array
        # the true y values from the training set
        targets = np.array(targets_list, ndmin=2).T  # convert the list to a numpy array
        # input layer to hidden layer
        # calculate signals into hidden layer
        hidden_inputs = np.dot(self.wih, inputs)
        # calculate the signals emerging from hidden layer
        hidden_outputs = self.activation_function(hidden_inputs)
        # hidden layer to output layer, final output
        # calculate signals into final output layer
        final_inputs = np.dot(self.who, hidden_outputs)
        # calculate the signals emerging from final output layer
        final_outputs = self.activation_function(final_inputs)
        # output layer error is the (target - actual)
        output_errors = targets - final_outputs
        # hidden layer error is the output_errors, split by weights, recombined at hidden nodes
        hidden_errors = np.dot(self.who.T, output_errors)
        # update the weights for the links between the hidden and output layers
        self.who += self.lr * np.dot((output_errors * final_outputs * (1.0 - final_outputs)), np.transpose(hidden_outputs))
        # update the weights for the links between the input and hidden layers
        self.wih += self.lr * np.dot((hidden_errors * hidden_outputs * (1.0 - hidden_outputs)), np.transpose(inputs))
        pass
    # query the neural network
    def query(self, inputs_list):
        # convert inputs list to 2d array
        inputs = np.array(inputs_list, ndmin=2).T
        # input layer to hidden layer
        # calculate signals into hidden layer
        hidden_inputs = np.dot(self.wih, inputs)
        # calculate the signals emerging from hidden layer
        hidden_outputs = self.activation_function(hidden_inputs)
        # hidden layer to output layer, final output
        # calculate signals into final output layer
        final_inputs = np.dot(self.who, hidden_outputs)
        # calculate the signals emerging from final output layer
        final_outputs = self.activation_function(final_inputs)
        return final_outputs
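A minimal usage sketch (an addition, not in the original notebook): repeatedly training the network on a single made-up example should pull the queried output towards the target. The sample values here are purely illustrative.

# assumes the class definition and imports above have been executed
n = neuralNetwork(3, 3, 3, 0.3)
sample = [0.9, 0.1, 0.8]     # made-up input values
target = [0.99, 0.01, 0.01]  # made-up desired output
print("before:", n.query(sample).ravel())
for _ in range(1000):        # many passes over the same example
    n.train(sample, target)
print("after: ", n.query(sample).ravel())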