caffe python 接口api
目录
caffe 某层featuremap ,和权重
featuremap_1 = net.blobs['conv1_1'].data  # feature-map output of one layer
filters = net.params['conv1_1'][0].data  # weights of one layer (index 0 = weight blob)
只有 deploy(prototxt)文件、没有训练好的权重时,直接初始化网络并保存 caffemodel
# Build a net from the deploy prototxt alone (weights are randomly initialized);
# equivalent keyword form: caffe.Net(path_deply, phase=caffe.TEST)
net = caffe.Net(path_deply, caffe.TEST)
# Serialize the (initialized) parameters straight to a caffemodel file.
net.save("./111.caffemodel")
保存caffe权重到pkl文件
import pickle as pkl

# --- Inspect feature-map and parameter shapes --------------------------------
# NOTE: the original mixed Python-2 `iteritems()`/`print` statements with
# Python-3 `print()` calls; normalized here to Python 3.
print("=================feature map===================")
for layer_name, blob in net.blobs.items():
    # Output feature-map shape of each layer.
    print(layer_name + '\t' + str(blob.data.shape))
print("=================weights===================")
for layer_name, params in net.params.items():
    print(layer_name + " has " + str(len(params)) + " params")
    for i in range(len(params)):
        print(layer_name + ' idx= ' + str(i) + '\t' + str(params[i].data.shape))

############################################################################################3
# --- Convert every layer's parameters into a plain dict of numpy arrays ------
name_weights = {}
# keys.txt records a human-readable summary of every parameter's shape.
with open('keys.txt', 'w') as keys:
    keys.write('generated by Net-Caffe/convert_to_pkl.py\n\n')
    for param_name in net.params.keys():
        name_weights[param_name] = {}
        layer_params = net.params[param_name]
        if len(layer_params) == 1:
            # One blob: a deconvolution layer (weight only, no bias).
            weight = layer_params[0].data
            name_weights[param_name]['weight'] = weight
            print('%s:\n\t%s (weight)' % (param_name, weight.shape))
            keys.write('%s:\n\t%s (weight)\n' % (param_name, weight.shape))
        elif len(layer_params) == 2:
            # Two blobs: convolution or fully-connected layer (weight + bias).
            weight = layer_params[0].data
            name_weights[param_name]['weight'] = weight
            bias = layer_params[1].data
            name_weights[param_name]['bias'] = bias
            print('%s:\n\t%s (weight)' % (param_name, weight.shape))
            print('\t%s (bias)' % str(bias.shape))
            keys.write('%s:\n\t%s (weight)\n' % (param_name, weight.shape))
            keys.write('\t%s (bias)\n' % str(bias.shape))
        elif len(layer_params) == 5:
            # Five blobs: a fused BatchNorm+Scale layer:
            #   [0] running_mean, [1] running_var, [2] moving-average scale factor,
            #   [3] gamma (weight), [4] beta (bias).
            # Caffe stores un-normalized statistics, so divide by the scale factor.
            scale_factor = layer_params[2].data
            running_mean = layer_params[0].data
            name_weights[param_name]['running_mean'] = running_mean / scale_factor
            running_var = layer_params[1].data
            name_weights[param_name]['running_var'] = running_var / scale_factor
            print("bn=", scale_factor)
            name_weights[param_name]['weight'] = layer_params[3].data
            name_weights[param_name]['bias'] = layer_params[4].data
            print('%s:\n\t%s (running_var)' % (param_name, running_var.shape),)
            print('\t%s (running_mean)' % str(running_mean.shape))
            keys.write('%s:\n\t%s (running_var)\n' % (param_name, running_var.shape))
            keys.write('\t%s (running_mean)\n' % str(running_mean.shape))
            keys.write('\t%s (weight)\n' % str(layer_params[3].data.shape))
            keys.write('\t%s (bias)\n' % str(layer_params[4].data.shape))
        else:
            # Any other count means the layer type was not anticipated above.
            raise RuntimeError("layer %s has unexpected param count %d"
                               % (param_name, len(layer_params)))
# Persist the converted weights; protocol=2 keeps the pickle Python-2 readable.
with open('weights20220420.pkl', 'wb') as f:
    pkl.dump(name_weights, f, protocol=2)
将提取到的 Caffe 权重 pkl 文件加载到 PyTorch 模型中:
import torch
import torchvision
import pickle as pkl
from torch import nn
import torch.nn.functional as F
import pickle as pkl
import cv2
import numpy as np
if __name__ == '__main__':
    # NOTE(review): `net` must be the model class imported elsewhere; calling it
    # and rebinding the same name only works once — verify against the caller.
    net = net()
    # net.eval()

    # Print the parameter layout for manual comparison with the Caffe layer names.
    print("*" * 50)
    for index, (name, param) in enumerate(net.named_parameters()):
        print(str(index) + ':', name, param.size())
    print("*" * 50)
    for k, v in net.state_dict().items():
        print(k)
        print(v.shape)
    print("@" * 50)

    from torch import from_numpy
    with open('/media/algo/data_1/project/net_my_pytorch/bk/0420all_tiqu_convet/weights20220420.pkl', 'rb') as wp:
        # latin1 lets Python 3 read a pickle written by Python 2.
        name_weights = pkl.load(wp, encoding='latin1')

    state_dict = {}

    def _copy_conv(pt_name, caffe_name, bias=True):
        """Copy a conv/fc weight (and optionally bias) blob into state_dict."""
        state_dict[pt_name + '.weight'] = from_numpy(name_weights[caffe_name]['weight'])
        if bias:
            state_dict[pt_name + '.bias'] = from_numpy(name_weights[caffe_name]['bias'])

    def _copy_bn(pt_name, caffe_name):
        """Copy all four BatchNorm buffers/params into state_dict."""
        for key in ('running_var', 'running_mean', 'weight', 'bias'):
            state_dict[pt_name + '.' + key] = from_numpy(name_weights[caffe_name][key])

    # Backbone: convN_1 / convN_2 plus their BN layers, N = 1..9.
    # Caffe names the BN layers "convX_Y/bn"; PyTorch uses "convX_Y_bn".
    for n in range(1, 10):
        for sub in ('1', '2'):
            conv = 'conv%d_%s' % (n, sub)
            _copy_conv(conv, conv)
            _copy_bn(conv + '_bn', conv + '/bn')

    # Deconvolution (upscore) layers have weights only, no bias.
    for up in ('upscore1', 'upscore2', 'upscore3'):
        _copy_conv(up, up, bias=False)

    # Detection heads: loc/conf pairs for each branch (cc, rd, pd, vd),
    # each with a plain and a "1"-suffixed variant.
    for head in ('cc', 'rd', 'pd', 'vd'):
        for suffix in ('', '1'):
            for kind in ('loc', 'conf'):
                head_name = 'net_my_%s%s_%s' % (head, suffix, kind)
                _copy_conv(head_name, head_name)

    net.load_state_dict(state_dict)
    net.cuda()
    net.eval()
    torch.save(net.state_dict(), './net_my_model_20220420.pth')
caffemodel 修改与保存
# Paths: two trained source models plus a backbone-only deploy definition
# that receives the blended weights.
path_deply_goe = "/media/algo/data_1/everyday//20220512weight/g_deply1.4.4.prototxt"
path_model_goe = "/media/algo/data_1/everyday/20220512weight/g.caffemodel"
path_deply_lld = "/media/algo/data_1/everyday//20220512weight/l_deply.prototxt"
path_model_lld = "/media/algo/data_1/everyday//20220512weight/l-3000.caffemodel"
path_backbone_deply = "/media/algo/data_1/everyday/20220512weight/backbone_deply.prototxt"

caffe.set_mode_cpu()
caffe.set_device(0)

##39
# The 39 backbone layers shared by both source models.
list_backbone_layer = ["conv1_1", "conv1_1/bn", "conv1_2", "conv1_2/bn", "conv2_1", "conv2_1/bn", "conv2_2",\
"conv2_2/bn", "conv3_1", "conv3_1/bn", "conv3_2", "conv3_2/bn", "conv4_1", "conv4_1/bn",\
"conv4_2", "conv4_2/bn", "conv5_1", "conv5_1/bn", "conv5_2", "conv5_2/bn", "conv6_1",\
"conv6_1/bn", "conv6_2", "conv6_2/bn", "upscore1", "conv7_1", "conv7_1/bn", "conv7_2",\
"conv7_2/bn", "upscore2", "conv8_1", "conv8_1/bn", "conv8_2", "conv8_2/bn", "upscore3",\
"conv9_1", "conv9_1/bn", "conv9_2", "conv9_2/bn"]

net_goe = caffe.Net(path_deply_goe, path_model_goe, caffe.TEST)
net_lld = caffe.Net(path_deply_lld, path_model_lld, caffe.TEST)
net_merge = caffe.Net(path_backbone_deply, phase=caffe.TEST)

# Blend each backbone blob 40/60 between the two models and write the result.
for layer_name, para in net_goe.params.items():
    print("layer_name=", layer_name, " :::len=", len(para))
    if layer_name not in list_backbone_layer:
        continue
    for idx in range(len(para)):
        blended = net_goe.params[layer_name][idx].data[...] * 0.4 \
                  + net_lld.params[layer_name][idx].data[...] * 0.6
        net_merge.params[layer_name][idx].data[...] = blended

net_merge.save("./merge.caffemodel")
C++ 将 Blob 数据保存到本地 txt 文件
// Dump this blob's data to a text file, one value per line.
//
// path_txt:     destination file path.
// b_save_shape: when true, the first line is the blob shape as "d0,d1,...,dn".
//
// Relies on the enclosing class members shape_, count_ and cpu_data<float>().
void save_data_to_txt(const string path_txt, bool b_save_shape = true)
{
    std::ofstream fOut(path_txt);
    if (!fOut)
    {
        std::cout << "Open output file failed." << std::endl;
        // Bail out: writing to a failed stream would be a silent no-op.
        return;
    }
    if (b_save_shape)
    {
        // size_t avoids the signed/unsigned comparison of the original int loop.
        for (size_t i = 0; i < shape_.size(); i++)
        {
            fOut << shape_[i];
            if (i == shape_.size() - 1)
            {
                fOut << std::endl;
            }
            else
            {
                fOut << ",";
            }
        }
    }
    const float* data_vec = cpu_data<float>();
    for (int i = 0; i < count_; ++i) {
        fOut << data_vec[i] << std::endl;
    }
    fOut.close();
}
统计权重占比
# Run one forward pass so the net's blobs are populated.
net.forward(**input_dict)

# Weights with |w| <= T are counted as zero (pruned).
T = 1e-30
print("=====================================bn")
for layer_para_name, para in net.params.items():
    if "bn" not in layer_para_name:
        continue
    # para[3] holds the BN scale (gamma) parameters.
    gamma_abs = abs(para[3].data)
    nonzero_mask = gamma_abs > T
    ratio_zero = (1 - (nonzero_mask.sum() * 1.0 / gamma_abs.size))
    print("layer_para_name=", layer_para_name, " ratio_zero=", ratio_zero)

print("=====================================all")
T = 1e-30
total_weight = 0
total_weight_avail = 0
for layer_para_name, para in net.params.items():
    if "bn" in layer_para_name:
        continue
    # para[0] is the weight blob (para[1] would be bias).
    w_abs = abs(para[0].data)
    total_weight += w_abs.size
    nonzero_mask = w_abs > T
    total_weight_avail += nonzero_mask.sum()
    ratio_zero = (1 - (nonzero_mask.sum() * 1.0 / w_abs.size))
    print("layer_para_name=", layer_para_name, " ratio_zero=", ratio_zero, " shape=", w_abs.shape)

print("ratio_conv_avail_weight=", total_weight_avail * 1.0 / total_weight, " ratio_conv_not_avail_weight=",
      1 - total_weight_avail * 1.0 / total_weight)
好记性不如烂键盘---点滴、积累、进步!