Random notes on the Caffe source code
Some debug-printing code:
The description of LayerParameter in caffe.proto:
message LayerParameter {
  optional string name = 1; // the layer name
  optional string type = 2; // the layer type
  repeated string bottom = 3; // the name of each bottom blob
  repeated string top = 4; // the name of each top blob

  // The train / test phase for computation.
  optional Phase phase = 10;

  // The amount of weight to assign each top blob in the objective.
  // Each layer assigns a default value, usually of either 0 or 1,
  // to each top blob.
  repeated float loss_weight = 5;
  ...
A repeated field behaves like a vector, so it is accessed like this:
// layers_ is a member of Net<Dtype>:
vector<shared_ptr<Layer<Dtype> > > layers_;

std::string layer_name_my = layers_[i]->layer_param().name();
std::string layer_type_my = layers_[i]->layer_param().type();
std::cout << i << "\n layer_name=" << layer_name_my << " type=" << layer_type_my << std::endl;
int bottom_name_size = layers_[i]->layer_param().bottom().size();
if (bottom_name_size > 0)
{
    std::string bottom_name = layers_[i]->layer_param().bottom(0); // name of the first bottom blob
}
int top_name_size = layers_[i]->layer_param().top().size();
if (top_name_size > 0)
{
    std::string top_name = layers_[i]->layer_param().top(0); // name of the first top blob
}
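For reference, these are the accessors protoc generates for the optional and repeated fields shown above (a sketch; p is just a local alias):

const caffe::LayerParameter& p = layers_[i]->layer_param();
p.name();         // optional string -> const std::string&
p.bottom_size();  // repeated string -> element count, same as p.bottom().size()
p.bottom(0);      // repeated string -> const std::string& at index 0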
Printing blob values:
template <typename Dtype>
void print_blob(Blob<Dtype>* blob_tmp)
{
    std::cout << "-------------------------start print blob---------------------------" << std::endl;
    std::cout << "blob shape=" << blob_tmp->shape_string() << std::endl;
    const Dtype* p = blob_tmp->cpu_data(); // read-only access is enough for printing
    for (int i = 0; i < blob_tmp->count(); i++)
    {
        std::cout << p[i] << std::endl;
    }
    std::cout << "-------------------------end print blob---------------------------" << std::endl;
}
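A hypothetical call site inside Net<Dtype>::ForwardFromTo, dumping the first top blob of layer i:

if (top_vecs_[i].size() > 0)
{
    print_blob(top_vecs_[i][0]);
}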
Add prints to net forward (Net::ForwardFromTo in net.cpp) to see how the layers connect to each other. The print_blob helper above goes into net.cpp as well, followed by the instrumented ForwardFromTo:
template <typename Dtype>
Dtype Net<Dtype>::ForwardFromTo(int start, int end) {
CHECK_GE(start, 0);
CHECK_LT(end, layers_.size());
Dtype loss = 0;
std::cout<<std::endl;
std::cout<<"-------------------------start net---------------------------"<<std::endl;
for (int i = start; i <= end; ++i) {
for (int c = 0; c < before_forward_.size(); ++c) {
before_forward_[c]->run(i);
}
///////////////////////////////////////////////////////////////////////////////////////////////////
std::string layer_name_my = layers_[i]->layer_param().name();
std::string layer_type_my = layers_[i]->layer_param().type();
std::cout<<"\n-----------------------------------------------------------"<<std::endl;
std::cout<<i<<"==== layer_name="<<layer_name_my<<" type="<<layer_type_my<<std::endl;
int bottom_name_size = layers_[i]->layer_param().bottom().size();
if(bottom_name_size>0)
{
for(int ii=0;ii<bottom_name_size;ii++)
{
std::cout<<ii<<" ::bottom name="<<layers_[i]->layer_param().bottom(ii)<<std::endl;
Blob<Dtype>* ptr_blob = bottom_vecs_[i][ii];
std::cout<<"bottom shape="<<ptr_blob->shape_string()<<std::endl;
}
} else{
std::cout<<"no bottom"<<std::endl;
}
// std::cout<<"bottom_size="<<bottom_vecs_[i].size()<<std::endl;
// for(int j=0;j<bottom_vecs_[i].size();j++)
// {
// Blob<Dtype>* ptr_blob = bottom_vecs_[i][j];
// std::cout<<"bottom shape="<<ptr_blob->shape_string()<<std::endl;
// }
////////////////////////////////////////////////////////////////////////////////////////////////////
// if("lstm1x_r2_"==layer_name_my && "Input"==layer_type_my)
// {
//
// for(int j=0;j<top_vecs_[i].size();j++)
// {
// Blob<Dtype>* ptr_blob = top_vecs_[i][j];
// std::cout<<"--- input top shape="<<ptr_blob->shape_string()<<std::endl;
//
// if(top_vecs_[i][j]->count()<327680-1)
// {
// prit_blob(ptr_blob);
// }
// }
// }
Dtype layer_loss = layers_[i]->Forward(bottom_vecs_[i], top_vecs_[i]);
// if("lstm1x_r2_"==layer_name_my && "Input"==layer_type_my)
// {
//
// for(int j=0;j<top_vecs_[i].size();j++)
// {
// Blob<Dtype>* ptr_blob = top_vecs_[i][j];
// std::cout<<"--- input top shape="<<ptr_blob->shape_string()<<std::endl;
//
// if(top_vecs_[i][j]->count()<327680-1)
// {
// prit_blob(ptr_blob);
// }
// }
// }
int top_name_size = layers_[i]->layer_param().top().size();
if(top_name_size>0)
{
for(int ii=0;ii<top_name_size;ii++)
{
std::cout<<ii<<" ::top name="<<layers_[i]->layer_param().top(ii)<<std::endl;
Blob<Dtype>* ptr_blob = top_vecs_[i][ii];
std::cout<<"top shape="<<ptr_blob->shape_string()<<std::endl;
}
} else{
std::cout<<"no top"<<std::endl;
}
// std::cout<<"top_size="<<top_vecs_[i].size()<<std::endl;
// for(int j=0;j<top_vecs_[i].size();j++)
// {
// Blob<Dtype>* ptr_blob = top_vecs_[i][j];
// std::cout<<"top shape="<<ptr_blob->shape_string()<<std::endl;
// }
loss += layer_loss;
if (debug_info_) { ForwardDebugInfo(i); }
for (int c = 0; c < after_forward_.size(); ++c) {
after_forward_[c]->run(i);
}
}
std::cout<<"-------------------------end net---------------------------"<<std::endl;
return loss;
}
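To actually trigger this instrumented forward pass, a minimal driver like the following works (a sketch; the deploy.prototxt and model.caffemodel paths are placeholders):

#include <caffe/caffe.hpp>

int main()
{
    caffe::Caffe::set_mode(caffe::Caffe::CPU);
    caffe::Net<float> net("deploy.prototxt", caffe::TEST); // placeholder path
    net.CopyTrainedLayersFrom("model.caffemodel");         // placeholder path
    net.Forward(); // internally calls ForwardFromTo(0, layers_.size() - 1)
    return 0;
}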
The printed output looks like this:
-----------------------------------------------------------
430==== layer_name=lstm1x_r2_h_conted_69 type=Scale
0 ::bottom name=h_69_lstm1x_r2_unit_69_1_split_0
bottom shape=1 1 100 (100)
1 ::bottom name=cont_70_lstm1x_r2_cont_slice_69_split_0
bottom shape=1 1 (1)
0 ::top name=h_conted_69
top shape=1 1 100 (100)
-----------------------------------------------------------
431==== layer_name=lstm1x_r2_transform_70 type=InnerProduct
0 ::bottom name=h_conted_69
bottom shape=1 1 100 (100)
0 ::top name=W_hc_h_69
top shape=1 1 400 (400)
-----------------------------------------------------------
432==== layer_name=lstm1x_r2_gate_input_70 type=Eltwise
0 ::bottom name=W_hc_h_69
bottom shape=1 1 400 (400)
1 ::bottom name=W_xc_x_70
bottom shape=1 1 400 (400)
0 ::top name=gate_input_70
top shape=1 1 400 (400)
-----------------------------------------------------------
433==== layer_name=lstm1x_r2_unit_70 type=LSTMUnit
0 ::bottom name=c_69
bottom shape=1 1 100 (100)
1 ::bottom name=gate_input_70
bottom shape=1 1 400 (400)
2 ::bottom name=cont_70_lstm1x_r2_cont_slice_69_split_1
bottom shape=1 1 (1)
0 ::top name=c_70
top shape=1 1 100 (100)
1 ::top name=h_70
top shape=1 1 100 (100)
-----------------------------------------------------------
434==== layer_name=h_70_lstm1x_r2_unit_70_1_split type=Split
0 ::bottom name=h_70
bottom shape=1 1 100 (100)
0 ::top name=h_70_lstm1x_r2_unit_70_1_split_0
top shape=1 1 100 (100)
1 ::top name=h_70_lstm1x_r2_unit_70_1_split_1
top shape=1 1 100 (100)
-----------------------------------------------------------
This was mainly to see the connections between the layers inside the LSTM operator: the prototxt shows just a single LSTM layer, but this one operator actually contains a great many internal layers.
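In the prototxt the whole thing is declared as one layer, roughly like this (a sketch; the blob names and num_output: 100 are only inferred from the shapes in the log above):

layer {
  name: "lstm1x_r2"
  type: "LSTM"
  bottom: "data"
  bottom: "cont"
  top: "lstm1x_r2"
  recurrent_param { num_output: 100 }
}

The recurrent layer builds an internal unrolled net, and since that net also runs through ForwardFromTo, the instrumented prints expose its internal Scale / InnerProduct / Eltwise / LSTMUnit / Split layers, one group per time step.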
A good memory is no match for a worn-out keyboard: record, accumulate, improve!