Notes on Caffe's net.hpp
net.hpp
namespace caffe { class Net }
public:
- Net constructor and destructor (see the construction sketch after this group)
  - explicit Net(const NetParameter& param, const Net* root_net = NULL)
  - explicit Net(const string& param_file, Phase phase, const Net* root_net = NULL)
  - virtual ~Net()
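A minimal construction sketch, assuming the (file, phase) overload above; the prototxt file name is hypothetical:

```cpp
#include "caffe/net.hpp"

void BuildNet() {
  // Parses the prototxt and runs Net::Init(); caffe::TEST selects the test phase.
  caffe::Net<float> net("deploy.prototxt", caffe::TEST);
}
```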
 
- void Init(const NetParameter& param): initialize the network from a NetParameter
- const vector<Blob<Dtype>*>& ForwardPrefilled(Dtype* loss = NULL): run Forward with input blobs that have already been fed in separately
- Forward over a range of layers (From / To)
  - Dtype ForwardFromTo(int start, int end): the From and To variants of Forward and Backward follow the ordering in which the net was specified; for a general DAG, going from one layer to another may entail extra computation on unrelated branches (partial-run sketch below)
  - Dtype ForwardFrom(int start)
  - Dtype ForwardTo(int end)
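A sketch of a partial forward pass, given a Net<float> net built as in the construction sketch and assuming both endpoints are inclusive layer indices (the indices are illustrative):

```cpp
float partial_loss = net.ForwardFromTo(0, 5);  // run layers 0..5
net.ForwardFrom(6);                            // continue from layer 6 to the last layer
```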
 
- Forward: compute the top blobs from the bottom blobs
  - const vector<Blob<Dtype>*>& Forward(const vector<Blob<Dtype>*>& bottom, Dtype* loss = NULL): Forward with a set of bottom blobs, returning the net's output blobs
  - string Forward(const string& input_blob_protos, Dtype* loss = NULL): Forward from a serialized BlobProtoVector, returning the output as a serialized BlobProtoVector
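A sketch of a full forward pass with the blob-vector overload, assuming the net's input blobs have already been filled with data:

```cpp
float loss = 0;
// Pass the net's own input blobs back in as the bottom vector.
const std::vector<caffe::Blob<float>*>& outputs =
    net.Forward(net.input_blobs(), &loss);
```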
 
- void ClearParamDiffs(): zero the diffs of all net parameters; should be run before Backward
- Backward over a range of layers (From / To); a training-step sketch follows this group
  - void Backward(): the network Backward takes no input and produces no output, since it only computes the gradients w.r.t. the parameters, and the data was already provided during the forward pass
  - void BackwardFromTo(int start, int end)
  - void BackwardFrom(int start)
  - void BackwardTo(int end)
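A sketch of one training-style step built from the calls above; in practice the Solver drives this loop and scales the diffs by the learning rate before Update:

```cpp
net.ClearParamDiffs();                     // zero the accumulated gradients
float loss = 0;
net.Forward(net.input_blobs(), &loss);     // forward pass over pre-filled inputs
net.Backward();                            // compute gradients w.r.t. the parameters
net.Update();                              // data -= diff for each learnable parameter blob
```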
 
- void Reshape(): reshape all layers from bottom to top
- Dtype ForwardBackward(const vector<Blob<Dtype>*>& bottom): run Forward then Backward and return the loss
- void Update(): update the network weights with the computed diffs
- void ShareWeights(): share the weight data of owner blobs with their shared blobs; called by Net::Init and normally should not be called manually
- void ShareTrainedLayersWith(const Net* other): for an already initialized net, implicitly share (without extra memory) the trained layers of another net
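A sketch of the typical use, mirroring how a Solver lets its test nets reuse the training net's weights (the prototxt names are hypothetical):

```cpp
caffe::Net<float> train_net("train_val.prototxt", caffe::TRAIN);
caffe::Net<float> test_net("train_val.prototxt", caffe::TEST);
test_net.ShareTrainedLayersWith(&train_net);  // test net now uses the trained weights
```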
- CopyTrainedLayersFrom(): copy trained layers into this net
  - void CopyTrainedLayersFrom(const NetParameter& param)
  - void CopyTrainedLayersFrom(const string trained_filename)
  - void CopyTrainedLayersFromBinaryProto(const string trained_filename)
  - void CopyTrainedLayersFromHDF5(const string trained_filename)
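A sketch of loading pretrained weights into an initialized net; the .caffemodel path is hypothetical, and the string overload dispatches to the binary-proto or HDF5 reader based on the file extension:

```cpp
net.CopyTrainedLayersFrom("bvlc_reference_caffenet.caffemodel");
```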
 
- Writing the net out
  - void ToProto(NetParameter* param, bool write_diff = false) const: write the net to a NetParameter proto
  - void ToHDF5(const string& filename, bool write_diff = false) const: write the net to an HDF5 file
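A snapshot sketch in the style of Solver::Snapshot; WriteProtoToBinaryFile is assumed to be the helper from caffe/util/io.hpp, and the output path is hypothetical:

```cpp
#include "caffe/util/io.hpp"

caffe::NetParameter snapshot;
net.ToProto(&snapshot, false);  // false: weights only, no diffs
caffe::WriteProtoToBinaryFile(snapshot, "snapshot.caffemodel");
```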
 
- Accessors for net, layer, and blob attributes
  - inline const string& name() const {}: returns the network name
  - inline const vector<string>& layer_names() const {}: returns the layer names
  - inline const vector<string>& blob_names() const {}: returns the blob names
  - inline const vector<shared_ptr<Blob<Dtype>>>& blobs() const {}: returns the blobs
  - inline const vector<shared_ptr<Layer<Dtype>>>& layers() const {}: returns the layers
- inline Phase phase() const {}: returns the phase, TRAIN or TEST
- Per-layer bottom_vecs() and top_vecs()
  - inline const vector<vector<Blob<Dtype>*>>& bottom_vecs() const {}: returns each layer's bottom vectors; usually not needed unless you run per-layer checks such as gradient checks
  - inline const vector<vector<Blob<Dtype>*>>& top_vecs() const {}: returns each layer's top vectors; usually not needed unless you run per-layer checks such as gradient checks
 
- Accessors for bottom, blob, and layer backward settings
  - inline const vector<vector<bool>>& bottom_need_backward() const {}
  - inline const vector<Dtype>& blob_loss_weights() const {}
  - inline const vector<bool>& layer_need_backward() const {}
 
- Parameter accessors
  - inline const vector<shared_ptr<Blob<Dtype>>>& params() const {}: returns the parameters
  - inline const vector<Blob<Dtype>*>& learnable_params() const {}
 
- Learning-rate accessors
  - inline const vector<float>& params_lr() const {}: returns the learning-rate multipliers
  - inline const vector<bool>& has_params_lr() const {}
 
- Weight-decay accessors (see the multiplier sketch after this group)
  - inline const vector<float>& params_weight_decay() const {}: returns the weight-decay multipliers
  - inline const vector<bool>& has_params_decay() const {}
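A sketch of reading the per-parameter multipliers the way a Solver does when scaling gradients; LOG comes from glog, which Caffe already links against:

```cpp
const std::vector<float>& lr_mult = net.params_lr();
const std::vector<float>& decay_mult = net.params_weight_decay();
for (size_t i = 0; i < net.learnable_params().size(); ++i) {
  LOG(INFO) << "param " << i << ": lr_mult=" << lr_mult[i]
            << ", decay_mult=" << decay_mult[i];
}
```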
 
- Parameter indices
  - inline const map<string, int>& param_names_index() const {}
  - inline const vector<int>& param_owners() const {}
 
- Input/output blob counts and name lookups (lookup sketch below)
  - inline int num_inputs() const {}
  - inline int num_outputs() const {}
  - inline const vector<Blob<Dtype>*>& input_blobs() const {}
  - inline const vector<Blob<Dtype>*>& output_blobs() const {}
  - inline const vector<int>& input_blob_indices() const {}
  - inline const vector<int>& output_blob_indices() const {}
  - bool has_blob(const string& blob_name) const
  - const shared_ptr<Blob<Dtype>> blob_by_name(const string& blob_name) const
  - bool has_layer(const string& layer_name) const
  - const shared_ptr<Layer<Dtype>> layer_by_name(const string& layer_name) const
  - inline void set_debug_info(const bool value) {}
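A sketch of looking up a named output blob after a forward pass; "prob" is a hypothetical blob name taken from the prototxt:

```cpp
if (net.has_blob("prob")) {
  const boost::shared_ptr<caffe::Blob<float> > prob = net.blob_by_name("prob");
  const float* scores = prob->cpu_data();  // prob->count() output values
}
```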
 
- Helpers for Init
  - static void FilterNet(const NetParameter& param, NetParameter* param_filtered): remove layers that the user-specified rules exclude for the current phase, level, and stage
  - static bool StateMeetsRule(const NetState& state, const NetStateRule& rule, const string& layer_name): returns whether the NetState meets the NetStateRule
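A sketch of the rule check that drives layer filtering, assuming NetState and NetStateRule are the protobuf messages generated from caffe.proto (the setter names follow the usual protobuf conventions and the layer name is illustrative):

```cpp
caffe::NetState state;
state.set_phase(caffe::TEST);   // the state the net is being built in
caffe::NetStateRule rule;
rule.set_phase(caffe::TRAIN);   // an include-rule asking for TRAIN only
// False here: a TEST-phase state does not satisfy a TRAIN-only rule.
bool included = caffe::Net<float>::StateMeetsRule(state, rule, "my_layer");
```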
 
 
protected:
- Append*(): helpers that add blobs to the net
  - void AppendTop(const NetParameter& param, const int layer_id, const int top_id, set<string>* available_blobs, map<string, int>* blob_name_to_idx): append a new input or top blob to the net
  - int AppendBottom(const NetParameter& param, const int layer_id, const int bottom_id, set<string>* available_blobs, map<string, int>* blob_name_to_idx): append a new bottom blob to the net
  - void AppendParam(const NetParameter& param, const int layer_id, const int param_id): append a new parameter blob to the net
 
- Debug-info helpers
  - void InputDebugInfo(const int layer_id): display debug info about the input blobs during Forward
  - void ForwardDebugInfo(const int layer_id): display debug info during Forward
  - void BackwardDebugInfo(const int layer_id): display debug info during Backward
  - void UpdateDebugInfo(const int param_id): display debug info during Update
 
- Basic net information (data members)
  - string name_: the network name
  - Phase phase_: TRAIN or TEST
 
- Per-layer information
  - vector<shared_ptr<Layer<Dtype>>> layers_: the individual layers in the net
  - vector<string> layer_names_
  - map<string, int> layer_names_index_
  - vector<bool> layer_need_backward_
 
- Intermediate results stored between layers
  - vector<shared_ptr<Blob<Dtype>>> blobs_: the blobs store the intermediate results between layers
  - vector<string> blob_names_
  - map<string, int> blob_names_index_
  - vector<bool> blob_need_backward_
 
- bottom_vecs_
  - vector<vector<Blob<Dtype>*>> bottom_vecs_: stores the vectors of inputs to each layer; it does not own the blobs (blobs_ does), so it holds raw pointers
  - vector<vector<int>> bottom_id_vecs_
  - vector<vector<bool>> bottom_need_backward_
 
- top_vecs_
  - vector<vector<Blob<Dtype>*>> top_vecs_: stores the vectors of outputs of each layer
  - vector<vector<int>> top_id_vecs_
 
- Blob loss weights and parameter bookkeeping
  - vector<Dtype> blob_loss_weights_: the weight of each net blob in the loss (objective) function, indexed by blob id
  - vector<vector<int>> param_id_vecs_
  - vector<int> param_owners_
  - vector<string> param_display_names_
  - vector<pair<int, int>> param_layer_indices_
  - map<string, int> param_names_index_
 
- Indices of the net's input and output blobs
  - vector<int> net_input_blob_indices_
  - vector<int> net_output_blob_indices_
  - vector<Blob<Dtype>*> net_input_blobs_
  - vector<Blob<Dtype>*> net_output_blobs_
 
- Parameters in the net
  - vector<shared_ptr<Blob<Dtype>>> params_: the parameters in the network
  - vector<Blob<Dtype>*> learnable_params_
 
- Mapping from params_ to learnable_params_
  - vector<int> learnable_param_ids_: the mapping from params_ to learnable_params_
  - vector<float> params_lr_: learning-rate multipliers for learnable_params_
  - vector<bool> has_params_lr_
  - vector<float> params_weight_decay_: weight-decay multipliers for learnable_params_
  - vector<bool> has_params_decay_
 
- size_t memory_used_: the amount of memory used by the net
- bool debug_info_: whether to compute and display debug info for the net
- const Net* const root_net_: in data parallelism, the root net that holds the shared layers