Point Cloud Library (PCL)  1.9.1
decision_forest_trainer.h
/*
 * Software License Agreement (BSD License)
 *
 *  Point Cloud Library (PCL) - www.pointclouds.org
 *  Copyright (c) 2010-2011, Willow Garage, Inc.
 *
 *  All rights reserved.
 *
 *  Redistribution and use in source and binary forms, with or without
 *  modification, are permitted provided that the following conditions
 *  are met:
 *
 *   * Redistributions of source code must retain the above copyright
 *     notice, this list of conditions and the following disclaimer.
 *   * Redistributions in binary form must reproduce the above
 *     copyright notice, this list of conditions and the following
 *     disclaimer in the documentation and/or other materials provided
 *     with the distribution.
 *   * Neither the name of Willow Garage, Inc. nor the names of its
 *     contributors may be used to endorse or promote products derived
 *     from this software without specific prior written permission.
 *
 *  THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 *  "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 *  LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
 *  FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
 *  COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
 *  INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
 *  BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 *  LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
 *  CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 *  LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
 *  ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 *  POSSIBILITY OF SUCH DAMAGE.
 *
 */

#ifndef PCL_ML_DT_DECISION_FOREST_TRAINER_H_
#define PCL_ML_DT_DECISION_FOREST_TRAINER_H_

#include <pcl/common/common.h>

#include <pcl/ml/dt/decision_forest.h>
#include <pcl/ml/dt/decision_tree.h>
#include <pcl/ml/dt/decision_tree_trainer.h>
#include <pcl/ml/feature_handler.h>
#include <pcl/ml/stats_estimator.h>

#include <vector>

namespace pcl
{

  /** \brief Trainer for decision forests. */
  template <
    class FeatureType,
    class DataSet,
    class LabelType,
    class ExampleIndex,
    class NodeType >
  class PCL_EXPORTS DecisionForestTrainer
  {

    public:

      /** \brief Constructor. */
      DecisionForestTrainer ();
      /** \brief Destructor. */
      virtual
      ~DecisionForestTrainer ();

      /** \brief Sets the number of trees to train.
        * \param[in] num_of_trees The number of trees.
        */
      inline void
      setNumberOfTreesToTrain (const size_t num_of_trees)
      {
        num_of_trees_to_train_ = num_of_trees;
      }

      /** \brief Sets the feature handler used to create and evaluate features.
        * \param[in] feature_handler The feature handler.
        */
      inline void
      setFeatureHandler (pcl::FeatureHandler<FeatureType, DataSet, ExampleIndex> & feature_handler)
      {
        decision_tree_trainer_.setFeatureHandler (feature_handler);
      }

      /** \brief Sets the object for estimating the statistics for tree nodes.
        * \param[in] stats_estimator The statistics estimator.
        */
      inline void
      setStatsEstimator (pcl::StatsEstimator<LabelType, NodeType, DataSet, ExampleIndex> & stats_estimator)
      {
        decision_tree_trainer_.setStatsEstimator (stats_estimator);
      }

      /** \brief Sets the maximum depth of the learned tree.
        * \param[in] max_tree_depth Maximum depth of the learned tree.
        */
      inline void
      setMaxTreeDepth (const size_t max_tree_depth)
      {
        decision_tree_trainer_.setMaxTreeDepth (max_tree_depth);
      }

      /** \brief Sets the number of features used to find the optimal decision features.
        * \param[in] num_of_features The number of features.
        */
      inline void
      setNumOfFeatures (const size_t num_of_features)
      {
        decision_tree_trainer_.setNumOfFeatures (num_of_features);
      }

      /** \brief Sets the number of thresholds tested to find the optimal decision threshold on the feature responses.
        * \param[in] num_of_threshold The number of thresholds.
        */
      inline void
      setNumOfThresholds (const size_t num_of_threshold)
      {
        decision_tree_trainer_.setNumOfThresholds (num_of_threshold);
      }

      /** \brief Sets the input data set used for training.
        * \param[in] data_set The data set used for training.
        */
      inline void
      setTrainingDataSet (DataSet & data_set)
      {
        decision_tree_trainer_.setTrainingDataSet (data_set);
      }

      /** \brief Sets the example indices that specify the data used for training.
        * \param[in] examples The examples.
        */
      inline void
      setExamples (std::vector<ExampleIndex> & examples)
      {
        decision_tree_trainer_.setExamples (examples);
      }

      /** \brief Sets the label data corresponding to the example data.
        * \param[in] label_data The label data.
        */
      inline void
      setLabelData (std::vector<LabelType> & label_data)
      {
        decision_tree_trainer_.setLabelData (label_data);
      }

      /** \brief Sets the minimum number of examples required to continue growing a tree.
        * \param[in] n The number of examples.
        */
      inline void
      setMinExamplesForSplit (size_t n)
      {
        decision_tree_trainer_.setMinExamplesForSplit (n);
      }

      /** \brief Specifies the thresholds to be used when evaluating features.
        * \param[in] thres The threshold values.
        */
      void
      setThresholds (std::vector<float> & thres)
      {
        decision_tree_trainer_.setThresholds (thres);
      }

      /** \brief Specifies the data provider.
        * \param[in] dtdp The data provider, which must implement the getDatasetAndLabels(...) function.
        */
      void
      setDecisionTreeDataProvider (boost::shared_ptr<pcl::DecisionTreeTrainerDataProvider<FeatureType, DataSet, LabelType, ExampleIndex, NodeType> > & dtdp)
      {
        decision_tree_trainer_.setDecisionTreeDataProvider (dtdp);
      }
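
      /* Sketch of a custom data provider, for use with setDecisionTreeDataProvider().
       * The parameter list of getDatasetAndLabels(...) shown here is an assumption
       * based on how the trainer consumes the provider (the authoritative
       * declaration is pcl::DecisionTreeTrainerDataProvider in
       * pcl/ml/dt/decision_tree_trainer.h); MyFeature, MyData, MyLabel, MyExample
       * and MyNode are placeholder types.
       *
       *   class MyDataProvider
       *     : public pcl::DecisionTreeTrainerDataProvider<
       *         MyFeature, MyData, MyLabel, MyExample, MyNode>
       *   {
       *     public:
       *       void
       *       getDatasetAndLabels (MyData & data_set,
       *                            std::vector<MyLabel> & label_data,
       *                            std::vector<MyExample> & examples) override
       *       {
       *         // Fill data_set, label_data and examples, e.g. by (re)sampling
       *         // the training data before each tree is trained.
       *       }
       *   };
       */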

      /** \brief Specifies whether features are randomly generated at each split node.
        * \param[in] b Set to true to generate random features at each split node, false otherwise.
        */
      void
      setRandomFeaturesAtSplitNode (bool b)
      {
        decision_tree_trainer_.setRandomFeaturesAtSplitNode (b);
      }

      /** \brief Trains a decision forest using the set training data and settings.
        * \param[out] forest Destination for the trained forest.
        */
      void
      train (DecisionForest<NodeType> & forest);

    private:

      /** \brief The number of trees to train. */
      size_t num_of_trees_to_train_;

      /** \brief The trainer for the decision trees of the forest. */
      pcl::DecisionTreeTrainer<FeatureType, DataSet, LabelType, ExampleIndex, NodeType> decision_tree_trainer_;

  };
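
  /* A minimal usage sketch for DecisionForestTrainer: configure the forwarding
   * setters, then call train() to fill a DecisionForest. The template arguments
   * (MyFeature, MyData, MyLabel, MyExample, MyNode) and the feature_handler /
   * stats_estimator objects are placeholders, standing in for any
   * pcl::FeatureHandler and pcl::StatsEstimator implementations with matching
   * template parameters; the numeric settings are arbitrary example values.
   *
   *   DecisionForestTrainer<MyFeature, MyData, MyLabel, MyExample, MyNode> trainer;
   *   trainer.setNumberOfTreesToTrain (10);
   *   trainer.setFeatureHandler (feature_handler);
   *   trainer.setStatsEstimator (stats_estimator);
   *   trainer.setMaxTreeDepth (15);
   *   trainer.setNumOfFeatures (1000);
   *   trainer.setNumOfThresholds (10);
   *   trainer.setTrainingDataSet (data_set);
   *   trainer.setExamples (examples);
   *   trainer.setLabelData (labels);
   *
   *   DecisionForest<MyNode> forest;
   *   trainer.train (forest);
   */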

}

#include <pcl/ml/impl/dt/decision_forest_trainer.hpp>

#endif