/usr/include/OTB-5.8/otbTrainSharkRandomForests.txx is in libotb-dev 5.8.0+dfsg-3.
This file is owned by root:root, with mode 0o644.
The actual contents of the file can be viewed below.
/*=========================================================================
Program: ORFEO Toolbox
Language: C++
Date: $Date$
Version: $Revision$
Copyright (c) Centre National d'Etudes Spatiales. All rights reserved.
See OTBCopyright.txt for details.
This software is distributed WITHOUT ANY WARRANTY; without even
the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
PURPOSE. See the above copyright notices for more information.
=========================================================================*/
#ifndef otbTrainSharkRandomForests_txx
#define otbTrainSharkRandomForests_txx
#include "otbLearningApplicationBase.h"
namespace otb
{
namespace Wrapper
{
/**
 * Register the Shark Random Forests classifier as a choice of the
 * "classifier" parameter group and declare its hyper-parameters:
 * number of trees, minimum node size for a split, number of features
 * tested per node (mtry), and the out-of-bag sample ratio.
 * Called once while building the application's parameter list;
 * TrainSharkRandomForests() later reads these values back.
 */
template <class TInputValue, class TOutputValue>
void
LearningApplicationBase<TInputValue,TOutputValue>
::InitSharkRandomForestsParams()
{
AddChoice("classifier.sharkrf", "Shark Random forests classifier");
SetParameterDescription("classifier.sharkrf",
"This group of parameters allows setting Shark Random Forests classifier parameters. "
"See complete documentation here \\url{http://image.diku.dk/shark/doxygen_pages/html/classshark_1_1_r_f_trainer.html}.\n It is noteworthy that training is parallel.");
//MaxNumberOfTrees
AddParameter(ParameterType_Int, "classifier.sharkrf.nbtrees",
"Maximum number of trees in the forest");
SetParameterInt("classifier.sharkrf.nbtrees", 100);
SetParameterDescription(
"classifier.sharkrf.nbtrees",
"The maximum number of trees in the forest. Typically, the more trees you have, the better the accuracy. "
"However, the improvement in accuracy generally diminishes and reaches an asymptote for a certain number of trees. "
"Also to keep in mind, increasing the number of trees increases the prediction time linearly.");
//NodeSize
AddParameter(ParameterType_Int, "classifier.sharkrf.nodesize", "Min size of the node for a split");
SetParameterInt("classifier.sharkrf.nodesize", 25);
SetParameterDescription(
"classifier.sharkrf.nodesize",
"If the number of samples in a node is smaller than this parameter, "
"then the node will not be split. A reasonable value is a small percentage of the total data e.g. 1 percent.");
//MTry
AddParameter(ParameterType_Int, "classifier.sharkrf.mtry", "Number of features tested at each node");
SetParameterInt("classifier.sharkrf.mtry", 0);
SetParameterDescription(
"classifier.sharkrf.mtry",
"The number of features (variables) which will be tested at each node in "
"order to compute the split. If set to zero, the square root of the number of "
"features is used.");
//OOB Ratio
AddParameter(ParameterType_Float, "classifier.sharkrf.oobr", "Out of bound ratio");
SetParameterFloat("classifier.sharkrf.oobr", 0.66);
// NOTE: the two adjacent literals below are concatenated at compile time;
// the trailing space after "sample." keeps the rendered help text readable
// (the original read "...sample.A good default value...").
SetParameterDescription("classifier.sharkrf.oobr",
"Set the fraction of the original training dataset to use as the out of bag sample. "
"A good default value is 0.66. ");
}
/**
 * Train a Shark Random Forests model on the given samples and write the
 * resulting model file to modelPath.
 *
 * \param trainingListSample        feature vectors used for training
 * \param trainingLabeledListSample target labels (or values in regression
 *                                  mode) paired with the samples above
 * \param modelPath                 destination file for the trained model
 *
 * Hyper-parameters are read back from the "classifier.sharkrf.*"
 * application parameters declared in InitSharkRandomForestsParams().
 */
template <class TInputValue, class TOutputValue>
void
LearningApplicationBase<TInputValue,TOutputValue>
::TrainSharkRandomForests(typename ListSampleType::Pointer trainingListSample,
typename TargetListSampleType::Pointer trainingLabeledListSample,
std::string modelPath)
{
  // Instantiate the Shark RF machine-learning model wrapper.
  typename SharkRandomForestType::Pointer rfModel = SharkRandomForestType::New();

  // Forward the user-supplied hyper-parameters to the model.
  rfModel->SetNumberOfTrees(GetParameterInt("classifier.sharkrf.nbtrees"));
  rfModel->SetMTry(GetParameterInt("classifier.sharkrf.mtry"));
  rfModel->SetNodeSize(GetParameterInt("classifier.sharkrf.nodesize"));
  rfModel->SetOobRatio(GetParameterFloat("classifier.sharkrf.oobr"));

  // Select classification vs. regression, then attach the training data.
  rfModel->SetRegressionMode(this->m_RegressionFlag);
  rfModel->SetInputListSample(trainingListSample);
  rfModel->SetTargetListSample(trainingLabeledListSample);

  // Fit the forest and serialize it to the requested location.
  rfModel->Train();
  rfModel->Save(modelPath);
}
} //end namespace wrapper
} //end namespace otb
#endif