Commit 82005eec authored by Julien Michel's avatar Julien Michel

Merge branch 'develop' into fix-clang-warnings

parents 756ee5dc f4aaebfa
......@@ -231,10 +231,6 @@ mark_as_advanced(OTB_USE_DEPRECATED)
option(OTB_USE_OPENMP "Add openmp compiler and linker flags" OFF)
option(OTB_USE_SSE_FLAGS "Enable SIMD optimizations (hardware dependent)." ON)
#-----------------------------------------------------------------------------
# SHOW_ALL_MSG_DEBUG option
option(OTB_SHOW_ALL_MSG_DEBUG "Show all debug messages (very verbose)" OFF)
#mark_as_advanced(OTB_SHOW_ALL_MSG_DEBUG)
include(OTBSetStandardCompilerFlags)
#---------------------------------------------------------------
......
......@@ -95,7 +95,8 @@ def GenerateChoice(app,param,paramlist, count = 0):
return output
def GenerateParameterType(app,param):
if app.GetParameterType(param) == otbApplication.ParameterType_Empty:
if app.GetParameterType(param) == otbApplication.ParameterType_Empty \
or app.GetParameterType(param) == otbApplication.ParameterType_Bool:
return "Boolean"
if app.GetParameterType(param) == otbApplication.ParameterType_Int \
or app.GetParameterType(param) == otbApplication.ParameterType_Radius \
......@@ -346,6 +347,8 @@ def GetApplicationExamplePythonSnippet(app,idx,expand = False, inputpath="",outp
if paramtype == otbApplication.ParameterType_Empty:
app.EnableParameter(param)
output+= "\t" + appname + ".EnableParameter("+EncloseString(param)+")" + linesep
if paramtype == otbApplication.ParameterType_Bool:
output+= "\t" + appname + ".SetParameterString("+EncloseString(param)+","+EncloseString(value)+")" + linesep
if paramtype == otbApplication.ParameterType_Int \
or paramtype == otbApplication.ParameterType_Radius \
or paramtype == otbApplication.ParameterType_RAM:
......
.. _extended-filenames:
Advanced Use
============
This section describes advanced configuration options and tricks.
Environment variables that affect Orfeo ToolBox
------------------------------------------------
The following environment variables are parsed by Orfeo ToolBox. Note
that they only affect default values, and that settings in extended
filenames, applications, monteverdi or custom C++ code might override
those values.
* ``OTB_DEM_DIRECTORY``: Default directory where DEM tiles are stored. It should only contain ``.hgt`` or georeferenced ``.tif`` files. Empty if not set (no directory set)
* ``OTB_GEOID_FILE``: Default path to the geoid file that will be used to retrieve height of DEM above ellipsoid. Empty if not set (no geoid set)
* ``OTB_MAX_RAM_HINT``: Default maximum memory that OTB should use for processing, in MB. If not set, default value is 128 MB.
* ``OTB_LOGGER_LEVEL``: Default level of logging for OTB. Should be one of ``DEBUG``, ``INFO``, ``WARNING``, ``CRITICAL`` or ``FATAL``, by increasing order of priority. Only messages with a higher priority than the level of logging will be displayed. If not set, default level is ``INFO``.
Extended filenames
================================
------------------
.. _extended-filenames:
Extended filenames are an interesting feature of OTB. With them, you can control
several aspects of the behavior of OTB in the OTB-Applications or in our
......@@ -42,7 +59,6 @@ applications from the bash command line.**
Reader options
^^^^^^^^^^^^^^
::
&geom=<path/filename.geom>
......
......@@ -8,7 +8,7 @@ Table of Contents
Installation
OTB-Applications
Monteverdi
ExtendedFilenames
AdvancedUse
Recipes
Applications
FAQ
......@@ -239,12 +239,16 @@ doing geometry, projections, and also calibration.
Future developments will probably offer a more adapted structure to import and
export images between OTB and the Python world.
Setting of boolean parameters
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Setting of EmptyParameter
^^^^^^^^^^^^^^^^^^^^^^^^^
Most of the parameters are set using functions ``SetParameterXXX()``. The boolean
parameters are handled differently (also called Empty parameter). Let's take an example with the application
``ReadImageInfo``:
Most of the parameters are set using functions ``SetParameterXXX()``, except for
one type of parameter: the ``EmptyParameter``. This class was the first
implementation of a boolean. It is now **deprecated**, you should use ``BoolParameter``
instead.
Let's take an example with the application ``ReadImageInfo`` when it was still
using an ``EmptyParameter`` for parameter ``keywordlist``:
.. code-block:: python
......
......@@ -241,8 +241,6 @@ ReadGeometryFromImage(const std::string& filename, bool checkRpcTag)
if (projection)
{
otbMsgDevMacro(<< "OSSIM plugin projection instantiated ! ");
hasMetaData = projection->saveState(geom_kwl);
otb_kwl.SetKeywordlist(geom_kwl);
}
......@@ -257,7 +255,6 @@ ReadGeometryFromImage(const std::string& filename, bool checkRpcTag)
->open(ossimFilename(filename.c_str())));
if (handler)
{
otbMsgDevMacro(<< "OSSIM Open Image SUCCESS ! ");
// Add ossimPlugins model
ossimProjectionFactoryRegistry::instance()->registerFactory(ossimplugins::ossimPluginProjectionFactory::instance());
......@@ -273,7 +270,6 @@ ReadGeometryFromImage(const std::string& filename, bool checkRpcTag)
// if the handler has found a sensor model, copy the tags found
if (hasMetaData && dynamic_cast<ossimSensorModel const*>(projection))
{
otbMsgDevMacro(<<"OSSIM sensor projection instantiated ! ");
otb_kwl.SetKeywordlist(geom_kwl);
// geom_kwl.print(std::cout);
}
......@@ -317,16 +313,6 @@ ReadGeometryFromImage(const std::string& filename, bool checkRpcTag)
// which uses ossimSensorModelFactory and ossimPluginProjectionFactory internally,
// thus by-passing the need for a valid ossimImageHandler.
if (!hasMetaData)
{
otbMsgDevMacro(<< "OSSIM MetaData not present ! ");
}
else
{
otbMsgDevMacro(<< "OSSIM MetaData present ! ");
//otbMsgDevMacro(<< geom_kwl);
}
return otb_kwl;
}
......@@ -479,7 +465,6 @@ WriteGeometry(const ImageKeywordlist& otb_kwl, const std::string& filename)
if (geom_kwl.getSize() > 0)
{
otbMsgDevMacro(<< "Exporting keywordlist ...");
ossimFilename geomFileName(filename);
geomFileName.setExtension(".geom");
geom_kwl.write(geomFileName.chars());
......
......@@ -92,7 +92,7 @@ private:
SetParameterDescription("ip.radius", "The radius of the ball shaped structuring element (expressed in pixels). By default, 'ip.radius = 1 pixel'.");
SetDefaultParameterInt("ip.radius", 1.0);
AddParameter(ParameterType_Empty, "ip.suvbool", "Multiple majority: Undecided(X)/Original");
AddParameter(ParameterType_Bool, "ip.suvbool", "Multiple majority: Undecided(X)/Original");
SetParameterDescription("ip.suvbool", "Pixels with more than 1 majority class are marked as Undecided if this parameter is checked (true), or keep their Original labels otherwise (false). Please note that the Undecided value must be different from existing labels in the input labeled image. By default, 'ip.suvbool = false'.");
AddParameter(ParameterType_Int, "ip.nodatalabel", "Label for the NoData class");
......@@ -103,7 +103,7 @@ private:
SetParameterDescription("ip.undecidedlabel", "Label for the Undecided class. By default, 'ip.undecidedlabel = 0'.");
SetDefaultParameterInt("ip.undecidedlabel", 0.0);
AddParameter(ParameterType_Empty, "ip.onlyisolatedpixels", "Process isolated pixels only");
AddParameter(ParameterType_Bool, "ip.onlyisolatedpixels", "Process isolated pixels only");
SetParameterDescription("ip.onlyisolatedpixels", "Only pixels whose label is unique in the neighbordhood will be processed. By default, 'ip.onlyisolatedpixels = false'.");
AddParameter(ParameterType_Int, "ip.isolatedthreshold", "Threshold for isolated pixels");
......@@ -153,7 +153,7 @@ private:
m_NeighMajVotingFilter->SetLabelForUndecidedPixels(GetParameterInt("ip.undecidedlabel"));
// Set to Undecided label if NOT unique Majority Voting
if (IsParameterEnabled("ip.suvbool"))
if (GetParameterInt("ip.suvbool"))
{
m_NeighMajVotingFilter->SetKeepOriginalLabelBool(false);
}
......@@ -164,7 +164,7 @@ private:
}
// Process isolated pixels only
if (IsParameterEnabled("ip.onlyisolatedpixels"))
if (GetParameterInt("ip.onlyisolatedpixels"))
{
m_NeighMajVotingFilter->SetOnlyIsolatedPixels(true);
m_NeighMajVotingFilter->SetIsolatedThreshold(GetParameterInt("ip.isolatedthreshold"));
......
......@@ -143,12 +143,12 @@ private:
AddParameter(ParameterType_String, "cri", "Criterion");
SetParameterDescription("cri", "Dempster Shafer criterion (by default (belief+plausibility)/2)");
MandatoryOff("cri");
SetParameterString("cri","((Belief + Plausibility)/2.)", false);
SetParameterString("cri","((Belief + Plausibility)/2.)");
AddParameter(ParameterType_Float,"wgt","Weighting");
SetParameterDescription("wgt","Coefficient between 0 and 1 to promote undetection or false detections (default 0.5)");
MandatoryOff("wgt");
SetParameterFloat("wgt",0.5, false);
SetParameterFloat("wgt",0.5);
AddParameter(ParameterType_InputFilename,"initmod","initialization model");
SetParameterDescription("initmod","Initialization model (xml file) to be used. If the xml initialization model is set, the descriptor list is not used (specified using the option -desclist)");
......@@ -157,16 +157,15 @@ private:
AddParameter(ParameterType_StringList, "desclist","Descriptor list");
SetParameterDescription("desclist","List of the descriptors to be used in the model (must be specified to perform an automatic initialization)");
MandatoryOff("desclist");
SetParameterString("desclist","", false);
SetParameterString("desclist","");
AddParameter(ParameterType_Int,"maxnbit","Maximum number of iterations");
MandatoryOff("maxnbit");
SetParameterDescription("maxnbit","Maximum number of optimizer iteration (default 200)");
SetParameterInt("maxnbit",200, false);
SetParameterInt("maxnbit",200);
AddParameter(ParameterType_Empty,"optobs","Optimizer Observer");
AddParameter(ParameterType_Bool,"optobs","Optimizer Observer");
SetParameterDescription("optobs","Activate the optimizer observer");
MandatoryOff("optobs");
AddParameter(ParameterType_OutputFilename,"out","Output filename");
SetParameterDescription("out","Output model file name (xml file) contains the optimal model to perform information fusion.");
......@@ -405,7 +404,7 @@ private:
// Create the Command observer and register it with the optimizer.
CommandIterationUpdate::Pointer observer = CommandIterationUpdate::New();
if (IsParameterEnabled("optobs"))
if (GetParameterInt("optobs"))
{
m_Optimizer->AddObserver(itk::IterationEvent(), observer);
}
......
......@@ -59,11 +59,10 @@ protected:
InitKMClassification();
// init at the end cleanup
AddParameter( ParameterType_Empty, "cleanup", "Temporary files cleaning" );
EnableParameter( "cleanup" );
AddParameter( ParameterType_Bool, "cleanup", "Temporary files cleaning" );
SetParameterDescription( "cleanup",
"If activated, the application will try to clean all temporary files it created" );
MandatoryOff( "cleanup" );
SetParameterInt("cleanup", 1);
}
void InitKMSampling()
......@@ -148,7 +147,7 @@ protected:
void ComputeImageEnvelope(const std::string &vectorFileName)
{
GetInternalApplication("imgenvelop")->SetParameterString("out", vectorFileName, false);
GetInternalApplication("imgenvelop")->SetParameterString("out", vectorFileName);
GetInternalApplication("imgenvelop")->ExecuteAndWriteOutput();
}
......@@ -187,8 +186,8 @@ protected:
{
std::vector<std::string> fieldList = {fieldName};
GetInternalApplication("polystats")->SetParameterStringList("field", fieldList, false);
GetInternalApplication("polystats")->SetParameterString("out", statisticsFileName, false);
GetInternalApplication("polystats")->SetParameterStringList("field", fieldList);
GetInternalApplication("polystats")->SetParameterString("out", statisticsFileName);
ExecuteInternal("polystats");
}
......@@ -199,17 +198,17 @@ protected:
int NBSamples)
{
/* SampleSelection */
GetInternalApplication("select")->SetParameterString("out", sampleFileName, false);
GetInternalApplication("select")->SetParameterString("out", sampleFileName);
UpdateInternalParameters("select");
GetInternalApplication("select")->SetParameterString("instats", statisticsFileName, false);
GetInternalApplication("select")->SetParameterString("field", fieldName, false);
GetInternalApplication("select")->SetParameterString("instats", statisticsFileName);
GetInternalApplication("select")->SetParameterString("field", fieldName);
GetInternalApplication("select")->SetParameterString("strategy", "constant", false);
GetInternalApplication("select")->SetParameterInt("strategy.constant.nb", NBSamples, false);
GetInternalApplication("select")->SetParameterString("strategy", "constant");
GetInternalApplication("select")->SetParameterInt("strategy.constant.nb", NBSamples);
if( IsParameterEnabled("rand"))
GetInternalApplication("select")->SetParameterInt("rand", GetParameterInt("rand"), false);
GetInternalApplication("select")->SetParameterInt("rand", GetParameterInt("rand"));
// select sample positions
ExecuteInternal("select");
......@@ -217,8 +216,8 @@ protected:
/* SampleExtraction */
UpdateInternalParameters("extraction");
GetInternalApplication("extraction")->SetParameterString("outfield", "prefix", false);
GetInternalApplication("extraction")->SetParameterString("outfield.prefix.name", "value_", false);
GetInternalApplication("extraction")->SetParameterString("outfield", "prefix");
GetInternalApplication("extraction")->SetParameterString("outfield.prefix.name", "value_");
// extract sample descriptors
GetInternalApplication("extraction")->ExecuteAndWriteOutput();
......@@ -229,7 +228,7 @@ protected:
const std::string &modelFileName)
{
std::vector<std::string> extractOutputList = {sampleTrainFileName};
GetInternalApplication("training")->SetParameterStringList("io.vd", extractOutputList, false);
GetInternalApplication("training")->SetParameterStringList("io.vd", extractOutputList);
UpdateInternalParameters("training");
// set field names
......@@ -242,19 +241,19 @@ protected:
oss << i;
selectedNames.push_back( selectPrefix + oss.str() );
}
GetInternalApplication("training")->SetParameterStringList("feat", selectedNames, false);
GetInternalApplication("training")->SetParameterStringList("feat", selectedNames);
GetInternalApplication("training")->SetParameterString("classifier", "sharkkm", false);
GetInternalApplication("training")->SetParameterString("classifier", "sharkkm");
GetInternalApplication("training")->SetParameterInt("classifier.sharkkm.maxiter",
GetParameterInt("maxit"), false);
GetParameterInt("maxit"));
GetInternalApplication("training")->SetParameterInt("classifier.sharkkm.k",
GetParameterInt("nc"), false);
GetParameterInt("nc"));
if( IsParameterEnabled("rand"))
GetInternalApplication("training")->SetParameterInt("rand", GetParameterInt("rand"), false);
GetInternalApplication("training")->SetParameterInt("rand", GetParameterInt("rand"));
GetInternalApplication("training")->GetParameterByKey("v")->SetActive(false);
GetInternalApplication("training")->SetParameterString("io.out", modelFileName, false);
GetInternalApplication("training")->SetParameterString("io.out", modelFileName);
ExecuteInternal( "training" );
otbAppLogINFO("output model : " << GetInternalApplication("training")->GetParameterString("io.out"));
......@@ -264,8 +263,8 @@ protected:
const std::string &imagesStatsFileName)
{
std::vector<std::string> imageFileNameList = {imageFileName};
GetInternalApplication("imgstats")->SetParameterStringList("il", imageFileNameList, false);
GetInternalApplication("imgstats")->SetParameterString("out", imagesStatsFileName, false);
GetInternalApplication("imgstats")->SetParameterStringList("il", imageFileNameList);
GetInternalApplication("imgstats")->SetParameterString("out", imagesStatsFileName);
ExecuteInternal( "imgstats" );
otbAppLogINFO("image statistics file : " << GetInternalApplication("imgstats")->GetParameterString("out"));
......@@ -497,7 +496,7 @@ private:
Superclass::CreateOutMeansFile(GetParameterImage("in"), fileNames.modelFile, GetParameterInt("nc"));
// Remove all tempory files
if( IsParameterEnabled( "cleanup" ) )
if( GetParameterInt( "cleanup" ) )
{
otbAppLogINFO( <<"Final clean-up ..." );
fileNames.clear();
......@@ -506,7 +505,7 @@ private:
void UpdateKMPolygonClassStatisticsParameters(const std::string &vectorFileName)
{
GetInternalApplication( "polystats" )->SetParameterString( "vec", vectorFileName, false );
GetInternalApplication( "polystats" )->SetParameterString( "vec", vectorFileName);
UpdateInternalParameters( "polystats" );
}
......
......@@ -164,7 +164,7 @@ private:
SetParameterDescription("strategy.all","Take all samples");
// Default strategy : smallest
SetParameterString("strategy","smallest", false);
SetParameterString("strategy","smallest");
AddParameter(ParameterType_Choice, "mim", "Multi-Image Mode");
......
......@@ -82,7 +82,7 @@ private:
AddParameter(ParameterType_String,"cfield","Field containing the predicted class.");
SetParameterDescription("cfield","Field containing the predicted class");
SetParameterString("cfield","predicted", false);
SetParameterString("cfield","predicted");
// Doc example parameter settings
SetDocExampleParameterValue("inshp", "vectorData.shp");
......
......@@ -89,7 +89,7 @@ private:
AddParameter(ParameterType_String, "outfield.prefix.name", "Output field prefix");
SetParameterDescription("outfield.prefix.name","Prefix used to form the field names that"
"will contain the extracted values.");
SetParameterString("outfield.prefix.name", "value_", false);
SetParameterString("outfield.prefix.name", "value_");
AddChoice("outfield.list","Use the given name list");
SetParameterDescription("outfield.list","Use the given name list");
......
......@@ -200,7 +200,7 @@ private:
SetParameterDescription("strategy.all","Take all samples");
// Default strategy : smallest
SetParameterString("strategy","smallest", false);
SetParameterString("strategy","smallest");
AddParameter(ParameterType_ListView, "field", "Field Name");
SetParameterDescription("field","Name of the field carrying the class name in the input vectors.");
......
......@@ -205,7 +205,7 @@ public:
TrainModel( imageList, fileNames.sampleTrainOutputs, fileNames.sampleValidOutputs );
// cleanup
if( IsParameterEnabled( "cleanup" ) )
if( GetParameterInt( "cleanup" ) )
{
otbAppLogINFO( <<"Final clean-up ..." );
fileNames.clear();
......@@ -217,7 +217,7 @@ private :
void UpdatePolygonClassStatisticsParameters()
{
std::vector<std::string> vectorFileList = GetParameterStringList( "io.vd" );
GetInternalApplication( "polystat" )->SetParameterString( "vec", vectorFileList[0], false );
GetInternalApplication( "polystat" )->SetParameterString( "vec", vectorFileList[0]);
UpdateInternalParameters( "polystat" );
}
......
......@@ -183,7 +183,7 @@ void DoInit() override
SetParameterDescription( "sample.vtr" ,
"Ratio between training and validation samples (0.0 = all training, "
"1.0 = all validation) (default = 0.5).");
SetParameterFloat( "sample.vtr" , 0.5 , false );
SetParameterFloat( "sample.vtr" , 0.5);
Superclass::DoInit();
......
......@@ -112,7 +112,7 @@ protected:
contingencyTableCalculator->Compute(performanceLabeledListSample->Begin(),
performanceLabeledListSample->End(),predictedListSample->Begin(), predictedListSample->End());
if(IsParameterEnabled("v"))
if(GetParameterInt("v"))
{
otbAppLogINFO( "Training performances:" );
otbAppLogINFO(<<"Contingency table: reference labels (rows) vs. produced labels (cols)\n"
......
......@@ -124,13 +124,13 @@ private:
"Only geometries with this field available will be taken into account.\n"
"The field is added either in the input file (if 'out' off) or in the output file.\n"
"Caution, the 'cfield' must not exist in the input file if you are updating the file.");
SetParameterString("cfield","predicted", false);
SetParameterString("cfield","predicted");
AddParameter(ParameterType_ListView, "feat", "Field names to be calculated.");
SetParameterDescription("feat","List of field names in the input vector data used as features for training. "
"Put the same field names as the TrainVectorClassifier application.");
AddParameter(ParameterType_Empty, "confmap", "Confidence map");
AddParameter(ParameterType_Bool, "confmap", "Confidence map");
SetParameterDescription( "confmap", "Confidence map of the produced classification. "
"The confidence index depends on the model : \n"
" - LibSVM : difference between the two highest probabilities "
......@@ -145,7 +145,6 @@ private:
" * RandomForest : Confidence (proportion of votes for the majority class). "
"Margin (normalized difference of the votes of the 2 majority classes) is not available for now.\n"
" * SVM : distance to margin (only works for 2-class models).\n");
MandatoryOff("confmap");
AddParameter(ParameterType_OutputFilename, "out", "Output vector data file containing class labels");
SetParameterDescription("out","Output vector data file storing sample values (OGR format)."
......@@ -271,10 +270,10 @@ private:
ConfidenceListSampleType::Pointer quality;
bool computeConfidenceMap(IsParameterEnabled("confmap") && m_Model->HasConfidenceIndex()
bool computeConfidenceMap(GetParameterInt("confmap") && m_Model->HasConfidenceIndex()
&& !m_Model->GetRegressionMode());
if (!m_Model->HasConfidenceIndex() && IsParameterEnabled("confmap"))
if (!m_Model->HasConfidenceIndex() && GetParameterInt("confmap"))
{
otbAppLogWARNING("Confidence map requested but the classifier doesn't support it!");
}
......
......@@ -87,12 +87,12 @@ private:
AddParameter(ParameterType_String, "cri", "Criterion");
SetParameterDescription("cri", "Dempster Shafer criterion (by default (belief+plausibility)/2)");
MandatoryOff("cri");
SetParameterString("cri", "((Belief + Plausibility)/2.)", false);
SetParameterString("cri", "((Belief + Plausibility)/2.)");
AddParameter(ParameterType_Float, "thd", "Criterion threshold");
SetParameterDescription("thd", "Criterion threshold (default 0.5)");
MandatoryOff("thd");
SetParameterFloat("thd",0.5, false);
SetParameterFloat("thd",0.5);
AddParameter(ParameterType_OutputVectorData, "out", "Output Vector Data");
SetParameterDescription("out", "Output VectorData containing only the validated samples");
......
......@@ -61,16 +61,16 @@ namespace Wrapper
SetParameterDescription("classifier.boost.t.gentle",
"A modified version of the Real Adaboost algorithm, using Newton stepping "
"rather than exact optimization at each step.");
SetParameterString("classifier.boost.t", "real", false);
SetParameterString("classifier.boost.t", "real");
SetParameterDescription("classifier.boost.t", "Type of Boosting algorithm.");
//Do not expose SplitCriteria
//WeakCount
AddParameter(ParameterType_Int, "classifier.boost.w", "Weak count");
SetParameterInt("classifier.boost.w",100, false);
SetParameterInt("classifier.boost.w",100);
SetParameterDescription("classifier.boost.w","The number of weak classifiers.");
//WeightTrimRate
AddParameter(ParameterType_Float, "classifier.boost.r", "Weight Trim Rate");
SetParameterFloat("classifier.boost.r",0.95, false);
SetParameterFloat("classifier.boost.r",0.95);
SetParameterDescription("classifier.boost.r",
"A threshold between 0 and 1 used to save computational time. "
"Samples with summary weight <= (1 - weight_trim_rate) do not participate in"
......@@ -78,7 +78,7 @@ namespace Wrapper
"functionality.");
//MaxDepth : Not sure that this parameter has to be exposed.
AddParameter(ParameterType_Int, "classifier.boost.m", "Maximum depth of the tree");
SetParameterInt("classifier.boost.m",1, false);
SetParameterInt("classifier.boost.m",1);
SetParameterDescription("classifier.boost.m","Maximum depth of the tree.");
}
......
......@@ -40,9 +40,9 @@ LearningApplicationBase<TInputValue,TOutputValue>
//MaxDepth
AddParameter(ParameterType_Int, "classifier.dt.max", "Maximum depth of the tree");
#ifdef OTB_OPENCV_3
SetParameterInt("classifier.dt.max",10, false);
SetParameterInt("classifier.dt.max",10);
#else
SetParameterInt("classifier.dt.max",65535, false);
SetParameterInt("classifier.dt.max",65535);
#endif
SetParameterDescription("classifier.dt.max",
"The training algorithm attempts to split each node while its depth is smaller "
......@@ -51,14 +51,14 @@ LearningApplicationBase<TInputValue,TOutputValue>
//MinSampleCount
AddParameter(ParameterType_Int, "classifier.dt.min", "Minimum number of samples in each node");
SetParameterInt("classifier.dt.min",10, false);
SetParameterInt("classifier.dt.min",10);
SetParameterDescription("classifier.dt.min",
"If the number of samples in a node is smaller "
"than this parameter, then this node will not be split.");
//RegressionAccuracy
AddParameter(ParameterType_Float, "classifier.dt.ra", "Termination criteria for regression tree");
SetParameterFloat("classifier.dt.ra",0.01, false);
SetParameterFloat("classifier.dt.ra",0.01);
SetParameterDescription("classifier.dt.ra",
"If all absolute differences between an estimated value in a node "
"and the values of the train samples in this node are smaller than this "
......@@ -72,7 +72,7 @@ LearningApplicationBase<TInputValue,TOutputValue>
AddParameter(ParameterType_Int, "classifier.dt.cat",
"Cluster possible values of a categorical variable into K <= cat clusters to find a "
"suboptimal split");
SetParameterInt("classifier.dt.cat",10, false);
SetParameterInt("classifier.dt.cat",10);
SetParameterDescription("classifier.dt.cat",
"Cluster possible values of a categorical variable into K <= cat clusters to find a "
"suboptimal split.");
......@@ -81,22 +81,22 @@ LearningApplicationBase<TInputValue,TOutputValue>
AddParameter(ParameterType_Int, "classifier.dt.f", "K-fold cross-validations");
#ifdef OTB_OPENCV_3
// disable cross validation by default (crash in opencv 3.2)
SetParameterInt("classifier.dt.f",0, false);
SetParameterInt("classifier.dt.f",0);
#else
SetParameterInt("classifier.dt.f",10, false);
SetParameterInt("classifier.dt.f",10);
#endif
SetParameterDescription("classifier.dt.f",
"If cv_folds > 1, then it prunes a tree with K-fold cross-validation where K "
"is equal to cv_folds.");
//Use1seRule
AddParameter(ParameterType_Empty, "classifier.dt.r", "Set Use1seRule flag to false");
AddParameter(ParameterType_Bool, "classifier.dt.r", "Set Use1seRule flag to false");
SetParameterDescription("classifier.dt.r",
"If true, then a pruning will be harsher. This will make a tree more compact and more "
"resistant to the training data noise but a bit less accurate.");