Commit f2266333 authored by Guillaume Pasero

ENH: remove user flag in SetParameterXXX calls (not needed anymore)

parent 31fa2cd1
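
For readers skimming the diff: the change is purely mechanical, so here is a minimal, self-contained sketch of the call shape before and after. MockApplication and the hasUserValueFlag name are illustrative assumptions, not the real otb::Wrapper::Application API; the real setters (SetParameterString, SetParameterInt, SetParameterFloat, SetParameterStringList) previously took a trailing bool so that default values assigned in DoInit() would not be flagged as user-supplied, and that argument is now dropped.

    #include <iostream>
    #include <string>

    // Illustrative mock only, NOT the real otb::Wrapper::Application class.
    struct MockApplication
    {
      // Old shape (assumed): a trailing flag marked whether the value was user-set.
      void SetParameterFloat(const std::string& key, float value, bool hasUserValueFlag)
      {
        std::cout << key << " = " << value << " (user flag: " << hasUserValueFlag << ")\n";
      }
      // New shape after this commit: the flag argument is gone.
      void SetParameterFloat(const std::string& key, float value)
      {
        std::cout << key << " = " << value << "\n";
      }
    };

    int main()
    {
      MockApplication app;
      app.SetParameterFloat("sample.vtr", 0.5f, false); // before: the removed lines below
      app.SetParameterFloat("sample.vtr", 0.5f);        // after: the added lines below
    }

In the hunks that follow, lines prefixed with '-' are the old calls passing an explicit false, and lines prefixed with '+' are their replacements without it.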
@@ -143,12 +143,12 @@ private:
AddParameter(ParameterType_String, "cri", "Criterion");
SetParameterDescription("cri", "Dempster Shafer criterion (by default (belief+plausibility)/2)");
MandatoryOff("cri");
-SetParameterString("cri","((Belief + Plausibility)/2.)", false);
+SetParameterString("cri","((Belief + Plausibility)/2.)");
AddParameter(ParameterType_Float,"wgt","Weighting");
SetParameterDescription("wgt","Coefficient between 0 and 1 to promote undetection or false detections (default 0.5)");
MandatoryOff("wgt");
-SetParameterFloat("wgt",0.5, false);
+SetParameterFloat("wgt",0.5);
AddParameter(ParameterType_InputFilename,"initmod","initialization model");
SetParameterDescription("initmod","Initialization model (xml file) to be used. If the xml initialization model is set, the descriptor list is not used (specified using the option -desclist)");
@@ -157,12 +157,12 @@ private:
AddParameter(ParameterType_StringList, "desclist","Descriptor list");
SetParameterDescription("desclist","List of the descriptors to be used in the model (must be specified to perform an automatic initialization)");
MandatoryOff("desclist");
-SetParameterString("desclist","", false);
+SetParameterString("desclist","");
AddParameter(ParameterType_Int,"maxnbit","Maximum number of iterations");
MandatoryOff("maxnbit");
SetParameterDescription("maxnbit","Maximum number of optimizer iteration (default 200)");
-SetParameterInt("maxnbit",200, false);
+SetParameterInt("maxnbit",200);
AddParameter(ParameterType_Empty,"optobs","Optimizer Observer");
SetParameterDescription("optobs","Activate the optimizer observer");
......
@@ -148,7 +148,7 @@ protected:
void ComputeImageEnvelope(const std::string &vectorFileName)
{
-GetInternalApplication("imgenvelop")->SetParameterString("out", vectorFileName, false);
+GetInternalApplication("imgenvelop")->SetParameterString("out", vectorFileName);
GetInternalApplication("imgenvelop")->ExecuteAndWriteOutput();
}
@@ -187,8 +187,8 @@ protected:
{
std::vector<std::string> fieldList = {fieldName};
-GetInternalApplication("polystats")->SetParameterStringList("field", fieldList, false);
-GetInternalApplication("polystats")->SetParameterString("out", statisticsFileName, false);
+GetInternalApplication("polystats")->SetParameterStringList("field", fieldList);
+GetInternalApplication("polystats")->SetParameterString("out", statisticsFileName);
ExecuteInternal("polystats");
}
@@ -199,17 +199,17 @@ protected:
int NBSamples)
{
/* SampleSelection */
-GetInternalApplication("select")->SetParameterString("out", sampleFileName, false);
+GetInternalApplication("select")->SetParameterString("out", sampleFileName);
UpdateInternalParameters("select");
-GetInternalApplication("select")->SetParameterString("instats", statisticsFileName, false);
-GetInternalApplication("select")->SetParameterString("field", fieldName, false);
+GetInternalApplication("select")->SetParameterString("instats", statisticsFileName);
+GetInternalApplication("select")->SetParameterString("field", fieldName);
-GetInternalApplication("select")->SetParameterString("strategy", "constant", false);
-GetInternalApplication("select")->SetParameterInt("strategy.constant.nb", NBSamples, false);
+GetInternalApplication("select")->SetParameterString("strategy", "constant");
+GetInternalApplication("select")->SetParameterInt("strategy.constant.nb", NBSamples);
if( IsParameterEnabled("rand"))
-GetInternalApplication("select")->SetParameterInt("rand", GetParameterInt("rand"), false);
+GetInternalApplication("select")->SetParameterInt("rand", GetParameterInt("rand"));
// select sample positions
ExecuteInternal("select");
@@ -217,8 +217,8 @@ protected:
/* SampleExtraction */
UpdateInternalParameters("extraction");
-GetInternalApplication("extraction")->SetParameterString("outfield", "prefix", false);
-GetInternalApplication("extraction")->SetParameterString("outfield.prefix.name", "value_", false);
+GetInternalApplication("extraction")->SetParameterString("outfield", "prefix");
+GetInternalApplication("extraction")->SetParameterString("outfield.prefix.name", "value_");
// extract sample descriptors
GetInternalApplication("extraction")->ExecuteAndWriteOutput();
@@ -229,7 +229,7 @@ protected:
const std::string &modelFileName)
{
std::vector<std::string> extractOutputList = {sampleTrainFileName};
-GetInternalApplication("training")->SetParameterStringList("io.vd", extractOutputList, false);
+GetInternalApplication("training")->SetParameterStringList("io.vd", extractOutputList);
UpdateInternalParameters("training");
// set field names
@@ -242,19 +242,19 @@ protected:
oss << i;
selectedNames.push_back( selectPrefix + oss.str() );
}
-GetInternalApplication("training")->SetParameterStringList("feat", selectedNames, false);
+GetInternalApplication("training")->SetParameterStringList("feat", selectedNames);
-GetInternalApplication("training")->SetParameterString("classifier", "sharkkm", false);
+GetInternalApplication("training")->SetParameterString("classifier", "sharkkm");
GetInternalApplication("training")->SetParameterInt("classifier.sharkkm.maxiter",
-GetParameterInt("maxit"), false);
+GetParameterInt("maxit"));
GetInternalApplication("training")->SetParameterInt("classifier.sharkkm.k",
-GetParameterInt("nc"), false);
+GetParameterInt("nc"));
if( IsParameterEnabled("rand"))
-GetInternalApplication("training")->SetParameterInt("rand", GetParameterInt("rand"), false);
+GetInternalApplication("training")->SetParameterInt("rand", GetParameterInt("rand"));
GetInternalApplication("training")->GetParameterByKey("v")->SetActive(false);
-GetInternalApplication("training")->SetParameterString("io.out", modelFileName, false);
+GetInternalApplication("training")->SetParameterString("io.out", modelFileName);
ExecuteInternal( "training" );
otbAppLogINFO("output model : " << GetInternalApplication("training")->GetParameterString("io.out"));
@@ -264,8 +264,8 @@ protected:
const std::string &imagesStatsFileName)
{
std::vector<std::string> imageFileNameList = {imageFileName};
-GetInternalApplication("imgstats")->SetParameterStringList("il", imageFileNameList, false);
-GetInternalApplication("imgstats")->SetParameterString("out", imagesStatsFileName, false);
+GetInternalApplication("imgstats")->SetParameterStringList("il", imageFileNameList);
+GetInternalApplication("imgstats")->SetParameterString("out", imagesStatsFileName);
ExecuteInternal( "imgstats" );
otbAppLogINFO("image statistics file : " << GetInternalApplication("imgstats")->GetParameterString("out"));
@@ -506,7 +506,7 @@ private:
void UpdateKMPolygonClassStatisticsParameters(const std::string &vectorFileName)
{
-GetInternalApplication( "polystats" )->SetParameterString( "vec", vectorFileName, false );
+GetInternalApplication( "polystats" )->SetParameterString( "vec", vectorFileName);
UpdateInternalParameters( "polystats" );
}
......
@@ -164,7 +164,7 @@ private:
SetParameterDescription("strategy.all","Take all samples");
// Default strategy : smallest
-SetParameterString("strategy","smallest", false);
+SetParameterString("strategy","smallest");
AddParameter(ParameterType_Choice, "mim", "Multi-Image Mode");
......
@@ -82,7 +82,7 @@ private:
AddParameter(ParameterType_String,"cfield","Field containing the predicted class.");
SetParameterDescription("cfield","Field containing the predicted class");
-SetParameterString("cfield","predicted", false);
+SetParameterString("cfield","predicted");
// Doc example parameter settings
SetDocExampleParameterValue("inshp", "vectorData.shp");
......
@@ -89,7 +89,7 @@ private:
AddParameter(ParameterType_String, "outfield.prefix.name", "Output field prefix");
SetParameterDescription("outfield.prefix.name","Prefix used to form the field names that"
"will contain the extracted values.");
-SetParameterString("outfield.prefix.name", "value_", false);
+SetParameterString("outfield.prefix.name", "value_");
AddChoice("outfield.list","Use the given name list");
SetParameterDescription("outfield.list","Use the given name list");
......
@@ -200,7 +200,7 @@ private:
SetParameterDescription("strategy.all","Take all samples");
// Default strategy : smallest
-SetParameterString("strategy","smallest", false);
+SetParameterString("strategy","smallest");
AddParameter(ParameterType_ListView, "field", "Field Name");
SetParameterDescription("field","Name of the field carrying the class name in the input vectors.");
......
@@ -217,7 +217,7 @@ private :
void UpdatePolygonClassStatisticsParameters()
{
std::vector<std::string> vectorFileList = GetParameterStringList( "io.vd" );
-GetInternalApplication( "polystat" )->SetParameterString( "vec", vectorFileList[0], false );
+GetInternalApplication( "polystat" )->SetParameterString( "vec", vectorFileList[0]);
UpdateInternalParameters( "polystat" );
}
......
@@ -183,7 +183,7 @@ void DoInit() ITK_OVERRIDE
SetParameterDescription( "sample.vtr" ,
"Ratio between training and validation samples (0.0 = all training, "
"1.0 = all validation) (default = 0.5).");
-SetParameterFloat( "sample.vtr" , 0.5 , false );
+SetParameterFloat( "sample.vtr" , 0.5);
Superclass::DoInit();
......
@@ -124,7 +124,7 @@ private:
"Only geometries with this field available will be taken into account.\n"
"The field is added either in the input file (if 'out' off) or in the output file.\n"
"Caution, the 'cfield' must not exist in the input file if you are updating the file.");
-SetParameterString("cfield","predicted", false);
+SetParameterString("cfield","predicted");
AddParameter(ParameterType_ListView, "feat", "Field names to be calculated.");
SetParameterDescription("feat","List of field names in the input vector data used as features for training. "
......
@@ -87,12 +87,12 @@ private:
AddParameter(ParameterType_String, "cri", "Criterion");
SetParameterDescription("cri", "Dempster Shafer criterion (by default (belief+plausibility)/2)");
MandatoryOff("cri");
-SetParameterString("cri", "((Belief + Plausibility)/2.)", false);
+SetParameterString("cri", "((Belief + Plausibility)/2.)");
AddParameter(ParameterType_Float, "thd", "Criterion threshold");
SetParameterDescription("thd", "Criterion threshold (default 0.5)");
MandatoryOff("thd");
-SetParameterFloat("thd",0.5, false);
+SetParameterFloat("thd",0.5);
AddParameter(ParameterType_OutputVectorData, "out", "Output Vector Data");
SetParameterDescription("out", "Output VectorData containing only the validated samples");
......
@@ -61,16 +61,16 @@ namespace Wrapper
SetParameterDescription("classifier.boost.t.gentle",
"A modified version of the Real Adaboost algorithm, using Newton stepping "
"rather than exact optimization at each step.");
-SetParameterString("classifier.boost.t", "real", false);
+SetParameterString("classifier.boost.t", "real");
SetParameterDescription("classifier.boost.t", "Type of Boosting algorithm.");
//Do not expose SplitCriteria
//WeakCount
AddParameter(ParameterType_Int, "classifier.boost.w", "Weak count");
-SetParameterInt("classifier.boost.w",100, false);
+SetParameterInt("classifier.boost.w",100);
SetParameterDescription("classifier.boost.w","The number of weak classifiers.");
//WeightTrimRate
AddParameter(ParameterType_Float, "classifier.boost.r", "Weight Trim Rate");
-SetParameterFloat("classifier.boost.r",0.95, false);
+SetParameterFloat("classifier.boost.r",0.95);
SetParameterDescription("classifier.boost.r",
"A threshold between 0 and 1 used to save computational time. "
"Samples with summary weight <= (1 - weight_trim_rate) do not participate in"
@@ -78,7 +78,7 @@ namespace Wrapper
"functionality.");
//MaxDepth : Not sure that this parameter has to be exposed.
AddParameter(ParameterType_Int, "classifier.boost.m", "Maximum depth of the tree");
-SetParameterInt("classifier.boost.m",1, false);
+SetParameterInt("classifier.boost.m",1);
SetParameterDescription("classifier.boost.m","Maximum depth of the tree.");
}
......
@@ -40,9 +40,9 @@ LearningApplicationBase<TInputValue,TOutputValue>
//MaxDepth
AddParameter(ParameterType_Int, "classifier.dt.max", "Maximum depth of the tree");
#ifdef OTB_OPENCV_3
-SetParameterInt("classifier.dt.max",10, false);
+SetParameterInt("classifier.dt.max",10);
#else
-SetParameterInt("classifier.dt.max",65535, false);
+SetParameterInt("classifier.dt.max",65535);
#endif
SetParameterDescription("classifier.dt.max",
"The training algorithm attempts to split each node while its depth is smaller "
@@ -51,14 +51,14 @@ LearningApplicationBase<TInputValue,TOutputValue>
//MinSampleCount
AddParameter(ParameterType_Int, "classifier.dt.min", "Minimum number of samples in each node");
-SetParameterInt("classifier.dt.min",10, false);
+SetParameterInt("classifier.dt.min",10);
SetParameterDescription("classifier.dt.min",
"If the number of samples in a node is smaller "
"than this parameter, then this node will not be split.");
//RegressionAccuracy
AddParameter(ParameterType_Float, "classifier.dt.ra", "Termination criteria for regression tree");
-SetParameterFloat("classifier.dt.ra",0.01, false);
+SetParameterFloat("classifier.dt.ra",0.01);
SetParameterDescription("classifier.dt.ra",
"If all absolute differences between an estimated value in a node "
"and the values of the train samples in this node are smaller than this "
@@ -72,7 +72,7 @@ LearningApplicationBase<TInputValue,TOutputValue>
AddParameter(ParameterType_Int, "classifier.dt.cat",
"Cluster possible values of a categorical variable into K <= cat clusters to find a "
"suboptimal split");
-SetParameterInt("classifier.dt.cat",10, false);
+SetParameterInt("classifier.dt.cat",10);
SetParameterDescription("classifier.dt.cat",
"Cluster possible values of a categorical variable into K <= cat clusters to find a "
"suboptimal split.");
@@ -81,9 +81,9 @@ LearningApplicationBase<TInputValue,TOutputValue>
AddParameter(ParameterType_Int, "classifier.dt.f", "K-fold cross-validations");
#ifdef OTB_OPENCV_3
// disable cross validation by default (crash in opencv 3.2)
-SetParameterInt("classifier.dt.f",0, false);
+SetParameterInt("classifier.dt.f",0);
#else
-SetParameterInt("classifier.dt.f",10, false);
+SetParameterInt("classifier.dt.f",10);
#endif
SetParameterDescription("classifier.dt.f",
"If cv_folds > 1, then it prunes a tree with K-fold cross-validation where K "
......
@@ -52,7 +52,7 @@ LearningApplicationBase<TInputValue,TOutputValue>
//WeakCount
AddParameter(ParameterType_Int, "classifier.gbt.w", "Number of boosting algorithm iterations");
-SetParameterInt("classifier.gbt.w",200, false);
+SetParameterInt("classifier.gbt.w",200);
SetParameterDescription(
"classifier.gbt.w",
"Number \"w\" of boosting algorithm iterations, with w*K being the total number of trees in "
@@ -60,20 +60,20 @@ LearningApplicationBase<TInputValue,TOutputValue>
//Shrinkage
AddParameter(ParameterType_Float, "classifier.gbt.s", "Regularization parameter");
-SetParameterFloat("classifier.gbt.s",0.01, false);
+SetParameterFloat("classifier.gbt.s",0.01);
SetParameterDescription("classifier.gbt.s", "Regularization parameter.");
//SubSamplePortion
AddParameter(ParameterType_Float, "classifier.gbt.p",
"Portion of the whole training set used for each algorithm iteration");
-SetParameterFloat("classifier.gbt.p",0.8, false);
+SetParameterFloat("classifier.gbt.p",0.8);
SetParameterDescription(
"classifier.gbt.p",
"Portion of the whole training set used for each algorithm iteration. The subset is generated randomly.");
//MaxDepth
AddParameter(ParameterType_Int, "classifier.gbt.max", "Maximum depth of the tree");
-SetParameterInt("classifier.gbt.max",3, false);
+SetParameterInt("classifier.gbt.max",3);
SetParameterDescription(
"classifier.gbt.max", "The training algorithm attempts to split each node while its depth is smaller than the maximum "
"possible depth of the tree. The actual depth may be smaller if the other termination criteria are met, and/or "
......
@@ -79,7 +79,7 @@ void TrainImagesBase::InitSampling()
AddParameter( ParameterType_Float, "sample.vtr", "Training and validation sample ratio" );
SetParameterDescription( "sample.vtr", "Ratio between training and validation samples (0.0 = all training, 1.0 = "
"all validation) (default = 0.5)." );
-SetParameterFloat( "sample.vtr", 0.5, false );
+SetParameterFloat( "sample.vtr", 0.5);
SetMaximumParameterFloatValue( "sample.vtr", 1.0 );
SetMinimumParameterFloatValue( "sample.vtr", 0.0 );
@@ -160,8 +160,8 @@ void TrainImagesBase::ComputePolygonStatistics(FloatVectorImageListType *imageLi
for( unsigned int i = 0; i < nbImages; i++ )
{
GetInternalApplication( "polystat" )->SetParameterInputImage( "in", imageList->GetNthElement( i ) );
-GetInternalApplication( "polystat" )->SetParameterString( "vec", vectorFileNames[i], false );
-GetInternalApplication( "polystat" )->SetParameterString( "out", statisticsFileNames[i], false );
+GetInternalApplication( "polystat" )->SetParameterString( "vec", vectorFileNames[i]);
+GetInternalApplication( "polystat" )->SetParameterString( "out", statisticsFileNames[i]);
ExecuteInternal( "polystat" );
}
}
@@ -170,7 +170,7 @@ void TrainImagesBase::ComputePolygonStatistics(FloatVectorImageListType *imageLi
TrainImagesBase::SamplingRates TrainImagesBase::ComputeFinalMaximumSamplingRates(bool dedicatedValidation)
{
SamplingRates rates;
-GetInternalApplication( "rates" )->SetParameterString( "mim", "proportional", false );
+GetInternalApplication( "rates" )->SetParameterString( "mim", "proportional");
double vtr = GetParameterFloat( "sample.vtr" );
long mt = GetParameterInt( "sample.mt" );
long mv = GetParameterInt( "sample.mv" );
@@ -224,11 +224,11 @@ void TrainImagesBase::ComputeSamplingRate(const std::vector<std::string> &statis
const std::string &ratesFileName, long maximum)
{
// Sampling rates
-GetInternalApplication( "rates" )->SetParameterStringList( "il", statisticsFileNames, false );
-GetInternalApplication( "rates" )->SetParameterString( "out", ratesFileName, false );
+GetInternalApplication( "rates" )->SetParameterStringList( "il", statisticsFileNames);
+GetInternalApplication( "rates" )->SetParameterString( "out", ratesFileName);
if( GetParameterInt( "sample.bm" ) != 0 )
{
-GetInternalApplication( "rates" )->SetParameterString( "strategy", "smallest", false );
+GetInternalApplication( "rates" )->SetParameterString( "strategy", "smallest");
}
else
{
@@ -236,12 +236,12 @@ void TrainImagesBase::ComputeSamplingRate(const std::vector<std::string> &statis
{
std::ostringstream oss;
oss << maximum;
-GetInternalApplication( "rates" )->SetParameterString( "strategy", "constant", false );
-GetInternalApplication( "rates" )->SetParameterString( "strategy.constant.nb", oss.str(), false );
+GetInternalApplication( "rates" )->SetParameterString( "strategy", "constant");
+GetInternalApplication( "rates" )->SetParameterString( "strategy.constant.nb", oss.str());
}
else
{
-GetInternalApplication( "rates" )->SetParameterString( "strategy", "all", false );
+GetInternalApplication( "rates" )->SetParameterString( "strategy", "all");
}
}
ExecuteInternal( "rates" );
@@ -251,9 +251,9 @@ void
TrainImagesBase::TrainModel(FloatVectorImageListType *imageList, const std::vector<std::string> &sampleTrainFileNames,
const std::vector<std::string> &sampleValidationFileNames)
{
-GetInternalApplication( "training" )->SetParameterStringList( "io.vd", sampleTrainFileNames, false );
+GetInternalApplication( "training" )->SetParameterStringList( "io.vd", sampleTrainFileNames);
if( !sampleValidationFileNames.empty() )
-GetInternalApplication( "training" )->SetParameterStringList( "valid.vd", sampleValidationFileNames, false );
+GetInternalApplication( "training" )->SetParameterStringList( "valid.vd", sampleValidationFileNames);
UpdateInternalParameters( "training" );
// set field names
@@ -266,7 +266,7 @@ TrainImagesBase::TrainModel(FloatVectorImageListType *imageList, const std::vect
oss << i;
selectedNames.push_back( "value_" + oss.str() );
}
-GetInternalApplication( "training" )->SetParameterStringList( "feat", selectedNames, false );
+GetInternalApplication( "training" )->SetParameterStringList( "feat", selectedNames);
ExecuteInternal( "training" );
}
@@ -276,38 +276,38 @@ void TrainImagesBase::SelectAndExtractSamples(FloatVectorImageType *image, std::
std::string selectedField)
{
GetInternalApplication( "select" )->SetParameterInputImage( "in", image );
-GetInternalApplication( "select" )->SetParameterString( "out", sampleFileName, false );
+GetInternalApplication( "select" )->SetParameterString( "out", sampleFileName);
// Change the selection strategy based on selected sampling strategy
switch( strategy )
{
// case GEOMETRIC:
-// GetInternalApplication( "select" )->SetParameterString( "sampler", "random", false );
-// GetInternalApplication( "select" )->SetParameterString( "strategy", "percent", false );
+// GetInternalApplication( "select" )->SetParameterString( "sampler", "random");
+// GetInternalApplication( "select" )->SetParameterString( "strategy", "percent");
// GetInternalApplication( "select" )->SetParameterFloat( "strategy.percent.p",
-// GetParameterFloat( "sample.percent" ), false );
+// GetParameterFloat( "sample.percent" ));
// break;
case CLASS:
default:
-GetInternalApplication( "select" )->SetParameterString( "vec", vectorFileName, false );
-GetInternalApplication( "select" )->SetParameterString( "instats", statisticsFileName, false );
-GetInternalApplication( "select" )->SetParameterString( "sampler", "periodic", false );
+GetInternalApplication( "select" )->SetParameterString( "vec", vectorFileName);
+GetInternalApplication( "select" )->SetParameterString( "instats", statisticsFileName);
+GetInternalApplication( "select" )->SetParameterString( "sampler", "periodic");
GetInternalApplication( "select" )->SetParameterInt( "sampler.periodic.jitter", 50 );
-GetInternalApplication( "select" )->SetParameterString( "strategy", "byclass", false );
-GetInternalApplication( "select" )->SetParameterString( "strategy.byclass.in", ratesFileName, false );
+GetInternalApplication( "select" )->SetParameterString( "strategy", "byclass");
+GetInternalApplication( "select" )->SetParameterString( "strategy.byclass.in", ratesFileName);
break;
}
// select sample positions
ExecuteInternal( "select" );
-GetInternalApplication( "extraction" )->SetParameterString( "vec", sampleFileName, false );
+GetInternalApplication( "extraction" )->SetParameterString( "vec", sampleFileName);
UpdateInternalParameters( "extraction" );
if( !selectedField.empty() )
-GetInternalApplication( "extraction" )->SetParameterString( "field", selectedField, false );
+GetInternalApplication( "extraction" )->SetParameterString( "field", selectedField);
-GetInternalApplication( "extraction" )->SetParameterString( "outfield", "prefix", false );
-GetInternalApplication( "extraction" )->SetParameterString( "outfield.prefix.name", "value_", false );
+GetInternalApplication( "extraction" )->SetParameterString( "outfield", "prefix");
+GetInternalApplication( "extraction" )->SetParameterString( "outfield.prefix.name", "value_");
// extract sample descriptors
ExecuteInternal( "extraction" );
......
@@ -39,7 +39,7 @@ namespace Wrapper
//K parameter
AddParameter(ParameterType_Int, "classifier.knn.k", "Number of Neighbors");
-SetParameterInt("classifier.knn.k",32, false);
+SetParameterInt("classifier.knn.k",32);
SetParameterDescription("classifier.knn.k","The number of neighbors to use.");
if (this->m_RegressionFlag)
......
@@ -54,7 +54,7 @@ namespace Wrapper
SetParameterDescription("classifier.libsvm.k.sigmoid",
"The kernel is a hyperbolic tangente function of the vectors.");
-SetParameterString("classifier.libsvm.k", "linear", false);
+SetParameterString("classifier.libsvm.k", "linear");
SetParameterDescription("classifier.libsvm.k", "SVM Kernel Type.");
AddParameter(ParameterType_Choice, "classifier.libsvm.m", "SVM Model Type");
SetParameterDescription("classifier.libsvm.m", "Type of SVM formulation.");
@@ -67,7 +67,7 @@ namespace Wrapper
"multiplier C is used ");
AddChoice("classifier.libsvm.m.nusvr", "Nu Support Vector Regression");
-SetParameterString("classifier.libsvm.m", "epssvr", false);
+SetParameterString("classifier.libsvm.m", "epssvr");
SetParameterDescription("classifier.libsvm.m.nusvr",
"Same as the epsilon regression except that this time the bounded "
"parameter nu is used instead of epsilon");
@@ -89,17 +89,17 @@ namespace Wrapper
SetParameterDescription("classifier.libsvm.m.oneclass",
"All the training data are from the same class, SVM builds a boundary "
"that separates the class from the rest of the feature space.");
-SetParameterString("classifier.libsvm.m", "csvc", false);
+SetParameterString("classifier.libsvm.m", "csvc");
}
AddParameter(ParameterType_Float, "classifier.libsvm.c", "Cost parameter C");
-SetParameterFloat("classifier.libsvm.c",1.0, false);
+SetParameterFloat("classifier.libsvm.c",1.0);
SetParameterDescription("classifier.libsvm.c",
"SVM models have a cost parameter C (1 by default) to control the "
"trade-off between training errors and forcing rigid margins.");
AddParameter(ParameterType_Float, "classifier.libsvm.nu", "Cost parameter Nu");
-SetParameterFloat("classifier.libsvm.nu",0.5, false);
+SetParameterFloat("classifier.libsvm.nu",0.5);
SetParameterDescription("classifier.libsvm.nu",
"Cost parameter Nu, in the range 0..1, the larger the value, "
"the smoother the decision.");
@@ -115,7 +115,7 @@ namespace Wrapper
if (this->m_RegressionFlag)
{
AddParameter(ParameterType_Float, "classifier.libsvm.eps", "Epsilon");
-SetParameterFloat("classifier.libsvm.eps",1e-3, false);
+SetParameterFloat("classifier.libsvm.eps",1e-3);
SetParameterDescription("classifier.libsvm.eps",
"The distance between feature vectors from the training set and "
"the fitting hyper-plane must be less than Epsilon. For outliers"
......
@@ -52,7 +52,7 @@ LearningApplicationBase<TInputValue,TOutputValue>
"take into account the magnitude of the partial derivative (coordinate "
"of the gradient) but only its sign.");
-SetParameterString("classifier.ann.t", "reg", false);
+SetParameterString("classifier.ann.t", "reg");
SetParameterDescription("classifier.ann.t",
"Type of training method for the multilayer perceptron (MLP) neural network.");
@@ -73,7 +73,7 @@ LearningApplicationBase<TInputValue,TOutputValue>
AddChoice("classifier.ann.f.ident", "Identity function");
AddChoice("classifier.ann.f.sig", "Symmetrical Sigmoid function");
AddChoice("classifier.ann.f.gau", "Gaussian function (Not completely supported)");
-SetParameterString("classifier.ann.f", "sig", false);
+SetParameterString("classifier.ann.f", "sig");
SetParameterDescription("classifier.ann.f",
"This function determine whether the output of the node is positive or not "
"depending on the output of the transfert function.");
@@ -81,21 +81,21 @@ LearningApplicationBase<TInputValue,TOutputValue>
//Alpha
AddParameter(ParameterType_Float, "classifier.ann.a",
"Alpha parameter of the activation function");
-SetParameterFloat("classifier.ann.a",1., false);
+SetParameterFloat("classifier.ann.a",1.);
SetParameterDescription("classifier.ann.a",
"Alpha parameter of the activation function (used only with sigmoid and gaussian functions).");
//Beta
AddParameter(ParameterType_Float, "classifier.ann.b",
"Beta parameter of the activation function");
-SetParameterFloat("classifier.ann.b",1., false);
+SetParameterFloat("classifier.ann.b",1.);
SetParameterDescription("classifier.ann.b",
"Beta parameter of the activation function (used only with sigmoid and gaussian functions).");
//BackPropDWScale
AddParameter(ParameterType_Float, "classifier.ann.bpdw",
"Strength of the weight gradient term in the BACKPROP method");
-SetParameterFloat("classifier.ann.bpdw",0.1, false);
+SetParameterFloat("classifier.ann.bpdw",0.1);
SetParameterDescription("classifier.ann.bpdw",
"Strength of the weight gradient term in the BACKPROP method. The "
"recommended value is about 0.1.");
@@ -103,7 +103,7 @@ LearningApplicationBase<TInputValue,TOutputValue>
//BackPropMomentScale
AddParameter(ParameterType_Float, "classifier.ann.bpms",
"Strength of the momentum term (the difference between weights on the 2 previous iterations)");
-SetParameterFloat("classifier.ann.bpms",0.1, false);
+SetParameterFloat("classifier.ann.bpms",0.1);
SetParameterDescription("classifier.ann.bpms",
"Strength of the momentum term (the difference between weights on the 2 previous "
"iterations). This parameter provides some inertia to smooth the random "
@@ -113,14 +113,14 @@ LearningApplicationBase<TInputValue,TOutputValue>
//RegPropDW0
AddParameter(ParameterType_Float, "classifier.ann.rdw",
"Initial value Delta_0 of update-values Delta_{ij} in RPROP method");
-SetParameterFloat("classifier.ann.rdw",0.1, false);
+SetParameterFloat("classifier.ann.rdw",0.1);
SetParameterDescription("classifier.ann.rdw",
"Initial value Delta_0 of update-values Delta_{ij} in RPROP method (default = 0.1).");
//RegPropDWMin
AddParameter(ParameterType_Float, "classifier.ann.rdwm",
"Update-values lower limit Delta_{min} in RPROP method");
-SetParameterFloat("classifier.ann.rdwm",1e-7, false);
+SetParameterFloat("classifier.ann.rdwm",1e-7);
SetParameterDescription("classifier.ann.rdwm",
"Update-values lower limit Delta_{min} in RPROP method. It must be positive "
"(default = 1e-7).");
@@ -139,20 +139,20 @@ LearningApplicationBase<TInputValue,TOutputValue>
AddChoice("classifier.ann.term.all", "Max. iterations + Epsilon");
SetParameterDescription("classifier.ann.term.all",
"Both termination criteria are used. Training stop at the first reached");
-SetParameterString("classifier.ann.term", "all", false);
+SetParameterString("classifier.ann.term", "all");
SetParameterDescription("classifier.ann.term", "Termination criteria.");
//Epsilon
AddParameter(ParameterType_Float, "classifier.ann.eps",
"Epsilon value used in the Termination criteria");
-SetParameterFloat("classifier.ann.eps",0.01, false);
+SetParameterFloat("classifier.ann.eps",0.01);
SetParameterDescription("classifier.ann.eps",
"Epsilon value used in the Termination criteria.");
//MaxIter
AddParameter(ParameterType_Int, "classifier.ann.iter",
"Maximum number of iterations used in the Termination criteria");
-SetParameterInt("classifier.ann.iter",1000, false);
+SetParameterInt("classifier.ann.iter",1000);
SetParameterDescription("classifier.ann.iter",
"Maximum number of iterations used in the Termination criteria.");
......
@@ -39,7 +39,7 @@ LearningApplicationBase<TInputValue,TOutputValue>
"See complete documentation here \\url{http://docs.opencv.org/modules/ml/doc/random_trees.html}.");
//MaxDepth
AddParameter(ParameterType_Int, "classifier.rf.max", "Maximum depth of the tree");
-SetParameterInt("classifier.rf.max",5, false);
+SetParameterInt("classifier.rf.max",5);
SetParameterDescription(
"classifier.rf.max",
"The depth of the tree. A low value will likely underfit and conversely a high value will likely overfit. "
@@ -47,14 +47,14 @@ LearningApplicationBase<TInputValue,TOutputValue>
//MinSampleCount
AddParameter(ParameterType_Int, "classifier.rf.min", "Minimum number of samples in each node");
-SetParameterInt("classifier.rf.min",10, false);
+SetParameterInt("classifier.rf.min",10);
SetParameterDescription(
"classifier.rf.min", "If the number of samples in a node is smaller than this parameter, "
"then the node will not be split. A reasonable value is a small percentage of the total data e.g. 1 percent.");
//RegressionAccuracy
AddParameter(ParameterType_Float, "classifier.rf.ra", "Termination Criteria for regression tree");
-SetParameterFloat("classifier.rf.ra",0., false);
+SetParameterFloat("classifier.rf.ra",0.);
SetParameterDescription("classifier.rf.ra", "If all absolute differences between an estimated value in a node "
"and the values of the train samples in this node are smaller than this regression accuracy parameter, "
"then the node will not be split.");
@@ -66,7 +66,7 @@ LearningApplicationBase<TInputValue,TOutputValue>
//MaxNumberOfCategories
AddParameter(ParameterType_Int, "classifier.rf.cat",
"Cluster possible values of a categorical variable into K <= cat clusters to find a suboptimal split");
-SetParameterInt("classifier.rf.cat",10, false);
+SetParameterInt("classifier.rf.cat",10);
SetParameterDescription(
"classifier.rf.cat",
"Cluster possible values of a categorical variable into K <= cat clusters to find a suboptimal split.");
@@ -78,7 +78,7 @@ LearningApplicationBase<TInputValue,TOutputValue>
//MaxNumberOfVariables
AddParameter(ParameterType_Int, "classifier.rf.var",
"Size of the randomly selected subset of features at each tree node");
-SetParameterInt("classifier.rf.var",0, false);
+SetParameterInt("classifier.rf.var",0);
SetParameterDescription(
"classifier.rf.var",
"The size of the subset of features, randomly selected at each tree node, that are used to find the best split(s). "
@@ -87,7 +87,7 @@ LearningApplicationBase<TInputValue,TOutputValue>
//MaxNumberOfTrees
AddParameter(ParameterType_Int, "classifier.rf.nbtrees",
"Maximum number of trees in the forest");
-SetParameterInt("classifier.rf.nbtrees",100, false);
+SetParameterInt("classifier.rf.nbtrees",100);
SetParameterDescription(
"classifier.rf.nbtrees",
"The maximum number of trees in the forest. Typically, the more trees you have, the better the accuracy. "
@@ -97,7 +97,7 @@ LearningApplicationBase<TInputValue,TOutputValue>
//ForestAccuracy
AddParameter(ParameterType_Float, "classifier.rf.acc",
"Sufficient accuracy (OOB error)");
-SetParameterFloat("classifier.rf.acc",0.01, false);
+SetParameterFloat("classifier.rf.acc",0.01);
SetParameterDescription("classifier.rf.acc","Sufficient accuracy (OOB error).");
......
@@ -42,14 +42,14 @@ namespace Wrapper
{
AddChoice("classifier.svm.m.epssvr", "Epsilon Support Vector Regression");
AddChoice("classifier.svm.m.nusvr", "Nu Support Vector Regression");
-SetParameterString("classifier.svm.m", "epssvr", false);
+SetParameterString("classifier.svm.m", "epssvr");
}
else
{
AddChoice("classifier.svm.m.csvc", "C support vector classification");
AddChoice("classifier.svm.m.nusvc", "Nu support vector classification");
AddChoice("classifier.svm.m.oneclass", "Distribution estimation (One Class SVM)");
-SetParameterString("classifier.svm.m", "csvc", false);
+SetParameterString("classifier.svm.m", "csvc");
}
AddParameter(ParameterType_Choice, "classifier.svm.k", "SVM Kernel Type");
AddChoice("classifier.svm.k.linear", "Linear");
@@ -57,22 +57,22 @@ namespace Wrapper
AddChoice("classifier.svm.k.rbf", "Gaussian radial basis function");
AddChoice("classifier.svm.k.poly", "Polynomial");
AddChoice("classifier.svm.k.sigmoid", "Sigmoid");
-SetParameterString("classifier.svm.k", "linear", false);
+SetParameterString("classifier.svm.k", "linear");