Commit d8676165 authored by Ludovic Hussonnois

ENH: Change calls to the SetParameter*() functions and make the UserValue flag default to true.

parent f84796f3
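
The pattern in the hunks below is uniform: every call that sets a default value during DoInit(), or configures an internal application before execution, now passes an explicit false as a trailing argument, apparently so the parameter is not flagged as carrying a user-supplied value and can still be overridden from the command line. A minimal sketch of the assumed overload, with the parameter name hasUserValueFlag chosen for illustration (the real OTB declaration may differ):

// Hedged sketch of the assumed signature change, not copied from the OTB
// headers. Per the commit message, the trailing bool defaults to true, so an
// ordinary call marks the parameter as user-set, while DoInit() defaults
// pass false and stay overridable.
void SetParameterString(std::string parameter, std::string value,
                        bool hasUserValueFlag = true);

// Example drawn from this diff: assign a default without claiming it came
// from the user.
SetParameterString("cfield", "class", false);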
@@ -140,12 +140,12 @@ private:
AddParameter(ParameterType_String, "cri", "Criterion");
SetParameterDescription("cri", "Dempster Shafer criterion (by default (belief+plausibility)/2)");
MandatoryOff("cri");
SetParameterString("cri","((Belief + Plausibility)/2.)");
SetParameterString("cri","((Belief + Plausibility)/2.)", false);
AddParameter(ParameterType_Float,"wgt","Weighting");
SetParameterDescription("wgt","Coefficient between 0 and 1 to promote undetection or false detections (default 0.5)");
MandatoryOff("wgt");
SetParameterFloat("wgt", 0.5);
SetParameterFloat("wgt",0.5, false);
AddParameter(ParameterType_InputFilename,"initmod","initialization model");
SetParameterDescription("initmod","Initialization model (xml file) to be used. If the xml initialization model is set, the descriptor list is not used (specified using the option -desclist)");
@@ -154,12 +154,12 @@ private:
AddParameter(ParameterType_StringList, "desclist","Descriptor list");
SetParameterDescription("desclist","List of the descriptors to be used in the model (must be specified to perform an automatic initialization)");
MandatoryOff("desclist");
SetParameterString("desclist","");
SetParameterString("desclist","", false);
AddParameter(ParameterType_Int,"maxnbit","Maximum number of iterations");
MandatoryOff("maxnbit");
SetParameterDescription("maxnbit","Maximum number of optimizer iteration (default 200)");
SetParameterInt("maxnbit", 200);
SetParameterInt("maxnbit",200, false);
AddParameter(ParameterType_Empty,"optobs","Optimizer Observer");
SetParameterDescription("optobs","Activate the optimizer observer");
@@ -245,7 +245,7 @@ private:
{
otbAppLogWARNING("The available RAM is too small to process this sample size of " << GetParameterInt("ts") <<
" pixels. The sample size will be reduced to " << maxPixNb << " pixels." << std::endl);
this->SetParameterInt("ts", maxPixNb);
this->SetParameterInt("ts",maxPixNb, false);
}
this->SetMaximumParameterIntValue("ts", maxPixNb);
@@ -161,7 +161,7 @@ private:
SetParameterDescription("strategy.all","Take all samples");
// Default strategy : smallest
SetParameterString("strategy","smallest");
SetParameterString("strategy","smallest", false);
AddParameter(ParameterType_Choice, "mim", "Multi-Image Mode");
@@ -78,7 +78,7 @@ private:
AddParameter(ParameterType_String,"cfield","Field containing the predicted class.");
SetParameterDescription("cfield","Field containing the predicted class");
SetParameterString("cfield","predicted");
SetParameterString("cfield","predicted", false);
// Doc example parameter settings
SetDocExampleParameterValue("inshp", "vectorData.shp");
@@ -86,7 +86,7 @@ private:
AddParameter(ParameterType_String, "outfield.prefix.name", "Output field prefix");
SetParameterDescription("outfield.prefix.name","Prefix used to form the field names that"
"will contain the extracted values.");
SetParameterString("outfield.prefix.name", "value_");
SetParameterString("outfield.prefix.name", "value_", false);
AddChoice("outfield.list","Use the given name list");
SetParameterDescription("outfield.list","Use the given name list");
@@ -197,7 +197,7 @@ private:
SetParameterDescription("strategy.all","Take all samples");
// Default strategy : smallest
SetParameterString("strategy","smallest");
SetParameterString("strategy","smallest", false);
AddParameter(ParameterType_ListView, "field", "Field Name");
SetParameterDescription("field","Name of the field carrying the class name in the input vectors.");
@@ -155,7 +155,7 @@ void DoInit() ITK_OVERRIDE
SetParameterDescription("sample.vtr",
"Ratio between training and validation samples (0.0 = all training, 1.0 = "
"all validation) (default = 0.5).");
SetParameterFloat("sample.vtr", 0.5);
SetParameterFloat("sample.vtr",0.5, false);
SetMaximumParameterFloatValue("sample.vtr",1.0);
SetMinimumParameterFloatValue("sample.vtr",0.0);
@@ -214,7 +214,7 @@ void DoUpdateParameters() ITK_OVERRIDE
if ( HasValue("io.vd") )
{
std::vector<std::string> vectorFileList = GetParameterStringList("io.vd");
GetInternalApplication("polystat")->SetParameterString("vec",vectorFileList[0]);
GetInternalApplication("polystat")->SetParameterString("vec",vectorFileList[0], false);
UpdateInternalParameters("polystat");
}
}
@@ -289,21 +289,21 @@ void DoExecute() ITK_OVERRIDE
for (unsigned int i=0 ; i<nbInputs ; i++)
{
GetInternalApplication("polystat")->SetParameterInputImage("in",imageList->GetNthElement(i));
GetInternalApplication("polystat")->SetParameterString("vec",vectorFileList[i]);
GetInternalApplication("polystat")->SetParameterString("out",polyStatTrainOutputs[i]);
GetInternalApplication("polystat")->SetParameterString("vec",vectorFileList[i], false);
GetInternalApplication("polystat")->SetParameterString("out",polyStatTrainOutputs[i], false);
ExecuteInternal("polystat");
// analyse polygons given for validation
if (dedicatedValidation)
{
GetInternalApplication("polystat")->SetParameterString("vec",validationVectorFileList[i]);
GetInternalApplication("polystat")->SetParameterString("out",polyStatValidOutputs[i]);
GetInternalApplication("polystat")->SetParameterString("vec",validationVectorFileList[i], false);
GetInternalApplication("polystat")->SetParameterString("out",polyStatValidOutputs[i], false);
ExecuteInternal("polystat");
}
}
// ---------------------------------------------------------------------------
// Compute sampling rates
GetInternalApplication("rates")->SetParameterString("mim","proportional");
GetInternalApplication("rates")->SetParameterString("mim","proportional", false);
double vtr = GetParameterFloat("sample.vtr");
long mt = GetParameterInt("sample.mt");
long mv = GetParameterInt("sample.mv");
@@ -348,44 +348,44 @@ void DoExecute() ITK_OVERRIDE
}
// Sampling rates for training
GetInternalApplication("rates")->SetParameterStringList("il",polyStatTrainOutputs);
GetInternalApplication("rates")->SetParameterString("out",rateTrainOut);
GetInternalApplication("rates")->SetParameterStringList("il",polyStatTrainOutputs, false);
GetInternalApplication("rates")->SetParameterString("out",rateTrainOut, false);
if (GetParameterInt("sample.bm") != 0)
{
GetInternalApplication("rates")->SetParameterString("strategy","smallest");
GetInternalApplication("rates")->SetParameterString("strategy","smallest", false);
}
else
{
if (fmt > -1)
{
GetInternalApplication("rates")->SetParameterString("strategy","constant");
GetInternalApplication("rates")->SetParameterString("strategy","constant", false);
GetInternalApplication("rates")->SetParameterInt("strategy.constant.nb",fmt);
}
else
{
GetInternalApplication("rates")->SetParameterString("strategy","all");
GetInternalApplication("rates")->SetParameterString("strategy","all", false);
}
}
ExecuteInternal("rates");
// Sampling rates for validation
if (dedicatedValidation)
{
GetInternalApplication("rates")->SetParameterStringList("il",polyStatValidOutputs);
GetInternalApplication("rates")->SetParameterString("out",rateValidOut);
GetInternalApplication("rates")->SetParameterStringList("il",polyStatValidOutputs, false);
GetInternalApplication("rates")->SetParameterString("out",rateValidOut, false);
if (GetParameterInt("sample.bm") != 0)
{
GetInternalApplication("rates")->SetParameterString("strategy","smallest");
GetInternalApplication("rates")->SetParameterString("strategy","smallest", false);
}
else
{
if (fmv > -1)
{
GetInternalApplication("rates")->SetParameterString("strategy","constant");
GetInternalApplication("rates")->SetParameterString("strategy","constant", false);
GetInternalApplication("rates")->SetParameterInt("strategy.constant.nb",fmv);
}
else
{
GetInternalApplication("rates")->SetParameterString("strategy","all");
GetInternalApplication("rates")->SetParameterString("strategy","all", false);
}
}
ExecuteInternal("rates");
@@ -393,18 +393,18 @@ void DoExecute() ITK_OVERRIDE
// ---------------------------------------------------------------------------
// Select & extract samples
GetInternalApplication("select")->SetParameterString("sampler", "periodic");
GetInternalApplication("select")->SetParameterString("sampler", "periodic", false);
GetInternalApplication("select")->SetParameterInt("sampler.periodic.jitter",50);
GetInternalApplication("select")->SetParameterString("strategy","byclass");
GetInternalApplication("extraction")->SetParameterString("outfield", "prefix");
GetInternalApplication("extraction")->SetParameterString("outfield.prefix.name","value_");
GetInternalApplication("select")->SetParameterString("strategy","byclass", false);
GetInternalApplication("extraction")->SetParameterString("outfield", "prefix", false);
GetInternalApplication("extraction")->SetParameterString("outfield.prefix.name","value_", false);
for (unsigned int i=0 ; i<nbInputs ; i++)
{
GetInternalApplication("select")->SetParameterInputImage("in",imageList->GetNthElement(i));
GetInternalApplication("select")->SetParameterString("vec",vectorFileList[i]);
GetInternalApplication("select")->SetParameterString("out",sampleOutputs[i]);
GetInternalApplication("select")->SetParameterString("instats",polyStatTrainOutputs[i]);
GetInternalApplication("select")->SetParameterString("strategy.byclass.in",ratesTrainOutputs[i]);
GetInternalApplication("select")->SetParameterString("vec",vectorFileList[i], false);
GetInternalApplication("select")->SetParameterString("out",sampleOutputs[i], false);
GetInternalApplication("select")->SetParameterString("instats",polyStatTrainOutputs[i], false);
GetInternalApplication("select")->SetParameterString("strategy.byclass.in",ratesTrainOutputs[i], false);
// select sample positions
ExecuteInternal("select");
// extract sample descriptors
@@ -412,10 +412,10 @@ void DoExecute() ITK_OVERRIDE
if (dedicatedValidation)
{
GetInternalApplication("select")->SetParameterString("vec",validationVectorFileList[i]);
GetInternalApplication("select")->SetParameterString("out",sampleValidOutputs[i]);
GetInternalApplication("select")->SetParameterString("instats",polyStatValidOutputs[i]);
GetInternalApplication("select")->SetParameterString("strategy.byclass.in",ratesValidOutputs[i]);
GetInternalApplication("select")->SetParameterString("vec",validationVectorFileList[i], false);
GetInternalApplication("select")->SetParameterString("out",sampleValidOutputs[i], false);
GetInternalApplication("select")->SetParameterString("instats",polyStatValidOutputs[i], false);
GetInternalApplication("select")->SetParameterString("strategy.byclass.in",ratesValidOutputs[i], false);
// select sample positions
ExecuteInternal("select");
// extract sample descriptors
@@ -473,8 +473,8 @@ void DoExecute() ITK_OVERRIDE
// ---------------------------------------------------------------------------
// Train model
GetInternalApplication("training")->SetParameterStringList("io.vd",sampleTrainOutputs);
GetInternalApplication("training")->SetParameterStringList("valid.vd",sampleValidOutputs);
GetInternalApplication("training")->SetParameterStringList("io.vd",sampleTrainOutputs, false);
GetInternalApplication("training")->SetParameterStringList("valid.vd",sampleValidOutputs, false);
UpdateInternalParameters("training");
// set field names
FloatVectorImageType::Pointer image = imageList->GetNthElement(0);
@@ -486,7 +486,7 @@ void DoExecute() ITK_OVERRIDE
oss << i;
selectedNames.push_back("value_"+oss.str());
}
GetInternalApplication("training")->SetParameterStringList("feat",selectedNames);
GetInternalApplication("training")->SetParameterStringList("feat",selectedNames, false);
ExecuteInternal("training");
// cleanup
@@ -80,7 +80,7 @@ private:
AddParameter(ParameterType_String,"cfield","Field containing the class id for supervision");
SetParameterDescription("cfield","Field containing the class id for supervision. Only geometries with this field available will be taken into account.");
SetParameterString("cfield","class");
SetParameterString("cfield","class", false);
// Doc example parameter settings
SetDocExampleParameterValue("inshp", "vectorData.shp");
@@ -159,7 +159,7 @@ void DoInit() ITK_OVERRIDE
AddParameter(ParameterType_Float, "sample.vtr", "Training and validation sample ratio");
SetParameterDescription("sample.vtr",
"Ratio between training and validation samples (0.0 = all training, 1.0 = all validation) (default = 0.5).");
SetParameterFloat("sample.vtr", 0.5);
SetParameterFloat("sample.vtr",0.5, false);
Superclass::DoInit();
@@ -84,12 +84,12 @@ private:
AddParameter(ParameterType_String, "cri", "Criterion");
SetParameterDescription("cri", "Dempster Shafer criterion (by default (belief+plausibility)/2)");
MandatoryOff("cri");
SetParameterString("cri", "((Belief + Plausibility)/2.)");
SetParameterString("cri", "((Belief + Plausibility)/2.)", false);
AddParameter(ParameterType_Float, "thd", "Criterion threshold");
SetParameterDescription("thd", "Criterion threshold (default 0.5)");
MandatoryOff("thd");
SetParameterFloat("thd", 0.5);
SetParameterFloat("thd",0.5, false);
AddParameter(ParameterType_OutputVectorData, "out", "Output Vector Data");
SetParameterDescription("out", "Output VectorData containing only the validated samples");
@@ -39,22 +39,22 @@ namespace Wrapper
AddChoice("classifier.boost.t.logit", "LogitBoost (technique producing good regression fits)");
AddChoice("classifier.boost.t.gentle", "Gentle AdaBoost (technique setting less weight on outlier data points "
"and, for that reason, being often good with regression data)");
SetParameterString("classifier.boost.t", "real");
SetParameterString("classifier.boost.t", "real", false);
SetParameterDescription("classifier.boost.t", "Type of Boosting algorithm.");
//Do not expose SplitCriteria
//WeakCount
AddParameter(ParameterType_Int, "classifier.boost.w", "Weak count");
SetParameterInt("classifier.boost.w", 100);
SetParameterInt("classifier.boost.w",100, false);
SetParameterDescription("classifier.boost.w","The number of weak classifiers.");
//WeightTrimRate
AddParameter(ParameterType_Float, "classifier.boost.r", "Weight Trim Rate");
SetParameterFloat("classifier.boost.r", 0.95);
SetParameterFloat("classifier.boost.r",0.95, false);
SetParameterDescription("classifier.boost.r","A threshold between 0 and 1 used to save computational time. "
"Samples with summary weight <= (1 - weight_trim_rate) do not participate in the next iteration of training. "
"Set this parameter to 0 to turn off this functionality.");
//MaxDepth : Not sure that this parameter has to be exposed.
AddParameter(ParameterType_Int, "classifier.boost.m", "Maximum depth of the tree");
SetParameterInt("classifier.boost.m", 1);
SetParameterInt("classifier.boost.m",1, false);
SetParameterDescription("classifier.boost.m","Maximum depth of the tree.");
}
@@ -34,7 +34,7 @@ LearningApplicationBase<TInputValue,TOutputValue>
"See complete documentation here \\url{http://docs.opencv.org/modules/ml/doc/decision_trees.html}.");
//MaxDepth
AddParameter(ParameterType_Int, "classifier.dt.max", "Maximum depth of the tree");
SetParameterInt("classifier.dt.max", 65535);
SetParameterInt("classifier.dt.max",65535, false);
SetParameterDescription(
"classifier.dt.max", "The training algorithm attempts to split each node while its depth is smaller than the maximum "
"possible depth of the tree. The actual depth may be smaller if the other termination criteria are met, and/or "
@@ -42,13 +42,13 @@ LearningApplicationBase<TInputValue,TOutputValue>
//MinSampleCount
AddParameter(ParameterType_Int, "classifier.dt.min", "Minimum number of samples in each node");
SetParameterInt("classifier.dt.min", 10);
SetParameterInt("classifier.dt.min",10, false);
SetParameterDescription("classifier.dt.min", "If the number of samples in a node is smaller than this parameter, "
"then this node will not be split.");
//RegressionAccuracy
AddParameter(ParameterType_Float, "classifier.dt.ra", "Termination criteria for regression tree");
SetParameterFloat("classifier.dt.ra", 0.01);
SetParameterFloat("classifier.dt.ra",0.01, false);
SetParameterDescription("classifier.dt.min", "If all absolute differences between an estimated value in a node "
"and the values of the train samples in this node are smaller than this regression accuracy parameter, "
"then the node will not be split.");
@@ -60,14 +60,14 @@ LearningApplicationBase<TInputValue,TOutputValue>
//MaxCategories
AddParameter(ParameterType_Int, "classifier.dt.cat",
"Cluster possible values of a categorical variable into K <= cat clusters to find a suboptimal split");
SetParameterInt("classifier.dt.cat", 10);
SetParameterInt("classifier.dt.cat",10, false);
SetParameterDescription(
"classifier.dt.cat",
"Cluster possible values of a categorical variable into K <= cat clusters to find a suboptimal split.");
//CVFolds
AddParameter(ParameterType_Int, "classifier.dt.f", "K-fold cross-validations");
SetParameterInt("classifier.dt.f", 10);
SetParameterInt("classifier.dt.f",10, false);
SetParameterDescription(
"classifier.dt.f", "If cv_folds > 1, then it prunes a tree with K-fold cross-validation where K is equal to cv_folds.");
@@ -45,7 +45,7 @@ LearningApplicationBase<TInputValue,TOutputValue>
//WeakCount
AddParameter(ParameterType_Int, "classifier.gbt.w", "Number of boosting algorithm iterations");
SetParameterInt("classifier.gbt.w", 200);
SetParameterInt("classifier.gbt.w",200, false);
SetParameterDescription(
"classifier.gbt.w",
"Number \"w\" of boosting algorithm iterations, with w*K being the total number of trees in "
@@ -53,20 +53,20 @@ LearningApplicationBase<TInputValue,TOutputValue>
//Shrinkage
AddParameter(ParameterType_Float, "classifier.gbt.s", "Regularization parameter");
SetParameterFloat("classifier.gbt.s", 0.01);
SetParameterFloat("classifier.gbt.s",0.01, false);
SetParameterDescription("classifier.gbt.s", "Regularization parameter.");
//SubSamplePortion
AddParameter(ParameterType_Float, "classifier.gbt.p",
"Portion of the whole training set used for each algorithm iteration");
SetParameterFloat("classifier.gbt.p", 0.8);
SetParameterFloat("classifier.gbt.p",0.8, false);
SetParameterDescription(
"classifier.gbt.p",
"Portion of the whole training set used for each algorithm iteration. The subset is generated randomly.");
//MaxDepth
AddParameter(ParameterType_Int, "classifier.gbt.max", "Maximum depth of the tree");
SetParameterInt("classifier.gbt.max", 3);
SetParameterInt("classifier.gbt.max",3, false);
SetParameterDescription(
"classifier.gbt.max", "The training algorithm attempts to split each node while its depth is smaller than the maximum "
"possible depth of the tree. The actual depth may be smaller if the other termination criteria are met, and/or "
@@ -34,7 +34,7 @@ namespace Wrapper
//K parameter
AddParameter(ParameterType_Int, "classifier.knn.k", "Number of Neighbors");
SetParameterInt("classifier.knn.k", 32);
SetParameterInt("classifier.knn.k",32, false);
SetParameterDescription("classifier.knn.k","The number of neighbors to use.");
if (this->m_RegressionFlag)
@@ -35,7 +35,7 @@ namespace Wrapper
AddChoice("classifier.libsvm.k.rbf", "Gaussian radial basis function");
AddChoice("classifier.libsvm.k.poly", "Polynomial");
AddChoice("classifier.libsvm.k.sigmoid", "Sigmoid");
SetParameterString("classifier.libsvm.k", "linear");
SetParameterString("classifier.libsvm.k", "linear", false);
SetParameterDescription("classifier.libsvm.k", "SVM Kernel Type.");
AddParameter(ParameterType_Choice, "classifier.libsvm.m", "SVM Model Type");
SetParameterDescription("classifier.libsvm.m", "Type of SVM formulation.");
@@ -43,17 +43,17 @@ namespace Wrapper
{
AddChoice("classifier.libsvm.m.epssvr", "Epsilon Support Vector Regression");
AddChoice("classifier.libsvm.m.nusvr", "Nu Support Vector Regression");
SetParameterString("classifier.libsvm.m", "epssvr");
SetParameterString("classifier.libsvm.m", "epssvr", false);
}
else
{
AddChoice("classifier.libsvm.m.csvc", "C support vector classification");
AddChoice("classifier.libsvm.m.nusvc", "Nu support vector classification");
AddChoice("classifier.libsvm.m.oneclass", "Distribution estimation (One Class SVM)");
SetParameterString("classifier.libsvm.m", "csvc");
SetParameterString("classifier.libsvm.m", "csvc", false);
}
AddParameter(ParameterType_Float, "classifier.libsvm.c", "Cost parameter C");
SetParameterFloat("classifier.libsvm.c", 1.0);
SetParameterFloat("classifier.libsvm.c",1.0, false);
SetParameterDescription(
"classifier.libsvm.c",
"SVM models have a cost parameter C (1 by default) to control the trade-off between training errors and forcing rigid margins.");
@@ -67,9 +67,9 @@ namespace Wrapper
if (this->m_RegressionFlag)
{
AddParameter(ParameterType_Float, "classifier.libsvm.eps", "Epsilon");
SetParameterFloat("classifier.libsvm.eps", 1e-3);
SetParameterFloat("classifier.libsvm.eps",1e-3, false);
AddParameter(ParameterType_Float, "classifier.libsvm.nu", "Nu");
SetParameterFloat("classifier.libsvm.nu", 0.5);
SetParameterFloat("classifier.libsvm.nu",0.5, false);
}
}
@@ -38,7 +38,7 @@ LearningApplicationBase<TInputValue,TOutputValue>
AddParameter(ParameterType_Choice, "classifier.ann.t", "Train Method Type");
AddChoice("classifier.ann.t.reg", "RPROP algorithm");
AddChoice("classifier.ann.t.back", "Back-propagation algorithm");
SetParameterString("classifier.ann.t", "reg");
SetParameterString("classifier.ann.t", "reg", false);
SetParameterDescription("classifier.ann.t", "Type of training method for the multilayer perceptron (MLP) neural network.");
//LayerSizes
@@ -56,25 +56,25 @@ LearningApplicationBase<TInputValue,TOutputValue>
AddChoice("classifier.ann.f.ident", "Identity function");
AddChoice("classifier.ann.f.sig", "Symmetrical Sigmoid function");
AddChoice("classifier.ann.f.gau", "Gaussian function (Not completely supported)");
SetParameterString("classifier.ann.f", "sig");
SetParameterString("classifier.ann.f", "sig", false);
SetParameterDescription("classifier.ann.f", "Neuron activation function.");
//Alpha
AddParameter(ParameterType_Float, "classifier.ann.a", "Alpha parameter of the activation function");
SetParameterFloat("classifier.ann.a", 1.);
SetParameterFloat("classifier.ann.a",1., false);
SetParameterDescription("classifier.ann.a",
"Alpha parameter of the activation function (used only with sigmoid and gaussian functions).");
//Beta
AddParameter(ParameterType_Float, "classifier.ann.b", "Beta parameter of the activation function");
SetParameterFloat("classifier.ann.b", 1.);
SetParameterFloat("classifier.ann.b",1., false);
SetParameterDescription("classifier.ann.b",
"Beta parameter of the activation function (used only with sigmoid and gaussian functions).");
//BackPropDWScale
AddParameter(ParameterType_Float, "classifier.ann.bpdw",
"Strength of the weight gradient term in the BACKPROP method");
SetParameterFloat("classifier.ann.bpdw", 0.1);
SetParameterFloat("classifier.ann.bpdw",0.1, false);
SetParameterDescription(
"classifier.ann.bpdw",
"Strength of the weight gradient term in the BACKPROP method. The recommended value is about 0.1.");
@@ -82,7 +82,7 @@ LearningApplicationBase<TInputValue,TOutputValue>
//BackPropMomentScale
AddParameter(ParameterType_Float, "classifier.ann.bpms",
"Strength of the momentum term (the difference between weights on the 2 previous iterations)");
SetParameterFloat("classifier.ann.bpms", 0.1);
SetParameterFloat("classifier.ann.bpms",0.1, false);
SetParameterDescription(
"classifier.ann.bpms",
"Strength of the momentum term (the difference between weights on the 2 previous iterations). "
@@ -92,12 +92,12 @@ LearningApplicationBase<TInputValue,TOutputValue>
//RegPropDW0
AddParameter(ParameterType_Float, "classifier.ann.rdw",
"Initial value Delta_0 of update-values Delta_{ij} in RPROP method");
SetParameterFloat("classifier.ann.rdw", 0.1);
SetParameterFloat("classifier.ann.rdw",0.1, false);
SetParameterDescription("classifier.ann.rdw", "Initial value Delta_0 of update-values Delta_{ij} in RPROP method (default = 0.1).");
//RegPropDWMin
AddParameter(ParameterType_Float, "classifier.ann.rdwm", "Update-values lower limit Delta_{min} in RPROP method");
SetParameterFloat("classifier.ann.rdwm", 1e-7);
SetParameterFloat("classifier.ann.rdwm",1e-7, false);
SetParameterDescription(
"classifier.ann.rdwm",
"Update-values lower limit Delta_{min} in RPROP method. It must be positive (default = 1e-7).");
@@ -107,18 +107,18 @@ LearningApplicationBase<TInputValue,TOutputValue>
AddChoice("classifier.ann.term.iter", "Maximum number of iterations");
AddChoice("classifier.ann.term.eps", "Epsilon");
AddChoice("classifier.ann.term.all", "Max. iterations + Epsilon");
SetParameterString("classifier.ann.term", "all");
SetParameterString("classifier.ann.term", "all", false);
SetParameterDescription("classifier.ann.term", "Termination criteria.");
//Epsilon
AddParameter(ParameterType_Float, "classifier.ann.eps", "Epsilon value used in the Termination criteria");
SetParameterFloat("classifier.ann.eps", 0.01);
SetParameterFloat("classifier.ann.eps",0.01, false);
SetParameterDescription("classifier.ann.eps", "Epsilon value used in the Termination criteria.");
//MaxIter
AddParameter(ParameterType_Int, "classifier.ann.iter",
"Maximum number of iterations used in the Termination criteria");
SetParameterInt("classifier.ann.iter", 1000);
SetParameterInt("classifier.ann.iter",1000, false);
SetParameterDescription("classifier.ann.iter", "Maximum number of iterations used in the Termination criteria.");
}
@@ -34,7 +34,7 @@ LearningApplicationBase<TInputValue,TOutputValue>
"See complete documentation here \\url{http://docs.opencv.org/modules/ml/doc/random_trees.html}.");
//MaxDepth
AddParameter(ParameterType_Int, "classifier.rf.max", "Maximum depth of the tree");
SetParameterInt("classifier.rf.max", 5);
SetParameterInt("classifier.rf.max",5, false);
SetParameterDescription(
"classifier.rf.max",
"The depth of the tree. A low value will likely underfit and conversely a high value will likely overfit. "
@@ -42,14 +42,14 @@ LearningApplicationBase<TInputValue,TOutputValue>
//MinSampleCount
AddParameter(ParameterType_Int, "classifier.rf.min", "Minimum number of samples in each node");
SetParameterInt("classifier.rf.min", 10);
SetParameterInt("classifier.rf.min",10, false);
SetParameterDescription(
"classifier.rf.min", "If the number of samples in a node is smaller than this parameter, "
"then the node will not be split. A reasonable value is a small percentage of the total data e.g. 1 percent.");
//RegressionAccuracy
AddParameter(ParameterType_Float, "classifier.rf.ra", "Termination Criteria for regression tree");
SetParameterFloat("classifier.rf.ra", 0.);
SetParameterFloat("classifier.rf.ra",0., false);
SetParameterDescription("classifier.rf.ra", "If all absolute differences between an estimated value in a node "
"and the values of the train samples in this node are smaller than this regression accuracy parameter, "
"then the node will not be split.");
@@ -61,7 +61,7 @@ LearningApplicationBase<TInputValue,TOutputValue>
//MaxNumberOfCategories
AddParameter(ParameterType_Int, "classifier.rf.cat",
"Cluster possible values of a categorical variable into K <= cat clusters to find a suboptimal split");
SetParameterInt("classifier.rf.cat", 10);
SetParameterInt("classifier.rf.cat",10, false);
SetParameterDescription(
"classifier.rf.cat",
"Cluster possible values of a categorical variable into K <= cat clusters to find a suboptimal split.");
@@ -73,7 +73,7 @@ LearningApplicationBase<TInputValue,TOutputValue>
//MaxNumberOfVariables
AddParameter(ParameterType_Int, "classifier.rf.var",
"Size of the randomly selected subset of features at each tree node");
SetParameterInt("classifier.rf.var", 0);
SetParameterInt("classifier.rf.var",0, false);
SetParameterDescription(
"classifier.rf.var",
"The size of the subset of features, randomly selected at each tree node, that are used to find the best split(s). "
@@ -82,7 +82,7 @@ LearningApplicationBase<TInputValue,TOutputValue>
//MaxNumberOfTrees
AddParameter(ParameterType_Int, "classifier.rf.nbtrees",
"Maximum number of trees in the forest");
SetParameterInt("classifier.rf.nbtrees", 100);
SetParameterInt("classifier.rf.nbtrees",100, false);
SetParameterDescription(
"classifier.rf.nbtrees",
"The maximum number of trees in the forest. Typically, the more trees you have, the better the accuracy. "
@@ -92,7 +92,7 @@ LearningApplicationBase<TInputValue,TOutputValue>
//ForestAccuracy
AddParameter(ParameterType_Float, "classifier.rf.acc",
"Sufficient accuracy (OOB error)");
SetParameterFloat("classifier.rf.acc", 0.01);
SetParameterFloat("classifier.rf.acc",0.01, false);
SetParameterDescription("classifier.rf.acc","Sufficient accuracy (OOB error).");
@@ -37,35 +37,35 @@ namespace Wrapper
{
AddChoice("classifier.svm.m.epssvr", "Epsilon Support Vector Regression");
AddChoice("classifier.svm.m.nusvr", "Nu Support Vector Regression");
SetParameterString("classifier.svm.m", "epssvr");
SetParameterString("classifier.svm.m", "epssvr", false);
}
else
{
AddChoice("classifier.svm.m.csvc", "C support vector classification");
AddChoice("classifier.svm.m.nusvc", "Nu support vector classification");
AddChoice("classifier.svm.m.oneclass", "Distribution estimation (One Class SVM)");
SetParameterString("classifier.svm.m", "csvc");
SetParameterString("classifier.svm.m", "csvc", false);
}
AddParameter(ParameterType_Choice, "classifier.svm.k", "SVM Kernel Type");
AddChoice("classifier.svm.k.linear", "Linear");
AddChoice("classifier.svm.k.rbf", "Gaussian radial basis function");
AddChoice("classifier.svm.k.poly", "Polynomial");
AddChoice("classifier.svm.k.sigmoid", "Sigmoid");
SetParameterString("classifier.svm.k", "linear");
SetParameterString("classifier.svm.k", "linear", false);
SetParameterDescription("classifier.svm.k", "SVM Kernel Type.");
AddParameter(ParameterType_Float, "classifier.svm.c", "Cost parameter C");
SetParameterFloat("classifier.svm.c", 1.0);
SetParameterFloat("classifier.svm.c",1.0, false);
SetParameterDescription(
"classifier.svm.c",
"SVM models have a cost parameter C (1 by default) to control the trade-off between training errors and forcing rigid margins.");
AddParameter(ParameterType_Float, "classifier.svm.nu",
"Parameter nu of a SVM optimization problem (NU_SVC / ONE_CLASS)");
SetParameterFloat("classifier.svm.nu", 0.0);
SetParameterFloat("classifier.svm.nu",0.0, false);
SetParameterDescription("classifier.svm.nu", "Parameter nu of a SVM optimization problem.");
if (this->m_RegressionFlag)
{
AddParameter(ParameterType_Float, "classifier.svm.p", "Parameter epsilon of a SVM optimization problem (EPS_SVR)");
SetParameterFloat("classifier.svm.p", 1.0);
SetParameterFloat("classifier.svm.p",1.0, false);
SetParameterDescription("classifier.svm.p", "Parameter epsilon of a SVM optimization problem (EPS_SVR).");
AddParameter(ParameterType_Choice, "classifier.svm.term", "Termination criteria");
@@ -75,22 +75,22 @@ namespace Wrapper
AddChoice("classifier.svm.term.all", "Stops when either iteration or epsilon criteria is true");
AddParameter(ParameterType_Float, "classifier.svm.iter", "Maximum iteration");
SetParameterFloat("classifier.svm.iter", 1000);
SetParameterFloat("classifier.svm.iter",1000, false);
SetParameterDescription("classifier.svm.iter", "Maximum number of iterations (corresponds to the termination criteria 'iter').");
AddParameter(ParameterType_Float, "classifier.svm.eps", "Epsilon accuracy threshold");
SetParameterFloat("classifier.svm.eps", FLT_EPSILON);
SetParameterFloat("classifier.svm.eps",FLT_EPSILON, false);
SetParameterDescription("classifier.svm.eps", "Epsilon accuracy (corresponds to the termination criteria 'eps').");
}
AddParameter(ParameterType_Float, "classifier.svm.coef0", "Parameter coef0 of a kernel function (POLY / SIGMOID)");
SetParameterFloat("classifier.svm.coef0", 0.0);
SetParameterFloat("classifier.svm.coef0",0.0, false);
SetParameterDescription("classifier.svm.coef0", "Parameter coef0 of a kernel function (POLY / SIGMOID).");
AddParameter(ParameterType_Float, "classifier.svm.gamma",
"Parameter gamma of a kernel function (POLY / RBF / SIGMOID)");
SetParameterFloat("classifier.svm.gamma", 1.0);
SetParameterFloat("classifier.svm.gamma",1.0, false);
SetParameterDescription("classifier.svm.gamma", "Parameter gamma of a kernel function (POLY / RBF / SIGMOID).");
AddParameter(ParameterType_Float, "classifier.svm.degree", "Parameter degree of a kernel function (POLY)");
SetParameterFloat("classifier.svm.degree", 1.0);
SetParameterFloat("classifier.svm.degree",1.0, false);
SetParameterDescription("classifier.svm.degree", "Parameter degree of a kernel function (POLY).");
AddParameter(ParameterType_Empty, "classifier.svm.opt", "Parameters optimization");
MandatoryOff("classifier.svm.opt");
@@ -211,15 +211,15 @@ namespace Wrapper
SVMClassifier->Save(modelPath);
// Update the displayed parameters in the GUI after the training process, for further use of them
SetParameterFloat("classifier.svm.c", static_cast<float> (SVMClassifier->GetOutputC()));
SetParameterFloat("classifier.svm.nu", static_cast<float> (SVMClassifier->GetOutputNu()));
SetParameterFloat("classifier.svm.c",static_cast<float> (SVMClassifier->GetOutputC()), false);
SetParameterFloat("classifier.svm.nu",static_cast<float> (SVMClassifier->GetOutputNu()), false);
if (this->m_RegressionFlag)
{
SetParameterFloat("classifier.svm.p", static_cast<float> (SVMClassifier->GetOutputP()));
SetParameterFloat("classifier.svm.p",static_cast<float> (SVMClassifier->GetOutputP()), false);
}
SetParameterFloat("classifier.svm.coef0", static_cast<float> (SVMClassifier->GetOutputCoef0()));
SetParameterFloat("classifier.svm.gamma", static_cast<float> (SVMClassifier->GetOutputGamma()));
SetParameterFloat("classifier.svm.degree", static_cast<float> (SVMClassifier->GetOutputDegree()));
SetParameterFloat("classifier.svm.coef0",static_cast<float> (SVMClassifier->GetOutputCoef0()), false);
SetParameterFloat("classifier.svm.gamma",static_cast<float> (SVMClassifier->GetOutputGamma()), false);
SetParameterFloat("classifier.svm.degree",static_cast<float> (SVMClassifier->GetOutputDegree()), false);
}
} //end namespace wrapper
@@ -38,7 +38,7 @@ LearningApplicationBase<TInputValue,TOutputValue>
//MaxNumberOfTrees
AddParameter(ParameterType_Int, "classifier.sharkrf.nbtrees",
"Maximum number of trees in the forest");
SetParameterInt("classifier.sharkrf.nbtrees", 100);
SetParameterInt("classifier.sharkrf.nbtrees",100, false);
SetParameterDescription(
"classifier.sharkrf.nbtrees",
"The maximum number of trees in the forest. Typically, the more trees you have, the better the accuracy. "
@@ -48,7 +48,7 @@ LearningApplicationBase<TInputValue,TOutputValue>
//NodeSize
AddParameter(ParameterType_Int, "classifier.sharkrf.nodesize", "Min size of the node for a split");
SetParameterInt("classifier.sharkrf.nodesize", 25);
SetParameterInt("classifier.sharkrf.nodesize",25, false);
SetParameterDescription(
"classifier.sharkrf.nodesize",
"If the number of samples in a node is smaller than this parameter, "
@@ -56,7 +56,7 @@ LearningApplicationBase<TInputValue,TOutputValue>