Commit d8676165 authored by Ludovic Hussonnois

ENH: Change calls to the SetParameter*() functions and set the UserValue flag's default value to true.

parent f84796f3
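
For context, here is a minimal sketch of the setter API this commit assumes, simplified from otb::Wrapper::Application (the declarations below are illustrative, not the exact header): each SetParameter*() overload gains a trailing hasUserValueFlag argument defaulting to true, so existing call sites keep their behavior, while the calls changed below pass false so that assigning a default value does not mark the parameter as user-set.

    // Illustrative declarations -- assumed shape of the updated API:
    void SetParameterString(std::string parameter, std::string value, bool hasUserValueFlag = true);
    void SetParameterInt(std::string parameter, int value, bool hasUserValueFlag = true);
    void SetParameterFloat(std::string parameter, float value, bool hasUserValueFlag = true);

    // Typical effect at a call site, e.g. inside DoInit():
    SetParameterFloat("thd", 0.5, false); // install a default; parameter not flagged as user-set
    SetParameterFloat("thd", 0.7);        // flag defaults to true; treated as a user-provided value

Every hunk in this commit applies that pattern mechanically: a trailing false is appended to setter calls that install default values, both in DoInit() and in composite applications driving internal apps via GetInternalApplication().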
@@ -140,12 +140,12 @@ private:
   AddParameter(ParameterType_String, "cri", "Criterion");
   SetParameterDescription("cri", "Dempster Shafer criterion (by default (belief+plausibility)/2)");
   MandatoryOff("cri");
-  SetParameterString("cri","((Belief + Plausibility)/2.)");
+  SetParameterString("cri","((Belief + Plausibility)/2.)", false);

   AddParameter(ParameterType_Float,"wgt","Weighting");
   SetParameterDescription("wgt","Coefficient between 0 and 1 to promote undetection or false detections (default 0.5)");
   MandatoryOff("wgt");
-  SetParameterFloat("wgt", 0.5);
+  SetParameterFloat("wgt",0.5, false);

   AddParameter(ParameterType_InputFilename,"initmod","initialization model");
   SetParameterDescription("initmod","Initialization model (xml file) to be used. If the xml initialization model is set, the descriptor list is not used (specified using the option -desclist)");
@@ -154,12 +154,12 @@ private:
   AddParameter(ParameterType_StringList, "desclist","Descriptor list");
   SetParameterDescription("desclist","List of the descriptors to be used in the model (must be specified to perform an automatic initialization)");
   MandatoryOff("desclist");
-  SetParameterString("desclist","");
+  SetParameterString("desclist","", false);

   AddParameter(ParameterType_Int,"maxnbit","Maximum number of iterations");
   MandatoryOff("maxnbit");
   SetParameterDescription("maxnbit","Maximum number of optimizer iteration (default 200)");
-  SetParameterInt("maxnbit", 200);
+  SetParameterInt("maxnbit",200, false);

   AddParameter(ParameterType_Empty,"optobs","Optimizer Observer");
   SetParameterDescription("optobs","Activate the optimizer observer");
...
@@ -245,7 +245,7 @@ private:
     {
     otbAppLogWARNING("The available RAM is too small to process this sample size of " << GetParameterInt("ts") <<
       " pixels. The sample size will be reduced to " << maxPixNb << " pixels." << std::endl);
-    this->SetParameterInt("ts", maxPixNb);
+    this->SetParameterInt("ts",maxPixNb, false);
     }
   this->SetMaximumParameterIntValue("ts", maxPixNb);
...
@@ -161,7 +161,7 @@ private:
   SetParameterDescription("strategy.all","Take all samples");

   // Default strategy : smallest
-  SetParameterString("strategy","smallest");
+  SetParameterString("strategy","smallest", false);

   AddParameter(ParameterType_Choice, "mim", "Multi-Image Mode");
...
@@ -78,7 +78,7 @@ private:
   AddParameter(ParameterType_String,"cfield","Field containing the predicted class.");
   SetParameterDescription("cfield","Field containing the predicted class");
-  SetParameterString("cfield","predicted");
+  SetParameterString("cfield","predicted", false);

   // Doc example parameter settings
   SetDocExampleParameterValue("inshp", "vectorData.shp");
...
@@ -86,7 +86,7 @@ private:
   AddParameter(ParameterType_String, "outfield.prefix.name", "Output field prefix");
   SetParameterDescription("outfield.prefix.name","Prefix used to form the field names that"
     "will contain the extracted values.");
-  SetParameterString("outfield.prefix.name", "value_");
+  SetParameterString("outfield.prefix.name", "value_", false);

   AddChoice("outfield.list","Use the given name list");
   SetParameterDescription("outfield.list","Use the given name list");
...
@@ -197,7 +197,7 @@ private:
   SetParameterDescription("strategy.all","Take all samples");

   // Default strategy : smallest
-  SetParameterString("strategy","smallest");
+  SetParameterString("strategy","smallest", false);

   AddParameter(ParameterType_ListView, "field", "Field Name");
   SetParameterDescription("field","Name of the field carrying the class name in the input vectors.");
...
@@ -155,7 +155,7 @@ void DoInit() ITK_OVERRIDE
   SetParameterDescription("sample.vtr",
     "Ratio between training and validation samples (0.0 = all training, 1.0 = "
    "all validation) (default = 0.5).");
-  SetParameterFloat("sample.vtr", 0.5);
+  SetParameterFloat("sample.vtr",0.5, false);
   SetMaximumParameterFloatValue("sample.vtr",1.0);
   SetMinimumParameterFloatValue("sample.vtr",0.0);
@@ -214,7 +214,7 @@ void DoUpdateParameters() ITK_OVERRIDE
   if ( HasValue("io.vd") )
     {
     std::vector<std::string> vectorFileList = GetParameterStringList("io.vd");
-    GetInternalApplication("polystat")->SetParameterString("vec",vectorFileList[0]);
+    GetInternalApplication("polystat")->SetParameterString("vec",vectorFileList[0], false);
     UpdateInternalParameters("polystat");
     }
 }
@@ -289,21 +289,21 @@ void DoExecute() ITK_OVERRIDE
   for (unsigned int i=0 ; i<nbInputs ; i++)
     {
     GetInternalApplication("polystat")->SetParameterInputImage("in",imageList->GetNthElement(i));
-    GetInternalApplication("polystat")->SetParameterString("vec",vectorFileList[i]);
-    GetInternalApplication("polystat")->SetParameterString("out",polyStatTrainOutputs[i]);
+    GetInternalApplication("polystat")->SetParameterString("vec",vectorFileList[i], false);
+    GetInternalApplication("polystat")->SetParameterString("out",polyStatTrainOutputs[i], false);
     ExecuteInternal("polystat");
     // analyse polygons given for validation
     if (dedicatedValidation)
       {
-      GetInternalApplication("polystat")->SetParameterString("vec",validationVectorFileList[i]);
-      GetInternalApplication("polystat")->SetParameterString("out",polyStatValidOutputs[i]);
+      GetInternalApplication("polystat")->SetParameterString("vec",validationVectorFileList[i], false);
+      GetInternalApplication("polystat")->SetParameterString("out",polyStatValidOutputs[i], false);
       ExecuteInternal("polystat");
       }
     }

   // ---------------------------------------------------------------------------
   // Compute sampling rates
-  GetInternalApplication("rates")->SetParameterString("mim","proportional");
+  GetInternalApplication("rates")->SetParameterString("mim","proportional", false);
   double vtr = GetParameterFloat("sample.vtr");
   long mt = GetParameterInt("sample.mt");
   long mv = GetParameterInt("sample.mv");
@@ -348,44 +348,44 @@ void DoExecute() ITK_OVERRIDE
     }

   // Sampling rates for training
-  GetInternalApplication("rates")->SetParameterStringList("il",polyStatTrainOutputs);
-  GetInternalApplication("rates")->SetParameterString("out",rateTrainOut);
+  GetInternalApplication("rates")->SetParameterStringList("il",polyStatTrainOutputs, false);
+  GetInternalApplication("rates")->SetParameterString("out",rateTrainOut, false);
   if (GetParameterInt("sample.bm") != 0)
     {
-    GetInternalApplication("rates")->SetParameterString("strategy","smallest");
+    GetInternalApplication("rates")->SetParameterString("strategy","smallest", false);
     }
   else
     {
     if (fmt > -1)
       {
-      GetInternalApplication("rates")->SetParameterString("strategy","constant");
+      GetInternalApplication("rates")->SetParameterString("strategy","constant", false);
       GetInternalApplication("rates")->SetParameterInt("strategy.constant.nb",fmt);
       }
     else
       {
-      GetInternalApplication("rates")->SetParameterString("strategy","all");
+      GetInternalApplication("rates")->SetParameterString("strategy","all", false);
       }
     }
   ExecuteInternal("rates");

   // Sampling rates for validation
   if (dedicatedValidation)
     {
-    GetInternalApplication("rates")->SetParameterStringList("il",polyStatValidOutputs);
-    GetInternalApplication("rates")->SetParameterString("out",rateValidOut);
+    GetInternalApplication("rates")->SetParameterStringList("il",polyStatValidOutputs, false);
+    GetInternalApplication("rates")->SetParameterString("out",rateValidOut, false);
     if (GetParameterInt("sample.bm") != 0)
       {
-      GetInternalApplication("rates")->SetParameterString("strategy","smallest");
+      GetInternalApplication("rates")->SetParameterString("strategy","smallest", false);
       }
     else
       {
       if (fmv > -1)
         {
-        GetInternalApplication("rates")->SetParameterString("strategy","constant");
+        GetInternalApplication("rates")->SetParameterString("strategy","constant", false);
         GetInternalApplication("rates")->SetParameterInt("strategy.constant.nb",fmv);
         }
       else
         {
-        GetInternalApplication("rates")->SetParameterString("strategy","all");
+        GetInternalApplication("rates")->SetParameterString("strategy","all", false);
         }
       }
     ExecuteInternal("rates");
@@ -393,18 +393,18 @@ void DoExecute() ITK_OVERRIDE

   // ---------------------------------------------------------------------------
   // Select & extract samples
-  GetInternalApplication("select")->SetParameterString("sampler", "periodic");
+  GetInternalApplication("select")->SetParameterString("sampler", "periodic", false);
   GetInternalApplication("select")->SetParameterInt("sampler.periodic.jitter",50);
-  GetInternalApplication("select")->SetParameterString("strategy","byclass");
-  GetInternalApplication("extraction")->SetParameterString("outfield", "prefix");
-  GetInternalApplication("extraction")->SetParameterString("outfield.prefix.name","value_");
+  GetInternalApplication("select")->SetParameterString("strategy","byclass", false);
+  GetInternalApplication("extraction")->SetParameterString("outfield", "prefix", false);
+  GetInternalApplication("extraction")->SetParameterString("outfield.prefix.name","value_", false);
   for (unsigned int i=0 ; i<nbInputs ; i++)
     {
     GetInternalApplication("select")->SetParameterInputImage("in",imageList->GetNthElement(i));
-    GetInternalApplication("select")->SetParameterString("vec",vectorFileList[i]);
-    GetInternalApplication("select")->SetParameterString("out",sampleOutputs[i]);
-    GetInternalApplication("select")->SetParameterString("instats",polyStatTrainOutputs[i]);
-    GetInternalApplication("select")->SetParameterString("strategy.byclass.in",ratesTrainOutputs[i]);
+    GetInternalApplication("select")->SetParameterString("vec",vectorFileList[i], false);
+    GetInternalApplication("select")->SetParameterString("out",sampleOutputs[i], false);
+    GetInternalApplication("select")->SetParameterString("instats",polyStatTrainOutputs[i], false);
+    GetInternalApplication("select")->SetParameterString("strategy.byclass.in",ratesTrainOutputs[i], false);
     // select sample positions
     ExecuteInternal("select");
     // extract sample descriptors
@@ -412,10 +412,10 @@ void DoExecute() ITK_OVERRIDE
     if (dedicatedValidation)
       {
-      GetInternalApplication("select")->SetParameterString("vec",validationVectorFileList[i]);
-      GetInternalApplication("select")->SetParameterString("out",sampleValidOutputs[i]);
-      GetInternalApplication("select")->SetParameterString("instats",polyStatValidOutputs[i]);
-      GetInternalApplication("select")->SetParameterString("strategy.byclass.in",ratesValidOutputs[i]);
+      GetInternalApplication("select")->SetParameterString("vec",validationVectorFileList[i], false);
+      GetInternalApplication("select")->SetParameterString("out",sampleValidOutputs[i], false);
+      GetInternalApplication("select")->SetParameterString("instats",polyStatValidOutputs[i], false);
+      GetInternalApplication("select")->SetParameterString("strategy.byclass.in",ratesValidOutputs[i], false);
       // select sample positions
       ExecuteInternal("select");
       // extract sample descriptors
@@ -473,8 +473,8 @@ void DoExecute() ITK_OVERRIDE
   // ---------------------------------------------------------------------------
   // Train model
-  GetInternalApplication("training")->SetParameterStringList("io.vd",sampleTrainOutputs);
-  GetInternalApplication("training")->SetParameterStringList("valid.vd",sampleValidOutputs);
+  GetInternalApplication("training")->SetParameterStringList("io.vd",sampleTrainOutputs, false);
+  GetInternalApplication("training")->SetParameterStringList("valid.vd",sampleValidOutputs, false);
   UpdateInternalParameters("training");

   // set field names
   FloatVectorImageType::Pointer image = imageList->GetNthElement(0);
@@ -486,7 +486,7 @@ void DoExecute() ITK_OVERRIDE
     oss << i;
     selectedNames.push_back("value_"+oss.str());
     }
-  GetInternalApplication("training")->SetParameterStringList("feat",selectedNames);
+  GetInternalApplication("training")->SetParameterStringList("feat",selectedNames, false);
   ExecuteInternal("training");

   // cleanup
...
@@ -80,7 +80,7 @@ private:
   AddParameter(ParameterType_String,"cfield","Field containing the class id for supervision");
   SetParameterDescription("cfield","Field containing the class id for supervision. Only geometries with this field available will be taken into account.");
-  SetParameterString("cfield","class");
+  SetParameterString("cfield","class", false);

   // Doc example parameter settings
   SetDocExampleParameterValue("inshp", "vectorData.shp");
...
@@ -159,7 +159,7 @@ void DoInit() ITK_OVERRIDE
   AddParameter(ParameterType_Float, "sample.vtr", "Training and validation sample ratio");
   SetParameterDescription("sample.vtr",
     "Ratio between training and validation samples (0.0 = all training, 1.0 = all validation) (default = 0.5).");
-  SetParameterFloat("sample.vtr", 0.5);
+  SetParameterFloat("sample.vtr",0.5, false);

   Superclass::DoInit();
...
@@ -84,12 +84,12 @@ private:
   AddParameter(ParameterType_String, "cri", "Criterion");
   SetParameterDescription("cri", "Dempster Shafer criterion (by default (belief+plausibility)/2)");
   MandatoryOff("cri");
-  SetParameterString("cri", "((Belief + Plausibility)/2.)");
+  SetParameterString("cri", "((Belief + Plausibility)/2.)", false);

   AddParameter(ParameterType_Float, "thd", "Criterion threshold");
   SetParameterDescription("thd", "Criterion threshold (default 0.5)");
   MandatoryOff("thd");
-  SetParameterFloat("thd", 0.5);
+  SetParameterFloat("thd",0.5, false);

   AddParameter(ParameterType_OutputVectorData, "out", "Output Vector Data");
   SetParameterDescription("out", "Output VectorData containing only the validated samples");
...
@@ -39,22 +39,22 @@ namespace Wrapper
   AddChoice("classifier.boost.t.logit", "LogitBoost (technique producing good regression fits)");
   AddChoice("classifier.boost.t.gentle", "Gentle AdaBoost (technique setting less weight on outlier data points "
     "and, for that reason, being often good with regression data)");
-  SetParameterString("classifier.boost.t", "real");
+  SetParameterString("classifier.boost.t", "real", false);
   SetParameterDescription("classifier.boost.t", "Type of Boosting algorithm.");
   //Do not expose SplitCriteria
   //WeakCount
   AddParameter(ParameterType_Int, "classifier.boost.w", "Weak count");
-  SetParameterInt("classifier.boost.w", 100);
+  SetParameterInt("classifier.boost.w",100, false);
   SetParameterDescription("classifier.boost.w","The number of weak classifiers.");
   //WeightTrimRate
   AddParameter(ParameterType_Float, "classifier.boost.r", "Weight Trim Rate");
-  SetParameterFloat("classifier.boost.r", 0.95);
+  SetParameterFloat("classifier.boost.r",0.95, false);
   SetParameterDescription("classifier.boost.r","A threshold between 0 and 1 used to save computational time. "
     "Samples with summary weight <= (1 - weight_trim_rate) do not participate in the next iteration of training. "
     "Set this parameter to 0 to turn off this functionality.");
   //MaxDepth : Not sure that this parameter has to be exposed.
   AddParameter(ParameterType_Int, "classifier.boost.m", "Maximum depth of the tree");
-  SetParameterInt("classifier.boost.m", 1);
+  SetParameterInt("classifier.boost.m",1, false);
   SetParameterDescription("classifier.boost.m","Maximum depth of the tree.");
   }
...
@@ -34,7 +34,7 @@ LearningApplicationBase<TInputValue,TOutputValue>
     "See complete documentation here \\url{http://docs.opencv.org/modules/ml/doc/decision_trees.html}.");
   //MaxDepth
   AddParameter(ParameterType_Int, "classifier.dt.max", "Maximum depth of the tree");
-  SetParameterInt("classifier.dt.max", 65535);
+  SetParameterInt("classifier.dt.max",65535, false);
   SetParameterDescription(
     "classifier.dt.max", "The training algorithm attempts to split each node while its depth is smaller than the maximum "
     "possible depth of the tree. The actual depth may be smaller if the other termination criteria are met, and/or "
@@ -42,13 +42,13 @@ LearningApplicationBase<TInputValue,TOutputValue>
   //MinSampleCount
   AddParameter(ParameterType_Int, "classifier.dt.min", "Minimum number of samples in each node");
-  SetParameterInt("classifier.dt.min", 10);
+  SetParameterInt("classifier.dt.min",10, false);
   SetParameterDescription("classifier.dt.min", "If the number of samples in a node is smaller than this parameter, "
     "then this node will not be split.");

   //RegressionAccuracy
   AddParameter(ParameterType_Float, "classifier.dt.ra", "Termination criteria for regression tree");
-  SetParameterFloat("classifier.dt.ra", 0.01);
+  SetParameterFloat("classifier.dt.ra",0.01, false);
   SetParameterDescription("classifier.dt.min", "If all absolute differences between an estimated value in a node "
     "and the values of the train samples in this node are smaller than this regression accuracy parameter, "
     "then the node will not be split.");
@@ -60,14 +60,14 @@ LearningApplicationBase<TInputValue,TOutputValue>
   //MaxCategories
   AddParameter(ParameterType_Int, "classifier.dt.cat",
     "Cluster possible values of a categorical variable into K <= cat clusters to find a suboptimal split");
-  SetParameterInt("classifier.dt.cat", 10);
+  SetParameterInt("classifier.dt.cat",10, false);
   SetParameterDescription(
     "classifier.dt.cat",
     "Cluster possible values of a categorical variable into K <= cat clusters to find a suboptimal split.");

   //CVFolds
   AddParameter(ParameterType_Int, "classifier.dt.f", "K-fold cross-validations");
-  SetParameterInt("classifier.dt.f", 10);
+  SetParameterInt("classifier.dt.f",10, false);
   SetParameterDescription(
     "classifier.dt.f", "If cv_folds > 1, then it prunes a tree with K-fold cross-validation where K is equal to cv_folds.");
...
@@ -45,7 +45,7 @@ LearningApplicationBase<TInputValue,TOutputValue>
   //WeakCount
   AddParameter(ParameterType_Int, "classifier.gbt.w", "Number of boosting algorithm iterations");
-  SetParameterInt("classifier.gbt.w", 200);
+  SetParameterInt("classifier.gbt.w",200, false);
   SetParameterDescription(
     "classifier.gbt.w",
     "Number \"w\" of boosting algorithm iterations, with w*K being the total number of trees in "
@@ -53,20 +53,20 @@ LearningApplicationBase<TInputValue,TOutputValue>
   //Shrinkage
   AddParameter(ParameterType_Float, "classifier.gbt.s", "Regularization parameter");
-  SetParameterFloat("classifier.gbt.s", 0.01);
+  SetParameterFloat("classifier.gbt.s",0.01, false);
   SetParameterDescription("classifier.gbt.s", "Regularization parameter.");

   //SubSamplePortion
   AddParameter(ParameterType_Float, "classifier.gbt.p",
     "Portion of the whole training set used for each algorithm iteration");
-  SetParameterFloat("classifier.gbt.p", 0.8);
+  SetParameterFloat("classifier.gbt.p",0.8, false);
   SetParameterDescription(
     "classifier.gbt.p",
     "Portion of the whole training set used for each algorithm iteration. The subset is generated randomly.");

   //MaxDepth
   AddParameter(ParameterType_Int, "classifier.gbt.max", "Maximum depth of the tree");
-  SetParameterInt("classifier.gbt.max", 3);
+  SetParameterInt("classifier.gbt.max",3, false);
   SetParameterDescription(
     "classifier.gbt.max", "The training algorithm attempts to split each node while its depth is smaller than the maximum "
     "possible depth of the tree. The actual depth may be smaller if the other termination criteria are met, and/or "
...
@@ -34,7 +34,7 @@ namespace Wrapper
   //K parameter
   AddParameter(ParameterType_Int, "classifier.knn.k", "Number of Neighbors");