Commit a45cc930 authored by Charles Peyrega's avatar Charles Peyrega

DOC: Typo and punctuation correction and clarification of the documentation...

DOC: Typo and punctuation correction and clarification of the documentation and menus of the otbTrainImagesClassifier application
parent fbc29e9e
......@@ -25,16 +25,16 @@ namespace Wrapper
void TrainImagesClassifier::InitBoostParams()
{
AddChoice("classifier.boost", "Boost classifier");
SetParameterDescription("classifier.boost", "This group of parameters allows to set Boost classifier parameters."
"See complete documentation here http://docs.opencv.org/modules/ml/doc/boosting.html");
SetParameterDescription("classifier.boost", "This group of parameters allows to set Boost classifier parameters. "
"See complete documentation here http://docs.opencv.org/modules/ml/doc/boosting.html.");
//BoostType
AddParameter(ParameterType_Choice, "classifier.boost.t", "Boost Type");
AddChoice("classifier.boost.t.discrete", "Discrete AdaBoost.");
AddChoice("classifier.boost.t.real", "Real AdaBoost. It is a technique that utilizes confidence-rated predictions"
"and works well with categorical data.");
AddChoice("classifier.boost.t.logit", "LogitBoost. It can produce good regression fits.");
AddChoice("classifier.boost.t.gentle", "Gentle AdaBoost. It puts less weight on outlier data points"
"and for that reason is often good with regression data");
AddChoice("classifier.boost.t.discrete", "Discrete AdaBoost");
AddChoice("classifier.boost.t.real", "Real AdaBoost (technique using confidence-rated predictions "
"and working well with categorical data)");
AddChoice("classifier.boost.t.logit", "LogitBoost (technique producing good regression fits)");
AddChoice("classifier.boost.t.gentle", "Gentle AdaBoost (technique setting less weight on outlier data points "
"and, for that reason, being often good with regression data)");
SetParameterString("classifier.boost.t", "real");
SetParameterDescription("classifier.boost.t", "Type of Boosting algorithm.");
//Do not expose SplitCriteria
......@@ -45,13 +45,13 @@ namespace Wrapper
//WeightTrimRate
AddParameter(ParameterType_Float, "classifier.boost.r", "Weight Trim Rate");
SetParameterFloat("classifier.boost.r", 0.95);
SetParameterDescription("classifier.boost.r","A threshold between 0 and 1 used to save computational time."
"Samples with summary weight <= 1 - weight_trim_rate do not participate in the next iteration of training."
SetParameterDescription("classifier.boost.r","A threshold between 0 and 1 used to save computational time. "
"Samples with summary weight <= (1 - weight_trim_rate) do not participate in the next iteration of training. "
"Set this parameter to 0 to turn off this functionality.");
//MaxDepth : Not sure that this parameter has to be exposed.
AddParameter(ParameterType_Int, "classifier.boost.m", "Maximum depth of the tree");
SetParameterInt("classifier.boost.m", 1);
SetParameterDescription("classifier.boost.m","Maximum depth of the tree");
SetParameterDescription("classifier.boost.m","Maximum depth of the tree.");
}
......
......@@ -25,27 +25,27 @@ void TrainImagesClassifier::InitDecisionTreeParams()
{
AddChoice("classifier.dt", "Decision Tree classifier");
SetParameterDescription("classifier.dt",
"This group of parameters allows to set Decision Tree classifier parameters."
"See complete documentation here http://docs.opencv.org/modules/ml/doc/decision_trees.html");
"This group of parameters allows to set Decision Tree classifier parameters. "
"See complete documentation here http://docs.opencv.org/modules/ml/doc/decision_trees.html.");
//MaxDepth
AddParameter(ParameterType_Int, "classifier.dt.max", "Maximum depth of the tree");
SetParameterInt("classifier.dt.max", 65000);
SetParameterDescription(
"classifier.dt.max", "The maximum possible depth of the tree. That is the training algorithms"
"attempts to split a node while its depth is less than MaxDepth. The actual depth may be smaller"
" if the other termination criteria are met, and/or if the tree is pruned.");
"classifier.dt.max", "The training algorithm attempts to split each node while its depth is smaller than the maximum "
"possible depth of the tree. The actual depth may be smaller if the other termination criteria are met, and/or "
"if the tree is pruned.");
//MinSampleCount
AddParameter(ParameterType_Int, "classifier.dt.min", "Minimum number of samples in each node");
SetParameterInt("classifier.dt.min", 10);
SetParameterDescription("classifier.dt.min", "If the number of samples in a node is less than this parameter "
"then the node will not be split.");
SetParameterDescription("classifier.dt.min", "If the number of samples in a node is smaller than this parameter, "
"then this node will not be split.");
//RegressionAccuracy
AddParameter(ParameterType_Float, "classifier.dt.ra", "Termination Criteria for regression tree.");
AddParameter(ParameterType_Float, "classifier.dt.ra", "Termination criteria for regression tree");
SetParameterFloat("classifier.dt.ra", 0.01);
SetParameterDescription("classifier.dt.min", "If all absolute differences between an estimated value in a node"
"and values of train samples in this node are less than this parameter"
SetParameterDescription("classifier.dt.ra", "If all absolute differences between an estimated value in a node "
"and the values of the train samples in this node are smaller than this regression accuracy parameter, "
"then the node will not be split.");
//UseSurrogates : don't need to be exposed !
......@@ -58,23 +58,23 @@ void TrainImagesClassifier::InitDecisionTreeParams()
SetParameterInt("classifier.dt.cat", 10);
SetParameterDescription(
"classifier.dt.cat",
"Cluster possible values of a categorical variable into K <= cat clusters to find a suboptimal split");
"Cluster possible values of a categorical variable into K <= cat clusters to find a suboptimal split.");
//CVFolds
AddParameter(ParameterType_Int, "classifier.dt.f", "K-fold cross-validations");
SetParameterInt("classifier.dt.f", 10);
SetParameterDescription(
"classifier.dt.f", "If cv_folds > 1 then prune a tree with K-fold cross-validation where K is equal to cv_folds");
"classifier.dt.f", "If cv_folds > 1, then it prunes a tree with K-fold cross-validation where K is equal to cv_folds.");
//Use1seRule
AddParameter(ParameterType_Empty, "classifier.dt.r", "Set Use1seRule flag to false.");
AddParameter(ParameterType_Empty, "classifier.dt.r", "Set Use1seRule flag to false");
SetParameterDescription(
"classifier.dt.r",
"If true then a pruning will be harsher. This will make a tree more compact and more resistant to the training data noise but a bit less accurate.");
"If true, then a pruning will be harsher. This will make a tree more compact and more resistant to the training data noise but a bit less accurate.");
//TruncatePrunedTree
AddParameter(ParameterType_Empty, "classifier.dt.t", "Set TruncatePrunedTree flag to false.");
SetParameterDescription("classifier.dt.t", "If true then pruned branches are physically removed from the tree.");
AddParameter(ParameterType_Empty, "classifier.dt.t", "Set TruncatePrunedTree flag to false");
SetParameterDescription("classifier.dt.t", "If true, then pruned branches are physically removed from the tree.");
//Priors are not exposed.
......
......@@ -26,38 +26,39 @@ void TrainImagesClassifier::InitGradientBoostedTreeParams()
AddChoice("classifier.gbt", "Gradient Boosted Tree classifier");
SetParameterDescription(
"classifier.gbt",
"This group of parameters allows to set Gradient Boosted Tree classifier parameters."
"see complete documentation here http://docs.opencv.org/modules/ml/doc/gradient_boosted_trees.html");
"This group of parameters allows to set Gradient Boosted Tree classifier parameters. "
"See complete documentation here http://docs.opencv.org/modules/ml/doc/gradient_boosted_trees.html.");
//LossFunctionType : not exposed, as only one type is used for Classification,
// the other three are used for regression.
//WeakCount
AddParameter(ParameterType_Int, "classifier.gbt.w", "Count of boosting algorithm iterations");
AddParameter(ParameterType_Int, "classifier.gbt.w", "Number of boosting algorithm iterations");
SetParameterInt("classifier.gbt.w", 200);
SetParameterDescription(
"classifier.gbt.w",
"Count of boosting algorithm iterations. w*K is the total count of trees in the GBT model, where K is the output classes count.");
"Number \"w\" of boosting algorithm iterations, with w*K being the total number of trees in "
"the GBT model, where K is the output number of classes.");
//Shrinkage
AddParameter(ParameterType_Float, "classifier.gbt.s", "Regularization parameter.");
AddParameter(ParameterType_Float, "classifier.gbt.s", "Regularization parameter");
SetParameterFloat("classifier.gbt.s", 0.8);
SetParameterDescription("classifier.gbt.s", "Regularization parameter.");
//SubSamplePortion
AddParameter(ParameterType_Float, "classifier.gbt.p",
"Portion of the whole training set used for each algorithm iteration.");
"Portion of the whole training set used for each algorithm iteration");
SetParameterFloat("classifier.gbt.p", 0.01);
SetParameterDescription(
"classifier.gbt.p",
"Portion of the whole training set used for each algorithm iteration. Subset is generated randomly.");
"Portion of the whole training set used for each algorithm iteration. The subset is generated randomly.");
//MaxDepth
AddParameter(ParameterType_Int, "classifier.gbt.max", "Maximum depth of the tree");
SetParameterInt("classifier.gbt.max", 65000);
SetParameterDescription(
"classifier.gbt.max", "The maximum possible depth of the tree. That is the training algorithms"
"attempts to split a node while its depth is less than MaxDepth. The actual depth may be smaller"
" if the other termination criteria are met, and/or if the tree is pruned.");
"classifier.gbt.max", "The training algorithm attempts to split each node while its depth is smaller than the maximum "
"possible depth of the tree. The actual depth may be smaller if the other termination criteria are met, and/or "
"if the tree is pruned.");
//UseSurrogates : don't need to be exposed !
//AddParameter(ParameterType_Empty, "classifier.gbt.sur", "Surrogate splits will be built");
......
......@@ -32,12 +32,12 @@ namespace Wrapper
SetDocName("Train an OpenCV classifier from multiple images");
SetDocLongDescription(
"This application performs a classifier training from multiple pairs of input images and training vector data. "
"Samples are composed of pixel values in each band optionally centered and reduced using XML statistics file produced by "
"Samples are composed of pixel values in each band optionally centered and reduced using an XML statistics file produced by "
"the ComputeImagesStatistics application.\n The training vector data must contain polygons with a positive integer field "
"representing the class label. Name of the field can be set using the \"Class label field\" parameter. Training and validation "
"representing the class label. The name of this field can be set using the \"Class label field\" parameter. Training and validation "
"sample lists are built such that each class is equally represented in both lists. One parameter allows to control the ratio "
"between the number of samples in training and validation sets. Two parameters allow to manage the size of the training and "
"validation sets per class and per image.\n Several classifier parameters can be set depending on the classifier. In the "
"validation sets per class and per image.\n Several classifier parameters can be set depending on the chosen classifier. In the "
"validation process, the confusion matrix is organized the following way: rows = reference labels, columns = produced labels. "
"In the header of the optional confusion matrix output file, the validation (reference) and predicted (produced) class labels"
" are ordered according to the rows/columns of the confusion matrix.");
......@@ -52,18 +52,17 @@ namespace Wrapper
SetParameterDescription("io", "This group of parameters allows to set input and output data.");
AddParameter(ParameterType_InputImageList, "io.il", "Input Image List");
SetParameterDescription("io.il", "A list of input images.");
AddParameter(ParameterType_InputVectorDataList, "io.vd", "Vector Data List");
AddParameter(ParameterType_InputVectorDataList, "io.vd", "Input Vector Data List");
SetParameterDescription("io.vd", "A list of vector data to select the training samples.");
AddParameter(ParameterType_InputFilename, "io.imstat", "XML image statistics file");
AddParameter(ParameterType_InputFilename, "io.imstat", "Input XML image statistics file");
MandatoryOff("io.imstat");
SetParameterDescription("io.imstat",
"Filename of an XML file containing mean and standard deviation of input images.");
AddParameter(ParameterType_OutputFilename, "io.out", "Output model");
SetParameterDescription("io.out", "Output file containing the model estimated");
AddParameter(ParameterType_OutputFilename, "io.confmatout", "Confusion matrix output");
SetParameterDescription("io.confmatout", "Filename to store the output confusion matrix (csv format)");
"Input XML file containing the mean and the standard deviation of the input images.");
AddParameter(ParameterType_OutputFilename, "io.confmatout", "Output confusion matrix");
SetParameterDescription("io.confmatout", "Output file containing the confusion matrix (.csv format).");
MandatoryOff("io.confmatout");
AddParameter(ParameterType_OutputFilename, "io.out", "Output model");
SetParameterDescription("io.out", "Output file containing the model estimated (.txt format).");
// Elevation
ElevationParametersHandler::AddElevationParameters(this, "elev");
......@@ -76,29 +75,28 @@ namespace Wrapper
AddParameter(ParameterType_Int, "sample.mt", "Maximum training sample size");
//MandatoryOff("mt");
SetDefaultParameterInt("sample.mt", 1000);
SetParameterDescription("sample.mt", "Maximum size of the training sample list (default = 1000).");
SetParameterDescription("sample.mt", "Maximum size of the training sample list (default = 1000) (no limit = -1).");
AddParameter(ParameterType_Int, "sample.mv", "Maximum validation sample size");
// MandatoryOff("mv");
SetDefaultParameterInt("sample.mv", 1000);
SetParameterDescription("sample.mv", "Maximum size of the validation sample list (default = 1000)");
SetParameterDescription("sample.mv", "Maximum size of the validation sample list (default = 1000) (no limit = -1).");
AddParameter(ParameterType_Empty, "sample.edg", "On edge pixel inclusion");
SetParameterDescription(
"sample.edg", "Take pixels on polygon edge into consideration when building training and validation samples.");
SetParameterDescription("sample.edg",
"Takes pixels on polygon edge into consideration when building training and validation samples.");
MandatoryOff("sample.edg");
AddParameter(ParameterType_Float, "sample.vtr", "training and validation sample ratio");
SetParameterDescription(
"sample.vtr",
"Ratio between training and validation samples (0.0 = all training, 1.0 = all validation) default = 0.5.");
AddParameter(ParameterType_Float, "sample.vtr", "Training and validation sample ratio");
SetParameterDescription("sample.vtr",
"Ratio between training and validation samples (0.0 = all training, 1.0 = all validation) (default = 0.5).");
SetParameterFloat("sample.vtr", 0.5);
AddParameter(ParameterType_String, "sample.vfn", "Name of the discrimination field");
SetParameterDescription("sample.vfn", "Name of the field used to discriminate class in the vector data files.");
SetParameterDescription("sample.vfn", "Name of the field used to discriminate class labels in the input vector data files.");
SetParameterString("sample.vfn", "Class");
AddParameter(ParameterType_Choice, "classifier", "Classifier to used.");
SetParameterDescription("classifier", "Choice of the classifier to used.");
AddParameter(ParameterType_Choice, "classifier", "Classifier to use for the training");
SetParameterDescription("classifier", "Choice of the classifier to use for the training.");
//Group LibSVM
InitLibSVMParams();
......@@ -136,8 +134,13 @@ namespace Wrapper
SetDocExampleParameterValue("sample.mv", "100");
SetDocExampleParameterValue("sample.mt", "100");
SetDocExampleParameterValue("sample.vtr", "0.5");
SetDocExampleParameterValue("svm.opt", "true");
SetDocExampleParameterValue("io.out", "svmModelQB1.svm");
SetDocExampleParameterValue("sample.edg", "false");
SetDocExampleParameterValue("sample.vfn", "Class");
SetDocExampleParameterValue("classifier", "libsvm");
SetDocExampleParameterValue("classifier.libsvm.k", "linear");
SetDocExampleParameterValue("classifier.libsvm.c", "1");
SetDocExampleParameterValue("classifier.libsvm.opt", "false");
SetDocExampleParameterValue("io.out", "svmModelQB1.txt");
SetDocExampleParameterValue("io.confmatout", "svmConfusionMatrixQB1.csv");
}
......
......@@ -25,13 +25,13 @@ namespace Wrapper
void TrainImagesClassifier::InitKNNParams()
{
AddChoice("classifier.knn", "KNN classifier");
SetParameterDescription("classifier.knn", "This group of parameters allows to set KNN classifier parameters."
"See complete documentation here http://docs.opencv.org/modules/ml/doc/k_nearest_neighbors.html");
SetParameterDescription("classifier.knn", "This group of parameters allows to set KNN classifier parameters. "
"See complete documentation here http://docs.opencv.org/modules/ml/doc/k_nearest_neighbors.html.");
//K parameter
AddParameter(ParameterType_Int, "classifier.knn.k", "Number of Neighbors");
SetParameterInt("classifier.knn.k", 32);
SetParameterDescription("classifier.knn.k","The number of neighbors to used.");
SetParameterDescription("classifier.knn.k","The number of neighbors to use.");
}
......
......@@ -33,14 +33,14 @@ namespace Wrapper
AddChoice("classifier.libsvm.k.sigmoid", "Sigmoid");
SetParameterString("classifier.libsvm.k", "linear");
SetParameterDescription("classifier.libsvm.k", "SVM Kernel Type.");
AddParameter(ParameterType_Float, "classifier.libsvm.c", "Cost parameter C.");
AddParameter(ParameterType_Float, "classifier.libsvm.c", "Cost parameter C");
SetParameterFloat("classifier.libsvm.c", 1.0);
SetParameterDescription(
"classifier.libsvm.c",
"SVM models have a cost parameter C (1 by default) to control the trade-off between training errors and forcing rigid margins.");
AddParameter(ParameterType_Empty, "classifier.libsvm.opt", "parameters optimization");
AddParameter(ParameterType_Empty, "classifier.libsvm.opt", "Parameters optimization");
MandatoryOff("classifier.libsvm.opt");
SetParameterDescription("classifier.libsvm.opt", "SVM optimization flag");
SetParameterDescription("classifier.libsvm.opt", "SVM parameters optimization flag.");
}
......
......@@ -26,15 +26,15 @@ void TrainImagesClassifier::InitNeuralNetworkParams()
{
AddChoice("classifier.ann", "Artificial Neural Network classifier");
SetParameterDescription("classifier.ann",
"This group of parameters allows to set Artificial Neural Network classifier parameters."
"See complete documentation here http://docs.opencv.org/modules/ml/doc/neural_networks.html");
"This group of parameters allows to set Artificial Neural Network classifier parameters. "
"See complete documentation here http://docs.opencv.org/modules/ml/doc/neural_networks.html.");
//TrainMethod
AddParameter(ParameterType_Choice, "classifier.ann.t", "Train Method Type");
AddChoice("classifier.ann.t.reg", "RPROP algorithm.");
AddChoice("classifier.ann.t.back", "back-propagation algorithm");
AddChoice("classifier.ann.t.reg", "RPROP algorithm");
AddChoice("classifier.ann.t.back", "Back-propagation algorithm");
SetParameterString("classifier.ann.t", "reg");
SetParameterDescription("classifier.ann.t", "Type of train method for the MLP neural network.");
SetParameterDescription("classifier.ann.t", "Type of training method for the multilayer perceptron (MLP) neural network.");
//LayerSizes
//There is no ParameterType_IntList, so I use a ParameterType_StringList and convert it.
......@@ -44,63 +44,63 @@ void TrainImagesClassifier::InitNeuralNetworkParams()
AddParameter(ParameterType_StringList, "classifier.ann.sizes", "Number of neurons in each intermediate layer");
//SetParameterStringList("classifier.ann.sizes", layerSizes);
SetParameterDescription("classifier.ann.sizes",
"the number of neurons in each intermediate layer (excluding input and output layers).");
"The number of neurons in each intermediate layer (excluding input and output layers).");
//ActivateFunction
AddParameter(ParameterType_Choice, "classifier.ann.f", "Neuron activation function type");
AddChoice("classifier.ann.f.ident", "Identity function.");
AddChoice("classifier.ann.f.ident", "Identity function");
AddChoice("classifier.ann.f.sig", "Symmetrical Sigmoid function");
AddChoice("classifier.ann.f.gau", "Gaussian function. (Not completely supported)");
AddChoice("classifier.ann.f.gau", "Gaussian function (Not completely supported)");
SetParameterString("classifier.ann.f", "sig");
SetParameterDescription("classifier.ann.f", "Neuron activation function.");
//Alpha
AddParameter(ParameterType_Float, "classifier.ann.a", "Alpha parameter of the activation function.");
AddParameter(ParameterType_Float, "classifier.ann.a", "Alpha parameter of the activation function");
SetParameterFloat("classifier.ann.a", 0.);
SetParameterDescription("classifier.ann.a",
"Alpha parameter of the activation function (used only with sigmoid and gaussian functions.");
"Alpha parameter of the activation function (used only with sigmoid and gaussian functions).");
//Beta
AddParameter(ParameterType_Float, "classifier.ann.b", "Beta parameter of the activation function.");
AddParameter(ParameterType_Float, "classifier.ann.b", "Beta parameter of the activation function");
SetParameterFloat("classifier.ann.b", 0.);
SetParameterDescription("classifier.ann.b",
"Beta parameter of the activation function (used only with sigmoid and gaussian functions.");
"Beta parameter of the activation function (used only with sigmoid and gaussian functions).");
//BackPropDWScale
AddParameter(ParameterType_Float, "classifier.ann.bpdw",
"Strength of the weight gradient term in the BACKPROP method.");
"Strength of the weight gradient term in the BACKPROP method");
SetParameterFloat("classifier.ann.bpdw", 0.1);
SetParameterDescription(
"classifier.ann.bpdw",
"Strength of the weight gradient term in the BACKPROP method. The recommended value is about 0.1");
"Strength of the weight gradient term in the BACKPROP method. The recommended value is about 0.1.");
//BackPropMomentScale
AddParameter(ParameterType_Float, "classifier.ann.bpms",
"Strength of the momentum term (the difference between weights on the 2 previous iterations).");
"Strength of the momentum term (the difference between weights on the 2 previous iterations)");
SetParameterFloat("classifier.ann.bpms", 0.1);
SetParameterDescription(
"classifier.ann.bpms",
"Strength of the momentum term (the difference between weights on the 2 previous iterations). "
"This parameter provides some inertia to smooth the random fluctuations of the weights. "
"It can vary from 0 (the feature is disabled) to 1 and beyond. The value 0.1 or so is good enough");
"It can vary from 0 (the feature is disabled) to 1 and beyond. The value 0.1 or so is good enough.");
//RegPropDW0
AddParameter(ParameterType_Float, "classifier.ann.rdw",
"Initial value Delta_0 of update-values Delta_{ij} in RPROP method.");
"Initial value Delta_0 of update-values Delta_{ij} in RPROP method");
SetParameterFloat("classifier.ann.rdw", 0.1);
SetParameterDescription("classifier.ann.rdw", "Initial value Delta_0 of update-values Delta_{ij} in RPROP method.");
SetParameterDescription("classifier.ann.rdw", "Initial value Delta_0 of update-values Delta_{ij} in RPROP method (default = 0.1).");
//RegPropDWMin
AddParameter(ParameterType_Float, "classifier.ann.rdwm", "Update-values lower limit Delta_{min} in RPROP method.");
AddParameter(ParameterType_Float, "classifier.ann.rdwm", "Update-values lower limit Delta_{min} in RPROP method");
SetParameterFloat("classifier.ann.rdwm", 1e-7);
SetParameterDescription(
"classifier.ann.rdwm",
"Update-values lower limit Delta_{min} in RPROP method. It must be positive. Default is 1e-7");
"Update-values lower limit Delta_{min} in RPROP method. It must be positive (default = 1e-7).");
//TermCriteriaType
AddParameter(ParameterType_Choice, "classifier.ann.term", "Termination criteria");
AddChoice("classifier.ann.term.iter", "Maximum number of iterations");
AddChoice("classifier.ann.term.eps", "Espilon");
AddChoice("classifier.ann.term.eps", "Epsilon");
AddChoice("classifier.ann.term.all", "Max. iterations + Epsilon");
SetParameterString("classifier.ann.term", "all");
SetParameterDescription("classifier.ann.term", "Termination criteria.");
......@@ -108,13 +108,13 @@ void TrainImagesClassifier::InitNeuralNetworkParams()
//Epsilon
AddParameter(ParameterType_Float, "classifier.ann.eps", "Epsilon value used in the Termination criteria");
SetParameterFloat("classifier.ann.eps", 0.01);
SetParameterDescription("classifier.ann.eps", "Epsilon value used in the Termination criteria");
SetParameterDescription("classifier.ann.eps", "Epsilon value used in the Termination criteria.");
//MaxIter
AddParameter(ParameterType_Int, "classifier.ann.iter",
"Maximum number of iteration used in the Termination criteria.");
"Maximum number of iterations used in the Termination criteria");
SetParameterInt("classifier.ann.iter", 1000);
SetParameterDescription("classifier.ann.iter", "Maximum number of iteration used in the Termination criteria.");
SetParameterDescription("classifier.ann.iter", "Maximum number of iterations used in the Termination criteria.");
}
......
......@@ -25,8 +25,8 @@ namespace Wrapper
void TrainImagesClassifier::InitNormalBayesParams()
{
AddChoice("classifier.bayes", "Normal Bayes classifier");
SetParameterDescription("classifier.bayes", "Use a Normal Bayes Classifier."
"See complete documentation here http://docs.opencv.org/modules/ml/doc/normal_bayes_classifier.html");
SetParameterDescription("classifier.bayes", "Use a Normal Bayes Classifier. "
"See complete documentation here http://docs.opencv.org/modules/ml/doc/normal_bayes_classifier.html.");
}
......
......@@ -25,8 +25,8 @@ void TrainImagesClassifier::InitRandomForestsParams()
{
AddChoice("classifier.rf", "Random forests classifier");
SetParameterDescription("classifier.rf",
"This group of parameters allows to set Random Forests classifier parameters."
"See complete documentation here http://docs.opencv.org/modules/ml/doc/random_trees.html");
"This group of parameters allows to set Random Forests classifier parameters. "
"See complete documentation here http://docs.opencv.org/modules/ml/doc/random_trees.html.");
//MaxDepth
AddParameter(ParameterType_Int, "classifier.rf.max", "Maximum depth of the tree");
SetParameterInt("classifier.rf.max", 25);
......@@ -39,14 +39,14 @@ void TrainImagesClassifier::InitRandomForestsParams()
AddParameter(ParameterType_Int, "classifier.rf.min", "Minimum number of samples in each node");
SetParameterInt("classifier.rf.min", 5);
SetParameterDescription(
"classifier.rf.min", "If the number of samples in a node is less than this parameter "
"then the node will not be split. A reasonable value is a small percentage of the total data e.g. 1%");
"classifier.rf.min", "If the number of samples in a node is smaller than this parameter, "
"then the node will not be split. A reasonable value is a small percentage of the total data e.g. 1%.");
//RegressionAccuracy
AddParameter(ParameterType_Float, "classifier.rf.ra", "Termination Criteria for regression tree.");
AddParameter(ParameterType_Float, "classifier.rf.ra", "Termination criteria for regression tree");
SetParameterFloat("classifier.rf.ra", 0.);
SetParameterDescription("classifier.rf.min", "If all absolute differences between an estimated value in a node"
"and values of train samples in this node are less than this parameter"
SetParameterDescription("classifier.rf.ra", "If all absolute differences between an estimated value in a node "
"and the values of the train samples in this node are smaller than this regression accuracy parameter, "
"then the node will not be split.");
//UseSurrogates : don't need to be exposed !
......@@ -59,7 +59,7 @@ void TrainImagesClassifier::InitRandomForestsParams()
SetParameterInt("classifier.rf.cat", 15);
SetParameterDescription(
"classifier.rf.cat",
"Cluster possible values of a categorical variable into K <= cat clusters to find a suboptimal split");
"Cluster possible values of a categorical variable into K <= cat clusters to find a suboptimal split.");
//Priors are not exposed.
......@@ -71,8 +71,8 @@ void TrainImagesClassifier::InitRandomForestsParams()
SetParameterInt("classifier.rf.var", 4);
SetParameterDescription(
"classifier.rf.var",
"The size of the randomly selected subset of features at each tree node and that are used to find the best split(s)."
"If you set it to 0 then the size will be set to the square root of the total number of features.");
"The size of the subset of features, randomly selected at each tree node, that are used to find the best split(s). "
"If you set it to 0, then the size will be set to the square root of the total number of features.");
//MaxNumberOfTrees
AddParameter(ParameterType_Int, "classifier.rf.nbtrees",
......@@ -80,15 +80,15 @@ void TrainImagesClassifier::InitRandomForestsParams()
SetParameterInt("classifier.rf.nbtrees", 100);
SetParameterDescription(
"classifier.rf.nbtrees",
"The maximum number of trees in the forest (surprise, surprise). Typically the more trees you have the better the accuracy."
"However, the improvement in accuracy generally diminishes and asymptotes pass a certain number of trees. "
"Also to keep in mind, the number of tree increases the prediction time linearly.");
"The maximum number of trees in the forest. Typically, the more trees you have, the better the accuracy. "
"However, the improvement in accuracy generally diminishes and reaches an asymptote for a certain number of trees. "
"Also to keep in mind, increasing the number of trees increases the prediction time linearly.");
//ForestAccuracy
AddParameter(ParameterType_Float, "classifier.rf.acc",
"Sufficient accuracy (OOB error)");
SetParameterFloat("classifier.rf.acc", 0.01);
SetParameterDescription("classifier.rf.acc","Sufficient accuracy (OOB error)");
SetParameterDescription("classifier.rf.acc","Sufficient accuracy (OOB error).");
//TerminationCriteria not exposed
......
......@@ -25,8 +25,8 @@ namespace Wrapper
void TrainImagesClassifier::InitSVMParams()
{
AddChoice("classifier.svm", "SVM classifier (OpenCV)");
SetParameterDescription("classifier.svm", "This group of parameters allows to set SVM classifier parameters."
"See complete documentation here http://docs.opencv.org/modules/ml/doc/support_vector_machines.html");
SetParameterDescription("classifier.svm", "This group of parameters allows to set SVM classifier parameters. "
"See complete documentation here http://docs.opencv.org/modules/ml/doc/support_vector_machines.html.");
AddParameter(ParameterType_Choice, "classifier.svm.m", "SVM Model Type");
AddChoice("classifier.svm.m.csvc", "C support vector classification");
AddChoice("classifier.svm.m.nusvc", "Nu support vector classification");
......@@ -42,31 +42,31 @@ namespace Wrapper
AddChoice("classifier.svm.k.sigmoid", "Sigmoid");
SetParameterString("classifier.svm.k", "linear");
SetParameterDescription("classifier.svm.k", "SVM Kernel Type.");
AddParameter(ParameterType_Float, "classifier.svm.c", "Cost parameter C.");
AddParameter(ParameterType_Float, "classifier.svm.c", "Cost parameter C");
SetParameterFloat("classifier.svm.c", 1.0);
SetParameterDescription(
"classifier.svm.c",
"SVM models have a cost parameter C (1 by default) to control the trade-off between training errors and forcing rigid margins.");
AddParameter(ParameterType_Float, "classifier.svm.nu",
"Parameter nu of a SVM optimization problem (NU_SVC / ONE_CLASS).");
"Parameter nu of a SVM optimization problem (NU_SVC / ONE_CLASS)");
SetParameterFloat("classifier.svm.nu", 0.0);
SetParameterDescription("classifier.svm.nu", "Parameter nu of a SVM optimization problem.");
//AddParameter(ParameterType_Float, "classifier.svm.p", "Parameter epsilon of a SVM optimization problem (EPS_SVR).");
//AddParameter(ParameterType_Float, "classifier.svm.p", "Parameter epsilon of a SVM optimization problem (EPS_SVR)");
//SetParameterFloat("classifier.svm.p", 0.0);
//SetParameterDescription("classifier.svm.p", "Parameter epsilon of a SVM optimization problem (EPS_SVR).");
AddParameter(ParameterType_Float, "classifier.svm.coef0", "Parameter coef0 of a kernel function (POLY / SIGMOID).");
AddParameter(ParameterType_Float, "classifier.svm.coef0", "Parameter coef0 of a kernel function (POLY / SIGMOID)");
SetParameterFloat("classifier.svm.coef0", 0.0);
SetParameterDescription("classifier.svm.coef0", "Parameter coef0 of a kernel function (POLY / SIGMOID).");
AddParameter(ParameterType_Float, "classifier.svm.gamma",
"Parameter gamma of a kernel function (POLY / RBF / SIGMOID).");
"Parameter gamma of a kernel function (POLY / RBF / SIGMOID)");
SetParameterFloat("classifier.svm.gamma", 1.0);
SetParameterDescription("classifier.svm.gamma", "Parameter gamma of a kernel function (POLY / RBF / SIGMOID).");
AddParameter(ParameterType_Float, "classifier.svm.degree", "Parameter degree of a kernel function (POLY).");
AddParameter(ParameterType_Float, "classifier.svm.degree", "Parameter degree of a kernel function (POLY)");
SetParameterFloat("classifier.svm.degree", 0.0);
SetParameterDescription("classifier.svm.degree", "Parameter degree of a kernel function (POLY).");
AddParameter(ParameterType_Empty, "classifier.svm.opt", "parameters optimization");
AddParameter(ParameterType_Empty, "classifier.svm.opt", "Parameters optimization");
MandatoryOff("classifier.svm.opt");
SetParameterDescription("classifier.svm.opt", "SVM optimization flag");
SetParameterDescription("classifier.svm.opt", "SVM parameters optimization flag.");
}
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment