Commit cfaa8364 authored by Arnaud Jaen

ENH: Application TrainMachineLearningImagesClassifier now works with all the OpenCV classifiers.

parent 4e630cfe
@@ -36,7 +36,9 @@ OTB_CREATE_APPLICATION(NAME ClassificationMapRegularization
IF(OTB_USE_OPENCV)
OTB_CREATE_APPLICATION(NAME TrainMachineLearningImagesClassifier
-SOURCES otbTrainMachineLearningImagesClassifier.cxx
+SOURCES otbTrainMachineLearningImagesClassifier.cxx otbTrainSVM.cxx otbTrainLibSVM.cxx otbTrainBoost.cxx
+otbTrainDecisionTree.cxx otbTrainGradientBoostedTree.cxx otbTrainNeuralNetwork.cxx otbTrainNormalBayes.cxx
+otbTrainRandomForests.cxx
LINK_LIBRARIES OTBIO;OTBCommon;OTBBasicFilters;OTBFeatureExtraction;OTBLearning;OTBMachineLearning)
OTB_CREATE_APPLICATION(NAME ImageClassifier
/*=========================================================================
Program: ORFEO Toolbox
Language: C++
Date: $Date$
Version: $Revision$
Copyright (c) Centre National d'Etudes Spatiales. All rights reserved.
See OTBCopyright.txt for details.
This software is distributed WITHOUT ANY WARRANTY; without even
the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
PURPOSE. See the above copyright notices for more information.
=========================================================================*/
#include "otbTrainMachineLearningImagesClassifier.h"
namespace otb
{
namespace Wrapper
{
void TrainMachineLearningImagesClassifier::InitBoostParams()
{
AddChoice("classifier.boost", "Boost classifier");
SetParameterDescription("classifier.boost", "This group of parameters allows to set Boost classifier parameters."
"See complete documentation here http://docs.opencv.org/modules/ml/doc/boosting.html");
//BoostType
AddParameter(ParameterType_Choice, "classifier.boost.t", "Boost Type");
AddChoice("classifier.boost.t.discrete", "Discrete AdaBoost.");
AddChoice("classifier.boost.t.real", "Real AdaBoost. It is a technique that utilizes confidence-rated predictions"
"and works well with categorical data.");
AddChoice("classifier.boost.t.logit", "LogitBoost. It can produce good regression fits.");
AddChoice("classifier.boost.t.gentle", "Gentle AdaBoost. It puts less weight on outlier data points"
"and for that reason is often good with regression data");
SetParameterString("classifier.boost.t", "real");
SetParameterDescription("classifier.boost.t", "Type of Boosting algorithm.");
//Do not expose SplitCriteria
//WeakCount
AddParameter(ParameterType_Int, "classifier.boost.w", "Weak count");
SetParameterInt("classifier.boost.w", 100);
SetParameterDescription("classifier.boost.w","The number of weak classifiers.");
//WeightTrimRate
AddParameter(ParameterType_Float, "classifier.boost.r", "Weight Trim Rate");
SetParameterFloat("classifier.boost.r", 0.95);
SetParameterDescription("classifier.boost.r","A threshold between 0 and 1 used to save computational time."
"Samples with summary weight <= 1 - weight_trim_rate do not participate in the next iteration of training."
"Set this parameter to 0 to turn off this functionality.");
//MaxDepth : Not sure that this parameter has to be exposed.
AddParameter(ParameterType_Int, "classifier.boost.m", "Maximum depth of the tree");
SetParameterInt("classifier.boost.m", 1);
SetParameterDescription("classifier.boost.m","Maximum depth of the tree");
}
void TrainMachineLearningImagesClassifier::TrainBoost(ListSampleType::Pointer trainingListSample, LabelListSampleType::Pointer trainingLabeledListSample)
{
BoostType::Pointer boostClassifier = BoostType::New();
boostClassifier->SetInputListSample(trainingListSample);
boostClassifier->SetTargetListSample(trainingLabeledListSample);
boostClassifier->SetBoostType(GetParameterInt("classifier.boost.t"));
boostClassifier->SetWeakCount(GetParameterInt("classifier.boost.w"));
boostClassifier->SetWeightTrimRate(GetParameterFloat("classifier.boost.r"));
boostClassifier->SetMaxDepth(GetParameterInt("classifier.boost.m"));
boostClassifier->Train();
boostClassifier->Save(GetParameterString("io.out"));
}
} //end namespace wrapper
} //end namespace otb
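For reference, the Boost parameter keys declared in InitBoostParams() can be driven programmatically through the OTB application wrapper. The following is a minimal sketch only: it assumes the application is registered under the name used in the CMake fragment above, that the parent "classifier" choice parameter and the io.* input parameters are declared in DoInit() (outside this excerpt), and that the output filename is purely illustrative.

#include <cstdlib>
#include "otbWrapperApplicationRegistry.h"

int main()
{
  // Create the application by its registered name (see OTB_CREATE_APPLICATION above).
  otb::Wrapper::Application::Pointer app =
    otb::Wrapper::ApplicationRegistry::CreateApplication("TrainMachineLearningImagesClassifier");
  if (app.IsNull())
    {
    return EXIT_FAILURE; // plugin not found on the application path
    }
  // Select the Boost classifier and set the parameters exposed by InitBoostParams().
  app->SetParameterString("classifier", "boost");          // parent choice (assumed key)
  app->SetParameterString("classifier.boost.t", "gentle"); // boost type
  app->SetParameterInt("classifier.boost.w", 100);         // weak count
  app->SetParameterFloat("classifier.boost.r", 0.95);      // weight trim rate
  app->SetParameterInt("classifier.boost.m", 1);           // maximum tree depth
  app->SetParameterString("io.out", "boost_model.txt");    // illustrative output file
  // ... the input image / training vector data parameters, defined elsewhere in
  // the application, must be set here before execution ...
  app->ExecuteAndWriteOutput();
  return EXIT_SUCCESS;
}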
/*=========================================================================
Program: ORFEO Toolbox
Language: C++
Date: $Date$
Version: $Revision$
Copyright (c) Centre National d'Etudes Spatiales. All rights reserved.
See OTBCopyright.txt for details.
This software is distributed WITHOUT ANY WARRANTY; without even
the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
PURPOSE. See the above copyright notices for more information.
=========================================================================*/
#include "otbTrainMachineLearningImagesClassifier.h"
namespace otb
{
namespace Wrapper
{
void TrainMachineLearningImagesClassifier::InitDecisionTreeParams()
{
AddChoice("classifier.dt", "Decision Tree classifier");
SetParameterDescription("classifier.dt",
"This group of parameters allows to set Decision Tree classifier parameters."
"See complete documentation here http://docs.opencv.org/modules/ml/doc/decision_trees.html");
//MaxDepth
AddParameter(ParameterType_Int, "classifier.dt.max", "Maximum depth of the tree");
SetParameterInt("classifier.dt.max", 65000);
SetParameterDescription(
"classifier.dt.max", "The maximum possible depth of the tree. That is, the training algorithm "
"attempts to split a node while its depth is less than MaxDepth. The actual depth may be smaller "
"if the other termination criteria are met, and/or if the tree is pruned.");
//MinSampleCount
AddParameter(ParameterType_Int, "classifier.dt.min", "Minimum number of samples in each node");
SetParameterInt("classifier.dt.min", 10);
SetParameterDescription("classifier.dt.min", "If the number of samples in a node is less than this parameter "
"then the node will not be split.");
//RegressionAccuracy
AddParameter(ParameterType_Float, "classifier.dt.ra", "Termination criteria for regression tree");
SetParameterFloat("classifier.dt.ra", 0.01);
SetParameterDescription("classifier.dt.ra", "If all absolute differences between an estimated value in a node "
"and the values of the train samples in this node are less than this parameter, "
"then the node will not be split.");
//UseSurrogates : don't need to be exposed !
//AddParameter(ParameterType_Empty, "classifier.dt.sur", "Surrogate splits will be built");
//SetParameterDescription("classifier.dt.sur","These splits allow to work with missing data and compute variable importance correctly.");
//MaxCategories
AddParameter(ParameterType_Int, "classifier.dt.cat",
"Cluster possible values of a categorical variable into K <= cat clusters to find a suboptimal split");
SetParameterInt("classifier.dt.cat", 10);
SetParameterDescription(
"classifier.dt.cat",
"Cluster possible values of a categorical variable into K <= cat clusters to find a suboptimal split");
//CVFolds
AddParameter(ParameterType_Int, "classifier.dt.f", "K-fold cross-validations");
SetParameterInt("classifier.dt.f", 10);
SetParameterDescription(
"classifier.dt.f", "If cv_folds > 1 then prune a tree with K-fold cross-validation where K is equal to cv_folds");
//Use1seRule
AddParameter(ParameterType_Empty, "classifier.dt.r", "Set Use1seRule flag to false.");
SetParameterDescription(
"classifier.dt.r",
"If true then a pruning will be harsher. This will make a tree more compact and more resistant to the training data noise but a bit less accurate.");
//TruncatePrunedTree
AddParameter(ParameterType_Empty, "classifier.dt.t", "Set TruncatePrunedTree flag to false.");
SetParameterDescription("classifier.dt.t", "If true then pruned branches are physically removed from the tree.");
//Priors are not exposed.
}
void TrainMachineLearningImagesClassifier::TrainDecisionTree(ListSampleType::Pointer trainingListSample,
LabelListSampleType::Pointer trainingLabeledListSample)
{
DecisionTreeType::Pointer classifier = DecisionTreeType::New();
classifier->SetInputListSample(trainingListSample);
classifier->SetTargetListSample(trainingLabeledListSample);
classifier->SetMaxDepth(GetParameterInt("classifier.dt.max"));
classifier->SetMinSampleCount(GetParameterInt("classifier.dt.min"));
classifier->SetRegressionAccuracy(GetParameterFloat("classifier.dt.ra"));
classifier->SetMaxCategories(GetParameterInt("classifier.dt.cat"));
classifier->SetCVFolds(GetParameterInt("classifier.dt.f"));
if (IsParameterEnabled("classifier.dt.r"))
{
classifier->SetUse1seRule(false);
}
if (IsParameterEnabled("classifier.dt.t"))
{
classifier->SetTruncatePrunedTree(false);
}
classifier->Train();
classifier->Save(GetParameterString("io.out"));
}
} //end namespace wrapper
} //end namespace otb
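TrainDecisionTree(), like the other Train*() methods in this commit, consumes a ListSampleType of measurement vectors and a LabelListSampleType of labels, both typedef'd from otb::ListSampleGenerator in the application header further down. Below is a minimal sketch of filling such list samples by hand; the concrete types (float measurements, unsigned short labels wrapped in a 1-element itk::FixedArray) are assumptions matching what the generator typically exposes, not values taken from this commit.

#include "itkVariableLengthVector.h"
#include "itkFixedArray.h"
#include "itkListSample.h"

// Assumed concrete types, mirroring the ListSampleGenerator typedefs used in the header.
typedef itk::VariableLengthVector<float>        SampleType;
typedef itk::Statistics::ListSample<SampleType> ListSampleType;
typedef itk::FixedArray<unsigned short, 1>      LabelType;
typedef itk::Statistics::ListSample<LabelType>  LabelListSampleType;

void FillToySamples(ListSampleType::Pointer samples, LabelListSampleType::Pointer labels)
{
  const unsigned int nbBands = 4;
  samples->SetMeasurementVectorSize(nbBands);
  labels->SetMeasurementVectorSize(1);

  SampleType pixel(nbBands);
  pixel.Fill(0.5f);   // one toy measurement vector
  LabelType label;
  label[0] = 1;       // its class label

  samples->PushBack(pixel);
  labels->PushBack(label);
}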
/*=========================================================================
Program: ORFEO Toolbox
Language: C++
Date: $Date$
Version: $Revision$
Copyright (c) Centre National d'Etudes Spatiales. All rights reserved.
See OTBCopyright.txt for details.
This software is distributed WITHOUT ANY WARRANTY; without even
the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
PURPOSE. See the above copyright notices for more information.
=========================================================================*/
#include "otbTrainMachineLearningImagesClassifier.h"
namespace otb
{
namespace Wrapper
{
void TrainMachineLearningImagesClassifier::InitGradientBoostedTreeParams()
{
AddChoice("classifier.gbt", "Gradient Boosted Tree classifier");
SetParameterDescription(
"classifier.gbt",
"This group of parameters allows setting Gradient Boosted Tree classifier parameters. "
"See complete documentation here: http://docs.opencv.org/modules/ml/doc/gradient_boosted_trees.html");
//LossFunctionType : not exposed, as only one type is used for Classification,
// the other three are used for regression.
//WeakCount
AddParameter(ParameterType_Int, "classifier.gbt.w", "Count of boosting algorithm iterations");
SetParameterInt("classifier.gbt.w", 200);
SetParameterDescription(
"classifier.gbt.w",
"Count of boosting algorithm iterations. w*K is the total count of trees in the GBT model, where K is the output classes count.");
//Shrinkage
AddParameter(ParameterType_Float, "classifier.gbt.s", "Regularization parameter.");
SetParameterFloat("classifier.gbt.s", 0.8);
SetParameterDescription("classifier.gbt.s", "Regularization parameter.");
//SubSamplePortion
AddParameter(ParameterType_Float, "classifier.gbt.p",
"Portion of the whole training set used for each algorithm iteration.");
SetParameterFloat("classifier.gbt.p", 0.01);
SetParameterDescription(
"classifier.gbt.p",
"Portion of the whole training set used for each algorithm iteration. Subset is generated randomly.");
//MaxDepth
AddParameter(ParameterType_Int, "classifier.gbt.max", "Maximum depth of the tree");
SetParameterInt("classifier.gbt.max", 65000);
SetParameterDescription(
"classifier.gbt.max", "The maximum possible depth of the tree. That is, the training algorithm "
"attempts to split a node while its depth is less than MaxDepth. The actual depth may be smaller "
"if the other termination criteria are met, and/or if the tree is pruned.");
//UseSurrogates : don't need to be exposed !
//AddParameter(ParameterType_Empty, "classifier.gbt.sur", "Surrogate splits will be built");
//SetParameterDescription("classifier.gbt.sur","These splits allow to work with missing data and compute variable importance correctly.");
}
void TrainMachineLearningImagesClassifier::TrainGradientBoostedTree(
ListSampleType::Pointer trainingListSample, LabelListSampleType::Pointer trainingLabeledListSample)
{
GradientBoostedTreeType::Pointer classifier = GradientBoostedTreeType::New();
classifier->SetInputListSample(trainingListSample);
classifier->SetTargetListSample(trainingLabeledListSample);
classifier->SetWeakCount(GetParameterInt("classifier.gbt.w"));
classifier->SetShrinkage(GetParameterFloat("classifier.gbt.s"));
classifier->SetSubSamplePortion(GetParameterFloat("classifier.gbt.p"));
classifier->SetMaxDepth(GetParameterInt("classifier.gbt.max"));
classifier->Train();
classifier->Save(GetParameterString("io.out"));
}
} //end namespace wrapper
} //end namespace otb
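Each Train*() method ends with a Save(GetParameterString("io.out")) call that writes the trained OpenCV model to the io.out file. The consumer side would then read that file back; the sketch below assumes the machine learning model classes expose Load() and Predict() symmetric to the Save() used here, and that the label type is an unsigned short wrapped in a 1-element array, both assumptions rather than facts shown in this excerpt.

#include <string>
#include "itkVariableLengthVector.h"
#include "otbGradientBoostedTreeMachineLearningModel.h"

// Assumed: Load()/Predict() mirror the Save() call used in TrainGradientBoostedTree().
typedef otb::GradientBoostedTreeMachineLearningModel<float, unsigned short> GBTModelType;

unsigned short PredictOneSample(const std::string& modelFile,
                                const itk::VariableLengthVector<float>& sample)
{
  GBTModelType::Pointer model = GBTModelType::New();
  model->Load(modelFile);                              // read back the file written by Save()
  GBTModelType::TargetSampleType result = model->Predict(sample);
  return result[0];                                    // label wrapped in a 1-element array
}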
/*=========================================================================
Program: ORFEO Toolbox
Language: C++
Date: $Date$
Version: $Revision$
Copyright (c) Centre National d'Etudes Spatiales. All rights reserved.
See OTBCopyright.txt for details.
This software is distributed WITHOUT ANY WARRANTY; without even
the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
PURPOSE. See the above copyright notices for more information.
=========================================================================*/
#include "otbTrainMachineLearningImagesClassifier.h"
namespace otb
{
namespace Wrapper
{
void TrainMachineLearningImagesClassifier::InitLibSVMParams()
{
AddChoice("classifier.libsvm", "LibSVM classifier");
SetParameterDescription("classifier.libsvm", "This group of parameters allows to set SVM classifier parameters.");
AddParameter(ParameterType_Choice, "classifier.libsvm.k", "SVM Kernel Type");
AddChoice("classifier.libsvm.k.linear", "Linear");
AddChoice("classifier.libsvm.k.rbf", "Gaussian radial basis function");
AddChoice("classifier.libsvm.k.poly", "Polynomial");
AddChoice("classifier.libsvm.k.sigmoid", "Sigmoid");
SetParameterString("classifier.libsvm.k", "linear");
SetParameterDescription("classifier.libsvm.k", "SVM Kernel Type.");
AddParameter(ParameterType_Float, "classifier.libsvm.c", "Cost parameter C.");
SetParameterFloat("classifier.libsvm.c", 1.0);
SetParameterDescription(
"classifier.libsvm.c",
"SVM models have a cost parameter C (1 by default) to control the trade-off between training errors and forcing rigid margins.");
AddParameter(ParameterType_Empty, "classifier.libsvm.opt", "Parameters optimization");
MandatoryOff("classifier.libsvm.opt");
SetParameterDescription("classifier.libsvm.opt", "SVM parameters optimization flag.");
}
void TrainMachineLearningImagesClassifier::TrainLibSVM(ListSampleType::Pointer trainingListSample, LabelListSampleType::Pointer trainingLabeledListSample)
{
LibSVMType::Pointer libSVMClassifier = LibSVMType::New();
libSVMClassifier->SetInputListSample(trainingListSample);
libSVMClassifier->SetTargetListSample(trainingLabeledListSample);
//SVM Option
//TODO : Add other options ?
if (IsParameterEnabled("classifier.libsvm.opt"))
{
libSVMClassifier->SetParameterOptimization(true);
}
libSVMClassifier->SetC(GetParameterFloat("classifier.libsvm.c"));
switch (GetParameterInt("classifier.libsvm.k"))
{
case 0: // LINEAR
libSVMClassifier->SetKernelType(LINEAR);
break;
case 1: // RBF
libSVMClassifier->SetKernelType(RBF);
break;
case 2: // POLY
libSVMClassifier->SetKernelType(POLY);
break;
case 3: // SIGMOID
libSVMClassifier->SetKernelType(SIGMOID);
break;
default: // DEFAULT = LINEAR
libSVMClassifier->SetKernelType(LINEAR);
break;
}
libSVMClassifier->Train();
libSVMClassifier->Save(GetParameterString("io.out"));
}
} //end namespace wrapper
} //end namespace otb
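As with the Boost example earlier, the LibSVM parameters map directly onto application keys; the only twist is the Empty "classifier.libsvm.opt" flag, which is toggled rather than assigned. A short sketch with the same caveats as before (registered application name, io.* inputs set elsewhere, illustrative values and assumed parent "classifier" key):

#include "otbWrapperApplicationRegistry.h"

void ConfigureLibSVM(otb::Wrapper::Application::Pointer app)
{
  app->SetParameterString("classifier", "libsvm");        // parent choice (assumed key)
  app->SetParameterString("classifier.libsvm.k", "rbf");  // kernel choice -> case 1 of the switch above
  app->SetParameterFloat("classifier.libsvm.c", 10.0);    // cost parameter C
  app->EnableParameter("classifier.libsvm.opt");          // read by IsParameterEnabled() in TrainLibSVM()
  app->SetParameterString("io.out", "libsvm_model.txt");  // illustrative output file
}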
/*=========================================================================
Program: ORFEO Toolbox
Language: C++
Date: $Date$
Version: $Revision$
Copyright (c) Centre National d'Etudes Spatiales. All rights reserved.
See OTBCopyright.txt for details.
This software is distributed WITHOUT ANY WARRANTY; without even
the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
PURPOSE. See the above copyright notices for more information.
=========================================================================*/
#include "otbWrapperApplication.h"
#include "otbWrapperApplicationFactory.h"
#include <iostream>
#include "otbConfigurationFile.h"
//Image
#include "otbImage.h"
#include "otbVectorImage.h"
#include "otbVectorData.h"
#include "otbListSampleGenerator.h"
// ListSample
#include "itkListSample.h"
#include "itkVariableLengthVector.h"
#include "itkFixedArray.h"
//Estimator
#include "otbMachineLearningModelFactory.h"
#include "otbKNearestNeighborsMachineLearningModel.h"
#include "otbRandomForestsMachineLearningModel.h"
#include "otbSVMMachineLearningModel.h"
#include "otbLibSVMMachineLearningModel.h"
#include "otbBoostMachineLearningModel.h"
#include "otbDecisionTreeMachineLearningModel.h"
#include "otbGradientBoostedTreeMachineLearningModel.h"
#include "otbNormalBayesMachineLearningModel.h"
#include "otbNeuralNetworkMachineLearningModel.h"
// Statistic XML Reader
#include "otbStatisticsXMLFileReader.h"
// Validation
#include "otbConfusionMatrixCalculator.h"
#include "itkTimeProbe.h"
#include "otbStandardFilterWatcher.h"
// Normalize the samples
#include "otbShiftScaleSampleListFilter.h"
// List sample concatenation
#include "otbConcatenateSampleListFilter.h"
// Balancing ListSample
#include "otbListSampleToBalancedListSampleFilter.h"
// VectorData projection filter
#include "otbVectorDataProjectionFilter.h"
// Extract a ROI of the vectordata
#include "otbVectorDataIntoImageProjectionFilter.h"
// Elevation handler
#include "otbWrapperElevationParametersHandler.h"
namespace otb
{
namespace Wrapper
{
class TrainMachineLearningImagesClassifier: public Application
{
public:
/** Standard class typedefs. */
typedef TrainMachineLearningImagesClassifier Self;
typedef Application Superclass;
typedef itk::SmartPointer<Self> Pointer;
typedef itk::SmartPointer<const Self> ConstPointer;
/** Standard macro */
itkNewMacro(Self)
itkTypeMacro(TrainMachineLearningImagesClassifier, otb::Application)
typedef otb::Image<FloatVectorImageType::InternalPixelType, 2> ImageReaderType;
typedef FloatVectorImageType::PixelType PixelType;
typedef FloatVectorImageType VectorImageType;
typedef FloatImageType ImageType;
// Training vectordata
typedef itk::VariableLengthVector<ImageType::PixelType> MeasurementType;
// SampleList manipulation
typedef otb::ListSampleGenerator<VectorImageType, VectorDataType> ListSampleGeneratorType;
typedef ListSampleGeneratorType::ListSampleType ListSampleType;
typedef ListSampleGeneratorType::LabelType LabelType;
typedef ListSampleGeneratorType::ListLabelType LabelListSampleType;
typedef otb::Statistics::ConcatenateSampleListFilter<ListSampleType> ConcatenateListSampleFilterType;
typedef otb::Statistics::ConcatenateSampleListFilter<LabelListSampleType> ConcatenateLabelListSampleFilterType;
// Statistic XML file Reader
typedef otb::StatisticsXMLFileReader<MeasurementType> StatisticsReader;
// Enhance List Sample
typedef otb::Statistics::ListSampleToBalancedListSampleFilter<ListSampleType, LabelListSampleType> BalancingListSampleFilterType;
typedef otb::Statistics::ShiftScaleSampleListFilter<ListSampleType, ListSampleType> ShiftScaleFilterType;
// Machine Learning models
typedef otb::MachineLearningModelFactory<ImageType::PixelType, ListSampleGeneratorType::ClassLabelType> MachineLearningModelFactoryType;
typedef MachineLearningModelFactoryType::MachineLearningModelTypePointer ModelPointerType;
typedef otb::RandomForestsMachineLearningModel<ImageType::PixelType, ListSampleGeneratorType::ClassLabelType> RandomForestType;
typedef otb::KNearestNeighborsMachineLearningModel<ImageType::PixelType, ListSampleGeneratorType::ClassLabelType> KNNType;
typedef otb::SVMMachineLearningModel<ImageType::PixelType, ListSampleGeneratorType::ClassLabelType> SVMType;
typedef otb::LibSVMMachineLearningModel<ImageType::PixelType, ListSampleGeneratorType::ClassLabelType> LibSVMType;
typedef otb::BoostMachineLearningModel<ImageType::PixelType, ListSampleGeneratorType::ClassLabelType> BoostType;
typedef otb::DecisionTreeMachineLearningModel<ImageType::PixelType, ListSampleGeneratorType::ClassLabelType> DecisionTreeType;
typedef otb::GradientBoostedTreeMachineLearningModel<ImageType::PixelType, ListSampleGeneratorType::ClassLabelType> GradientBoostedTreeType;
typedef otb::NeuralNetworkMachineLearningModel<ImageType::PixelType, ListSampleGeneratorType::ClassLabelType> NeuralNetworkType;
typedef otb::NormalBayesMachineLearningModel<ImageType::PixelType, ListSampleGeneratorType::ClassLabelType> NormalBayesType;
// Estimate performance on validation sample
typedef otb::ConfusionMatrixCalculator<LabelListSampleType, LabelListSampleType> ConfusionMatrixCalculatorType;
// VectorData projection filter
typedef otb::VectorDataProjectionFilter<VectorDataType, VectorDataType> VectorDataProjectionFilterType;
// Extract ROI
typedef otb::VectorDataIntoImageProjectionFilter<VectorDataType, VectorImageType> VectorDataReprojectionType;
protected:
using Superclass::AddParameter;
friend void InitSVMParams(TrainMachineLearningImagesClassifier & app);
private:
void DoInit();
void DoUpdateParameters();
void LogConfusionMatrix(ConfusionMatrixCalculatorType* confMatCalc);
void InitLibSVMParams();
void InitBoostParams();
void InitSVMParams();
void InitDecisionTreeParams();
void InitGradientBoostedTreeParams();
void InitNeuralNetworkParams();
void InitNormalBayesParams();
void InitRandomForestsParams();
void InitKNNParams(){}
void TrainLibSVM(ListSampleType::Pointer trainingListSample, LabelListSampleType::Pointer trainingLabeledListSample);
void TrainBoost(ListSampleType::Pointer trainingListSample, LabelListSampleType::Pointer trainingLabeledListSample);
void TrainSVM(ListSampleType::Pointer trainingListSample, LabelListSampleType::Pointer trainingLabeledListSample);
void TrainDecisionTree(ListSampleType::Pointer trainingListSample, LabelListSampleType::Pointer trainingLabeledListSample);
void TrainGradientBoostedTree(ListSampleType::Pointer trainingListSample, LabelListSampleType::Pointer trainingLabeledListSample);
void TrainNeuralNetwork(ListSampleType::Pointer trainingListSample, LabelListSampleType::Pointer trainingLabeledListSample);
void TrainNormalBayes(ListSampleType::Pointer trainingListSample, LabelListSampleType::Pointer trainingLabeledListSample);
void TrainRandomForests(ListSampleType::Pointer trainingListSample, LabelListSampleType::Pointer trainingLabeledListSample);
void TrainKNN(ListSampleType::Pointer trainingListSample, LabelListSampleType::Pointer trainingLabeledListSample){}
void Classify(ListSampleType::Pointer validationListSample, LabelListSampleType::Pointer predictedList);
void DoExecute();
VectorDataReprojectionType::Pointer vdreproj;
};
}
}
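The class above collects one InitXXXParams()/TrainXXX() pair per classifier, each implemented in its own otbTrainXXX.cxx file listed in the CMake fragment at the top of this commit. Extending the application to another OpenCV classifier therefore amounts to declaring one more pair here and adding the matching source file; the "Foo" classifier below is purely hypothetical and only illustrates the pattern.

// Sketch of the declarations a new classifier would add to the private section:
void InitFooParams();
void TrainFoo(ListSampleType::Pointer trainingListSample,
              LabelListSampleType::Pointer trainingLabeledListSample);
// ...and otbTrainFoo.cxx would be appended to the SOURCES list of the
// TrainMachineLearningImagesClassifier application in the CMake file above.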
/*=========================================================================
Program: ORFEO Toolbox
Language: C++
Date: $Date$
Version: $Revision$
Copyright (c) Centre National d'Etudes Spatiales. All rights reserved.
See OTBCopyright.txt for details.
This software is distributed WITHOUT ANY WARRANTY; without even
the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
PURPOSE. See the above copyright notices for more information.
=========================================================================*/
#include <boost/lexical_cast.hpp>
#include "otbTrainMachineLearningImagesClassifier.h"
namespace otb
{
namespace Wrapper
{
void TrainMachineLearningImagesClassifier::InitNeuralNetworkParams()
{
AddChoice("classifier.ann", "Artificial Neural Network classifier");
SetParameterDescription("classifier.ann",
"This group of parameters allows to set Artificial Neural Network classifier parameters."
"See complete documentation here http://docs.opencv.org/modules/ml/doc/neural_networks.html");
//TrainMethod
AddParameter(ParameterType_Choice, "classifier.ann.t", "Train Method Type");
AddChoice("classifier.ann.t.reg", "RPROP algorithm.");
AddChoice("classifier.ann.t.back", "back-propagation algorithm");
SetParameterString("classifier.ann.t", "reg");
SetParameterDescription("classifier.ann.t", "Type of train method for the MLP neural network.");
//LayerSizes
//There is no ParameterType_IntList, so I use a ParameterType_StringList and convert it.
/*std::vector<std::string> layerSizes;
layerSizes.push_back("100");
layerSizes.push_back("100");*/
AddParameter(ParameterType_StringList, "classifier.ann.sizes", "Number of neurons in each intermediate layer");
//SetParameterStringList("classifier.ann.sizes", layerSizes);
SetParameterDescription("classifier.ann.sizes",
"the number of neurons in each intermediate layer (excluding input and output layers).");
//ActivateFunction
AddParameter(ParameterType_Choice, "classifier.ann.f", "Neuron activation function type");
AddChoice("classifier.ann.f.ident", "Identity function.");
AddChoice("classifier.ann.f.sig", "Symmetrical Sigmoid function");
AddChoice("classifier.ann.f.gau", "Gaussian function. (Not completely supported)");
SetParameterString("classifier.ann.f", "sig");
SetParameterDescription("classifier.ann.f", "Neuron activation function.");
//Alpha
AddParameter(ParameterType_Float, "classifier.ann.a", "Alpha parameter of the activation function.");
SetParameterFloat("classifier.ann.a", 0.);
SetParameterDescription("classifier.ann.a",
"Alpha parameter of the activation function (used only with sigmoid and gaussian functions.");
//Beta
AddParameter(ParameterType_Float, "classifier.ann.b", "Beta parameter of the activation function.");
SetParameterFloat("classifier.ann.b", 0.);
SetParameterDescription("classifier.ann.b",
"Beta parameter of the activation function (used only with sigmoid and gaussian functions.");
//BackPropDWScale
AddParameter(ParameterType_Float, "classifier.ann.bpdw",
"Strength of the weight gradient term in the BACKPROP method.");
SetParameterFloat("classifier.ann.bpdw", 0.1);
SetParameterDescription(