diff --git a/Modules/Applications/AppClassification/include/otbTrainLibSVM.txx b/Modules/Applications/AppClassification/include/otbTrainLibSVM.txx index 0dd2d8f56327eaad45cae9d26101f0f1094ff31a..82d5bd8410dad7927c42fdd7a091c63f7e6d930b 100644 --- a/Modules/Applications/AppClassification/include/otbTrainLibSVM.txx +++ b/Modules/Applications/AppClassification/include/otbTrainLibSVM.txx @@ -37,9 +37,23 @@ namespace Wrapper SetParameterDescription("classifier.libsvm", "This group of parameters allows setting SVM classifier parameters."); AddParameter(ParameterType_Choice, "classifier.libsvm.k", "SVM Kernel Type"); AddChoice("classifier.libsvm.k.linear", "Linear"); + SetParameterDescription("classifier.libsvm.k.linear", + "Linear Kernel, no mapping is done, this is the fastest option."); + AddChoice("classifier.libsvm.k.rbf", "Gaussian radial basis function"); + SetParameterDescription("classifier.libsvm.k.rbf", + "This kernel is a good choice in most cases. It is " + "an exponential function of the euclidean distance between " + "the vectors."); + AddChoice("classifier.libsvm.k.poly", "Polynomial"); + SetParameterDescription("classifier.libsvm.k.poly", + "Polynomial Kernel, the mapping is a polynomial function."); + AddChoice("classifier.libsvm.k.sigmoid", "Sigmoid"); + SetParameterDescription("classifier.libsvm.k.sigmoid", + "The kernel is a hyperbolic tangent function of the vectors."); + SetParameterString("classifier.libsvm.k", "linear", false); SetParameterDescription("classifier.libsvm.k", "SVM Kernel Type."); AddParameter(ParameterType_Choice, "classifier.libsvm.m", "SVM Model Type"); @@ -47,21 +61,50 @@ namespace Wrapper if (this->m_RegressionFlag) { AddChoice("classifier.libsvm.m.epssvr", "Epsilon Support Vector Regression"); + SetParameterDescription("classifier.libsvm.m.epssvr", + "The distance between feature vectors from the training set and the " + "fitting hyper-plane must be less than Epsilon. 
For outliers the penalty " + "multiplier C is used "); + AddChoice("classifier.libsvm.m.nusvr", "Nu Support Vector Regression"); SetParameterString("classifier.libsvm.m", "epssvr", false); + SetParameterDescription("classifier.libsvm.m.nusvr", + "Same as the epsilon regression except that this time the bounded " + "parameter nu is used instead of epsilon"); } else { AddChoice("classifier.libsvm.m.csvc", "C support vector classification"); + SetParameterDescription("classifier.libsvm.m.csvc", + "This formulation allows imperfect separation of classes. The penalty " + "is set through the cost parameter C."); + AddChoice("classifier.libsvm.m.nusvc", "Nu support vector classification"); + SetParameterDescription("classifier.libsvm.m.nusvc", + "This formulation allows imperfect separation of classes. The penalty " + "is set through the cost parameter Nu. As compared to C, Nu is harder " + "to optimize, and may not be as fast."); + AddChoice("classifier.libsvm.m.oneclass", "Distribution estimation (One Class SVM)"); + SetParameterDescription("classifier.libsvm.m.oneclass", + "All the training data are from the same class, SVM builds a boundary " + "that separates the class from the rest of the feature space."); SetParameterString("classifier.libsvm.m", "csvc", false); } + AddParameter(ParameterType_Float, "classifier.libsvm.c", "Cost parameter C"); SetParameterFloat("classifier.libsvm.c",1.0, false); - SetParameterDescription( - "classifier.libsvm.c", - "SVM models have a cost parameter C (1 by default) to control the trade-off between training errors and forcing rigid margins."); + SetParameterDescription("classifier.libsvm.c", + "SVM models have a cost parameter C (1 by default) to control the " + "trade-off between training errors and forcing rigid margins."); + + AddParameter(ParameterType_Float, "classifier.libsvm.nu", "Cost parameter Nu"); + SetParameterFloat("classifier.libsvm.nu",0.5, false); + SetParameterDescription("classifier.libsvm.nu", + "Cost parameter Nu, 
in the range 0..1, the larger the value, " + "the smoother the decision."); + + // It seems that a nu parameter is missing for the nu-SVM use. AddParameter(ParameterType_Empty, "classifier.libsvm.opt", "Parameters optimization"); MandatoryOff("classifier.libsvm.opt"); SetParameterDescription("classifier.libsvm.opt", "SVM parameters optimization flag."); @@ -73,8 +116,15 @@ namespace Wrapper { AddParameter(ParameterType_Float, "classifier.libsvm.eps", "Epsilon"); SetParameterFloat("classifier.libsvm.eps",1e-3, false); - AddParameter(ParameterType_Float, "classifier.libsvm.nu", "Nu"); - SetParameterFloat("classifier.libsvm.nu",0.5, false); + SetParameterDescription("classifier.libsvm.eps", + "The distance between feature vectors from the training set and " + "the fitting hyper-plane must be less than Epsilon. For outliers " + "the penalty multiplier is set by C."); + // AddParameter(ParameterType_Float, "classifier.libsvm.nu", "Nu"); + // SetParameterFloat("classifier.libsvm.nu",0.5, false); + // SetParameterDescription("classifier.libsvm.nu", + // "Cost parameter Nu, in the range 0..1, the larger the value, " + // "the smoother the decision."); } } @@ -100,6 +150,7 @@ namespace Wrapper { libSVMClassifier->SetDoProbabilityEstimates(true); } + libSVMClassifier->SetNu(GetParameterFloat("classifier.libsvm.nu")); libSVMClassifier->SetC(GetParameterFloat("classifier.libsvm.c")); switch (GetParameterInt("classifier.libsvm.k")) @@ -135,7 +186,6 @@ namespace Wrapper break; } libSVMClassifier->SetEpsilon(GetParameterFloat("classifier.libsvm.eps")); - libSVMClassifier->SetNu(GetParameterFloat("classifier.libsvm.nu")); } else { @@ -155,6 +205,7 @@ namespace Wrapper break; } } + libSVMClassifier->Train(); libSVMClassifier->Save(modelPath); @@ -163,4 +214,4 @@ namespace Wrapper } //end namespace wrapper } //end namespace otb -#endif +#endif \ No newline at end of file diff --git a/Modules/Learning/Supervised/include/otbLibSVMMachineLearningModel.h 
b/Modules/Learning/Supervised/include/otbLibSVMMachineLearningModel.h index a8baa08e58118980fff9deb9dd184e05d9ef2d0c..329722af5a2dd5d971f6dfaa0f86ef29be3dc188 100644 --- a/Modules/Learning/Supervised/include/otbLibSVMMachineLearningModel.h +++ b/Modules/Learning/Supervised/include/otbLibSVMMachineLearningModel.h @@ -141,7 +141,7 @@ public: return m_Parameters.coef0; } - /** Set the C parameter for the training for C_SVC, EPSILON_SVR and NU_SVR */ + /** Set the C parameter for the training for C_SVC, EPSILON_SVR and NU_SVR */ otbSetSVMParameterMacro(C,C,double) /** Get the C parameter for the training for C_SVC, EPSILON_SVR and NU_SVR */ diff --git a/Modules/Radiometry/Simulation/include/otbSoilDataBase.h b/Modules/Radiometry/Simulation/include/otbSoilDataBase.h index 1a288b56fc55969537fd6e92d29522478f5fd934..673bc51335897306cd29140075a981db3c4be2d2 100644 --- a/Modules/Radiometry/Simulation/include/otbSoilDataBase.h +++ b/Modules/Radiometry/Simulation/include/otbSoilDataBase.h @@ -23,6 +23,7 @@ #define otbSoilDataBase_h #include "OTBSimulationExport.h" +#include "itkMacro.h" #include <vector> #include <unordered_map> #include <string> @@ -30,7 +31,7 @@ namespace otb { -class OTBSimulation_EXPORT SoilDataBase +class ITK_ABI_EXPORT SoilDataBase { public: //wavelength in nm diff --git a/Modules/Radiometry/Simulation/src/otbSoilDataBase.cxx b/Modules/Radiometry/Simulation/src/otbSoilDataBase.cxx index 124c9fbacb90b54cb11133f126a9e8bb520ec6c3..6c7686a3ecf6ce509a44d04611d0946d7950df73 100644 --- a/Modules/Radiometry/Simulation/src/otbSoilDataBase.cxx +++ b/Modules/Radiometry/Simulation/src/otbSoilDataBase.cxx @@ -104,7 +104,7 @@ size_t SoilDataBase::CountColumns(std::string fileName) const } void SoilDataBase::ParseSoilFile() { - unsigned int number_of_soils = CountColumns(m_SoilFileName) - 1; + auto number_of_soils = CountColumns(m_SoilFileName) - 1; m_SoilDataVector.resize(number_of_soils); std::ifstream sdb(m_SoilFileName); if(! sdb.is_open())