Commit bd7c8bb2 authored by Julien Michel's avatar Julien Michel

MRG (sync with develop)

parents 4010f701 751d1724
......@@ -95,7 +95,8 @@ def GenerateChoice(app,param,paramlist, count = 0):
return output
def GenerateParameterType(app,param):
if app.GetParameterType(param) == otbApplication.ParameterType_Empty:
if app.GetParameterType(param) == otbApplication.ParameterType_Empty \
or app.GetParameterType(param) == otbApplication.ParameterType_Bool:
return "Boolean"
if app.GetParameterType(param) == otbApplication.ParameterType_Int \
or app.GetParameterType(param) == otbApplication.ParameterType_Radius \
......@@ -346,6 +347,8 @@ def GetApplicationExamplePythonSnippet(app,idx,expand = False, inputpath="",outp
if paramtype == otbApplication.ParameterType_Empty:
app.EnableParameter(param)
output+= "\t" + appname + ".EnableParameter("+EncloseString(param)+")" + linesep
if paramtype == otbApplication.ParameterType_Bool:
output+= "\t" + appname + ".SetParameterString("+EncloseString(param)+","+EncloseString(value)+")" + linesep
if paramtype == otbApplication.ParameterType_Int \
or paramtype == otbApplication.ParameterType_Radius \
or paramtype == otbApplication.ParameterType_RAM:
......
......@@ -239,12 +239,16 @@ doing geometry, projections, and also calibration.
Future developments will probably offer a more adapted structure to import and
export images between OTB and the Python world.
Setting of boolean parameters
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Setting of EmptyParameter
^^^^^^^^^^^^^^^^^^^^^^^^^
Most of the parameters are set using functions ``SetParameterXXX()``. The boolean
parameters (also called Empty parameters) are handled differently. Let's take an example with the application
``ReadImageInfo``:
Most of the parameters are set using functions ``SetParameterXXX()``, except for
one type of parameter: the ``EmptyParameter``. This class was the first
implementation of a boolean. It is now **deprecated**; you should use ``BoolParameter``
instead.
Let's take an example with the application ``ReadImageInfo`` when it was still
using an ``EmptyParameter`` for parameter ``keywordlist``:
.. code-block:: python
......
......@@ -92,7 +92,7 @@ private:
SetParameterDescription("ip.radius", "The radius of the ball shaped structuring element (expressed in pixels). By default, 'ip.radius = 1 pixel'.");
SetDefaultParameterInt("ip.radius", 1.0);
AddParameter(ParameterType_Empty, "ip.suvbool", "Multiple majority: Undecided(X)/Original");
AddParameter(ParameterType_Bool, "ip.suvbool", "Multiple majority: Undecided(X)/Original");
SetParameterDescription("ip.suvbool", "Pixels with more than 1 majority class are marked as Undecided if this parameter is checked (true), or keep their Original labels otherwise (false). Please note that the Undecided value must be different from existing labels in the input labeled image. By default, 'ip.suvbool = false'.");
AddParameter(ParameterType_Int, "ip.nodatalabel", "Label for the NoData class");
......@@ -103,7 +103,7 @@ private:
SetParameterDescription("ip.undecidedlabel", "Label for the Undecided class. By default, 'ip.undecidedlabel = 0'.");
SetDefaultParameterInt("ip.undecidedlabel", 0.0);
AddParameter(ParameterType_Empty, "ip.onlyisolatedpixels", "Process isolated pixels only");
AddParameter(ParameterType_Bool, "ip.onlyisolatedpixels", "Process isolated pixels only");
SetParameterDescription("ip.onlyisolatedpixels", "Only pixels whose label is unique in the neighbordhood will be processed. By default, 'ip.onlyisolatedpixels = false'.");
AddParameter(ParameterType_Int, "ip.isolatedthreshold", "Threshold for isolated pixels");
......@@ -153,7 +153,7 @@ private:
m_NeighMajVotingFilter->SetLabelForUndecidedPixels(GetParameterInt("ip.undecidedlabel"));
// Set to Undecided label if NOT unique Majority Voting
if (IsParameterEnabled("ip.suvbool"))
if (GetParameterInt("ip.suvbool"))
{
m_NeighMajVotingFilter->SetKeepOriginalLabelBool(false);
}
......@@ -164,7 +164,7 @@ private:
}
// Process isolated pixels only
if (IsParameterEnabled("ip.onlyisolatedpixels"))
if (GetParameterInt("ip.onlyisolatedpixels"))
{
m_NeighMajVotingFilter->SetOnlyIsolatedPixels(true);
m_NeighMajVotingFilter->SetIsolatedThreshold(GetParameterInt("ip.isolatedthreshold"));
......
......@@ -143,12 +143,12 @@ private:
AddParameter(ParameterType_String, "cri", "Criterion");
SetParameterDescription("cri", "Dempster Shafer criterion (by default (belief+plausibility)/2)");
MandatoryOff("cri");
SetParameterString("cri","((Belief + Plausibility)/2.)", false);
SetParameterString("cri","((Belief + Plausibility)/2.)");
AddParameter(ParameterType_Float,"wgt","Weighting");
SetParameterDescription("wgt","Coefficient between 0 and 1 to promote undetection or false detections (default 0.5)");
MandatoryOff("wgt");
SetParameterFloat("wgt",0.5, false);
SetParameterFloat("wgt",0.5);
AddParameter(ParameterType_InputFilename,"initmod","initialization model");
SetParameterDescription("initmod","Initialization model (xml file) to be used. If the xml initialization model is set, the descriptor list is not used (specified using the option -desclist)");
......@@ -157,16 +157,15 @@ private:
AddParameter(ParameterType_StringList, "desclist","Descriptor list");
SetParameterDescription("desclist","List of the descriptors to be used in the model (must be specified to perform an automatic initialization)");
MandatoryOff("desclist");
SetParameterString("desclist","", false);
SetParameterString("desclist","");
AddParameter(ParameterType_Int,"maxnbit","Maximum number of iterations");
MandatoryOff("maxnbit");
SetParameterDescription("maxnbit","Maximum number of optimizer iteration (default 200)");
SetParameterInt("maxnbit",200, false);
SetParameterInt("maxnbit",200);
AddParameter(ParameterType_Empty,"optobs","Optimizer Observer");
AddParameter(ParameterType_Bool,"optobs","Optimizer Observer");
SetParameterDescription("optobs","Activate the optimizer observer");
MandatoryOff("optobs");
AddParameter(ParameterType_OutputFilename,"out","Output filename");
SetParameterDescription("out","Output model file name (xml file) contains the optimal model to perform information fusion.");
......@@ -405,7 +404,7 @@ private:
// Create the Command observer and register it with the optimizer.
CommandIterationUpdate::Pointer observer = CommandIterationUpdate::New();
if (IsParameterEnabled("optobs"))
if (GetParameterInt("optobs"))
{
m_Optimizer->AddObserver(itk::IterationEvent(), observer);
}
......
......@@ -59,11 +59,10 @@ protected:
InitKMClassification();
// init at the end cleanup
AddParameter( ParameterType_Empty, "cleanup", "Temporary files cleaning" );
EnableParameter( "cleanup" );
AddParameter( ParameterType_Bool, "cleanup", "Temporary files cleaning" );
SetParameterDescription( "cleanup",
"If activated, the application will try to clean all temporary files it created" );
MandatoryOff( "cleanup" );
SetParameterInt("cleanup", 1);
}
void InitKMSampling()
......@@ -148,7 +147,7 @@ protected:
void ComputeImageEnvelope(const std::string &vectorFileName)
{
GetInternalApplication("imgenvelop")->SetParameterString("out", vectorFileName, false);
GetInternalApplication("imgenvelop")->SetParameterString("out", vectorFileName);
GetInternalApplication("imgenvelop")->ExecuteAndWriteOutput();
}
......@@ -187,8 +186,8 @@ protected:
{
std::vector<std::string> fieldList = {fieldName};
GetInternalApplication("polystats")->SetParameterStringList("field", fieldList, false);
GetInternalApplication("polystats")->SetParameterString("out", statisticsFileName, false);
GetInternalApplication("polystats")->SetParameterStringList("field", fieldList);
GetInternalApplication("polystats")->SetParameterString("out", statisticsFileName);
ExecuteInternal("polystats");
}
......@@ -199,17 +198,17 @@ protected:
int NBSamples)
{
/* SampleSelection */
GetInternalApplication("select")->SetParameterString("out", sampleFileName, false);
GetInternalApplication("select")->SetParameterString("out", sampleFileName);
UpdateInternalParameters("select");
GetInternalApplication("select")->SetParameterString("instats", statisticsFileName, false);
GetInternalApplication("select")->SetParameterString("field", fieldName, false);
GetInternalApplication("select")->SetParameterString("instats", statisticsFileName);
GetInternalApplication("select")->SetParameterString("field", fieldName);
GetInternalApplication("select")->SetParameterString("strategy", "constant", false);
GetInternalApplication("select")->SetParameterInt("strategy.constant.nb", NBSamples, false);
GetInternalApplication("select")->SetParameterString("strategy", "constant");
GetInternalApplication("select")->SetParameterInt("strategy.constant.nb", NBSamples);
if( IsParameterEnabled("rand"))
GetInternalApplication("select")->SetParameterInt("rand", GetParameterInt("rand"), false);
GetInternalApplication("select")->SetParameterInt("rand", GetParameterInt("rand"));
// select sample positions
ExecuteInternal("select");
......@@ -217,8 +216,8 @@ protected:
/* SampleExtraction */
UpdateInternalParameters("extraction");
GetInternalApplication("extraction")->SetParameterString("outfield", "prefix", false);
GetInternalApplication("extraction")->SetParameterString("outfield.prefix.name", "value_", false);
GetInternalApplication("extraction")->SetParameterString("outfield", "prefix");
GetInternalApplication("extraction")->SetParameterString("outfield.prefix.name", "value_");
// extract sample descriptors
GetInternalApplication("extraction")->ExecuteAndWriteOutput();
......@@ -229,7 +228,7 @@ protected:
const std::string &modelFileName)
{
std::vector<std::string> extractOutputList = {sampleTrainFileName};
GetInternalApplication("training")->SetParameterStringList("io.vd", extractOutputList, false);
GetInternalApplication("training")->SetParameterStringList("io.vd", extractOutputList);
UpdateInternalParameters("training");
// set field names
......@@ -242,19 +241,19 @@ protected:
oss << i;
selectedNames.push_back( selectPrefix + oss.str() );
}
GetInternalApplication("training")->SetParameterStringList("feat", selectedNames, false);
GetInternalApplication("training")->SetParameterStringList("feat", selectedNames);
GetInternalApplication("training")->SetParameterString("classifier", "sharkkm", false);
GetInternalApplication("training")->SetParameterString("classifier", "sharkkm");
GetInternalApplication("training")->SetParameterInt("classifier.sharkkm.maxiter",
GetParameterInt("maxit"), false);
GetParameterInt("maxit"));
GetInternalApplication("training")->SetParameterInt("classifier.sharkkm.k",
GetParameterInt("nc"), false);
GetParameterInt("nc"));
if( IsParameterEnabled("rand"))
GetInternalApplication("training")->SetParameterInt("rand", GetParameterInt("rand"), false);
GetInternalApplication("training")->SetParameterInt("rand", GetParameterInt("rand"));
GetInternalApplication("training")->GetParameterByKey("v")->SetActive(false);
GetInternalApplication("training")->SetParameterString("io.out", modelFileName, false);
GetInternalApplication("training")->SetParameterString("io.out", modelFileName);
ExecuteInternal( "training" );
otbAppLogINFO("output model : " << GetInternalApplication("training")->GetParameterString("io.out"));
......@@ -264,8 +263,8 @@ protected:
const std::string &imagesStatsFileName)
{
std::vector<std::string> imageFileNameList = {imageFileName};
GetInternalApplication("imgstats")->SetParameterStringList("il", imageFileNameList, false);
GetInternalApplication("imgstats")->SetParameterString("out", imagesStatsFileName, false);
GetInternalApplication("imgstats")->SetParameterStringList("il", imageFileNameList);
GetInternalApplication("imgstats")->SetParameterString("out", imagesStatsFileName);
ExecuteInternal( "imgstats" );
otbAppLogINFO("image statistics file : " << GetInternalApplication("imgstats")->GetParameterString("out"));
......@@ -497,7 +496,7 @@ private:
Superclass::CreateOutMeansFile(GetParameterImage("in"), fileNames.modelFile, GetParameterInt("nc"));
// Remove all tempory files
if( IsParameterEnabled( "cleanup" ) )
if( GetParameterInt( "cleanup" ) )
{
otbAppLogINFO( <<"Final clean-up ..." );
fileNames.clear();
......@@ -506,7 +505,7 @@ private:
void UpdateKMPolygonClassStatisticsParameters(const std::string &vectorFileName)
{
GetInternalApplication( "polystats" )->SetParameterString( "vec", vectorFileName, false );
GetInternalApplication( "polystats" )->SetParameterString( "vec", vectorFileName);
UpdateInternalParameters( "polystats" );
}
......
......@@ -164,7 +164,7 @@ private:
SetParameterDescription("strategy.all","Take all samples");
// Default strategy : smallest
SetParameterString("strategy","smallest", false);
SetParameterString("strategy","smallest");
AddParameter(ParameterType_Choice, "mim", "Multi-Image Mode");
......
......@@ -82,7 +82,7 @@ private:
AddParameter(ParameterType_String,"cfield","Field containing the predicted class.");
SetParameterDescription("cfield","Field containing the predicted class");
SetParameterString("cfield","predicted", false);
SetParameterString("cfield","predicted");
// Doc example parameter settings
SetDocExampleParameterValue("inshp", "vectorData.shp");
......
......@@ -89,7 +89,7 @@ private:
AddParameter(ParameterType_String, "outfield.prefix.name", "Output field prefix");
SetParameterDescription("outfield.prefix.name","Prefix used to form the field names that"
"will contain the extracted values.");
SetParameterString("outfield.prefix.name", "value_", false);
SetParameterString("outfield.prefix.name", "value_");
AddChoice("outfield.list","Use the given name list");
SetParameterDescription("outfield.list","Use the given name list");
......
......@@ -200,7 +200,7 @@ private:
SetParameterDescription("strategy.all","Take all samples");
// Default strategy : smallest
SetParameterString("strategy","smallest", false);
SetParameterString("strategy","smallest");
AddParameter(ParameterType_ListView, "field", "Field Name");
SetParameterDescription("field","Name of the field carrying the class name in the input vectors.");
......
......@@ -205,7 +205,7 @@ public:
TrainModel( imageList, fileNames.sampleTrainOutputs, fileNames.sampleValidOutputs );
// cleanup
if( IsParameterEnabled( "cleanup" ) )
if( GetParameterInt( "cleanup" ) )
{
otbAppLogINFO( <<"Final clean-up ..." );
fileNames.clear();
......@@ -217,7 +217,7 @@ private :
void UpdatePolygonClassStatisticsParameters()
{
std::vector<std::string> vectorFileList = GetParameterStringList( "io.vd" );
GetInternalApplication( "polystat" )->SetParameterString( "vec", vectorFileList[0], false );
GetInternalApplication( "polystat" )->SetParameterString( "vec", vectorFileList[0]);
UpdateInternalParameters( "polystat" );
}
......
......@@ -183,7 +183,7 @@ void DoInit() ITK_OVERRIDE
SetParameterDescription( "sample.vtr" ,
"Ratio between training and validation samples (0.0 = all training, "
"1.0 = all validation) (default = 0.5).");
SetParameterFloat( "sample.vtr" , 0.5 , false );
SetParameterFloat( "sample.vtr" , 0.5);
Superclass::DoInit();
......
......@@ -112,7 +112,7 @@ protected:
contingencyTableCalculator->Compute(performanceLabeledListSample->Begin(),
performanceLabeledListSample->End(),predictedListSample->Begin(), predictedListSample->End());
if(IsParameterEnabled("v"))
if(GetParameterInt("v"))
{
otbAppLogINFO( "Training performances:" );
otbAppLogINFO(<<"Contingency table: reference labels (rows) vs. produced labels (cols)\n"
......
......@@ -124,13 +124,13 @@ private:
"Only geometries with this field available will be taken into account.\n"
"The field is added either in the input file (if 'out' off) or in the output file.\n"
"Caution, the 'cfield' must not exist in the input file if you are updating the file.");
SetParameterString("cfield","predicted", false);
SetParameterString("cfield","predicted");
AddParameter(ParameterType_ListView, "feat", "Field names to be calculated.");
SetParameterDescription("feat","List of field names in the input vector data used as features for training. "
"Put the same field names as the TrainVectorClassifier application.");
AddParameter(ParameterType_Empty, "confmap", "Confidence map");
AddParameter(ParameterType_Bool, "confmap", "Confidence map");
SetParameterDescription( "confmap", "Confidence map of the produced classification. "
"The confidence index depends on the model : \n"
" - LibSVM : difference between the two highest probabilities "
......@@ -145,7 +145,6 @@ private:
" * RandomForest : Confidence (proportion of votes for the majority class). "
"Margin (normalized difference of the votes of the 2 majority classes) is not available for now.\n"
" * SVM : distance to margin (only works for 2-class models).\n");
MandatoryOff("confmap");
AddParameter(ParameterType_OutputFilename, "out", "Output vector data file containing class labels");
SetParameterDescription("out","Output vector data file storing sample values (OGR format)."
......@@ -271,10 +270,10 @@ private:
ConfidenceListSampleType::Pointer quality;
bool computeConfidenceMap(IsParameterEnabled("confmap") && m_Model->HasConfidenceIndex()
bool computeConfidenceMap(GetParameterInt("confmap") && m_Model->HasConfidenceIndex()
&& !m_Model->GetRegressionMode());
if (!m_Model->HasConfidenceIndex() && IsParameterEnabled("confmap"))
if (!m_Model->HasConfidenceIndex() && GetParameterInt("confmap"))
{
otbAppLogWARNING("Confidence map requested but the classifier doesn't support it!");
}
......
......@@ -87,12 +87,12 @@ private:
AddParameter(ParameterType_String, "cri", "Criterion");
SetParameterDescription("cri", "Dempster Shafer criterion (by default (belief+plausibility)/2)");
MandatoryOff("cri");
SetParameterString("cri", "((Belief + Plausibility)/2.)", false);
SetParameterString("cri", "((Belief + Plausibility)/2.)");
AddParameter(ParameterType_Float, "thd", "Criterion threshold");
SetParameterDescription("thd", "Criterion threshold (default 0.5)");
MandatoryOff("thd");
SetParameterFloat("thd",0.5, false);
SetParameterFloat("thd",0.5);
AddParameter(ParameterType_OutputVectorData, "out", "Output Vector Data");
SetParameterDescription("out", "Output VectorData containing only the validated samples");
......
......@@ -61,16 +61,16 @@ namespace Wrapper
SetParameterDescription("classifier.boost.t.gentle",
"A modified version of the Real Adaboost algorithm, using Newton stepping "
"rather than exact optimization at each step.");
SetParameterString("classifier.boost.t", "real", false);
SetParameterString("classifier.boost.t", "real");
SetParameterDescription("classifier.boost.t", "Type of Boosting algorithm.");
//Do not expose SplitCriteria
//WeakCount
AddParameter(ParameterType_Int, "classifier.boost.w", "Weak count");
SetParameterInt("classifier.boost.w",100, false);
SetParameterInt("classifier.boost.w",100);
SetParameterDescription("classifier.boost.w","The number of weak classifiers.");
//WeightTrimRate
AddParameter(ParameterType_Float, "classifier.boost.r", "Weight Trim Rate");
SetParameterFloat("classifier.boost.r",0.95, false);
SetParameterFloat("classifier.boost.r",0.95);
SetParameterDescription("classifier.boost.r",
"A threshold between 0 and 1 used to save computational time. "
"Samples with summary weight <= (1 - weight_trim_rate) do not participate in"
......@@ -78,7 +78,7 @@ namespace Wrapper
"functionality.");
//MaxDepth : Not sure that this parameter has to be exposed.
AddParameter(ParameterType_Int, "classifier.boost.m", "Maximum depth of the tree");
SetParameterInt("classifier.boost.m",1, false);
SetParameterInt("classifier.boost.m",1);
SetParameterDescription("classifier.boost.m","Maximum depth of the tree.");
}
......
......@@ -40,9 +40,9 @@ LearningApplicationBase<TInputValue,TOutputValue>
//MaxDepth
AddParameter(ParameterType_Int, "classifier.dt.max", "Maximum depth of the tree");
#ifdef OTB_OPENCV_3
SetParameterInt("classifier.dt.max",10, false);
SetParameterInt("classifier.dt.max",10);
#else
SetParameterInt("classifier.dt.max",65535, false);
SetParameterInt("classifier.dt.max",65535);
#endif
SetParameterDescription("classifier.dt.max",
"The training algorithm attempts to split each node while its depth is smaller "
......@@ -51,14 +51,14 @@ LearningApplicationBase<TInputValue,TOutputValue>
//MinSampleCount
AddParameter(ParameterType_Int, "classifier.dt.min", "Minimum number of samples in each node");
SetParameterInt("classifier.dt.min",10, false);
SetParameterInt("classifier.dt.min",10);
SetParameterDescription("classifier.dt.min",
"If the number of samples in a node is smaller "
"than this parameter, then this node will not be split.");
//RegressionAccuracy
AddParameter(ParameterType_Float, "classifier.dt.ra", "Termination criteria for regression tree");
SetParameterFloat("classifier.dt.ra",0.01, false);
SetParameterFloat("classifier.dt.ra",0.01);
SetParameterDescription("classifier.dt.ra",
"If all absolute differences between an estimated value in a node "
"and the values of the train samples in this node are smaller than this "
......@@ -72,7 +72,7 @@ LearningApplicationBase<TInputValue,TOutputValue>
AddParameter(ParameterType_Int, "classifier.dt.cat",
"Cluster possible values of a categorical variable into K <= cat clusters to find a "
"suboptimal split");
SetParameterInt("classifier.dt.cat",10, false);
SetParameterInt("classifier.dt.cat",10);
SetParameterDescription("classifier.dt.cat",
"Cluster possible values of a categorical variable into K <= cat clusters to find a "
"suboptimal split.");
......@@ -81,22 +81,22 @@ LearningApplicationBase<TInputValue,TOutputValue>
AddParameter(ParameterType_Int, "classifier.dt.f", "K-fold cross-validations");
#ifdef OTB_OPENCV_3
// disable cross validation by default (crash in opencv 3.2)
SetParameterInt("classifier.dt.f",0, false);
SetParameterInt("classifier.dt.f",0);
#else
SetParameterInt("classifier.dt.f",10, false);
SetParameterInt("classifier.dt.f",10);
#endif
SetParameterDescription("classifier.dt.f",
"If cv_folds > 1, then it prunes a tree with K-fold cross-validation where K "
"is equal to cv_folds.");
//Use1seRule
AddParameter(ParameterType_Empty, "classifier.dt.r", "Set Use1seRule flag to false");
AddParameter(ParameterType_Bool, "classifier.dt.r", "Set Use1seRule flag to false");
SetParameterDescription("classifier.dt.r",
"If true, then a pruning will be harsher. This will make a tree more compact and more "
"resistant to the training data noise but a bit less accurate.");
//TruncatePrunedTree
AddParameter(ParameterType_Empty, "classifier.dt.t", "Set TruncatePrunedTree flag to false");
AddParameter(ParameterType_Bool, "classifier.dt.t", "Set TruncatePrunedTree flag to false");
SetParameterDescription("classifier.dt.t",
"If true, then pruned branches are physically removed from the tree.");
......@@ -121,11 +121,11 @@ LearningApplicationBase<TInputValue,TOutputValue>
classifier->SetRegressionAccuracy(GetParameterFloat("classifier.dt.ra"));
classifier->SetMaxCategories(GetParameterInt("classifier.dt.cat"));
classifier->SetCVFolds(GetParameterInt("classifier.dt.f"));
if (IsParameterEnabled("classifier.dt.r"))
if (GetParameterInt("classifier.dt.r"))
{
classifier->SetUse1seRule(false);
}
if (IsParameterEnabled("classifier.dt.t"))
if (GetParameterInt("classifier.dt.t"))
{
classifier->SetTruncatePrunedTree(false);
}
......
......@@ -52,7 +52,7 @@ LearningApplicationBase<TInputValue,TOutputValue>
//WeakCount
AddParameter(ParameterType_Int, "classifier.gbt.w", "Number of boosting algorithm iterations");
SetParameterInt("classifier.gbt.w",200, false);
SetParameterInt("classifier.gbt.w",200);
SetParameterDescription(
"classifier.gbt.w",
"Number \"w\" of boosting algorithm iterations, with w*K being the total number of trees in "
......@@ -60,20 +60,20 @@ LearningApplicationBase<TInputValue,TOutputValue>
//Shrinkage
AddParameter(ParameterType_Float, "classifier.gbt.s", "Regularization parameter");
SetParameterFloat("classifier.gbt.s",0.01, false);
SetParameterFloat("classifier.gbt.s",0.01);
SetParameterDescription("classifier.gbt.s", "Regularization parameter.");
//SubSamplePortion
AddParameter(ParameterType_Float, "classifier.gbt.p",
"Portion of the whole training set used for each algorithm iteration");
SetParameterFloat("classifier.gbt.p",0.8, false);
SetParameterFloat("classifier.gbt.p",0.8);
SetParameterDescription(
"classifier.gbt.p",
"Portion of the whole training set used for each algorithm iteration. The subset is generated randomly.");
//MaxDepth
AddParameter(ParameterType_Int, "classifier.gbt.max", "Maximum depth of the tree");
SetParameterInt("classifier.gbt.max",3, false);
SetParameterInt("classifier.gbt.max",3);
SetParameterDescription(
"classifier.gbt.max", "The training algorithm attempts to split each node while its depth is smaller than the maximum "
"possible depth of the tree. The actual depth may be smaller if the other termination criteria are met, and/or "
......
......@@ -39,11 +39,10 @@ void TrainImagesBase::InitIO()
SetParameterDescription( "io.vd", "A list of vector data to select the training samples." );
MandatoryOn( "io.vd" );
AddParameter( ParameterType_Empty, "cleanup", "Temporary files cleaning" );
EnableParameter( "cleanup" );
AddParameter( ParameterType_Bool, "cleanup", "Temporary files cleaning" );
SetParameterDescription( "cleanup",
"If activated, the application will try to clean all temporary files it created" );
MandatoryOff( "cleanup" );
SetParameterInt( "cleanup", 1);
}
void TrainImagesBase::InitSampling()
......@@ -79,7 +78,7 @@ void TrainImagesBase::InitSampling()
AddParameter( ParameterType_Float, "sample.vtr", "Training and validation sample ratio" );
SetParameterDescription( "sample.vtr", "Ratio between training and validation samples (0.0 = all training, 1.0 = "
"all validation) (default = 0.5)." );
SetParameterFloat( "sample.vtr", 0.5, false );
SetParameterFloat( "sample.vtr", 0.5);
SetMaximumParameterFloatValue( "sample.vtr", 1.0 );
SetMinimumParameterFloatValue( "sample.vtr", 0.0 );
......@@ -160,8 +159,8 @@ void TrainImagesBase::ComputePolygonStatistics(FloatVectorImageListType *imageLi
for( unsigned int i = 0; i < nbImages; i++ )
{
GetInternalApplication( "polystat" )->SetParameterInputImage( "in", imageList->GetNthElement( i ) );
GetInternalApplication( "polystat" )->SetParameterString( "vec", vectorFileNames[i], false );
GetInternalApplication( "polystat" )->SetParameterString( "out", statisticsFileNames[i], false );
GetInternalApplication( "polystat" )->SetParameterString( "vec", vectorFileNames[i]);
GetInternalApplication( "polystat" )->SetParameterString( "out", statisticsFileNames[i]);
ExecuteInternal( "polystat" );
}
}
......@@ -170,7 +169,7 @@ void TrainImagesBase::ComputePolygonStatistics(FloatVectorImageListType *imageLi
TrainImagesBase::SamplingRates TrainImagesBase::ComputeFinalMaximumSamplingRates(bool dedicatedValidation)
{
SamplingRates rates;
GetInternalApplication( "rates" )->SetParameterString( "mim", "proportional", false );
GetInternalApplication( "rates" )->SetParameterString( "mim", "proportional");
double vtr = GetParameterFloat( "sample.vtr" );
long mt = GetParameterInt( "sample.mt" );
long mv = GetParameterInt( "sample.mv" );
......@@ -224,11 +223,11 @@ void TrainImagesBase::ComputeSamplingRate(const std::vector<std::string> &statis
const std::string &ratesFileName, long maximum)
{
// Sampling rates
GetInternalApplication( "rates" )->SetParameterStringList( "il", statisticsFileNames, false );
GetInternalApplication( "rates" )->SetParameterString( "out", ratesFileName, false );
GetInternalApplication( "rates" )->SetParameterStringList( "il", statisticsFileNames);
GetInternalApplication( "rates" )->SetParameterString( "out", ratesFileName);
if( GetParameterInt( "sample.bm" ) != 0 )
{
GetInternalApplication( "rates" )->SetParameterString( "strategy", "smallest", false );
GetInternalApplication( "rates" )->SetParameterString( "strategy", "smallest");
}
else
{
......@@ -236,12 +235,12 @@ void TrainImagesBase::ComputeSamplingRate(const std::vector<std::string> &statis
{
std::ostringstream oss;
oss << maximum;
GetInternalApplication( "rates" )->SetParameterString( "strategy", "constant", false );
GetInternalApplication( "rates" )->SetParameterString( "strategy.constant.nb", oss.str(), false );
GetInternalApplication( "rates" )->SetParameterString( "strategy", "constant");
GetInternalApplication( "rates" )->SetParameterString( "strategy.constant.nb", oss.str());
}
else
{
GetInternalApplication( "rates" )->SetParameterString( "strategy", "all", false );
GetInternalApplication( "rates" )->SetParameterString( "strategy", "all");
}
}
ExecuteInternal( "rates" );
......@@ -251,9 +250,9 @@ void
TrainImagesBase::TrainModel(FloatVectorImageListType *imageList, const std::vector<std::string> &sampleTrainFileNames,
const std::vector<std::string> &sampleValidationFileNames)
{
GetInternalApplication( "training" )->SetParameterStringList( "io.vd", sampleTrainFileNames, false );
GetInternalApplication( "training" )->SetParameterStringList( "io.vd", sampleTrainFileNames);
if( !sampleValidationFileNames.empty() )
GetInternalApplication( "training" )->SetParameterStringList( "valid.vd", sampleValidationFileNames, false );
GetInternalApplication( "training" )->SetParameterStringList( "valid.vd", sampleValidationFileNames);
UpdateInternalParameters( "training" );
// set field names
......@@ -266,7 +265,7 @@ TrainImagesBase::TrainModel(FloatVectorImageListType *imageList, const std::vect
oss << i;
selectedNames.push_back( "value_" + oss.str() );
}
GetInternalApplication( "training" )->SetParameterStringList( "feat", selectedNames, false );
GetInternalApplication( "training" )->SetParameterStringList( "feat", selectedNames);
ExecuteInternal( "training" );
}
......@@ -276,38 +275,38 @@ void TrainImagesBase::SelectAndExtractSamples(FloatVectorImageType *image, std::
std::string selectedField)
{
GetInternalApplication( "select" )->SetParameterInputImage( "in", image );
GetInternalApplication( "select" )->SetParameterString( "out", sampleFileName, false );
GetInternalApplication( "select" )->SetParameterString( "out", sampleFileName);
// Change the selection strategy based on selected sampling strategy
switch( strategy )