Commit 810c681c authored by Jordi Inglada

STYLE: rename parameters of the application

parent 5bf575f9
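All dimensionality reduction training parameters previously grouped under the "model" key are renamed to "algorithm" (for example "model.pca.dim" becomes "algorithm.pca.dim"), so callers that set these parameters must be updated. Purely as an illustration, here is a minimal sketch of a caller using the OTB application wrapper API with the new keys; the application name "TrainDimensionalityReduction", the input file names, and the field names are placeholders and are not taken from this commit.

#include "otbWrapperApplication.h"
#include "otbWrapperApplicationRegistry.h"
#include <cstdlib>
#include <string>
#include <vector>

int main()
{
  // Hypothetical application name: adjust to the identifier actually registered by this module.
  otb::Wrapper::Application::Pointer app =
    otb::Wrapper::ApplicationRegistry::CreateApplication("TrainDimensionalityReduction");
  if (app.IsNull())
    return EXIT_FAILURE; // application not found on OTB_APPLICATION_PATH

  // Placeholder inputs and feature field names.
  app->SetParameterString("io.vd", "training_samples.sqlite");
  app->SetParameterString("io.out", "model.txt");
  std::vector<std::string> fields = {"b1", "b2", "b3"};
  app->SetParameterStringList("feat", fields);

  // Group key renamed by this commit: "model" -> "algorithm".
  app->SetParameterString("algorithm", "pca");
  app->SetParameterInt("algorithm.pca.dim", 8);

  return app->ExecuteAndWriteOutput();
}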
@@ -59,14 +59,14 @@ private:
 SetParameterDescription("io.vd", "Input geometries used for training (note : all geometries from the layer will be used)");
 AddParameter(ParameterType_OutputFilename, "io.out", "Output model");
-SetParameterDescription("io.out", "Output file containing the model estimated (.txt format).");
+SetParameterDescription("io.out", "Output file containing the estimated model (.txt format).");
 AddParameter(ParameterType_InputFilename, "io.stats", "Input XML image statistics file");
 MandatoryOff("io.stats");
 SetParameterDescription("io.stats", "XML file containing mean and variance of each feature.");
-AddParameter(ParameterType_StringList, "feat", "Field names to be calculated."); //
+AddParameter(ParameterType_StringList, "feat", "Field names to be used for training."); //
 SetParameterDescription("feat","List of field names in the input vector data used as features for training."); //
 Superclass::DoInit();
@@ -16,87 +16,87 @@ TrainDimensionalityReductionApplicationBase<TInputValue,TOutputValue>
 {
-AddChoice("model.tiedautoencoder", "Shark Tied Autoencoder");
-AddChoice("model.autoencoder", "Shark Autoencoder");
-SetParameterDescription("model.autoencoder",
+AddChoice("algorithm.tiedautoencoder", "Shark Tied Autoencoder");
+AddChoice("algorithm.autoencoder", "Shark Autoencoder");
+SetParameterDescription("algorithm.autoencoder",
 "This group of parameters allows setting Shark autoencoder parameters. "
 );
 //Tied Autoencoder
-AddParameter(ParameterType_Choice, "model.autoencoder.istied",
+AddParameter(ParameterType_Choice, "algorithm.autoencoder.istied",
 "tied weighth <tied/untied>");
 SetParameterDescription(
-"model.autoencoder.istied",
+"algorithm.autoencoder.istied",
 "Parameter that determine if the weights are tied or not <tied/untied>");
-AddChoice("model.autoencoder.istied.yes","Tied weigths");
-AddChoice("model.autoencoder.istied.no","Untied weights");
+AddChoice("algorithm.autoencoder.istied.yes","Tied weigths");
+AddChoice("algorithm.autoencoder.istied.no","Untied weights");
 //Number Of Iterations
-AddParameter(ParameterType_Int, "model.autoencoder.nbiter",
+AddParameter(ParameterType_Int, "algorithm.autoencoder.nbiter",
 "Maximum number of iterations during training");
-SetParameterInt("model.autoencoder.nbiter",100, false);
+SetParameterInt("algorithm.autoencoder.nbiter",100, false);
 SetParameterDescription(
-"model.autoencoder.nbiter",
+"algorithm.autoencoder.nbiter",
 "The maximum number of iterations used during training.");
-AddParameter(ParameterType_Int, "model.autoencoder.nbiterfinetuning",
+AddParameter(ParameterType_Int, "algorithm.autoencoder.nbiterfinetuning",
 "Maximum number of iterations during training");
-SetParameterInt("model.autoencoder.nbiterfinetuning",0, false);
+SetParameterInt("algorithm.autoencoder.nbiterfinetuning",0, false);
 SetParameterDescription(
-"model.autoencoder.nbiterfinetuning",
+"algorithm.autoencoder.nbiterfinetuning",
 "The maximum number of iterations used during fine tuning of the whole network.");
-AddParameter(ParameterType_Float, "model.autoencoder.epsilon",
+AddParameter(ParameterType_Float, "algorithm.autoencoder.epsilon",
 " ");
-SetParameterFloat("model.autoencoder.epsilon",0, false);
+SetParameterFloat("algorithm.autoencoder.epsilon",0, false);
 SetParameterDescription(
-"model.autoencoder.epsilon",
+"algorithm.autoencoder.epsilon",
 " ");
-AddParameter(ParameterType_Float, "model.autoencoder.initfactor",
+AddParameter(ParameterType_Float, "algorithm.autoencoder.initfactor",
 " ");
-SetParameterFloat("model.autoencoder.initfactor",1, false);
+SetParameterFloat("algorithm.autoencoder.initfactor",1, false);
 SetParameterDescription(
-"model.autoencoder.initfactor", "parameter that control the weight initialization of the autoencoder");
+"algorithm.autoencoder.initfactor", "parameter that control the weight initialization of the autoencoder");
 //Number Of Hidden Neurons
-AddParameter(ParameterType_StringList , "model.autoencoder.nbneuron", "Size");
-/*AddParameter(ParameterType_Int, "model.autoencoder.nbneuron",
+AddParameter(ParameterType_StringList , "algorithm.autoencoder.nbneuron", "Size");
+/*AddParameter(ParameterType_Int, "algorithm.autoencoder.nbneuron",
 "Number of neurons in the hidden layer");
-SetParameterInt("model.autoencoder.nbneuron",10, false);*/
+SetParameterInt("algorithm.autoencoder.nbneuron",10, false);*/
 SetParameterDescription(
-"model.autoencoder.nbneuron",
+"algorithm.autoencoder.nbneuron",
 "The number of neurons in each hidden layer.");
 //Regularization
-AddParameter(ParameterType_StringList, "model.autoencoder.regularization", "Strength of the regularization");
-SetParameterDescription("model.autoencoder.regularization",
-"Strength of the L2 regularization used during training");
+AddParameter(ParameterType_StringList, "algorithm.autoencoder.regularization", "Strength of the regularization");
+SetParameterDescription("algorithm.autoencoder.regularization",
+"Strength of the L2 regularization used during training");
 //Noise strength
-AddParameter(ParameterType_StringList, "model.autoencoder.noise", "Strength of the noise");
-SetParameterDescription("model.autoencoder.noise",
-"Strength of the noise");
+AddParameter(ParameterType_StringList, "algorithm.autoencoder.noise", "Strength of the noise");
+SetParameterDescription("algorithm.autoencoder.noise",
+"Strength of the noise");
 // Sparsity parameter
-AddParameter(ParameterType_StringList, "model.autoencoder.rho", "Sparsity parameter");
-SetParameterDescription("model.autoencoder.rho",
-"Sparsity parameter");
+AddParameter(ParameterType_StringList, "algorithm.autoencoder.rho", "Sparsity parameter");
+SetParameterDescription("algorithm.autoencoder.rho",
+"Sparsity parameter");
 // Sparsity regularization strength
-AddParameter(ParameterType_StringList, "model.autoencoder.beta", "Sparsity regularization strength");
-SetParameterDescription("model.autoencoder.beta",
-"Sparsity regularization strength");
+AddParameter(ParameterType_StringList, "algorithm.autoencoder.beta", "Sparsity regularization strength");
+SetParameterDescription("algorithm.autoencoder.beta",
+"Sparsity regularization strength");
-AddParameter(ParameterType_OutputFilename, "model.autoencoder.learningcurve", "Learning curve");
-SetParameterDescription("model.autoencoder.learningcurve", "Learning error values");
-MandatoryOff("model.autoencoder.learningcurve");
+AddParameter(ParameterType_OutputFilename, "algorithm.autoencoder.learningcurve", "Learning curve");
+SetParameterDescription("algorithm.autoencoder.learningcurve", "Learning error values");
+MandatoryOff("algorithm.autoencoder.learningcurve");
 }
@@ -107,10 +107,10 @@ TrainDimensionalityReductionApplicationBase<TInputValue,TOutputValue>
 ::BeforeTrainAutoencoder(typename ListSampleType::Pointer trainingListSample,
 std::string modelPath)
 {
-std::string TiedWeigth = GetParameterString("model.autoencoder.istied");
-std::cout << TiedWeigth << std::endl;
+std::string TiedWeigth = GetParameterString("algorithm.autoencoder.istied");
+std::cout << TiedWeigth << std::endl;
-if(TiedWeigth == "no")
+if(TiedWeigth == "no")
 {
 TrainAutoencoder<AutoencoderModelType>(trainingListSample,modelPath);
 }
@@ -138,11 +138,11 @@ void TrainDimensionalityReductionApplicationBase<TInputValue,TOutputValue>::Trai
 itk::Array<float> regularization;
 itk::Array<float> rho;
 itk::Array<float> beta;
-std::vector<std::basic_string<char>> s_nbneuron= GetParameterStringList("model.autoencoder.nbneuron");
-std::vector<std::basic_string<char>> s_noise= GetParameterStringList("model.autoencoder.noise");
-std::vector<std::basic_string<char>> s_regularization= GetParameterStringList("model.autoencoder.regularization");
-std::vector<std::basic_string<char>> s_rho= GetParameterStringList("model.autoencoder.rho");
-std::vector<std::basic_string<char>> s_beta= GetParameterStringList("model.autoencoder.beta");
+std::vector<std::basic_string<char>> s_nbneuron= GetParameterStringList("algorithm.autoencoder.nbneuron");
+std::vector<std::basic_string<char>> s_noise= GetParameterStringList("algorithm.autoencoder.noise");
+std::vector<std::basic_string<char>> s_regularization= GetParameterStringList("algorithm.autoencoder.regularization");
+std::vector<std::basic_string<char>> s_rho= GetParameterStringList("algorithm.autoencoder.rho");
+std::vector<std::basic_string<char>> s_beta= GetParameterStringList("algorithm.autoencoder.beta");
 nb_neuron.SetSize(s_nbneuron.size());
 noise.SetSize(s_nbneuron.size());
 regularization.SetSize(s_nbneuron.size());
@@ -156,22 +156,22 @@ void TrainDimensionalityReductionApplicationBase<TInputValue,TOutputValue>::Trai
 beta[i]=std::stof(s_beta[i]);
 }
 dimredTrainer->SetNumberOfHiddenNeurons(nb_neuron);
-dimredTrainer->SetNumberOfIterations(GetParameterInt("model.autoencoder.nbiter"));
-dimredTrainer->SetNumberOfIterationsFineTuning(GetParameterInt("model.autoencoder.nbiterfinetuning"));
-dimredTrainer->SetEpsilon(GetParameterFloat("model.autoencoder.epsilon"));
-dimredTrainer->SetInitFactor(GetParameterFloat("model.autoencoder.initfactor"));
+dimredTrainer->SetNumberOfIterations(GetParameterInt("algorithm.autoencoder.nbiter"));
+dimredTrainer->SetNumberOfIterationsFineTuning(GetParameterInt("algorithm.autoencoder.nbiterfinetuning"));
+dimredTrainer->SetEpsilon(GetParameterFloat("algorithm.autoencoder.epsilon"));
+dimredTrainer->SetInitFactor(GetParameterFloat("algorithm.autoencoder.initfactor"));
 dimredTrainer->SetRegularization(regularization);
 dimredTrainer->SetNoise(noise);
 dimredTrainer->SetRho(rho);
 dimredTrainer->SetBeta(beta);
 dimredTrainer->SetWriteWeights(true);
-if (HasValue("model.autoencoder.learningcurve") && IsParameterEnabled("model.autoencoder.learningcurve"))
-{
-std::cout << "yo" << std::endl;
-dimredTrainer->SetWriteLearningCurve(true);
-dimredTrainer->SetLearningCurveFileName(GetParameterString("model.autoencoder.learningcurve"));
-}
+if (HasValue("algorithm.autoencoder.learningcurve") && IsParameterEnabled("algorithm.autoencoder.learningcurve"))
+{
+std::cout << "yo" << std::endl;
+dimredTrainer->SetWriteLearningCurve(true);
+dimredTrainer->SetLearningCurveFileName(GetParameterString("algorithm.autoencoder.learningcurve"));
+}
 dimredTrainer->SetInputListSample(trainingListSample);
 std::cout << "before train" << std::endl;
@@ -16,18 +16,18 @@ TrainDimensionalityReductionApplicationBase<TInputValue,TOutputValue>
 {
-AddChoice("model.pca", "Shark PCA");
-SetParameterDescription("model.pca",
+AddChoice("algorithm.pca", "Shark PCA");
+SetParameterDescription("algorithm.pca",
 "This group of parameters allows setting Shark PCA parameters. "
 );
 //Output Dimension
-AddParameter(ParameterType_Int, "model.pca.dim",
+AddParameter(ParameterType_Int, "algorithm.pca.dim",
 "Dimension of the output of the pca transformation");
-SetParameterInt("model.pca.dim",10, false);
+SetParameterInt("algorithm.pca.dim",10, false);
 SetParameterDescription(
-"model.pca.dim",
+"algorithm.pca.dim",
 "Dimension of the output of the pca transformation.");
@@ -38,7 +38,7 @@ void TrainDimensionalityReductionApplicationBase<TInputValue,TOutputValue>
 ::TrainPCA(typename ListSampleType::Pointer trainingListSample,std::string modelPath)
 {
 typename PCAModelType::Pointer dimredTrainer = PCAModelType::New();
-dimredTrainer->SetDimension(GetParameterInt("model.pca.dim"));
+dimredTrainer->SetDimension(GetParameterInt("algorithm.pca.dim"));
 dimredTrainer->SetInputListSample(trainingListSample);
 dimredTrainer->SetWriteEigenvectors(true);
 dimredTrainer->Train();
@@ -14,62 +14,62 @@ TrainDimensionalityReductionApplicationBase<TInputValue,TOutputValue>
 ::InitSOMParams()
 {
-AddChoice("model.som", "OTB SOM");
-SetParameterDescription("model.som",
+AddChoice("algorithm.som", "OTB SOM");
+SetParameterDescription("algorithm.som",
 "This group of parameters allows setting SOM parameters. "
 );
-AddParameter(ParameterType_Int, "model.som.dim","Dimension of the map");
-SetParameterDescription("model.som.dim","Dimension of the SOM map.");
+AddParameter(ParameterType_Int, "algorithm.som.dim","Dimension of the map");
+SetParameterDescription("algorithm.som.dim","Dimension of the SOM map.");
-AddParameter(ParameterType_StringList , "model.som.s", "Size");
-SetParameterDescription("model.som.s", "Size of the SOM map");
-MandatoryOff("model.som.s");
+AddParameter(ParameterType_StringList , "algorithm.som.s", "Size");
+SetParameterDescription("algorithm.som.s", "Size of the SOM map");
+MandatoryOff("algorithm.som.s");
-AddParameter(ParameterType_StringList , "model.som.n", "Size Neighborhood");
-SetParameterDescription("model.som.n", "Size of the initial neighborhood in the SOM map");
-MandatoryOff("model.som.n");
+AddParameter(ParameterType_StringList , "algorithm.som.n", "Size Neighborhood");
+SetParameterDescription("algorithm.som.n", "Size of the initial neighborhood in the SOM map");
+MandatoryOff("algorithm.som.n");
-AddParameter(ParameterType_Int, "model.som.sx", "SizeX");
-SetParameterDescription("model.som.sx", "X size of the SOM map");
-MandatoryOff("model.som.sx");
+AddParameter(ParameterType_Int, "algorithm.som.sx", "SizeX");
+SetParameterDescription("algorithm.som.sx", "X size of the SOM map");
+MandatoryOff("algorithm.som.sx");
-AddParameter(ParameterType_Int, "model.som.sy", "SizeY");
-SetParameterDescription("model.som.sy", "Y size of the SOM map");
-MandatoryOff("model.som.sy");
+AddParameter(ParameterType_Int, "algorithm.som.sy", "SizeY");
+SetParameterDescription("algorithm.som.sy", "Y size of the SOM map");
+MandatoryOff("algorithm.som.sy");
-AddParameter(ParameterType_Int, "model.som.nx", "NeighborhoodX");
-SetParameterDescription("model.som.nx", "X size of the initial neighborhood in the SOM map");
-MandatoryOff("model.som.nx");
+AddParameter(ParameterType_Int, "algorithm.som.nx", "NeighborhoodX");
+SetParameterDescription("algorithm.som.nx", "X size of the initial neighborhood in the SOM map");
+MandatoryOff("algorithm.som.nx");
-AddParameter(ParameterType_Int, "model.som.ny", "NeighborhoodY");
-SetParameterDescription("model.som.ny", "Y size of the initial neighborhood in the SOM map");
-MandatoryOff("model.som.nx");
+AddParameter(ParameterType_Int, "algorithm.som.ny", "NeighborhoodY");
+SetParameterDescription("algorithm.som.ny", "Y size of the initial neighborhood in the SOM map");
+MandatoryOff("algorithm.som.nx");
-AddParameter(ParameterType_Int, "model.som.ni", "NumberIteration");
-SetParameterDescription("model.som.ni", "Number of iterations for SOM learning");
-MandatoryOff("model.som.ni");
+AddParameter(ParameterType_Int, "algorithm.som.ni", "NumberIteration");
+SetParameterDescription("algorithm.som.ni", "Number of iterations for SOM learning");
+MandatoryOff("algorithm.som.ni");
-AddParameter(ParameterType_Float, "model.som.bi", "BetaInit");
-SetParameterDescription("model.som.bi", "Initial learning coefficient");
-MandatoryOff("model.som.bi");
+AddParameter(ParameterType_Float, "algorithm.som.bi", "BetaInit");
+SetParameterDescription("algorithm.som.bi", "Initial learning coefficient");
+MandatoryOff("algorithm.som.bi");
-AddParameter(ParameterType_Float, "model.som.bf", "BetaFinal");
-SetParameterDescription("model.som.bf", "Final learning coefficient");
-MandatoryOff("model.som.bf");
+AddParameter(ParameterType_Float, "algorithm.som.bf", "BetaFinal");
+SetParameterDescription("algorithm.som.bf", "Final learning coefficient");
+MandatoryOff("algorithm.som.bf");
-AddParameter(ParameterType_Float, "model.som.iv", "InitialValue");
-SetParameterDescription("model.som.iv", "Maximum initial neuron weight");
-MandatoryOff("model.som.iv");
+AddParameter(ParameterType_Float, "algorithm.som.iv", "InitialValue");
+SetParameterDescription("algorithm.som.iv", "Maximum initial neuron weight");
+MandatoryOff("algorithm.som.iv");
-SetDefaultParameterInt("model.som.sx", 32);
-SetDefaultParameterInt("model.som.sy", 32);
-SetDefaultParameterInt("model.som.nx", 10);
-SetDefaultParameterInt("model.som.ny", 10);
-SetDefaultParameterInt("model.som.ni", 5);
-SetDefaultParameterFloat("model.som.bi", 1.0);
-SetDefaultParameterFloat("model.som.bf", 0.1);
-SetDefaultParameterFloat("model.som.iv", 10.0);
+SetDefaultParameterInt("algorithm.som.sx", 32);
+SetDefaultParameterInt("algorithm.som.sy", 32);
+SetDefaultParameterInt("algorithm.som.nx", 10);
+SetDefaultParameterInt("algorithm.som.ny", 10);
+SetDefaultParameterInt("algorithm.som.ni", 5);
+SetDefaultParameterFloat("algorithm.som.bi", 1.0);
+SetDefaultParameterFloat("algorithm.som.bf", 0.1);
+SetDefaultParameterFloat("algorithm.som.iv", 10.0);
 }
@@ -80,10 +80,10 @@ TrainDimensionalityReductionApplicationBase<TInputValue,TOutputValue>
 ::BeforeTrainSOM(typename ListSampleType::Pointer trainingListSample,
 std::string modelPath)
 {
-int SomDim = GetParameterInt("model.som.dim");
-std::cout << SomDim << std::endl;
+int SomDim = GetParameterInt("algorithm.som.dim");
+std::cout << SomDim << std::endl;
-if(SomDim == 2)
+if(SomDim == 2)
 {
 TrainSOM<SOM2DModelType >(trainingListSample,modelPath);
 }
@@ -118,23 +118,23 @@ void TrainDimensionalityReductionApplicationBase<TInputValue,TOutputValue>
 typename somchoice::Pointer dimredTrainer = somchoice::New();
 unsigned int dim = dimredTrainer->GetDimension();
 std::cout << dim << std::endl;
-dimredTrainer->SetNumberOfIterations(GetParameterInt("model.som.ni"));
-dimredTrainer->SetBetaInit(GetParameterFloat("model.som.bi"));
+dimredTrainer->SetNumberOfIterations(GetParameterInt("algorithm.som.ni"));
+dimredTrainer->SetBetaInit(GetParameterFloat("algorithm.som.bi"));
 dimredTrainer->SetWriteMap(true);
-dimredTrainer->SetBetaEnd(GetParameterFloat("model.som.bf"));
-dimredTrainer->SetMaxWeight(GetParameterFloat("model.som.iv"));
+dimredTrainer->SetBetaEnd(GetParameterFloat("algorithm.som.bf"));
+dimredTrainer->SetMaxWeight(GetParameterFloat("algorithm.som.iv"));
 typename TemplateEstimatorType::SizeType size;
-std::vector<std::basic_string<char>> s= GetParameterStringList("model.som.s");
+std::vector<std::basic_string<char>> s= GetParameterStringList("algorithm.som.s");
 for (int i=0; i<dim; i++){
 size[i]=std::stoi(s[i]);
 }
 dimredTrainer->SetMapSize(size);
 typename TemplateEstimatorType::SizeType radius;
-std::vector<std::basic_string<char>> n= GetParameterStringList("model.som.n");
-for (int i=0; i<dim; i++){
-radius[i]=std::stoi(n[i]);
-}
+std::vector<std::basic_string<char>> n= GetParameterStringList("algorithm.som.n");
+for (int i=0; i<dim; i++){
+radius[i]=std::stoi(n[i]);
+}
 dimredTrainer->SetNeighborhoodSizeInit(radius);
 dimredTrainer->SetListSample(trainingListSample);
 dimredTrainer->Train();
@@ -45,8 +45,8 @@ TrainDimensionalityReductionApplicationBase<TInputValue,TOutputValue>
 AddDocTag(Tags::Learning);
 // main choice parameter that will contain all dimensionality reduction options
-AddParameter(ParameterType_Choice, "model", "model to use for the training");
-SetParameterDescription("model", "Choice of the dimensionality reduction model to use for the training.");
+AddParameter(ParameterType_Choice, "algorithm", "algorithm to use for the training");
+SetParameterDescription("algorithm", "Choice of the dimensionality reduction algorithm to use for the training.");
 InitSOMParams();
@@ -73,10 +73,10 @@ TrainDimensionalityReductionApplicationBase<TInputValue,TOutputValue>
 {
 // get the name of the chosen machine learning model
-const std::string modelName = GetParameterString("model");
-// call specific train function
+const std::string modelName = GetParameterString("algorithm");
+// call specific train function
-if(modelName == "som")
+if(modelName == "som")
 {
 BeforeTrainSOM(trainingListSample,modelPath);
 }