Skip to content
Snippets Groups Projects
Commit c722891d authored by Antoine Regimbeau's avatar Antoine Regimbeau
Browse files

LibSVM and TrainNeuralNetwork doc

parent db93f2a2
No related branches found
No related tags found
No related merge requests found
......@@ -48,7 +48,7 @@ namespace Wrapper
AddChoice("classifier.libsvm.k.poly", "Polynomial");
SetParameterDescription("classifier.libsvm.k.poly",
"Polynomial Kernel, the mapping is a polynomial function.")
"Polynomial Kernel, the mapping is a polynomial function.");
AddChoice("classifier.libsvm.k.sigmoid", "Sigmoid");
SetParameterDescription("classifier.libsvm.k.sigmoid",
......@@ -86,6 +86,9 @@ namespace Wrapper
"to optimize, and may not be as fast.");
AddChoice("classifier.libsvm.m.oneclass", "Distribution estimation (One Class SVM)");
SetParameterDescription("classifier.libsvm.m.oneclass",
"All the training data are from the same class, SVM builds a boundary "
"that separates the class from the rest of the feature space.");
SetParameterString("classifier.libsvm.m", "csvc", false);
}
......
......@@ -42,12 +42,15 @@ LearningApplicationBase<TInputValue,TOutputValue>
//TrainMethod
AddParameter(ParameterType_Choice, "classifier.ann.t", "Train Method Type");
AddChoice("classifier.ann.t.reg", "RPROP algorithm");
SetParameterDescription("classifier.ann.t.reg",
"");
AddChoice("classifier.ann.t.back", "Back-propagation algorithm");
SetParameterDescription("classifier.ann.t.back",
"");
"Method to compute the gradient of the loss function and adjust weights "
"in the network to optimize the result.");
AddChoice("classifier.ann.t.reg", "Resilient Back-propagation algorithm");
SetParameterDescription("classifier.ann.t.reg",
"Almost the same as the Back-prop algorithm except that it does not "
"take into account the magnitude of the partial derivative (coordinate "
"of the gradient) but only its sign.");
SetParameterString("classifier.ann.t", "reg", false);
SetParameterDescription("classifier.ann.t",
......@@ -71,7 +74,9 @@ LearningApplicationBase<TInputValue,TOutputValue>
AddChoice("classifier.ann.f.sig", "Symmetrical Sigmoid function");
AddChoice("classifier.ann.f.gau", "Gaussian function (Not completely supported)");
SetParameterString("classifier.ann.f", "sig", false);
SetParameterDescription("classifier.ann.f", "Neuron activation function.");
SetParameterDescription("classifier.ann.f",
"This function determines whether the output of the node is positive or not "
"depending on the output of the transfer function.");
//Alpha
AddParameter(ParameterType_Float, "classifier.ann.a",
......@@ -123,8 +128,17 @@ LearningApplicationBase<TInputValue,TOutputValue>
//TermCriteriaType
AddParameter(ParameterType_Choice, "classifier.ann.term", "Termination criteria");
AddChoice("classifier.ann.term.iter", "Maximum number of iterations");
SetParameterDescription("classifier.ann.term.iter",
"Set the number of iterations allowed to the network for its "
"training. Training will stop regardless of the result when this "
"number is reached.");
AddChoice("classifier.ann.term.eps", "Epsilon");
SetParameterDescription("classifier.ann.term.eps",
"Training will focus on result and will stop once the precision is "
"at most epsilon.");
AddChoice("classifier.ann.term.all", "Max. iterations + Epsilon");
SetParameterDescription("classifier.ann.term.all",
"Both termination criteria are used. Training stops when the first one is reached.");
SetParameterString("classifier.ann.term", "all", false);
SetParameterDescription("classifier.ann.term", "Termination criteria.");
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment