Commit e4edd3c8 authored by Ludovic Hussonnois's avatar Ludovic Hussonnois

Merge remote-tracking branch 'remotes/origin/develop' into contingency_table

parents eb111e9d d3bd4b1f
......@@ -22,12 +22,17 @@
# Helper to perform the initial git clone and checkout.
function(_git_clone git_executable git_repository git_tag module_dir)
execute_process(
COMMAND "${git_executable}" clone "${git_repository}" "${module_dir}"
RESULT_VARIABLE error_code
OUTPUT_QUIET
ERROR_QUIET
)
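# Retry the clone a few times, since transient network failures are common;
# error_code starts non-zero so the loop body runs at least once.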
set(retryCount 0)
set(error_code 1)
while(error_code AND (retryCount LESS 3))
execute_process(
COMMAND "${git_executable}" clone "${git_repository}" "${module_dir}"
RESULT_VARIABLE error_code
OUTPUT_QUIET
ERROR_QUIET
)
math(EXPR retryCount "${retryCount}+1")
endwhile()
if(error_code)
message(FATAL_ERROR "Failed to clone repository: '${git_repository}'")
endif()
......
......@@ -4,10 +4,10 @@ Classification
Pixel based classification
--------------------------
Orfeo ToolBox ships with a set of application to perform supervised
pixel-based image classification. This framework allows to learn from
multiple images, and using several machine learning method such as
SVM, Bayes, KNN, Random Forests, Artificial Neural Network, and
Orfeo ToolBox ships with a set of applications to perform supervised or
unsupervised pixel-based image classification. This framework allows
learning from multiple images, using several machine learning methods
such as SVM, Bayes, KNN, Random Forests, Artificial Neural Networks, and
others (see the application help of ``TrainImagesClassifier`` and
``TrainVectorClassifier`` for further details about all the available
classifiers). Here is an overview of the complete workflow:
......@@ -235,7 +235,7 @@ image.
class required)
- *Mode = proportional:* For each image :math:`i` and each class :math:`c`,
:math:`N_i( c ) = \frac{M * T_i( c )}{sum_k( T_k(c)}`
:math:`N_i(c) = \frac{M \cdot T_i(c)}{\sum_k T_k(c)}`
- *Mode = equal:* For each image :math:`i` and each class :math:`c`,
:math:`N_i(c) = \frac{M}{L}`
- *Mode = custom:* For each image :math:`i` and each class :math:`c`,
......@@ -347,8 +347,9 @@ using the ``TrainVectorClassifier`` application.
-feat band_0 band_1 band_2 band_3 band_4 band_5 band_6
The ``-classifier`` parameter lets you choose which machine learning
model algorithm to train. Please refer to the
``TrainVectorClassifier`` application reference documentation.
model algorithm to train. Unsupervised classification is also available;
in that case, choose the Shark kmeans classifier. Please refer to the
``TrainVectorClassifier`` application reference documentation.
If you have multiple sample files, you can add them all to the ``-io.vd``
parameter (see the `Working with several images`_ section).
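For the unsupervised case mentioned above, a minimal invocation might look like
the following (a hedged sketch: the classifier key, here assumed to be
``sharkkm``, its ``k`` option and the file names are illustrative; check the
application help for the exact parameter names in your OTB version)::

   otbcli_TrainVectorClassifier -io.vd samples.sqlite
                                -feat band_0 band_1 band_2 band_3 band_4 band_5 band_6
                                -classifier sharkkm
                                -classifier.sharkkm.k 4
                                -io.out model_kmeans.txt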
......@@ -409,6 +410,11 @@ class too, based on the
`ConfusionMatrixCalculator <http://www.orfeo-toolbox.org/doxygen-current/classotb_1_1ConfusionMatrixCalculator.html>`_
class.
If you have performed an unsupervised classification, this must be specified
to the ``ComputeConfusionMatrix`` application. In this case, a contingency table
is produced instead of a confusion matrix. For further details,
see the ``format`` parameter in the application help of *ComputeConfusionMatrix*.
::
otbcli_ComputeConfusionMatrix -in labeled_image.tif
......
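For the unsupervised case described above, a possible invocation is sketched
below (hedged: the ``contingencytable`` value of the ``format`` parameter and
the reference-image parameters are assumptions to be checked against the
application help)::

   otbcli_ComputeConfusionMatrix -in labeled_image.tif
                                 -ref raster
                                 -ref.raster.in reference_image.tif
                                 -format contingencytable
                                 -out contingency_table.csv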
......@@ -45,7 +45,7 @@
#include "otbImageFileReader.h"
#include "itkUnaryFunctorImageFilter.h"
#include "itkRecursiveGaussianImageFilter.h"
#include "otbWarpImageFilter.h"
#include "itkWarpImageFilter.h"
#include "itkMeanReciprocalSquareDifferenceImageToImageMetric.h"
// Software Guide : BeginCodeSnippet
......@@ -255,7 +255,7 @@ int main(int argc, char** argv)
dfWriter->SetFileName(argv[4]);
dfWriter->Update();
typedef otb::WarpImageFilter<InputImageType, InputImageType,
typedef itk::WarpImageFilter<InputImageType, InputImageType,
DisplacementFieldType> WarperType;
WarperType::Pointer warper = WarperType::New();
......
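The move from otb::WarpImageFilter to itk::WarpImageFilter does not change how the warper is wired up afterwards. A minimal sketch of the typical usage is given below; the movingReader, fixedReader and registrationFilter names are illustrative assumptions, not objects from the example above.

// Hedged sketch: typical wiring of the WarperType defined above.
WarperType::Pointer warper = WarperType::New();
warper->SetInput(movingReader->GetOutput());                     // image to resample
warper->SetDisplacementField(registrationFilter->GetOutput());   // estimated displacement field
warper->SetOutputSpacing(fixedReader->GetOutput()->GetSpacing());
warper->SetOutputOrigin(fixedReader->GetOutput()->GetOrigin());
warper->SetEdgePaddingValue(0);                                  // value used outside the input domain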
......@@ -42,7 +42,7 @@
// Software Guide : BeginCodeSnippet
#include "otbNCCRegistrationFilter.h"
#include "itkRecursiveGaussianImageFilter.h"
#include "otbWarpImageFilter.h"
#include "itkWarpImageFilter.h"
// Software Guide : EndCodeSnippet
#include "otbImageOfVectorsToMonoChannelExtractROI.h"
......@@ -211,7 +211,7 @@ int main(int argc, char** argv)
dfWriter->SetFileName(argv[4]);
dfWriter->Update();
typedef otb::WarpImageFilter<MovingImageType, MovingImageType,
typedef itk::WarpImageFilter<MovingImageType, MovingImageType,
DisplacementFieldType> WarperType;
WarperType::Pointer warper = WarperType::New();
......
......@@ -47,7 +47,7 @@
#include "itkWindowedSincInterpolateImageFunction.h"
#include "itkGradientDescentOptimizer.h"
#include "otbBSplinesInterpolateDisplacementFieldGenerator.h"
#include "otbWarpImageFilter.h"
#include "itkWarpImageFilter.h"
// Software Guide : EndCodeSnippet
#include "otbImageFileReader.h"
......@@ -414,7 +414,7 @@ int main(int argc, char* argv[])
// Software Guide : BeginCodeSnippet
typedef otb::WarpImageFilter<ImageType, ImageType,
typedef itk::WarpImageFilter<ImageType, ImageType,
DisplacementFieldType> ImageWarperType;
// Software Guide : EndCodeSnippet
......
......@@ -35,7 +35,7 @@
// Software Guide : EndLatex
// Software Guide : BeginCodeSnippet
#include "otbAssymmetricFusionOfLineDetectorImageFilter.h"
#include "otbAsymmetricFusionOfLineDetectorImageFilter.h"
// Software Guide : EndCodeSnippet
#include "otbImage.h"
......@@ -85,7 +85,7 @@ int main(int argc, char * argv[])
// Software Guide : EndLatex
// Software Guide : BeginCodeSnippet
typedef otb::AssymmetricFusionOfLineDetectorImageFilter<InternalImageType,
typedef otb::AsymmetricFusionOfLineDetectorImageFilter<InternalImageType,
InternalImageType>
FilterType;
// Software Guide : EndCodeSnippet
......
......@@ -33,23 +33,8 @@ add_executable(SOMExample SOMExample.cxx)
target_link_libraries(SOMExample ${OTB_LIBRARIES})
if(OTBLibSVM_LOADED)
add_executable(SVMImageClassificationExample SVMImageClassificationExample.cxx)
target_link_libraries(SVMImageClassificationExample ${OTB_LIBRARIES})
add_executable(SVMImageEstimatorClassificationMultiExample SVMImageEstimatorClassificationMultiExample.cxx)
target_link_libraries(SVMImageEstimatorClassificationMultiExample ${OTB_LIBRARIES})
add_executable(SVMImageModelEstimatorExample SVMImageModelEstimatorExample.cxx)
target_link_libraries(SVMImageModelEstimatorExample ${OTB_LIBRARIES})
add_executable(SVMPointSetClassificationExample SVMPointSetClassificationExample.cxx)
target_link_libraries(SVMPointSetClassificationExample ${OTB_LIBRARIES})
add_executable(SVMPointSetExample SVMPointSetExample.cxx)
target_link_libraries(SVMPointSetExample ${OTB_LIBRARIES})
add_executable(SVMPointSetModelEstimatorExample SVMPointSetModelEstimatorExample.cxx)
target_link_libraries(SVMPointSetModelEstimatorExample ${OTB_LIBRARIES})
endif()
if(OTBOpenCV_LOADED)
......
/*
* Copyright (C) 2005-2017 Centre National d'Etudes Spatiales (CNES)
*
* This file is part of Orfeo Toolbox
*
* https://www.orfeo-toolbox.org/
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <fstream>
#include "otbImageFileReader.h"
#include "otbImageFileWriter.h"
#include "itkUnaryFunctorImageFilter.h"
#include "itkRescaleIntensityImageFilter.h"
#include "otbImage.h"
// Software Guide : BeginCommandLineArgs
// INPUTS: {ROI_QB_MUL_1.png}
// OUTPUTS: {ROI_QB_MUL_1_SVN_CLASS.png}
// ${OTB_DATA_ROOT}/Examples/svm_image_model.svm
// Software Guide : EndCommandLineArgs
// Software Guide : BeginLatex
// This example illustrates the use of the
// \doxygen{otb}{SVMClassifier} class for performing SVM
// classification on images.
// In this example, we will use an SVM model estimated in the example
// of section \ref{sec:LearningWithImages}
// to separate water from non-water pixels using only the RGB
// values. The images used for this example are shown in
// figure~\ref{fig:SVMROIS}.
// The first thing to do is include the header file for the
// class. Since the \doxygen{otb}{SVMClassifier} takes
// \doxygen{itk}{ListSample}s as input, the class
// \doxygen{itk}{PointSetToListAdaptor} is needed.
//
//
// Software Guide : EndLatex
#include "itkImageToListSampleAdaptor.h"
// Software Guide : BeginCodeSnippet
#include "otbSVMClassifier.h"
// Software Guide : EndCodeSnippet
int main(int argc, char* argv[])
{
if (argc != 4)
{
std::cout << "Usage : " << argv[0] << " inputImage outputImage modelFile "
<< std::endl;
return EXIT_FAILURE;
}
const char * imageFilename = argv[1];
const char * modelFilename = argv[3];
const char * outputFilename = argv[2];
// Software Guide : BeginLatex
//
// In the framework of supervised learning and classification, we will
// always use feature vectors for the characterization of the
// classes. On the other hand, the class labels are scalar
// values. Here, we start by defining the type of the features as the
// \code{PixelType}, which will be used to define the feature
// \code{VectorType}. We also declare the type for the labels.
//
// Software Guide : EndLatex
// Software Guide : BeginCodeSnippet
typedef double PixelType;
typedef int LabelPixelType;
// Software Guide : EndCodeSnippet
const unsigned int Dimension = 2;
// Software Guide : BeginLatex
//
// We can now proceed to define the image type used for storing the
// features. We also define the reader.
//
// Software Guide : EndLatex
// Software Guide : BeginCodeSnippet
typedef otb::Image<itk::FixedArray<PixelType, 3>,
Dimension> InputImageType;
typedef otb::ImageFileReader<InputImageType> ReaderType;
// Software Guide : EndCodeSnippet
// Software Guide : BeginLatex
//
// We can now read the image by calling the \code{Update} method of the reader.
//
// Software Guide : EndLatex
// Software Guide : BeginCodeSnippet
ReaderType::Pointer reader = ReaderType::New();
reader->SetFileName(imageFilename);
reader->Update();
// Software Guide : EndCodeSnippet
// Software Guide : BeginLatex
//
// The image now has to be transformed into a sample that
// is compatible with the classification framework. We will use an
// \doxygen{itk}{Statistics::ImageToListSampleAdaptor} for this
// task. This class is templated over the image type used for
// storing the measures.
//
// Software Guide : EndLatex
// Software Guide : BeginCodeSnippet
typedef itk::Statistics::ImageToListSampleAdaptor<InputImageType> SampleType;
SampleType::Pointer sample = SampleType::New();
// Software Guide : EndCodeSnippet
// Software Guide : BeginLatex
//
// After instantiation, we can set the image as the input of our
// sample adaptor.
//
// Software Guide : EndLatex
// Software Guide : BeginCodeSnippet
sample->SetImage(reader->GetOutput());
// Software Guide : EndCodeSnippet
// Software Guide : BeginLatex
//
// Now, we need to declare the SVM model which is to be used by the
// classifier. The SVM model is templated over the type of value used
// for the measures and the type of pixel used for the labels.
//
// Software Guide : EndLatex
// Software Guide : BeginCodeSnippet
typedef otb::SVMModel<PixelType, LabelPixelType> ModelType;
ModelType::Pointer model = ModelType::New();
// Software Guide : EndCodeSnippet
// Software Guide : BeginLatex
//
// After instantiation, we can load a model saved to a file (see
// section \ref{sec:LearningWithImages} for an example of model
// estimation and storage to a file).
//
// Software Guide : EndLatex
// Software Guide : BeginCodeSnippet
model->LoadModel(modelFilename);
// Software Guide : EndCodeSnippet
// Software Guide : BeginLatex
//
// We now have all the elements needed to create a classifier. The classifier
// is templated over the sample type (the type of the data to be
// classified) and the label type (the type of the output of the classifier).
//
// Software Guide : EndLatex
// Software Guide : BeginCodeSnippet
typedef otb::SVMClassifier<SampleType, LabelPixelType> ClassifierType;
ClassifierType::Pointer classifier = ClassifierType::New();
// Software Guide : EndCodeSnippet
// Software Guide : BeginLatex
//
// We set the classifier parameters: the number of classes, the SVM model
// and the sample data. Then we trigger the classification process by
// calling the \code{Update} method.
//
// Software Guide : EndLatex
// Software Guide : BeginCodeSnippet
int numberOfClasses = model->GetNumberOfClasses();
classifier->SetNumberOfClasses(numberOfClasses);
classifier->SetModel(model);
classifier->SetInput(sample.GetPointer());
classifier->Update();
// Software Guide : EndCodeSnippet
// Software Guide : BeginLatex
//
// After the classification step, we usually want to retrieve the
// results. The classifier produces its output in the form of a sample
// list, which supports the classical STL iterators. Therefore, we
// will create an output image and fill it with the results of the
// classification. The pixel type of the output image is the same as
// the one used for the labels.
//
// Software Guide : EndLatex
// Software Guide : BeginCodeSnippet
typedef ClassifierType::ClassLabelType OutputPixelType;
typedef otb::Image<OutputPixelType, Dimension> OutputImageType;
OutputImageType::Pointer outputImage = OutputImageType::New();
// Software Guide : EndCodeSnippet
// Software Guide : BeginLatex
//
// We allocate the memory for the output image using the information
// from the input image.
//
// Software Guide : EndLatex
// Software Guide : BeginCodeSnippet
typedef itk::Index<Dimension> myIndexType;
typedef itk::Size<Dimension> mySizeType;
typedef itk::ImageRegion<Dimension> myRegionType;
mySizeType size;
size[0] = reader->GetOutput()->GetRequestedRegion().GetSize()[0];
size[1] = reader->GetOutput()->GetRequestedRegion().GetSize()[1];
myIndexType start;
start[0] = 0;
start[1] = 0;
myRegionType region;
region.SetIndex(start);
region.SetSize(size);
outputImage->SetRegions(region);
outputImage->Allocate();
// Software Guide : EndCodeSnippet
// Software Guide : BeginLatex
//
// We can now declare the iterators on the list that we get at the
// output of the classifier as well as the iterator to fill the output image.
//
// Software Guide : EndLatex
// Software Guide : BeginCodeSnippet
ClassifierType::OutputType* membershipSample =
classifier->GetOutput();
ClassifierType::OutputType::ConstIterator m_iter =
membershipSample->Begin();
ClassifierType::OutputType::ConstIterator m_last =
membershipSample->End();
typedef itk::ImageRegionIterator<OutputImageType> OutputIteratorType;
OutputIteratorType outIt(outputImage,
outputImage->GetBufferedRegion());
outIt.GoToBegin();
// Software Guide : EndCodeSnippet
// Software Guide : BeginLatex
//
// We will iterate through the list, get the labels and assign pixel
// values to the output image.
//
// Software Guide : EndLatex
// Software Guide : BeginCodeSnippet
while (m_iter != m_last && !outIt.IsAtEnd())
{
outIt.Set(m_iter.GetClassLabel());
++m_iter;
++outIt;
}
// Software Guide : EndCodeSnippet
// Software Guide : BeginLatex
//
// For visualization purposes only, we rescale the image of
// classes before saving it to a file. We will use the
// \doxygen{itk}{RescaleIntensityImageFilter} for this purpose.
//
// Software Guide : EndLatex
// Software Guide : BeginCodeSnippet
typedef otb::Image<unsigned char, Dimension> FileImageType;
typedef itk::RescaleIntensityImageFilter<OutputImageType,
FileImageType> RescalerType;
RescalerType::Pointer rescaler = RescalerType::New();
rescaler->SetOutputMinimum(itk::NumericTraits<unsigned char>::min());
rescaler->SetOutputMaximum(itk::NumericTraits<unsigned char>::max());
rescaler->SetInput(outputImage);
// Software Guide : EndCodeSnippet
// Software Guide : BeginLatex
//
// We can now create an image file writer and save the image.
//
// Software Guide : EndLatex
// Software Guide : BeginCodeSnippet
typedef otb::ImageFileWriter<FileImageType> WriterType;
WriterType::Pointer writer = WriterType::New();
writer->SetFileName(outputFilename);
writer->SetInput(rescaler->GetOutput());
writer->Update();
// Software Guide : EndCodeSnippet
// Software Guide : BeginLatex
// Figure \ref{fig:SVMCLASS} shows the result of the SVM classification.
// \begin{figure}
// \center
// \includegraphics[width=0.45\textwidth]{ROI_QB_MUL_1.eps}
// \includegraphics[width=0.45\textwidth]{ROI_QB_MUL_1_SVN_CLASS.eps}
// \itkcaption[SVM Image Classification]{Result of the SVM
// classification. Left: RGB image. Right: image of classes.}
// \label{fig:SVMCLASS}
// \end{figure}
// Software Guide : EndLatex
return EXIT_SUCCESS;
}
......@@ -52,15 +52,16 @@
#include <iostream>
// Software Guide : BeginCodeSnippet
#include "otbSVMImageModelEstimator.h"
#include "itkImageToListSampleAdaptor.h"
#include "otbSVMClassifier.h"
#include "otbLibSVMMachineLearningModel.h"
#include "itkImageToListSampleFilter.h"
#include "otbImageClassificationFilter.h"
// Software Guide : EndCodeSnippet
#include "otbImageFileWriter.h"
#include "itkUnaryFunctorImageFilter.h"
#include "itkScalarToRGBPixelFunctor.h"
#include "itkBinaryThresholdImageFilter.h"
#include "otbImageFileReader.h"
......@@ -92,13 +93,13 @@ int main(int itkNotUsed(argc), char *argv[])
// Software Guide : BeginLatex
//
// The \doxygen{otb}{SVMImageModelEstimator} class is templated over
// the input (features) and the training (labels) images.
// The \doxygen{otb}{LibSVMMachineLearningModel} class is templated over
// the input (features) and the training (labels) values.
//
// Software Guide : EndLatex
// Software Guide : BeginCodeSnippet
typedef otb::SVMImageModelEstimator<InputImageType,
TrainingImageType> EstimatorType;
typedef otb::LibSVMMachineLearningModel<InputPixelType,
InputPixelType> ModelType;
// Software Guide : EndCodeSnippet
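// --- Hedged sketch (not part of the original example) --------------------
// Typical training sequence with the new model class. sampleList and
// labelList are assumed ListSample objects built from the input and
// training images (e.g. with itk::Statistics::ImageToListSampleFilter,
// included above).
ModelType::Pointer model = ModelType::New();
model->SetInputListSample(sampleList);   // feature vectors
model->SetTargetListSample(labelList);   // corresponding training labels
model->Train();                          // estimate the SVM model
model->Save("svm_image_model.svm");      // file name is illustrative
// --------------------------------------------------------------------------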
......@@ -128,107 +129,67 @@ int main(int itkNotUsed(argc), char *argv[])
inputReader->SetFileName(inputImageFileName);
trainingReader->SetFileName(trainingImageFileName);
inputReader->Update();
trainingReader->Update();
//~ inputReader->Update();
//~ trainingReader->Update();
// Software Guide : EndCodeSnippet
// Software Guide : BeginLatex
//
// We can now instantiate the model estimator and set its parameters.