Skip to content
Snippets Groups Projects
Commit 6ff9a9a0 authored by OTB Bot's avatar OTB Bot
Browse files

STYLE

parent 09e27b7e
No related branches found
No related tags found
No related merge requests found
......@@ -54,7 +54,7 @@ public:
typedef otb::Image<DeformationType> DeformationFieldType;
typedef itk::VectorCastImageFilter
<FloatVectorImageType,
<FloatVectorImageType,
DeformationFieldType> DeformationFieldCastFilterType;
......@@ -64,7 +64,7 @@ public:
DeformationFieldType> WarpFilterType;
typedef otb::MultiToMonoChannelExtractROI
<FloatVectorImageType::InternalPixelType,
FloatVectorImageType::InternalPixelType>
FloatVectorImageType::InternalPixelType>
ExtractFilterType;
typedef otb::BandMathImageFilter
......
......@@ -63,7 +63,7 @@ public:
typename TInput::ValueType max = 0.0;
unsigned int index=0;
for (unsigned int i=0 ; i<m_ScoreColors.size() ; i++)
for (unsigned int i=0; i<m_ScoreColors.size(); i++)
{
if (A[i] > max)
{
......@@ -123,8 +123,8 @@ public:
typedef otb::LabelMapToAttributeImageFilter
<LabelMapType, FloatVectorImageType> AttributeImageFilterType;
typedef otb::UnaryFunctorImageFilter
<FloatVectorImageType,
Int16VectorImageType,
<FloatVectorImageType,
Int16VectorImageType,
Functor::HooverColorMapping
<FloatPixelType, Int16PixelType> > HooverColorFilterType;
......
......@@ -32,30 +32,30 @@ namespace otb
* the displacement related to the elevation between the two images
* of the pair always occurs along lines that are called epipolar
* lines.
*
*
* In the case of conic acquisition, these lines are perfectly
* parallel by definition, while in push-broom geometry, these lines
* might not be exactly parallel due to perturbations during
* acquisition, especially when considering a large field of view.
*
*
* The purpose of stereo-rectification is to warp both images of the
* pairs so that the displacement related to the elevation only
* occurs in the horizontal direction (i.e. epipolar lines are
* horizontal). This operation is useful for mainly two reasons: it
* allows to search for disparities in one direction only, and it
* allows to derive anaglyphs for 3D viewing with 3D glasses.
*
*
* This filter allows you to compute the deformation fields up to the
* sensor model precision needed to warp a pair of stereo images into
* epipolar geometry. Warping can be done using the
* otb::StreamingWarpImageFilter.
*
*
* Since lines might not be perfectly regular, the algorithm
* performed by this filter uses the otb::GenericRSTransform
* capabilities to compute the local epipolar lines, and iteratively
* build a resampling grid by propagating along these locally
* estimated lines.
*
*
* Epipolar images will have a null origin and a size as given by the
* GetRectifiedImageSize() method. The deformation fields and size
* are derived to produce epipolar images covering the whole extent
......@@ -68,7 +68,7 @@ namespace otb
* displacement). The SetElevationOffset() method allows to tune the
* elevation offset which is only used for local epipolar lines
* estimation. The default value of 50 meters should do.
*
*
* Additionally, the SetScale() method allows to derive deformation
* fields and images size at a coarser (scale > 1) or finer (scale <
* 1) resolution. The SetGridStep() allows to tune the step of the
......@@ -195,7 +195,7 @@ private:
double m_Scale;
/** Controls the step of the resampling grid (in pixels). A finer
* step will lead to more memory consumption.
* step will lead to more memory consumption.
*/
double m_GridStep;
......
......@@ -53,7 +53,7 @@ StereorectificationDeformationFieldSource<TInputImage, TOutputImage>
template <class TInputImage, class TOutputImage>
const typename StereorectificationDeformationFieldSource<TInputImage, TOutputImage>
::OutputImageType *
::OutputImageType *
StereorectificationDeformationFieldSource<TInputImage, TOutputImage>
::GetLeftDeformationFieldOutput() const
{
......@@ -66,7 +66,7 @@ StereorectificationDeformationFieldSource<TInputImage, TOutputImage>
template <class TInputImage, class TOutputImage>
typename StereorectificationDeformationFieldSource<TInputImage, TOutputImage>
::OutputImageType *
::OutputImageType *
StereorectificationDeformationFieldSource<TInputImage, TOutputImage>
::GetLeftDeformationFieldOutput()
{
......@@ -79,7 +79,7 @@ StereorectificationDeformationFieldSource<TInputImage, TOutputImage>
template <class TInputImage, class TOutputImage>
const typename StereorectificationDeformationFieldSource<TInputImage, TOutputImage>
::OutputImageType *
::OutputImageType *
StereorectificationDeformationFieldSource<TInputImage, TOutputImage>
::GetRightDeformationFieldOutput() const
{
......@@ -92,7 +92,7 @@ StereorectificationDeformationFieldSource<TInputImage, TOutputImage>
template <class TInputImage, class TOutputImage>
typename StereorectificationDeformationFieldSource<TInputImage, TOutputImage>
::OutputImageType *
::OutputImageType *
StereorectificationDeformationFieldSource<TInputImage, TOutputImage>
::GetRightDeformationFieldOutput()
{
......@@ -169,7 +169,7 @@ StereorectificationDeformationFieldSource<TInputImage, TOutputImage>
// (do not forget that the y axis is flip in our case)
// TODO: Add some division by zero check here (but this would only
// happen in case the images are almost epipolar already)
double a = (leftEpiLineEnd[1] - leftEpiLineStart[1])
double a = (leftEpiLineEnd[1] - leftEpiLineStart[1])
/ (leftEpiLineEnd[0] - leftEpiLineStart[0]);
double b = leftEpiLineStart[1] - a * leftEpiLineStart[0];
......@@ -194,9 +194,9 @@ StereorectificationDeformationFieldSource<TInputImage, TOutputImage>
double ury = vx * m_LeftImage->GetLargestPossibleRegion().GetSize()[0];
double llx = uy * m_LeftImage->GetLargestPossibleRegion().GetSize()[1];
double lly = vy * m_LeftImage->GetLargestPossibleRegion().GetSize()[1];
double lrx = ux * m_LeftImage->GetLargestPossibleRegion().GetSize()[0]
double lrx = ux * m_LeftImage->GetLargestPossibleRegion().GetSize()[0]
+ uy * m_LeftImage->GetLargestPossibleRegion().GetSize()[1];
double lry = vx * m_LeftImage->GetLargestPossibleRegion().GetSize()[0]
double lry = vx * m_LeftImage->GetLargestPossibleRegion().GetSize()[0]
+ vy * m_LeftImage->GetLargestPossibleRegion().GetSize()[1];
// Bounding box (this time we do not omit ulx)
......@@ -342,7 +342,7 @@ StereorectificationDeformationFieldSource<TInputImage, TOutputImage>
endLine1 = m_RightToLeftTransform->TransformPoint(epiPoint2);
// Estimate the local baseline ratio
double localBaselineRatio = 2*m_ElevationOffset
double localBaselineRatio = 2*m_ElevationOffset
/ vcl_sqrt((endLine1[0] - startLine1[0])
* (endLine1[0] - startLine1[0])
+ (endLine1[1] - startLine1[1])
......@@ -393,7 +393,7 @@ StereorectificationDeformationFieldSource<TInputImage, TOutputImage>
// And we compute the equivalent displacement in right image
endLine2 = m_LeftToRightTransform->TransformPoint(currentPoint1);
double iscale = vcl_sqrt((endLine2[0]-startLine2[0])*(endLine2[0]-startLine2[0])
double iscale = vcl_sqrt((endLine2[0]-startLine2[0])*(endLine2[0]-startLine2[0])
+
(endLine2[1]-startLine2[1])*(endLine2[1]-startLine2[1]));
......
......@@ -29,27 +29,27 @@
//
// The following example shows how to compare two segmentations, using Hoover
// metrics. For instance, it can be used to compare a segmentation produced
// by your algorithm against a partial ground truth segmentation. In this
// by your algorithm against a partial ground truth segmentation. In this
// example, the ground truth segmentation will be referred to by the letters GT
// whereas the machine segmentation will be referred to by MS.
//
// The estimation of Hoover metrics is done with two filters :
// The estimation of Hoover metrics is done with two filters :
// \doxygen{otb}{HooverMatrixFilter} and \doxygen{otb}{HooverInstanceFilter}.
// The first one produces a matrix containing the number of overlapping pixels
// between MS regions and GT regions. The second one classifies each region among
// four types (called Hoover instances):
// \begin{itemize}
// \item Correct detection : a region is matched with another one in the
// \item Correct detection : a region is matched with another one in the
// opposite segmentation, because they cover nearly the same area.
// \item Over-segmentation : a GT region is matched with a group of MS
// \item Over-segmentation : a GT region is matched with a group of MS
// regions because they cover nearly the same area.
// \item Under-segmentation : a MS region is matched with a group of GT
// regions because they cover nearly the same area.
// \item Missed detection (for GT regions) or Noise (for MS region) :
// \item Missed detection (for GT regions) or Noise (for MS region) :
// un-matched regions.
// \end{itemize}
// Note that a region can be tagged with two types. When the Hoover instance
// have been found, the instance filter computes overall scores for each
// have been found, the instance filter computes overall scores for each
// category : they are the Hoover metrics \footnote{see http://www.trop.mips.uha.fr/pdf/ORASIS-2009.pdf}.
//
// Software Guide : EndLatex
......@@ -80,11 +80,11 @@ int main(int argc, char* argv[])
// Software Guide : BeginLatex
// The filters \doxygen{otb}{HooverMatrixFilter} and \doxygen{otb}{HooverInstanceFilter}
// are designed to handle \doxygen{itk}{LabelMap} images. This is a useful data structure
// to represent labelled images. A label map is a set of label objects. Each label object
// stands for a region with a unique label value. The extent of the region is coded as a
// set of line segments, run-length encoded.
// to represent labelled images. A label map is a set of label objects. Each label object
// stands for a region with a unique label value. The extent of the region is coded as a
// set of line segments, run-length encoded.
// To be able to add information to these label objects, we use \doxygen{otb}{AttributesMapLabelObject}.
// Each region can store a set of attributes. In this case, Hoover instances and metrics
// Each region can store a set of attributes. In this case, Hoover instances and metrics
// will be stored in these attributes.
// Software Guide : EndLatex
......@@ -114,7 +114,7 @@ int main(int argc, char* argv[])
// Software Guide : BeginLatex
// The first step is to convert the images from the classical "pixel array" representation
// to label map: we use \doxygen{itk}{LabelImageToLabelMapFilter}. The background value sets
// the label value of regions considered as background: there is no label object for the
// the label value of regions considered as background: there is no label object for the
// background region.
// Software Guide : EndLatex
......@@ -142,11 +142,11 @@ int main(int argc, char* argv[])
// Software Guide : BeginLatex
// The instance filter computes the Hoover metrics for each region. These metrics
// are stored as attributes in each label object. The threshold parameter
// are stored as attributes in each label object. The threshold parameter
// corresponds to the overlapping ratio above which two regions can be matched.
// The extended attributes can be used if the user want to keep a trace of the
// associations between MS and GT regions : i.e. if a GT region has been matched
// as a correct detection, it will carry an attribute containing the label value
// associations between MS and GT regions : i.e. if a GT region has been matched
// as a correct detection, it will carry an attribute containing the label value
// of the associated MS region (the same principle goes for other types of instance).
// Software Guide : EndLatex
......@@ -181,7 +181,7 @@ int main(int argc, char* argv[])
// Software Guide : BeginLatex
// The output image contains for each GT region its correct detection score ("RC", band 1),
// its over-segmentation score ("RF", band 2), its under-segmentation score ("RA", band 3)
// its over-segmentation score ("RF", band 2), its under-segmentation score ("RA", band 3)
// and its missed detection score ("RM", band 4).
// Software Guide : EndLatex
......@@ -194,7 +194,7 @@ int main(int argc, char* argv[])
// Software Guide : EndCodeSnippet
// Software Guide : BeginLatex
// The Hoover scores are also computed for the whole segmentations. Here is some explanation about the score names :
// The Hoover scores are also computed for the whole segmentations. Here is some explanation about the score names :
// C = correct, F = fragmentation, A = aggregation, M = missed, N = noise.
// Software Guide : EndLatex
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment