diff --git a/Code/BasicFilters/otbLeeImageFilter.h b/Code/BasicFilters/otbLeeImageFilter.h index c8c05841a6aa118b4250c66992e1e2d1bdddd1d5..df49fc37ee3e1fe0a2e0494e3065adf0be8f84a4 100644 --- a/Code/BasicFilters/otbLeeImageFilter.h +++ b/Code/BasicFilters/otbLeeImageFilter.h @@ -102,9 +102,9 @@ protected: * As such, it provides a definition of ThreadedGenerateData() * * LEE filter: - * R = E[I] + b(I-E[I]) avec b = C²r / ( C²r + C²v ) - * Cv = 1 / sqrt(L) with L the number of look. - * Cr = sqrt(Var(I)) / E[I] avec Var(I) = E[I²] - E[I]² + * \f$ R = E[I] + b(I-E[I]) \f$ with \f$ b = C_r^2 / ( C_r^2 + C_v^2 ) \f$ + * \f$ C_v = 1 / \sqrt{L} \f$ with L the number of looks. + * \f$ C_r = \sqrt{Var(I)} / E[I] \f$ with \f$ Var(I) = E[I^2] - E[I]^2 \f$ * * \sa ImageToImageFilter::ThreadedGenerateData(), * ImageToImageFilter::GenerateData() */ diff --git a/Code/BasicFilters/otbMeanShiftImageFilter.h b/Code/BasicFilters/otbMeanShiftImageFilter.h index 648c8bb419693a252a7fd3299388ab863cc2543c..871fa927389a0ee0be139c848d647c3fa5837602 100644 --- a/Code/BasicFilters/otbMeanShiftImageFilter.h +++ b/Code/BasicFilters/otbMeanShiftImageFilter.h @@ -60,7 +60,8 @@ namespace otb * and color center. * * Mean shift can be used for edge-preserving smoothing, or for clustering. The GetOutput() method will allow you to get the smoothed image, whereas the - * GetClusteredOuptut() methods returns the clustered output. + * GetClusteredOutput() method returns the clustered output. GetLabeledClusteredOutput() returns a labeled clustered image, and GetClusterBoundariesOutput() + * returns an image of the cluster boundaries. * * The MinimumRegionSize parameter allows you to prune small clustered regions. * @@ -68,13 +69,13 @@ namespace otb * than the filtering one). * * Please note that if both parts are streamable, only the filtering part will ensure you to get the same results than without streaming. In the clustering results, you - * might find region split due to tiling. + * might find regions split due to tiling. Moreover, the labeled output will not give consistent results when streamed. The cluster boundaries output might still work, though. * - * This filter uses the Edison mean shift algorithm implementation. Please note that data whose precision is more that float are casted to float before processing. + * This filter uses the Edison mean shift algorithm implementation. Please note that data whose precision is higher than float is cast to float before processing. * * The Scale parameter allows you to stretch the data dynamic * - * For more information about mean shift techniques, one might consider reading the following article: + * For more information on mean shift techniques, one might consider reading the following articles: * * D. Comaniciu, P. Meer, "Mean Shift: A Robust Approach Toward Feature Space Analysis," IEEE Transactions on Pattern Analysis and Machine Intelligence, vol. 24, no. 5, pp. 603-619, May, 2002 * D. Comaniciu, P. Meer, "Robust analysis of feature spaces: color image segmentation," cvpr, p.
750, 1997 IEEE Computer Society Conference on Computer Vision and Pattern Recognition (CVPR'97), 1997 @@ -88,7 +89,7 @@ namespace otb * \ingroup Threaded */ - template <class TInputImage, class TOutputImage, class TBufferConverter = MeanShift::ScalarBufferConverter> + template <class TInputImage, class TOutputImage,class TLabeledOutput = otb::Image<unsigned short,2>, class TBufferConverter = MeanShift::ScalarBufferConverter> class ITK_EXPORT MeanShiftImageFilter : public itk::ImageToImageFilter<TInputImage,TOutputImage> { @@ -112,6 +113,9 @@ namespace otb typedef typename OutputImageType::Pointer OutputImagePointerType; typedef typename OutputImageType::PixelType OutputPixelType; typedef typename OutputImageType::RegionType RegionType; + typedef TLabeledOutput LabeledOutputType; + typedef typename LabeledOutputType::Pointer LabeledOutputPointerType; + typedef typename LabeledOutputType::PixelType LabelType; /** Setters / Getters */ itkSetMacro(SpatialRadius,unsigned int); @@ -127,16 +131,24 @@ namespace otb const OutputImageType * GetClusteredOutput() const; /** Return the output image direction */ OutputImageType * GetClusteredOutput(); + + const LabeledOutputType * GetLabeledClusteredOutput() const; + LabeledOutputType * GetLabeledClusteredOutput(); + + const LabeledOutputType * GetClusterBoundariesOutput() const; + + LabeledOutputType * GetClusterBoundariesOutput(); protected: /** This filters use a neighborhood around the pixel, so it needs to redfine the * input requested region */ virtual void GenerateInputRequestedRegion(); - /** Threaded generate data (handle the filtering part) */ virtual void ThreadedGenerateData(const RegionType& outputRegionForThread,int threadId); /** After threaded generate data (handle the clustering part) */ virtual void AfterThreadedGenerateData(); + /** Allocate the outputs (needs to be reimplemented since the outputs have different types) */ + virtual void AllocateOutputs(); /** Constructor */ MeanShiftImageFilter(); diff --git a/Code/BasicFilters/otbMeanShiftImageFilter.txx b/Code/BasicFilters/otbMeanShiftImageFilter.txx index 2fcf3e01977512cc108aeaa67338c288c68028f3..70159f9af98628678cdee79f418128b30b3853bf 100644 --- a/Code/BasicFilters/otbMeanShiftImageFilter.txx +++ b/Code/BasicFilters/otbMeanShiftImageFilter.txx @@ -22,14 +22,15 @@ PURPOSE. See the above copyright notices for more information.
#include "itkImageRegionConstIteratorWithIndex.h" #include "itkImageRegionIterator.h" +#include "itkImageRegionIteratorWithIndex.h" #include "otbMacro.h" #include "msImageProcessor.h" namespace otb { - template <class TInputImage,class TOutputImage, class TBufferConverter> - MeanShiftImageFilter<TInputImage,TOutputImage,TBufferConverter> + template <class TInputImage,class TOutputImage, class TLabeledOutput, class TBufferConverter> + MeanShiftImageFilter<TInputImage,TOutputImage,TLabeledOutput,TBufferConverter> ::MeanShiftImageFilter() { m_SpatialRadius = 3; @@ -37,13 +38,15 @@ namespace otb m_MinimumRegionSize = 10; m_Scale = 100000.; - this->SetNumberOfOutputs(2); + this->SetNumberOfOutputs(4); this->SetNthOutput(1,OutputImageType::New()); + this->SetNthOutput(2,LabeledOutputType::New()); + this->SetNthOutput(3,LabeledOutputType::New()); } - template <class TInputImage,class TOutputImage, class TBufferConverter> - const typename MeanShiftImageFilter<TInputImage,TOutputImage,TBufferConverter>::OutputImageType * - MeanShiftImageFilter<TInputImage,TOutputImage,TBufferConverter> + template <class TInputImage,class TOutputImage, class TLabeledOutput, class TBufferConverter> + const typename MeanShiftImageFilter<TInputImage,TOutputImage,TLabeledOutput,TBufferConverter>::OutputImageType * + MeanShiftImageFilter<TInputImage,TOutputImage,TLabeledOutput,TBufferConverter> ::GetClusteredOutput()const { if (this->GetNumberOfOutputs() < 2) @@ -53,9 +56,9 @@ namespace otb return static_cast<const OutputImageType * >(this->itk::ProcessObject::GetOutput(1)); } - template <class TInputImage,class TOutputImage, class TBufferConverter> - typename MeanShiftImageFilter<TInputImage,TOutputImage,TBufferConverter>::OutputImageType * - MeanShiftImageFilter<TInputImage,TOutputImage,TBufferConverter> + template <class TInputImage,class TOutputImage, class TLabeledOutput, class TBufferConverter> + typename MeanShiftImageFilter<TInputImage,TOutputImage,TLabeledOutput,TBufferConverter>::OutputImageType * + MeanShiftImageFilter<TInputImage,TOutputImage,TLabeledOutput,TBufferConverter> ::GetClusteredOutput() { if (this->GetNumberOfOutputs() < 2) @@ -66,10 +69,82 @@ namespace otb } + template <class TInputImage,class TOutputImage, class TLabeledOutput, class TBufferConverter> + const typename MeanShiftImageFilter<TInputImage,TOutputImage,TLabeledOutput,TBufferConverter>::LabeledOutputType * + MeanShiftImageFilter<TInputImage,TOutputImage,TLabeledOutput,TBufferConverter> + ::GetLabeledClusteredOutput()const + { + if (this->GetNumberOfOutputs() < 3) + { + return 0; + } + return static_cast<const LabeledOutputType * >(this->itk::ProcessObject::GetOutput(2)); + } + + template <class TInputImage,class TOutputImage, class TLabeledOutput, class TBufferConverter> + typename MeanShiftImageFilter<TInputImage,TOutputImage,TLabeledOutput,TBufferConverter>::LabeledOutputType * + MeanShiftImageFilter<TInputImage,TOutputImage,TLabeledOutput,TBufferConverter> + ::GetLabeledClusteredOutput() + { + if (this->GetNumberOfOutputs() < 3) + { + return 0; + } + return static_cast< LabeledOutputType * >(this->itk::ProcessObject::GetOutput(2)); + } + + + template <class TInputImage,class TOutputImage, class TLabeledOutput, class TBufferConverter> + const typename MeanShiftImageFilter<TInputImage,TOutputImage,TLabeledOutput,TBufferConverter>::LabeledOutputType * + MeanShiftImageFilter<TInputImage,TOutputImage,TLabeledOutput,TBufferConverter> + ::GetClusterBoundariesOutput()const + { + if (this->GetNumberOfOutputs() < 4) + { + return 0; + } + 
return static_cast<const LabeledOutputType * >(this->itk::ProcessObject::GetOutput(3)); + } + + template <class TInputImage,class TOutputImage, class TLabeledOutput, class TBufferConverter> + typename MeanShiftImageFilter<TInputImage,TOutputImage,TLabeledOutput,TBufferConverter>::LabeledOutputType * + MeanShiftImageFilter<TInputImage,TOutputImage,TLabeledOutput,TBufferConverter> + ::GetClusterBoundariesOutput() + { + if (this->GetNumberOfOutputs() < 4) + { + return 0; + } + return static_cast< LabeledOutputType * >(this->itk::ProcessObject::GetOutput(3)); + } + + + template <class TInputImage,class TOutputImage, class TLabeledOutput, class TBufferConverter> + void + MeanShiftImageFilter<TInputImage,TOutputImage,TLabeledOutput,TBufferConverter> + ::AllocateOutputs() + { + typename OutputImageType::Pointer outputPtr = this->GetOutput(); + typename OutputImageType::Pointer clusteredOutputPtr = this->GetClusteredOutput(); + typename LabeledOutputType::Pointer labeledClusteredOutputPtr = this->GetLabeledClusteredOutput(); + typename LabeledOutputType::Pointer clusterBoundariesOutputPtr = this->GetClusterBoundariesOutput(); + + outputPtr->SetBufferedRegion(outputPtr->GetRequestedRegion()); + outputPtr->Allocate(); + + clusteredOutputPtr->SetBufferedRegion(clusteredOutputPtr->GetRequestedRegion()); + clusteredOutputPtr->Allocate(); + + labeledClusteredOutputPtr->SetBufferedRegion(labeledClusteredOutputPtr->GetRequestedRegion()); + labeledClusteredOutputPtr->Allocate(); - template <class TInputImage,class TOutputImage, class TBufferConverter> + clusterBoundariesOutputPtr->SetBufferedRegion(clusterBoundariesOutputPtr->GetRequestedRegion()); + clusterBoundariesOutputPtr->Allocate(); + } + + template <class TInputImage,class TOutputImage, class TLabeledOutput, class TBufferConverter> void - MeanShiftImageFilter<TInputImage,TOutputImage,TBufferConverter> + MeanShiftImageFilter<TInputImage,TOutputImage,TLabeledOutput,TBufferConverter> ::GenerateInputRequestedRegion() { // call the superclass' implementation of this method @@ -116,9 +191,9 @@ namespace otb } } - template <class TInputImage,class TOutputImage, class TBufferConverter> + template <class TInputImage,class TOutputImage, class TLabeledOutput, class TBufferConverter> void - MeanShiftImageFilter<TInputImage,TOutputImage,TBufferConverter> + MeanShiftImageFilter<TInputImage,TOutputImage,TLabeledOutput,TBufferConverter> ::ThreadedGenerateData(const RegionType& outputRegionForThread, int threadId ) { // Input and output pointers @@ -206,16 +281,18 @@ namespace otb delete [] data; } - template <class TInputImage,class TOutputImage, class TBufferConverter> + template <class TInputImage,class TOutputImage, class TLabeledOutput, class TBufferConverter> void - MeanShiftImageFilter<TInputImage,TOutputImage,TBufferConverter> + MeanShiftImageFilter<TInputImage,TOutputImage,TLabeledOutput,TBufferConverter> ::AfterThreadedGenerateData() { double invScale = 1/m_Scale; typename OutputImageType::Pointer outputPtr = this->GetOutput(); typename OutputImageType::Pointer clusteredOutputPtr = this->GetClusteredOutput(); - + typename LabeledOutputType::Pointer labeledClusteredOutputPtr = this->GetLabeledClusteredOutput(); + typename LabeledOutputType::Pointer clusterBoudariesOutputPtr = this->GetClusterBoundariesOutput(); + RegionType outputRequestedRegion = outputPtr->GetRequestedRegion(); itk::ImageRegionIterator<OutputImageType> outputIt(outputPtr,outputRequestedRegion); @@ -268,13 +345,63 @@ namespace otb } delete [] data; - } + int * labels = NULL; + float * 
modes = NULL; + int * modesPointsCount = NULL; + + edisonProcessor.GetRegions(&labels,&modes,&modesPointsCount); + + if(edisonProcessor.ErrorStatus) + { + itkExceptionMacro(<<"Error while running edison!"); + } + + itk::ImageRegionIteratorWithIndex<LabeledOutputType> lcIt(labeledClusteredOutputPtr,labeledClusteredOutputPtr->GetRequestedRegion()); + + index = 0; + + labeledClusteredOutputPtr->FillBuffer(0); + for(lcIt.GoToBegin();!lcIt.IsAtEnd();++lcIt) + { + lcIt.Set(static_cast<LabelType>(labels[index])); + ++index; + } + + delete [] labels; + delete [] modes; + delete [] modesPointsCount; - template <class TInputImage,class TOutputImage, class TBufferConverter> + clusterBoudariesOutputPtr->FillBuffer(0); + + //define the boundaries + RegionList *regionList = edisonProcessor.GetBoundaries(); + int *regionIndeces; + int numRegions = regionList->GetNumRegions(); + int numBoundaryPixels = 0; + + typename LabeledOutputType::IndexType boundIndex; + + // TODO: Here it would be possible to extract the polygon edges for each region + for(LabelType label = 1; label <= numRegions;++label) + { + regionIndeces = regionList->GetRegionIndeces(label); + for(int i = 0; i < regionList->GetRegionCount(label);++i) + { + boundIndex[0]= regionIndeces[i] % clusterBoudariesOutputPtr->GetRequestedRegion().GetSize()[0]; + boundIndex[1]= regionIndeces[i] / clusterBoudariesOutputPtr->GetRequestedRegion().GetSize()[0]; + if(clusterBoudariesOutputPtr->GetBufferedRegion().IsInside(boundIndex)) + { + clusterBoudariesOutputPtr->SetPixel(boundIndex,1); + } + } + } + } + + template <class TInputImage,class TOutputImage, class TLabeledOutput, class TBufferConverter> void - MeanShiftImageFilter<TInputImage,TOutputImage,TBufferConverter> + MeanShiftImageFilter<TInputImage,TOutputImage,TLabeledOutput,TBufferConverter> ::PrintSelf(std::ostream& os, itk::Indent indent) const { Superclass::PrintSelf(os,indent); diff --git a/Code/BasicFilters/otbMeanShiftVectorImageFilter.h b/Code/BasicFilters/otbMeanShiftVectorImageFilter.h index 51d5f2ed15d15760109ebe665811f7a60c03eb9f..75cfe8ca8cdc9bb01894dd3ac1e3afc7d8192614 100644 --- a/Code/BasicFilters/otbMeanShiftVectorImageFilter.h +++ b/Code/BasicFilters/otbMeanShiftVectorImageFilter.h @@ -66,16 +66,16 @@ namespace otb * \ingroup Streamed * \ingroup Threaded */ - template <class TInputImage, class TOutputImage> + template <class TInputImage, class TOutputImage, class TLabeledOutput = otb::Image<unsigned short,2> > class MeanShiftVectorImageFilter - : public MeanShiftImageFilter<TInputImage,TOutputImage,MeanShift::VectorBufferConverter> + : public MeanShiftImageFilter<TInputImage,TOutputImage,TLabeledOutput,MeanShift::VectorBufferConverter> { - public: + public: /** Standard class typedef */ typedef MeanShiftVectorImageFilter Self; - typedef MeanShiftImageFilter<TInputImage,TOutputImage, - MeanShift::VectorBufferConverter> Superclass; + typedef MeanShiftImageFilter<TInputImage,TOutputImage,TLabeledOutput, + MeanShift::VectorBufferConverter> Superclass; typedef itk::SmartPointer<Self> Pointer; typedef itk::SmartPointer<const Self> ConstPointer; @@ -83,7 +83,7 @@ namespace otb itkTypeMacro(MeanShiftVectorImageFilter,MeanShiftImageFilter); itkNewMacro(Self); - protected: + protected: /** Constructor */ MeanShiftVectorImageFilter(){}; /** destructor */ @@ -91,9 +91,9 @@ namespace otb /**PrintSelf method */ virtual void PrintSelf(std::ostream& os, itk::Indent indent) const - { - Superclass::PrintSelf(os,indent); - } + { + Superclass::PrintSelf(os,indent); + } private: 
MeanShiftVectorImageFilter(const Self&); //purposely not implemented diff --git a/Testing/Code/BasicFilters/CMakeLists.txt b/Testing/Code/BasicFilters/CMakeLists.txt index 2bc45017b202df3c49ae7205cd4ddf3b280feb68..95ed1299ab48f781623f9097f0c809769efa6b1f 100644 --- a/Testing/Code/BasicFilters/CMakeLists.txt +++ b/Testing/Code/BasicFilters/CMakeLists.txt @@ -881,15 +881,21 @@ ADD_TEST(bfTuMeanShiftImageFilterNew ${BASICFILTERS_TESTS9} otbMeanShiftImageFilterNew ) ADD_TEST(bfTvMeanShiftImageFilter ${BASICFILTERS_TESTS9} ---compare-n-images ${EPSILON} 2 +--compare-n-images ${EPSILON} 4 ${BASELINE}/bfMeanShiftImageFilterOutput.tif ${TEMP}/bfMeanShiftImageFilterOutput.tif ${BASELINE}/bfMeanShiftImageFilterClusteredOutput.tif ${TEMP}/bfMeanShiftImageFilterClusteredOutput.tif + ${BASELINE}/bfMeanShiftImageFilterLabeledClusteredOutput.tif + ${TEMP}/bfMeanShiftImageFilterLabeledClusteredOutput.tif + ${BASELINE}/bfMeanShiftImageFilterClusterBoundariesOutput.tif + ${TEMP}/bfMeanShiftImageFilterClusterBoundariesOutput.tif otbMeanShiftImageFilter ${INPUTDATA}/QB_Suburb.png ${TEMP}/bfMeanShiftImageFilterOutput.tif ${TEMP}/bfMeanShiftImageFilterClusteredOutput.tif + ${TEMP}/bfMeanShiftImageFilterLabeledClusteredOutput.tif + ${TEMP}/bfMeanShiftImageFilterClusterBoundariesOutput.tif 16 16 10 1.0 ) @@ -897,15 +903,21 @@ ADD_TEST(bfTuMeanShiftVectorImageFilterNew ${BASICFILTERS_TESTS9} otbMeanShiftVectorImageFilterNew ) ADD_TEST(bfTvMeanShiftVectorImageFilter ${BASICFILTERS_TESTS9} ---compare-n-images ${EPSILON} 2 +--compare-n-images ${EPSILON} 4 ${BASELINE}/bfMeanShiftVectorImageFilterOutput.tif ${TEMP}/bfMeanShiftVectorImageFilterOutput.tif ${BASELINE}/bfMeanShiftVectorImageFilterClusteredOutput.tif ${TEMP}/bfMeanShiftVectorImageFilterClusteredOutput.tif + ${BASELINE}/bfMeanShiftVectorImageFilterLabeledClusteredOutput.tif + ${TEMP}/bfMeanShiftVectorImageFilterLabeledClusteredOutput.tif + ${BASELINE}/bfMeanShiftVectorImageFilterClusterBoundariesOutput.tif + ${TEMP}/bfMeanShiftVectorImageFilterClusterBoundariesOutput.tif otbMeanShiftVectorImageFilter ${INPUTDATA}/qb_RoadExtract2sub200x200.tif ${TEMP}/bfMeanShiftVectorImageFilterOutput.tif ${TEMP}/bfMeanShiftVectorImageFilterClusteredOutput.tif + ${TEMP}/bfMeanShiftVectorImageFilterLabeledClusteredOutput.tif + ${TEMP}/bfMeanShiftVectorImageFilterClusterBoundariesOutput.tif 16 16 10 1.0 ) diff --git a/Testing/Code/BasicFilters/otbMeanShiftImageFilter.cxx b/Testing/Code/BasicFilters/otbMeanShiftImageFilter.cxx index 538f0d0241f1e29d81a8bab88a5129f8ecd3765b..2d68b77e57866aa70e7bf840e63035a5fc93856c 100644 --- a/Testing/Code/BasicFilters/otbMeanShiftImageFilter.cxx +++ b/Testing/Code/BasicFilters/otbMeanShiftImageFilter.cxx @@ -23,19 +23,21 @@ int otbMeanShiftImageFilter(int argc, char * argv[]) { - if(argc != 8) + if(argc != 10) { - std::cerr<<"Usage: "<<argv[0]<<" infname filteredfname clusteredfname spatialRadius rangeRadius minregionsize scale"<<std::endl; + std::cerr<<"Usage: "<<argv[0]<<" infname filteredfname clusteredfname labeledclusteredfname clusterboundariesfname spatialRadius rangeRadius minregionsize scale"<<std::endl; return EXIT_FAILURE; } - const char * infname = argv[1]; - const char * filteredfname = argv[2]; - const char * clusteredfname = argv[3]; - const unsigned int spatialRadius = atoi(argv[4]); - const double rangeRadius = atof(argv[5]); - const unsigned int minRegionSize = atoi(argv[6]); - const double scale = atoi(argv[7]); + const char * infname = argv[1]; + const char * filteredfname = argv[2]; + const char * clusteredfname = argv[3]; 
+ const char * labeledclusteredfname = argv[4]; + const char * clusterboundariesfname = argv[5]; + const unsigned int spatialRadius = atoi(argv[6]); + const double rangeRadius = atof(argv[7]); + const unsigned int minRegionSize = atoi(argv[8]); + const double scale = atoi(argv[9]); const unsigned int Dimension = 2; typedef float PixelType; @@ -43,16 +45,23 @@ int otbMeanShiftImageFilter(int argc, char * argv[]) typedef otb::ImageFileReader<ImageType> ReaderType; typedef otb::StreamingImageFileWriter<ImageType> WriterType; typedef otb::MeanShiftImageFilter<ImageType,ImageType> FilterType; + typedef FilterType::LabeledOutputType LabeledImageType; + typedef otb::StreamingImageFileWriter<LabeledImageType> LabeledWriterType; // Instantiating object FilterType::Pointer filter = FilterType::New(); ReaderType::Pointer reader = ReaderType::New(); WriterType::Pointer writer1 = WriterType::New(); WriterType::Pointer writer2 = WriterType::New(); + LabeledWriterType::Pointer writer3 = LabeledWriterType::New(); + LabeledWriterType::Pointer writer4 = LabeledWriterType::New(); + reader->SetFileName(infname); writer1->SetFileName(filteredfname); writer2->SetFileName(clusteredfname); + writer3->SetFileName(labeledclusteredfname); + writer4->SetFileName(clusterboundariesfname); filter->SetSpatialRadius(spatialRadius); filter->SetRangeRadius(rangeRadius); @@ -62,9 +71,14 @@ int otbMeanShiftImageFilter(int argc, char * argv[]) filter->SetInput(reader->GetOutput()); writer1->SetInput(filter->GetOutput()); writer2->SetInput(filter->GetClusteredOutput()); + writer3->SetInput(filter->GetLabeledClusteredOutput()); + writer4->SetInput(filter->GetClusterBoundariesOutput()); + writer1->Update(); writer2->Update(); + writer3->Update(); + writer4->Update(); return EXIT_SUCCESS; } diff --git a/Testing/Code/BasicFilters/otbMeanShiftVectorImageFilter.cxx b/Testing/Code/BasicFilters/otbMeanShiftVectorImageFilter.cxx index fa5f5cb67fb665ad02a32c3f6ce2dcd412bbd35e..6f79ef3c2e64c63457c1c4d1f26e0b82dd20a967 100644 --- a/Testing/Code/BasicFilters/otbMeanShiftVectorImageFilter.cxx +++ b/Testing/Code/BasicFilters/otbMeanShiftVectorImageFilter.cxx @@ -23,19 +23,21 @@ int otbMeanShiftVectorImageFilter(int argc, char * argv[]) { - if(argc != 8) + if(argc != 10) { - std::cerr<<"Usage: "<<argv[0]<<" infname filteredfname clusteredfname spatialRadius rangeRadius minregionsize scale"<<std::endl; + std::cerr<<"Usage: "<<argv[0]<<" infname filteredfname clusteredfname labeledclusteredfname clusterboundariesfname spatialRadius rangeRadius minregionsize scale"<<std::endl; return EXIT_FAILURE; } - const char * infname = argv[1]; - const char * filteredfname = argv[2]; - const char * clusteredfname = argv[3]; - const unsigned int spatialRadius = atoi(argv[4]); - const double rangeRadius = atof(argv[5]); - const unsigned int minRegionSize = atoi(argv[6]); - const double scale = atoi(argv[7]); + const char * infname = argv[1]; + const char * filteredfname = argv[2]; + const char * clusteredfname = argv[3]; + const char * labeledclusteredfname = argv[4]; + const char * clusterboundariesfname = argv[5]; + const unsigned int spatialRadius = atoi(argv[6]); + const double rangeRadius = atof(argv[7]); + const unsigned int minRegionSize = atoi(argv[8]); + const double scale = atoi(argv[9]); const unsigned int Dimension = 2; typedef float PixelType; @@ -43,16 +45,23 @@ int otbMeanShiftVectorImageFilter(int argc, char * argv[]) typedef otb::ImageFileReader<ImageType> ReaderType; typedef otb::StreamingImageFileWriter<ImageType> WriterType; typedef 
otb::MeanShiftVectorImageFilter<ImageType,ImageType> FilterType; + typedef FilterType::LabeledOutputType LabeledImageType; + typedef otb::StreamingImageFileWriter<LabeledImageType> LabeledWriterType; // Instantiating object FilterType::Pointer filter = FilterType::New(); ReaderType::Pointer reader = ReaderType::New(); WriterType::Pointer writer1 = WriterType::New(); WriterType::Pointer writer2 = WriterType::New(); + LabeledWriterType::Pointer writer3 = LabeledWriterType::New(); + LabeledWriterType::Pointer writer4 = LabeledWriterType::New(); + reader->SetFileName(infname); writer1->SetFileName(filteredfname); writer2->SetFileName(clusteredfname); + writer3->SetFileName(labeledclusteredfname); + writer4->SetFileName(clusterboundariesfname); filter->SetSpatialRadius(spatialRadius); filter->SetRangeRadius(rangeRadius); @@ -62,9 +71,14 @@ int otbMeanShiftVectorImageFilter(int argc, char * argv[]) filter->SetInput(reader->GetOutput()); writer1->SetInput(filter->GetOutput()); writer2->SetInput(filter->GetClusteredOutput()); + writer3->SetInput(filter->GetLabeledClusteredOutput()); + writer4->SetInput(filter->GetClusterBoundariesOutput()); + writer1->Update(); writer2->Update(); + writer3->Update(); + writer4->Update(); return EXIT_SUCCESS; } diff --git a/Utilities/otbedison/SysReadme.txt b/Utilities/otbedison/SysReadme.txt index c5199c86f6f2e0ec56468e38eb43d04515a88b60..5a5c5721f026f71e80a2844a0282a8505fb95c82 100755 --- a/Utilities/otbedison/SysReadme.txt +++ b/Utilities/otbedison/SysReadme.txt @@ -1,104 +1,104 @@ -Edge Detection and Image SegmentatiON (EDISON) System ver1.0 README -------------------------------------------------------------------- - -System Overview: -=============== - -The EDISON system is a low-level vision tool that performs confidence based edge detection and synergistic image segmentation. It is packaged under a platform independent graphical user interface. A command prompt version is also provided. - -Table of Contents: -------------------- - -(A) System Overview -(B) Help Files -(C) Current Version Information -(D) Using wxWindows -(E) References -(F) Contact Information - -================================================================================================ - -(A) System Overview - -------------------------------------------------------------------------------------------------- - -The low-level image processor system is comprised of four components: - -(1) An image segmentation module consisting of a collection of classes that are designed to - perform mean shift based and synergistic segmentation. -(2) An edge detection module consisting of a collection of classes that are designed to perform - confidence based edge detection. -(3) A platform independent graphical user interface programmed using wxWindows. -(4) A command prompt version. - -The code for each module is available in its own directory. The directory structure is outlined below: - - - segm : code for image segmentation - - edge : code for edge detection - - GUI : code for graphical user interface - - prompt: code for system command prompt - -================================================================================================ - -(B) Help Files - -------------------------------------------------------------------------------------------------- - -The system help files are located in the 'help' directory. This directory contains the help files needed for both the graphical user interface and command prompt versions of the system. 
The 'doc' directory contains the help files needed for the graphical user interface. This directory must be placed in the same directory as the program executable to be used with the system. The file 'specification.html' contains the command prompt version specification. - -================================================================================================ - -(C) Current Version Info - -------------------------------------------------------------------------------------------------- - -Both versions of the system were tested under a Windows environment. The command prompt version was also tested under UNIX. The graphical user interface was compiled using Microsoft Visual C++. The project files used to package the system code are located in the 'project' directory under the project name 'EDISON'. The makefile needed to compile the command prompt is also located in this directory. For any further questions on the compilation of this code, please contact us using the provided contact information below. - -Although our system is thoroughly tested bugs may still exist. We would greatly appreciate any bug reports you may provide to assist us in building a more stable system. - -================================================================================================ - -(D) Using wxWindows - -------------------------------------------------------------------------------------------------- - -In order to compile the graphical user interface version of EDISON wxWindows must be download and installed. It may be obtained from the wxWindows web site at http://www.wxWindows.org. - -================================================================================================ - -(E) References - -------------------------------------------------------------------------------------------------- - -[1] D. Comanicu, P. Meer: "Mean shift: A robust approach toward feature space analysis". - IEEE Trans. Pattern Anal. Machine Intell., May 2002. - -[2] P. Meer, B. Georgescu: "Edge detection with embedded confidence". IEEE Trans. Pattern Anal. - Machine Intell., 28, 2001. - -[3] C. Christoudias, B. Georgescu, P. Meer: "Synergism in low level vision". 16th International - Conference of Pattern Recognition, Track 1 - Computer Vision and Robotics, Quebec City, - Canada, August 2001. - -================================================================================================ - -(F) Contact Information - -------------------------------------------------------------------------------------------------- - -Personal Contact Information ----------------------------- - -Email: - - cmch@caip.rutgers.edu (Chris M. Christoudias) - georgesc@caip.rutgers.edu (Bogdan Georgescu) - -Laboratory Contact Information ------------------------------- - -Laboratory Website: - - www.caip.rutgers.edu/riul/ - -================================================================================================ +Edge Detection and Image SegmentatiON (EDISON) System ver1.0 README +------------------------------------------------------------------- + +System Overview: +=============== + +The EDISON system is a low-level vision tool that performs confidence based edge detection and synergistic image segmentation. It is packaged under a platform independent graphical user interface. A command prompt version is also provided. 
+ +Table of Contents: +------------------- + +(A) System Overview +(B) Help Files +(C) Current Version Information +(D) Using wxWindows +(E) References +(F) Contact Information + +================================================================================================ + +(A) System Overview + +------------------------------------------------------------------------------------------------- + +The low-level image processor system is comprised of four components: + +(1) An image segmentation module consisting of a collection of classes that are designed to + perform mean shift based and synergistic segmentation. +(2) An edge detection module consisting of a collection of classes that are designed to perform + confidence based edge detection. +(3) A platform independent graphical user interface programmed using wxWindows. +(4) A command prompt version. + +The code for each module is available in its own directory. The directory structure is outlined below: + + - segm : code for image segmentation + - edge : code for edge detection + - GUI : code for graphical user interface + - prompt: code for system command prompt + +================================================================================================ + +(B) Help Files + +------------------------------------------------------------------------------------------------- + +The system help files are located in the 'help' directory. This directory contains the help files needed for both the graphical user interface and command prompt versions of the system. The 'doc' directory contains the help files needed for the graphical user interface. This directory must be placed in the same directory as the program executable to be used with the system. The file 'specification.html' contains the command prompt version specification. + +================================================================================================ + +(C) Current Version Info + +------------------------------------------------------------------------------------------------- + +Both versions of the system were tested under a Windows environment. The command prompt version was also tested under UNIX. The graphical user interface was compiled using Microsoft Visual C++. The project files used to package the system code are located in the 'project' directory under the project name 'EDISON'. The makefile needed to compile the command prompt is also located in this directory. For any further questions on the compilation of this code, please contact us using the provided contact information below. + +Although our system is thoroughly tested bugs may still exist. We would greatly appreciate any bug reports you may provide to assist us in building a more stable system. + +================================================================================================ + +(D) Using wxWindows + +------------------------------------------------------------------------------------------------- + +In order to compile the graphical user interface version of EDISON wxWindows must be download and installed. It may be obtained from the wxWindows web site at http://www.wxWindows.org. + +================================================================================================ + +(E) References + +------------------------------------------------------------------------------------------------- + +[1] D. Comanicu, P. Meer: "Mean shift: A robust approach toward feature space analysis". + IEEE Trans. Pattern Anal. Machine Intell., May 2002. + +[2] P. Meer, B. 
Georgescu: "Edge detection with embedded confidence". IEEE Trans. Pattern Anal. + Machine Intell., 28, 2001. + +[3] C. Christoudias, B. Georgescu, P. Meer: "Synergism in low level vision". 16th International + Conference of Pattern Recognition, Track 1 - Computer Vision and Robotics, Quebec City, + Canada, August 2001. + +================================================================================================ + +(F) Contact Information + +------------------------------------------------------------------------------------------------- + +Personal Contact Information +---------------------------- + +Email: + + cmch@caip.rutgers.edu (Chris M. Christoudias) + georgesc@caip.rutgers.edu (Bogdan Georgescu) + +Laboratory Contact Information +------------------------------ + +Laboratory Website: + + www.caip.rutgers.edu/riul/ + +================================================================================================ diff --git a/Utilities/otbedison/edge/BgEdgeDetect.cpp b/Utilities/otbedison/edge/BgEdgeDetect.cpp index 1b7b8e9f6d757ba4802de020010be6dc2391af65..1b9e7ca6120f661b5f5207dfbe971005e47d46e2 100644 --- a/Utilities/otbedison/edge/BgEdgeDetect.cpp +++ b/Utilities/otbedison/edge/BgEdgeDetect.cpp @@ -383,7 +383,7 @@ void BgEdgeDetect::CreateFilters(void) double w; for (i=-WL_; i<=WL_; i++) { - w = pow(2,(-2*WL_))*factorial(2*WL_)/(factorial(WL_-i)*factorial(WL_+i)); + w = pow(2.,(-2*WL_))*factorial(2*WL_)/(factorial(WL_-i)*factorial(WL_+i)); smofil_[i+WL_] = w; diffil_[i+WL_] = (2*i*w)/WL_; } diff --git a/Utilities/otbedison/segm/MSReadme.txt b/Utilities/otbedison/segm/MSReadme.txt index 301ad268952d7b6072f6dee097f5da37bec880bd..dfd4ccb7955de11a604467d8219c1719e21ee66b 100755 --- a/Utilities/otbedison/segm/MSReadme.txt +++ b/Utilities/otbedison/segm/MSReadme.txt @@ -1,176 +1,176 @@ -Mean Shift Image Processor Class ver1.0 README ----------------------------------------------- - -Class Overview: -=============== - -The mean shift image processor class is designed to offer the following functionality: - - (1) Perform image segmentation and edge-preserving filtering using the mean shift algorithm. - (2) Perform (1) using a general kernel and/or an arbitrary input data space. 
- -Table of Contents: -------------------- -(A) Image Segmentation and Filtering -(B) Synergistic Image Segmentation -(C) Using a General Kernel -(D) Using an Arbitrary Input Data Space -(E) The Class Error Handler -(F) Current Version Information -(G) References -(H) Contact Information - -================================================================================================ - -(A) Image Segmentation and Filtering - -------------------------------------------------------------------------------------------------- - -Mean shift based image segmentation and filtering is performed using use the following methods: - - msImageProcess::Filter - filters the image - msImageProcessor::Segment - segments the image - -The input image processed by these methods is defined via the method, - -msImageProcessor::DefineImage - this uploads the RGB data into the msImageProcessor class for processing - -To obtain the output call: - - msImageProcessor::GetResults - returns filtered or segmented image in RGB space - msImageProcessor::GetBoundaries - returns the boundaries of regions resulting from filtering - or segmentation - msImageProcessor::GetRegions - returns the classification structure that maps each - data point in the image to a given mode, and also - the number of points in the image correlating to each mode. - -NOTE: ------ - -The modes returned by GetRegions are not in the RGB space. If DefineImage was used, they are in the LUV space. The modes may be converted from LUV to RGB (and visa versa) using the space conversion methods of the msImageProcessor class: - - msImageProcessor::RGBtoLUV - converts data points from the RGB data space to LUV - msImageProcessor::LUVtoRGB - converts data points from the LUV data space to RGB - -Alternatively, mean shift may be applyed to data that lies in a space other than LUV. This may be accomplished through the use of the method MeanShift::DefineLInput (see section D). - -================================================================================================ - -(B) Synergistic Image Segmentation - -------------------------------------------------------------------------------------------------- - -A weight map may be provided to the mean shift image processor class, used to perform synergistic image segmentation as described in the paper [3]. One may specify a weight map by calling either of the following methods: - - MeanShift::SetWeightMap - defines the weight map used to specify a weighted kernel during - mean shift; the weight map may only be used for data that lies - on a lattice (e.g. an image) - msImageProcessor::SetWeightMap - specifies a weight map to be used for performing synergistic image - segmentation - -Each of the above methods accept a floating point array of size L elements containing the weight map. When using the mean shift base class L is the number of data points in the specified data set; when using the image processor class L = height x width, where height and width are the dimensions of the image. The method msImageProcessor::SetWeightMap accepts an additional parameter, namely t_e, a threshold value used during the transitive closure step of the image segmentation algorithm. See the paper [3] for details. 
- -================================================================================================ - -(C) Using a General Kernel - -------------------------------------------------------------------------------------------------- - -A general kernel can be used to perform mean shift filtering and segmentation by calling the inherited method: - - MeanShift::DefineKernel - defines an N-dimensional kernel having kp subspaces, in which each subspace - can be of one of three types: Uniform, Gaussian, or UserDefined. - -DefineImage, used to define the input image when performing image segmentation or filtering, defines a Uniform kernel having two subspaces (one spatial (x,y) and one range (L,U,V)) each subspace having bandwidths sigmaS and sigmaR respectively. By skimming the method definition one may get an idea of how to define a general kernel. - -NOTE: ----- - -For data that is defined on a lattice, it is always assumed that the spatial domain is treated as a single subspace. Also, DefineKernel() must be called *after* DefineImage() when these methods are used together. - -================================================================================================ - -(D) Using an Arbitrary Input Data Space - -------------------------------------------------------------------------------------------------- - -Mean shift filtering and segmentation may be performed on an arbitary image data space. Such data is defined through calling the inherited method: - - MeanShift::DefineLInput - specifies input defined on a lattice - -DefineImage() calls this method using the LUV data it generates. Through the use of the above methods, mean shift may be applied to an arbitrary input data space using a general kernel. In doing so, one must ensure that the dimension of the input data space and kernel are the same (N). If their dimensions do not agree an error will be flagged. - -================================================================================================ - -(F) The Class Error Handler - -------------------------------------------------------------------------------------------------- - -The mean shift image processor class uses an error message string and error-level flag to perform error handling. These two variables, MeanShift::ErrorMessage and MeanShift::ErrorLevel, are public data members of the class. - -Upon the occurance of an error, - - * An error message is copied into the error message string. - * The error level of the class is set to EL_ERROR. - -The following example demonstrates the use of the error handling mechanism described above. - -msImageProcessor iProc; - -... - -iProc.Segment(sigmaS, sigmaR, minRegion, SPEEDUP); -if(iProc.ErrorLevel == EL_ERROR) -{ - fprintf(stderr, iProc.ErrorMessage); - exit(1); -} - -... - -================================================================================================ - -(G) Current Version Information - -------------------------------------------------------------------------------------------------- - -The current version of the code was tested under both UNIX and Windows environments. - -================================================================================================ - -(H) References - -------------------------------------------------------------------------------------------------- - -[1] D. Comanicu, P. Meer: "Mean shift: A robust approach toward feature space analysis". - IEEE Trans. Pattern Anal. Machine Intell., May 2002. - -[2] P. Meer, B. Georgescu: "Edge detection with embedded confidence". 
IEEE Trans. Pattern Anal. - Machine Intell., 28, 2001. - -[3] C. Christoudias, B. Georgescu, P. Meer: "Synergism in low level vision". 16th International - Conference of Pattern Recognition, Track 1 - Computer Vision and Robotics, Quebec City, - Canada, August 2001. - -================================================================================================ - -(I) Contact Information - -------------------------------------------------------------------------------------------------- - -Personal Contact Information ----------------------------- - -Email: - - cmch@caip.rutgers.edu (Chris Christoudias) - georgesc@caip.rutgers.edu (Bogdan Georgescu) - -Laboratory Contact Information ------------------------------- - -Laboratory Website: - - www.caip.rutgers.edu/riul/ - +Mean Shift Image Processor Class ver1.0 README +---------------------------------------------- + +Class Overview: +=============== + +The mean shift image processor class is designed to offer the following functionality: + + (1) Perform image segmentation and edge-preserving filtering using the mean shift algorithm. + (2) Perform (1) using a general kernel and/or an arbitrary input data space. + +Table of Contents: +------------------- +(A) Image Segmentation and Filtering +(B) Synergistic Image Segmentation +(C) Using a General Kernel +(D) Using an Arbitrary Input Data Space +(E) The Class Error Handler +(F) Current Version Information +(G) References +(H) Contact Information + +================================================================================================ + +(A) Image Segmentation and Filtering + +------------------------------------------------------------------------------------------------- + +Mean shift based image segmentation and filtering is performed using use the following methods: + + msImageProcess::Filter - filters the image + msImageProcessor::Segment - segments the image + +The input image processed by these methods is defined via the method, + +msImageProcessor::DefineImage - this uploads the RGB data into the msImageProcessor class for processing + +To obtain the output call: + + msImageProcessor::GetResults - returns filtered or segmented image in RGB space + msImageProcessor::GetBoundaries - returns the boundaries of regions resulting from filtering + or segmentation + msImageProcessor::GetRegions - returns the classification structure that maps each + data point in the image to a given mode, and also + the number of points in the image correlating to each mode. + +NOTE: +----- + +The modes returned by GetRegions are not in the RGB space. If DefineImage was used, they are in the LUV space. The modes may be converted from LUV to RGB (and visa versa) using the space conversion methods of the msImageProcessor class: + + msImageProcessor::RGBtoLUV - converts data points from the RGB data space to LUV + msImageProcessor::LUVtoRGB - converts data points from the LUV data space to RGB + +Alternatively, mean shift may be applyed to data that lies in a space other than LUV. This may be accomplished through the use of the method MeanShift::DefineLInput (see section D). + +================================================================================================ + +(B) Synergistic Image Segmentation + +------------------------------------------------------------------------------------------------- + +A weight map may be provided to the mean shift image processor class, used to perform synergistic image segmentation as described in the paper [3]. 
One may specify a weight map by calling either of the following methods: + + MeanShift::SetWeightMap - defines the weight map used to specify a weighted kernel during + mean shift; the weight map may only be used for data that lies + on a lattice (e.g. an image) + msImageProcessor::SetWeightMap - specifies a weight map to be used for performing synergistic image + segmentation + +Each of the above methods accept a floating point array of size L elements containing the weight map. When using the mean shift base class L is the number of data points in the specified data set; when using the image processor class L = height x width, where height and width are the dimensions of the image. The method msImageProcessor::SetWeightMap accepts an additional parameter, namely t_e, a threshold value used during the transitive closure step of the image segmentation algorithm. See the paper [3] for details. + +================================================================================================ + +(C) Using a General Kernel + +------------------------------------------------------------------------------------------------- + +A general kernel can be used to perform mean shift filtering and segmentation by calling the inherited method: + + MeanShift::DefineKernel - defines an N-dimensional kernel having kp subspaces, in which each subspace + can be of one of three types: Uniform, Gaussian, or UserDefined. + +DefineImage, used to define the input image when performing image segmentation or filtering, defines a Uniform kernel having two subspaces (one spatial (x,y) and one range (L,U,V)) each subspace having bandwidths sigmaS and sigmaR respectively. By skimming the method definition one may get an idea of how to define a general kernel. + +NOTE: +---- + +For data that is defined on a lattice, it is always assumed that the spatial domain is treated as a single subspace. Also, DefineKernel() must be called *after* DefineImage() when these methods are used together. + +================================================================================================ + +(D) Using an Arbitrary Input Data Space + +------------------------------------------------------------------------------------------------- + +Mean shift filtering and segmentation may be performed on an arbitary image data space. Such data is defined through calling the inherited method: + + MeanShift::DefineLInput - specifies input defined on a lattice + +DefineImage() calls this method using the LUV data it generates. Through the use of the above methods, mean shift may be applied to an arbitrary input data space using a general kernel. In doing so, one must ensure that the dimension of the input data space and kernel are the same (N). If their dimensions do not agree an error will be flagged. + +================================================================================================ + +(F) The Class Error Handler + +------------------------------------------------------------------------------------------------- + +The mean shift image processor class uses an error message string and error-level flag to perform error handling. These two variables, MeanShift::ErrorMessage and MeanShift::ErrorLevel, are public data members of the class. + +Upon the occurance of an error, + + * An error message is copied into the error message string. + * The error level of the class is set to EL_ERROR. + +The following example demonstrates the use of the error handling mechanism described above. + +msImageProcessor iProc; + +... 
+ +iProc.Segment(sigmaS, sigmaR, minRegion, SPEEDUP); +if(iProc.ErrorLevel == EL_ERROR) +{ + fprintf(stderr, iProc.ErrorMessage); + exit(1); +} + +... + +================================================================================================ + +(G) Current Version Information + +------------------------------------------------------------------------------------------------- + +The current version of the code was tested under both UNIX and Windows environments. + +================================================================================================ + +(H) References + +------------------------------------------------------------------------------------------------- + +[1] D. Comanicu, P. Meer: "Mean shift: A robust approach toward feature space analysis". + IEEE Trans. Pattern Anal. Machine Intell., May 2002. + +[2] P. Meer, B. Georgescu: "Edge detection with embedded confidence". IEEE Trans. Pattern Anal. + Machine Intell., 28, 2001. + +[3] C. Christoudias, B. Georgescu, P. Meer: "Synergism in low level vision". 16th International + Conference of Pattern Recognition, Track 1 - Computer Vision and Robotics, Quebec City, + Canada, August 2001. + +================================================================================================ + +(I) Contact Information + +------------------------------------------------------------------------------------------------- + +Personal Contact Information +---------------------------- + +Email: + + cmch@caip.rutgers.edu (Chris Christoudias) + georgesc@caip.rutgers.edu (Bogdan Georgescu) + +Laboratory Contact Information +------------------------------ + +Laboratory Website: + + www.caip.rutgers.edu/riul/ + ================================================================================================ \ No newline at end of file diff --git a/Utilities/otbedison/segm/msImageProcessor.cpp b/Utilities/otbedison/segm/msImageProcessor.cpp index 88d30e2c6c9bcfffb78f7db350ad597e7afce3ab..a983d6f6d69a985ae4942e19d983aa42ad74b2c0 100644 --- a/Utilities/otbedison/segm/msImageProcessor.cpp +++ b/Utilities/otbedison/segm/msImageProcessor.cpp @@ -1116,24 +1116,28 @@ int msImageProcessor::GetRegions(int **labels_out, float **modes_out, int **MPC_ //allocate memory for labels_out, modes_out and MPC_out based //on output storage structure - int *labels_ = *labels_out, *MPC_out_ = *MPC_out; - float *modes_ = *modes_out; - if(!(labels_ = new int [L])) + if(!(*labels_out = new int [L])) { ErrorHandler("msImageProcessor", "GetRegions", "Not enough memory."); return -1; } - if(!(modes_ = new float [regionCount*N])) + if(!(*modes_out = new float [regionCount*N])) { ErrorHandler("msImageProcessor", "GetRegions", "Not enough memory."); return -1; } - if(!(MPC_out_ = new int [regionCount])) + if(!(*MPC_out = new int [regionCount])) { ErrorHandler("msImageProcessor", "GetRegions", "Not enough memory."); return -1; } + + int *labels_ = *labels_out, *MPC_out_ = *MPC_out; + float *modes_ = *modes_out; + + + //populate labels_out with image labels int i; for(i = 0; i < L; i++) diff --git a/empty b/empty deleted file mode 100644 index e7b64f1cd81a725f003f6e0d045e43baa322bbd2..0000000000000000000000000000000000000000 --- a/empty +++ /dev/null @@ -1,2 +0,0 @@ -empty - diff --git a/ununsed-DartConfig.cmake-old b/ununsed-DartConfig.cmake-old deleted file mode 100755 index 351eb979bec5c4c244f5ab0440dc1ca86db8a2e5..0000000000000000000000000000000000000000 --- a/ununsed-DartConfig.cmake-old +++ /dev/null @@ -1,47 +0,0 @@ -# Dashboard is opened for submissions for a 
24 hour period starting at -# the specified NIGHLY_START_TIME. Time is specified in 24 hour format. -SET (NIGHTLY_START_TIME "20:00:00 CET") - -SET (BUILDNAME "${BUILDNAME}") - - -# Dart server to submit results (used by client) -SET (DROP_METHOD "xmlrpc") -SET (DROP_SITE "http://nemo:8081") -SET (DROP_LOCATION "OTB") -SET (COMPRESS_SUBMISSION ON) -#SET (DROP_LOCATION "/incoming") -#SET (DROP_SITE_USER "ftpuser") -#SET (DROP_SITE_PASSWORD "public") - -#SET (DROP_SITE_MODE "active") - -#SET (TRIGGER_SITE "http://${DROP_SITE}/cgi-bin/Submit-Orfeotoolbox-TestingResults.cgi") - -# Project Home Page -SET (PROJECT_URL "http://smsc.cnes.fr/PLEIADES/") - -# Dart server configuration -#SET (ROLLUP_URL "http://${DROP_SITE}/cgi-bin/OrfeotoolboxRollup.cgi") - -#SET (ROLLUP_URL "http://${DROP_SITE}/cgi-bin/insight-rollup-dashboard.sh") -#SET (ROLLUP_URL "http://${DROP_SITE}/cgi-bin/orfeotoolbox-rollup-dashboard.sh") -#SET (CVS_WEB_URL "http://${DROP_SITE}/cgi-bin/viewcvs.cgi/") -#SET (CVS_WEB_CVSROOT "Insight") -#SET (CVS_WEB_CVSROOT "OrfeoToolbox") - -OPTION(BUILD_DOXYGEN "Build source documentation using doxygen" "Off") -SET (DOXYGEN_CONFIG "${OTB_BINARY_DIR}/doxygen.config" ) -SET (USE_DOXYGEN "On") -SET (DOXYGEN_URL "http://${DROP_SITE}/OrfeoToolbox/Doxygen/html/" ) - -SET (USE_GNATS "On") -SET (GNATS_WEB_URL "http://nemo/http://nemo/phpBugTracker/phpbt-1.0.1/") - -# Continuous email delivery variables -#SET (CONTINUOUS_FROM "lorensen@crd.ge.com") -#SET (SMTP_MAILHOST "public.kitware.com") -#SET (CONTINUOUS_MONITOR_LIST "lorensen@crd.ge.com millerjv@crd.ge.com lorensen@nycap.rr.com") -#SET (CONTINUOUS_BASE_URL "http://www.itk.org/Testing") - -MARK_AS_ADVANCED(BUILD_DOXYGEN)
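For reference, an illustrative sketch (not part of the patch) of the Lee weighting described by the reworked Doxygen comment in otbLeeImageFilter.h above. The names leeEstimate, meanI, meanI2 and nbLooks are hypothetical stand-ins for the filter's local window statistics and the number of looks; this only spells out the documented formula, not the filter's actual implementation.

// Illustrative only: per-pixel Lee estimate following the documented formula
// R = E[I] + b(I - E[I]), b = Cr^2 / (Cr^2 + Cv^2), Cv = 1/sqrt(L), Cr = sqrt(Var(I))/E[I].
// meanI plays the role of E[I], meanI2 of E[I^2], nbLooks of L (hypothetical names).
double leeEstimate(double I, double meanI, double meanI2, double nbLooks)
{
  const double varI = meanI2 - meanI * meanI;                        // Var(I) = E[I^2] - E[I]^2
  const double Cv2  = 1.0 / nbLooks;                                 // Cv^2 = 1/L
  const double Cr2  = (meanI != 0.0) ? varI / (meanI * meanI) : 0.0; // Cr^2 = Var(I)/E[I]^2
  const double b    = (Cr2 + Cv2 > 0.0) ? Cr2 / (Cr2 + Cv2) : 0.0;   // b = Cr^2/(Cr^2+Cv^2)
  return meanI + b * (I - meanI);                                    // R = E[I] + b(I - E[I])
}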
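The msImageProcessor::GetRegions() hunk above moves the allocations onto the dereferenced out-parameters before the local aliases are taken. A minimal standalone illustration of why the previous ordering lost the buffers; the function names brokenAlloc and fixedAlloc are hypothetical and only mimic the pointer handling, not the Edison code itself.

// Illustrative only: assigning a new buffer to a local copy of *labels_out never
// reaches the caller, which is what the original GetRegions() ordering did.
void brokenAlloc(int **labels_out)
{
  int *labels_ = *labels_out; // local copy of the caller's pointer value
  labels_ = new int[10];      // updates only the local copy; the caller still holds
                              // its old (typically null) pointer and the buffer leaks
}

// Illustrative only: writing through the out-parameter, as the patched code does,
// makes the caller's pointer refer to the freshly allocated buffer.
void fixedAlloc(int **labels_out)
{
  *labels_out = new int[10];
}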
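The one-character change in BgEdgeDetect.cpp (pow(2,...) becomes pow(2.,...)) is presumably an overload-resolution fix; a small sketch of the distinction, with WL standing in for the member WL_ and the factorial factors of CreateFilters() omitted.

#include <cmath>

// Illustrative only: with an int base and int exponent, std::pow(2, -2*WL) can be
// ambiguous among the <cmath> floating-point overloads on some compilers; an explicit
// double base (2.0) selects the double overload unambiguously.
double smoothingScale(int WL)
{
  return std::pow(2.0, -2 * WL);
}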