Commit 6311a01b authored by Julien Malik

STYLE: remove trailing whitespace/tab from Applications

parent e82a5ec5
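The change is purely cosmetic: it strips trailing spaces and tabs from the affected source lines, so the hunks below contain no functional modifications. As a rough illustration of that clean-up (a minimal sketch only, not the tool actually used for this commit; the in-place rewrite of a file passed on the command line is an assumption):

#include <fstream>
#include <iostream>
#include <sstream>
#include <string>

// Strip trailing spaces and tabs from every line of the file given as argv[1],
// rewriting the file in place.
int main(int argc, char* argv[])
{
  if (argc < 2)
    {
    std::cerr << "Usage: " << argv[0] << " <source-file>" << std::endl;
    return 1;
    }

  std::ifstream in(argv[1]);
  std::ostringstream cleaned;
  std::string line;
  while (std::getline(in, line))
    {
    // Position of the last character that is neither a space nor a tab
    const std::string::size_type last = line.find_last_not_of(" \t");
    if (last != std::string::npos)
      {
      cleaned << line.substr(0, last + 1);
      }
    cleaned << '\n';
    }
  in.close();

  // Write the cleaned content back to the same file
  std::ofstream out(argv[1]);
  out << cleaned.str();
  return 0;
}

Each application source file touched by the commit would be processed this way, leaving the code itself unchanged.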
@@ -125,7 +125,7 @@ private:
SetDocLimitations("None.");
SetDocAuthors("OTB-Team");
SetDocSeeAlso(" ");
AddDocTag(Tags::FeatureExtraction);
AddParameter(ParameterType_InputVectorData, "psin", "Input Positive Vector Data");
@@ -68,7 +68,7 @@ private:
SetDocLimitations("None.");
SetDocAuthors("OTB-Team");
SetDocSeeAlso("http://en.wikipedia.org/wiki/Dempster-Shafer_theory");
AddDocTag(Tags::FeatureExtraction);
@@ -102,7 +102,7 @@ private:
SetDocExampleParameterValue("belsup", "cdbTvComputePolylineFeatureFromImage_LI_NOBUIL_gt.shp");
SetDocExampleParameterValue("descmod", "DSFuzzyModel.xml");
SetDocExampleParameterValue("out", "VectorDataDSValidation.shp");
}
void DoUpdateParameters()
@@ -43,7 +43,7 @@ public:
/** Filters typedef */
typedef UInt16ImageType IOLabelImageType;
// Neighborhood majority voting filter type
typedef otb::NeighborhoodMajorityVotingImageFilter<IOLabelImageType> NeighborhoodMajorityVotingFilterType;
@@ -98,7 +98,7 @@ private:
AddParameter(ParameterType_Int, "ip.undecidedlabel", "Label for the Undecided class");
SetParameterDescription("ip.undecidedlabel", "Label for the Undecided class. By default, 'ip.undecidedlabel = 0'.");
SetDefaultParameterInt("ip.undecidedlabel", 0.0);
AddRAMParameter();
@@ -115,29 +115,29 @@ private:
{
// Nothing to do here : all parameters are independent
}
void DoExecute()
{
// Majority Voting
m_NeighMajVotingFilter = NeighborhoodMajorityVotingFilterType::New();
// Load input labeled image to regularize
UInt16ImageType::Pointer inImage = GetParameterUInt16Image("io.in");
// Neighborhood majority voting filter settings
RadiusType rad;
rad[0] = GetParameterInt("ip.radius");
rad[1] = GetParameterInt("ip.radius");
StructuringType seBall;
seBall.SetRadius(rad);
seBall.CreateStructuringElement();
m_NeighMajVotingFilter->SetKernel(seBall);
m_NeighMajVotingFilter->SetInput(inImage);
m_NeighMajVotingFilter->SetLabelForNoDataPixels(GetParameterInt("ip.nodatalabel"));
m_NeighMajVotingFilter->SetLabelForUndecidedPixels(GetParameterInt("ip.undecidedlabel"));
// Set the pixel to the Undecided label when the majority voting is not unique
if (IsParameterEnabled("ip.suvbool"))
{
@@ -148,10 +148,10 @@ private:
{
m_NeighMajVotingFilter->SetKeepOriginalLabelBool(true);
}
/** REGULARIZATION OF CLASSIFICATION */
SetParameterOutputImage<IOLabelImageType>("io.out", m_NeighMajVotingFilter->GetOutput());
}// END DoExecute()
@@ -99,20 +99,20 @@ private:
AddParameter(ParameterType_OutputFilename, "out", "Matrix output");
SetParameterDescription("out", "Filename to store the output matrix (csv format)");
AddParameter(ParameterType_Choice,"ref","Ground truth");
SetParameterDescription("ref","Choice of ground truth format");
AddChoice("ref.raster","Ground truth as a raster image");
AddChoice("ref.vector","Ground truth as a vector data file");
AddParameter(ParameterType_InputImage,"ref.raster.in","Input reference image");
SetParameterDescription("ref.raster.in","Input image containing the ground truth labels");
AddParameter(ParameterType_InputFilename,"ref.vector.in","Input reference vector data");
SetParameterDescription("ref.vector.in", "Input vector data of the ground truth");
AddParameter(ParameterType_String,"ref.vector.field","Field name");
SetParameterDescription("ref.vector.field","Field name containing the label values");
SetParameterString("ref.vector.field","Class");
@@ -125,7 +125,7 @@ private:
SetDefaultParameterInt("nodatalabel",0);
MandatoryOff("nodatalabel");
DisableParameter("nodatalabel");
AddRAMParameter();
// Doc example parameter settings
@@ -51,7 +51,7 @@ private:
SetDocLimitations("Each image of the set must contain the same bands as the others (i.e. same types, in the same order).");
SetDocAuthors("OTB-Team");
SetDocSeeAlso("Documentation of the TrainImagesClassifier application.");
AddDocTag(Tags::Learning);
AddDocTag(Tags::Analysis);
@@ -66,7 +66,7 @@ public:
typedef otb::SOMImageClassificationFilter
<FloatVectorImageType, LabeledImageType, SOMMapType> ClassificationFilterType;
private:
void DoInit()
{
@@ -85,65 +85,65 @@ private:
AddParameter(ParameterType_InputImage, "in", "InputImage");
SetParameterDescription("in", "Input image to classify.");
AddParameter(ParameterType_OutputImage, "out", "OutputImage");
SetParameterDescription("out", "Output classified image (each pixel contains the index of its corresponding vector in the SOM).");
AddParameter(ParameterType_InputImage, "vm", "ValidityMask");
SetParameterDescription("vm", "Validity mask (only pixels corresponding to a mask value greater than 0 will be used for learning)");
MandatoryOff("vm");
AddParameter(ParameterType_Float, "tp", "TrainingProbability");
SetParameterDescription("tp", "Probability for a sample to be selected in the training set");
MandatoryOff("tp");
AddParameter(ParameterType_Int, "ts", "TrainingSetSize");
SetParameterDescription("ts", "Maximum training set size (in pixels)");
MandatoryOff("ts");
AddParameter(ParameterType_Int, "sl", "StreamingLines");
SetParameterDescription("sl", "Number of lines in each streaming block (used during data sampling)");
MandatoryOff("sl");
AddParameter(ParameterType_OutputImage, "som", "SOM Map");
SetParameterDescription("som","Output image containing the Self-Organizing Map");
MandatoryOff("som");
AddParameter(ParameterType_Int, "sx", "SizeX");
SetParameterDescription("sx", "X size of the SOM map");
MandatoryOff("sx");
AddParameter(ParameterType_Int, "sy", "SizeY");
SetParameterDescription("sy", "Y size of the SOM map");
MandatoryOff("sy");
AddParameter(ParameterType_Int, "nx", "NeighborhoodX");
SetParameterDescription("nx", "X size of the initial neighborhood in the SOM map");
MandatoryOff("nx");
AddParameter(ParameterType_Int, "ny", "NeighborhoodY");
SetParameterDescription("ny", "Y size of the initial neighborhood in the SOM map");
MandatoryOff("nx");
AddParameter(ParameterType_Int, "ni", "NumberIteration");
SetParameterDescription("ni", "Number of iterations for SOM learning");
MandatoryOff("ni");
AddParameter(ParameterType_Float, "bi", "BetaInit");
SetParameterDescription("bi", "Initial learning coefficient");
MandatoryOff("bi");
AddParameter(ParameterType_Float, "bf", "BetaFinal");
SetParameterDescription("bf", "Final learning coefficient");
MandatoryOff("bf");
AddParameter(ParameterType_Float, "iv", "InitialValue");
SetParameterDescription("iv", "Maximum initial neuron weight");
MandatoryOff("iv");
AddRAMParameter();
// TODO : replace StreamingLines by RAM param ?
AddRANDParameter();
// Default parameters
SetDefaultParameterFloat("tp", 1.0);
@@ -155,7 +155,7 @@ private:
SetDefaultParameterFloat("bi", 1.0);
SetDefaultParameterFloat("bf", 0.1);
SetDefaultParameterFloat("iv", 0.0);
// Doc example parameter settings
SetDocExampleParameterValue("in", "QB_1_ortho.tif");
SetDocExampleParameterValue("out","SOMClassification.tif");
@@ -171,18 +171,18 @@ private:
SetDocExampleParameterValue("bf", "0.1");
SetDocExampleParameterValue("iv", "0");
}
void DoUpdateParameters()
{
// Nothing to do
}
void DoExecute()
{
// Initialize the random number generator
itk::Statistics::MersenneTwisterRandomVariateGenerator::Pointer
randomGen = itk::Statistics::MersenneTwisterRandomVariateGenerator::GetInstance();
FloatVectorImageType::Pointer input = GetParameterImage("in");
LabeledImageType::Pointer mask;
m_UseMask = false;
@@ -196,14 +196,14 @@ private:
}
m_UseMask = true;
}
/*******************************************/
/* Sampling data */
/*******************************************/
otbAppLogINFO("-- SAMPLING DATA --");
RegionType largestRegion = input->GetLargestPossibleRegion();
// Setting up local streaming capabilities
SplitterType::Pointer splitter = SplitterType::New();
unsigned int numberOfStreamDivisions;
@@ -223,9 +223,9 @@ private:
otb::SET_BUFFER_MEMORY_SIZE,
0, 1048576*GetParameterInt("ram"), 0);
}
otbAppLogINFO("The images will be streamed into "<<numberOfStreamDivisions<<" parts.");
// Training sample lists
ListSampleType::Pointer sampleList = ListSampleType::New();
sampleList->SetMeasurementVectorSize(input->GetNumberOfComponentsPerPixel());
@@ -240,45 +240,45 @@ private:
{
nbsamples = largestRegion.GetNumberOfPixels();
}
// Sample dimension and max dimension
unsigned int sampleSize = input->GetNumberOfComponentsPerPixel();
unsigned int totalSamples = 0;
otbAppLogINFO("The following sample size will be used: "<<sampleSize);
// local streaming variables
unsigned int piece = 0;
RegionType streamingRegion;
// create a random permutation to explore
itk::RandomPermutation randPerm(numberOfStreamDivisions);
unsigned int index = 0;
// TODO : maybe change the approach: at the moment, the sampling process is able to pick a sample twice or more
while (totalSamples < nbsamples)
{
unsigned int localNbSamples=0;
piece = randPerm[index];
streamingRegion = splitter->GetSplit(piece, numberOfStreamDivisions, largestRegion);
//otbAppLogINFO("Processing region: "<<streamingRegion);
input->SetRequestedRegion(streamingRegion);
input->PropagateRequestedRegion();
input->UpdateOutputData();
IteratorType it(input, streamingRegion);
it.GoToBegin();
if (m_UseMask)
{
mask->SetRequestedRegion(streamingRegion);
mask->PropagateRequestedRegion();
mask->UpdateOutputData();
LabeledIteratorType maskIt(mask, streamingRegion);
maskIt.GoToBegin();
// Loop on the image and the mask
while ( !it.IsAtEnd()
&& !maskIt.IsAtEnd()
@@ -338,17 +338,17 @@ private:
if (index == numberOfStreamDivisions) index = 0;
}
}
otbAppLogINFO("The final training set contains "<<totalSamples<<" samples.");
/*******************************************/
/* Learning */
/*******************************************/
otbAppLogINFO("-- LEARNING --");
EstimatorType::Pointer estimator = EstimatorType::New();
estimator->SetListSample(sampleList);
EstimatorType::SizeType size;
size[0]=GetParameterInt("sx");
@@ -362,29 +362,29 @@ private:
estimator->SetBetaInit(GetParameterFloat("bi"));
estimator->SetBetaEnd(GetParameterFloat("bf"));
estimator->SetMaxWeight(GetParameterFloat("iv"));
AddProcess(estimator,"Learning");
estimator->Update();
m_SOMMap = estimator->GetOutput();
if (HasValue("som"))
{
otbAppLogINFO("-- Using Leaning image --");
SetParameterOutputImage<DoubleVectorImageType>("som", m_SOMMap);
}
/*******************************************/
/* Classification */
/*******************************************/
otbAppLogINFO("-- CLASSIFICATION --");
m_Classifier = ClassificationFilterType::New();
m_Classifier->SetInput(input);
m_Classifier->SetMap(m_SOMMap);
if (m_UseMask) m_Classifier->SetInputMask(mask);
AddProcess(m_Classifier,"Classification");
SetParameterOutputImage<LabeledImageType>("out", m_Classifier->GetOutput());
}
@@ -234,18 +234,18 @@ private:
AddChoice("bm.subpixel.dichotomy", "Dichotomy");
SetParameterDescription("bm.subpixel.dichotomy", "Dichotomic search");
AddParameter(ParameterType_Int,"bm.step", "Computation step");
SetParameterDescription("bm.step", "Location step between computed disparities");
SetDefaultParameterInt("bm.step",1);
SetMinimumParameterIntValue("bm.step",1);
MandatoryOff("bm.step");
AddParameter(ParameterType_Int,"bm.startx","X start index");
SetParameterDescription("bm.startx","X start index of the subsampled grid (wrt the input image grid)");
SetDefaultParameterInt("bm.startx",0);
MandatoryOff("bm.startx");
AddParameter(ParameterType_Int,"bm.starty","Y start index");
SetParameterDescription("bm.starty","Y start index of the subsampled grid (wrt the input image grid)");
SetDefaultParameterInt("bm.starty",0);
@@ -367,7 +367,7 @@ private:
int minvdisp = GetParameterInt("bm.minvd");
int maxvdisp = GetParameterInt("bm.maxvd");
unsigned int step = GetParameterInt("bm.step");
FloatImageType::IndexType gridIndex;
gridIndex[0] = GetParameterInt("bm.startx");
gridIndex[1] = GetParameterInt("bm.starty");
@@ -213,7 +213,7 @@ private:
// Setup the DEM Handler
otb::Wrapper::ElevationParametersHandler::SetupDEMHandlerFromElevationParameters(this,"epi.elevation");
if (otb::Wrapper::ElevationParametersHandler::IsGeoidUsed(this,"epi.elevation") &&
otb::Wrapper::ElevationParametersHandler::IsDEMUsed(this,"epi.elevation") &&
!IsParameterEnabled("epi.elevation.avgdem.step"))
@@ -244,7 +244,7 @@ private:
m_StatisticsFilter->Update();
otb::DEMHandler::Instance()->SetDefaultHeightAboveEllipsoid(m_StatisticsFilter->GetMean());
EnableParameter("epi.elevation.avgdem.value");
SetParameterFloat("epi.elevation.avgdem.value",m_StatisticsFilter->GetMean());
@@ -246,7 +246,7 @@ private:
PCAInverseFilterType::Pointer invFilter = PCAInverseFilterType::New();
m_InverseFilter = invFilter;
filter->SetInput(GetParameterFloatVectorImage("in"));
filter->SetNumberOfPrincipalComponentsRequired(nbComp);
@@ -269,7 +269,7 @@ private:
}
m_TransformationMatrix = filter->GetTransformationMatrix();
break;
}
case 1:
@@ -289,7 +289,7 @@ private:
NAPCAInverseFilterType::Pointer invFilter = NAPCAInverseFilterType::New();
m_InverseFilter = invFilter;
filter->SetInput(GetParameterFloatVectorImage("in"));
filter->SetNumberOfPrincipalComponentsRequired(nbComp);
filter->SetUseNormalization(normalize);
@@ -308,7 +308,7 @@ private:
invFilter->SetTransformationMatrix(filter->GetTransformationMatrix());
m_TransformationMatrix = invFilter->GetTransformationMatrix();
}
m_TransformationMatrix = filter->GetTransformationMatrix();
break;
@@ -446,7 +446,7 @@ private:
SetParameterOutputImage("out", m_RescaleFilter->GetOutput());
}
}
@@ -126,7 +126,7 @@ private:
AddParameter(ParameterType_Choice,"algorithm","Keypoints detection algorithm");
SetParameterDescription("algorithm","Choice of the detection algorithm to use");
AddChoice("algorithm.surf","SURF algorithm");
AddChoice("algorithm.sift","SIFT algorithm");
@@ -160,10 +160,10 @@ private:
AddParameter(ParameterType_Float,"precision","Estimated precision of the colocalisation function (in pixels).");
SetParameterDescription("precision","Estimated precision of the colocalisation function in pixels");
SetDefaultParameterFloat("precision",0.);
AddParameter(ParameterType_Empty,"mfilter","Filter points according to geographical or sensor based colocalisation");
SetParameterDescription("mfilter","If enabled, this option allows to filter matches according to colocalisation from sensor or geographical information, using the given tolerancy expressed in pixels");
AddParameter(ParameterType_Empty,"2wgs84","If enabled, points from second image will be exported in WGS84");
// Elevation
@@ -198,18 +198,18 @@ private:
otbAppLogINFO("Using SIFT points");
SiftFilterType::Pointer sift1 = SiftFilterType::New();
sift1->SetInput(im1);
SiftFilterType::Pointer sift2 = SiftFilterType::New();
sift2->SetInput(im2);
sift1->Update();
otbAppLogINFO("Found " << sift1->GetOutput()->GetNumberOfPoints()<<" sift points in image 1.");
sift2->Update();
otbAppLogINFO("Found " << sift2->GetOutput()->GetNumberOfPoints()<<" sift points in image 2.");
matchingFilter->SetInput1(sift1->GetOutput());
matchingFilter->SetInput2(sift2->GetOutput());
}
@@ -217,19 +217,19 @@ private:
{
SurfFilterType::Pointer surf1 = SurfFilterType::New();
surf1->SetInput(im1);
SurfFilterType::Pointer surf2 = SurfFilterType::New();
surf2->SetInput(im2);
otbAppLogINFO("Doing update");
surf1->Update();
otbAppLogINFO("Found " << surf1->GetOutput()->GetNumberOfPoints()<<" surf points in image 1.");
surf2->Update();
otbAppLogINFO("Found " << surf2->GetOutput()->GetNumberOfPoints()<<" surf points in image 2.");
matchingFilter->SetInput1(surf1->GetOutput());
matchingFilter->SetInput2(surf2->GetOutput());
matchingFilter->SetDistanceThreshold(GetParameterFloat("threshold"));
@@ -240,48 +240,48 @@ private:
{
matchingFilter->Update();
LandmarkListType::Pointer landmarks = matchingFilter->GetOutput();
otbAppLogINFO("Found " << landmarks->Size() <<" homologous points.");
unsigned int discarded = 0;
for (LandmarkListType::Iterator it = landmarks->Begin();
it != landmarks->End(); ++it)
{
PointType point1 = it.Get()->GetPoint1();
PointType point2 = it.Get()->GetPoint2();
double error = 0;
PointType pprime1,pprime2;
bool filtered = false;
if(IsParameterEnabled("mfilter"))
{
pprime1 = rsTransform->TransformPoint(point1);
error = vcl_sqrt((point2[0]-pprime1[0])*(point2[0]-pprime1[0])+(point2[1]-pprime1[1])*(point2[1]-pprime1[1]));
if(error>GetParameterFloat("precision")*vcl_sqrt(vcl_abs(im2->GetSpacing()[0]*im2->GetSpacing()[1])))
{
filtered = true;
}
}
if(!filtered)
{
if(IsParameterEnabled("2wgs84"))
{
pprime2 = rsTransform2ToWGS84->TransformPoint(point2);
file<<point1[0]<<"\t"<<point1[1]<<"\t"<<pprime2[0]<<"\t"<<pprime2[1]<<std::endl;
}
else
{
file<<point1[0]<<"\t"<<point1[1]<<"\t"<<point2[0]<<"\t"<<point2[1]<<std::endl;
}
if(mls)
{
pprime1 = rsTransform1ToWGS84->TransformPoint(point1);
@@ -306,7 +306,7 @@ private:
// silent catch
}
}
void DoExecute()
{
@@ -337,11 +337,11 @@ private:
ExtractChannelFilterType::Pointer extractChannel1 = ExtractChannelFilterType::New();
extractChannel1->SetInput(this->GetParameterImage("in1"));
extractChannel1->SetChannel(GetParameterInt("band1"));
ExtractChannelFilterType::Pointer extractChannel2 = ExtractChannelFilterType::New();
extractChannel2->SetInput(this->GetParameterImage("in2"));
extractChannel2->SetChannel(GetParameterInt("band2"));
// Setup the DEM Handler
otb::Wrapper::ElevationParametersHandler::SetupDEMHandlerFromElevationParameters(this,"elev");
@@ -367,7 +367,7 @@ private:
FloatImageType::SpacingType spacing1 = this->GetParameterImage("in1")->GetSpacing();
FloatImageType::PointType origin1 = this->GetParameterImage("in1")->GetOrigin();
FloatVectorImageType::Pointer image2 = this->GetParameterImage("in2");
for(unsigned int i = 0; i<nb_bins_x; ++i)
@@ -376,7 +376,7 @@ private:
{
unsigned int startx = bin_step/2 + i*(bin_size + bin_step);
unsigned int starty = bin_step/2 + j*(bin_size + bin_step);
FloatImageType::SizeType size1;
FloatImageType::IndexType index1;
@@ -394,10 +394,10 @@ private:
otbAppLogINFO("("<<i+1<<"/"<<nb_bins_x<<", "<<j+1<<"/"<<nb_bins_y<<") Considering region1 : "<<region1.GetIndex()<<", "<<region1.GetSize());
extractChannel1->SetExtractionRegion(region1);
// We need to find the corresponding region in image 2
FloatImageType::PointType ul1, ur1, ll1, lr1, p1, p2, p3, p4;
itk::ContinuousIndex<double,2> i1, i2, i3, i4, i_min, i_max;
@@ -418,7 +418,7 @@ private:
p2 = rsTransform->TransformPoint(ur1);
p3 = rsTransform->TransformPoint(lr1);
p4 = rsTransform->TransformPoint(ll1);
image2->TransformPhysicalPointToContinuousIndex(p1,i1);
image2->TransformPhysicalPointToContinuousIndex(p2,i2);
image2->TransformPhysicalPointToContinuousIndex(p3,i3);
@@ -466,16 +466,16 @@ private:
// Create the datasource (for matches export)
otb::ogr::Layer layer(NULL, false);
otb::ogr::DataSource::Pointer ogrDS;
ogrDS = otb::ogr::DataSource::New(GetParameterString("outvector"), otb::ogr::DataSource::Modes::Overwrite);
std::string projref = "GEOGCS[\"WGS 84\",DATUM[\"WGS_1984\",SPHEROID[\"WGS 84\",6378137,298.257223563,AUTHORITY[\"EPSG\",\"