---
AlignAfterOpenBracket: Align
AlignConsecutiveAssignments: true
AlignConsecutiveDeclarations: true
AlignEscapedNewlinesLeft: true
AlignOperands: true
AlignTrailingComments: true
AllowShortBlocksOnASingleLine: false
AllowShortCaseLabelsOnASingleLine: false
AllowShortFunctionsOnASingleLine: false
AllowShortIfStatementsOnASingleLine: false
AllowShortLoopsOnASingleLine: false
AlwaysBreakBeforeMultilineStrings: true
AlwaysBreakTemplateDeclarations: true
BreakBeforeBraces: Allman
ColumnLimit: 160
ConstructorInitializerAllOnOneLineOrOnePerLine: true
ConstructorInitializerIndentWidth: 2
Cpp11BracedListStyle: true
DerivePointerAlignment: false
IndentWidth: 2
Language: Cpp
MaxEmptyLinesToKeep: 2
NamespaceIndentation: None
PointerAlignment: Left
SortIncludes: false
Standard: Cpp11
TabWidth: 2
UseTab: Never
...
# http://EditorConfig.org
# top-most EditorConfig file
root = true
# Apply to all code files
[*]
# A newline ending every file
insert_final_newline = true
# Set default charset
charset = utf-8
# 2 space indentation
indent_style = space
indent_size = 2
# Various options
trim_trailing_whitespace = true
Data/**/*.[0-9][0-9][0-9] filter=lfs diff=lfs merge=lfs -text
Data/**/*.BIL filter=lfs diff=lfs merge=lfs -text
Data/**/*.bsq filter=lfs diff=lfs merge=lfs -text
Data/**/*.c[0-9] filter=lfs diff=lfs merge=lfs -text
Data/**/*.c[0-9][0-9] filter=lfs diff=lfs merge=lfs -text
Data/**/*.ci4 filter=lfs diff=lfs merge=lfs -text
Data/**/*.cr4 filter=lfs diff=lfs merge=lfs -text
Data/**/*.dat filter=lfs diff=lfs merge=lfs -text
Data/**/*.dbf filter=lfs diff=lfs merge=lfs -text
Data/**/*.grd filter=lfs diff=lfs merge=lfs -text
Data/**/*.hdf filter=lfs diff=lfs merge=lfs -text
Data/**/*.he5 filter=lfs diff=lfs merge=lfs -text
Data/**/*.hgt filter=lfs diff=lfs merge=lfs -text
Data/**/*.id filter=lfs diff=lfs merge=lfs -text
Data/**/*.idx filter=lfs diff=lfs merge=lfs -text
Data/**/*.ima filter=lfs diff=lfs merge=lfs -text
Data/**/*.img filter=lfs diff=lfs merge=lfs -text
Data/**/*.index filter=lfs diff=lfs merge=lfs -text
Data/**/*.j2k filter=lfs diff=lfs merge=lfs -text
Data/**/*.jp2 filter=lfs diff=lfs merge=lfs -text
Data/**/*.jpeg filter=lfs diff=lfs merge=lfs -text
Data/**/*.jpg filter=lfs diff=lfs merge=lfs -text
Data/**/*.kmz filter=lfs diff=lfs merge=lfs -text
Data/**/*.las filter=lfs diff=lfs merge=lfs -text
Data/**/*.lum filter=lfs diff=lfs merge=lfs -text
Data/**/*.map filter=lfs diff=lfs merge=lfs -text
Data/**/*.mw filter=lfs diff=lfs merge=lfs -text
Data/**/*.pack filter=lfs diff=lfs merge=lfs -text
Data/**/*.pix filter=lfs diff=lfs merge=lfs -text
Data/**/*.png filter=lfs diff=lfs merge=lfs -text
Data/**/*.raw filter=lfs diff=lfs merge=lfs -text
Data/**/*.shp filter=lfs diff=lfs merge=lfs -text
Data/**/*.shx filter=lfs diff=lfs merge=lfs -text
Data/**/*.sqlite filter=lfs diff=lfs merge=lfs -text
# Data/**/*.tar filter=lfs diff=lfs merge=lfs -text
Data/**/*.tif filter=lfs diff=lfs merge=lfs -text
Data/**/*.TIF filter=lfs diff=lfs merge=lfs -text
Data/**/*.tiff filter=lfs diff=lfs merge=lfs -text
Data/**/*.tts filter=lfs diff=lfs merge=lfs -text
syntax: glob
*~
.*.swp
*.orig
*.pyc
.\#*
\#*\#
tags
*vim*
*.template
archives*
.vscode*
CMakeLists.txt.user
.directory*
build_packages*
build
install
log
#
# Copyright (C) 2005-2024 Centre National d'Etudes Spatiales (CNES)
#
# This file is part of Orfeo Toolbox
#
# https://www.orfeo-toolbox.org/
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# WARNING!
# OTB uses Git-LFS to store the (large) test data.
# Git-LFS is mostly transparent for the user, and recent versions
# handle it quite efficiently.
# But Git itself fails to manage a large number of LFS files efficiently,
# so we call the git-lfs commands directly to keep the cloning step efficient.
# Furthermore, Git-LFS and GitLab suffer from a bug preventing usage of
# GIT_STRATEGY=fetch (https://gitlab.com/gitlab-org/gitlab-runner/issues/3318)
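# Illustration only (not an extra CI step): the manual LFS provisioning pattern used in the
# before_script blocks below boils down to fetching the LFS objects for the commit first and
# only then checking it out, e.g. (the commit reference here is a placeholder):
#   git lfs fetch origin <commit-sha>
#   git checkout -f -q <commit-sha>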
variables:
BUILD_IMAGE_REGISTRY: $CI_REGISTRY/orfeotoolbox/otb-build-env
GIT_CLONE_PATH: $CI_BUILDS_DIR/otb
DOCKER_DRIVER: overlay2
GIT_DEPTH: "3"
# Disable automatic checkout so we can fetch LFS data before checking out
GIT_CHECKOUT: "false"
# The fetch strategy fails with LFS and GitLab
GIT_STRATEGY: "clone"
PACKAGE_OTB_VERSION: "latest"
workflow:
rules:
- if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
- if: '$CI_COMMIT_BRANCH && $CI_OPEN_MERGE_REQUESTS'
when: never
- if: '$CI_COMMIT_BRANCH'
stages:
- precheck
- prepare
- build
- report
- deploy
- docker
.general:
retry:
max: 2
when:
- runner_system_failure
- stuck_or_timeout_failure
.common:
extends: .general
before_script:
# Efficiently provision the local LFS cache before checkout
- git lfs fetch origin $CI_COMMIT_SHA
- git checkout -f -q $CI_COMMIT_SHA
after_script:
- python3 -u CI/cdash_handler.py
.common-build:
extends: .common
only:
- merge_requests
- develop
- /^release-[0-9]+\.[0-9]+$/
stage: build
artifacts:
when: always
expire_in: 24 hrs
paths:
- build/*/*.log #CMake log
- log/*.txt # Others
- CookBook-*-html.tar.gz
- CookBook-*.pdf
- OTB-Doxygen-*.tar.bz2
- OTB-*.tar.gz
- OTB-*.zip
- build/compile_commands.json
- build/ctest_report.xml
- build/cppcheck_report.xml
- build/coverage_report.xml
- build/Testing/Temporary/*.txt #DEBUG
- Docker/Dockerfile_CI
- Docker/system-dependencies.txt
.common-prepare:
extends: .general
only:
- merge_requests
- develop
- /^release-[0-9]+\.[0-9]+$/
stage: prepare
before_script:
- export GIT_LFS_SKIP_SMUDGE=1
- git checkout -f -q $CI_COMMIT_SHA
- export GIT_LFS_SKIP_SMUDGE=0
artifacts:
expire_in: 24 hrs
when: always
paths:
- sb_branch.txt # Needed to checkout correct branch in build step
- build/*/*/*/*.log # Superbuild log
- build/*/*/*/*.cmake
#-------------------------- precheck job ---------------------------------------
fast-build:
extends: .common
only:
- merge_requests
- branches
stage: precheck
image: $BUILD_IMAGE_REGISTRY/otb-ubuntu-native-itk5:22.04
before_script:
- export GIT_LFS_SKIP_SMUDGE=1
- git checkout -f -q $CI_COMMIT_SHA
# pull data needed by the convolution tests
- |
git lfs pull --include Data/Baseline/OTB/Images/bfTvConvolutionImageFilter.tif
git lfs pull --include Data/Input/QB_Suburb.png
git lfs pull --include Data/Input/QB_PAN_ROI_1000_100.tif
script:
- ctest -V -S CI/main_ci.cmake -DIMAGE_NAME:string=ubuntu-22.04-fast
- ctest -VV -S CI/FFTW_tests.cmake -DIMAGE_NAME:string=ubuntu-22.04-fast
- ccache -s
legal-check:
extends: .common
only:
- merge_requests
- develop
- /^release-[0-9]+\.[0-9]+$/
stage: precheck
image: $BUILD_IMAGE_REGISTRY/otb-alpine:3.7
variables:
GIT_DEPTH: ""
allow_failure: true
script:
- ./CI/contributors_check.sh
- ./CI/headers_check.py
after_script: []
#------------------------- prepare & build jobs --------------------------------
## Ubuntu superbuild
ubuntu-22-prepare:
extends: .common-prepare
image: $BUILD_IMAGE_REGISTRY/otb-ubuntu-superbuild-base:22.04
script:
- ctest -VV -S CI/prepare_superbuild.cmake -DIMAGE_NAME:string=ubuntu-22.04-llvm-full -DOTB_FULL_BUILD:BOOL=ON
ubuntu-24-prepare:
extends: .common-prepare
image: $BUILD_IMAGE_REGISTRY/otb-ubuntu-superbuild-base:24.04
script:
- ctest -VV -S CI/prepare_superbuild.cmake -DIMAGE_NAME:string=ubuntu-24.04-llvm-full
artifacts:
untracked: false
when: on_success
access: all
expire_in: 24hrs
paths:
- xdk/lib/python3/*
ubuntu-22-build:
extends: .common-build
image: $BUILD_IMAGE_REGISTRY/otb-ubuntu-superbuild-base:22.04
script:
- ctest -V -S CI/main_superbuild.cmake -DIMAGE_NAME:string=ubuntu-22.04-llvm-xdk-full
- mv build/CookBook-*-html.tar.gz . || true
- mv build/Documentation/Cookbook/latex/CookBook-*.pdf . || true
- mv build/Documentation/Doxygen/OTB-Doxygen-*.tar.bz2 . || true
- ctest -V -S CI/superbuild_packaging.cmake -DIMAGE_NAME:string=ubuntu-22.04-llvm-xdk-full -DPACKAGE_BY_MODULE:BOOL=ON
- ctest -V -S CI/superbuild_packaging.cmake -DIMAGE_NAME:string=ubuntu-22.04-llvm-xdk-full -DPACKAGE_BY_MODULE:BOOL=OFF
- mv build_packages/OTB-*.tar.gz . || true
needs:
- job: ubuntu-22-prepare
artifacts: true
# The Ubuntu 24 build is needed for the GDAL bindings with Python 3.12. We package the compiled bindings for Python 3.12;
# this package is downloaded and extracted automatically when a user launches the otbenv profile for the first time
# after unpacking the official archive (which initially contains bindings for Ubuntu 20 and Python 3.8)
python3.12-gdal-bindings:
extends: .common-build
image: $BUILD_IMAGE_REGISTRY/otb-ubuntu-superbuild-base:24.04
only:
refs:
- /^release-[0-9]+\.[0-9]+$/
script:
- |
MAJOR_VERSION=$(egrep "set\(OTB_VERSION_MAJOR" < CMakeLists.txt | egrep -o "[0-9]+")
MINOR_VERSION=$(egrep "set\(OTB_VERSION_MINOR" < CMakeLists.txt | egrep -o "[0-9]+")
PATCH_VERSION=$(egrep "set\(OTB_VERSION_PATCH" < CMakeLists.txt | egrep -o "[0-9]+")
PACKAGE_OTB_VERSION="$MAJOR_VERSION.$MINOR_VERSION.$PATCH_VERSION"
- find xdk/lib/python3/dist-packages/osgeo/ -type f -name "*.so" -printf "%P\n" | tar -czf OTB-$PACKAGE_OTB_VERSION-GDAL-bindings-py312.tar.gz --no-recursion -C xdk/lib/python3/dist-packages/osgeo -T -
needs:
- job: ubuntu-24-prepare
artifacts: true
## RedHat superbuild
redhat-prepare:
extends: .common-prepare
image: $BUILD_IMAGE_REGISTRY/otb-redhat-superbuild-base:8
script:
- ctest -VV -S CI/prepare_superbuild.cmake -DIMAGE_NAME:string=redhat-8-gcc -DOTB_FULL_BUILD:BOOL=ON
redhat-build:
extends: .common-build
image: $BUILD_IMAGE_REGISTRY/otb-redhat-superbuild-base:8
script:
- ctest -V -S CI/main_superbuild.cmake -DIMAGE_NAME:string=redhat-8-gcc
- ctest -V -S CI/superbuild_packaging.cmake -DIMAGE_NAME:string=redhat-8-gcc -DPACKAGE_BY_MODULE:BOOL=ON
- ctest -V -S CI/superbuild_packaging.cmake -DIMAGE_NAME:string=redhat-8-gcc -DPACKAGE_BY_MODULE:BOOL=OFF
# change package names to RedHat
- |
# use "find" to select only packages that matters
rh_packages=$(find build_packages -type f -name "OTB*Linux*.tar.gz")
for pkg in $rh_packages
do
new_name=$(sed "s/-Linux/-Linux_RedHat/g" <<< "$pkg")
mv $pkg $new_name
done
- mv build_packages/OTB-*.tar.gz . || true
needs:
- job: redhat-prepare
artifacts: true
## Windows
.windows-prepare:
extends: .common-prepare
before_script:
# This overrides the previous before_script
- set GIT_LFS_SKIP_SMUDGE=1
- git checkout -f -q $CI_COMMIT_SHA
- set GIT_LFS_SKIP_SMUDGE=0
.windows-build:
extends: .common-build
before_script:
- git lfs fetch origin $CI_COMMIT_SHA
- git checkout -f -q $CI_COMMIT_SHA
# Windows does not use the same executable names as Linux
after_script:
# need to set up the path to be able to use Python
- .\CI\setup_python.ps1 x64
- python -u CI/cdash_handler.py
# - Win10
windows-10-prepare:
extends: .windows-prepare
tags:
- windows10
script:
- .\CI\dev_env.ps1 x64 xdk 10
- buildcache.exe -s
- ctest -C Release -VV -S CI/prepare_superbuild.cmake -DOTB_FULL_BUILD:BOOL=ON -DPython_ROOT_DIR:STRING="C:/tools/Python310-x64"
- buildcache.exe -s
windows-10-build:
extends: .windows-build
tags:
- windows10
script:
- .\CI\dev_env.ps1 x64 otb 10
- buildcache.exe -s
- ctest -V -S CI/main_superbuild.cmake -DPython_ROOT_DIR:STRING="C:/tools/Python310-x64"
- buildcache.exe -s
- .\CI\copy_boost_dlls.ps1
- ctest -V -S CI/superbuild_packaging.cmake -DPACKAGE_BY_MODULE:BOOL=OFF
- move "build_packages\OTB-*.zip" . || dir build_packages
needs:
- job: windows-10-prepare
artifacts: true
#------------------------- QA related jobs -------------------------------------
ubuntu-qa-code-coverage:
extends: .common-build
only:
refs:
- merge_requests
- develop
- /^release-[0-9]+\.[0-9]+$/
variables:
- $SONAR_OTB_TOKEN
image: $BUILD_IMAGE_REGISTRY/otb-ubuntu-superbuild-qa:22.04
script:
- ctest -V -S CI/main_qa.cmake
-DIMAGE_NAME:string=ubuntu-22.04-llvm-qa
-DQA:BOOL=ON
- ./CI/otb_coverage.sh
- saxon-xslt -o build/ctest_report.xml
build/Testing/`head -n 1 build/Testing/TAG`/Test.xml
CI/ctest2junit.xsl
needs:
- job: ubuntu-22-prepare
artifacts: true
ubuntu-qa-static-analysis:
extends: .common-build
only:
refs:
- merge_requests
- develop
- /^release-[0-9]+\.[0-9]+$/
variables:
- $SONAR_OTB_TOKEN
image: $BUILD_IMAGE_REGISTRY/otb-ubuntu-superbuild-qa:22.04
script:
- find Modules -type f -regextype posix-extended -regex '.*\.(h|hxx)$' -exec dirname '{}' \; |
grep -vE '^Modules/ThirdParty/' |
sort -u > header_directories
- cppcheck -j 8 -q --xml --xml-version=2 --enable=all
--language=c++ --std=c++14 --platform=unix64
--includes-file=header_directories
-i Modules/ThirdParty Modules
2> build/cppcheck_report.xml
after_script: []
needs:
- job: ubuntu-22-prepare
artifacts: true
ubuntu-sonar-report:
extends: .common
variables:
GIT_DEPTH: ""
image: $BUILD_IMAGE_REGISTRY/otb-ubuntu-superbuild-qa:22.04
stage: report
only:
refs:
- merge_requests
- develop
- /^release-[0-9]+\.[0-9]+$/
variables:
- $SONAR_OTB_TOKEN
script:
- sonar-scanner -Dproject.settings=sonar-project.properties
-Dsonar.host.url=https://sonar.orfeo-toolbox.org
-Dsonar.login=$SONAR_OTB_TOKEN
-Dsonar.projectKey=$CI_PROJECT_NAMESPACE-$CI_PROJECT_NAME
`test -z "$CI_COMMIT_TAG" || echo "-Dsonar.projectVersion=$CI_COMMIT_TAG"`
-Dsonar.branch.name=$CI_COMMIT_REF_NAME
after_script: []
needs:
- job: ubuntu-qa-code-coverage
artifacts: true
- job: ubuntu-qa-static-analysis
artifacts: true
#---------------------------- Deploy job ---------------------------------------
deploy:
tags:
- deploy
image: $BUILD_IMAGE_REGISTRY/otb-alpine:3.7
stage: deploy
extends: .general
only:
- develop@orfeotoolbox/otb
- /^release-[0-9]+\.[0-9]+$/
before_script:
# Efficiently provision the local LFS cache before checkout
- git lfs fetch origin $CI_COMMIT_SHA
- git checkout -f -q $CI_COMMIT_SHA
script:
- ./CI/deploy.sh $CI_COMMIT_REF_NAME $RC_NUMBER
needs:
- job: ubuntu-22-build
artifacts: true
- job: windows-10-build
artifacts: true
- job: redhat-build
artifacts: true
update-archive:
tags:
- deploy
image: $BUILD_IMAGE_REGISTRY/otb-ubuntu-superbuild-base:22.04
stage: deploy
extends: .general
only:
- develop
- /^release-[0-9]+\.[0-9]+$/@orfeotoolbox/otb
before_script:
# Efficiently provision the local LFS cache before checkout
- git lfs fetch origin $CI_COMMIT_SHA
- git checkout -f -q $CI_COMMIT_SHA
script:
- ./CI/SuperbuildDownloadList.sh download build_archive
- ./CI/deploy-archive.sh build_archive
needs:
# don't push the archive if Superbuild was not successful
- job: ubuntu-22-prepare
artifacts: false
# release-container:
# image: $BUILD_IMAGE_REGISTRY/otb-alpine:3.7
# stage: deploy
# extends: .general
# only:
# refs:
# - tags@orfeotoolbox/otb
# variables:
# - $CI_COMMIT_TAG =~ /^[0-9]+\.[0-9]+\.[0-9]+$/
# script:
# - curl --request POST
# --form token=$K8S_SECRET_RELEASE
# --form ref=master
# --form variables[OTB_TAG]=$CI_COMMIT_TAG
# https://gitlab.orfeo-toolbox.org/api/v4/projects/126/trigger/pipeline
release-docker:
image:
name: gcr.io/kaniko-project/executor:debug
entrypoint: [""]
stage: docker
only:
refs:
- /^release-[0-9]+\.[0-9]+$/
needs:
- job: ubuntu-22-build
artifacts: true
script:
# to authenticate with Docker Hub, kaniko needs a config file located at
# /kaniko/.docker/config.json
- mkdir -p /kaniko/.docker || true
- echo "{\"auths\":{\"https://index.docker.io/v1/\":{\"auth\":\"$(printf "%s:%s" "${DOCKER_HUB_USER}" "${DOCKER_HUB_PWD}" | base64 | tr -d '\n')\"}}}" > /kaniko/.docker/config.json
- /kaniko/executor --cleanup
--verbosity warn
--context $CI_PROJECT_DIR
--dockerfile $CI_PROJECT_DIR/Docker/Dockerfile_CI
--destination orfeotoolbox/otb:$CI_COMMIT_TAG
### Description
Describe what happens and why you think it is a bug
### Steps to reproduce
Describe as precisely as possible how to reproduce the bug, trying to isolate a minimal set of steps. Also describe how reproducible it is (always, random, ...).
### Configuration information
OS, OTB version or tag, information related to build (binaries, superbuild, system libs ...)
/label ~bug
### Target documentation resources
Can be a combination of CookBook, doxygen, blog, applications doc, websites
### Change requested
Describe precisely the changes that are required.
/label ~documentation
Short summary of the requested feature
/label ~feature
We are ready to release OTB version MAJOR.MINOR.PATCH. The following steps need to be done:
## Release Candidate
### 1. Branches
* [ ] **(if major or minor release)** Feature freeze: [create the new release branch](https://gitlab.orfeo-toolbox.org/orfeotoolbox/otb/wikis/Help-for-release-actions#create-release-branch)
* [ ] **(if patch release)** Work on the existing `release-MAJOR.MINOR` branch
* [ ] Make sure the version number in `CMakeLists.txt` is MAJOR.MINOR.PATCH
### 2. Housekeeping
* [ ] In this story, make a list of blocking issues for the release (if any)
* [ ] Update release notes (walk the GitLab MR merged history and log all improvements)
* [ ] Update the date in RELEASE_NOTES.txt
* [ ] Check [SonarQube](https://sonar.orfeo-toolbox.org/dashboard?id=orfeotoolbox-otb)
* [ ] Run Debian [spelling](https://gitlab.orfeo-toolbox.org/orfeotoolbox/otb/wikis/Help-for-release-actions#spelling-check) checker
* [ ] Run shellcheck script from [OTB-Devutils/Scripts/](https://gitlab.orfeo-toolbox.org/orfeotoolbox/otb-devutils/blob/master/Scripts/run_shellcheck.sh)
* [ ] [Update translation](https://gitlab.orfeo-toolbox.org/orfeotoolbox/otb/wikis/Help-for-release-actions#translation-for-monteverdi-mapla) for Monteverdi and Mapla
* [ ] [Sanity check the binary packages](https://gitlab.orfeo-toolbox.org/orfeotoolbox/otb/wikis/Help-for-release-actions#standalone-packages-sanity-check)
* [ ] Windows
* [ ] Linux
* [ ] QGIS plugin
### 3. Actual release
Once all blocking issues are closed, and the previous steps are done:
* [ ] [Tag the release candidate](https://gitlab.orfeo-toolbox.org/orfeotoolbox/otb/wikis/Help-for-release-actions#release-tag)
* [ ] Update GIT_TAG for all official remote modules (if needed)
### 4. Publish and plan next release
* [ ] [Prepare and upload source packages](https://gitlab.orfeo-toolbox.org/orfeotoolbox/otb/wikis/Help-for-release-actions#prepare-and-upload-source-packages)
* [ ] [Promote staging packages](https://gitlab.orfeo-toolbox.org/orfeotoolbox/otb/wikis/Help-for-release-actions#promote-staging-packages)
* [ ] [Update documentation](https://gitlab.orfeo-toolbox.org/orfeotoolbox/otb/wikis/Help-for-release-actions#update-documentation)
* [ ] Cookbook
* [ ] Doxygen
* [ ] [Update the SuperBuild archive](https://gitlab.orfeo-toolbox.org/orfeotoolbox/otb/wikis/Help-for-release-actions#superbuild-archive)
* [ ] Release Candidate announcement on the forum
## Release
### 1. Branches
* [ ] Make sure the version number in `CMakeLists.txt` is MAJOR.MINOR.PATCH
### 2. Housekeeping
* [ ] In this story, make a list of blocking issues for the release (if any)
* [ ] Fix compilation warnings on CI
* [ ] Update release notes (walk the GitLab MR merged history and log all improvements)
* [ ] Update the date in RELEASE_NOTES.txt
* [ ] Check [SonarQube](https://sonar.orfeo-toolbox.org/dashboard?id=orfeotoolbox-otb)
* [ ] Run Debian [spelling](https://gitlab.orfeo-toolbox.org/orfeotoolbox/otb/wikis/Help-for-release-actions#spelling-check) checker
* [ ] Run shellcheck script from [OTB-Devutils/Scripts/](https://gitlab.orfeo-toolbox.org/orfeotoolbox/otb-devutils/blob/master/Scripts/run_shellcheck.sh)
* [ ] [Update translation](https://gitlab.orfeo-toolbox.org/orfeotoolbox/otb/wikis/Help-for-release-actions#translation-for-monteverdi-mapla) for Monteverdi and Mapla
* [ ] [Sanity check the binary packages](https://gitlab.orfeo-toolbox.org/orfeotoolbox/otb/wikis/Help-for-release-actions#standalone-packages-sanity-check)
* [ ] Windows
* [ ] Linux
* [ ] QGIS plugin
### 3. Actual release
Once all blocking issues are closed, and the previous steps are done:
* [ ] [Tag the release](https://gitlab.orfeo-toolbox.org/orfeotoolbox/otb/wikis/Help-for-release-actions#release-tag)
* [ ] **(if major or minor release)**: Merge the release into develop
* [ ] **(if it's the latest release)**: Merge the release into master
* [ ] **(if patch release)**: Backport fixes
* [ ] Update GIT_TAG for all official remote modules (if needed)
### 4. Publish and plan next release
* [ ] [Prepare and upload source packages](https://gitlab.orfeo-toolbox.org/orfeotoolbox/otb/wikis/Help-for-release-actions#prepare-and-upload-source-packages)
* [ ] [Promote staging packages](https://gitlab.orfeo-toolbox.org/orfeotoolbox/otb/wikis/Help-for-release-actions#promote-staging-packages)
* [ ] [Update documentation](https://gitlab.orfeo-toolbox.org/orfeotoolbox/otb/wikis/Help-for-release-actions#update-documentation)
* [ ] Cookbook
* [ ] Doxygen
* [ ] Update the WordPress "Home" and "Download" pages
* [ ] Add the release to "Releases" page of the project
* [ ] [Update the SuperBuild archive](https://gitlab.orfeo-toolbox.org/orfeotoolbox/otb/wikis/Help-for-release-actions#superbuild-archive)
* [ ] Upload OTB source archive to [Zenodo](https://zenodo.org/) to create a unique Digital Object Identifier (DOI)
* [ ] Release announcement
* [ ] On the [forum](https://forum.orfeo-toolbox.org/)
* [ ] On the [blog](https://www.orfeo-toolbox.org/blog/)
* [ ] Update the Wikipedia page with the version number and release date (https://en.wikipedia.org/wiki/Orfeo_toolbox)
* [ ] Forward announcement to news_item@osgeo.org ([OSGeo news](https://www.osgeo.org/foundation-news/))
* [ ] Remove public branches related to MRs or bugfixes merged before the release
/label ~story
### What changes will be made and why they would make a better Orfeo ToolBox?
#### High level description
#### Risks and benefits
#### Alternatives for implementations
### Who will be developing the proposed changes?
#### Summary
Give a short summary of the changes.
#### Rationale
Explain the rationale for the changes (possible link to a Request For Comments or to an issue).
#### Implementation Details
<!---
##### Classes and files
Give an overview of the implementation: main changes made to classes, files and modules. Do not paste the complete diff, as it is already available in the merge request.
-->
<!---
##### Applications
Describe any changes made to existing applications, or new applications that have been added.
-->
<!---
##### Tests
Describe the testing strategy for new features.
- [ ] validated through ... unit tests
-->
<!---
##### Documentation
List or link documentation modifications that were made (doxygen, example, Software Guide, application documentation, CookBook).
-->
#### Additional notes
<!--- List remaining open issues if any, and additional notes. -->
#### Copyright
The copyright owner is *COPYRIGHT OWNER (OR OWNER'S AGENT)* and has signed the ORFEO ToolBox Contributor License Agreement.
<hr>
***Check before merging:***
- All discussions are resolved
- At least 2 :thumbsup: votes from core developers, no :thumbsdown: vote.
- The feature branch is (reasonably) up-to-date with the base branch
- Dashboard is green
- Copyright owner has signed the ORFEO ToolBox Contributor License Agreement
- Optionally, run `git diff develop... -U0 --no-color | clang-format-diff.py -p1 -i` on latest changes and commit
syntax: glob
*~
.*.swp
*.orig
*.pyc
.\#*
\#*\#
tags
*vim*
*.template
68eebc1b170a636794feda66523e6c0135ebfef2 0 iEYEABECAAYFAkmvX3cACgkQwRJnCg+r8KFSCgCfVYSnx2ev+hIlpbM/arxzLVf3KvsAn3nqaJXaj62RcNUIjv+qcNWlJ5WG
e0fdaae06d33190d9b17356c5d51f878fd8fa0b6 0 iEYEABECAAYFAkoNOJEACgkQwRJnCg+r8KHMrACeJMcezNBgXjfx57UNdd/WfXw8kf8An1VEqFVTeG/wUd6QiiVy2icgfLGl
c07af3906a803b43018e1a7e15db5c11f54d14b4 0 iEYEABECAAYFAkw68W0ACgkQwRJnCg+r8KGhjgCfR4LL9xW8xYGFXWEpbxSSRsCFVhIAn2nPMTqZvUW0cQvU6pkJD1+eENNj
Alexia Mondot <alexia.mondot@c-s.fr>
Amit Kulkarni <amitkulz@gmail.com>
Angelos Tzotsos <tzotsos@gmail.com>
Antoine Regimbeau <antoine.regimbeau@c-s.fr>
Antoine Regimbeau <antoine.regimbeau@c-s.fr> aregimbe <antoine.regimbeau@c-s.fr>
Antoine Regimbeau <antoine.regimbeau@c-s.fr> aregimbeau <antoine.regimbeau@c-s.fr>
Arnaud Jaen <arnaud.jaen@c-s.fr>
Arthur Vincent <arthur.vincent@c-s.fr>
Arthur Vincent <arthur.vincent@c-s.fr> Arthur Vincent <vincenta@cesbio.cnes.fr>
Aurelien Bricier <aurelien.bricier@c-s.fr>
Aurélie Emilien <aurelie.emilien@c-s.fr>
Aurélie Emilien <aurelie.emilien@c-s.fr> Aurelie Emilien <aurelie.emilien@c-s.fr>
Bas Couwenberg <sebastic@debian.org>
Bas Couwenberg <sebastic@debian.org> Bas Couwenberg <sebastic@xs4all.nl>
Brad Hards <bradh@frogmouth.net>
Carole Amiot <carole.amiot@cnes.fr>
Carole Amiot <carole.amiot@cnes.fr> Carole Amiot <carole.amiot@thales-services.fr>
Caroline Ruffel <caroline.ruffel@c-s.fr>
Charles Peyrega <charles.peyrega@c-s.fr>
Chia Aik Song <crscas@nus.edu.sg>
Christophe Palmann <christophe.palmann@c-s.fr>
Conrad Bielski <conrad.bielski@yahoo.com>
Cyrille Valladeau <cyrille.valladeau@c-s.fr>
Cédric Traizet <cedric.traizet@c-s.fr> Cedric <cedric.traizet@c-s.fr>
Cédric Traizet <cedric.traizet@c-s.fr> Cédric Traizet <cedric.traizet@csgroup.eu>
Cédric Traizet <cedric.traizet@c-s.fr> Cédric Traizet <traizetc@cesbio.cnes.fr>
Cédric Traizet <cedric.traizet@c-s.fr> Traizet Cedric <cedric.traizet@cesbio.cnes.fr>
Cédric Traizet <cedric.traizet@c-s.fr> ctraizet <cedric.traizet@c-s.fr>
Daniel McInerney <daniel.o.mcinerney@gmail.com>
Daniel McInerney <daniel.o.mcinerney@gmail.com> dmci <daniel.o.mcinerney@gmail.com>
Daphne Deleflie <daphne.deleflie@thalesgroup.com>
David Youssefi <david.youssefi@cnes.fr>
Emmanuel Christophe <emmanuel.christophe@gmail.com>
Emmanuel Christophe <emmanuel.christophe@gmail.com> Emmanuel Christophe <emmanuel.christophe@nus.edu.sg>
Emmanuel Christophe <emmanuel.christophe@gmail.com> Emmanuel Christophe <emmanuel.christophe@orfeo-toolbox.org>
Emmanuelle Sarrazin <emmanuelle.sarrazin@cnes.fr>
Emmanuelle Sarrazin <emmanuelle.sarrazin@cnes.fr> esarrazin <emmanuelle.sarrazin@cnes.fr>
Emmanuelle Sarrazin <emmanuelle.sarrazin@cnes.fr> sarrazin <emmanuelle.sarrazin@cnes.fr>
Etienne Bougoin <etienne.bougoin@c-s.fr>
Gaëlle Usseglio <gaelle.usseglio@cnes.fr> Gaëlle USSEGLIO <gaelle.usseglio@cnes.fr>
Grégoire Mercier <gregoire.mercier@telecom-bretagne.eu> Gregoire Mercier <gregoire.mercier@telecom-bretagne.eu>
Guilhem Bonnefille <guilhem.bonnefille@c-s.fr>
Guillaume Borrut <guillaume.borrut@c-s.fr>
Guillaume Pasero <guillaume.pasero@c-s.fr>
Guillaume Pernot <guillaume.pernot@c-s.fr>
Guillaume Pernot <guillaume.pernot@c-s.fr> gpernot <guillaume.pernot@c-s.fr>
janestar <janestar92@163.com>
Jee Roen <jeroen.staab@posteo.de>
Jonathan Guinet <jonathan.guinet@c-s.fr>
Jordi Inglada <jordi.inglada@cesbio.eu>
Jordi Inglada <jordi.inglada@cesbio.eu> Jordi Inglada <jordi.inglada@cesbio.cnes.fr>
Jordi Inglada <jordi.inglada@cesbio.eu> Jordi Inglada <jordi.inglada@orfeo-toolbox.org>
Julie Brossard <julie.brossard@c-s.fr>
Julien Malik <julien.malik@c-s.fr>
Julien Malik <julien.malik@c-s.fr> Julien Malik <julien.malik@gmail.com>
Julien Malik <julien.malik@c-s.fr> Julien Malik <julien.malik@orfeo-toolbox.org>
Julien Michel <julien.michel@cnes.fr>
Julien Michel <julien.michel@cnes.fr> Julien Michel <julien.michel@c-s.fr>
Julien Michel <julien.michel@cnes.fr> Julien Michel <julien.michel@orfeo-toolbox.org>
Julien Osman <julien.osman@csgroup.eu> Julien Osman <julien.osman@c-s.fr>
Laurențiu Nicola <lnicola@dend.ro> Laurentiu Nicola <lnicola@dend.ro>
Laurențiu Nicola <lnicola@dend.ro> Laurențiu Nicola <grayshade@gmail.com>
Luc Hermitte <luc.hermitte@csgroup.eu> Luc Hermitte <luc.hermitte@cnes.fr>
Luc Hermitte <luc.hermitte@csgroup.eu> Luc Hermitte <luc.hermitte@c-s.fr>
Luc Hermitte <luc.hermitte@csgroup.eu> Luc Hermitte <9-lhermitte@users.noreply.gitlab.orfeo-toolbox.org>
Ludovic Hussonnois <ludovic.hussonnois@c-s.fr>
Manuel Grizonnet <manuel.grizonnet@cnes.fr>
Manuel Grizonnet <manuel.grizonnet@cnes.fr> Grizonnet Manuel <manuel.grizonnet@cnes.fr>
Manuel Grizonnet <manuel.grizonnet@cnes.fr> Manuel Grizonnet <manuel.grizonnet@gmail.com>
Manuel Grizonnet <manuel.grizonnet@cnes.fr> Manuel Grizonnet <manuel.grizonnet@orfeo-toolbox.org>
Marina Bertolino <marina.bertolino@c-s.fr>
Mathieu Deltorre <mathieu.deltorre@c-s.fr>
Mickaël Savinaud <mickael.savinaud@c-s.fr>
Mickaël Savinaud <mickael.savinaud@c-s.fr> Mickael Savinaud <mickael.savinaud@c-s.fr>
Mickaël Savinaud <mickael.savinaud@c-s.fr> msavinaud <mickael.savinaud@c-s.fr>
OTB Bot <otbbot@orfeo-toolbox.org>
Otmane Lahlou <otmane.lahlou@c-s.fr>
Patrick Imbo <patrick.imbo@c-s.fr>
Rashad Kanavath <rashad.kanavath@c-s.fr>
Rashad Kanavath <rashad.kanavath@c-s.fr> Rashad Kanavath <mohammed.rashad-km@cnes.fr>
Rashad Kanavath <rashad.kanavath@c-s.fr> Rashad Kanavath <mohammedrashadkm@gmail.com>
Rashad Kanavath <rashad.kanavath@c-s.fr> Rashad Kanavath <rashad.kanavath.email.com>
Rashad Kanavath <rashad.kanavath@c-s.fr> Rashad Kanavath <rashad.kanvath@c-s.fr>
Rashad Kanavath <rashad.kanavath@c-s.fr> Rashad M <rashad.kanavath@c-s.fr>
Romain Garrigues <romain.garrigues@c-s.fr>
Rosa Ruiloba <rosario.ruiloba@c-s.fr>
Rémi Cresson <remi.cresson@irstea.fr> Remi Cresson <remi.cresson@irstea.fr>
Rémi Cresson <remi.cresson@irstea.fr> Rémi <remi.cresson@teledetection.fr>
Rémi Cresson <remi.cresson@irstea.fr> remi <remi.cresson@irstea.fr>
Rémi Cresson <remi.cresson@irstea.fr> remi cresson <remi.cresson@teledetection.fr>
Rémi Cresson <remi.cresson@irstea.fr> remicres <remi.cresson@teledetection.fr>
Rémi Cresson <remi.cresson@irstea.fr> remi cresson <remi.cresson@inrae.fr>
Rémi Cresson <remi.cresson@irstea.fr> Rémi <remi.cresson@inrae.fr>
Sebastien Harasse <sebastien.harasse@c-s.fr>
Stephane Albert <stephane.albert@c-s.fr>
Stephane Albert <stephane.albert@c-s.fr> Stephane ALBERT <stephane.albert@c-s.fr>
Stephane Albert <stephane.albert@c-s.fr> Stephane Albert <salbert@dora.si.c-s.fr>
Sylvain Poulain <kikislater@users.noreply.github.com> Sylvain POULAIN <kikislater@users.noreply.github.com>
Sébastien Dinot <sebastien.dinot@c-s.fr>
Sébastien Dinot <sebastien.dinot@c-s.fr> Achille Talon <atalon@dinot.net>
Sébastien Dinot <sebastien.dinot@c-s.fr> Sebastien Dinot <sebastien.dinot@c-s.fr>
Thomas Feuvrier <thomas.feuvrier@c-s.fr>
Tishampati Dhar <devnull@localhost>
Victor Poughon <victor.poughon@cnes.fr>
Vincent Poulain <vincent.poulain@thalesgroup.com>
Yannick Tanguy <yannick.tanguy@cnes.fr> Yannick TANGUY <tanguyy@visu01.sis.cnes.fr>
Yannick Tanguy <yannick.tanguy@cnes.fr> Yannick TANGUY <yannick.tanguy@cnes.fr>
Yannick Tanguy <yannick.tanguy@cnes.fr> ytanguy <yannick.tanguy@cnes.fr>
Yin Tiangang <crsyt@nus.edu.sg>
Thibaut ROMAIN <thibaut.romain@csgroup.eu>
Tristan Laurent <tristan.laurent@csgroup.eu>
include(OTBWrapperMacros)
# This variable will contain the list of all application names,
# useful for batch testing
set(OTB_APPLICATIONS_NAME_LIST "" CACHE STRING "List of all application names" FORCE)
mark_as_advanced(OTB_APPLICATIONS_NAME_LIST)
add_subdirectory(CartographicDBValidation)
add_subdirectory(ChangeDetection)
add_subdirectory(Classification)
add_subdirectory(DimensionalityReduction)
add_subdirectory(DisparityMap)
add_subdirectory(FeatureExtraction)
add_subdirectory(Hyperspectral)
add_subdirectory(Projections)
add_subdirectory(Radiometry)
add_subdirectory(Rasterization)
add_subdirectory(Segmentation)
add_subdirectory(Test)
add_subdirectory(Utils)
OTB_CREATE_APPLICATION(NAME ComputePolylineFeatureFromImage
SOURCES otbComputePolylineFeatureFromImage.cxx
LINK_LIBRARIES OTBBasicFilters OTBLearning)
OTB_CREATE_APPLICATION(NAME DSFuzzyModelEstimation
SOURCES otbDSFuzzyModelEstimation.cxx
LINK_LIBRARIES OTBIO OTBFeatureExtraction OTBFuzzy)
OTB_CREATE_APPLICATION(NAME VectorDataDSValidation
SOURCES otbVectorDataDSValidation.cxx
LINK_LIBRARIES OTBIO OTBFeatureExtraction OTBFuzzy)
/*=========================================================================
Program: ORFEO Toolbox
Language: C++
Date: $Date$
Version: $Revision$
Copyright (c) Centre National d'Etudes Spatiales. All rights reserved.
See OTBCopyright.txt for details.
This software is distributed WITHOUT ANY WARRANTY; without even
the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
PURPOSE. See the above copyright notices for more information.
=========================================================================*/
#include <iostream>
#include "otbVectorImage.h"
#include "otbWrapperApplication.h"
#include "otbWrapperApplicationFactory.h"
#include "otbVectorData.h"
#include "otbVectorDataFileReader.h"
#include "otbVectorDataFileWriter.h"
#include "itkPreOrderTreeIterator.h"
#include "otbVectorDataIntoImageProjectionFilter.h"
#include "otbVectorDataProjectionFilter.h"
#include "otbVectorDataTransformFilter.h"
#include "itkAffineTransform.h"
#include "otbParserConditionDataNodeFeatureFunction.h"
#include "otbNDVIDataNodeFeatureFunction.h"
// Elevation handler
#include "otbWrapperElevationParametersHandler.h"
namespace otb
{
namespace Wrapper
{
class ComputePolylineFeatureFromImage: public Application
{
public:
/** Standard class typedefs. */
typedef ComputePolylineFeatureFromImage Self;
typedef Application Superclass;
typedef itk::SmartPointer<Self> Pointer;
typedef itk::SmartPointer<const Self> ConstPointer;
// Images
typedef FloatVectorImageType::PixelType PixelType;
typedef FloatVectorImageType ImageType;
// VectorData
typedef VectorData<> VectorDataType;
typedef VectorDataType::DataNodeType DataNodeType;
typedef DataNodeType::ValuePrecisionType PrecisionType;
typedef DataNodeType::PrecisionType CoordRepType;
typedef itk::PreOrderTreeIterator<VectorDataType::DataTreeType> TreeIteratorType;
typedef VectorDataIntoImageProjectionFilter<VectorDataType, ImageType> VectorDataIntoImageProjType;
typedef VectorDataProjectionFilter<VectorDataType, VectorDataType> VectorDataProjectionFilterType;
typedef ParserConditionDataNodeFeatureFunction<ImageType, CoordRepType, PrecisionType>
ParserConditionFeatureFunctionType;
/** Standard macro */
itkNewMacro(Self);
itkTypeMacro(ComputePolylineFeatureFromImage, otb::Application);
private:
void DoInit()
{
SetName("ComputePolylineFeatureFromImage");
SetDescription("This application compute for each studied polyline, contained in the input VectorData, the choosen descriptors.");
SetDocName("Compute Polyline Feature From Image");
SetDocLongDescription("The first step in the classifier fusion based validation is to compute, for each studied polyline, the choosen descriptors. ");
SetDocLimitations("Since it does not rely on streaming process, take care of the size of input image before launching application.");
SetDocAuthors("OTB-Team");
SetDocSeeAlso(" ");
AddDocTag(Tags::FeatureExtraction);
AddParameter(ParameterType_InputImage, "in", "Input Image");
SetParameterDescription("in", "An image to compute the descriptors on.");
AddParameter(ParameterType_InputVectorData, "vd", "Vector Data");
SetParameterDescription("vd", "Vector data containing the polylines where the features will be computed.");
// Elevation
ElevationParametersHandler::AddElevationParameters(this, "elev");
AddParameter(ParameterType_String, "expr", "Feature expression");
SetParameterDescription("expr", "The feature formula (b1 < 0.3) where b1 is the standard name of input image first band");
AddParameter(ParameterType_String, "field", "Feature name");
SetParameterDescription("field", "The field name corresponding to the feature codename (NONDVI, ROADSA...)");
AddParameter(ParameterType_OutputVectorData, "out", "Output Vector Data");
SetParameterDescription("out", "The output vector data containing polylines with a new field");
// Doc example parameter settings
SetDocExampleParameterValue("in", "NDVI.TIF");
SetDocExampleParameterValue("vd", "roads_ground_truth.shp");
SetDocExampleParameterValue("expr", "\"(b1 > 0.4)\"");
SetDocExampleParameterValue("field", "NONDVI");
SetDocExampleParameterValue("out", "PolylineFeatureFromImage_LI_NONDVI_gt.shp");
}
void DoUpdateParameters()
{
// Nothing to do here : all parameters are independent
}
void DoExecute()
{
// Vector Data into Image projection
FloatVectorImageType::Pointer inImage = GetParameterImage("in");
inImage->UpdateOutputInformation();
try
{
inImage->Update();
}
catch (...)
{
itkGenericExceptionMacro(<< "Error during image update; maybe your image is too big.");
}
otbAppLogDEBUG( << "Starting PolylineFeature extraction process" )
// Vector Data into Image projection
//// Read the Vectordata
VectorDataType* inVectorData = GetParameterVectorData("vd");
inVectorData->Update();
//// Projection
VectorDataIntoImageProjType::Pointer vprojIm = VectorDataIntoImageProjType::New();
vprojIm->SetInputVectorData(inVectorData);
vprojIm->SetInputImage(inImage);
// Elevation through the elevation handler
if (ElevationParametersHandler::IsElevationEnabled(this, "elev"))
{
switch(ElevationParametersHandler::GetElevationType(this, "elev"))
{
case Elevation_DEM:
{
vprojIm->SetDEMDirectory(ElevationParametersHandler::GetDEMDirectory(this, "elev"));
vprojIm->SetGeoidFile(ElevationParametersHandler::GetGeoidFile(this, "elev"));
}
break;
case Elevation_Average:
{
vprojIm->SetAverageElevation(ElevationParametersHandler::GetAverageElevation(this, "elev"));
}
break;
// Commented out because using a TIFF file is not implemented yet
// case Elevation_Tiff:
// {
// }
// break;
}
}
vprojIm->SetUseOutputSpacingAndOriginFromImage(true); // we want index as input;
vprojIm->Update();
// Add description
ParserConditionFeatureFunctionType::Pointer vdescriptor = ParserConditionFeatureFunctionType::New();
vdescriptor->SetExpression(GetParameterString("expr"));
vdescriptor->SetInputImage(inImage);
m_OutVectorData = VectorDataType::New();
// Retrieving root node
DataNodeType::Pointer root = m_OutVectorData->GetDataTree()->GetRoot()->Get();
// Create the document node
DataNodeType::Pointer document = DataNodeType::New();
document->SetNodeType(otb::DOCUMENT);
// Adding the layer to the data tree
m_OutVectorData->GetDataTree()->Add(document, root);
// Create the folder node
DataNodeType::Pointer folder = DataNodeType::New();
folder->SetNodeType(otb::FOLDER);
// Adding the layer to the data tree
m_OutVectorData->GetDataTree()->Add(folder, document);
TreeIteratorType itVector(vprojIm->GetOutput()->GetDataTree());
itVector.GoToBegin();
while (!itVector.IsAtEnd())
{
vdescriptor->SetInputImage(inImage);
if (!itVector.Get()->IsRoot() && !itVector.Get()->IsDocument() && !itVector.Get()->IsFolder())
{
DataNodeType::Pointer currentGeometry = itVector.Get();
currentGeometry->SetFieldAsDouble(GetParameterString("field"),
(double) (vdescriptor->Evaluate(*(currentGeometry.GetPointer()))[0]));
m_OutVectorData->GetDataTree()->Add(currentGeometry, folder);
}
++itVector;
}
/*
* Reprojection of the output VectorData
*
* The output of VectorDataToRoadDescription is in image index coordinates
*
* 3 cases :
* - input image has no geo-information : pass through
* - input image is in cartographic projection : apply image spacing and origin, and set the ProjectionRef
* - input image is in sensor model geometry : reproject in WGS84
*
*/
std::string projRef = inImage->GetProjectionRef();
ImageKeywordlist kwl;
itk::ExposeMetaData<ImageKeywordlist>(inImage->GetMetaDataDictionary(), MetaDataKey::OSSIMKeywordlistKey, kwl);
if (!projRef.empty())
{
// image is in cartographic projection
// apply spacing and origin + set projection WKT
// The VectorData in output of the chain is in image index coordinate,
// and the projection information is lost
// Apply an affine transform to apply image origin and spacing,
// and arbitrarily set the ProjectionRef to the input image ProjectionRef
typedef itk::AffineTransform<VectorDataType::PrecisionType, 2> TransformType;
typedef otb::VectorDataTransformFilter<VectorDataType, VectorDataType> VDTransformType;
TransformType::ParametersType params;
params.SetSize(6);
params[0] = inImage->GetSpacing()[0];
params[1] = 0;
params[2] = 0;
params[3] = inImage->GetSpacing()[1];
params[4] = inImage->GetOrigin()[0];
params[5] = inImage->GetOrigin()[1];
TransformType::Pointer transform = TransformType::New();
transform->SetParameters(params);
VDTransformType::Pointer vdTransform = VDTransformType::New();
vdTransform->SetTransform(transform);
vdTransform->SetInput(m_OutVectorData);
vdTransform->Update();
m_ProjectedVectorData = vdTransform->GetOutput();
m_ProjectedVectorData->SetProjectionRef(inImage->GetProjectionRef());
}
else
if (kwl.GetSize() > 0)
{
// image is in sensor model geometry
// Reproject VectorData in image projection
VectorDataProjectionFilterType::Pointer vproj = VectorDataProjectionFilterType::New();
vproj->SetInput(m_OutVectorData);
vproj->SetOutputKeywordList(inImage->GetImageKeywordlist());
vproj->SetOutputProjectionRef(inImage->GetProjectionRef());
vproj->SetOutputOrigin(inImage->GetOrigin());
vproj->SetOutputSpacing(inImage->GetSpacing());
// Elevation through the elevation handler
switch(ElevationParametersHandler::GetElevationType(this, "elev"))
{
case Elevation_DEM:
{
vproj->SetDEMDirectory(ElevationParametersHandler::GetDEMDirectory(this, "elev"));
vproj->SetGeoidFile(ElevationParametersHandler::GetGeoidFile(this, "elev"));
}
break;
case Elevation_Average:
{
vproj->SetAverageElevation(ElevationParametersHandler::GetAverageElevation(this, "elev"));
}
break;
// Commented out because using a TIFF file is not implemented yet
// case Elevation_Tiff:
// {
// }
// break;
}
vproj->Update();
m_ProjectedVectorData = vproj->GetOutput();
}
else
{
// no georeferencing information
m_ProjectedVectorData = m_OutVectorData;
}
// Set the output vectorData
SetParameterOutputVectorData("out", m_ProjectedVectorData);
};
VectorDataType::Pointer m_ProjectedVectorData;
VectorDataType::Pointer m_OutVectorData;
};
}
}
OTB_APPLICATION_EXPORT(otb::Wrapper::ComputePolylineFeatureFromImage)
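// Usage sketch (not part of the source; assembled from the doc example values above,
// assuming the standard otbcli_<AppName> command-line wrapper):
//   otbcli_ComputePolylineFeatureFromImage -in NDVI.TIF -vd roads_ground_truth.shp
//     -expr "(b1 > 0.4)" -field NONDVI -out PolylineFeatureFromImage_LI_NONDVI_gt.shp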
/*=========================================================================
Program: ORFEO Toolbox
Language: C++
Date: $Date$
Version: $Revision$
Copyright (c) Centre National d'Etudes Spatiales. All rights reserved.
See OTBCopyright.txt for details.
This software is distributed WITHOUT ANY WARRANTY; without even
the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
PURPOSE. See the above copyright notices for more information.
=========================================================================*/
#include <iostream>
#include "otbWrapperApplication.h"
#include "otbWrapperApplicationFactory.h"
#include "otbWrapperStringListParameter.h"
#include "otbVectorData.h"
#include "otbImageToEnvelopeVectorDataFilter.h"
#include "otbVectorDataToRandomLineGenerator.h"
#include "itkAmoebaOptimizer.h"
#include "otbVectorDataToDSValidatedVectorDataFilter.h"
#include "otbStandardDSCostFunction.h"
#include "otbFuzzyDescriptorsModelManager.h"
namespace otb
{
namespace Wrapper
{
#include "itkCommand.h"
class CommandIterationUpdate : public itk::Command
{
public:
typedef CommandIterationUpdate Self;
typedef itk::Command Superclass;
typedef itk::SmartPointer<Self> Pointer;
itkNewMacro( Self );
protected:
CommandIterationUpdate() {};
public:
typedef itk::AmoebaOptimizer OptimizerType;
typedef const OptimizerType * OptimizerPointer;
void Execute(itk::Object *caller, const itk::EventObject & event)
{
Execute( (const itk::Object *)caller, event);
}
void Execute(const itk::Object * object, const itk::EventObject & event)
{
OptimizerPointer optimizer =
dynamic_cast< OptimizerPointer >( object );
if( ! itk::IterationEvent().CheckEvent( &event ) )
{
return;
}
std::ostringstream message;
message << optimizer->GetCachedValue() << " ";
message << optimizer->GetCachedCurrentPosition() << std::endl;
std::cout<<message.str()<<std::endl;
}
};
class DSFuzzyModelEstimation: public Application
{
public:
/** Standard class typedefs. */
typedef DSFuzzyModelEstimation Self;
typedef Application Superclass;
typedef itk::SmartPointer<Self> Pointer;
typedef itk::SmartPointer<const Self> ConstPointer;
typedef VectorData<double> VectorDataType;
typedef VectorDataType::DataTreeType DataTreeType;
typedef VectorDataType::DataNodeType DataNodeType;
typedef VectorDataType::ValuePrecisionType PrecisionType;
typedef VectorDataType::PrecisionType CoordRepType;
typedef otb::Wrapper::StringListParameter::StringListType StringListType;
typedef otb::VectorDataToDSValidatedVectorDataFilter<VectorDataType, PrecisionType>
ValidationFilterType;
typedef otb::StandardDSCostFunction<ValidationFilterType> CostFunctionType;
typedef CostFunctionType::LabelSetType LabelSetType;
typedef itk::AmoebaOptimizer OptimizerType;
typedef otb::FuzzyDescriptorsModelManager::DescriptorsModelType
DescriptorsModelType;
typedef otb::FuzzyDescriptorsModelManager::DescriptorListType
DescriptorListType;
typedef itk::PreOrderTreeIterator<VectorDataType::DataTreeType>
TreeIteratorType;
/** Standard macro */
itkNewMacro(Self);
itkTypeMacro(DSFuzzyModelEstimation, otb::Application);
private:
void DoInit()
{
SetName("DSFuzzyModelEstimation");
SetDescription("Estimate feature fuzzy model parameters using 2 vector data (ground truth samples and wrong samples).");
SetDocName("Fuzzy Model estimation");
SetDocLongDescription("Estimate feature fuzzy model parameters using 2 vector data (ground truth samples and wrong samples).");
SetDocLimitations("None.");
SetDocAuthors("OTB-Team");
SetDocSeeAlso(" ");
AddDocTag(Tags::FeatureExtraction);
AddParameter(ParameterType_InputVectorData, "psin", "Input Positive Vector Data");
SetParameterDescription("psin", "Ground truth vector data for positive samples");
AddParameter(ParameterType_InputVectorData, "nsin", "Input Negative Vector Data");
SetParameterDescription("nsin", "Ground truth vector data for negative samples");
AddParameter(ParameterType_StringList, "belsup", "Belief Support");
SetParameterDescription("belsup", "Dempster Shafer study hypothesis to compute belief");
AddParameter(ParameterType_StringList, "plasup", "Plausibility Support");
SetParameterDescription("plasup", "Dempster Shafer study hypothesis to compute plausibility");
AddParameter(ParameterType_String, "cri", "Criterion");
SetParameterDescription("cri", "Dempster Shafer criterion (by default (belief+plausibility)/2)");
MandatoryOff("cri");
SetParameterString("cri","((Belief + Plausibility)/2.)");
AddParameter(ParameterType_Float,"wgt","Weighting");
SetParameterDescription("wgt","Coefficient between 0 and 1 to promote undetection or false detections (default 0.5)");
MandatoryOff("wgt");
SetParameterFloat("wgt", 0.5);
AddParameter(ParameterType_InputFilename,"initmod","initialization model");
SetParameterDescription("initmod","Initialization model (xml file) to be used. If the xml initialization model is set, the descriptor list is not used (specified using the option -desclist)");
MandatoryOff("initmod");
AddParameter(ParameterType_StringList, "desclist","Descriptor list");
SetParameterDescription("desclist","List of the descriptors to be used in the model (must be specified to perform an automatic initialization)");
MandatoryOff("desclist");
SetParameterString("desclist","");
AddParameter(ParameterType_Int,"maxnbit","Maximum number of iterations");
MandatoryOff("maxnbit");
SetParameterDescription("maxnbit","Maximum number of optimizer iteration (default 200)");
SetParameterInt("maxnbit", 200);
AddParameter(ParameterType_Empty,"optobs","Optimizer Observer");
SetParameterDescription("optobs","Activate the optimizer observer");
MandatoryOff("optobs");
AddParameter(ParameterType_OutputFilename,"out","Output filename");
SetParameterDescription("out","Output model file name (xml file) contains the optimal model to perform informations fusion.");
// Doc example parameter settings
SetDocExampleParameterValue("psin", "cdbTvComputePolylineFeatureFromImage_LI_NOBUIL_gt.shp");
SetDocExampleParameterValue("nsin", "cdbTvComputePolylineFeatureFromImage_LI_NOBUIL_wr.shp");
SetDocExampleParameterValue("belsup", "\"ROADSA\"");
SetDocExampleParameterValue("plasup", "\"NONDVI\" \"ROADSA\" \"NOBUIL\"");
SetDocExampleParameterValue("initmod", "Dempster-Shafer/DSFuzzyModel_Init.xml");
SetDocExampleParameterValue("maxnbit", "4");
SetDocExampleParameterValue("optobs", "true");
SetDocExampleParameterValue("out", "DSFuzzyModelEstimation.xml");
}
void DoUpdateParameters()
{
// Nothing to do here : all parameters are independent
// .. //
}
void DoExecute()
{
//Instantiate
m_CostFunction = CostFunctionType::New();
m_Optimizer = OptimizerType::New();
// Read the vector data
VectorDataType::Pointer psVectorData = GetParameterVectorData("psin");
psVectorData->Update();
// Negative samples come from the "nsin" parameter
VectorDataType::Pointer nsVectorData = GetParameterVectorData("nsin");
nsVectorData->Update();
// Load the initial descriptor model
DescriptorListType descList;
DescriptorsModelType descMod;
if (IsParameterEnabled("initmod"))
{
std::string descModFile = GetParameterString("initmod");
descMod = FuzzyDescriptorsModelManager::Read(descModFile.c_str());
descList = FuzzyDescriptorsModelManager::GetDescriptorList(descMod);
}
else
{
StringListType stringList = GetParameterStringList("desclist");
int nbsdDesc = stringList.size();
for (int i = 0; i < nbsdDesc; i++)
{
descList.push_back(stringList[i]);
}
}
m_CostFunction->SetDescriptorList(descList);
// Compute statistics of all the descriptors
std::vector<double> accFirstOrderPS, accSecondOrderPS, minPS, maxPS;
accFirstOrderPS.resize(descList.size());
accSecondOrderPS.resize(descList.size());
std::fill(accFirstOrderPS.begin(), accFirstOrderPS.end(), 0);
std::fill(accSecondOrderPS.begin(), accSecondOrderPS.end(), 0);
minPS.resize(descList.size());
maxPS.resize(descList.size());
// Initialize min/max accumulators like the negative-sample ones below (descriptors are expected in [0, 1])
std::fill(minPS.begin(), minPS.end(), 1);
std::fill(maxPS.begin(), maxPS.end(), 0);
unsigned int accNbElemPS = 0;
TreeIteratorType itVectorPS(psVectorData->GetDataTree());
for (itVectorPS.GoToBegin(); !itVectorPS.IsAtEnd(); ++itVectorPS)
{
if (!itVectorPS.Get()->IsRoot() && !itVectorPS.Get()->IsDocument() && !itVectorPS.Get()->IsFolder())
{
DataNodeType::Pointer currentGeometry = itVectorPS.Get();
for (unsigned int i = 0; i < descList.size(); ++i)
{
double desc = currentGeometry->GetFieldAsDouble(descList[i]);
accFirstOrderPS[i] += desc;
accSecondOrderPS[i] += desc * desc;
if (desc < minPS[i])
{
minPS[i] = desc;
}
if (desc > maxPS[i])
{
maxPS[i] = desc;
}
}
accNbElemPS++;
}
}
TreeIteratorType itVectorNS(nsVectorData->GetDataTree());
std::vector<double> accFirstOrderNS, accSecondOrderNS, minNS, maxNS;
minNS.resize(descList.size());
maxNS.resize(descList.size());
accFirstOrderNS.resize(descList.size());
accSecondOrderNS.resize(descList.size());
std::fill(accFirstOrderNS.begin(), accFirstOrderNS.end(), 0);
std::fill(accSecondOrderNS.begin(), accSecondOrderNS.end(), 0);
std::fill(minNS.begin(), minNS.end(), 1);
std::fill(maxNS.begin(), maxNS.end(), 0);
unsigned int accNbElemNS = 0;
for (itVectorNS.GoToBegin(); !itVectorNS.IsAtEnd(); ++itVectorNS)
{
if (!itVectorNS.Get()->IsRoot() && !itVectorNS.Get()->IsDocument() && !itVectorNS.Get()->IsFolder())
{
DataNodeType::Pointer currentGeometry = itVectorNS.Get();
for (unsigned int i = 0; i < descList.size(); ++i)
{
double desc = currentGeometry->GetFieldAsDouble(descList[i]);
accFirstOrderNS[i] += desc;
accSecondOrderNS[i] += desc * desc;
if (desc < minNS[i])
{
minNS[i] = desc;
}
if (desc > maxNS[i])
{
maxNS[i] = desc;
}
}
accNbElemNS++;
}
}
otbAppLogINFO( << "Descriptors Stats : ");
otbAppLogINFO( << "Positive Samples");
for (unsigned int i = 0; i < descList.size(); ++i)
{
double mean = accFirstOrderPS[i] / accNbElemPS;
double stddev = vcl_sqrt(accSecondOrderPS[i] / accNbElemPS - mean * mean);
otbAppLogINFO( << descList[i] << " : " << mean << " +/- " << stddev << " (min: " << minPS[i] << " max: " << maxPS[i] << ")"<< std::endl);
}
otbAppLogINFO( << "Negative Samples" << std::endl);
for (unsigned int i = 0; i < descList.size(); ++i)
{
double mean = accFirstOrderNS[i] / accNbElemNS;
double stddev = vcl_sqrt(accSecondOrderNS[i] / accNbElemNS - mean * mean);
otbAppLogINFO(<< descList[i] << " : " << mean << " +/- " << stddev << " (min: " << minNS[i] << " max: " << maxNS[i] << ")"<< std::endl);
}
OptimizerType::ParametersType initialPosition(4 * descList.size());
if (IsParameterEnabled("initmod"))
{
for (unsigned int i = 0; i < 4; i++)
{
for (unsigned int j = 0; j < descList.size(); j++)
{
initialPosition.SetElement(
i + 4 * j,
otb::FuzzyDescriptorsModelManager::GetDescriptor(descList[j].c_str(), descMod).second[i]);
}
}
}
else
{
for (unsigned int j = 0; j < descList.size(); j++)
{
initialPosition.SetElement((j * 4), std::min(minNS[j], maxPS[j]));
initialPosition.SetElement((j * 4) + 2, std::max(minNS[j], maxPS[j]));
initialPosition.SetElement(
(j * 4) + 1,
0.5
* (initialPosition.GetElement((j * 4)) + initialPosition.GetElement((j * 4) + 2)));
initialPosition.SetElement((j * 4) + 3, 0.95);
}
}
//Cost Function
//Format Hypothesis
LabelSetType Bhyp, Phyp;
int nbSet;
StringListType stringList = GetParameterStringList("belsup");
nbSet = stringList.size();
for (int i = 0; i < nbSet; i++)
{
std::string str = stringList[i];
Bhyp.insert(str);
}
m_CostFunction->SetBeliefHypothesis(Bhyp);
stringList = GetParameterStringList("plasup");
nbSet = stringList.size();
for (int i = 0; i < nbSet; i++)
{
std::string str = stringList[i];
Phyp.insert(str);
}
m_CostFunction->SetPlausibilityHypothesis(Phyp);
m_CostFunction->SetWeight(GetParameterFloat("wgt"));
m_CostFunction->SetCriterionFormula(GetParameterString("cri"));
m_CostFunction->SetGTVectorData(psVectorData);
m_CostFunction->SetNSVectorData(nsVectorData);
//Optimizer
m_Optimizer->SetCostFunction(m_CostFunction);
m_Optimizer->SetMaximumNumberOfIterations(GetParameterInt("maxnbit"));
OptimizerType::ParametersType simplexDelta(m_CostFunction->GetNumberOfParameters());
simplexDelta.Fill(0.1);
m_Optimizer->AutomaticInitialSimplexOff();
m_Optimizer->SetInitialSimplexDelta(simplexDelta);
m_Optimizer->SetInitialPosition(initialPosition);
// Create the Command observer and register it with the optimizer.
CommandIterationUpdate::Pointer observer = CommandIterationUpdate::New();
if (IsParameterEnabled("optobs"))
{
m_Optimizer->AddObserver(itk::IterationEvent(), observer);
}
try
{
// do the optimization
m_Optimizer->StartOptimization();
}
catch (itk::ExceptionObject& err)
{
// An error has occurred in the optimization.
// Update the parameters
otbAppLogFATAL("ERROR: Exception Catched!" << std::endl);
otbAppLogFATAL(<< err.GetDescription() << std::endl);
const unsigned int numberOfIterations = m_Optimizer->GetOptimizer()->get_num_evaluations();
otbAppLogFATAL("numberOfIterations : " << numberOfIterations << std::endl);
otbAppLogFATAL("Results : " << m_Optimizer->GetCurrentPosition() << std::endl);
}
// get the results
const unsigned int numberOfIterations = m_Optimizer->GetOptimizer()->get_num_evaluations();
otbAppLogFATAL("numberOfIterations : " << numberOfIterations << std::endl);
otbAppLogFATAL("Results : " << m_Optimizer->GetCurrentPosition() << std::endl);
for (unsigned int i = 0; i < descList.size(); i++)
{
otb::FuzzyDescriptorsModelManager::ParameterType tmpParams;
for (unsigned int j = 0; j < 4; j++)
{
tmpParams.push_back(m_Optimizer->GetCurrentPosition()[(i * 4) + j]);
}
otb::FuzzyDescriptorsModelManager::AddDescriptor(descList[i], tmpParams, m_Model);
}
otb::FuzzyDescriptorsModelManager::Save(GetParameterString("out"), m_Model);
};
CostFunctionType::Pointer m_CostFunction;
OptimizerType::Pointer m_Optimizer;
otb::FuzzyDescriptorsModelManager::DescriptorsModelType m_Model;
};
}
}
OTB_APPLICATION_EXPORT(otb::Wrapper::DSFuzzyModelEstimation)
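// Usage sketch (not part of the source; assembled from the doc example values above,
// assuming the standard otbcli_<AppName> command-line wrapper):
//   otbcli_DSFuzzyModelEstimation -psin cdbTvComputePolylineFeatureFromImage_LI_NOBUIL_gt.shp
//     -nsin cdbTvComputePolylineFeatureFromImage_LI_NOBUIL_wr.shp -belsup "ROADSA"
//     -plasup "NONDVI" "ROADSA" "NOBUIL" -initmod Dempster-Shafer/DSFuzzyModel_Init.xml
//     -maxnbit 4 -optobs true -out DSFuzzyModelEstimation.xml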
/*=========================================================================
Program: ORFEO Toolbox
Language: C++
Date: $Date$
Version: $Revision$
Copyright (c) Centre National d'Etudes Spatiales. All rights reserved.
See OTBCopyright.txt for details.
This software is distributed WITHOUT ANY WARRANTY; without even
the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
PURPOSE. See the above copyright notices for more information.
=========================================================================*/
#include "otbVectorDataToDSValidatedVectorDataFilter.h"
#include "otbWrapperApplication.h"
#include "otbWrapperApplicationFactory.h"
#include "otbWrapperStringListParameter.h"
#include "otbVectorData.h"
#include "otbFuzzyDescriptorsModelManager.h"
namespace otb
{
namespace Wrapper
{
class VectorDataDSValidation: public Application
{
public:
/** Standard class typedefs. */
typedef VectorDataDSValidation Self;
typedef Application Superclass;
typedef itk::SmartPointer<Self> Pointer;
typedef itk::SmartPointer<const Self> ConstPointer;
typedef double PrecisionType;
typedef otb::VectorData<PrecisionType> VectorDataType;
typedef otb::VectorDataToDSValidatedVectorDataFilter<VectorDataType, PrecisionType>
VectorDataValidationFilterType;
typedef VectorDataValidationFilterType::LabelSetType
LabelSetType;
typedef FuzzyDescriptorsModelManager::DescriptorsModelType DescriptorsModelType;
typedef otb::Wrapper::StringListParameter::StringListType StringListType;
/** Standard macro */
itkNewMacro(Self);
itkTypeMacro(VectorDataDSValidation, otb::Application);
private:
void DoInit()
{
SetName("VectorDataDSValidation");
SetDescription("Vector data validation based on the fusion of features using Dempster-Shafer evidence theory framework.");
SetDocName("Vector Data validation");
SetDocLongDescription("This application validates or unvalidate the studied samples using the Dempster-Shafer theory.");
SetDocLimitations("None.");
SetDocAuthors("OTB-Team");
SetDocSeeAlso("http://en.wikipedia.org/wiki/Dempster-Shafer_theory");
AddDocTag(Tags::FeatureExtraction);
AddParameter(ParameterType_InputVectorData, "in", "Input Vector Data");
SetParameterDescription("in", "Input vector data to validate");
AddParameter(ParameterType_InputFilename, "descmod", "Descriptors model filename");
SetParameterDescription("descmod", "Fuzzy descriptors model (xml file)");
AddParameter(ParameterType_StringList, "belsup", "Belief Support");
SetParameterDescription("belsup", "Dempster Shafer study hypothesis to compute belief");
AddParameter(ParameterType_StringList, "plasup", "Plausibility Support");
SetParameterDescription("plasup", "Dempster Shafer study hypothesis to compute plausibility");
AddParameter(ParameterType_String, "cri", "Criterion");
SetParameterDescription("cri", "Dempster Shafer criterion (by default (belief+plausibility)/2)");
MandatoryOff("cri");
SetParameterString("cri", "((Belief + Plausibility)/2.)");
AddParameter(ParameterType_Float, "thd", "Criterion threshold");
SetParameterDescription("thd", "Criterion threshold (default 0.5)");
MandatoryOff("thd");
SetParameterFloat("thd", 0.5);
AddParameter(ParameterType_OutputVectorData, "out", "Output Vector Data");
SetParameterDescription("out", "Output VectorData containing only the validated samples");
// Doc example parameter settings
SetDocExampleParameterValue("in", "cdbTvComputePolylineFeatureFromImage_LI_NOBUIL_gt.shp");
SetDocExampleParameterValue("belsup", "cdbTvComputePolylineFeatureFromImage_LI_NOBUIL_gt.shp");
SetDocExampleParameterValue("descmod", "DSFuzzyModel.xml");
SetDocExampleParameterValue("out", "VectorDataDSValidation.shp");
}
void DoUpdateParameters()
{
// Nothing to do here : all parameters are independent
// .. //
}
void DoExecute()
{
//Read the vector data
VectorDataType::Pointer inVectorData = GetParameterVectorData("in");
inVectorData->Update();
// Load the descriptors model
std::string descModFile = GetParameterString("descmod");
DescriptorsModelType descMod = FuzzyDescriptorsModelManager::Read(descModFile.c_str());
LabelSetType Bhyp, Phyp;
int nbSet;
StringListType stringList = GetParameterStringList("belsup");
nbSet = stringList.size();
for (int i = 0; i < nbSet; i++)
{
std::string str = stringList[i];
Bhyp.insert(str);
}
stringList = GetParameterStringList("plasup");
nbSet = stringList.size();
for (int i = 0; i < nbSet; i++)
{
std::string str = stringList[i];
Phyp.insert(str);
}
// Process
m_ValidationFilter = VectorDataValidationFilterType::New();
m_ValidationFilter->SetInput(inVectorData);
m_ValidationFilter->SetDescriptorModels(descMod);
m_ValidationFilter->SetBeliefHypothesis(Bhyp);
m_ValidationFilter->SetPlausibilityHypothesis(Phyp);
m_ValidationFilter->SetCriterionFormula(GetParameterString("cri"));
m_ValidationFilter->SetCriterionThreshold(GetParameterFloat("thd"));
// Set the output image
SetParameterOutputVectorData("out", m_ValidationFilter->GetOutput());
};
VectorDataValidationFilterType::Pointer m_ValidationFilter;
};
}
}
OTB_APPLICATION_EXPORT(otb::Wrapper::VectorDataDSValidation);
OTB_CREATE_APPLICATION(NAME MultivariateAlterationDetector
SOURCES otbMultivariateAlterationDetector.cxx
LINK_LIBRARIES OTBIO;OTBCommon;OTBBasicFilters)